test:Add database migration service with normalization and masking functions
- Implemented `normalizeMigrationInput` to standardize database migration input for sqlite, mysql, and postgres. - Added `maskConnectionString` to obscure sensitive credentials in connection strings. - Created comprehensive tests for migration input normalization and connection string masking. - Established database schema creation logic for multiple dialects (sqlite, mysql, postgres). - Developed functions for managing database transactions, including begin, commit, and rollback. - Implemented data insertion logic from backup snapshots into the target database.
This commit is contained in:
Generated
+271
-2
@@ -21,8 +21,10 @@
|
||||
"fastify": "^5.7.4",
|
||||
"minimatch": "^10.2.4",
|
||||
"minimist": "^1.2.8",
|
||||
"mysql2": "^3.15.3",
|
||||
"node-cron": "^3.0.3",
|
||||
"nodemailer": "^8.0.1",
|
||||
"pg": "^8.16.3",
|
||||
"undici": "^6.20.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -31,6 +33,7 @@
|
||||
"@types/node": "^22.10.1",
|
||||
"@types/node-cron": "^3.0.11",
|
||||
"@types/nodemailer": "^7.0.11",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@types/react": "^18.3.12",
|
||||
"@types/react-dom": "^18.3.1",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
@@ -3909,7 +3912,6 @@
|
||||
"version": "22.19.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.11.tgz",
|
||||
"integrity": "sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"undici-types": "~6.21.0"
|
||||
@@ -3932,6 +3934,18 @@
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/pg": {
|
||||
"version": "8.18.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.18.0.tgz",
|
||||
"integrity": "sha512-gT+oueVQkqnj6ajGJXblFR4iavIXWsGAFCk3dP4Kki5+a9R4NMt0JARdk6s8cUKcfUoqP5dAtDSLU8xYUTFV+Q==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"pg-protocol": "*",
|
||||
"pg-types": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/prop-types": {
|
||||
"version": "15.7.15",
|
||||
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
|
||||
@@ -4686,6 +4700,15 @@
|
||||
"fastq": "^1.17.1"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-ssl-profiles": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/aws-ssl-profiles/-/aws-ssl-profiles-1.1.2.tgz",
|
||||
"integrity": "sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/balanced-match": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
|
||||
@@ -5929,6 +5952,15 @@
|
||||
"robust-predicates": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/denque": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
|
||||
"integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/depd": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||
@@ -6502,6 +6534,15 @@
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/generate-function": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",
|
||||
"integrity": "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-property": "^1.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/gensync": {
|
||||
"version": "1.0.0-beta.2",
|
||||
"resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
|
||||
@@ -6802,6 +6843,12 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-property": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz",
|
||||
"integrity": "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/is-what": {
|
||||
"version": "5.5.0",
|
||||
"resolved": "https://registry.npmjs.org/is-what/-/is-what-5.5.0.tgz",
|
||||
@@ -7257,6 +7304,12 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/long": {
|
||||
"version": "5.3.2",
|
||||
"resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz",
|
||||
"integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==",
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/loose-envify": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
|
||||
@@ -7292,6 +7345,21 @@
|
||||
"yallist": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/lru.min": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/lru.min/-/lru.min-1.1.4.tgz",
|
||||
"integrity": "sha512-DqC6n3QQ77zdFpCMASA1a3Jlb64Hv2N2DciFGkO/4L9+q/IpIAuRlKOvCXabtRW6cQf8usbmM6BE/TOPysCdIA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"bun": ">=1.0.0",
|
||||
"deno": ">=1.30.0",
|
||||
"node": ">=8.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/wellwelwel"
|
||||
}
|
||||
},
|
||||
"node_modules/magic-string": {
|
||||
"version": "0.30.21",
|
||||
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
|
||||
@@ -7585,6 +7653,56 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/mysql2": {
|
||||
"version": "3.18.2",
|
||||
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.18.2.tgz",
|
||||
"integrity": "sha512-UfEShBFAZZEAKjySnTUuE7BgqkYT4mx+RjoJ5aqtmwSSvNcJ/QxQPXz/y3jSxNiVRedPfgccmuBtiPCSiEEytw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"aws-ssl-profiles": "^1.1.2",
|
||||
"denque": "^2.1.0",
|
||||
"generate-function": "^2.3.1",
|
||||
"iconv-lite": "^0.7.2",
|
||||
"long": "^5.3.2",
|
||||
"lru.min": "^1.1.4",
|
||||
"named-placeholders": "^1.1.6",
|
||||
"sql-escaper": "^1.3.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/mysql2/node_modules/iconv-lite": {
|
||||
"version": "0.7.2",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz",
|
||||
"integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safer-buffer": ">= 2.1.2 < 3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/express"
|
||||
}
|
||||
},
|
||||
"node_modules/named-placeholders": {
|
||||
"version": "1.1.6",
|
||||
"resolved": "https://registry.npmjs.org/named-placeholders/-/named-placeholders-1.1.6.tgz",
|
||||
"integrity": "sha512-Tz09sEL2EEuv5fFowm419c1+a/jSMiBjI9gHxVLrVdbUkkNUUfjsVYs9pVZu5oCon/kmRh9TfLEObFtkVxmY0w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"lru.min": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/nanoid": {
|
||||
"version": "3.3.11",
|
||||
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
|
||||
@@ -7807,6 +7925,95 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pg": {
|
||||
"version": "8.20.0",
|
||||
"resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz",
|
||||
"integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"pg-connection-string": "^2.12.0",
|
||||
"pg-pool": "^3.13.0",
|
||||
"pg-protocol": "^1.13.0",
|
||||
"pg-types": "2.2.0",
|
||||
"pgpass": "1.0.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 16.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"pg-cloudflare": "^1.3.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"pg-native": ">=3.0.1"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"pg-native": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/pg-cloudflare": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz",
|
||||
"integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==",
|
||||
"license": "MIT",
|
||||
"optional": true
|
||||
},
|
||||
"node_modules/pg-connection-string": {
|
||||
"version": "2.12.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.12.0.tgz",
|
||||
"integrity": "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pg-int8": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
|
||||
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": ">=4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pg-pool": {
|
||||
"version": "3.13.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.13.0.tgz",
|
||||
"integrity": "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"pg": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pg-protocol": {
|
||||
"version": "1.13.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.13.0.tgz",
|
||||
"integrity": "sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pg-types": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
|
||||
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"pg-int8": "1.0.1",
|
||||
"postgres-array": "~2.0.0",
|
||||
"postgres-bytea": "~1.0.0",
|
||||
"postgres-date": "~1.0.4",
|
||||
"postgres-interval": "^1.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/pgpass": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
|
||||
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"split2": "^4.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/picocolors": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||
@@ -7939,6 +8146,45 @@
|
||||
"node": "^10 || ^12 || >=14"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-array": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
|
||||
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-bytea": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz",
|
||||
"integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-date": {
|
||||
"version": "1.0.7",
|
||||
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
|
||||
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/postgres-interval": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
|
||||
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"xtend": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/preact": {
|
||||
"version": "10.28.4",
|
||||
"resolved": "https://registry.npmjs.org/preact/-/preact-10.28.4.tgz",
|
||||
@@ -8742,6 +8988,21 @@
|
||||
"node": ">= 10.x"
|
||||
}
|
||||
},
|
||||
"node_modules/sql-escaper": {
|
||||
"version": "1.3.3",
|
||||
"resolved": "https://registry.npmjs.org/sql-escaper/-/sql-escaper-1.3.3.tgz",
|
||||
"integrity": "sha512-BsTCV265VpTp8tm1wyIm1xqQCS+Q9NHx2Sr+WcnUrgLrQ6yiDIvHYJV5gHxsj1lMBy2zm5twLaZao8Jd+S8JJw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"bun": ">=1.0.0",
|
||||
"deno": ">=2.0.0",
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/mysqljs/sql-escaper?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/stackback": {
|
||||
"version": "0.0.2",
|
||||
"resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz",
|
||||
@@ -9562,7 +9823,6 @@
|
||||
"version": "6.21.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
|
||||
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/unist-util-is": {
|
||||
@@ -12015,6 +12275,15 @@
|
||||
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/xtend": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
|
||||
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/y18n": {
|
||||
"version": "5.0.8",
|
||||
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
|
||||
|
||||
@@ -32,6 +32,10 @@
|
||||
"start": "node dist/server/index.js",
|
||||
"db:generate": "drizzle-kit generate",
|
||||
"db:migrate": "tsx src/server/db/migrate.ts",
|
||||
"smoke:db": "tsx scripts/dev/db-smoke.ts",
|
||||
"smoke:db:sqlite": "tsx scripts/dev/db-smoke.ts --db-type sqlite",
|
||||
"smoke:db:mysql": "tsx scripts/dev/db-smoke.ts --db-type mysql",
|
||||
"smoke:db:postgres": "tsx scripts/dev/db-smoke.ts --db-type postgres",
|
||||
"test": "vitest run --root .",
|
||||
"test:watch": "vitest --root ."
|
||||
},
|
||||
@@ -48,8 +52,10 @@
|
||||
"fastify": "^5.7.4",
|
||||
"minimatch": "^10.2.4",
|
||||
"minimist": "^1.2.8",
|
||||
"mysql2": "^3.15.3",
|
||||
"node-cron": "^3.0.3",
|
||||
"nodemailer": "^8.0.1",
|
||||
"pg": "^8.16.3",
|
||||
"undici": "^6.20.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -58,6 +64,7 @@
|
||||
"@types/node": "^22.10.1",
|
||||
"@types/node-cron": "^3.0.11",
|
||||
"@types/nodemailer": "^7.0.11",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@types/react": "^18.3.12",
|
||||
"@types/react-dom": "^18.3.1",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
|
||||
@@ -0,0 +1,250 @@
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import ts from 'typescript';
|
||||
|
||||
const mode = process.argv.includes('--write') ? 'write' : 'dry-run';
|
||||
const root = process.cwd();
|
||||
const targetDir = path.join(root, 'src', 'server');
|
||||
|
||||
const METHOD_NAMES = new Set(['all', 'get', 'run']);
|
||||
const FUNCTION_KINDS = new Set([
|
||||
ts.SyntaxKind.FunctionDeclaration,
|
||||
ts.SyntaxKind.FunctionExpression,
|
||||
ts.SyntaxKind.ArrowFunction,
|
||||
ts.SyntaxKind.MethodDeclaration,
|
||||
ts.SyntaxKind.GetAccessor,
|
||||
ts.SyntaxKind.SetAccessor,
|
||||
]);
|
||||
|
||||
function listTsFiles(dir) {
|
||||
const out = [];
|
||||
const stack = [dir];
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop();
|
||||
const entries = fs.readdirSync(current, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const full = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
stack.push(full);
|
||||
continue;
|
||||
}
|
||||
if (entry.isFile() && full.endsWith('.ts')) {
|
||||
out.push(full);
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function isFunctionLike(node) {
|
||||
return FUNCTION_KINDS.has(node.kind);
|
||||
}
|
||||
|
||||
function hasAsyncModifier(node) {
|
||||
return !!node.modifiers?.some((mod) => mod.kind === ts.SyntaxKind.AsyncKeyword);
|
||||
}
|
||||
|
||||
function getFunctionTokenPos(node, sourceFile) {
|
||||
if (node.kind === ts.SyntaxKind.ArrowFunction) {
|
||||
return node.getStart(sourceFile);
|
||||
}
|
||||
if (node.kind === ts.SyntaxKind.MethodDeclaration || node.kind === ts.SyntaxKind.GetAccessor || node.kind === ts.SyntaxKind.SetAccessor) {
|
||||
if (!node.name) return node.getStart(sourceFile);
|
||||
return node.name.getStart(sourceFile);
|
||||
}
|
||||
|
||||
let pos = node.getStart(sourceFile);
|
||||
const children = node.getChildren(sourceFile);
|
||||
for (const child of children) {
|
||||
if (child.kind === ts.SyntaxKind.FunctionKeyword) {
|
||||
pos = child.getStart(sourceFile);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return pos;
|
||||
}
|
||||
|
||||
function findNearestFunction(node) {
|
||||
let current = node.parent;
|
||||
while (current) {
|
||||
if (isFunctionLike(current)) return current;
|
||||
current = current.parent;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function unwrapExpression(node) {
|
||||
if (!node) return node;
|
||||
if (ts.isParenthesizedExpression(node)) return unwrapExpression(node.expression);
|
||||
if (ts.isAsExpression(node) || ts.isTypeAssertionExpression(node) || ts.isSatisfiesExpression?.(node)) {
|
||||
return unwrapExpression(node.expression);
|
||||
}
|
||||
return node;
|
||||
}
|
||||
|
||||
function isDbIdentifier(node, queryVars) {
|
||||
return ts.isIdentifier(node) && (node.text === 'db' || node.text === 'tx' || queryVars.has(node.text));
|
||||
}
|
||||
|
||||
function dependsOnDb(node, queryVars) {
|
||||
const expr = unwrapExpression(node);
|
||||
if (!expr) return false;
|
||||
|
||||
if (isDbIdentifier(expr, queryVars)) return true;
|
||||
|
||||
if (ts.isPropertyAccessExpression(expr) || ts.isElementAccessExpression(expr)) {
|
||||
return dependsOnDb(expr.expression, queryVars);
|
||||
}
|
||||
|
||||
if (ts.isCallExpression(expr) || ts.isNewExpression(expr)) {
|
||||
if (dependsOnDb(expr.expression, queryVars)) return true;
|
||||
return expr.arguments?.some((arg) => dependsOnDb(arg, queryVars)) ?? false;
|
||||
}
|
||||
|
||||
if (ts.isConditionalExpression(expr)) {
|
||||
return dependsOnDb(expr.condition, queryVars)
|
||||
|| dependsOnDb(expr.whenTrue, queryVars)
|
||||
|| dependsOnDb(expr.whenFalse, queryVars);
|
||||
}
|
||||
|
||||
if (ts.isBinaryExpression(expr)) {
|
||||
return dependsOnDb(expr.left, queryVars) || dependsOnDb(expr.right, queryVars);
|
||||
}
|
||||
|
||||
if (ts.isArrayLiteralExpression(expr)) {
|
||||
return expr.elements.some((el) => dependsOnDb(el, queryVars));
|
||||
}
|
||||
|
||||
if (ts.isObjectLiteralExpression(expr)) {
|
||||
return expr.properties.some((prop) => {
|
||||
if (ts.isPropertyAssignment(prop)) return dependsOnDb(prop.initializer, queryVars);
|
||||
if (ts.isShorthandPropertyAssignment(prop)) return queryVars.has(prop.name.text);
|
||||
if (ts.isSpreadAssignment(prop)) return dependsOnDb(prop.expression, queryVars);
|
||||
return false;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function collectQueryVars(sourceFile) {
|
||||
const queryVars = new Set();
|
||||
let changed = true;
|
||||
|
||||
while (changed) {
|
||||
changed = false;
|
||||
function visit(node) {
|
||||
if (ts.isVariableDeclaration(node) && ts.isIdentifier(node.name) && node.initializer) {
|
||||
if (!queryVars.has(node.name.text) && dependsOnDb(node.initializer, queryVars)) {
|
||||
queryVars.add(node.name.text);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
ts.forEachChild(node, visit);
|
||||
}
|
||||
visit(sourceFile);
|
||||
}
|
||||
|
||||
return queryVars;
|
||||
}
|
||||
|
||||
function shouldAwaitDbCall(callNode, queryVars) {
|
||||
if (!ts.isPropertyAccessExpression(callNode.expression)) return false;
|
||||
const method = callNode.expression.name.text;
|
||||
if (!METHOD_NAMES.has(method)) return false;
|
||||
if (ts.isAwaitExpression(callNode.parent)) return false;
|
||||
|
||||
const targetExpr = callNode.expression.expression;
|
||||
return dependsOnDb(targetExpr, queryVars);
|
||||
}
|
||||
|
||||
function buildEditsForFile(filePath) {
|
||||
const text = fs.readFileSync(filePath, 'utf8');
|
||||
const sourceFile = ts.createSourceFile(filePath, text, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS);
|
||||
const queryVars = collectQueryVars(sourceFile);
|
||||
|
||||
const awaitedCalls = [];
|
||||
const asyncFunctions = new Map();
|
||||
|
||||
function visit(node) {
|
||||
if (ts.isCallExpression(node) && shouldAwaitDbCall(node, queryVars)) {
|
||||
awaitedCalls.push(node);
|
||||
const fn = findNearestFunction(node);
|
||||
if (fn && !hasAsyncModifier(fn)) {
|
||||
asyncFunctions.set(fn.pos + ':' + fn.end, fn);
|
||||
}
|
||||
}
|
||||
ts.forEachChild(node, visit);
|
||||
}
|
||||
visit(sourceFile);
|
||||
|
||||
const edits = [];
|
||||
for (const callNode of awaitedCalls) {
|
||||
const parent = callNode.parent;
|
||||
const callStart = callNode.getStart(sourceFile);
|
||||
const callEnd = callNode.getEnd();
|
||||
const original = text.slice(callStart, callEnd);
|
||||
const needsParens = (
|
||||
(ts.isPropertyAccessExpression(parent) || ts.isElementAccessExpression(parent))
|
||||
&& parent.expression === callNode
|
||||
);
|
||||
const replacement = needsParens ? `(await ${original})` : `await ${original}`;
|
||||
edits.push({ start: callStart, end: callEnd, text: replacement, type: 'await' });
|
||||
}
|
||||
|
||||
for (const fn of asyncFunctions.values()) {
|
||||
const insertPos = getFunctionTokenPos(fn, sourceFile);
|
||||
edits.push({ start: insertPos, end: insertPos, text: 'async ', type: 'async' });
|
||||
}
|
||||
|
||||
// De-duplicate same edit span/text
|
||||
const uniq = new Map();
|
||||
for (const edit of edits) {
|
||||
const key = `${edit.start}:${edit.end}:${edit.text}`;
|
||||
if (!uniq.has(key)) uniq.set(key, edit);
|
||||
}
|
||||
|
||||
const finalEdits = [...uniq.values()].sort((a, b) => b.start - a.start || b.end - a.end);
|
||||
if (finalEdits.length === 0) {
|
||||
return { changed: false, text, nextText: text, awaitCount: 0, asyncCount: 0 };
|
||||
}
|
||||
|
||||
let nextText = text;
|
||||
for (const edit of finalEdits) {
|
||||
nextText = nextText.slice(0, edit.start) + edit.text + nextText.slice(edit.end);
|
||||
}
|
||||
|
||||
return {
|
||||
changed: nextText !== text,
|
||||
text,
|
||||
nextText,
|
||||
awaitCount: finalEdits.filter((e) => e.type === 'await').length,
|
||||
asyncCount: finalEdits.filter((e) => e.type === 'async').length,
|
||||
};
|
||||
}
|
||||
|
||||
const files = listTsFiles(targetDir);
|
||||
let changedFiles = 0;
|
||||
let totalAwaitEdits = 0;
|
||||
let totalAsyncEdits = 0;
|
||||
const touched = [];
|
||||
|
||||
for (const file of files) {
|
||||
const result = buildEditsForFile(file);
|
||||
if (!result.changed) continue;
|
||||
changedFiles += 1;
|
||||
totalAwaitEdits += result.awaitCount;
|
||||
totalAsyncEdits += result.asyncCount;
|
||||
touched.push(path.relative(root, file));
|
||||
if (mode === 'write') {
|
||||
fs.writeFileSync(file, result.nextText, 'utf8');
|
||||
}
|
||||
}
|
||||
|
||||
console.log(JSON.stringify({
|
||||
mode,
|
||||
changedFiles,
|
||||
totalAwaitEdits,
|
||||
totalAsyncEdits,
|
||||
sampleFiles: touched.slice(0, 20),
|
||||
}, null, 2));
|
||||
@@ -0,0 +1,95 @@
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import ts from 'typescript';
|
||||
|
||||
const mode = process.argv.includes('--write') ? 'write' : 'dry-run';
|
||||
const root = process.cwd();
|
||||
const targetDir = path.join(root, 'src', 'server');
|
||||
|
||||
const FUNCTION_KINDS = new Set([
|
||||
ts.SyntaxKind.FunctionDeclaration,
|
||||
ts.SyntaxKind.FunctionExpression,
|
||||
ts.SyntaxKind.ArrowFunction,
|
||||
ts.SyntaxKind.MethodDeclaration,
|
||||
]);
|
||||
|
||||
function listTsFiles(dir) {
|
||||
const out = [];
|
||||
const stack = [dir];
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop();
|
||||
const entries = fs.readdirSync(current, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const full = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
stack.push(full);
|
||||
continue;
|
||||
}
|
||||
if (entry.isFile() && full.endsWith('.ts')) {
|
||||
out.push(full);
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function hasAsyncModifier(node) {
|
||||
return !!node.modifiers?.some((mod) => mod.kind === ts.SyntaxKind.AsyncKeyword);
|
||||
}
|
||||
|
||||
function isPromiseTypeNode(typeNode) {
|
||||
if (!typeNode) return false;
|
||||
if (!ts.isTypeReferenceNode(typeNode)) return false;
|
||||
const typeName = typeNode.typeName;
|
||||
return ts.isIdentifier(typeName) && typeName.text === 'Promise';
|
||||
}
|
||||
|
||||
function applyEdits(text, edits) {
|
||||
const sorted = [...edits].sort((a, b) => b.start - a.start || b.end - a.end);
|
||||
let next = text;
|
||||
for (const edit of sorted) {
|
||||
next = next.slice(0, edit.start) + edit.text + next.slice(edit.end);
|
||||
}
|
||||
return next;
|
||||
}
|
||||
|
||||
const files = listTsFiles(targetDir);
|
||||
let changedFiles = 0;
|
||||
let editsCount = 0;
|
||||
const touched = [];
|
||||
|
||||
for (const filePath of files) {
|
||||
const text = fs.readFileSync(filePath, 'utf8');
|
||||
const sourceFile = ts.createSourceFile(filePath, text, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS);
|
||||
const edits = [];
|
||||
|
||||
function visit(node) {
|
||||
if (FUNCTION_KINDS.has(node.kind) && hasAsyncModifier(node) && node.type && !isPromiseTypeNode(node.type)) {
|
||||
const start = node.type.getStart(sourceFile);
|
||||
const end = node.type.getEnd();
|
||||
const original = text.slice(start, end);
|
||||
edits.push({ start, end, text: `Promise<${original}>` });
|
||||
}
|
||||
ts.forEachChild(node, visit);
|
||||
}
|
||||
|
||||
visit(sourceFile);
|
||||
if (edits.length === 0) continue;
|
||||
|
||||
const next = applyEdits(text, edits);
|
||||
if (next === text) continue;
|
||||
|
||||
changedFiles += 1;
|
||||
editsCount += edits.length;
|
||||
touched.push(path.relative(root, filePath));
|
||||
if (mode === 'write') {
|
||||
fs.writeFileSync(filePath, next, 'utf8');
|
||||
}
|
||||
}
|
||||
|
||||
console.log(JSON.stringify({
|
||||
mode,
|
||||
changedFiles,
|
||||
editsCount,
|
||||
sampleFiles: touched.slice(0, 20),
|
||||
}, null, 2));
|
||||
@@ -0,0 +1,111 @@
|
||||
import { sql } from 'drizzle-orm';
|
||||
|
||||
type DbType = 'sqlite' | 'mysql' | 'postgres';
|
||||
|
||||
type CliOptions = {
|
||||
dbType?: string;
|
||||
dbUrl?: string;
|
||||
};
|
||||
|
||||
function parseArgs(argv: string[]): CliOptions {
|
||||
const options: CliOptions = {};
|
||||
for (let i = 0; i < argv.length; i += 1) {
|
||||
const arg = argv[i] || '';
|
||||
if (!arg.startsWith('--')) continue;
|
||||
|
||||
const [rawKey, rawValue] = arg.slice(2).split('=', 2);
|
||||
const key = rawKey.trim();
|
||||
const value = rawValue !== undefined ? rawValue : (argv[i + 1]?.startsWith('--') ? '' : argv[++i] || '');
|
||||
|
||||
if (key === 'db-type') options.dbType = value.trim();
|
||||
if (key === 'db-url') options.dbUrl = value.trim();
|
||||
}
|
||||
return options;
|
||||
}
|
||||
|
||||
function normalizeDbType(input: string | undefined): DbType {
|
||||
const normalized = (input || '').trim().toLowerCase();
|
||||
if (normalized === 'mysql') return 'mysql';
|
||||
if (normalized === 'postgres' || normalized === 'postgresql') return 'postgres';
|
||||
return 'sqlite';
|
||||
}
|
||||
|
||||
function normalizeFirstScalar(value: unknown): number | string | null {
|
||||
if (value === undefined || value === null) return null;
|
||||
if (typeof value === 'number' || typeof value === 'string') return value;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
if (value.length === 0) return null;
|
||||
return normalizeFirstScalar(value[0]);
|
||||
}
|
||||
|
||||
if (typeof value === 'object') {
|
||||
const record = value as Record<string, unknown>;
|
||||
if (Array.isArray(record.rows)) {
|
||||
return normalizeFirstScalar(record.rows);
|
||||
}
|
||||
|
||||
const scalar = Object.values(record)[0];
|
||||
return normalizeFirstScalar(scalar);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const options = parseArgs(process.argv.slice(2));
|
||||
if (options.dbType) process.env.DB_TYPE = options.dbType;
|
||||
if (options.dbUrl !== undefined) process.env.DB_URL = options.dbUrl;
|
||||
|
||||
const { config } = await import('../../src/server/config.js');
|
||||
const { db, runtimeDbDialect, closeDbConnections } = await import('../../src/server/db/index.js');
|
||||
|
||||
try {
|
||||
const dbType = normalizeDbType(config.dbType || options.dbType);
|
||||
const dbUrl = (config.dbUrl || '').trim();
|
||||
|
||||
if ((dbType === 'mysql' || dbType === 'postgres') && !dbUrl) {
|
||||
throw new Error(`DB_URL is required for DB_TYPE=${dbType}`);
|
||||
}
|
||||
|
||||
console.log(`[db-smoke] start dbType=${dbType} runtime=${runtimeDbDialect}`);
|
||||
if (dbUrl) {
|
||||
console.log(`[db-smoke] dbUrl=${dbUrl}`);
|
||||
} else {
|
||||
console.log('[db-smoke] dbUrl=(empty, using default sqlite path)');
|
||||
}
|
||||
|
||||
const pingRows = await db.execute(sql`select 1 as ok`);
|
||||
const pingScalar = normalizeFirstScalar(pingRows);
|
||||
if (Number(pingScalar) !== 1) {
|
||||
throw new Error(`unexpected ping result: ${JSON.stringify(pingRows)}`);
|
||||
}
|
||||
console.log('[db-smoke] ping ok');
|
||||
|
||||
const txRows = await db.transaction(async (tx: any) => tx.execute(sql`select 1 as ok`));
|
||||
const txScalar = normalizeFirstScalar(txRows);
|
||||
if (Number(txScalar) !== 1) {
|
||||
throw new Error(`unexpected transaction ping result: ${JSON.stringify(txRows)}`);
|
||||
}
|
||||
console.log('[db-smoke] transaction ok');
|
||||
|
||||
const versionRows = dbType === 'sqlite'
|
||||
? await db.execute(sql`select sqlite_version() as v`)
|
||||
: await db.execute(sql`select version() as v`);
|
||||
const version = normalizeFirstScalar(versionRows);
|
||||
if (typeof version !== 'string' || version.trim().length === 0) {
|
||||
throw new Error(`failed to read server version: ${JSON.stringify(versionRows)}`);
|
||||
}
|
||||
console.log(`[db-smoke] version=${version.slice(0, 120)}`);
|
||||
|
||||
console.log('[db-smoke] success');
|
||||
} finally {
|
||||
await closeDbConnections();
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error('[db-smoke] failed');
|
||||
console.error(error instanceof Error ? error.message : error);
|
||||
process.exit(1);
|
||||
});
|
||||
+12
-1
@@ -21,6 +21,15 @@ function parseCsvList(value: string | undefined): string[] {
|
||||
.filter((item) => item.length > 0);
|
||||
}
|
||||
|
||||
function parseDbType(value: string | undefined): 'sqlite' | 'mysql' | 'postgres' {
|
||||
const normalized = (value || 'sqlite').trim().toLowerCase();
|
||||
if (normalized === 'mysql') return 'mysql';
|
||||
if (normalized === 'postgres' || normalized === 'postgresql') return 'postgres';
|
||||
return 'sqlite';
|
||||
}
|
||||
|
||||
const dataDir = process.env.DATA_DIR || './data';
|
||||
|
||||
export const config = {
|
||||
authToken: process.env.AUTH_TOKEN || 'change-me-admin-token',
|
||||
proxyToken: process.env.PROXY_TOKEN || 'change-me-proxy-sk-token',
|
||||
@@ -47,7 +56,9 @@ export const config = {
|
||||
notifyCooldownSec: Math.max(0, Math.trunc(parseNumber(process.env.NOTIFY_COOLDOWN_SEC, 300))),
|
||||
adminIpAllowlist: parseCsvList(process.env.ADMIN_IP_ALLOWLIST),
|
||||
port: Math.trunc(parseNumber(process.env.PORT, 4000)),
|
||||
dataDir: process.env.DATA_DIR || './data',
|
||||
dataDir,
|
||||
dbType: parseDbType(process.env.DB_TYPE),
|
||||
dbUrl: (process.env.DB_URL || '').trim(),
|
||||
routingFallbackUnitCost: Math.max(1e-6, parseNumber(process.env.ROUTING_FALLBACK_UNIT_COST, 1)),
|
||||
tokenRouterCacheTtlMs: Math.max(100, Math.trunc(parseNumber(process.env.TOKEN_ROUTER_CACHE_TTL_MS, 1_500))),
|
||||
proxyLogRetentionDays: Math.max(0, Math.trunc(parseNumber(process.env.PROXY_LOG_RETENTION_DAYS, 30))),
|
||||
|
||||
+399
-15
@@ -1,29 +1,73 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import { drizzle } from 'drizzle-orm/better-sqlite3';
|
||||
import mysql from 'mysql2/promise';
|
||||
import pg from 'pg';
|
||||
import { drizzle as drizzleSqliteProxy } from 'drizzle-orm/sqlite-proxy';
|
||||
import { drizzle as drizzleMysqlProxy } from 'drizzle-orm/mysql-proxy';
|
||||
import { drizzle as drizzlePgProxy } from 'drizzle-orm/pg-proxy';
|
||||
import * as schema from './schema.js';
|
||||
import { config } from '../config.js';
|
||||
import { mkdirSync } from 'fs';
|
||||
import { dirname } from 'path';
|
||||
import { dirname, resolve } from 'path';
|
||||
|
||||
const dbPath = `${config.dataDir}/hub.db`;
|
||||
mkdirSync(dirname(dbPath), { recursive: true });
|
||||
// The three database dialects this module can run against.
type RuntimeDbDialect = 'sqlite' | 'mysql' | 'postgres';
// Statement-execution methods forwarded by drizzle's proxy drivers.
type SqlMethod = 'all' | 'get' | 'run' | 'values' | 'execute';
||||
|
||||
const sqlite = new Database(dbPath);
|
||||
sqlite.pragma('journal_mode = WAL');
|
||||
sqlite.pragma('foreign_keys = ON');
|
||||
// Tables whose primary key is an auto-incrementing numeric `id`. The pg
// proxy appends `returning id` to INSERTs on these so callers receive a
// sqlite-style lastInsertRowid.
const TABLES_WITH_NUMERIC_ID = new Set([
  'sites',
  'accounts',
  'account_tokens',
  'checkin_logs',
  'model_availability',
  'token_model_availability',
  'token_routes',
  'route_channels',
  'proxy_logs',
  'downstream_api_keys',
  'events',
]);

// Dialect chosen from config at module load; fixed for the process lifetime.
export const runtimeDbDialect: RuntimeDbDialect = config.dbType;

// Driver handles populated by the matching init*Db(); at most one is
// non-null at a time, and closeDbConnections() resets them.
let sqliteConnection: Database.Database | null = null;
let mysqlPool: mysql.Pool | null = null;
let pgPool: pg.Pool | null = null;
||||
|
||||
function resolveSqlitePath(): string {
|
||||
const raw = (config.dbUrl || '').trim();
|
||||
if (!raw) return resolve(`${config.dataDir}/hub.db`);
|
||||
if (raw === ':memory:') return raw;
|
||||
if (raw.startsWith('file://')) {
|
||||
const parsed = new URL(raw);
|
||||
return decodeURIComponent(parsed.pathname);
|
||||
}
|
||||
if (raw.startsWith('sqlite://')) {
|
||||
return resolve(raw.slice('sqlite://'.length).trim());
|
||||
}
|
||||
return resolve(raw);
|
||||
}
|
||||
|
||||
function requireSqliteConnection(): Database.Database {
|
||||
if (!sqliteConnection) {
|
||||
throw new Error('SQLite connection is not initialized');
|
||||
}
|
||||
return sqliteConnection;
|
||||
}
|
||||
|
||||
function tableExists(table: string): boolean {
|
||||
const sqlite = requireSqliteConnection();
|
||||
const row = sqlite.prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name = ? LIMIT 1")
|
||||
.get(table) as { name?: string } | undefined;
|
||||
return !!row?.name;
|
||||
}
|
||||
|
||||
/**
 * Check whether `column` exists on `table` via PRAGMA table_info.
 * NOTE(review): `table` is interpolated directly into the PRAGMA text
 * (PRAGMA takes no bind parameters) — callers must only pass trusted,
 * hard-coded table names; verify no user input reaches this.
 */
function tableColumnExists(table: string, column: string): boolean {
  const sqlite = requireSqliteConnection();
  const rows = sqlite.prepare(`PRAGMA table_info(${table})`).all() as Array<{ name?: string }>;
  return rows.some((row) => row.name === column);
}
|
||||
|
||||
function ensureTokenManagementSchema() {
|
||||
const sqlite = requireSqliteConnection();
|
||||
if (!tableExists('accounts') || !tableExists('route_channels')) {
|
||||
return;
|
||||
}
|
||||
@@ -93,6 +137,7 @@ function ensureTokenManagementSchema() {
|
||||
}
|
||||
|
||||
function ensureSiteStatusSchema() {
|
||||
const sqlite = requireSqliteConnection();
|
||||
if (!tableExists('sites')) {
|
||||
return;
|
||||
}
|
||||
@@ -119,6 +164,7 @@ function ensureSiteStatusSchema() {
|
||||
}
|
||||
|
||||
function ensureSiteProxySchema() {
|
||||
const sqlite = requireSqliteConnection();
|
||||
if (!tableExists('sites')) {
|
||||
return;
|
||||
}
|
||||
@@ -129,6 +175,7 @@ function ensureSiteProxySchema() {
|
||||
}
|
||||
|
||||
function ensureSiteExternalCheckinUrlSchema() {
|
||||
const sqlite = requireSqliteConnection();
|
||||
if (!tableExists('sites')) {
|
||||
return;
|
||||
}
|
||||
@@ -139,6 +186,7 @@ function ensureSiteExternalCheckinUrlSchema() {
|
||||
}
|
||||
|
||||
function ensureSiteGlobalWeightSchema() {
|
||||
const sqlite = requireSqliteConnection();
|
||||
if (!tableExists('sites')) {
|
||||
return;
|
||||
}
|
||||
@@ -156,6 +204,7 @@ function ensureSiteGlobalWeightSchema() {
|
||||
}
|
||||
|
||||
function ensureRouteGroupingSchema() {
|
||||
const sqlite = requireSqliteConnection();
|
||||
if (!tableExists('token_routes') || !tableExists('route_channels')) {
|
||||
return;
|
||||
}
|
||||
@@ -174,6 +223,7 @@ function ensureRouteGroupingSchema() {
|
||||
}
|
||||
|
||||
function ensureDownstreamApiKeySchema() {
|
||||
const sqlite = requireSqliteConnection();
|
||||
sqlite.exec(`
|
||||
CREATE TABLE IF NOT EXISTS downstream_api_keys (
|
||||
id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
@@ -213,13 +263,347 @@ function ensureDownstreamApiKeySchema() {
|
||||
`);
|
||||
}
|
||||
|
||||
ensureTokenManagementSchema();
|
||||
ensureSiteStatusSchema();
|
||||
ensureSiteProxySchema();
|
||||
ensureSiteExternalCheckinUrlSchema();
|
||||
ensureSiteGlobalWeightSchema();
|
||||
ensureRouteGroupingSchema();
|
||||
ensureDownstreamApiKeySchema();
|
||||
async function sqliteProxyQuery(sqlText: string, params: unknown[], method: SqlMethod) {
|
||||
const sqlite = requireSqliteConnection();
|
||||
const statement = sqlite.prepare(sqlText);
|
||||
if (method === 'run' || method === 'execute') {
|
||||
const result = statement.run(...params);
|
||||
return {
|
||||
rows: [],
|
||||
changes: Number(result.changes || 0),
|
||||
lastInsertRowid: Number(result.lastInsertRowid || 0),
|
||||
};
|
||||
}
|
||||
|
||||
export const db = drizzle(sqlite, { schema });
|
||||
if (method === 'get') {
|
||||
const row = statement.raw().get(...params) as unknown[] | undefined;
|
||||
return { rows: row as any };
|
||||
}
|
||||
|
||||
const rows = statement.raw().all(...params) as unknown[][];
|
||||
return { rows };
|
||||
}
|
||||
|
||||
// A mysql2 executor: the shared pool or a transaction-scoped connection.
type MysqlQueryable = mysql.Pool | mysql.PoolConnection;

/**
 * drizzle mysql-proxy callback: run one statement on the given executor.
 * For 'all'/'values' rows come back as positional arrays (rowsAsArray),
 * matching the proxy driver contract; other methods return object rows, or
 * the write-result header packet wrapped in a single-element array.
 */
async function mysqlProxyQuery(executor: MysqlQueryable, sqlText: string, params: unknown[], method: SqlMethod) {
  const queryOptions = {
    sql: sqlText,
    rowsAsArray: method === 'all' || method === 'values',
  };
  const [rows] = await executor.query(queryOptions as mysql.QueryOptions, params as any[]);

  if (method === 'all' || method === 'values') {
    return { rows: Array.isArray(rows) ? rows : [] };
  }

  if (Array.isArray(rows)) {
    return { rows };
  }
  // Non-SELECT statements yield a result-header object (affectedRows etc.).
  return { rows: [rows] };
}
|
||||
|
||||
type PgQueryable = pg.Pool | pg.PoolClient;
|
||||
function parseInsertTableName(sqlText: string): string | null {
|
||||
const match = sqlText.match(/insert\s+into\s+"?([a-zA-Z0-9_]+)"?/i);
|
||||
return match?.[1]?.toLowerCase() || null;
|
||||
}
|
||||
|
||||
/**
 * drizzle pg-proxy callback: run one statement on the pool or a
 * transaction-scoped client and shape the result for the proxy driver.
 *
 * - 'all'/'values': rows in array mode (positional value arrays).
 * - INSERT via 'execute' on a known auto-id table: appends `returning id`
 *   so callers receive a sqlite-style lastInsertRowid (Postgres has no
 *   native last-insert-id).
 * - other SELECTs: plain object rows.
 * - other writes: one synthetic row carrying the affected-row count.
 */
async function pgProxyQuery(executor: PgQueryable, sqlText: string, params: unknown[], method: SqlMethod) {
  const trimmedLower = sqlText.trim().toLowerCase();
  const values = params as any[];

  if (method === 'all' || method === 'values') {
    // rowMode: 'array' makes pg return positional value arrays.
    const result = await executor.query({
      text: sqlText,
      values,
      rowMode: 'array',
    } as pg.QueryConfig);
    return { rows: result.rows };
  }

  if (trimmedLower.startsWith('insert') && method === 'execute') {
    const tableName = parseInsertTableName(sqlText);
    // Only append `returning id` when the table is known to have a numeric
    // id column and the statement has no RETURNING clause of its own.
    const canReturnId = tableName !== null && TABLES_WITH_NUMERIC_ID.has(tableName) && !trimmedLower.includes(' returning ');
    if (canReturnId) {
      const result = await executor.query({
        text: `${sqlText} returning id`,
        values,
      } as pg.QueryConfig);
      const insertedId = Number((result.rows?.[0] as { id?: unknown } | undefined)?.id ?? 0);
      return {
        rows: [{
          changes: Number(result.rowCount || 0),
          lastInsertRowid: Number.isFinite(insertedId) ? insertedId : 0,
        }],
      };
    }
  }

  const result = await executor.query({
    text: sqlText,
    values,
  } as pg.QueryConfig);

  if (trimmedLower.startsWith('select')) {
    return { rows: result.rows };
  }

  // Writes: surface rowCount in the shape normalizeRunResult understands.
  return { rows: [{ changes: Number(result.rowCount || 0) }] };
}
|
||||
|
||||
function normalizeAllResult(result: unknown): unknown[] {
|
||||
if (!Array.isArray(result)) return [];
|
||||
if (result.length === 0) return [];
|
||||
const first = result[0] as Record<string, unknown> | undefined;
|
||||
if (first && typeof first === 'object') {
|
||||
if ('affectedRows' in first || 'insertId' in first) return [];
|
||||
if ('changes' in first && result.length === 1) return [];
|
||||
if ('rowCount' in first && result.length === 1) return [];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function normalizeRunResult(result: unknown): { changes: number; lastInsertRowid: number } {
|
||||
if (!result) return { changes: 0, lastInsertRowid: 0 };
|
||||
|
||||
if (typeof result === 'object' && !Array.isArray(result)) {
|
||||
const row = result as Record<string, unknown>;
|
||||
if ('changes' in row || 'lastInsertRowid' in row) {
|
||||
return {
|
||||
changes: Number(row.changes || 0),
|
||||
lastInsertRowid: Number(row.lastInsertRowid || 0),
|
||||
};
|
||||
}
|
||||
if ('affectedRows' in row || 'insertId' in row) {
|
||||
return {
|
||||
changes: Number(row.affectedRows || 0),
|
||||
lastInsertRowid: Number(row.insertId || 0),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (Array.isArray(result) && result.length > 0) {
|
||||
const first = result[0] as Record<string, unknown>;
|
||||
if (first && typeof first === 'object') {
|
||||
if ('changes' in first || 'lastInsertRowid' in first) {
|
||||
return {
|
||||
changes: Number(first.changes || 0),
|
||||
lastInsertRowid: Number(first.lastInsertRowid || 0),
|
||||
};
|
||||
}
|
||||
if ('affectedRows' in first || 'insertId' in first) {
|
||||
return {
|
||||
changes: Number(first.affectedRows || 0),
|
||||
lastInsertRowid: Number(first.insertId || 0),
|
||||
};
|
||||
}
|
||||
if ('rowCount' in first) {
|
||||
return {
|
||||
changes: Number(first.rowCount || 0),
|
||||
lastInsertRowid: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { changes: 0, lastInsertRowid: 0 };
|
||||
}
|
||||
|
||||
// Cache of already-wrapped query objects so repeated property access yields
// a stable proxy and objects are never double-wrapped.
const wrappedObjects = new WeakMap<object, unknown>();

/**
 * Wrap a drizzle query-builder object in a Proxy that backfills the
 * sqlite-flavoured terminal methods (.all/.get/.run) on dialects whose
 * builders only expose .execute, normalizing results to the shapes sqlite
 * callers expect. Chained builder calls are wrapped recursively; thenables
 * (promises) pass through untouched.
 */
function wrapQueryLike<T>(value: T): T {
  if (!value || typeof value !== 'object') return value;
  const target = value as unknown as object;
  if (wrappedObjects.has(target)) {
    return wrappedObjects.get(target) as T;
  }

  const proxy = new Proxy(target as Record<string, unknown>, {
    get(innerTarget, prop, receiver) {
      // Keep thenables working so `await query` resolves the real promise.
      if (prop === 'then' && typeof innerTarget.then === 'function') {
        return innerTarget.then.bind(innerTarget);
      }

      // .all shim: delegate to .execute, strip write-result markers.
      if (prop === 'all' && typeof innerTarget.all !== 'function' && typeof innerTarget.execute === 'function') {
        return async (...args: unknown[]) => normalizeAllResult(await (innerTarget.execute as (...a: unknown[]) => Promise<unknown>)(...args));
      }

      // .get shim: first row of the .execute result, or undefined.
      if (prop === 'get' && typeof innerTarget.get !== 'function' && typeof innerTarget.execute === 'function') {
        return async (...args: unknown[]) => {
          const rows = normalizeAllResult(await (innerTarget.execute as (...a: unknown[]) => Promise<unknown>)(...args));
          return rows[0] ?? undefined;
        };
      }

      // .run shim: coerce the .execute result to { changes, lastInsertRowid }.
      if (prop === 'run' && typeof innerTarget.run !== 'function' && typeof innerTarget.execute === 'function') {
        return async (...args: unknown[]) => normalizeRunResult(await (innerTarget.execute as (...a: unknown[]) => Promise<unknown>)(...args));
      }

      const original = Reflect.get(innerTarget, prop, receiver);
      if (typeof original !== 'function') {
        return original;
      }

      // Wrap objects returned by builder methods (but never promises) so
      // the shims stay available across the whole method chain.
      return (...args: unknown[]) => {
        const result = original.apply(innerTarget, args);
        if (result && typeof result === 'object' && typeof (result as Promise<unknown>).then !== 'function') {
          return wrapQueryLike(result);
        }
        return result;
      };
    },
  });

  wrappedObjects.set(target, proxy);
  return proxy as unknown as T;
}
|
||||
|
||||
/**
 * Wrap a drizzle db (or transaction) client so every query builder it hands
 * out goes through wrapQueryLike. `transaction` is replaced either by
 * `customTransaction` (mysql/pg manual BEGIN/COMMIT helpers) or by a
 * wrapper around the native transaction that re-wraps the tx object passed
 * to the callback.
 */
function wrapDbClient<T extends object>(
  rawDb: T,
  customTransaction?: <R>(fn: (tx: any) => Promise<R> | R) => Promise<R>,
) {
  return new Proxy(rawDb as Record<string, unknown>, {
    get(target, prop, receiver) {
      if (prop === 'transaction') {
        if (customTransaction) return customTransaction;

        const originalTransaction = target.transaction;
        if (typeof originalTransaction !== 'function') return undefined;
        // Delegate to the native transaction, but hand the callback a
        // wrapped tx so the .all/.get/.run shims work inside it too.
        return async <R>(fn: (tx: any) => Promise<R> | R) => {
          return await (originalTransaction as (handler: (tx: unknown) => Promise<R> | R) => Promise<R>).call(target, async (tx: unknown) => {
            return await fn(wrapDbClient(tx as object));
          });
        };
      }

      const original = Reflect.get(target, prop, receiver);
      if (typeof original !== 'function') {
        return original;
      }

      // Wrap builder objects returned by db.select()/insert()/…; promises
      // pass through unchanged.
      return (...args: unknown[]) => {
        const result = original.apply(target, args);
        if (result && typeof result === 'object' && typeof (result as Promise<unknown>).then !== 'function') {
          return wrapQueryLike(result);
        }
        return result;
      };
    },
  }) as T;
}
|
||||
|
||||
/**
 * Open (or create) the SQLite database file, apply connection pragmas and
 * idempotent in-place schema migrations, then return a wrapped drizzle
 * sqlite-proxy client.
 */
function initSqliteDb() {
  const sqlitePath = resolveSqlitePath();
  if (sqlitePath !== ':memory:') {
    mkdirSync(dirname(sqlitePath), { recursive: true });
  }

  const sqlite = new Database(sqlitePath);
  sqliteConnection = sqlite;
  // WAL for better concurrent reads; foreign_keys is off by default in
  // SQLite and must be enabled per connection.
  sqlite.pragma('journal_mode = WAL');
  sqlite.pragma('foreign_keys = ON');

  // Additive migrations — each checks existence first, so safe every boot.
  ensureTokenManagementSchema();
  ensureSiteStatusSchema();
  ensureSiteProxySchema();
  ensureSiteExternalCheckinUrlSchema();
  ensureSiteGlobalWeightSchema();
  ensureRouteGroupingSchema();
  ensureDownstreamApiKeySchema();

  const rawDb = drizzleSqliteProxy(
    (sqlText, params, method) => sqliteProxyQuery(sqlText, params, method as SqlMethod),
    { schema },
  ) as any;
  return wrapDbClient(rawDb);
}
|
||||
|
||||
// The public db type is derived from the sqlite variant; the mysql/pg
// clients are coerced to the same shape via wrapDbClient.
type AppDb = ReturnType<typeof initSqliteDb>;

/**
 * Create a MySQL connection pool and return a wrapped drizzle mysql-proxy
 * client. Transactions are implemented manually on a dedicated pooled
 * connection (beginTransaction/commit/rollback) so every statement inside
 * the callback shares one connection; the connection is always released.
 */
function initMysqlDb(): AppDb {
  if (!config.dbUrl) {
    throw new Error('DB_URL is required when DB_TYPE=mysql');
  }
  mysqlPool = mysql.createPool(config.dbUrl);

  const rawDb = drizzleMysqlProxy(
    (sqlText, params, method) => mysqlProxyQuery(mysqlPool!, sqlText, params, method as SqlMethod),
    { schema },
  ) as any;

  return wrapDbClient(rawDb, async <R>(fn: (tx: any) => Promise<R> | R) => {
    const connection = await mysqlPool!.getConnection();
    try {
      await connection.beginTransaction();
      // Transaction-scoped drizzle client bound to this single connection.
      const txRaw = drizzleMysqlProxy(
        (sqlText, params, method) => mysqlProxyQuery(connection, sqlText, params, method as SqlMethod),
        { schema },
      ) as any;
      const txWrapped = wrapDbClient(txRaw);
      const result = await fn(txWrapped);
      await connection.commit();
      return result;
    } catch (error) {
      await connection.rollback();
      throw error;
    } finally {
      connection.release();
    }
  }) as AppDb;
}
|
||||
|
||||
/**
 * Create a node-postgres pool and return a wrapped drizzle pg-proxy client.
 * Transactions run on a dedicated client checked out from the pool with
 * explicit BEGIN/COMMIT/ROLLBACK so every statement in the callback shares
 * one connection; the client is always released back to the pool.
 */
function initPostgresDb(): AppDb {
  if (!config.dbUrl) {
    throw new Error('DB_URL is required when DB_TYPE=postgres');
  }
  pgPool = new pg.Pool({ connectionString: config.dbUrl });

  const rawDb = drizzlePgProxy(
    (sqlText, params, method) => pgProxyQuery(pgPool!, sqlText, params, method as SqlMethod),
    { schema },
  ) as any;

  return wrapDbClient(rawDb, async <R>(fn: (tx: any) => Promise<R> | R) => {
    const client = await pgPool!.connect();
    try {
      await client.query('BEGIN');
      // Transaction-scoped drizzle client bound to this checked-out client.
      const txRaw = drizzlePgProxy(
        (sqlText, params, method) => pgProxyQuery(client, sqlText, params, method as SqlMethod),
        { schema },
      ) as any;
      const txWrapped = wrapDbClient(txRaw);
      const result = await fn(txWrapped);
      await client.query('COMMIT');
      return result;
    } catch (error) {
      await client.query('ROLLBACK');
      throw error;
    } finally {
      client.release();
    }
  }) as AppDb;
}
|
||||
|
||||
function initDb(): AppDb {
|
||||
if (runtimeDbDialect === 'mysql') return initMysqlDb();
|
||||
if (runtimeDbDialect === 'postgres') return initPostgresDb();
|
||||
return initSqliteDb();
|
||||
}
|
||||
|
||||
// Singleton drizzle client for the configured dialect. Exported as `any`
// because the three proxy clients have different static types but are
// coerced to the same runtime surface by wrapDbClient.
export const db: any = initDb();
export { schema };

/**
 * Close whichever driver handle is open and reset it to null. Used by
 * short-lived scripts and tests so the process can exit cleanly.
 */
export async function closeDbConnections(): Promise<void> {
  if (mysqlPool) {
    await mysqlPool.end();
    mysqlPool = null;
  }
  if (pgPool) {
    await pgPool.end();
    pgPool = null;
  }
  if (sqliteConnection) {
    // better-sqlite3 close() is synchronous.
    sqliteConnection.close();
    sqliteConnection = null;
  }
}
|
||||
|
||||
+3
-3
@@ -27,12 +27,12 @@ import { db, schema } from './db/index.js';
|
||||
|
||||
// Load runtime config overrides from settings
|
||||
try {
|
||||
const rows = db.select().from(schema.settings).all();
|
||||
const rows = await db.select().from(schema.settings).all();
|
||||
const settingsMap = new Map(rows.map((row) => [row.key, row.value]));
|
||||
|
||||
const parseSetting = <T>(key: string): T | undefined => {
|
||||
const raw = settingsMap.get(key);
|
||||
if (!raw) return undefined;
|
||||
if (typeof raw !== 'string' || !raw) return undefined;
|
||||
try {
|
||||
return JSON.parse(raw) as T;
|
||||
} catch {
|
||||
@@ -195,7 +195,7 @@ if (existsSync(webDir)) {
|
||||
}
|
||||
|
||||
// Start scheduler
|
||||
startScheduler();
|
||||
await startScheduler();
|
||||
startProxyLogRetentionService();
|
||||
app.addHook('onClose', async () => {
|
||||
stopProxyLogRetentionService();
|
||||
|
||||
@@ -75,14 +75,14 @@ export async function proxyAuthMiddleware(request: FastifyRequest, reply: Fastif
|
||||
return;
|
||||
}
|
||||
|
||||
const authResult = authorizeDownstreamToken(token);
|
||||
const authResult = await authorizeDownstreamToken(token);
|
||||
if (!authResult.ok) {
|
||||
reply.code(authResult.statusCode).send({ error: authResult.error });
|
||||
return;
|
||||
}
|
||||
|
||||
if (authResult.source === 'managed' && authResult.key) {
|
||||
consumeManagedKeyRequest(authResult.key.id);
|
||||
await consumeManagedKeyRequest(authResult.key.id);
|
||||
}
|
||||
|
||||
proxyAuthContextByRequest.set(request, {
|
||||
|
||||
@@ -35,18 +35,18 @@ describe('account tokens sync routes with site status', () => {
|
||||
return seedId;
|
||||
};
|
||||
|
||||
const seedAccount = (input: { siteStatus?: 'active' | 'disabled'; accountStatus?: string; accessToken?: string | null }) => {
|
||||
const seedAccount = async (input: { siteStatus?: 'active' | 'disabled'; accountStatus?: string; accessToken?: string | null }) => {
|
||||
const id = nextSeed();
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: `site-${id}`,
|
||||
url: `https://site-${id}.example.com`,
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
if (input.siteStatus === 'disabled') {
|
||||
db.run(sql`update sites set status = 'disabled' where id = ${site.id}`);
|
||||
await db.run(sql`update sites set status = 'disabled' where id = ${site.id}`);
|
||||
}
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: `user-${id}`,
|
||||
accessToken: input.accessToken ?? `access-token-${id}`,
|
||||
@@ -70,7 +70,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
await app.register(routesModule.accountTokensRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
getApiTokensMock.mockReset();
|
||||
getApiTokenMock.mockReset();
|
||||
createApiTokenMock.mockReset();
|
||||
@@ -78,14 +78,14 @@ describe('account tokens sync routes with site status', () => {
|
||||
deleteApiTokenMock.mockReset();
|
||||
seedId = 0;
|
||||
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -94,7 +94,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('returns skipped for single-account sync when site is disabled', async () => {
|
||||
const { account } = seedAccount({ siteStatus: 'disabled' });
|
||||
const { account } = await seedAccount({ siteStatus: 'disabled' });
|
||||
|
||||
const response = await app.inject({
|
||||
method: 'POST',
|
||||
@@ -113,7 +113,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('returns skipped when upstream has no api tokens', async () => {
|
||||
const { account } = seedAccount({ siteStatus: 'active' });
|
||||
const { account } = await seedAccount({ siteStatus: 'active' });
|
||||
getApiTokensMock.mockResolvedValue([]);
|
||||
getApiTokenMock.mockResolvedValue(null);
|
||||
|
||||
@@ -130,7 +130,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
reason: 'no_upstream_tokens',
|
||||
});
|
||||
|
||||
const tokenRows = db.select()
|
||||
const tokenRows = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, account.id))
|
||||
.all();
|
||||
@@ -138,8 +138,8 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('sync-all skips disabled-site accounts and syncs active-site accounts', async () => {
|
||||
const disabled = seedAccount({ siteStatus: 'disabled' });
|
||||
const active = seedAccount({ siteStatus: 'active' });
|
||||
const disabled = await seedAccount({ siteStatus: 'disabled' });
|
||||
const active = await seedAccount({ siteStatus: 'active' });
|
||||
|
||||
getApiTokensMock.mockResolvedValue([
|
||||
{ name: 'default', key: 'sk-synced-token', enabled: true },
|
||||
@@ -186,7 +186,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
synced: true,
|
||||
});
|
||||
|
||||
const syncedDefaultToken = db.select()
|
||||
const syncedDefaultToken = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(and(eq(schema.accountTokens.accountId, active.account.id), eq(schema.accountTokens.isDefault, true)))
|
||||
.get();
|
||||
@@ -194,7 +194,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('creates token via upstream api and syncs into local store when manual token is omitted', async () => {
|
||||
const { account, site } = seedAccount({ siteStatus: 'active' });
|
||||
const { account, site } = await seedAccount({ siteStatus: 'active' });
|
||||
createApiTokenMock.mockResolvedValue(true);
|
||||
getApiTokensMock.mockResolvedValue([
|
||||
{ name: 'created-from-upstream', key: 'sk-created-upstream-token', enabled: true },
|
||||
@@ -220,7 +220,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
expect(createApiTokenMock.mock.calls[0][0]).toBe(site.url);
|
||||
expect(createApiTokenMock.mock.calls[0][1]).toBe(account.accessToken);
|
||||
|
||||
const tokenRows = db.select()
|
||||
const tokenRows = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, account.id))
|
||||
.all();
|
||||
@@ -232,7 +232,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('passes token creation options to upstream adapter', async () => {
|
||||
const { account } = seedAccount({ siteStatus: 'active' });
|
||||
const { account } = await seedAccount({ siteStatus: 'active' });
|
||||
createApiTokenMock.mockResolvedValue(true);
|
||||
getApiTokensMock.mockResolvedValue([
|
||||
{ name: 'custom-token', key: 'sk-created-upstream-token', enabled: true },
|
||||
@@ -265,7 +265,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('returns 400 when limited token misses remainQuota', async () => {
|
||||
const { account } = seedAccount({ siteStatus: 'active' });
|
||||
const { account } = await seedAccount({ siteStatus: 'active' });
|
||||
|
||||
const response = await app.inject({
|
||||
method: 'POST',
|
||||
@@ -286,7 +286,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('returns 502 when upstream token creation fails', async () => {
|
||||
const { account } = seedAccount({ siteStatus: 'active' });
|
||||
const { account } = await seedAccount({ siteStatus: 'active' });
|
||||
createApiTokenMock.mockResolvedValue(false);
|
||||
|
||||
const response = await app.inject({
|
||||
@@ -306,7 +306,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('fetches account token groups from upstream', async () => {
|
||||
const { account } = seedAccount({ siteStatus: 'active' });
|
||||
const { account } = await seedAccount({ siteStatus: 'active' });
|
||||
getUserGroupsMock.mockResolvedValue(['default', 'vip']);
|
||||
|
||||
const response = await app.inject({
|
||||
@@ -323,8 +323,8 @@ describe('account tokens sync routes with site status', () => {
|
||||
});
|
||||
|
||||
it('deletes upstream token before removing local token', async () => {
|
||||
const { account, site } = seedAccount({ siteStatus: 'active' });
|
||||
const token = db.insert(schema.accountTokens).values({
|
||||
const { account, site } = await seedAccount({ siteStatus: 'active' });
|
||||
const token = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'upstream-token',
|
||||
token: 'sk-upstream-token',
|
||||
@@ -345,13 +345,13 @@ describe('account tokens sync routes with site status', () => {
|
||||
expect(deleteApiTokenMock.mock.calls[0][1]).toBe(account.accessToken);
|
||||
expect(deleteApiTokenMock.mock.calls[0][2]).toBe('sk-upstream-token');
|
||||
|
||||
const removed = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, token.id)).get();
|
||||
const removed = await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, token.id)).get();
|
||||
expect(removed).toBeUndefined();
|
||||
});
|
||||
|
||||
it('keeps local token when upstream deletion fails', async () => {
|
||||
const { account } = seedAccount({ siteStatus: 'active' });
|
||||
const token = db.insert(schema.accountTokens).values({
|
||||
const { account } = await seedAccount({ siteStatus: 'active' });
|
||||
const token = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'upstream-token',
|
||||
token: 'sk-upstream-token',
|
||||
@@ -372,7 +372,7 @@ describe('account tokens sync routes with site status', () => {
|
||||
message: '站点删除令牌失败,本地未删除',
|
||||
});
|
||||
|
||||
const existing = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, token.id)).get();
|
||||
const existing = await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, token.id)).get();
|
||||
expect(existing).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -233,7 +233,7 @@ async function executeAccountTokenSync(row: AccountWithSiteRow): Promise<SyncExe
|
||||
};
|
||||
}
|
||||
|
||||
const synced = syncTokensFromUpstream(accountId, tokens);
|
||||
const synced = await syncTokensFromUpstream(accountId, tokens);
|
||||
return {
|
||||
...base,
|
||||
status: 'synced',
|
||||
@@ -255,7 +255,7 @@ async function executeAccountTokenSync(row: AccountWithSiteRow): Promise<SyncExe
|
||||
}
|
||||
}
|
||||
|
||||
function appendTokenSyncEvent(result: SyncExecutionResult) {
|
||||
async function appendTokenSyncEvent(result: SyncExecutionResult) {
|
||||
const title = result.status === 'synced'
|
||||
? '令牌同步成功'
|
||||
: (result.status === 'skipped' ? '令牌同步跳过' : '令牌同步失败');
|
||||
@@ -267,7 +267,7 @@ function appendTokenSyncEvent(result: SyncExecutionResult) {
|
||||
: (result.message || result.reason || 'sync skipped');
|
||||
|
||||
try {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'token',
|
||||
title,
|
||||
message: `${result.accountName} @ ${result.siteName}: ${detail}`,
|
||||
@@ -279,7 +279,7 @@ function appendTokenSyncEvent(result: SyncExecutionResult) {
|
||||
}
|
||||
|
||||
async function executeSyncAllAccountTokens() {
|
||||
const rows = db.select().from(schema.accounts)
|
||||
const rows = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.accounts.status, 'active'))
|
||||
.all();
|
||||
@@ -331,7 +331,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
modelLimits?: string;
|
||||
} }>('/api/account-tokens', async (request, reply) => {
|
||||
const body = request.body;
|
||||
const row = db.select()
|
||||
const row = await db.select()
|
||||
.from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.accounts.id, body.accountId))
|
||||
@@ -343,11 +343,11 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
const tokenValue = (body.token || '').trim();
|
||||
if (tokenValue) {
|
||||
const now = new Date().toISOString();
|
||||
const existing = db.select().from(schema.accountTokens)
|
||||
const existing = await db.select().from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, body.accountId))
|
||||
.all();
|
||||
|
||||
const created = db.insert(schema.accountTokens).values({
|
||||
const inserted = await db.insert(schema.accountTokens).values({
|
||||
accountId: body.accountId,
|
||||
name: (body.name || '').trim() || (existing.length === 0 ? 'default' : `token-${existing.length + 1}`),
|
||||
token: tokenValue,
|
||||
@@ -357,12 +357,20 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
isDefault: body.isDefault ?? false,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}).returning().get();
|
||||
}).run();
|
||||
const createdId = Number(inserted.lastInsertRowid || 0);
|
||||
if (createdId <= 0) {
|
||||
return reply.code(500).send({ success: false, message: '创建令牌失败' });
|
||||
}
|
||||
const created = await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, createdId)).get();
|
||||
if (!created) {
|
||||
return reply.code(500).send({ success: false, message: '创建令牌失败' });
|
||||
}
|
||||
|
||||
if (body.isDefault || (existing.length === 0 && (body.enabled ?? true))) {
|
||||
setDefaultToken(created.id);
|
||||
await setDefaultToken(created.id);
|
||||
} else if (existing.every((token) => !token.isDefault) && (body.enabled ?? true)) {
|
||||
setDefaultToken(created.id);
|
||||
await setDefaultToken(created.id);
|
||||
}
|
||||
|
||||
return { success: true, token: created };
|
||||
@@ -445,12 +453,12 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(502).send({ success: false, message: syncResult.message || '站点未返回可用令牌' });
|
||||
}
|
||||
|
||||
const preferred = db.select().from(schema.accountTokens)
|
||||
const preferred = await db.select().from(schema.accountTokens)
|
||||
.where(and(eq(schema.accountTokens.accountId, account.id), eq(schema.accountTokens.isDefault, true)))
|
||||
.get();
|
||||
const token = preferred || db.select().from(schema.accountTokens)
|
||||
const token = preferred || (await db.select().from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, account.id))
|
||||
.all()
|
||||
.all())
|
||||
.slice(-1)[0] || null;
|
||||
|
||||
return {
|
||||
@@ -467,7 +475,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '令牌 ID 无效' });
|
||||
}
|
||||
|
||||
const existing = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
|
||||
const existing = await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
|
||||
if (!existing) {
|
||||
return reply.code(404).send({ success: false, message: '令牌不存在' });
|
||||
}
|
||||
@@ -491,9 +499,9 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
if (body.source !== undefined) updates.source = body.source;
|
||||
if (body.isDefault !== undefined) updates.isDefault = body.isDefault;
|
||||
|
||||
db.update(schema.accountTokens).set(updates).where(eq(schema.accountTokens.id, tokenId)).run();
|
||||
await db.update(schema.accountTokens).set(updates).where(eq(schema.accountTokens.id, tokenId)).run();
|
||||
|
||||
const latest = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
|
||||
const latest = await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
|
||||
if (!latest) {
|
||||
return reply.code(500).send({ success: false, message: '更新失败' });
|
||||
}
|
||||
@@ -529,7 +537,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '令牌 ID 无效' });
|
||||
}
|
||||
|
||||
const row = db.select()
|
||||
const row = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -555,7 +563,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '账号 ID 无效' });
|
||||
}
|
||||
|
||||
const row = db.select()
|
||||
const row = await db.select()
|
||||
.from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
@@ -593,7 +601,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '令牌 ID 无效' });
|
||||
}
|
||||
|
||||
const row = db.select()
|
||||
const row = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -622,7 +630,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
}
|
||||
}
|
||||
|
||||
db.delete(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).run();
|
||||
await db.delete(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).run();
|
||||
|
||||
if (existing.isDefault) {
|
||||
repairDefaultToken(existing.accountId);
|
||||
@@ -637,7 +645,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '账号 ID 无效' });
|
||||
}
|
||||
|
||||
const row = db.select().from(schema.accounts)
|
||||
const row = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
.get();
|
||||
@@ -707,7 +715,7 @@ export async function accountTokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '账号 ID 无效' });
|
||||
}
|
||||
|
||||
const row = db.select()
|
||||
const row = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
|
||||
@@ -36,20 +36,20 @@ describe('accounts add requires token verification success', () => {
|
||||
await app.register(routesModule.accountsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
verifyTokenMock.mockReset();
|
||||
getApiTokensMock.mockReset();
|
||||
getApiTokensMock.mockResolvedValue([]);
|
||||
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -60,7 +60,7 @@ describe('accounts add requires token verification success', () => {
|
||||
it('rejects binding when token verification is not successful', async () => {
|
||||
verifyTokenMock.mockResolvedValueOnce({ tokenType: 'unknown' });
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Verify Site',
|
||||
url: 'https://verify.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -79,13 +79,13 @@ describe('accounts add requires token verification success', () => {
|
||||
expect(response.json()).toMatchObject({
|
||||
success: false,
|
||||
});
|
||||
expect(db.select().from(schema.accounts).all()).toHaveLength(0);
|
||||
expect(await db.select().from(schema.accounts).all()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('returns rebind hint when token verify reports invalid access token', async () => {
|
||||
verifyTokenMock.mockRejectedValueOnce(new Error('无权进行此操作,access token 无效'));
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Verify Site',
|
||||
url: 'https://verify.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -105,7 +105,7 @@ describe('accounts add requires token verification success', () => {
|
||||
success: false,
|
||||
message: '无权进行此操作,access token 无效,请在中转站重新生成系统访问令牌后重新绑定账号',
|
||||
});
|
||||
expect(db.select().from(schema.accounts).all()).toHaveLength(0);
|
||||
expect(await db.select().from(schema.accounts).all()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('allows binding when token verification succeeds as api key', async () => {
|
||||
@@ -114,7 +114,7 @@ describe('accounts add requires token verification success', () => {
|
||||
models: ['gpt-4o-mini'],
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'API Key Site',
|
||||
url: 'https://apikey.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -134,7 +134,7 @@ describe('accounts add requires token verification success', () => {
|
||||
expect(body.tokenType).toBe('apikey');
|
||||
expect(body.apiTokenFound).toBe(true);
|
||||
|
||||
const accounts = db.select().from(schema.accounts).all();
|
||||
const accounts = await db.select().from(schema.accounts).all();
|
||||
expect(accounts).toHaveLength(1);
|
||||
expect((accounts[0]?.apiToken || '').startsWith('sk-')).toBe(true);
|
||||
});
|
||||
|
||||
@@ -39,21 +39,21 @@ describe('accounts credential mode', () => {
|
||||
await app.register(routesModule.accountsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
verifyTokenMock.mockReset();
|
||||
getModelsMock.mockReset();
|
||||
getApiTokensMock.mockReset();
|
||||
getApiTokensMock.mockResolvedValue([]);
|
||||
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -65,7 +65,7 @@ describe('accounts credential mode', () => {
|
||||
verifyTokenMock.mockRejectedValueOnce(new Error('verifyToken should not be called'));
|
||||
getModelsMock.mockResolvedValueOnce(['gpt-5-mini', 'gpt-4o-mini']);
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Fast Verify Site',
|
||||
url: 'https://fast-verify.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -95,7 +95,7 @@ describe('accounts credential mode', () => {
|
||||
verifyTokenMock.mockRejectedValueOnce(new Error('verifyToken should not be called'));
|
||||
getModelsMock.mockResolvedValueOnce(['gpt-4o-mini']);
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Proxy Only Site',
|
||||
url: 'https://proxy-only.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -116,7 +116,7 @@ describe('accounts credential mode', () => {
|
||||
expect(body.tokenType).toBe('apikey');
|
||||
expect(body.capabilities?.proxyOnly).toBe(true);
|
||||
|
||||
const accounts = db.select().from(schema.accounts).all();
|
||||
const accounts = await db.select().from(schema.accounts).all();
|
||||
expect(accounts).toHaveLength(1);
|
||||
expect(accounts[0]?.accessToken || '').toBe('');
|
||||
expect((accounts[0]?.apiToken || '').startsWith('sk-')).toBe(true);
|
||||
@@ -132,7 +132,7 @@ describe('accounts credential mode', () => {
|
||||
userInfo: { username: 'sub2-user' },
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Sub2 Site',
|
||||
url: 'https://sub2.example.com',
|
||||
platform: 'sub2api',
|
||||
@@ -150,7 +150,7 @@ describe('accounts credential mode', () => {
|
||||
});
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
const created = db.select().from(schema.accounts).all()[0];
|
||||
const created = (await db.select().from(schema.accounts).all())[0];
|
||||
const parsedExtra = JSON.parse(created?.extraConfig || '{}') as {
|
||||
credentialMode?: string;
|
||||
sub2apiAuth?: {
|
||||
@@ -164,12 +164,12 @@ describe('accounts credential mode', () => {
|
||||
});
|
||||
|
||||
it('updates and clears managed refresh token via account update API', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Sub2 Site',
|
||||
url: 'https://sub2.example.com',
|
||||
platform: 'sub2api',
|
||||
}).returning().get();
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'sub2-user',
|
||||
accessToken: 'access-token',
|
||||
@@ -192,7 +192,7 @@ describe('accounts credential mode', () => {
|
||||
});
|
||||
expect(updateResponse.statusCode).toBe(200);
|
||||
|
||||
const updated = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const updated = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const parsedUpdated = JSON.parse(updated?.extraConfig || '{}') as {
|
||||
sub2apiAuth?: { refreshToken?: string; tokenExpiresAt?: number };
|
||||
};
|
||||
@@ -208,7 +208,7 @@ describe('accounts credential mode', () => {
|
||||
});
|
||||
expect(clearResponse.statusCode).toBe(200);
|
||||
|
||||
const cleared = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const cleared = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const parsedCleared = JSON.parse(cleared?.extraConfig || '{}') as {
|
||||
sub2apiAuth?: { refreshToken?: string; tokenExpiresAt?: number };
|
||||
};
|
||||
|
||||
@@ -32,17 +32,17 @@ describe('accounts health refresh runtime state', () => {
|
||||
await app.register(routesModule.accountsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
refreshBalanceMock.mockReset();
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -51,13 +51,13 @@ describe('accounts health refresh runtime state', () => {
|
||||
});
|
||||
|
||||
it('keeps degraded runtime state for unsupported checkin after health refresh', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Wind Hub',
|
||||
url: 'https://windhub.cc',
|
||||
platform: 'done-hub',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'ld6jl3djexjf',
|
||||
accessToken: 'token',
|
||||
|
||||
@@ -34,18 +34,18 @@ describe('accounts login shield detection', () => {
|
||||
await app.register(routesModule.accountsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
loginMock.mockReset();
|
||||
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -59,7 +59,7 @@ describe('accounts login shield detection', () => {
|
||||
message: "Unexpected token '<', \"<html><scr\"... is not valid JSON",
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'AnyRouter',
|
||||
url: 'https://anyrouter.example.com',
|
||||
platform: 'new-api',
|
||||
|
||||
@@ -35,18 +35,18 @@ describe('accounts rebind-session api', () => {
|
||||
await app.register(routesModule.accountsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
verifyTokenMock.mockReset();
|
||||
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -57,13 +57,13 @@ describe('accounts rebind-session api', () => {
|
||||
it('rejects rebinding when token is not verified as session', async () => {
|
||||
verifyTokenMock.mockResolvedValueOnce({ tokenType: 'unknown' });
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Rebind Site',
|
||||
url: 'https://rebind.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'linuxdo_1001',
|
||||
accessToken: 'old-access-token',
|
||||
@@ -83,7 +83,7 @@ describe('accounts rebind-session api', () => {
|
||||
success: false,
|
||||
});
|
||||
|
||||
const latest = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const latest = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
expect(latest?.accessToken).toBe('old-access-token');
|
||||
expect(latest?.status).toBe('expired');
|
||||
});
|
||||
@@ -91,13 +91,13 @@ describe('accounts rebind-session api', () => {
|
||||
it('returns rebind hint when verify reports invalid access token', async () => {
|
||||
verifyTokenMock.mockRejectedValueOnce(new Error('无权进行此操作,access token 无效'));
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Rebind Site',
|
||||
url: 'https://rebind.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'linuxdo_1001',
|
||||
accessToken: 'old-access-token',
|
||||
@@ -126,13 +126,13 @@ describe('accounts rebind-session api', () => {
|
||||
apiToken: 'sk-rebound-token',
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Rebind Site',
|
||||
url: 'https://rebind.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'linuxdo_1001',
|
||||
accessToken: 'old-access-token',
|
||||
@@ -153,7 +153,7 @@ describe('accounts rebind-session api', () => {
|
||||
expect(body.success).toBe(true);
|
||||
expect(body.apiTokenFound).toBe(true);
|
||||
|
||||
const latest = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const latest = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
expect(latest?.accessToken).toBe('new-session-token');
|
||||
expect(latest?.apiToken).toBe('sk-rebound-token');
|
||||
expect(latest?.username).toBe('linuxdo_1002');
|
||||
@@ -166,13 +166,13 @@ describe('accounts rebind-session api', () => {
|
||||
userInfo: { username: 'sub2_user' },
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Sub2 Rebind Site',
|
||||
url: 'https://sub2.example.com',
|
||||
platform: 'sub2api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'sub2_user',
|
||||
accessToken: 'old-access-token',
|
||||
@@ -197,7 +197,7 @@ describe('accounts rebind-session api', () => {
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
|
||||
const latest = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const latest = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const parsedExtra = JSON.parse(String(latest?.extraConfig || '{}')) as {
|
||||
sub2apiAuth?: { refreshToken?: string; tokenExpiresAt?: number };
|
||||
};
|
||||
@@ -211,13 +211,13 @@ describe('accounts rebind-session api', () => {
|
||||
userInfo: { username: 'sub2_user' },
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Sub2 Rebind Site',
|
||||
url: 'https://sub2.example.com',
|
||||
platform: 'sub2api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'sub2_user',
|
||||
accessToken: 'old-access-token',
|
||||
@@ -242,7 +242,7 @@ describe('accounts rebind-session api', () => {
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
|
||||
const latest = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const latest = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const parsedExtra = JSON.parse(String(latest?.extraConfig || '{}')) as {
|
||||
sub2apiAuth?: { refreshToken?: string; tokenExpiresAt?: number };
|
||||
};
|
||||
@@ -256,13 +256,13 @@ describe('accounts rebind-session api', () => {
|
||||
userInfo: { username: 'linuxdo_1003' },
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Rebind Site',
|
||||
url: 'https://rebind.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'linuxdo_1001',
|
||||
accessToken: 'old-access-token',
|
||||
@@ -282,7 +282,7 @@ describe('accounts rebind-session api', () => {
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
|
||||
const latest = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const latest = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const parsedExtra = JSON.parse(String(latest?.extraConfig || '{}')) as {
|
||||
sub2apiAuth?: { refreshToken?: string; tokenExpiresAt?: number };
|
||||
platformUserId?: number;
|
||||
|
||||
@@ -32,16 +32,16 @@ describe('accounts api today reward fallback', () => {
|
||||
await app.register(routesModule.accountsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -51,12 +51,12 @@ describe('accounts api today reward fallback', () => {
|
||||
|
||||
it('uses today income value when checkin reward is missing', async () => {
|
||||
const today = formatLocalDate(new Date());
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'reward-site',
|
||||
url: 'https://reward-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'reward-user',
|
||||
accessToken: 'token',
|
||||
@@ -71,7 +71,7 @@ describe('accounts api today reward fallback', () => {
|
||||
}),
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.checkinLogs).values({
|
||||
await db.insert(schema.checkinLogs).values({
|
||||
accountId: account.id,
|
||||
status: 'success',
|
||||
message: 'checked in',
|
||||
@@ -92,12 +92,12 @@ describe('accounts api today reward fallback', () => {
|
||||
|
||||
it('prefers parsed checkin reward when available', async () => {
|
||||
const today = formatLocalDate(new Date());
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'reward-site',
|
||||
url: 'https://reward-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'reward-user',
|
||||
accessToken: 'token',
|
||||
@@ -112,7 +112,7 @@ describe('accounts api today reward fallback', () => {
|
||||
}),
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.checkinLogs).values({
|
||||
await db.insert(schema.checkinLogs).values({
|
||||
accountId: account.id,
|
||||
status: 'success',
|
||||
message: 'checkin success',
|
||||
@@ -132,12 +132,12 @@ describe('accounts api today reward fallback', () => {
|
||||
});
|
||||
|
||||
it('counts today spend only inside local-day range', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'spend-site',
|
||||
url: 'https://spend-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'spend-user',
|
||||
accessToken: 'token',
|
||||
@@ -151,7 +151,7 @@ describe('accounts api today reward fallback', () => {
|
||||
const inRange = formatUtcSqlDateTime(new Date(startDate.getTime() + 60_000));
|
||||
const afterEnd = formatUtcSqlDateTime(new Date(endDate.getTime() + 60_000));
|
||||
|
||||
db.insert(schema.proxyLogs).values([
|
||||
await db.insert(schema.proxyLogs).values([
|
||||
{
|
||||
accountId: account.id,
|
||||
status: 'success',
|
||||
|
||||
@@ -114,8 +114,8 @@ function normalizeManagedTokenExpiresAt(input: unknown): number | undefined {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getNextAccountSortOrder(): number {
|
||||
const rows = db.select({ sortOrder: schema.accounts.sortOrder }).from(schema.accounts).all();
|
||||
async function getNextAccountSortOrder(): Promise<number> {
|
||||
const rows = await db.select({ sortOrder: schema.accounts.sortOrder }).from(schema.accounts).all();
|
||||
const max = rows.reduce((currentMax, row) => Math.max(currentMax, row.sortOrder || 0), -1);
|
||||
return max + 1;
|
||||
}
|
||||
@@ -191,7 +191,7 @@ async function refreshRuntimeHealthForRow(row: AccountWithSiteRow): Promise<Acco
|
||||
|
||||
try {
|
||||
await refreshBalance(accountId);
|
||||
const refreshedAccount = db.select().from(schema.accounts)
|
||||
const refreshedAccount = await db.select().from(schema.accounts)
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
.get();
|
||||
const runtimeHealth = buildRuntimeHealthForAccount({
|
||||
@@ -227,7 +227,7 @@ async function refreshRuntimeHealthForRow(row: AccountWithSiteRow): Promise<Acco
|
||||
}
|
||||
|
||||
async function executeRefreshAccountRuntimeHealth(accountId?: number) {
|
||||
const rows = db.select().from(schema.accounts)
|
||||
const rows = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.all();
|
||||
|
||||
@@ -249,13 +249,13 @@ async function executeRefreshAccountRuntimeHealth(accountId?: number) {
|
||||
export async function accountsRoutes(app: FastifyInstance) {
|
||||
// List all accounts (with site info)
|
||||
app.get('/api/accounts', async () => {
|
||||
const rows = db.select().from(schema.accounts)
|
||||
const rows = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id)).all();
|
||||
|
||||
const { localDay, startUtc, endUtc } = getLocalDayRangeUtc();
|
||||
|
||||
// Aggregate today's spend per account from proxy logs
|
||||
const todaySpendRows = db.select({
|
||||
const todaySpendRows = await db.select({
|
||||
accountId: schema.proxyLogs.accountId,
|
||||
totalSpend: sql<number>`coalesce(sum(${schema.proxyLogs.estimatedCost}), 0)`,
|
||||
}).from(schema.proxyLogs)
|
||||
@@ -269,7 +269,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
}
|
||||
|
||||
// Aggregate today's checkin rewards per account
|
||||
const todayCheckins = db.select({
|
||||
const todayCheckins = await db.select({
|
||||
accountId: schema.checkinLogs.accountId,
|
||||
reward: schema.checkinLogs.reward,
|
||||
message: schema.checkinLogs.message,
|
||||
@@ -323,7 +323,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
const { siteId, username, password } = request.body;
|
||||
|
||||
// Get site info
|
||||
const site = db.select().from(schema.sites).where(eq(schema.sites.id, siteId)).get();
|
||||
const site = await db.select().from(schema.sites).where(eq(schema.sites.id, siteId)).get();
|
||||
if (!site) return { success: false, message: 'site not found' };
|
||||
|
||||
// Get platform adapter
|
||||
@@ -354,7 +354,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
} catch { }
|
||||
|
||||
const preferredApiToken = apiTokens.find((token) => token.enabled !== false && token.key)?.key || apiToken || null;
|
||||
const existing = db.select().from(schema.accounts)
|
||||
const existing = await db.select().from(schema.accounts)
|
||||
.where(and(eq(schema.accounts.siteId, siteId), eq(schema.accounts.username, username)))
|
||||
.get();
|
||||
|
||||
@@ -374,7 +374,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
// Create or update account
|
||||
let accountId = existing?.id;
|
||||
if (existing) {
|
||||
db.update(schema.accounts).set({
|
||||
await db.update(schema.accounts).set({
|
||||
accessToken: loginResult.accessToken,
|
||||
apiToken: preferredApiToken || undefined,
|
||||
checkinEnabled: true,
|
||||
@@ -383,7 +383,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).where(eq(schema.accounts.id, existing.id)).run();
|
||||
} else {
|
||||
const created = db.insert(schema.accounts).values({
|
||||
const inserted = await db.insert(schema.accounts).values({
|
||||
siteId,
|
||||
username,
|
||||
accessToken: loginResult.accessToken,
|
||||
@@ -391,23 +391,24 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
checkinEnabled: true,
|
||||
extraConfig,
|
||||
isPinned: false,
|
||||
sortOrder: getNextAccountSortOrder(),
|
||||
}).returning().get();
|
||||
accountId = created.id;
|
||||
sortOrder: await getNextAccountSortOrder(),
|
||||
}).run();
|
||||
const insertedId = Number(inserted.lastInsertRowid || 0);
|
||||
accountId = insertedId > 0 ? insertedId : undefined;
|
||||
}
|
||||
|
||||
const result = db.select().from(schema.accounts).where(eq(schema.accounts.id, accountId!)).get();
|
||||
const result = await db.select().from(schema.accounts).where(eq(schema.accounts.id, accountId!)).get();
|
||||
if (!result) {
|
||||
return { success: false, message: 'account create failed' };
|
||||
}
|
||||
|
||||
if (apiTokens.length > 0) {
|
||||
try {
|
||||
syncTokensFromUpstream(result.id, apiTokens);
|
||||
await syncTokensFromUpstream(result.id, apiTokens);
|
||||
} catch { }
|
||||
} else if (preferredApiToken) {
|
||||
try {
|
||||
ensureDefaultTokenForAccount(result.id, preferredApiToken, { name: 'default', source: 'sync' });
|
||||
await ensureDefaultTokenForAccount(result.id, preferredApiToken, { name: 'default', source: 'sync' });
|
||||
} catch { }
|
||||
}
|
||||
|
||||
@@ -415,10 +416,10 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
try { await refreshBalance(result.id); } catch { }
|
||||
try {
|
||||
await refreshModelsForAccount(result.id);
|
||||
rebuildTokenRoutesFromAvailability();
|
||||
await rebuildTokenRoutesFromAvailability();
|
||||
} catch { }
|
||||
|
||||
const account = db.select().from(schema.accounts).where(eq(schema.accounts.id, result.id)).get();
|
||||
const account = await db.select().from(schema.accounts).where(eq(schema.accounts.id, result.id)).get();
|
||||
return {
|
||||
success: true,
|
||||
account,
|
||||
@@ -433,7 +434,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
const { siteId, platformUserId } = request.body;
|
||||
const accessToken = (request.body.accessToken || '').trim();
|
||||
const credentialMode = resolveRequestedCredentialMode(request.body.credentialMode);
|
||||
const site = db.select().from(schema.sites).where(eq(schema.sites.id, siteId)).get();
|
||||
const site = await db.select().from(schema.sites).where(eq(schema.sites.id, siteId)).get();
|
||||
if (!site) return { success: false, message: 'site not found' };
|
||||
|
||||
if (!accessToken) {
|
||||
@@ -607,7 +608,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '请提供新的 Session Token' });
|
||||
}
|
||||
|
||||
const row = db.select()
|
||||
const row = await db.select()
|
||||
.from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
@@ -686,11 +687,11 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
}
|
||||
updates.extraConfig = mergeAccountExtraConfig(account.extraConfig, extraConfigPatch);
|
||||
|
||||
db.update(schema.accounts).set(updates).where(eq(schema.accounts.id, accountId)).run();
|
||||
await db.update(schema.accounts).set(updates).where(eq(schema.accounts.id, accountId)).run();
|
||||
|
||||
if (nextApiToken) {
|
||||
try {
|
||||
ensureDefaultTokenForAccount(accountId, nextApiToken, { name: 'default', source: 'sync' });
|
||||
await ensureDefaultTokenForAccount(accountId, nextApiToken, { name: 'default', source: 'sync' });
|
||||
} catch {}
|
||||
}
|
||||
|
||||
@@ -699,10 +700,10 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
} catch {}
|
||||
try {
|
||||
await refreshModelsForAccount(accountId);
|
||||
rebuildTokenRoutesFromAvailability();
|
||||
await rebuildTokenRoutesFromAvailability();
|
||||
} catch {}
|
||||
|
||||
const latest = db.select().from(schema.accounts).where(eq(schema.accounts.id, accountId)).get();
|
||||
const latest = await db.select().from(schema.accounts).where(eq(schema.accounts.id, accountId)).get();
|
||||
return {
|
||||
success: true,
|
||||
account: latest,
|
||||
@@ -717,7 +718,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
// Add an account (manual credential input)
|
||||
app.post<{ Body: { siteId: number; username?: string; accessToken: string; apiToken?: string; platformUserId?: number; checkinEnabled?: boolean; credentialMode?: AccountCredentialMode; refreshToken?: string; tokenExpiresAt?: number | string } }>('/api/accounts', async (request, reply) => {
|
||||
const body = request.body;
|
||||
const site = db.select().from(schema.sites).where(eq(schema.sites.id, body.siteId)).get();
|
||||
const site = await db.select().from(schema.sites).where(eq(schema.sites.id, body.siteId)).get();
|
||||
if (!site) {
|
||||
return reply.code(400).send({ success: false, message: 'site not found' });
|
||||
}
|
||||
@@ -821,7 +822,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
}
|
||||
const extraConfig = mergeAccountExtraConfig(undefined, extraConfigPatch);
|
||||
|
||||
const result = db.insert(schema.accounts).values({
|
||||
const inserted = await db.insert(schema.accounts).values({
|
||||
siteId: body.siteId,
|
||||
username: username || undefined,
|
||||
accessToken,
|
||||
@@ -829,12 +830,20 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
checkinEnabled: tokenType === 'session' ? (body.checkinEnabled ?? true) : false,
|
||||
extraConfig,
|
||||
isPinned: false,
|
||||
sortOrder: getNextAccountSortOrder(),
|
||||
}).returning().get();
|
||||
sortOrder: await getNextAccountSortOrder(),
|
||||
}).run();
|
||||
const insertedId = Number(inserted.lastInsertRowid || 0);
|
||||
if (insertedId <= 0) {
|
||||
return reply.code(500).send({ success: false, message: '创建账号失败' });
|
||||
}
|
||||
const result = await db.select().from(schema.accounts).where(eq(schema.accounts.id, insertedId)).get();
|
||||
if (!result) {
|
||||
return reply.code(500).send({ success: false, message: '创建账号失败' });
|
||||
}
|
||||
|
||||
if (apiToken) {
|
||||
try {
|
||||
ensureDefaultTokenForAccount(result.id, apiToken, { name: 'default', source: 'manual' });
|
||||
await ensureDefaultTokenForAccount(result.id, apiToken, { name: 'default', source: 'manual' });
|
||||
} catch { }
|
||||
}
|
||||
|
||||
@@ -842,7 +851,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
try {
|
||||
const syncedTokens = await adapter.getApiTokens(site.url, accessToken, resolvedPlatformUserId);
|
||||
if (syncedTokens.length > 0) {
|
||||
syncTokensFromUpstream(result.id, syncedTokens);
|
||||
await syncTokensFromUpstream(result.id, syncedTokens);
|
||||
}
|
||||
} catch { }
|
||||
}
|
||||
@@ -853,10 +862,10 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
}
|
||||
try {
|
||||
await refreshModelsForAccount(result.id);
|
||||
rebuildTokenRoutesFromAvailability();
|
||||
await rebuildTokenRoutesFromAvailability();
|
||||
} catch { }
|
||||
|
||||
const account = db.select().from(schema.accounts).where(eq(schema.accounts.id, result.id)).get();
|
||||
const account = await db.select().from(schema.accounts).where(eq(schema.accounts.id, result.id)).get();
|
||||
const finalCredentialMode = account ? resolveStoredCredentialMode(account) : resolvedCredentialMode;
|
||||
const capabilities = account
|
||||
? buildCapabilitiesForAccount(account)
|
||||
@@ -876,7 +885,7 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
app.put<{ Params: { id: string }; Body: any }>('/api/accounts/:id', async (request, reply) => {
|
||||
const id = parseInt(request.params.id);
|
||||
const body = request.body as Record<string, unknown>;
|
||||
const row = db.select()
|
||||
const row = await db.select()
|
||||
.from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.accounts.id, id))
|
||||
@@ -933,28 +942,28 @@ export async function accountsRoutes(app: FastifyInstance) {
|
||||
}
|
||||
|
||||
updates.updatedAt = new Date().toISOString();
|
||||
db.update(schema.accounts).set(updates).where(eq(schema.accounts.id, id)).run();
|
||||
await db.update(schema.accounts).set(updates).where(eq(schema.accounts.id, id)).run();
|
||||
|
||||
if (typeof updates.apiToken === 'string' && updates.apiToken.trim()) {
|
||||
try {
|
||||
ensureDefaultTokenForAccount(id, updates.apiToken, { name: 'default', source: 'manual' });
|
||||
await ensureDefaultTokenForAccount(id, updates.apiToken, { name: 'default', source: 'manual' });
|
||||
} catch { }
|
||||
}
|
||||
|
||||
try {
|
||||
await refreshModelsForAccount(id);
|
||||
rebuildTokenRoutesFromAvailability();
|
||||
await rebuildTokenRoutesFromAvailability();
|
||||
} catch { }
|
||||
|
||||
return db.select().from(schema.accounts).where(eq(schema.accounts.id, id)).get();
|
||||
return await db.select().from(schema.accounts).where(eq(schema.accounts.id, id)).get();
|
||||
});
|
||||
|
||||
// Delete an account
|
||||
app.delete<{ Params: { id: string } }>('/api/accounts/:id', async (request) => {
|
||||
const id = parseInt(request.params.id);
|
||||
db.delete(schema.accounts).where(eq(schema.accounts.id, id)).run();
|
||||
await db.delete(schema.accounts).where(eq(schema.accounts.id, id)).run();
|
||||
try {
|
||||
rebuildTokenRoutesFromAvailability();
|
||||
await rebuildTokenRoutesFromAvailability();
|
||||
} catch { }
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
@@ -41,20 +41,20 @@ describe('accounts verify-token shield detection', () => {
|
||||
await app.register(routesModule.accountsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
verifyTokenMock.mockReset();
|
||||
undiciFetchMock.mockReset();
|
||||
adapterPlatformName = 'new-api';
|
||||
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -65,7 +65,7 @@ describe('accounts verify-token shield detection', () => {
|
||||
it('returns rebind hint when verify-token reports invalid access token', async () => {
|
||||
verifyTokenMock.mockRejectedValueOnce(new Error('invalid access token'));
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'AnyRouter',
|
||||
url: 'https://anyrouter.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -96,7 +96,7 @@ describe('accounts verify-token shield detection', () => {
|
||||
},
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'AnyRouter',
|
||||
url: 'https://anyrouter.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -129,7 +129,7 @@ describe('accounts verify-token shield detection', () => {
|
||||
},
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'AnyRouter Alias',
|
||||
url: 'https://anyrouter-alias.example.com',
|
||||
platform: 'newapi',
|
||||
@@ -165,7 +165,7 @@ describe('accounts verify-token shield detection', () => {
|
||||
},
|
||||
});
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Legacy Shielded',
|
||||
url: 'https://legacy-shield.example.com',
|
||||
platform: 'one-api',
|
||||
|
||||
@@ -21,18 +21,18 @@ export async function authRoutes(app: FastifyInstance) {
|
||||
}
|
||||
|
||||
// Save to settings table
|
||||
const existing = db.select().from(schema.settings).where(eq(schema.settings.key, 'auth_token')).get();
|
||||
const existing = await db.select().from(schema.settings).where(eq(schema.settings.key, 'auth_token')).get();
|
||||
if (existing) {
|
||||
db.update(schema.settings).set({ value: JSON.stringify(newToken) }).where(eq(schema.settings.key, 'auth_token')).run();
|
||||
await db.update(schema.settings).set({ value: JSON.stringify(newToken) }).where(eq(schema.settings.key, 'auth_token')).run();
|
||||
} else {
|
||||
db.insert(schema.settings).values({ key: 'auth_token', value: JSON.stringify(newToken) }).run();
|
||||
await db.insert(schema.settings).values({ key: 'auth_token', value: JSON.stringify(newToken) }).run();
|
||||
}
|
||||
|
||||
// Update runtime config
|
||||
config.authToken = newToken;
|
||||
|
||||
try {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'token',
|
||||
title: '管理员登录令牌已更新',
|
||||
message: '管理员登录 Token 已被修改,请使用新 Token 登录。',
|
||||
|
||||
@@ -126,7 +126,7 @@ export async function checkinRoutes(app: FastifyInstance) {
|
||||
query = query.where(eq(schema.checkinLogs.accountId, parseInt(request.query.accountId, 10))) as any;
|
||||
}
|
||||
|
||||
const rows = query.all();
|
||||
const rows = await query.all();
|
||||
return rows.map((row: any) => {
|
||||
const source = row?.checkin_logs || row;
|
||||
const failureReason = classifyFailureReason({
|
||||
@@ -144,7 +144,7 @@ export async function checkinRoutes(app: FastifyInstance) {
|
||||
app.put<{ Body: { cron: string } }>('/api/checkin/schedule', async (request) => {
|
||||
try {
|
||||
updateCheckinCron(request.body.cron);
|
||||
db.insert(schema.settings).values({ key: 'checkin_cron', value: JSON.stringify(request.body.cron) })
|
||||
await db.insert(schema.settings).values({ key: 'checkin_cron', value: JSON.stringify(request.body.cron) })
|
||||
.onConflictDoUpdate({ target: schema.settings.key, set: { value: JSON.stringify(request.body.cron) } }).run();
|
||||
return { success: true, cron: request.body.cron };
|
||||
} catch (err: any) {
|
||||
|
||||
@@ -28,7 +28,7 @@ export async function downstreamApiKeysRoutes(app: FastifyInstance) {
|
||||
app.get('/api/downstream-keys', async () => {
|
||||
return {
|
||||
success: true,
|
||||
items: listDownstreamApiKeys(),
|
||||
items: await listDownstreamApiKeys(),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -66,7 +66,7 @@ export async function downstreamApiKeysRoutes(app: FastifyInstance) {
|
||||
const nowIso = new Date().toISOString();
|
||||
|
||||
try {
|
||||
const inserted = db.insert(schema.downstreamApiKeys).values({
|
||||
const insertedResult = await db.insert(schema.downstreamApiKeys).values({
|
||||
name: normalized.name,
|
||||
key: normalized.key,
|
||||
description: normalized.description,
|
||||
@@ -81,7 +81,17 @@ export async function downstreamApiKeysRoutes(app: FastifyInstance) {
|
||||
siteWeightMultipliers: toPersistenceJson(normalized.siteWeightMultipliers),
|
||||
createdAt: nowIso,
|
||||
updatedAt: nowIso,
|
||||
}).returning().get();
|
||||
}).run();
|
||||
const insertedId = Number(insertedResult.lastInsertRowid || 0);
|
||||
if (insertedId <= 0) {
|
||||
return reply.code(500).send({ success: false, message: '创建失败' });
|
||||
}
|
||||
const inserted = await db.select().from(schema.downstreamApiKeys)
|
||||
.where(eq(schema.downstreamApiKeys.id, insertedId))
|
||||
.get();
|
||||
if (!inserted) {
|
||||
return reply.code(500).send({ success: false, message: '创建失败' });
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
@@ -115,7 +125,7 @@ export async function downstreamApiKeysRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: 'id 无效' });
|
||||
}
|
||||
|
||||
const existing = db.select().from(schema.downstreamApiKeys)
|
||||
const existing = await db.select().from(schema.downstreamApiKeys)
|
||||
.where(eq(schema.downstreamApiKeys.id, id))
|
||||
.get();
|
||||
|
||||
@@ -154,7 +164,7 @@ export async function downstreamApiKeysRoutes(app: FastifyInstance) {
|
||||
|
||||
const nowIso = new Date().toISOString();
|
||||
try {
|
||||
db.update(schema.downstreamApiKeys).set({
|
||||
await db.update(schema.downstreamApiKeys).set({
|
||||
name: normalized.name,
|
||||
key: normalized.key,
|
||||
description: normalized.description,
|
||||
@@ -192,7 +202,7 @@ export async function downstreamApiKeysRoutes(app: FastifyInstance) {
|
||||
return reply.code(404).send({ success: false, message: 'API key 不存在' });
|
||||
}
|
||||
|
||||
db.update(schema.downstreamApiKeys).set({
|
||||
await db.update(schema.downstreamApiKeys).set({
|
||||
usedCost: 0,
|
||||
usedRequests: 0,
|
||||
updatedAt: new Date().toISOString(),
|
||||
@@ -215,7 +225,7 @@ export async function downstreamApiKeysRoutes(app: FastifyInstance) {
|
||||
return reply.code(404).send({ success: false, message: 'API key 不存在' });
|
||||
}
|
||||
|
||||
db.delete(schema.downstreamApiKeys)
|
||||
await db.delete(schema.downstreamApiKeys)
|
||||
.where(eq(schema.downstreamApiKeys.id, id))
|
||||
.run();
|
||||
|
||||
|
||||
@@ -10,14 +10,14 @@ export async function eventsRoutes(app: FastifyInstance) {
|
||||
const type = request.query.type;
|
||||
const readQuery = request.query.read;
|
||||
|
||||
const filters = [];
|
||||
const filters: any[] = [];
|
||||
if (type) filters.push(eq(schema.events.type, type));
|
||||
if (readQuery === 'true') filters.push(eq(schema.events.read, true));
|
||||
if (readQuery === 'false') filters.push(eq(schema.events.read, false));
|
||||
|
||||
const base = db.select().from(schema.events);
|
||||
if (filters.length > 0) {
|
||||
return base
|
||||
return await base
|
||||
.where(and(...filters))
|
||||
.orderBy(desc(schema.events.createdAt))
|
||||
.limit(limit)
|
||||
@@ -25,7 +25,7 @@ export async function eventsRoutes(app: FastifyInstance) {
|
||||
.all();
|
||||
}
|
||||
|
||||
return base
|
||||
return await base
|
||||
.orderBy(desc(schema.events.createdAt))
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
@@ -34,7 +34,7 @@ export async function eventsRoutes(app: FastifyInstance) {
|
||||
|
||||
// Unread count
|
||||
app.get('/api/events/count', async () => {
|
||||
const result = db.select({ count: sql<number>`count(*)` }).from(schema.events)
|
||||
const result = await db.select({ count: sql<number>`count(*)` }).from(schema.events)
|
||||
.where(eq(schema.events.read, false)).get();
|
||||
return { count: result?.count || 0 };
|
||||
});
|
||||
@@ -42,19 +42,19 @@ export async function eventsRoutes(app: FastifyInstance) {
|
||||
// Mark one as read
|
||||
app.post<{ Params: { id: string } }>('/api/events/:id/read', async (request) => {
|
||||
const id = parseInt(request.params.id);
|
||||
db.update(schema.events).set({ read: true }).where(eq(schema.events.id, id)).run();
|
||||
await db.update(schema.events).set({ read: true }).where(eq(schema.events.id, id)).run();
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
// Mark all as read
|
||||
app.post('/api/events/read-all', async () => {
|
||||
db.update(schema.events).set({ read: true }).where(eq(schema.events.read, false)).run();
|
||||
await db.update(schema.events).set({ read: true }).where(eq(schema.events.read, false)).run();
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
// Clear all events
|
||||
app.delete('/api/events', async () => {
|
||||
db.delete(schema.events).run();
|
||||
await db.delete(schema.events).run();
|
||||
return { success: true };
|
||||
});
|
||||
}
|
||||
|
||||
@@ -7,8 +7,8 @@ const MONITOR_AUTH_COOKIE = 'meta_monitor_auth';
|
||||
const LDOH_BASE_URL = 'https://ldoh.105117.xyz';
|
||||
const LDOH_COOKIE_SETTING_KEY = 'monitor_ldoh_cookie';
|
||||
|
||||
function upsertSetting(key: string, value: unknown) {
|
||||
db.insert(schema.settings)
|
||||
async function upsertSetting(key: string, value: unknown) {
|
||||
await db.insert(schema.settings)
|
||||
.values({ key, value: JSON.stringify(value) })
|
||||
.onConflictDoUpdate({
|
||||
target: schema.settings.key,
|
||||
@@ -17,8 +17,8 @@ function upsertSetting(key: string, value: unknown) {
|
||||
.run();
|
||||
}
|
||||
|
||||
function getSettingString(key: string): string {
|
||||
const row = db.select().from(schema.settings).where(eq(schema.settings.key, key)).get();
|
||||
async function getSettingString(key: string): Promise<string> {
|
||||
const row = await db.select().from(schema.settings).where(eq(schema.settings.key, key)).get();
|
||||
if (!row?.value) return '';
|
||||
try {
|
||||
const parsed = JSON.parse(row.value);
|
||||
@@ -111,7 +111,7 @@ function resolveLdohProxyPath(request: FastifyRequest): string {
|
||||
|
||||
export async function monitorRoutes(app: FastifyInstance) {
|
||||
app.get('/api/monitor/config', async () => {
|
||||
const ldohCookie = getSettingString(LDOH_COOKIE_SETTING_KEY);
|
||||
const ldohCookie = await getSettingString(LDOH_COOKIE_SETTING_KEY);
|
||||
return {
|
||||
ldohCookieConfigured: !!ldohCookie,
|
||||
ldohCookieMasked: ldohCookie ? maskCookieValue(ldohCookie) : '',
|
||||
@@ -121,7 +121,7 @@ export async function monitorRoutes(app: FastifyInstance) {
|
||||
app.put<{ Body: { ldohCookie?: string | null } }>('/api/monitor/config', async (request, reply) => {
|
||||
const raw = String(request.body?.ldohCookie || '').trim();
|
||||
if (!raw) {
|
||||
upsertSetting(LDOH_COOKIE_SETTING_KEY, '');
|
||||
await upsertSetting(LDOH_COOKIE_SETTING_KEY, '');
|
||||
return { success: true, message: 'LDOH Cookie 已清空', ldohCookieConfigured: false };
|
||||
}
|
||||
|
||||
@@ -130,7 +130,7 @@ export async function monitorRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: 'Cookie 格式无效,请填写 ld_auth_session 或其值' });
|
||||
}
|
||||
|
||||
upsertSetting(LDOH_COOKIE_SETTING_KEY, normalized);
|
||||
await upsertSetting(LDOH_COOKIE_SETTING_KEY, normalized);
|
||||
return {
|
||||
success: true,
|
||||
message: 'LDOH Cookie 已保存',
|
||||
@@ -151,7 +151,7 @@ export async function monitorRoutes(app: FastifyInstance) {
|
||||
const handleLdohProxy = async (request: FastifyRequest, reply: FastifyReply) => {
|
||||
if (!ensureMonitorAuth(request, reply)) return;
|
||||
|
||||
const storedCookie = getSettingString(LDOH_COOKIE_SETTING_KEY);
|
||||
const storedCookie = await getSettingString(LDOH_COOKIE_SETTING_KEY);
|
||||
if (!storedCookie) {
|
||||
return reply.code(400).send('LDOH cookie not configured');
|
||||
}
|
||||
|
||||
@@ -13,11 +13,11 @@ export async function searchRoutes(app: FastifyInstance) {
|
||||
const perCategory = Math.min(Math.ceil(limit / 5), 10);
|
||||
|
||||
// Search sites
|
||||
const sites = db.select().from(schema.sites)
|
||||
const sites = (await db.select().from(schema.sites)
|
||||
.where(like(schema.sites.name, q))
|
||||
.limit(perCategory).all()
|
||||
.limit(perCategory).all())
|
||||
.concat(
|
||||
db.select().from(schema.sites)
|
||||
await db.select().from(schema.sites)
|
||||
.where(like(schema.sites.url, q))
|
||||
.limit(perCategory).all()
|
||||
);
|
||||
@@ -25,28 +25,28 @@ export async function searchRoutes(app: FastifyInstance) {
|
||||
const uniqueSites = [...new Map(sites.map(s => [s.id, s])).values()].slice(0, perCategory);
|
||||
|
||||
// Search accounts (join with sites for site name)
|
||||
const accountResults = db.select().from(schema.accounts)
|
||||
const accountResults = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(like(schema.accounts.username, q))
|
||||
.limit(perCategory).all();
|
||||
const accounts = accountResults.map(r => ({ ...r.accounts, site: r.sites }));
|
||||
|
||||
// Search checkin logs (by message)
|
||||
const checkinLogs = db.select().from(schema.checkinLogs)
|
||||
const checkinLogs = (await db.select().from(schema.checkinLogs)
|
||||
.innerJoin(schema.accounts, eq(schema.checkinLogs.accountId, schema.accounts.id))
|
||||
.where(like(schema.checkinLogs.message, q))
|
||||
.orderBy(desc(schema.checkinLogs.createdAt))
|
||||
.limit(perCategory).all()
|
||||
.limit(perCategory).all())
|
||||
.map(r => ({ ...r.checkin_logs, account: r.accounts }));
|
||||
|
||||
// Search proxy logs (by model name)
|
||||
const proxyLogs = db.select().from(schema.proxyLogs)
|
||||
const proxyLogs = await db.select().from(schema.proxyLogs)
|
||||
.where(like(schema.proxyLogs.modelRequested, q))
|
||||
.orderBy(desc(schema.proxyLogs.createdAt))
|
||||
.limit(perCategory).all();
|
||||
|
||||
// Search models (only keep routable items)
|
||||
const modelRows = db.select({
|
||||
const modelRows = await db.select({
|
||||
modelName: schema.tokenModelAvailability.modelName,
|
||||
tokenId: schema.accountTokens.id,
|
||||
accountId: schema.accounts.id,
|
||||
|
||||
@@ -0,0 +1,142 @@
|
||||
import Fastify, { type FastifyInstance } from 'fastify';
|
||||
import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest';
|
||||
import { mkdtempSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import Database from 'better-sqlite3';
|
||||
|
||||
type DbModule = typeof import('../../db/index.js');
|
||||
|
||||
describe('settings database migration api', () => {
|
||||
let app: FastifyInstance;
|
||||
let db: DbModule['db'];
|
||||
let schema: DbModule['schema'];
|
||||
let dataDir = '';
|
||||
|
||||
beforeAll(async () => {
|
||||
dataDir = mkdtempSync(join(tmpdir(), 'metapi-settings-db-migration-'));
|
||||
process.env.DATA_DIR = dataDir;
|
||||
|
||||
await import('../../db/migrate.js');
|
||||
const dbModule = await import('../../db/index.js');
|
||||
const settingsRoutesModule = await import('./settings.js');
|
||||
|
||||
db = dbModule.db;
|
||||
schema = dbModule.schema;
|
||||
|
||||
app = Fastify();
|
||||
await app.register(settingsRoutesModule.settingsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
await db.delete(schema.settings).run();
|
||||
await db.delete(schema.events).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await app.close();
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
it('tests sqlite target connection from settings api', async () => {
|
||||
const targetPath = join(dataDir, 'target-connect.db');
|
||||
const response = await app.inject({
|
||||
method: 'POST',
|
||||
url: '/api/settings/database/test-connection',
|
||||
payload: {
|
||||
dialect: 'sqlite',
|
||||
connectionString: targetPath,
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
const body = response.json() as { success?: boolean; dialect?: string };
|
||||
expect(body.success).toBe(true);
|
||||
expect(body.dialect).toBe('sqlite');
|
||||
});
|
||||
|
||||
it('migrates current sqlite data to another sqlite file via settings api', async () => {
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'Target Site',
|
||||
url: 'https://example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
proxyUrl: null,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).returning().get();
|
||||
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'u1',
|
||||
accessToken: 'token-1',
|
||||
apiToken: null,
|
||||
status: 'active',
|
||||
checkinEnabled: true,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).returning().get();
|
||||
|
||||
await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'default',
|
||||
token: 'sk-test',
|
||||
source: 'manual',
|
||||
enabled: true,
|
||||
isDefault: true,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).run();
|
||||
|
||||
await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-4o-mini',
|
||||
modelMapping: null,
|
||||
enabled: true,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).run();
|
||||
|
||||
await db.insert(schema.settings).values({
|
||||
key: 'routing_fallback_unit_cost',
|
||||
value: JSON.stringify(0.25),
|
||||
}).run();
|
||||
|
||||
const targetPath = join(dataDir, 'target-migrate.db');
|
||||
const response = await app.inject({
|
||||
method: 'POST',
|
||||
url: '/api/settings/database/migrate',
|
||||
payload: {
|
||||
dialect: 'sqlite',
|
||||
connectionString: targetPath,
|
||||
overwrite: true,
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
const body = response.json() as { success?: boolean; rows?: Record<string, number> };
|
||||
expect(body.success).toBe(true);
|
||||
expect(body.rows?.sites).toBe(1);
|
||||
expect(body.rows?.accounts).toBe(1);
|
||||
expect(body.rows?.accountTokens).toBe(1);
|
||||
expect(body.rows?.settings).toBe(1);
|
||||
|
||||
const targetDb = new Database(targetPath);
|
||||
try {
|
||||
const targetSites = targetDb.prepare('SELECT COUNT(*) AS cnt FROM sites').get() as { cnt: number };
|
||||
const targetAccounts = targetDb.prepare('SELECT COUNT(*) AS cnt FROM accounts').get() as { cnt: number };
|
||||
const targetSettings = targetDb.prepare('SELECT COUNT(*) AS cnt FROM settings').get() as { cnt: number };
|
||||
|
||||
expect(targetSites.cnt).toBe(1);
|
||||
expect(targetAccounts.cnt).toBe(1);
|
||||
expect(targetSettings.cnt).toBe(1);
|
||||
} finally {
|
||||
targetDb.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -34,9 +34,9 @@ describe('settings and auth events', () => {
|
||||
await app.register(authRoutesModule.authRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.events).run();
|
||||
db.delete(schema.settings).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.events).run();
|
||||
await db.delete(schema.settings).run();
|
||||
|
||||
config.authToken = 'old-admin-token-123';
|
||||
config.proxyToken = 'sk-old-proxy-token-123';
|
||||
@@ -62,7 +62,7 @@ describe('settings and auth events', () => {
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
|
||||
const events = db.select().from(schema.events).all();
|
||||
const events = await db.select().from(schema.events).all();
|
||||
expect(events.length).toBe(1);
|
||||
expect(events[0]).toMatchObject({
|
||||
type: 'status',
|
||||
@@ -178,7 +178,7 @@ describe('settings and auth events', () => {
|
||||
expect(updated.routingFallbackUnitCost).toBe(0.25);
|
||||
expect(config.routingFallbackUnitCost).toBe(0.25);
|
||||
|
||||
const saved = db.select().from(schema.settings).where(eq(schema.settings.key, 'routing_fallback_unit_cost')).get();
|
||||
const saved = await db.select().from(schema.settings).where(eq(schema.settings.key, 'routing_fallback_unit_cost')).get();
|
||||
expect(saved).toBeTruthy();
|
||||
expect(saved?.value).toBe(JSON.stringify(0.25));
|
||||
|
||||
@@ -206,7 +206,7 @@ describe('settings and auth events', () => {
|
||||
expect(body.message).toContain('白名单');
|
||||
expect(body.message).toContain('198.51.100.10');
|
||||
|
||||
const saved = db.select().from(schema.settings).where(eq(schema.settings.key, 'admin_ip_allowlist')).get();
|
||||
const saved = await db.select().from(schema.settings).where(eq(schema.settings.key, 'admin_ip_allowlist')).get();
|
||||
expect(saved).toBeFalsy();
|
||||
});
|
||||
|
||||
@@ -224,7 +224,7 @@ describe('settings and auth events', () => {
|
||||
const body = response.json() as { adminIpAllowlist?: string[] };
|
||||
expect(body.adminIpAllowlist).toEqual(['198.51.100.10', '198.51.100.11']);
|
||||
|
||||
const saved = db.select().from(schema.settings).where(eq(schema.settings.key, 'admin_ip_allowlist')).get();
|
||||
const saved = await db.select().from(schema.settings).where(eq(schema.settings.key, 'admin_ip_allowlist')).get();
|
||||
expect(saved?.value).toBe(JSON.stringify(['198.51.100.10', '198.51.100.11']));
|
||||
});
|
||||
|
||||
@@ -240,7 +240,7 @@ describe('settings and auth events', () => {
|
||||
|
||||
expect(response.statusCode).toBe(200);
|
||||
|
||||
const events = db.select().from(schema.events).all();
|
||||
const events = await db.select().from(schema.events).all();
|
||||
expect(events.length).toBe(1);
|
||||
expect(events[0]).toMatchObject({
|
||||
type: 'token',
|
||||
|
||||
@@ -7,6 +7,7 @@ import { updateBalanceRefreshCron, updateCheckinCron } from '../../services/chec
|
||||
import { sendNotification } from '../../services/notifyService.js';
|
||||
import { exportBackup, importBackup, type BackupExportType } from '../../services/backupService.js';
|
||||
import { startBackgroundTask } from '../../services/backgroundTaskService.js';
|
||||
import { migrateCurrentDatabase, testDatabaseConnection } from '../../services/databaseMigrationService.js';
|
||||
import { extractClientIp, isIpAllowed } from '../../middleware/auth.js';
|
||||
|
||||
type RoutingWeights = typeof config.routingWeights;
|
||||
@@ -38,6 +39,12 @@ interface RuntimeSettingsBody {
|
||||
routingWeights?: Partial<RoutingWeights>;
|
||||
}
|
||||
|
||||
interface DatabaseMigrationBody {
|
||||
dialect?: unknown;
|
||||
connectionString?: unknown;
|
||||
overwrite?: unknown;
|
||||
}
|
||||
|
||||
const PROXY_TOKEN_PREFIX = 'sk-';
|
||||
|
||||
function isValidProxyToken(value: string): boolean {
|
||||
@@ -50,8 +57,8 @@ function maskSecret(value: string): string {
|
||||
return `${value.slice(0, 4)}****${value.slice(-4)}`;
|
||||
}
|
||||
|
||||
function upsertSetting(key: string, value: unknown) {
|
||||
db.insert(schema.settings)
|
||||
async function upsertSetting(key: string, value: unknown) {
|
||||
await db.insert(schema.settings)
|
||||
.values({ key, value: JSON.stringify(value) })
|
||||
.onConflictDoUpdate({
|
||||
target: schema.settings.key,
|
||||
@@ -60,14 +67,14 @@ function upsertSetting(key: string, value: unknown) {
|
||||
.run();
|
||||
}
|
||||
|
||||
function appendSettingsEvent(input: {
|
||||
async function appendSettingsEvent(input: {
|
||||
type: 'checkin' | 'balance' | 'proxy' | 'status' | 'token';
|
||||
title: string;
|
||||
message: string;
|
||||
level?: 'info' | 'warning' | 'error';
|
||||
}) {
|
||||
try {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: input.type,
|
||||
title: input.title,
|
||||
message: input.message,
|
||||
@@ -275,7 +282,7 @@ function getRuntimeSettingsResponse(currentAdminIp = '') {
|
||||
}
|
||||
|
||||
export async function settingsRoutes(app: FastifyInstance) {
|
||||
app.get('/api/settings/runtime', async (request) => {
|
||||
await app.get('/api/settings/runtime', async (request) => {
|
||||
const currentAdminIp = extractClientIp(request.ip, request.headers['x-forwarded-for']);
|
||||
return getRuntimeSettingsResponse(currentAdminIp);
|
||||
});
|
||||
@@ -595,13 +602,50 @@ export async function settingsRoutes(app: FastifyInstance) {
|
||||
};
|
||||
});
|
||||
|
||||
app.get<{ Querystring: { type?: string } }>('/api/settings/backup/export', async (request, reply) => {
|
||||
app.post<{ Body: DatabaseMigrationBody }>('/api/settings/database/test-connection', async (request, reply) => {
|
||||
try {
|
||||
const result = await testDatabaseConnection(request.body || {});
|
||||
return {
|
||||
success: true,
|
||||
message: '目标数据库连接成功',
|
||||
...result,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return reply.code(400).send({
|
||||
success: false,
|
||||
message: err?.message || '数据库连接失败',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
app.post<{ Body: DatabaseMigrationBody }>('/api/settings/database/migrate', async (request, reply) => {
|
||||
try {
|
||||
const result = await migrateCurrentDatabase(request.body || {});
|
||||
appendSettingsEvent({
|
||||
type: 'status',
|
||||
title: '数据库迁移已完成',
|
||||
message: `目标 ${result.dialect},已迁移站点 ${result.rows.sites}、账号 ${result.rows.accounts}、令牌 ${result.rows.accountTokens}、路由 ${result.rows.tokenRoutes}、通道 ${result.rows.routeChannels}、设置 ${result.rows.settings}`,
|
||||
});
|
||||
return {
|
||||
success: true,
|
||||
message: '数据库迁移完成',
|
||||
...result,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return reply.code(400).send({
|
||||
success: false,
|
||||
message: err?.message || '数据库迁移失败',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
await app.get<{ Querystring: { type?: string } }>('/api/settings/backup/export', async (request, reply) => {
|
||||
const rawType = String(request.query.type || 'all').trim().toLowerCase();
|
||||
const type: BackupExportType = rawType === 'accounts' || rawType === 'preferences' ? rawType : 'all';
|
||||
if (rawType && !['all', 'accounts', 'preferences'].includes(rawType)) {
|
||||
return reply.code(400).send({ success: false, message: '导出类型无效,仅支持 all/accounts/preferences' });
|
||||
}
|
||||
return exportBackup(type);
|
||||
return await exportBackup(type);
|
||||
});
|
||||
|
||||
app.post<{ Body: { data?: Record<string, unknown> } }>('/api/settings/backup/import', async (request, reply) => {
|
||||
@@ -611,7 +655,7 @@ export async function settingsRoutes(app: FastifyInstance) {
|
||||
}
|
||||
|
||||
try {
|
||||
const result = importBackup(payload);
|
||||
const result = await importBackup(payload);
|
||||
for (const item of result.appliedSettings) {
|
||||
applyImportedSettingToRuntime(item.key, item.value);
|
||||
}
|
||||
@@ -653,9 +697,9 @@ export async function settingsRoutes(app: FastifyInstance) {
|
||||
});
|
||||
|
||||
app.post('/api/settings/maintenance/clear-cache', async (_, reply) => {
|
||||
const deletedModelAvailability = db.delete(schema.modelAvailability).run().changes;
|
||||
const deletedRouteChannels = db.delete(schema.routeChannels).run().changes;
|
||||
const deletedTokenRoutes = db.delete(schema.tokenRoutes).run().changes;
|
||||
const deletedModelAvailability = (await db.delete(schema.modelAvailability).run()).changes;
|
||||
const deletedRouteChannels = (await db.delete(schema.routeChannels).run()).changes;
|
||||
const deletedTokenRoutes = (await db.delete(schema.tokenRoutes).run()).changes;
|
||||
|
||||
const { task, reused } = startBackgroundTask(
|
||||
{
|
||||
@@ -686,9 +730,9 @@ export async function settingsRoutes(app: FastifyInstance) {
|
||||
});
|
||||
|
||||
app.post('/api/settings/maintenance/clear-usage', async () => {
|
||||
const deletedProxyLogs = db.delete(schema.proxyLogs).run().changes;
|
||||
const deletedProxyLogs = (await db.delete(schema.proxyLogs).run()).changes;
|
||||
|
||||
db.update(schema.routeChannels).set({
|
||||
await db.update(schema.routeChannels).set({
|
||||
successCount: 0,
|
||||
failCount: 0,
|
||||
totalLatencyMs: 0,
|
||||
@@ -698,7 +742,7 @@ export async function settingsRoutes(app: FastifyInstance) {
|
||||
cooldownUntil: null,
|
||||
}).run();
|
||||
|
||||
db.update(schema.accounts).set({
|
||||
await db.update(schema.accounts).set({
|
||||
balanceUsed: 0,
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).run();
|
||||
@@ -717,3 +761,4 @@ export async function settingsRoutes(app: FastifyInstance) {
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -26,9 +26,9 @@ describe('sites proxy url settings', () => {
|
||||
await app.register(routesModule.sitesRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
|
||||
@@ -27,9 +27,9 @@ describe('sites status cascade', () => {
|
||||
await app.register(routesModule.sitesRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -38,13 +38,13 @@ describe('sites status cascade', () => {
|
||||
});
|
||||
|
||||
it('disables and re-enables related accounts with site status', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'status-site',
|
||||
url: 'https://status-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'status-user',
|
||||
accessToken: 'access-token',
|
||||
@@ -58,7 +58,7 @@ describe('sites status cascade', () => {
|
||||
});
|
||||
expect(disableResp.statusCode).toBe(200);
|
||||
|
||||
const disabledAccount = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const disabledAccount = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
expect(disabledAccount?.status).toBe('disabled');
|
||||
|
||||
const enableResp = await app.inject({
|
||||
@@ -68,7 +68,7 @@ describe('sites status cascade', () => {
|
||||
});
|
||||
expect(enableResp.statusCode).toBe(200);
|
||||
|
||||
const enabledAccount = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const enabledAccount = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
expect(enabledAccount?.status).toBe('active');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -71,8 +71,8 @@ function normalizeOptionalExternalCheckinUrl(input: unknown): {
|
||||
export async function sitesRoutes(app: FastifyInstance) {
|
||||
// List all sites
|
||||
app.get('/api/sites', async () => {
|
||||
const siteRows = db.select().from(schema.sites).all();
|
||||
const accountBalanceRows = db.select({
|
||||
const siteRows = await db.select().from(schema.sites).all();
|
||||
const accountBalanceRows = await db.select({
|
||||
siteId: schema.accounts.siteId,
|
||||
totalBalance: sql<number>`coalesce(sum(${schema.accounts.balance}), 0)`,
|
||||
}).from(schema.accounts)
|
||||
@@ -129,7 +129,7 @@ export async function sitesRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ error: 'Invalid globalWeight value. Expected a positive number.' });
|
||||
}
|
||||
|
||||
const existingSites = db.select().from(schema.sites).all();
|
||||
const existingSites = await db.select().from(schema.sites).all();
|
||||
const maxSortOrder = existingSites.reduce((max, site) => Math.max(max, site.sortOrder || 0), -1);
|
||||
|
||||
let detectedPlatform = platform;
|
||||
@@ -140,7 +140,7 @@ export async function sitesRoutes(app: FastifyInstance) {
|
||||
if (!detectedPlatform) {
|
||||
return { error: 'Could not detect platform. Please specify manually.' };
|
||||
}
|
||||
const result = db.insert(schema.sites).values({
|
||||
const inserted = await db.insert(schema.sites).values({
|
||||
name,
|
||||
url: url.replace(/\/+$/, ''),
|
||||
platform: detectedPlatform,
|
||||
@@ -151,7 +151,15 @@ export async function sitesRoutes(app: FastifyInstance) {
|
||||
isPinned: normalizedPinned ?? false,
|
||||
sortOrder: normalizedSortOrder ?? (maxSortOrder + 1),
|
||||
globalWeight: normalizedGlobalWeight ?? 1,
|
||||
}).returning().get();
|
||||
}).run();
|
||||
const siteId = Number(inserted.lastInsertRowid || 0);
|
||||
if (siteId <= 0) {
|
||||
return reply.code(500).send({ error: 'Create site failed' });
|
||||
}
|
||||
const result = await db.select().from(schema.sites).where(eq(schema.sites.id, siteId)).get();
|
||||
if (!result) {
|
||||
return reply.code(500).send({ error: 'Create site failed' });
|
||||
}
|
||||
invalidateSiteProxyCache();
|
||||
return result;
|
||||
});
|
||||
@@ -174,7 +182,7 @@ export async function sitesRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ error: 'Invalid site id' });
|
||||
}
|
||||
|
||||
const existingSite = db.select().from(schema.sites).where(eq(schema.sites.id, id)).get();
|
||||
const existingSite = await db.select().from(schema.sites).where(eq(schema.sites.id, id)).get();
|
||||
if (!existingSite) {
|
||||
return reply.code(404).send({ error: 'Site not found' });
|
||||
}
|
||||
@@ -217,19 +225,19 @@ export async function sitesRoutes(app: FastifyInstance) {
|
||||
if (body.sortOrder !== undefined) updates.sortOrder = normalizedSortOrder;
|
||||
if (body.globalWeight !== undefined) updates.globalWeight = normalizedGlobalWeight;
|
||||
updates.updatedAt = new Date().toISOString();
|
||||
db.update(schema.sites).set(updates).where(eq(schema.sites.id, id)).run();
|
||||
await db.update(schema.sites).set(updates).where(eq(schema.sites.id, id)).run();
|
||||
invalidateSiteProxyCache();
|
||||
|
||||
if (body.status !== undefined && normalizedStatus) {
|
||||
const now = new Date().toISOString();
|
||||
if (normalizedStatus === 'disabled') {
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set({ status: 'disabled', updatedAt: now })
|
||||
.where(eq(schema.accounts.siteId, id))
|
||||
.run();
|
||||
|
||||
try {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'status',
|
||||
title: '站点已禁用',
|
||||
message: `${existingSite.name} 已禁用,关联账号已全部置为禁用`,
|
||||
@@ -239,13 +247,13 @@ export async function sitesRoutes(app: FastifyInstance) {
|
||||
}).run();
|
||||
} catch {}
|
||||
} else {
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set({ status: 'active', updatedAt: now })
|
||||
.where(and(eq(schema.accounts.siteId, id), eq(schema.accounts.status, 'disabled')))
|
||||
.run();
|
||||
|
||||
try {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'status',
|
||||
title: '站点已启用',
|
||||
message: `${existingSite.name} 已启用,关联禁用账号已恢复为活跃`,
|
||||
@@ -257,13 +265,13 @@ export async function sitesRoutes(app: FastifyInstance) {
|
||||
}
|
||||
}
|
||||
|
||||
return db.select().from(schema.sites).where(eq(schema.sites.id, id)).get();
|
||||
return await db.select().from(schema.sites).where(eq(schema.sites.id, id)).get();
|
||||
});
|
||||
|
||||
// Delete a site
|
||||
app.delete<{ Params: { id: string } }>('/api/sites/:id', async (request) => {
|
||||
const id = parseInt(request.params.id);
|
||||
db.delete(schema.sites).where(eq(schema.sites.id, id)).run();
|
||||
await db.delete(schema.sites).where(eq(schema.sites.id, id)).run();
|
||||
invalidateSiteProxyCache();
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
@@ -28,16 +28,16 @@ describe('/api/models/marketplace', () => {
|
||||
await app.register(routesModule.statsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -46,14 +46,14 @@ describe('/api/models/marketplace', () => {
|
||||
});
|
||||
|
||||
it('returns account-level discovered models even when account has no managed tokens', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-no-token',
|
||||
url: 'https://site-no-token.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'alice',
|
||||
accessToken: 'session-token',
|
||||
@@ -61,14 +61,14 @@ describe('/api/models/marketplace', () => {
|
||||
balance: 12.5,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.modelAvailability).values({
|
||||
await db.insert(schema.modelAvailability).values({
|
||||
accountId: account.id,
|
||||
modelName: 'claude-sonnet-4-5-20250929',
|
||||
available: true,
|
||||
latencyMs: 233,
|
||||
}).run();
|
||||
|
||||
const visibleRows = db.select().from(schema.modelAvailability)
|
||||
const visibleRows = await db.select().from(schema.modelAvailability)
|
||||
.innerJoin(schema.accounts, eq(schema.modelAvailability.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(
|
||||
|
||||
@@ -28,16 +28,16 @@ describe('stats dashboard filters disabled sites', () => {
|
||||
await app.register(routesModule.statsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -46,21 +46,21 @@ describe('stats dashboard filters disabled sites', () => {
|
||||
});
|
||||
|
||||
it('excludes disabled-site balances from dashboard totals', async () => {
|
||||
const activeSite = db.insert(schema.sites).values({
|
||||
const activeSite = await db.insert(schema.sites).values({
|
||||
name: 'active-site',
|
||||
url: 'https://active-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const disabledSite = db.insert(schema.sites).values({
|
||||
const disabledSite = await db.insert(schema.sites).values({
|
||||
name: 'disabled-site',
|
||||
url: 'https://disabled-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
db.run(sql`update sites set status = 'disabled' where id = ${disabledSite.id}`);
|
||||
await db.run(sql`update sites set status = 'disabled' where id = ${disabledSite.id}`);
|
||||
|
||||
db.insert(schema.accounts).values({
|
||||
await db.insert(schema.accounts).values({
|
||||
siteId: activeSite.id,
|
||||
username: 'active-user',
|
||||
accessToken: 'active-token',
|
||||
@@ -68,7 +68,7 @@ describe('stats dashboard filters disabled sites', () => {
|
||||
status: 'active',
|
||||
}).run();
|
||||
|
||||
db.insert(schema.accounts).values({
|
||||
await db.insert(schema.accounts).values({
|
||||
siteId: disabledSite.id,
|
||||
username: 'disabled-user',
|
||||
accessToken: 'disabled-token',
|
||||
@@ -95,13 +95,13 @@ describe('stats dashboard filters disabled sites', () => {
|
||||
|
||||
it('treats skipped checkins as successful in dashboard stats', async () => {
|
||||
const today = formatLocalDate(new Date());
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'checkin-site',
|
||||
url: 'https://checkin-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'checkin-user',
|
||||
accessToken: 'token',
|
||||
@@ -109,7 +109,7 @@ describe('stats dashboard filters disabled sites', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.checkinLogs).values([
|
||||
await db.insert(schema.checkinLogs).values([
|
||||
{
|
||||
accountId: account.id,
|
||||
status: 'success',
|
||||
|
||||
@@ -32,16 +32,16 @@ describe('stats dashboard today reward fallback', () => {
|
||||
await app.register(routesModule.statsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -51,12 +51,12 @@ describe('stats dashboard today reward fallback', () => {
|
||||
|
||||
it('uses today income value for dashboard todayReward when reward text is empty', async () => {
|
||||
const today = formatLocalDate(new Date());
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'stats-site',
|
||||
url: 'https://stats-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'stats-user',
|
||||
accessToken: 'token',
|
||||
@@ -71,7 +71,7 @@ describe('stats dashboard today reward fallback', () => {
|
||||
}),
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.checkinLogs).values({
|
||||
await db.insert(schema.checkinLogs).values({
|
||||
accountId: account.id,
|
||||
status: 'success',
|
||||
message: 'checked in',
|
||||
@@ -90,12 +90,12 @@ describe('stats dashboard today reward fallback', () => {
|
||||
});
|
||||
|
||||
it('counts dashboard today spend only inside local-day range', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'stats-spend-site',
|
||||
url: 'https://stats-spend.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'stats-spend-user',
|
||||
accessToken: 'token',
|
||||
@@ -109,7 +109,7 @@ describe('stats dashboard today reward fallback', () => {
|
||||
const inRange = formatUtcSqlDateTime(new Date(startDate.getTime() + 60_000));
|
||||
const afterEnd = formatUtcSqlDateTime(new Date(endDate.getTime() + 60_000));
|
||||
|
||||
db.insert(schema.proxyLogs).values([
|
||||
await db.insert(schema.proxyLogs).values([
|
||||
{
|
||||
accountId: account.id,
|
||||
status: 'success',
|
||||
|
||||
@@ -35,18 +35,18 @@ describe('/api/models/token-candidates', () => {
|
||||
await app.register(routesModule.statsRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
fetchModelPricingCatalogMock.mockReset();
|
||||
fetchModelPricingCatalogMock.mockResolvedValue(null);
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -55,21 +55,21 @@ describe('/api/models/token-candidates', () => {
|
||||
});
|
||||
|
||||
it('returns modelsWithoutToken for models available in account but not covered by enabled tokens', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-a',
|
||||
url: 'https://site-a.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'alice',
|
||||
accessToken: 'acc-token',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const token = db.insert(schema.accountTokens).values({
|
||||
const token = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'default',
|
||||
token: 'tk-default',
|
||||
@@ -77,7 +77,7 @@ describe('/api/models/token-candidates', () => {
|
||||
isDefault: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.modelAvailability).values([
|
||||
await db.insert(schema.modelAvailability).values([
|
||||
{
|
||||
accountId: account.id,
|
||||
modelName: 'claude-haiku-4-5-20251001',
|
||||
@@ -90,7 +90,7 @@ describe('/api/models/token-candidates', () => {
|
||||
},
|
||||
]).run();
|
||||
|
||||
db.insert(schema.tokenModelAvailability).values({
|
||||
await db.insert(schema.tokenModelAvailability).values({
|
||||
tokenId: token.id,
|
||||
modelName: 'claude-haiku-4-5-20251001',
|
||||
available: true,
|
||||
@@ -120,21 +120,21 @@ describe('/api/models/token-candidates', () => {
|
||||
});
|
||||
|
||||
it('returns modelsMissingTokenGroups when account has partial group token coverage', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-b',
|
||||
url: 'https://site-b.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'bob',
|
||||
accessToken: 'acc-token-b',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const defaultToken = db.insert(schema.accountTokens).values({
|
||||
const defaultToken = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'default-token',
|
||||
token: 'sk-default',
|
||||
@@ -143,13 +143,13 @@ describe('/api/models/token-candidates', () => {
|
||||
isDefault: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.modelAvailability).values({
|
||||
await db.insert(schema.modelAvailability).values({
|
||||
accountId: account.id,
|
||||
modelName: 'claude-opus-4-6',
|
||||
available: true,
|
||||
}).run();
|
||||
|
||||
db.insert(schema.tokenModelAvailability).values({
|
||||
await db.insert(schema.tokenModelAvailability).values({
|
||||
tokenId: defaultToken.id,
|
||||
modelName: 'claude-opus-4-6',
|
||||
available: true,
|
||||
@@ -202,21 +202,21 @@ describe('/api/models/token-candidates', () => {
|
||||
});
|
||||
|
||||
it('infers default group from token name when token_group is empty', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-c',
|
||||
url: 'https://site-c.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'charlie',
|
||||
accessToken: 'acc-token-c',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const token = db.insert(schema.accountTokens).values({
|
||||
const token = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'default',
|
||||
token: 'sk-default-c',
|
||||
@@ -225,13 +225,13 @@ describe('/api/models/token-candidates', () => {
|
||||
isDefault: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.modelAvailability).values({
|
||||
await db.insert(schema.modelAvailability).values({
|
||||
accountId: account.id,
|
||||
modelName: 'claude-sonnet-4-5-20250929',
|
||||
available: true,
|
||||
}).run();
|
||||
|
||||
db.insert(schema.tokenModelAvailability).values({
|
||||
await db.insert(schema.tokenModelAvailability).values({
|
||||
tokenId: token.id,
|
||||
modelName: 'claude-sonnet-4-5-20250929',
|
||||
available: true,
|
||||
@@ -283,21 +283,21 @@ describe('/api/models/token-candidates', () => {
|
||||
});
|
||||
|
||||
it('marks coverage as uncertain when token group cannot be inferred', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-d',
|
||||
url: 'https://site-d.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'david',
|
||||
accessToken: 'acc-token-d',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const token = db.insert(schema.accountTokens).values({
|
||||
const token = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'token-1',
|
||||
token: 'sk-token-1',
|
||||
@@ -306,13 +306,13 @@ describe('/api/models/token-candidates', () => {
|
||||
isDefault: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.modelAvailability).values({
|
||||
await db.insert(schema.modelAvailability).values({
|
||||
accountId: account.id,
|
||||
modelName: 'claude-opus-4-5-20251101',
|
||||
available: true,
|
||||
}).run();
|
||||
|
||||
db.insert(schema.tokenModelAvailability).values({
|
||||
await db.insert(schema.tokenModelAvailability).values({
|
||||
tokenId: token.id,
|
||||
modelName: 'claude-opus-4-5-20251101',
|
||||
available: true,
|
||||
|
||||
@@ -71,7 +71,7 @@ function proxyCostSqlExpression() {
|
||||
export async function statsRoutes(app: FastifyInstance) {
|
||||
// Dashboard summary
|
||||
app.get('/api/stats/dashboard', async () => {
|
||||
const accountRows = db.select().from(schema.accounts)
|
||||
const accountRows = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.sites.status, 'active'))
|
||||
.all();
|
||||
@@ -80,7 +80,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
const activeCount = accounts.filter((a) => a.status === 'active').length;
|
||||
|
||||
const { localDay: today, startUtc: todayStartUtc, endUtc: todayEndUtc } = getLocalDayRangeUtc();
|
||||
const todayCheckinRows = db.select().from(schema.checkinLogs)
|
||||
const todayCheckinRows = await db.select().from(schema.checkinLogs)
|
||||
.innerJoin(schema.accounts, eq(schema.checkinLogs.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(
|
||||
@@ -109,13 +109,13 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
const nowTs = Date.now();
|
||||
const last24hDate = formatUtcSqlDateTime(new Date(nowTs - 86400000));
|
||||
const last7dDate = getLocalRangeStartUtc(7);
|
||||
const recentProxyLogs = db.select().from(schema.proxyLogs)
|
||||
const recentProxyLogs = (await db.select().from(schema.proxyLogs)
|
||||
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
|
||||
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(gte(schema.proxyLogs.createdAt, last7dDate), eq(schema.sites.status, 'active')))
|
||||
.all()
|
||||
.all())
|
||||
.map((row) => row.proxy_logs);
|
||||
const totalUsedRow = db.select({
|
||||
const totalUsedRow = await db.select({
|
||||
totalUsed: sql<number>`coalesce(sum(${proxyCostSqlExpression()}), 0)`,
|
||||
})
|
||||
.from(schema.proxyLogs)
|
||||
@@ -123,7 +123,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.sites.status, 'active'))
|
||||
.get();
|
||||
const proxy24hRow = db.select({
|
||||
const proxy24hRow = await db.select({
|
||||
total: sql<number>`count(*)`,
|
||||
success: sql<number>`coalesce(sum(case when ${schema.proxyLogs.status} = 'success' then 1 else 0 end), 0)`,
|
||||
failed: sql<number>`coalesce(sum(case when ${schema.proxyLogs.status} = 'failed' then 1 else 0 end), 0)`,
|
||||
@@ -134,7 +134,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(gte(schema.proxyLogs.createdAt, last24hDate), eq(schema.sites.status, 'active')))
|
||||
.get();
|
||||
const todaySpendRow = db.select({
|
||||
const todaySpendRow = await db.select({
|
||||
todaySpend: sql<number>`coalesce(sum(coalesce(${schema.proxyLogs.estimatedCost}, 0)), 0)`,
|
||||
})
|
||||
.from(schema.proxyLogs)
|
||||
@@ -179,7 +179,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
app.get<{ Querystring: { limit?: string; offset?: string } }>('/api/stats/proxy-logs', async (request) => {
|
||||
const limit = parseInt(request.query.limit || '50', 10);
|
||||
const offset = parseInt(request.query.offset || '0', 10);
|
||||
const rows = db.select().from(schema.proxyLogs)
|
||||
const rows = await db.select().from(schema.proxyLogs)
|
||||
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
|
||||
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.orderBy(desc(schema.proxyLogs.createdAt))
|
||||
@@ -244,12 +244,12 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
}
|
||||
}
|
||||
|
||||
const availability = db.select().from(schema.tokenModelAvailability)
|
||||
const availability = await db.select().from(schema.tokenModelAvailability)
|
||||
.innerJoin(schema.accountTokens, eq(schema.tokenModelAvailability.tokenId, schema.accountTokens.id))
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.all();
|
||||
const accountAvailability = db.select().from(schema.modelAvailability)
|
||||
const accountAvailability = await db.select().from(schema.modelAvailability)
|
||||
.innerJoin(schema.accounts, eq(schema.modelAvailability.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(
|
||||
@@ -262,7 +262,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
.all();
|
||||
|
||||
const last7d = getLocalRangeStartUtc(7);
|
||||
const recentLogs = db.select().from(schema.proxyLogs)
|
||||
const recentLogs = await db.select().from(schema.proxyLogs)
|
||||
.where(gte(schema.proxyLogs.createdAt, last7d))
|
||||
.all();
|
||||
|
||||
@@ -299,7 +299,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
|
||||
const modelMetadataMap = new Map<string, ModelMetadataAggregate>();
|
||||
if (includePricing) {
|
||||
const activeAccountRows = db.select().from(schema.accounts)
|
||||
const activeAccountRows = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(eq(schema.accounts.status, 'active'), eq(schema.sites.status, 'active')))
|
||||
.all();
|
||||
@@ -506,7 +506,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
return name;
|
||||
};
|
||||
|
||||
const rows = db.select().from(schema.tokenModelAvailability)
|
||||
const rows = await db.select().from(schema.tokenModelAvailability)
|
||||
.innerJoin(schema.accountTokens, eq(schema.tokenModelAvailability.tokenId, schema.accountTokens.id))
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -519,7 +519,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
),
|
||||
)
|
||||
.all();
|
||||
const availableModelRows = db.select({
|
||||
const availableModelRows = await db.select({
|
||||
modelName: schema.modelAvailability.modelName,
|
||||
accountId: schema.accounts.id,
|
||||
username: schema.accounts.username,
|
||||
@@ -621,7 +621,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
const hasPotentialGroupHints = hasAnyTokenGroupSignals || unknownGroupCoverageByAccountModel.size > 0;
|
||||
|
||||
if (hasPotentialGroupHints && accountIdsForGroupHints.size > 0) {
|
||||
const accountRows = db.select().from(schema.accounts)
|
||||
const accountRows = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(
|
||||
and(
|
||||
@@ -749,7 +749,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
|
||||
// Site distribution – per-site aggregate data
|
||||
app.get('/api/stats/site-distribution', async () => {
|
||||
const accountRows = db.select({
|
||||
const accountRows = await db.select({
|
||||
siteId: schema.sites.id,
|
||||
siteName: schema.sites.name,
|
||||
platform: schema.sites.platform,
|
||||
@@ -762,7 +762,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
.groupBy(schema.sites.id, schema.sites.name, schema.sites.platform)
|
||||
.all();
|
||||
|
||||
const spendRows = db.select({
|
||||
const spendRows = await db.select({
|
||||
siteId: schema.sites.id,
|
||||
totalSpend: sql<number>`coalesce(sum(${proxyCostSqlExpression()}), 0)`,
|
||||
})
|
||||
@@ -796,7 +796,7 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
const days = Math.max(1, parseInt(request.query.days || '7', 10));
|
||||
const sinceDate = getLocalRangeStartUtc(days);
|
||||
|
||||
const rows = db.select().from(schema.proxyLogs)
|
||||
const rows = await db.select().from(schema.proxyLogs)
|
||||
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
|
||||
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(gte(schema.proxyLogs.createdAt, sinceDate), eq(schema.sites.status, 'active')))
|
||||
@@ -847,12 +847,12 @@ export async function statsRoutes(app: FastifyInstance) {
|
||||
// Get account IDs belonging to the site (if filtered)
|
||||
let accountIds: Set<number> | null = null;
|
||||
if (siteId != null && !Number.isNaN(siteId)) {
|
||||
const siteAccounts = db.select().from(schema.accounts)
|
||||
const siteAccounts = await db.select().from(schema.accounts)
|
||||
.where(eq(schema.accounts.siteId, siteId)).all();
|
||||
accountIds = new Set(siteAccounts.map((a) => a.id));
|
||||
}
|
||||
|
||||
const rows = db.select().from(schema.proxyLogs)
|
||||
const rows = await db.select().from(schema.proxyLogs)
|
||||
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
|
||||
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(gte(schema.proxyLogs.createdAt, sinceDate), eq(schema.sites.status, 'active')))
|
||||
|
||||
@@ -19,24 +19,24 @@ describe('PUT /api/channels/batch', () => {
|
||||
return seedId;
|
||||
};
|
||||
|
||||
const seedChannel = (options: { priority: number; weight: number; manualOverride?: boolean }) => {
|
||||
const seedChannel = async (options: { priority: number; weight: number; manualOverride?: boolean }) => {
|
||||
const id = nextId();
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: `site-${id}`,
|
||||
url: `https://example.com/${id}`,
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
accessToken: `access-token-${id}`,
|
||||
apiToken: `api-token-${id}`,
|
||||
}).returning().get();
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: `gpt-4o-${id}`,
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
return db.insert(schema.routeChannels).values({
|
||||
return await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
priority: options.priority,
|
||||
@@ -59,12 +59,12 @@ describe('PUT /api/channels/batch', () => {
|
||||
await app.register(routesModule.tokensRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -112,8 +112,8 @@ describe('PUT /api/channels/batch', () => {
|
||||
});
|
||||
|
||||
it('updates priorities in batch, sets manualOverride, and keeps weight unchanged', async () => {
|
||||
const channelA = seedChannel({ priority: 9, weight: 17, manualOverride: false });
|
||||
const channelB = seedChannel({ priority: 8, weight: 23, manualOverride: false });
|
||||
const channelA = await seedChannel({ priority: 9, weight: 17, manualOverride: false });
|
||||
const channelB = await seedChannel({ priority: 8, weight: 23, manualOverride: false });
|
||||
|
||||
const res = await app.inject({
|
||||
method: 'PUT',
|
||||
@@ -145,8 +145,8 @@ describe('PUT /api/channels/batch', () => {
|
||||
expect(returnedA?.manualOverride).toBe(true);
|
||||
expect(returnedB?.manualOverride).toBe(true);
|
||||
|
||||
const dbA = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelA.id)).get();
|
||||
const dbB = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelB.id)).get();
|
||||
const dbA = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelA.id)).get();
|
||||
const dbB = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelB.id)).get();
|
||||
expect(dbA?.priority).toBe(3);
|
||||
expect(dbB?.priority).toBe(0);
|
||||
expect(dbA?.weight).toBe(17);
|
||||
|
||||
@@ -20,15 +20,15 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
return seedId;
|
||||
};
|
||||
|
||||
const seedRoutableChannel = () => {
|
||||
const seedRoutableChannel = async () => {
|
||||
const id = nextId();
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: `site-${id}`,
|
||||
url: `https://site-${id}.example.com`,
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: `user-${id}`,
|
||||
accessToken: `access-token-${id}`,
|
||||
@@ -36,12 +36,12 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-4o-mini',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -67,12 +67,12 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
await app.register(routesModule.tokensRoutes);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
seedId = 0;
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
invalidateTokenRouterCache();
|
||||
});
|
||||
|
||||
@@ -107,13 +107,13 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
});
|
||||
|
||||
it('returns decisions scoped by route id to avoid wildcard channel mismatch', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'wildcard-site',
|
||||
url: 'https://wildcard-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'wildcard-user',
|
||||
accessToken: 'wildcard-access',
|
||||
@@ -121,12 +121,12 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const exactRoute = db.insert(schema.tokenRoutes).values({
|
||||
const exactRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'claude-opus-4-6',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: exactRoute.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -136,12 +136,12 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
enabled: true,
|
||||
}).run();
|
||||
|
||||
const wildcardRoute = db.insert(schema.tokenRoutes).values({
|
||||
const wildcardRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 're:^claude-(opus|sonnet)-4-6$',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const wildcardChannel = db.insert(schema.routeChannels).values({
|
||||
const wildcardChannel = await db.insert(schema.routeChannels).values({
|
||||
routeId: wildcardRoute.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -173,13 +173,13 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
});
|
||||
|
||||
it('returns route-wide wildcard probabilities normalized to 100 across all channels', async () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'route-wide-site',
|
||||
url: 'https://route-wide-site.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'route-wide-user',
|
||||
accessToken: 'route-wide-access',
|
||||
@@ -187,12 +187,12 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 're:^claude-(opus|sonnet)-4-6$',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const channelA = db.insert(schema.routeChannels).values({
|
||||
const channelA = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -202,7 +202,7 @@ describe('POST /api/routes/decision/batch', () => {
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const channelB = db.insert(schema.routeChannels).values({
|
||||
const channelB = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
|
||||
@@ -12,8 +12,8 @@ function isExactModelPattern(modelPattern: string): boolean {
|
||||
return !/[\*\?\[]/.test(normalized);
|
||||
}
|
||||
|
||||
function getDefaultTokenId(accountId: number): number | null {
|
||||
const token = db.select().from(schema.accountTokens)
|
||||
async function getDefaultTokenId(accountId: number): Promise<number | null> {
|
||||
const token = await db.select().from(schema.accountTokens)
|
||||
.where(and(eq(schema.accountTokens.accountId, accountId), eq(schema.accountTokens.enabled, true), eq(schema.accountTokens.isDefault, true)))
|
||||
.get();
|
||||
return token?.id ?? null;
|
||||
@@ -35,8 +35,8 @@ function isModelAliasEquivalent(left: string, right: string): boolean {
|
||||
return !!a && !!b && a === b;
|
||||
}
|
||||
|
||||
function tokenSupportsModel(tokenId: number, modelName: string): boolean {
|
||||
const rows = db.select().from(schema.tokenModelAvailability)
|
||||
async function tokenSupportsModel(tokenId: number, modelName: string): Promise<boolean> {
|
||||
const rows = await db.select().from(schema.tokenModelAvailability)
|
||||
.where(
|
||||
and(
|
||||
eq(schema.tokenModelAvailability.tokenId, tokenId),
|
||||
@@ -51,15 +51,15 @@ function tokenSupportsModel(tokenId: number, modelName: string): boolean {
|
||||
});
|
||||
}
|
||||
|
||||
function checkTokenBelongsToAccount(tokenId: number, accountId: number): boolean {
|
||||
const row = db.select().from(schema.accountTokens)
|
||||
async function checkTokenBelongsToAccount(tokenId: number, accountId: number): Promise<boolean> {
|
||||
const row = await db.select().from(schema.accountTokens)
|
||||
.where(and(eq(schema.accountTokens.id, tokenId), eq(schema.accountTokens.accountId, accountId)))
|
||||
.get();
|
||||
return !!row;
|
||||
}
|
||||
|
||||
function getPatternTokenCandidates(modelPattern: string): Array<{ tokenId: number; accountId: number; sourceModel: string }> {
|
||||
const rows = db.select().from(schema.tokenModelAvailability)
|
||||
async function getPatternTokenCandidates(modelPattern: string): Promise<Array<{ tokenId: number; accountId: number; sourceModel: string }>> {
|
||||
const rows = await db.select().from(schema.tokenModelAvailability)
|
||||
.innerJoin(schema.accountTokens, eq(schema.tokenModelAvailability.tokenId, schema.accountTokens.id))
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -88,7 +88,7 @@ function getPatternTokenCandidates(modelPattern: string): Array<{ tokenId: numbe
|
||||
return result;
|
||||
}
|
||||
|
||||
function getMatchedExactRouteChannelCandidates(modelPattern: string): Array<{
|
||||
async function getMatchedExactRouteChannelCandidates(modelPattern: string): Promise<Array<{
|
||||
tokenId: number | null;
|
||||
accountId: number;
|
||||
sourceModel: string;
|
||||
@@ -96,17 +96,17 @@ function getMatchedExactRouteChannelCandidates(modelPattern: string): Array<{
|
||||
weight: number;
|
||||
enabled: boolean;
|
||||
manualOverride: boolean;
|
||||
}> {
|
||||
const matchedRoutes = db.select().from(schema.tokenRoutes)
|
||||
}>> {
|
||||
const matchedRoutes = (await db.select().from(schema.tokenRoutes)
|
||||
.where(eq(schema.tokenRoutes.enabled, true))
|
||||
.all()
|
||||
.all())
|
||||
.filter((route) => isExactModelPattern(route.modelPattern) && matchesModelPattern(route.modelPattern, modelPattern));
|
||||
|
||||
if (matchedRoutes.length === 0) return [];
|
||||
const routeMap = new Map<number, typeof matchedRoutes[number]>();
|
||||
for (const route of matchedRoutes) routeMap.set(route.id, route);
|
||||
|
||||
const channels = db.select().from(schema.routeChannels)
|
||||
const channels = await db.select().from(schema.routeChannels)
|
||||
.where(inArray(schema.routeChannels.routeId, matchedRoutes.map((route) => route.id)))
|
||||
.all();
|
||||
|
||||
@@ -121,9 +121,9 @@ function getMatchedExactRouteChannelCandidates(modelPattern: string): Array<{
|
||||
})).filter((candidate) => candidate.sourceModel.length > 0);
|
||||
}
|
||||
|
||||
function populateRouteChannelsByModelPattern(routeId: number, modelPattern: string): number {
|
||||
const routeCandidates = getMatchedExactRouteChannelCandidates(modelPattern);
|
||||
const availabilityCandidates = getPatternTokenCandidates(modelPattern).map((candidate) => ({
|
||||
async function populateRouteChannelsByModelPattern(routeId: number, modelPattern: string): Promise<number> {
|
||||
const routeCandidates = await getMatchedExactRouteChannelCandidates(modelPattern);
|
||||
const availabilityCandidates = (await getPatternTokenCandidates(modelPattern)).map((candidate) => ({
|
||||
tokenId: candidate.tokenId,
|
||||
accountId: candidate.accountId,
|
||||
sourceModel: candidate.sourceModel,
|
||||
@@ -135,7 +135,7 @@ function populateRouteChannelsByModelPattern(routeId: number, modelPattern: stri
|
||||
const candidates = [...routeCandidates, ...availabilityCandidates];
|
||||
if (candidates.length === 0) return 0;
|
||||
|
||||
const existingChannels = db.select().from(schema.routeChannels)
|
||||
const existingChannels = await db.select().from(schema.routeChannels)
|
||||
.where(eq(schema.routeChannels.routeId, routeId))
|
||||
.all();
|
||||
const existingPairs = new Set<string>(
|
||||
@@ -152,7 +152,7 @@ function populateRouteChannelsByModelPattern(routeId: number, modelPattern: stri
|
||||
const tokenId = typeof candidate.tokenId === 'number' && Number.isFinite(candidate.tokenId) ? candidate.tokenId : 0;
|
||||
const pairKey = `${candidate.accountId}::${tokenId}::${candidate.sourceModel.trim().toLowerCase()}`;
|
||||
if (existingPairs.has(pairKey)) continue;
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId,
|
||||
accountId: candidate.accountId,
|
||||
tokenId: candidate.tokenId,
|
||||
@@ -328,11 +328,11 @@ function parseBatchRouteWideDecisionRouteIds(
|
||||
export async function tokensRoutes(app: FastifyInstance) {
|
||||
// List all routes
|
||||
app.get('/api/routes', async () => {
|
||||
const routes = db.select().from(schema.tokenRoutes).all();
|
||||
const routes = await db.select().from(schema.tokenRoutes).all();
|
||||
if (routes.length === 0) return [];
|
||||
|
||||
const routeIds = routes.map((route) => route.id);
|
||||
const channelRows = db.select().from(schema.routeChannels)
|
||||
const channelRows = await db.select().from(schema.routeChannels)
|
||||
.innerJoin(schema.accounts, eq(schema.routeChannels.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.leftJoin(schema.accountTokens, eq(schema.routeChannels.tokenId, schema.accountTokens.id))
|
||||
@@ -372,7 +372,7 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: 'model 不能为空' });
|
||||
}
|
||||
|
||||
const decision = tokenRouter.explainSelection(model);
|
||||
const decision = await tokenRouter.explainSelection(model);
|
||||
return { success: true, decision };
|
||||
});
|
||||
|
||||
@@ -382,10 +382,10 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: parsed.message });
|
||||
}
|
||||
|
||||
const decisions: Record<string, ReturnType<typeof tokenRouter.explainSelection>> = {};
|
||||
for (const model of parsed.models) {
|
||||
decisions[model] = tokenRouter.explainSelection(model);
|
||||
}
|
||||
const decisions: Record<string, Awaited<ReturnType<typeof tokenRouter.explainSelection>>> = {};
|
||||
for (const model of parsed.models) {
|
||||
decisions[model] = await tokenRouter.explainSelection(model);
|
||||
}
|
||||
|
||||
return { success: true, decisions };
|
||||
});
|
||||
@@ -396,12 +396,12 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: parsed.message });
|
||||
}
|
||||
|
||||
const decisions: Record<string, Record<string, ReturnType<typeof tokenRouter.explainSelection>>> = {};
|
||||
for (const item of parsed.items) {
|
||||
const routeKey = String(item.routeId);
|
||||
if (!decisions[routeKey]) decisions[routeKey] = {};
|
||||
decisions[routeKey][item.model] = tokenRouter.explainSelectionForRoute(item.routeId, item.model);
|
||||
}
|
||||
const decisions: Record<string, Record<string, Awaited<ReturnType<typeof tokenRouter.explainSelectionForRoute>>>> = {};
|
||||
for (const item of parsed.items) {
|
||||
const routeKey = String(item.routeId);
|
||||
if (!decisions[routeKey]) decisions[routeKey] = {};
|
||||
decisions[routeKey][item.model] = await tokenRouter.explainSelectionForRoute(item.routeId, item.model);
|
||||
}
|
||||
|
||||
return { success: true, decisions };
|
||||
});
|
||||
@@ -412,10 +412,10 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: parsed.message });
|
||||
}
|
||||
|
||||
const decisions: Record<string, ReturnType<typeof tokenRouter.explainSelection>> = {};
|
||||
for (const routeId of parsed.routeIds) {
|
||||
decisions[String(routeId)] = tokenRouter.explainSelectionRouteWide(routeId);
|
||||
}
|
||||
const decisions: Record<string, Awaited<ReturnType<typeof tokenRouter.explainSelectionRouteWide>>> = {};
|
||||
for (const routeId of parsed.routeIds) {
|
||||
decisions[String(routeId)] = await tokenRouter.explainSelectionRouteWide(routeId);
|
||||
}
|
||||
|
||||
return { success: true, decisions };
|
||||
});
|
||||
@@ -423,15 +423,23 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
// Create a route
|
||||
app.post<{ Body: { modelPattern: string; displayName?: string; displayIcon?: string; modelMapping?: string; enabled?: boolean } }>('/api/routes', async (request) => {
|
||||
const body = request.body;
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: body.modelPattern,
|
||||
displayName: body.displayName,
|
||||
displayIcon: body.displayIcon,
|
||||
modelMapping: body.modelMapping,
|
||||
enabled: body.enabled ?? true,
|
||||
}).returning().get();
|
||||
const insertedRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: body.modelPattern,
|
||||
displayName: body.displayName,
|
||||
displayIcon: body.displayIcon,
|
||||
modelMapping: body.modelMapping,
|
||||
enabled: body.enabled ?? true,
|
||||
}).run();
|
||||
const routeId = Number(insertedRoute.lastInsertRowid || 0);
|
||||
if (routeId <= 0) {
|
||||
return { success: false, message: '创建路由失败' };
|
||||
}
|
||||
const route = await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, routeId)).get();
|
||||
if (!route) {
|
||||
return { success: false, message: '创建路由失败' };
|
||||
}
|
||||
|
||||
populateRouteChannelsByModelPattern(route.id, body.modelPattern);
|
||||
await populateRouteChannelsByModelPattern(route.id, body.modelPattern);
|
||||
invalidateTokenRouterCache();
|
||||
return route;
|
||||
});
|
||||
@@ -449,15 +457,15 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
if (body.enabled !== undefined) updates.enabled = body.enabled;
|
||||
updates.updatedAt = new Date().toISOString();
|
||||
|
||||
db.update(schema.tokenRoutes).set(updates).where(eq(schema.tokenRoutes.id, id)).run();
|
||||
await db.update(schema.tokenRoutes).set(updates).where(eq(schema.tokenRoutes.id, id)).run();
|
||||
invalidateTokenRouterCache();
|
||||
return db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, id)).get();
|
||||
return await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, id)).get();
|
||||
});
|
||||
|
||||
// Delete a route
|
||||
app.delete<{ Params: { id: string } }>('/api/routes/:id', async (request) => {
|
||||
const id = parseInt(request.params.id, 10);
|
||||
db.delete(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, id)).run();
|
||||
await db.delete(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, id)).run();
|
||||
invalidateTokenRouterCache();
|
||||
return { success: true };
|
||||
});
|
||||
@@ -467,7 +475,7 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
const routeId = parseInt(request.params.id, 10);
|
||||
const body = request.body;
|
||||
|
||||
const route = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, routeId)).get();
|
||||
const route = await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, routeId)).get();
|
||||
if (!route) {
|
||||
return reply.code(404).send({ success: false, message: '路由不存在' });
|
||||
}
|
||||
@@ -475,19 +483,19 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
const sourceModel = typeof body.sourceModel === 'string'
|
||||
? body.sourceModel.trim()
|
||||
: (isExactModelPattern(route.modelPattern) ? route.modelPattern.trim() : '');
|
||||
const effectiveTokenId = body.tokenId ?? getDefaultTokenId(body.accountId);
|
||||
const effectiveTokenId = body.tokenId ?? await getDefaultTokenId(body.accountId);
|
||||
|
||||
if (body.tokenId && !checkTokenBelongsToAccount(body.tokenId, body.accountId)) {
|
||||
if (body.tokenId && !await checkTokenBelongsToAccount(body.tokenId, body.accountId)) {
|
||||
return reply.code(400).send({ success: false, message: '令牌不存在或不属于当前账号' });
|
||||
}
|
||||
|
||||
if (isExactModelPattern(route.modelPattern) && effectiveTokenId && !tokenSupportsModel(effectiveTokenId, route.modelPattern)) {
|
||||
if (isExactModelPattern(route.modelPattern) && effectiveTokenId && !await tokenSupportsModel(effectiveTokenId, route.modelPattern)) {
|
||||
return reply.code(400).send({ success: false, message: '该令牌不支持当前模型' });
|
||||
}
|
||||
|
||||
const duplicate = db.select().from(schema.routeChannels)
|
||||
const duplicate = (await db.select().from(schema.routeChannels)
|
||||
.where(eq(schema.routeChannels.routeId, routeId))
|
||||
.all()
|
||||
.all())
|
||||
.some((channel) =>
|
||||
channel.accountId === body.accountId
|
||||
&& (channel.tokenId ?? null) === (body.tokenId ?? null)
|
||||
@@ -497,14 +505,22 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
return reply.code(400).send({ success: false, message: '该来源模型的通道已存在' });
|
||||
}
|
||||
|
||||
const created = db.insert(schema.routeChannels).values({
|
||||
routeId,
|
||||
accountId: body.accountId,
|
||||
tokenId: body.tokenId,
|
||||
sourceModel: sourceModel || null,
|
||||
priority: body.priority ?? 0,
|
||||
weight: body.weight ?? 10,
|
||||
}).returning().get();
|
||||
const insertedChannel = await db.insert(schema.routeChannels).values({
|
||||
routeId,
|
||||
accountId: body.accountId,
|
||||
tokenId: body.tokenId,
|
||||
sourceModel: sourceModel || null,
|
||||
priority: body.priority ?? 0,
|
||||
weight: body.weight ?? 10,
|
||||
}).run();
|
||||
const channelId = Number(insertedChannel.lastInsertRowid || 0);
|
||||
if (channelId <= 0) {
|
||||
return reply.code(500).send({ success: false, message: '创建通道失败' });
|
||||
}
|
||||
const created = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
if (!created) {
|
||||
return reply.code(500).send({ success: false, message: '创建通道失败' });
|
||||
}
|
||||
invalidateTokenRouterCache();
|
||||
return created;
|
||||
});
|
||||
@@ -517,7 +533,7 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
}
|
||||
|
||||
const channelIds = Array.from(new Set(parsed.updates.map((update) => update.id)));
|
||||
const existingChannels = db.select().from(schema.routeChannels)
|
||||
const existingChannels = await db.select().from(schema.routeChannels)
|
||||
.where(inArray(schema.routeChannels.id, channelIds))
|
||||
.all();
|
||||
if (existingChannels.length !== channelIds.length) {
|
||||
@@ -527,13 +543,13 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
}
|
||||
|
||||
for (const update of parsed.updates) {
|
||||
db.update(schema.routeChannels).set({
|
||||
await db.update(schema.routeChannels).set({
|
||||
priority: update.priority,
|
||||
manualOverride: true,
|
||||
}).where(eq(schema.routeChannels.id, update.id)).run();
|
||||
}
|
||||
|
||||
const updatedChannels = db.select().from(schema.routeChannels)
|
||||
const updatedChannels = await db.select().from(schema.routeChannels)
|
||||
.where(inArray(schema.routeChannels.id, channelIds))
|
||||
.all();
|
||||
invalidateTokenRouterCache();
|
||||
@@ -545,28 +561,28 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
const channelId = parseInt(request.params.channelId, 10);
|
||||
const body = request.body as Record<string, unknown>;
|
||||
|
||||
const channel = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
const channel = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
if (!channel) {
|
||||
return reply.code(404).send({ success: false, message: '通道不存在' });
|
||||
}
|
||||
|
||||
const route = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, channel.routeId)).get();
|
||||
const route = await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, channel.routeId)).get();
|
||||
if (!route) {
|
||||
return reply.code(404).send({ success: false, message: '路由不存在' });
|
||||
}
|
||||
|
||||
if (body.tokenId !== undefined && body.tokenId !== null) {
|
||||
const tokenId = Number(body.tokenId);
|
||||
if (!Number.isFinite(tokenId) || !checkTokenBelongsToAccount(tokenId, channel.accountId)) {
|
||||
if (!Number.isFinite(tokenId) || !await checkTokenBelongsToAccount(tokenId, channel.accountId)) {
|
||||
return reply.code(400).send({ success: false, message: '令牌不存在或不属于通道账号' });
|
||||
}
|
||||
}
|
||||
|
||||
const nextTokenId = body.tokenId === undefined
|
||||
? (channel.tokenId ?? getDefaultTokenId(channel.accountId))
|
||||
: (body.tokenId === null ? getDefaultTokenId(channel.accountId) : Number(body.tokenId));
|
||||
const nextTokenId = body.tokenId === undefined
|
||||
? (channel.tokenId ?? await getDefaultTokenId(channel.accountId))
|
||||
: (body.tokenId === null ? await getDefaultTokenId(channel.accountId) : Number(body.tokenId));
|
||||
|
||||
if (isExactModelPattern(route.modelPattern) && nextTokenId && !tokenSupportsModel(nextTokenId, route.modelPattern)) {
|
||||
if (isExactModelPattern(route.modelPattern) && nextTokenId && !await tokenSupportsModel(nextTokenId, route.modelPattern)) {
|
||||
return reply.code(400).send({ success: false, message: '该令牌不支持当前模型' });
|
||||
}
|
||||
|
||||
@@ -580,15 +596,15 @@ export async function tokensRoutes(app: FastifyInstance) {
|
||||
if (body[key] !== undefined) updates[key] = body[key];
|
||||
}
|
||||
|
||||
db.update(schema.routeChannels).set(updates).where(eq(schema.routeChannels.id, channelId)).run();
|
||||
await db.update(schema.routeChannels).set(updates).where(eq(schema.routeChannels.id, channelId)).run();
|
||||
invalidateTokenRouterCache();
|
||||
return db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
return await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
});
|
||||
|
||||
// Delete a channel
|
||||
app.delete<{ Params: { channelId: string } }>('/api/channels/:channelId', async (request) => {
|
||||
const channelId = parseInt(request.params.channelId, 10);
|
||||
db.delete(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).run();
|
||||
await db.delete(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).run();
|
||||
invalidateTokenRouterCache();
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
@@ -139,7 +139,7 @@ async function handleChatProxyRequest(
|
||||
|
||||
const { requestedModel, isStream, upstreamBody, claudeOriginalBody } = parsedRequest.value!;
|
||||
const downstreamPath = downstreamFormat === 'claude' ? '/v1/messages' : '/v1/chat/completions';
|
||||
if (!ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
if (!await ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
const downstreamPolicy = getDownstreamRoutingPolicy(request);
|
||||
|
||||
const excludeChannelIds: number[] = [];
|
||||
@@ -147,12 +147,12 @@ async function handleChatProxyRequest(
|
||||
|
||||
while (retryCount <= MAX_RETRIES) {
|
||||
let selected = retryCount === 0
|
||||
? tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
? await tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: await tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
|
||||
if (!selected && retryCount === 0) {
|
||||
await refreshModelsAndRebuildRoutes();
|
||||
selected = tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
selected = await tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
}
|
||||
|
||||
if (!selected) {
|
||||
@@ -703,7 +703,7 @@ async function handleChatProxyRequest(
|
||||
}
|
||||
}
|
||||
|
||||
function logProxy(
|
||||
async function logProxy(
|
||||
selected: any,
|
||||
modelRequested: string,
|
||||
status: string,
|
||||
@@ -724,7 +724,7 @@ function logProxy(
|
||||
upstreamPath,
|
||||
errorMessage,
|
||||
});
|
||||
db.insert(schema.proxyLogs).values({
|
||||
await db.insert(schema.proxyLogs).values({
|
||||
routeId: selected.channel.routeId,
|
||||
channelId: selected.channel.id,
|
||||
accountId: selected.account.id,
|
||||
|
||||
@@ -22,7 +22,7 @@ export async function completionsProxyRoute(app: FastifyInstance) {
|
||||
if (!requestedModel) {
|
||||
return reply.code(400).send({ error: { message: 'model is required', type: 'invalid_request_error' } });
|
||||
}
|
||||
if (!ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
if (!await ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
const downstreamPolicy = getDownstreamRoutingPolicy(request);
|
||||
|
||||
const isStream = body.stream === true;
|
||||
@@ -31,12 +31,12 @@ export async function completionsProxyRoute(app: FastifyInstance) {
|
||||
|
||||
while (retryCount <= MAX_RETRIES) {
|
||||
let selected = retryCount === 0
|
||||
? tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
? await tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: await tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
|
||||
if (!selected && retryCount === 0) {
|
||||
await refreshModelsAndRebuildRoutes();
|
||||
selected = tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
selected = await tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
}
|
||||
|
||||
if (!selected) {
|
||||
@@ -228,7 +228,7 @@ export async function completionsProxyRoute(app: FastifyInstance) {
|
||||
});
|
||||
}
|
||||
|
||||
function logProxy(
|
||||
async function logProxy(
|
||||
selected: any,
|
||||
modelRequested: string,
|
||||
status: string,
|
||||
@@ -246,7 +246,7 @@ function logProxy(
|
||||
downstreamPath: '/v1/completions',
|
||||
errorMessage,
|
||||
});
|
||||
db.insert(schema.proxyLogs).values({
|
||||
await db.insert(schema.proxyLogs).values({
|
||||
routeId: selected.channel.routeId,
|
||||
channelId: selected.channel.id,
|
||||
accountId: selected.account.id,
|
||||
|
||||
@@ -7,15 +7,15 @@ export function getDownstreamRoutingPolicy(request: FastifyRequest): DownstreamR
|
||||
return getProxyAuthContext(request)?.policy || EMPTY_DOWNSTREAM_ROUTING_POLICY;
|
||||
}
|
||||
|
||||
export function ensureModelAllowedForDownstreamKey(
|
||||
export async function ensureModelAllowedForDownstreamKey(
|
||||
request: FastifyRequest,
|
||||
reply: FastifyReply,
|
||||
requestedModel: string,
|
||||
): boolean {
|
||||
): Promise<boolean> {
|
||||
const authContext = getProxyAuthContext(request);
|
||||
if (!authContext) return true;
|
||||
|
||||
if (isModelAllowedByPolicyOrAllowedRoutes(requestedModel, authContext.policy)) {
|
||||
if (await isModelAllowedByPolicyOrAllowedRoutes(requestedModel, authContext.policy)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -31,5 +31,5 @@ export function ensureModelAllowedForDownstreamKey(
|
||||
export function recordDownstreamCostUsage(request: FastifyRequest, estimatedCost: number): void {
|
||||
const authContext = getProxyAuthContext(request);
|
||||
if (!authContext || authContext.keyId === null) return;
|
||||
recordManagedKeyCostUsage(authContext.keyId, estimatedCost);
|
||||
void recordManagedKeyCostUsage(authContext.keyId, estimatedCost);
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ export async function embeddingsProxyRoute(app: FastifyInstance) {
|
||||
if (!requestedModel) {
|
||||
return reply.code(400).send({ error: { message: 'model is required', type: 'invalid_request_error' } });
|
||||
}
|
||||
if (!ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
if (!await ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
const downstreamPolicy = getDownstreamRoutingPolicy(request);
|
||||
|
||||
const excludeChannelIds: number[] = [];
|
||||
@@ -30,12 +30,12 @@ export async function embeddingsProxyRoute(app: FastifyInstance) {
|
||||
|
||||
while (retryCount <= MAX_RETRIES) {
|
||||
let selected = retryCount === 0
|
||||
? tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
? await tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: await tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
|
||||
if (!selected && retryCount === 0) {
|
||||
await refreshModelsAndRebuildRoutes();
|
||||
selected = tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
selected = await tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
}
|
||||
|
||||
if (!selected) {
|
||||
@@ -143,7 +143,7 @@ export async function embeddingsProxyRoute(app: FastifyInstance) {
|
||||
});
|
||||
}
|
||||
|
||||
function logProxy(
|
||||
async function logProxy(
|
||||
selected: any,
|
||||
modelRequested: string,
|
||||
status: string,
|
||||
@@ -161,7 +161,7 @@ function logProxy(
|
||||
downstreamPath: '/v1/embeddings',
|
||||
errorMessage,
|
||||
});
|
||||
db.insert(schema.proxyLogs).values({
|
||||
await db.insert(schema.proxyLogs).values({
|
||||
routeId: selected.channel.routeId,
|
||||
channelId: selected.channel.id,
|
||||
accountId: selected.account.id,
|
||||
|
||||
@@ -17,19 +17,19 @@ export async function imagesProxyRoute(app: FastifyInstance) {
|
||||
app.post('/v1/images/generations', async (request: FastifyRequest, reply: FastifyReply) => {
|
||||
const body = request.body as any;
|
||||
const requestedModel = body?.model || 'gpt-image-1';
|
||||
if (!ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
if (!await ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
const downstreamPolicy = getDownstreamRoutingPolicy(request);
|
||||
const excludeChannelIds: number[] = [];
|
||||
let retryCount = 0;
|
||||
|
||||
while (retryCount <= MAX_RETRIES) {
|
||||
let selected = retryCount === 0
|
||||
? tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
? await tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: await tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
|
||||
if (!selected && retryCount === 0) {
|
||||
await refreshModelsAndRebuildRoutes();
|
||||
selected = tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
selected = await tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
}
|
||||
|
||||
if (!selected) {
|
||||
@@ -116,7 +116,7 @@ export async function imagesProxyRoute(app: FastifyInstance) {
|
||||
});
|
||||
}
|
||||
|
||||
function logProxy(
|
||||
async function logProxy(
|
||||
selected: any,
|
||||
modelRequested: string,
|
||||
status: string,
|
||||
@@ -131,7 +131,7 @@ function logProxy(
|
||||
downstreamPath: '/v1/images/generations',
|
||||
errorMessage,
|
||||
});
|
||||
db.insert(schema.proxyLogs).values({
|
||||
await db.insert(schema.proxyLogs).values({
|
||||
routeId: selected.channel.routeId,
|
||||
channelId: selected.channel.id,
|
||||
accountId: selected.account.id,
|
||||
|
||||
@@ -9,8 +9,8 @@ export async function modelsProxyRoute(app: FastifyInstance) {
|
||||
app.get('/v1/models', async (request) => {
|
||||
const downstreamPolicy = getDownstreamRoutingPolicy(request);
|
||||
|
||||
const readModels = () => {
|
||||
const rows = db.select({ modelName: schema.modelAvailability.modelName })
|
||||
const readModels = async () => {
|
||||
const rows = await db.select({ modelName: schema.modelAvailability.modelName })
|
||||
.from(schema.modelAvailability)
|
||||
.innerJoin(schema.accounts, eq(schema.modelAvailability.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -22,23 +22,29 @@ export async function modelsProxyRoute(app: FastifyInstance) {
|
||||
),
|
||||
)
|
||||
.all();
|
||||
const routeAliases = db.select({ displayName: schema.tokenRoutes.displayName })
|
||||
const routeAliases = (await db.select({ displayName: schema.tokenRoutes.displayName })
|
||||
.from(schema.tokenRoutes)
|
||||
.where(eq(schema.tokenRoutes.enabled, true))
|
||||
.all()
|
||||
.all())
|
||||
.map((row) => (row.displayName || '').trim())
|
||||
.filter((name) => name.length > 0);
|
||||
const deduped = Array.from(new Set([
|
||||
...rows.map((r) => r.modelName),
|
||||
...routeAliases,
|
||||
])).sort();
|
||||
return deduped.filter((modelName) => isModelAllowedByPolicyOrAllowedRoutes(modelName, downstreamPolicy));
|
||||
const allowed: string[] = [];
|
||||
for (const modelName of deduped) {
|
||||
if (await isModelAllowedByPolicyOrAllowedRoutes(modelName, downstreamPolicy)) {
|
||||
allowed.push(modelName);
|
||||
}
|
||||
}
|
||||
return allowed;
|
||||
};
|
||||
|
||||
let models = readModels();
|
||||
let models = await readModels();
|
||||
if (models.length === 0) {
|
||||
await refreshModelsAndRebuildRoutes();
|
||||
models = readModels();
|
||||
models = await readModels();
|
||||
}
|
||||
|
||||
const wantsClaudeFormat = typeof request.headers['anthropic-version'] === 'string'
|
||||
|
||||
@@ -1147,7 +1147,7 @@ export async function responsesProxyRoute(app: FastifyInstance) {
|
||||
if (!requestedModel) {
|
||||
return reply.code(400).send({ error: { message: 'model is required', type: 'invalid_request_error' } });
|
||||
}
|
||||
if (!ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
if (!await ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
|
||||
const downstreamPolicy = getDownstreamRoutingPolicy(request);
|
||||
|
||||
const isStream = body.stream === true;
|
||||
@@ -1156,12 +1156,12 @@ export async function responsesProxyRoute(app: FastifyInstance) {
|
||||
|
||||
while (retryCount <= MAX_RETRIES) {
|
||||
let selected = retryCount === 0
|
||||
? tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
? await tokenRouter.selectChannel(requestedModel, downstreamPolicy)
|
||||
: await tokenRouter.selectNextChannel(requestedModel, excludeChannelIds, downstreamPolicy);
|
||||
|
||||
if (!selected && retryCount === 0) {
|
||||
await refreshModelsAndRebuildRoutes();
|
||||
selected = tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
selected = await tokenRouter.selectChannel(requestedModel, downstreamPolicy);
|
||||
}
|
||||
|
||||
if (!selected) {
|
||||
@@ -1561,7 +1561,7 @@ export async function responsesProxyRoute(app: FastifyInstance) {
|
||||
});
|
||||
}
|
||||
|
||||
function logProxy(
|
||||
async function logProxy(
|
||||
selected: any,
|
||||
modelRequested: string,
|
||||
status: string,
|
||||
@@ -1582,7 +1582,7 @@ function logProxy(
|
||||
upstreamPath,
|
||||
errorMessage,
|
||||
});
|
||||
db.insert(schema.proxyLogs).values({
|
||||
await db.insert(schema.proxyLogs).values({
|
||||
routeId: selected.channel.routeId,
|
||||
channelId: selected.channel.id,
|
||||
accountId: selected.account.id,
|
||||
|
||||
@@ -144,7 +144,7 @@ function applyRuntimeHealthToExtraConfig(extraConfig: string | null | undefined,
|
||||
});
|
||||
}
|
||||
|
||||
export function setAccountRuntimeHealth(
|
||||
export async function setAccountRuntimeHealth(
|
||||
accountId: number,
|
||||
input: {
|
||||
state: RuntimeHealthState;
|
||||
@@ -152,16 +152,16 @@ export function setAccountRuntimeHealth(
|
||||
source?: string | null;
|
||||
checkedAt?: string | null;
|
||||
},
|
||||
): RuntimeHealthInfo | null {
|
||||
): Promise<RuntimeHealthInfo | null> {
|
||||
try {
|
||||
const query = db.select().from(schema.accounts).where(eq(schema.accounts.id, accountId)) as any;
|
||||
const account = typeof query?.get === 'function' ? query.get() : null;
|
||||
const account = typeof query?.get === 'function' ? await query.get() : null;
|
||||
if (!account) return null;
|
||||
|
||||
const health = buildRuntimeHealthPatch(input);
|
||||
const nextExtraConfig = applyRuntimeHealthToExtraConfig(account.extraConfig, health);
|
||||
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set({
|
||||
extraConfig: nextExtraConfig,
|
||||
updatedAt: new Date().toISOString(),
|
||||
|
||||
@@ -59,15 +59,15 @@ function normalizeTokenGroup(value: string | null | undefined, tokenName?: strin
|
||||
return name;
|
||||
}
|
||||
|
||||
function updateAccountApiToken(accountId: number, tokenValue: string | null) {
|
||||
db.update(schema.accounts)
|
||||
async function updateAccountApiToken(accountId: number, tokenValue: string | null) {
|
||||
await db.update(schema.accounts)
|
||||
.set({ apiToken: tokenValue || null, updatedAt: new Date().toISOString() })
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
.run();
|
||||
}
|
||||
|
||||
export function getPreferredAccountToken(accountId: number) {
|
||||
const tokens = db.select()
|
||||
export async function getPreferredAccountToken(accountId: number) {
|
||||
const tokens = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(and(eq(schema.accountTokens.accountId, accountId), eq(schema.accountTokens.enabled, true)))
|
||||
.all();
|
||||
@@ -78,24 +78,24 @@ export function getPreferredAccountToken(accountId: number) {
|
||||
return preferred;
|
||||
}
|
||||
|
||||
export function ensureDefaultTokenForAccount(
|
||||
export async function ensureDefaultTokenForAccount(
|
||||
accountId: number,
|
||||
tokenValue: string,
|
||||
options?: { name?: string; source?: string; enabled?: boolean; tokenGroup?: string | null },
|
||||
): number | null {
|
||||
): Promise<number | null> {
|
||||
const normalizedToken = normalizeTokenValue(tokenValue);
|
||||
if (!normalizedToken) return null;
|
||||
const tokenGroup = normalizeTokenGroup(options?.tokenGroup, options?.name) || 'default';
|
||||
|
||||
const now = new Date().toISOString();
|
||||
const tokens = db.select()
|
||||
const tokens = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, accountId))
|
||||
.all();
|
||||
|
||||
let target = tokens.find((t) => t.token === normalizedToken) || null;
|
||||
if (!target) {
|
||||
target = db.insert(schema.accountTokens)
|
||||
const inserted = await db.insert(schema.accountTokens)
|
||||
.values({
|
||||
accountId,
|
||||
name: normalizeTokenName(options?.name, tokens.length + 1),
|
||||
@@ -107,10 +107,14 @@ export function ensureDefaultTokenForAccount(
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.returning()
|
||||
.get();
|
||||
.run();
|
||||
const insertedId = Number(inserted.lastInsertRowid || 0);
|
||||
target = insertedId > 0
|
||||
? (await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, insertedId)).get()) ?? null
|
||||
: null;
|
||||
if (!target) return null;
|
||||
} else {
|
||||
db.update(schema.accountTokens)
|
||||
await db.update(schema.accountTokens)
|
||||
.set({
|
||||
name: options?.name ? normalizeTokenName(options.name) : target.name,
|
||||
tokenGroup,
|
||||
@@ -123,66 +127,66 @@ export function ensureDefaultTokenForAccount(
|
||||
.run();
|
||||
}
|
||||
|
||||
db.update(schema.accountTokens)
|
||||
await db.update(schema.accountTokens)
|
||||
.set({ isDefault: false, updatedAt: now })
|
||||
.where(and(eq(schema.accountTokens.accountId, accountId), ne(schema.accountTokens.id, target.id)))
|
||||
.run();
|
||||
|
||||
updateAccountApiToken(accountId, normalizedToken);
|
||||
await updateAccountApiToken(accountId, normalizedToken);
|
||||
return target.id;
|
||||
}
|
||||
|
||||
export function setDefaultToken(tokenId: number): boolean {
|
||||
const target = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
|
||||
export async function setDefaultToken(tokenId: number): Promise<boolean> {
|
||||
const target = await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
|
||||
if (!target) return false;
|
||||
|
||||
const now = new Date().toISOString();
|
||||
db.update(schema.accountTokens)
|
||||
await db.update(schema.accountTokens)
|
||||
.set({ isDefault: false, updatedAt: now })
|
||||
.where(eq(schema.accountTokens.accountId, target.accountId))
|
||||
.run();
|
||||
|
||||
db.update(schema.accountTokens)
|
||||
await db.update(schema.accountTokens)
|
||||
.set({ isDefault: true, enabled: true, updatedAt: now })
|
||||
.where(eq(schema.accountTokens.id, tokenId))
|
||||
.run();
|
||||
|
||||
updateAccountApiToken(target.accountId, target.token);
|
||||
await updateAccountApiToken(target.accountId, target.token);
|
||||
return true;
|
||||
}
|
||||
|
||||
export function repairDefaultToken(accountId: number) {
|
||||
const tokens = db.select()
|
||||
export async function repairDefaultToken(accountId: number) {
|
||||
const tokens = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, accountId))
|
||||
.all();
|
||||
|
||||
const enabled = tokens.filter((t) => t.enabled);
|
||||
if (enabled.length === 0) {
|
||||
updateAccountApiToken(accountId, null);
|
||||
await updateAccountApiToken(accountId, null);
|
||||
return null;
|
||||
}
|
||||
|
||||
const currentDefault = enabled.find((t) => t.isDefault) || enabled[0];
|
||||
const now = new Date().toISOString();
|
||||
|
||||
db.update(schema.accountTokens)
|
||||
await db.update(schema.accountTokens)
|
||||
.set({ isDefault: false, updatedAt: now })
|
||||
.where(eq(schema.accountTokens.accountId, accountId))
|
||||
.run();
|
||||
|
||||
db.update(schema.accountTokens)
|
||||
await db.update(schema.accountTokens)
|
||||
.set({ isDefault: true, enabled: true, updatedAt: now })
|
||||
.where(eq(schema.accountTokens.id, currentDefault.id))
|
||||
.run();
|
||||
|
||||
updateAccountApiToken(accountId, currentDefault.token);
|
||||
await updateAccountApiToken(accountId, currentDefault.token);
|
||||
return currentDefault;
|
||||
}
|
||||
|
||||
export function syncTokensFromUpstream(accountId: number, upstreamTokens: UpstreamApiToken[]) {
|
||||
export async function syncTokensFromUpstream(accountId: number, upstreamTokens: UpstreamApiToken[]) {
|
||||
const now = new Date().toISOString();
|
||||
const existing = db.select()
|
||||
const existing = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, accountId))
|
||||
.all();
|
||||
@@ -201,7 +205,7 @@ export function syncTokensFromUpstream(accountId: number, upstreamTokens: Upstre
|
||||
|
||||
const byToken = existing.find((row) => row.token === tokenValue);
|
||||
if (byToken) {
|
||||
db.update(schema.accountTokens)
|
||||
await db.update(schema.accountTokens)
|
||||
.set({
|
||||
name: tokenName,
|
||||
tokenGroup,
|
||||
@@ -220,7 +224,7 @@ export function syncTokensFromUpstream(accountId: number, upstreamTokens: Upstre
|
||||
continue;
|
||||
}
|
||||
|
||||
const createdRow = db.insert(schema.accountTokens)
|
||||
const inserted = await db.insert(schema.accountTokens)
|
||||
.values({
|
||||
accountId,
|
||||
name: tokenName,
|
||||
@@ -232,15 +236,18 @@ export function syncTokensFromUpstream(accountId: number, upstreamTokens: Upstre
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.returning()
|
||||
.get();
|
||||
.run();
|
||||
const insertedId = Number(inserted.lastInsertRowid || 0);
|
||||
if (insertedId <= 0) continue;
|
||||
const createdRow = await db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, insertedId)).get();
|
||||
if (!createdRow) continue;
|
||||
|
||||
existing.push(createdRow);
|
||||
created++;
|
||||
index++;
|
||||
}
|
||||
|
||||
const repaired = repairDefaultToken(accountId);
|
||||
const repaired = await repairDefaultToken(accountId);
|
||||
|
||||
return {
|
||||
created,
|
||||
@@ -250,15 +257,15 @@ export function syncTokensFromUpstream(accountId: number, upstreamTokens: Upstre
|
||||
};
|
||||
}
|
||||
|
||||
export function listTokensWithRelations(accountId?: number) {
|
||||
export async function listTokensWithRelations(accountId?: number) {
|
||||
const base = db.select()
|
||||
.from(schema.accountTokens)
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id));
|
||||
|
||||
const rows = accountId
|
||||
? base.where(eq(schema.accountTokens.accountId, accountId)).all()
|
||||
: base.all();
|
||||
? await base.where(eq(schema.accountTokens.accountId, accountId)).all()
|
||||
: await base.all();
|
||||
|
||||
return rows.map((row) => {
|
||||
const { token, ...tokenMeta } = row.account_tokens;
|
||||
|
||||
@@ -15,7 +15,7 @@ export async function reportTokenExpired(params: {
|
||||
const detailText = params.detail ? appendSessionTokenRebindHint(params.detail) : '';
|
||||
const detail = detailText ? ` (${detailText})` : '';
|
||||
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'token',
|
||||
title: 'Token 已失效',
|
||||
message: `${accountLabel} @ ${siteLabel} 的 Token 无效或已过期${detail}`,
|
||||
@@ -24,7 +24,7 @@ export async function reportTokenExpired(params: {
|
||||
relatedType: 'account',
|
||||
}).run();
|
||||
|
||||
db.update(schema.accounts).set({
|
||||
await db.update(schema.accounts).set({
|
||||
status: 'expired',
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).where(eq(schema.accounts.id, params.accountId)).run();
|
||||
@@ -43,7 +43,7 @@ export async function reportTokenExpired(params: {
|
||||
}
|
||||
|
||||
export async function reportProxyAllFailed(params: { model: string; reason: string }) {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'proxy',
|
||||
title: '代理全部失败',
|
||||
message: `模型=${params.model}, 原因=${params.reason}`,
|
||||
|
||||
@@ -80,9 +80,9 @@ function setTaskStatus(task: BackgroundTask, patch: Partial<BackgroundTask>) {
|
||||
return next;
|
||||
}
|
||||
|
||||
function appendTaskEvent(level: 'info' | 'warning' | 'error', title: string, message: string, taskId: string) {
|
||||
async function appendTaskEvent(level: 'info' | 'warning' | 'error', title: string, message: string, taskId: string) {
|
||||
try {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'status',
|
||||
title,
|
||||
message,
|
||||
|
||||
@@ -26,26 +26,26 @@ describe('backupService', () => {
|
||||
backupService = serviceModule;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.checkinLogs).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
db.delete(schema.settings).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.checkinLogs).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
await db.delete(schema.settings).run();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
it('preserves extended fields in full backup import/export roundtrip', () => {
|
||||
it('preserves extended fields in full backup import/export roundtrip', async () => {
|
||||
const now = new Date().toISOString();
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'roundtrip-site',
|
||||
url: 'https://roundtrip.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -58,7 +58,7 @@ describe('backupService', () => {
|
||||
updatedAt: now,
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'roundtrip-user',
|
||||
accessToken: 'session-token',
|
||||
@@ -77,7 +77,7 @@ describe('backupService', () => {
|
||||
updatedAt: now,
|
||||
}).returning().get();
|
||||
|
||||
const accountToken = db.insert(schema.accountTokens).values({
|
||||
const accountToken = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'default',
|
||||
token: 'sk-roundtrip-token',
|
||||
@@ -88,7 +88,7 @@ describe('backupService', () => {
|
||||
updatedAt: now,
|
||||
}).returning().get();
|
||||
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-*',
|
||||
displayName: 'gpt-route',
|
||||
displayIcon: 'icon-gpt',
|
||||
@@ -98,7 +98,7 @@ describe('backupService', () => {
|
||||
updatedAt: now,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
tokenId: accountToken.id,
|
||||
@@ -116,16 +116,16 @@ describe('backupService', () => {
|
||||
cooldownUntil: now,
|
||||
}).run();
|
||||
|
||||
const exported = backupService.exportBackup('all');
|
||||
const result = backupService.importBackup(exported as unknown as Record<string, unknown>);
|
||||
const exported = await backupService.exportBackup('all');
|
||||
const result = await backupService.importBackup(exported as unknown as Record<string, unknown>);
|
||||
|
||||
expect(result.allImported).toBe(true);
|
||||
expect(result.sections.accounts).toBe(true);
|
||||
|
||||
const restoredSite = db.select().from(schema.sites).where(eq(schema.sites.id, site.id)).get();
|
||||
const restoredAccount = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const restoredRoute = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, route.id)).get();
|
||||
const restoredChannel = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.routeId, route.id)).get();
|
||||
const restoredSite = await db.select().from(schema.sites).where(eq(schema.sites.id, site.id)).get();
|
||||
const restoredAccount = await db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
|
||||
const restoredRoute = await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, route.id)).get();
|
||||
const restoredChannel = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.routeId, route.id)).get();
|
||||
|
||||
expect(restoredSite?.proxyUrl).toBe('http://127.0.0.1:8080');
|
||||
expect(restoredSite?.isPinned).toBe(true);
|
||||
@@ -140,7 +140,7 @@ describe('backupService', () => {
|
||||
expect(restoredChannel?.sourceModel).toBe('gpt-4o');
|
||||
});
|
||||
|
||||
it('imports ALL-API-Hub style payload with accounts and preferences', () => {
|
||||
it('imports ALL-API-Hub style payload with accounts and preferences', async () => {
|
||||
const payload = {
|
||||
timestamp: Date.now(),
|
||||
accounts: {
|
||||
@@ -180,15 +180,15 @@ describe('backupService', () => {
|
||||
},
|
||||
} as Record<string, unknown>;
|
||||
|
||||
const result = backupService.importBackup(payload);
|
||||
const result = await backupService.importBackup(payload);
|
||||
expect(result.allImported).toBe(true);
|
||||
expect(result.sections.accounts).toBe(true);
|
||||
expect(result.sections.preferences).toBe(true);
|
||||
expect(result.appliedSettings.length).toBeGreaterThan(0);
|
||||
|
||||
const sites = db.select().from(schema.sites).all();
|
||||
const accounts = db.select().from(schema.accounts).all();
|
||||
const settings = db.select().from(schema.settings).all();
|
||||
const sites = await db.select().from(schema.sites).all();
|
||||
const accounts = await db.select().from(schema.accounts).all();
|
||||
const settings = await db.select().from(schema.settings).all();
|
||||
|
||||
expect(sites.length).toBe(1);
|
||||
expect(accounts.length).toBe(1);
|
||||
|
||||
@@ -314,12 +314,12 @@ function isSettingValueAcceptable(key: string, value: unknown): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
function exportAccountsSection(): AccountsBackupSection {
|
||||
const sites = db.select().from(schema.sites).orderBy(asc(schema.sites.id)).all();
|
||||
const accounts = db.select().from(schema.accounts).orderBy(asc(schema.accounts.id)).all();
|
||||
const accountTokens = db.select().from(schema.accountTokens).orderBy(asc(schema.accountTokens.id)).all();
|
||||
const tokenRoutes = db.select().from(schema.tokenRoutes).orderBy(asc(schema.tokenRoutes.id)).all();
|
||||
const routeChannels = db.select().from(schema.routeChannels).orderBy(asc(schema.routeChannels.id)).all();
|
||||
async function exportAccountsSection(): Promise<AccountsBackupSection> {
|
||||
const sites = await db.select().from(schema.sites).orderBy(asc(schema.sites.id)).all();
|
||||
const accounts = await db.select().from(schema.accounts).orderBy(asc(schema.accounts.id)).all();
|
||||
const accountTokens = await db.select().from(schema.accountTokens).orderBy(asc(schema.accountTokens.id)).all();
|
||||
const tokenRoutes = await db.select().from(schema.tokenRoutes).orderBy(asc(schema.tokenRoutes.id)).all();
|
||||
const routeChannels = await db.select().from(schema.routeChannels).orderBy(asc(schema.routeChannels.id)).all();
|
||||
|
||||
return {
|
||||
sites,
|
||||
@@ -330,8 +330,8 @@ function exportAccountsSection(): AccountsBackupSection {
|
||||
};
|
||||
}
|
||||
|
||||
function exportPreferencesSection(): PreferencesBackupSection {
|
||||
const settings = db.select().from(schema.settings).all()
|
||||
async function exportPreferencesSection(): Promise<PreferencesBackupSection> {
|
||||
const settings = (await db.select().from(schema.settings).all())
|
||||
.filter((row) => !EXCLUDED_SETTING_KEYS.has(row.key))
|
||||
.map((row) => ({
|
||||
key: row.key,
|
||||
@@ -341,14 +341,14 @@ function exportPreferencesSection(): PreferencesBackupSection {
|
||||
return { settings };
|
||||
}
|
||||
|
||||
export function exportBackup(type: BackupExportType): BackupV2 {
|
||||
export async function exportBackup(type: BackupExportType): Promise<BackupV2> {
|
||||
const now = Date.now();
|
||||
if (type === 'accounts') {
|
||||
return {
|
||||
version: BACKUP_VERSION,
|
||||
timestamp: now,
|
||||
type: 'accounts',
|
||||
accounts: exportAccountsSection(),
|
||||
accounts: await exportAccountsSection(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -357,15 +357,15 @@ export function exportBackup(type: BackupExportType): BackupV2 {
|
||||
version: BACKUP_VERSION,
|
||||
timestamp: now,
|
||||
type: 'preferences',
|
||||
preferences: exportPreferencesSection(),
|
||||
preferences: await exportPreferencesSection(),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
version: BACKUP_VERSION,
|
||||
timestamp: now,
|
||||
accounts: exportAccountsSection(),
|
||||
preferences: exportPreferencesSection(),
|
||||
accounts: await exportAccountsSection(),
|
||||
preferences: await exportPreferencesSection(),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -446,20 +446,20 @@ function detectPreferencesSection(data: RawBackupData): PreferencesBackupSection
|
||||
return null;
|
||||
}
|
||||
|
||||
function importAccountsSection(section: AccountsBackupSection) {
|
||||
db.transaction((tx) => {
|
||||
tx.delete(schema.routeChannels).run();
|
||||
tx.delete(schema.tokenRoutes).run();
|
||||
tx.delete(schema.tokenModelAvailability).run();
|
||||
tx.delete(schema.modelAvailability).run();
|
||||
tx.delete(schema.proxyLogs).run();
|
||||
tx.delete(schema.checkinLogs).run();
|
||||
tx.delete(schema.accountTokens).run();
|
||||
tx.delete(schema.accounts).run();
|
||||
tx.delete(schema.sites).run();
|
||||
async function importAccountsSection(section: AccountsBackupSection): Promise<void> {
|
||||
await db.transaction(async (tx) => {
|
||||
await tx.delete(schema.routeChannels).run();
|
||||
await tx.delete(schema.tokenRoutes).run();
|
||||
await tx.delete(schema.tokenModelAvailability).run();
|
||||
await tx.delete(schema.modelAvailability).run();
|
||||
await tx.delete(schema.proxyLogs).run();
|
||||
await tx.delete(schema.checkinLogs).run();
|
||||
await tx.delete(schema.accountTokens).run();
|
||||
await tx.delete(schema.accounts).run();
|
||||
await tx.delete(schema.sites).run();
|
||||
|
||||
for (const row of section.sites) {
|
||||
tx.insert(schema.sites).values({
|
||||
await tx.insert(schema.sites).values({
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
url: row.url,
|
||||
@@ -476,7 +476,7 @@ function importAccountsSection(section: AccountsBackupSection) {
|
||||
}
|
||||
|
||||
for (const row of section.accounts) {
|
||||
tx.insert(schema.accounts).values({
|
||||
await tx.insert(schema.accounts).values({
|
||||
id: row.id,
|
||||
siteId: row.siteId,
|
||||
username: row.username,
|
||||
@@ -500,7 +500,7 @@ function importAccountsSection(section: AccountsBackupSection) {
|
||||
}
|
||||
|
||||
for (const row of section.accountTokens) {
|
||||
tx.insert(schema.accountTokens).values({
|
||||
await tx.insert(schema.accountTokens).values({
|
||||
id: row.id,
|
||||
accountId: row.accountId,
|
||||
name: row.name,
|
||||
@@ -515,7 +515,7 @@ function importAccountsSection(section: AccountsBackupSection) {
|
||||
}
|
||||
|
||||
for (const row of section.tokenRoutes) {
|
||||
tx.insert(schema.tokenRoutes).values({
|
||||
await tx.insert(schema.tokenRoutes).values({
|
||||
id: row.id,
|
||||
modelPattern: row.modelPattern,
|
||||
displayName: row.displayName ?? null,
|
||||
@@ -528,7 +528,7 @@ function importAccountsSection(section: AccountsBackupSection) {
|
||||
}
|
||||
|
||||
for (const row of section.routeChannels) {
|
||||
tx.insert(schema.routeChannels).values({
|
||||
await tx.insert(schema.routeChannels).values({
|
||||
id: row.id,
|
||||
routeId: row.routeId,
|
||||
accountId: row.accountId,
|
||||
@@ -550,14 +550,14 @@ function importAccountsSection(section: AccountsBackupSection) {
|
||||
});
|
||||
}
|
||||
|
||||
function importPreferencesSection(section: PreferencesBackupSection): Array<{ key: string; value: unknown }> {
|
||||
async function importPreferencesSection(section: PreferencesBackupSection): Promise<Array<{ key: string; value: unknown }>> {
|
||||
const applied: Array<{ key: string; value: unknown }> = [];
|
||||
|
||||
db.transaction((tx) => {
|
||||
await db.transaction(async (tx) => {
|
||||
for (const row of section.settings) {
|
||||
if (!isSettingValueAcceptable(row.key, row.value)) continue;
|
||||
|
||||
tx.insert(schema.settings).values({
|
||||
await tx.insert(schema.settings).values({
|
||||
key: row.key,
|
||||
value: stringifySettingValue(row.value),
|
||||
}).onConflictDoUpdate({
|
||||
@@ -571,7 +571,7 @@ function importPreferencesSection(section: PreferencesBackupSection): Array<{ ke
|
||||
return applied;
|
||||
}
|
||||
|
||||
export function importBackup(data: RawBackupData): BackupImportResult {
|
||||
export async function importBackup(data: RawBackupData): Promise<BackupImportResult> {
|
||||
if (!isRecord(data)) {
|
||||
throw new Error('导入数据格式错误:必须为 JSON 对象');
|
||||
}
|
||||
@@ -599,7 +599,7 @@ export function importBackup(data: RawBackupData): BackupImportResult {
|
||||
if (!accountsSection) {
|
||||
throw new Error('导入数据格式错误:账号数据结构不正确');
|
||||
}
|
||||
importAccountsSection(accountsSection);
|
||||
await importAccountsSection(accountsSection);
|
||||
accountsImported = true;
|
||||
}
|
||||
|
||||
@@ -607,7 +607,7 @@ export function importBackup(data: RawBackupData): BackupImportResult {
|
||||
if (!preferencesSection) {
|
||||
throw new Error('导入数据格式错误:设置数据结构不正确');
|
||||
}
|
||||
appliedSettings = importPreferencesSection(preferencesSection);
|
||||
appliedSettings = await importPreferencesSection(preferencesSection);
|
||||
preferencesImported = true;
|
||||
}
|
||||
|
||||
|
||||
@@ -217,7 +217,7 @@ async function refreshSub2ApiManagedSession(params: {
|
||||
tokenExpiresAt: refreshed.tokenExpiresAt,
|
||||
},
|
||||
});
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set({
|
||||
accessToken: refreshed.accessToken,
|
||||
extraConfig: nextExtraConfig,
|
||||
@@ -320,7 +320,7 @@ async function tryAutoRelogin(account: any, site: any): Promise<string | null> {
|
||||
const loginResult = await adapter.login(site.url, relogin.username, password);
|
||||
if (!loginResult.success || !loginResult.accessToken) return null;
|
||||
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set({
|
||||
accessToken: loginResult.accessToken,
|
||||
status: account.status === 'expired' ? 'active' : account.status,
|
||||
@@ -333,7 +333,7 @@ async function tryAutoRelogin(account: any, site: any): Promise<string | null> {
|
||||
}
|
||||
|
||||
export async function refreshBalance(accountId: number) {
|
||||
const rows = db
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -485,7 +485,7 @@ export async function refreshBalance(accountId: number) {
|
||||
updates.extraConfig = nextExtraConfig;
|
||||
}
|
||||
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set(updates)
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
.run();
|
||||
@@ -504,7 +504,7 @@ export async function refreshBalance(accountId: number) {
|
||||
}
|
||||
|
||||
export async function refreshAllBalances() {
|
||||
const rows = db
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(schema.accounts)
|
||||
.where(eq(schema.accounts.status, 'active'))
|
||||
|
||||
@@ -14,9 +14,9 @@ let dailySummaryTask: cron.ScheduledTask | null = null;
|
||||
|
||||
const DAILY_SUMMARY_DEFAULT_CRON = '58 23 * * *';
|
||||
|
||||
function resolveCronSetting(settingKey: string, fallback: string): string {
|
||||
async function resolveCronSetting(settingKey: string, fallback: string): Promise<string> {
|
||||
try {
|
||||
const row = db.select().from(schema.settings).where(eq(schema.settings.key, settingKey)).get();
|
||||
const row = await db.select().from(schema.settings).where(eq(schema.settings.key, settingKey)).get();
|
||||
if (row?.value) {
|
||||
const parsed = JSON.parse(row.value);
|
||||
if (typeof parsed === 'string' && cron.validate(parsed)) {
|
||||
@@ -58,7 +58,7 @@ function createDailySummaryTask(cronExpr: string) {
|
||||
return cron.schedule(cronExpr, async () => {
|
||||
console.log(`[Scheduler] Sending daily summary at ${new Date().toISOString()}`);
|
||||
try {
|
||||
const metrics = collectDailySummaryMetrics();
|
||||
const metrics = await collectDailySummaryMetrics();
|
||||
const { title, message } = buildDailySummaryNotification(metrics);
|
||||
await sendNotification(title, message, 'info', {
|
||||
bypassThrottle: true,
|
||||
@@ -72,10 +72,10 @@ function createDailySummaryTask(cronExpr: string) {
|
||||
});
|
||||
}
|
||||
|
||||
export function startScheduler() {
|
||||
const activeCheckinCron = resolveCronSetting('checkin_cron', config.checkinCron);
|
||||
const activeBalanceCron = resolveCronSetting('balance_refresh_cron', config.balanceRefreshCron);
|
||||
const activeDailySummaryCron = resolveCronSetting('daily_summary_cron', DAILY_SUMMARY_DEFAULT_CRON);
|
||||
export async function startScheduler() {
|
||||
const activeCheckinCron = await resolveCronSetting('checkin_cron', config.checkinCron);
|
||||
const activeBalanceCron = await resolveCronSetting('balance_refresh_cron', config.balanceRefreshCron);
|
||||
const activeDailySummaryCron = await resolveCronSetting('daily_summary_cron', DAILY_SUMMARY_DEFAULT_CRON);
|
||||
config.checkinCron = activeCheckinCron;
|
||||
config.balanceRefreshCron = activeBalanceCron;
|
||||
|
||||
|
||||
@@ -102,7 +102,7 @@ async function tryAutoRelogin(account: any, site: any): Promise<string | null> {
|
||||
const result = await adapter.login(site.url, relogin.username, password);
|
||||
if (!result.success || !result.accessToken) return null;
|
||||
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set({
|
||||
accessToken: result.accessToken,
|
||||
updatedAt: new Date().toISOString(),
|
||||
@@ -115,7 +115,7 @@ async function tryAutoRelogin(account: any, site: any): Promise<string | null> {
|
||||
}
|
||||
|
||||
export async function checkinAccount(accountId: number, options?: { skipEvent?: boolean }) {
|
||||
const rows = db
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -133,14 +133,14 @@ export async function checkinAccount(accountId: number, options?: { skipEvent?:
|
||||
reason: '\u7ad9\u70b9\u5df2\u7981\u7528',
|
||||
source: 'checkin',
|
||||
});
|
||||
db.insert(schema.checkinLogs).values({
|
||||
await db.insert(schema.checkinLogs).values({
|
||||
accountId: account.id,
|
||||
status: 'skipped',
|
||||
message: 'site disabled',
|
||||
}).run();
|
||||
|
||||
if (!options?.skipEvent) {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'checkin',
|
||||
title: 'checkin skipped',
|
||||
message: `${account.username || 'ID:' + accountId} @ ${site.name}: site disabled`,
|
||||
@@ -220,7 +220,7 @@ export async function checkinAccount(accountId: number, options?: { skipEvent?:
|
||||
updates.updatedAt = new Date().toISOString();
|
||||
}
|
||||
|
||||
db.update(schema.accounts)
|
||||
await db.update(schema.accounts)
|
||||
.set(updates)
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
.run();
|
||||
@@ -240,7 +240,7 @@ export async function checkinAccount(accountId: number, options?: { skipEvent?:
|
||||
}
|
||||
}
|
||||
|
||||
db.insert(schema.checkinLogs).values({
|
||||
await db.insert(schema.checkinLogs).values({
|
||||
accountId: account.id,
|
||||
status: normalizedStatus,
|
||||
message: logMessage,
|
||||
@@ -248,7 +248,7 @@ export async function checkinAccount(accountId: number, options?: { skipEvent?:
|
||||
}).run();
|
||||
|
||||
if (!options?.skipEvent) {
|
||||
db.insert(schema.events).values({
|
||||
await db.insert(schema.events).values({
|
||||
type: 'checkin',
|
||||
title: effectiveSuccess
|
||||
? (normalizedStatus === 'skipped' ? 'checkin skipped' : 'checkin success')
|
||||
@@ -302,7 +302,7 @@ export async function checkinAccount(accountId: number, options?: { skipEvent?:
|
||||
}
|
||||
|
||||
export async function checkinAll() {
|
||||
const rows = db
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
|
||||
@@ -27,10 +27,10 @@ function round6(value: number): number {
|
||||
return Math.round(value * 1_000_000) / 1_000_000;
|
||||
}
|
||||
|
||||
export function collectDailySummaryMetrics(now = new Date()): DailySummaryMetrics {
|
||||
export async function collectDailySummaryMetrics(now = new Date()): Promise<DailySummaryMetrics> {
|
||||
const { localDay, startUtc, endUtc } = getLocalDayRangeUtc(now);
|
||||
|
||||
const accountRows = db.select().from(schema.accounts)
|
||||
const accountRows = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.sites.status, 'active'))
|
||||
.all();
|
||||
@@ -39,7 +39,7 @@ export function collectDailySummaryMetrics(now = new Date()): DailySummaryMetric
|
||||
const activeAccounts = accounts.filter((account) => account.status === 'active').length;
|
||||
const lowBalanceAccounts = accounts.filter((account) => (account.balance || 0) < 1).length;
|
||||
|
||||
const todayCheckinRows = db.select().from(schema.checkinLogs)
|
||||
const todayCheckinRows = await db.select().from(schema.checkinLogs)
|
||||
.innerJoin(schema.accounts, eq(schema.checkinLogs.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(
|
||||
@@ -67,7 +67,7 @@ export function collectDailySummaryMetrics(now = new Date()): DailySummaryMetric
|
||||
parsedRewardCountByAccount[accountId] = (parsedRewardCountByAccount[accountId] || 0) + 1;
|
||||
}
|
||||
|
||||
const todayProxyRows = db.select().from(schema.proxyLogs)
|
||||
const todayProxyRows = await db.select().from(schema.proxyLogs)
|
||||
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
|
||||
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(and(
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { maskConnectionString, normalizeMigrationInput } from './databaseMigrationService.js';
|
||||
|
||||
describe('databaseMigrationService', () => {
|
||||
it('accepts postgres migration input with normalized url', () => {
|
||||
const normalized = normalizeMigrationInput({
|
||||
dialect: 'postgres',
|
||||
connectionString: ' postgres://user:pass@db.example.com:5432/metapi ',
|
||||
overwrite: true,
|
||||
});
|
||||
|
||||
expect(normalized).toEqual({
|
||||
dialect: 'postgres',
|
||||
connectionString: 'postgres://user:pass@db.example.com:5432/metapi',
|
||||
overwrite: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('accepts mysql migration input', () => {
|
||||
const normalized = normalizeMigrationInput({
|
||||
dialect: 'mysql',
|
||||
connectionString: 'mysql://root:pass@db.example.com:3306/metapi',
|
||||
});
|
||||
|
||||
expect(normalized.dialect).toBe('mysql');
|
||||
expect(normalized.overwrite).toBe(true);
|
||||
});
|
||||
|
||||
it('accepts sqlite file migration target path', () => {
|
||||
const normalized = normalizeMigrationInput({
|
||||
dialect: 'sqlite',
|
||||
connectionString: './data/target.db',
|
||||
overwrite: false,
|
||||
});
|
||||
|
||||
expect(normalized).toEqual({
|
||||
dialect: 'sqlite',
|
||||
connectionString: './data/target.db',
|
||||
overwrite: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('rejects unknown dialect', () => {
|
||||
expect(() => normalizeMigrationInput({
|
||||
dialect: 'oracle',
|
||||
connectionString: 'oracle://db',
|
||||
} as any)).toThrow(/鏂硅█|sqlite\/mysql\/postgres/i);
|
||||
});
|
||||
|
||||
it('rejects postgres input when scheme mismatches', () => {
|
||||
expect(() => normalizeMigrationInput({
|
||||
dialect: 'postgres',
|
||||
connectionString: 'mysql://root:pass@127.0.0.1:3306/metapi',
|
||||
})).toThrow(/postgres/i);
|
||||
});
|
||||
|
||||
it('masks connection string credentials', () => {
|
||||
const masked = maskConnectionString('postgres://admin:super-secret@db.example.com:5432/metapi');
|
||||
expect(masked).toBe('postgres://admin:***@db.example.com:5432/metapi');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,710 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import mysql from 'mysql2/promise';
|
||||
import pg from 'pg';
|
||||
import { mkdirSync } from 'node:fs';
|
||||
import { dirname, resolve } from 'node:path';
|
||||
import { db, schema } from '../db/index.js';
|
||||
|
||||
export type MigrationDialect = 'sqlite' | 'mysql' | 'postgres';
|
||||
|
||||
export interface DatabaseMigrationInput {
|
||||
dialect?: unknown;
|
||||
connectionString?: unknown;
|
||||
overwrite?: unknown;
|
||||
}
|
||||
|
||||
export interface NormalizedDatabaseMigrationInput {
|
||||
dialect: MigrationDialect;
|
||||
connectionString: string;
|
||||
overwrite: boolean;
|
||||
}
|
||||
|
||||
type BackupSnapshot = {
|
||||
version: string;
|
||||
timestamp: number;
|
||||
accounts: {
|
||||
sites: Array<Record<string, unknown>>;
|
||||
accounts: Array<Record<string, unknown>>;
|
||||
accountTokens: Array<Record<string, unknown>>;
|
||||
checkinLogs: Array<Record<string, unknown>>;
|
||||
modelAvailability: Array<Record<string, unknown>>;
|
||||
tokenModelAvailability: Array<Record<string, unknown>>;
|
||||
tokenRoutes: Array<Record<string, unknown>>;
|
||||
routeChannels: Array<Record<string, unknown>>;
|
||||
proxyLogs: Array<Record<string, unknown>>;
|
||||
downstreamApiKeys: Array<Record<string, unknown>>;
|
||||
events: Array<Record<string, unknown>>;
|
||||
};
|
||||
preferences: {
|
||||
settings: Array<{ key: string; value: unknown }>;
|
||||
};
|
||||
};
|
||||
|
||||
export interface DatabaseMigrationSummary {
|
||||
dialect: MigrationDialect;
|
||||
connection: string;
|
||||
overwrite: boolean;
|
||||
version: string;
|
||||
timestamp: number;
|
||||
rows: {
|
||||
sites: number;
|
||||
accounts: number;
|
||||
accountTokens: number;
|
||||
tokenRoutes: number;
|
||||
routeChannels: number;
|
||||
checkinLogs: number;
|
||||
modelAvailability: number;
|
||||
tokenModelAvailability: number;
|
||||
proxyLogs: number;
|
||||
downstreamApiKeys: number;
|
||||
events: number;
|
||||
settings: number;
|
||||
};
|
||||
}
|
||||
|
||||
interface SqlClient {
|
||||
dialect: MigrationDialect;
|
||||
begin(): Promise<void>;
|
||||
commit(): Promise<void>;
|
||||
rollback(): Promise<void>;
|
||||
execute(sqlText: string, params?: unknown[]): Promise<unknown>;
|
||||
queryScalar(sqlText: string, params?: unknown[]): Promise<number>;
|
||||
close(): Promise<void>;
|
||||
}
|
||||
|
||||
interface InsertStatement {
|
||||
table: string;
|
||||
columns: string[];
|
||||
values: unknown[];
|
||||
}
|
||||
|
||||
const DIALECTS: MigrationDialect[] = ['sqlite', 'mysql', 'postgres'];
|
||||
|
||||
function asString(value: unknown): string {
|
||||
return typeof value === 'string' ? value.trim() : '';
|
||||
}
|
||||
|
||||
function asBoolean(value: unknown, fallback: boolean): boolean {
|
||||
if (typeof value === 'boolean') return value;
|
||||
if (typeof value === 'number') return value !== 0;
|
||||
if (typeof value === 'string') {
|
||||
const normalized = value.trim().toLowerCase();
|
||||
if (['1', 'true', 'yes', 'on'].includes(normalized)) return true;
|
||||
if (['0', 'false', 'no', 'off'].includes(normalized)) return false;
|
||||
}
|
||||
return fallback;
|
||||
}
|
||||
|
||||
function asNumber(value: unknown, fallback: number | null = null): number | null {
|
||||
if (value === null || value === undefined) return fallback;
|
||||
const numeric = Number(value);
|
||||
return Number.isFinite(numeric) ? numeric : fallback;
|
||||
}
|
||||
|
||||
function asNullableString(value: unknown): string | null {
|
||||
if (value === null || value === undefined) return null;
|
||||
return String(value);
|
||||
}
|
||||
|
||||
function toJsonString(value: unknown): string {
|
||||
return JSON.stringify(value ?? null);
|
||||
}
|
||||
|
||||
function assertDialectUrl(dialect: MigrationDialect, connectionString: string): void {
|
||||
if (dialect === 'sqlite') return;
|
||||
let parsed: URL;
|
||||
try {
|
||||
parsed = new URL(connectionString);
|
||||
} catch {
|
||||
throw new Error(`数据库连接串无效:${dialect} 需要合法 URL`);
|
||||
}
|
||||
|
||||
if (dialect === 'postgres' && parsed.protocol !== 'postgres:' && parsed.protocol !== 'postgresql:') {
|
||||
throw new Error('PostgreSQL 连接串必须以 postgres:// 或 postgresql:// 开头');
|
||||
}
|
||||
|
||||
if (dialect === 'mysql' && parsed.protocol !== 'mysql:') {
|
||||
throw new Error('MySQL 连接串必须以 mysql:// 开头');
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeSqliteTarget(raw: string): string {
|
||||
if (!raw) throw new Error('SQLite 目标路径不能为空');
|
||||
if (raw.startsWith('file://')) {
|
||||
const parsed = new URL(raw);
|
||||
return decodeURIComponent(parsed.pathname);
|
||||
}
|
||||
if (raw.startsWith('sqlite://')) {
|
||||
return raw.slice('sqlite://'.length).trim();
|
||||
}
|
||||
return raw;
|
||||
}
|
||||
|
||||
export function normalizeMigrationInput(input: DatabaseMigrationInput): NormalizedDatabaseMigrationInput {
|
||||
const rawDialect = asString(input.dialect).toLowerCase();
|
||||
if (!DIALECTS.includes(rawDialect as MigrationDialect)) {
|
||||
throw new Error('数据库方言无效,仅支持 sqlite/mysql/postgres');
|
||||
}
|
||||
|
||||
const dialect = rawDialect as MigrationDialect;
|
||||
let connectionString = asString(input.connectionString);
|
||||
if (!connectionString) {
|
||||
throw new Error('数据库连接串不能为空');
|
||||
}
|
||||
|
||||
if (dialect === 'sqlite') {
|
||||
connectionString = normalizeSqliteTarget(connectionString);
|
||||
} else {
|
||||
assertDialectUrl(dialect, connectionString);
|
||||
}
|
||||
|
||||
return {
|
||||
dialect,
|
||||
connectionString,
|
||||
overwrite: input.overwrite === undefined ? true : asBoolean(input.overwrite, true),
|
||||
};
|
||||
}
|
||||
|
||||
export function maskConnectionString(connectionString: string): string {
|
||||
try {
|
||||
const parsed = new URL(connectionString);
|
||||
if (!parsed.password) return connectionString;
|
||||
parsed.password = '***';
|
||||
return parsed.toString();
|
||||
} catch {
|
||||
return connectionString;
|
||||
}
|
||||
}
|
||||
|
||||
function quoteIdent(dialect: MigrationDialect, identifier: string): string {
|
||||
return dialect === 'mysql' ? `\`${identifier}\`` : `"${identifier}"`;
|
||||
}
|
||||
|
||||
function parseSettingValue(raw: string | null): unknown {
|
||||
if (raw === null || raw === undefined) return null;
|
||||
try {
|
||||
return JSON.parse(raw);
|
||||
} catch {
|
||||
return raw;
|
||||
}
|
||||
}
|
||||
|
||||
async function toBackupSnapshot(): Promise<BackupSnapshot> {
|
||||
const settingsRows = await db.select().from(schema.settings).all();
|
||||
return {
|
||||
version: 'live-db-snapshot',
|
||||
timestamp: Date.now(),
|
||||
accounts: {
|
||||
sites: await db.select().from(schema.sites).all() as Array<Record<string, unknown>>,
|
||||
accounts: await db.select().from(schema.accounts).all() as Array<Record<string, unknown>>,
|
||||
accountTokens: await db.select().from(schema.accountTokens).all() as Array<Record<string, unknown>>,
|
||||
checkinLogs: await db.select().from(schema.checkinLogs).all() as Array<Record<string, unknown>>,
|
||||
modelAvailability: await db.select().from(schema.modelAvailability).all() as Array<Record<string, unknown>>,
|
||||
tokenModelAvailability: await db.select().from(schema.tokenModelAvailability).all() as Array<Record<string, unknown>>,
|
||||
tokenRoutes: await db.select().from(schema.tokenRoutes).all() as Array<Record<string, unknown>>,
|
||||
routeChannels: await db.select().from(schema.routeChannels).all() as Array<Record<string, unknown>>,
|
||||
proxyLogs: await db.select().from(schema.proxyLogs).all() as Array<Record<string, unknown>>,
|
||||
downstreamApiKeys: await db.select().from(schema.downstreamApiKeys).all() as Array<Record<string, unknown>>,
|
||||
events: await db.select().from(schema.events).all() as Array<Record<string, unknown>>,
|
||||
},
|
||||
preferences: {
|
||||
settings: settingsRows.map((row) => ({ key: row.key, value: parseSettingValue(row.value) })),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function createPostgresClient(connectionString: string): Promise<SqlClient> {
|
||||
const client = new pg.Client({ connectionString });
|
||||
await client.connect();
|
||||
|
||||
return {
|
||||
dialect: 'postgres',
|
||||
begin: async () => { await client.query('BEGIN'); },
|
||||
commit: async () => { await client.query('COMMIT'); },
|
||||
rollback: async () => { await client.query('ROLLBACK'); },
|
||||
execute: async (sqlText, params = []) => client.query(sqlText, params),
|
||||
queryScalar: async (sqlText, params = []) => {
|
||||
const result = await client.query(sqlText, params);
|
||||
const row = result.rows[0] as Record<string, unknown> | undefined;
|
||||
if (!row) return 0;
|
||||
return Number(Object.values(row)[0]) || 0;
|
||||
},
|
||||
close: async () => { await client.end(); },
|
||||
};
|
||||
}
|
||||
|
||||
async function createMySqlClient(connectionString: string): Promise<SqlClient> {
|
||||
const connection = await mysql.createConnection(connectionString);
|
||||
|
||||
return {
|
||||
dialect: 'mysql',
|
||||
begin: async () => { await connection.beginTransaction(); },
|
||||
commit: async () => { await connection.commit(); },
|
||||
rollback: async () => { await connection.rollback(); },
|
||||
execute: async (sqlText, params = []) => connection.execute(sqlText, params as any[]),
|
||||
queryScalar: async (sqlText, params = []) => {
|
||||
const [rows] = await connection.query(sqlText, params as any[]);
|
||||
if (!Array.isArray(rows) || rows.length === 0) return 0;
|
||||
const row = rows[0] as Record<string, unknown>;
|
||||
return Number(Object.values(row)[0]) || 0;
|
||||
},
|
||||
close: async () => { await connection.end(); },
|
||||
};
|
||||
}
|
||||
|
||||
async function createSqliteClient(connectionString: string): Promise<SqlClient> {
|
||||
const filePath = resolve(connectionString);
|
||||
mkdirSync(dirname(filePath), { recursive: true });
|
||||
const sqlite = new Database(filePath);
|
||||
sqlite.pragma('journal_mode = WAL');
|
||||
sqlite.pragma('foreign_keys = ON');
|
||||
|
||||
return {
|
||||
dialect: 'sqlite',
|
||||
begin: async () => { sqlite.exec('BEGIN'); },
|
||||
commit: async () => { sqlite.exec('COMMIT'); },
|
||||
rollback: async () => { sqlite.exec('ROLLBACK'); },
|
||||
execute: async (sqlText, params = []) => {
|
||||
const lowered = sqlText.trim().toLowerCase();
|
||||
const stmt = sqlite.prepare(sqlText);
|
||||
if (lowered.startsWith('select')) return await stmt.all(...params);
|
||||
return await stmt.run(...params);
|
||||
},
|
||||
queryScalar: async (sqlText, params = []) => {
|
||||
const row = await sqlite.prepare(sqlText).get(...params) as Record<string, unknown> | undefined;
|
||||
if (!row) return 0;
|
||||
return Number(Object.values(row)[0]) || 0;
|
||||
},
|
||||
close: async () => { sqlite.close(); },
|
||||
};
|
||||
}
|
||||
|
||||
async function createClient(input: NormalizedDatabaseMigrationInput): Promise<SqlClient> {
|
||||
if (input.dialect === 'postgres') return createPostgresClient(input.connectionString);
|
||||
if (input.dialect === 'mysql') return createMySqlClient(input.connectionString);
|
||||
return createSqliteClient(input.connectionString);
|
||||
}
|
||||
|
||||
/**
 * Creates the full application schema on the target connection.
 *
 * Every statement uses CREATE TABLE IF NOT EXISTS, so re-running a migration
 * against the same target is safe. The three branches express the same
 * twelve tables in dialect-appropriate form: identity vs AUTO_INCREMENT vs
 * AUTOINCREMENT primary keys, BOOLEAN vs INTEGER 0/1 flags, and
 * DOUBLE PRECISION vs DOUBLE vs REAL numeric columns. Statements run in
 * dependency order (parents before children) so FOREIGN KEY references resolve.
 */
async function ensureSchema(client: SqlClient): Promise<void> {
  const statements = client.dialect === 'postgres'
    ? [
      // --- Postgres: identity PKs, inline REFERENCES clauses ---
      `CREATE TABLE IF NOT EXISTS "sites" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "name" TEXT NOT NULL, "url" TEXT NOT NULL, "external_checkin_url" TEXT, "platform" TEXT NOT NULL, "proxy_url" TEXT, "status" TEXT DEFAULT 'active', "is_pinned" BOOLEAN DEFAULT FALSE, "sort_order" INTEGER DEFAULT 0, "global_weight" DOUBLE PRECISION DEFAULT 1, "api_key" TEXT, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "accounts" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "site_id" INTEGER NOT NULL REFERENCES "sites"("id") ON DELETE CASCADE, "username" TEXT, "access_token" TEXT NOT NULL, "api_token" TEXT, "balance" DOUBLE PRECISION DEFAULT 0, "balance_used" DOUBLE PRECISION DEFAULT 0, "quota" DOUBLE PRECISION DEFAULT 0, "unit_cost" DOUBLE PRECISION, "value_score" DOUBLE PRECISION DEFAULT 0, "status" TEXT DEFAULT 'active', "is_pinned" BOOLEAN DEFAULT FALSE, "sort_order" INTEGER DEFAULT 0, "checkin_enabled" BOOLEAN DEFAULT TRUE, "last_checkin_at" TEXT, "last_balance_refresh" TEXT, "extra_config" TEXT, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "account_tokens" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "name" TEXT NOT NULL, "token" TEXT NOT NULL, "token_group" TEXT, "source" TEXT DEFAULT 'manual', "enabled" BOOLEAN DEFAULT TRUE, "is_default" BOOLEAN DEFAULT FALSE, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "checkin_logs" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "status" TEXT NOT NULL, "message" TEXT, "reward" TEXT, "created_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "model_availability" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "model_name" TEXT NOT NULL, "available" BOOLEAN, "latency_ms" INTEGER, "checked_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "token_model_availability" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "token_id" INTEGER NOT NULL REFERENCES "account_tokens"("id") ON DELETE CASCADE, "model_name" TEXT NOT NULL, "available" BOOLEAN, "latency_ms" INTEGER, "checked_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "token_routes" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "model_pattern" TEXT NOT NULL, "display_name" TEXT, "display_icon" TEXT, "model_mapping" TEXT, "enabled" BOOLEAN DEFAULT TRUE, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "route_channels" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "route_id" INTEGER NOT NULL REFERENCES "token_routes"("id") ON DELETE CASCADE, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "token_id" INTEGER REFERENCES "account_tokens"("id") ON DELETE SET NULL, "source_model" TEXT, "priority" INTEGER DEFAULT 0, "weight" INTEGER DEFAULT 10, "enabled" BOOLEAN DEFAULT TRUE, "manual_override" BOOLEAN DEFAULT FALSE, "success_count" INTEGER DEFAULT 0, "fail_count" INTEGER DEFAULT 0, "total_latency_ms" INTEGER DEFAULT 0, "total_cost" DOUBLE PRECISION DEFAULT 0, "last_used_at" TEXT, "last_fail_at" TEXT, "cooldown_until" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "proxy_logs" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "route_id" INTEGER, "channel_id" INTEGER, "account_id" INTEGER, "model_requested" TEXT, "model_actual" TEXT, "status" TEXT, "http_status" INTEGER, "latency_ms" INTEGER, "prompt_tokens" INTEGER, "completion_tokens" INTEGER, "total_tokens" INTEGER, "estimated_cost" DOUBLE PRECISION, "error_message" TEXT, "retry_count" INTEGER DEFAULT 0, "created_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "downstream_api_keys" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "name" TEXT NOT NULL, "key" TEXT NOT NULL UNIQUE, "description" TEXT, "enabled" BOOLEAN DEFAULT TRUE, "expires_at" TEXT, "max_cost" DOUBLE PRECISION, "used_cost" DOUBLE PRECISION DEFAULT 0, "max_requests" INTEGER, "used_requests" INTEGER DEFAULT 0, "supported_models" TEXT, "allowed_route_ids" TEXT, "site_weight_multipliers" TEXT, "last_used_at" TEXT, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "events" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "type" TEXT NOT NULL, "title" TEXT NOT NULL, "message" TEXT, "level" TEXT DEFAULT 'info', "read" BOOLEAN DEFAULT FALSE, "related_id" INTEGER, "related_type" TEXT, "created_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "settings" ("key" TEXT PRIMARY KEY, "value" TEXT)`,
    ]
    : client.dialect === 'mysql'
    ? [
      // --- MySQL: AUTO_INCREMENT PKs, named FK constraints; VARCHAR for
      //     columns used as PK/UNIQUE (TEXT cannot be indexed without length) ---
      `CREATE TABLE IF NOT EXISTS \`sites\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`name\` TEXT NOT NULL, \`url\` TEXT NOT NULL, \`external_checkin_url\` TEXT NULL, \`platform\` VARCHAR(64) NOT NULL, \`proxy_url\` TEXT NULL, \`status\` VARCHAR(32) DEFAULT 'active', \`is_pinned\` BOOLEAN DEFAULT FALSE, \`sort_order\` INT DEFAULT 0, \`global_weight\` DOUBLE DEFAULT 1, \`api_key\` TEXT NULL, \`created_at\` TEXT NULL, \`updated_at\` TEXT NULL)`,
      `CREATE TABLE IF NOT EXISTS \`accounts\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`site_id\` INT NOT NULL, \`username\` TEXT NULL, \`access_token\` TEXT NOT NULL, \`api_token\` TEXT NULL, \`balance\` DOUBLE DEFAULT 0, \`balance_used\` DOUBLE DEFAULT 0, \`quota\` DOUBLE DEFAULT 0, \`unit_cost\` DOUBLE NULL, \`value_score\` DOUBLE DEFAULT 0, \`status\` VARCHAR(32) DEFAULT 'active', \`is_pinned\` BOOLEAN DEFAULT FALSE, \`sort_order\` INT DEFAULT 0, \`checkin_enabled\` BOOLEAN DEFAULT TRUE, \`last_checkin_at\` TEXT NULL, \`last_balance_refresh\` TEXT NULL, \`extra_config\` TEXT NULL, \`created_at\` TEXT NULL, \`updated_at\` TEXT NULL, CONSTRAINT \`accounts_site_fk\` FOREIGN KEY (\`site_id\`) REFERENCES \`sites\`(\`id\`) ON DELETE CASCADE)`,
      `CREATE TABLE IF NOT EXISTS \`account_tokens\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`account_id\` INT NOT NULL, \`name\` TEXT NOT NULL, \`token\` TEXT NOT NULL, \`token_group\` TEXT NULL, \`source\` VARCHAR(32) DEFAULT 'manual', \`enabled\` BOOLEAN DEFAULT TRUE, \`is_default\` BOOLEAN DEFAULT FALSE, \`created_at\` TEXT NULL, \`updated_at\` TEXT NULL, CONSTRAINT \`account_tokens_account_fk\` FOREIGN KEY (\`account_id\`) REFERENCES \`accounts\`(\`id\`) ON DELETE CASCADE)`,
      `CREATE TABLE IF NOT EXISTS \`checkin_logs\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`account_id\` INT NOT NULL, \`status\` VARCHAR(32) NOT NULL, \`message\` TEXT NULL, \`reward\` TEXT NULL, \`created_at\` TEXT NULL, CONSTRAINT \`checkin_logs_account_fk\` FOREIGN KEY (\`account_id\`) REFERENCES \`accounts\`(\`id\`) ON DELETE CASCADE)`,
      `CREATE TABLE IF NOT EXISTS \`model_availability\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`account_id\` INT NOT NULL, \`model_name\` TEXT NOT NULL, \`available\` BOOLEAN NULL, \`latency_ms\` INT NULL, \`checked_at\` TEXT NULL, CONSTRAINT \`model_availability_account_fk\` FOREIGN KEY (\`account_id\`) REFERENCES \`accounts\`(\`id\`) ON DELETE CASCADE)`,
      `CREATE TABLE IF NOT EXISTS \`token_model_availability\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`token_id\` INT NOT NULL, \`model_name\` TEXT NOT NULL, \`available\` BOOLEAN NULL, \`latency_ms\` INT NULL, \`checked_at\` TEXT NULL, CONSTRAINT \`token_model_availability_token_fk\` FOREIGN KEY (\`token_id\`) REFERENCES \`account_tokens\`(\`id\`) ON DELETE CASCADE)`,
      `CREATE TABLE IF NOT EXISTS \`token_routes\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`model_pattern\` TEXT NOT NULL, \`display_name\` TEXT NULL, \`display_icon\` TEXT NULL, \`model_mapping\` TEXT NULL, \`enabled\` BOOLEAN DEFAULT TRUE, \`created_at\` TEXT NULL, \`updated_at\` TEXT NULL)`,
      `CREATE TABLE IF NOT EXISTS \`route_channels\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`route_id\` INT NOT NULL, \`account_id\` INT NOT NULL, \`token_id\` INT NULL, \`source_model\` TEXT NULL, \`priority\` INT DEFAULT 0, \`weight\` INT DEFAULT 10, \`enabled\` BOOLEAN DEFAULT TRUE, \`manual_override\` BOOLEAN DEFAULT FALSE, \`success_count\` INT DEFAULT 0, \`fail_count\` INT DEFAULT 0, \`total_latency_ms\` INT DEFAULT 0, \`total_cost\` DOUBLE DEFAULT 0, \`last_used_at\` TEXT NULL, \`last_fail_at\` TEXT NULL, \`cooldown_until\` TEXT NULL, CONSTRAINT \`route_channels_route_fk\` FOREIGN KEY (\`route_id\`) REFERENCES \`token_routes\`(\`id\`) ON DELETE CASCADE, CONSTRAINT \`route_channels_account_fk\` FOREIGN KEY (\`account_id\`) REFERENCES \`accounts\`(\`id\`) ON DELETE CASCADE, CONSTRAINT \`route_channels_token_fk\` FOREIGN KEY (\`token_id\`) REFERENCES \`account_tokens\`(\`id\`) ON DELETE SET NULL)`,
      `CREATE TABLE IF NOT EXISTS \`proxy_logs\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`route_id\` INT NULL, \`channel_id\` INT NULL, \`account_id\` INT NULL, \`model_requested\` TEXT NULL, \`model_actual\` TEXT NULL, \`status\` VARCHAR(32) NULL, \`http_status\` INT NULL, \`latency_ms\` INT NULL, \`prompt_tokens\` INT NULL, \`completion_tokens\` INT NULL, \`total_tokens\` INT NULL, \`estimated_cost\` DOUBLE NULL, \`error_message\` TEXT NULL, \`retry_count\` INT DEFAULT 0, \`created_at\` TEXT NULL)`,
      `CREATE TABLE IF NOT EXISTS \`downstream_api_keys\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`name\` TEXT NOT NULL, \`key\` VARCHAR(191) NOT NULL UNIQUE, \`description\` TEXT NULL, \`enabled\` BOOLEAN DEFAULT TRUE, \`expires_at\` TEXT NULL, \`max_cost\` DOUBLE NULL, \`used_cost\` DOUBLE DEFAULT 0, \`max_requests\` INT NULL, \`used_requests\` INT DEFAULT 0, \`supported_models\` TEXT NULL, \`allowed_route_ids\` TEXT NULL, \`site_weight_multipliers\` TEXT NULL, \`last_used_at\` TEXT NULL, \`created_at\` TEXT NULL, \`updated_at\` TEXT NULL)`,
      `CREATE TABLE IF NOT EXISTS \`events\` (\`id\` INT AUTO_INCREMENT PRIMARY KEY, \`type\` VARCHAR(32) NOT NULL, \`title\` TEXT NOT NULL, \`message\` TEXT NULL, \`level\` VARCHAR(16) DEFAULT 'info', \`read\` BOOLEAN DEFAULT FALSE, \`related_id\` INT NULL, \`related_type\` VARCHAR(32) NULL, \`created_at\` TEXT NULL)`,
      `CREATE TABLE IF NOT EXISTS \`settings\` (\`key\` VARCHAR(191) PRIMARY KEY, \`value\` TEXT NULL)`,
    ]
    : [
      // --- SQLite: AUTOINCREMENT PKs; booleans stored as INTEGER 0/1 ---
      `CREATE TABLE IF NOT EXISTS "sites" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "name" TEXT NOT NULL, "url" TEXT NOT NULL, "external_checkin_url" TEXT, "platform" TEXT NOT NULL, "proxy_url" TEXT, "status" TEXT DEFAULT 'active', "is_pinned" INTEGER DEFAULT 0, "sort_order" INTEGER DEFAULT 0, "global_weight" REAL DEFAULT 1, "api_key" TEXT, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "accounts" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "site_id" INTEGER NOT NULL REFERENCES "sites"("id") ON DELETE CASCADE, "username" TEXT, "access_token" TEXT NOT NULL, "api_token" TEXT, "balance" REAL DEFAULT 0, "balance_used" REAL DEFAULT 0, "quota" REAL DEFAULT 0, "unit_cost" REAL, "value_score" REAL DEFAULT 0, "status" TEXT DEFAULT 'active', "is_pinned" INTEGER DEFAULT 0, "sort_order" INTEGER DEFAULT 0, "checkin_enabled" INTEGER DEFAULT 1, "last_checkin_at" TEXT, "last_balance_refresh" TEXT, "extra_config" TEXT, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "account_tokens" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "name" TEXT NOT NULL, "token" TEXT NOT NULL, "token_group" TEXT, "source" TEXT DEFAULT 'manual', "enabled" INTEGER DEFAULT 1, "is_default" INTEGER DEFAULT 0, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "checkin_logs" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "status" TEXT NOT NULL, "message" TEXT, "reward" TEXT, "created_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "model_availability" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "model_name" TEXT NOT NULL, "available" INTEGER, "latency_ms" INTEGER, "checked_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "token_model_availability" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "token_id" INTEGER NOT NULL REFERENCES "account_tokens"("id") ON DELETE CASCADE, "model_name" TEXT NOT NULL, "available" INTEGER, "latency_ms" INTEGER, "checked_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "token_routes" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "model_pattern" TEXT NOT NULL, "display_name" TEXT, "display_icon" TEXT, "model_mapping" TEXT, "enabled" INTEGER DEFAULT 1, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "route_channels" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "route_id" INTEGER NOT NULL REFERENCES "token_routes"("id") ON DELETE CASCADE, "account_id" INTEGER NOT NULL REFERENCES "accounts"("id") ON DELETE CASCADE, "token_id" INTEGER REFERENCES "account_tokens"("id") ON DELETE SET NULL, "source_model" TEXT, "priority" INTEGER DEFAULT 0, "weight" INTEGER DEFAULT 10, "enabled" INTEGER DEFAULT 1, "manual_override" INTEGER DEFAULT 0, "success_count" INTEGER DEFAULT 0, "fail_count" INTEGER DEFAULT 0, "total_latency_ms" INTEGER DEFAULT 0, "total_cost" REAL DEFAULT 0, "last_used_at" TEXT, "last_fail_at" TEXT, "cooldown_until" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "proxy_logs" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "route_id" INTEGER, "channel_id" INTEGER, "account_id" INTEGER, "model_requested" TEXT, "model_actual" TEXT, "status" TEXT, "http_status" INTEGER, "latency_ms" INTEGER, "prompt_tokens" INTEGER, "completion_tokens" INTEGER, "total_tokens" INTEGER, "estimated_cost" REAL, "error_message" TEXT, "retry_count" INTEGER DEFAULT 0, "created_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "downstream_api_keys" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "name" TEXT NOT NULL, "key" TEXT NOT NULL UNIQUE, "description" TEXT, "enabled" INTEGER DEFAULT 1, "expires_at" TEXT, "max_cost" REAL, "used_cost" REAL DEFAULT 0, "max_requests" INTEGER, "used_requests" INTEGER DEFAULT 0, "supported_models" TEXT, "allowed_route_ids" TEXT, "site_weight_multipliers" TEXT, "last_used_at" TEXT, "created_at" TEXT, "updated_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "events" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "type" TEXT NOT NULL, "title" TEXT NOT NULL, "message" TEXT, "level" TEXT DEFAULT 'info', "read" INTEGER DEFAULT 0, "related_id" INTEGER, "related_type" TEXT, "created_at" TEXT)`,
      `CREATE TABLE IF NOT EXISTS "settings" ("key" TEXT PRIMARY KEY, "value" TEXT)`,
    ];

  for (const sqlText of statements) {
    await client.execute(sqlText);
  }
}
|
||||
|
||||
async function ensureTargetState(client: SqlClient, overwrite: boolean): Promise<void> {
|
||||
const siteCount = await client.queryScalar(`SELECT COUNT(*) FROM ${quoteIdent(client.dialect, 'sites')}`);
|
||||
const settingCount = await client.queryScalar(`SELECT COUNT(*) FROM ${quoteIdent(client.dialect, 'settings')}`);
|
||||
if (!overwrite && (siteCount > 0 || settingCount > 0)) {
|
||||
throw new Error('目标数据库已包含数据。若确认覆盖,请勾选“覆盖目标数据库现有数据”');
|
||||
}
|
||||
}
|
||||
|
||||
async function clearTargetData(client: SqlClient): Promise<void> {
|
||||
const tables = [
|
||||
'route_channels',
|
||||
'token_model_availability',
|
||||
'model_availability',
|
||||
'checkin_logs',
|
||||
'proxy_logs',
|
||||
'account_tokens',
|
||||
'accounts',
|
||||
'token_routes',
|
||||
'sites',
|
||||
'downstream_api_keys',
|
||||
'events',
|
||||
'settings',
|
||||
];
|
||||
for (const table of tables) {
|
||||
await client.execute(`DELETE FROM ${quoteIdent(client.dialect, table)}`);
|
||||
}
|
||||
}
|
||||
|
||||
function buildStatements(snapshot: BackupSnapshot): InsertStatement[] {
|
||||
const statements: InsertStatement[] = [];
|
||||
|
||||
for (const row of snapshot.accounts.sites) {
|
||||
statements.push({
|
||||
table: 'sites',
|
||||
columns: ['id', 'name', 'url', 'external_checkin_url', 'platform', 'proxy_url', 'status', 'is_pinned', 'sort_order', 'global_weight', 'api_key', 'created_at', 'updated_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNullableString(row.name),
|
||||
asNullableString(row.url),
|
||||
asNullableString(row.externalCheckinUrl),
|
||||
asNullableString(row.platform),
|
||||
asNullableString(row.proxyUrl),
|
||||
asNullableString(row.status) ?? 'active',
|
||||
asBoolean(row.isPinned, false),
|
||||
asNumber(row.sortOrder, 0),
|
||||
asNumber(row.globalWeight, 1),
|
||||
asNullableString(row.apiKey),
|
||||
asNullableString(row.createdAt),
|
||||
asNullableString(row.updatedAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.accounts) {
|
||||
statements.push({
|
||||
table: 'accounts',
|
||||
columns: ['id', 'site_id', 'username', 'access_token', 'api_token', 'balance', 'balance_used', 'quota', 'unit_cost', 'value_score', 'status', 'is_pinned', 'sort_order', 'checkin_enabled', 'last_checkin_at', 'last_balance_refresh', 'extra_config', 'created_at', 'updated_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNumber(row.siteId, 0),
|
||||
asNullableString(row.username),
|
||||
asNullableString(row.accessToken),
|
||||
asNullableString(row.apiToken),
|
||||
asNumber(row.balance, 0),
|
||||
asNumber(row.balanceUsed, 0),
|
||||
asNumber(row.quota, 0),
|
||||
asNumber(row.unitCost, null),
|
||||
asNumber(row.valueScore, 0),
|
||||
asNullableString(row.status) ?? 'active',
|
||||
asBoolean(row.isPinned, false),
|
||||
asNumber(row.sortOrder, 0),
|
||||
asBoolean(row.checkinEnabled, true),
|
||||
asNullableString(row.lastCheckinAt),
|
||||
asNullableString(row.lastBalanceRefresh),
|
||||
asNullableString(row.extraConfig),
|
||||
asNullableString(row.createdAt),
|
||||
asNullableString(row.updatedAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.accountTokens) {
|
||||
statements.push({
|
||||
table: 'account_tokens',
|
||||
columns: ['id', 'account_id', 'name', 'token', 'token_group', 'source', 'enabled', 'is_default', 'created_at', 'updated_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNumber(row.accountId, 0),
|
||||
asNullableString(row.name),
|
||||
asNullableString(row.token),
|
||||
asNullableString(row.tokenGroup),
|
||||
asNullableString(row.source) ?? 'manual',
|
||||
asBoolean(row.enabled, true),
|
||||
asBoolean(row.isDefault, false),
|
||||
asNullableString(row.createdAt),
|
||||
asNullableString(row.updatedAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.checkinLogs) {
|
||||
statements.push({
|
||||
table: 'checkin_logs',
|
||||
columns: ['id', 'account_id', 'status', 'message', 'reward', 'created_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNumber(row.accountId, 0),
|
||||
asNullableString(row.status) ?? 'success',
|
||||
asNullableString(row.message),
|
||||
asNullableString(row.reward),
|
||||
asNullableString(row.createdAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.modelAvailability) {
|
||||
statements.push({
|
||||
table: 'model_availability',
|
||||
columns: ['id', 'account_id', 'model_name', 'available', 'latency_ms', 'checked_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNumber(row.accountId, 0),
|
||||
asNullableString(row.modelName),
|
||||
asBoolean(row.available, false),
|
||||
asNumber(row.latencyMs, null),
|
||||
asNullableString(row.checkedAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.tokenModelAvailability) {
|
||||
statements.push({
|
||||
table: 'token_model_availability',
|
||||
columns: ['id', 'token_id', 'model_name', 'available', 'latency_ms', 'checked_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNumber(row.tokenId, 0),
|
||||
asNullableString(row.modelName),
|
||||
asBoolean(row.available, false),
|
||||
asNumber(row.latencyMs, null),
|
||||
asNullableString(row.checkedAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.tokenRoutes) {
|
||||
statements.push({
|
||||
table: 'token_routes',
|
||||
columns: ['id', 'model_pattern', 'display_name', 'display_icon', 'model_mapping', 'enabled', 'created_at', 'updated_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNullableString(row.modelPattern),
|
||||
asNullableString(row.displayName),
|
||||
asNullableString(row.displayIcon),
|
||||
asNullableString(row.modelMapping),
|
||||
asBoolean(row.enabled, true),
|
||||
asNullableString(row.createdAt),
|
||||
asNullableString(row.updatedAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.routeChannels) {
|
||||
statements.push({
|
||||
table: 'route_channels',
|
||||
columns: ['id', 'route_id', 'account_id', 'token_id', 'source_model', 'priority', 'weight', 'enabled', 'manual_override', 'success_count', 'fail_count', 'total_latency_ms', 'total_cost', 'last_used_at', 'last_fail_at', 'cooldown_until'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNumber(row.routeId, 0),
|
||||
asNumber(row.accountId, 0),
|
||||
asNumber(row.tokenId, null),
|
||||
asNullableString(row.sourceModel),
|
||||
asNumber(row.priority, 0),
|
||||
asNumber(row.weight, 10),
|
||||
asBoolean(row.enabled, true),
|
||||
asBoolean(row.manualOverride, false),
|
||||
asNumber(row.successCount, 0),
|
||||
asNumber(row.failCount, 0),
|
||||
asNumber(row.totalLatencyMs, 0),
|
||||
asNumber(row.totalCost, 0),
|
||||
asNullableString(row.lastUsedAt),
|
||||
asNullableString(row.lastFailAt),
|
||||
asNullableString(row.cooldownUntil),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.proxyLogs) {
|
||||
statements.push({
|
||||
table: 'proxy_logs',
|
||||
columns: ['id', 'route_id', 'channel_id', 'account_id', 'model_requested', 'model_actual', 'status', 'http_status', 'latency_ms', 'prompt_tokens', 'completion_tokens', 'total_tokens', 'estimated_cost', 'error_message', 'retry_count', 'created_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNumber(row.routeId, null),
|
||||
asNumber(row.channelId, null),
|
||||
asNumber(row.accountId, null),
|
||||
asNullableString(row.modelRequested),
|
||||
asNullableString(row.modelActual),
|
||||
asNullableString(row.status),
|
||||
asNumber(row.httpStatus, null),
|
||||
asNumber(row.latencyMs, null),
|
||||
asNumber(row.promptTokens, null),
|
||||
asNumber(row.completionTokens, null),
|
||||
asNumber(row.totalTokens, null),
|
||||
asNumber(row.estimatedCost, null),
|
||||
asNullableString(row.errorMessage),
|
||||
asNumber(row.retryCount, 0),
|
||||
asNullableString(row.createdAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.downstreamApiKeys) {
|
||||
statements.push({
|
||||
table: 'downstream_api_keys',
|
||||
columns: ['id', 'name', 'key', 'description', 'enabled', 'expires_at', 'max_cost', 'used_cost', 'max_requests', 'used_requests', 'supported_models', 'allowed_route_ids', 'site_weight_multipliers', 'last_used_at', 'created_at', 'updated_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNullableString(row.name),
|
||||
asNullableString(row.key),
|
||||
asNullableString(row.description),
|
||||
asBoolean(row.enabled, true),
|
||||
asNullableString(row.expiresAt),
|
||||
asNumber(row.maxCost, null),
|
||||
asNumber(row.usedCost, 0),
|
||||
asNumber(row.maxRequests, null),
|
||||
asNumber(row.usedRequests, 0),
|
||||
asNullableString(row.supportedModels),
|
||||
asNullableString(row.allowedRouteIds),
|
||||
asNullableString(row.siteWeightMultipliers),
|
||||
asNullableString(row.lastUsedAt),
|
||||
asNullableString(row.createdAt),
|
||||
asNullableString(row.updatedAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.accounts.events) {
|
||||
statements.push({
|
||||
table: 'events',
|
||||
columns: ['id', 'type', 'title', 'message', 'level', 'read', 'related_id', 'related_type', 'created_at'],
|
||||
values: [
|
||||
asNumber(row.id, 0),
|
||||
asNullableString(row.type),
|
||||
asNullableString(row.title),
|
||||
asNullableString(row.message),
|
||||
asNullableString(row.level) ?? 'info',
|
||||
asBoolean(row.read, false),
|
||||
asNumber(row.relatedId, null),
|
||||
asNullableString(row.relatedType),
|
||||
asNullableString(row.createdAt),
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
for (const row of snapshot.preferences.settings) {
|
||||
statements.push({
|
||||
table: 'settings',
|
||||
columns: ['key', 'value'],
|
||||
values: [row.key, toJsonString(row.value)],
|
||||
});
|
||||
}
|
||||
|
||||
return statements;
|
||||
}
|
||||
|
||||
function buildInsertSql(dialect: MigrationDialect, statement: InsertStatement): { sqlText: string; params: unknown[] } {
|
||||
const table = quoteIdent(dialect, statement.table);
|
||||
const columns = statement.columns.map((item) => quoteIdent(dialect, item)).join(', ');
|
||||
const placeholders = statement.columns.map((_, index) => (dialect === 'postgres' ? `$${index + 1}` : '?')).join(', ');
|
||||
const params = statement.values.map((value) => {
|
||||
if (dialect === 'sqlite' && typeof value === 'boolean') {
|
||||
return value ? 1 : 0;
|
||||
}
|
||||
return value;
|
||||
});
|
||||
return {
|
||||
sqlText: `INSERT INTO ${table} (${columns}) VALUES (${placeholders})`,
|
||||
params,
|
||||
};
|
||||
}
|
||||
|
||||
async function insertAllRows(client: SqlClient, statements: InsertStatement[]): Promise<void> {
|
||||
for (const statement of statements) {
|
||||
const { sqlText, params } = buildInsertSql(client.dialect, statement);
|
||||
await client.execute(sqlText, params);
|
||||
}
|
||||
}
|
||||
|
||||
async function syncPostgresSequences(client: SqlClient): Promise<void> {
|
||||
if (client.dialect !== 'postgres') return;
|
||||
const tables = [
|
||||
'sites',
|
||||
'accounts',
|
||||
'account_tokens',
|
||||
'checkin_logs',
|
||||
'model_availability',
|
||||
'token_model_availability',
|
||||
'token_routes',
|
||||
'route_channels',
|
||||
'proxy_logs',
|
||||
'downstream_api_keys',
|
||||
'events',
|
||||
];
|
||||
for (const table of tables) {
|
||||
await client.execute(`SELECT setval(pg_get_serial_sequence('${table}', 'id'), COALESCE((SELECT MAX(id) FROM "${table}"), 1), TRUE)`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function migrateCurrentDatabase(input: DatabaseMigrationInput): Promise<DatabaseMigrationSummary> {
|
||||
const normalized = normalizeMigrationInput(input);
|
||||
const snapshot = await toBackupSnapshot();
|
||||
const client = await createClient(normalized);
|
||||
|
||||
try {
|
||||
await ensureSchema(client);
|
||||
await ensureTargetState(client, normalized.overwrite);
|
||||
|
||||
await client.begin();
|
||||
try {
|
||||
if (normalized.overwrite) {
|
||||
await clearTargetData(client);
|
||||
}
|
||||
await insertAllRows(client, buildStatements(snapshot));
|
||||
await syncPostgresSequences(client);
|
||||
await client.commit();
|
||||
} catch (error) {
|
||||
await client.rollback();
|
||||
throw error;
|
||||
}
|
||||
} finally {
|
||||
await client.close();
|
||||
}
|
||||
|
||||
return {
|
||||
dialect: normalized.dialect,
|
||||
connection: maskConnectionString(normalized.connectionString),
|
||||
overwrite: normalized.overwrite,
|
||||
version: snapshot.version,
|
||||
timestamp: snapshot.timestamp,
|
||||
rows: {
|
||||
sites: snapshot.accounts.sites.length,
|
||||
accounts: snapshot.accounts.accounts.length,
|
||||
accountTokens: snapshot.accounts.accountTokens.length,
|
||||
tokenRoutes: snapshot.accounts.tokenRoutes.length,
|
||||
routeChannels: snapshot.accounts.routeChannels.length,
|
||||
checkinLogs: snapshot.accounts.checkinLogs.length,
|
||||
modelAvailability: snapshot.accounts.modelAvailability.length,
|
||||
tokenModelAvailability: snapshot.accounts.tokenModelAvailability.length,
|
||||
proxyLogs: snapshot.accounts.proxyLogs.length,
|
||||
downstreamApiKeys: snapshot.accounts.downstreamApiKeys.length,
|
||||
events: snapshot.accounts.events.length,
|
||||
settings: snapshot.preferences.settings.length,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export async function testDatabaseConnection(input: DatabaseMigrationInput): Promise<{ dialect: MigrationDialect; connection: string }> {
|
||||
const normalized = normalizeMigrationInput(input);
|
||||
const client = await createClient(normalized);
|
||||
try {
|
||||
await client.execute('SELECT 1');
|
||||
} finally {
|
||||
await client.close();
|
||||
}
|
||||
|
||||
return {
|
||||
dialect: normalized.dialect,
|
||||
connection: maskConnectionString(normalized.connectionString),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -29,9 +29,9 @@ describe('downstreamApiKeyService', () => {
|
||||
service = serviceModule;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.downstreamApiKeys).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.downstreamApiKeys).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
config.proxyToken = 'sk-global-proxy-token';
|
||||
});
|
||||
|
||||
@@ -39,8 +39,8 @@ describe('downstreamApiKeyService', () => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
it('authorizes global proxy token when no managed key matches', () => {
|
||||
const result = service.authorizeDownstreamToken('sk-global-proxy-token');
|
||||
it('authorizes global proxy token when no managed key matches', async () => {
|
||||
const result = await service.authorizeDownstreamToken('sk-global-proxy-token');
|
||||
expect(result.ok).toBe(true);
|
||||
if (result.ok) {
|
||||
expect(result.key).toBeNull();
|
||||
@@ -49,23 +49,23 @@ describe('downstreamApiKeyService', () => {
|
||||
}
|
||||
});
|
||||
|
||||
it('rejects managed keys by lifecycle guards (disabled, expired, over budget, over requests)', () => {
|
||||
it('rejects managed keys by lifecycle guards (disabled, expired, over budget, over requests)', async () => {
|
||||
const now = Date.now();
|
||||
|
||||
const disabled = db.insert(schema.downstreamApiKeys).values({
|
||||
const disabled = await db.insert(schema.downstreamApiKeys).values({
|
||||
name: 'disabled',
|
||||
key: 'sk-disabled',
|
||||
enabled: false,
|
||||
}).returning().get();
|
||||
|
||||
const expired = db.insert(schema.downstreamApiKeys).values({
|
||||
const expired = await db.insert(schema.downstreamApiKeys).values({
|
||||
name: 'expired',
|
||||
key: 'sk-expired',
|
||||
enabled: true,
|
||||
expiresAt: new Date(now - 60_000).toISOString(),
|
||||
}).returning().get();
|
||||
|
||||
const overBudget = db.insert(schema.downstreamApiKeys).values({
|
||||
const overBudget = await db.insert(schema.downstreamApiKeys).values({
|
||||
name: 'over-budget',
|
||||
key: 'sk-over-budget',
|
||||
enabled: true,
|
||||
@@ -73,7 +73,7 @@ describe('downstreamApiKeyService', () => {
|
||||
usedCost: 1.2,
|
||||
}).returning().get();
|
||||
|
||||
const overRequests = db.insert(schema.downstreamApiKeys).values({
|
||||
const overRequests = await db.insert(schema.downstreamApiKeys).values({
|
||||
name: 'over-requests',
|
||||
key: 'sk-over-requests',
|
||||
enabled: true,
|
||||
@@ -81,10 +81,10 @@ describe('downstreamApiKeyService', () => {
|
||||
usedRequests: 10,
|
||||
}).returning().get();
|
||||
|
||||
const r1 = service.authorizeDownstreamToken(disabled.key);
|
||||
const r2 = service.authorizeDownstreamToken(expired.key);
|
||||
const r3 = service.authorizeDownstreamToken(overBudget.key);
|
||||
const r4 = service.authorizeDownstreamToken(overRequests.key);
|
||||
const r1 = await service.authorizeDownstreamToken(disabled.key);
|
||||
const r2 = await service.authorizeDownstreamToken(expired.key);
|
||||
const r3 = await service.authorizeDownstreamToken(overBudget.key);
|
||||
const r4 = await service.authorizeDownstreamToken(overRequests.key);
|
||||
|
||||
expect(r1.ok).toBe(false);
|
||||
expect(r2.ok).toBe(false);
|
||||
@@ -92,8 +92,8 @@ describe('downstreamApiKeyService', () => {
|
||||
expect(r4.ok).toBe(false);
|
||||
});
|
||||
|
||||
it('parses policy fields and supports model matching patterns', () => {
|
||||
const row = db.insert(schema.downstreamApiKeys).values({
|
||||
it('parses policy fields and supports model matching patterns', async () => {
|
||||
const row = await db.insert(schema.downstreamApiKeys).values({
|
||||
name: 'project-a',
|
||||
key: 'sk-project-a',
|
||||
enabled: true,
|
||||
@@ -102,7 +102,7 @@ describe('downstreamApiKeyService', () => {
|
||||
siteWeightMultipliers: JSON.stringify({ '1': 2.5, '7': 0.4 }),
|
||||
}).returning().get();
|
||||
|
||||
const result = service.authorizeDownstreamToken(row.key);
|
||||
const result = await service.authorizeDownstreamToken(row.key);
|
||||
expect(result.ok).toBe(true);
|
||||
if (!result.ok) return;
|
||||
|
||||
@@ -116,8 +116,8 @@ describe('downstreamApiKeyService', () => {
|
||||
expect(service.isModelAllowedByPolicy('gemini-2.0-flash', result.policy)).toBe(false);
|
||||
});
|
||||
|
||||
it('treats selected groups as additional allowed model scope (union semantics)', () => {
|
||||
const claudeGroup = db.insert(schema.tokenRoutes).values({
|
||||
it('treats selected groups as additional allowed model scope (union semantics)', async () => {
|
||||
const claudeGroup = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 're:^claude-(opus|sonnet)-4-6$',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
@@ -129,12 +129,12 @@ describe('downstreamApiKeyService', () => {
|
||||
};
|
||||
|
||||
expect(service.isModelAllowedByPolicy('claude-opus-4-6', policy)).toBe(false);
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-6', policy)).toBe(true);
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('gemini-2.0-flash', policy)).toBe(false);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-6', policy)).toBe(true);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('gemini-2.0-flash', policy)).toBe(false);
|
||||
});
|
||||
|
||||
it('authorizes by selected group model pattern only, not arbitrary internal models', () => {
|
||||
const virtualModelGroup = db.insert(schema.tokenRoutes).values({
|
||||
it('authorizes by selected group model pattern only, not arbitrary internal models', async () => {
|
||||
const virtualModelGroup = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'claude-opus-4-6',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
@@ -145,12 +145,12 @@ describe('downstreamApiKeyService', () => {
|
||||
siteWeightMultipliers: {},
|
||||
};
|
||||
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-6', policy)).toBe(true);
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('claude-sonnet-4-6', policy)).toBe(false);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-6', policy)).toBe(true);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('claude-sonnet-4-6', policy)).toBe(false);
|
||||
});
|
||||
|
||||
it('authorizes models by selected route display name alias', () => {
|
||||
const aliasRoute = db.insert(schema.tokenRoutes).values({
|
||||
it('authorizes models by selected route display name alias', async () => {
|
||||
const aliasRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 're:^claude-(opus|sonnet)-4-5$',
|
||||
displayName: 'claude-opus-4-6',
|
||||
enabled: true,
|
||||
@@ -162,14 +162,14 @@ describe('downstreamApiKeyService', () => {
|
||||
siteWeightMultipliers: {},
|
||||
};
|
||||
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-6', policy)).toBe(true);
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('claude-sonnet-4-5', policy)).toBe(true);
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-5', policy)).toBe(true);
|
||||
expect(service.isModelAllowedByPolicyOrAllowedRoutes('gpt-4o-mini', policy)).toBe(false);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-6', policy)).toBe(true);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('claude-sonnet-4-5', policy)).toBe(true);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('claude-opus-4-5', policy)).toBe(true);
|
||||
expect(await service.isModelAllowedByPolicyOrAllowedRoutes('gpt-4o-mini', policy)).toBe(false);
|
||||
});
|
||||
|
||||
it('accumulates managed key request/cost usage and applies limits', () => {
|
||||
const row = db.insert(schema.downstreamApiKeys).values({
|
||||
it('accumulates managed key request/cost usage and applies limits', async () => {
|
||||
const row = await db.insert(schema.downstreamApiKeys).values({
|
||||
name: 'metered-key',
|
||||
key: 'sk-metered-key',
|
||||
enabled: true,
|
||||
@@ -179,16 +179,16 @@ describe('downstreamApiKeyService', () => {
|
||||
usedCost: 0,
|
||||
}).returning().get();
|
||||
|
||||
service.consumeManagedKeyRequest(row.id);
|
||||
service.consumeManagedKeyRequest(row.id);
|
||||
service.recordManagedKeyCostUsage(row.id, 0.4);
|
||||
service.recordManagedKeyCostUsage(row.id, 0.6);
|
||||
await service.consumeManagedKeyRequest(row.id);
|
||||
await service.consumeManagedKeyRequest(row.id);
|
||||
await service.recordManagedKeyCostUsage(row.id, 0.4);
|
||||
await service.recordManagedKeyCostUsage(row.id, 0.6);
|
||||
|
||||
const latest = service.getDownstreamApiKeyById(row.id);
|
||||
const latest = await service.getDownstreamApiKeyById(row.id);
|
||||
expect(latest?.usedRequests).toBe(2);
|
||||
expect(latest?.usedCost).toBeCloseTo(1);
|
||||
|
||||
const authResult = service.authorizeDownstreamToken(row.key);
|
||||
const authResult = await service.authorizeDownstreamToken(row.key);
|
||||
expect(authResult.ok).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -176,10 +176,10 @@ export function isModelAllowedByPolicy(model: string, policy: DownstreamRoutingP
|
||||
return patterns.some((pattern) => matchesDownstreamModelPattern(model, pattern));
|
||||
}
|
||||
|
||||
function isModelMatchedByAllowedRoutes(model: string, allowedRouteIds: number[]): boolean {
|
||||
async function isModelMatchedByAllowedRoutes(model: string, allowedRouteIds: number[]): Promise<boolean> {
|
||||
if (allowedRouteIds.length === 0) return false;
|
||||
|
||||
const routes = db.select({
|
||||
const routes = await db.select({
|
||||
id: schema.tokenRoutes.id,
|
||||
modelPattern: schema.tokenRoutes.modelPattern,
|
||||
displayName: schema.tokenRoutes.displayName,
|
||||
@@ -198,7 +198,7 @@ function isModelMatchedByAllowedRoutes(model: string, allowedRouteIds: number[])
|
||||
});
|
||||
}
|
||||
|
||||
export function isModelAllowedByPolicyOrAllowedRoutes(model: string, policy: DownstreamRoutingPolicy): boolean {
|
||||
export async function isModelAllowedByPolicyOrAllowedRoutes(model: string, policy: DownstreamRoutingPolicy): Promise<boolean> {
|
||||
const patterns = normalizeSupportedModelsInput(policy.supportedModels);
|
||||
const allowedRouteIds = normalizeAllowedRouteIdsInput(policy.allowedRouteIds);
|
||||
const hasPatternRules = patterns.length > 0;
|
||||
@@ -212,7 +212,7 @@ export function isModelAllowedByPolicyOrAllowedRoutes(model: string, policy: Dow
|
||||
|
||||
if (!hasRouteRules) return false;
|
||||
|
||||
return isModelMatchedByAllowedRoutes(model, allowedRouteIds);
|
||||
return await isModelMatchedByAllowedRoutes(model, allowedRouteIds);
|
||||
}
|
||||
|
||||
export function toDownstreamApiKeyPolicyView(row: DownstreamApiKeyRow): DownstreamApiKeyPolicyView {
|
||||
@@ -249,26 +249,26 @@ export function toPolicyFromView(view: Pick<DownstreamApiKeyPolicyView, 'support
|
||||
};
|
||||
}
|
||||
|
||||
export function listDownstreamApiKeys(): DownstreamApiKeyPolicyView[] {
|
||||
return db.select().from(schema.downstreamApiKeys)
|
||||
.all()
|
||||
export async function listDownstreamApiKeys(): Promise<DownstreamApiKeyPolicyView[]> {
|
||||
return (await db.select().from(schema.downstreamApiKeys)
|
||||
.all())
|
||||
.map((row) => toDownstreamApiKeyPolicyView(row))
|
||||
.sort((a, b) => b.id - a.id);
|
||||
}
|
||||
|
||||
export function getDownstreamApiKeyById(id: number): DownstreamApiKeyPolicyView | null {
|
||||
const row = db.select().from(schema.downstreamApiKeys)
|
||||
export async function getDownstreamApiKeyById(id: number): Promise<DownstreamApiKeyPolicyView | null> {
|
||||
const row = await db.select().from(schema.downstreamApiKeys)
|
||||
.where(eq(schema.downstreamApiKeys.id, id))
|
||||
.get();
|
||||
if (!row) return null;
|
||||
return toDownstreamApiKeyPolicyView(row);
|
||||
}
|
||||
|
||||
export function getManagedDownstreamApiKeyByToken(token: string): DownstreamApiKeyPolicyView | null {
|
||||
export async function getManagedDownstreamApiKeyByToken(token: string): Promise<DownstreamApiKeyPolicyView | null> {
|
||||
const normalizedToken = normalizeToken(token);
|
||||
if (!normalizedToken) return null;
|
||||
|
||||
const row = db.select().from(schema.downstreamApiKeys)
|
||||
const row = await db.select().from(schema.downstreamApiKeys)
|
||||
.where(eq(schema.downstreamApiKeys.key, normalizedToken))
|
||||
.get();
|
||||
|
||||
@@ -280,7 +280,7 @@ export function getDefaultGlobalPolicy(): DownstreamRoutingPolicy {
|
||||
return EMPTY_DOWNSTREAM_ROUTING_POLICY;
|
||||
}
|
||||
|
||||
export function authorizeDownstreamToken(token: string): DownstreamTokenAuthResult {
|
||||
export async function authorizeDownstreamToken(token: string): Promise<DownstreamTokenAuthResult> {
|
||||
const normalizedToken = normalizeToken(token);
|
||||
if (!normalizedToken) {
|
||||
return {
|
||||
@@ -291,7 +291,7 @@ export function authorizeDownstreamToken(token: string): DownstreamTokenAuthResu
|
||||
};
|
||||
}
|
||||
|
||||
const managed = getManagedDownstreamApiKeyByToken(normalizedToken);
|
||||
const managed = await getManagedDownstreamApiKeyByToken(normalizedToken);
|
||||
if (managed) {
|
||||
if (!managed.enabled) {
|
||||
return {
|
||||
@@ -359,9 +359,9 @@ export function authorizeDownstreamToken(token: string): DownstreamTokenAuthResu
|
||||
};
|
||||
}
|
||||
|
||||
export function consumeManagedKeyRequest(keyId: number): void {
|
||||
export async function consumeManagedKeyRequest(keyId: number): Promise<void> {
|
||||
const nowIso = new Date().toISOString();
|
||||
db.update(schema.downstreamApiKeys).set({
|
||||
await db.update(schema.downstreamApiKeys).set({
|
||||
// Atomic increment to avoid lost updates under multi-process concurrency.
|
||||
usedRequests: sql`coalesce(${schema.downstreamApiKeys.usedRequests}, 0) + 1`,
|
||||
lastUsedAt: nowIso,
|
||||
@@ -369,11 +369,11 @@ export function consumeManagedKeyRequest(keyId: number): void {
|
||||
}).where(eq(schema.downstreamApiKeys.id, keyId)).run();
|
||||
}
|
||||
|
||||
export function recordManagedKeyCostUsage(keyId: number, estimatedCost: number): void {
|
||||
export async function recordManagedKeyCostUsage(keyId: number, estimatedCost: number): Promise<void> {
|
||||
const cost = Number(estimatedCost);
|
||||
if (!Number.isFinite(cost) || cost <= 0) return;
|
||||
const nowIso = new Date().toISOString();
|
||||
db.update(schema.downstreamApiKeys).set({
|
||||
await db.update(schema.downstreamApiKeys).set({
|
||||
// Atomic increment to avoid lost updates under multi-process concurrency.
|
||||
usedCost: sql`coalesce(${schema.downstreamApiKeys.usedCost}, 0) + ${cost}`,
|
||||
lastUsedAt: nowIso,
|
||||
|
||||
@@ -259,7 +259,7 @@ async function fetchJson(url: string, options?: UndiciRequestInit): Promise<unkn
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
...withSiteProxyRequestInit(url, {
|
||||
...(await withSiteProxyRequestInit(url, {
|
||||
...options,
|
||||
signal: controller.signal,
|
||||
body: options?.body ?? undefined,
|
||||
@@ -267,7 +267,7 @@ async function fetchJson(url: string, options?: UndiciRequestInit): Promise<unkn
|
||||
'Content-Type': 'application/json',
|
||||
...options?.headers,
|
||||
},
|
||||
}),
|
||||
})),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
|
||||
@@ -36,17 +36,17 @@ describe('refreshModelsForAccount credential discovery', () => {
|
||||
refreshModelsForAccount = modelService.refreshModelsForAccount;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
getApiTokenMock.mockReset();
|
||||
getModelsMock.mockReset();
|
||||
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
@@ -59,14 +59,14 @@ describe('refreshModelsForAccount credential discovery', () => {
|
||||
token === 'session-token' ? ['claude-sonnet-4-5-20250929', 'claude-opus-4-6'] : []
|
||||
));
|
||||
|
||||
const site = db.insert(schema.sites).values({
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-a',
|
||||
url: 'https://site-a.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'alice',
|
||||
accessToken: 'session-token',
|
||||
@@ -84,7 +84,7 @@ describe('refreshModelsForAccount credential discovery', () => {
|
||||
discoveredByCredential: true,
|
||||
});
|
||||
|
||||
const rows = db.select().from(schema.modelAvailability)
|
||||
const rows = await db.select().from(schema.modelAvailability)
|
||||
.where(eq(schema.modelAvailability.accountId, account.id))
|
||||
.all();
|
||||
expect(rows.map((row) => row.modelName).sort()).toEqual([
|
||||
@@ -92,7 +92,7 @@ describe('refreshModelsForAccount credential discovery', () => {
|
||||
'claude-sonnet-4-5-20250929',
|
||||
]);
|
||||
|
||||
const tokenRows = db.select().from(schema.tokenModelAvailability).all();
|
||||
const tokenRows = await db.select().from(schema.tokenModelAvailability).all();
|
||||
expect(tokenRows).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -26,35 +26,35 @@ describe('rebuildTokenRoutesFromAvailability', () => {
|
||||
rebuildTokenRoutesFromAvailability = modelService.rebuildTokenRoutesFromAvailability;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
it('removes stale exact routes and keeps wildcard routes on rebuild', () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
it('removes stale exact routes and keeps wildcard routes on rebuild', async () => {
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-1',
|
||||
url: 'https://site-1.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'user-1',
|
||||
accessToken: 'access-1',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const token = db.insert(schema.accountTokens).values({
|
||||
const token = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'default',
|
||||
token: 'sk-test',
|
||||
@@ -63,18 +63,18 @@ describe('rebuildTokenRoutesFromAvailability', () => {
|
||||
isDefault: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.tokenModelAvailability).values({
|
||||
await db.insert(schema.tokenModelAvailability).values({
|
||||
tokenId: token.id,
|
||||
modelName: 'latest-model',
|
||||
available: true,
|
||||
}).run();
|
||||
|
||||
const staleRoute = db.insert(schema.tokenRoutes).values({
|
||||
const staleRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'old-model',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: staleRoute.id,
|
||||
accountId: account.id,
|
||||
tokenId: token.id,
|
||||
@@ -84,12 +84,12 @@ describe('rebuildTokenRoutesFromAvailability', () => {
|
||||
manualOverride: false,
|
||||
}).run();
|
||||
|
||||
const wildcardRoute = db.insert(schema.tokenRoutes).values({
|
||||
const wildcardRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-*',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: wildcardRoute.id,
|
||||
accountId: account.id,
|
||||
tokenId: token.id,
|
||||
@@ -99,25 +99,25 @@ describe('rebuildTokenRoutesFromAvailability', () => {
|
||||
manualOverride: false,
|
||||
}).run();
|
||||
|
||||
const rebuild = rebuildTokenRoutesFromAvailability();
|
||||
const rebuild = await rebuildTokenRoutesFromAvailability();
|
||||
|
||||
expect(rebuild.models).toBe(1);
|
||||
expect(rebuild.removedRoutes).toBe(1);
|
||||
|
||||
const oldRoute = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, staleRoute.id)).get();
|
||||
const oldRoute = await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, staleRoute.id)).get();
|
||||
expect(oldRoute).toBeUndefined();
|
||||
|
||||
const oldChannels = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.routeId, staleRoute.id)).all();
|
||||
const oldChannels = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.routeId, staleRoute.id)).all();
|
||||
expect(oldChannels).toHaveLength(0);
|
||||
|
||||
const latestRoute = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.modelPattern, 'latest-model')).get();
|
||||
const latestRoute = await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.modelPattern, 'latest-model')).get();
|
||||
expect(latestRoute).toBeDefined();
|
||||
const latestChannels = db.select().from(schema.routeChannels)
|
||||
const latestChannels = await db.select().from(schema.routeChannels)
|
||||
.where(and(eq(schema.routeChannels.routeId, latestRoute!.id), eq(schema.routeChannels.tokenId, token.id)))
|
||||
.all();
|
||||
expect(latestChannels.length).toBeGreaterThan(0);
|
||||
|
||||
const wildcardRouteAfter = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, wildcardRoute.id)).get();
|
||||
const wildcardRouteAfter = await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, wildcardRoute.id)).get();
|
||||
expect(wildcardRouteAfter).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -39,7 +39,7 @@ async function withTimeout<T>(fn: () => Promise<T>, timeoutMs: number, timeoutMe
|
||||
}
|
||||
|
||||
export async function refreshModelsForAccount(accountId: number) {
|
||||
const row = db.select().from(schema.accounts)
|
||||
const row = await db.select().from(schema.accounts)
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.where(eq(schema.accounts.id, accountId))
|
||||
.get();
|
||||
@@ -52,17 +52,17 @@ export async function refreshModelsForAccount(accountId: number) {
|
||||
const site = row.sites;
|
||||
const adapter = getAdapter(site.platform);
|
||||
|
||||
const accountTokens = db.select()
|
||||
const accountTokens = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(eq(schema.accountTokens.accountId, accountId))
|
||||
.all();
|
||||
|
||||
db.delete(schema.modelAvailability)
|
||||
await db.delete(schema.modelAvailability)
|
||||
.where(eq(schema.modelAvailability.accountId, accountId))
|
||||
.run();
|
||||
|
||||
for (const token of accountTokens) {
|
||||
db.delete(schema.tokenModelAvailability)
|
||||
await db.delete(schema.tokenModelAvailability)
|
||||
.where(eq(schema.tokenModelAvailability.tokenId, token.id))
|
||||
.run();
|
||||
}
|
||||
@@ -87,7 +87,7 @@ export async function refreshModelsForAccount(accountId: number) {
|
||||
);
|
||||
if (discoveredApiToken) {
|
||||
ensureDefaultTokenForAccount(account.id, discoveredApiToken, { name: 'default', source: 'sync' });
|
||||
db.update(schema.accounts).set({
|
||||
await db.update(schema.accounts).set({
|
||||
apiToken: discoveredApiToken,
|
||||
updatedAt: new Date().toISOString(),
|
||||
}).where(eq(schema.accounts.id, account.id)).run();
|
||||
@@ -95,7 +95,7 @@ export async function refreshModelsForAccount(accountId: number) {
|
||||
} catch {}
|
||||
}
|
||||
|
||||
let enabledTokens = db.select()
|
||||
let enabledTokens = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(and(eq(schema.accountTokens.accountId, account.id), eq(schema.accountTokens.enabled, true)))
|
||||
.all();
|
||||
@@ -105,7 +105,7 @@ export async function refreshModelsForAccount(accountId: number) {
|
||||
const fallback = discoveredApiToken || account.apiToken || null;
|
||||
if (fallback) {
|
||||
ensureDefaultTokenForAccount(account.id, fallback, { name: 'default', source: 'legacy' });
|
||||
enabledTokens = db.select()
|
||||
enabledTokens = await db.select()
|
||||
.from(schema.accountTokens)
|
||||
.where(and(eq(schema.accountTokens.accountId, account.id), eq(schema.accountTokens.enabled, true)))
|
||||
.all();
|
||||
@@ -182,7 +182,7 @@ export async function refreshModelsForAccount(accountId: number) {
|
||||
const latencyMs = Date.now() - startedAt;
|
||||
const checkedAt = new Date().toISOString();
|
||||
|
||||
db.insert(schema.tokenModelAvailability).values(
|
||||
await db.insert(schema.tokenModelAvailability).values(
|
||||
models.map((modelName) => ({
|
||||
tokenId: token.id,
|
||||
modelName,
|
||||
@@ -198,7 +198,7 @@ export async function refreshModelsForAccount(accountId: number) {
|
||||
|
||||
if (accountModels.size > 0) {
|
||||
const checkedAt = new Date().toISOString();
|
||||
db.insert(schema.modelAvailability).values(
|
||||
await db.insert(schema.modelAvailability).values(
|
||||
Array.from(accountModels).map((modelName) => ({
|
||||
accountId: account.id,
|
||||
modelName,
|
||||
@@ -220,11 +220,11 @@ export async function refreshModelsForAccount(accountId: number) {
|
||||
}
|
||||
|
||||
async function refreshModelsForAllActiveAccounts() {
|
||||
const accounts = db.select({ id: schema.accounts.id }).from(schema.accounts)
|
||||
const accounts = await db.select({ id: schema.accounts.id }).from(schema.accounts)
|
||||
.where(eq(schema.accounts.status, 'active'))
|
||||
.all();
|
||||
|
||||
const results = [];
|
||||
const results: any[] = [];
|
||||
for (let offset = 0; offset < accounts.length; offset += MODEL_REFRESH_BATCH_SIZE) {
|
||||
const batch = accounts.slice(offset, offset + MODEL_REFRESH_BATCH_SIZE);
|
||||
const batchResults = await Promise.all(batch.map(async (account) => refreshModelsForAccount(account.id)));
|
||||
@@ -233,8 +233,8 @@ async function refreshModelsForAllActiveAccounts() {
|
||||
return results;
|
||||
}
|
||||
|
||||
export function rebuildTokenRoutesFromAvailability() {
|
||||
const tokenRows = db.select().from(schema.tokenModelAvailability)
|
||||
export async function rebuildTokenRoutesFromAvailability() {
|
||||
const tokenRows = await db.select().from(schema.tokenModelAvailability)
|
||||
.innerJoin(schema.accountTokens, eq(schema.tokenModelAvailability.tokenId, schema.accountTokens.id))
|
||||
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
@@ -255,8 +255,8 @@ export function rebuildTokenRoutesFromAvailability() {
|
||||
modelTokens.get(modelName)!.set(row.account_tokens.id, row.accounts.id);
|
||||
}
|
||||
|
||||
const routes = db.select().from(schema.tokenRoutes).all();
|
||||
const channels = db.select().from(schema.routeChannels).all();
|
||||
const routes = await db.select().from(schema.tokenRoutes).all();
|
||||
const channels = await db.select().from(schema.routeChannels).all();
|
||||
|
||||
let createdRoutes = 0;
|
||||
let createdChannels = 0;
|
||||
@@ -264,15 +264,20 @@ export function rebuildTokenRoutesFromAvailability() {
|
||||
let removedRoutes = 0;
|
||||
|
||||
for (const [modelName, tokenAccountMap] of modelTokens.entries()) {
|
||||
let route = routes.find((r) => r.modelPattern === modelName);
|
||||
if (!route) {
|
||||
route = db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: modelName,
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
routes.push(route);
|
||||
createdRoutes++;
|
||||
}
|
||||
let route = routes.find((r) => r.modelPattern === modelName);
|
||||
if (!route) {
|
||||
const inserted = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: modelName,
|
||||
enabled: true,
|
||||
}).run();
|
||||
const insertedId = Number(inserted.lastInsertRowid || 0);
|
||||
route = insertedId > 0
|
||||
? await db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, insertedId)).get()
|
||||
: undefined;
|
||||
if (!route) continue;
|
||||
routes.push(route);
|
||||
createdRoutes++;
|
||||
}
|
||||
|
||||
const routeChannels = channels.filter((channel) => channel.routeId === route.id);
|
||||
const desiredTokenIds = new Set<number>(Array.from(tokenAccountMap.keys()));
|
||||
@@ -281,17 +286,21 @@ export function rebuildTokenRoutesFromAvailability() {
|
||||
const exists = routeChannels.some((channel) => channel.tokenId === tokenId);
|
||||
if (exists) continue;
|
||||
|
||||
const created = db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId,
|
||||
tokenId,
|
||||
priority: 0,
|
||||
weight: 10,
|
||||
enabled: true,
|
||||
manualOverride: false,
|
||||
}).returning().get();
|
||||
channels.push(created);
|
||||
createdChannels++;
|
||||
const inserted = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId,
|
||||
tokenId,
|
||||
priority: 0,
|
||||
weight: 10,
|
||||
enabled: true,
|
||||
manualOverride: false,
|
||||
}).run();
|
||||
const insertedId = Number(inserted.lastInsertRowid || 0);
|
||||
if (insertedId <= 0) continue;
|
||||
const created = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, insertedId)).get();
|
||||
if (!created) continue;
|
||||
channels.push(created);
|
||||
createdChannels++;
|
||||
}
|
||||
|
||||
for (const channel of routeChannels) {
|
||||
@@ -300,9 +309,9 @@ export function rebuildTokenRoutesFromAvailability() {
|
||||
}
|
||||
|
||||
if (!channel.tokenId) {
|
||||
const preferred = getPreferredAccountToken(channel.accountId);
|
||||
const preferred = await getPreferredAccountToken(channel.accountId);
|
||||
if (preferred && desiredTokenIds.has(preferred.id)) {
|
||||
db.update(schema.routeChannels)
|
||||
await db.update(schema.routeChannels)
|
||||
.set({ tokenId: preferred.id })
|
||||
.where(eq(schema.routeChannels.id, channel.id))
|
||||
.run();
|
||||
@@ -311,7 +320,7 @@ export function rebuildTokenRoutesFromAvailability() {
|
||||
}
|
||||
|
||||
if (!channel.manualOverride) {
|
||||
db.delete(schema.routeChannels).where(eq(schema.routeChannels.id, channel.id)).run();
|
||||
await db.delete(schema.routeChannels).where(eq(schema.routeChannels.id, channel.id)).run();
|
||||
removedChannels++;
|
||||
}
|
||||
}
|
||||
@@ -329,7 +338,7 @@ export function rebuildTokenRoutesFromAvailability() {
|
||||
removedChannels += routeChannelCount;
|
||||
}
|
||||
|
||||
const deleted = db.delete(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, route.id)).run().changes;
|
||||
const deleted = (await db.delete(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, route.id)).run()).changes;
|
||||
if (deleted > 0) {
|
||||
removedRoutes += deleted;
|
||||
}
|
||||
@@ -346,8 +355,8 @@ export function rebuildTokenRoutesFromAvailability() {
|
||||
};
|
||||
}
|
||||
|
||||
export async function refreshModelsAndRebuildRoutes() {
|
||||
const refresh = await refreshModelsForAllActiveAccounts();
|
||||
const rebuild = rebuildTokenRoutesFromAvailability();
|
||||
return { refresh, rebuild };
|
||||
}
|
||||
export async function refreshModelsAndRebuildRoutes() {
|
||||
const refresh = await refreshModelsForAllActiveAccounts();
|
||||
const rebuild = await rebuildTokenRoutesFromAvailability();
|
||||
return { refresh, rebuild };
|
||||
}
|
||||
|
||||
@@ -185,7 +185,7 @@ export abstract class BasePlatformAdapter implements PlatformAdapter {
|
||||
...options?.headers,
|
||||
},
|
||||
};
|
||||
const proxiedRequestOptions = withSiteProxyRequestInit(url, requestOptions);
|
||||
const proxiedRequestOptions = await withSiteProxyRequestInit(url, requestOptions);
|
||||
const res = await fetch(url, proxiedRequestOptions);
|
||||
if (!res.ok) {
|
||||
throw new Error(`HTTP ${res.status}: ${await res.text()}`);
|
||||
|
||||
@@ -516,7 +516,7 @@ export class NewApiAdapter extends BasePlatformAdapter {
|
||||
body: options?.body ?? undefined,
|
||||
headers,
|
||||
};
|
||||
const proxiedRequestOptions = withSiteProxyRequestInit(url, requestOptions);
|
||||
const proxiedRequestOptions = await withSiteProxyRequestInit(url, requestOptions);
|
||||
const res = await fetch(url, proxiedRequestOptions);
|
||||
const text = await res.text();
|
||||
const getSetCookie = (res.headers as unknown as { getSetCookie?: () => string[] }).getSetCookie;
|
||||
|
||||
@@ -32,10 +32,10 @@ describe('proxyLogRetentionService', () => {
|
||||
originalRetentionDays = config.proxyLogRetentionDays;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.proxyLogs).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.proxyLogs).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
@@ -43,15 +43,15 @@ describe('proxyLogRetentionService', () => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
function seedAccount(platform = 'new-api') {
|
||||
const site = db.insert(schema.sites).values({
|
||||
async function seedAccount(platform = 'new-api') {
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: `retention-site-${platform}`,
|
||||
url: `https://retention-${platform}.example.com`,
|
||||
platform,
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
return db.insert(schema.accounts).values({
|
||||
return await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: `retention-${platform}`,
|
||||
accessToken: `access-${platform}`,
|
||||
@@ -60,11 +60,11 @@ describe('proxyLogRetentionService', () => {
|
||||
}).returning().get();
|
||||
}
|
||||
|
||||
it('deletes proxy logs older than retention days', () => {
|
||||
it('deletes proxy logs older than retention days', async () => {
|
||||
config.proxyLogRetentionDays = 7;
|
||||
const account = seedAccount('new-api');
|
||||
const account = await seedAccount('new-api');
|
||||
|
||||
db.insert(schema.proxyLogs).values([
|
||||
await db.insert(schema.proxyLogs).values([
|
||||
{
|
||||
accountId: account.id,
|
||||
modelRequested: 'gpt-4o-mini',
|
||||
@@ -79,19 +79,19 @@ describe('proxyLogRetentionService', () => {
|
||||
},
|
||||
]).run();
|
||||
|
||||
const result = cleanupExpiredProxyLogs(Date.parse('2026-03-04T00:00:00Z'));
|
||||
const result = await cleanupExpiredProxyLogs(Date.parse('2026-03-04T00:00:00Z'));
|
||||
expect(result.deleted).toBe(1);
|
||||
|
||||
const rows = db.select().from(schema.proxyLogs).all();
|
||||
const rows = await db.select().from(schema.proxyLogs).all();
|
||||
expect(rows).toHaveLength(1);
|
||||
expect(rows[0]?.createdAt).toBe('2026-03-01 12:00:00');
|
||||
});
|
||||
|
||||
it('skips cleanup when retention is disabled', () => {
|
||||
it('skips cleanup when retention is disabled', async () => {
|
||||
config.proxyLogRetentionDays = 0;
|
||||
const account = seedAccount('new-api');
|
||||
const account = await seedAccount('new-api');
|
||||
|
||||
db.insert(schema.proxyLogs).values({
|
||||
await db.insert(schema.proxyLogs).values({
|
||||
accountId: account.id,
|
||||
modelRequested: 'gpt-4o-mini',
|
||||
status: 'success',
|
||||
@@ -101,10 +101,10 @@ describe('proxyLogRetentionService', () => {
|
||||
const cutoff = getProxyLogRetentionCutoffUtc(Date.parse('2026-03-04T00:00:00Z'));
|
||||
expect(cutoff).toBeNull();
|
||||
|
||||
const result = cleanupExpiredProxyLogs(Date.parse('2026-03-04T00:00:00Z'));
|
||||
const result = await cleanupExpiredProxyLogs(Date.parse('2026-03-04T00:00:00Z'));
|
||||
expect(result.deleted).toBe(0);
|
||||
|
||||
const rows = db.select().from(schema.proxyLogs).all();
|
||||
const rows = await db.select().from(schema.proxyLogs).all();
|
||||
expect(rows).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -13,12 +13,12 @@ export function getProxyLogRetentionCutoffUtc(nowMs = Date.now()): string | null
|
||||
return formatUtcSqlDateTime(new Date(nowMs - days * DAY_MS));
|
||||
}
|
||||
|
||||
export function cleanupExpiredProxyLogs(nowMs = Date.now()): {
|
||||
export async function cleanupExpiredProxyLogs(nowMs = Date.now()): Promise<{
|
||||
enabled: boolean;
|
||||
retentionDays: number;
|
||||
cutoffUtc: string | null;
|
||||
deleted: number;
|
||||
} {
|
||||
}> {
|
||||
const retentionDays = Math.max(0, Math.trunc(config.proxyLogRetentionDays));
|
||||
const cutoffUtc = getProxyLogRetentionCutoffUtc(nowMs);
|
||||
if (!cutoffUtc) {
|
||||
@@ -30,9 +30,9 @@ export function cleanupExpiredProxyLogs(nowMs = Date.now()): {
|
||||
};
|
||||
}
|
||||
|
||||
const deleted = db.delete(schema.proxyLogs)
|
||||
const deleted = (await db.delete(schema.proxyLogs)
|
||||
.where(lt(schema.proxyLogs.createdAt, cutoffUtc))
|
||||
.run()
|
||||
.run())
|
||||
.changes;
|
||||
|
||||
return {
|
||||
@@ -48,9 +48,9 @@ export function startProxyLogRetentionService(): void {
|
||||
|
||||
const intervalMinutes = Math.max(1, Math.trunc(config.proxyLogRetentionPruneIntervalMinutes));
|
||||
const intervalMs = intervalMinutes * 60 * 1000;
|
||||
const runCleanup = () => {
|
||||
const runCleanup = async () => {
|
||||
try {
|
||||
const result = cleanupExpiredProxyLogs();
|
||||
const result = await cleanupExpiredProxyLogs();
|
||||
if (!result.enabled || result.deleted <= 0) return;
|
||||
console.info(`[proxy-log-retention] deleted ${result.deleted} logs before ${result.cutoffUtc}`);
|
||||
} catch (error) {
|
||||
@@ -58,8 +58,8 @@ export function startProxyLogRetentionService(): void {
|
||||
}
|
||||
};
|
||||
|
||||
runCleanup();
|
||||
retentionTimer = setInterval(runCleanup, intervalMs);
|
||||
void runCleanup();
|
||||
retentionTimer = setInterval(() => { void runCleanup(); }, intervalMs);
|
||||
retentionTimer.unref?.();
|
||||
}
|
||||
|
||||
|
||||
@@ -21,8 +21,8 @@ describe('siteProxy', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
const { invalidateSiteProxyCache } = await import('./siteProxy.js');
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
invalidateSiteProxyCache();
|
||||
});
|
||||
|
||||
@@ -31,7 +31,7 @@ describe('siteProxy', () => {
|
||||
});
|
||||
|
||||
it('resolves longest matched site proxy url', async () => {
|
||||
db.insert(schema.sites).values([
|
||||
await db.insert(schema.sites).values([
|
||||
{
|
||||
name: 'base-site',
|
||||
url: 'https://relay.example.com',
|
||||
@@ -47,14 +47,14 @@ describe('siteProxy', () => {
|
||||
]).run();
|
||||
|
||||
const { resolveSiteProxyUrlByRequestUrl } = await import('./siteProxy.js');
|
||||
expect(resolveSiteProxyUrlByRequestUrl('https://relay.example.com/openai/v1/models'))
|
||||
expect(await resolveSiteProxyUrlByRequestUrl('https://relay.example.com/openai/v1/models'))
|
||||
.toBe('http://127.0.0.1:7890');
|
||||
expect(resolveSiteProxyUrlByRequestUrl('https://relay.example.com/v1/models'))
|
||||
expect(await resolveSiteProxyUrlByRequestUrl('https://relay.example.com/v1/models'))
|
||||
.toBe('http://127.0.0.1:7891');
|
||||
});
|
||||
|
||||
it('injects dispatcher when proxy exists', async () => {
|
||||
db.insert(schema.sites).values({
|
||||
await db.insert(schema.sites).values({
|
||||
name: 'proxy-site',
|
||||
url: 'https://proxy-site.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -62,7 +62,7 @@ describe('siteProxy', () => {
|
||||
}).run();
|
||||
|
||||
const { withSiteProxyRequestInit } = await import('./siteProxy.js');
|
||||
const requestInit = withSiteProxyRequestInit('https://proxy-site.example.com/v1/chat/completions', {
|
||||
const requestInit = await withSiteProxyRequestInit('https://proxy-site.example.com/v1/chat/completions', {
|
||||
method: 'POST',
|
||||
});
|
||||
|
||||
|
||||
@@ -48,13 +48,13 @@ function normalizeSiteUrl(value: string): string {
|
||||
}
|
||||
}
|
||||
|
||||
function getCachedSiteProxyRows(nowMs = Date.now()): SiteProxyRow[] {
|
||||
async function getCachedSiteProxyRows(nowMs = Date.now()): Promise<SiteProxyRow[]> {
|
||||
if ((nowMs - siteProxyCache.loadedAt) < SITE_PROXY_CACHE_TTL_MS) {
|
||||
return siteProxyCache.rows;
|
||||
}
|
||||
|
||||
try {
|
||||
const rows = db
|
||||
const rows = await db
|
||||
.select({
|
||||
siteUrl: schema.sites.url,
|
||||
proxyUrl: schema.sites.proxyUrl,
|
||||
@@ -141,11 +141,11 @@ export function invalidateSiteProxyCache(): void {
|
||||
siteProxyCache = { loadedAt: 0, rows: [] };
|
||||
}
|
||||
|
||||
export function resolveSiteProxyUrlByRequestUrl(requestUrl: string): string | null {
|
||||
export async function resolveSiteProxyUrlByRequestUrl(requestUrl: string): Promise<string | null> {
|
||||
const normalizedRequestUrl = normalizeSiteUrl(requestUrl);
|
||||
if (!normalizedRequestUrl) return null;
|
||||
|
||||
const rows = getCachedSiteProxyRows();
|
||||
const rows = await getCachedSiteProxyRows();
|
||||
let bestMatch: string | null = null;
|
||||
let bestMatchLength = -1;
|
||||
|
||||
@@ -169,11 +169,11 @@ export function resolveSiteProxyUrlByRequestUrl(requestUrl: string): string | nu
|
||||
return bestMatch;
|
||||
}
|
||||
|
||||
export function withSiteProxyRequestInit(
|
||||
export async function withSiteProxyRequestInit(
|
||||
requestUrl: string,
|
||||
options?: UndiciRequestInit,
|
||||
): UndiciRequestInit {
|
||||
const proxyUrl = resolveSiteProxyUrlByRequestUrl(requestUrl);
|
||||
): Promise<UndiciRequestInit> {
|
||||
const proxyUrl = await resolveSiteProxyUrlByRequestUrl(requestUrl);
|
||||
if (!proxyUrl) return options ?? {};
|
||||
|
||||
const dispatcher = getDispatcherByProxyUrl(proxyUrl);
|
||||
|
||||
@@ -33,12 +33,12 @@ describe('TokenRouter runtime cache', () => {
|
||||
originalCacheTtlMs = config.tokenRouterCacheTtlMs;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
config.tokenRouterCacheTtlMs = 60_000;
|
||||
invalidateTokenRouterCache();
|
||||
});
|
||||
@@ -49,15 +49,15 @@ describe('TokenRouter runtime cache', () => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
it('keeps route snapshot inside TTL until explicit invalidation', () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
it('keeps route snapshot inside TTL until explicit invalidation', async () => {
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'cache-site',
|
||||
url: 'https://cache-site.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'cache-user',
|
||||
accessToken: 'cache-access-token',
|
||||
@@ -65,7 +65,7 @@ describe('TokenRouter runtime cache', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const token = db.insert(schema.accountTokens).values({
|
||||
const token = await db.insert(schema.accountTokens).values({
|
||||
accountId: account.id,
|
||||
name: 'cache-token',
|
||||
token: 'sk-cache-token',
|
||||
@@ -73,12 +73,12 @@ describe('TokenRouter runtime cache', () => {
|
||||
isDefault: true,
|
||||
}).returning().get();
|
||||
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-4o-mini',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
tokenId: token.id,
|
||||
@@ -88,16 +88,16 @@ describe('TokenRouter runtime cache', () => {
|
||||
}).run();
|
||||
|
||||
const router = new TokenRouter();
|
||||
expect(router.selectChannel('gpt-4o-mini')).toBeTruthy();
|
||||
expect(await router.selectChannel('gpt-4o-mini')).toBeTruthy();
|
||||
|
||||
db.delete(schema.routeChannels).where(eq(schema.routeChannels.routeId, route.id)).run();
|
||||
db.delete(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, route.id)).run();
|
||||
await db.delete(schema.routeChannels).where(eq(schema.routeChannels.routeId, route.id)).run();
|
||||
await db.delete(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, route.id)).run();
|
||||
|
||||
const cachedSelection = router.selectChannel('gpt-4o-mini');
|
||||
const cachedSelection = await router.selectChannel('gpt-4o-mini');
|
||||
expect(cachedSelection).toBeTruthy();
|
||||
|
||||
invalidateTokenRouterCache();
|
||||
const refreshedSelection = router.selectChannel('gpt-4o-mini');
|
||||
const refreshedSelection = await router.selectChannel('gpt-4o-mini');
|
||||
expect(refreshedSelection).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -26,12 +26,12 @@ describe('TokenRouter downstream policy', () => {
|
||||
invalidateTokenRouterCache = tokenRouterModule.invalidateTokenRouterCache;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
invalidateTokenRouterCache();
|
||||
});
|
||||
|
||||
@@ -40,15 +40,15 @@ describe('TokenRouter downstream policy', () => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
it('respects allowedRouteIds when selecting channels', () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
it('respects allowedRouteIds when selecting channels', async () => {
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-a',
|
||||
url: 'https://a.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'user-a',
|
||||
accessToken: 'access-a',
|
||||
@@ -56,17 +56,17 @@ describe('TokenRouter downstream policy', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const routeAllowed = db.insert(schema.tokenRoutes).values({
|
||||
const routeAllowed = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'claude-opus-4-6',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const routeBlocked = db.insert(schema.tokenRoutes).values({
|
||||
const routeBlocked = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-4o-mini',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: routeAllowed.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -75,7 +75,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
enabled: true,
|
||||
}).run();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: routeBlocked.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -86,12 +86,12 @@ describe('TokenRouter downstream policy', () => {
|
||||
|
||||
const router = new TokenRouter();
|
||||
|
||||
const allowedPick = router.selectChannel('claude-opus-4-6', {
|
||||
const allowedPick = await router.selectChannel('claude-opus-4-6', {
|
||||
allowedRouteIds: [routeAllowed.id],
|
||||
supportedModels: [],
|
||||
siteWeightMultipliers: {},
|
||||
});
|
||||
const blockedPick = router.selectChannel('gpt-4o-mini', {
|
||||
const blockedPick = await router.selectChannel('gpt-4o-mini', {
|
||||
allowedRouteIds: [routeAllowed.id],
|
||||
supportedModels: [],
|
||||
siteWeightMultipliers: {},
|
||||
@@ -102,22 +102,22 @@ describe('TokenRouter downstream policy', () => {
|
||||
expect(blockedPick).toBeNull();
|
||||
});
|
||||
|
||||
it('applies site weight multipliers to probability explanation', () => {
|
||||
const siteHigh = db.insert(schema.sites).values({
|
||||
it('applies site weight multipliers to probability explanation', async () => {
|
||||
const siteHigh = await db.insert(schema.sites).values({
|
||||
name: 'high-site',
|
||||
url: 'https://high.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const siteLow = db.insert(schema.sites).values({
|
||||
const siteLow = await db.insert(schema.sites).values({
|
||||
name: 'low-site',
|
||||
url: 'https://low.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const accountHigh = db.insert(schema.accounts).values({
|
||||
const accountHigh = await db.insert(schema.accounts).values({
|
||||
siteId: siteHigh.id,
|
||||
username: 'user-high',
|
||||
accessToken: 'access-high',
|
||||
@@ -127,7 +127,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
balance: 100,
|
||||
}).returning().get();
|
||||
|
||||
const accountLow = db.insert(schema.accounts).values({
|
||||
const accountLow = await db.insert(schema.accounts).values({
|
||||
siteId: siteLow.id,
|
||||
username: 'user-low',
|
||||
accessToken: 'access-low',
|
||||
@@ -137,12 +137,12 @@ describe('TokenRouter downstream policy', () => {
|
||||
balance: 100,
|
||||
}).returning().get();
|
||||
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'claude-sonnet-4-6',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const channelHigh = db.insert(schema.routeChannels).values({
|
||||
const channelHigh = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountHigh.id,
|
||||
tokenId: null,
|
||||
@@ -151,7 +151,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const channelLow = db.insert(schema.routeChannels).values({
|
||||
const channelLow = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountLow.id,
|
||||
tokenId: null,
|
||||
@@ -161,7 +161,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
}).returning().get();
|
||||
|
||||
const router = new TokenRouter();
|
||||
const decision = router.explainSelectionForRoute(
|
||||
const decision = await router.explainSelectionForRoute(
|
||||
route.id,
|
||||
'claude-sonnet-4-6',
|
||||
[],
|
||||
@@ -183,8 +183,8 @@ describe('TokenRouter downstream policy', () => {
|
||||
expect((highCandidate?.probability || 0)).toBeGreaterThan(lowCandidate?.probability || 0);
|
||||
});
|
||||
|
||||
it('combines site global weight with downstream site multiplier', () => {
|
||||
const siteGlobalHigh = db.insert(schema.sites).values({
|
||||
it('combines site global weight with downstream site multiplier', async () => {
|
||||
const siteGlobalHigh = await db.insert(schema.sites).values({
|
||||
name: 'global-high-site',
|
||||
url: 'https://global-high.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -192,7 +192,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
globalWeight: 3,
|
||||
}).returning().get();
|
||||
|
||||
const siteGlobalLow = db.insert(schema.sites).values({
|
||||
const siteGlobalLow = await db.insert(schema.sites).values({
|
||||
name: 'global-low-site',
|
||||
url: 'https://global-low.example.com',
|
||||
platform: 'new-api',
|
||||
@@ -200,7 +200,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
globalWeight: 1,
|
||||
}).returning().get();
|
||||
|
||||
const accountGlobalHigh = db.insert(schema.accounts).values({
|
||||
const accountGlobalHigh = await db.insert(schema.accounts).values({
|
||||
siteId: siteGlobalHigh.id,
|
||||
username: 'user-global-high',
|
||||
accessToken: 'access-global-high',
|
||||
@@ -210,7 +210,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
balance: 100,
|
||||
}).returning().get();
|
||||
|
||||
const accountGlobalLow = db.insert(schema.accounts).values({
|
||||
const accountGlobalLow = await db.insert(schema.accounts).values({
|
||||
siteId: siteGlobalLow.id,
|
||||
username: 'user-global-low',
|
||||
accessToken: 'access-global-low',
|
||||
@@ -220,12 +220,12 @@ describe('TokenRouter downstream policy', () => {
|
||||
balance: 100,
|
||||
}).returning().get();
|
||||
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-5-mini',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const highChannel = db.insert(schema.routeChannels).values({
|
||||
const highChannel = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountGlobalHigh.id,
|
||||
tokenId: null,
|
||||
@@ -234,7 +234,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const lowChannel = db.insert(schema.routeChannels).values({
|
||||
const lowChannel = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountGlobalLow.id,
|
||||
tokenId: null,
|
||||
@@ -244,7 +244,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
}).returning().get();
|
||||
|
||||
const router = new TokenRouter();
|
||||
const decision = router.explainSelectionForRoute(
|
||||
const decision = await router.explainSelectionForRoute(
|
||||
route.id,
|
||||
'gpt-5-mini',
|
||||
[],
|
||||
@@ -267,15 +267,15 @@ describe('TokenRouter downstream policy', () => {
|
||||
expect((highCandidate?.probability || 0)).toBeGreaterThan(lowCandidate?.probability || 0);
|
||||
});
|
||||
|
||||
it('supports union semantics between supportedModels and allowedRouteIds', () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
it('supports union semantics between supportedModels and allowedRouteIds', async () => {
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'site-union',
|
||||
url: 'https://union.example.com',
|
||||
platform: 'new-api',
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'user-union',
|
||||
accessToken: 'access-union',
|
||||
@@ -283,17 +283,17 @@ describe('TokenRouter downstream policy', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const claudeGroupRoute = db.insert(schema.tokenRoutes).values({
|
||||
const claudeGroupRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 're:^claude-(opus|sonnet)-4-6$',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const gptExactRoute = db.insert(schema.tokenRoutes).values({
|
||||
const gptExactRoute = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-4o-mini',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: claudeGroupRoute.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -302,7 +302,7 @@ describe('TokenRouter downstream policy', () => {
|
||||
enabled: true,
|
||||
}).run();
|
||||
|
||||
db.insert(schema.routeChannels).values({
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: gptExactRoute.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -318,8 +318,8 @@ describe('TokenRouter downstream policy', () => {
|
||||
siteWeightMultipliers: {},
|
||||
};
|
||||
|
||||
const claudePick = router.selectChannel('claude-opus-4-6', policy);
|
||||
const gptPick = router.selectChannel('gpt-4o-mini', policy);
|
||||
const claudePick = await router.selectChannel('claude-opus-4-6', policy);
|
||||
const gptPick = await router.selectChannel('gpt-4o-mini', policy);
|
||||
|
||||
expect(claudePick?.channel.routeId).toBe(claudeGroupRoute.id);
|
||||
expect(gptPick?.channel.routeId).toBe(gptExactRoute.id);
|
||||
|
||||
@@ -32,13 +32,13 @@ describe('TokenRouter patterns and model mapping', () => {
|
||||
invalidateTokenRouterCache = tokenRouterModule.invalidateTokenRouterCache;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
idSeed = 0;
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
invalidateTokenRouterCache();
|
||||
});
|
||||
|
||||
@@ -47,9 +47,9 @@ describe('TokenRouter patterns and model mapping', () => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
function createSite(namePrefix: string) {
|
||||
async function createSite(namePrefix: string) {
|
||||
const id = nextId();
|
||||
return db.insert(schema.sites).values({
|
||||
return await db.insert(schema.sites).values({
|
||||
name: `${namePrefix}-${id}`,
|
||||
url: `https://${namePrefix}-${id}.example.com`,
|
||||
platform: 'new-api',
|
||||
@@ -57,9 +57,9 @@ describe('TokenRouter patterns and model mapping', () => {
|
||||
}).returning().get();
|
||||
}
|
||||
|
||||
function createAccount(siteId: number, usernamePrefix: string) {
|
||||
async function createAccount(siteId: number, usernamePrefix: string) {
|
||||
const id = nextId();
|
||||
return db.insert(schema.accounts).values({
|
||||
return await db.insert(schema.accounts).values({
|
||||
siteId,
|
||||
username: `${usernamePrefix}-${id}`,
|
||||
accessToken: `access-${id}`,
|
||||
@@ -68,20 +68,20 @@ describe('TokenRouter patterns and model mapping', () => {
|
||||
}).returning().get();
|
||||
}
|
||||
|
||||
function createRouteWithSingleChannel(
|
||||
async function createRouteWithSingleChannel(
|
||||
modelPattern: string,
|
||||
modelMapping?: string,
|
||||
options?: { displayName?: string; sourceModel?: string | null },
|
||||
) {
|
||||
const site = createSite('pattern-site');
|
||||
const account = createAccount(site.id, 'pattern-user');
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const site = await createSite('pattern-site');
|
||||
const account = await createAccount(site.id, 'pattern-user');
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern,
|
||||
displayName: options?.displayName,
|
||||
modelMapping,
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
const channel = db.insert(schema.routeChannels).values({
|
||||
const channel = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -93,49 +93,49 @@ describe('TokenRouter patterns and model mapping', () => {
|
||||
return { route, channel };
|
||||
}
|
||||
|
||||
it('matches routes with re: regex patterns', () => {
|
||||
createRouteWithSingleChannel('re:^claude-(opus|sonnet)-4-6$');
|
||||
it('matches routes with re: regex patterns', async () => {
|
||||
await createRouteWithSingleChannel('re:^claude-(opus|sonnet)-4-6$');
|
||||
const router = new TokenRouter();
|
||||
|
||||
const matched = router.selectChannel('claude-opus-4-6');
|
||||
const unmatched = router.selectChannel('claude-haiku-4-6');
|
||||
const matched = await router.selectChannel('claude-opus-4-6');
|
||||
const unmatched = await router.selectChannel('claude-haiku-4-6');
|
||||
|
||||
expect(matched).toBeTruthy();
|
||||
expect(matched?.actualModel).toBe('claude-opus-4-6');
|
||||
expect(unmatched).toBeNull();
|
||||
});
|
||||
|
||||
it('ignores invalid re: patterns and falls back to next matched route', () => {
|
||||
const invalid = createRouteWithSingleChannel('re:([a-z');
|
||||
const glob = createRouteWithSingleChannel('claude-*');
|
||||
it('ignores invalid re: patterns and falls back to next matched route', async () => {
|
||||
const invalid = await createRouteWithSingleChannel('re:([a-z');
|
||||
const glob = await createRouteWithSingleChannel('claude-*');
|
||||
const router = new TokenRouter();
|
||||
|
||||
const selected = router.selectChannel('claude-opus-4-6');
|
||||
const selected = await router.selectChannel('claude-opus-4-6');
|
||||
expect(selected).toBeTruthy();
|
||||
expect(selected?.channel.id).toBe(glob.channel.id);
|
||||
expect(selected?.channel.id).not.toBe(invalid.channel.id);
|
||||
});
|
||||
|
||||
it('supports exact, glob and re: keys in modelMapping with exact taking precedence', () => {
|
||||
it('supports exact, glob and re: keys in modelMapping with exact taking precedence', async () => {
|
||||
const mapping = JSON.stringify({
|
||||
'claude-sonnet-4-6': 'target-exact',
|
||||
'claude-sonnet-*': 'target-glob',
|
||||
're:^gpt-4o-mini-\\d+$': 'target-regex',
|
||||
});
|
||||
createRouteWithSingleChannel('*', mapping);
|
||||
await createRouteWithSingleChannel('*', mapping);
|
||||
const router = new TokenRouter();
|
||||
|
||||
const exact = router.selectChannel('claude-sonnet-4-6');
|
||||
const glob = router.selectChannel('claude-sonnet-4-7');
|
||||
const regex = router.selectChannel('gpt-4o-mini-20250101');
|
||||
const exact = await router.selectChannel('claude-sonnet-4-6');
|
||||
const glob = await router.selectChannel('claude-sonnet-4-7');
|
||||
const regex = await router.selectChannel('gpt-4o-mini-20250101');
|
||||
|
||||
expect(exact?.actualModel).toBe('target-exact');
|
||||
expect(glob?.actualModel).toBe('target-glob');
|
||||
expect(regex?.actualModel).toBe('target-regex');
|
||||
});
|
||||
|
||||
it('matches a route by display name alias as an exposed model', () => {
|
||||
createRouteWithSingleChannel(
|
||||
it('matches a route by display name alias as an exposed model', async () => {
|
||||
await createRouteWithSingleChannel(
|
||||
're:^claude-(opus|sonnet)-4-5$',
|
||||
undefined,
|
||||
{
|
||||
@@ -145,9 +145,9 @@ describe('TokenRouter patterns and model mapping', () => {
|
||||
);
|
||||
const router = new TokenRouter();
|
||||
|
||||
const selected = router.selectChannel('claude-opus-4-6');
|
||||
const decision = router.explainSelection('claude-opus-4-6');
|
||||
const exposedModels = router.getAvailableModels();
|
||||
const selected = await router.selectChannel('claude-opus-4-6');
|
||||
const decision = await router.explainSelection('claude-opus-4-6');
|
||||
const exposedModels = await router.getAvailableModels();
|
||||
|
||||
expect(selected).toBeTruthy();
|
||||
expect(selected?.actualModel).toBe('claude-opus-4-5');
|
||||
|
||||
@@ -52,15 +52,15 @@ describe('TokenRouter selection scoring', () => {
|
||||
originalRoutingFallbackUnitCost = config.routingFallbackUnitCost;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
idSeed = 0;
|
||||
mockedCatalogRoutingCost.mockReset();
|
||||
mockedCatalogRoutingCost.mockReturnValue(null);
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
invalidateTokenRouterCache();
|
||||
});
|
||||
|
||||
@@ -71,16 +71,16 @@ describe('TokenRouter selection scoring', () => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
function createRoute(modelPattern: string) {
|
||||
return db.insert(schema.tokenRoutes).values({
|
||||
async function createRoute(modelPattern: string) {
|
||||
return await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern,
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
}
|
||||
|
||||
function createSite(namePrefix: string) {
|
||||
async function createSite(namePrefix: string) {
|
||||
const id = nextId();
|
||||
return db.insert(schema.sites).values({
|
||||
return await db.insert(schema.sites).values({
|
||||
name: `${namePrefix}-${id}`,
|
||||
url: `https://${namePrefix}-${id}.example.com`,
|
||||
platform: 'new-api',
|
||||
@@ -88,9 +88,9 @@ describe('TokenRouter selection scoring', () => {
|
||||
}).returning().get();
|
||||
}
|
||||
|
||||
function createAccount(siteId: number, usernamePrefix: string) {
|
||||
async function createAccount(siteId: number, usernamePrefix: string) {
|
||||
const id = nextId();
|
||||
return db.insert(schema.accounts).values({
|
||||
return await db.insert(schema.accounts).values({
|
||||
siteId,
|
||||
username: `${usernamePrefix}-${id}`,
|
||||
accessToken: `access-${id}`,
|
||||
@@ -99,8 +99,8 @@ describe('TokenRouter selection scoring', () => {
|
||||
}).returning().get();
|
||||
}
|
||||
|
||||
function createToken(accountId: number, name: string) {
|
||||
return db.insert(schema.accountTokens).values({
|
||||
async function createToken(accountId: number, name: string) {
|
||||
return await db.insert(schema.accountTokens).values({
|
||||
accountId,
|
||||
name,
|
||||
token: `token-${name}-${nextId()}`,
|
||||
@@ -109,7 +109,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
}).returning().get();
|
||||
}
|
||||
|
||||
it('normalizes probability across channels on the same site', () => {
|
||||
it('normalizes probability across channels on the same site', async () => {
|
||||
config.routingWeights = {
|
||||
baseWeightFactor: 1,
|
||||
valueScoreFactor: 0,
|
||||
@@ -118,18 +118,18 @@ describe('TokenRouter selection scoring', () => {
|
||||
usageWeight: 0,
|
||||
};
|
||||
|
||||
const route = createRoute('claude-haiku-4-5-20251001');
|
||||
const route = await createRoute('claude-haiku-4-5-20251001');
|
||||
|
||||
const siteA = createSite('site-a');
|
||||
const accountA = createAccount(siteA.id, 'user-a');
|
||||
const tokenA1 = createToken(accountA.id, 'a-1');
|
||||
const tokenA2 = createToken(accountA.id, 'a-2');
|
||||
const siteA = await createSite('site-a');
|
||||
const accountA = await createAccount(siteA.id, 'user-a');
|
||||
const tokenA1 = await createToken(accountA.id, 'a-1');
|
||||
const tokenA2 = await createToken(accountA.id, 'a-2');
|
||||
|
||||
const siteB = createSite('site-b');
|
||||
const accountB = createAccount(siteB.id, 'user-b');
|
||||
const tokenB = createToken(accountB.id, 'b-1');
|
||||
const siteB = await createSite('site-b');
|
||||
const accountB = await createAccount(siteB.id, 'user-b');
|
||||
const tokenB = await createToken(accountB.id, 'b-1');
|
||||
|
||||
const channelA1 = db.insert(schema.routeChannels).values({
|
||||
const channelA1 = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountA.id,
|
||||
tokenId: tokenA1.id,
|
||||
@@ -138,7 +138,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const channelA2 = db.insert(schema.routeChannels).values({
|
||||
const channelA2 = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountA.id,
|
||||
tokenId: tokenA2.id,
|
||||
@@ -147,7 +147,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const channelB = db.insert(schema.routeChannels).values({
|
||||
const channelB = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountB.id,
|
||||
tokenId: tokenB.id,
|
||||
@@ -156,7 +156,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const decision = new TokenRouter().explainSelection('claude-haiku-4-5-20251001');
|
||||
const decision = await new TokenRouter().explainSelection('claude-haiku-4-5-20251001');
|
||||
const probMap = new Map(decision.candidates.map((candidate) => [candidate.channelId, candidate.probability]));
|
||||
|
||||
const probA1 = probMap.get(channelA1.id) ?? 0;
|
||||
@@ -169,7 +169,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
expect(probA1 + probA2).toBeCloseTo(probB, 1);
|
||||
});
|
||||
|
||||
it('uses observed channel cost from real routing results when scoring cost priority', () => {
|
||||
it('uses observed channel cost from real routing results when scoring cost priority', async () => {
|
||||
config.routingWeights = {
|
||||
baseWeightFactor: 0.35,
|
||||
valueScoreFactor: 0.65,
|
||||
@@ -178,12 +178,12 @@ describe('TokenRouter selection scoring', () => {
|
||||
usageWeight: 0,
|
||||
};
|
||||
|
||||
const route = createRoute('claude-opus-4-6');
|
||||
const route = await createRoute('claude-opus-4-6');
|
||||
|
||||
const siteCheap = createSite('cheap-site');
|
||||
const accountCheap = createAccount(siteCheap.id, 'cheap-user');
|
||||
const tokenCheap = createToken(accountCheap.id, 'cheap-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const siteCheap = await createSite('cheap-site');
|
||||
const accountCheap = await createAccount(siteCheap.id, 'cheap-user');
|
||||
const tokenCheap = await createToken(accountCheap.id, 'cheap-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountCheap.id,
|
||||
tokenId: tokenCheap.id,
|
||||
@@ -195,10 +195,10 @@ describe('TokenRouter selection scoring', () => {
|
||||
totalCost: 0.01,
|
||||
}).run();
|
||||
|
||||
const siteExpensive = createSite('expensive-site');
|
||||
const accountExpensive = createAccount(siteExpensive.id, 'expensive-user');
|
||||
const tokenExpensive = createToken(accountExpensive.id, 'exp-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const siteExpensive = await createSite('expensive-site');
|
||||
const accountExpensive = await createAccount(siteExpensive.id, 'expensive-user');
|
||||
const tokenExpensive = await createToken(accountExpensive.id, 'exp-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountExpensive.id,
|
||||
tokenId: tokenExpensive.id,
|
||||
@@ -210,7 +210,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
totalCost: 0.1,
|
||||
}).run();
|
||||
|
||||
const decision = new TokenRouter().explainSelection('claude-opus-4-6');
|
||||
const decision = await new TokenRouter().explainSelection('claude-opus-4-6');
|
||||
const cheapCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('cheap-site'));
|
||||
const expensiveCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('expensive-site'));
|
||||
|
||||
@@ -221,7 +221,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
expect(expensiveCandidate?.reason || '').toContain('成本=实测');
|
||||
});
|
||||
|
||||
it('uses runtime-configured fallback unit cost when observed and configured costs are missing', () => {
|
||||
it('uses runtime-configured fallback unit cost when observed and configured costs are missing', async () => {
|
||||
config.routingWeights = {
|
||||
baseWeightFactor: 0.35,
|
||||
valueScoreFactor: 0.65,
|
||||
@@ -231,12 +231,12 @@ describe('TokenRouter selection scoring', () => {
|
||||
};
|
||||
config.routingFallbackUnitCost = 0.02;
|
||||
|
||||
const route = createRoute('claude-sonnet-4-6');
|
||||
const route = await createRoute('claude-sonnet-4-6');
|
||||
|
||||
const siteFallback = createSite('fallback-site');
|
||||
const accountFallback = createAccount(siteFallback.id, 'fallback-user');
|
||||
const tokenFallback = createToken(accountFallback.id, 'fallback-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const siteFallback = await createSite('fallback-site');
|
||||
const accountFallback = await createAccount(siteFallback.id, 'fallback-user');
|
||||
const tokenFallback = await createToken(accountFallback.id, 'fallback-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountFallback.id,
|
||||
tokenId: tokenFallback.id,
|
||||
@@ -248,10 +248,10 @@ describe('TokenRouter selection scoring', () => {
|
||||
totalCost: 0,
|
||||
}).run();
|
||||
|
||||
const siteObserved = createSite('observed-site');
|
||||
const accountObserved = createAccount(siteObserved.id, 'observed-user');
|
||||
const tokenObserved = createToken(accountObserved.id, 'observed-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const siteObserved = await createSite('observed-site');
|
||||
const accountObserved = await createAccount(siteObserved.id, 'observed-user');
|
||||
const tokenObserved = await createToken(accountObserved.id, 'observed-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountObserved.id,
|
||||
tokenId: tokenObserved.id,
|
||||
@@ -263,7 +263,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
totalCost: 2, // unit cost 0.2
|
||||
}).run();
|
||||
|
||||
const decision = new TokenRouter().explainSelection('claude-sonnet-4-6');
|
||||
const decision = await new TokenRouter().explainSelection('claude-sonnet-4-6');
|
||||
const fallbackCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('fallback-site'));
|
||||
const observedCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('observed-site'));
|
||||
|
||||
@@ -273,7 +273,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
expect(fallbackCandidate?.reason || '').toContain('成本=默认:0.020000');
|
||||
});
|
||||
|
||||
it('penalizes fallback-cost channels when fallback unit cost is set very high', () => {
|
||||
it('penalizes fallback-cost channels when fallback unit cost is set very high', async () => {
|
||||
config.routingWeights = {
|
||||
baseWeightFactor: 0.35,
|
||||
valueScoreFactor: 0.65,
|
||||
@@ -283,10 +283,10 @@ describe('TokenRouter selection scoring', () => {
|
||||
};
|
||||
config.routingFallbackUnitCost = 1000;
|
||||
|
||||
const route = createRoute('gpt-5-nano');
|
||||
const route = await createRoute('gpt-5-nano');
|
||||
|
||||
const siteFallback = createSite('fallback-high-balance');
|
||||
const accountFallback = db.insert(schema.accounts).values({
|
||||
const siteFallback = await createSite('fallback-high-balance');
|
||||
const accountFallback = await db.insert(schema.accounts).values({
|
||||
siteId: siteFallback.id,
|
||||
username: `fallback-high-balance-${nextId()}`,
|
||||
accessToken: `access-${nextId()}`,
|
||||
@@ -294,8 +294,8 @@ describe('TokenRouter selection scoring', () => {
|
||||
status: 'active',
|
||||
balance: 10_000,
|
||||
}).returning().get();
|
||||
const tokenFallback = createToken(accountFallback.id, 'fallback-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const tokenFallback = await createToken(accountFallback.id, 'fallback-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountFallback.id,
|
||||
tokenId: tokenFallback.id,
|
||||
@@ -307,8 +307,8 @@ describe('TokenRouter selection scoring', () => {
|
||||
totalCost: 0,
|
||||
}).run();
|
||||
|
||||
const siteObserved = createSite('observed-low-balance');
|
||||
const accountObserved = db.insert(schema.accounts).values({
|
||||
const siteObserved = await createSite('observed-low-balance');
|
||||
const accountObserved = await db.insert(schema.accounts).values({
|
||||
siteId: siteObserved.id,
|
||||
username: `observed-low-balance-${nextId()}`,
|
||||
accessToken: `access-${nextId()}`,
|
||||
@@ -316,8 +316,8 @@ describe('TokenRouter selection scoring', () => {
|
||||
status: 'active',
|
||||
balance: 0,
|
||||
}).returning().get();
|
||||
const tokenObserved = createToken(accountObserved.id, 'observed-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const tokenObserved = await createToken(accountObserved.id, 'observed-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountObserved.id,
|
||||
tokenId: tokenObserved.id,
|
||||
@@ -329,7 +329,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
totalCost: 10, // observed unit cost = 1
|
||||
}).run();
|
||||
|
||||
const decision = new TokenRouter().explainSelection('gpt-5-nano');
|
||||
const decision = await new TokenRouter().explainSelection('gpt-5-nano');
|
||||
const fallbackCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('fallback-high-balance'));
|
||||
const observedCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('observed-low-balance'));
|
||||
|
||||
@@ -340,7 +340,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
expect(fallbackCandidate?.reason || '').toContain('成本=默认:1000.000000');
|
||||
});
|
||||
|
||||
it('uses cached catalog routing cost when observed and configured costs are missing', () => {
|
||||
it('uses cached catalog routing cost when observed and configured costs are missing', async () => {
|
||||
config.routingWeights = {
|
||||
baseWeightFactor: 0.35,
|
||||
valueScoreFactor: 0.65,
|
||||
@@ -350,12 +350,12 @@ describe('TokenRouter selection scoring', () => {
|
||||
};
|
||||
config.routingFallbackUnitCost = 100;
|
||||
|
||||
const route = createRoute('claude-sonnet-4-5-20250929');
|
||||
const route = await createRoute('claude-sonnet-4-5-20250929');
|
||||
|
||||
const siteCatalog = createSite('catalog-site');
|
||||
const accountCatalog = createAccount(siteCatalog.id, 'catalog-user');
|
||||
const tokenCatalog = createToken(accountCatalog.id, 'catalog-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const siteCatalog = await createSite('catalog-site');
|
||||
const accountCatalog = await createAccount(siteCatalog.id, 'catalog-user');
|
||||
const tokenCatalog = await createToken(accountCatalog.id, 'catalog-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountCatalog.id,
|
||||
tokenId: tokenCatalog.id,
|
||||
@@ -367,10 +367,10 @@ describe('TokenRouter selection scoring', () => {
|
||||
totalCost: 0,
|
||||
}).run();
|
||||
|
||||
const siteFallback = createSite('fallback-site');
|
||||
const accountFallback = createAccount(siteFallback.id, 'fallback-user');
|
||||
const tokenFallback = createToken(accountFallback.id, 'fallback-token');
|
||||
db.insert(schema.routeChannels).values({
|
||||
const siteFallback = await createSite('fallback-site');
|
||||
const accountFallback = await createAccount(siteFallback.id, 'fallback-user');
|
||||
const tokenFallback = await createToken(accountFallback.id, 'fallback-token');
|
||||
await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: accountFallback.id,
|
||||
tokenId: tokenFallback.id,
|
||||
@@ -388,7 +388,7 @@ describe('TokenRouter selection scoring', () => {
|
||||
return 0.2;
|
||||
});
|
||||
|
||||
const decision = new TokenRouter().explainSelection('claude-sonnet-4-5-20250929');
|
||||
const decision = await new TokenRouter().explainSelection('claude-sonnet-4-5-20250929');
|
||||
const catalogCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('catalog-site'));
|
||||
const fallbackCandidate = decision.candidates.find((candidate) => candidate.siteName.startsWith('fallback-site'));
|
||||
|
||||
|
||||
@@ -27,14 +27,14 @@ describe('TokenRouter site status guard', () => {
|
||||
invalidateTokenRouterCache = tokenRouterModule.invalidateTokenRouterCache;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
db.delete(schema.routeChannels).run();
|
||||
db.delete(schema.tokenRoutes).run();
|
||||
db.delete(schema.tokenModelAvailability).run();
|
||||
db.delete(schema.modelAvailability).run();
|
||||
db.delete(schema.accountTokens).run();
|
||||
db.delete(schema.accounts).run();
|
||||
db.delete(schema.sites).run();
|
||||
beforeEach(async () => {
|
||||
await db.delete(schema.routeChannels).run();
|
||||
await db.delete(schema.tokenRoutes).run();
|
||||
await db.delete(schema.tokenModelAvailability).run();
|
||||
await db.delete(schema.modelAvailability).run();
|
||||
await db.delete(schema.accountTokens).run();
|
||||
await db.delete(schema.accounts).run();
|
||||
await db.delete(schema.sites).run();
|
||||
invalidateTokenRouterCache();
|
||||
});
|
||||
|
||||
@@ -43,15 +43,15 @@ describe('TokenRouter site status guard', () => {
|
||||
delete process.env.DATA_DIR;
|
||||
});
|
||||
|
||||
it('does not select channels from disabled sites', () => {
|
||||
const site = db.insert(schema.sites).values({
|
||||
it('does not select channels from disabled sites', async () => {
|
||||
const site = await db.insert(schema.sites).values({
|
||||
name: 'disabled-site',
|
||||
url: 'https://disabled.example.com',
|
||||
platform: 'new-api',
|
||||
}).returning().get();
|
||||
db.run(sql`update sites set status = 'disabled' where id = ${site.id}`);
|
||||
await db.run(sql`update sites set status = 'disabled' where id = ${site.id}`);
|
||||
|
||||
const account = db.insert(schema.accounts).values({
|
||||
const account = await db.insert(schema.accounts).values({
|
||||
siteId: site.id,
|
||||
username: 'disabled-user',
|
||||
accessToken: 'access-disabled',
|
||||
@@ -59,12 +59,12 @@ describe('TokenRouter site status guard', () => {
|
||||
status: 'active',
|
||||
}).returning().get();
|
||||
|
||||
const route = db.insert(schema.tokenRoutes).values({
|
||||
const route = await db.insert(schema.tokenRoutes).values({
|
||||
modelPattern: 'gpt-4o-mini',
|
||||
enabled: true,
|
||||
}).returning().get();
|
||||
|
||||
const channel = db.insert(schema.routeChannels).values({
|
||||
const channel = await db.insert(schema.routeChannels).values({
|
||||
routeId: route.id,
|
||||
accountId: account.id,
|
||||
tokenId: null,
|
||||
@@ -75,10 +75,10 @@ describe('TokenRouter site status guard', () => {
|
||||
}).returning().get();
|
||||
|
||||
const router = new TokenRouter();
|
||||
const selected = router.selectChannel('gpt-4o-mini');
|
||||
const selected = await router.selectChannel('gpt-4o-mini');
|
||||
expect(selected).toBeNull();
|
||||
|
||||
const decision = router.explainSelection('gpt-4o-mini');
|
||||
const decision = await router.explainSelection('gpt-4o-mini');
|
||||
expect(decision.matched).toBe(true);
|
||||
const candidate = decision.candidates.find((item) => item.channelId === channel.id);
|
||||
expect(candidate?.eligible).toBe(false);
|
||||
|
||||
@@ -64,14 +64,14 @@ function isCacheFresh(loadedAt: number, nowMs: number): boolean {
|
||||
return nowMs - loadedAt < resolveTokenRouterCacheTtlMs();
|
||||
}
|
||||
|
||||
function loadEnabledRoutes(nowMs = Date.now()): RouteRow[] {
|
||||
async function loadEnabledRoutes(nowMs = Date.now()): Promise<RouteRow[]> {
|
||||
if (isCacheFresh(routeCacheSnapshot.loadedAt, nowMs)) {
|
||||
return routeCacheSnapshot.routes;
|
||||
}
|
||||
|
||||
const routes = db.select().from(schema.tokenRoutes)
|
||||
.where(eq(schema.tokenRoutes.enabled, true))
|
||||
.all();
|
||||
const routes = await db.select().from(schema.tokenRoutes)
|
||||
.where(eq(schema.tokenRoutes.enabled, true))
|
||||
.all();
|
||||
routeCacheSnapshot = {
|
||||
loadedAt: nowMs,
|
||||
routes,
|
||||
@@ -79,17 +79,17 @@ function loadEnabledRoutes(nowMs = Date.now()): RouteRow[] {
|
||||
return routes;
|
||||
}
|
||||
|
||||
function loadRouteMatch(route: RouteRow, nowMs = Date.now()): RouteMatch {
|
||||
async function loadRouteMatch(route: RouteRow, nowMs = Date.now()): Promise<RouteMatch> {
|
||||
const cached = routeMatchCache.get(route.id);
|
||||
if (cached && isCacheFresh(cached.loadedAt, nowMs)) {
|
||||
return cached.match;
|
||||
}
|
||||
|
||||
const channels = db
|
||||
.select()
|
||||
.from(schema.routeChannels)
|
||||
.innerJoin(schema.accounts, eq(schema.routeChannels.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
const channels = await db
|
||||
.select()
|
||||
.from(schema.routeChannels)
|
||||
.innerJoin(schema.accounts, eq(schema.routeChannels.accountId, schema.accounts.id))
|
||||
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
|
||||
.leftJoin(schema.accountTokens, eq(schema.routeChannels.tokenId, schema.accountTokens.id))
|
||||
.where(eq(schema.routeChannels.routeId, route.id))
|
||||
.all();
|
||||
@@ -364,15 +364,15 @@ function resolveEffectiveUnitCost(candidate: RouteChannelCandidate, modelName: s
|
||||
};
|
||||
}
|
||||
|
||||
export class TokenRouter {
|
||||
export class TokenRouter {
|
||||
/**
|
||||
* Find matching route and select a channel for the given model.
|
||||
* Returns null if no route/channel available.
|
||||
*/
|
||||
selectChannel(requestedModel: string, downstreamPolicy: DownstreamRoutingPolicy = DEFAULT_DOWNSTREAM_POLICY): SelectedChannel | null {
|
||||
async selectChannel(requestedModel: string, downstreamPolicy: DownstreamRoutingPolicy = DEFAULT_DOWNSTREAM_POLICY): Promise<SelectedChannel | null> {
|
||||
if (!isModelAllowedByDownstreamPolicy(requestedModel, downstreamPolicy)) return null;
|
||||
|
||||
const match = this.findRoute(requestedModel, downstreamPolicy);
|
||||
const match = await this.findRoute(requestedModel, downstreamPolicy);
|
||||
if (!match) return null;
|
||||
|
||||
const mappedModel = resolveMappedModel(requestedModel, match.route.modelMapping);
|
||||
@@ -443,14 +443,14 @@ export class TokenRouter {
|
||||
/**
|
||||
* Select next channel for failover (exclude already-tried channels).
|
||||
*/
|
||||
selectNextChannel(
|
||||
async selectNextChannel(
|
||||
requestedModel: string,
|
||||
excludeChannelIds: number[],
|
||||
downstreamPolicy: DownstreamRoutingPolicy = DEFAULT_DOWNSTREAM_POLICY,
|
||||
): SelectedChannel | null {
|
||||
): Promise<SelectedChannel | null> {
|
||||
if (!isModelAllowedByDownstreamPolicy(requestedModel, downstreamPolicy)) return null;
|
||||
|
||||
const match = this.findRoute(requestedModel, downstreamPolicy);
|
||||
const match = await this.findRoute(requestedModel, downstreamPolicy);
|
||||
if (!match) return null;
|
||||
|
||||
const mappedModel = resolveMappedModel(requestedModel, match.route.modelMapping);
|
||||
@@ -514,30 +514,30 @@ export class TokenRouter {
|
||||
return null;
|
||||
}
|
||||
|
||||
explainSelection(
|
||||
async explainSelection(
|
||||
requestedModel: string,
|
||||
excludeChannelIds: number[] = [],
|
||||
downstreamPolicy: DownstreamRoutingPolicy = DEFAULT_DOWNSTREAM_POLICY,
|
||||
): RouteDecisionExplanation {
|
||||
const match = this.findRoute(requestedModel, downstreamPolicy);
|
||||
return this.explainSelectionFromMatch(match, requestedModel, { excludeChannelIds, downstreamPolicy });
|
||||
}
|
||||
|
||||
explainSelectionForRoute(
|
||||
): Promise<RouteDecisionExplanation> {
|
||||
const match = await this.findRoute(requestedModel, downstreamPolicy);
|
||||
return this.explainSelectionFromMatch(match, requestedModel, { excludeChannelIds, downstreamPolicy });
|
||||
}
|
||||
|
||||
async explainSelectionForRoute(
|
||||
routeId: number,
|
||||
requestedModel: string,
|
||||
excludeChannelIds: number[] = [],
|
||||
downstreamPolicy: DownstreamRoutingPolicy = DEFAULT_DOWNSTREAM_POLICY,
|
||||
): RouteDecisionExplanation {
|
||||
const match = this.findRouteById(routeId, downstreamPolicy);
|
||||
return this.explainSelectionFromMatch(match, requestedModel, { excludeChannelIds, downstreamPolicy });
|
||||
}
|
||||
|
||||
explainSelectionRouteWide(routeId: number, downstreamPolicy: DownstreamRoutingPolicy = DEFAULT_DOWNSTREAM_POLICY): RouteDecisionExplanation {
|
||||
const match = this.findRouteById(routeId, downstreamPolicy);
|
||||
const fallbackRequestedModel = match?.route.modelPattern || `route:${routeId}`;
|
||||
return this.explainSelectionFromMatch(match, fallbackRequestedModel, {
|
||||
bypassSourceModelCheck: true,
|
||||
): Promise<RouteDecisionExplanation> {
|
||||
const match = await this.findRouteById(routeId, downstreamPolicy);
|
||||
return this.explainSelectionFromMatch(match, requestedModel, { excludeChannelIds, downstreamPolicy });
|
||||
}
|
||||
|
||||
async explainSelectionRouteWide(routeId: number, downstreamPolicy: DownstreamRoutingPolicy = DEFAULT_DOWNSTREAM_POLICY): Promise<RouteDecisionExplanation> {
|
||||
const match = await this.findRouteById(routeId, downstreamPolicy);
|
||||
const fallbackRequestedModel = match?.route.modelPattern || `route:${routeId}`;
|
||||
return this.explainSelectionFromMatch(match, fallbackRequestedModel, {
|
||||
bypassSourceModelCheck: true,
|
||||
useChannelSourceModelForCost: true,
|
||||
downstreamPolicy,
|
||||
});
|
||||
@@ -720,14 +720,14 @@ export class TokenRouter {
|
||||
/**
|
||||
* Record success for a channel.
|
||||
*/
|
||||
recordSuccess(channelId: number, latencyMs: number, cost: number) {
|
||||
const ch = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
async recordSuccess(channelId: number, latencyMs: number, cost: number) {
|
||||
const ch = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
if (!ch) return;
|
||||
const nowIso = new Date().toISOString();
|
||||
const nextSuccessCount = (ch.successCount ?? 0) + 1;
|
||||
const nextTotalLatencyMs = (ch.totalLatencyMs ?? 0) + latencyMs;
|
||||
const nextTotalCost = (ch.totalCost ?? 0) + cost;
|
||||
db.update(schema.routeChannels).set({
|
||||
await db.update(schema.routeChannels).set({
|
||||
successCount: nextSuccessCount,
|
||||
totalLatencyMs: nextTotalLatencyMs,
|
||||
totalCost: nextTotalCost,
|
||||
@@ -749,15 +749,15 @@ export class TokenRouter {
|
||||
/**
|
||||
* Record failure and set cooldown.
|
||||
*/
|
||||
recordFailure(channelId: number) {
|
||||
const ch = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
async recordFailure(channelId: number) {
|
||||
const ch = await db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
|
||||
if (!ch) return;
|
||||
const failCount = (ch.failCount ?? 0) + 1;
|
||||
// Exponential backoff cooldown: 30s, 60s, 120s, 240s, max 5min
|
||||
const cooldownSec = Math.min(30 * Math.pow(2, failCount - 1), 300);
|
||||
const cooldownUntil = new Date(Date.now() + cooldownSec * 1000).toISOString();
|
||||
const nowIso = new Date().toISOString();
|
||||
db.update(schema.routeChannels).set({
|
||||
await db.update(schema.routeChannels).set({
|
||||
failCount,
|
||||
lastFailAt: nowIso,
|
||||
cooldownUntil,
|
||||
@@ -773,18 +773,18 @@ export class TokenRouter {
|
||||
/**
|
||||
* Get all available models (aggregated from all routes).
|
||||
*/
|
||||
getAvailableModels(): string[] {
|
||||
const routes = loadEnabledRoutes();
|
||||
const exposed = routes
|
||||
.map((route) => getExposedModelNameForRoute(route).trim())
|
||||
.filter((name) => name.length > 0);
|
||||
async getAvailableModels(): Promise<string[]> {
|
||||
const routes = await loadEnabledRoutes();
|
||||
const exposed = routes
|
||||
.map((route) => getExposedModelNameForRoute(route).trim())
|
||||
.filter((name) => name.length > 0);
|
||||
return Array.from(new Set(exposed));
|
||||
}
|
||||
|
||||
// --- Private methods ---
|
||||
|
||||
private findRoute(model: string, downstreamPolicy: DownstreamRoutingPolicy): RouteMatch | null {
|
||||
let routes = loadEnabledRoutes();
|
||||
private async findRoute(model: string, downstreamPolicy: DownstreamRoutingPolicy): Promise<RouteMatch | null> {
|
||||
let routes = await loadEnabledRoutes();
|
||||
|
||||
const supportedPatterns = Array.isArray(downstreamPolicy.supportedModels)
|
||||
? downstreamPolicy.supportedModels
|
||||
@@ -803,23 +803,23 @@ export class TokenRouter {
|
||||
|
||||
if (!matchedRoute) return null;
|
||||
|
||||
return this.loadRouteMatch(matchedRoute);
|
||||
}
|
||||
|
||||
private findRouteById(routeId: number, downstreamPolicy: DownstreamRoutingPolicy): RouteMatch | null {
|
||||
return await this.loadRouteMatch(matchedRoute);
|
||||
}
|
||||
|
||||
private async findRouteById(routeId: number, downstreamPolicy: DownstreamRoutingPolicy): Promise<RouteMatch | null> {
|
||||
if (downstreamPolicy.allowedRouteIds.length > 0 && !downstreamPolicy.allowedRouteIds.includes(routeId)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const route = loadEnabledRoutes().find((item) => item.id === routeId);
|
||||
if (!route) return null;
|
||||
|
||||
return this.loadRouteMatch(route);
|
||||
}
|
||||
|
||||
private loadRouteMatch(route: typeof schema.tokenRoutes.$inferSelect): RouteMatch {
|
||||
return loadRouteMatch(route);
|
||||
}
|
||||
const route = (await loadEnabledRoutes()).find((item) => item.id === routeId);
|
||||
if (!route) return null;
|
||||
|
||||
return await this.loadRouteMatch(route);
|
||||
}
|
||||
|
||||
private async loadRouteMatch(route: typeof schema.tokenRoutes.$inferSelect): Promise<RouteMatch> {
|
||||
return await loadRouteMatch(route);
|
||||
}
|
||||
|
||||
private resolveChannelTokenValue(candidate: {
|
||||
channel: typeof schema.routeChannels.$inferSelect;
|
||||
|
||||
@@ -202,6 +202,17 @@ export const api = {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(data),
|
||||
}),
|
||||
testExternalDatabaseConnection: (data: { dialect: 'sqlite' | 'mysql' | 'postgres'; connectionString: string; overwrite?: boolean }) =>
|
||||
request('/api/settings/database/test-connection', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(data),
|
||||
}),
|
||||
migrateExternalDatabase: (data: { dialect: 'sqlite' | 'mysql' | 'postgres'; connectionString: string; overwrite?: boolean }) =>
|
||||
request('/api/settings/database/migrate', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(data),
|
||||
timeoutMs: 120_000,
|
||||
}),
|
||||
getDownstreamApiKeys: () => request('/api/downstream-keys'),
|
||||
createDownstreamApiKey: (data: any) => request('/api/downstream-keys', {
|
||||
method: 'POST',
|
||||
|
||||
@@ -63,6 +63,22 @@ type RouteSelectorItem = {
|
||||
enabled: boolean;
|
||||
};
|
||||
|
||||
type DatabaseMigrationSummary = {
|
||||
dialect: 'sqlite' | 'mysql' | 'postgres';
|
||||
connection: string;
|
||||
overwrite: boolean;
|
||||
version: string;
|
||||
timestamp: number;
|
||||
rows: {
|
||||
sites: number;
|
||||
accounts: number;
|
||||
accountTokens: number;
|
||||
tokenRoutes: number;
|
||||
routeChannels: number;
|
||||
settings: number;
|
||||
};
|
||||
};
|
||||
|
||||
const defaultWeights: RoutingWeights = {
|
||||
baseWeightFactor: 0.5,
|
||||
valueScoreFactor: 0.5,
|
||||
@@ -119,6 +135,12 @@ export default function Settings() {
|
||||
const [adminIpAllowlistText, setAdminIpAllowlistText] = useState('');
|
||||
const [clearingCache, setClearingCache] = useState(false);
|
||||
const [clearingUsage, setClearingUsage] = useState(false);
|
||||
const [migrationDialect, setMigrationDialect] = useState<'sqlite' | 'mysql' | 'postgres'>('postgres');
|
||||
const [migrationConnectionString, setMigrationConnectionString] = useState('');
|
||||
const [migrationOverwrite, setMigrationOverwrite] = useState(true);
|
||||
const [testingMigrationConnection, setTestingMigrationConnection] = useState(false);
|
||||
const [migratingDatabase, setMigratingDatabase] = useState(false);
|
||||
const [migrationSummary, setMigrationSummary] = useState<DatabaseMigrationSummary | null>(null);
|
||||
const [showChangeKey, setShowChangeKey] = useState(false);
|
||||
const [downstreamKeys, setDownstreamKeys] = useState<DownstreamApiKeyItem[]>([]);
|
||||
const [downstreamLoading, setDownstreamLoading] = useState(false);
|
||||
@@ -557,6 +579,54 @@ export default function Settings() {
|
||||
}
|
||||
};
|
||||
|
||||
const handleTestExternalDatabaseConnection = async () => {
|
||||
if (!migrationConnectionString.trim()) {
|
||||
toast.info('请先填写目标数据库连接串');
|
||||
return;
|
||||
}
|
||||
|
||||
setTestingMigrationConnection(true);
|
||||
try {
|
||||
const res = await api.testExternalDatabaseConnection({
|
||||
dialect: migrationDialect,
|
||||
connectionString: migrationConnectionString.trim(),
|
||||
overwrite: migrationOverwrite,
|
||||
});
|
||||
toast.success(`连接成功:${res.connection || migrationDialect}`);
|
||||
} catch (err: any) {
|
||||
toast.error(err?.message || '目标数据库连接失败');
|
||||
} finally {
|
||||
setTestingMigrationConnection(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleMigrateToExternalDatabase = async () => {
|
||||
if (!migrationConnectionString.trim()) {
|
||||
toast.info('请先填写目标数据库连接串');
|
||||
return;
|
||||
}
|
||||
|
||||
const warning = migrationOverwrite
|
||||
? '确认迁移并覆盖目标数据库现有数据?'
|
||||
: '确认迁移到目标数据库(目标中已有数据将导致失败)?';
|
||||
if (!window.confirm(warning)) return;
|
||||
|
||||
setMigratingDatabase(true);
|
||||
try {
|
||||
const res = await api.migrateExternalDatabase({
|
||||
dialect: migrationDialect,
|
||||
connectionString: migrationConnectionString.trim(),
|
||||
overwrite: migrationOverwrite,
|
||||
});
|
||||
setMigrationSummary(res);
|
||||
toast.success(res?.message || '数据库迁移完成');
|
||||
} catch (err: any) {
|
||||
toast.error(err?.message || '数据库迁移失败');
|
||||
} finally {
|
||||
setMigratingDatabase(false);
|
||||
}
|
||||
};
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div className="animate-fade-in">
|
||||
@@ -839,6 +909,63 @@ export default function Settings() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="card animate-slide-up stagger-6" style={{ padding: 20 }}>
|
||||
<div style={{ fontWeight: 600, fontSize: 14, marginBottom: 10 }}>数据库迁移(SQLite / MySQL / PostgreSQL)</div>
|
||||
<div style={{ fontSize: 12, color: 'var(--color-text-muted)', marginBottom: 12 }}>
|
||||
在此填写目标数据库连接串,可先测试连接,再一键把当前 SQLite 数据迁移到目标库。
|
||||
</div>
|
||||
<div style={{ display: 'grid', gridTemplateColumns: '180px 1fr', gap: 10, marginBottom: 10 }}>
|
||||
<select
|
||||
value={migrationDialect}
|
||||
onChange={(e) => setMigrationDialect(e.target.value as 'sqlite' | 'mysql' | 'postgres')}
|
||||
style={inputStyle}
|
||||
>
|
||||
<option value="postgres">PostgreSQL</option>
|
||||
<option value="mysql">MySQL</option>
|
||||
<option value="sqlite">SQLite</option>
|
||||
</select>
|
||||
<input
|
||||
value={migrationConnectionString}
|
||||
onChange={(e) => setMigrationConnectionString(e.target.value)}
|
||||
placeholder={migrationDialect === 'sqlite' ? './data/target.db 或 file:///abs/path.db' : '例如:postgres://user:pass@host:5432/db'}
|
||||
style={{ ...inputStyle, fontFamily: 'var(--font-mono)' }}
|
||||
/>
|
||||
</div>
|
||||
<label style={{ display: 'inline-flex', alignItems: 'center', gap: 8, marginBottom: 12, fontSize: 12, color: 'var(--color-text-secondary)' }}>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={migrationOverwrite}
|
||||
onChange={(e) => setMigrationOverwrite(e.target.checked)}
|
||||
style={{ width: 14, height: 14, accentColor: 'var(--color-primary)' }}
|
||||
/>
|
||||
允许覆盖目标数据库现有数据
|
||||
</label>
|
||||
<div style={{ display: 'flex', gap: 8, flexWrap: 'wrap', marginBottom: migrationSummary ? 12 : 0 }}>
|
||||
<button
|
||||
onClick={handleTestExternalDatabaseConnection}
|
||||
disabled={testingMigrationConnection || migratingDatabase}
|
||||
className="btn btn-ghost"
|
||||
style={{ border: '1px solid var(--color-border)' }}
|
||||
>
|
||||
{testingMigrationConnection ? <><span className="spinner spinner-sm" /> 测试中...</> : '测试连接'}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleMigrateToExternalDatabase}
|
||||
disabled={migratingDatabase || testingMigrationConnection}
|
||||
className="btn btn-primary"
|
||||
>
|
||||
{migratingDatabase ? <><span className="spinner spinner-sm" style={{ borderTopColor: 'white', borderColor: 'rgba(255,255,255,0.3)' }} /> 迁移中...</> : '开始迁移'}
|
||||
</button>
|
||||
</div>
|
||||
{migrationSummary && (
|
||||
<div style={{ border: '1px solid var(--color-border-light)', borderRadius: 'var(--radius-sm)', padding: 10, fontSize: 12, color: 'var(--color-text-secondary)', lineHeight: 1.8 }}>
|
||||
<div>目标:{migrationSummary.dialect}({migrationSummary.connection})</div>
|
||||
<div>版本:{migrationSummary.version},时间:{new Date(migrationSummary.timestamp).toLocaleString()}</div>
|
||||
<div>迁移结果:站点 {migrationSummary.rows.sites} / 账号 {migrationSummary.rows.accounts} / 令牌 {migrationSummary.rows.accountTokens} / 路由 {migrationSummary.rows.tokenRoutes} / 通道 {migrationSummary.rows.routeChannels} / 设置 {migrationSummary.rows.settings}</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="card animate-slide-up stagger-6" style={{ padding: 20 }}>
|
||||
<div style={{ fontWeight: 600, fontSize: 14, marginBottom: 12 }}>维护工具</div>
|
||||
<div style={{ display: 'flex', gap: 8, flexWrap: 'wrap' }}>
|
||||
|
||||
@@ -4,7 +4,9 @@
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "dist/server",
|
||||
"rootDir": "src/server"
|
||||
"rootDir": "src/server",
|
||||
"noImplicitAny": false
|
||||
},
|
||||
"include": ["src/server/**/*"]
|
||||
"include": ["src/server/**/*.ts"],
|
||||
"exclude": ["src/server/**/*.test.ts"]
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user