chore: initialize clean metapi repository

This commit is contained in:
cita
2026-02-28 00:36:28 +08:00
commit 2ecc23730d
199 changed files with 46078 additions and 0 deletions
+6
View File
@@ -0,0 +1,6 @@
node_modules
dist
data
.git
.env
*.md
+12
View File
@@ -0,0 +1,12 @@
AUTH_TOKEN=change-me-admin-token
PROXY_TOKEN=change-me-proxy-sk-token
CHECKIN_CRON=0 8 * * *
BALANCE_REFRESH_CRON=0 * * * *
WEBHOOK_URL=
BARK_URL=
SERVERCHAN_KEY=
PORT=4000
DATA_DIR=./data
NOTIFY_COOLDOWN_SEC=300
ADMIN_IP_ALLOWLIST=
TZ=Asia/Shanghai
+33
View File
@@ -0,0 +1,33 @@
name: CI
on:
push:
branches: [main, master]
pull_request:
jobs:
test-and-build:
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 22
cache: npm
- name: Install dependencies
run: npm ci
- name: Run tests
run: npm test
- name: Build
run: npm run build
- name: Audit production dependencies
run: npm audit --omit=dev --audit-level=high
+14
View File
@@ -0,0 +1,14 @@
node_modules/
dist/
drizzle/
data/*
!data/.gitkeep
.env
*.log
logs/
tmp/*
!tmp/.gitkeep
.DS_Store
Thumbs.db
docker/data/
docker/.env
+26
View File
@@ -0,0 +1,26 @@
# Code of Conduct
## Our Commitment
We are committed to providing a welcoming, harassment-free community for everyone.
## Expected Behavior
- Be respectful and constructive.
- Focus on technical discussion, not personal attacks.
- Assume good intent and ask clarifying questions before escalating.
## Unacceptable Behavior
- Harassment, discrimination, or abusive language
- Personal attacks or intimidation
- Publishing others' private information without consent
## Enforcement
Project maintainers are responsible for clarifying and enforcing this code of conduct.
They may remove, edit, or reject comments, commits, code, and contributions that do not align with this policy.
## Scope
This policy applies to project spaces and public/private interactions where an individual is representing the project.
+48
View File
@@ -0,0 +1,48 @@
# Contributing
Thanks for contributing to Metapi.
## Development Setup
1. Install dependencies:
```bash
npm install
```
2. Copy environment template:
```bash
cp .env.example .env
```
3. Run migration and start development:
```bash
npm run db:migrate
npm run dev
```
## Quality Checks
Run before opening a PR:
```bash
npm test
npm run build
```
## Pull Request Guidelines
- Keep PRs focused and small.
- Add or update tests for behavior changes.
- Update docs when user-facing behavior/config changes.
- Avoid committing runtime data (`data/`) or temporary files (`tmp/`).
## Commit Messages
Use concise messages with clear scope, for example:
- `feat: add token route health guard`
- `fix: handle empty model list in dashboard`
- `docs: clarify docker env setup`
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Metapi contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+502
View File
@@ -0,0 +1,502 @@
<div align="center">
<img src="docs/logos/logo-icon-512.png" alt="Metapi Logo" width="120" height="120">
# Metapi
<img src="docs/logos/logo-banner.png" alt="Metapi" width="280">
🔮 **中转站的中转站 — 将分散的 AI 中转站聚合为一个统一网关**
<p>
把你在各处注册的 New API / One API / OneHub / DoneHub / Veloera / AnyRouter / Sub2API 等站点,
<br>
汇聚成 <strong>一个 API Key、一个入口</strong>,自动发现模型、智能路由、成本最优。
</p>
<p align="center">
<a href="https://github.com/cita-777/metapi/releases">
<img alt="GitHub Release" src="https://img.shields.io/github/v/release/cita-777/metapi?label=Release&logo=github&style=flat">
</a><!--
--><a href="https://hub.docker.com/r/1467078763/metapi">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/1467078763/metapi?style=flat&logo=docker&label=Docker%20Pulls">
</a><!--
--><a href="https://hub.docker.com/r/1467078763/metapi">
<img alt="Docker Image" src="https://img.shields.io/badge/docker-1467078763%2Fmetapi-blue?logo=docker&style=flat">
</a><!--
--><a href="LICENSE">
<img alt="License" src="https://img.shields.io/badge/license-MIT-brightgreen?style=flat">
</a><!--
--><img alt="Node.js" src="https://img.shields.io/badge/Node.js-20+-339933?logo=node.js&style=flat"><!--
--><img alt="TypeScript" src="https://img.shields.io/badge/TypeScript-5.x-3178C6?logo=typescript&style=flat">
</p>
<p align="center">
<a href="#-快速开始">快速开始</a> •
<a href="#-核心功能">核心功能</a> •
<a href="#-文档中心">文档</a> •
<a href="#-环境变量">配置</a> •
<a href="#-接入下游客户端">接入指南</a> •
<a href="#-相关项目">相关项目</a>
</p>
**[文档中心](docs/README.md) | [快速上手](docs/getting-started.md) | [部署指南](docs/deployment.md) | [配置说明](docs/configuration.md) | [常见问题](docs/faq.md) | [贡献指南](CONTRIBUTING.md)**
</div>
---
## 📖 介绍
现在 AI 生态里有越来越多基于 New API / One API 系列的聚合中转站,要管理多个站点的余额、模型列表和 API 密钥,往往既分散又费时。
Metapi 作为这些中转站之上的聚合层,把多个站点统一到 **一个 API Key、一个入口**。当前已支持基于以下项目的中转站:
- [New API](https://github.com/QuantumNous/new-api)
- [One API](https://github.com/songquanpeng/one-api)
- [OneHub](https://github.com/MartialBE/one-hub)
- [DoneHub](https://github.com/deanxv/done-hub)
- [Veloera](https://github.com/Veloera/Veloera)
- [AnyRouter](https://anyrouter.top) — 通用路由平台
- [Sub2API](https://github.com/Wei-Shaw/sub2api) — 订阅制中转
| 痛点 | Metapi 怎么解决 |
|------|-----------------|
| 🔑 每个站点一个 Key,下游工具配置一堆 | **一个 Key 统一代理**,所有站点的模型自动聚合到 `/v1/*` |
| 💸 不知道哪个站点用某个模型最便宜 | **智能路由** 自动按成本、余额、使用率选最优通道 |
| 🔄 某个站点挂了,手动切换好麻烦 | **自动故障转移**,一个通道失败自动冷却并切到下一个 |
| 📊 余额分散在各处,不知道还剩多少 | **集中看板** 一目了然,余额不足自动告警 |
| ✅ 每天得去各站签到领额度 | **自动签到** 定时执行,奖励自动追踪 |
| 🤷 不知道哪个站有什么模型 | **自动模型发现**,上游新增模型零配置出现在你的模型列表里 |
---
## ✨ 核心功能
### 🌐 统一代理网关
兼容 **OpenAI** 与 **Claude** 下游格式,对接所有主流客户端
- 支持 Chat Completions / Messages / Completions / Embeddings / Images / Models 全接口
完整的 SSE 流式传输支持,自动格式转换(OpenAI ⇄ Claude)
### 🧠 智能路由引擎
- 自动发现所有上游站点的可用模型,**零配置**生成路由表
- 四级成本信号:**实测成本 → 账号配置成本 → 目录参考价 → 默认兜底**
- 多通道概率分摊,基于成本(40%)、余额(30%)、使用率(30%)加权分配
- 失败通道自动冷却与避让(默认 10 分钟冷却期)
- 请求失败自动重试,自动切换其他可用通道
- 路由决策可视化解释,每次选择透明可审计
### 📡 多平台聚合管理
| 平台 | 适配器 | 说明 |
|------|--------|------|
| **New API** | `new-api` | 新一代大模型网关 |
| **One API** | `one-api` | 经典 OpenAI 接口聚合 |
| **OneHub** | `onehub` | One API 增强分支 |
| **DoneHub** | `donehub` | OneHub 增强分支 |
| **Veloera** | `veloera` | API 网关平台 |
| **AnyRouter** | `anyrouter` | 通用路由平台 |
| **Sub2API** | `sub2api` | 订阅制中转平台 |
每种平台适配器均支持:账号登录、余额查询、模型枚举、Token 同步、每日签到、用户信息获取等完整生命周期管理。
### 👥 账号与 Token 管理
- **多站点多账号**:每个站点可添加多个账号,每个账号可持有多个 API Token
- **健康状态追踪**`healthy` / `unhealthy` / `degraded` / `disabled` 四级状态机
- **凭证加密存储**:所有敏感凭证均加密保存在本地数据库中
- **自动续签**:Token 过期时自动重新登录获取新凭证
- **站点联动**:禁用站点自动级联禁用所有关联账号
### 🏪 模型广场
- 跨站点模型覆盖总览:哪些模型可用、多少账号覆盖、各站定价对比
- 延迟、成功率等实测指标展示
- 上游模型目录缓存与品牌分类(OpenAI、Anthropic、Google、DeepSeek 等)
- 交互式模型测试器,在线验证模型可用性
### ✅ 自动签到
- Cron 定时自动签到(默认每天 08:00)
- 智能解析签到奖励金额,签到失败自动通知
- 逐账号独立执行,支持启用/禁用控制
- 完整的签到日志记录,支持历史查询
- 并发锁机制,防止重复签到
### 💰 余额管理
- 定时余额刷新(默认每小时),批量更新所有活跃账号
- 收入追踪:记录每日/累计收入,追踪额度消耗趋势
- 余额兜底估算:API 不可用时,从代理日志推算余额变动
- 自动重登录:凭证过期时自动刷新
### 🔔 告警与通知
支持四种通知渠道:
| 渠道 | 说明 |
|------|------|
| **Webhook** | 自定义 HTTP 推送 |
| **Bark** | iOS 推送通知 |
| **Server酱** | 微信 / Telegram 通知 |
| **SMTP 邮件** | 标准邮件通知 |
告警场景覆盖:余额不足预警、站点/账号异常、签到失败、代理请求失败、Token 过期提醒、每日摘要报告。支持告警冷却机制(默认 300 秒),防止相同事件重复通知。
### 📊 数据看板
- 站点余额分布饼图、每日消费趋势图表
- 全局搜索(站点、账号、模型)
- 系统事件日志、代理请求日志(含模型、状态、延迟、Token 消耗、成本估算)
### 📦 轻量部署
- **单 Docker 容器**,内置 SQLite,无外部依赖
- Alpine 基础镜像,体积精简
- 数据完整导入导出,迁移无忧
---
## 🖼️ 界面预览
<!-- 截图占位 — 替换为实际路径后取消注释
<table>
<tr>
<td align="center">
<img src="docs/screenshots/dashboard.png" alt="dashboard" style="width:100%;height:auto;"/>
<div>仪表盘</div>
</td>
<td align="center">
<img src="docs/screenshots/model-marketplace.png" alt="model-marketplace" style="width:100%;height:auto;"/>
<div>模型广场</div>
</td>
</tr>
<tr>
<td align="center">
<img src="docs/screenshots/routes.png" alt="routes" style="width:100%;height:auto;"/>
<div>智能路由</div>
</td>
<td align="center">
<img src="docs/screenshots/accounts.png" alt="accounts" style="width:100%;height:auto;"/>
<div>账号管理</div>
</td>
</tr>
</table>
-->
> 📸 截图即将补充,欢迎 Star 关注更新!
---
## 🚀 快速开始
### Docker Compose(推荐)
```bash
mkdir metapi && cd metapi
cat > docker-compose.yml << 'EOF'
services:
metapi:
image: 1467078763/metapi:latest
ports:
- "4000:4000"
volumes:
- ./data:/app/data
environment:
AUTH_TOKEN: ${AUTH_TOKEN:?AUTH_TOKEN is required}
PROXY_TOKEN: ${PROXY_TOKEN:?PROXY_TOKEN is required}
CHECKIN_CRON: "0 8 * * *"
BALANCE_REFRESH_CRON: "0 * * * *"
PORT: ${PORT:-4000}
DATA_DIR: /app/data
TZ: ${TZ:-Asia/Shanghai}
restart: unless-stopped
EOF
# 设置令牌并启动
export AUTH_TOKEN=your-admin-token
export PROXY_TOKEN=your-proxy-sk-token
docker compose up -d
```
<details>
<summary><strong>Docker 命令一行启动</strong></summary>
```bash
docker run -d --name metapi \
-p 4000:4000 \
-e AUTH_TOKEN=your-admin-token \
-e PROXY_TOKEN=your-proxy-sk-token \
-e TZ=Asia/Shanghai \
-v ./data:/app/data \
--restart unless-stopped \
1467078763/metapi:latest
```
</details>
🎉 启动后访问 `http://localhost:4000`,用 `AUTH_TOKEN` 登录即可!
> [!IMPORTANT]
> 请务必修改 `AUTH_TOKEN` 和 `PROXY_TOKEN`,不要使用默认值。数据存储在 `./data` 目录中,升级不影响已有数据。
### 升级
```bash
docker compose pull && docker compose up -d && docker image prune -f
```
📖 更详细的部署方式请参考 [部署指南](docs/deployment.md)
---
## 📚 文档中心
| 分类 | 链接 | 说明 |
|------|------|------|
| 📖 文档总览 | [docs/README.md](docs/README.md) | 文档导航与索引 |
| 🚀 快速上手 | [docs/getting-started.md](docs/getting-started.md) | 10 分钟启动 |
| 🚢 部署指南 | [docs/deployment.md](docs/deployment.md) | Docker Compose、反向代理、升级回滚 |
| ⚙️ 配置说明 | [docs/configuration.md](docs/configuration.md) | 全部环境变量与路由参数 |
| 🔌 客户端接入 | [docs/client-integration.md](docs/client-integration.md) | Open WebUI / Cherry Studio / Cursor 等 |
| 🔧 运维手册 | [docs/operations.md](docs/operations.md) | 备份恢复、日志排查、健康检查 |
| ❓ 常见问题 | [docs/faq.md](docs/faq.md) | 常见报错与修复路径 |
---
## ⚙️ 环境变量
### 基础配置
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `AUTH_TOKEN` | 管理后台登录令牌(**必须修改**) | `change-me` |
| `PROXY_TOKEN` | 代理 API Bearer Token(**必须修改**) | `change-me-proxy` |
| `PORT` | 服务监听端口 | `4000` |
| `DATA_DIR` | 数据目录(SQLite 数据库) | `./data` |
| `TZ` | 时区 | `Asia/Shanghai` |
| `ACCOUNT_CREDENTIAL_SECRET` | 账号凭证加密密钥 | 默认使用 `AUTH_TOKEN` |
### 定时任务
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `CHECKIN_CRON` | 自动签到 Cron 表达式 | `0 8 * * *` |
| `BALANCE_REFRESH_CRON` | 余额刷新 Cron 表达式 | `0 * * * *` |
<details>
<summary><strong>智能路由参数</strong></summary>
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `ROUTING_FALLBACK_UNIT_COST` | 无成本信号时的默认单价 | `1` |
| `BASE_WEIGHT_FACTOR` | 基础权重因子 | `0.5` |
| `VALUE_SCORE_FACTOR` | 性价比评分因子 | `0.5` |
| `COST_WEIGHT` | 路由选择中成本权重 | `0.4` |
| `BALANCE_WEIGHT` | 路由选择中余额权重 | `0.3` |
| `USAGE_WEIGHT` | 路由选择中使用率权重 | `0.3` |
</details>
<details>
<summary><strong>通知渠道配置</strong></summary>
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `WEBHOOK_ENABLED` | 启用 Webhook 通知 | `true` |
| `WEBHOOK_URL` | Webhook 推送地址 | 空 |
| `BARK_ENABLED` | 启用 Bark 推送 | `true` |
| `BARK_URL` | Bark 推送地址 | 空 |
| `SERVERCHAN_ENABLED` | 启用 Server酱 通知 | `true` |
| `SERVERCHAN_KEY` | Server酱 SendKey | 空 |
| `SMTP_ENABLED` | 启用邮件通知 | `false` |
| `SMTP_HOST` | SMTP 服务器地址 | 空 |
| `SMTP_PORT` | SMTP 端口 | `587` |
| `SMTP_SECURE` | 使用 SSL/TLS | `false` |
| `SMTP_USER` / `SMTP_PASS` | SMTP 认证 | 空 |
| `SMTP_FROM` / `SMTP_TO` | 发件/收件人 | 空 |
| `NOTIFY_COOLDOWN_SEC` | 相同告警冷却时间(秒) | `300` |
</details>
<details>
<summary><strong>安全配置</strong></summary>
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `ADMIN_IP_ALLOWLIST` | 管理端 IP 白名单(逗号分隔) | 空(不限制) |
</details>
📖 完整配置说明:[docs/configuration.md](docs/configuration.md)
---
## 📡 代理接口
Metapi 对下游暴露标准 OpenAI / Claude 兼容接口:
| 接口 | 方法 | 说明 |
|------|------|------|
| `/v1/chat/completions` | POST | OpenAI Chat Completions |
| `/v1/messages` | POST | Claude Messages |
| `/v1/completions` | POST | OpenAI Completions(Legacy) |
| `/v1/embeddings` | POST | 向量嵌入 |
| `/v1/images/generations` | POST | 图像生成 |
| `/v1/models` | GET | 获取所有可用模型列表 |
请求头携带 `Authorization: Bearer <PROXY_TOKEN>` 即可访问。
---
## 🔌 接入下游客户端
适用于所有兼容 OpenAI API 的客户端:
| 配置项 | 值 |
|--------|-----|
| **Base URL** | `https://your-domain.com`(客户端一般会自动拼接 `/v1`) |
| **API Key** | 你设置的 `PROXY_TOKEN` 值 |
| **模型列表** | 自动从 `GET /v1/models` 获取 |
### 已验证兼容的客户端
- [ChatGPT-Next-Web](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web)
- [Open WebUI](https://github.com/open-webui/open-webui)
- [Cherry Studio](https://github.com/kangfenmao/cherry-studio)
- [Cursor](https://cursor.sh)
- [Claude Code](https://docs.anthropic.com/en/docs/claude-code)
- [Roo Code](https://github.com/RooVetGit/Roo-Code)
- [Kilo Code](https://github.com/kilocode/kilocode)
- 以及所有支持 OpenAI API 格式的客户端
<details>
<summary><strong>常见问题:流式响应异常</strong></summary>
如果非流式正常但流式异常,请排查:
1. 反向代理是否关闭了 SSE 缓冲(Nginx 需设置 `proxy_buffering off`)
2. 中间层是否改写了 `text/event-stream` Content-Type
3. 客户端是否强制要求特定流式格式
**Nginx 参考配置:**
```nginx
location / {
proxy_pass http://127.0.0.1:4000;
proxy_buffering off;
proxy_cache off;
proxy_set_header Connection '';
proxy_http_version 1.1;
chunked_transfer_encoding off;
}
```
</details>
📖 更详细的接入说明:[docs/client-integration.md](docs/client-integration.md)
---
## 🏗️ 技术栈
| 层 | 技术 |
|----|------|
| **后端框架** | [Fastify](https://fastify.dev) — 高性能 Node.js 后端框架 |
| **前端框架** | [React 18](https://react.dev) + [Vite](https://vitejs.dev) |
| **语言** | [TypeScript](https://www.typescriptlang.org) — 端到端类型安全 |
| **样式** | [Tailwind CSS v4](https://tailwindcss.com) — 原子化样式框架 |
| **数据库** | SQLite ([better-sqlite3](https://github.com/WiseLibs/better-sqlite3)) + [Drizzle ORM](https://orm.drizzle.team) |
| **数据可视化** | [VChart](https://visactor.io/vchart) (@visactor/react-vchart) |
| **定时任务** | [node-cron](https://github.com/node-cron/node-cron) |
| **容器化** | Docker (Alpine) + Docker Compose |
| **测试** | [Vitest](https://vitest.dev) |
---
## 🛠️ 本地开发
```bash
# 安装依赖
npm install
# 数据库迁移
npm run db:migrate
# 启动开发环境(前后端热更新)
npm run dev
```
```bash
npm run build # 构建前端 + 后端
npm run build:web # 仅构建前端(Vite)
npm run build:server # 仅构建后端(TypeScript)
npm test # 运行全部测试
npm run test:watch # 监听模式
npm run db:generate # 生成 Drizzle 迁移文件
```
---
## 🔗 相关项目
### 上游兼容平台
| 项目 | 说明 |
|------|------|
| [New API](https://github.com/QuantumNous/new-api) | 新一代大模型网关,Metapi 的主要上游之一 |
| [One API](https://github.com/songquanpeng/one-api) | 经典 OpenAI 接口聚合管理 |
| [OneHub](https://github.com/MartialBE/one-hub) | One API 增强分支 |
| [DoneHub](https://github.com/deanxv/done-hub) | OneHub 增强分支 |
| [Veloera](https://github.com/Veloera/Veloera) | API 网关平台 |
### 参考和使用的项目
| 项目 | 说明 |
|------|------|
| [All API Hub](https://github.com/qixing-jk/all-api-hub) | 浏览器扩展版 — 一站式管理中转站账号,Metapi 最初灵感来源 |
| [LLM Metadata](https://github.com/nicepkg/llm-metadata) | LLM 模型元数据库,用于模型描述参考 |
| [New API](https://github.com/QuantumNous/new-api) | 平台适配器参考实现 |
---
## 🔒 数据与隐私
Metapi 完全自托管,所有数据(账号、令牌、路由、日志)均存储在本地 SQLite 数据库中,不会向任何第三方发送数据。代理请求仅在你的服务器与上游站点之间直连传输。
---
## 🤝 贡献
欢迎各种形式的贡献!
- 🐛 报告 Bug — [提交 Issue](https://github.com/cita-777/metapi/issues)
- 💡 功能建议 — [发起讨论](https://github.com/cita-777/metapi/issues)
- 🔧 代码贡献 — [提交 Pull Request](https://github.com/cita-777/metapi/pulls)
- 📝 贡献指南 — [CONTRIBUTING.md](CONTRIBUTING.md)
- 📜 行为准则 — [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md)
---
## 🛡️ 安全
如发现安全问题,请参考 [SECURITY.md](SECURITY.md) 使用非公开方式报告。
---
## 📜 License
[MIT](LICENSE)
---
<div align="center">
**⭐ 如果 Metapi 对你有帮助,给个 Star 就是最大的支持!**
<sub>Built with ❤️ by the AI community</sub>
</div>
+23
View File
@@ -0,0 +1,23 @@
# Security Policy
## Supported Versions
Only the latest `main` branch state is supported for security fixes.
## Reporting a Vulnerability
Please do not report security issues in public issues.
Use one of these channels:
- GitHub Security Advisory private report (preferred)
- Maintainer contact listed in project profile
When reporting, include:
- Affected endpoint/module
- Reproduction steps
- Potential impact
- Suggested fix (if available)
We will acknowledge reports as soon as possible and coordinate a fix and disclosure timeline.
View File
+18
View File
@@ -0,0 +1,18 @@
# Meta API Hub - Environment Variables
# Copy this file to .env and fill in your values
# cp .env.example .env
# Admin authentication token (required, change this!)
AUTH_TOKEN=change-me-admin-token
# Proxy API token used for OpenAI-compatible proxy endpoint (required, change this!)
PROXY_TOKEN=change-me-proxy-sk-token
PORT=4000
NOTIFY_COOLDOWN_SEC=300
ADMIN_IP_ALLOWLIST=
# Auto check-in cron (default: 8:00 AM every day)
CHECKIN_CRON=0 8 * * *
# Balance refresh cron (default: every hour)
BALANCE_REFRESH_CRON=0 * * * *
View File
+27
View File
@@ -0,0 +1,27 @@
FROM node:20-alpine AS builder
WORKDIR /app
COPY package.json package-lock.json ./
RUN npm ci
COPY . .
RUN npm run build
FROM node:20-alpine
WORKDIR /app
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/package.json ./
COPY --from=builder /app/drizzle ./drizzle
RUN mkdir -p /app/data
EXPOSE 4000
ENV NODE_ENV=production
ENV DATA_DIR=/app/data
CMD ["sh", "-c", "node dist/server/db/migrate.js && node dist/server/index.js"]
+9
View File
@@ -0,0 +1,9 @@
# docker-compose.override.yml
# This file is automatically picked up by Docker Compose during local development.
# It overrides the image reference in docker-compose.yml with a local build.
services:
metapi:
image: ~
build:
context: ..
dockerfile: docker/Dockerfile
+16
View File
@@ -0,0 +1,16 @@
services:
metapi:
image: 1467078763/metapi:latest
ports:
- "4000:4000"
volumes:
- ./data:/app/data
environment:
AUTH_TOKEN: ${AUTH_TOKEN:?AUTH_TOKEN is required}
PROXY_TOKEN: ${PROXY_TOKEN:?PROXY_TOKEN is required}
CHECKIN_CRON: "0 8 * * *"
BALANCE_REFRESH_CRON: "0 * * * *"
PORT: ${PORT:-4000}
DATA_DIR: /app/data
TZ: ${TZ:-Asia/Shanghai}
restart: unless-stopped
+67
View File
@@ -0,0 +1,67 @@
# 📚 Metapi 文档中心
<div align="center">
**中转站的中转站 — 将分散的 AI 中转站聚合为一个统一网关**
[返回项目主页](../README.md)
</div>
---
## 快速导航
| 文档 | 适合谁 | 解决的问题 |
|------|--------|------------|
| 🚀 [快速上手](./getting-started.md) | 首次使用者 | 10 分钟完成部署与首次请求 |
| 🚢 [部署指南](./deployment.md) | 部署维护者 | Docker Compose、反向代理、升级回滚 |
| ⚙️ [配置说明](./configuration.md) | 管理员 | 全部环境变量、路由参数、通知渠道 |
| 🔌 [客户端接入](./client-integration.md) | 下游应用接入者 | Open WebUI、Cherry Studio、Cursor 等接入 |
| 🔧 [运维手册](./operations.md) | 运维人员 | 备份恢复、日志排查、健康检查 |
| ❓ [常见问题](./faq.md) | 所有用户 | 常见报错与修复路径 |
| 📁 [目录规范](./project-structure.md) | 开发者 | 项目目录组织与约定 |
## 架构概览
```text
┌──────────────────────────────────────────────────┐
│ 下游客户端 │
│ Open WebUI / Cherry Studio / Cursor / Claude │
│ Code / Roo Code / Kilo Code / ... │
└───────────────────┬──────────────────────────────┘
│ Authorization: Bearer <PROXY_TOKEN>
┌──────────────────────────────────────────────────┐
│ Metapi 网关 │
│ ┌────────────┐ ┌────────────┐ ┌────────────┐ │
│ │ 统一代理 │ │ 智能路由 │ │ 格式转换 │ │
│ │ /v1/* │ │ 成本+余额 │ │ OpenAI⇄ │ │
│ │ │ │ +使用率 │ │ Claude │ │
│ └────────────┘ └────────────┘ └────────────┘ │
│ ┌────────────┐ ┌────────────┐ ┌────────────┐ │
│ │ 自动签到 │ │ 余额管理 │ │ 多渠道告警 │ │
│ └────────────┘ └────────────┘ └────────────┘ │
└───────────────────┬──────────────────────────────┘
┌───────────┼───────────┐
▼ ▼ ▼
┌─────────┐ ┌─────────┐ ┌─────────┐
│ New API │ │ One API │ │ Veloera │ ...
└─────────┘ └─────────┘ └─────────┘
```
## 核心概念
- **站点 (Site)**:一个上游中转站实例(如 New API、OneHub 等)
- **账号 (Account)**:在某站点上注册的用户账号
- **Token**:账号下的 API Key,用于访问该站点的 API
- **路由 (Route)**:一条模型匹配规则,如 `claude-sonnet-4-6`
- **通道 (Channel)**:路由下的一条 Token 链路,一个路由可有多个通道
- **代理 (Proxy)**Metapi 对下游暴露的统一 API 入口
## 开源协作
- 📝 贡献流程:[CONTRIBUTING.md](../CONTRIBUTING.md)
- 🛡️ 安全策略:[SECURITY.md](../SECURITY.md)
- 📜 行为准则:[CODE_OF_CONDUCT.md](../CODE_OF_CONDUCT.md)
+127
View File
@@ -0,0 +1,127 @@
# 🔌 客户端接入
本文档说明如何将下游客户端连接到 Metapi 代理网关。
[返回文档中心](./README.md)
---
## 通用配置
Metapi 暴露标准 OpenAI / Claude 兼容接口,下游客户端只需配置两项:
| 配置项 | 值 |
|--------|-----|
| **Base URL** | `https://your-domain.com`(不要拼接 `/v1`,客户端会自动加) |
| **API Key** | 你设置的 `PROXY_TOKEN` 值 |
模型列表自动从 `GET /v1/models` 获取,无需手动配置。
## 支持的接口
| 接口 | 方法 | 说明 |
|------|------|------|
| `/v1/chat/completions` | POST | OpenAI Chat Completions |
| `/v1/messages` | POST | Claude Messages |
| `/v1/completions` | POST | OpenAI Completions(Legacy) |
| `/v1/embeddings` | POST | 向量嵌入 |
| `/v1/images/generations` | POST | 图像生成 |
| `/v1/models` | GET | 模型列表 |
## 已验证兼容的客户端
### ChatGPT-Next-Web
| 配置项 | 值 |
|--------|-----|
| Settings → Custom Endpoint | `https://your-domain.com` |
| API Key | `PROXY_TOKEN` |
### Open WebUI
| 配置项 | 值 |
|--------|-----|
| Settings → Connections → OpenAI API URL | `https://your-domain.com/v1` |
| API Key | `PROXY_TOKEN` |
### Cherry Studio
| 配置项 | 值 |
|--------|-----|
| 模型提供商 → OpenAI → API 地址 | `https://your-domain.com` |
| API Key | `PROXY_TOKEN` |
### Cursor
| 配置项 | 值 |
|--------|-----|
| Settings → Models → OpenAI API Key | `PROXY_TOKEN` |
| Override OpenAI Base URL | `https://your-domain.com/v1` |
### Claude Code
```bash
export ANTHROPIC_BASE_URL=https://your-domain.com
export ANTHROPIC_API_KEY=your-proxy-sk-token
```
或在配置文件中设置相应的环境变量。
### Roo Code / Kilo Code
配置方式与 Cursor 类似,在设置中填入 Base URL 和 API Key。
### 其他客户端
所有支持 OpenAI API 格式的客户端均可接入,只需找到 Base URL 和 API Key 的配置位置即可。
## 快速自检
部署完成后,用以下命令验证链路:
```bash
# 1. 检查模型列表
curl -sS https://your-domain.com/v1/models \
-H "Authorization: Bearer <PROXY_TOKEN>" | head -50
# 2. 测试对话(非流式)
curl -sS https://your-domain.com/v1/chat/completions \
-H "Authorization: Bearer <PROXY_TOKEN>" \
-H "Content-Type: application/json" \
-d '{"model":"gpt-4o-mini","messages":[{"role":"user","content":"hi"}]}'
# 3. 测试流式
curl -sS https://your-domain.com/v1/chat/completions \
-H "Authorization: Bearer <PROXY_TOKEN>" \
-H "Content-Type: application/json" \
-d '{"model":"gpt-4o-mini","messages":[{"role":"user","content":"hi"}],"stream":true}'
```
## 常见问题
### 流式响应异常
如果非流式正常但流式异常,原因几乎都是反向代理配置问题:
1. Nginx 未设置 `proxy_buffering off`
2. CDN 或中间层缓存了 SSE 响应
3. 中间层改写了 `text/event-stream` Content-Type
参考 [部署指南 → Nginx 配置](./deployment.md#nginx) 解决。
### 模型列表为空
- 检查是否已添加站点和账号
- 检查账号是否处于 `healthy` 状态
- 检查是否已同步 Token
- 在管理后台手动触发「刷新模型」
### 客户端提示 401 / 403
- 确认使用的是 `PROXY_TOKEN` 而非 `AUTH_TOKEN`
- 确认反向代理透传了 `Authorization` 请求头
## 下一步
- [配置说明](./configuration.md) — 环境变量详解
- [常见问题](./faq.md) — 更多故障排查
+137
View File
@@ -0,0 +1,137 @@
# ⚙️ 配置说明
本文档列出 Metapi 的全部环境变量配置。
[返回文档中心](./README.md)
---
## 必填配置
> ⚠️ 以下变量**必须修改**,不要使用默认值。
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `AUTH_TOKEN` | 管理后台登录令牌 | `change-me` |
| `PROXY_TOKEN` | 代理接口 Bearer Token(下游客户端使用此值作为 API Key) | `change-me-proxy` |
## 基础配置
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `PORT` | 服务监听端口 | `4000` |
| `DATA_DIR` | 数据目录(SQLite 数据库存储位置) | `./data` |
| `TZ` | 时区 | `Asia/Shanghai` |
| `ACCOUNT_CREDENTIAL_SECRET` | 账号凭证加密密钥(用于加密存储的上游账号密码) | 默认使用 `AUTH_TOKEN` |
## 定时任务
| 变量名 | 说明 | 默认值 | 示例 |
|--------|------|--------|------|
| `CHECKIN_CRON` | 自动签到计划 | `0 8 * * *` | 每天 8:00 |
| `BALANCE_REFRESH_CRON` | 余额刷新计划 | `0 * * * *` | 每小时整点 |
Cron 表达式格式:`分 时 日 月 周`(标准五段式)
常用示例:
- `0 8 * * *` — 每天 08:00
- `0 */2 * * *` — 每 2 小时
- `30 7,12,20 * * *` — 每天 07:30、12:30、20:30
## 智能路由
Metapi 的路由引擎按多因子加权选择最优通道。
### 成本信号优先级
```
实测成本(代理日志) → 账号配置成本 → 目录参考价 → 兜底默认值
```
### 路由权重参数
| 变量名 | 说明 | 默认值 | 范围 |
|--------|------|--------|------|
| `ROUTING_FALLBACK_UNIT_COST` | 无成本信号时的默认单价 | `1` | > 0 |
| `BASE_WEIGHT_FACTOR` | 基础权重因子 | `0.5` | 0~1 |
| `VALUE_SCORE_FACTOR` | 性价比评分因子 | `0.5` | 0~1 |
| `COST_WEIGHT` | 成本权重(越大越偏向低成本通道) | `0.4` | 0~1 |
| `BALANCE_WEIGHT` | 余额权重(越大越偏向余额充足的通道) | `0.3` | 0~1 |
| `USAGE_WEIGHT` | 使用率权重(越大越偏向使用较少的通道) | `0.3` | 0~1 |
> 三个权重之和建议为 1.0,但不强制。
### 路由预设建议
| 场景 | COST_WEIGHT | BALANCE_WEIGHT | USAGE_WEIGHT |
|------|:-----------:|:--------------:|:------------:|
| **成本优先** | 0.7 | 0.2 | 0.1 |
| **均衡(默认)** | 0.4 | 0.3 | 0.3 |
| **稳定优先** | 0.2 | 0.5 | 0.3 |
| **轮转均匀** | 0.1 | 0.1 | 0.8 |
## 安全配置
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `ADMIN_IP_ALLOWLIST` | 管理端 IP 白名单(逗号分隔) | 空(不限制) |
示例:`ADMIN_IP_ALLOWLIST=192.168.1.0/24,10.0.0.1`
## 通知渠道
### Webhook
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `WEBHOOK_ENABLED` | 启用 Webhook 通知 | `true` |
| `WEBHOOK_URL` | Webhook 推送地址 | 空 |
### Bark(iOS 推送)
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `BARK_ENABLED` | 启用 Bark 推送 | `true` |
| `BARK_URL` | Bark 推送地址 | 空 |
### Server酱
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `SERVERCHAN_ENABLED` | 启用 Server酱 通知 | `true` |
| `SERVERCHAN_KEY` | Server酱 SendKey | 空 |
### SMTP 邮件
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `SMTP_ENABLED` | 启用邮件通知 | `false` |
| `SMTP_HOST` | SMTP 服务器地址 | 空 |
| `SMTP_PORT` | SMTP 端口 | `587` |
| `SMTP_SECURE` | 使用 SSL/TLS | `false` |
| `SMTP_USER` | SMTP 用户名 | 空 |
| `SMTP_PASS` | SMTP 密码 | 空 |
| `SMTP_FROM` | 发件人地址 | 空 |
| `SMTP_TO` | 收件人地址 | 空 |
### 告警控制
| 变量名 | 说明 | 默认值 |
|--------|------|--------|
| `NOTIFY_COOLDOWN_SEC` | 相同告警冷却时间(秒),防止同一事件重复通知 | `300` |
## 运行时配置
除环境变量外,以下参数可在管理后台「设置」页面中动态调整,无需重启:
- 路由权重参数
- 通知渠道地址
- SMTP 配置
- 告警冷却时间
运行时配置存储在 SQLite 数据库中,优先级高于环境变量默认值。
## 下一步
- [部署指南](./deployment.md) — Docker Compose 与反向代理
- [客户端接入](./client-integration.md) — 对接下游应用
+157
View File
@@ -0,0 +1,157 @@
# 🚢 部署指南
[返回文档中心](./README.md)
---
## Docker Compose 部署(推荐)
### 标准步骤
```bash
mkdir metapi && cd metapi
# 创建 docker-compose.yml(参见快速上手)
# 设置环境变量
export AUTH_TOKEN=your-admin-token
export PROXY_TOKEN=your-proxy-sk-token
# 启动
docker compose up -d
```
### 使用 `.env` 文件
如果不想每次 export,可以创建 `.env` 文件:
```bash
# .env
AUTH_TOKEN=your-admin-token
PROXY_TOKEN=your-proxy-sk-token
TZ=Asia/Shanghai
PORT=4000
```
```bash
docker compose --env-file .env up -d
```
> ⚠️ `.env` 文件包含敏感信息,请勿提交到 Git 仓库。
## Docker 命令部署
```bash
docker run -d --name metapi \
-p 4000:4000 \
-e AUTH_TOKEN=your-admin-token \
-e PROXY_TOKEN=your-proxy-sk-token \
-e TZ=Asia/Shanghai \
-v ./data:/app/data \
--restart unless-stopped \
1467078763/metapi:latest
```
> **路径说明:**
> - `./data:/app/data` — 相对路径,数据存到当前目录下的 `data` 文件夹
> - 也可以使用绝对路径:`/your/custom/path:/app/data`
## 反向代理
### Nginx
流式请求(SSE)需要关闭缓冲,否则流式输出会异常:
```nginx
server {
listen 443 ssl;
server_name your-domain.com;
ssl_certificate /path/to/cert.pem;
ssl_certificate_key /path/to/key.pem;
location / {
proxy_pass http://127.0.0.1:4000;
# SSE 关键配置
proxy_buffering off;
proxy_cache off;
proxy_set_header Connection '';
proxy_http_version 1.1;
chunked_transfer_encoding off;
# 标准代理头
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# 超时设置(长对话场景)
proxy_read_timeout 300s;
proxy_send_timeout 300s;
}
}
```
### Caddy
```
your-domain.com {
reverse_proxy localhost:4000 {
flush_interval -1
}
}
```
## 升级
```bash
# 拉取最新镜像
docker compose pull
# 重新启动(数据不受影响)
docker compose up -d
# 清理旧镜像
docker image prune -f
```
## 回滚
如果升级后出现问题:
1. **升级前备份**(建议每次升级前执行):
```bash
cp -r data/ data-backup-$(date +%Y%m%d)/
```
2. **回滚到指定版本**
```bash
# 修改 docker-compose.yml 中的 image tag
# 例如:image: 1467078763/metapi:v1.0.0
# 恢复数据
rm -rf data/
cp -r data-backup-20260228/ data/
# 重启
docker compose up -d
```
## 数据持久化
Metapi 的所有运行数据存储在 SQLite 数据库中,位于 `DATA_DIR`(默认 `./data`)目录下。
只要挂载了该目录,升级、重启都不会丢失数据。
### 备份策略建议
- 每日自动备份 `data/` 目录
- 保留最近 7~30 天的备份
- 重要操作前手动快照
## 下一步
- [配置说明](./configuration.md) — 详细环境变量
- [运维手册](./operations.md) — 日志排查、健康检查
+131
View File
@@ -0,0 +1,131 @@
# ❓ 常见问题(FAQ)
[返回文档中心](./README.md)
---
## 部署相关
### Q: 启动后无法访问管理后台
**A:** 排查步骤:
1. 确认容器正常运行:`docker compose ps`
2. 确认端口映射正确:`docker compose logs | grep listening`
3. 检查防火墙是否放行了端口(默认 4000)
4. 如果使用反向代理,确认代理配置正确
### Q: 登录失败,提示令牌无效
**A:** 检查 `AUTH_TOKEN` 是否与部署环境一致。注意:
- 使用 `.env` 文件时,确认文件路径正确
- 不要混用 `.env.example` 的默认值
- 环境变量中的值不需要加引号
### Q: Docker Compose 启动报错 `AUTH_TOKEN is required`
**A:** 使用了 `${AUTH_TOKEN:?}` 语法,需要先设置环境变量:
```bash
export AUTH_TOKEN=your-token
export PROXY_TOKEN=your-proxy-token
docker compose up -d
```
或使用 `.env` 文件。
---
## 代理相关
### Q: 下游客户端提示 401 / 403
**A:** 排查:
- 确认使用的是 `PROXY_TOKEN`(代理令牌),而非 `AUTH_TOKEN`(管理令牌)
- 确认反向代理正确透传了 `Authorization` 请求头
- 检查是否设置了 `ADMIN_IP_ALLOWLIST` 限制了访问
### Q: `GET /v1/models` 返回空列表
**A:** 可能原因:
1. 未添加任何站点或账号
2. 账号处于 `unhealthy` 状态 — 在账号管理页面检查并刷新
3. 未同步 Token — 在 Token 管理页面点击「同步」
4. 模型未发现 — 手动触发模型刷新
### Q: 非流式正常,但流式输出异常(卡住、乱码、截断)
**A:** 几乎都是反向代理配置问题。请确认:
1. Nginx:添加 `proxy_buffering off;`
2. 未改写 `text/event-stream` Content-Type
3. 无 CDN 或中间层缓存 SSE 响应
完整 Nginx 配置参考 [部署指南](./deployment.md#nginx)。
### Q: 某模型显示可用,但实际调用失败
**A:** 在管理后台的「模型测试器」中直测该模型,查看具体失败原因:
- **上游账号状态异常**:账号凭证过期或被禁用
- **通道处于冷却期**:近期该通道请求失败,系统自动冷却(默认 10 分钟)
- **上游模型下线**:上游站点已移除该模型
- **余额不足**:对应账号余额已耗尽
### Q: 请求延迟很高
**A:** 排查方向:
- 在代理日志中查看具体延迟分布
- 检查是否因冷却导致使用了较远/较慢的上游
- 调整路由权重,降低 `COST_WEIGHT`、提高成功率高的通道优先级
---
## 签到相关
### Q: 签到一直失败
**A:** 可能原因:
- 上游站点不支持签到功能
- 账号凭证已过期(系统会尝试自动重登录)
- 站点接口变更 — 检查 Metapi 是否为最新版本
### Q: 签到成功但奖励显示为 0
**A:** 部分站点的签到接口不返回奖励金额。Metapi 会尝试从收入日志推算奖励,但可能存在延迟。
---
## 数据相关
### Q: 数据迁移怎么做
**A:** 两种方式:
1. **应用内导入导出**(推荐):在管理后台 → 导入/导出 页面操作,支持选择性导出
2. **目录迁移**:直接拷贝 `data/` 目录到新环境
### Q: 如何清理历史数据
**A:** 代理日志和签到日志会持续增长。在管理后台对应页面可以清理历史记录。
### Q: 开源发布时如何避免泄露敏感信息
**A:**
- 确认 `.gitignore` 包含 `.env``data/``tmp/`
- 发布前执行一次密钥轮换(上游账号密码、通知 SMTP、Webhook 地址)
- 使用全新仓库或清理 Git 历史后再公开
- 检查备份 JSON 文件中是否包含凭证
---
## 更多帮助
- [提交 Issue](https://github.com/cita-777/metapi/issues) — 报告 Bug 或提出建议
- [文档中心](./README.md) — 查看所有文档
+124
View File
@@ -0,0 +1,124 @@
# 🚀 快速上手
本文档帮助你在 10 分钟内完成 Metapi 的首次部署。
[返回文档中心](./README.md)
---
## 前置条件
- Docker 与 Docker Compose(推荐)
- 或 Node.js 20+ 与 npm(本地开发)
## 方式一:Docker Compose 部署(推荐)
### 1. 创建项目目录
```bash
mkdir metapi && cd metapi
```
### 2. 创建 `docker-compose.yml`
```yaml
services:
metapi:
image: 1467078763/metapi:latest
ports:
- "4000:4000"
volumes:
- ./data:/app/data
environment:
AUTH_TOKEN: ${AUTH_TOKEN:?AUTH_TOKEN is required}
PROXY_TOKEN: ${PROXY_TOKEN:?PROXY_TOKEN is required}
CHECKIN_CRON: "0 8 * * *"
BALANCE_REFRESH_CRON: "0 * * * *"
PORT: ${PORT:-4000}
DATA_DIR: /app/data
TZ: ${TZ:-Asia/Shanghai}
restart: unless-stopped
```
### 3. 设置令牌并启动
```bash
export AUTH_TOKEN=your-admin-token
export PROXY_TOKEN=your-proxy-sk-token
docker compose up -d
```
### 4. 访问管理后台
打开 `http://localhost:4000`,使用 `AUTH_TOKEN` 的值登录。
## 方式二:本地开发启动
```bash
git clone https://github.com/cita-777/metapi.git
cd metapi
npm install
npm run db:migrate
npm run dev
```
- 前端地址:`http://localhost:5173`Vite dev server
- 后端地址:`http://localhost:4000`
## 首次使用流程
完成部署后,按以下顺序配置:
### 步骤 1:添加站点
进入 **站点管理**,添加你使用的上游中转站:
- 填写站点名称和 URL
- 选择平台类型(New API / One API / OneHub 等)
- 填写站点的管理员 API Key(可选,部分功能需要)
### 步骤 2:添加账号
进入 **账号管理**,为每个站点添加已注册的账号:
- 填入用户名和访问凭证
- 系统会自动登录并获取余额信息
- 启用自动签到(如站点支持)
### 步骤 3:同步 Token
进入 **Token 管理**
- 点击「同步」从上游账号拉取 API Key
- 或手动添加已有的 API Key
### 步骤 4:检查路由
进入 **路由管理**
- 系统会自动发现模型并生成路由规则
- 可以手动调整通道的优先级和权重
### 步骤 5:验证代理
使用 curl 快速验证:
```bash
# 检查模型列表
curl -sS http://localhost:4000/v1/models \
-H "Authorization: Bearer your-proxy-sk-token"
# 测试对话
curl -sS http://localhost:4000/v1/chat/completions \
-H "Authorization: Bearer your-proxy-sk-token" \
-H "Content-Type: application/json" \
-d '{"model":"gpt-4o-mini","messages":[{"role":"user","content":"hi"}]}'
```
返回正常响应,说明一切就绪。
## 下一步
- [部署指南](./deployment.md) — 反向代理、HTTPS、升级策略
- [配置说明](./configuration.md) — 详细环境变量与路由参数
- [客户端接入](./client-integration.md) — 对接 Open WebUI、Cherry Studio 等
Binary file not shown.

After

Width:  |  Height:  |  Size: 5.1 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 95 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 121 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 478 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

+145
View File
@@ -0,0 +1,145 @@
# 🔧 运维手册
[返回文档中心](./README.md)
---
## 数据备份
### 方式一:目录备份(推荐)
Metapi 的所有数据存储在 `data/` 目录下的 SQLite 数据库中。最简单的备份方式:
```bash
# 手动备份
cp -r data/ data-backup-$(date +%Y%m%d)/
# 自动备份(crontab)
0 2 * * * cp -r /path/to/metapi/data/ /path/to/backups/metapi-$(date +\%Y\%m\%d)/
```
建议:
- 每日自动备份一次
- 保留最近 7~30 天
- 备份文件不要提交到 Git
### 方式二:应用内导出
在管理后台 → 「导入/导出」页面:
- **全量导出**:站点、账号、Token、路由、设置
- **仅账号**:站点和账号信息
- **仅偏好**:设置和通知配置
导出为 JSON 文件,可用于跨实例迁移。
## 数据恢复
### 目录恢复
```bash
# 1. 停止容器
docker compose down
# 2. 替换数据目录
rm -rf data/
cp -r data-backup-20260228/ data/
# 3. 重新启动
docker compose up -d
```
### 应用内导入
在管理后台 → 「导入/导出」页面上传之前导出的 JSON 文件。系统会自动校验数据完整性。
## 日志排查
### Docker 环境
```bash
# 查看实时日志
docker compose logs -f
# 查看最近 100 行
docker compose logs --tail 100
# 只看错误
docker compose logs -f 2>&1 | grep -i error
```
### 本地开发
```bash
npm run dev
# 日志直接输出到终端
```
### 重点关注的日志
| 关键词 | 含义 | 处理方式 |
|--------|------|----------|
| `auth failed` | 上游站点鉴权失败 | 检查账号凭证是否过期 |
| `no available channel` | 路由无可用通道 | 检查 Token 是否同步、通道是否被冷却 |
| `notify failed` | 通知发送失败 | 检查通知渠道配置 |
| `checkin failed` | 签到失败 | 检查账号状态和站点连通性 |
| `balance refresh failed` | 余额刷新失败 | 检查账号凭证 |
## 健康检查
### 手动检查
```bash
# 检查服务是否响应
curl -sS http://localhost:4000/v1/models \
-H "Authorization: Bearer <PROXY_TOKEN>" | head -5
# 检查特定模型可用性
curl -sS http://localhost:4000/v1/chat/completions \
-H "Authorization: Bearer <PROXY_TOKEN>" \
-H "Content-Type: application/json" \
-d '{"model":"gpt-4o-mini","messages":[{"role":"user","content":"ping"}]}'
```
### 自动化监控建议
- 定时请求 `/v1/models`,检查返回状态码和模型数量
- 定时抽样请求 `/v1/chat/completions`,检查端到端可用性
- 监控磁盘空间(SQLite WAL 日志可能增长)
- 监控 Docker 容器状态
## 常见运维操作
### 清理代理日志
代理日志会持续增长。如果磁盘空间紧张,可在管理后台 → 代理日志页面清理历史记录。
### 重置账号状态
如果账号状态异常(`unhealthy`),可以在账号管理页面:
1. 点击「刷新」重新检测账号健康状态
2. 如凭证过期,系统会尝试自动重登录
3. 手动禁用/启用账号
### 强制刷新模型
在管理后台手动触发:
- 余额刷新:立即更新所有账号余额
- 模型刷新:重新发现所有上游模型
- 签到:立即执行一次签到
## 发布前检查清单
如果你在本地开发并准备发布:
- [ ] `npm test` 通过
- [ ] `npm run build` 通过
- [ ] `.env`、`data/`、`tmp/` 未提交到 Git
- [ ] 敏感凭证已从代码中移除
## 下一步
- [常见问题](./faq.md) — 常见报错与修复
- [配置说明](./configuration.md) — 环境变量详解
+148
View File
@@ -0,0 +1,148 @@
# Metapi 项目目录规范
本文档说明 Metapi 的目录组织方式,目标是让新功能落位更稳定、查找更直接、临时文件不污染根目录。
## 顶层目录
```text
metapi/
├── data/ # 运行时数据(SQLite 数据库)
├── dist/ # 构建产物(前后端)
├── docker/ # 容器相关文件
│ ├── Dockerfile # 多阶段构建(Alpine
│ └── docker-compose.yml # Docker Compose 编排
├── docs/ # 文档与资源
│ ├── logos/ # Logo 素材
│ │ └── drafts/ # Logo 草稿
│ └── project-structure.md
├── drizzle/ # Drizzle ORM 迁移 SQL 与元数据
├── scripts/ # 项目脚本(按场景分组)
│ └── dev/
│ └── restart.bat # Windows 开发环境快捷重启
├── src/
│ ├── server/ # Fastify 后端服务
│ └── web/ # React 前端应用
├── tmp/ # 临时调试文件(已 gitignore
├── package.json
├── tsconfig.json
├── tsconfig.server.json
├── vite.config.ts
└── README.md
```
## 后端目录约定(`src/server`
```text
src/server/
├── index.ts # 服务启动入口
├── config.ts # 环境变量与配置加载
├── middleware/
│ └── auth.ts # 认证中间件
├── db/
│ ├── index.ts # 数据库连接
│ ├── schema.ts # Drizzle 表定义(全部 Schema
│ └── migrate.ts # 迁移执行器
├── routes/
│ ├── api/ # 管理 API 路由
│ │ ├── auth.ts # 登录 / 登出
│ │ ├── sites.ts # 站点 CRUD
│ │ ├── accounts.ts # 账号管理
│ │ ├── accountTokens.ts # Token 同步与管理
│ │ ├── tokens.ts # Token 批量操作
│ │ ├── tokenRoutes.ts # 路由规则管理
│ │ ├── checkin.ts # 签到触发与日志
│ │ ├── stats.ts # 仪表盘统计
│ │ ├── search.ts # 全局搜索
│ │ ├── events.ts # 事件日志
│ │ ├── tasks.ts # 后台任务状态
│ │ ├── settings.ts # 运行时配置
│ │ ├── monitor.ts # 外部监控集成
│ │ └── test.ts # 测试 / 验证端点
│ └── proxy/ # 代理路由
│ ├── router.ts # 代理路由注册
│ ├── chat.ts # Chat Completions & Claude Messages
│ ├── completions.ts # Legacy Completions
│ ├── embeddings.ts # 向量嵌入
│ ├── images.ts # 图像生成
│ ├── models.ts # 模型列表
│ └── chatFormats.ts # OpenAI <-> Claude 格式转换
└── services/
├── platforms/ # 平台适配器
│ ├── base.ts # 适配器接口定义
│ ├── index.ts # 适配器注册表
│ ├── newApi.ts # New API 适配器
│ ├── oneApi.ts # One API 适配器
│ ├── oneHub.ts # OneHub 适配器
│ ├── doneHub.ts # DoneHub 适配器
│ ├── veloera.ts # Veloera 适配器
│ ├── anyrouter.ts # AnyRouter 适配器
│ └── sub2api.ts # Sub2API 适配器
├── tokenRouter.ts # 智能路由引擎
├── checkinService.ts # 签到执行
├── checkinScheduler.ts # 签到调度
├── checkinRewardParser.ts # 奖励金额解析
├── balanceService.ts # 余额刷新
├── modelService.ts # 模型发现与管理
├── modelPricingService.ts # 模型定价
├── modelAnalysisService.ts # 使用分析
├── notifyService.ts # 多渠道通知
├── notificationThrottle.ts # 通知节流
├── alertService.ts # 告警事件
├── alertRules.ts # 告警规则
├── backupService.ts # 数据导入导出
├── backgroundTaskService.ts # 后台任务管理
├── accountCredentialService.ts # 凭证加密
├── accountHealthService.ts # 健康状态管理
├── accountExtraConfig.ts # 平台专属配置
├── proxyRetryPolicy.ts # 重试策略
├── proxyUsageParser.ts # Token 用量解析
├── proxyUsageFallbackService.ts # 余额兜底估算
├── failureReasonService.ts # 错误分类
├── siteDetector.ts # 平台自动检测
├── dailySummaryService.ts # 每日摘要
├── todayIncomeRewardService.ts # 今日收入快照
├── localTimeService.ts # 时区处理
├── upstreamModelDescriptionService.ts # 上游模型描述缓存
├── startupInfo.ts # 启动信息
└── settings.ts # 运行时配置管理
```
## 前端目录约定(`src/web`
```text
src/web/
├── App.tsx # 应用入口与路由配置
├── main.tsx # Vite 入口
├── api.ts # 统一 API 请求客户端
├── authSession.ts # 认证会话管理
├── i18n.tsx # 国际化
├── components/ # 通用 UI 组件
│ ├── BrandIcon.tsx # 模型品牌图标
│ ├── ModelAnalysisPanel.tsx # 消费分析图表
│ ├── SearchModal.tsx # 全局搜索弹窗
│ ├── NotificationPanel.tsx # 实时事件面板
│ ├── Toast.tsx # 通知提示
│ ├── SiteDistributionChart.tsx # 余额分布饼图
│ └── SiteTrendChart.tsx # 消费趋势图
├── pages/ # 页面组件(路由页)
│ ├── Dashboard.tsx # 仪表盘
│ ├── Sites.tsx # 站点管理
│ ├── Accounts.tsx # 账号管理
│ ├── Tokens.tsx # Token 管理
│ ├── TokenRoutes.tsx # 路由规则
│ ├── Models.tsx # 模型广场
│ ├── CheckinLog.tsx # 签到日志
│ ├── ProxyLogs.tsx # 代理日志
│ ├── Settings.tsx # 系统设置
│ ├── About.tsx # 关于页面
│ └── helpers/ # 页面级纯逻辑 / 工具函数(含测试)
└── public/ # 静态资源
```
## 目录卫生规则
- 所有调试临时文件放入 `tmp/`,不要散落在项目根目录。
- 开发脚本统一放入 `scripts/<scene>/`,根目录仅保留必要入口文件。
- 素材草稿统一归档到 `docs/logos/drafts/`,避免根目录堆积二进制文件。
- 测试文件与被测文件同目录(`*.test.ts`),方便就近维护。
- 平台适配器新增时,在 `services/platforms/` 中创建独立文件并注册到 `index.ts`
+8
View File
@@ -0,0 +1,8 @@
import { defineConfig } from 'drizzle-kit';
// Drizzle Kit CLI configuration: reads table definitions from the server
// schema file and writes generated migration SQL/metadata into ./drizzle,
// targeting the local SQLite database file.
const drizzleConfig = defineConfig({
  dialect: 'sqlite',
  schema: './src/server/db/schema.ts',
  out: './drizzle',
  dbCredentials: { url: './data/hub.db' },
});

export default drizzleConfig;
+8819
View File
File diff suppressed because it is too large Load Diff
+77
View File
@@ -0,0 +1,77 @@
{
"name": "metapi",
"version": "1.0.0",
"description": "Meta-layer management and unified proxy for AI API aggregation platforms",
"keywords": [
"ai",
"api-gateway",
"proxy",
"fastify",
"openai",
"claude",
"aggregator"
],
"homepage": "https://github.com/cita-777/metapi#readme",
"bugs": {
"url": "https://github.com/cita-777/metapi/issues"
},
"repository": {
"type": "git",
"url": "git+https://github.com/cita-777/metapi.git"
},
"license": "MIT",
"type": "module",
"scripts": {
"dev": "concurrently \"tsx watch src/server/index.ts\" \"vite\"",
"build:web": "vite build",
"build:server": "tsc -p tsconfig.server.json",
"build": "npm run build:web && npm run build:server",
"start": "node dist/server/index.js",
"db:generate": "drizzle-kit generate",
"db:migrate": "tsx src/server/db/migrate.ts",
"test": "vitest run --root .",
"test:watch": "vitest --root ."
},
"dependencies": {
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
"@fastify/cors": "^11.2.0",
"@fastify/static": "^9.0.0",
"@visactor/react-vchart": "^2.0.16",
"better-sqlite3": "^11.3.0",
"dotenv": "^16.4.5",
"drizzle-orm": "^0.36.4",
"fastify": "^5.7.4",
"minimatch": "^10.2.4",
"minimist": "^1.2.8",
"node-cron": "^3.0.3",
"nodemailer": "^8.0.1",
"undici": "^6.20.1"
},
"devDependencies": {
"@tailwindcss/vite": "^4.0.0",
"@types/better-sqlite3": "^7.6.11",
"@types/node": "^22.10.1",
"@types/node-cron": "^3.0.11",
"@types/nodemailer": "^7.0.11",
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@vitejs/plugin-react": "^4.3.4",
"concurrently": "^9.1.0",
"drizzle-kit": "^0.28.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-router-dom": "^6.28.0",
"react-test-renderer": "^18.3.1",
"sharp": "^0.34.5",
"tailwindcss": "^4.0.0",
"tsx": "^4.19.2",
"typescript": "^5.7.2",
"vite": "^6.0.3",
"vitest": "^2.1.8"
},
"overrides": {
"minimist": "^1.2.8"
}
}
+4
View File
@@ -0,0 +1,4 @@
@echo off
setlocal EnableExtensions
rem Root-level convenience wrapper: forwards all arguments to the real
rem restart script under scripts\dev\.
call "%~dp0scripts\dev\restart.bat" %*
+46
View File
@@ -0,0 +1,46 @@
@echo off
setlocal EnableExtensions
rem Dev helper: kill stale listeners, push schema changes, relaunch dev servers.
rem Try to switch output to UTF-8; ignore failures.
chcp 65001 >nul 2>&1
rem This script lives in scripts\dev\; the project root is two levels up.
set "SCRIPT_DIR=%~dp0"
set "PROJECT_ROOT=%SCRIPT_DIR%..\.."
cd /d "%PROJECT_ROOT%"
echo [INFO] Stopping old processes...
rem Read ports from .env with defaults.
rem NOTE(review): defaults here (9000/9001) differ from the documented
rem PORT=4000 default elsewhere -- confirm which applies without a .env.
set "BACKEND_PORT=9000"
set "PROXY_PORT=9001"
set "FRONTEND_PORT=5173"
for /f "tokens=2 delims==" %%v in ('findstr /B /C:"PORT=" .env 2^>nul') do set "BACKEND_PORT=%%v"
for /f "tokens=2 delims==" %%v in ('findstr /B /C:"PROXY_PORT=" .env 2^>nul') do set "PROXY_PORT=%%v"
for /f "tokens=2 delims==" %%v in ('findstr /B /C:"FRONTEND_PORT=" .env 2^>nul') do set "FRONTEND_PORT=%%v"
rem Kill listeners on backend/proxy/frontend ports.
for %%p in (%BACKEND_PORT% %PROXY_PORT% %FRONTEND_PORT%) do (
  for /f "tokens=5" %%a in ('netstat -aon ^| findstr ":%%p " ^| findstr /I LISTENING 2^>nul') do (
    if not "%%a"=="0" (
      echo Killing PID %%a on port %%p
      taskkill /F /PID %%a >nul 2>&1
    )
  )
)
rem Crude ~2 second pause so released ports settle before restart.
ping -n 3 127.0.0.1 >nul
echo [INFO] Syncing database schema...
call npx drizzle-kit push --force 2>nul || echo (drizzle-kit push skipped)
echo [INFO] Starting development servers
echo Backend: http://localhost:%BACKEND_PORT%
echo Frontend: http://localhost:%FRONTEND_PORT%
echo.
rem --no-dev: do everything above but skip launching npm run dev.
if /I "%~1"=="--no-dev" (
  echo --no-dev set; skipping npm run dev
  exit /b 0
)
call npm run dev
+56
View File
@@ -0,0 +1,56 @@
import 'dotenv/config';
/**
 * Interpret a string environment value as a boolean flag.
 *
 * Accepts "1", "true", "yes", "on" (case-insensitive, surrounding
 * whitespace ignored) as true; every other string is false. When the
 * variable is unset, returns `fallback`.
 */
function parseBoolean(value: string | undefined, fallback = false): boolean {
  if (value === undefined) return fallback;
  const truthyForms = ['1', 'true', 'yes', 'on'];
  return truthyForms.includes(value.trim().toLowerCase());
}
/**
 * Interpret a string environment value as a finite number.
 *
 * Returns `fallback` when the variable is unset, blank, or not a finite
 * number. Fix: the previous version passed the raw string to Number(),
 * and `Number('') === 0`, so a blank assignment such as `PORT=` in .env
 * (the shipped .env.example leaves several values blank) produced 0
 * instead of the fallback. Blank/whitespace-only values now fall back.
 */
function parseNumber(value: string | undefined, fallback: number): number {
  if (value === undefined) return fallback;
  const trimmed = value.trim();
  if (trimmed === '') return fallback; // Number('') would coerce to 0
  const parsed = Number(trimmed);
  return Number.isFinite(parsed) ? parsed : fallback;
}
/**
 * Split a comma-separated environment value into trimmed, non-empty items.
 * Unset or empty input yields an empty list.
 */
function parseCsvList(value: string | undefined): string[] {
  if (!value) return [];
  const items: string[] = [];
  for (const piece of value.split(',')) {
    const trimmed = piece.trim();
    if (trimmed.length > 0) items.push(trimmed);
  }
  return items;
}
/**
 * Application configuration resolved from environment variables.
 *
 * Properties are intentionally mutable: src/server/index.ts overlays
 * persisted runtime settings from the database on top of these defaults
 * at startup.
 *
 * NOTE(review): the 'change-me' fallbacks for authToken/proxyToken are
 * insecure placeholders — deployments must override them (.env.example
 * documents both as required).
 */
export const config = {
  authToken: process.env.AUTH_TOKEN || 'change-me',
  proxyToken: process.env.PROXY_TOKEN || 'change-me-proxy',
  // Falls back to AUTH_TOKEN so existing installs keep a stable secret.
  accountCredentialSecret: process.env.ACCOUNT_CREDENTIAL_SECRET || process.env.AUTH_TOKEN || 'change-me',
  checkinCron: process.env.CHECKIN_CRON || '0 8 * * *',
  balanceRefreshCron: process.env.BALANCE_REFRESH_CRON || '0 * * * *',
  webhookUrl: process.env.WEBHOOK_URL || '',
  barkUrl: process.env.BARK_URL || '',
  webhookEnabled: parseBoolean(process.env.WEBHOOK_ENABLED, true),
  barkEnabled: parseBoolean(process.env.BARK_ENABLED, true),
  serverChanEnabled: parseBoolean(process.env.SERVERCHAN_ENABLED, true),
  serverChanKey: process.env.SERVERCHAN_KEY || '',
  smtpEnabled: parseBoolean(process.env.SMTP_ENABLED, false),
  smtpHost: process.env.SMTP_HOST || '',
  // Fix: was `parseInt(process.env.SMTP_PORT || '587')` (no radix), which
  // yields NaN for a non-numeric SMTP_PORT. Use the file's own parseNumber
  // helper so invalid input falls back to 587, consistent with `port` below.
  smtpPort: Math.trunc(parseNumber(process.env.SMTP_PORT, 587)),
  smtpSecure: parseBoolean(process.env.SMTP_SECURE, false),
  smtpUser: process.env.SMTP_USER || '',
  smtpPass: process.env.SMTP_PASS || '',
  smtpFrom: process.env.SMTP_FROM || '',
  smtpTo: process.env.SMTP_TO || '',
  // Clamp to a non-negative whole number of seconds.
  notifyCooldownSec: Math.max(0, Math.trunc(parseNumber(process.env.NOTIFY_COOLDOWN_SEC, 300))),
  // Empty list means "allow every client IP" (see middleware/auth tests).
  adminIpAllowlist: parseCsvList(process.env.ADMIN_IP_ALLOWLIST),
  port: Math.trunc(parseNumber(process.env.PORT, 4000)),
  dataDir: process.env.DATA_DIR || './data',
  // Lower-bounded so downstream division by unit cost stays finite.
  routingFallbackUnitCost: Math.max(1e-6, parseNumber(process.env.ROUTING_FALLBACK_UNIT_COST, 1)),
  routingWeights: {
    baseWeightFactor: parseNumber(process.env.BASE_WEIGHT_FACTOR, 0.5),
    valueScoreFactor: parseNumber(process.env.VALUE_SCORE_FACTOR, 0.5),
    costWeight: parseNumber(process.env.COST_WEIGHT, 0.4),
    balanceWeight: parseNumber(process.env.BALANCE_WEIGHT, 0.3),
    usageWeight: parseNumber(process.env.USAGE_WEIGHT, 0.3),
  },
};
+106
View File
@@ -0,0 +1,106 @@
import Database from 'better-sqlite3';
import { drizzle } from 'drizzle-orm/better-sqlite3';
import * as schema from './schema.js';
import { config } from '../config.js';
import { mkdirSync } from 'fs';
import { dirname } from 'path';
// SQLite database file lives inside the configured data directory.
const dbPath = `${config.dataDir}/hub.db`;
// Create the parent directory up front so better-sqlite3 can open the file.
mkdirSync(dirname(dbPath), { recursive: true });
const sqlite = new Database(dbPath);
// WAL mode for better concurrent reads; SQLite leaves FK enforcement off
// unless enabled per-connection.
sqlite.pragma('journal_mode = WAL');
sqlite.pragma('foreign_keys = ON');
/**
 * Report whether `column` exists on `table`, via PRAGMA table_info.
 *
 * NOTE: the table name is interpolated directly into the SQL text
 * (PRAGMA does not accept bound parameters), so callers must pass only
 * trusted, hard-coded table names — as the migration helpers below do.
 */
function tableColumnExists(table: string, column: string): boolean {
  const columns = sqlite.prepare(`PRAGMA table_info(${table})`).all() as Array<{ name?: string }>;
  for (const info of columns) {
    if (info.name === column) return true;
  }
  return false;
}
/**
 * Idempotent, hand-rolled bootstrap for the token-management tables.
 * Runs on every startup before the connection is handed to Drizzle:
 *  1. create `account_tokens` when missing;
 *  2. add `route_channels.token_id` when missing (older databases predate
 *     per-token routing);
 *  3. backfill one 'legacy' token row per account that still stores its
 *     API key in `accounts.api_token`;
 *  4. create `token_model_availability` and its unique index.
 */
function ensureTokenManagementSchema() {
  sqlite.exec(`
    CREATE TABLE IF NOT EXISTS account_tokens (
      id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
      account_id integer NOT NULL,
      name text NOT NULL,
      token text NOT NULL,
      source text DEFAULT 'manual',
      enabled integer DEFAULT true,
      is_default integer DEFAULT false,
      created_at text DEFAULT (datetime('now')),
      updated_at text DEFAULT (datetime('now')),
      FOREIGN KEY (account_id) REFERENCES accounts(id) ON DELETE cascade
    );
  `);
  // Lazily add the column introduced for per-token channel routing.
  if (!tableColumnExists('route_channels', 'token_id')) {
    sqlite.exec('ALTER TABLE route_channels ADD COLUMN token_id integer;');
  }
  // Backfill: promote each non-blank accounts.api_token into account_tokens.
  // The NOT EXISTS guard makes the insert safe to re-run on every boot.
  sqlite.exec(`
    INSERT INTO account_tokens (account_id, name, token, source, enabled, is_default, created_at, updated_at)
    SELECT
      a.id,
      'default',
      a.api_token,
      'legacy',
      true,
      true,
      datetime('now'),
      datetime('now')
    FROM accounts AS a
    WHERE
      a.api_token IS NOT NULL
      AND trim(a.api_token) <> ''
      AND NOT EXISTS (
        SELECT 1 FROM account_tokens AS t
        WHERE t.account_id = a.id
          AND t.token = a.api_token
      );
  `);
  sqlite.exec(`
    CREATE TABLE IF NOT EXISTS token_model_availability (
      id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
      token_id integer NOT NULL,
      model_name text NOT NULL,
      available integer,
      latency_ms integer,
      checked_at text DEFAULT (datetime('now')),
      FOREIGN KEY (token_id) REFERENCES account_tokens(id) ON DELETE cascade
    );
  `);
  // Enforce a single availability row per (token, model) pair.
  sqlite.exec(`
    CREATE UNIQUE INDEX IF NOT EXISTS token_model_availability_token_model_unique
    ON token_model_availability(token_id, model_name);
  `);
}
/**
 * Idempotent bootstrap for `sites.status`.
 * Adds the column when missing, then normalizes stored values to the
 * canonical lower-case vocabulary ('active' | 'disabled'); anything
 * blank, NULL, or unrecognized is reset to 'active'.
 */
function ensureSiteStatusSchema() {
  if (!tableColumnExists('sites', 'status')) {
    sqlite.exec(`ALTER TABLE sites ADD COLUMN status text DEFAULT 'active';`);
  }
  // Canonicalize casing/whitespace for values that are already recognized.
  sqlite.exec(`
    UPDATE sites
    SET status = lower(trim(status))
    WHERE status IS NOT NULL
      AND lower(trim(status)) IN ('active', 'disabled')
      AND status != lower(trim(status));
  `);
  // Default everything else (NULL, blank, unknown) back to 'active'.
  sqlite.exec(`
    UPDATE sites
    SET status = 'active'
    WHERE status IS NULL
      OR trim(status) = ''
      OR lower(trim(status)) NOT IN ('active', 'disabled');
  `);
}
// Run the idempotent hand-rolled migrations before exposing the connection.
ensureTokenManagementSchema();
ensureSiteStatusSchema();
// Drizzle handle shared by the rest of the server, plus the raw schema module.
export const db = drizzle(sqlite, { schema });
export { schema };
+15
View File
@@ -0,0 +1,15 @@
import Database from 'better-sqlite3';
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { migrate } from 'drizzle-orm/better-sqlite3/migrator';
import { config } from '../config.js';
import { mkdirSync } from 'fs';
import { dirname } from 'path';
// One-shot migration runner (invoked via `npm run db:migrate`).
// Opens the SQLite database under DATA_DIR, creating the directory if needed.
const dbPath = `${config.dataDir}/hub.db`;
mkdirSync(dirname(dbPath), { recursive: true });
const sqlite = new Database(dbPath);
const db = drizzle(sqlite);
// Apply any pending SQL migrations generated by drizzle-kit, then release
// the file handle so the process exits cleanly.
migrate(db, { migrationsFolder: './drizzle' });
sqlite.close();
console.log('Migration complete.');
+176
View File
@@ -0,0 +1,176 @@
import { sqliteTable, text, integer, real, uniqueIndex, index } from 'drizzle-orm/sqlite-core';
import { sql } from 'drizzle-orm';
// Upstream aggregation sites managed by Metapi.
export const sites = sqliteTable('sites', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  name: text('name').notNull(),
  url: text('url').notNull(),
  platform: text('platform').notNull(), // 'new-api' | 'one-api' | 'veloera' | 'one-hub' | 'done-hub' | 'sub2api'
  status: text('status').notNull().default('active'), // 'active' | 'disabled'
  apiKey: text('api_key'), // optional site-level admin key
  createdAt: text('created_at').default(sql`(datetime('now'))`),
  updatedAt: text('updated_at').default(sql`(datetime('now'))`),
}, (table) => ({
  statusIdx: index('sites_status_idx').on(table.status),
}));

// Login accounts registered on a site; deleted together with the site (cascade).
export const accounts = sqliteTable('accounts', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  siteId: integer('site_id').notNull().references(() => sites.id, { onDelete: 'cascade' }),
  username: text('username'),
  accessToken: text('access_token').notNull(),
  apiToken: text('api_token'), // legacy single API key; backfilled into account_tokens at startup
  balance: real('balance').default(0),
  balanceUsed: real('balance_used').default(0),
  quota: real('quota').default(0),
  unitCost: real('unit_cost'),
  valueScore: real('value_score').default(0),
  status: text('status').default('active'), // 'active' | 'disabled' | 'expired'
  checkinEnabled: integer('checkin_enabled', { mode: 'boolean' }).default(true),
  lastCheckinAt: text('last_checkin_at'),
  lastBalanceRefresh: text('last_balance_refresh'),
  extraConfig: text('extra_config'), // JSON string
  createdAt: text('created_at').default(sql`(datetime('now'))`),
  updatedAt: text('updated_at').default(sql`(datetime('now'))`),
}, (table) => ({
  siteIdIdx: index('accounts_site_id_idx').on(table.siteId),
  statusIdx: index('accounts_status_idx').on(table.status),
  siteStatusIdx: index('accounts_site_status_idx').on(table.siteId, table.status),
}));

// API keys belonging to an account (synced from upstream, added manually,
// or migrated from the legacy accounts.api_token column).
export const accountTokens = sqliteTable('account_tokens', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  accountId: integer('account_id').notNull().references(() => accounts.id, { onDelete: 'cascade' }),
  name: text('name').notNull(),
  token: text('token').notNull(),
  source: text('source').default('manual'), // 'manual' | 'sync' | 'legacy'
  enabled: integer('enabled', { mode: 'boolean' }).default(true),
  isDefault: integer('is_default', { mode: 'boolean' }).default(false),
  createdAt: text('created_at').default(sql`(datetime('now'))`),
  updatedAt: text('updated_at').default(sql`(datetime('now'))`),
}, (table) => ({
  accountIdIdx: index('account_tokens_account_id_idx').on(table.accountId),
  accountEnabledIdx: index('account_tokens_account_enabled_idx').on(table.accountId, table.enabled),
  enabledIdx: index('account_tokens_enabled_idx').on(table.enabled),
}));
// Per-account daily check-in attempts and their outcomes.
export const checkinLogs = sqliteTable('checkin_logs', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  accountId: integer('account_id').notNull().references(() => accounts.id, { onDelete: 'cascade' }),
  status: text('status').notNull(), // 'success' | 'failed' | 'skipped'
  message: text('message'),
  reward: text('reward'), // reward amount as reported/parsed; stored as text
  createdAt: text('created_at').default(sql`(datetime('now'))`),
}, (table) => ({
  accountCreatedIdx: index('checkin_logs_account_created_at_idx').on(table.accountId, table.createdAt),
  createdAtIdx: index('checkin_logs_created_at_idx').on(table.createdAt),
  statusIdx: index('checkin_logs_status_idx').on(table.status),
}));

// Latest availability probe result per (account, model).
export const modelAvailability = sqliteTable('model_availability', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  accountId: integer('account_id').notNull().references(() => accounts.id, { onDelete: 'cascade' }),
  modelName: text('model_name').notNull(),
  available: integer('available', { mode: 'boolean' }), // null = not yet probed
  latencyMs: integer('latency_ms'),
  checkedAt: text('checked_at').default(sql`(datetime('now'))`),
}, (table) => ({
  accountModelUnique: uniqueIndex('model_availability_account_model_unique').on(table.accountId, table.modelName),
  accountAvailableIdx: index('model_availability_account_available_idx').on(table.accountId, table.available),
  modelNameIdx: index('model_availability_model_name_idx').on(table.modelName),
}));

// Latest availability probe result per (token, model) — finer-grained than
// the account-level table above.
export const tokenModelAvailability = sqliteTable('token_model_availability', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  tokenId: integer('token_id').notNull().references(() => accountTokens.id, { onDelete: 'cascade' }),
  modelName: text('model_name').notNull(),
  available: integer('available', { mode: 'boolean' }),
  latencyMs: integer('latency_ms'),
  checkedAt: text('checked_at').default(sql`(datetime('now'))`),
}, (table) => ({
  tokenModelUnique: uniqueIndex('token_model_availability_token_model_unique').on(table.tokenId, table.modelName),
  tokenAvailableIdx: index('token_model_availability_token_available_idx').on(table.tokenId, table.available),
  modelNameIdx: index('token_model_availability_model_name_idx').on(table.modelName),
  availableIdx: index('token_model_availability_available_idx').on(table.available),
}));
// A routing rule: matches incoming model names by pattern and owns an
// ordered set of candidate channels (route_channels).
export const tokenRoutes = sqliteTable('token_routes', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  modelPattern: text('model_pattern').notNull(),
  modelMapping: text('model_mapping'), // JSON
  enabled: integer('enabled', { mode: 'boolean' }).default(true),
  createdAt: text('created_at').default(sql`(datetime('now'))`),
  updatedAt: text('updated_at').default(sql`(datetime('now'))`),
}, (table) => ({
  modelPatternIdx: index('token_routes_model_pattern_idx').on(table.modelPattern),
  enabledIdx: index('token_routes_enabled_idx').on(table.enabled),
}));

// One upstream channel inside a route: (account, optional token) plus
// routing knobs and accumulated success/failure/latency/cost counters.
export const routeChannels = sqliteTable('route_channels', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  routeId: integer('route_id').notNull().references(() => tokenRoutes.id, { onDelete: 'cascade' }),
  accountId: integer('account_id').notNull().references(() => accounts.id, { onDelete: 'cascade' }),
  // Nullable on purpose: deleting a token demotes the channel rather than removing it.
  tokenId: integer('token_id').references(() => accountTokens.id, { onDelete: 'set null' }),
  priority: integer('priority').default(0),
  weight: integer('weight').default(10),
  enabled: integer('enabled', { mode: 'boolean' }).default(true),
  manualOverride: integer('manual_override', { mode: 'boolean' }).default(false),
  successCount: integer('success_count').default(0),
  failCount: integer('fail_count').default(0),
  totalLatencyMs: integer('total_latency_ms').default(0),
  totalCost: real('total_cost').default(0),
  lastUsedAt: text('last_used_at'),
  lastFailAt: text('last_fail_at'),
  cooldownUntil: text('cooldown_until'), // channel is skipped until this timestamp
}, (table) => ({
  routeIdIdx: index('route_channels_route_id_idx').on(table.routeId),
  accountIdIdx: index('route_channels_account_id_idx').on(table.accountId),
  tokenIdIdx: index('route_channels_token_id_idx').on(table.tokenId),
  routeEnabledIdx: index('route_channels_route_enabled_idx').on(table.routeId, table.enabled),
  routeTokenIdx: index('route_channels_route_token_idx').on(table.routeId, table.tokenId),
}));
// One row per proxied request; foreign keys are intentionally soft
// (plain integers) so logs survive deletion of routes/channels/accounts.
export const proxyLogs = sqliteTable('proxy_logs', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  routeId: integer('route_id'),
  channelId: integer('channel_id'),
  accountId: integer('account_id'),
  modelRequested: text('model_requested'),
  modelActual: text('model_actual'), // after any route model-mapping
  status: text('status'), // 'success' | 'failed' | 'retried'
  httpStatus: integer('http_status'),
  latencyMs: integer('latency_ms'),
  promptTokens: integer('prompt_tokens'),
  completionTokens: integer('completion_tokens'),
  totalTokens: integer('total_tokens'),
  estimatedCost: real('estimated_cost'),
  errorMessage: text('error_message'),
  retryCount: integer('retry_count').default(0),
  createdAt: text('created_at').default(sql`(datetime('now'))`),
}, (table) => ({
  createdAtIdx: index('proxy_logs_created_at_idx').on(table.createdAt),
  accountCreatedIdx: index('proxy_logs_account_created_at_idx').on(table.accountId, table.createdAt),
  statusCreatedIdx: index('proxy_logs_status_created_at_idx').on(table.status, table.createdAt),
  modelActualCreatedIdx: index('proxy_logs_model_actual_created_at_idx').on(table.modelActual, table.createdAt),
}));

// Key/value store for runtime settings; values are JSON-encoded and
// overlaid onto `config` at server startup.
export const settings = sqliteTable('settings', {
  key: text('key').primaryKey(),
  value: text('value'), // JSON
});

// In-app event feed (shown in the notification panel).
export const events = sqliteTable('events', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  type: text('type').notNull(), // 'checkin' | 'balance' | 'token' | 'proxy' | 'status'
  title: text('title').notNull(),
  message: text('message'),
  level: text('level').notNull().default('info'), // 'info' | 'warning' | 'error'
  read: integer('read', { mode: 'boolean' }).default(false),
  relatedId: integer('related_id'),
  relatedType: text('related_type'), // 'account' | 'site' | 'route'
  createdAt: text('created_at').default(sql`(datetime('now'))`),
}, (table) => ({
  readCreatedIdx: index('events_read_created_at_idx').on(table.read, table.createdAt),
  typeCreatedIdx: index('events_type_created_at_idx').on(table.type, table.createdAt),
  createdAtIdx: index('events_created_at_idx').on(table.createdAt),
}));
+204
View File
@@ -0,0 +1,204 @@
import Fastify from 'fastify';
import cors from '@fastify/cors';
import fastifyStatic from '@fastify/static';
import { config } from './config.js';
import { authMiddleware } from './middleware/auth.js';
import { sitesRoutes } from './routes/api/sites.js';
import { accountsRoutes } from './routes/api/accounts.js';
import { checkinRoutes } from './routes/api/checkin.js';
import { tokensRoutes } from './routes/api/tokens.js';
import { statsRoutes } from './routes/api/stats.js';
import { authRoutes } from './routes/api/auth.js';
import { settingsRoutes } from './routes/api/settings.js';
import { accountTokensRoutes } from './routes/api/accountTokens.js';
import { searchRoutes } from './routes/api/search.js';
import { eventsRoutes } from './routes/api/events.js';
import { taskRoutes } from './routes/api/tasks.js';
import { testRoutes } from './routes/api/test.js';
import { monitorRoutes } from './routes/api/monitor.js';
import { proxyRoutes } from './routes/proxy/router.js';
import { startScheduler } from './services/checkinScheduler.js';
import { buildStartupSummaryLines } from './services/startupInfo.js';
import { existsSync } from 'fs';
import { normalize, resolve, sep } from 'path';
import { db, schema } from './db/index.js';
// Overlay persisted, admin-editable settings from the database on top of the
// env-derived defaults in `config`. Each value is validated before being
// applied; the catch below deliberately swallows errors because on a first
// run the settings table may not exist yet.
try {
  const rows = db.select().from(schema.settings).all();
  const settingsMap = new Map(rows.map((row) => [row.key, row.value]));
  // Decode one JSON-encoded setting; undefined when absent or malformed.
  const parseSetting = <T>(key: string): T | undefined => {
    const raw = settingsMap.get(key);
    if (!raw) return undefined;
    try {
      return JSON.parse(raw) as T;
    } catch {
      return undefined;
    }
  };
  // Accept either string[] or a comma-separated string; yields trimmed,
  // non-empty items (mirrors parseCsvList in config.ts).
  const toStringList = (value: unknown): string[] => {
    if (Array.isArray(value)) {
      return value
        .map((item) => (typeof item === 'string' ? item.trim() : ''))
        .filter((item) => item.length > 0);
    }
    if (typeof value === 'string') {
      return value
        .split(',')
        .map((item) => item.trim())
        .filter((item) => item.length > 0);
    }
    return [];
  };
  // Tokens/cron strings: only non-empty strings may override.
  const authToken = parseSetting<string>('auth_token');
  if (typeof authToken === 'string' && authToken) config.authToken = authToken;
  const proxyToken = parseSetting<string>('proxy_token');
  if (typeof proxyToken === 'string' && proxyToken) config.proxyToken = proxyToken;
  const checkinCron = parseSetting<string>('checkin_cron');
  if (typeof checkinCron === 'string' && checkinCron) config.checkinCron = checkinCron;
  const balanceRefreshCron = parseSetting<string>('balance_refresh_cron');
  if (typeof balanceRefreshCron === 'string' && balanceRefreshCron) config.balanceRefreshCron = balanceRefreshCron;
  // Routing weights merge field-by-field over the env defaults.
  const routingWeights = parseSetting<Partial<typeof config.routingWeights>>('routing_weights');
  if (routingWeights && typeof routingWeights === 'object') {
    config.routingWeights = {
      ...config.routingWeights,
      ...routingWeights,
    };
  }
  const routingFallbackUnitCost = parseSetting<number>('routing_fallback_unit_cost');
  if (typeof routingFallbackUnitCost === 'number' && Number.isFinite(routingFallbackUnitCost) && routingFallbackUnitCost > 0) {
    config.routingFallbackUnitCost = Math.max(1e-6, routingFallbackUnitCost);
  }
  // Notification endpoints: empty strings ARE applied (clearing a URL in the
  // admin UI must disable the channel).
  const webhookUrl = parseSetting<string>('webhook_url');
  if (typeof webhookUrl === 'string') config.webhookUrl = webhookUrl;
  const barkUrl = parseSetting<string>('bark_url');
  if (typeof barkUrl === 'string') config.barkUrl = barkUrl;
  const serverChanKey = parseSetting<string>('serverchan_key');
  if (typeof serverChanKey === 'string') config.serverChanKey = serverChanKey;
  const smtpEnabled = parseSetting<boolean>('smtp_enabled');
  if (typeof smtpEnabled === 'boolean') config.smtpEnabled = smtpEnabled;
  const smtpHost = parseSetting<string>('smtp_host');
  if (typeof smtpHost === 'string') config.smtpHost = smtpHost;
  const smtpPort = parseSetting<number>('smtp_port');
  if (typeof smtpPort === 'number' && Number.isFinite(smtpPort) && smtpPort > 0) {
    config.smtpPort = smtpPort;
  }
  const smtpSecure = parseSetting<boolean>('smtp_secure');
  if (typeof smtpSecure === 'boolean') config.smtpSecure = smtpSecure;
  const smtpUser = parseSetting<string>('smtp_user');
  if (typeof smtpUser === 'string') config.smtpUser = smtpUser;
  const smtpPass = parseSetting<string>('smtp_pass');
  if (typeof smtpPass === 'string') config.smtpPass = smtpPass;
  const smtpFrom = parseSetting<string>('smtp_from');
  if (typeof smtpFrom === 'string') config.smtpFrom = smtpFrom;
  const smtpTo = parseSetting<string>('smtp_to');
  if (typeof smtpTo === 'string') config.smtpTo = smtpTo;
  const notifyCooldownSec = parseSetting<number>('notify_cooldown_sec');
  if (typeof notifyCooldownSec === 'number' && Number.isFinite(notifyCooldownSec) && notifyCooldownSec >= 0) {
    config.notifyCooldownSec = Math.trunc(notifyCooldownSec);
  }
  const adminIpAllowlist = parseSetting<string[] | string>('admin_ip_allowlist');
  if (adminIpAllowlist !== undefined) {
    config.adminIpAllowlist = toStringList(adminIpAllowlist);
  }
  console.log('Loaded runtime settings overrides');
} catch { /* first run, table may not exist */ }
const app = Fastify({ logger: true });
// NOTE(review): cors is registered without options, which defaults to
// allowing any origin — confirm this is intended for the admin API.
await app.register(cors);
// Auth middleware for /api routes only; /v1 proxy routes are expected to
// enforce the proxy token themselves (see routes/proxy/router.ts — verify).
app.addHook('onRequest', async (request, reply) => {
  if (request.url.startsWith('/api/')) {
    await authMiddleware(request, reply);
  }
});
// Register admin API routes.
await app.register(sitesRoutes);
await app.register(accountsRoutes);
await app.register(checkinRoutes);
await app.register(tokensRoutes);
await app.register(statsRoutes);
await app.register(authRoutes);
await app.register(settingsRoutes);
await app.register(accountTokensRoutes);
await app.register(searchRoutes);
await app.register(eventsRoutes);
await app.register(taskRoutes);
await app.register(testRoutes);
await app.register(monitorRoutes);
// Register OpenAI-compatible proxy routes (/v1/*).
await app.register(proxyRoutes);
// Serve the built web frontend when dist/web exists (production mode).
const webDir = resolve('dist/web');
if (existsSync(webDir)) {
  await app.register(fastifyStatic, {
    root: webDir,
    prefix: '/',
    wildcard: false,
    setHeaders: (res, filePath) => {
      const normalizedPath = normalize(filePath);
      // Vite emits content-hashed files under /assets — cache them forever.
      if (normalizedPath.includes(`${sep}assets${sep}`)) {
        res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');
        return;
      }
      // index.html must always be revalidated so deploys take effect.
      if (normalizedPath.endsWith(`${sep}index.html`)) {
        res.setHeader('Cache-Control', 'no-cache');
      }
    },
  });
  // SPA fallback: any non-API, non-proxy path gets index.html for
  // client-side routing; API/proxy misses remain real 404s.
  app.setNotFoundHandler(async (request, reply) => {
    if (!request.url.startsWith('/api/') && !request.url.startsWith('/v1/')) {
      return reply.sendFile('index.html');
    }
    reply.code(404).send({ error: 'Not found' });
  });
}
// Start scheduler
startScheduler();
// Start server
try {
const listenHost = '0.0.0.0';
await app.listen({ port: config.port, host: listenHost });
const summaryLines = buildStartupSummaryLines({
port: config.port,
host: listenHost,
authToken: config.authToken,
proxyToken: config.proxyToken,
});
for (const line of summaryLines) {
console.log(line);
}
} catch (err) {
app.log.error(err);
process.exit(1);
}
+18
View File
@@ -0,0 +1,18 @@
import { describe, expect, it } from 'vitest';
import { extractClientIp, isIpAllowed } from './auth.js';
// Unit tests for the pure IP helpers backing the admin auth middleware.
describe('auth middleware IP helpers', () => {
  // x-forwarded-for takes precedence over the socket address, and only the
  // first (client-most) entry of the comma-separated chain is used.
  it('extracts first forwarded IP and normalizes ipv4-mapped address', () => {
    const ip = extractClientIp('::ffff:10.0.0.1', '198.51.100.7, 203.0.113.2');
    expect(ip).toBe('198.51.100.7');
  });
  // An empty allowlist disables IP filtering entirely.
  it('allows request when allowlist is empty', () => {
    expect(isIpAllowed('203.0.113.8', [])).toBe(true);
  });
  // With a non-empty allowlist, only exact (normalized) matches pass.
  it('rejects non-allowlisted IP when allowlist is configured', () => {
    expect(isIpAllowed('203.0.113.8', ['203.0.113.9'])).toBe(false);
    expect(isIpAllowed('203.0.113.9', ['203.0.113.9'])).toBe(true);
  });
});
+70
View File
@@ -0,0 +1,70 @@
import { FastifyRequest, FastifyReply } from 'fastify';
import { config } from '../config.js';
/**
 * Canonicalize a raw IP string for comparisons: trims whitespace, unwraps
 * IPv4-mapped IPv6 ("::ffff:1.2.3.4" -> "1.2.3.4"), and maps the IPv6
 * loopback "::1" to "127.0.0.1". Blank/nullish input yields ''.
 */
function normalizeIp(rawIp: string | null | undefined): string {
  const trimmed = (rawIp ?? '').trim();
  if (trimmed.length === 0) {
    return '';
  }
  const v4MappedPrefix = '::ffff:';
  if (trimmed.startsWith(v4MappedPrefix)) {
    return trimmed.slice(v4MappedPrefix.length).trim();
  }
  return trimmed === '::1' ? '127.0.0.1' : trimmed;
}
/**
 * Resolve the client IP for a request: prefer the first non-empty
 * X-Forwarded-For value (client-most hop of the comma-separated chain),
 * falling back to the socket's remote address. Result is normalized.
 *
 * NOTE(review): X-Forwarded-For is client-controlled unless a trusted
 * reverse proxy overwrites it — confirm the deployment sits behind one
 * before relying on this for allowlisting.
 */
export function extractClientIp(remoteIp: string | null | undefined, xForwardedFor?: string | string[] | undefined): string {
  const headerValues = Array.isArray(xForwardedFor)
    ? xForwardedFor
    : (typeof xForwardedFor === 'string' ? [xForwardedFor] : []);
  for (const value of headerValues) {
    if (value && value.trim().length > 0) {
      return normalizeIp(value.split(',')[0]);
    }
  }
  return normalizeIp(remoteIp);
}
/**
 * Check a client IP against the configured allowlist.
 * An empty or missing allowlist admits every IP; once an allowlist exists,
 * an unparseable client IP is rejected and entries are compared after
 * normalization on both sides.
 */
export function isIpAllowed(clientIp: string, allowlist: string[]): boolean {
  if (!allowlist?.length) return true;
  const candidate = normalizeIp(clientIp);
  if (candidate === '') return false;
  for (const entry of allowlist) {
    if (normalizeIp(entry) === candidate) return true;
  }
  return false;
}
/**
 * Admin auth for /api routes: optional IP-allowlist gate, then bearer-token
 * comparison against config.authToken. Sends 401 when the header is absent,
 * 403 on IP or token mismatch; returns without replying on success.
 *
 * Fix: strip the Bearer prefix with an anchored, case-insensitive regex and
 * trim, matching proxyAuthMiddleware below. The previous
 * `auth.replace('Bearer ', '')` replaced the first occurrence of "Bearer "
 * anywhere in the header and rejected "bearer <token>" or extra whitespace.
 */
export async function authMiddleware(request: FastifyRequest, reply: FastifyReply) {
  const clientIp = extractClientIp(request.ip, request.headers['x-forwarded-for']);
  if (!isIpAllowed(clientIp, config.adminIpAllowlist)) {
    reply.code(403).send({ error: 'IP not allowed' });
    return;
  }
  const auth = request.headers.authorization;
  if (!auth) {
    reply.code(401).send({ error: 'Missing Authorization header' });
    return;
  }
  // Accept both "Bearer <token>" (any case) and a bare token.
  const token = auth.replace(/^Bearer\s+/i, '').trim();
  if (token !== config.authToken) {
    reply.code(403).send({ error: 'Invalid token' });
    return;
  }
}
/**
 * API-key auth for the OpenAI-compatible proxy. Accepts either
 * "Authorization: Bearer <key>" (Authorization wins when both are present)
 * or an "x-api-key" header, and compares the key to config.proxyToken.
 * Sends 401 when no key is supplied and 403 on mismatch.
 */
export async function proxyAuthMiddleware(request: FastifyRequest, reply: FastifyReply) {
  const authHeader = request.headers.authorization;
  const apiKeyHeader = request.headers['x-api-key'];
  const bearer = typeof authHeader === 'string' ? authHeader : '';
  const apiKey = typeof apiKeyHeader === 'string' ? apiKeyHeader : '';
  const token = bearer
    ? bearer.replace(/^Bearer\s+/i, '').trim()
    : apiKey.trim();
  if (!token) {
    reply.code(401).send({ error: 'Missing Authorization or x-api-key header' });
    return;
  }
  if (token !== config.proxyToken) {
    reply.code(403).send({ error: 'Invalid API key' });
    return;
  }
}
@@ -0,0 +1,378 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { describe, expect, it, beforeAll, beforeEach, afterAll, vi } from 'vitest';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { mkdtempSync } from 'node:fs';
import { and, eq, sql } from 'drizzle-orm';
// The upstream platform adapter is fully mocked so these route tests never
// touch the network; each adapter method delegates to its own vi.fn() so
// individual tests can program return values and inspect calls.
const getApiTokensMock = vi.fn();
const getApiTokenMock = vi.fn();
const createApiTokenMock = vi.fn();
const getUserGroupsMock = vi.fn();
const deleteApiTokenMock = vi.fn();
vi.mock('../../services/platforms/index.js', () => ({
  getAdapter: () => ({
    getApiTokens: (...args: unknown[]) => getApiTokensMock(...args),
    getApiToken: (...args: unknown[]) => getApiTokenMock(...args),
    createApiToken: (...args: unknown[]) => createApiTokenMock(...args),
    getUserGroups: (...args: unknown[]) => getUserGroupsMock(...args),
    deleteApiToken: (...args: unknown[]) => deleteApiTokenMock(...args),
  }),
}));
// Type alias for the lazily imported db module (imported in beforeAll so the
// DATA_DIR override below takes effect first).
type DbModule = typeof import('../../db/index.js');
// Integration tests for the account-token routes, exercised through a real
// Fastify instance and a throwaway SQLite database (DATA_DIR in a tmp dir).
describe('account tokens sync routes with site status', () => {
  let app: FastifyInstance;
  let db: DbModule['db'];
  let schema: DbModule['schema'];
  let dataDir = '';
  let seedId = 0;
  // Monotonic id so each seeded site/account gets unique names and tokens.
  const nextSeed = () => {
    seedId += 1;
    return seedId;
  };
  // Insert a site + one account into the test DB; siteStatus 'disabled' is
  // applied via raw SQL (the insert path presumably always creates 'active').
  const seedAccount = (input: { siteStatus?: 'active' | 'disabled'; accountStatus?: string; accessToken?: string | null }) => {
    const id = nextSeed();
    const site = db.insert(schema.sites).values({
      name: `site-${id}`,
      url: `https://site-${id}.example.com`,
      platform: 'new-api',
    }).returning().get();
    if (input.siteStatus === 'disabled') {
      db.run(sql`update sites set status = 'disabled' where id = ${site.id}`);
    }
    const account = db.insert(schema.accounts).values({
      siteId: site.id,
      username: `user-${id}`,
      accessToken: input.accessToken ?? `access-token-${id}`,
      status: input.accountStatus ?? 'active',
    }).returning().get();
    return { site, account };
  };
  // DATA_DIR must point at the tmp dir BEFORE db/migrate is imported, hence
  // the dynamic imports here instead of top-level ones.
  beforeAll(async () => {
    dataDir = mkdtempSync(join(tmpdir(), 'metapi-account-tokens-sync-'));
    process.env.DATA_DIR = dataDir;
    await import('../../db/migrate.js');
    const dbModule = await import('../../db/index.js');
    const routesModule = await import('./accountTokens.js');
    db = dbModule.db;
    schema = dbModule.schema;
    app = Fastify();
    await app.register(routesModule.accountTokensRoutes);
  });
  // Reset mocks and wipe every table (children first, respecting FKs) so each
  // test starts from an empty database.
  beforeEach(() => {
    getApiTokensMock.mockReset();
    getApiTokenMock.mockReset();
    createApiTokenMock.mockReset();
    getUserGroupsMock.mockReset();
    deleteApiTokenMock.mockReset();
    seedId = 0;
    db.delete(schema.accountTokens).run();
    db.delete(schema.routeChannels).run();
    db.delete(schema.tokenRoutes).run();
    db.delete(schema.tokenModelAvailability).run();
    db.delete(schema.modelAvailability).run();
    db.delete(schema.checkinLogs).run();
    db.delete(schema.accounts).run();
    db.delete(schema.sites).run();
  });
  afterAll(async () => {
    await app.close();
    delete process.env.DATA_DIR;
  });
  // Disabled sites are skipped before any upstream call is attempted.
  it('returns skipped for single-account sync when site is disabled', async () => {
    const { account } = seedAccount({ siteStatus: 'disabled' });
    const response = await app.inject({
      method: 'POST',
      url: `/api/account-tokens/sync/${account.id}`,
    });
    expect(response.statusCode).toBe(200);
    expect(response.json()).toMatchObject({
      success: true,
      synced: false,
      status: 'skipped',
      reason: 'site_disabled',
    });
    expect(getApiTokensMock).not.toHaveBeenCalled();
    expect(getApiTokenMock).not.toHaveBeenCalled();
  });
  // Both the list endpoint and the single-token fallback returning nothing
  // yields a 'no_upstream_tokens' skip and writes no local rows.
  it('returns skipped when upstream has no api tokens', async () => {
    const { account } = seedAccount({ siteStatus: 'active' });
    getApiTokensMock.mockResolvedValue([]);
    getApiTokenMock.mockResolvedValue(null);
    const response = await app.inject({
      method: 'POST',
      url: `/api/account-tokens/sync/${account.id}`,
    });
    expect(response.statusCode).toBe(200);
    expect(response.json()).toMatchObject({
      success: true,
      synced: false,
      status: 'skipped',
      reason: 'no_upstream_tokens',
    });
    const tokenRows = db.select()
      .from(schema.accountTokens)
      .where(eq(schema.accountTokens.accountId, account.id))
      .all();
    expect(tokenRows.length).toBe(0);
  });
  // Batch sync: disabled-site accounts appear as skipped in results while
  // active-site accounts get their upstream tokens persisted locally.
  it('sync-all skips disabled-site accounts and syncs active-site accounts', async () => {
    const disabled = seedAccount({ siteStatus: 'disabled' });
    const active = seedAccount({ siteStatus: 'active' });
    getApiTokensMock.mockResolvedValue([
      { name: 'default', key: 'sk-synced-token', enabled: true },
    ]);
    const response = await app.inject({
      method: 'POST',
      url: '/api/account-tokens/sync-all',
      payload: { wait: true },
    });
    expect(response.statusCode).toBe(200);
    const body = response.json() as {
      success: boolean;
      summary: {
        total: number;
        synced: number;
        skipped: number;
        failed: number;
      };
      results: Array<{ accountId: number; status: string; reason?: string; synced?: boolean }>;
    };
    expect(body.success).toBe(true);
    expect(body.summary).toMatchObject({
      total: 2,
      synced: 1,
      skipped: 1,
      failed: 0,
    });
    const skipped = body.results.find((item) => item.accountId === disabled.account.id);
    const synced = body.results.find((item) => item.accountId === active.account.id);
    expect(skipped).toMatchObject({
      accountId: disabled.account.id,
      status: 'skipped',
      reason: 'site_disabled',
    });
    expect(synced).toMatchObject({
      accountId: active.account.id,
      status: 'synced',
      synced: true,
    });
    const syncedDefaultToken = db.select()
      .from(schema.accountTokens)
      .where(and(eq(schema.accountTokens.accountId, active.account.id), eq(schema.accountTokens.isDefault, true)))
      .get();
    expect(syncedDefaultToken?.token).toBe('sk-synced-token');
  });
  // POST without a manual token value delegates creation to the upstream
  // adapter (url + accessToken args checked) and then syncs the result back.
  it('creates token via upstream api and syncs into local store when manual token is omitted', async () => {
    const { account, site } = seedAccount({ siteStatus: 'active' });
    createApiTokenMock.mockResolvedValue(true);
    getApiTokensMock.mockResolvedValue([
      { name: 'created-from-upstream', key: 'sk-created-upstream-token', enabled: true },
    ]);
    const response = await app.inject({
      method: 'POST',
      url: '/api/account-tokens',
      payload: {
        accountId: account.id,
        name: 'created-from-upstream',
      },
    });
    expect(response.statusCode).toBe(200);
    expect(response.json()).toMatchObject({
      success: true,
      createdViaUpstream: true,
      synced: true,
      status: 'synced',
    });
    expect(createApiTokenMock).toHaveBeenCalledTimes(1);
    expect(createApiTokenMock.mock.calls[0][0]).toBe(site.url);
    expect(createApiTokenMock.mock.calls[0][1]).toBe(account.accessToken);
    const tokenRows = db.select()
      .from(schema.accountTokens)
      .where(eq(schema.accountTokens.accountId, account.id))
      .all();
    expect(tokenRows.length).toBe(1);
    expect(tokenRows[0].name).toBe('created-from-upstream');
    expect(tokenRows[0].token).toBe('sk-created-upstream-token');
    expect(tokenRows[0].source).toBe('sync');
  });
  // Quota/expiry/IP options in the request body must reach the adapter as the
  // fourth createApiToken argument, unchanged.
  it('passes token creation options to upstream adapter', async () => {
    const { account } = seedAccount({ siteStatus: 'active' });
    createApiTokenMock.mockResolvedValue(true);
    getApiTokensMock.mockResolvedValue([
      { name: 'custom-token', key: 'sk-created-upstream-token', enabled: true },
    ]);
    const response = await app.inject({
      method: 'POST',
      url: '/api/account-tokens',
      payload: {
        accountId: account.id,
        name: 'custom-token',
        group: 'vip',
        unlimitedQuota: false,
        remainQuota: 123456,
        expiredTime: 2_000_000_000,
        allowIps: '1.1.1.1,2.2.2.2',
      },
    });
    expect(response.statusCode).toBe(200);
    expect(createApiTokenMock).toHaveBeenCalledTimes(1);
    expect(createApiTokenMock.mock.calls[0][3]).toMatchObject({
      name: 'custom-token',
      group: 'vip',
      unlimitedQuota: false,
      remainQuota: 123456,
      expiredTime: 2_000_000_000,
      allowIps: '1.1.1.1,2.2.2.2',
    });
  });
  // Validation: a limited-quota token without remainQuota is rejected before
  // any upstream call happens.
  it('returns 400 when limited token misses remainQuota', async () => {
    const { account } = seedAccount({ siteStatus: 'active' });
    const response = await app.inject({
      method: 'POST',
      url: '/api/account-tokens',
      payload: {
        accountId: account.id,
        name: 'bad-token',
        unlimitedQuota: false,
      },
    });
    expect(response.statusCode).toBe(400);
    expect(response.json()).toMatchObject({
      success: false,
      message: '有限额度令牌必须填写 remainQuota',
    });
    expect(createApiTokenMock).not.toHaveBeenCalled();
  });
  // Upstream refusing the creation surfaces as 502 to the caller.
  it('returns 502 when upstream token creation fails', async () => {
    const { account } = seedAccount({ siteStatus: 'active' });
    createApiTokenMock.mockResolvedValue(false);
    const response = await app.inject({
      method: 'POST',
      url: '/api/account-tokens',
      payload: {
        accountId: account.id,
        name: 'created-from-upstream',
      },
    });
    expect(response.statusCode).toBe(502);
    expect(response.json()).toMatchObject({
      success: false,
      message: '站点创建令牌失败',
    });
  });
  // Group listing is a straight pass-through to the adapter.
  it('fetches account token groups from upstream', async () => {
    const { account } = seedAccount({ siteStatus: 'active' });
    getUserGroupsMock.mockResolvedValue(['default', 'vip']);
    const response = await app.inject({
      method: 'GET',
      url: `/api/account-tokens/groups/${account.id}`,
    });
    expect(response.statusCode).toBe(200);
    expect(response.json()).toMatchObject({
      success: true,
      groups: ['default', 'vip'],
    });
    expect(getUserGroupsMock).toHaveBeenCalledTimes(1);
  });
  // DELETE removes the token upstream first (url/accessToken/token args
  // checked) and only then deletes the local row.
  it('deletes upstream token before removing local token', async () => {
    const { account, site } = seedAccount({ siteStatus: 'active' });
    const token = db.insert(schema.accountTokens).values({
      accountId: account.id,
      name: 'upstream-token',
      token: 'sk-upstream-token',
      source: 'sync',
      enabled: true,
      isDefault: false,
    }).returning().get();
    deleteApiTokenMock.mockResolvedValue(true);
    const response = await app.inject({
      method: 'DELETE',
      url: `/api/account-tokens/${token.id}`,
    });
    expect(response.statusCode).toBe(200);
    expect(deleteApiTokenMock).toHaveBeenCalledTimes(1);
    expect(deleteApiTokenMock.mock.calls[0][0]).toBe(site.url);
    expect(deleteApiTokenMock.mock.calls[0][1]).toBe(account.accessToken);
    expect(deleteApiTokenMock.mock.calls[0][2]).toBe('sk-upstream-token');
    const removed = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, token.id)).get();
    expect(removed).toBeUndefined();
  });
  // If the upstream delete fails, the local row must survive and the caller
  // gets a 502 explaining that nothing was removed.
  it('keeps local token when upstream deletion fails', async () => {
    const { account } = seedAccount({ siteStatus: 'active' });
    const token = db.insert(schema.accountTokens).values({
      accountId: account.id,
      name: 'upstream-token',
      token: 'sk-upstream-token',
      source: 'sync',
      enabled: true,
      isDefault: false,
    }).returning().get();
    deleteApiTokenMock.mockResolvedValue(false);
    const response = await app.inject({
      method: 'DELETE',
      url: `/api/account-tokens/${token.id}`,
    });
    expect(response.statusCode).toBe(502);
    expect(response.json()).toMatchObject({
      success: false,
      message: '站点删除令牌失败,本地未删除',
    });
    const existing = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, token.id)).get();
    expect(existing).toBeDefined();
  });
});
+726
View File
@@ -0,0 +1,726 @@
import { FastifyInstance } from 'fastify';
import { and, eq } from 'drizzle-orm';
import { db, schema } from '../../db/index.js';
import {
ensureDefaultTokenForAccount,
listTokensWithRelations,
normalizeTokenForDisplay,
maskToken,
repairDefaultToken,
setDefaultToken,
syncTokensFromUpstream,
} from '../../services/accountTokenService.js';
import { getAdapter } from '../../services/platforms/index.js';
import { resolvePlatformUserId } from '../../services/accountExtraConfig.js';
import { startBackgroundTask } from '../../services/backgroundTaskService.js';
// Joined row shape produced by drizzle when selecting accounts inner-joined
// with their site.
type AccountWithSiteRow = {
  accounts: typeof schema.accounts.$inferSelect;
  sites: typeof schema.sites.$inferSelect;
};
// Per-account outcome of a token sync run (single-account or sync-all).
type SyncExecutionResult = {
  accountId: number;
  accountName: string;
  accountStatus: string | null;
  siteId: number;
  siteName: string;
  siteStatus: string | null;
  // 'synced' on success, 'skipped' when intentionally not attempted,
  // 'failed' on errors; reason/message carry machine/human detail.
  status: 'synced' | 'skipped' | 'failed';
  reason?: string;
  message?: string;
  synced: boolean;
  created: number;
  updated: number;
  total: number;
  defaultTokenId?: number | null;
};
// Hard cap on a single upstream token fetch before it is abandoned.
const TOKEN_SYNC_TIMEOUT_MS = 15_000;
// Accounts synced concurrently per batch during sync-all.
const SYNC_ALL_BATCH_SIZE = 3;
/** Human-readable "account @ site" label for sync summaries. */
function buildSyncAccountLabel(item: SyncExecutionResult): string {
  const accountLabel = item.accountName || `#${item.accountId}`;
  const siteLabel = item.siteName || 'unknown-site';
  return [accountLabel.trim(), siteLabel.trim()].join(' @ ');
}
/** Short reason text for a skipped/failed row, truncated to 32 chars + "...". */
function buildSyncReason(item: SyncExecutionResult): string {
  const raw = String(item.message || item.reason || '').trim();
  if (raw.length === 0) {
    return '';
  }
  return raw.length > 32 ? `${raw.slice(0, 32)}...` : raw;
}
/**
 * Render a three-line summary (成功/跳过/失败) of a batch token sync.
 * Each line lists up to 12 "account @ site" labels — skipped/failed rows also
 * carry a truncated reason in parentheses — and collapses any remainder into
 * a trailing "...等N" marker. Empty input yields ''.
 */
function buildTokenSyncTaskDetailMessage(results: SyncExecutionResult[]): string {
  if (!Array.isArray(results) || results.length === 0) return '';
  const byStatus = (status: SyncExecutionResult['status']) =>
    results.filter((item) => item.status === status);
  const renderRows = (rows: SyncExecutionResult[], withReason = false) => {
    const labels = rows.slice(0, 12).map((item) => {
      const label = buildSyncAccountLabel(item);
      if (!withReason) return label;
      const reason = buildSyncReason(item);
      return reason ? `${label}(${reason})` : label;
    });
    if (rows.length > 12) labels.push(`...等${rows.length}`);
    return labels.join('、');
  };
  const renderSegment = (title: string, rows: SyncExecutionResult[], withReason: boolean) =>
    `${title}(${rows.length}): ${rows.length > 0 ? renderRows(rows, withReason) : '-'}`;
  return [
    renderSegment('成功', byStatus('synced'), false),
    renderSegment('跳过', byStatus('skipped'), true),
    renderSegment('失败', byStatus('failed'), true),
  ].join('\n');
}
/** A site counts as disabled only when status is exactly 'disabled'; null/undefined/'' default to active. */
function isSiteDisabled(status?: string | null): boolean {
  return (status || 'active') === 'disabled';
}
/** Return the trimmed string, or undefined for non-strings and blank strings. */
function asTrimmedString(value: unknown): string | undefined {
  if (typeof value !== 'string') return undefined;
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : undefined;
}
/**
 * Accept real booleans plus the strings 'true'/'1'/'false'/'0'
 * (case-insensitive, trimmed); anything else yields undefined.
 */
function parseOptionalBoolean(value: unknown): boolean | undefined {
  if (typeof value === 'boolean') return value;
  if (typeof value !== 'string') return undefined;
  switch (value.trim().toLowerCase()) {
    case 'true':
    case '1':
      return true;
    case 'false':
    case '0':
      return false;
    default:
      return undefined;
  }
}
/**
 * Parse a positive integer from a finite number (truncated) or a numeric
 * string. Returns undefined for non-positive values, blanks, and anything
 * that is not a whole number.
 *
 * Fix: string input is now validated with /^\d+$/ before parsing, consistent
 * with parseExpiredTime below. Previously Number.parseInt silently truncated
 * inputs like "12.5" or "12abc" to 12 instead of rejecting them.
 */
function parsePositiveInteger(value: unknown): number | undefined {
  if (typeof value === 'number' && Number.isFinite(value)) {
    const normalized = Math.trunc(value);
    return normalized > 0 ? normalized : undefined;
  }
  if (typeof value !== 'string') return undefined;
  const trimmed = value.trim();
  // Reject empty strings and any non-digit characters (sign, dot, suffix).
  if (!/^\d+$/.test(trimmed)) return undefined;
  const normalized = Number.parseInt(trimmed, 10);
  return normalized > 0 ? normalized : undefined;
}
/**
 * Parse an expiry timestamp (seconds since epoch) from: a finite number
 * (truncated), an all-digits string taken literally as seconds, or any
 * date string Date.parse understands (converted ms -> s). Returns undefined
 * when the input is blank, unparseable, or non-positive.
 */
function parseExpiredTime(value: unknown): number | undefined {
  if (typeof value === 'number' && Number.isFinite(value)) {
    const truncated = Math.trunc(value);
    return truncated > 0 ? truncated : undefined;
  }
  if (typeof value !== 'string') return undefined;
  const trimmed = value.trim();
  if (trimmed === '') return undefined;
  if (/^\d+$/.test(trimmed)) {
    const asInt = Number.parseInt(trimmed, 10);
    if (!Number.isNaN(asInt) && asInt > 0) return asInt;
  }
  // Fall through to date parsing (also reached for a digit string of 0).
  const epochMs = Date.parse(trimmed);
  if (!Number.isFinite(epochMs)) return undefined;
  const epochSeconds = Math.trunc(epochMs / 1000);
  return epochSeconds > 0 ? epochSeconds : undefined;
}
/**
 * Run an async operation with a hard wall-clock bound: resolves with fn's
 * result, or rejects with Error(timeoutMessage) after timeoutMs. The timer
 * is always cleared so it cannot keep the event loop alive.
 */
async function withTimeout<T>(fn: () => Promise<T>, timeoutMs: number, timeoutMessage: string): Promise<T> {
  let pendingTimer: ReturnType<typeof setTimeout> | null = null;
  const deadline = new Promise<T>((_resolve, reject) => {
    pendingTimer = setTimeout(() => reject(new Error(timeoutMessage)), timeoutMs);
  });
  try {
    return await Promise.race([fn(), deadline]);
  } finally {
    if (pendingTimer) clearTimeout(pendingTimer);
  }
}
/**
 * Sync one account's API tokens from its upstream site into the local store.
 * Never throws: every outcome — skipped (disabled site, missing access token,
 * empty upstream), failed (unknown platform, fetch/sync error), or synced —
 * is returned as a SyncExecutionResult.
 */
async function executeAccountTokenSync(row: AccountWithSiteRow): Promise<SyncExecutionResult> {
  const accountId = row.accounts.id;
  // Common identity fields shared by every result variant below.
  const base = {
    accountId,
    accountName: row.accounts.username || `account-${accountId}`,
    accountStatus: row.accounts.status,
    siteId: row.sites.id,
    siteName: row.sites.name,
    siteStatus: row.sites.status,
  };
  // Disabled sites are never contacted.
  if (isSiteDisabled(row.sites.status)) {
    return {
      ...base,
      status: 'skipped',
      reason: 'site_disabled',
      message: 'site disabled',
      synced: false,
      created: 0,
      updated: 0,
      total: 0,
      defaultTokenId: null,
    };
  }
  if (!row.accounts.accessToken) {
    // Legacy accounts may still carry a single apiToken; promote it to a
    // default local token before reporting the skip.
    if (row.accounts.apiToken) {
      ensureDefaultTokenForAccount(accountId, row.accounts.apiToken, { name: 'default', source: 'legacy' });
    }
    return {
      ...base,
      status: 'skipped',
      reason: 'missing_access_token',
      synced: false,
      created: 0,
      updated: 0,
      total: 0,
      defaultTokenId: null,
    };
  }
  const adapter = getAdapter(row.sites.platform);
  if (!adapter) {
    return {
      ...base,
      status: 'failed',
      reason: 'unsupported_platform',
      message: `不支持的平台: ${row.sites.platform}`,
      synced: false,
      created: 0,
      updated: 0,
      total: 0,
      defaultTokenId: null,
    };
  }
  try {
    const platformUserId = resolvePlatformUserId(row.accounts.extraConfig, row.accounts.username);
    // Primary path: fetch the full token list, bounded by the sync timeout.
    let tokens = await withTimeout(
      () => adapter.getApiTokens(row.sites.url, row.accounts.accessToken, platformUserId),
      TOKEN_SYNC_TIMEOUT_MS,
      `token sync timeout (${Math.round(TOKEN_SYNC_TIMEOUT_MS / 1000)}s)`,
    );
    // Fallback: some platforms only expose a single token endpoint.
    if (tokens.length === 0) {
      const fallback = await withTimeout(
        () => adapter.getApiToken(row.sites.url, row.accounts.accessToken, platformUserId),
        TOKEN_SYNC_TIMEOUT_MS,
        `token sync timeout (${Math.round(TOKEN_SYNC_TIMEOUT_MS / 1000)}s)`,
      );
      if (fallback) {
        tokens = [{ name: 'default', key: fallback, enabled: true }];
      }
    }
    if (tokens.length === 0) {
      return {
        ...base,
        status: 'skipped',
        reason: 'no_upstream_tokens',
        message: 'upstream returned no api tokens',
        synced: false,
        created: 0,
        updated: 0,
        total: 0,
        defaultTokenId: null,
      };
    }
    // Persist; syncTokensFromUpstream supplies created/updated/total/defaultTokenId.
    const synced = syncTokensFromUpstream(accountId, tokens);
    return {
      ...base,
      status: 'synced',
      synced: true,
      ...synced,
    };
  } catch (error: any) {
    // Timeouts and adapter/network errors all land here as a 'failed' result.
    return {
      ...base,
      status: 'failed',
      reason: 'sync_error',
      message: error?.message || 'sync failed',
      synced: false,
      created: 0,
      updated: 0,
      total: 0,
      defaultTokenId: null,
    };
  }
}
/**
 * Write a best-effort audit event for one account's token sync outcome.
 * DB errors are swallowed deliberately — event logging must never break
 * the sync flow itself.
 */
function appendTokenSyncEvent(result: SyncExecutionResult) {
  let title: string;
  let level: string;
  if (result.status === 'synced') {
    title = '令牌同步成功';
    level = 'info';
  } else if (result.status === 'skipped') {
    title = '令牌同步跳过';
    level = 'warning';
  } else {
    title = '令牌同步失败';
    level = 'error';
  }
  const detail = result.status === 'synced'
    ? `新增 ${result.created},更新 ${result.updated},总数 ${result.total}`
    : (result.message || result.reason || 'sync skipped');
  try {
    db.insert(schema.events).values({
      type: 'token',
      title,
      message: `${result.accountName} @ ${result.siteName}: ${detail}`,
      level,
      relatedId: result.accountId,
      relatedType: 'account',
    }).run();
  } catch {}
}
/**
 * Sync tokens for every active account, SYNC_ALL_BATCH_SIZE accounts at a
 * time, logging one event per account. Returns per-account results plus an
 * aggregate summary (status counts and created/updated totals).
 */
async function executeSyncAllAccountTokens() {
  const rows = db.select().from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.accounts.status, 'active'))
    .all();
  const results: SyncExecutionResult[] = [];
  // Small concurrent batches avoid hammering upstream sites all at once.
  for (let start = 0; start < rows.length; start += SYNC_ALL_BATCH_SIZE) {
    const chunk = rows.slice(start, start + SYNC_ALL_BATCH_SIZE);
    const chunkResults = await Promise.all(chunk.map(async (row) => {
      const outcome = await executeAccountTokenSync(row);
      appendTokenSyncEvent(outcome);
      return outcome;
    }));
    results.push(...chunkResults);
  }
  const countByStatus = (status: SyncExecutionResult['status']) =>
    results.filter((item) => item.status === status).length;
  const summary = {
    total: results.length,
    synced: countByStatus('synced'),
    skipped: countByStatus('skipped'),
    failed: countByStatus('failed'),
    created: results.reduce((sum, item) => sum + item.created, 0),
    updated: results.reduce((sum, item) => sum + item.updated, 0),
  };
  return { summary, results };
}
export async function accountTokensRoutes(app: FastifyInstance) {
app.get<{ Querystring: { accountId?: string } }>('/api/account-tokens', async (request) => {
const accountId = request.query.accountId ? Number.parseInt(request.query.accountId, 10) : undefined;
return listTokensWithRelations(Number.isFinite(accountId as number) ? accountId : undefined);
});
app.post<{ Body: {
accountId: number;
name?: string;
token?: string;
enabled?: boolean;
isDefault?: boolean;
source?: string;
group?: string;
unlimitedQuota?: boolean | string;
remainQuota?: number | string;
expiredTime?: number | string;
allowIps?: string;
modelLimitsEnabled?: boolean | string;
modelLimits?: string;
} }>('/api/account-tokens', async (request, reply) => {
const body = request.body;
const row = db.select()
.from(schema.accounts)
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.where(eq(schema.accounts.id, body.accountId))
.get();
if (!row) {
return reply.code(404).send({ success: false, message: '账号不存在' });
}
const tokenValue = (body.token || '').trim();
if (tokenValue) {
const now = new Date().toISOString();
const existing = db.select().from(schema.accountTokens)
.where(eq(schema.accountTokens.accountId, body.accountId))
.all();
const created = db.insert(schema.accountTokens).values({
accountId: body.accountId,
name: (body.name || '').trim() || (existing.length === 0 ? 'default' : `token-${existing.length + 1}`),
token: tokenValue,
source: body.source || 'manual',
enabled: body.enabled ?? true,
isDefault: body.isDefault ?? false,
createdAt: now,
updatedAt: now,
}).returning().get();
if (body.isDefault || (existing.length === 0 && (body.enabled ?? true))) {
setDefaultToken(created.id);
} else if (existing.every((token) => !token.isDefault) && (body.enabled ?? true)) {
setDefaultToken(created.id);
}
return { success: true, token: created };
}
const account = row.accounts;
const site = row.sites;
if (isSiteDisabled(site.status)) {
return reply.code(400).send({ success: false, message: '站点已禁用,无法创建令牌' });
}
if (!account.accessToken?.trim()) {
return reply.code(400).send({ success: false, message: '账号缺少访问令牌,无法创建站点令牌' });
}
const adapter = getAdapter(site.platform);
if (!adapter) {
return reply.code(400).send({ success: false, message: `不支持的平台: ${site.platform}` });
}
const unlimitedQuota = body.unlimitedQuota === undefined
? undefined
: parseOptionalBoolean(body.unlimitedQuota);
if (body.unlimitedQuota !== undefined && unlimitedQuota === undefined) {
return reply.code(400).send({ success: false, message: 'unlimitedQuota 参数无效' });
}
const remainQuota = body.remainQuota === undefined
? undefined
: parsePositiveInteger(body.remainQuota);
if (body.remainQuota !== undefined && remainQuota === undefined) {
return reply.code(400).send({ success: false, message: 'remainQuota 必须是正整数' });
}
if (unlimitedQuota === false && remainQuota === undefined) {
return reply.code(400).send({ success: false, message: '有限额度令牌必须填写 remainQuota' });
}
const expiredTime = body.expiredTime === undefined
? undefined
: parseExpiredTime(body.expiredTime);
if (body.expiredTime !== undefined && expiredTime === undefined) {
return reply.code(400).send({ success: false, message: 'expiredTime 参数无效' });
}
const modelLimitsEnabled = body.modelLimitsEnabled === undefined
? undefined
: parseOptionalBoolean(body.modelLimitsEnabled);
if (body.modelLimitsEnabled !== undefined && modelLimitsEnabled === undefined) {
return reply.code(400).send({ success: false, message: 'modelLimitsEnabled 参数无效' });
}
const platformUserId = resolvePlatformUserId(account.extraConfig, account.username);
const createdViaUpstream = await adapter.createApiToken(
site.url,
account.accessToken,
platformUserId,
{
name: asTrimmedString(body.name),
group: asTrimmedString(body.group),
unlimitedQuota,
remainQuota,
expiredTime,
allowIps: asTrimmedString(body.allowIps),
modelLimitsEnabled,
modelLimits: asTrimmedString(body.modelLimits),
},
);
if (!createdViaUpstream) {
return reply.code(502).send({ success: false, message: '站点创建令牌失败' });
}
const syncResult = await executeAccountTokenSync(row);
appendTokenSyncEvent(syncResult);
if (syncResult.status === 'failed') {
return reply.code(502).send({ success: false, message: syncResult.message || '同步站点令牌失败' });
}
if (syncResult.status === 'skipped') {
return reply.code(502).send({ success: false, message: syncResult.message || '站点未返回可用令牌' });
}
const preferred = db.select().from(schema.accountTokens)
.where(and(eq(schema.accountTokens.accountId, account.id), eq(schema.accountTokens.isDefault, true)))
.get();
const token = preferred || db.select().from(schema.accountTokens)
.where(eq(schema.accountTokens.accountId, account.id))
.all()
.slice(-1)[0] || null;
return {
success: true,
createdViaUpstream: true,
...syncResult,
token,
};
});
app.put<{ Params: { id: string }; Body: { name?: string; token?: string; enabled?: boolean; isDefault?: boolean; source?: string } }>('/api/account-tokens/:id', async (request, reply) => {
const tokenId = Number.parseInt(request.params.id, 10);
if (Number.isNaN(tokenId)) {
return reply.code(400).send({ success: false, message: '令牌 ID 无效' });
}
const existing = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
if (!existing) {
return reply.code(404).send({ success: false, message: '令牌不存在' });
}
const body = request.body;
const updates: Record<string, unknown> = { updatedAt: new Date().toISOString() };
if (body.name !== undefined) {
updates.name = (body.name || '').trim() || existing.name;
}
if (body.token !== undefined) {
const tokenValue = body.token.trim();
if (!tokenValue) {
return reply.code(400).send({ success: false, message: '令牌不能为空' });
}
updates.token = tokenValue;
}
if (body.enabled !== undefined) updates.enabled = body.enabled;
if (body.source !== undefined) updates.source = body.source;
if (body.isDefault !== undefined) updates.isDefault = body.isDefault;
db.update(schema.accountTokens).set(updates).where(eq(schema.accountTokens.id, tokenId)).run();
const latest = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
if (!latest) {
return reply.code(500).send({ success: false, message: '更新失败' });
}
if (body.isDefault === true) {
setDefaultToken(tokenId);
} else if (latest.isDefault && latest.enabled) {
setDefaultToken(tokenId);
} else if (existing.isDefault && !latest.enabled) {
repairDefaultToken(existing.accountId);
} else if (body.isDefault === false && existing.isDefault) {
repairDefaultToken(existing.accountId);
}
return { success: true, token: latest };
});
app.post<{ Params: { id: string } }>('/api/account-tokens/:id/default', async (request, reply) => {
const tokenId = Number.parseInt(request.params.id, 10);
if (Number.isNaN(tokenId)) {
return reply.code(400).send({ success: false, message: '令牌 ID 无效' });
}
const success = setDefaultToken(tokenId);
if (!success) {
return reply.code(404).send({ success: false, message: '令牌不存在' });
}
return { success: true };
});
app.get<{ Params: { id: string } }>('/api/account-tokens/:id/value', async (request, reply) => {
const tokenId = Number.parseInt(request.params.id, 10);
if (Number.isNaN(tokenId)) {
return reply.code(400).send({ success: false, message: '令牌 ID 无效' });
}
const row = db.select()
.from(schema.accountTokens)
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.where(eq(schema.accountTokens.id, tokenId))
.get();
if (!row) {
return reply.code(404).send({ success: false, message: '令牌不存在' });
}
const tokenValue = normalizeTokenForDisplay(row.account_tokens.token, row.sites.platform);
return {
success: true,
id: row.account_tokens.id,
name: row.account_tokens.name,
token: tokenValue,
tokenMasked: maskToken(row.account_tokens.token, row.sites.platform),
};
});
// Fetch the upstream user-group list for an account via its platform adapter.
app.get<{ Params: { accountId: string } }>('/api/account-tokens/groups/:accountId', async (request, reply) => {
  const accountId = Number.parseInt(request.params.accountId, 10);
  if (Number.isNaN(accountId)) {
    return reply.code(400).send({ success: false, message: '账号 ID 无效' });
  }
  const joined = db.select()
    .from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.accounts.id, accountId))
    .get();
  if (!joined) {
    return reply.code(404).send({ success: false, message: '账号不存在' });
  }
  const { accounts: account, sites: site } = joined;
  const adapter = getAdapter(site.platform);
  if (!adapter) {
    return reply.code(400).send({ success: false, message: `不支持的平台: ${site.platform}` });
  }
  if (!account.accessToken?.trim()) {
    return reply.code(400).send({ success: false, message: '账号缺少访问令牌,无法拉取分组' });
  }
  try {
    const platformUserId = resolvePlatformUserId(account.extraConfig, account.username);
    const rawGroups = await adapter.getUserGroups(site.url, account.accessToken, platformUserId);
    // Trim, drop empties, de-duplicate; fall back to 'default' when nothing remains.
    const groups = [...new Set((rawGroups || []).map((g) => String(g || '').trim()).filter(Boolean))];
    return { success: true, groups: groups.length > 0 ? groups : ['default'] };
  } catch (error: any) {
    return reply.code(502).send({
      success: false,
      message: error?.message || '拉取分组失败',
    });
  }
});
// Delete a token locally; when the site/account is usable, delete it upstream first.
// Upstream deletion failure keeps the local row so both sides stay consistent.
app.delete<{ Params: { id: string } }>('/api/account-tokens/:id', async (request, reply) => {
  const tokenId = Number.parseInt(request.params.id, 10);
  if (Number.isNaN(tokenId)) {
    return reply.code(400).send({ success: false, message: '令牌 ID 无效' });
  }
  const row = db.select()
    .from(schema.accountTokens)
    .innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.accountTokens.id, tokenId))
    .get();
  if (!row) {
    return reply.code(404).send({ success: false, message: '令牌不存在' });
  }
  const existing = row.account_tokens;
  const account = row.accounts;
  const site = row.sites;
  const adapter = getAdapter(site.platform);
  // Only attempt upstream deletion when the site is enabled, a session token
  // exists, and the platform has an adapter (narrowing also avoids `adapter!`).
  if (adapter && !isSiteDisabled(site.status) && !!account.accessToken?.trim()) {
    const platformUserId = resolvePlatformUserId(account.extraConfig, account.username);
    let upstreamDeleted = false;
    try {
      upstreamDeleted = await adapter.deleteApiToken(
        site.url,
        account.accessToken,
        existing.token,
        platformUserId,
      );
    } catch {
      // A thrown adapter error is treated like an explicit failure so the
      // client gets a 502 instead of an unhandled 500 (matches the 502
      // handling used by the groups route above).
      upstreamDeleted = false;
    }
    if (!upstreamDeleted) {
      return reply.code(502).send({ success: false, message: '站点删除令牌失败,本地未删除' });
    }
  }
  db.delete(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).run();
  // Removing the default token leaves the account without one; promote another.
  if (existing.isDefault) {
    repairDefaultToken(existing.accountId);
  }
  return { success: true };
});
// Sync one account's token list from its upstream site, recording the outcome.
app.post<{ Params: { accountId: string } }>('/api/account-tokens/sync/:accountId', async (request, reply) => {
  const accountId = Number.parseInt(request.params.accountId, 10);
  if (Number.isNaN(accountId)) {
    return reply.code(400).send({ success: false, message: '账号 ID 无效' });
  }
  const row = db.select().from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.accounts.id, accountId))
    .get();
  if (!row) {
    return reply.code(404).send({ success: false, message: '账号不存在' });
  }
  const result = await executeAccountTokenSync(row);
  appendTokenSyncEvent(result);
  if (result.status === 'failed') {
    // Unsupported platform is a caller problem (400); anything else is an upstream failure (502).
    if (result.reason === 'unsupported_platform') {
      return reply.code(400).send({ success: false, message: result.message });
    }
    return reply.code(502).send({ success: false, message: result.message || '同步失败' });
  }
  return { success: true, ...result };
});
// Sync tokens for every account. With { wait: true } the sync runs inline and
// the result is returned; otherwise a deduplicated background task is queued
// and the route answers 202 immediately.
app.post<{ Body?: { wait?: boolean } }>('/api/account-tokens/sync-all', async (request, reply) => {
  if (request.body?.wait) {
    const syncResult = await executeSyncAllAccountTokens();
    return { success: true, ...syncResult };
  }
  const { task, reused } = startBackgroundTask(
    {
      type: 'token',
      title: '同步全部账号令牌',
      // Prevents concurrent duplicate sync-all runs.
      dedupeKey: 'sync-all-account-tokens',
      notifyOnFailure: true,
      successTitle: (currentTask) => {
        const summary = (currentTask.result as any)?.summary;
        if (!summary) return '同步全部账号令牌已完成';
        // Fixed: the full-width "(" was previously never closed.
        return `同步全部账号令牌已完成(成功${summary.synced}/跳过${summary.skipped}/失败${summary.failed})`;
      },
      failureTitle: () => '同步全部账号令牌失败',
      successMessage: (currentTask) => {
        const summary = (currentTask.result as any)?.summary;
        const results = (currentTask.result as any)?.results as SyncExecutionResult[] | undefined;
        if (!summary) return '全部账号令牌同步任务已完成';
        const detail = buildTokenSyncTaskDetailMessage(Array.isArray(results) ? results : []);
        return detail
          ? `全部账号令牌同步完成:成功 ${summary.synced},跳过 ${summary.skipped},失败 ${summary.failed}\n${detail}`
          : `全部账号令牌同步完成:成功 ${summary.synced},跳过 ${summary.skipped},失败 ${summary.failed}`;
      },
      failureMessage: (currentTask) => `全部账号令牌同步失败:${currentTask.error || 'unknown error'}`,
    },
    async () => executeSyncAllAccountTokens(),
  );
  return reply.code(202).send({
    success: true,
    queued: true,
    reused,
    jobId: task.id,
    status: task.status,
    message: reused
      ? '令牌同步任务执行中,请稍后查看程序日志'
      : '已开始全部账号令牌同步,请稍后查看程序日志',
  });
});
// Return the default token (masked, never the raw value) for an account,
// or token: null when no row is flagged as default.
app.get<{ Params: { accountId: string } }>('/api/account-tokens/account/:accountId/default', async (request, reply) => {
  const accountId = Number.parseInt(request.params.accountId, 10);
  if (Number.isNaN(accountId)) {
    return reply.code(400).send({ success: false, message: '账号 ID 无效' });
  }
  const row = db.select()
    .from(schema.accountTokens)
    .innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(and(eq(schema.accountTokens.accountId, accountId), eq(schema.accountTokens.isDefault, true)))
    .get();
  if (!row) {
    return { success: true, token: null };
  }
  // Strip the raw token value; expose only metadata plus the masked form.
  const { token: rawToken, ...meta } = row.account_tokens;
  return {
    success: true,
    token: { ...meta, tokenMasked: maskToken(rawToken, row.sites.platform) },
  };
});
}
@@ -0,0 +1,104 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
import { mkdtempSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
// Stub the balance service: health refresh uses refreshBalance as its probe,
// and this test must not perform real network I/O.
const refreshBalanceMock = vi.fn();
vi.mock('../../services/balanceService.js', () => ({
refreshBalance: (...args: unknown[]) => refreshBalanceMock(...args),
}));
type DbModule = typeof import('../../db/index.js');
describe('accounts health refresh runtime state', () => {
let app: FastifyInstance;
let db: DbModule['db'];
let schema: DbModule['schema'];
let dataDir = '';
beforeAll(async () => {
// Point DATA_DIR at a throwaway temp dir, then import migrate/db/routes in
// that order so the database is created fresh for this suite.
dataDir = mkdtempSync(join(tmpdir(), 'metapi-accounts-health-refresh-'));
process.env.DATA_DIR = dataDir;
await import('../../db/migrate.js');
const dbModule = await import('../../db/index.js');
const routesModule = await import('./accounts.js');
db = dbModule.db;
schema = dbModule.schema;
app = Fastify();
await app.register(routesModule.accountsRoutes);
});
beforeEach(() => {
refreshBalanceMock.mockReset();
// Wipe tables between tests; dependent tables first (presumably to respect
// foreign-key ordering — confirm against the schema).
db.delete(schema.proxyLogs).run();
db.delete(schema.checkinLogs).run();
db.delete(schema.routeChannels).run();
db.delete(schema.tokenRoutes).run();
db.delete(schema.tokenModelAvailability).run();
db.delete(schema.modelAvailability).run();
db.delete(schema.accountTokens).run();
db.delete(schema.accounts).run();
db.delete(schema.sites).run();
});
afterAll(async () => {
await app.close();
delete process.env.DATA_DIR;
});
it('keeps degraded runtime state for unsupported checkin after health refresh', async () => {
const site = db.insert(schema.sites).values({
name: 'Wind Hub',
url: 'https://windhub.cc',
platform: 'done-hub',
}).returning().get();
// Pre-seed a degraded runtime-health entry that the refresh must preserve.
const account = db.insert(schema.accounts).values({
siteId: site.id,
username: 'ld6jl3djexjf',
accessToken: 'token',
status: 'active',
extraConfig: JSON.stringify({
runtimeHealth: {
state: 'degraded',
reason: '站点不支持签到接口',
source: 'checkin',
checkedAt: '2026-02-25T18:00:00.000Z',
},
}),
}).returning().get();
// The balance probe succeeds, so only the stored degraded state should remain.
refreshBalanceMock.mockResolvedValueOnce({ balance: 100, used: 0, quota: 100 });
const response = await app.inject({
method: 'POST',
url: '/api/accounts/health/refresh',
payload: { accountId: account.id, wait: true },
});
expect(response.statusCode).toBe(200);
const body = response.json() as {
success: boolean;
summary: {
healthy: number;
degraded: number;
failed: number;
};
results: Array<{ state: string; status: string; message: string }>;
};
expect(body.success).toBe(true);
expect(body.summary.degraded).toBe(1);
expect(body.summary.healthy).toBe(0);
expect(body.summary.failed).toBe(0);
// status 'success' with state 'degraded': the probe ran but the stored
// degraded reason is carried through.
expect(body.results[0]).toMatchObject({
state: 'degraded',
status: 'success',
message: '站点不支持签到接口',
});
});
});
@@ -0,0 +1,185 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { describe, expect, it, beforeAll, beforeEach, afterAll } from 'vitest';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { mkdtempSync } from 'node:fs';
import {
formatLocalDate,
formatUtcSqlDateTime,
getLocalDayRangeUtc,
parseStoredUtcDateTime,
} from '../../services/localTimeService.js';
type DbModule = typeof import('../../db/index.js');
describe('accounts api today reward fallback', () => {
let app: FastifyInstance;
let db: DbModule['db'];
let schema: DbModule['schema'];
let dataDir = '';
beforeAll(async () => {
// Isolated temp DATA_DIR so migrations build a throwaway database.
dataDir = mkdtempSync(join(tmpdir(), 'metapi-accounts-reward-fallback-'));
process.env.DATA_DIR = dataDir;
await import('../../db/migrate.js');
const dbModule = await import('../../db/index.js');
const routesModule = await import('./accounts.js');
db = dbModule.db;
schema = dbModule.schema;
app = Fastify();
await app.register(routesModule.accountsRoutes);
});
beforeEach(() => {
// Wipe tables between tests; dependent tables first.
db.delete(schema.proxyLogs).run();
db.delete(schema.checkinLogs).run();
db.delete(schema.routeChannels).run();
db.delete(schema.tokenRoutes).run();
db.delete(schema.tokenModelAvailability).run();
db.delete(schema.modelAvailability).run();
db.delete(schema.accountTokens).run();
db.delete(schema.accounts).run();
db.delete(schema.sites).run();
});
afterAll(async () => {
await app.close();
delete process.env.DATA_DIR;
});
it('uses today income value when checkin reward is missing', async () => {
const today = formatLocalDate(new Date());
const site = db.insert(schema.sites).values({
name: 'reward-site',
url: 'https://reward-site.example.com',
platform: 'new-api',
}).returning().get();
// Snapshot says today's income is 12.5; the checkin log below has no
// parsable reward, so the API should fall back to this value.
const account = db.insert(schema.accounts).values({
siteId: site.id,
username: 'reward-user',
accessToken: 'token',
status: 'active',
extraConfig: JSON.stringify({
todayIncomeSnapshot: {
day: today,
baseline: 12.5,
latest: 12.5,
updatedAt: `${today}T10:00:00.000Z`,
},
}),
}).returning().get();
db.insert(schema.checkinLogs).values({
accountId: account.id,
status: 'success',
message: 'checked in',
reward: '',
createdAt: `${today} 10:01:00`,
}).run();
const response = await app.inject({
method: 'GET',
url: '/api/accounts',
});
expect(response.statusCode).toBe(200);
const rows = response.json() as Array<{ id: number; todayReward: number }>;
const target = rows.find((row) => row.id === account.id);
expect(target?.todayReward).toBe(12.5);
});
it('prefers parsed checkin reward when available', async () => {
const today = formatLocalDate(new Date());
const site = db.insert(schema.sites).values({
name: 'reward-site',
url: 'https://reward-site.example.com',
platform: 'new-api',
}).returning().get();
// Snapshot delta (14 - 10) exists, but the explicit reward '1.2' must win.
const account = db.insert(schema.accounts).values({
siteId: site.id,
username: 'reward-user',
accessToken: 'token',
status: 'active',
extraConfig: JSON.stringify({
todayIncomeSnapshot: {
day: today,
baseline: 10,
latest: 14,
updatedAt: `${today}T10:00:00.000Z`,
},
}),
}).returning().get();
db.insert(schema.checkinLogs).values({
accountId: account.id,
status: 'success',
message: 'checkin success',
reward: '1.2',
createdAt: `${today} 10:01:00`,
}).run();
const response = await app.inject({
method: 'GET',
url: '/api/accounts',
});
expect(response.statusCode).toBe(200);
const rows = response.json() as Array<{ id: number; todayReward: number }>;
const target = rows.find((row) => row.id === account.id);
expect(target?.todayReward).toBe(1.2);
});
it('counts today spend only inside local-day range', async () => {
const site = db.insert(schema.sites).values({
name: 'spend-site',
url: 'https://spend-site.example.com',
platform: 'new-api',
}).returning().get();
const account = db.insert(schema.accounts).values({
siteId: site.id,
username: 'spend-user',
accessToken: 'token',
status: 'active',
}).returning().get();
// Three logs: one minute before the local day starts, one inside, one after
// it ends. Only the in-range cost (2) should be counted.
const { startUtc, endUtc } = getLocalDayRangeUtc();
const startDate = parseStoredUtcDateTime(startUtc)!;
const endDate = parseStoredUtcDateTime(endUtc)!;
const beforeStart = formatUtcSqlDateTime(new Date(startDate.getTime() - 60_000));
const inRange = formatUtcSqlDateTime(new Date(startDate.getTime() + 60_000));
const afterEnd = formatUtcSqlDateTime(new Date(endDate.getTime() + 60_000));
db.insert(schema.proxyLogs).values([
{
accountId: account.id,
status: 'success',
estimatedCost: 1,
createdAt: beforeStart,
},
{
accountId: account.id,
status: 'success',
estimatedCost: 2,
createdAt: inRange,
},
{
accountId: account.id,
status: 'success',
estimatedCost: 4,
createdAt: afterEnd,
},
]).run();
const response = await app.inject({
method: 'GET',
url: '/api/accounts',
});
expect(response.statusCode).toBe(200);
const rows = response.json() as Array<{ id: number; todaySpend: number }>;
const target = rows.find((row) => row.id === account.id);
expect(target?.todaySpend).toBe(2);
});
});
+575
View File
@@ -0,0 +1,575 @@
import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { and, eq, gte, lt } from 'drizzle-orm';
import { refreshBalance } from '../../services/balanceService.js';
import { getAdapter } from '../../services/platforms/index.js';
import { refreshModelsForAccount, rebuildTokenRoutesFromAvailability } from '../../services/modelService.js';
import { ensureDefaultTokenForAccount, syncTokensFromUpstream } from '../../services/accountTokenService.js';
import { guessPlatformUserIdFromUsername, mergeAccountExtraConfig } from '../../services/accountExtraConfig.js';
import { encryptAccountPassword } from '../../services/accountCredentialService.js';
import { startBackgroundTask } from '../../services/backgroundTaskService.js';
import { parseCheckinRewardAmount } from '../../services/checkinRewardParser.js';
import { estimateRewardWithTodayIncomeFallback } from '../../services/todayIncomeRewardService.js';
import { getLocalDayRangeUtc } from '../../services/localTimeService.js';
import {
buildRuntimeHealthForAccount,
setAccountRuntimeHealth,
type RuntimeHealthState,
} from '../../services/accountHealthService.js';
// Joined row shape produced by db.select().from(accounts).innerJoin(sites).
type AccountWithSiteRow = {
accounts: typeof schema.accounts.$inferSelect;
sites: typeof schema.sites.$inferSelect;
};
// Per-account outcome of one runtime-health refresh pass.
type AccountHealthRefreshResult = {
accountId: number;
username: string | null;
siteName: string;
// 'skipped' = account/site disabled; 'failed' = probe threw or derived state is unhealthy.
status: 'success' | 'failed' | 'skipped';
state: RuntimeHealthState;
message: string;
};
function summarizeAccountHealthRefresh(results: AccountHealthRefreshResult[]) {
return {
total: results.length,
healthy: results.filter((item) => item.state === 'healthy').length,
unhealthy: results.filter((item) => item.state === 'unhealthy').length,
degraded: results.filter((item) => item.state === 'degraded').length,
disabled: results.filter((item) => item.state === 'disabled').length,
unknown: results.filter((item) => item.state === 'unknown').length,
success: results.filter((item) => item.status === 'success').length,
failed: results.filter((item) => item.status === 'failed').length,
skipped: results.filter((item) => item.status === 'skipped').length,
};
}
/**
 * Refresh runtime health for a single joined account+site row.
 *
 * Disabled accounts/sites are stamped 'disabled' and skipped without any probe.
 * Otherwise refreshBalance() acts as the health probe; the account row is then
 * re-read (the refresh presumably updates status/extraConfig — confirm against
 * balanceService) and the resulting health is derived from the fresh data.
 * A thrown probe error marks the account 'unhealthy' with the error message.
 */
async function refreshRuntimeHealthForRow(row: AccountWithSiteRow): Promise<AccountHealthRefreshResult> {
const accountId = row.accounts.id;
const username = row.accounts.username;
const siteName = row.sites.name;
// Missing status defaults to 'active'; either side being disabled short-circuits.
if ((row.accounts.status || 'active') === 'disabled' || (row.sites.status || 'active') === 'disabled') {
setAccountRuntimeHealth(accountId, {
state: 'disabled',
reason: '账号或站点已禁用',
source: 'health-refresh',
});
return {
accountId,
username,
siteName,
status: 'skipped',
state: 'disabled',
message: '账号或站点已禁用',
};
}
try {
await refreshBalance(accountId);
// Re-read the account: the balance refresh may have changed its stored state.
const refreshedAccount = db.select().from(schema.accounts)
.where(eq(schema.accounts.id, accountId))
.get();
const runtimeHealth = buildRuntimeHealthForAccount({
accountStatus: refreshedAccount?.status || row.accounts.status,
siteStatus: row.sites.status,
extraConfig: refreshedAccount?.extraConfig ?? row.accounts.extraConfig,
});
return {
accountId,
username,
siteName,
// A derived 'unhealthy' state counts as failed even though nothing threw.
status: runtimeHealth.state === 'unhealthy' ? 'failed' : 'success',
state: runtimeHealth.state,
message: runtimeHealth.reason,
};
} catch (error: any) {
const message = String(error?.message || '健康检查失败');
setAccountRuntimeHealth(accountId, {
state: 'unhealthy',
reason: message,
source: 'health-refresh',
});
return {
accountId,
username,
siteName,
status: 'failed',
state: 'unhealthy',
message,
};
}
}
/**
 * Refresh runtime health for one account (when accountId is given) or for all.
 * Rows are processed sequentially, one probe at a time.
 */
async function executeRefreshAccountRuntimeHealth(accountId?: number) {
  const allRows = db.select().from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .all();
  const targets = Number.isFinite(accountId as number)
    ? allRows.filter((row) => row.accounts.id === accountId)
    : allRows;
  const results: AccountHealthRefreshResult[] = [];
  for (const row of targets) {
    const outcome = await refreshRuntimeHealthForRow(row);
    results.push(outcome);
  }
  return { summary: summarizeAccountHealthRefresh(results), results };
}
export async function accountsRoutes(app: FastifyInstance) {
// List all accounts (with site info)
/**
 * GET /api/accounts — every account joined with its site, enriched with:
 * - todaySpend: sum of estimatedCost from today's proxy logs (local-day window)
 * - todayReward: today's parsed check-in rewards, with a today-income fallback
 * - runtimeHealth: derived display state
 */
app.get('/api/accounts', async () => {
const rows = db.select().from(schema.accounts)
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id)).all();
// [startUtc, endUtc) bounds of the current local day, expressed as stored-UTC strings.
const { localDay, startUtc, endUtc } = getLocalDayRangeUtc();
// Aggregate today's spend per account from proxy logs
const todayLogs = db.select().from(schema.proxyLogs)
.where(and(gte(schema.proxyLogs.createdAt, startUtc), lt(schema.proxyLogs.createdAt, endUtc)))
.all();
const spendByAccount: Record<number, number> = {};
for (const log of todayLogs) {
if (log.accountId == null) continue;
const cost = typeof log.estimatedCost === 'number' ? log.estimatedCost : 0;
spendByAccount[log.accountId] = (spendByAccount[log.accountId] || 0) + cost;
}
// Aggregate today's checkin rewards per account
const todayCheckins = db.select().from(schema.checkinLogs)
.where(and(
gte(schema.checkinLogs.createdAt, startUtc),
lt(schema.checkinLogs.createdAt, endUtc),
eq(schema.checkinLogs.status, 'success'),
))
.all();
const rewardByAccount: Record<number, number> = {};
const successCountByAccount: Record<number, number> = {};
const parsedRewardCountByAccount: Record<number, number> = {};
for (const log of todayCheckins) {
// NOTE(review): unlike the proxy-log loop above, accountId is not null-checked
// here — presumably checkinLogs.accountId is NOT NULL in the schema; confirm.
successCountByAccount[log.accountId] = (successCountByAccount[log.accountId] || 0) + 1;
// The reward may live in the reward field or in the free-text message.
const rewardNum = parseCheckinRewardAmount(log.reward) || parseCheckinRewardAmount(log.message);
if (rewardNum <= 0) continue;
rewardByAccount[log.accountId] = (rewardByAccount[log.accountId] || 0) + rewardNum;
parsedRewardCountByAccount[log.accountId] = (parsedRewardCountByAccount[log.accountId] || 0) + 1;
}
return rows.map((r) => ({
...r.accounts,
site: r.sites,
// Round to 6 decimal places to keep float noise out of the payload.
todaySpend: Math.round((spendByAccount[r.accounts.id] || 0) * 1_000_000) / 1_000_000,
todayReward: Math.round(estimateRewardWithTodayIncomeFallback({
day: localDay,
successCount: successCountByAccount[r.accounts.id] || 0,
parsedRewardCount: parsedRewardCountByAccount[r.accounts.id] || 0,
rewardSum: rewardByAccount[r.accounts.id] || 0,
extraConfig: r.accounts.extraConfig,
}) * 1_000_000) / 1_000_000,
runtimeHealth: buildRuntimeHealthForAccount({
accountStatus: r.accounts.status,
siteStatus: r.sites.status,
extraConfig: r.accounts.extraConfig,
}),
}));
});
// Login to a site and auto-create account
/**
 * POST /api/accounts/login — log into the target site with username/password,
 * then create or refresh the local account: store the session token, pull API
 * tokens, persist the password (encrypted) for auto-relogin, and kick off
 * best-effort balance/model refreshes.
 */
app.post<{ Body: { siteId: number; username: string; password: string } }>('/api/accounts/login', async (request) => {
const { siteId, username, password } = request.body;
// Get site info
const site = db.select().from(schema.sites).where(eq(schema.sites.id, siteId)).get();
if (!site) return { success: false, message: 'site not found' };
// Get platform adapter
const adapter = getAdapter(site.platform);
// Fixed: this message was mojibake (mis-encoded bytes); now matches the
// wording used by the other routes in this project.
if (!adapter) return { success: false, message: `不支持的平台: ${site.platform}` };
// Login to the target site
const loginResult = await adapter.login(site.url, username, password);
if (!loginResult.success || !loginResult.accessToken) {
return { success: false, message: loginResult.message || 'login failed' };
}
const guessedPlatformUserId = guessPlatformUserIdFromUsername(username);
// Auto-fetch API token(s) — both fetches are best-effort.
let apiToken: string | null = null;
let apiTokens: Array<{ name?: string | null; key?: string | null; enabled?: boolean | null }> = [];
try {
apiToken = await adapter.getApiToken(site.url, loginResult.accessToken, guessedPlatformUserId);
} catch { }
try {
apiTokens = await adapter.getApiTokens(site.url, loginResult.accessToken, guessedPlatformUserId);
} catch { }
// Prefer the first enabled token that has a key; fall back to the single fetch.
const preferredApiToken = apiTokens.find((token) => token.enabled !== false && token.key)?.key || apiToken || null;
const existing = db.select().from(schema.accounts)
.where(and(eq(schema.accounts.siteId, siteId), eq(schema.accounts.username, username)))
.get();
// Remember the credentials (encrypted) so the account can re-login automatically.
const extraConfigPatch: Record<string, unknown> = {
autoRelogin: {
username,
passwordCipher: encryptAccountPassword(password),
updatedAt: new Date().toISOString(),
},
};
if (guessedPlatformUserId) {
extraConfigPatch.platformUserId = guessedPlatformUserId;
}
const extraConfig = mergeAccountExtraConfig(existing?.extraConfig, extraConfigPatch);
// Create or update account
let accountId = existing?.id;
if (existing) {
db.update(schema.accounts).set({
accessToken: loginResult.accessToken,
apiToken: preferredApiToken || undefined,
checkinEnabled: true,
status: 'active',
extraConfig,
updatedAt: new Date().toISOString(),
}).where(eq(schema.accounts.id, existing.id)).run();
} else {
const created = db.insert(schema.accounts).values({
siteId,
username,
accessToken: loginResult.accessToken,
apiToken: preferredApiToken || undefined,
checkinEnabled: true,
extraConfig,
}).returning().get();
accountId = created.id;
}
const result = db.select().from(schema.accounts).where(eq(schema.accounts.id, accountId!)).get();
if (!result) {
return { success: false, message: 'account create failed' };
}
// Mirror upstream tokens locally; otherwise seed just the preferred token.
if (apiTokens.length > 0) {
try {
syncTokensFromUpstream(result.id, apiTokens);
} catch { }
} else if (preferredApiToken) {
try {
ensureDefaultTokenForAccount(result.id, preferredApiToken, { name: 'default', source: 'sync' });
} catch { }
}
// Auto-refresh balance
try { await refreshBalance(result.id); } catch { }
// Best-effort model discovery and route rebuild.
try {
await refreshModelsForAccount(result.id);
rebuildTokenRoutesFromAvailability();
} catch { }
const account = db.select().from(schema.accounts).where(eq(schema.accounts.id, result.id)).get();
return {
success: true,
account,
apiTokenFound: !!preferredApiToken,
tokenCount: apiTokens.length,
reusedAccount: !!existing,
};
});
// Verify a token against a site - auto-detects token type (session vs API key)
/**
 * POST /api/accounts/verify-token — probe a token against the site. A session
 * token yields user info/balance; an API key yields a model list. If neither
 * works, the route tries to classify the failure (missing user id vs anti-bot
 * shield) so the caller gets an actionable message.
 */
app.post<{ Body: { siteId: number; accessToken: string; platformUserId?: number } }>('/api/accounts/verify-token', async (request) => {
const { siteId, accessToken, platformUserId } = request.body;
const site = db.select().from(schema.sites).where(eq(schema.sites.id, siteId)).get();
if (!site) return { success: false, message: 'site not found' };
const adapter = getAdapter(site.platform);
// Fixed: this message was mojibake (mis-encoded bytes); now matches the
// wording used by the other routes in this project.
if (!adapter) return { success: false, message: `不支持的平台: ${site.platform}` };
const result = await adapter.verifyToken(site.url, accessToken, platformUserId);
if (result.tokenType === 'session') {
return {
success: true,
tokenType: 'session',
userInfo: result.userInfo,
balance: result.balance,
apiToken: result.apiToken,
};
}
if (result.tokenType === 'apikey') {
return {
success: true,
tokenType: 'apikey',
modelCount: result.models?.length || 0,
models: result.models?.slice(0, 10),
};
}
// Try to explain unknown failures: missing user id vs anti-bot challenge page.
type VerifyFailureReason = 'needs-user-id' | 'shield-blocked' | null;
const detectVerifyFailureReason = async (): Promise<VerifyFailureReason> => {
// Classify one raw response: an HTML body with challenge markers means the
// site is shielded; a JSON message mentioning user-id mismatch means the
// caller must supply a user id.
const parseFailureReason = (bodyText: string, contentType: string): VerifyFailureReason => {
const text = bodyText || '';
const ct = (contentType || '').toLowerCase();
if (ct.includes('text/html') && /var\s+arg1\s*=|acw_sc__v2|cdn_sec_tc|<script/i.test(text)) {
return 'shield-blocked';
}
try {
const body = JSON.parse(text) as any;
const message = typeof body?.message === 'string' ? body.message : '';
if (/mismatch|new-api-user|user id/i.test(message)) return 'needs-user-id';
if (/shield|challenge|captcha|acw_sc__v2|arg1/i.test(message)) return 'shield-blocked';
} catch { }
return null;
};
try {
const { fetch } = await import('undici');
// Probe /api/user/self with several credential shapes (Bearer header plus
// cookie variants), since the site's expected format is unknown.
const candidates = new Set<string>();
const trimmed = (accessToken || '').trim();
const raw = trimmed.startsWith('Bearer ') ? trimmed.slice(7).trim() : trimmed;
if (raw) {
if (raw.includes('=')) candidates.add(raw);
candidates.add(`session=${raw}`);
candidates.add(`token=${raw}`);
}
const headerVariants: Record<string, string>[] = [
{ Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json', 'New-Api-User': '0' },
];
for (const cookie of candidates) {
headerVariants.push({
Cookie: cookie,
'Content-Type': 'application/json',
'X-Requested-With': 'XMLHttpRequest',
});
}
for (const headers of headerVariants) {
try {
const testRes = await fetch(`${site.url}/api/user/self`, { headers });
const bodyText = await testRes.text();
const contentType = testRes.headers.get('content-type') || '';
const reason = parseFailureReason(bodyText, contentType);
if (reason) return reason;
} catch { }
}
} catch { }
return null;
};
const failureReason = await detectVerifyFailureReason();
if (failureReason === 'needs-user-id') {
return {
success: false,
needsUserId: true,
message: 'This site requires a user ID. Please fill in your site user ID.',
};
}
if (failureReason === 'shield-blocked') {
return {
success: false,
shieldBlocked: true,
message: 'This site is shielded by anti-bot challenge. Create an API key on the target site and import that key.',
};
}
return {
success: false,
message: 'Token invalid: cannot use it as session token or API key',
};
});
// Add an account (manual token input) - auto-detects token type and fetches info
/**
 * POST /api/accounts — create an account from a manually supplied token.
 * The token is verified first: a session token enables management operations
 * (and may reveal the username and an API token); an API key is stored as
 * apiToken with no session token.
 */
app.post<{ Body: { siteId: number; username?: string; accessToken: string; apiToken?: string; platformUserId?: number; checkinEnabled?: boolean } }>('/api/accounts', async (request) => {
const body = request.body;
const site = db.select().from(schema.sites).where(eq(schema.sites.id, body.siteId)).get();
let username = body.username;
let accessToken = body.accessToken;
let apiToken = body.apiToken;
let tokenType = 'unknown';
if (site) {
const adapter = getAdapter(site.platform);
if (adapter) {
const verifyResult = await adapter.verifyToken(site.url, body.accessToken, body.platformUserId);
tokenType = verifyResult.tokenType;
if (verifyResult.tokenType === 'session') {
// Token is a session token - can do management ops
if (!username && verifyResult.userInfo?.username) username = verifyResult.userInfo.username;
if (!apiToken && verifyResult.apiToken) apiToken = verifyResult.apiToken;
} else if (verifyResult.tokenType === 'apikey') {
// Token is an API key - store as apiToken, not accessToken
apiToken = body.accessToken;
accessToken = ''; // no session token available
}
}
}
// Store platformUserId in extraConfig for NewAPI sites that need it
const resolvedPlatformUserId =
body.platformUserId || guessPlatformUserIdFromUsername(username) || undefined;
let extraConfig: string | undefined;
if (resolvedPlatformUserId) {
extraConfig = mergeAccountExtraConfig(undefined, { platformUserId: resolvedPlatformUserId });
}
// Check-in only makes sense with a session token; force it off for API keys.
const result = db.insert(schema.accounts).values({
siteId: body.siteId,
username: username || undefined,
accessToken,
apiToken: apiToken || undefined,
checkinEnabled: tokenType === 'session' ? (body.checkinEnabled ?? true) : false,
extraConfig,
}).returning().get();
// Seed the local token table; failures here are non-fatal.
if (apiToken) {
try {
ensureDefaultTokenForAccount(result.id, apiToken, { name: 'default', source: 'manual' });
} catch { }
}
// With a session token, also mirror the upstream token list (best-effort).
if (tokenType === 'session' && accessToken && site) {
try {
const adapter = getAdapter(site.platform);
if (adapter) {
const syncedTokens = await adapter.getApiTokens(site.url, accessToken, resolvedPlatformUserId);
if (syncedTokens.length > 0) {
syncTokensFromUpstream(result.id, syncedTokens);
}
}
} catch { }
}
// Try to refresh balance
if (tokenType === 'session') {
try { await refreshBalance(result.id); } catch { }
}
// Best-effort model discovery and route rebuild.
try {
await refreshModelsForAccount(result.id);
rebuildTokenRoutesFromAvailability();
} catch { }
const account = db.select().from(schema.accounts).where(eq(schema.accounts.id, result.id)).get();
return { ...account, tokenType, apiTokenFound: !!apiToken, usernameDetected: !!(!body.username && username) };
});
// Update an account
/**
 * PUT /api/accounts/:id — update mutable account fields. Keys are whitelisted
 * so arbitrary columns cannot be written. Returns the updated row.
 */
app.put<{ Params: { id: string }; Body: any }>('/api/accounts/:id', async (request, reply) => {
// Explicit radix + NaN guard, consistent with the other id-parsing routes.
const id = Number.parseInt(request.params.id, 10);
if (Number.isNaN(id)) {
return reply.code(400).send({ success: false, message: '账号 ID 无效' });
}
const body = request.body as Record<string, unknown>;
const updates: any = {};
for (const key of ['username', 'accessToken', 'apiToken', 'status', 'checkinEnabled', 'unitCost', 'extraConfig']) {
if (body[key] !== undefined) updates[key] = body[key];
}
updates.updatedAt = new Date().toISOString();
db.update(schema.accounts).set(updates).where(eq(schema.accounts.id, id)).run();
// Keep the account's default token row in step with a manually edited apiToken.
if (typeof updates.apiToken === 'string' && updates.apiToken.trim()) {
try {
ensureDefaultTokenForAccount(id, updates.apiToken, { name: 'default', source: 'manual' });
} catch { }
}
// Best-effort: model availability/routes may depend on the updated credentials.
try {
await refreshModelsForAccount(id);
rebuildTokenRoutesFromAvailability();
} catch { }
return db.select().from(schema.accounts).where(eq(schema.accounts.id, id)).get();
});
// Delete an account
/**
 * DELETE /api/accounts/:id — remove the account and rebuild token routes that
 * may have referenced it (route rebuild is best-effort).
 */
app.delete<{ Params: { id: string } }>('/api/accounts/:id', async (request, reply) => {
// Explicit radix + NaN guard, consistent with the other id-parsing routes.
const id = Number.parseInt(request.params.id, 10);
if (Number.isNaN(id)) {
return reply.code(400).send({ success: false, message: '账号 ID 无效' });
}
db.delete(schema.accounts).where(eq(schema.accounts.id, id)).run();
try {
rebuildTokenRoutesFromAvailability();
} catch { }
return { success: true };
});
/**
 * POST /api/accounts/health/refresh — refresh runtime health for one account
 * (body.accountId) or for all accounts. With { wait: true } runs inline and
 * returns the summary; otherwise queues a deduplicated background task (202).
 */
app.post<{ Body?: { accountId?: number; wait?: boolean } }>('/api/accounts/health/refresh', async (request, reply) => {
// accountId may arrive as a number or a numeric string; treat empty as absent.
const rawAccountId = request.body?.accountId as unknown;
const hasAccountId = rawAccountId !== undefined && rawAccountId !== null && String(rawAccountId).trim() !== '';
const accountId = hasAccountId ? Number.parseInt(String(rawAccountId), 10) : undefined;
const wait = request.body?.wait === true;
if (hasAccountId && (!Number.isFinite(accountId) || (accountId as number) <= 0)) {
return reply.code(400).send({ success: false, message: '账号 ID 无效' });
}
if (wait) {
const result = await executeRefreshAccountRuntimeHealth(accountId);
// A targeted refresh that matched no rows means the account does not exist.
if (accountId && result.summary.total === 0) {
return reply.code(404).send({ success: false, message: '账号不存在' });
}
return {
success: true,
...result,
};
}
const taskTitle = accountId ? `刷新账号运行健康状态 #${accountId}` : '刷新全部账号运行健康状态';
// Per-account dedupe key so different accounts can refresh concurrently,
// but the same target is never refreshed twice at once.
const dedupeKey = accountId ? `refresh-account-runtime-health-${accountId}` : 'refresh-all-account-runtime-health';
const { task, reused } = startBackgroundTask(
{
type: 'status',
title: taskTitle,
dedupeKey,
notifyOnFailure: true,
successMessage: (currentTask) => {
const summary = (currentTask.result as { summary?: ReturnType<typeof summarizeAccountHealthRefresh> })?.summary;
if (!summary) return `${taskTitle}已完成`;
return `${taskTitle}完成:健康 ${summary.healthy},异常 ${summary.unhealthy},禁用 ${summary.disabled}`;
},
failureMessage: (currentTask) => `${taskTitle}失败:${currentTask.error || 'unknown error'}`,
},
async () => executeRefreshAccountRuntimeHealth(accountId),
);
return reply.code(202).send({
success: true,
queued: true,
reused,
jobId: task.id,
status: task.status,
message: reused
? '账号运行健康状态刷新进行中,请稍后查看账号列表'
: '已开始刷新账号运行健康状态,请稍后查看账号列表',
});
});
// Refresh balance for an account
/**
 * POST /api/accounts/:id/balance — force-refresh a single account's balance.
 * 404 when the account is unknown/unsupported, 400 on refresh failure.
 */
app.post<{ Params: { id: string } }>('/api/accounts/:id/balance', async (request, reply) => {
// Explicit radix + NaN guard, consistent with the other id-parsing routes.
const id = Number.parseInt(request.params.id, 10);
if (Number.isNaN(id)) {
reply.code(400);
return { message: 'invalid account id' };
}
try {
const result = await refreshBalance(id);
if (!result) {
reply.code(404);
return { message: 'account not found or platform not supported' };
}
return result;
} catch (err: any) {
reply.code(400);
return { message: err?.message || 'failed to fetch balance' };
}
});
}
+56
View File
@@ -0,0 +1,56 @@
import { timingSafeEqual } from 'node:crypto';

import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { config } from '../../config.js';
import { eq } from 'drizzle-orm';
/**
 * Admin auth-token routes: change the token (verifying the old one) and
 * expose a masked view of the current token for display.
 */
export async function authRoutes(app: FastifyInstance) {
  // Constant-time comparison so the admin token cannot be probed via timing
  // differences in the string comparison.
  const tokensMatch = (candidate: string, actual: string): boolean => {
    const a = Buffer.from(candidate);
    const b = Buffer.from(actual);
    return a.length === b.length && timingSafeEqual(a, b);
  };
  // Change admin auth token (requires old token verification)
  app.post<{ Body: { oldToken: string; newToken: string } }>('/api/settings/auth/change', async (request, reply) => {
    const { oldToken, newToken } = request.body;
    if (!oldToken || !newToken) {
      return reply.code(400).send({ success: false, message: '请填写所有字段' });
    }
    if (newToken.length < 6) {
      return reply.code(400).send({ success: false, message: '新 Token 至少 6 个字符' });
    }
    if (!tokensMatch(oldToken, config.authToken)) {
      return reply.code(403).send({ success: false, message: '旧 Token 验证失败' });
    }
    // Save to settings table (upsert) so the new token survives restarts.
    const existing = db.select().from(schema.settings).where(eq(schema.settings.key, 'auth_token')).get();
    if (existing) {
      db.update(schema.settings).set({ value: JSON.stringify(newToken) }).where(eq(schema.settings.key, 'auth_token')).run();
    } else {
      db.insert(schema.settings).values({ key: 'auth_token', value: JSON.stringify(newToken) }).run();
    }
    // Update runtime config so the change takes effect immediately.
    config.authToken = newToken;
    // Best-effort audit event; logging failure must not fail the request.
    try {
      db.insert(schema.events).values({
        type: 'token',
        title: '管理员登录令牌已更新',
        message: '管理员登录 Token 已被修改,请使用新 Token 登录。',
        level: 'warning',
        relatedType: 'settings',
        createdAt: new Date().toISOString(),
      }).run();
    } catch {}
    return { success: true, message: 'Token 已更新' };
  });
  // Get masked current token (for display)
  app.get('/api/settings/auth/info', async () => {
    const token = config.authToken;
    // Reveal head/tail only for reasonably long tokens.
    const masked = token.length > 8
      ? token.slice(0, 4) + '****' + token.slice(-4)
      : '****';
    return { masked };
  });
}
@@ -0,0 +1,90 @@
import Fastify from 'fastify';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
// Hoisted mock functions so the vi.mock factories below can reference them.
const checkinAllMock = vi.fn();
const checkinAccountMock = vi.fn();
// The route module under test calls these services; forward to the mocks so
// each test can control resolution timing and return values.
vi.mock('../../services/checkinService.js', () => ({
  checkinAll: (...args: unknown[]) => checkinAllMock(...args),
  checkinAccount: (...args: unknown[]) => checkinAccountMock(...args),
}));
vi.mock('../../services/checkinScheduler.js', () => ({
  updateCheckinCron: vi.fn(),
}));
// Minimal chainable stand-ins for the drizzle query builders: every chain
// terminates in run()/all()/get()-style calls with inert results, so route
// code can touch the db without a real SQLite file.
vi.mock('../../db/index.js', () => {
  const insertChain = {
    values: () => insertChain,
    onConflictDoUpdate: () => insertChain,
    run: () => ({ changes: 1 }),
  };
  const queryChain = {
    where: () => queryChain,
    all: () => [],
    limit: () => queryChain,
    offset: () => queryChain,
    orderBy: () => queryChain,
    innerJoin: () => queryChain,
    from: () => queryChain,
  };
  return {
    db: {
      insert: () => insertChain,
      select: () => queryChain,
    },
    // Only the column references the routes actually touch are modelled.
    schema: {
      settings: { key: 'key' },
      checkinLogs: { accountId: 'accountId', createdAt: 'createdAt' },
      accounts: { id: 'id' },
      events: { id: 'id' },
    },
  };
});
describe('POST /api/checkin/trigger background task dedupe', () => {
  beforeEach(async () => {
    // Fresh mocks + background-task registry for every test so dedupe state
    // cannot leak between cases.
    checkinAllMock.mockReset();
    checkinAccountMock.mockReset();
    checkinAccountMock.mockResolvedValue({ success: true, message: 'ok' });
    const { __resetBackgroundTasksForTests } = await import('../../services/backgroundTaskService.js');
    __resetBackgroundTasksForTests();
  });
  afterEach(() => {
    vi.clearAllMocks();
  });
  it('reuses the same background task while checkin-all is already running', async () => {
    // Keep the first checkinAll pending so the second trigger arrives while
    // the first job is still in flight.
    let resolveFirst: (value: Array<unknown>) => void = () => {};
    const firstRun = new Promise<Array<unknown>>((resolve) => {
      resolveFirst = resolve;
    });
    checkinAllMock.mockImplementation(() => firstRun);
    const { checkinRoutes } = await import('./checkin.js');
    const app = Fastify();
    await app.register(checkinRoutes);
    // First trigger queues a new background job: 202 + a job id.
    const firstResponse = await app.inject({ method: 'POST', url: '/api/checkin/trigger' });
    expect(firstResponse.statusCode).toBe(202);
    const firstBody = firstResponse.json() as { success: boolean; queued: boolean; jobId: string };
    expect(firstBody.success).toBe(true);
    expect(firstBody.queued).toBe(true);
    expect(typeof firstBody.jobId).toBe('string');
    expect(firstBody.jobId.length).toBeGreaterThan(10);
    // Second trigger must reuse the in-flight job instead of starting a new
    // one: same job id, reused=true, and checkinAll called exactly once.
    const secondResponse = await app.inject({ method: 'POST', url: '/api/checkin/trigger' });
    expect(secondResponse.statusCode).toBe(202);
    const secondBody = secondResponse.json() as { reused: boolean; jobId: string };
    expect(secondBody.reused).toBe(true);
    expect(secondBody.jobId).toBe(firstBody.jobId);
    expect(checkinAllMock).toHaveBeenCalledTimes(1);
    // Let the pending job settle before the server is closed.
    resolveFirst([]);
    await new Promise((resolve) => setTimeout(resolve, 20));
    await app.close();
  });
});
+154
View File
@@ -0,0 +1,154 @@
import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { eq, desc } from 'drizzle-orm';
import { checkinAccount, checkinAll } from '../../services/checkinService.js';
import { updateCheckinCron } from '../../services/checkinScheduler.js';
import { startBackgroundTask, summarizeCheckinResults } from '../../services/backgroundTaskService.js';
import { classifyFailureReason } from '../../services/failureReasonService.js';
// Human-readable "user @ site" label for one check-in result row; falls back
// to '#<accountId>' or 'unknown' when fields are missing.
function buildCheckinAccountLabel(item: any): string {
  const site = item?.site || 'unknown-site';
  let who = item?.username;
  if (!who) who = item?.accountId ? `#${item.accountId}` : 'unknown';
  return `${who} @ ${site}`;
}
// Trimmed result message, truncated to 32 characters with a '...' suffix.
function buildCheckinReason(item: any): string {
  const message = String(item?.result?.message || '').trim();
  if (message.length > 32) return `${message.slice(0, 32)}...`;
  return message;
}
// Multi-line summary of a check-in batch: one line each for succeeded,
// skipped and failed accounts (skipped/failed rows carry a short reason).
function buildCheckinTaskDetailMessage(results: any[]): string {
  if (!Array.isArray(results) || results.length === 0) return '';
  const isSkipped = (item: any): boolean =>
    item?.result?.status === 'skipped' || !!item?.result?.skipped;
  const succeeded: any[] = [];
  const skipped: any[] = [];
  const failed: any[] = [];
  // Each row lands in exactly one bucket; 'skipped' wins over success/failure.
  for (const item of results) {
    if (isSkipped(item)) skipped.push(item);
    else if (item?.result?.success) succeeded.push(item);
    else failed.push(item);
  }
  const renderRows = (rows: any[], withReason = false) => {
    const labels = rows.slice(0, 12).map((item) => {
      const label = buildCheckinAccountLabel(item);
      if (!withReason) return label;
      const reason = buildCheckinReason(item);
      return reason ? `${label}(${reason})` : label;
    });
    // At most 12 entries are listed; the rest collapse into a count.
    if (rows.length > 12) labels.push(`...等${rows.length}`);
    return labels.join('、');
  };
  return [
    `成功(${succeeded.length}): ${succeeded.length > 0 ? renderRows(succeeded) : '-'}`,
    `跳过(${skipped.length}): ${skipped.length > 0 ? renderRows(skipped, true) : '-'}`,
    `失败(${failed.length}): ${failed.length > 0 ? renderRows(failed, true) : '-'}`,
  ].join('\n');
}
export async function checkinRoutes(app: FastifyInstance) {
  // Trigger check-in for all accounts as a deduplicated background task.
  app.post('/api/checkin/trigger', async (_, reply) => {
    const { task, reused } = startBackgroundTask(
      {
        type: 'checkin',
        title: '全部账号签到',
        dedupeKey: 'checkin-all',
        notifyOnFailure: true,
        successTitle: (currentTask) => {
          const summary = (currentTask.result as any)?.summary;
          if (!summary) return '全部账号签到已完成';
          // FIX: close the fullwidth parenthesis (it was left unbalanced).
          return `全部账号签到已完成(成功${summary.success}/跳过${summary.skipped}/失败${summary.failed})`;
        },
        failureTitle: () => '全部账号签到失败',
        successMessage: (currentTask) => {
          const summary = (currentTask.result as any)?.summary;
          const results = (currentTask.result as any)?.results;
          if (!summary) return '全部账号签到任务已完成';
          const detail = buildCheckinTaskDetailMessage(Array.isArray(results) ? results : []);
          return detail
            ? `全部账号签到完成:成功 ${summary.success},跳过 ${summary.skipped},失败 ${summary.failed}\n${detail}`
            : `全部账号签到完成:成功 ${summary.success},跳过 ${summary.skipped},失败 ${summary.failed}`;
        },
        failureMessage: (currentTask) => `全部账号签到任务失败:${currentTask.error || 'unknown error'}`,
      },
      async () => {
        const results = await checkinAll();
        return {
          summary: summarizeCheckinResults(results),
          total: results.length,
          results,
        };
      },
    );
    // 202: the work continues in the background; the client polls the logs.
    return reply.code(202).send({
      success: true,
      queued: true,
      reused,
      jobId: task.id,
      status: task.status,
      message: reused
        ? '签到任务执行中,请稍后查看签到日志'
        : '已开始全部签到,请稍后查看签到日志',
    });
  });
  // Trigger check-in for a specific account; rejects non-numeric ids
  // instead of handing NaN to the service.
  app.post<{ Params: { id: string } }>('/api/checkin/trigger/:id', async (request, reply) => {
    const id = parseInt(request.params.id, 10);
    if (!Number.isFinite(id)) {
      return reply.code(400).send({ success: false, message: 'invalid account id' });
    }
    const result = await checkinAccount(id);
    return result;
  });
  // Get check-in logs joined with their account and site rows.
  app.get<{ Querystring: { limit?: string; offset?: string; accountId?: string } }>('/api/checkin/logs', async (request) => {
    // Clamp pagination to 1..500 / >=0; a malformed query string previously
    // leaked NaN straight into SQL LIMIT/OFFSET.
    const rawLimit = parseInt(request.query.limit || '50', 10);
    const limit = Number.isFinite(rawLimit) ? Math.max(1, Math.min(500, rawLimit)) : 50;
    const rawOffset = parseInt(request.query.offset || '0', 10);
    const offset = Number.isFinite(rawOffset) ? Math.max(0, rawOffset) : 0;
    let query = db.select().from(schema.checkinLogs)
      .innerJoin(schema.accounts, eq(schema.checkinLogs.accountId, schema.accounts.id))
      .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
      .orderBy(desc(schema.checkinLogs.createdAt))
      .limit(limit)
      .offset(offset);
    if (request.query.accountId) {
      query = query.where(eq(schema.checkinLogs.accountId, parseInt(request.query.accountId, 10))) as any;
    }
    const rows = query.all();
    // Attach a classified failure reason derived from the log row itself.
    return rows.map((row: any) => {
      const source = row?.checkin_logs || row;
      const failureReason = classifyFailureReason({
        message: source?.message,
        status: source?.status,
      });
      return {
        ...row,
        failureReason,
      };
    });
  });
  // Update the check-in schedule: applied to the live scheduler first, then
  // persisted so it survives restarts.
  app.put<{ Body: { cron: string } }>('/api/checkin/schedule', async (request) => {
    try {
      updateCheckinCron(request.body.cron);
      db.insert(schema.settings).values({ key: 'checkin_cron', value: JSON.stringify(request.body.cron) })
        .onConflictDoUpdate({ target: schema.settings.key, set: { value: JSON.stringify(request.body.cron) } }).run();
      return { success: true, cron: request.body.cron };
    } catch (err: any) {
      // NOTE(review): validation failures return 200 with { error } — callers
      // must inspect the body, not the status code.
      return { error: err.message };
    }
  });
}
+60
View File
@@ -0,0 +1,60 @@
import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { and, desc, eq, sql } from 'drizzle-orm';
export async function eventsRoutes(app: FastifyInstance) {
  // List events, newest first, optionally filtered by type and read state.
  app.get<{ Querystring: { limit?: string; offset?: string; type?: string; read?: string } }>('/api/events', async (request) => {
    // Clamp pagination; previously a non-numeric value produced NaN, which
    // Math.max/min propagate into the SQL LIMIT/OFFSET.
    const rawLimit = parseInt(request.query.limit || '30', 10);
    const limit = Number.isFinite(rawLimit) ? Math.max(1, Math.min(500, rawLimit)) : 30;
    const rawOffset = parseInt(request.query.offset || '0', 10);
    const offset = Number.isFinite(rawOffset) ? Math.max(0, rawOffset) : 0;
    const type = request.query.type;
    const readQuery = request.query.read;
    const filters = [];
    if (type) filters.push(eq(schema.events.type, type));
    if (readQuery === 'true') filters.push(eq(schema.events.read, true));
    if (readQuery === 'false') filters.push(eq(schema.events.read, false));
    const base = db.select().from(schema.events);
    // Two return paths because .where() changes the builder type.
    if (filters.length > 0) {
      return base
        .where(and(...filters))
        .orderBy(desc(schema.events.createdAt))
        .limit(limit)
        .offset(offset)
        .all();
    }
    return base
      .orderBy(desc(schema.events.createdAt))
      .limit(limit)
      .offset(offset)
      .all();
  });
  // Unread count (badge counter).
  app.get('/api/events/count', async () => {
    const result = db.select({ count: sql<number>`count(*)` }).from(schema.events)
      .where(eq(schema.events.read, false)).get();
    return { count: result?.count || 0 };
  });
  // Mark one event as read. Rejects non-numeric ids instead of silently
  // running an UPDATE that matches nothing and reporting success.
  app.post<{ Params: { id: string } }>('/api/events/:id/read', async (request, reply) => {
    const id = parseInt(request.params.id, 10);
    if (!Number.isFinite(id)) {
      return reply.code(400).send({ success: false, message: 'invalid event id' });
    }
    db.update(schema.events).set({ read: true }).where(eq(schema.events.id, id)).run();
    return { success: true };
  });
  // Mark every unread event as read.
  app.post('/api/events/read-all', async () => {
    db.update(schema.events).set({ read: true }).where(eq(schema.events.read, false)).run();
    return { success: true };
  });
  // Clear all events.
  app.delete('/api/events', async () => {
    db.delete(schema.events).run();
    return { success: true };
  });
}
+215
View File
@@ -0,0 +1,215 @@
import { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
import { db, schema } from '../../db/index.js';
import { config } from '../../config.js';
import { eq } from 'drizzle-orm';
// Cookie name used to authenticate the embedded monitor iframe proxy.
const MONITOR_AUTH_COOKIE = 'meta_monitor_auth';
// Upstream LDOH dashboard that /monitor-proxy/ldoh forwards to.
const LDOH_BASE_URL = 'https://ldoh.105117.xyz';
// Settings-table key holding the upstream LDOH session cookie.
const LDOH_COOKIE_SETTING_KEY = 'monitor_ldoh_cookie';
// Insert or overwrite one settings row; values are stored as JSON strings.
function upsertSetting(key: string, value: unknown) {
  const serialized = JSON.stringify(value);
  db.insert(schema.settings)
    .values({ key, value: serialized })
    .onConflictDoUpdate({ target: schema.settings.key, set: { value: serialized } })
    .run();
}
// Read a settings row and JSON-decode it. Anything that is not a plain
// string (missing row, malformed JSON, other JSON type) collapses to ''.
function getSettingString(key: string): string {
  const row = db.select().from(schema.settings).where(eq(schema.settings.key, key)).get();
  const stored = row?.value;
  if (!stored) return '';
  try {
    const decoded = JSON.parse(stored);
    if (typeof decoded === 'string') return decoded;
  } catch {
    // fall through — malformed JSON behaves like "not configured"
  }
  return '';
}
// Parse a Cookie header into name -> value. Malformed fragments (empty,
// missing '=', or empty name) are dropped; later duplicates win. Values may
// themselves contain '=' — only the first one splits.
function parseCookies(raw: string | undefined): Record<string, string> {
  const jar: Record<string, string> = {};
  if (!raw) return jar;
  for (const fragment of raw.split(';')) {
    const pair = fragment.trim();
    const sep = pair.indexOf('=');
    // sep <= 0 also covers the empty fragment (indexOf returns -1).
    if (sep <= 0) continue;
    const name = pair.slice(0, sep).trim();
    if (!name) continue;
    jar[name] = pair.slice(sep + 1).trim();
  }
  return jar;
}
// Mask a cookie for display: keep only a short prefix (and suffix for long
// values) of the part after the first '='; short values show two chars.
function maskCookieValue(cookieText: string): string {
  const trimmed = cookieText.trim();
  if (!trimmed) return '';
  const sepIdx = trimmed.indexOf('=');
  const secret = sepIdx >= 0 ? trimmed.slice(sepIdx + 1) : trimmed;
  return secret.length <= 10
    ? `${secret.slice(0, 2)}****`
    : `${secret.slice(0, 6)}****${secret.slice(-4)}`;
}
/**
 * Normalize user input into a single `ld_auth_session=...` cookie pair.
 * Accepts either the bare session value or a full Cookie header.
 *
 * Fix: the `ld_auth_session` pair is now extracted from ANY position in a
 * multi-pair cookie string. Previously only the first pair was recognized;
 * "other=1; ld_auth_session=x" got wrapped wholesale, producing the corrupt
 * "ld_auth_session=other=1; ld_auth_session=x".
 */
function normalizeLdohCookie(raw: string): string {
  const trimmed = raw.trim();
  if (!trimmed) return '';
  if (trimmed.includes('ld_auth_session=')) {
    for (const part of trimmed.split(';')) {
      const pair = part.trim();
      if (pair.startsWith('ld_auth_session=')) return pair;
    }
  }
  // No recognizable pair: treat the whole input as the bare session value.
  return `ld_auth_session=${trimmed}`;
}
// Rewrite upstream HTML/JS/CSS/JSON so absolute and root-relative LDOH URLs
// point back through this server's /monitor-proxy/ldoh/ prefix.
function rewriteProxyText(text: string): string {
  // Ordered rewrite table — order matters: attribute-level rules (src/href/
  // action) must run before the bare "/_next/ and "/api/ rules so already
  // rewritten output is not matched a second time.
  const rules: ReadonlyArray<[string, string]> = [
    ['https://ldoh.105117.xyz/', '/monitor-proxy/ldoh/'],
    ['https:\\/\\/ldoh.105117.xyz\\/', '\\/monitor-proxy\\/ldoh\\/'],
    ['src="/', 'src="/monitor-proxy/ldoh/'],
    ["src='/", "src='/monitor-proxy/ldoh/"],
    ['href="/', 'href="/monitor-proxy/ldoh/'],
    ["href='/", "href='/monitor-proxy/ldoh/"],
    ['action="/', 'action="/monitor-proxy/ldoh/'],
    ["action='/", "action='/monitor-proxy/ldoh/"],
    ['"/_next/', '"/monitor-proxy/ldoh/_next/'],
    ["'/_next/", "'/monitor-proxy/ldoh/_next/"],
    ['"\\/api/', '"\\/monitor-proxy\\/ldoh\\/api/'],
    ["'/api/", "'/monitor-proxy/ldoh/api/"],
    ['"/api/', '"/monitor-proxy/ldoh/api/'],
  ];
  let out = text;
  for (const [from, to] of rules) out = out.replaceAll(from, to);
  return out;
}
// Map upstream redirect targets back into the proxy's URL space. Absolute
// LDOH URLs and root-relative paths are prefixed; anything else (external
// hosts) passes through untouched.
function rewriteLocationHeader(location: string | null): string | null {
  if (!location) return null;
  const absolutePrefix = `${LDOH_BASE_URL}/`;
  if (location.startsWith(absolutePrefix)) {
    return `/monitor-proxy/ldoh/${location.slice(absolutePrefix.length)}`;
  }
  return location.startsWith('/') ? `/monitor-proxy/ldoh${location}` : location;
}
// Gate for the monitor proxy: the HttpOnly session cookie must carry the
// current admin token. Sends a 401 itself and returns false on failure so
// callers can simply `if (!ensureMonitorAuth(...)) return;`.
function ensureMonitorAuth(request: FastifyRequest, reply: FastifyReply): boolean {
  const sessionValue = parseCookies(request.headers.cookie)[MONITOR_AUTH_COOKIE];
  if (sessionValue === config.authToken) return true;
  reply.code(401).send({ error: 'Missing or invalid monitor session' });
  return false;
}
// Derive the upstream path from the request URL, tolerating both the bare
// prefix and the wildcard form; falls back to the '*' route param.
function resolveLdohProxyPath(request: FastifyRequest): string {
  const prefix = '/monitor-proxy/ldoh';
  // Strip the query string before comparing against the prefix.
  const pathOnly = String(request.url || '').split('?')[0] || '';
  if (pathOnly === prefix || pathOnly === `${prefix}/`) return '';
  if (pathOnly.startsWith(`${prefix}/`)) return pathOnly.slice(prefix.length + 1);
  return String((request.params as Record<string, unknown>)['*'] || '');
}
/**
 * Monitor integration routes: cookie configuration for the upstream LDOH
 * dashboard plus a same-origin reverse proxy (/monitor-proxy/ldoh/*) so the
 * dashboard can be embedded in an iframe with injected credentials.
 */
export async function monitorRoutes(app: FastifyInstance) {
  // Report whether an LDOH cookie is configured, with a masked preview.
  app.get('/api/monitor/config', async () => {
    const ldohCookie = getSettingString(LDOH_COOKIE_SETTING_KEY);
    return {
      ldohCookieConfigured: !!ldohCookie,
      ldohCookieMasked: ldohCookie ? maskCookieValue(ldohCookie) : '',
    };
  });
  // Save (or clear) the upstream LDOH session cookie.
  app.put<{ Body: { ldohCookie?: string | null } }>('/api/monitor/config', async (request, reply) => {
    const raw = String(request.body?.ldohCookie || '').trim();
    if (!raw) {
      // Empty input clears the stored cookie rather than being rejected.
      upsertSetting(LDOH_COOKIE_SETTING_KEY, '');
      return { success: true, message: 'LDOH Cookie 已清空', ldohCookieConfigured: false };
    }
    const normalized = normalizeLdohCookie(raw);
    // Minimum length 24 is a sanity check on the session value.
    if (!normalized.startsWith('ld_auth_session=') || normalized.length < 24) {
      return reply.code(400).send({ success: false, message: 'Cookie 格式无效,请填写 ld_auth_session 或其值' });
    }
    upsertSetting(LDOH_COOKIE_SETTING_KEY, normalized);
    return {
      success: true,
      message: 'LDOH Cookie 已保存',
      ldohCookieConfigured: true,
      ldohCookieMasked: maskCookieValue(normalized),
    };
  });
  // Mint a short-lived (2h) monitor session cookie carrying the admin token.
  app.post('/api/monitor/session', async (_, reply) => {
    // HttpOnly cookie for iframe proxy auth within current origin.
    reply.header(
      'Set-Cookie',
      `${MONITOR_AUTH_COOKIE}=${config.authToken}; Path=/; HttpOnly; SameSite=Lax; Max-Age=7200`,
    );
    return { success: true };
  });
  // Shared handler for all /monitor-proxy/ldoh* routes: authenticates the
  // monitor session, forwards the request upstream with the stored cookie,
  // and rewrites URLs in textual responses back into the proxy namespace.
  const handleLdohProxy = async (request: FastifyRequest, reply: FastifyReply) => {
    if (!ensureMonitorAuth(request, reply)) return;
    const storedCookie = getSettingString(LDOH_COOKIE_SETTING_KEY);
    if (!storedCookie) {
      return reply.code(400).send('LDOH cookie not configured');
    }
    const wildcardPath = resolveLdohProxyPath(request);
    const targetUrl = new URL(`${LDOH_BASE_URL}/${wildcardPath}`);
    // Re-attach the original query string parameter by parameter.
    for (const [key, value] of Object.entries(request.query as Record<string, unknown>)) {
      if (value == null) continue;
      targetUrl.searchParams.set(key, String(value));
    }
    // Only a curated subset of headers is forwarded; the stored LDOH cookie
    // replaces the client's own Cookie header.
    const upstreamHeaders: Record<string, string> = {
      cookie: storedCookie,
      accept: String(request.headers.accept || '*/*'),
      'accept-language': String(request.headers['accept-language'] || 'zh-CN,zh;q=0.9,en;q=0.8'),
      'user-agent': String(request.headers['user-agent'] || 'metapiMonitorProxy/1.0'),
    };
    if (request.headers['content-type']) {
      upstreamHeaders['content-type'] = String(request.headers['content-type']);
    }
    if (request.headers.referer) {
      upstreamHeaders.referer = String(request.headers.referer).replace('/monitor-proxy/ldoh', '');
    }
    const method = request.method.toUpperCase();
    const bodyAllowed = !['GET', 'HEAD'].includes(method);
    // NOTE(review): request.body is forwarded as-is. Fastify may already have
    // parsed a JSON body into an object, which fetch would stringify as
    // "[object Object]" — confirm non-GET proxying works for JSON payloads.
    const upstreamResponse = await fetch(targetUrl, {
      method,
      headers: upstreamHeaders,
      body: bodyAllowed ? (request.body as BodyInit | null | undefined) : undefined,
      redirect: 'manual',
    });
    // Redirects are not followed; Location is rewritten into proxy space.
    const contentType = upstreamResponse.headers.get('content-type') || '';
    const location = rewriteLocationHeader(upstreamResponse.headers.get('location'));
    if (location) reply.header('location', location);
    if (contentType) reply.header('content-type', contentType);
    const cacheControl = upstreamResponse.headers.get('cache-control');
    if (cacheControl) reply.header('cache-control', cacheControl);
    reply.code(upstreamResponse.status);
    // Textual payloads get URL rewriting; everything else streams as bytes.
    if (
      contentType.includes('text/html')
      || contentType.includes('application/javascript')
      || contentType.includes('text/javascript')
      || contentType.includes('text/css')
      || contentType.includes('application/json')
    ) {
      const text = await upstreamResponse.text();
      return reply.send(rewriteProxyText(text));
    }
    const buffer = Buffer.from(await upstreamResponse.arrayBuffer());
    return reply.send(buffer);
  };
  // Register the bare prefix, trailing slash, and wildcard forms explicitly.
  app.all('/monitor-proxy/ldoh', handleLdohProxy);
  app.all('/monitor-proxy/ldoh/', handleLdohProxy);
  app.all('/monitor-proxy/ldoh/*', handleLdohProxy);
}
+98
View File
@@ -0,0 +1,98 @@
import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { and, like, desc, eq } from 'drizzle-orm';
export async function searchRoutes(app: FastifyInstance) {
  /**
   * Global search across sites, accounts, check-in logs, proxy logs and
   * routable models. Returns up to perCategory rows per category.
   */
  app.post<{ Body: { query: string; limit?: number } }>('/api/search', async (request) => {
    const { query, limit = 20 } = request.body;
    // Guard: the body is unvalidated, so `query` may be missing or a
    // non-string (previously `query.trim` would throw a 500 for numbers).
    if (typeof query !== 'string' || query.trim().length === 0) {
      return { accounts: [], sites: [], checkinLogs: [], proxyLogs: [], models: [] };
    }
    // NOTE(review): '%' and '_' in the user query are not escaped, so they
    // act as LIKE wildcards — confirm whether that is intended.
    const q = `%${query.trim()}%`;
    // Clamp to 1..10 rows per category; a NaN or non-positive limit used to
    // produce LIMIT 0 / LIMIT NaN queries. Fallback 4 matches the default
    // limit of 20 (ceil(20 / 5)).
    const requested = Number(limit);
    const perCategory = Number.isFinite(requested) && requested > 0
      ? Math.max(1, Math.min(Math.ceil(requested / 5), 10))
      : 4;
    // Search sites by name and by URL, then merge.
    const sites = db.select().from(schema.sites)
      .where(like(schema.sites.name, q))
      .limit(perCategory).all()
      .concat(
        db.select().from(schema.sites)
          .where(like(schema.sites.url, q))
          .limit(perCategory).all()
      );
    // Deduplicate by id (a site can match both name and URL).
    const uniqueSites = [...new Map(sites.map(s => [s.id, s])).values()].slice(0, perCategory);
    // Search accounts (joined with sites for the site name).
    const accountResults = db.select().from(schema.accounts)
      .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
      .where(like(schema.accounts.username, q))
      .limit(perCategory).all();
    const accounts = accountResults.map(r => ({ ...r.accounts, site: r.sites }));
    // Search check-in logs by message text.
    const checkinLogs = db.select().from(schema.checkinLogs)
      .innerJoin(schema.accounts, eq(schema.checkinLogs.accountId, schema.accounts.id))
      .where(like(schema.checkinLogs.message, q))
      .orderBy(desc(schema.checkinLogs.createdAt))
      .limit(perCategory).all()
      .map(r => ({ ...r.checkin_logs, account: r.accounts }));
    // Search proxy logs by requested model name.
    const proxyLogs = db.select().from(schema.proxyLogs)
      .where(like(schema.proxyLogs.modelRequested, q))
      .orderBy(desc(schema.proxyLogs.createdAt))
      .limit(perCategory).all();
    // Search models, keeping only currently routable entries (available on
    // an enabled token of an active account).
    const modelRows = db.select({
      modelName: schema.tokenModelAvailability.modelName,
      tokenId: schema.accountTokens.id,
      accountId: schema.accounts.id,
      siteId: schema.sites.id,
    })
      .from(schema.tokenModelAvailability)
      .innerJoin(schema.accountTokens, eq(schema.tokenModelAvailability.tokenId, schema.accountTokens.id))
      .innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
      .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
      .where(
        and(
          like(schema.tokenModelAvailability.modelName, q),
          eq(schema.tokenModelAvailability.available, true),
          eq(schema.accountTokens.enabled, true),
          eq(schema.accounts.status, 'active'),
        ),
      )
      .limit(perCategory * 20)
      .all();
    // Aggregate per model name: distinct tokens / accounts / sites.
    const modelAgg = new Map<string, { tokenIds: Set<number>; accountIds: Set<number>; siteIds: Set<number> }>();
    for (const row of modelRows) {
      const key = row.modelName;
      if (!modelAgg.has(key)) {
        modelAgg.set(key, { tokenIds: new Set(), accountIds: new Set(), siteIds: new Set() });
      }
      const agg = modelAgg.get(key)!;
      agg.tokenIds.add(row.tokenId);
      agg.accountIds.add(row.accountId);
      agg.siteIds.add(row.siteId);
    }
    // Rank by account coverage, then token coverage, then name.
    const models = Array.from(modelAgg.entries())
      .map(([name, agg]) => ({
        name,
        accountCount: agg.accountIds.size,
        tokenCount: agg.tokenIds.size,
        siteCount: agg.siteIds.size,
      }))
      .sort((a, b) => {
        if (b.accountCount !== a.accountCount) return b.accountCount - a.accountCount;
        if (b.tokenCount !== a.tokenCount) return b.tokenCount - a.tokenCount;
        return a.name.localeCompare(b.name);
      })
      .slice(0, perCategory);
    return { accounts, sites: uniqueSites, checkinLogs, proxyLogs, models };
  });
}
@@ -0,0 +1,137 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest';
import { mkdtempSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { eq } from 'drizzle-orm';
type DbModule = typeof import('../../db/index.js');
type ConfigModule = typeof import('../../config.js');
describe('settings and auth events', () => {
  let app: FastifyInstance;
  let db: DbModule['db'];
  let schema: DbModule['schema'];
  let config: ConfigModule['config'];
  let dataDir = '';
  beforeAll(async () => {
    // DATA_DIR must point at a throwaway directory BEFORE the db/config
    // modules load, so every import below is dynamic and ordered after the
    // env tweak. NOTE(review): the temp dir is never removed — confirm
    // whether cleanup is expected here.
    dataDir = mkdtempSync(join(tmpdir(), 'metapi-settings-events-'));
    process.env.DATA_DIR = dataDir;
    await import('../../db/migrate.js');
    const dbModule = await import('../../db/index.js');
    const configModule = await import('../../config.js');
    const settingsRoutesModule = await import('./settings.js');
    const authRoutesModule = await import('./auth.js');
    db = dbModule.db;
    schema = dbModule.schema;
    config = configModule.config;
    app = Fastify();
    await app.register(settingsRoutesModule.settingsRoutes);
    await app.register(authRoutesModule.authRoutes);
  });
  beforeEach(() => {
    // Reset db tables and runtime config to known values so each test sees
    // a clean slate.
    db.delete(schema.events).run();
    db.delete(schema.settings).run();
    config.authToken = 'old-admin-token-123';
    config.proxyToken = 'sk-old-proxy-token-123';
    config.checkinCron = '0 8 * * *';
    config.balanceRefreshCron = '0 * * * *';
    config.routingFallbackUnitCost = 1;
  });
  afterAll(async () => {
    await app.close();
    delete process.env.DATA_DIR;
  });
  // A runtime-settings change must produce exactly one audit event that
  // names the changed fields.
  it('appends event when runtime settings are updated', async () => {
    const response = await app.inject({
      method: 'PUT',
      url: '/api/settings/runtime',
      payload: {
        proxyToken: 'sk-new-proxy-token-456',
        checkinCron: '5 9 * * *',
      },
    });
    expect(response.statusCode).toBe(200);
    const events = db.select().from(schema.events).all();
    expect(events.length).toBe(1);
    expect(events[0]).toMatchObject({
      type: 'status',
      title: '运行时设置已更新',
      relatedType: 'settings',
    });
    expect(events[0].message || '').toContain('代理访问 Token');
    expect(events[0].message || '').toContain('签到 Cron');
  });
  // Proxy tokens must carry the 'sk-' prefix; anything else is a 400.
  it('rejects proxy token that does not start with sk-', async () => {
    const response = await app.inject({
      method: 'PUT',
      url: '/api/settings/runtime',
      payload: {
        proxyToken: 'new-proxy-token-456',
      },
    });
    expect(response.statusCode).toBe(400);
    const body = response.json() as { message?: string };
    expect(body.message).toContain('sk-');
  });
  // Round-trip: the fallback unit cost must be applied to the live config,
  // persisted as JSON in the settings table, and echoed back by GET.
  it('persists and returns routing fallback unit cost from runtime settings', async () => {
    const updateResponse = await app.inject({
      method: 'PUT',
      url: '/api/settings/runtime',
      payload: {
        routingFallbackUnitCost: 0.25,
      },
    });
    expect(updateResponse.statusCode).toBe(200);
    const updated = updateResponse.json() as { routingFallbackUnitCost?: number };
    expect(updated.routingFallbackUnitCost).toBe(0.25);
    expect(config.routingFallbackUnitCost).toBe(0.25);
    const saved = db.select().from(schema.settings).where(eq(schema.settings.key, 'routing_fallback_unit_cost')).get();
    expect(saved).toBeTruthy();
    expect(saved?.value).toBe(JSON.stringify(0.25));
    const getResponse = await app.inject({
      method: 'GET',
      url: '/api/settings/runtime',
    });
    expect(getResponse.statusCode).toBe(200);
    const runtime = getResponse.json() as { routingFallbackUnitCost?: number };
    expect(runtime.routingFallbackUnitCost).toBe(0.25);
  });
  // Changing the admin token must leave a 'token' audit event behind.
  it('appends event when admin auth token changes', async () => {
    const response = await app.inject({
      method: 'POST',
      url: '/api/settings/auth/change',
      payload: {
        oldToken: 'old-admin-token-123',
        newToken: 'new-admin-token-456',
      },
    });
    expect(response.statusCode).toBe(200);
    const events = db.select().from(schema.events).all();
    expect(events.length).toBe(1);
    expect(events[0]).toMatchObject({
      type: 'token',
      title: '管理员登录令牌已更新',
      relatedType: 'settings',
    });
  });
});
+598
View File
@@ -0,0 +1,598 @@
import { FastifyInstance } from 'fastify';
import cron from 'node-cron';
import { config } from '../../config.js';
import { db, schema } from '../../db/index.js';
import { refreshModelsAndRebuildRoutes } from '../../services/modelService.js';
import { updateBalanceRefreshCron, updateCheckinCron } from '../../services/checkinScheduler.js';
import { sendNotification } from '../../services/notifyService.js';
import { exportBackup, importBackup, type BackupExportType } from '../../services/backupService.js';
import { startBackgroundTask } from '../../services/backgroundTaskService.js';
// Shape of config.routingWeights, reused for partial updates below.
type RoutingWeights = typeof config.routingWeights;
/**
 * PUT /api/settings/runtime request body. Every field is optional; only
 * the fields that are present are validated and applied.
 */
interface RuntimeSettingsBody {
  proxyToken?: string;
  checkinCron?: string;
  balanceRefreshCron?: string;
  webhookUrl?: string;
  barkUrl?: string;
  webhookEnabled?: boolean;
  barkEnabled?: boolean;
  serverChanEnabled?: boolean;
  serverChanKey?: string;
  smtpEnabled?: boolean;
  smtpHost?: string;
  smtpPort?: number;
  smtpSecure?: boolean;
  smtpUser?: string;
  smtpPass?: string;
  smtpFrom?: string;
  smtpTo?: string;
  notifyCooldownSec?: number;
  // Accepts either an array or a comma-separated string (see toStringList).
  adminIpAllowlist?: string[] | string;
  routingFallbackUnitCost?: number;
  routingWeights?: Partial<RoutingWeights>;
}
// Every downstream proxy token must carry this prefix.
const PROXY_TOKEN_PREFIX = 'sk-';
// A proxy token is valid when it has the 'sk-' prefix and is at least six
// characters long, prefix included.
function isValidProxyToken(value: string): boolean {
  return value.length >= 6 && value.startsWith(PROXY_TOKEN_PREFIX);
}
// Mask a secret for display: short secrets are hidden entirely, longer ones
// keep four characters on each side.
function maskSecret(value: string): string {
  if (!value) return '';
  return value.length <= 8 ? '****' : `${value.slice(0, 4)}****${value.slice(-4)}`;
}
// Insert or overwrite one settings row. Values are serialized once and used
// for both the insert and the conflict update.
function upsertSetting(key: string, value: unknown) {
  const serialized = JSON.stringify(value);
  db.insert(schema.settings)
    .values({ key, value: serialized })
    .onConflictDoUpdate({ target: schema.settings.key, set: { value: serialized } })
    .run();
}
// Best-effort audit trail for settings changes. Event-log failures are
// deliberately swallowed so they can never break a settings request.
function appendSettingsEvent(input: {
  type: 'checkin' | 'balance' | 'proxy' | 'status' | 'token';
  title: string;
  message: string;
  level?: 'info' | 'warning' | 'error';
}) {
  const record = {
    type: input.type,
    title: input.title,
    message: input.message,
    level: input.level || 'info',
    relatedType: 'settings',
    createdAt: new Date().toISOString(),
  };
  try {
    db.insert(schema.events).values(record).run();
  } catch {}
}
// Coerce to a number, falling back for negative or non-finite input.
// Note: despite the name, zero IS accepted (only n < 0 falls back).
function toPositiveNumberOrFallback(value: unknown, fallback: number) {
  const parsed = Number(value);
  return Number.isFinite(parsed) && parsed >= 0 ? parsed : fallback;
}
// Normalize either a string[] or a comma-separated string into a trimmed
// list of non-empty strings; any other input yields []. Non-string array
// members are dropped.
function toStringList(value: unknown): string[] {
  const pieces = Array.isArray(value)
    ? value.map((item) => (typeof item === 'string' ? item.trim() : ''))
    : typeof value === 'string'
      ? value.split(',').map((item) => item.trim())
      : [];
  return pieces.filter((item) => item.length > 0);
}
/**
 * Apply one imported settings row (key + JSON-decoded value) to the live
 * runtime config. Every key validates its value first; invalid or unknown
 * entries are silently ignored so a partially bad backup cannot break the
 * import. Only in-memory config is touched here — persistence is handled
 * by the import flow itself.
 */
function applyImportedSettingToRuntime(key: string, value: unknown) {
  switch (key) {
    // Cron keys: validate the expression, then update both the config and
    // the running scheduler.
    case 'checkin_cron': {
      if (typeof value !== 'string' || !value || !cron.validate(value)) return;
      config.checkinCron = value;
      updateCheckinCron(value);
      return;
    }
    case 'balance_refresh_cron': {
      if (typeof value !== 'string' || !value || !cron.validate(value)) return;
      config.balanceRefreshCron = value;
      updateBalanceRefreshCron(value);
      return;
    }
    // Proxy token keeps the 'sk-' prefix / minimum-length contract.
    case 'proxy_token': {
      if (typeof value !== 'string') return;
      const nextToken = value.trim();
      if (!isValidProxyToken(nextToken)) return;
      config.proxyToken = nextToken;
      return;
    }
    // Notification endpoints and toggles. Toggles coerce any JSON value
    // via !! rather than requiring a strict boolean.
    case 'webhook_url': {
      if (typeof value !== 'string') return;
      config.webhookUrl = value.trim();
      return;
    }
    case 'webhook_enabled': {
      config.webhookEnabled = !!value;
      return;
    }
    case 'bark_url': {
      if (typeof value !== 'string') return;
      config.barkUrl = value.trim();
      return;
    }
    case 'bark_enabled': {
      config.barkEnabled = !!value;
      return;
    }
    case 'serverchan_enabled': {
      config.serverChanEnabled = !!value;
      return;
    }
    case 'serverchan_key': {
      if (typeof value !== 'string') return;
      config.serverChanKey = value.trim();
      return;
    }
    // SMTP transport settings.
    case 'smtp_enabled': {
      config.smtpEnabled = !!value;
      return;
    }
    case 'smtp_host': {
      if (typeof value !== 'string') return;
      config.smtpHost = value.trim();
      return;
    }
    case 'smtp_port': {
      // Ports must be positive; fractional values are truncated.
      const n = Number(value);
      if (!Number.isFinite(n) || n <= 0) return;
      config.smtpPort = Math.trunc(n);
      return;
    }
    case 'smtp_secure': {
      config.smtpSecure = !!value;
      return;
    }
    case 'smtp_user': {
      if (typeof value !== 'string') return;
      config.smtpUser = value.trim();
      return;
    }
    case 'smtp_pass': {
      if (typeof value !== 'string') return;
      config.smtpPass = value.trim();
      return;
    }
    case 'smtp_from': {
      if (typeof value !== 'string') return;
      config.smtpFrom = value.trim();
      return;
    }
    case 'smtp_to': {
      if (typeof value !== 'string') return;
      config.smtpTo = value.trim();
      return;
    }
    case 'notify_cooldown_sec': {
      // Zero is allowed (no cooldown); negatives are rejected.
      const n = Number(value);
      if (!Number.isFinite(n) || n < 0) return;
      config.notifyCooldownSec = Math.trunc(n);
      return;
    }
    case 'admin_ip_allowlist': {
      config.adminIpAllowlist = toStringList(value);
      return;
    }
    // Routing weights: merged field-by-field, keeping the current value for
    // any missing or invalid entry.
    case 'routing_weights': {
      if (!value || typeof value !== 'object') return;
      const rw = value as Partial<RoutingWeights>;
      config.routingWeights = {
        baseWeightFactor: toPositiveNumberOrFallback(rw.baseWeightFactor, config.routingWeights.baseWeightFactor),
        valueScoreFactor: toPositiveNumberOrFallback(rw.valueScoreFactor, config.routingWeights.valueScoreFactor),
        costWeight: toPositiveNumberOrFallback(rw.costWeight, config.routingWeights.costWeight),
        balanceWeight: toPositiveNumberOrFallback(rw.balanceWeight, config.routingWeights.balanceWeight),
        usageWeight: toPositiveNumberOrFallback(rw.usageWeight, config.routingWeights.usageWeight),
      };
      return;
    }
    case 'routing_fallback_unit_cost': {
      // Strictly positive, with a 1e-6 floor to avoid divide-by-zero use.
      const n = Number(value);
      if (!Number.isFinite(n) || n <= 0) return;
      config.routingFallbackUnitCost = Math.max(1e-6, n);
      return;
    }
    // Unknown keys are ignored on purpose.
    default:
      return;
  }
}
/**
 * Snapshot of runtime-tunable settings for the admin UI. Secrets (ServerChan
 * key, SMTP password, proxy token) are masked before leaving the server;
 * the raw values never appear in this response.
 */
function getRuntimeSettingsResponse() {
  return {
    checkinCron: config.checkinCron,
    balanceRefreshCron: config.balanceRefreshCron,
    routingFallbackUnitCost: config.routingFallbackUnitCost,
    routingWeights: config.routingWeights,
    webhookUrl: config.webhookUrl,
    barkUrl: config.barkUrl,
    webhookEnabled: config.webhookEnabled,
    barkEnabled: config.barkEnabled,
    serverChanEnabled: config.serverChanEnabled,
    serverChanKeyMasked: maskSecret(config.serverChanKey),
    smtpEnabled: config.smtpEnabled,
    smtpHost: config.smtpHost,
    smtpPort: config.smtpPort,
    smtpSecure: config.smtpSecure,
    smtpUser: config.smtpUser,
    smtpPassMasked: maskSecret(config.smtpPass),
    smtpFrom: config.smtpFrom,
    smtpTo: config.smtpTo,
    notifyCooldownSec: config.notifyCooldownSec,
    adminIpAllowlist: config.adminIpAllowlist,
    proxyTokenMasked: maskSecret(config.proxyToken),
  };
}
export async function settingsRoutes(app: FastifyInstance) {
app.get('/api/settings/runtime', async () => {
return getRuntimeSettingsResponse();
});
app.put<{ Body: RuntimeSettingsBody }>('/api/settings/runtime', async (request, reply) => {
const body = request.body || {};
const changedLabels: string[] = [];
if (body.checkinCron !== undefined) {
if (!cron.validate(body.checkinCron)) {
return reply.code(400).send({ success: false, message: '签到 Cron 表达式无效' });
}
if (body.checkinCron !== config.checkinCron) {
changedLabels.push(`签到 Cron${config.checkinCron} -> ${body.checkinCron}`);
}
updateCheckinCron(body.checkinCron);
upsertSetting('checkin_cron', body.checkinCron);
}
if (body.balanceRefreshCron !== undefined) {
if (!cron.validate(body.balanceRefreshCron)) {
return reply.code(400).send({ success: false, message: '余额刷新 Cron 表达式无效' });
}
if (body.balanceRefreshCron !== config.balanceRefreshCron) {
changedLabels.push(`余额刷新 Cron${config.balanceRefreshCron} -> ${body.balanceRefreshCron}`);
}
updateBalanceRefreshCron(body.balanceRefreshCron);
upsertSetting('balance_refresh_cron', body.balanceRefreshCron);
}
if (body.proxyToken !== undefined) {
const proxyToken = String(body.proxyToken).trim();
if (!proxyToken.startsWith(PROXY_TOKEN_PREFIX)) {
return reply.code(400).send({ success: false, message: '下游访问令牌必须以 sk- 开头' });
}
if (proxyToken.length < 6) {
return reply.code(400).send({ success: false, message: '下游访问令牌至少 6 位(含 sk-)' });
}
if (proxyToken !== config.proxyToken) {
changedLabels.push('代理访问 Token');
}
config.proxyToken = proxyToken;
upsertSetting('proxy_token', proxyToken);
}
if (body.webhookUrl !== undefined) {
if (String(body.webhookUrl || '').trim() !== config.webhookUrl) {
changedLabels.push('Webhook 地址');
}
config.webhookUrl = String(body.webhookUrl || '').trim();
upsertSetting('webhook_url', config.webhookUrl);
}
if (body.webhookEnabled !== undefined) {
if (!!body.webhookEnabled !== config.webhookEnabled) {
changedLabels.push('Webhook 开关');
}
config.webhookEnabled = !!body.webhookEnabled;
upsertSetting('webhook_enabled', config.webhookEnabled);
}
if (body.barkUrl !== undefined) {
if (String(body.barkUrl || '').trim() !== config.barkUrl) {
changedLabels.push('Bark 地址');
}
config.barkUrl = String(body.barkUrl || '').trim();
upsertSetting('bark_url', config.barkUrl);
}
if (body.barkEnabled !== undefined) {
if (!!body.barkEnabled !== config.barkEnabled) {
changedLabels.push('Bark 开关');
}
config.barkEnabled = !!body.barkEnabled;
upsertSetting('bark_enabled', config.barkEnabled);
}
if (body.serverChanEnabled !== undefined) {
if (!!body.serverChanEnabled !== config.serverChanEnabled) {
changedLabels.push('Server 酱开关');
}
config.serverChanEnabled = !!body.serverChanEnabled;
upsertSetting('serverchan_enabled', config.serverChanEnabled);
}
if (body.serverChanKey !== undefined) {
if (String(body.serverChanKey || '').trim() !== config.serverChanKey) {
changedLabels.push('Server 酱密钥');
}
config.serverChanKey = String(body.serverChanKey || '').trim();
upsertSetting('serverchan_key', config.serverChanKey);
}
if (body.smtpEnabled !== undefined) {
if (!!body.smtpEnabled !== config.smtpEnabled) {
changedLabels.push('SMTP 开关');
}
config.smtpEnabled = !!body.smtpEnabled;
upsertSetting('smtp_enabled', config.smtpEnabled);
}
if (body.smtpHost !== undefined) {
if (String(body.smtpHost || '').trim() !== config.smtpHost) {
changedLabels.push('SMTP 主机');
}
config.smtpHost = String(body.smtpHost || '').trim();
upsertSetting('smtp_host', config.smtpHost);
}
if (body.smtpPort !== undefined) {
const smtpPort = Number(body.smtpPort);
if (!Number.isFinite(smtpPort) || smtpPort <= 0) {
return reply.code(400).send({ success: false, message: 'SMTP 端口无效' });
}
if (Math.trunc(smtpPort) !== config.smtpPort) {
changedLabels.push(`SMTP 端口(${config.smtpPort} -> ${Math.trunc(smtpPort)}`);
}
config.smtpPort = Math.trunc(smtpPort);
upsertSetting('smtp_port', config.smtpPort);
}
if (body.smtpSecure !== undefined) {
if (!!body.smtpSecure !== config.smtpSecure) {
changedLabels.push('SMTP 安全连接');
}
config.smtpSecure = !!body.smtpSecure;
upsertSetting('smtp_secure', config.smtpSecure);
}
if (body.smtpUser !== undefined) {
if (String(body.smtpUser || '').trim() !== config.smtpUser) {
changedLabels.push('SMTP 用户');
}
config.smtpUser = String(body.smtpUser || '').trim();
upsertSetting('smtp_user', config.smtpUser);
}
if (body.smtpPass !== undefined) {
if (String(body.smtpPass || '').trim() !== config.smtpPass) {
changedLabels.push('SMTP 密码');
}
config.smtpPass = String(body.smtpPass || '').trim();
upsertSetting('smtp_pass', config.smtpPass);
}
if (body.smtpFrom !== undefined) {
if (String(body.smtpFrom || '').trim() !== config.smtpFrom) {
changedLabels.push('发件人地址');
}
config.smtpFrom = String(body.smtpFrom || '').trim();
upsertSetting('smtp_from', config.smtpFrom);
}
if (body.smtpTo !== undefined) {
if (String(body.smtpTo || '').trim() !== config.smtpTo) {
changedLabels.push('收件人地址');
}
config.smtpTo = String(body.smtpTo || '').trim();
upsertSetting('smtp_to', config.smtpTo);
}
if (body.notifyCooldownSec !== undefined) {
const notifyCooldownSec = Number(body.notifyCooldownSec);
if (!Number.isFinite(notifyCooldownSec) || notifyCooldownSec < 0) {
return reply.code(400).send({ success: false, message: '告警冷静期必须是大于等于 0 的数字(秒)' });
}
const nextCooldown = Math.trunc(notifyCooldownSec);
if (nextCooldown !== config.notifyCooldownSec) {
changedLabels.push(`告警冷静期(${config.notifyCooldownSec}s -> ${nextCooldown}s`);
}
config.notifyCooldownSec = nextCooldown;
upsertSetting('notify_cooldown_sec', config.notifyCooldownSec);
}
if (body.adminIpAllowlist !== undefined) {
const nextAllowlist = toStringList(body.adminIpAllowlist);
if (JSON.stringify(nextAllowlist) !== JSON.stringify(config.adminIpAllowlist)) {
changedLabels.push('管理端 IP 白名单');
}
config.adminIpAllowlist = nextAllowlist;
upsertSetting('admin_ip_allowlist', nextAllowlist);
}
if (body.routingWeights !== undefined) {
const nextWeights: RoutingWeights = {
baseWeightFactor: toPositiveNumberOrFallback(body.routingWeights.baseWeightFactor, config.routingWeights.baseWeightFactor),
valueScoreFactor: toPositiveNumberOrFallback(body.routingWeights.valueScoreFactor, config.routingWeights.valueScoreFactor),
costWeight: toPositiveNumberOrFallback(body.routingWeights.costWeight, config.routingWeights.costWeight),
balanceWeight: toPositiveNumberOrFallback(body.routingWeights.balanceWeight, config.routingWeights.balanceWeight),
usageWeight: toPositiveNumberOrFallback(body.routingWeights.usageWeight, config.routingWeights.usageWeight),
};
if (JSON.stringify(nextWeights) !== JSON.stringify(config.routingWeights)) {
changedLabels.push('路由权重');
}
config.routingWeights = nextWeights;
upsertSetting('routing_weights', nextWeights);
}
if (body.routingFallbackUnitCost !== undefined) {
const nextRoutingFallbackUnitCost = Number(body.routingFallbackUnitCost);
if (!Number.isFinite(nextRoutingFallbackUnitCost) || nextRoutingFallbackUnitCost <= 0) {
return reply.code(400).send({ success: false, message: '无价模型默认单价必须是大于 0 的数字' });
}
const normalized = Math.max(1e-6, nextRoutingFallbackUnitCost);
if (Math.abs(normalized - config.routingFallbackUnitCost) > 1e-12) {
changedLabels.push(`无价模型默认单价(${config.routingFallbackUnitCost} -> ${normalized}`);
}
config.routingFallbackUnitCost = normalized;
upsertSetting('routing_fallback_unit_cost', normalized);
}
if (changedLabels.length > 0) {
let eventType: 'checkin' | 'balance' | 'proxy' | 'status' | 'token' = 'status';
if (changedLabels.length === 1) {
if (changedLabels[0].startsWith('签到 Cron')) eventType = 'checkin';
else if (changedLabels[0].startsWith('余额刷新 Cron')) eventType = 'balance';
else if (changedLabels[0] === '代理访问 Token') eventType = 'proxy';
}
appendSettingsEvent({
type: eventType,
title: '运行时设置已更新',
message: `已更新:${changedLabels.join('、')}`,
});
}
return {
success: true,
message: '运行时设置已更新',
...getRuntimeSettingsResponse(),
};
});
app.get<{ Querystring: { type?: string } }>('/api/settings/backup/export', async (request, reply) => {
const rawType = String(request.query.type || 'all').trim().toLowerCase();
const type: BackupExportType = rawType === 'accounts' || rawType === 'preferences' ? rawType : 'all';
if (rawType && !['all', 'accounts', 'preferences'].includes(rawType)) {
return reply.code(400).send({ success: false, message: '导出类型无效,仅支持 all/accounts/preferences' });
}
return exportBackup(type);
});
app.post<{ Body: { data?: Record<string, unknown> } }>('/api/settings/backup/import', async (request, reply) => {
const payload = request.body?.data;
if (!payload || typeof payload !== 'object' || Array.isArray(payload)) {
return reply.code(400).send({ success: false, message: '导入数据格式错误:需要 JSON 对象' });
}
try {
const result = importBackup(payload);
for (const item of result.appliedSettings) {
applyImportedSettingToRuntime(item.key, item.value);
}
return {
success: true,
message: '导入完成',
...result,
};
} catch (err: any) {
return reply.code(400).send({
success: false,
message: err?.message || '导入失败',
});
}
});
app.post('/api/settings/notify/test', async (_, reply) => {
try {
const result = await sendNotification(
'测试通知',
'您好,这是一条来自系统设置的连通性测试通知,您的通知相关配置目前工作正常!',
'info',
{
bypassThrottle: true,
requireChannel: true,
throwOnFailure: true,
},
);
return {
success: true,
message: `测试通知已发送(成功 ${result.succeeded}/${result.attempted}`,
};
} catch (err: any) {
return reply.code(400).send({
success: false,
message: err?.message || '测试通知发送失败',
});
}
});
app.post('/api/settings/maintenance/clear-cache', async (_, reply) => {
const deletedModelAvailability = db.delete(schema.modelAvailability).run().changes;
const deletedRouteChannels = db.delete(schema.routeChannels).run().changes;
const deletedTokenRoutes = db.delete(schema.tokenRoutes).run().changes;
const { task, reused } = startBackgroundTask(
{
type: 'maintenance',
title: '清理缓存并重建路由',
dedupeKey: 'refresh-models-and-rebuild-routes',
notifyOnFailure: true,
successMessage: (currentTask) => {
const rebuild = (currentTask.result as any)?.rebuild;
if (!rebuild) return '缓存清理后重建路由已完成';
return `缓存清理后重建完成:新增路由 ${rebuild.createdRoutes},新增通道 ${rebuild.createdChannels},移除通道 ${rebuild.removedChannels}`;
},
failureMessage: (currentTask) => `缓存清理后重建失败:${currentTask.error || 'unknown error'}`,
},
async () => refreshModelsAndRebuildRoutes(),
);
return reply.code(202).send({
success: true,
queued: true,
reused,
jobId: task.id,
message: '缓存已清理,重建路由已开始执行',
deletedModelAvailability,
deletedRouteChannels,
deletedTokenRoutes,
});
});
app.post('/api/settings/maintenance/clear-usage', async () => {
const deletedProxyLogs = db.delete(schema.proxyLogs).run().changes;
db.update(schema.routeChannels).set({
successCount: 0,
failCount: 0,
totalLatencyMs: 0,
totalCost: 0,
lastUsedAt: null,
lastFailAt: null,
cooldownUntil: null,
}).run();
db.update(schema.accounts).set({
balanceUsed: 0,
updatedAt: new Date().toISOString(),
}).run();
appendSettingsEvent({
type: 'status',
title: '占用统计与使用日志已清理',
message: `已清理使用日志 ${deletedProxyLogs} 条,并重置路由与账号占用统计`,
level: 'warning',
});
return {
success: true,
message: '占用统计已清理',
deletedProxyLogs,
};
});
}
@@ -0,0 +1,74 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { describe, expect, it, beforeAll, beforeEach, afterAll } from 'vitest';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { mkdtempSync } from 'node:fs';
import { eq } from 'drizzle-orm';
// Convenience alias for the dynamically imported db module's exported types.
type DbModule = typeof import('../../db/index.js');
// Integration test: toggling a site's status must cascade to its accounts.
describe('sites status cascade', () => {
  let app: FastifyInstance;
  let db: DbModule['db'];
  let schema: DbModule['schema'];
  let dataDir = '';
  beforeAll(async () => {
    // DATA_DIR must be set BEFORE the db modules are imported, because they
    // resolve the database path from the environment at import time.
    dataDir = mkdtempSync(join(tmpdir(), 'metapi-sites-status-cascade-'));
    process.env.DATA_DIR = dataDir;
    await import('../../db/migrate.js');
    const dbModule = await import('../../db/index.js');
    const routesModule = await import('./sites.js');
    db = dbModule.db;
    schema = dbModule.schema;
    app = Fastify();
    await app.register(routesModule.sitesRoutes);
  });
  beforeEach(() => {
    // Clean slate for every test; accounts first (they reference sites).
    db.delete(schema.accounts).run();
    db.delete(schema.sites).run();
  });
  afterAll(async () => {
    await app.close();
    delete process.env.DATA_DIR;
  });
  it('disables and re-enables related accounts with site status', async () => {
    // Fixture: one active site with one active account.
    const site = db.insert(schema.sites).values({
      name: 'status-site',
      url: 'https://status-site.example.com',
      platform: 'new-api',
    }).returning().get();
    const account = db.insert(schema.accounts).values({
      siteId: site.id,
      username: 'status-user',
      accessToken: 'access-token',
      status: 'active',
    }).returning().get();
    // Disabling the site should cascade a 'disabled' status to its accounts.
    const disableResp = await app.inject({
      method: 'PUT',
      url: `/api/sites/${site.id}`,
      payload: { status: 'disabled' },
    });
    expect(disableResp.statusCode).toBe(200);
    const disabledAccount = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
    expect(disabledAccount?.status).toBe('disabled');
    // Re-enabling the site should restore the disabled account to 'active'.
    const enableResp = await app.inject({
      method: 'PUT',
      url: `/api/sites/${site.id}`,
      payload: { status: 'active' },
    });
    expect(enableResp.statusCode).toBe(200);
    const enabledAccount = db.select().from(schema.accounts).where(eq(schema.accounts.id, account.id)).get();
    expect(enabledAccount?.status).toBe('active');
  });
});
+125
View File
@@ -0,0 +1,125 @@
import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { and, eq } from 'drizzle-orm';
import { detectSite } from '../../services/siteDetector.js';
/**
 * Coerces arbitrary input into a canonical site status.
 * Returns 'active' or 'disabled' (trimmed, case-insensitive match),
 * or null for anything else — including non-strings, null, and undefined.
 */
function normalizeSiteStatus(input: unknown): 'active' | 'disabled' | null {
  if (typeof input !== 'string') return null;
  const candidate = input.trim().toLowerCase();
  return candidate === 'active' || candidate === 'disabled' ? candidate : null;
}
/**
 * Registers site CRUD routes plus platform detection.
 * Status changes cascade to related accounts: disabling a site disables all
 * of its accounts; re-enabling restores only the accounts it had disabled.
 */
export async function sitesRoutes(app: FastifyInstance) {
  // List all sites
  app.get('/api/sites', async () => {
    return db.select().from(schema.sites).all();
  });
  // Add a site; platform is auto-detected when not supplied.
  app.post<{ Body: { name: string; url: string; platform?: string; apiKey?: string; status?: string } }>('/api/sites', async (request, reply) => {
    const { name, url, platform, apiKey, status } = request.body;
    const normalizedStatus = normalizeSiteStatus(status);
    if (status !== undefined && !normalizedStatus) {
      return reply.code(400).send({ error: 'Invalid site status. Expected active or disabled.' });
    }
    let detectedPlatform = platform;
    if (!detectedPlatform) {
      const detected = await detectSite(url);
      detectedPlatform = detected?.platform;
    }
    if (!detectedPlatform) {
      // FIX: respond 400 instead of a 200 body carrying an error field,
      // consistent with the other validation failures in this handler.
      return reply.code(400).send({ error: 'Could not detect platform. Please specify manually.' });
    }
    const result = db.insert(schema.sites).values({
      name,
      // Strip trailing slashes so URLs compare/join consistently.
      url: url.replace(/\/+$/, ''),
      platform: detectedPlatform,
      apiKey,
      status: normalizedStatus ?? 'active',
    }).returning().get();
    return result;
  });
  // Update a site (partial update; status changes cascade to accounts).
  app.put<{ Params: { id: string }; Body: { name?: string; url?: string; platform?: string; apiKey?: string; status?: string } }>('/api/sites/:id', async (request, reply) => {
    const id = parseInt(request.params.id);
    if (Number.isNaN(id)) {
      return reply.code(400).send({ error: 'Invalid site id' });
    }
    const existingSite = db.select().from(schema.sites).where(eq(schema.sites.id, id)).get();
    if (!existingSite) {
      return reply.code(404).send({ error: 'Site not found' });
    }
    const updates: any = {};
    const body = request.body;
    const normalizedStatus = normalizeSiteStatus(body.status);
    if (body.status !== undefined && !normalizedStatus) {
      return reply.code(400).send({ error: 'Invalid site status. Expected active or disabled.' });
    }
    // Only fields explicitly present in the body are written.
    if (body.name !== undefined) updates.name = body.name;
    if (body.url !== undefined) updates.url = body.url.replace(/\/+$/, '');
    if (body.platform !== undefined) updates.platform = body.platform;
    if (body.apiKey !== undefined) updates.apiKey = body.apiKey;
    if (body.status !== undefined) updates.status = normalizedStatus;
    updates.updatedAt = new Date().toISOString();
    db.update(schema.sites).set(updates).where(eq(schema.sites.id, id)).run();
    if (body.status !== undefined && normalizedStatus) {
      const now = new Date().toISOString();
      if (normalizedStatus === 'disabled') {
        // Disabling a site disables every one of its accounts.
        db.update(schema.accounts)
          .set({ status: 'disabled', updatedAt: now })
          .where(eq(schema.accounts.siteId, id))
          .run();
        // Best-effort audit event; failure must not break the update.
        try {
          db.insert(schema.events).values({
            type: 'status',
            title: '站点已禁用',
            message: `${existingSite.name} 已禁用,关联账号已全部置为禁用`,
            level: 'warning',
            relatedId: id,
            relatedType: 'site',
          }).run();
        } catch {}
      } else {
        // Re-enabling only touches accounts that are currently 'disabled',
        // leaving accounts in other states (e.g. error) untouched.
        db.update(schema.accounts)
          .set({ status: 'active', updatedAt: now })
          .where(and(eq(schema.accounts.siteId, id), eq(schema.accounts.status, 'disabled')))
          .run();
        // Best-effort audit event; failure must not break the update.
        try {
          db.insert(schema.events).values({
            type: 'status',
            title: '站点已启用',
            message: `${existingSite.name} 已启用,关联禁用账号已恢复为活跃`,
            level: 'info',
            relatedId: id,
            relatedType: 'site',
          }).run();
        } catch {}
      }
    }
    return db.select().from(schema.sites).where(eq(schema.sites.id, id)).get();
  });
  // Delete a site
  app.delete<{ Params: { id: string } }>('/api/sites/:id', async (request, reply) => {
    const id = parseInt(request.params.id);
    // FIX: a non-numeric id previously deleted nothing yet still reported
    // success; reject it explicitly, matching the PUT handler's validation.
    if (Number.isNaN(id)) {
      return reply.code(400).send({ error: 'Invalid site id' });
    }
    db.delete(schema.sites).where(eq(schema.sites.id, id)).run();
    return { success: true };
  });
  // Detect platform for a URL.
  // NOTE(review): this endpoint intentionally returns 200 with an error field
  // on failure; kept as-is since the detection UI may rely on that shape.
  app.post<{ Body: { url: string } }>('/api/sites/detect', async (request) => {
    const result = await detectSite(request.body.url);
    return result || { error: 'Could not detect platform' };
  });
}
@@ -0,0 +1,156 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { describe, expect, it, beforeAll, beforeEach, afterAll } from 'vitest';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { mkdtempSync } from 'node:fs';
import { sql } from 'drizzle-orm';
import { formatLocalDate } from '../../services/localTimeService.js';
// Convenience alias for the dynamically imported db module's exported types.
type DbModule = typeof import('../../db/index.js');
// Integration tests for /api/stats/dashboard: disabled sites must be excluded
// from the totals, and 'skipped' check-ins must be counted as successes.
describe('stats dashboard filters disabled sites', () => {
  let app: FastifyInstance;
  let db: DbModule['db'];
  let schema: DbModule['schema'];
  let dataDir = '';
  beforeAll(async () => {
    // DATA_DIR must be set BEFORE the db modules are imported, because they
    // resolve the database path from the environment at import time.
    dataDir = mkdtempSync(join(tmpdir(), 'metapi-stats-site-status-'));
    process.env.DATA_DIR = dataDir;
    await import('../../db/migrate.js');
    const dbModule = await import('../../db/index.js');
    const routesModule = await import('./stats.js');
    db = dbModule.db;
    schema = dbModule.schema;
    app = Fastify();
    await app.register(routesModule.statsRoutes);
  });
  beforeEach(() => {
    // Clean slate for every test; child tables are cleared before parents.
    db.delete(schema.proxyLogs).run();
    db.delete(schema.checkinLogs).run();
    db.delete(schema.routeChannels).run();
    db.delete(schema.tokenRoutes).run();
    db.delete(schema.tokenModelAvailability).run();
    db.delete(schema.modelAvailability).run();
    db.delete(schema.accountTokens).run();
    db.delete(schema.accounts).run();
    db.delete(schema.sites).run();
  });
  afterAll(async () => {
    await app.close();
    delete process.env.DATA_DIR;
  });
  it('excludes disabled-site balances from dashboard totals', async () => {
    // Fixture: one active site and one disabled site, each with an active account.
    const activeSite = db.insert(schema.sites).values({
      name: 'active-site',
      url: 'https://active-site.example.com',
      platform: 'new-api',
    }).returning().get();
    const disabledSite = db.insert(schema.sites).values({
      name: 'disabled-site',
      url: 'https://disabled-site.example.com',
      platform: 'new-api',
    }).returning().get();
    // Flip the second site to disabled via raw SQL (bypasses the routes layer).
    db.run(sql`update sites set status = 'disabled' where id = ${disabledSite.id}`);
    db.insert(schema.accounts).values({
      siteId: activeSite.id,
      username: 'active-user',
      accessToken: 'active-token',
      balance: 100,
      status: 'active',
    }).run();
    // This account stays 'active' itself — only its SITE is disabled — so the
    // test proves filtering happens at the site level.
    db.insert(schema.accounts).values({
      siteId: disabledSite.id,
      username: 'disabled-user',
      accessToken: 'disabled-token',
      balance: 900,
      status: 'active',
    }).run();
    const response = await app.inject({
      method: 'GET',
      url: '/api/stats/dashboard',
    });
    expect(response.statusCode).toBe(200);
    const body = response.json() as {
      totalBalance: number;
      activeAccounts: number;
      totalAccounts: number;
    };
    // Only the active site's account (balance 100) should be counted.
    expect(body.totalBalance).toBe(100);
    expect(body.activeAccounts).toBe(1);
    expect(body.totalAccounts).toBe(1);
  });
  it('treats skipped checkins as successful in dashboard stats', async () => {
    const today = formatLocalDate(new Date());
    const site = db.insert(schema.sites).values({
      name: 'checkin-site',
      url: 'https://checkin-site.example.com',
      platform: 'new-api',
    }).returning().get();
    const account = db.insert(schema.accounts).values({
      siteId: site.id,
      username: 'checkin-user',
      accessToken: 'token',
      balance: 10,
      status: 'active',
    }).returning().get();
    // Three same-day logs: success, skipped (already checked in), and failed.
    db.insert(schema.checkinLogs).values([
      {
        accountId: account.id,
        status: 'success',
        message: 'checked in',
        reward: '1',
        createdAt: `${today} 09:00:00`,
      },
      {
        accountId: account.id,
        status: 'skipped',
        message: 'today already checked in',
        reward: '',
        createdAt: `${today} 09:10:00`,
      },
      {
        accountId: account.id,
        status: 'failed',
        message: 'checkin failed',
        reward: '',
        createdAt: `${today} 09:20:00`,
      },
    ]).run();
    const response = await app.inject({
      method: 'GET',
      url: '/api/stats/dashboard',
    });
    expect(response.statusCode).toBe(200);
    const body = response.json() as {
      todayCheckin: {
        success: number;
        failed: number;
        total: number;
      };
    };
    // 'skipped' counts toward success: 2 successes, 1 failure, 3 total.
    expect(body.todayCheckin).toEqual({
      success: 2,
      failed: 1,
      total: 3,
    });
  });
});
@@ -0,0 +1,142 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { describe, expect, it, beforeAll, beforeEach, afterAll } from 'vitest';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { mkdtempSync } from 'node:fs';
import {
formatLocalDate,
formatUtcSqlDateTime,
getLocalDayRangeUtc,
parseStoredUtcDateTime,
} from '../../services/localTimeService.js';
// Convenience alias for the dynamically imported db module's exported types.
type DbModule = typeof import('../../db/index.js');
// Integration tests for /api/stats/dashboard: today's reward falls back to
// the income snapshot when reward text is empty, and today's spend only
// counts logs inside the local-day window.
describe('stats dashboard today reward fallback', () => {
  let app: FastifyInstance;
  let db: DbModule['db'];
  let schema: DbModule['schema'];
  let dataDir = '';
  beforeAll(async () => {
    // DATA_DIR must be set BEFORE the db modules are imported, because they
    // resolve the database path from the environment at import time.
    dataDir = mkdtempSync(join(tmpdir(), 'metapi-stats-reward-fallback-'));
    process.env.DATA_DIR = dataDir;
    await import('../../db/migrate.js');
    const dbModule = await import('../../db/index.js');
    const routesModule = await import('./stats.js');
    db = dbModule.db;
    schema = dbModule.schema;
    app = Fastify();
    await app.register(routesModule.statsRoutes);
  });
  beforeEach(() => {
    // Clean slate for every test; child tables are cleared before parents.
    db.delete(schema.proxyLogs).run();
    db.delete(schema.checkinLogs).run();
    db.delete(schema.routeChannels).run();
    db.delete(schema.tokenRoutes).run();
    db.delete(schema.tokenModelAvailability).run();
    db.delete(schema.modelAvailability).run();
    db.delete(schema.accountTokens).run();
    db.delete(schema.accounts).run();
    db.delete(schema.sites).run();
  });
  afterAll(async () => {
    await app.close();
    delete process.env.DATA_DIR;
  });
  it('uses today income value for dashboard todayReward when reward text is empty', async () => {
    const today = formatLocalDate(new Date());
    const site = db.insert(schema.sites).values({
      name: 'stats-site',
      url: 'https://stats-site.example.com',
      platform: 'new-api',
    }).returning().get();
    // Account carries a todayIncomeSnapshot (latest 4.8) in extraConfig;
    // the dashboard should fall back to it when the checkin reward is blank.
    const account = db.insert(schema.accounts).values({
      siteId: site.id,
      username: 'stats-user',
      accessToken: 'token',
      status: 'active',
      extraConfig: JSON.stringify({
        todayIncomeSnapshot: {
          day: today,
          baseline: 4.8,
          latest: 4.8,
          updatedAt: `${today}T10:00:00.000Z`,
        },
      }),
    }).returning().get();
    // Successful check-in with an empty reward string triggers the fallback.
    db.insert(schema.checkinLogs).values({
      accountId: account.id,
      status: 'success',
      message: 'checked in',
      reward: '',
      createdAt: `${today} 10:01:00`,
    }).run();
    const response = await app.inject({
      method: 'GET',
      url: '/api/stats/dashboard',
    });
    expect(response.statusCode).toBe(200);
    const body = response.json() as { todayReward: number };
    expect(body.todayReward).toBe(4.8);
  });
  it('counts dashboard today spend only inside local-day range', async () => {
    const site = db.insert(schema.sites).values({
      name: 'stats-spend-site',
      url: 'https://stats-spend.example.com',
      platform: 'new-api',
    }).returning().get();
    const account = db.insert(schema.accounts).values({
      siteId: site.id,
      username: 'stats-spend-user',
      accessToken: 'token',
      status: 'active',
    }).returning().get();
    // Build three timestamps: just before today's window, inside it, and
    // just after it — only the middle log should be summed.
    const { startUtc, endUtc } = getLocalDayRangeUtc();
    const startDate = parseStoredUtcDateTime(startUtc)!;
    const endDate = parseStoredUtcDateTime(endUtc)!;
    const beforeStart = formatUtcSqlDateTime(new Date(startDate.getTime() - 60_000));
    const inRange = formatUtcSqlDateTime(new Date(startDate.getTime() + 60_000));
    const afterEnd = formatUtcSqlDateTime(new Date(endDate.getTime() + 60_000));
    db.insert(schema.proxyLogs).values([
      {
        accountId: account.id,
        status: 'success',
        estimatedCost: 1,
        createdAt: beforeStart,
      },
      {
        accountId: account.id,
        status: 'success',
        estimatedCost: 3,
        createdAt: inRange,
      },
      {
        accountId: account.id,
        status: 'success',
        estimatedCost: 5,
        createdAt: afterEnd,
      },
    ]).run();
    const response = await app.inject({
      method: 'GET',
      url: '/api/stats/dashboard',
    });
    expect(response.statusCode).toBe(200);
    const body = response.json() as { todaySpend: number };
    // Only the in-range log (cost 3) counts toward today's spend.
    expect(body.todaySpend).toBe(3);
  });
});
+637
View File
@@ -0,0 +1,637 @@
import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { and, desc, gte, eq, lt } from 'drizzle-orm';
import {
refreshModelsForAccount,
refreshModelsAndRebuildRoutes,
rebuildTokenRoutesFromAvailability,
} from '../../services/modelService.js';
import { buildModelAnalysis } from '../../services/modelAnalysisService.js';
import { fallbackTokenCost, fetchModelPricingCatalog } from '../../services/modelPricingService.js';
import { getUpstreamModelDescriptionsCached } from '../../services/upstreamModelDescriptionService.js';
import { getRunningTaskByDedupeKey, startBackgroundTask } from '../../services/backgroundTaskService.js';
import { parseCheckinRewardAmount } from '../../services/checkinRewardParser.js';
import { estimateRewardWithTodayIncomeFallback } from '../../services/todayIncomeRewardService.js';
import {
getLocalDayRangeUtc,
getLocalRangeStartUtc,
parseStoredUtcDateTime,
toLocalDayKeyFromStoredUtc,
} from '../../services/localTimeService.js';
/**
 * Interprets a query-string flag as a boolean.
 * Accepts '1', 'true', or 'yes' (trimmed, case-insensitive); everything
 * else — including undefined and the empty string — is false.
 */
function parseBooleanFlag(raw?: string): boolean {
  const flag = raw?.trim().toLowerCase();
  return flag === '1' || flag === 'true' || flag === 'yes';
}
// Cache TTLs for the models-marketplace endpoint: the base listing expires
// quickly, while the pricing-enriched listing is kept longer (presumably
// because pricing aggregation is the costlier path — confirm against the
// marketplace handler).
const MODELS_MARKETPLACE_BASE_TTL_MS = 15_000;
const MODELS_MARKETPLACE_PRICING_TTL_MS = 90_000;
// One cached marketplace payload plus its absolute expiry timestamp (ms epoch).
type ModelsMarketplaceCacheEntry = {
  expiresAt: number;
  models: any[];
};
// In-process cache keyed by whether pricing data was included.
const modelsMarketplaceCache = new Map<'base' | 'pricing', ModelsMarketplaceCacheEntry>();
/**
 * Looks up the cached marketplace payload for the requested variant.
 * Expired entries are evicted on read. Returns null on miss or expiry.
 */
function readModelsMarketplaceCache(includePricing: boolean): any[] | null {
  const cacheKey = includePricing ? 'pricing' : 'base';
  const entry = modelsMarketplaceCache.get(cacheKey);
  if (!entry) return null;
  // Lazy eviction: drop the entry once its expiry timestamp has passed.
  if (Date.now() >= entry.expiresAt) {
    modelsMarketplaceCache.delete(cacheKey);
    return null;
  }
  return entry.models;
}
function writeModelsMarketplaceCache(includePricing: boolean, models: any[]): void {
const ttl = includePricing ? MODELS_MARKETPLACE_PRICING_TTL_MS : MODELS_MARKETPLACE_BASE_TTL_MS;
const key = includePricing ? 'pricing' : 'base';
modelsMarketplaceCache.set(key, {
expiresAt: Date.now() + ttl,
models,
});
}
export async function statsRoutes(app: FastifyInstance) {
// Dashboard summary
app.get('/api/stats/dashboard', async () => {
const accountRows = db.select().from(schema.accounts)
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.where(eq(schema.sites.status, 'active'))
.all();
const accounts = accountRows.map((row) => row.accounts);
const totalBalance = accounts.reduce((sum, a) => sum + (a.balance || 0), 0);
const activeCount = accounts.filter((a) => a.status === 'active').length;
const { localDay: today, startUtc: todayStartUtc, endUtc: todayEndUtc } = getLocalDayRangeUtc();
const todayCheckinRows = db.select().from(schema.checkinLogs)
.innerJoin(schema.accounts, eq(schema.checkinLogs.accountId, schema.accounts.id))
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.where(and(
gte(schema.checkinLogs.createdAt, todayStartUtc),
lt(schema.checkinLogs.createdAt, todayEndUtc),
eq(schema.sites.status, 'active'),
))
.all();
const todayCheckins = todayCheckinRows.map((row) => row.checkin_logs);
const checkinFailed = todayCheckins.filter((c) => c.status === 'failed').length;
const checkinSuccess = todayCheckins.length - checkinFailed;
const rewardByAccount: Record<number, number> = {};
const successCountByAccount: Record<number, number> = {};
const parsedRewardCountByAccount: Record<number, number> = {};
for (const row of todayCheckinRows) {
const checkin = row.checkin_logs;
if (checkin.status !== 'success') continue;
const accountId = row.accounts.id;
successCountByAccount[accountId] = (successCountByAccount[accountId] || 0) + 1;
const rewardValue = parseCheckinRewardAmount(checkin.reward) || parseCheckinRewardAmount(checkin.message);
if (rewardValue <= 0) continue;
rewardByAccount[accountId] = (rewardByAccount[accountId] || 0) + rewardValue;
parsedRewardCountByAccount[accountId] = (parsedRewardCountByAccount[accountId] || 0) + 1;
}
const nowTs = Date.now();
const last24hTs = nowTs - 86400000;
const last7dDate = getLocalRangeStartUtc(7);
const recentProxyLogs = db.select().from(schema.proxyLogs)
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.where(and(gte(schema.proxyLogs.createdAt, last7dDate), eq(schema.sites.status, 'active')))
.all()
.map((row) => row.proxy_logs);
const allProxyLogs = db.select()
.from(schema.proxyLogs)
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.where(eq(schema.sites.status, 'active'))
.all();
const proxy24hLogs = recentProxyLogs.filter((log) => {
if (!log.createdAt) return false;
const ts = parseStoredUtcDateTime(log.createdAt)?.getTime() ?? Number.NaN;
return Number.isFinite(ts) && ts >= last24hTs;
});
const proxySuccess = proxy24hLogs.filter((l) => l.status === 'success').length;
const proxyFailed = proxy24hLogs.filter((l) => l.status === 'failed').length;
const totalTokens = proxy24hLogs.reduce((sum, l) => sum + (l.totalTokens || 0), 0);
const totalUsed = allProxyLogs.reduce((sum, row) => {
const log = row.proxy_logs;
const platform = row.sites?.platform || 'new-api';
const explicitCost = typeof log.estimatedCost === 'number' ? log.estimatedCost : 0;
if (explicitCost > 0) return sum + explicitCost;
return sum + fallbackTokenCost(log.totalTokens || 0, platform);
}, 0);
const todayProxyLogs = recentProxyLogs.filter((log) => {
if (!log.createdAt) return false;
return log.createdAt >= todayStartUtc && log.createdAt < todayEndUtc;
});
const todaySpend = todayProxyLogs.reduce((sum, log) => {
const cost = typeof log.estimatedCost === 'number' ? log.estimatedCost : 0;
return sum + cost;
}, 0);
const todayReward = accounts.reduce((sum, account) => sum + estimateRewardWithTodayIncomeFallback({
day: today,
successCount: successCountByAccount[account.id] || 0,
parsedRewardCount: parsedRewardCountByAccount[account.id] || 0,
rewardSum: rewardByAccount[account.id] || 0,
extraConfig: account.extraConfig,
}), 0);
const modelAnalysis = buildModelAnalysis(recentProxyLogs, { days: 7 });
return {
totalBalance,
totalUsed: Math.round(totalUsed * 1_000_000) / 1_000_000,
todaySpend: Math.round(todaySpend * 1_000_000) / 1_000_000,
todayReward: Math.round(todayReward * 1_000_000) / 1_000_000,
activeAccounts: activeCount,
totalAccounts: accounts.length,
todayCheckin: { success: checkinSuccess, failed: checkinFailed, total: todayCheckins.length },
proxy24h: { success: proxySuccess, failed: proxyFailed, total: proxy24hLogs.length, totalTokens },
modelAnalysis,
};
});
// Proxy logs
app.get<{ Querystring: { limit?: string; offset?: string } }>('/api/stats/proxy-logs', async (request) => {
const limit = parseInt(request.query.limit || '50', 10);
const offset = parseInt(request.query.offset || '0', 10);
const rows = db.select().from(schema.proxyLogs)
.leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
.leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.orderBy(desc(schema.proxyLogs.createdAt))
.limit(limit).offset(offset).all();
return rows.map((row) => ({
...row.proxy_logs,
username: row.accounts?.username || null,
siteName: row.sites?.name || null,
siteUrl: row.sites?.url || null,
}));
});
// Models marketplace - refresh upstream models and aggregate.
// GET /api/models/marketplace
//   ?refresh=1        — clear the cache and queue a (deduplicated) background refresh
//   ?includePricing=1 — also fetch per-site pricing/metadata for each model
// Aggregates token/model availability into a per-model view (accounts, tokens,
// best latency, 7-day success rate), optionally enriched with upstream metadata.
app.get<{ Querystring: { refresh?: string; includePricing?: string } }>('/api/models/marketplace', async (request) => {
const refreshRequested = parseBooleanFlag(request.query.refresh);
const includePricing = parseBooleanFlag(request.query.includePricing);
let refreshQueued = false;
let refreshReused = false;
let refreshJobId: string | null = null;
if (refreshRequested) {
// Invalidate the cached marketplace payloads and queue the refresh task;
// dedupeKey ensures at most one refresh runs at a time.
modelsMarketplaceCache.clear();
const { task, reused } = startBackgroundTask(
{
type: 'model',
title: '刷新模型广场数据',
dedupeKey: 'refresh-models-and-rebuild-routes',
notifyOnFailure: true,
successMessage: (currentTask) => {
const rebuild = (currentTask.result as any)?.rebuild;
if (!rebuild) return '模型广场刷新已完成';
return `模型广场刷新完成:新增路由 ${rebuild.createdRoutes},新增通道 ${rebuild.createdChannels},移除通道 ${rebuild.removedChannels}`;
},
failureMessage: (currentTask) => `模型广场刷新失败:${currentTask.error || 'unknown error'}`,
},
async () => refreshModelsAndRebuildRoutes(),
);
// reused=true means an identical task was already in flight; report its id.
refreshQueued = !reused;
refreshReused = reused;
refreshJobId = task.id;
}
const runningRefreshTask = getRunningTaskByDedupeKey('refresh-models-and-rebuild-routes');
if (!refreshJobId && runningRefreshTask) refreshJobId = runningRefreshTask.id;
// Serve the cached aggregate unless an explicit refresh was requested.
if (!refreshRequested) {
const cachedModels = readModelsMarketplaceCache(includePricing);
if (cachedModels) {
return {
models: cachedModels,
meta: {
refreshRequested,
refreshQueued,
refreshReused,
refreshRunning: !!runningRefreshTask,
refreshJobId,
includePricing,
cacheHit: true,
},
};
}
}
// Cache miss: rebuild the aggregate. Join availability with its token,
// account, and site rows (all inner joins — orphaned rows are excluded).
const availability = db.select().from(schema.tokenModelAvailability)
.innerJoin(schema.accountTokens, eq(schema.tokenModelAvailability.tokenId, schema.accountTokens.id))
.innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.all();
// Per-model success/latency tallies from the last 7 days of proxy logs.
const last7d = getLocalRangeStartUtc(7);
const recentLogs = db.select().from(schema.proxyLogs)
.where(gte(schema.proxyLogs.createdAt, last7d))
.all();
const modelLogStats: Record<string, { success: number; total: number; totalLatency: number }> = {};
for (const log of recentLogs) {
const model = log.modelActual || log.modelRequested || '';
if (!modelLogStats[model]) modelLogStats[model] = { success: 0, total: 0, totalLatency: 0 };
modelLogStats[model].total++;
if (log.status === 'success') modelLogStats[model].success++;
modelLogStats[model].totalLatency += log.latencyMs || 0;
}
// Upstream metadata (description/tags/endpoint types/pricing) aggregated per
// model, keyed by lowercased model name for case-insensitive lookup.
type ModelMetadataAggregate = {
description: string | null;
tags: Set<string>;
supportedEndpointTypes: Set<string>;
pricingSources: Array<{
siteId: number;
siteName: string;
accountId: number;
username: string | null;
ownerBy: string | null;
enableGroups: string[];
groupPricing: Record<string, {
quotaType: number;
inputPerMillion?: number;
outputPerMillion?: number;
perCallInput?: number;
perCallOutput?: number;
perCallTotal?: number;
}>;
}>;
};
const modelMetadataMap = new Map<string, ModelMetadataAggregate>();
if (includePricing) {
// Fetch the pricing catalog from every active account/site pair in parallel.
const activeAccountRows = db.select().from(schema.accounts)
.innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
.where(and(eq(schema.accounts.status, 'active'), eq(schema.sites.status, 'active')))
.all();
const metadataResults = await Promise.all(activeAccountRows.map(async (row) => {
const catalog = await fetchModelPricingCatalog({
site: {
id: row.sites.id,
url: row.sites.url,
platform: row.sites.platform,
apiKey: row.sites.apiKey,
},
account: {
id: row.accounts.id,
accessToken: row.accounts.accessToken,
apiToken: row.accounts.apiToken,
},
// '__metadata__' / 0 look like sentinel arguments requesting the full
// catalog rather than one model's pricing — TODO confirm against
// fetchModelPricingCatalog's contract.
modelName: '__metadata__',
totalTokens: 0,
});
return {
account: row.accounts,
site: row.sites,
catalog,
};
}));
// Merge catalogs: first non-empty description wins, tags/endpoint types are
// unioned, and every source's pricing entry is kept for the breakdown.
for (const result of metadataResults) {
if (!result.catalog) continue;
for (const model of result.catalog.models) {
const key = model.modelName.toLowerCase();
if (!modelMetadataMap.has(key)) {
modelMetadataMap.set(key, {
description: null,
tags: new Set<string>(),
supportedEndpointTypes: new Set<string>(),
pricingSources: [],
});
}
const aggregate = modelMetadataMap.get(key)!;
if (!aggregate.description && model.modelDescription) {
aggregate.description = model.modelDescription;
}
for (const tag of model.tags) aggregate.tags.add(tag);
for (const endpointType of model.supportedEndpointTypes) {
aggregate.supportedEndpointTypes.add(endpointType);
}
aggregate.pricingSources.push({
siteId: result.site.id,
siteName: result.site.name,
accountId: result.account.id,
username: result.account.username,
ownerBy: model.ownerBy,
enableGroups: model.enableGroups,
groupPricing: model.groupPricing,
});
}
}
}
// Collapse availability rows into one entry per model with a per-account map,
// so an account contributes once even when several of its tokens match.
const modelMap: Record<string, {
name: string;
accountsById: Map<number, {
id: number;
site: string;
username: string | null;
latency: number | null;
unitCost: number | null;
balance: number;
tokens: Array<{ id: number; name: string; isDefault: boolean }>;
}>;
}> = {};
for (const row of availability) {
const m = row.token_model_availability;
const t = row.account_tokens;
const a = row.accounts;
const s = row.sites;
// Only combinations that are currently routable count.
if (!m.available || !t.enabled || a.status !== 'active' || s.status !== 'active') continue;
if (!modelMap[m.modelName]) {
modelMap[m.modelName] = { name: m.modelName, accountsById: new Map() };
}
const existingAccount = modelMap[m.modelName].accountsById.get(a.id);
if (!existingAccount) {
modelMap[m.modelName].accountsById.set(a.id, {
id: a.id,
site: s.name,
username: a.username,
latency: m.latencyMs,
unitCost: a.unitCost,
balance: a.balance || 0,
tokens: [{ id: t.id, name: t.name, isDefault: !!t.isDefault }],
});
} else {
// Keep the best (lowest) observed latency across this account's tokens;
// null means "not measured" and never overrides a real measurement.
const nextLatency = (() => {
if (existingAccount.latency == null) return m.latencyMs;
if (m.latencyMs == null) return existingAccount.latency;
return Math.min(existingAccount.latency, m.latencyMs);
})();
existingAccount.latency = nextLatency;
if (!existingAccount.tokens.some((token) => token.id === t.id)) {
existingAccount.tokens.push({ id: t.id, name: t.name, isDefault: !!t.isDefault });
}
}
}
// When pricing metadata lacks a description for some model, lazily pull the
// cached upstream description list as a fallback source.
let upstreamDescriptionMap = new Map<string, string>();
if (includePricing) {
const hasMissingDescription = Object.keys(modelMap).some((modelName) => {
const metadata = modelMetadataMap.get(modelName.toLowerCase());
return !metadata?.description;
});
if (hasMissingDescription) {
upstreamDescriptionMap = await getUpstreamModelDescriptionsCached();
}
}
// Shape the final per-model payload.
const models = Object.values(modelMap).map((m) => {
const logStats = modelLogStats[m.name];
const accounts = Array.from(m.accountsById.values());
// NOTE(review): accounts with unknown latency are counted as 0 here, which
// biases the average low — confirm whether nulls should be excluded instead.
const avgLatency = accounts.reduce((sum, a) => sum + (a.latency || 0), 0) / (accounts.length || 1);
const metadata = modelMetadataMap.get(m.name.toLowerCase());
const fallbackDescription = metadata?.description ? null : upstreamDescriptionMap.get(m.name.toLowerCase()) || null;
return {
name: m.name,
accountCount: accounts.length,
tokenCount: accounts.reduce((sum, account) => sum + account.tokens.length, 0),
avgLatency: Math.round(avgLatency),
// 7-day success rate as a percentage with one decimal; null when no logs.
successRate: logStats ? Math.round((logStats.success / logStats.total) * 1000) / 10 : null,
description: metadata?.description || fallbackDescription,
tags: metadata ? Array.from(metadata.tags).sort((a, b) => a.localeCompare(b)) : [],
supportedEndpointTypes: metadata ? Array.from(metadata.supportedEndpointTypes).sort((a, b) => a.localeCompare(b)) : [],
pricingSources: metadata?.pricingSources || [],
accounts,
};
});
// Most widely available models first, then persist the rebuilt aggregate.
models.sort((a, b) => b.accountCount - a.accountCount);
writeModelsMarketplaceCache(includePricing, models);
return {
models,
meta: {
refreshRequested,
refreshQueued,
refreshReused,
refreshRunning: !!runningRefreshTask,
refreshJobId,
includePricing,
},
};
});
// Lists, per model, every routable token candidate: the token must be enabled
// and marked available for the model, with both account and site active.
app.get('/api/models/token-candidates', async () => {
  const rows = db.select().from(schema.tokenModelAvailability)
    .innerJoin(schema.accountTokens, eq(schema.tokenModelAvailability.tokenId, schema.accountTokens.id))
    .innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(
      and(
        eq(schema.tokenModelAvailability.available, true),
        eq(schema.accountTokens.enabled, true),
        eq(schema.accounts.status, 'active'),
        eq(schema.sites.status, 'active'),
      ),
    )
    .all();
  type Candidate = {
    accountId: number;
    tokenId: number;
    tokenName: string;
    isDefault: boolean;
    username: string | null;
    siteId: number;
    siteName: string;
  };
  const candidatesByModel: Record<string, Candidate[]> = {};
  // Token ids already emitted per model, so duplicate rows are skipped in O(1).
  const seenTokens: Record<string, Set<number>> = {};
  for (const row of rows) {
    const modelName = row.token_model_availability.modelName;
    const token = row.account_tokens;
    const seen = seenTokens[modelName] ?? (seenTokens[modelName] = new Set<number>());
    if (seen.has(token.id)) continue;
    seen.add(token.id);
    const bucket = candidatesByModel[modelName] ?? (candidatesByModel[modelName] = []);
    bucket.push({
      accountId: row.accounts.id,
      tokenId: token.id,
      tokenName: token.name,
      isDefault: !!token.isDefault,
      username: row.accounts.username,
      siteId: row.sites.id,
      siteName: row.sites.name,
    });
  }
  return { models: candidatesByModel };
});
// Refresh models for one account and rebuild routes.
// POST /api/models/check/:accountId — re-probes the account's model
// availability, then rebuilds the token routes from the refreshed data.
app.post<{ Params: { accountId: string } }>('/api/models/check/:accountId', async (request) => {
  // The path param arrives as a string; reject anything non-numeric.
  const parsedAccountId = Number.parseInt(request.params.accountId, 10);
  if (Number.isNaN(parsedAccountId)) {
    return { success: false, error: 'Invalid account id' };
  }
  const refreshResult = await refreshModelsForAccount(parsedAccountId);
  const rebuildResult = rebuildTokenRoutesFromAvailability();
  return { success: true, refresh: refreshResult, rebuild: rebuildResult };
});
// Site distribution per-site aggregate data: total balance, account count,
// and accumulated spend for every active site.
app.get('/api/stats/site-distribution', async () => {
  // Currency values are rounded to 6 decimal places like the rest of the stats API.
  const roundMicro = (value: number) => Math.round(value * 1_000_000) / 1_000_000;
  const accountRows = db.select().from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.sites.status, 'active'))
    .all();
  const logRows = db.select().from(schema.proxyLogs)
    .leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
    .leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.sites.status, 'active'))
    .all();
  // Accumulate spend per site: prefer the explicit estimated cost, falling
  // back to the token-count heuristic when it is absent or zero.
  const spendBySite: Record<number, number> = {};
  for (const row of logRows) {
    const logSiteId = row.sites?.id;
    if (logSiteId == null) continue;
    const log = row.proxy_logs;
    const platformName = row.sites?.platform || 'new-api';
    const explicit = typeof log.estimatedCost === 'number' ? log.estimatedCost : 0;
    const cost = explicit > 0 ? explicit : fallbackTokenCost(log.totalTokens || 0, platformName);
    spendBySite[logSiteId] = (spendBySite[logSiteId] || 0) + cost;
  }
  // Fold accounts into one bucket per site.
  const bucketBySite: Record<number, {
    siteName: string;
    platform: string;
    totalBalance: number;
    accountCount: number;
  }> = {};
  for (const row of accountRows) {
    const bucket = bucketBySite[row.sites.id] ?? (bucketBySite[row.sites.id] = {
      siteName: row.sites.name,
      platform: row.sites.platform,
      totalBalance: 0,
      accountCount: 0,
    });
    bucket.totalBalance += row.accounts.balance || 0;
    bucket.accountCount += 1;
  }
  const distribution = Object.entries(bucketBySite).map(([id, bucket]) => ({
    siteId: Number(id),
    siteName: bucket.siteName,
    platform: bucket.platform,
    totalBalance: roundMicro(bucket.totalBalance),
    totalSpend: roundMicro(spendBySite[Number(id)] || 0),
    accountCount: bucket.accountCount,
  }));
  return { distribution };
});
// Site trend daily spend/calls broken down by site.
// GET /api/stats/site-trend?days=N — per-day, per-site spend and call counts.
app.get<{ Querystring: { days?: string } }>('/api/stats/site-trend', async (request) => {
  // Guard against non-numeric `days`: parseInt('abc') is NaN and
  // Math.max(1, NaN) is still NaN, which would poison the date-range lookup.
  const parsedDays = Number.parseInt(request.query.days || '7', 10);
  const days = Number.isNaN(parsedDays) ? 7 : Math.max(1, parsedDays);
  const sinceDate = getLocalRangeStartUtc(days);
  const rows = db.select().from(schema.proxyLogs)
    .leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
    .leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(and(gte(schema.proxyLogs.createdAt, sinceDate), eq(schema.sites.status, 'active')))
    .all();
  // Group by date + site name
  const dayMap: Record<string, Record<string, { spend: number; calls: number }>> = {};
  for (const row of rows) {
    const log = row.proxy_logs;
    const siteName = row.sites?.name || 'unknown';
    const platform = row.sites?.platform || 'new-api';
    const date = toLocalDayKeyFromStoredUtc(log.createdAt);
    if (!date) continue;
    if (!dayMap[date]) dayMap[date] = {};
    if (!dayMap[date][siteName]) dayMap[date][siteName] = { spend: 0, calls: 0 };
    // Prefer the explicit estimated cost; otherwise estimate from token count.
    const explicitCost = typeof log.estimatedCost === 'number' ? log.estimatedCost : 0;
    const cost = explicitCost > 0 ? explicitCost : fallbackTokenCost(log.totalTokens || 0, platform);
    dayMap[date][siteName].spend += cost;
    dayMap[date][siteName].calls++;
  }
  // Round spend values (6 decimals) and sort by date ascending.
  const trend = Object.entries(dayMap)
    .sort(([a], [b]) => a.localeCompare(b))
    .map(([date, sites]) => {
      const rounded: Record<string, { spend: number; calls: number }> = {};
      for (const [name, stats] of Object.entries(sites)) {
        rounded[name] = {
          spend: Math.round(stats.spend * 1_000_000) / 1_000_000,
          calls: stats.calls,
        };
      }
      return { date, sites: rounded };
    });
  return { trend };
});
// Model stats by site: per-model call/spend/token totals over the last N days,
// optionally filtered to the accounts of a single site.
app.get<{ Querystring: { siteId?: string; days?: string } }>('/api/stats/model-by-site', async (request) => {
  const siteId = request.query.siteId ? parseInt(request.query.siteId, 10) : null;
  // Guard against non-numeric `days`: Math.max(1, NaN) is NaN and would
  // corrupt the date-range computation below. Fall back to the 7-day default.
  const parsedDays = Number.parseInt(request.query.days || '7', 10);
  const days = Number.isNaN(parsedDays) ? 7 : Math.max(1, parsedDays);
  const sinceDate = getLocalRangeStartUtc(days);
  // Get account IDs belonging to the site (if filtered); a NaN siteId is ignored.
  let accountIds: Set<number> | null = null;
  if (siteId != null && !Number.isNaN(siteId)) {
    const siteAccounts = db.select().from(schema.accounts)
      .where(eq(schema.accounts.siteId, siteId)).all();
    accountIds = new Set(siteAccounts.map((a) => a.id));
  }
  const rows = db.select().from(schema.proxyLogs)
    .leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
    .leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(and(gte(schema.proxyLogs.createdAt, sinceDate), eq(schema.sites.status, 'active')))
    .all();
  const modelMap: Record<string, { calls: number; spend: number; tokens: number }> = {};
  for (const row of rows) {
    const log = row.proxy_logs;
    // Filter by site if siteId is specified
    if (accountIds != null && (log.accountId == null || !accountIds.has(log.accountId))) continue;
    const model = log.modelActual || log.modelRequested || 'unknown';
    const platform = row.sites?.platform || 'new-api';
    if (!modelMap[model]) modelMap[model] = { calls: 0, spend: 0, tokens: 0 };
    modelMap[model].calls++;
    modelMap[model].tokens += log.totalTokens || 0;
    // Prefer the explicit estimated cost; otherwise estimate from token count.
    const explicitCost = typeof log.estimatedCost === 'number' ? log.estimatedCost : 0;
    const cost = explicitCost > 0 ? explicitCost : fallbackTokenCost(log.totalTokens || 0, platform);
    modelMap[model].spend += cost;
  }
  const models = Object.entries(modelMap)
    .map(([model, stats]) => ({
      model,
      calls: stats.calls,
      spend: Math.round(stats.spend * 1_000_000) / 1_000_000,
      tokens: stats.tokens,
    }))
    .sort((a, b) => b.calls - a.calls);
  return { models };
});
}
+22
View File
@@ -0,0 +1,22 @@
import { FastifyInstance } from 'fastify';
import { getBackgroundTask, listBackgroundTasks } from '../../services/backgroundTaskService.js';
/**
 * Registers read-only endpoints for inspecting background tasks.
 */
export async function taskRoutes(app: FastifyInstance) {
  // List recent background tasks. `limit` falls back to 50 when the query
  // param is missing or non-numeric (parseInt would otherwise yield NaN,
  // which would be passed straight into listBackgroundTasks).
  app.get<{ Querystring: { limit?: string } }>('/api/tasks', async (request) => {
    const parsedLimit = Number.parseInt(request.query.limit || '50', 10);
    const limit = Number.isNaN(parsedLimit) ? 50 : parsedLimit;
    return {
      tasks: listBackgroundTasks(limit),
    };
  });
  // Fetch a single background task by id; 404 when unknown.
  app.get<{ Params: { id: string } }>('/api/tasks/:id', async (request, reply) => {
    const task = getBackgroundTask(request.params.id);
    if (!task) {
      return reply.code(404).send({ success: false, message: 'task not found' });
    }
    return {
      success: true,
      task,
    };
  });
}
+451
View File
@@ -0,0 +1,451 @@
import { FastifyInstance, FastifyReply } from 'fastify';
import { randomUUID } from 'node:crypto';
import { fetch } from 'undici';
import { config } from '../../config.js';
// One chat message in the incoming test request (OpenAI-style shape).
type TestChatMessage = { role: string; content: string };
// Which proxy endpoint format the test request is routed through.
type TestTargetFormat = 'openai' | 'claude';
// Raw, unvalidated body accepted by the /api/test/chat* endpoints.
type TestChatRequestBody = {
model?: string;
messages?: TestChatMessage[];
targetFormat?: TestTargetFormat;
stream?: boolean;
temperature?: number;
top_p?: number;
max_tokens?: number;
frequency_penalty?: number;
presence_penalty?: number;
seed?: number;
};
// Body after validatePayload(): model/messages guaranteed present,
// targetFormat resolved to a concrete value.
type ValidatedTestChatPayload = {
model: string;
messages: TestChatMessage[];
targetFormat: TestTargetFormat;
stream?: boolean;
temperature?: number;
top_p?: number;
max_tokens?: number;
frequency_penalty?: number;
presence_penalty?: number;
seed?: number;
};
// Lifecycle states for an async test-chat job.
type TestChatJobStatus = 'pending' | 'succeeded' | 'failed' | 'cancelled';
// In-memory record of one async test-chat job.
type TestChatJob = {
id: string;
status: TestChatJobStatus;
payload: ValidatedTestChatPayload;
result?: unknown;
error?: unknown;
// AbortController while the upstream request is in flight; null once settled.
controller?: AbortController | null;
createdAt: number;
updatedAt: number;
expiresAt: number;
};
// How long settled jobs are retained before the sweep removes them (10 min).
const JOB_TTL_MS = 10 * 60 * 1000;
// Cadence of the periodic expired-job sweep (1 min).
const JOB_CLEANUP_INTERVAL_MS = 60 * 1000;
// Module-level job store; entries are pruned by cleanupExpiredJobs().
const jobs = new Map<string, TestChatJob>();
/**
 * Error raised when a proxied upstream call returns a non-2xx response.
 * Carries the upstream HTTP status plus the parsed (or wrapped) response
 * payload so route handlers can relay both back to the client unchanged.
 */
class UpstreamProxyError extends Error {
  statusCode: number;
  responsePayload: unknown;
  constructor(status: number, payload: unknown) {
    super(`Upstream request failed with status ${status}`);
    this.name = 'UpstreamProxyError';
    this.responsePayload = payload;
    this.statusCode = status;
  }
}
// Parses an upstream error body as JSON when possible; otherwise wraps the
// raw text in a standard error envelope so clients always get structured data.
const normalizeErrorPayload = (text: string): unknown => {
  let parsed: unknown;
  try {
    parsed = JSON.parse(text);
  } catch {
    return { error: { message: text, type: 'upstream_error' } };
  }
  return parsed;
};
// Validates the raw request body for the test-chat endpoints. On failure a
// 400 response is sent via `reply` and null is returned so callers can bail.
const validatePayload = (
  body: TestChatRequestBody,
  reply: FastifyReply,
): ValidatedTestChatPayload | null => {
  const model = body.model;
  if (!model || model.trim().length === 0) {
    reply.code(400).send({ error: 'model is required' });
    return null;
  }
  const messages = body.messages;
  if (!Array.isArray(messages) || messages.length === 0) {
    reply.code(400).send({ error: 'messages is required' });
    return null;
  }
  // Anything other than an explicit 'claude' falls back to the OpenAI format.
  const targetFormat: TestTargetFormat = body.targetFormat === 'claude' ? 'claude' : 'openai';
  const { stream, temperature, top_p, max_tokens, frequency_penalty, presence_penalty, seed } = body;
  return {
    model,
    messages,
    targetFormat,
    stream,
    temperature,
    top_p,
    max_tokens,
    frequency_penalty,
    presence_penalty,
    seed,
  };
};
// Translates a validated OpenAI-style payload into an Anthropic /v1/messages
// request body: system messages are concatenated into the top-level `system`
// field, empty/whitespace-only messages are dropped, and non-assistant roles
// are coerced to 'user'.
const convertOpenAiPayloadToClaudeBody = (
  payload: ValidatedTestChatPayload,
  forceStream: boolean,
): Record<string, unknown> => {
  const systemParts: string[] = [];
  const chatTurns: Array<{ role: 'user' | 'assistant'; content: string }> = [];
  for (const message of payload.messages) {
    const role = typeof message.role === 'string' ? message.role : 'user';
    const content = typeof message.content === 'string' ? message.content : '';
    if (content.trim().length === 0) continue;
    if (role === 'system') {
      systemParts.push(content);
    } else {
      chatTurns.push({
        role: role === 'assistant' ? 'assistant' : 'user',
        content,
      });
    }
  }
  // Claude requires max_tokens; default to 4096 when the caller omitted it.
  const hasMaxTokens = typeof payload.max_tokens === 'number' && Number.isFinite(payload.max_tokens);
  const body: Record<string, unknown> = {
    model: payload.model,
    stream: forceStream,
    max_tokens: hasMaxTokens ? payload.max_tokens : 4096,
    messages: chatTurns,
  };
  if (systemParts.length > 0) {
    body.system = systemParts.join('\n\n');
  }
  if (typeof payload.temperature === 'number' && Number.isFinite(payload.temperature)) {
    body.temperature = payload.temperature;
  }
  if (typeof payload.top_p === 'number' && Number.isFinite(payload.top_p)) {
    body.top_p = payload.top_p;
  }
  return body;
};
// Builds the loopback request (URL, headers, body) that routes a test chat
// through this service's own proxy endpoints using the configured proxy token.
const buildUpstreamRequest = (
  payload: ValidatedTestChatPayload,
  forceStream: boolean,
): { url: string; headers: Record<string, string>; body: Record<string, unknown> } => {
  const base = `http://127.0.0.1:${config.port}`;
  if (payload.targetFormat === 'claude') {
    // Anthropic-style endpoint: authenticates via x-api-key and requires an
    // anthropic-version header.
    return {
      url: `${base}/v1/messages`,
      headers: {
        'Content-Type': 'application/json',
        'x-api-key': config.proxyToken,
        'anthropic-version': '2023-06-01',
      },
      body: convertOpenAiPayloadToClaudeBody(payload, forceStream),
    };
  }
  // OpenAI-style endpoint: forward the payload minus our internal
  // targetFormat discriminator, with the stream flag overridden.
  const { targetFormat: _ignored, ...openAiBody } = payload;
  return {
    url: `${base}/v1/chat/completions`,
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${config.proxyToken}`,
    },
    body: { ...openAiBody, stream: forceStream },
  };
};
const requestUpstreamChat = async (
payload: ValidatedTestChatPayload,
signal?: AbortSignal,
forceStream = false,
): Promise<unknown> => {
const upstreamRequest = buildUpstreamRequest(payload, forceStream);
const upstream = await fetch(upstreamRequest.url, {
method: 'POST',
headers: upstreamRequest.headers,
body: JSON.stringify(upstreamRequest.body),
signal,
});
const text = await upstream.text();
if (!upstream.ok) {
throw new UpstreamProxyError(upstream.status, normalizeErrorPayload(text));
}
try {
return JSON.parse(text);
} catch {
return { raw: text };
}
};
// Removes every job whose TTL has elapsed from the in-memory store.
// (Deleting from a Map while iterating it is safe in JS.)
const cleanupExpiredJobs = () => {
  const cutoff = Date.now();
  for (const [id, job] of jobs) {
    if (job.expiresAt <= cutoff) {
      jobs.delete(id);
    }
  }
};
// Executes a pending job's upstream request and records the outcome on the
// job entry. The job is re-fetched from the map after each await because a
// concurrent DELETE may have removed (or aborted) it while the request was
// in flight — writing to the stale reference would resurrect a deleted job.
const runJob = async (jobId: string) => {
const job = jobs.get(jobId);
if (!job || job.status !== 'pending') return;
const controller = new AbortController();
job.controller = controller;
try {
const result = await requestUpstreamChat(job.payload, controller.signal);
const current = jobs.get(jobId);
if (!current) return;
current.controller = null;
current.status = 'succeeded';
current.result = result;
current.updatedAt = Date.now();
current.expiresAt = current.updatedAt + JOB_TTL_MS;
} catch (error) {
const current = jobs.get(jobId);
if (!current) return;
current.controller = null;
// AbortError means the client cancelled the job; keep the entry only
// briefly (30s) so a final status poll can still observe 'cancelled'.
if ((error as any)?.name === 'AbortError') {
current.status = 'cancelled';
current.error = { error: { message: 'job cancelled', type: 'cancelled' } };
current.updatedAt = Date.now();
current.expiresAt = current.updatedAt + 30_000;
return;
}
current.status = 'failed';
// Preserve the upstream's own error payload when available so the poller
// sees the same body a direct call would have returned.
current.error = error instanceof UpstreamProxyError
? error.responsePayload
: { error: { message: (error as any)?.message || 'proxy request failed', type: 'server_error' } };
current.updatedAt = Date.now();
current.expiresAt = current.updatedAt + JOB_TTL_MS;
}
};
/**
 * Registers the /api/test/chat* endpoints used to exercise the proxy with
 * ad-hoc chat requests: a synchronous call, an SSE streaming pass-through,
 * and an async job API (create / poll / cancel).
 */
export async function testRoutes(app: FastifyInstance) {
// Periodic sweep of expired jobs; unref'd so the timer never keeps the
// process alive, and cleared when the Fastify instance closes.
const cleanupTimer = setInterval(cleanupExpiredJobs, JOB_CLEANUP_INTERVAL_MS);
cleanupTimer.unref?.();
app.addHook('onClose', async () => {
clearInterval(cleanupTimer);
});
// Synchronous (non-streaming) test chat: proxies the request and returns the
// upstream JSON body, relaying upstream error status/payload verbatim.
app.post<{ Body: TestChatRequestBody }>(
'/api/test/chat',
async (request, reply) => {
const body = request.body || {};
const payload = validatePayload(body, reply);
if (!payload) return;
try {
const data = await requestUpstreamChat(payload, undefined, false);
return reply.send(data);
} catch (error) {
if (error instanceof UpstreamProxyError) {
return reply.code(error.statusCode).send(error.responsePayload);
}
return reply.code(502).send({
error: {
message: (error as any)?.message || 'proxy request failed',
type: 'server_error',
},
});
}
},
);
// Streaming test chat: forces stream mode upstream and pipes the SSE bytes
// through unchanged. If the client disconnects mid-stream, the upstream
// request is aborted so no work is wasted.
app.post<{ Body: TestChatRequestBody }>(
'/api/test/chat/stream',
async (request, reply) => {
const body = request.body || {};
const payload = validatePayload(body, reply);
if (!payload) return;
const controller = new AbortController();
const abortUpstream = () => {
try {
if (!controller.signal.aborted) {
controller.abort();
}
} catch {
// no-op
}
};
const onClientAborted = () => {
abortUpstream();
};
const onClientClosed = () => {
// 'close' also fires after a normal end; only abort when the response
// had not finished being written.
if (!reply.raw.writableEnded) {
abortUpstream();
}
};
const cleanupClientListeners = () => {
request.raw.off?.('aborted', onClientAborted);
reply.raw.off?.('close', onClientClosed);
};
request.raw.on('aborted', onClientAborted);
reply.raw.on('close', onClientClosed);
let upstream;
try {
const upstreamRequest = buildUpstreamRequest(payload, true);
upstream = await fetch(upstreamRequest.url, {
method: 'POST',
headers: upstreamRequest.headers,
body: JSON.stringify(upstreamRequest.body),
signal: controller.signal,
});
} catch (error) {
cleanupClientListeners();
return reply.code(502).send({
error: {
message: (error as any)?.message || 'proxy request failed',
type: 'server_error',
},
});
}
if (!upstream.ok) {
const text = await upstream.text();
cleanupClientListeners();
return reply.code(upstream.status).send(normalizeErrorPayload(text));
}
// Take over the raw socket: from here on Fastify no longer manages the reply.
reply.hijack();
reply.raw.statusCode = 200;
reply.raw.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
reply.raw.setHeader('Cache-Control', 'no-cache, no-transform');
reply.raw.setHeader('Connection', 'keep-alive');
// Disable reverse-proxy buffering (nginx) so SSE chunks flush immediately.
reply.raw.setHeader('X-Accel-Buffering', 'no');
const reader = upstream.body?.getReader();
if (!reader) {
cleanupClientListeners();
reply.raw.end();
return;
}
try {
// Copy upstream SSE chunks to the client byte-for-byte.
while (true) {
const { done, value } = await reader.read();
if (done) break;
if (value) {
reply.raw.write(Buffer.from(value));
}
}
} catch (error) {
// Surface mid-stream failures as an SSE error event instead of a bare
// broken connection.
if (!reply.raw.writableEnded) {
const message = JSON.stringify({
error: { message: (error as any)?.message || 'stream interrupted', type: 'stream_error' },
});
reply.raw.write(`event: error\ndata: ${message}\n\n`);
}
} finally {
try {
await reader.cancel();
} catch {
// no-op
}
cleanupClientListeners();
if (!reply.raw.writableEnded) {
reply.raw.end();
}
}
},
);
// Create an async test-chat job: responds 202 immediately and runs the
// upstream request in the background; poll the jobId for the outcome.
app.post<{ Body: TestChatRequestBody }>(
'/api/test/chat/jobs',
async (request, reply) => {
const body = request.body || {};
const payload = validatePayload(body, reply);
if (!payload) return;
const now = Date.now();
const jobId = randomUUID();
const job: TestChatJob = {
id: jobId,
status: 'pending',
payload,
controller: null,
createdAt: now,
updatedAt: now,
expiresAt: now + JOB_TTL_MS,
};
jobs.set(jobId, job);
// Fire-and-forget: runJob records success/failure on the job entry.
void runJob(jobId);
return reply.code(202).send({
jobId,
status: job.status,
createdAt: new Date(job.createdAt).toISOString(),
expiresAt: new Date(job.expiresAt).toISOString(),
});
},
);
// Poll a job's status/result; 404 once it has expired or never existed.
app.get<{ Params: { jobId: string } }>(
'/api/test/chat/jobs/:jobId',
async (request, reply) => {
const job = jobs.get(request.params.jobId);
if (!job) {
return reply.code(404).send({ error: { message: 'job not found', type: 'not_found' } });
}
return reply.send({
jobId: job.id,
status: job.status,
result: job.result,
error: job.error,
createdAt: new Date(job.createdAt).toISOString(),
updatedAt: new Date(job.updatedAt).toISOString(),
expiresAt: new Date(job.expiresAt).toISOString(),
});
},
);
// Cancel a job (aborting the in-flight upstream request if still pending)
// and remove it from the store.
app.delete<{ Params: { jobId: string } }>(
'/api/test/chat/jobs/:jobId',
async (request, reply) => {
const job = jobs.get(request.params.jobId);
if (!job) {
return reply.code(404).send({ error: { message: 'job not found', type: 'not_found' } });
}
if (job.status === 'pending' && job.controller) {
try {
job.controller.abort();
} catch {
// no-op
}
}
jobs.delete(request.params.jobId);
return reply.send({ success: true });
},
);
}
+157
View File
@@ -0,0 +1,157 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { describe, expect, it, beforeAll, beforeEach, afterAll } from 'vitest';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { mkdtempSync } from 'node:fs';
import { eq } from 'drizzle-orm';
type DbModule = typeof import('../../db/index.js');
// Integration tests for PUT /api/channels/batch: validation failures and a
// successful batch priority update, run against a real SQLite database in a
// temp directory.
describe('PUT /api/channels/batch', () => {
let app: FastifyInstance;
let db: DbModule['db'];
let schema: DbModule['schema'];
let dataDir = '';
let seedId = 0;
// Monotonic counter used to keep seeded fixture names/tokens unique.
const nextId = () => {
seedId += 1;
return seedId;
};
// Seeds a full site -> account -> route -> channel chain and returns the
// inserted route-channel row.
const seedChannel = (options: { priority: number; weight: number; manualOverride?: boolean }) => {
const id = nextId();
const site = db.insert(schema.sites).values({
name: `site-${id}`,
url: `https://example.com/${id}`,
platform: 'new-api',
}).returning().get();
const account = db.insert(schema.accounts).values({
siteId: site.id,
accessToken: `access-token-${id}`,
apiToken: `api-token-${id}`,
}).returning().get();
const route = db.insert(schema.tokenRoutes).values({
modelPattern: `gpt-4o-${id}`,
enabled: true,
}).returning().get();
return db.insert(schema.routeChannels).values({
routeId: route.id,
accountId: account.id,
priority: options.priority,
weight: options.weight,
manualOverride: options.manualOverride ?? false,
}).returning().get();
};
beforeAll(async () => {
// DATA_DIR must point at a fresh temp dir BEFORE the db modules are
// imported, so migrations run against an isolated database.
dataDir = mkdtempSync(join(tmpdir(), 'metapi-tokens-batch-'));
process.env.DATA_DIR = dataDir;
await import('../../db/migrate.js');
const dbModule = await import('../../db/index.js');
const routesModule = await import('./tokens.js');
db = dbModule.db;
schema = dbModule.schema;
app = Fastify();
await app.register(routesModule.tokensRoutes);
});
// Reset all tables between tests, deleting child rows before their parents.
beforeEach(() => {
db.delete(schema.routeChannels).run();
db.delete(schema.accountTokens).run();
db.delete(schema.tokenRoutes).run();
db.delete(schema.accounts).run();
db.delete(schema.sites).run();
});
afterAll(async () => {
await app.close();
});
it('returns 400 when updates is missing or empty', async () => {
const missingRes = await app.inject({
method: 'PUT',
url: '/api/channels/batch',
payload: {},
});
expect(missingRes.statusCode).toBe(400);
expect(missingRes.json()).toMatchObject({ success: false });
const emptyRes = await app.inject({
method: 'PUT',
url: '/api/channels/batch',
payload: { updates: [] },
});
expect(emptyRes.statusCode).toBe(400);
expect(emptyRes.json()).toMatchObject({ success: false });
});
it('returns 400 when an update item is invalid', async () => {
// Non-numeric id must be rejected.
const invalidIdRes = await app.inject({
method: 'PUT',
url: '/api/channels/batch',
payload: {
updates: [{ id: '1', priority: 1 }],
},
});
expect(invalidIdRes.statusCode).toBe(400);
expect(invalidIdRes.json()).toMatchObject({ success: false });
// Null priority must be rejected.
const invalidPriorityRes = await app.inject({
method: 'PUT',
url: '/api/channels/batch',
payload: {
updates: [{ id: 1, priority: null }],
},
});
expect(invalidPriorityRes.statusCode).toBe(400);
expect(invalidPriorityRes.json()).toMatchObject({ success: false });
});
// Per the assertions below, the endpoint normalizes 3.8 -> 3 and -7.2 -> 0
// (fractional priorities reduced to an integer, negatives clamped to 0),
// flips manualOverride to true, and leaves weight untouched.
it('updates priorities in batch, sets manualOverride, and keeps weight unchanged', async () => {
const channelA = seedChannel({ priority: 9, weight: 17, manualOverride: false });
const channelB = seedChannel({ priority: 8, weight: 23, manualOverride: false });
const res = await app.inject({
method: 'PUT',
url: '/api/channels/batch',
payload: {
updates: [
{ id: channelA.id, priority: 3.8 },
{ id: channelB.id, priority: -7.2 },
],
},
});
expect(res.statusCode).toBe(200);
const body = res.json() as {
success: boolean;
channels: Array<{ id: number; priority: number; weight: number; manualOverride: boolean }>;
};
expect(body.success).toBe(true);
expect(body.channels).toHaveLength(2);
const returnedA = body.channels.find((channel) => channel.id === channelA.id);
const returnedB = body.channels.find((channel) => channel.id === channelB.id);
expect(returnedA).toBeDefined();
expect(returnedB).toBeDefined();
expect(returnedA?.priority).toBe(3);
expect(returnedB?.priority).toBe(0);
expect(returnedA?.weight).toBe(17);
expect(returnedB?.weight).toBe(23);
expect(returnedA?.manualOverride).toBe(true);
expect(returnedB?.manualOverride).toBe(true);
// The normalized values must also be persisted, not just echoed back.
const dbA = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelA.id)).get();
const dbB = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelB.id)).get();
expect(dbA?.priority).toBe(3);
expect(dbB?.priority).toBe(0);
expect(dbA?.weight).toBe(17);
expect(dbB?.weight).toBe(23);
expect(dbA?.manualOverride).toBe(true);
expect(dbB?.manualOverride).toBe(true);
});
});
@@ -0,0 +1,102 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { describe, expect, it, beforeAll, beforeEach, afterAll } from 'vitest';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { mkdtempSync } from 'node:fs';
type DbModule = typeof import('../../db/index.js');
// Integration test for POST /api/routes/decision/batch: verifies that one
// call returns routing decisions for multiple models, deduplicates repeated
// names, and drops empty strings — against a real temp-dir SQLite database.
describe('POST /api/routes/decision/batch', () => {
let app: FastifyInstance;
let db: DbModule['db'];
let schema: DbModule['schema'];
let dataDir = '';
let seedId = 0;
// Monotonic counter used to keep seeded fixture names/tokens unique.
const nextId = () => {
seedId += 1;
return seedId;
};
// Seeds an active site/account plus an enabled route+channel for 'gpt-4o-mini'.
const seedRoutableChannel = () => {
const id = nextId();
const site = db.insert(schema.sites).values({
name: `site-${id}`,
url: `https://site-${id}.example.com`,
platform: 'new-api',
}).returning().get();
const account = db.insert(schema.accounts).values({
siteId: site.id,
username: `user-${id}`,
accessToken: `access-token-${id}`,
apiToken: `sk-api-token-${id}`,
status: 'active',
}).returning().get();
const route = db.insert(schema.tokenRoutes).values({
modelPattern: 'gpt-4o-mini',
enabled: true,
}).returning().get();
db.insert(schema.routeChannels).values({
routeId: route.id,
accountId: account.id,
tokenId: null,
priority: 0,
weight: 10,
enabled: true,
}).run();
};
beforeAll(async () => {
// DATA_DIR must point at a fresh temp dir BEFORE the db modules are
// imported, so migrations run against an isolated database.
dataDir = mkdtempSync(join(tmpdir(), 'metapi-route-decision-batch-'));
process.env.DATA_DIR = dataDir;
await import('../../db/migrate.js');
const dbModule = await import('../../db/index.js');
const routesModule = await import('./tokens.js');
db = dbModule.db;
schema = dbModule.schema;
app = Fastify();
await app.register(routesModule.tokensRoutes);
});
// Reset state between tests (child tables deleted before their parents).
beforeEach(() => {
seedId = 0;
db.delete(schema.routeChannels).run();
db.delete(schema.tokenRoutes).run();
db.delete(schema.accounts).run();
db.delete(schema.sites).run();
});
afterAll(async () => {
await app.close();
delete process.env.DATA_DIR;
});
it('returns decisions for multiple requested models in one call', async () => {
seedRoutableChannel();
// Includes a duplicate and an empty string to exercise dedupe/filtering.
const response = await app.inject({
method: 'POST',
url: '/api/routes/decision/batch',
payload: {
models: ['gpt-4o-mini', 'gpt-4o-mini', 'unknown-model', ''],
},
});
expect(response.statusCode).toBe(200);
const body = response.json() as {
success: boolean;
decisions: Record<string, { matched: boolean; candidates: Array<unknown> }>;
};
expect(body.success).toBe(true);
// Duplicates collapse to one key and the empty model name is dropped.
expect(Object.keys(body.decisions).sort()).toEqual(['gpt-4o-mini', 'unknown-model']);
expect(body.decisions['gpt-4o-mini']?.matched).toBe(true);
expect(Array.isArray(body.decisions['gpt-4o-mini']?.candidates)).toBe(true);
expect(body.decisions['gpt-4o-mini']?.candidates.length).toBeGreaterThan(0);
expect(body.decisions['unknown-model']?.matched).toBe(false);
});
});
+358
View File
@@ -0,0 +1,358 @@
import { FastifyInstance } from 'fastify';
import { and, eq, inArray } from 'drizzle-orm';
import { db, schema } from '../../db/index.js';
import { rebuildTokenRoutesFromAvailability, refreshModelsAndRebuildRoutes } from '../../services/modelService.js';
import { tokenRouter } from '../../services/tokenRouter.js';
import { startBackgroundTask } from '../../services/backgroundTaskService.js';
/**
 * Returns true when a route's model pattern is a literal model name,
 * i.e. it contains none of the glob wildcard markers `*`, `?`, or `[`.
 */
function isExactModelPattern(modelPattern: string): boolean {
  const wildcardMarkers = ['*', '?', '['];
  return wildcardMarkers.every((marker) => !modelPattern.includes(marker));
}
/**
 * Looks up the id of the account's enabled default API token.
 * Returns null when the account has no enabled token flagged as default.
 */
function getDefaultTokenId(accountId: number): number | null {
  const isAccountDefault = and(
    eq(schema.accountTokens.accountId, accountId),
    eq(schema.accountTokens.enabled, true),
    eq(schema.accountTokens.isDefault, true),
  );
  const defaultToken = db.select().from(schema.accountTokens).where(isAccountDefault).get();
  return defaultToken?.id ?? null;
}
/**
 * Checks whether the token is recorded as able to serve the given model,
 * based on the tokenModelAvailability table (available flag must be true).
 */
function tokenSupportsModel(tokenId: number, modelName: string): boolean {
  const availabilityFilter = and(
    eq(schema.tokenModelAvailability.tokenId, tokenId),
    eq(schema.tokenModelAvailability.modelName, modelName),
    eq(schema.tokenModelAvailability.available, true),
  );
  const match = db
    .select()
    .from(schema.tokenModelAvailability)
    .where(availabilityFilter)
    .get();
  return Boolean(match);
}
/**
 * Verifies that the token row exists and is owned by the given account.
 */
function checkTokenBelongsToAccount(tokenId: number, accountId: number): boolean {
  const ownershipFilter = and(
    eq(schema.accountTokens.id, tokenId),
    eq(schema.accountTokens.accountId, accountId),
  );
  const owned = db.select().from(schema.accountTokens).where(ownershipFilter).get();
  return Boolean(owned);
}
// One channel-priority change inside a PUT /api/channels/batch request.
type BatchChannelPriorityUpdate = {
  id: number;       // route_channels row id (parser requires > 0)
  priority: number; // desired priority (parser clamps to >= 0)
};
// Request body shape for POST /api/routes/decision/batch.
type BatchRouteDecisionModels = {
  models: string[];
};
/**
 * Validates and normalizes the body of PUT /api/channels/batch.
 *
 * Accepts `{ updates: [{ id, priority }, ...] }`. Ids and priorities must
 * be finite numbers; ids are truncated to integers and must be positive,
 * priorities are truncated and clamped to a minimum of 0. The first
 * invalid entry aborts parsing with a descriptive (Chinese) message.
 */
function parseBatchChannelUpdates(
  input: unknown,
): { ok: true; updates: Array<{ id: number; priority: number }> } | { ok: false; message: string } {
  if (!input || typeof input !== 'object') {
    return { ok: false, message: '请求体必须是对象' };
  }
  const { updates } = input as { updates?: unknown };
  if (!Array.isArray(updates) || updates.length === 0) {
    return { ok: false, message: 'updates 必须是非空数组' };
  }
  const isFiniteNumber = (value: unknown): value is number =>
    typeof value === 'number' && Number.isFinite(value);
  const normalized: Array<{ id: number; priority: number }> = [];
  for (const [index, entry] of updates.entries()) {
    if (!entry || typeof entry !== 'object') {
      return { ok: false, message: `updates[${index}] 必须是对象` };
    }
    const candidate = entry as { id?: unknown; priority?: unknown };
    if (!isFiniteNumber(candidate.id)) {
      return { ok: false, message: `updates[${index}].id 必须是有限数字` };
    }
    if (!isFiniteNumber(candidate.priority)) {
      return { ok: false, message: `updates[${index}].priority 必须是有限数字` };
    }
    const channelId = Math.trunc(candidate.id);
    if (channelId <= 0) {
      return { ok: false, message: `updates[${index}].id 必须大于 0` };
    }
    normalized.push({
      id: channelId,
      priority: Math.max(0, Math.trunc(candidate.priority)),
    });
  }
  return { ok: true, updates: normalized };
}
/**
 * Validates the body of POST /api/routes/decision/batch.
 *
 * Accepts `{ models: string[] }`. Non-string entries are skipped, entries
 * are trimmed, blanks and duplicates are dropped (first occurrence wins),
 * and the resulting list is capped at 500 names to bound per-request work.
 */
function parseBatchRouteDecisionModels(
  input: unknown,
): { ok: true; models: string[] } | { ok: false; message: string } {
  if (!input || typeof input !== 'object') {
    return { ok: false, message: '请求体必须是对象' };
  }
  const { models } = input as { models?: unknown };
  if (!Array.isArray(models) || models.length === 0) {
    return { ok: false, message: 'models 必须是非空数组' };
  }
  // A Set preserves insertion order, so it both dedupes and keeps ordering.
  const accepted = new Set<string>();
  for (const entry of models) {
    if (typeof entry !== 'string') continue;
    const name = entry.trim();
    if (!name || accepted.has(name)) continue;
    accepted.add(name);
    if (accepted.size >= 500) break; // hard cap
  }
  if (accepted.size === 0) {
    return { ok: false, message: 'models 中没有有效模型名称' };
  }
  return { ok: true, models: [...accepted] };
}
/**
 * Registers all token-route and channel management endpoints:
 * route CRUD, channel CRUD, batch priority updates, routing-decision
 * inspection (single + batch), and route rebuild triggers.
 */
export async function tokensRoutes(app: FastifyInstance) {
  // List all routes
  app.get('/api/routes', async () => {
    const routes = db.select().from(schema.tokenRoutes).all();
    if (routes.length === 0) return [];
    const routeIds = routes.map((route) => route.id);
    // One joined query for all channels of all routes: account/site are
    // required (inner join), the bound token is optional (left join).
    const channelRows = db.select().from(schema.routeChannels)
      .innerJoin(schema.accounts, eq(schema.routeChannels.accountId, schema.accounts.id))
      .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
      .leftJoin(schema.accountTokens, eq(schema.routeChannels.tokenId, schema.accountTokens.id))
      .where(inArray(schema.routeChannels.routeId, routeIds))
      .all();
    // Group joined rows by routeId so each route can embed its channels.
    const channelsByRoute = new Map<number, any[]>();
    for (const row of channelRows) {
      const routeId = row.route_channels.routeId;
      if (!channelsByRoute.has(routeId)) channelsByRoute.set(routeId, []);
      channelsByRoute.get(routeId)!.push({
        ...row.route_channels,
        account: row.accounts,
        site: row.sites,
        // Only expose a safe subset of token fields (no token value).
        token: row.account_tokens
          ? {
              id: row.account_tokens.id,
              name: row.account_tokens.name,
              accountId: row.account_tokens.accountId,
              enabled: row.account_tokens.enabled,
              isDefault: row.account_tokens.isDefault,
            }
          : null,
      });
    }
    return routes.map((route) => ({
      ...route,
      channels: channelsByRoute.get(route.id) || [],
    }));
  });
  // Explain the routing decision for a single model name.
  app.get<{ Querystring: { model?: string } }>('/api/routes/decision', async (request, reply) => {
    const model = (request.query.model || '').trim();
    if (!model) {
      return reply.code(400).send({ success: false, message: 'model 不能为空' });
    }
    const decision = tokenRouter.explainSelection(model);
    return { success: true, decision };
  });
  // Explain routing decisions for many model names in one call.
  app.post<{ Body: BatchRouteDecisionModels }>('/api/routes/decision/batch', async (request, reply) => {
    const parsed = parseBatchRouteDecisionModels(request.body);
    if (!parsed.ok) {
      return reply.code(400).send({ success: false, message: parsed.message });
    }
    const decisions: Record<string, ReturnType<typeof tokenRouter.explainSelection>> = {};
    for (const model of parsed.models) {
      decisions[model] = tokenRouter.explainSelection(model);
    }
    return { success: true, decisions };
  });
  // Create a route
  app.post<{ Body: { modelPattern: string; modelMapping?: string; enabled?: boolean } }>('/api/routes', async (request) => {
    const body = request.body;
    return db.insert(schema.tokenRoutes).values({
      modelPattern: body.modelPattern,
      modelMapping: body.modelMapping,
      enabled: body.enabled ?? true,
    }).returning().get();
  });
  // Update a route (partial: only fields present in the body are changed).
  app.put<{ Params: { id: string }; Body: any }>('/api/routes/:id', async (request) => {
    const id = parseInt(request.params.id, 10);
    const body = request.body as Record<string, unknown>;
    const updates: Record<string, unknown> = {};
    if (body.modelPattern !== undefined) updates.modelPattern = body.modelPattern;
    if (body.modelMapping !== undefined) updates.modelMapping = body.modelMapping;
    if (body.enabled !== undefined) updates.enabled = body.enabled;
    updates.updatedAt = new Date().toISOString();
    db.update(schema.tokenRoutes).set(updates).where(eq(schema.tokenRoutes.id, id)).run();
    // Returns the fresh row (undefined if the id did not exist).
    return db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, id)).get();
  });
  // Delete a route
  app.delete<{ Params: { id: string } }>('/api/routes/:id', async (request) => {
    const id = parseInt(request.params.id, 10);
    db.delete(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, id)).run();
    return { success: true };
  });
  // Add a channel to a route
  app.post<{ Params: { id: string }; Body: { accountId: number; tokenId?: number; priority?: number; weight?: number } }>('/api/routes/:id/channels', async (request, reply) => {
    const routeId = parseInt(request.params.id, 10);
    const body = request.body;
    const route = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, routeId)).get();
    if (!route) {
      return reply.code(404).send({ success: false, message: '路由不存在' });
    }
    // When no explicit token is given, validate against the account default.
    const effectiveTokenId = body.tokenId ?? getDefaultTokenId(body.accountId);
    if (body.tokenId && !checkTokenBelongsToAccount(body.tokenId, body.accountId)) {
      return reply.code(400).send({ success: false, message: '令牌不存在或不属于当前账号' });
    }
    // For literal (non-wildcard) patterns, reject tokens that cannot serve the model.
    if (isExactModelPattern(route.modelPattern) && effectiveTokenId && !tokenSupportsModel(effectiveTokenId, route.modelPattern)) {
      return reply.code(400).send({ success: false, message: '该令牌不支持当前模型' });
    }
    return db.insert(schema.routeChannels).values({
      routeId,
      accountId: body.accountId,
      tokenId: body.tokenId,
      priority: body.priority ?? 0,
      weight: body.weight ?? 10,
    }).returning().get();
  });
  // Batch update channel priorities
  app.put<{ Body: { updates: Array<{ id: number; priority: number }> } }>('/api/channels/batch', async (request, reply) => {
    const parsed = parseBatchChannelUpdates(request.body);
    if (!parsed.ok) {
      return reply.code(400).send({ success: false, message: parsed.message });
    }
    // All referenced channels must exist before any update is applied.
    const channelIds = Array.from(new Set(parsed.updates.map((update) => update.id)));
    const existingChannels = db.select().from(schema.routeChannels)
      .where(inArray(schema.routeChannels.id, channelIds))
      .all();
    if (existingChannels.length !== channelIds.length) {
      const existingIds = new Set(existingChannels.map((channel) => channel.id));
      const missingId = channelIds.find((id) => !existingIds.has(id));
      return reply.code(404).send({ success: false, message: `通道不存在: ${missingId}` });
    }
    // Each touched channel is flagged manualOverride so auto-rebuilds keep it.
    for (const update of parsed.updates) {
      db.update(schema.routeChannels).set({
        priority: update.priority,
        manualOverride: true,
      }).where(eq(schema.routeChannels.id, update.id)).run();
    }
    const updatedChannels = db.select().from(schema.routeChannels)
      .where(inArray(schema.routeChannels.id, channelIds))
      .all();
    return { success: true, channels: updatedChannels };
  });
  // Update a channel
  app.put<{ Params: { channelId: string }; Body: any }>('/api/channels/:channelId', async (request, reply) => {
    const channelId = parseInt(request.params.channelId, 10);
    const body = request.body as Record<string, unknown>;
    const channel = db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
    if (!channel) {
      return reply.code(404).send({ success: false, message: '通道不存在' });
    }
    const route = db.select().from(schema.tokenRoutes).where(eq(schema.tokenRoutes.id, channel.routeId)).get();
    if (!route) {
      return reply.code(404).send({ success: false, message: '路由不存在' });
    }
    // A non-null tokenId in the body must belong to the channel's account.
    if (body.tokenId !== undefined && body.tokenId !== null) {
      const tokenId = Number(body.tokenId);
      if (!Number.isFinite(tokenId) || !checkTokenBelongsToAccount(tokenId, channel.accountId)) {
        return reply.code(400).send({ success: false, message: '令牌不存在或不属于通道账号' });
      }
    }
    // Resolve the token that will effectively be used after this update:
    // omitted -> keep current (falling back to the account default),
    // null    -> account default, number -> that token.
    const nextTokenId = body.tokenId === undefined
      ? (channel.tokenId ?? getDefaultTokenId(channel.accountId))
      : (body.tokenId === null ? getDefaultTokenId(channel.accountId) : Number(body.tokenId));
    if (isExactModelPattern(route.modelPattern) && nextTokenId && !tokenSupportsModel(nextTokenId, route.modelPattern)) {
      return reply.code(400).send({ success: false, message: '该令牌不支持当前模型' });
    }
    // Any manual edit marks the channel as manually overridden.
    const updates: Record<string, unknown> = { manualOverride: true };
    for (const key of ['priority', 'weight', 'enabled', 'tokenId']) {
      if (body[key] !== undefined) updates[key] = body[key];
    }
    db.update(schema.routeChannels).set(updates).where(eq(schema.routeChannels.id, channelId)).run();
    return db.select().from(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).get();
  });
  // Delete a channel
  app.delete<{ Params: { channelId: string } }>('/api/channels/:channelId', async (request) => {
    const channelId = parseInt(request.params.channelId, 10);
    db.delete(schema.routeChannels).where(eq(schema.routeChannels.id, channelId)).run();
    return { success: true };
  });
  // Rebuild routes/channels from model availability.
  // Default: refresh models first and run as a background task (202).
  // refreshModels:false -> rebuild synchronously from cached availability.
  // wait:true           -> refresh + rebuild synchronously in this request.
  app.post<{ Body?: { refreshModels?: boolean; wait?: boolean } }>('/api/routes/rebuild', async (request, reply) => {
    const body = (request.body || {}) as { refreshModels?: boolean };
    if (body.refreshModels === false) {
      const rebuild = rebuildTokenRoutesFromAvailability();
      return { success: true, rebuild };
    }
    if ((request.body as { wait?: boolean } | undefined)?.wait) {
      const result = await refreshModelsAndRebuildRoutes();
      return { success: true, ...result };
    }
    // dedupeKey makes concurrent rebuild requests reuse the running task.
    const { task, reused } = startBackgroundTask(
      {
        type: 'route',
        title: '刷新模型并重建路由',
        dedupeKey: 'refresh-models-and-rebuild-routes',
        notifyOnFailure: true,
        successMessage: (currentTask) => {
          const rebuild = (currentTask.result as any)?.rebuild;
          if (!rebuild) return '刷新模型并重建路由已完成';
          return `刷新模型并重建路由完成:新增路由 ${rebuild.createdRoutes},新增通道 ${rebuild.createdChannels},移除通道 ${rebuild.removedChannels}`;
        },
        failureMessage: (currentTask) => `刷新模型并重建路由失败:${currentTask.error || 'unknown error'}`,
      },
      async () => refreshModelsAndRebuildRoutes(),
    );
    return reply.code(202).send({
      success: true,
      queued: true,
      reused,
      jobId: task.id,
      status: task.status,
      message: reused
        ? '路由重建任务执行中,请稍后查看程序日志'
        : '已开始路由重建,请稍后查看程序日志',
    });
  });
}
+317
View File
@@ -0,0 +1,317 @@
import Fastify, { type FastifyInstance } from 'fastify';
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
// --- Mock handles -----------------------------------------------------------
// vitest hoists the vi.mock(...) calls below above all imports; the factories
// close over these vi.fn() handles, which are initialized by the time the
// mocked modules are first imported by the code under test.
const fetchMock = vi.fn();
const selectChannelMock = vi.fn();
const selectNextChannelMock = vi.fn();
const recordSuccessMock = vi.fn();
const recordFailureMock = vi.fn();
const refreshModelsAndRebuildRoutesMock = vi.fn();
const reportProxyAllFailedMock = vi.fn();
const reportTokenExpiredMock = vi.fn();
// Cost estimation is stubbed to zero so tests never depend on pricing data.
const estimateProxyCostMock = vi.fn(async (_arg?: any) => 0);
// Usage fallback echoes the locally parsed usage without self-log recovery.
const resolveProxyUsageWithSelfLogFallbackMock = vi.fn(async ({ usage }: any) => ({
  ...usage,
  estimatedCostFromQuota: 0,
  recoveredFromSelfLog: false,
}));
// Minimal drizzle-like insert chain so proxy logging is a no-op.
const dbInsertMock = vi.fn((_arg?: any) => ({
  values: () => ({
    run: () => undefined,
  }),
}));
vi.mock('undici', () => ({
  fetch: (...args: unknown[]) => fetchMock(...args),
}));
vi.mock('../../services/tokenRouter.js', () => ({
  tokenRouter: {
    selectChannel: (...args: unknown[]) => selectChannelMock(...args),
    selectNextChannel: (...args: unknown[]) => selectNextChannelMock(...args),
    recordSuccess: (...args: unknown[]) => recordSuccessMock(...args),
    recordFailure: (...args: unknown[]) => recordFailureMock(...args),
  },
}));
vi.mock('../../services/modelService.js', () => ({
  refreshModelsAndRebuildRoutes: (...args: unknown[]) => refreshModelsAndRebuildRoutesMock(...args),
}));
vi.mock('../../services/alertService.js', () => ({
  reportProxyAllFailed: (...args: unknown[]) => reportProxyAllFailedMock(...args),
  reportTokenExpired: (...args: unknown[]) => reportTokenExpiredMock(...args),
}));
// Token-expiry detection is disabled so failure paths stay deterministic.
vi.mock('../../services/alertRules.js', () => ({
  isTokenExpiredError: () => false,
}));
vi.mock('../../services/modelPricingService.js', () => ({
  estimateProxyCost: (arg: any) => estimateProxyCostMock(arg),
}));
// Retries are disabled: the first upstream response decides the outcome.
vi.mock('../../services/proxyRetryPolicy.js', () => ({
  shouldRetryProxyRequest: () => false,
}));
vi.mock('../../services/proxyUsageFallbackService.js', () => ({
  resolveProxyUsageWithSelfLogFallback: (arg: any) => resolveProxyUsageWithSelfLogFallbackMock(arg),
}));
vi.mock('../../db/index.js', () => ({
  db: {
    insert: (arg: any) => dbInsertMock(arg),
  },
  schema: {
    proxyLogs: {},
  },
}));
describe('chat proxy stream behavior', () => {
  let app: FastifyInstance;
  beforeAll(async () => {
    // chat.js must be imported AFTER the vi.mock declarations take effect,
    // hence the dynamic import here rather than a top-level import.
    const { chatProxyRoute, claudeMessagesProxyRoute } = await import('./chat.js');
    app = Fastify();
    await app.register(chatProxyRoute);
    await app.register(claudeMessagesProxyRoute);
  });
  beforeEach(() => {
    fetchMock.mockReset();
    selectChannelMock.mockReset();
    selectNextChannelMock.mockReset();
    recordSuccessMock.mockReset();
    recordFailureMock.mockReset();
    refreshModelsAndRebuildRoutesMock.mockReset();
    reportProxyAllFailedMock.mockReset();
    reportTokenExpiredMock.mockReset();
    estimateProxyCostMock.mockClear();
    resolveProxyUsageWithSelfLogFallbackMock.mockClear();
    dbInsertMock.mockClear();
    // Every test routes to the same fake channel/site/account; only the
    // upstream response (fetchMock) varies per test.
    selectChannelMock.mockReturnValue({
      channel: { id: 11, routeId: 22 },
      site: { name: 'demo-site', url: 'https://upstream.example.com' },
      account: { id: 33, username: 'demo-user' },
      tokenName: 'default',
      tokenValue: 'sk-demo',
      actualModel: 'upstream-gpt',
    });
    // No fallback channel: a failed first attempt cannot retry elsewhere.
    selectNextChannelMock.mockReturnValue(null);
  });
  afterAll(async () => {
    await app.close();
  });
  it('converts non-SSE upstream streaming responses into SSE events', async () => {
    // Upstream answers a stream request with a plain JSON completion;
    // the proxy must synthesize SSE chunks from it.
    fetchMock.mockResolvedValue(new Response(JSON.stringify({
      id: 'chatcmpl-demo',
      object: 'chat.completion',
      created: 1_706_000_000,
      model: 'upstream-gpt',
      choices: [{
        index: 0,
        message: { role: 'assistant', content: 'hello from upstream' },
        finish_reason: 'stop',
      }],
      usage: { prompt_tokens: 11, completion_tokens: 7, total_tokens: 18 },
    }), {
      status: 200,
      headers: { 'content-type': 'application/json' },
    }));
    const response = await app.inject({
      method: 'POST',
      url: '/v1/chat/completions',
      payload: {
        model: 'gpt-4o-mini',
        stream: true,
        messages: [{ role: 'user', content: 'hi' }],
      },
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers['content-type']).toContain('text/event-stream');
    expect(response.body).toContain('data: ');
    expect(response.body).toContain('"chat.completion.chunk"');
    expect(response.body).toContain('hello from upstream');
    expect(response.body).toContain('data: [DONE]');
  });
  it('sets anti-buffering SSE headers for streamed chat responses', async () => {
    const encoder = new TextEncoder();
    const upstreamBody = new ReadableStream<Uint8Array>({
      start(controller) {
        controller.enqueue(encoder.encode('data: {"choices":[{"delta":{"content":"hello"}}]}\n\n'));
        controller.enqueue(encoder.encode('data: [DONE]\n\n'));
        controller.close();
      },
    });
    fetchMock.mockResolvedValue(new Response(upstreamBody, {
      status: 200,
      headers: { 'content-type': 'text/event-stream; charset=utf-8' },
    }));
    const response = await app.inject({
      method: 'POST',
      url: '/v1/chat/completions',
      payload: {
        model: 'gpt-4o-mini',
        stream: true,
        messages: [{ role: 'user', content: 'hello' }],
      },
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers['content-type']).toContain('text/event-stream');
    // no-transform / X-Accel-Buffering keep proxies (e.g. nginx) from buffering.
    expect(response.headers['cache-control']).toContain('no-transform');
    expect(response.headers['x-accel-buffering']).toBe('no');
    expect(response.body).toContain('"chat.completion.chunk"');
    expect(response.body).toContain('"delta":{"role":"assistant","content":"hello"}');
    expect(response.body).toContain('data: [DONE]');
  });
  it('normalizes anthropic-style SSE events into OpenAI chunks for clients like OpenWebUI', async () => {
    const encoder = new TextEncoder();
    const upstreamBody = new ReadableStream<Uint8Array>({
      start(controller) {
        controller.enqueue(encoder.encode('event: message_start\ndata: {"type":"message_start","message":{"id":"msg_123","model":"claude-opus-4-6"}}\n\n'));
        controller.enqueue(encoder.encode('event: content_block_delta\ndata: {"type":"content_block_delta","delta":{"type":"text_delta","text":"hello"}}\n\n'));
        controller.enqueue(encoder.encode('event: message_delta\ndata: {"type":"message_delta","delta":{"stop_reason":"end_turn"}}\n\n'));
        controller.enqueue(encoder.encode('event: message_stop\ndata: {"type":"message_stop"}\n\n'));
        controller.close();
      },
    });
    fetchMock.mockResolvedValue(new Response(upstreamBody, {
      status: 200,
      headers: { 'content-type': 'text/event-stream; charset=utf-8' },
    }));
    const response = await app.inject({
      method: 'POST',
      url: '/v1/chat/completions',
      payload: {
        model: 'claude-opus-4-6',
        stream: true,
        messages: [{ role: 'user', content: 'who are you' }],
      },
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers['content-type']).toContain('text/event-stream');
    expect(response.body).toContain('"chat.completion.chunk"');
    expect(response.body).toContain('"delta":{"content":"hello"}');
    // Anthropic end_turn maps to OpenAI finish_reason "stop".
    expect(response.body).toContain('"finish_reason":"stop"');
    expect(response.body).toContain('data: [DONE]');
  });
  it('emits OpenAI-compatible assistant starter chunk for anthropic message_start events', async () => {
    const encoder = new TextEncoder();
    const upstreamBody = new ReadableStream<Uint8Array>({
      start(controller) {
        controller.enqueue(encoder.encode('event: message_start\ndata: {"type":"message_start","message":{"id":"msg_compat","model":"claude-opus-4-6"}}\n\n'));
        controller.enqueue(encoder.encode('event: content_block_delta\ndata: {"type":"content_block_delta","delta":{"type":"text_delta","text":"compat"}}\n\n'));
        controller.enqueue(encoder.encode('event: message_stop\ndata: {"type":"message_stop"}\n\n'));
        controller.close();
      },
    });
    fetchMock.mockResolvedValue(new Response(upstreamBody, {
      status: 200,
      headers: { 'content-type': 'text/event-stream; charset=utf-8' },
    }));
    const response = await app.inject({
      method: 'POST',
      url: '/v1/chat/completions',
      payload: {
        model: 'claude-opus-4-6',
        stream: true,
        messages: [{ role: 'user', content: 'compat test' }],
      },
    });
    expect(response.statusCode).toBe(200);
    // First chunk must announce the assistant role with empty content.
    expect(response.body).toContain('"delta":{"role":"assistant","content":""}');
    expect(response.body).toContain('"delta":{"content":"compat"}');
    expect(response.body).toContain('data: [DONE]');
  });
  it('converts OpenAI non-stream responses into Claude message format on /v1/messages', async () => {
    fetchMock.mockResolvedValue(new Response(JSON.stringify({
      id: 'chatcmpl-upstream',
      object: 'chat.completion',
      created: 1_706_000_001,
      model: 'claude-opus-4-6',
      choices: [{
        index: 0,
        message: { role: 'assistant', content: 'hello from claude format' },
        finish_reason: 'stop',
      }],
      usage: { prompt_tokens: 120, completion_tokens: 16, total_tokens: 136 },
    }), {
      status: 200,
      headers: { 'content-type': 'application/json' },
    }));
    const response = await app.inject({
      method: 'POST',
      url: '/v1/messages',
      payload: {
        model: 'claude-opus-4-6',
        max_tokens: 256,
        messages: [{ role: 'user', content: 'hello' }],
      },
    });
    expect(response.statusCode).toBe(200);
    const body = response.json();
    expect(body.type).toBe('message');
    expect(body.role).toBe('assistant');
    expect(body.model).toBe('claude-opus-4-6');
    expect(body.content?.[0]?.type).toBe('text');
    expect(body.content?.[0]?.text).toContain('hello from claude format');
    // OpenAI "stop" maps back to Claude stop_reason "end_turn".
    expect(body.stop_reason).toBe('end_turn');
  });
  it('converts OpenAI SSE chunks into Claude stream events on /v1/messages', async () => {
    const encoder = new TextEncoder();
    const upstreamBody = new ReadableStream<Uint8Array>({
      start(controller) {
        controller.enqueue(encoder.encode('data: {"id":"chatcmpl-1","model":"claude-opus-4-6","choices":[{"delta":{"role":"assistant"},"finish_reason":null}]}\n\n'));
        controller.enqueue(encoder.encode('data: {"id":"chatcmpl-1","model":"claude-opus-4-6","choices":[{"delta":{"content":"hello"},"finish_reason":null}]}\n\n'));
        controller.enqueue(encoder.encode('data: {"id":"chatcmpl-1","model":"claude-opus-4-6","choices":[{"delta":{},"finish_reason":"stop"}]}\n\n'));
        controller.enqueue(encoder.encode('data: [DONE]\n\n'));
        controller.close();
      },
    });
    fetchMock.mockResolvedValue(new Response(upstreamBody, {
      status: 200,
      headers: { 'content-type': 'text/event-stream; charset=utf-8' },
    }));
    const response = await app.inject({
      method: 'POST',
      url: '/v1/messages',
      payload: {
        model: 'claude-opus-4-6',
        stream: true,
        max_tokens: 256,
        messages: [{ role: 'user', content: 'hello' }],
      },
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers['content-type']).toContain('text/event-stream');
    expect(response.body).toContain('event: message_start');
    expect(response.body).toContain('event: content_block_delta');
    expect(response.body).toContain('\"text\":\"hello\"');
    expect(response.body).toContain('event: message_stop');
  });
});
+468
View File
@@ -0,0 +1,468 @@
import { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
import { tokenRouter } from '../../services/tokenRouter.js';
import { db, schema } from '../../db/index.js';
import { fetch } from 'undici';
import { refreshModelsAndRebuildRoutes } from '../../services/modelService.js';
import { reportProxyAllFailed, reportTokenExpired } from '../../services/alertService.js';
import { isTokenExpiredError } from '../../services/alertRules.js';
import { estimateProxyCost } from '../../services/modelPricingService.js';
import { shouldRetryProxyRequest } from '../../services/proxyRetryPolicy.js';
import { resolveProxyUsageWithSelfLogFallback } from '../../services/proxyUsageFallbackService.js';
import { mergeProxyUsage, parseProxyUsage } from '../../services/proxyUsageParser.js';
import {
type DownstreamFormat,
createStreamTransformContext,
createClaudeDownstreamContext,
parseDownstreamChatRequest,
pullSseEventsWithDone,
normalizeUpstreamStreamEvent,
serializeNormalizedStreamEvent,
serializeStreamDone,
normalizeUpstreamFinalResponse,
serializeFinalResponse,
buildSyntheticOpenAiChunks,
} from './chatFormats.js';
// Maximum number of additional channels tried after the first attempt fails.
const MAX_RETRIES = 2;
/**
 * Mounts the OpenAI-compatible chat endpoint; all work is delegated to the
 * shared proxy handler in 'openai' downstream mode.
 */
export async function chatProxyRoute(app: FastifyInstance) {
  app.post('/v1/chat/completions', (request: FastifyRequest, reply: FastifyReply) => {
    return handleChatProxyRequest(request, reply, 'openai');
  });
}
/**
 * Mounts the Claude-compatible messages endpoint; all work is delegated to
 * the shared proxy handler in 'claude' downstream mode.
 */
export async function claudeMessagesProxyRoute(app: FastifyInstance) {
  app.post('/v1/messages', (request: FastifyRequest, reply: FastifyReply) => {
    return handleChatProxyRequest(request, reply, 'claude');
  });
}
async function handleChatProxyRequest(
request: FastifyRequest,
reply: FastifyReply,
downstreamFormat: DownstreamFormat,
) {
const parsedRequest = parseDownstreamChatRequest(request.body, downstreamFormat);
if (parsedRequest.error) {
return reply.code(parsedRequest.error.statusCode).send(parsedRequest.error.payload);
}
const { requestedModel, isStream, upstreamBody } = parsedRequest.value!;
const excludeChannelIds: number[] = [];
let retryCount = 0;
while (retryCount <= MAX_RETRIES) {
let selected = retryCount === 0
? tokenRouter.selectChannel(requestedModel)
: tokenRouter.selectNextChannel(requestedModel, excludeChannelIds);
if (!selected && retryCount === 0) {
await refreshModelsAndRebuildRoutes();
selected = tokenRouter.selectChannel(requestedModel);
}
if (!selected) {
await reportProxyAllFailed({
model: requestedModel,
reason: 'No available channels after retries',
});
return reply.code(503).send({
error: { message: 'No available channels for this model', type: 'server_error' },
});
}
excludeChannelIds.push(selected.channel.id);
const targetUrl = `${selected.site.url}/v1/chat/completions`;
const forwardBody = {
...upstreamBody,
model: selected.actualModel,
stream: isStream,
};
const startTime = Date.now();
try {
const upstream = await fetch(targetUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${selected.tokenValue}`,
},
body: JSON.stringify(forwardBody),
});
if (!upstream.ok) {
const errText = await upstream.text().catch(() => 'unknown error');
tokenRouter.recordFailure(selected.channel.id);
logProxy(selected, requestedModel, 'failed', upstream.status, Date.now() - startTime, errText, retryCount);
if (isTokenExpiredError({ status: upstream.status, message: errText })) {
await reportTokenExpired({
accountId: selected.account.id,
username: selected.account.username,
siteName: selected.site.name,
detail: `HTTP ${upstream.status}`,
});
}
if (shouldRetryProxyRequest(upstream.status, errText) && retryCount < MAX_RETRIES) {
retryCount += 1;
continue;
}
await reportProxyAllFailed({
model: requestedModel,
reason: `upstream returned HTTP ${upstream.status}`,
});
return reply.code(upstream.status).send({
error: { message: errText, type: 'upstream_error' },
});
}
const modelName = selected.actualModel || requestedModel;
if (isStream) {
reply.hijack();
reply.raw.statusCode = 200;
reply.raw.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
reply.raw.setHeader('Cache-Control', 'no-cache, no-transform');
reply.raw.setHeader('Connection', 'keep-alive');
reply.raw.setHeader('X-Accel-Buffering', 'no');
const streamContext = createStreamTransformContext(modelName);
const claudeContext = createClaudeDownstreamContext();
let parsedUsage = { promptTokens: 0, completionTokens: 0, totalTokens: 0 };
const writeLines = (lines: string[]) => {
for (const line of lines) {
reply.raw.write(line);
}
};
const writeDone = () => {
writeLines(serializeStreamDone(downstreamFormat, streamContext, claudeContext));
};
const emitNormalizedFinalAsStream = (upstreamData: unknown, fallbackText = '') => {
const normalizedFinal = normalizeUpstreamFinalResponse(upstreamData, modelName, fallbackText);
streamContext.id = normalizedFinal.id;
streamContext.model = normalizedFinal.model;
streamContext.created = normalizedFinal.created;
if (downstreamFormat === 'openai') {
const syntheticChunks = buildSyntheticOpenAiChunks(normalizedFinal);
for (const chunk of syntheticChunks) {
reply.raw.write(`data: ${JSON.stringify(chunk)}\n\n`);
}
return;
}
writeLines(serializeNormalizedStreamEvent('claude', { role: 'assistant' }, streamContext, claudeContext));
const combinedText = [normalizedFinal.reasoningContent, normalizedFinal.content]
.filter((item) => typeof item === 'string' && item.trim().length > 0)
.join('\n\n');
if (combinedText) {
writeLines(serializeNormalizedStreamEvent('claude', {
contentDelta: combinedText,
}, streamContext, claudeContext));
}
writeLines(serializeNormalizedStreamEvent('claude', {
finishReason: normalizedFinal.finishReason,
}, streamContext, claudeContext));
};
const upstreamContentType = (upstream.headers.get('content-type') || '').toLowerCase();
if (!upstreamContentType.includes('text/event-stream')) {
const fallbackText = await upstream.text();
let fallbackData: unknown = null;
try {
fallbackData = JSON.parse(fallbackText);
} catch {
fallbackData = fallbackText;
}
parsedUsage = mergeProxyUsage(parsedUsage, parseProxyUsage(fallbackData));
emitNormalizedFinalAsStream(fallbackData, fallbackText);
writeDone();
reply.raw.end();
const latency = Date.now() - startTime;
const resolvedUsage = await resolveProxyUsageWithSelfLogFallback({
site: selected.site,
account: selected.account,
tokenValue: selected.tokenValue,
tokenName: selected.tokenName,
modelName,
requestStartedAtMs: startTime,
requestEndedAtMs: startTime + latency,
localLatencyMs: latency,
usage: {
promptTokens: parsedUsage.promptTokens,
completionTokens: parsedUsage.completionTokens,
totalTokens: parsedUsage.totalTokens,
},
});
let estimatedCost = await estimateProxyCost({
site: selected.site,
account: selected.account,
modelName,
promptTokens: resolvedUsage.promptTokens,
completionTokens: resolvedUsage.completionTokens,
totalTokens: resolvedUsage.totalTokens,
});
if (resolvedUsage.estimatedCostFromQuota > 0 && (resolvedUsage.recoveredFromSelfLog || estimatedCost <= 0)) {
estimatedCost = resolvedUsage.estimatedCostFromQuota;
}
tokenRouter.recordSuccess(selected.channel.id, latency, estimatedCost);
logProxy(
selected,
requestedModel,
'success',
200,
latency,
null,
retryCount,
resolvedUsage.promptTokens,
resolvedUsage.completionTokens,
resolvedUsage.totalTokens,
estimatedCost,
);
return;
}
const reader = upstream.body?.getReader();
if (!reader) {
writeDone();
reply.raw.end();
return;
}
const decoder = new TextDecoder();
let sseBuffer = '';
const consumeSseBuffer = (incoming: string): string => {
const pulled = pullSseEventsWithDone(incoming);
for (const eventBlock of pulled.events) {
if (eventBlock.data === '[DONE]') {
writeDone();
continue;
}
let parsedPayload: unknown = null;
try {
parsedPayload = JSON.parse(eventBlock.data);
} catch {
parsedPayload = null;
}
if (parsedPayload && typeof parsedPayload === 'object') {
parsedUsage = mergeProxyUsage(parsedUsage, parseProxyUsage(parsedPayload));
const normalizedEvent = normalizeUpstreamStreamEvent(parsedPayload, streamContext, modelName);
writeLines(serializeNormalizedStreamEvent(
downstreamFormat,
normalizedEvent,
streamContext,
claudeContext,
));
continue;
}
if (downstreamFormat === 'openai') {
reply.raw.write(`data: ${eventBlock.data}\n\n`);
} else {
writeLines(serializeNormalizedStreamEvent('claude', {
contentDelta: eventBlock.data,
}, streamContext, claudeContext));
}
}
return pulled.rest;
};
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
if (!value) continue;
sseBuffer += decoder.decode(value, { stream: true });
sseBuffer = consumeSseBuffer(sseBuffer);
}
sseBuffer += decoder.decode();
if (sseBuffer.trim().length > 0) {
sseBuffer = consumeSseBuffer(`${sseBuffer}\n\n`);
}
} finally {
reader.releaseLock();
writeDone();
reply.raw.end();
}
const latency = Date.now() - startTime;
const resolvedUsage = await resolveProxyUsageWithSelfLogFallback({
site: selected.site,
account: selected.account,
tokenValue: selected.tokenValue,
tokenName: selected.tokenName,
modelName,
requestStartedAtMs: startTime,
requestEndedAtMs: startTime + latency,
localLatencyMs: latency,
usage: {
promptTokens: parsedUsage.promptTokens,
completionTokens: parsedUsage.completionTokens,
totalTokens: parsedUsage.totalTokens,
},
});
let estimatedCost = await estimateProxyCost({
site: selected.site,
account: selected.account,
modelName,
promptTokens: resolvedUsage.promptTokens,
completionTokens: resolvedUsage.completionTokens,
totalTokens: resolvedUsage.totalTokens,
});
if (resolvedUsage.estimatedCostFromQuota > 0 && (resolvedUsage.recoveredFromSelfLog || estimatedCost <= 0)) {
estimatedCost = resolvedUsage.estimatedCostFromQuota;
}
tokenRouter.recordSuccess(selected.channel.id, latency, estimatedCost);
logProxy(
selected,
requestedModel,
'success',
200,
latency,
null,
retryCount,
resolvedUsage.promptTokens,
resolvedUsage.completionTokens,
resolvedUsage.totalTokens,
estimatedCost,
);
return;
}
const rawText = await upstream.text();
let upstreamData: unknown = rawText;
try {
upstreamData = JSON.parse(rawText);
} catch {
upstreamData = rawText;
}
const latency = Date.now() - startTime;
const parsedUsage = parseProxyUsage(upstreamData);
const normalizedFinal = normalizeUpstreamFinalResponse(upstreamData, modelName, rawText);
const downstreamResponse = serializeFinalResponse(downstreamFormat, normalizedFinal, parsedUsage);
const resolvedUsage = await resolveProxyUsageWithSelfLogFallback({
site: selected.site,
account: selected.account,
tokenValue: selected.tokenValue,
tokenName: selected.tokenName,
modelName,
requestStartedAtMs: startTime,
requestEndedAtMs: startTime + latency,
localLatencyMs: latency,
usage: {
promptTokens: parsedUsage.promptTokens,
completionTokens: parsedUsage.completionTokens,
totalTokens: parsedUsage.totalTokens,
},
});
let estimatedCost = await estimateProxyCost({
site: selected.site,
account: selected.account,
modelName,
promptTokens: resolvedUsage.promptTokens,
completionTokens: resolvedUsage.completionTokens,
totalTokens: resolvedUsage.totalTokens,
});
if (resolvedUsage.estimatedCostFromQuota > 0 && (resolvedUsage.recoveredFromSelfLog || estimatedCost <= 0)) {
estimatedCost = resolvedUsage.estimatedCostFromQuota;
}
tokenRouter.recordSuccess(selected.channel.id, latency, estimatedCost);
logProxy(
selected,
requestedModel,
'success',
200,
latency,
null,
retryCount,
resolvedUsage.promptTokens,
resolvedUsage.completionTokens,
resolvedUsage.totalTokens,
estimatedCost,
);
return reply.send(downstreamResponse);
} catch (err: any) {
tokenRouter.recordFailure(selected.channel.id);
logProxy(selected, requestedModel, 'failed', 0, Date.now() - startTime, err?.message || 'network error', retryCount);
if (retryCount < MAX_RETRIES) {
retryCount += 1;
continue;
}
await reportProxyAllFailed({
model: requestedModel,
reason: err?.message || 'network failure',
});
return reply.code(502).send({
error: {
message: `Upstream error: ${err?.message || 'network failure'}`,
type: 'upstream_error',
},
});
}
}
}
// Best-effort insert of one proxy request record; logging must never break the request path.
function logProxy(
  selected: any,
  modelRequested: string,
  status: string,
  httpStatus: number,
  latencyMs: number,
  errorMessage: string | null,
  retryCount: number,
  promptTokens = 0,
  completionTokens = 0,
  totalTokens = 0,
  estimatedCost = 0,
) {
  const row = {
    routeId: selected.channel.routeId,
    channelId: selected.channel.id,
    accountId: selected.account.id,
    modelRequested,
    modelActual: selected.actualModel,
    status,
    httpStatus,
    latencyMs,
    promptTokens,
    completionTokens,
    totalTokens,
    estimatedCost,
    errorMessage,
    retryCount,
  };
  try {
    db.insert(schema.proxyLogs).values(row).run();
  } catch {
    // Deliberately swallowed: a failed log write must not fail the proxied call.
  }
}
+846
View File
@@ -0,0 +1,846 @@
// Wire protocol the downstream client speaks.
export type DownstreamFormat = 'openai' | 'claude';
// A single parsed SSE event: optional event name plus the joined data payload.
export type ParsedSseEvent = {
  event: string;
  data: string;
};
// Mutable per-stream state shared across chunk translations.
export type StreamTransformContext = {
  // Completion id surfaced to the client (replaced by the upstream id when one arrives).
  id: string;
  model: string;
  // Unix-seconds timestamp echoed in emitted chunks.
  created: number;
  // Whether the assistant role has been emitted in an OpenAI chunk yet.
  roleSent: boolean;
  // Whether the terminal done marker has been emitted.
  doneSent: boolean;
};
// Claude downstream framing state (message_start / content_block bookkeeping).
export type ClaudeDownstreamContext = {
  messageStarted: boolean;
  contentBlockStarted: boolean;
  doneSent: boolean;
};
// Protocol-neutral representation of one streaming event.
export type NormalizedStreamEvent = {
  role?: 'assistant';
  contentDelta?: string;
  reasoningDelta?: string;
  finishReason?: string | null;
  done?: boolean;
};
// Protocol-neutral representation of a complete (non-stream) response.
export type NormalizedFinalResponse = {
  id: string;
  model: string;
  created: number;
  content: string;
  reasoningContent: string;
  finishReason: string;
};
// Result of parsing an incoming downstream chat request into an upstream-ready body.
export type ParsedDownstreamChatRequest = {
  requestedModel: string;
  isStream: boolean;
  upstreamBody: Record<string, unknown>;
};
// Narrow an unknown value to an object record (arrays also pass this check).
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || value === undefined) return false;
  return typeof value === 'object';
}
// True only for strings that contain at least one non-whitespace character.
function isNonEmptyString(value: unknown): value is string {
  if (typeof value !== 'string') return false;
  return value.trim().length > 0;
}
// Return the value only when it is a finite number; otherwise undefined (no coercion).
function pickFiniteNumber(value: unknown): number | undefined {
  if (typeof value !== 'number') return undefined;
  return Number.isFinite(value) ? value : undefined;
}
// Coerce a value to a positive integer timestamp, falling back when it is not
// a finite positive number after Number() coercion.
function ensureIntegerTimestamp(value: unknown, fallback: number): number {
  const numeric = Number(value);
  const usable = Number.isFinite(numeric) && numeric > 0;
  return usable ? Math.trunc(numeric) : fallback;
}
// Join the trimmed, non-empty parts with a blank line between them.
function joinNonEmpty(parts: string[]): string {
  const kept: string[] = [];
  for (const part of parts) {
    const trimmed = part.trim();
    if (trimmed) kept.push(trimmed);
  }
  return kept.join('\n\n');
}
// Pull best-effort text out of one content part, in any of the upstream shapes
// seen in practice (plain string, {text}/{content}/{output_text}/..., an array
// of nested parts under `content`, or a nested `delta`). The first string-typed
// field wins, even when it is empty — this mirrors upstream precedence.
function textFromPart(part: unknown): string {
  if (typeof part === 'string') return part;
  if (!isRecord(part)) return '';
  const stringKeys = [
    'text',
    'content',
    'output_text',
    'completion',
    'partial_json',
    'reasoning_content',
    'reasoning',
  ] as const;
  for (const key of stringKeys) {
    const candidate = part[key];
    if (typeof candidate === 'string') return candidate;
  }
  if (Array.isArray(part.content)) {
    return part.content.map((item) => textFromPart(item)).join('');
  }
  if (isRecord(part.delta)) {
    const nested = textFromPart(part.delta);
    if (nested) return nested;
  }
  return '';
}
/**
 * Split arbitrary upstream content into visible text vs. "thinking"/reasoning text.
 * Handles plain strings, block arrays with Claude-style thinking blocks, parts
 * carrying a boolean `thought` flag, and `{ parts: [...] }` wrappers.
 */
function extractTextAndReasoning(value: unknown): { content: string; reasoning: string } {
  if (typeof value === 'string') return { content: value, reasoning: '' };
  if (Array.isArray(value)) {
    const contentParts: string[] = [];
    const reasoningParts: string[] = [];
    for (const item of value) {
      if (typeof item === 'string') {
        contentParts.push(item);
        continue;
      }
      if (!isRecord(item)) continue;
      const type = typeof item.type === 'string' ? item.type : '';
      // Claude extended-thinking block: { type: 'thinking', thinking: '...' }.
      if (type === 'thinking' && typeof item.thinking === 'string') {
        reasoningParts.push(item.thinking);
        continue;
      }
      // Streaming thinking delta: { type: 'thinking_delta', text: '...' }.
      if (type === 'thinking_delta' && typeof item.text === 'string') {
        reasoningParts.push(item.text);
        continue;
      }
      // Part flagged as internal thought: { thought: true, text: '...' }.
      if (typeof item.thought === 'boolean' && item.thought && typeof item.text === 'string') {
        reasoningParts.push(item.text);
        continue;
      }
      // Everything else is regular content (empty extraction results are dropped).
      const text = textFromPart(item);
      if (text) contentParts.push(text);
    }
    return {
      content: contentParts.join(''),
      reasoning: reasoningParts.join(''),
    };
  }
  if (!isRecord(value)) return { content: '', reasoning: '' };
  // Wrapper objects holding their blocks under `parts` (e.g. candidate content).
  if (Array.isArray(value.parts)) {
    return extractTextAndReasoning(value.parts);
  }
  // Bare object: treat it as a single content part; no reasoning is recoverable here.
  return {
    content: textFromPart(value),
    reasoning: '',
  };
}
/**
 * Map any upstream stop/finish reason onto the OpenAI vocabulary
 * ('stop' | 'length' | 'tool_calls'), or null for missing/non-string input.
 * Unknown non-empty reasons default to 'stop'.
 */
export function normalizeStopReason(raw: unknown): string | null {
  const value = typeof raw === 'string' ? raw.trim().toLowerCase() : '';
  if (!value) return null;
  const stopReasons = new Set(['end_turn', 'stop', 'end', 'eos', 'finished', 'stop_sequence']);
  if (stopReasons.has(value)) return 'stop';
  const lengthReasons = new Set(['max_tokens', 'length', 'max_output_tokens', 'max_tokens_exceeded']);
  if (lengthReasons.has(value) || value.includes('max')) return 'length';
  if (value === 'tool_use' || value === 'tool_calls' || value.includes('tool')) {
    return 'tool_calls';
  }
  return 'stop';
}
// Convert a (possibly raw) finish reason into Claude's stop_reason vocabulary.
export function toClaudeStopReason(finishReason: string | null | undefined): string {
  switch (normalizeStopReason(finishReason)) {
    case 'length':
      return 'max_tokens';
    case 'tool_calls':
      return 'tool_use';
    default:
      return 'end_turn';
  }
}
// Fresh per-request stream context with a synthetic completion id.
export function createStreamTransformContext(modelName: string): StreamTransformContext {
  const nowMs = Date.now();
  return {
    id: `chatcmpl-meta-${nowMs}`,
    model: modelName,
    created: Math.floor(nowMs / 1000),
    roleSent: false,
    doneSent: false,
  };
}
// Fresh Claude downstream framing state: nothing has been emitted yet.
export function createClaudeDownstreamContext(): ClaudeDownstreamContext {
  const context: ClaudeDownstreamContext = {
    messageStarted: false,
    contentBlockStarted: false,
    doneSent: false,
  };
  return context;
}
// Produce a Claude-style `msg_` id, reusing an existing one and sanitizing
// any other id to [A-Za-z0-9_-]; an empty result falls back to a timestamp.
function buildClaudeMessageId(sourceId: string): string {
  if (sourceId.startsWith('msg_')) return sourceId;
  const sanitized = sourceId.replace(/[^A-Za-z0-9_-]/g, '_');
  if (sanitized) return `msg_${sanitized}`;
  return `msg_${Date.now()}`;
}
// Serialize one SSE frame; the `event:` line is omitted when no name is given,
// and non-string data is JSON-encoded.
function serializeSse(event: string, data: unknown): string {
  const payload = typeof data === 'string' ? data : JSON.stringify(data);
  const prefix = event ? `event: ${event}\n` : '';
  return `${prefix}data: ${payload}\n\n`;
}
// Locate assistant text on an OpenAI-style choice, trying the richest shapes first:
// message.content, choice.content, then flat string fields, then delta.content.
function extractAssistantContent(choice: any): string {
  const fromMessage = extractTextAndReasoning(choice?.message?.content).content;
  if (fromMessage) return fromMessage;
  const fromContent = extractTextAndReasoning(choice?.content).content;
  if (fromContent) return fromContent;
  const directKeys = ['text', 'completion', 'output_text'] as const;
  for (const key of directKeys) {
    const candidate = choice?.[key];
    if (typeof candidate === 'string' && candidate.length > 0) return candidate;
  }
  const deltaContent = choice?.delta?.content;
  if (typeof deltaContent === 'string' && deltaContent.length > 0) return deltaContent;
  return '';
}
// Locate reasoning text on a choice: explicit reasoning fields first, then
// reasoning recovered from structured message/choice content.
function extractAssistantReasoning(choice: any): string {
  const message = choice?.message || {};
  const directCandidates = [
    message.reasoning_content,
    message.reasoning,
    choice?.reasoning_content,
    choice?.reasoning,
  ];
  for (const candidate of directCandidates) {
    if (typeof candidate === 'string' && candidate.length > 0) return candidate;
  }
  const fromMessage = extractTextAndReasoning(message.content).reasoning;
  if (fromMessage) return fromMessage;
  return extractTextAndReasoning(choice?.content).reasoning || '';
}
// Visible text only; reasoning blocks are intentionally discarded here.
function parseClaudeMessageContent(content: unknown): string {
  const { content: text } = extractTextAndReasoning(content);
  return text;
}
/**
 * Translate a Claude Messages request body into an OpenAI chat-completions body.
 * Only text survives the translation; `system` (string or block array) becomes a
 * leading system message, and unknown roles are coerced to 'user'.
 */
function convertClaudeRequestToOpenAiBody(body: Record<string, unknown>): {
  model: string;
  stream: boolean;
  messages: Array<{ role: string; content: string }>;
  payload: Record<string, unknown>;
} {
  const model = typeof body.model === 'string' ? body.model.trim() : '';
  const stream = body.stream === true;
  const messages: Array<{ role: string; content: string }> = [];
  // Messages whose content yields no text (e.g. pure tool_use blocks) are dropped.
  const appendMessage = (role: string, content: unknown) => {
    const text = parseClaudeMessageContent(content);
    if (!text) return;
    messages.push({ role, content: text });
  };
  const system = body.system;
  if (typeof system === 'string') {
    appendMessage('system', system);
  } else if (Array.isArray(system)) {
    // Block-array system prompts are merged into one system message.
    const merged = system.map((item) => parseClaudeMessageContent(item)).filter((item) => item.length > 0).join('\n\n');
    if (merged) appendMessage('system', merged);
  }
  const rawMessages = Array.isArray(body.messages) ? body.messages : [];
  for (const message of rawMessages) {
    if (!isRecord(message)) continue;
    const role = typeof message.role === 'string' ? message.role : 'user';
    // Any role other than assistant/system is forwarded as 'user'.
    const mappedRole = role === 'assistant' || role === 'system' ? role : 'user';
    appendMessage(mappedRole, message.content);
  }
  const payload: Record<string, unknown> = {
    model,
    stream,
    messages,
  };
  const temperature = pickFiniteNumber(body.temperature);
  if (temperature !== undefined) payload.temperature = temperature;
  const topP = pickFiniteNumber(body.top_p);
  if (topP !== undefined) payload.top_p = topP;
  const maxTokens = pickFiniteNumber(body.max_tokens);
  if (maxTokens !== undefined) {
    payload.max_tokens = maxTokens;
  } else {
    // Always send max_tokens even when the client omitted it.
    payload.max_tokens = 4096;
  }
  if (Array.isArray(body.stop_sequences) && body.stop_sequences.length > 0) {
    payload.stop = body.stop_sequences;
  }
  // NOTE(review): tools/tool_choice are passed through without shape translation —
  // assumes the upstream accepts the Claude tool schema; verify against upstreams.
  if (body.tools !== undefined) payload.tools = body.tools;
  if (body.tool_choice !== undefined) payload.tool_choice = body.tool_choice;
  return { model, stream, messages, payload };
}
/**
 * Validate and normalize an incoming chat request for either supported format.
 * Claude bodies are converted to an OpenAI-style upstream body; OpenAI bodies
 * are forwarded as-is. Returns either a parsed value or a 400 error payload.
 */
export function parseDownstreamChatRequest(
  body: unknown,
  format: DownstreamFormat,
): { value?: ParsedDownstreamChatRequest; error?: { statusCode: number; payload: unknown } } {
  const invalid = (message: string) => ({
    error: {
      statusCode: 400,
      payload: { error: { message, type: 'invalid_request_error' } },
    },
  });
  const raw = isRecord(body) ? body : {};
  if (format === 'claude') {
    const converted = convertClaudeRequestToOpenAiBody(raw);
    if (!converted.model) return invalid('model is required');
    if (converted.messages.length === 0) return invalid('messages is required');
    return {
      value: {
        requestedModel: converted.model,
        isStream: converted.stream,
        upstreamBody: converted.payload,
      },
    };
  }
  const model = typeof raw.model === 'string' ? raw.model.trim() : '';
  if (!model) return invalid('model is required');
  return {
    value: {
      requestedModel: model,
      isStream: raw.stream === true,
      upstreamBody: raw,
    },
  };
}
/**
 * Collapse any supported upstream final (non-stream) payload into one normalized
 * shape. Recognized inputs, in priority order: OpenAI `choices`, Claude
 * `type: 'message'`, Gemini-style `candidates`, a bare string body, and finally
 * an empty fallback that carries `fallbackText` as the content.
 */
export function normalizeUpstreamFinalResponse(
  payload: unknown,
  fallbackModel: string,
  fallbackText = '',
): NormalizedFinalResponse {
  const now = Math.floor(Date.now() / 1000);
  const fallbackId = `chatcmpl-meta-${Date.now()}`;
  // OpenAI-compatible response: take the first choice.
  if (isRecord(payload) && Array.isArray(payload.choices)) {
    const choice = payload.choices[0] ?? {};
    const content = extractAssistantContent(choice) || extractAssistantContent(payload);
    const reasoning = extractAssistantReasoning(choice) || extractAssistantReasoning(payload);
    return {
      id: isNonEmptyString(payload.id) ? payload.id : fallbackId,
      model: isNonEmptyString(payload.model) ? payload.model : fallbackModel,
      created: ensureIntegerTimestamp(payload.created, now),
      content: content || fallbackText,
      reasoningContent: reasoning,
      finishReason: normalizeStopReason(choice?.finish_reason ?? payload.stop_reason) || 'stop',
    };
  }
  // Claude Messages response.
  if (isRecord(payload) && typeof payload.type === 'string' && payload.type === 'message') {
    return {
      id: isNonEmptyString(payload.id) ? payload.id : fallbackId,
      model: isNonEmptyString(payload.model) ? payload.model : fallbackModel,
      created: now,
      content: parseClaudeMessageContent(payload.content) || fallbackText,
      reasoningContent: extractTextAndReasoning(payload.content).reasoning,
      finishReason: normalizeStopReason(payload.stop_reason) || 'stop',
    };
  }
  // Gemini-style response: take the first candidate.
  if (isRecord(payload) && Array.isArray(payload.candidates)) {
    const candidate = payload.candidates[0] || {};
    const parsedCandidate = extractTextAndReasoning(candidate?.content?.parts || candidate?.content);
    return {
      id: isNonEmptyString((payload as any).responseId) ? (payload as any).responseId : fallbackId,
      model: isNonEmptyString((payload as any).modelVersion)
        ? (payload as any).modelVersion
        : fallbackModel,
      created: now,
      content: parsedCandidate.content || fallbackText,
      reasoningContent: parsedCandidate.reasoning,
      finishReason: normalizeStopReason(candidate?.finishReason || (payload as any).finishReason) || 'stop',
    };
  }
  // Plain-text body (upstream replied with non-JSON text).
  if (typeof payload === 'string' && payload.trim()) {
    return {
      id: fallbackId,
      model: fallbackModel,
      created: now,
      content: payload,
      reasoningContent: '',
      finishReason: 'stop',
    };
  }
  // Unrecognized shape: synthesize an empty response around the raw text.
  return {
    id: fallbackId,
    model: fallbackModel,
    created: now,
    content: fallbackText,
    reasoningContent: '',
    finishReason: 'stop',
  };
}
/**
 * Translate one upstream stream event (OpenAI chunk, Claude event, or
 * Gemini-style candidate chunk) into the neutral NormalizedStreamEvent shape,
 * updating the shared context (id/model/created) as richer metadata arrives.
 */
export function normalizeUpstreamStreamEvent(
  payload: unknown,
  context: StreamTransformContext,
  fallbackModel: string,
): NormalizedStreamEvent {
  if (!isRecord(payload)) return {};
  // OpenAI chat.completion.chunk shape.
  if (Array.isArray(payload.choices)) {
    if (isNonEmptyString(payload.id)) context.id = payload.id;
    if (isNonEmptyString(payload.model)) context.model = payload.model;
    context.created = ensureIntegerTimestamp(payload.created, context.created);
    const choice = payload.choices[0] ?? {};
    const delta = isRecord(choice?.delta) ? choice.delta : {};
    const deltaParsed = extractTextAndReasoning(delta.content ?? delta);
    const contentDelta =
      deltaParsed.content
      || (typeof choice?.message?.content === 'string' ? choice.message.content : '')
      || '';
    // Explicit reasoning fields win over reasoning recovered from content blocks.
    const reasoningDelta =
      (typeof (delta as any).reasoning_content === 'string' ? (delta as any).reasoning_content : '')
      || (typeof (delta as any).reasoning === 'string' ? (delta as any).reasoning : '')
      || deltaParsed.reasoning
      || '';
    return {
      role: (delta as any).role === 'assistant' ? 'assistant' : undefined,
      contentDelta: contentDelta || undefined,
      reasoningDelta: reasoningDelta || undefined,
      finishReason: normalizeStopReason(choice?.finish_reason),
    };
  }
  // Claude Messages stream events, keyed by `type`.
  const type = typeof payload.type === 'string' ? payload.type : '';
  const message = isRecord(payload.message) ? payload.message : null;
  if (message) {
    if (isNonEmptyString(message.id)) context.id = message.id;
    if (isNonEmptyString(message.model)) context.model = message.model;
  }
  if (!context.model) context.model = fallbackModel;
  if (type === 'message_start') {
    return { role: 'assistant' };
  }
  if (type === 'content_block_start') {
    const parsed = extractTextAndReasoning(payload.content_block);
    return {
      contentDelta: parsed.content || undefined,
      reasoningDelta: parsed.reasoning || undefined,
    };
  }
  if (type === 'content_block_delta') {
    const delta = isRecord(payload.delta) ? payload.delta : {};
    const deltaType = typeof delta.type === 'string' ? delta.type : '';
    const parsed = extractTextAndReasoning(delta);
    if (deltaType === 'thinking_delta') {
      // Thinking deltas may carry text under `text` (parsed as content) or
      // `thinking` (parsed as reasoning); route both to the reasoning channel.
      return {
        reasoningDelta: parsed.content || parsed.reasoning || undefined,
      };
    }
    return {
      contentDelta: parsed.content || undefined,
      reasoningDelta: parsed.reasoning || undefined,
    };
  }
  if (type === 'message_delta') {
    const delta = isRecord(payload.delta) ? payload.delta : {};
    return {
      finishReason: normalizeStopReason(delta.stop_reason ?? payload.stop_reason),
    };
  }
  if (type === 'message_stop') {
    return { done: true };
  }
  // Gemini-style streaming chunk.
  if (Array.isArray(payload.candidates)) {
    const candidate = payload.candidates[0] || {};
    const parsed = extractTextAndReasoning((candidate as any).content?.parts || (candidate as any).content);
    if (isNonEmptyString((payload as any).modelVersion)) {
      context.model = (payload as any).modelVersion;
    } else if (!context.model) {
      context.model = fallbackModel;
    }
    return {
      contentDelta: parsed.content || undefined,
      reasoningDelta: parsed.reasoning || undefined,
      finishReason: normalizeStopReason((candidate as any).finishReason || (payload as any).finishReason),
    };
  }
  // Unknown shape: salvage whatever text can be extracted.
  const fallback = extractTextAndReasoning(payload);
  return {
    contentDelta: fallback.content || undefined,
    reasoningDelta: fallback.reasoning || undefined,
  };
}
/**
 * Build one OpenAI `chat.completion.chunk` from a normalized event, or null when
 * the event carries nothing a client needs (no delta and no finish reason).
 * Mutates `context.roleSent` so the assistant role is attached to the first
 * meaningful chunk.
 */
function buildOpenAiStreamChunk(
  context: StreamTransformContext,
  event: NormalizedStreamEvent,
): Record<string, unknown> | null {
  const delta: Record<string, unknown> = {};
  // Captured before roleSent flips: a role-only first event needs special handling below.
  const isInitialAssistantRoleOnlyEvent = (
    !context.roleSent
    && event.role === 'assistant'
    && !event.contentDelta
    && !event.reasoningDelta
  );
  if (!context.roleSent && (event.role === 'assistant' || event.contentDelta || event.reasoningDelta)) {
    delta.role = 'assistant';
    context.roleSent = true;
  } else if (event.role === 'assistant') {
    // Upstream explicitly repeated the role after it was already sent; mirror it.
    delta.role = 'assistant';
    context.roleSent = true;
  }
  if (event.contentDelta) {
    delta.content = event.contentDelta;
  }
  if (event.reasoningDelta) {
    delta.reasoning_content = event.reasoningDelta;
  }
  // Some OpenAI-compatible clients (e.g. OpenWebUI) expect starter chunk to include empty content.
  if (isInitialAssistantRoleOnlyEvent) {
    delta.content = '';
  }
  const finishReason = event.finishReason || null;
  const hasDelta = Object.keys(delta).length > 0;
  if (!hasDelta && !finishReason) return null;
  return {
    id: context.id,
    object: 'chat.completion.chunk',
    created: context.created,
    model: context.model,
    choices: [{
      index: 0,
      delta,
      finish_reason: finishReason,
    }],
  };
}
// Emit the one-time Claude `message_start` frame; subsequent calls are no-ops.
function ensureClaudeStartEvents(
  context: StreamTransformContext,
  claudeContext: ClaudeDownstreamContext,
): string[] {
  if (claudeContext.messageStarted) return [];
  claudeContext.messageStarted = true;
  const message = {
    id: buildClaudeMessageId(context.id),
    type: 'message',
    role: 'assistant',
    model: context.model,
    content: [],
    stop_reason: null,
    stop_sequence: null,
    usage: {
      input_tokens: 0,
      output_tokens: 0,
    },
  };
  return [serializeSse('message_start', { type: 'message_start', message })];
}
// Emit the one-time `content_block_start` frame for text block 0; no-op afterwards.
function ensureClaudeContentBlockStart(
  claudeContext: ClaudeDownstreamContext,
): string[] {
  if (claudeContext.contentBlockStarted) return [];
  claudeContext.contentBlockStarted = true;
  const payload = {
    type: 'content_block_start',
    index: 0,
    content_block: {
      type: 'text',
      text: '',
    },
  };
  return [serializeSse('content_block_start', payload)];
}
/**
 * Emit the Claude end-of-stream sequence exactly once:
 * (message_start if not yet sent) → content_block_stop (if a block is open) →
 * message_delta carrying the mapped stop_reason → message_stop.
 */
function buildClaudeDoneEvents(
  context: StreamTransformContext,
  claudeContext: ClaudeDownstreamContext,
  finishReason?: string | null,
): string[] {
  if (claudeContext.doneSent) return [];
  const events: string[] = [];
  events.push(...ensureClaudeStartEvents(context, claudeContext));
  if (claudeContext.contentBlockStarted) {
    events.push(serializeSse('content_block_stop', {
      type: 'content_block_stop',
      index: 0,
    }));
    claudeContext.contentBlockStarted = false;
  }
  events.push(serializeSse('message_delta', {
    type: 'message_delta',
    delta: {
      stop_reason: toClaudeStopReason(finishReason),
      stop_sequence: null,
    },
    usage: {
      // Real token counts are not tracked at this layer; 0 is a placeholder.
      output_tokens: 0,
    },
  }));
  events.push(serializeSse('message_stop', { type: 'message_stop' }));
  claudeContext.doneSent = true;
  return events;
}
/**
 * Serialize a normalized event into zero or more SSE frames for the client's
 * requested format. For Claude output this also injects the required
 * message_start/content_block_start framing, and merges reasoning into the
 * text stream (no separate reasoning channel is emitted for Claude).
 */
export function serializeNormalizedStreamEvent(
  downstreamFormat: DownstreamFormat,
  event: NormalizedStreamEvent,
  context: StreamTransformContext,
  claudeContext: ClaudeDownstreamContext,
): string[] {
  if (downstreamFormat === 'openai') {
    const chunk = buildOpenAiStreamChunk(context, event);
    return chunk ? [serializeSse('', chunk)] : [];
  }
  // Terminal events collapse into the Claude done sequence.
  if (event.done || event.finishReason) {
    return buildClaudeDoneEvents(context, claudeContext, event.finishReason);
  }
  const events: string[] = [];
  if (event.role === 'assistant' || event.contentDelta || event.reasoningDelta) {
    events.push(...ensureClaudeStartEvents(context, claudeContext));
  }
  // Reasoning first, then content, separated by a blank line when both exist.
  const mergedText = joinNonEmpty([
    event.reasoningDelta || '',
    event.contentDelta || '',
  ]);
  if (mergedText) {
    events.push(...ensureClaudeContentBlockStart(claudeContext));
    events.push(serializeSse('content_block_delta', {
      type: 'content_block_delta',
      index: 0,
      delta: {
        type: 'text_delta',
        text: mergedText,
      },
    }));
  }
  return events;
}
// Emit the terminal frame(s) for the stream exactly once: `[DONE]` for OpenAI
// clients, or the full Claude stop sequence otherwise.
export function serializeStreamDone(
  downstreamFormat: DownstreamFormat,
  context: StreamTransformContext,
  claudeContext: ClaudeDownstreamContext,
): string[] {
  if (context.doneSent) return [];
  context.doneSent = true;
  return downstreamFormat === 'openai'
    ? [serializeSse('', '[DONE]')]
    : buildClaudeDoneEvents(context, claudeContext, 'stop');
}
// Render a normalized non-stream response in the client's requested wire format,
// attaching the supplied usage numbers.
export function serializeFinalResponse(
  downstreamFormat: DownstreamFormat,
  normalized: NormalizedFinalResponse,
  usage: { promptTokens: number; completionTokens: number; totalTokens: number },
): Record<string, unknown> {
  if (downstreamFormat === 'claude') {
    const textBlock = { type: 'text', text: normalized.content };
    return {
      id: buildClaudeMessageId(normalized.id),
      type: 'message',
      role: 'assistant',
      model: normalized.model,
      content: [textBlock],
      stop_reason: toClaudeStopReason(normalized.finishReason),
      stop_sequence: null,
      usage: {
        input_tokens: usage.promptTokens,
        output_tokens: usage.completionTokens,
      },
    };
  }
  const message: Record<string, unknown> = {
    role: 'assistant',
    content: normalized.content,
  };
  if (normalized.reasoningContent) {
    message.reasoning_content = normalized.reasoningContent;
  }
  const finishReason = normalizeStopReason(normalized.finishReason) ?? 'stop';
  return {
    id: normalized.id,
    object: 'chat.completion',
    created: normalized.created,
    model: normalized.model,
    choices: [{
      index: 0,
      message,
      finish_reason: finishReason,
    }],
    usage: {
      prompt_tokens: usage.promptTokens,
      completion_tokens: usage.completionTokens,
      total_tokens: usage.totalTokens,
    },
  };
}
/**
 * Build a two-chunk synthetic OpenAI stream (one chunk with the full content,
 * then a finish chunk) from a normalized non-stream response, for clients that
 * asked for SSE when the upstream replied without it.
 *
 * Fix: the delta is assembled up-front, which removes the original's
 * `(startChunk.choices as any[])[0]` any-cast used to patch reasoning in afterwards.
 */
export function buildSyntheticOpenAiChunks(normalized: NormalizedFinalResponse): Array<Record<string, unknown>> {
  const startDelta: Record<string, unknown> = { role: 'assistant' };
  if (normalized.content) startDelta.content = normalized.content;
  if (normalized.reasoningContent) startDelta.reasoning_content = normalized.reasoningContent;
  // Fields shared by both chunks.
  const base = {
    id: normalized.id,
    object: 'chat.completion.chunk',
    created: normalized.created,
    model: normalized.model,
  };
  const startChunk: Record<string, unknown> = {
    ...base,
    choices: [{
      index: 0,
      delta: startDelta,
      finish_reason: null,
    }],
  };
  const endChunk: Record<string, unknown> = {
    ...base,
    choices: [{
      index: 0,
      delta: {},
      finish_reason: normalizeStopReason(normalized.finishReason) || 'stop',
    }],
  };
  return [startChunk, endChunk];
}
/**
 * Pull complete SSE events (blank-line delimited) out of a buffer, returning the
 * parsed events plus the unconsumed tail. CRLF is normalized to LF first;
 * blocks without a `data:` line are dropped; multiple data lines are joined.
 */
export function pullSseEventsWithDone(buffer: string): { events: ParsedSseEvent[]; rest: string } {
  const events: ParsedSseEvent[] = [];
  let rest = buffer.replace(/\r\n/g, '\n');
  for (let boundary = rest.indexOf('\n\n'); boundary >= 0; boundary = rest.indexOf('\n\n')) {
    const block = rest.slice(0, boundary);
    rest = rest.slice(boundary + 2);
    if (!block.trim()) continue;
    let eventName = '';
    const dataLines: string[] = [];
    for (const line of block.split('\n')) {
      if (line.startsWith('event:')) {
        eventName = line.slice(6).trim();
      } else if (line.startsWith('data:')) {
        dataLines.push(line.slice(5).trimStart());
      }
    }
    if (dataLines.length === 0) continue;
    events.push({
      event: eventName,
      data: dataLines.join('\n').trim(),
    });
  }
  return { events, rest };
}
+256
View File
@@ -0,0 +1,256 @@
import { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
import { fetch } from 'undici';
import { db, schema } from '../../db/index.js';
import { tokenRouter } from '../../services/tokenRouter.js';
import { refreshModelsAndRebuildRoutes } from '../../services/modelService.js';
import { reportProxyAllFailed, reportTokenExpired } from '../../services/alertService.js';
import { isTokenExpiredError } from '../../services/alertRules.js';
import { estimateProxyCost } from '../../services/modelPricingService.js';
import { shouldRetryProxyRequest } from '../../services/proxyRetryPolicy.js';
import { resolveProxyUsageWithSelfLogFallback } from '../../services/proxyUsageFallbackService.js';
import { mergeProxyUsage, parseProxyUsage, pullSseDataEvents } from '../../services/proxyUsageParser.js';
// Retries after the first attempt, so a request makes at most MAX_RETRIES + 1 tries.
const MAX_RETRIES = 2;
/**
 * OpenAI-compatible `/v1/completions` proxy with channel selection and retry.
 *
 * Each attempt picks a channel via tokenRouter (excluding channels that already
 * failed this request), forwards the body with the model rewritten to the
 * channel's actual upstream model, relays the response (SSE bytes verbatim for
 * streams), parses token usage, reconciles it against the upstream's own logs,
 * and records success/cost for routing.
 *
 * Fix: the network-error catch used unguarded `err.message` (a TypeError if a
 * non-Error was thrown); now guarded as `err?.message || ...` for consistency
 * with the sibling chat proxy route's error handling.
 */
export async function completionsProxyRoute(app: FastifyInstance) {
  app.post('/v1/completions', async (request: FastifyRequest, reply: FastifyReply) => {
    const body = request.body as any;
    const requestedModel = body?.model;
    if (!requestedModel) {
      return reply.code(400).send({ error: { message: 'model is required', type: 'invalid_request_error' } });
    }
    const isStream = body.stream === true;
    // Channels that already failed this request; excluded from retry selection.
    const excludeChannelIds: number[] = [];
    let retryCount = 0;
    while (retryCount <= MAX_RETRIES) {
      let selected = retryCount === 0
        ? tokenRouter.selectChannel(requestedModel)
        : tokenRouter.selectNextChannel(requestedModel, excludeChannelIds);
      if (!selected && retryCount === 0) {
        // The model may be newly available upstream: rebuild routes once, then re-select.
        await refreshModelsAndRebuildRoutes();
        selected = tokenRouter.selectChannel(requestedModel);
      }
      if (!selected) {
        await reportProxyAllFailed({
          model: requestedModel,
          reason: 'No available channels after retries',
        });
        return reply.code(503).send({
          error: { message: 'No available channels for this model', type: 'server_error' },
        });
      }
      excludeChannelIds.push(selected.channel.id);
      const targetUrl = `${selected.site.url}/v1/completions`;
      // Forward the body unchanged except for the upstream model name.
      const forwardBody = { ...body, model: selected.actualModel };
      const startTime = Date.now();
      try {
        const upstream = await fetch(targetUrl, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${selected.tokenValue}`,
          },
          body: JSON.stringify(forwardBody),
        });
        if (!upstream.ok) {
          const errText = await upstream.text().catch(() => 'unknown error');
          tokenRouter.recordFailure(selected.channel.id);
          logProxy(selected, requestedModel, 'failed', upstream.status, Date.now() - startTime, errText, retryCount);
          if (isTokenExpiredError({ status: upstream.status, message: errText })) {
            await reportTokenExpired({
              accountId: selected.account.id,
              username: selected.account.username,
              siteName: selected.site.name,
              detail: `HTTP ${upstream.status}`,
            });
          }
          if (shouldRetryProxyRequest(upstream.status, errText) && retryCount < MAX_RETRIES) {
            retryCount++;
            continue;
          }
          await reportProxyAllFailed({
            model: requestedModel,
            reason: `upstream returned HTTP ${upstream.status}`,
          });
          return reply.code(upstream.status).send({ error: { message: errText, type: 'upstream_error' } });
        }
        if (isStream) {
          reply.raw.writeHead(200, {
            'Content-Type': 'text/event-stream',
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
          });
          const reader = upstream.body?.getReader();
          if (!reader) {
            reply.raw.end();
            return;
          }
          const decoder = new TextDecoder();
          let parsedUsage = { promptTokens: 0, completionTokens: 0, totalTokens: 0 };
          let sseBuffer = '';
          try {
            while (true) {
              const { done, value } = await reader.read();
              if (done) break;
              const chunk = decoder.decode(value, { stream: true });
              // Relay bytes to the client untouched; usage parsing is passive.
              reply.raw.write(chunk);
              sseBuffer += chunk;
              const pulled = pullSseDataEvents(sseBuffer);
              sseBuffer = pulled.rest;
              for (const eventPayload of pulled.events) {
                try {
                  parsedUsage = mergeProxyUsage(parsedUsage, parseProxyUsage(JSON.parse(eventPayload)));
                } catch {}
              }
            }
            // Flush a possibly incomplete trailing event left in the buffer.
            if (sseBuffer.trim().length > 0) {
              const pulled = pullSseDataEvents(`${sseBuffer}\n\n`);
              for (const eventPayload of pulled.events) {
                try {
                  parsedUsage = mergeProxyUsage(parsedUsage, parseProxyUsage(JSON.parse(eventPayload)));
                } catch {}
              }
            }
          } finally {
            reader.releaseLock();
            reply.raw.end();
          }
          const latency = Date.now() - startTime;
          // Reconcile locally parsed usage against the upstream's own request log.
          const resolvedUsage = await resolveProxyUsageWithSelfLogFallback({
            site: selected.site,
            account: selected.account,
            tokenValue: selected.tokenValue,
            tokenName: selected.tokenName,
            modelName: selected.actualModel || requestedModel,
            requestStartedAtMs: startTime,
            requestEndedAtMs: startTime + latency,
            localLatencyMs: latency,
            usage: {
              promptTokens: parsedUsage.promptTokens,
              completionTokens: parsedUsage.completionTokens,
              totalTokens: parsedUsage.totalTokens,
            },
          });
          let estimatedCost = await estimateProxyCost({
            site: selected.site,
            account: selected.account,
            modelName: selected.actualModel || requestedModel,
            promptTokens: resolvedUsage.promptTokens,
            completionTokens: resolvedUsage.completionTokens,
            totalTokens: resolvedUsage.totalTokens,
          });
          // Prefer quota-derived cost when the self-log recovered usage or pricing yielded nothing.
          if (resolvedUsage.estimatedCostFromQuota > 0 && (resolvedUsage.recoveredFromSelfLog || estimatedCost <= 0)) {
            estimatedCost = resolvedUsage.estimatedCostFromQuota;
          }
          tokenRouter.recordSuccess(selected.channel.id, latency, estimatedCost);
          logProxy(
            selected, requestedModel, 'success', 200, latency, null, retryCount,
            resolvedUsage.promptTokens, resolvedUsage.completionTokens, resolvedUsage.totalTokens, estimatedCost,
          );
          return;
        }
        // Non-stream path: pass the upstream JSON through unchanged.
        const data = await upstream.json() as any;
        const latency = Date.now() - startTime;
        const parsedUsage = parseProxyUsage(data);
        const resolvedUsage = await resolveProxyUsageWithSelfLogFallback({
          site: selected.site,
          account: selected.account,
          tokenValue: selected.tokenValue,
          tokenName: selected.tokenName,
          modelName: selected.actualModel || requestedModel,
          requestStartedAtMs: startTime,
          requestEndedAtMs: startTime + latency,
          localLatencyMs: latency,
          usage: {
            promptTokens: parsedUsage.promptTokens,
            completionTokens: parsedUsage.completionTokens,
            totalTokens: parsedUsage.totalTokens,
          },
        });
        let estimatedCost = await estimateProxyCost({
          site: selected.site,
          account: selected.account,
          modelName: selected.actualModel || requestedModel,
          promptTokens: resolvedUsage.promptTokens,
          completionTokens: resolvedUsage.completionTokens,
          totalTokens: resolvedUsage.totalTokens,
        });
        if (resolvedUsage.estimatedCostFromQuota > 0 && (resolvedUsage.recoveredFromSelfLog || estimatedCost <= 0)) {
          estimatedCost = resolvedUsage.estimatedCostFromQuota;
        }
        tokenRouter.recordSuccess(selected.channel.id, latency, estimatedCost);
        logProxy(
          selected, requestedModel, 'success', 200, latency, null, retryCount,
          resolvedUsage.promptTokens, resolvedUsage.completionTokens, resolvedUsage.totalTokens, estimatedCost,
        );
        return reply.send(data);
      } catch (err: any) {
        tokenRouter.recordFailure(selected.channel.id);
        // Guarded access: `err` may not be an Error instance.
        logProxy(selected, requestedModel, 'failed', 0, Date.now() - startTime, err?.message || 'network error', retryCount);
        if (retryCount < MAX_RETRIES) {
          retryCount++;
          continue;
        }
        await reportProxyAllFailed({
          model: requestedModel,
          reason: err?.message || 'network failure',
        });
        return reply.code(502).send({
          error: { message: `Upstream error: ${err?.message || 'network failure'}`, type: 'upstream_error' },
        });
      }
    }
  });
}
/**
 * Best-effort insert of one proxy request record into `proxy_logs`.
 *
 * @param selected       Routing selection (channel/account/actual model) chosen for this request.
 * @param modelRequested Model name as sent by the client (before channel remapping).
 * @param status         'success' | 'failed' outcome label stored in the log row.
 * @param httpStatus     Upstream HTTP status (0 when the request never completed).
 * @param latencyMs      Wall-clock duration of the attempt in milliseconds.
 * @param errorMessage   Upstream error body or exception message; null on success.
 * @param retryCount     Which retry attempt this row corresponds to (0 = first try).
 * @param promptTokens / completionTokens / totalTokens / estimatedCost
 *                       Usage accounting; default to 0 when unknown (e.g. failures).
 */
function logProxy(
  selected: any,
  modelRequested: string,
  status: string,
  httpStatus: number,
  latencyMs: number,
  errorMessage: string | null,
  retryCount: number,
  promptTokens = 0,
  completionTokens = 0,
  totalTokens = 0,
  estimatedCost = 0,
) {
  // Logging must never break the request path; swallow any DB error.
  try {
    db.insert(schema.proxyLogs).values({
      routeId: selected.channel.routeId,
      channelId: selected.channel.id,
      accountId: selected.account.id,
      modelRequested,
      modelActual: selected.actualModel,
      status,
      httpStatus,
      latencyMs,
      promptTokens,
      completionTokens,
      totalTokens,
      estimatedCost,
      errorMessage,
      retryCount,
    }).run();
  } catch {}
}
+172
View File
@@ -0,0 +1,172 @@
import { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
import { fetch } from 'undici';
import { db, schema } from '../../db/index.js';
import { tokenRouter } from '../../services/tokenRouter.js';
import { refreshModelsAndRebuildRoutes } from '../../services/modelService.js';
import { reportProxyAllFailed, reportTokenExpired } from '../../services/alertService.js';
import { isTokenExpiredError } from '../../services/alertRules.js';
import { estimateProxyCost } from '../../services/modelPricingService.js';
import { shouldRetryProxyRequest } from '../../services/proxyRetryPolicy.js';
import { resolveProxyUsageWithSelfLogFallback } from '../../services/proxyUsageFallbackService.js';
import { parseProxyUsage } from '../../services/proxyUsageParser.js';
// Maximum number of automatic failover retries per request (the initial attempt is not counted).
const MAX_RETRIES = 2;
/**
 * Registers POST /v1/embeddings: an OpenAI-compatible embeddings proxy with
 * channel selection, automatic failover, usage accounting and cost estimation.
 *
 * Per attempt: pick a channel from the token router, forward the request with
 * the channel's bearer token and remapped model name, then record router
 * stats and a proxy log row. Failed attempts add the channel to an exclusion
 * list and retry up to MAX_RETRIES times.
 */
export async function embeddingsProxyRoute(app: FastifyInstance) {
  app.post('/v1/embeddings', async (request: FastifyRequest, reply: FastifyReply) => {
    const body = request.body as any;
    const requestedModel = body?.model;
    if (!requestedModel) {
      return reply.code(400).send({ error: { message: 'model is required', type: 'invalid_request_error' } });
    }
    // Channels already tried in this request; passed to selectNextChannel on retries.
    const excludeChannelIds: number[] = [];
    let retryCount = 0;
    while (retryCount <= MAX_RETRIES) {
      let selected = retryCount === 0
        ? tokenRouter.selectChannel(requestedModel)
        : tokenRouter.selectNextChannel(requestedModel, excludeChannelIds);
      // No route on the first attempt: refresh model availability once, then reselect.
      if (!selected && retryCount === 0) {
        await refreshModelsAndRebuildRoutes();
        selected = tokenRouter.selectChannel(requestedModel);
      }
      if (!selected) {
        await reportProxyAllFailed({
          model: requestedModel,
          reason: 'No available channels after retries',
        });
        return reply.code(503).send({ error: { message: 'No available channels', type: 'server_error' } });
      }
      excludeChannelIds.push(selected.channel.id);
      const targetUrl = `${selected.site.url}/v1/embeddings`;
      // Forward the caller's body but substitute the channel-specific model name.
      const forwardBody = { ...body, model: selected.actualModel };
      const startTime = Date.now();
      try {
        const upstream = await fetch(targetUrl, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${selected.tokenValue}`,
          },
          body: JSON.stringify(forwardBody),
        });
        const text = await upstream.text();
        if (!upstream.ok) {
          tokenRouter.recordFailure(selected.channel.id);
          logProxy(selected, requestedModel, 'failed', upstream.status, Date.now() - startTime, text, retryCount);
          // Alert when the upstream response looks like an expired/invalid token.
          if (isTokenExpiredError({ status: upstream.status, message: text })) {
            await reportTokenExpired({
              accountId: selected.account.id,
              username: selected.account.username,
              siteName: selected.site.name,
              detail: `HTTP ${upstream.status}`,
            });
          }
          // Retryable upstream failure: move to the next channel.
          if (shouldRetryProxyRequest(upstream.status, text) && retryCount < MAX_RETRIES) {
            retryCount++;
            continue;
          }
          await reportProxyAllFailed({
            model: requestedModel,
            reason: `upstream returned HTTP ${upstream.status}`,
          });
          return reply.code(upstream.status).send({ error: { message: text, type: 'upstream_error' } });
        }
        // Tolerate non-JSON success bodies; fall back to an empty object.
        let data: any = {};
        try { data = JSON.parse(text); } catch { data = {}; }
        const latency = Date.now() - startTime;
        const parsedUsage = parseProxyUsage(data);
        // If the upstream omitted usage fields, try to recover them from the site's own logs.
        const resolvedUsage = await resolveProxyUsageWithSelfLogFallback({
          site: selected.site,
          account: selected.account,
          tokenValue: selected.tokenValue,
          tokenName: selected.tokenName,
          modelName: selected.actualModel || requestedModel,
          requestStartedAtMs: startTime,
          requestEndedAtMs: startTime + latency,
          localLatencyMs: latency,
          usage: {
            promptTokens: parsedUsage.promptTokens,
            completionTokens: parsedUsage.completionTokens,
            totalTokens: parsedUsage.totalTokens,
          },
        });
        let estimatedCost = await estimateProxyCost({
          site: selected.site,
          account: selected.account,
          modelName: selected.actualModel || requestedModel,
          promptTokens: resolvedUsage.promptTokens,
          completionTokens: resolvedUsage.completionTokens,
          totalTokens: resolvedUsage.totalTokens,
        });
        // Prefer the quota-derived cost when it exists and either the fallback was used
        // or the local pricing estimate produced nothing.
        if (resolvedUsage.estimatedCostFromQuota > 0 && (resolvedUsage.recoveredFromSelfLog || estimatedCost <= 0)) {
          estimatedCost = resolvedUsage.estimatedCostFromQuota;
        }
        tokenRouter.recordSuccess(selected.channel.id, latency, estimatedCost);
        logProxy(
          selected, requestedModel, 'success', upstream.status, latency, null, retryCount,
          resolvedUsage.promptTokens, resolvedUsage.completionTokens, resolvedUsage.totalTokens, estimatedCost,
        );
        return reply.code(upstream.status).send(data);
      } catch (err: any) {
        // Network-level failure: record, then fail over if retries remain.
        tokenRouter.recordFailure(selected.channel.id);
        logProxy(selected, requestedModel, 'failed', 0, Date.now() - startTime, err.message, retryCount);
        if (retryCount < MAX_RETRIES) {
          retryCount++;
          continue;
        }
        await reportProxyAllFailed({
          model: requestedModel,
          reason: err.message || 'network failure',
        });
        return reply.code(502).send({ error: { message: err.message, type: 'upstream_error' } });
      }
    }
  });
}
/**
 * Best-effort insert of one proxy request record into `proxy_logs`
 * (embeddings route copy; mirrors the chat route's logger).
 *
 * @param selected       Routing selection (channel/account/actual model) for this request.
 * @param modelRequested Client-supplied model name before remapping.
 * @param status         'success' | 'failed' outcome label.
 * @param httpStatus     Upstream HTTP status (0 when no response was received).
 * @param latencyMs      Attempt duration in milliseconds.
 * @param errorMessage   Upstream error text or exception message; null on success.
 * @param retryCount     Retry attempt index (0 = first try).
 * @param promptTokens / completionTokens / totalTokens / estimatedCost
 *                       Usage accounting; default to 0 when unknown.
 */
function logProxy(
  selected: any,
  modelRequested: string,
  status: string,
  httpStatus: number,
  latencyMs: number,
  errorMessage: string | null,
  retryCount: number,
  promptTokens = 0,
  completionTokens = 0,
  totalTokens = 0,
  estimatedCost = 0,
) {
  // Logging must never break the request path; swallow any DB error.
  try {
    db.insert(schema.proxyLogs).values({
      routeId: selected.channel.routeId,
      channelId: selected.channel.id,
      accountId: selected.account.id,
      modelRequested,
      modelActual: selected.actualModel,
      status,
      httpStatus,
      latencyMs,
      promptTokens,
      completionTokens,
      totalTokens,
      estimatedCost,
      errorMessage,
      retryCount,
    }).run();
  } catch {}
}
+142
View File
@@ -0,0 +1,142 @@
import { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
import { fetch } from 'undici';
import { db, schema } from '../../db/index.js';
import { tokenRouter } from '../../services/tokenRouter.js';
import { refreshModelsAndRebuildRoutes } from '../../services/modelService.js';
import { reportProxyAllFailed, reportTokenExpired } from '../../services/alertService.js';
import { isTokenExpiredError } from '../../services/alertRules.js';
import { estimateProxyCost } from '../../services/modelPricingService.js';
import { shouldRetryProxyRequest } from '../../services/proxyRetryPolicy.js';
// Maximum number of automatic failover retries per request (the initial attempt is not counted).
const MAX_RETRIES = 2;
/**
 * Registers POST /v1/images/generations: an OpenAI-compatible image generation
 * proxy with channel selection and automatic failover.
 *
 * Unlike the chat/embeddings routes there is no token-usage parsing here: cost
 * is estimated with zero token counts (pricing service may still apply a
 * per-request price — TODO confirm against estimateProxyCost's implementation).
 */
export async function imagesProxyRoute(app: FastifyInstance) {
  app.post('/v1/images/generations', async (request: FastifyRequest, reply: FastifyReply) => {
    const body = request.body as any;
    // Default model when the client omits one.
    const requestedModel = body?.model || 'gpt-image-1';
    // Channels already tried in this request; passed to selectNextChannel on retries.
    const excludeChannelIds: number[] = [];
    let retryCount = 0;
    while (retryCount <= MAX_RETRIES) {
      let selected = retryCount === 0
        ? tokenRouter.selectChannel(requestedModel)
        : tokenRouter.selectNextChannel(requestedModel, excludeChannelIds);
      // No route on the first attempt: refresh model availability once, then reselect.
      if (!selected && retryCount === 0) {
        await refreshModelsAndRebuildRoutes();
        selected = tokenRouter.selectChannel(requestedModel);
      }
      if (!selected) {
        await reportProxyAllFailed({
          model: requestedModel,
          reason: 'No available channels after retries',
        });
        return reply.code(503).send({
          error: { message: 'No available channels for this model', type: 'server_error' },
        });
      }
      excludeChannelIds.push(selected.channel.id);
      const targetUrl = `${selected.site.url}/v1/images/generations`;
      // Forward the caller's body but substitute the channel-specific model name.
      const forwardBody = { ...body, model: selected.actualModel };
      const startTime = Date.now();
      try {
        const upstream = await fetch(targetUrl, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${selected.tokenValue}`,
          },
          body: JSON.stringify(forwardBody),
        });
        const text = await upstream.text();
        if (!upstream.ok) {
          tokenRouter.recordFailure(selected.channel.id);
          logProxy(selected, requestedModel, 'failed', upstream.status, Date.now() - startTime, text, retryCount);
          // Alert when the upstream response looks like an expired/invalid token.
          if (isTokenExpiredError({ status: upstream.status, message: text })) {
            await reportTokenExpired({
              accountId: selected.account.id,
              username: selected.account.username,
              siteName: selected.site.name,
              detail: `HTTP ${upstream.status}`,
            });
          }
          // Retryable upstream failure: move to the next channel.
          if (shouldRetryProxyRequest(upstream.status, text) && retryCount < MAX_RETRIES) {
            retryCount++;
            continue;
          }
          await reportProxyAllFailed({
            model: requestedModel,
            reason: `upstream returned HTTP ${upstream.status}`,
          });
          return reply.code(upstream.status).send({ error: { message: text, type: 'upstream_error' } });
        }
        // Tolerate non-JSON success bodies; fall back to an empty image list.
        let data: any = {};
        try { data = JSON.parse(text); } catch { data = { data: [] }; }
        const latency = Date.now() - startTime;
        const estimatedCost = await estimateProxyCost({
          site: selected.site,
          account: selected.account,
          modelName: selected.actualModel || requestedModel,
          promptTokens: 0,
          completionTokens: 0,
          totalTokens: 0,
        });
        tokenRouter.recordSuccess(selected.channel.id, latency, estimatedCost);
        logProxy(selected, requestedModel, 'success', upstream.status, latency, null, retryCount, estimatedCost);
        return reply.code(upstream.status).send(data);
      } catch (err: any) {
        // Network-level failure: record, then fail over if retries remain.
        tokenRouter.recordFailure(selected.channel.id);
        logProxy(selected, requestedModel, 'failed', 0, Date.now() - startTime, err.message, retryCount);
        if (retryCount < MAX_RETRIES) {
          retryCount++;
          continue;
        }
        await reportProxyAllFailed({
          model: requestedModel,
          reason: err.message || 'network failure',
        });
        return reply.code(502).send({
          error: { message: `Upstream error: ${err.message}`, type: 'upstream_error' },
        });
      }
    }
  });
}
/**
 * Best-effort insert of one proxy request record into `proxy_logs`
 * (images route copy — no token counts, only an optional estimated cost).
 *
 * @param selected       Routing selection (channel/account/actual model) for this request.
 * @param modelRequested Client-supplied model name before remapping.
 * @param status         'success' | 'failed' outcome label.
 * @param httpStatus     Upstream HTTP status (0 when no response was received).
 * @param latencyMs      Attempt duration in milliseconds.
 * @param errorMessage   Upstream error text or exception message; null on success.
 * @param retryCount     Retry attempt index (0 = first try).
 * @param estimatedCost  Estimated request cost; 0 when unknown.
 */
function logProxy(
  selected: any,
  modelRequested: string,
  status: string,
  httpStatus: number,
  latencyMs: number,
  errorMessage: string | null,
  retryCount: number,
  estimatedCost = 0,
) {
  // Logging must never break the request path; swallow any DB error.
  try {
    db.insert(schema.proxyLogs).values({
      routeId: selected.channel.routeId,
      channelId: selected.channel.id,
      accountId: selected.account.id,
      modelRequested,
      modelActual: selected.actualModel,
      status,
      httpStatus,
      latencyMs,
      promptTokens: 0,
      completionTokens: 0,
      totalTokens: 0,
      estimatedCost,
      errorMessage,
      retryCount,
    }).run();
  } catch {}
}
+57
View File
@@ -0,0 +1,57 @@
import { FastifyInstance } from 'fastify';
import { db, schema } from '../../db/index.js';
import { and, eq } from 'drizzle-orm';
import { refreshModelsAndRebuildRoutes } from '../../services/modelService.js';
/**
 * Registers GET /v1/models: lists the distinct model names that are currently
 * routable (model marked available AND its account AND site are both active).
 * Responds in Anthropic list format when the request carries Claude-style
 * headers, otherwise in OpenAI list format.
 */
export async function modelsProxyRoute(app: FastifyInstance) {
  app.get('/v1/models', async (request) => {
    // Read the distinct, sorted model names that have at least one active route.
    const readModels = () => {
      const rows = db.select({ modelName: schema.modelAvailability.modelName })
        .from(schema.modelAvailability)
        .innerJoin(schema.accounts, eq(schema.modelAvailability.accountId, schema.accounts.id))
        .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
        .where(
          and(
            eq(schema.modelAvailability.available, true),
            eq(schema.accounts.status, 'active'),
            eq(schema.sites.status, 'active'),
          ),
        )
        .all();
      return Array.from(new Set(rows.map((r) => r.modelName))).sort();
    };
    let models = readModels();
    // An empty list may mean availability was never synced; refresh once and re-read.
    if (models.length === 0) {
      await refreshModelsAndRebuildRoutes();
      models = readModels();
    }
    // Claude clients identify themselves with anthropic-version and/or x-api-key headers.
    const wantsClaudeFormat = typeof request.headers['anthropic-version'] === 'string'
      || typeof request.headers['x-api-key'] === 'string';
    if (wantsClaudeFormat) {
      const data = models.map((id) => ({
        id,
        type: 'model',
        display_name: id,
        // NOTE(review): created_at is the response time, not a real model creation time.
        created_at: new Date().toISOString(),
      }));
      return {
        data,
        first_id: data[0]?.id || null,
        last_id: data[data.length - 1]?.id || null,
        has_more: false,
      };
    }
    // OpenAI-compatible list shape.
    return {
      object: 'list',
      data: models.map(id => ({
        id,
        object: 'model',
        created: Math.floor(Date.now() / 1000),
        owned_by: 'metapi',
      })),
    };
  });
}
+21
View File
@@ -0,0 +1,21 @@
import { FastifyInstance } from 'fastify';
import { proxyAuthMiddleware } from '../../middleware/auth.js';
import { chatProxyRoute, claudeMessagesProxyRoute } from './chat.js';
import { modelsProxyRoute } from './models.js';
import { embeddingsProxyRoute } from './embeddings.js';
import { completionsProxyRoute } from './completions.js';
import { imagesProxyRoute } from './images.js';
/**
 * Registers every OpenAI-compatible proxy route under a shared auth hook.
 * The onRequest hook runs proxyAuthMiddleware for all routes registered here,
 * so individual route modules do not perform their own authentication.
 */
export async function proxyRoutes(app: FastifyInstance) {
  // Auth middleware for all /v1 routes
  app.addHook('onRequest', async (request, reply) => {
    await proxyAuthMiddleware(request, reply);
  });
  await app.register(chatProxyRoute);
  await app.register(claudeMessagesProxyRoute);
  await app.register(completionsProxyRoute);
  await app.register(modelsProxyRoute);
  await app.register(embeddingsProxyRoute);
  await app.register(imagesProxyRoute);
}
@@ -0,0 +1,18 @@
import { describe, expect, it } from 'vitest';
import {
decryptAccountPassword,
encryptAccountPassword,
} from './accountCredentialService.js';
// Round-trip and failure-path coverage for the AES-256-GCM password helpers.
describe('accountCredentialService', () => {
  it('encrypts and decrypts password values', () => {
    const cipher = encryptAccountPassword('P@ssw0rd-123');
    expect(cipher).toBeTruthy();
    // Ciphertext must not leak the plaintext.
    expect(cipher).not.toContain('P@ssw0rd-123');
    expect(decryptAccountPassword(cipher)).toBe('P@ssw0rd-123');
  });
  it('returns null for malformed cipher text', () => {
    expect(decryptAccountPassword('invalid-cipher')).toBeNull();
  });
});
@@ -0,0 +1,44 @@
import { createCipheriv, createDecipheriv, createHash, randomBytes } from 'node:crypto';
import { config } from '../config.js';
// Ciphertext format version tag (first segment of the serialized value).
const VERSION = 'v1';
// Authenticated encryption: AES-256 in GCM mode (12-byte IV, 16-byte auth tag).
const ALGORITHM = 'aes-256-gcm';
/**
 * Derives the 32-byte AES key as SHA-256 of the configured secret.
 * Falls back to the admin auth token, then to the literal 'change-me' —
 * NOTE(review): the 'change-me' fallback means default deployments encrypt
 * with a publicly known key; confirm config enforces a real secret.
 */
function buildKey(): Buffer {
  const secret = (config.accountCredentialSecret || '').trim() || config.authToken || 'change-me';
  return createHash('sha256').update(secret).digest();
}
/**
 * Encrypts an account password with AES-256-GCM using a fresh random IV.
 * Output format: "v1:<iv>:<authTag>:<ciphertext>", each segment base64url.
 */
export function encryptAccountPassword(password: string): string {
  const iv = randomBytes(12);
  const cipher = createCipheriv(ALGORITHM, buildKey(), iv);
  const ciphertext = Buffer.concat([cipher.update(password, 'utf8'), cipher.final()]);
  // Auth tag is only available after final(); it authenticates the ciphertext.
  const segments = [
    VERSION,
    iv.toString('base64url'),
    cipher.getAuthTag().toString('base64url'),
    ciphertext.toString('base64url'),
  ];
  return segments.join(':');
}
/**
 * Decrypts a "v1:<iv>:<tag>:<data>" ciphertext produced by encryptAccountPassword.
 * Returns null for any malformed input or failed authentication instead of throwing.
 */
export function decryptAccountPassword(cipherText: string): string | null {
  const segments = (cipherText || '').split(':');
  if (segments.length !== 4 || segments[0] !== VERSION) return null;
  const [, ivRaw, tagRaw, dataRaw] = segments;
  try {
    const decipher = createDecipheriv(ALGORITHM, buildKey(), Buffer.from(ivRaw, 'base64url'));
    // GCM: set the auth tag before finalizing so tampering is detected.
    decipher.setAuthTag(Buffer.from(tagRaw, 'base64url'));
    const plain = Buffer.concat([
      decipher.update(Buffer.from(dataRaw, 'base64url')),
      decipher.final(),
    ]);
    return plain.toString('utf8');
  } catch {
    // Bad key, corrupted data, or failed tag verification.
    return null;
  }
}
@@ -0,0 +1,41 @@
import { describe, expect, it } from 'vitest';
import {
getPlatformUserIdFromExtraConfig,
guessPlatformUserIdFromUsername,
mergeAccountExtraConfig,
resolvePlatformUserId,
} from './accountExtraConfig.js';
// Coverage for extra-config parsing, platform user id resolution and merge semantics.
describe('accountExtraConfig', () => {
  it('reads platformUserId from extra config when present', () => {
    // Both number and numeric-string values normalize to a positive integer.
    expect(getPlatformUserIdFromExtraConfig(JSON.stringify({ platformUserId: 11494 }))).toBe(11494);
    expect(getPlatformUserIdFromExtraConfig(JSON.stringify({ platformUserId: '7659' }))).toBe(7659);
  });
  it('guesses platformUserId from username suffix digits', () => {
    expect(guessPlatformUserIdFromUsername('linuxdo_7659')).toBe(7659);
    expect(guessPlatformUserIdFromUsername('user11494')).toBe(11494);
    expect(guessPlatformUserIdFromUsername('abc')).toBeUndefined();
    // Fewer than 3 trailing digits is not treated as a user id.
    expect(guessPlatformUserIdFromUsername('id_12')).toBeUndefined();
  });
  it('prefers configured user id over guessed user id', () => {
    expect(resolvePlatformUserId(JSON.stringify({ platformUserId: 5001 }), 'linuxdo_7659')).toBe(5001);
  });
  it('merges platformUserId into existing config without dropping keys', () => {
    const merged = mergeAccountExtraConfig(
      JSON.stringify({
        foo: 'bar',
        autoRelogin: { username: 'demo', passwordCipher: 'cipher' },
      }),
      { platformUserId: 7659 },
    );
    expect(merged).toBeTruthy();
    const parsed = JSON.parse(merged!);
    expect(parsed.foo).toBe('bar');
    expect(parsed.autoRelogin?.username).toBe('demo');
    expect(parsed.platformUserId).toBe(7659);
  });
});
+74
View File
@@ -0,0 +1,74 @@
// Stored credentials for automatic re-login; fields are `unknown` because the
// value comes from untyped JSON and is validated at read time.
type AutoReloginConfig = {
  username?: unknown;
  passwordCipher?: unknown;
  updatedAt?: unknown;
};
// Shape of the account's free-form extraConfig JSON column. The index
// signature preserves keys this module does not know about when merging.
type AccountExtraConfig = {
  platformUserId?: unknown;
  autoRelogin?: AutoReloginConfig;
  [key: string]: unknown;
};
/**
 * Parses the raw extraConfig JSON string into an object.
 * Any null/empty input, invalid JSON, or non-object value (arrays included)
 * yields an empty config rather than throwing.
 */
function parseExtraConfig(extraConfig?: string | null): AccountExtraConfig {
  if (!extraConfig) return {};
  let decoded: unknown;
  try {
    decoded = JSON.parse(extraConfig);
  } catch {
    return {};
  }
  const isPlainObject = !!decoded && typeof decoded === 'object' && !Array.isArray(decoded);
  return isPlainObject ? (decoded as AccountExtraConfig) : {};
}
/**
 * Coerces an arbitrary value into a positive integer user id.
 * Numbers are truncated toward zero; strings are parsed base-10.
 * Returns undefined for anything non-positive or non-numeric.
 */
function normalizeUserId(raw: unknown): number | undefined {
  switch (typeof raw) {
    case 'number':
      return Number.isFinite(raw) && raw > 0 ? Math.trunc(raw) : undefined;
    case 'string': {
      const parsed = Number.parseInt(raw.trim(), 10);
      return !Number.isNaN(parsed) && parsed > 0 ? parsed : undefined;
    }
    default:
      return undefined;
  }
}
/**
 * Reads and normalizes `platformUserId` from the account's extraConfig JSON.
 * Returns undefined when absent or not a positive integer.
 */
export function getPlatformUserIdFromExtraConfig(extraConfig?: string | null): number | undefined {
  return normalizeUserId(parseExtraConfig(extraConfig).platformUserId);
}
/**
 * Heuristically extracts a user id from a username's trailing digits.
 * Requires 3–8 trailing digits (shorter runs are too ambiguous to be an id).
 */
export function guessPlatformUserIdFromUsername(username?: string | null): number | undefined {
  const trimmed = (username || '').trim();
  if (!trimmed) return undefined;
  const trailing = /(\d{3,8})$/.exec(trimmed);
  return trailing?.[1] ? normalizeUserId(trailing[1]) : undefined;
}
/**
 * Resolves the platform user id: an explicitly configured id wins over
 * one guessed from the username's trailing digits.
 */
export function resolvePlatformUserId(extraConfig?: string | null, username?: string | null): number | undefined {
  const configured = getPlatformUserIdFromExtraConfig(extraConfig);
  if (configured !== undefined) return configured;
  return guessPlatformUserIdFromUsername(username);
}
/**
 * Shallow-merges a patch into the existing extraConfig JSON and re-serializes.
 * Unknown keys in the existing config are preserved; patch keys win on conflict.
 */
export function mergeAccountExtraConfig(
  extraConfig: string | null | undefined,
  patch: Record<string, unknown>,
): string {
  const current = parseExtraConfig(extraConfig);
  return JSON.stringify({ ...current, ...patch });
}
/**
 * Extracts validated auto-relogin credentials from extraConfig.
 * Returns null unless `autoRelogin` is an object carrying both a non-empty
 * username and a non-empty passwordCipher string.
 */
export function getAutoReloginConfig(extraConfig?: string | null): {
  username: string;
  passwordCipher: string;
} | null {
  const relogin = parseExtraConfig(extraConfig).autoRelogin;
  if (!relogin || typeof relogin !== 'object' || Array.isArray(relogin)) return null;
  // Only trimmed, non-empty strings count as usable credential fields.
  const asField = (value: unknown): string => (typeof value === 'string' ? value.trim() : '');
  const username = asField(relogin.username);
  const passwordCipher = asField(relogin.passwordCipher);
  return username && passwordCipher ? { username, passwordCipher } : null;
}
@@ -0,0 +1,56 @@
import { describe, expect, it } from 'vitest';
import { buildRuntimeHealthForAccount } from './accountHealthService.js';
// Coverage for runtime-health derivation precedence:
// disabled > expired > stored runtimeHealth > unknown fallback.
describe('accountHealthService', () => {
  it('marks disabled when site or account is disabled', () => {
    expect(
      buildRuntimeHealthForAccount({ accountStatus: 'active', siteStatus: 'disabled', extraConfig: null }).state,
    ).toBe('disabled');
    expect(
      buildRuntimeHealthForAccount({ accountStatus: 'disabled', siteStatus: 'active', extraConfig: null }).state,
    ).toBe('disabled');
  });
  it('marks unhealthy when account is expired', () => {
    const health = buildRuntimeHealthForAccount({
      accountStatus: 'expired',
      siteStatus: 'active',
      extraConfig: null,
    });
    expect(health.state).toBe('unhealthy');
  });
  it('returns stored runtime health from extra config when available', () => {
    // Stored health is embedded under the runtimeHealth key in extraConfig JSON.
    const health = buildRuntimeHealthForAccount({
      accountStatus: 'active',
      siteStatus: 'active',
      extraConfig: JSON.stringify({
        runtimeHealth: {
          state: 'healthy',
          reason: '余额刷新成功',
          source: 'balance',
          checkedAt: '2026-02-25T12:00:00.000Z',
        },
      }),
    });
    expect(health).toMatchObject({
      state: 'healthy',
      reason: '余额刷新成功',
      source: 'balance',
      checkedAt: '2026-02-25T12:00:00.000Z',
    });
  });
  it('falls back to unknown when no runtime health info exists', () => {
    const health = buildRuntimeHealthForAccount({
      accountStatus: 'active',
      siteStatus: 'active',
      extraConfig: null,
    });
    expect(health).toMatchObject({
      state: 'unknown',
      source: 'none',
    });
  });
});
+176
View File
@@ -0,0 +1,176 @@
import { eq } from 'drizzle-orm';
import { db, schema } from '../db/index.js';
import { mergeAccountExtraConfig } from './accountExtraConfig.js';
// The five runtime health states an account can be in.
export type RuntimeHealthState = 'healthy' | 'unhealthy' | 'degraded' | 'unknown' | 'disabled';
// Normalized health record as stored in extraConfig.runtimeHealth and
// returned to callers.
type RuntimeHealthInfo = {
  state: RuntimeHealthState;
  reason: string;
  source: string;          // subsystem that produced the state, e.g. 'balance', 'auth', 'system'
  checkedAt: string | null; // ISO timestamp of the last check, or null if never checked
};
// Whitelist used to validate state strings coming from untyped JSON.
const VALID_RUNTIME_HEALTH_STATES = new Set<RuntimeHealthState>([
  'healthy',
  'unhealthy',
  'degraded',
  'unknown',
  'disabled',
]);
/**
 * Parses a JSON string into a plain object; any empty input, parse error,
 * or non-object value (arrays included) yields an empty object.
 */
function parseObject(value: string | null | undefined): Record<string, unknown> {
  if (!value) return {};
  let decoded: unknown;
  try {
    decoded = JSON.parse(value);
  } catch {
    return {};
  }
  const isPlainObject = !!decoded && typeof decoded === 'object' && !Array.isArray(decoded);
  return isPlainObject ? (decoded as Record<string, unknown>) : {};
}
/**
 * Validates an untyped state value against the known health states.
 * Trims and lowercases before the whitelist check; null when invalid.
 */
function normalizeRuntimeHealthState(value: unknown): RuntimeHealthState | null {
  if (typeof value !== 'string') return null;
  const candidate = value.trim().toLowerCase() as RuntimeHealthState;
  return VALID_RUNTIME_HEALTH_STATES.has(candidate) ? candidate : null;
}
/**
 * Validates and normalizes a raw runtimeHealth record from untyped JSON.
 * Returns null when the value is not an object or carries no valid state.
 * reason is capped at 500 chars, source at 64; missing fields get defaults.
 */
function normalizeRuntimeHealthRecord(raw: unknown): RuntimeHealthInfo | null {
  if (!raw || typeof raw !== 'object' || Array.isArray(raw)) return null;
  const record = raw as Record<string, unknown>;
  const state = normalizeRuntimeHealthState(record.state);
  if (!state) return null;
  const trimmedReason = typeof record.reason === 'string'
    ? record.reason.trim().slice(0, 500)
    : '';
  const trimmedCheckedAt = typeof record.checkedAt === 'string' ? record.checkedAt.trim() : '';
  const trimmedSource = typeof record.source === 'string' ? record.source.trim() : '';
  return {
    state,
    // Fall back to a canned reason for the state when none was stored.
    reason: trimmedReason || defaultHealthReason(state),
    source: trimmedSource ? trimmedSource.slice(0, 64) : 'unknown',
    checkedAt: trimmedCheckedAt || null,
  };
}
/**
 * Canned human-readable (Chinese) reason text for each health state,
 * used when no explicit reason was stored.
 */
function defaultHealthReason(state: RuntimeHealthState): string {
  const reasons: Record<RuntimeHealthState, string> = {
    healthy: '运行状态正常',
    unhealthy: '最近一次检查失败',
    degraded: '运行状态波动',
    disabled: '账号或站点已禁用',
    unknown: '尚未检测',
  };
  // 'unknown' doubles as the catch-all, matching the original switch default.
  return reasons[state] ?? reasons.unknown;
}
/**
 * Reads and validates the stored runtimeHealth record from extraConfig JSON.
 * Returns null when absent or malformed.
 */
export function extractRuntimeHealth(extraConfig?: string | null): RuntimeHealthInfo | null {
  return normalizeRuntimeHealthRecord(parseObject(extraConfig).runtimeHealth);
}
/**
 * Derives the effective runtime health for an account.
 * Precedence: disabled (account or site) > expired token > stored
 * runtimeHealth record > 'unknown' fallback.
 */
export function buildRuntimeHealthForAccount(input: {
  accountStatus?: string | null;
  siteStatus?: string | null;
  extraConfig?: string | null;
}): RuntimeHealthInfo {
  // Missing/empty statuses are treated as 'active' (|| on purpose, not ??).
  const accountState = (input.accountStatus || 'active').toLowerCase();
  const siteState = (input.siteStatus || 'active').toLowerCase();
  if (accountState === 'disabled' || siteState === 'disabled') {
    return {
      state: 'disabled',
      reason: defaultHealthReason('disabled'),
      source: 'system',
      checkedAt: null,
    };
  }
  if (accountState === 'expired') {
    return {
      state: 'unhealthy',
      reason: '访问令牌已过期',
      source: 'auth',
      checkedAt: null,
    };
  }
  const stored = extractRuntimeHealth(input.extraConfig);
  if (stored) return stored;
  return {
    state: 'unknown',
    reason: defaultHealthReason('unknown'),
    source: 'none',
    checkedAt: null,
  };
}
/**
 * Normalizes caller-supplied health input into a complete RuntimeHealthInfo.
 * Invalid states degrade to 'unknown'; blank reason/source/checkedAt get
 * defaults (canned reason, 'manual', now).
 */
function buildRuntimeHealthPatch(input: {
  state: RuntimeHealthState;
  reason?: string | null;
  source?: string | null;
  checkedAt?: string | null;
}): RuntimeHealthInfo {
  const state = normalizeRuntimeHealthState(input.state) || 'unknown';
  // Trim then cap; empty results fall through to the defaults below.
  const clean = (value: string | null | undefined, max: number) => (value || '').trim().slice(0, max);
  return {
    state,
    reason: clean(input.reason, 500) || defaultHealthReason(state),
    source: clean(input.source, 64) || 'manual',
    checkedAt: (input.checkedAt || '').trim() || new Date().toISOString(),
  };
}
// Writes the health record into extraConfig under the runtimeHealth key,
// preserving all other keys via mergeAccountExtraConfig.
function applyRuntimeHealthToExtraConfig(extraConfig: string | null | undefined, health: RuntimeHealthInfo): string {
  return mergeAccountExtraConfig(extraConfig, {
    runtimeHealth: health,
  });
}
/**
 * Persists a runtime health record onto an account's extraConfig column.
 * Returns the normalized health that was stored, or null when the account
 * does not exist or any DB error occurs (best-effort, never throws).
 */
export function setAccountRuntimeHealth(
  accountId: number,
  input: {
    state: RuntimeHealthState;
    reason?: string | null;
    source?: string | null;
    checkedAt?: string | null;
  },
): RuntimeHealthInfo | null {
  try {
    // `as any` + get() guard: tolerates query builders without a .get() method
    // (e.g. mocked db in tests) — TODO confirm this is the intended reason.
    const query = db.select().from(schema.accounts).where(eq(schema.accounts.id, accountId)) as any;
    const account = typeof query?.get === 'function' ? query.get() : null;
    if (!account) return null;
    const health = buildRuntimeHealthPatch(input);
    const nextExtraConfig = applyRuntimeHealthToExtraConfig(account.extraConfig, health);
    db.update(schema.accounts)
      .set({
        extraConfig: nextExtraConfig,
        updatedAt: new Date().toISOString(),
      })
      .where(eq(schema.accounts.id, accountId))
      .run();
    return health;
  } catch {
    // Health bookkeeping must not propagate failures to callers.
    return null;
  }
}
@@ -0,0 +1,34 @@
import { describe, expect, it } from 'vitest';
import { maskToken, normalizeTokenForDisplay } from './accountTokenService.js';
// maskToken must always keep a recognizable sk- prefix while hiding the secret middle.
describe('maskToken', () => {
  it('keeps sk- prefix for short tokens', () => {
    const masked = maskToken('sk-abcde');
    expect(masked.startsWith('sk-')).toBe(true);
    expect(masked.includes('***')).toBe(true);
  });
  it('keeps sk- prefix for long tokens', () => {
    const masked = maskToken('sk-proj-abcdefghijklmnopqrstuvwxyz123456');
    expect(masked.startsWith('sk-')).toBe(true);
    expect(masked.includes('***')).toBe(true);
  });
  it('adds sk- prefix for new-api display when upstream key misses prefix', () => {
    const masked = maskToken('5Uh6KjAgVOqQxNv97MAS7abkBf7Fg5GphgBXUpAOuvq2IvL4', 'new-api');
    expect(masked.startsWith('sk-')).toBe(true);
  });
});
// The sk- display rule is global: it applies regardless of the platform argument.
describe('normalizeTokenForDisplay', () => {
  it('adds sk- for any platform when token misses prefix', () => {
    expect(normalizeTokenForDisplay('abc123', 'new-api')).toBe('sk-abc123');
    expect(normalizeTokenForDisplay('xyz789', 'one-api')).toBe('sk-xyz789');
    expect(normalizeTokenForDisplay('pqr456', 'anyrouter')).toBe('sk-pqr456');
    expect(normalizeTokenForDisplay('uvw000', 'veloera')).toBe('sk-uvw000');
  });
  it('keeps existing sk- token unchanged', () => {
    expect(normalizeTokenForDisplay('sk-abc123', 'veloera')).toBe('sk-abc123');
  });
});
+260
View File
@@ -0,0 +1,260 @@
import { and, eq, ne } from 'drizzle-orm';
import { db, schema } from '../db/index.js';
// Minimal shape of an API token record returned by an upstream site;
// all fields are optional/nullable because upstream payloads vary by platform.
type UpstreamApiToken = {
  name?: string | null;
  key?: string | null;
  enabled?: boolean | null;
};
/**
 * Normalizes an upstream API key for display by ensuring an `sk-` prefix
 * (case-insensitive check). Empty/blank input yields ''.
 *
 * Defect fixed: the original contained a dead, empty `if (platform !== undefined) {}`
 * branch; behavior is unchanged, the intent is now expressed directly.
 *
 * @param token    Raw key value from the upstream site; may be null/undefined.
 * @param platform Unused — kept for call-site compatibility; the display rule
 *                 is global across platforms.
 * @returns The trimmed key, prefixed with `sk-` when it lacks one, or ''.
 */
export function normalizeTokenForDisplay(token?: string | null, platform?: string | null): string {
  void platform; // intentionally unused; display rule is global
  const value = (token || '').trim();
  if (!value) return '';
  return value.toLowerCase().startsWith('sk-') ? value : `sk-${value}`;
}
/**
 * Produces a redacted display form of a token: a short visible head and tail
 * around a `***` marker. Input is first normalized via normalizeTokenForDisplay,
 * so non-empty values always take the sk- branch.
 */
export function maskToken(token?: string | null, platform?: string | null): string {
  const display = normalizeTokenForDisplay(token, platform);
  if (!display) return '';
  if (display.toLowerCase().startsWith('sk-')) {
    if (display.length <= 7) return 'sk-***';
    // Up to 3 chars after the prefix stay visible.
    const middle = display.slice(3, Math.min(6, display.length));
    const tail = display.length <= 12 ? display.slice(-2) : display.slice(-4);
    return `sk-${middle}***${tail}`;
  }
  // Defensive branch for non-sk values (unreachable after normalization).
  if (display.length <= 10) return `${display.slice(0, 2)}***${display.slice(-2)}`;
  return `${display.slice(0, 4)}***${display.slice(-4)}`;
}
/**
 * Trims a token name; blank names fall back to 'default' for the first
 * token and 'token-N' for subsequent ones.
 */
function normalizeTokenName(name: string | null | undefined, fallbackIndex = 1): string {
  const cleaned = (name ?? '').trim();
  if (cleaned.length > 0) return cleaned;
  return fallbackIndex === 1 ? 'default' : `token-${fallbackIndex}`;
}
/**
 * Trims a raw token value; returns null instead of an empty string so
 * callers can distinguish "no token" with a single null check.
 */
function normalizeTokenValue(token: string | null | undefined): string | null {
  const cleaned = (token ?? '').trim();
  return cleaned === '' ? null : cleaned;
}
// Mirrors the chosen default token onto the account row's apiToken column
// (null clears it); also bumps updatedAt.
function updateAccountApiToken(accountId: number, tokenValue: string | null) {
  db.update(schema.accounts)
    .set({ apiToken: tokenValue || null, updatedAt: new Date().toISOString() })
    .where(eq(schema.accounts.id, accountId))
    .run();
}
/**
 * Returns the token row to use for an account: the enabled default token,
 * or the first enabled token when none is flagged default, or null when the
 * account has no enabled tokens at all.
 */
export function getPreferredAccountToken(accountId: number) {
  const tokens = db.select()
    .from(schema.accountTokens)
    .where(and(eq(schema.accountTokens.accountId, accountId), eq(schema.accountTokens.enabled, true)))
    .all();
  if (tokens.length === 0) return null;
  const preferred = tokens.find((t) => t.isDefault) || tokens[0];
  return preferred;
}
/**
 * Upserts `tokenValue` as the account's single default token.
 * Creates the row if the value is new, otherwise updates the existing row;
 * then clears the default flag on every other token and mirrors the value
 * onto accounts.apiToken.
 *
 * @returns The id of the default token row, or null for a blank token value.
 */
export function ensureDefaultTokenForAccount(
  accountId: number,
  tokenValue: string,
  options?: { name?: string; source?: string; enabled?: boolean },
): number | null {
  const normalizedToken = normalizeTokenValue(tokenValue);
  if (!normalizedToken) return null;
  const now = new Date().toISOString();
  const tokens = db.select()
    .from(schema.accountTokens)
    .where(eq(schema.accountTokens.accountId, accountId))
    .all();
  // Match on the token value itself, not the name.
  let target = tokens.find((t) => t.token === normalizedToken) || null;
  if (!target) {
    target = db.insert(schema.accountTokens)
      .values({
        accountId,
        name: normalizeTokenName(options?.name, tokens.length + 1),
        token: normalizedToken,
        source: options?.source || 'manual',
        enabled: options?.enabled ?? true,
        isDefault: true,
        createdAt: now,
        updatedAt: now,
      })
      .returning()
      .get();
  } else {
    // Existing row: refresh metadata and promote to default.
    db.update(schema.accountTokens)
      .set({
        name: options?.name ? normalizeTokenName(options.name) : target.name,
        source: options?.source || target.source || 'manual',
        enabled: options?.enabled ?? target.enabled,
        isDefault: true,
        updatedAt: now,
      })
      .where(eq(schema.accountTokens.id, target.id))
      .run();
  }
  // Demote every other token so exactly one default remains.
  db.update(schema.accountTokens)
    .set({ isDefault: false, updatedAt: now })
    .where(and(eq(schema.accountTokens.accountId, accountId), ne(schema.accountTokens.id, target.id)))
    .run();
  updateAccountApiToken(accountId, normalizedToken);
  return target.id;
}
/**
 * Promotes one token row to be its account's default (also force-enabling it),
 * demoting all siblings first and mirroring the value onto accounts.apiToken.
 *
 * @returns false when the token id does not exist.
 */
export function setDefaultToken(tokenId: number): boolean {
  const target = db.select().from(schema.accountTokens).where(eq(schema.accountTokens.id, tokenId)).get();
  if (!target) return false;
  const now = new Date().toISOString();
  // Clear all defaults for the account, then set the requested one.
  db.update(schema.accountTokens)
    .set({ isDefault: false, updatedAt: now })
    .where(eq(schema.accountTokens.accountId, target.accountId))
    .run();
  db.update(schema.accountTokens)
    .set({ isDefault: true, enabled: true, updatedAt: now })
    .where(eq(schema.accountTokens.id, tokenId))
    .run();
  updateAccountApiToken(target.accountId, target.token);
  return true;
}
/**
 * Restores the "exactly one enabled default token" invariant for an account.
 * Keeps the current enabled default if there is one, otherwise promotes the
 * first enabled token; with no enabled tokens it clears accounts.apiToken.
 *
 * @returns The chosen default token row, or null when none is enabled.
 */
export function repairDefaultToken(accountId: number) {
  const tokens = db.select()
    .from(schema.accountTokens)
    .where(eq(schema.accountTokens.accountId, accountId))
    .all();
  const enabled = tokens.filter((t) => t.enabled);
  if (enabled.length === 0) {
    updateAccountApiToken(accountId, null);
    return null;
  }
  const currentDefault = enabled.find((t) => t.isDefault) || enabled[0];
  const now = new Date().toISOString();
  // Clear all defaults, then re-assert the single winner.
  db.update(schema.accountTokens)
    .set({ isDefault: false, updatedAt: now })
    .where(eq(schema.accountTokens.accountId, accountId))
    .run();
  db.update(schema.accountTokens)
    .set({ isDefault: true, enabled: true, updatedAt: now })
    .where(eq(schema.accountTokens.id, currentDefault.id))
    .run();
  updateAccountApiToken(accountId, currentDefault.token);
  return currentDefault;
}
/**
 * Reconcile locally stored tokens with the list fetched from the upstream site.
 * Matches by token value: matched rows are updated in place, unmatched ones are
 * inserted with source 'sync'. Finishes by repairing the account's default token.
 * Returns counters plus the resulting default token id (null when none).
 */
export function syncTokensFromUpstream(accountId: number, upstreamTokens: UpstreamApiToken[]) {
const now = new Date().toISOString();
const existing = db.select()
.from(schema.accountTokens)
.where(eq(schema.accountTokens.accountId, accountId))
.all();
let created = 0;
let updated = 0;
// Fallback sequence number for unnamed upstream tokens; starts after the
// current count so generated names don't start at 1 for a populated account.
let index = existing.length + 1;
for (const upstream of upstreamTokens) {
const tokenValue = normalizeTokenValue(upstream.key);
// Skip entries whose key normalizes to nothing usable.
if (!tokenValue) continue;
const tokenName = normalizeTokenName(upstream.name, index);
const enabled = upstream.enabled ?? true;
const byToken = existing.find((row) => row.token === tokenValue);
if (byToken) {
// Known token: refresh name/enabled state and stamp it as synced.
db.update(schema.accountTokens)
.set({
name: tokenName,
source: 'sync',
enabled,
updatedAt: now,
})
.where(eq(schema.accountTokens.id, byToken.id))
.run();
// Mutate the in-memory row too so later iterations (and the final
// total) see the refreshed values without re-querying.
byToken.name = tokenName;
byToken.enabled = enabled;
byToken.source = 'sync';
byToken.updatedAt = now;
updated++;
continue;
}
// Unknown token: insert it; never default — repairDefaultToken decides that.
const createdRow = db.insert(schema.accountTokens)
.values({
accountId,
name: tokenName,
token: tokenValue,
source: 'sync',
enabled,
isDefault: false,
createdAt: now,
updatedAt: now,
})
.returning()
.get();
existing.push(createdRow);
created++;
// Only advance the fallback name counter when a new row was created.
index++;
}
const repaired = repairDefaultToken(accountId);
return {
created,
updated,
total: existing.length,
defaultTokenId: repaired?.id || null,
};
}
/**
 * List tokens joined with their owning account and site.
 * When accountId is given, results are limited to that account.
 * The raw token value is never returned — only a masked preview.
 */
export function listTokensWithRelations(accountId?: number) {
  const query = db.select()
    .from(schema.accountTokens)
    .innerJoin(schema.accounts, eq(schema.accountTokens.accountId, schema.accounts.id))
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id));
  let rows;
  if (accountId) {
    rows = query.where(eq(schema.accountTokens.accountId, accountId)).all();
  } else {
    rows = query.all();
  }
  return rows.map((joined) => {
    // Strip the secret value; expose everything else plus a masked form.
    const { token: rawToken, ...meta } = joined.account_tokens;
    const account = joined.accounts;
    const site = joined.sites;
    return {
      ...meta,
      tokenMasked: maskToken(rawToken, site.platform),
      account: {
        id: account.id,
        username: account.username,
        status: account.status,
      },
      site: {
        id: site.id,
        name: site.name,
        url: site.url,
        platform: site.platform,
      },
    };
  });
}
+21
View File
@@ -0,0 +1,21 @@
import { describe, expect, it } from 'vitest';
import { isCloudflareChallenge, isTokenExpiredError } from './alertRules.js';
// Unit coverage for the pure message classifiers in alertRules.ts.
describe('alertRules', () => {
// Cloudflare detection is substring-based and case-insensitive.
it('detects cloudflare challenge messages', () => {
expect(isCloudflareChallenge('Cloudflare challenge detected')).toBe(true);
expect(isCloudflareChallenge('cf challenge required')).toBe(true);
expect(isCloudflareChallenge('invalid token')).toBe(false);
});
// Token expiry: 401/403 short-circuit to true; otherwise message heuristics
// apply, with one NewAPI phrase explicitly excluded as a false positive.
it('detects token expiration by status or message', () => {
expect(isTokenExpiredError({ status: 401, message: 'Unauthorized' })).toBe(true);
expect(isTokenExpiredError({ status: 403, message: 'Forbidden' })).toBe(true);
expect(isTokenExpiredError({ message: 'jwt expired' })).toBe(true);
expect(isTokenExpiredError({ message: 'token invalid' })).toBe(true);
expect(isTokenExpiredError({ message: 'invalid access token' })).toBe(true);
expect(isTokenExpiredError({ message: 'Token 无效' })).toBe(true);
expect(isTokenExpiredError({ message: '无权进行此操作,未登录且未提供 access token' })).toBe(false);
expect(isTokenExpiredError({ status: 500, message: 'upstream error' })).toBe(false);
});
});
+29
View File
@@ -0,0 +1,29 @@
/**
 * Heuristic: does an upstream error message look like a Cloudflare
 * browser-challenge page rather than a real API response?
 * Case-insensitive substring match; null/empty messages never match.
 */
export function isCloudflareChallenge(message?: string | null): boolean {
  if (!message) return false;
  const haystack = message.toLowerCase();
  const needles = ['cloudflare', 'cf challenge', 'challenge required'];
  return needles.some((needle) => haystack.includes(needle));
}
/**
 * Classify an upstream error as "access token expired/invalid".
 * HTTP 401/403 always count; otherwise the (lowercased) message is scanned
 * for token-expiry phrasing in English and Chinese.
 */
export function isTokenExpiredError(input: { status?: number; message?: string | null }): boolean {
  const { status, message } = input;
  if (status === 401 || status === 403) return true;
  const text = (message || '').toLowerCase();
  if (!text) return false;
  // NewAPI-like sites may return this when session context is missing for an
  // action, which does not always mean the account token is expired.
  if (text.includes('未登录且未提供 access token')) return false;
  if (text.includes('jwt expired') || text.includes('token expired')) return true;
  const mentionsToken = ['token', '令牌', '访问令牌'].some((word) => text.includes(word));
  const mentionsInvalid = text.includes('invalid') || text.includes('无效');
  const mentionsExpired = text.includes('expired') || text.includes('过期');
  if (mentionsToken && (mentionsInvalid || mentionsExpired)) return true;
  if (/invalid\s+access\s+token/.test(text)) return true;
  if (/access\s+token\s+is\s+invalid/.test(text)) return true;
  return text.includes('unauthorized') || text.includes('forbidden');
}
+57
View File
@@ -0,0 +1,57 @@
import { db, schema } from '../db/index.js';
import { eq } from 'drizzle-orm';
import { sendNotification } from './notifyService.js';
import { setAccountRuntimeHealth } from './accountHealthService.js';
/**
 * Record and broadcast that an account's access token has expired.
 * Writes an event row, flips the account status to 'expired', marks runtime
 * health unhealthy, and finally sends an error-level notification.
 */
export async function reportTokenExpired(params: {
  accountId: number;
  username?: string | null;
  siteName?: string | null;
  detail?: string;
}) {
  const accountLabel = params.username || `ID:${params.accountId}`;
  const siteLabel = params.siteName || 'unknown-site';
  const detail = params.detail ? ` (${params.detail})` : '';
  const title = 'Token 已失效';
  const body = `${accountLabel} @ ${siteLabel} 的 Token 无效或已过期${detail}`;
  // Persist an auditable event row first.
  db.insert(schema.events).values({
    type: 'token',
    title,
    message: body,
    level: 'error',
    relatedId: params.accountId,
    relatedType: 'account',
  }).run();
  // Flag the account so schedulers stop using the stale credential.
  db.update(schema.accounts)
    .set({
      status: 'expired',
      updatedAt: new Date().toISOString(),
    })
    .where(eq(schema.accounts.id, params.accountId))
    .run();
  setAccountRuntimeHealth(params.accountId, {
    state: 'unhealthy',
    reason: params.detail ? `访问令牌失效:${params.detail}` : '访问令牌失效',
    source: 'auth',
  });
  await sendNotification(title, body, 'error');
}
/**
 * Record and broadcast that every proxy channel failed for a model.
 * Writes an event row and then sends an error-level notification.
 */
export async function reportProxyAllFailed(params: { model: string; reason: string }) {
  const title = '代理全部失败';
  const body = `模型=${params.model}, 原因=${params.reason}`;
  db.insert(schema.events).values({
    type: 'proxy',
    title,
    message: body,
    level: 'error',
    relatedType: 'route',
  }).run();
  await sendNotification(title, body, 'error');
}
@@ -0,0 +1,244 @@
import { randomUUID } from 'node:crypto';
import { db, schema } from '../db/index.js';
import { sendNotification } from './notifyService.js';
// Lifecycle states of an in-memory background task.
export type BackgroundTaskStatus = 'pending' | 'running' | 'succeeded' | 'failed';
// Snapshot of a background task; instances are immutable — transitions create
// a new object (see setTaskStatus) keyed by id in the `tasks` map.
export type BackgroundTask = {
id: string;
type: string;
title: string;
status: BackgroundTaskStatus;
// Human-readable progress/outcome text shown to the UI.
message: string;
error: string | null;
result: unknown;
// When set, only one pending/running task may exist per key.
dedupeKey: string | null;
createdAt: string;
updatedAt: string;
startedAt: string | null;
finishedAt: string | null;
// Epoch-ms deadline after which the cleanup sweep drops the record.
expiresAtMs: number;
};
// Either a fixed string or a function deriving the text from the task.
type TaskMessageTemplate = string | ((task: BackgroundTask) => string);
type BackgroundTaskStartOptions = {
type: string;
title: string;
dedupeKey?: string;
// How long to retain the finished task record (ms); defaults to TASK_TTL_MS.
keepMs?: number;
notifyOnSuccess?: boolean;
// Defaults to true in runTask.
notifyOnFailure?: boolean;
successTitle?: TaskMessageTemplate;
failureTitle?: TaskMessageTemplate;
successMessage?: TaskMessageTemplate;
failureMessage?: TaskMessageTemplate;
};
// Default retention for task records: 6 hours.
const TASK_TTL_MS = 6 * 60 * 60 * 1000;
// Expired-task sweep cadence: once a minute.
const TASK_CLEANUP_INTERVAL_MS = 60 * 1000;
// In-memory stores; nothing here survives a process restart.
const tasks = new Map<string, BackgroundTask>();
const dedupeTaskIds = new Map<string, string>();
/** Current wall-clock time as an ISO-8601 UTC string. */
function nowIso(): string {
  const current = new Date();
  return current.toISOString();
}
/**
 * Reduce an arbitrary thrown value to a short human-readable string.
 * Preference order: Error.message, non-blank trimmed string, JSON of a plain
 * object; everything else (including circular objects) becomes 'unknown error'.
 */
function summarizeError(error: unknown): string {
  const fallback = 'unknown error';
  if (error instanceof Error && error.message) {
    return error.message;
  }
  if (typeof error === 'string') {
    const trimmed = error.trim();
    return trimmed ? trimmed : fallback;
  }
  if (error !== null && typeof error === 'object') {
    try {
      return JSON.stringify(error);
    } catch {
      // Circular or otherwise unserializable object.
      return fallback;
    }
  }
  return fallback;
}
/**
 * Resolve a task message template to a non-blank string.
 * Function templates are invoked with the task; a throw or a blank result
 * falls back. String templates are trimmed; blank/absent ones fall back.
 */
function resolveTaskMessage(template: TaskMessageTemplate | undefined, task: BackgroundTask, fallback: string): string {
  if (typeof template === 'function') {
    let produced: unknown;
    try {
      produced = template(task);
    } catch {
      // Template callbacks must never break task bookkeeping.
      return fallback;
    }
    if (typeof produced === 'string') {
      const trimmed = produced.trim();
      if (trimmed) return trimmed;
    }
    return fallback;
  }
  if (typeof template !== 'string') return fallback;
  const trimmed = template.trim();
  return trimmed ? trimmed : fallback;
}
/**
 * Produce an updated task snapshot (patch applied, updatedAt refreshed),
 * store it under the ORIGINAL task id, and return it.
 */
function setTaskStatus(task: BackgroundTask, patch: Partial<BackgroundTask>) {
  const merged: BackgroundTask = Object.assign({}, task, patch, { updatedAt: nowIso() });
  // Key by the pre-patch id so a (misguided) id patch cannot orphan the entry.
  tasks.set(task.id, merged);
  return merged;
}
/**
 * Best-effort write of a task lifecycle event to the events table.
 * Failures are swallowed — event logging must never break task execution.
 */
function appendTaskEvent(level: 'info' | 'warning' | 'error', title: string, message: string, taskId: string) {
  // taskId is accepted for call-site symmetry; the events table row carries
  // only relatedType at the moment.
  void taskId;
  try {
    db.insert(schema.events)
      .values({
        type: 'status',
        title,
        message,
        level,
        relatedType: 'task',
        createdAt: nowIso(),
      })
      .run();
  } catch {
    // Intentionally ignored.
  }
}
/**
 * Drive a registered task through its lifecycle: pending -> running ->
 * succeeded/failed. Each transition re-snapshots the task via setTaskStatus,
 * appends an event row, and (optionally) sends a notification. The dedupe
 * slot is released in `finally` regardless of outcome.
 */
async function runTask(taskId: string, options: BackgroundTaskStartOptions, runner: () => Promise<unknown>) {
const initialTask = tasks.get(taskId);
// Task may already have been cleaned up; nothing to do then.
if (!initialTask) return;
let task = setTaskStatus(initialTask, {
status: 'running',
startedAt: nowIso(),
message: `${initialTask.title} 正在执行`,
});
try {
const result = await runner();
task = setTaskStatus(task, {
status: 'succeeded',
finishedAt: nowIso(),
result,
error: null,
});
// Resolve templates AFTER the status flip so they see the final task state.
const eventTitle = resolveTaskMessage(options.successTitle, task, `${task.title} 已完成`);
const eventMessage = resolveTaskMessage(options.successMessage, task, `${task.title} 已完成`);
task = setTaskStatus(task, { message: eventMessage });
appendTaskEvent('info', eventTitle, eventMessage, task.id);
// Success notifications are opt-in.
if (options.notifyOnSuccess) {
await sendNotification(eventTitle, eventMessage, 'info');
}
} catch (error) {
const errorText = summarizeError(error);
task = setTaskStatus(task, {
status: 'failed',
finishedAt: nowIso(),
error: errorText,
message: `${task.title} 失败:${errorText}`,
});
const eventTitle = resolveTaskMessage(options.failureTitle, task, `${task.title} 失败`);
const eventMessage = resolveTaskMessage(options.failureMessage, task, task.message);
task = setTaskStatus(task, { message: eventMessage });
appendTaskEvent('error', eventTitle, eventMessage, task.id);
// Failure notifications are opt-out (default true).
if (options.notifyOnFailure ?? true) {
await sendNotification(eventTitle, eventMessage, 'error');
}
} finally {
// Free the dedupe slot only if it still points at THIS task.
if (task.dedupeKey && dedupeTaskIds.get(task.dedupeKey) === task.id) {
dedupeTaskIds.delete(task.dedupeKey);
}
}
}
/** Drop task records whose retention window has elapsed. */
function cleanupExpiredTasks() {
  const cutoff = Date.now();
  for (const [taskId, task] of tasks) {
    if (task.expiresAtMs > cutoff) continue;
    tasks.delete(taskId);
    // Release the dedupe slot only when it still points at this task.
    if (task.dedupeKey && dedupeTaskIds.get(task.dedupeKey) === taskId) {
      dedupeTaskIds.delete(task.dedupeKey);
    }
  }
}
// Periodic sweep; unref'd so the timer never keeps the process alive.
const cleanupTimer = setInterval(cleanupExpiredTasks, TASK_CLEANUP_INTERVAL_MS);
cleanupTimer.unref?.();
/**
 * Register a new background task and kick off its runner asynchronously.
 * When a dedupeKey is supplied and a pending/running task already holds it,
 * that existing task is returned with reused=true instead of starting a new one.
 */
export function startBackgroundTask(
options: BackgroundTaskStartOptions,
runner: () => Promise<unknown>,
): { task: BackgroundTask; reused: boolean } {
const dedupeKey = options.dedupeKey?.trim() || '';
if (dedupeKey) {
const existingTaskId = dedupeTaskIds.get(dedupeKey);
if (existingTaskId) {
const existing = tasks.get(existingTaskId);
// Reuse only live tasks; finished ones release the slot below.
if (existing && (existing.status === 'pending' || existing.status === 'running')) {
return { task: existing, reused: true };
}
dedupeTaskIds.delete(dedupeKey);
}
}
const createdAt = nowIso();
const task: BackgroundTask = {
id: randomUUID(),
type: options.type,
title: options.title,
status: 'pending',
message: `${options.title} 已开始执行`,
error: null,
result: null,
dedupeKey: dedupeKey || null,
createdAt,
updatedAt: createdAt,
startedAt: null,
finishedAt: null,
// Retention is clamped to at least one minute.
expiresAtMs: Date.now() + Math.max(60_000, options.keepMs ?? TASK_TTL_MS),
};
// Register the task (and its dedupe slot) BEFORE launching the runner so
// runTask can find it by id.
tasks.set(task.id, task);
if (dedupeKey) dedupeTaskIds.set(dedupeKey, task.id);
appendTaskEvent('info', `${task.title}已开始`, `${task.title} 已开始执行`, task.id);
// Fire and forget; runTask handles its own errors and notifications.
void runTask(task.id, options, runner);
return { task, reused: false };
}
/** Look up a task by id; null when unknown or already cleaned up. */
export function getBackgroundTask(taskId: string): BackgroundTask | null {
  const found = tasks.get(taskId);
  return found === undefined ? null : found;
}
/**
 * Newest-first listing of known tasks.
 * The limit is clamped to [1, 200]; non-finite values fall back to 50.
 */
export function listBackgroundTasks(limit = 50): BackgroundTask[] {
  let cap = 50;
  if (Number.isFinite(limit)) {
    cap = Math.max(1, Math.min(200, Math.trunc(limit)));
  }
  const snapshot = Array.from(tasks.values());
  snapshot.sort((a, b) => Date.parse(b.createdAt) - Date.parse(a.createdAt));
  return snapshot.slice(0, cap);
}
/** Return the live (pending/running) task holding this dedupe key, if any. */
export function getRunningTaskByDedupeKey(key: string): BackgroundTask | null {
  const taskId = dedupeTaskIds.get(key.trim());
  const task = taskId ? tasks.get(taskId) : undefined;
  if (!task) return null;
  const live = task.status === 'pending' || task.status === 'running';
  return live ? task : null;
}
/**
 * Tally check-in outcomes into total/success/skipped/failed counters.
 * Classification order: skipped (status==='skipped' or skipped flag) wins,
 * then success, and everything else — including missing results — is failed.
 */
export function summarizeCheckinResults(results: Array<{ result?: any }>): { total: number; success: number; skipped: number; failed: number } {
  let success = 0;
  let skipped = 0;
  let failed = 0;
  for (const entry of results) {
    const outcome = entry?.result;
    if (outcome?.status === 'skipped' || outcome?.skipped) {
      skipped += 1;
    } else if (outcome?.success) {
      success += 1;
    } else {
      failed += 1;
    }
  }
  return { total: results.length, success, skipped, failed };
}
// Test-only helper: wipe all in-memory task state between test cases.
export function __resetBackgroundTasksForTests() {
tasks.clear();
dedupeTaskIds.clear();
}
+604
View File
@@ -0,0 +1,604 @@
import { asc } from 'drizzle-orm';
import cron from 'node-cron';
import { db, schema } from '../db/index.js';
// Format version written into every exported backup payload.
const BACKUP_VERSION = '2.0';
// What to export: everything, account data only, or settings only.
export type BackupExportType = 'all' | 'accounts' | 'preferences';
// Row shapes are derived from the drizzle schema so backups track migrations.
type SiteRow = typeof schema.sites.$inferSelect;
type AccountRow = typeof schema.accounts.$inferSelect;
type AccountTokenRow = typeof schema.accountTokens.$inferSelect;
type TokenRouteRow = typeof schema.tokenRoutes.$inferSelect;
type RouteChannelRow = typeof schema.routeChannels.$inferSelect;
// Full dump of the account-related tables.
interface AccountsBackupSection {
sites: SiteRow[];
accounts: AccountRow[];
accountTokens: AccountTokenRow[];
tokenRoutes: TokenRouteRow[];
routeChannels: RouteChannelRow[];
}
// Settings rows with their values already JSON-decoded.
interface PreferencesBackupSection {
settings: Array<{ key: string; value: unknown }>;
}
interface BackupFullV2 {
version: string;
timestamp: number;
accounts: AccountsBackupSection;
preferences: PreferencesBackupSection;
}
interface BackupAccountsPartialV2 {
version: string;
timestamp: number;
type: 'accounts';
accounts: AccountsBackupSection;
}
interface BackupPreferencesPartialV2 {
version: string;
timestamp: number;
type: 'preferences';
preferences: PreferencesBackupSection;
}
type BackupV2 = BackupFullV2 | BackupAccountsPartialV2 | BackupPreferencesPartialV2;
// Unvalidated parsed-JSON input for import.
type RawBackupData = Record<string, unknown>;
interface BackupImportResult {
allImported: boolean;
sections: {
accounts: boolean;
preferences: boolean;
};
// Settings that passed validation and were actually written.
appliedSettings: Array<{ key: string; value: unknown }>;
}
// Keys never exported nor imported.
const EXCLUDED_SETTING_KEYS = new Set<string>([
// Keep current admin login credential unchanged to avoid accidental lock-out.
'auth_token',
]);
/** True for plain non-null, non-array objects. */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || typeof value !== 'object') return false;
  return !Array.isArray(value);
}
/** Coerce to a trimmed string; non-strings become ''. */
function asString(value: unknown): string {
  return typeof value === 'string' ? value.trim() : '';
}
/** Accept only real booleans; anything else yields the fallback. */
function asBoolean(value: unknown, fallback = false): boolean {
  if (typeof value !== 'boolean') return fallback;
  return value;
}
/**
 * Coerce to a finite number via Number(); otherwise return the fallback.
 * Note: Number(null) is 0, so null yields 0 rather than the fallback.
 */
function asNumber(value: unknown, fallback = 0): number {
  const parsed = Number(value);
  if (Number.isFinite(parsed)) return parsed;
  return fallback;
}
/**
 * Normalize a legacy timestamp (date string or epoch number) to ISO-8601.
 * Unparseable or missing values fall back to the current time.
 */
function toIsoString(value: unknown): string {
  let candidate: Date | null = null;
  if (typeof value === 'string' && value.trim()) {
    candidate = new Date(value);
  } else if (typeof value === 'number' && Number.isFinite(value)) {
    candidate = new Date(value);
  }
  if (candidate && !Number.isNaN(candidate.getTime())) {
    return candidate.toISOString();
  }
  return new Date().toISOString();
}
/**
 * Convert a legacy quota figure into display currency units.
 * Non-positive or non-numeric input yields 0. Values of 10,000 or more are
 * assumed to be raw NewAPI units and are divided by 500,000; smaller values
 * are taken as already being in currency units.
 */
function normalizeLegacyQuota(raw: unknown): number {
  const value = asNumber(raw, 0);
  if (!Number.isFinite(value) || value <= 0) return 0;
  // ref-all-api-hub stores quota in raw units for NewAPI-like sites.
  // Convert obvious raw values to display currency units.
  if (value >= 10_000) {
    return value / 500_000;
  }
  return value;
}
/**
 * Map a legacy site-type label onto one of the supported platform ids.
 * Exact (case-insensitive) matches win; otherwise a few substring heuristics
 * apply, and everything else defaults to 'new-api'.
 */
function normalizeLegacyPlatform(raw: string): string {
  const normalized = raw.trim().toLowerCase();
  if (normalized.length === 0) return 'new-api';
  const knownPlatforms = [
    'new-api',
    'one-api',
    'anyrouter',
    'one-hub',
    'done-hub',
    'sub2api',
    'veloera',
  ];
  if (knownPlatforms.includes(normalized)) return normalized;
  // Fuzzy fallbacks for legacy labels that merely embed a platform hint.
  if (normalized.includes('wong')) return 'new-api';
  if (normalized.includes('anyrouter')) return 'anyrouter';
  if (normalized.includes('done')) return 'done-hub';
  return 'new-api';
}
/**
 * Convert a ref-all-api-hub backup payload into this app's accounts section.
 * Expects data.accounts.accounts to be an array of legacy account records.
 * Sites are deduplicated by platform+url; ids are synthesized sequentially.
 * Returns null when the legacy shape is not present.
 */
function buildAccountsSectionFromRefBackup(data: RawBackupData): AccountsBackupSection | null {
const accountsContainer = isRecord(data.accounts) ? data.accounts : null;
const rows = Array.isArray(accountsContainer?.accounts) ? accountsContainer.accounts : null;
if (!rows) return null;
const sites: SiteRow[] = [];
const accounts: AccountRow[] = [];
const accountTokens: AccountTokenRow[] = [];
// The legacy format has no route data; these stay empty.
const tokenRoutes: TokenRouteRow[] = [];
const routeChannels: RouteChannelRow[] = [];
// Dedupe sites on platform+url so several accounts can share one site row.
const siteIdByKey = new Map<string, number>();
let nextSiteId = 1;
let nextAccountId = 1;
let nextTokenId = 1;
for (const item of rows) {
if (!isRecord(item)) continue;
const siteUrl = asString(item.site_url);
// A site URL is mandatory; skip records without one.
if (!siteUrl) continue;
const platform = normalizeLegacyPlatform(asString(item.site_type));
const siteName = asString(item.site_name) || siteUrl;
const siteKey = `${platform}::${siteUrl}`;
let siteId = siteIdByKey.get(siteKey) || 0;
if (!siteId) {
siteId = nextSiteId++;
siteIdByKey.set(siteKey, siteId);
sites.push({
id: siteId,
name: siteName,
url: siteUrl,
platform,
status: 'active',
apiKey: null,
createdAt: toIsoString(item.created_at),
updatedAt: toIsoString(item.updated_at),
});
}
const accountInfo = isRecord(item.account_info) ? item.account_info : {};
const cookieAuth = isRecord(item.cookieAuth) ? item.cookieAuth : {};
const authType = asString(item.authType);
// Credential lookup order: account_info token, session cookie, flat field.
const accountAccessToken =
asString(accountInfo.access_token)
|| asString(cookieAuth.sessionCookie)
|| asString((item as Record<string, unknown>).access_token);
// Without any credential the account is useless here; skip it.
if (!accountAccessToken) continue;
const platformUserId = asNumber(accountInfo.id, 0);
const username = asString(accountInfo.username)
|| asString(item.username)
|| (platformUserId > 0 ? `user-${platformUserId}` : `account-${nextAccountId}`);
let apiToken: string | null = null;
// Only api_key-style credentials double as a proxyable API token.
if (authType === 'api_key') {
apiToken = accountAccessToken;
}
const createdAt = toIsoString(item.created_at);
const updatedAt = toIsoString(item.updated_at);
const checkin = isRecord(item.checkIn) ? item.checkIn : {};
// Provenance info kept in extraConfig for later migrations/debugging.
const extraConfigPayload = {
platformUserId: platformUserId > 0 ? platformUserId : undefined,
authType: authType || undefined,
source: 'ref-all-api-hub',
};
const accountId = nextAccountId++;
const importedBalance = normalizeLegacyQuota(accountInfo.quota);
const importedUsed = normalizeLegacyQuota(accountInfo.today_quota_consumption);
const importedQuota = importedBalance + importedUsed;
accounts.push({
id: accountId,
siteId,
username,
accessToken: accountAccessToken,
apiToken,
balance: importedBalance,
balanceUsed: importedUsed,
quota: importedQuota > 0 ? importedQuota : importedBalance,
unitCost: null,
valueScore: 0,
status: asBoolean(item.disabled, false) ? 'disabled' : 'active',
checkinEnabled: asBoolean(checkin.autoCheckInEnabled, true),
lastCheckinAt: null,
lastBalanceRefresh: null,
extraConfig: JSON.stringify(extraConfigPayload),
createdAt,
updatedAt,
});
// Mirror an api_key credential as the account's single default token.
if (apiToken) {
accountTokens.push({
id: nextTokenId++,
accountId,
name: 'default',
token: apiToken,
source: 'legacy',
enabled: true,
isDefault: true,
createdAt,
updatedAt,
});
}
}
return {
sites,
accounts,
accountTokens,
tokenRoutes,
routeChannels,
};
}
/**
 * Wrap recognizable ref-all-api-hub preference blobs as namespaced settings.
 * Each known section is stored verbatim under a legacy_* key for later use.
 * Returns null when none of the known sections are present.
 */
function buildPreferencesSectionFromRefBackup(data: RawBackupData): PreferencesBackupSection | null {
  const candidates: Array<[string, unknown]> = [
    ['legacy_preferences_ref_v2', data.preferences],
    ['legacy_channel_configs_ref_v2', data.channelConfigs],
    ['legacy_api_credential_profiles_ref_v2', data.apiCredentialProfiles],
    ['legacy_tag_store_ref_v2', data.tagStore],
  ];
  const settings: Array<{ key: string; value: unknown }> = [];
  for (const [key, value] of candidates) {
    if (isRecord(value)) {
      settings.push({ key, value });
    }
  }
  if (settings.length === 0) return null;
  return { settings };
}
/**
 * Decode a stored setting: JSON when parseable, the raw string otherwise,
 * and null for a missing value.
 */
function parseSettingValue(raw: string | null): unknown {
  if (raw == null) return null;
  try {
    return JSON.parse(raw);
  } catch {
    // Legacy values may be plain strings that were never JSON-encoded.
    return raw;
  }
}
/**
 * Serialize a setting value for the settings table.
 * NOTE(review): JSON.stringify(undefined) returns undefined, not a string —
 * presumably callers never pass undefined; verify if that changes.
 */
function stringifySettingValue(value: unknown): string {
  const serialized = JSON.stringify(value);
  return serialized;
}
/** True only for finite primitive numbers (no NaN/Infinity, no coercion). */
function isFiniteNumber(value: unknown): value is number {
  if (typeof value !== 'number') return false;
  return Number.isFinite(value);
}
/**
 * Validate an imported setting value per key before it is written.
 * Cron keys must parse as cron expressions, proxy_token must be a trimmed
 * 'sk-'-prefixed string of length >= 6, smtp_port a positive finite number,
 * routing_weights an object with only finite-number weight fields.
 * Unknown keys are accepted as-is.
 */
function isSettingValueAcceptable(key: string, value: unknown): boolean {
  switch (key) {
    case 'checkin_cron':
    case 'balance_refresh_cron':
      return typeof value === 'string' && cron.validate(value);
    case 'proxy_token': {
      if (typeof value !== 'string') return false;
      const trimmed = value.trim();
      return trimmed.length >= 6 && trimmed.startsWith('sk-');
    }
    case 'smtp_port':
      return isFiniteNumber(value) && value > 0;
    case 'routing_weights': {
      if (!isRecord(value)) return false;
      const weightKeys = ['baseWeightFactor', 'valueScoreFactor', 'costWeight', 'balanceWeight', 'usageWeight'] as const;
      return weightKeys.every((weightKey) => value[weightKey] === undefined || isFiniteNumber(value[weightKey]));
    }
    default:
      return true;
  }
}
/** Dump all account-related tables, each ordered by id for stable diffs. */
function exportAccountsSection(): AccountsBackupSection {
  return {
    sites: db.select().from(schema.sites).orderBy(asc(schema.sites.id)).all(),
    accounts: db.select().from(schema.accounts).orderBy(asc(schema.accounts.id)).all(),
    accountTokens: db.select().from(schema.accountTokens).orderBy(asc(schema.accountTokens.id)).all(),
    tokenRoutes: db.select().from(schema.tokenRoutes).orderBy(asc(schema.tokenRoutes.id)).all(),
    routeChannels: db.select().from(schema.routeChannels).orderBy(asc(schema.routeChannels.id)).all(),
  };
}
/** Dump all settings (values JSON-decoded), skipping excluded keys. */
function exportPreferencesSection(): PreferencesBackupSection {
  const rows = db.select().from(schema.settings).all();
  const settings: Array<{ key: string; value: unknown }> = [];
  for (const row of rows) {
    // Never export credentials such as the admin auth token.
    if (EXCLUDED_SETTING_KEYS.has(row.key)) continue;
    settings.push({ key: row.key, value: parseSettingValue(row.value) });
  }
  return { settings };
}
/**
 * Build a versioned backup payload of the requested scope.
 * Partial exports carry a `type` discriminator; the full export carries both
 * sections and no discriminator.
 */
export function exportBackup(type: BackupExportType): BackupV2 {
  const timestamp = Date.now();
  switch (type) {
    case 'accounts':
      return {
        version: BACKUP_VERSION,
        timestamp,
        type: 'accounts',
        accounts: exportAccountsSection(),
      };
    case 'preferences':
      return {
        version: BACKUP_VERSION,
        timestamp,
        type: 'preferences',
        preferences: exportPreferencesSection(),
      };
    default:
      return {
        version: BACKUP_VERSION,
        timestamp,
        accounts: exportAccountsSection(),
        preferences: exportPreferencesSection(),
      };
  }
}
/**
 * Shallow-validate an accounts section: every table field must be an array.
 * Row contents are trusted as-is (cast, not checked field-by-field).
 */
function coerceAccountsSection(input: unknown): AccountsBackupSection | null {
  if (!isRecord(input)) return null;
  const hasAllArrays =
    Array.isArray(input.sites)
    && Array.isArray(input.accounts)
    && Array.isArray(input.accountTokens)
    && Array.isArray(input.tokenRoutes)
    && Array.isArray(input.routeChannels);
  if (!hasAllArrays) return null;
  return {
    sites: input.sites as SiteRow[],
    accounts: input.accounts as AccountRow[],
    accountTokens: input.accountTokens as AccountTokenRow[],
    tokenRoutes: input.tokenRoutes as TokenRouteRow[],
    routeChannels: input.routeChannels as RouteChannelRow[],
  };
}
/**
 * Validate a preferences section: settings must be an array; malformed rows,
 * blank keys, and excluded keys are silently dropped.
 */
function coercePreferencesSection(input: unknown): PreferencesBackupSection | null {
  if (!isRecord(input)) return null;
  const settingsRaw = input.settings;
  if (!Array.isArray(settingsRaw)) return null;
  const settings: Array<{ key: string; value: unknown }> = [];
  for (const row of settingsRaw) {
    if (!isRecord(row)) continue;
    const key = typeof row.key === 'string' ? row.key.trim() : '';
    if (!key || EXCLUDED_SETTING_KEYS.has(key)) continue;
    settings.push({ key, value: row.value });
  }
  return { settings };
}
/**
 * Locate account data in a backup of unknown vintage.
 * Tries, in order: the root object, data.accounts, data.data.accounts
 * (legacy nesting), then the ref-all-api-hub format. Null when none match.
 */
function detectAccountsSection(data: RawBackupData): AccountsBackupSection | null {
  const direct = coerceAccountsSection(data) ?? coerceAccountsSection(data.accounts);
  if (direct) return direct;
  if (isRecord(data.data)) {
    const legacyNested = coerceAccountsSection(data.data.accounts);
    if (legacyNested) return legacyNested;
  }
  return buildAccountsSectionFromRefBackup(data);
}
/**
 * Locate preference data in a backup of unknown vintage.
 * Tries, in order: the root object, data.preferences, data.data.preferences
 * (legacy nesting), then the ref-all-api-hub format. Null when none match.
 */
function detectPreferencesSection(data: RawBackupData): PreferencesBackupSection | null {
  const direct = coercePreferencesSection(data) ?? coercePreferencesSection(data.preferences);
  if (direct) return direct;
  if (isRecord(data.data)) {
    const legacyNested = coercePreferencesSection(data.data.preferences);
    if (legacyNested) return legacyNested;
  }
  return buildPreferencesSectionFromRefBackup(data);
}
/**
 * Replace ALL account-related data with the backup's contents, atomically.
 * Deletes run child-tables-first and inserts run parent-tables-first so
 * foreign-key constraints hold at every step. Columns are copied explicitly
 * to ignore unknown fields from newer/older backup versions.
 */
function importAccountsSection(section: AccountsBackupSection) {
db.transaction((tx) => {
// Wipe dependents before their parents (FK-safe order). Derived data such
// as availability caches and logs is dropped too, not restored.
tx.delete(schema.routeChannels).run();
tx.delete(schema.tokenRoutes).run();
tx.delete(schema.tokenModelAvailability).run();
tx.delete(schema.modelAvailability).run();
tx.delete(schema.proxyLogs).run();
tx.delete(schema.checkinLogs).run();
tx.delete(schema.accountTokens).run();
tx.delete(schema.accounts).run();
tx.delete(schema.sites).run();
// Re-insert parents first, preserving original ids for FK integrity.
for (const row of section.sites) {
tx.insert(schema.sites).values({
id: row.id,
name: row.name,
url: row.url,
platform: row.platform,
status: row.status || 'active',
apiKey: row.apiKey,
createdAt: row.createdAt,
updatedAt: row.updatedAt,
}).run();
}
for (const row of section.accounts) {
tx.insert(schema.accounts).values({
id: row.id,
siteId: row.siteId,
username: row.username,
accessToken: row.accessToken,
apiToken: row.apiToken,
balance: row.balance,
balanceUsed: row.balanceUsed,
quota: row.quota,
unitCost: row.unitCost,
valueScore: row.valueScore,
status: row.status,
checkinEnabled: row.checkinEnabled,
lastCheckinAt: row.lastCheckinAt,
lastBalanceRefresh: row.lastBalanceRefresh,
extraConfig: row.extraConfig,
createdAt: row.createdAt,
updatedAt: row.updatedAt,
}).run();
}
for (const row of section.accountTokens) {
tx.insert(schema.accountTokens).values({
id: row.id,
accountId: row.accountId,
name: row.name,
token: row.token,
source: row.source,
enabled: row.enabled,
isDefault: row.isDefault,
createdAt: row.createdAt,
updatedAt: row.updatedAt,
}).run();
}
for (const row of section.tokenRoutes) {
tx.insert(schema.tokenRoutes).values({
id: row.id,
modelPattern: row.modelPattern,
modelMapping: row.modelMapping,
enabled: row.enabled,
createdAt: row.createdAt,
updatedAt: row.updatedAt,
}).run();
}
for (const row of section.routeChannels) {
tx.insert(schema.routeChannels).values({
id: row.id,
routeId: row.routeId,
accountId: row.accountId,
tokenId: row.tokenId,
priority: row.priority,
weight: row.weight,
enabled: row.enabled,
manualOverride: row.manualOverride,
successCount: row.successCount,
failCount: row.failCount,
totalLatencyMs: row.totalLatencyMs,
totalCost: row.totalCost,
lastUsedAt: row.lastUsedAt,
lastFailAt: row.lastFailAt,
cooldownUntil: row.cooldownUntil,
}).run();
}
});
}
/**
 * Upsert validated settings inside a single transaction.
 * Values failing per-key validation are skipped silently.
 * Returns the list of settings that were actually applied.
 */
function importPreferencesSection(section: PreferencesBackupSection): Array<{ key: string; value: unknown }> {
  const applied: Array<{ key: string; value: unknown }> = [];
  db.transaction((tx) => {
    for (const entry of section.settings) {
      if (!isSettingValueAcceptable(entry.key, entry.value)) continue;
      const serialized = stringifySettingValue(entry.value);
      tx.insert(schema.settings)
        .values({ key: entry.key, value: serialized })
        .onConflictDoUpdate({
          target: schema.settings.key,
          set: { value: serialized },
        })
        .run();
      applied.push({ key: entry.key, value: entry.value });
    }
  });
  return applied;
}
/**
 * Import a backup payload of any supported vintage.
 * Requires a JSON object with a timestamp. A section is imported either when
 * its data is detected or when the payload's `type` explicitly requests it
 * (in which case missing data is an error). Throws on malformed input.
 */
export function importBackup(data: RawBackupData): BackupImportResult {
  if (!isRecord(data)) {
    throw new Error('导入数据格式错误:必须为 JSON 对象');
  }
  // Covers absent, null, and undefined timestamps in one check.
  if (data.timestamp == null) {
    throw new Error('导入数据格式错误:缺少 timestamp');
  }
  const accountsSection = detectAccountsSection(data);
  const preferencesSection = detectPreferencesSection(data);
  const declaredType = typeof data.type === 'string' ? data.type : '';
  const wantsAccounts = declaredType === 'accounts' || accountsSection !== null;
  const wantsPreferences = declaredType === 'preferences' || preferencesSection !== null;
  if (!wantsAccounts && !wantsPreferences) {
    throw new Error('导入数据中没有可识别的账号或设置数据');
  }
  let accountsImported = false;
  let preferencesImported = false;
  let appliedSettings: Array<{ key: string; value: unknown }> = [];
  if (wantsAccounts) {
    if (!accountsSection) {
      throw new Error('导入数据格式错误:账号数据结构不正确');
    }
    importAccountsSection(accountsSection);
    accountsImported = true;
  }
  if (wantsPreferences) {
    if (!preferencesSection) {
      throw new Error('导入数据格式错误:设置数据结构不正确');
    }
    appliedSettings = importPreferencesSection(preferencesSection);
    preferencesImported = true;
  }
  return {
    allImported: (!wantsAccounts || accountsImported) && (!wantsPreferences || preferencesImported),
    sections: {
      accounts: accountsImported,
      preferences: preferencesImported,
    },
    appliedSettings,
  };
}
@@ -0,0 +1,267 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';
// Spies shared across the vi.mock factories below. They are declared before
// the mocks so the hoisted factories can close over them.
const adapterMock = {
getBalance: vi.fn(),
login: vi.fn(),
};
const selectAllMock = vi.fn();
const updateSetMock = vi.fn();
const insertValuesMock = vi.fn();
const reportTokenExpiredMock = vi.fn();
const sendNotificationMock = vi.fn();
const decryptPasswordMock = vi.fn();
const setAccountRuntimeHealthMock = vi.fn();
const extractRuntimeHealthMock = vi.fn();
const undiciFetchMock = vi.fn();
// Minimal stand-in for the drizzle db: fluent chains that terminate in the
// spies above. select().from().where().innerJoin().all() -> selectAllMock.
vi.mock('../db/index.js', () => {
const selectChain = {
all: () => selectAllMock(),
where: () => selectChain,
innerJoin: () => selectChain,
from: () => selectChain,
};
const updateWhereChain = {
run: () => ({}),
};
const updateSetChain = {
where: () => updateWhereChain,
};
const insertChain = {
run: () => ({}),
values: (...args: unknown[]) => {
insertValuesMock(...args);
return insertChain;
},
};
return {
db: {
select: () => selectChain,
// Captures the update payload so tests can assert on written columns.
update: () => ({
set: (updates: Record<string, unknown>) => {
updateSetMock(updates);
return updateSetChain;
},
}),
insert: () => insertChain,
},
// Only the columns balanceService touches are modeled.
schema: {
accounts: { id: 'id', siteId: 'siteId', status: 'status' },
sites: { id: 'id' },
events: {},
},
};
});
// Every platform resolves to the same adapter spy pair.
vi.mock('./platforms/index.js', () => ({
getAdapter: () => adapterMock,
}));
vi.mock('./alertService.js', () => ({
reportTokenExpired: (...args: unknown[]) => reportTokenExpiredMock(...args),
}));
vi.mock('./notifyService.js', () => ({
sendNotification: (...args: unknown[]) => sendNotificationMock(...args),
}));
vi.mock('./accountCredentialService.js', () => ({
decryptAccountPassword: (...args: unknown[]) => decryptPasswordMock(...args),
}));
vi.mock('./accountHealthService.js', () => ({
setAccountRuntimeHealth: (...args: unknown[]) => setAccountRuntimeHealthMock(...args),
extractRuntimeHealth: (...args: unknown[]) => extractRuntimeHealthMock(...args),
}));
// Network access is fully stubbed out.
vi.mock('undici', () => ({
fetch: (...args: unknown[]) => undiciFetchMock(...args),
}));
// End-to-end tests of refreshBalance() against mocked DB rows and a mocked
// platform adapter: auto-relogin retry, token-expired escalation, runtime
// health preservation, and the today-income log fallback.
describe('balanceService auto relogin', () => {
  beforeEach(() => {
    // Reset all spies so call counts and queued return values never leak
    // between tests.
    adapterMock.getBalance.mockReset();
    adapterMock.login.mockReset();
    selectAllMock.mockReset();
    updateSetMock.mockReset();
    insertValuesMock.mockReset();
    reportTokenExpiredMock.mockReset();
    sendNotificationMock.mockReset();
    decryptPasswordMock.mockReset();
    setAccountRuntimeHealthMock.mockReset();
    extractRuntimeHealthMock.mockReset();
    undiciFetchMock.mockReset();
    // Defaults: no stored runtime health, and the log endpoint responds
    // non-OK so the income fallback is a no-op unless a test scripts it.
    extractRuntimeHealthMock.mockReturnValue(null);
    undiciFetchMock.mockResolvedValue({
      ok: false,
      json: async () => ({}),
    });
  });
  // Happy path: first read 401s, one relogin mints a fresh token, the retry
  // succeeds and the new token is persisted.
  it('retries balance fetch once after successful auto relogin', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 1,
          username: 'linuxdo_11494',
          accessToken: 'stale-token',
          status: 'active',
          extraConfig: JSON.stringify({
            platformUserId: 11494,
            autoRelogin: { username: 'linuxdo_11494', passwordCipher: 'cipher' },
          }),
        },
        sites: {
          id: 3,
          name: 'wong',
          url: 'https://wzw.pp.ua',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.getBalance
      .mockRejectedValueOnce(new Error('HTTP 401: access token required'))
      .mockResolvedValueOnce({ balance: 12, used: 1, quota: 13 });
    decryptPasswordMock.mockReturnValue('plain-password');
    adapterMock.login.mockResolvedValue({ success: true, accessToken: 'fresh-token' });
    const { refreshBalance } = await import('./balanceService.js');
    const result = await refreshBalance(1);
    expect(result).toEqual({ balance: 12, used: 1, quota: 13 });
    expect(adapterMock.login).toHaveBeenCalledTimes(1);
    expect(adapterMock.getBalance).toHaveBeenCalledTimes(2);
    // First attempt used the stale token; the retry used the fresh one.
    expect(adapterMock.getBalance.mock.calls[0][1]).toBe('stale-token');
    expect(adapterMock.getBalance.mock.calls[1][1]).toBe('fresh-token');
    expect(updateSetMock.mock.calls.some((call) => call[0]?.accessToken === 'fresh-token')).toBe(true);
    expect(reportTokenExpiredMock).not.toHaveBeenCalled();
  });
  // No autoRelogin config in extraConfig: the auth failure must escalate via
  // reportTokenExpired instead of attempting a login.
  it('reports token expired when relogin is unavailable', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 2,
          username: 'linuxdo_7659',
          accessToken: 'stale-token',
          status: 'active',
          extraConfig: null,
        },
        sites: {
          id: 4,
          name: 'kfc',
          url: 'https://kfc-api.sxxe.net',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.getBalance.mockRejectedValueOnce(new Error('HTTP 401: access token required'));
    const { refreshBalance } = await import('./balanceService.js');
    await expect(refreshBalance(2)).rejects.toThrow('access token');
    expect(adapterMock.login).not.toHaveBeenCalled();
    expect(reportTokenExpiredMock).toHaveBeenCalledTimes(1);
  });
  // A site whose checkin endpoint is unsupported stays 'degraded' even when
  // balance refresh succeeds — the health must not flip back to healthy.
  it('keeps degraded health when checkin is unsupported but balance refresh succeeds', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 3,
          username: 'ld6jl3djexjf',
          accessToken: 'active-token',
          status: 'active',
          extraConfig: JSON.stringify({
            runtimeHealth: {
              state: 'degraded',
              reason: 'checkin endpoint not found',
              source: 'checkin',
            },
          }),
        },
        sites: {
          id: 5,
          name: 'Wind Hub',
          url: 'https://windhub.cc',
          platform: 'done-hub',
        },
      },
    ]);
    adapterMock.getBalance.mockResolvedValueOnce({ balance: 100, used: 2, quota: 102 });
    extractRuntimeHealthMock.mockReturnValue({
      state: 'degraded',
      reason: 'checkin endpoint not found',
      source: 'checkin',
      checkedAt: '2026-02-25T12:00:00.000Z',
    });
    const { refreshBalance } = await import('./balanceService.js');
    const result = await refreshBalance(3);
    expect(result).toEqual({ balance: 100, used: 2, quota: 102 });
    expect(setAccountRuntimeHealthMock).toHaveBeenCalledWith(
      3,
      expect.objectContaining({
        state: 'degraded',
        reason: 'checkin endpoint not found',
      }),
    );
  });
  // The balance API returned no today_income, so the service pages the log
  // endpoint; the reward is parsed from the log item's free-text content.
  it('fills today income snapshot from log endpoint when balance api lacks today_income', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 4,
          username: 'linuxdo_7659',
          accessToken: 'active-token',
          status: 'active',
          extraConfig: null,
        },
        sites: {
          id: 6,
          name: 'kfc',
          url: 'https://kfc-api.sxxe.net',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.getBalance.mockResolvedValueOnce({ balance: 12, used: 1, quota: 13 });
    // First log type returns nothing; second returns one checkin-reward item
    // whose quota field is 0 so the amount comes from the content string.
    undiciFetchMock
      .mockResolvedValueOnce({
        ok: true,
        json: async () => ({ data: { items: [], total: 0 } }),
      })
      .mockResolvedValueOnce({
        ok: true,
        json: async () => ({
          data: {
            items: [{ quota: 0, content: '签到奖励 2.083650 额度' }],
            total: 1,
          },
        }),
      });
    const { refreshBalance } = await import('./balanceService.js');
    const result = await refreshBalance(4);
    expect(result).toEqual({ balance: 12, used: 1, quota: 13, todayIncome: 2.08365 });
    // The income must also be persisted into the extraConfig snapshot.
    const updateWithSnapshot = updateSetMock.mock.calls
      .map((call) => call[0] as Record<string, unknown>)
      .find((payload) => typeof payload.extraConfig === 'string');
    expect(updateWithSnapshot).toBeDefined();
    const parsedExtra = JSON.parse(String(updateWithSnapshot?.extraConfig));
    expect(parsedExtra.todayIncomeSnapshot?.latest).toBeCloseTo(2.08365, 6);
  });
});
+391
View File
@@ -0,0 +1,391 @@
import { db, schema } from '../db/index.js';
import { getAdapter } from './platforms/index.js';
import { eq } from 'drizzle-orm';
import { sendNotification } from './notifyService.js';
import { isTokenExpiredError } from './alertRules.js';
import { reportTokenExpired } from './alertService.js';
import { getAutoReloginConfig, resolvePlatformUserId } from './accountExtraConfig.js';
import { decryptAccountPassword } from './accountCredentialService.js';
import { extractRuntimeHealth, setAccountRuntimeHealth } from './accountHealthService.js';
import { updateTodayIncomeSnapshot } from './todayIncomeRewardService.js';
import type { BalanceInfo } from './platforms/base.js';
/** A site counts as disabled only when its status is exactly 'disabled'; a missing/empty status defaults to 'active'. */
function isSiteDisabled(status?: string | null): boolean {
  const effectiveStatus = status || 'active';
  return effectiveStatus === 'disabled';
}
/**
 * Decide whether a failed balance call looks like an auth problem that a
 * fresh login could fix: a recognized token-expiry error, or a message
 * containing one of several auth-related phrases (case-insensitive).
 */
function shouldAttemptAutoRelogin(message?: string | null): boolean {
  if (!message) return false;
  if (isTokenExpiredError({ message })) return true;
  const authMarkers = [
    'access token',
    'new-api-user',
    'unauthorized',
    'forbidden',
    'not login',
    'not logged',
  ];
  const lowered = message.toLowerCase();
  return authMarkers.some((marker) => lowered.includes(marker));
}
/**
 * Narrower variant of shouldAttemptAutoRelogin used to decide whether an
 * auth failure should be escalated via reportTokenExpired (it omits the
 * "not login"/"not logged" phrasings).
 */
function shouldReportExpired(message?: string | null): boolean {
  if (!message) return false;
  if (isTokenExpiredError({ message })) return true;
  const lowered = message.toLowerCase();
  return ['access token', 'new-api-user', 'unauthorized', 'forbidden'].some((marker) =>
    lowered.includes(marker),
  );
}
/**
 * True when the stored runtime health represents "this site does not
 * support the checkin endpoint": a degraded state that originated from
 * checkin, or whose reason text matches the known unsupported-endpoint
 * error shapes.
 */
function isUnsupportedCheckinRuntimeHealth(health: ReturnType<typeof extractRuntimeHealth>): boolean {
  if (!health || health.state !== 'degraded') return false;
  if ((health.source || '').toLowerCase() === 'checkin') return true;
  const reason = (health.reason || '').toLowerCase();
  if (reason.includes('checkin endpoint not found')) return true;
  if (reason.includes('invalid url (post /api/user/checkin)')) return true;
  if (reason.includes('unsupported checkin endpoint')) return true;
  return reason.includes('http 404') && reason.includes('/api/user/checkin');
}
// Log `type` values treated as income entries when paging /api/log/self
// (presumably recharge/grant categories — confirm against the platform's
// log-type enum).
const INCOME_LOG_TYPES = [1, 4] as const;
// Items requested per page from the log endpoint.
const LOG_PAGE_SIZE = 100;
// Hard cap on pages per log type so a bogus `total` cannot cause an
// unbounded fetch loop.
const LOG_MAX_PAGES = 6;
/** Platforms whose /api/log/self endpoint can be queried to derive today's income (case-insensitive match). */
function supportsTodayIncomeLogFallback(platform?: string | null): boolean {
  switch ((platform || '').toLowerCase()) {
    case 'new-api':
    case 'anyrouter':
    case 'one-api':
    case 'veloera':
      return true;
    default:
      return false;
  }
}
/**
 * Return today's [00:00:00.000, 23:59:59.999] window (server-local time)
 * as Unix timestamps in whole seconds.
 */
function getTodayUnixSecondsRange(now = new Date()): { start: number; end: number } {
  const toUnixSeconds = (d: Date) => Math.floor(d.getTime() / 1000);
  const dayStart = new Date(now);
  dayStart.setHours(0, 0, 0, 0);
  const dayEnd = new Date(now);
  dayEnd.setHours(23, 59, 59, 999);
  return { start: toUnixSeconds(dayStart), end: toUnixSeconds(dayEnd) };
}
/** Coerce a number or numeric string to a strictly positive finite number; everything else maps to 0. */
function parsePositiveNumber(value: unknown): number {
  let candidate: number | null = null;
  if (typeof value === 'number') {
    candidate = value;
  } else if (typeof value === 'string') {
    candidate = Number(value.trim());
  }
  if (candidate !== null && Number.isFinite(candidate) && candidate > 0) {
    return candidate;
  }
  return 0;
}
/**
 * Pull a positive amount out of a log item's free-text content, e.g.
 * "签到奖励 2.083650 额度" -> 2.08365. Thousands separators are stripped
 * before matching the first signed decimal; non-positive or absent numbers
 * yield 0.
 */
function parseIncomeFromContent(content: unknown): number {
  if (typeof content !== 'string') return 0;
  const firstNumber = /[-+]?\d+(?:\.\d+)?/;
  const match = content.replace(/,/g, '').match(firstNumber);
  if (!match) return 0;
  const amount = Number(match[0]);
  return Number.isFinite(amount) && amount > 0 ? amount : 0;
}
/** Locate the item array inside a log response, tolerating the three known payload shapes; [] when none match. */
function extractLogItems(payload: any): Array<{ quota?: unknown; content?: unknown }> {
  const candidates = [payload?.data?.items, payload?.items, payload?.data];
  for (const candidate of candidates) {
    if (Array.isArray(candidate)) return candidate;
  }
  return [];
}
/**
 * Read the total record count from a log response. Checks data.total then
 * total; accepts non-negative numbers or integer strings. Returns null when
 * neither candidate is usable.
 */
function extractLogTotal(payload: any): number | null {
  for (const raw of [payload?.data?.total, payload?.total]) {
    if (typeof raw === 'number' && Number.isFinite(raw) && raw >= 0) return raw;
    if (typeof raw === 'string') {
      const parsed = Number.parseInt(raw.trim(), 10);
      if (!Number.isNaN(parsed) && parsed >= 0) return parsed;
    }
  }
  return null;
}
/** Divisor converting raw log `quota` units to currency; veloera uses a larger scale than the other platforms. */
function resolveQuotaConversionFactor(platform?: string | null): number {
  const normalized = (platform || '').toLowerCase();
  if (normalized === 'veloera') return 1_000_000;
  return 500_000;
}
/**
 * Sum today's income for an account by paging the platform's
 * /api/log/self endpoint across each income log type.
 *
 * Per item, the raw `quota` field (scaled integer) is preferred; when it is
 * absent/zero the amount is parsed out of the item's free-text content.
 *
 * @returns Rounded total income, or null when no log request succeeded at
 *          all (so callers can tell "no income" from "endpoint unreachable").
 */
async function fetchTodayIncomeFromLogs(params: {
  baseUrl: string;
  accessToken: string;
  platform?: string | null;
  platformUserId?: number;
}): Promise<number | null> {
  const baseUrl = params.baseUrl.trim();
  const accessToken = params.accessToken.trim();
  if (!baseUrl || !accessToken) return null;
  // Dynamic import keeps undici lazy and lets tests mock it.
  const { fetch } = await import('undici');
  const { start, end } = getTodayUnixSecondsRange();
  const conversionFactor = resolveQuotaConversionFactor(params.platform);
  const headers: Record<string, string> = {
    Authorization: `Bearer ${accessToken}`,
    'Content-Type': 'application/json',
  };
  // Some deployments expect the numeric platform user id in this header.
  if (typeof params.platformUserId === 'number' && Number.isFinite(params.platformUserId)) {
    headers['New-Api-User'] = String(Math.trunc(params.platformUserId));
  }
  let hasAnyLogResponse = false;
  let totalIncome = 0;
  for (const logType of INCOME_LOG_TYPES) {
    let page = 1;
    // LOG_MAX_PAGES bounds the walk even if the reported total is bogus.
    while (page <= LOG_MAX_PAGES) {
      const query = new URLSearchParams({
        p: String(page),
        page_size: String(LOG_PAGE_SIZE),
        type: String(logType),
        token_name: '',
        model_name: '',
        start_timestamp: String(start),
        end_timestamp: String(end),
        group: '',
      });
      try {
        const response = await fetch(`${baseUrl}/api/log/self?${query.toString()}`, {
          method: 'GET',
          headers,
        });
        // Any non-OK / malformed page ends pagination for this log type only.
        if (!response.ok) break;
        const payload = await response.json().catch(() => null);
        if (!payload || typeof payload !== 'object') break;
        hasAnyLogResponse = true;
        const items = extractLogItems(payload);
        for (const item of items) {
          // Prefer the scaled quota field over parsing the content text.
          const quotaRaw = parsePositiveNumber(item?.quota);
          if (quotaRaw > 0) {
            totalIncome += quotaRaw / conversionFactor;
            continue;
          }
          totalIncome += parseIncomeFromContent(item?.content);
        }
        const total = extractLogTotal(payload);
        if (items.length === 0) break;
        if (total != null && page * LOG_PAGE_SIZE >= total) break;
        page += 1;
      } catch {
        // Network failure: stop paging this type; other types may still work.
        break;
      }
    }
  }
  if (!hasAnyLogResponse) return null;
  // Round to 6 decimals to strip floating-point noise before persisting.
  return Math.round(totalIncome * 1_000_000) / 1_000_000;
}
/**
 * Attempt to obtain a fresh access token using the stored relogin
 * credentials for this account.
 *
 * Returns the new token on success; null when relogin is not configured,
 * the password cipher cannot be decrypted, or the platform login fails.
 * On success the token is persisted immediately and an 'expired' account
 * is flipped back to 'active'.
 */
async function tryAutoRelogin(account: any, site: any): Promise<string | null> {
  const adapter = getAdapter(site.platform);
  if (!adapter) return null;
  const relogin = getAutoReloginConfig(account.extraConfig);
  if (!relogin) return null;
  const password = decryptAccountPassword(relogin.passwordCipher);
  if (!password) return null;
  const loginResult = await adapter.login(site.url, relogin.username, password);
  if (!loginResult.success || !loginResult.accessToken) return null;
  db.update(schema.accounts)
    .set({
      accessToken: loginResult.accessToken,
      // A successful login proves the credentials work again.
      status: account.status === 'expired' ? 'active' : account.status,
      updatedAt: new Date().toISOString(),
    })
    .where(eq(schema.accounts.id, account.id))
    .run();
  return loginResult.accessToken;
}
/**
 * Refresh the cached balance for a single account.
 *
 * Flow: load account+site; short-circuit disabled sites; fetch balance via
 * the platform adapter, retrying once after an auto-relogin when the error
 * looks auth-related; optionally backfill today's income from the log
 * endpoint; persist the numbers; update runtime health; and raise a
 * low-balance event/notification when balance < 1.
 *
 * @returns BalanceInfo on success, a `skipped` marker object for disabled
 *          sites, null when the account or adapter is missing.
 * @throws Re-throws the adapter error when the balance cannot be fetched
 *         (after health/alert bookkeeping in handleBalanceError).
 */
export async function refreshBalance(accountId: number) {
  const rows = db
    .select()
    .from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.accounts.id, accountId))
    .all();
  if (rows.length === 0) return null;
  const account = rows[0].accounts;
  const site = rows[0].sites;
  // Disabled site: mark health and return the last cached numbers unchanged.
  if (isSiteDisabled(site.status)) {
    setAccountRuntimeHealth(account.id, {
      state: 'disabled',
      reason: '站点已禁用',
      source: 'balance',
    });
    return {
      balance: account.balance ?? 0,
      used: account.balanceUsed ?? 0,
      quota: account.quota ?? 0,
      skipped: true,
      reason: 'site_disabled',
    };
  }
  const adapter = getAdapter(site.platform);
  if (!adapter) return null;
  const platformUserId = resolvePlatformUserId(account.extraConfig, account.username);
  let activeAccessToken = account.accessToken;
  let balanceInfo: BalanceInfo | null = null;
  const readBalance = async (token: string) => adapter.getBalance(site.url, token, platformUserId);
  // Shared failure path: record unhealthy state, escalate auth errors,
  // then re-throw the original error to the caller.
  const handleBalanceError = async (err: any) => {
    const message = err?.message || 'unknown error';
    setAccountRuntimeHealth(account.id, {
      state: 'unhealthy',
      reason: message,
      source: 'balance',
    });
    if (shouldReportExpired(message)) {
      await reportTokenExpired({
        accountId: account.id,
        username: account.username,
        siteName: site.name,
        detail: message,
      });
    }
    throw err;
  };
  try {
    balanceInfo = await readBalance(activeAccessToken);
  } catch (err: any) {
    const message = err?.message || 'unknown error';
    // Auth-looking failure: try one relogin, then exactly one retry.
    if (shouldAttemptAutoRelogin(message)) {
      const refreshedAccessToken = await tryAutoRelogin(account, site);
      if (refreshedAccessToken) {
        activeAccessToken = refreshedAccessToken;
        try {
          balanceInfo = await readBalance(activeAccessToken);
        } catch (retryErr: any) {
          await handleBalanceError(retryErr);
        }
      } else {
        await handleBalanceError(err);
      }
    } else {
      await handleBalanceError(err);
    }
  }
  if (!balanceInfo) {
    throw new Error('failed to fetch balance');
  }
  // Backfill today's income from the log endpoint when the balance API did
  // not supply it and the platform supports the fallback; best-effort only.
  if (
    !(typeof balanceInfo.todayIncome === 'number' && Number.isFinite(balanceInfo.todayIncome)) &&
    supportsTodayIncomeLogFallback(site.platform)
  ) {
    try {
      const fallbackIncome = await fetchTodayIncomeFromLogs({
        baseUrl: site.url,
        accessToken: activeAccessToken,
        platform: site.platform,
        platformUserId,
      });
      if (typeof fallbackIncome === 'number' && Number.isFinite(fallbackIncome)) {
        balanceInfo.todayIncome = fallbackIncome;
      }
    } catch {}
  }
  // A site whose checkin endpoint is unsupported stays 'degraded' even after
  // a successful refresh — do not flip it back to healthy.
  const existingRuntimeHealth = extractRuntimeHealth(account.extraConfig);
  const keepUnsupportedCheckinDegraded = isUnsupportedCheckinRuntimeHealth(existingRuntimeHealth);
  const updates: Record<string, unknown> = {
    balance: balanceInfo.balance,
    balanceUsed: balanceInfo.used,
    quota: balanceInfo.quota,
    // A successful read proves a previously 'expired' account works again.
    status: account.status === 'expired' ? 'active' : account.status,
    lastBalanceRefresh: new Date().toISOString(),
    updatedAt: new Date().toISOString(),
  };
  if (typeof balanceInfo.todayIncome === 'number' && Number.isFinite(balanceInfo.todayIncome)) {
    updates.extraConfig = updateTodayIncomeSnapshot(account.extraConfig, balanceInfo.todayIncome);
  }
  db.update(schema.accounts)
    .set(updates)
    .where(eq(schema.accounts.id, accountId))
    .run();
  setAccountRuntimeHealth(account.id, {
    state: keepUnsupportedCheckinDegraded ? 'degraded' : 'healthy',
    reason: keepUnsupportedCheckinDegraded
      ? (existingRuntimeHealth?.reason || '\u7ad9\u70b9\u4e0d\u652f\u6301\u7b7e\u5230\u63a5\u53e3')
      : '\u4f59\u989d\u5237\u65b0\u6210\u529f',
    source: keepUnsupportedCheckinDegraded
      ? (existingRuntimeHealth?.source || 'checkin')
      : 'balance',
  });
  // Low-balance threshold: record an event and push a notification.
  if (balanceInfo.balance < 1) {
    db.insert(schema.events).values({
      type: 'balance',
      title: '余额不足',
      message: `${account.username || 'ID:' + accountId} 余额不足: $${balanceInfo.balance.toFixed(2)}`,
      level: 'warning',
      relatedId: accountId,
      relatedType: 'account',
    }).run();
    await sendNotification(
      '余额不足提醒',
      `${account.username || 'ID:' + accountId} 余额不足: $${balanceInfo.balance.toFixed(2)}`,
      'warning',
    );
  }
  return balanceInfo;
}
/**
 * Refresh balances for every active account concurrently.
 *
 * Each account is refreshed independently; a failure for one account is
 * recorded as a `null` balance instead of rejecting the whole batch.
 *
 * @returns One `{ accountId, balance }` entry per active account, in DB row
 *          order. (Previously entries were pushed into a shared array from
 *          concurrent callbacks, so the order depended on task completion.)
 */
export async function refreshAllBalances() {
  const rows = db
    .select()
    .from(schema.accounts)
    .where(eq(schema.accounts.status, 'active'))
    .all();
  // Map each row to a promise and let Promise.all collect the results:
  // this preserves input order and avoids mutating a shared array from
  // concurrent callbacks.
  const results: Array<{ accountId: number; balance: number | null }> = await Promise.all(
    rows.map(async (account) => {
      try {
        const info = await refreshBalance(account.id);
        return { accountId: account.id, balance: info?.balance ?? null };
      } catch {
        // Swallow per-account failures so one bad account cannot abort the batch.
        return { accountId: account.id, balance: null };
      }
    }),
  );
  return results;
}
@@ -0,0 +1,21 @@
import { describe, expect, it } from 'vitest';
import { parseCheckinRewardAmount } from './checkinRewardParser.js';
// Unit tests pinning parseCheckinRewardAmount's contract.
describe('checkinRewardParser', () => {
  // Raw numbers and plain numeric strings pass straight through.
  it('parses numeric reward values', () => {
    expect(parseCheckinRewardAmount(3.5)).toBe(3.5);
    expect(parseCheckinRewardAmount('12')).toBe(12);
  });
  // Amounts embedded in free-form text are extracted from the first number.
  it('parses reward strings with text wrappers', () => {
    expect(parseCheckinRewardAmount('奖励 +2.75')).toBe(2.75);
    expect(parseCheckinRewardAmount('checkin success, reward=5')).toBe(5);
  });
  // Missing, empty, non-numeric, and negative inputs all collapse to 0.
  it('returns zero for missing or non-positive values', () => {
    expect(parseCheckinRewardAmount('')).toBe(0);
    expect(parseCheckinRewardAmount(null)).toBe(0);
    expect(parseCheckinRewardAmount('checked in')).toBe(0);
    expect(parseCheckinRewardAmount('-1')).toBe(0);
  });
});
@@ -0,0 +1,28 @@
/** Coerce a number or non-empty numeric string to a finite number; anything else yields null. */
function toFiniteNumber(value: unknown): number | null {
  if (typeof value === 'number') {
    return Number.isFinite(value) ? value : null;
  }
  if (typeof value !== 'string') return null;
  const trimmed = value.trim();
  if (!trimmed) return null;
  const parsed = Number(trimmed);
  return Number.isFinite(parsed) ? parsed : null;
}
/**
 * Extract a positive reward amount from a check-in response value.
 *
 * Accepts raw numbers, numeric strings, or free-form text containing a
 * number (thousands separators stripped before matching). Returns 0 when
 * no strictly positive amount can be found.
 */
export function parseCheckinRewardAmount(value: unknown): number {
  const direct = toFiniteNumber(value);
  if (direct != null) {
    return direct > 0 ? direct : 0;
  }
  if (typeof value !== 'string') return 0;
  const text = value.trim();
  if (!text) return 0;
  const match = text.replace(/,/g, '').match(/[-+]?\d+(?:\.\d+)?/);
  if (!match) return 0;
  const amount = Number(match[0]);
  return Number.isFinite(amount) && amount > 0 ? amount : 0;
}
+103
View File
@@ -0,0 +1,103 @@
import cron from 'node-cron';
import { eq } from 'drizzle-orm';
import { config } from '../config.js';
import { db, schema } from '../db/index.js';
import { refreshAllBalances } from './balanceService.js';
import { checkinAll } from './checkinService.js';
import { refreshModelsAndRebuildRoutes } from './modelService.js';
import { sendNotification } from './notifyService.js';
import { buildDailySummaryNotification, collectDailySummaryMetrics } from './dailySummaryService.js';
// Handles to the live cron jobs, kept so each can be stopped and replaced
// when its schedule is updated at runtime.
let checkinTask: cron.ScheduledTask | null = null;
let balanceTask: cron.ScheduledTask | null = null;
let dailySummaryTask: cron.ScheduledTask | null = null;
// Fallback daily-summary schedule: 23:58 every day (process-local timezone).
const DAILY_SUMMARY_DEFAULT_CRON = '58 23 * * *';
/**
 * Read a cron expression override from the settings table.
 * Falls back to the given default when the row is missing, its value is not
 * valid JSON, or the decoded value is not a valid cron string.
 */
function resolveCronSetting(settingKey: string, fallback: string): string {
  try {
    const row = db.select().from(schema.settings).where(eq(schema.settings.key, settingKey)).get();
    if (!row?.value) return fallback;
    const parsed = JSON.parse(row.value);
    if (typeof parsed === 'string' && cron.validate(parsed)) {
      return parsed;
    }
  } catch {
    // Malformed JSON or DB error — silently use the fallback schedule.
  }
  return fallback;
}
/** Build (and start) the cron job that runs the batch check-in on the given schedule. */
function createCheckinTask(cronExpr: string) {
  const runCheckin = async () => {
    console.log(`[Scheduler] Running check-in at ${new Date().toISOString()}`);
    try {
      const results = await checkinAll();
      const succeeded = results.filter((entry) => entry.result.success).length;
      console.log(`[Scheduler] Check-in complete: ${succeeded} success, ${results.length - succeeded} failed`);
    } catch (err) {
      // Never let a batch failure escape into node-cron.
      console.error('[Scheduler] Check-in error:', err);
    }
  };
  return cron.schedule(cronExpr, runCheckin);
}
/** Build (and start) the cron job that refreshes balances, then rebuilds model routes. */
function createBalanceTask(cronExpr: string) {
  const runBalanceRefresh = async () => {
    console.log(`[Scheduler] Refreshing balances at ${new Date().toISOString()}`);
    try {
      // Routes are rebuilt right after the refresh so they reflect fresh data.
      await refreshAllBalances();
      await refreshModelsAndRebuildRoutes();
      console.log('[Scheduler] Balance refresh complete');
    } catch (err) {
      console.error('[Scheduler] Balance refresh error:', err);
    }
  };
  return cron.schedule(cronExpr, runBalanceRefresh);
}
/** Build (and start) the cron job that sends the end-of-day summary notification. */
function createDailySummaryTask(cronExpr: string) {
  const runDailySummary = async () => {
    console.log(`[Scheduler] Sending daily summary at ${new Date().toISOString()}`);
    try {
      const metrics = collectDailySummaryMetrics();
      const { title, message } = buildDailySummaryNotification(metrics);
      // Per the option names: skip throttling, require a configured channel,
      // and surface delivery failures so they land in the catch below.
      await sendNotification(title, message, 'info', {
        bypassThrottle: true,
        requireChannel: true,
        throwOnFailure: true,
      });
      console.log(`[Scheduler] Daily summary sent: ${title}`);
    } catch (err) {
      console.error('[Scheduler] Daily summary error:', err);
    }
  };
  return cron.schedule(cronExpr, runDailySummary);
}
/**
 * Resolve the effective cron expressions (DB override wins over the config
 * default), sync them back onto the config object, and start all three
 * scheduled jobs.
 */
export function startScheduler() {
  const checkinCron = resolveCronSetting('checkin_cron', config.checkinCron);
  const balanceCron = resolveCronSetting('balance_refresh_cron', config.balanceRefreshCron);
  const summaryCron = resolveCronSetting('daily_summary_cron', DAILY_SUMMARY_DEFAULT_CRON);
  // Keep config in sync so later reads reflect the active schedules.
  config.checkinCron = checkinCron;
  config.balanceRefreshCron = balanceCron;
  checkinTask = createCheckinTask(checkinCron);
  balanceTask = createBalanceTask(balanceCron);
  dailySummaryTask = createDailySummaryTask(summaryCron);
  console.log(`[Scheduler] Check-in cron: ${checkinCron}`);
  console.log(`[Scheduler] Balance refresh cron: ${balanceCron}`);
  console.log(`[Scheduler] Daily summary cron: ${summaryCron}`);
}
/**
 * Validate and apply a new check-in schedule at runtime.
 * @throws Error when the expression is not valid cron syntax.
 */
export function updateCheckinCron(cronExpr: string) {
  if (!cron.validate(cronExpr)) {
    throw new Error(`Invalid cron: ${cronExpr}`);
  }
  config.checkinCron = cronExpr;
  // Stop the old job before swapping in one with the new schedule.
  checkinTask?.stop();
  checkinTask = createCheckinTask(cronExpr);
}
/**
 * Validate and apply a new balance-refresh schedule at runtime.
 * @throws Error when the expression is not valid cron syntax.
 */
export function updateBalanceRefreshCron(cronExpr: string) {
  if (!cron.validate(cronExpr)) {
    throw new Error(`Invalid cron: ${cronExpr}`);
  }
  config.balanceRefreshCron = cronExpr;
  // Stop the old job before swapping in one with the new schedule.
  balanceTask?.stop();
  balanceTask = createBalanceTask(cronExpr);
}
@@ -0,0 +1,321 @@
import { beforeEach, describe, expect, it, vi } from 'vitest';
// Spies standing in for the platform adapter and every collaborating
// service; declared before the hoisted vi.mock factories that close over them.
const adapterMock = {
  checkin: vi.fn(),
  login: vi.fn(),
};
const notifyMock = vi.fn();
const reportTokenExpiredMock = vi.fn();
const refreshBalanceMock = vi.fn();
const decryptPasswordMock = vi.fn();
const selectAllMock = vi.fn();
const insertValuesMock = vi.fn();
const updateSetMock = vi.fn();
// Minimal fluent-chain stub of the drizzle db module: only the call shapes
// checkinService actually uses are modelled, each terminating in one of the
// spies above so tests can inspect reads and writes.
vi.mock('../db/index.js', () => {
  const selectChain = {
    all: () => selectAllMock(),
    where: () => selectChain,
    innerJoin: () => selectChain,
    from: () => selectChain,
  };
  const insertChain = {
    run: () => ({}),
    values: (...args: unknown[]) => {
      insertValuesMock(...args);
      return insertChain;
    },
  };
  const updateWhereChain = {
    run: () => ({}),
  };
  const updateSetChain = {
    where: () => updateWhereChain,
  };
  return {
    db: {
      select: () => selectChain,
      insert: () => insertChain,
      update: () => ({
        set: (updates: Record<string, unknown>) => {
          updateSetMock(updates);
          return updateSetChain;
        },
      }),
    },
    // Column names are placeholder strings: the stubbed chains ignore them.
    schema: {
      accounts: { id: 'id', siteId: 'siteId', checkinEnabled: 'checkinEnabled', status: 'status' },
      sites: { id: 'id' },
      checkinLogs: {},
      events: {},
    },
  };
});
// Route the remaining service dependencies through the spies declared above.
vi.mock('./platforms/index.js', () => ({
  getAdapter: () => adapterMock,
}));
vi.mock('./notifyService.js', () => ({
  sendNotification: (...args: unknown[]) => notifyMock(...args),
}));
vi.mock('./alertService.js', () => ({
  reportTokenExpired: (...args: unknown[]) => reportTokenExpiredMock(...args),
}));
vi.mock('./balanceService.js', () => ({
  refreshBalance: (...args: unknown[]) => refreshBalanceMock(...args),
}));
vi.mock('./accountCredentialService.js', () => ({
  decryptAccountPassword: (...args: unknown[]) => decryptPasswordMock(...args),
}));
// End-to-end tests of checkinAccount() against mocked DB rows and a mocked
// platform adapter: auto-relogin retry, platform-user-id guessing, reward
// inference, and the "already checked in" / "unsupported" / "manual
// verification" classification paths.
describe('checkinService auto relogin', () => {
  beforeEach(() => {
    // Reset all spies so call history never leaks between tests.
    adapterMock.checkin.mockReset();
    adapterMock.login.mockReset();
    notifyMock.mockReset();
    reportTokenExpiredMock.mockReset();
    refreshBalanceMock.mockReset();
    decryptPasswordMock.mockReset();
    selectAllMock.mockReset();
    insertValuesMock.mockReset();
    updateSetMock.mockReset();
  });
  // Auth failure on the first attempt triggers one relogin and one retry;
  // the fresh token must also be persisted.
  it('retries checkin once after auto relogin when access token is missing', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 1,
          username: 'linuxdo_7659',
          accessToken: 'expired-token',
          status: 'active',
          extraConfig: JSON.stringify({
            autoRelogin: { username: 'linuxdo_7659', passwordCipher: 'cipher' },
          }),
        },
        sites: {
          id: 3,
          name: 'kfc',
          url: 'https://kfc-api.sxxe.net',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.checkin
      .mockResolvedValueOnce({ success: false, message: '无权进行此操作,未登录且未提供 access token' })
      .mockResolvedValueOnce({ success: true, message: 'checked in' });
    decryptPasswordMock.mockReturnValue('plain-password');
    adapterMock.login.mockResolvedValue({ success: true, accessToken: 'fresh-token' });
    const { checkinAccount } = await import('./checkinService.js');
    const result = await checkinAccount(1);
    expect(result.success).toBe(true);
    expect(adapterMock.login).toHaveBeenCalledTimes(1);
    expect(adapterMock.checkin).toHaveBeenCalledTimes(2);
    expect(adapterMock.checkin.mock.calls[0][1]).toBe('expired-token');
    expect(adapterMock.checkin.mock.calls[1][1]).toBe('fresh-token');
    // Platform user id resolved from the username suffix is forwarded.
    expect(adapterMock.checkin.mock.calls[0][2]).toBe(7659);
    expect(updateSetMock).toHaveBeenCalledWith(expect.objectContaining({ accessToken: 'fresh-token' }));
  });
  // With no extraConfig, the user id is guessed from the 'linuxdo_<id>'
  // username pattern and passed to the adapter.
  it('passes guessed platform user id when config does not include it', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 2,
          username: 'linuxdo_11494',
          accessToken: 'token',
          status: 'active',
          extraConfig: null,
        },
        sites: {
          id: 4,
          name: 'wong',
          url: 'https://wzw.pp.ua',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.checkin.mockResolvedValue({ success: true, message: 'checked in' });
    const { checkinAccount } = await import('./checkinService.js');
    await checkinAccount(2);
    expect(adapterMock.checkin).toHaveBeenCalledTimes(1);
    expect(adapterMock.checkin.mock.calls[0][2]).toBe(11494);
  });
  // A Chinese success message must not be misclassified by the
  // already-checked-in / skip heuristics.
  it('keeps successful checkin as success when message is 签到成功', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 12,
          username: 'linuxdo_5566',
          accessToken: 'token',
          status: 'active',
          extraConfig: null,
        },
        sites: {
          id: 12,
          name: 'demo',
          url: 'https://example.com',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.checkin.mockResolvedValue({ success: true, message: '签到成功' });
    const { checkinAccount } = await import('./checkinService.js');
    const result = await checkinAccount(12);
    expect(result.success).toBe(true);
    // First insert is the checkin log row; it must record 'success'.
    const firstInsertPayload = insertValuesMock.mock.calls[0]?.[0] as Record<string, unknown>;
    expect(firstInsertPayload?.status).toBe('success');
  });
  // No reward amount in the response text: the reward is inferred from the
  // balance delta reported by refreshBalance (10 -> 12.5 = 2.5).
  it('infers reward from balance delta when checkin reward text is empty', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 13,
          username: 'linuxdo_7788',
          accessToken: 'token',
          status: 'active',
          balance: 10,
          extraConfig: null,
        },
        sites: {
          id: 13,
          name: 'demo',
          url: 'https://example.com',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.checkin.mockResolvedValue({ success: true, message: 'checkin success' });
    refreshBalanceMock.mockResolvedValue({ balance: 12.5, used: 0, quota: 12.5 });
    const { checkinAccount } = await import('./checkinService.js');
    await checkinAccount(13);
    const firstInsertPayload = insertValuesMock.mock.calls[0]?.[0] as Record<string, unknown>;
    expect(Number(firstInsertPayload?.reward)).toBeCloseTo(2.5, 6);
  });
  // "Already checked in" responses count as success and must not notify.
  it('treats already checked in responses as successful checkins', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 9,
          username: 'linuxdo_9999',
          accessToken: 'token',
          status: 'active',
          extraConfig: null,
        },
        sites: {
          id: 9,
          name: 'demo',
          url: 'https://example.com',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.checkin.mockResolvedValue({ success: false, message: '今天已经签到过啦' });
    const { checkinAccount } = await import('./checkinService.js');
    const result = await checkinAccount(9);
    expect(result.success).toBe(true);
    expect(result.status).toBe('success');
    const firstInsertPayload = insertValuesMock.mock.calls[0]?.[0] as Record<string, unknown>;
    expect(firstInsertPayload?.status).toBe('success');
    expect(notifyMock).not.toHaveBeenCalled();
  });
  // A 404 on the checkin route marks the account as 'skipped' (endpoint not
  // offered by this site) and avoids the balance refresh and notifications.
  it('treats unsupported checkin endpoint responses as skipped', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 10,
          username: 'linuxdo_131936',
          accessToken: 'token',
          status: 'active',
          extraConfig: null,
        },
        sites: {
          id: 10,
          name: 'anyrouter',
          url: 'https://anyrouter.top',
          platform: 'anyrouter',
        },
      },
    ]);
    adapterMock.checkin.mockResolvedValue({
      success: false,
      message: 'HTTP 404: {"error":{"message":"Invalid URL (POST /api/user/checkin)"}}',
    });
    const { checkinAccount } = await import('./checkinService.js');
    const result = await checkinAccount(10);
    expect(result.success).toBe(true);
    expect(result.status).toBe('skipped');
    const firstInsertPayload = insertValuesMock.mock.calls[0]?.[0] as Record<string, unknown>;
    expect(firstInsertPayload?.status).toBe('skipped');
    expect(refreshBalanceMock).not.toHaveBeenCalled();
    expect(notifyMock).not.toHaveBeenCalled();
  });
  // Turnstile challenges need a human; the attempt is logged as skipped with
  // an explanatory message and no alerting side effects.
  it('treats turnstile-required responses as skipped', async () => {
    selectAllMock.mockReturnValue([
      {
        accounts: {
          id: 14,
          username: 'linuxdo_10277',
          accessToken: 'token',
          status: 'active',
          extraConfig: null,
        },
        sites: {
          id: 14,
          name: 'run-anytime',
          url: 'https://runanytime.hxi.me',
          platform: 'new-api',
        },
      },
    ]);
    adapterMock.checkin.mockResolvedValue({
      success: false,
      message: 'Turnstile token 为空',
    });
    const { checkinAccount } = await import('./checkinService.js');
    const result = await checkinAccount(14);
    expect(result.success).toBe(true);
    expect(result.status).toBe('skipped');
    const firstInsertPayload = insertValuesMock.mock.calls[0]?.[0] as Record<string, unknown>;
    expect(firstInsertPayload?.status).toBe('skipped');
    expect(firstInsertPayload?.message).toBe('站点开启了 Turnstile 校验,需要人工签到');
    expect(refreshBalanceMock).not.toHaveBeenCalled();
    expect(notifyMock).not.toHaveBeenCalled();
  });
});
+336
View File
@@ -0,0 +1,336 @@
import { db, schema } from '../db/index.js';
import { getAdapter } from './platforms/index.js';
import { eq, and } from 'drizzle-orm';
import { sendNotification } from './notifyService.js';
import { isCloudflareChallenge, isTokenExpiredError } from './alertRules.js';
import { reportTokenExpired } from './alertService.js';
import { refreshBalance } from './balanceService.js';
import { parseCheckinRewardAmount } from './checkinRewardParser.js';
import {
getAutoReloginConfig,
getPlatformUserIdFromExtraConfig,
guessPlatformUserIdFromUsername,
mergeAccountExtraConfig,
resolvePlatformUserId,
} from './accountExtraConfig.js';
import { decryptAccountPassword } from './accountCredentialService.js';
import { setAccountRuntimeHealth } from './accountHealthService.js';
// Outcome bucket recorded in checkin logs: completed, errored, or intentionally not run.
type CheckinExecutionStatus = 'success' | 'failed' | 'skipped';
// Only an explicit 'disabled' marks a site as disabled; null/undefined/empty
// statuses are treated as 'active'.
function isSiteDisabled(status?: string | null): boolean {
  switch (status || 'active') {
    case 'disabled':
      return true;
    default:
      return false;
  }
}
/**
 * Detect responses meaning "this account already checked in today", which
 * are treated as success rather than failure. Matches common English
 * phrasings case-insensitively and a list of Chinese variants verbatim.
 */
function isAlreadyCheckedInMessage(message?: string | null): boolean {
  if (!message) return false;
  const text = message.trim();
  if (!text) return false;
  const lowered = text.toLowerCase();
  const englishMarkers = ['already checked in', 'already signed', 'already sign in'];
  if (englishMarkers.some((marker) => lowered.includes(marker))) return true;
  const chineseMarkers = [
    '\u4eca\u65e5\u5df2\u7b7e\u5230',
    '\u4eca\u5929\u5df2\u7b7e\u5230',
    '\u4eca\u5929\u5df2\u7ecf\u7b7e\u5230',
    '\u4eca\u65e5\u5df2\u7ecf\u7b7e\u5230',
    '\u5df2\u7ecf\u7b7e\u5230',
    '\u5df2\u7b7e\u5230',
    '\u91cd\u590d\u7b7e\u5230',
    '\u7b7e\u5230\u8fc7',
  ];
  return chineseMarkers.some((marker) => text.includes(marker));
}
/**
 * Detect error messages indicating the site simply does not expose a
 * checkin endpoint (404 / invalid-URL shapes), so the attempt should be
 * recorded as skipped rather than failed.
 */
function isUnsupportedCheckinMessage(message?: string | null): boolean {
  if (!message) return false;
  const lowered = message.toLowerCase();
  if (lowered.includes('invalid url (post /api/user/checkin)')) return true;
  if (lowered.includes('checkin endpoint not found')) return true;
  return lowered.includes('http 404') && lowered.includes('/api/user/checkin');
}
/**
 * Detect check-in failures caused by a Turnstile verification requirement —
 * these need a human in the loop, so the caller records them as skipped.
 */
function isManualVerificationRequiredMessage(message?: string | null): boolean {
  if (!message) return false;
  const lowered = message.toLowerCase();
  if (lowered.includes('turnstile token \u4e3a\u7a7a')) return true;
  if (!lowered.includes('turnstile')) return false;
  return lowered.includes('token') || lowered.includes('\u6821\u9a8c') || lowered.includes('\u9a8c\u8bc1');
}
/**
 * Decide whether a failed check-in looks like an auth problem worth one
 * auto-relogin attempt: a recognized token-expiry error, or a message
 * mentioning the access token / New-Api-User header (case-insensitive).
 */
function shouldAttemptAutoRelogin(message?: string | null): boolean {
  if (!message) return false;
  if (isTokenExpiredError({ message })) return true;
  const lowered = message.toLowerCase();
  return ['new-api-user', 'access token'].some((marker) => lowered.includes(marker));
}
/**
 * Infer a check-in reward as the positive balance delta between the value
 * cached before check-in and the freshly fetched one. Returns 0 when either
 * side is not a finite number or when the delta is non-positive.
 */
function inferRewardFromBalanceDelta(previousBalance: unknown, latestBalance: unknown): number {
  const asFinite = (value: unknown): number | null =>
    typeof value === 'number' && Number.isFinite(value) ? value : null;
  const before = asFinite(previousBalance);
  const after = asFinite(latestBalance);
  if (before == null || after == null) return 0;
  const delta = after - before;
  if (!Number.isFinite(delta) || delta <= 0) return 0;
  // Round to 6 decimal places to strip floating-point noise from the delta.
  return Math.round(delta * 1_000_000) / 1_000_000;
}
/**
 * Attempt to obtain a fresh access token for this account using its stored
 * relogin credentials (mirrors the helper of the same name in
 * balanceService).
 *
 * Returns the new token on success; null when relogin is not configured,
 * the password cipher cannot be decrypted, or the platform login fails.
 * On success the token is persisted immediately and an 'expired' account
 * is flipped back to 'active'.
 */
async function tryAutoRelogin(account: any, site: any): Promise<string | null> {
  const adapter = getAdapter(site.platform);
  if (!adapter) return null;
  const relogin = getAutoReloginConfig(account.extraConfig);
  if (!relogin) return null;
  const password = decryptAccountPassword(relogin.passwordCipher);
  if (!password) return null;
  const result = await adapter.login(site.url, relogin.username, password);
  if (!result.success || !result.accessToken) return null;
  db.update(schema.accounts)
    .set({
      accessToken: result.accessToken,
      updatedAt: new Date().toISOString(),
      // A successful login proves the credentials work again.
      status: account.status === 'expired' ? 'active' : account.status,
    })
    .where(eq(schema.accounts.id, account.id))
    .run();
  return result.accessToken;
}
/**
 * Performs a single account check-in via its site's platform adapter.
 *
 * Flow:
 *  1. Load the account + site row; if the site is disabled, record a
 *     'skipped' log (and optionally an event) and return early.
 *  2. Run the adapter check-in; if the failure message looks token-related,
 *     attempt one auto re-login and retry the check-in once.
 *  3. Classify the result message (Cloudflare challenge, already-checked-in,
 *     unsupported endpoint, Turnstile manual verification) into an effective
 *     success / skipped / failed status.
 *  4. On effective success: update runtime health, persist lastCheckinAt
 *     (plus a guessed platform user id and re-activation where applicable),
 *     optionally refresh the balance and infer the reward from its delta.
 *  5. Always write a check-in log; optionally write an event; on failure,
 *     report token expiry and send notifications.
 *
 * @param accountId primary key of the account to check in
 * @param options.skipEvent when true, suppresses the events-table record
 *   (used by batch check-in, which aggregates results itself)
 * @returns the adapter result spread with normalized `success`/`status`
 *   fields (and `skipped: true` when the run was skipped)
 */
export async function checkinAccount(accountId: number, options?: { skipEvent?: boolean }) {
  const rows = db
    .select()
    .from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.accounts.id, accountId))
    .all();
  if (rows.length === 0) return { success: false, message: 'account not found' };
  const account = rows[0].accounts;
  const site = rows[0].sites;
  // Disabled site: mark health, log a skip, and return without touching the adapter.
  if (isSiteDisabled(site.status)) {
    setAccountRuntimeHealth(account.id, {
      state: 'disabled',
      reason: '\u7ad9\u70b9\u5df2\u7981\u7528', // "site disabled"
      source: 'checkin',
    });
    db.insert(schema.checkinLogs).values({
      accountId: account.id,
      status: 'skipped',
      message: 'site disabled',
    }).run();
    if (!options?.skipEvent) {
      db.insert(schema.events).values({
        type: 'checkin',
        title: 'checkin skipped',
        message: `${account.username || 'ID:' + accountId} @ ${site.name}: site disabled`,
        level: 'info',
        relatedId: accountId,
        relatedType: 'account',
      }).run();
    }
    return {
      success: true,
      status: 'skipped' as const,
      skipped: true,
      reason: 'site_disabled',
      message: 'site disabled',
    };
  }
  const adapter = getAdapter(site.platform);
  if (!adapter) return { success: false, status: 'failed' as const, message: `unsupported platform: ${site.platform}` };
  // Prefer the stored platform user id; otherwise try to guess one from the
  // username so it can be persisted after a successful check-in.
  const storedPlatformUserId = getPlatformUserIdFromExtraConfig(account.extraConfig);
  const guessedPlatformUserId = storedPlatformUserId
    ? undefined
    : guessPlatformUserIdFromUsername(account.username);
  const platformUserId = resolvePlatformUserId(account.extraConfig, account.username);
  let activeAccessToken = account.accessToken;
  let result = await adapter.checkin(site.url, activeAccessToken, platformUserId);
  // One auto re-login + retry when the failure looks token-related.
  if (!result.success && shouldAttemptAutoRelogin(result.message)) {
    const refreshedAccessToken = await tryAutoRelogin(account, site);
    if (refreshedAccessToken) {
      activeAccessToken = refreshedAccessToken;
      result = await adapter.checkin(site.url, activeAccessToken, platformUserId);
    }
  }
  // Classify the response message into the special-case buckets.
  const isCloudflare = isCloudflareChallenge(result.message);
  const alreadyCheckedIn = isAlreadyCheckedInMessage(result.message);
  const unsupportedCheckin = isUnsupportedCheckinMessage(result.message);
  const manualVerificationRequired = isManualVerificationRequiredMessage(result.message);
  // "Site has Turnstile verification enabled; manual check-in required"
  const manualVerificationMessage = '\u7ad9\u70b9\u5f00\u542f\u4e86 Turnstile \u6821\u9a8c\uff0c\u9700\u8981\u4eba\u5de5\u7b7e\u5230';
  const logMessage = manualVerificationRequired ? manualVerificationMessage : result.message;
  // already-checked-in / unsupported / manual-verification all count as
  // non-failures, but only some of them should refresh the balance.
  const effectiveSuccess = result.success || alreadyCheckedIn || unsupportedCheckin || manualVerificationRequired;
  const shouldRefreshBalance = result.success || alreadyCheckedIn;
  const directCheckinSuccess = result.success && !alreadyCheckedIn && !unsupportedCheckin;
  const normalizedStatus: CheckinExecutionStatus = effectiveSuccess
    ? ((unsupportedCheckin || manualVerificationRequired) ? 'skipped' : 'success')
    : 'failed';
  let logReward = result.reward;
  let refreshedBalanceInfo: Awaited<ReturnType<typeof refreshBalance>> | null = null;
  if (effectiveSuccess) {
    // 'degraded' marks accounts that cannot actually check in (unsupported
    // endpoint or Turnstile) even though we treat the run as non-failed.
    const healthState = (unsupportedCheckin || manualVerificationRequired) ? 'degraded' : 'healthy';
    const healthReason = unsupportedCheckin
      ? '\u7ad9\u70b9\u4e0d\u652f\u6301\u7b7e\u5230\u63a5\u53e3' // "site does not support check-in API"
      : manualVerificationRequired
        ? manualVerificationMessage
        : (alreadyCheckedIn ? '\u4eca\u65e5\u5df2\u7b7e\u5230' : (result.message || '\u7b7e\u5230\u6210\u529f')); // "already checked in today" / "check-in success"
    setAccountRuntimeHealth(account.id, {
      state: healthState,
      reason: healthReason,
      source: 'checkin',
    });
    const updates: Record<string, unknown> = {
      lastCheckinAt: new Date().toISOString(),
    };
    // Persist a guessed platform user id only when none was stored before.
    if (!storedPlatformUserId && guessedPlatformUserId) {
      updates.extraConfig = mergeAccountExtraConfig(account.extraConfig, {
        platformUserId: guessedPlatformUserId,
      });
    }
    if (account.status === 'expired') {
      updates.status = 'active';
      updates.updatedAt = new Date().toISOString();
    }
    db.update(schema.accounts)
      .set(updates)
      .where(eq(schema.accounts.id, accountId))
      .run();
    if (shouldRefreshBalance) {
      // Best-effort balance refresh; failures must not fail the check-in.
      try {
        refreshedBalanceInfo = await refreshBalance(account.id);
      } catch {}
    }
    // If the adapter did not report a parseable reward, fall back to the
    // observed balance delta (only for a genuine, direct check-in success).
    const parsedReward = parseCheckinRewardAmount(logReward) || parseCheckinRewardAmount(result.message);
    if (directCheckinSuccess && parsedReward <= 0) {
      const inferredReward = inferRewardFromBalanceDelta(account.balance, refreshedBalanceInfo?.balance);
      if (inferredReward > 0) {
        logReward = inferredReward.toString();
      }
    }
  }
  // The check-in log is written for every outcome (success, skipped, failed).
  db.insert(schema.checkinLogs).values({
    accountId: account.id,
    status: normalizedStatus,
    message: logMessage,
    reward: logReward,
  }).run();
  if (!options?.skipEvent) {
    db.insert(schema.events).values({
      type: 'checkin',
      title: effectiveSuccess
        ? (normalizedStatus === 'skipped' ? 'checkin skipped' : 'checkin success')
        : (isCloudflare ? 'checkin failed (cloudflare challenge)' : 'checkin failed'),
      message: `${account.username || 'ID:' + accountId} @ ${site.name}: ${logMessage}`,
      level: effectiveSuccess ? 'info' : 'error',
      relatedId: accountId,
      relatedType: 'account',
    }).run();
  }
  if (!effectiveSuccess) {
    setAccountRuntimeHealth(account.id, {
      state: 'unhealthy',
      reason: result.message || '\u7b7e\u5230\u5931\u8d25', // "check-in failed"
      source: 'checkin',
    });
    if (isTokenExpiredError({ message: result.message })) {
      await reportTokenExpired({
        accountId: account.id,
        username: account.username,
        siteName: site.name,
        detail: result.message,
      });
    }
    if (isCloudflare) {
      await sendNotification(
        'Cloudflare challenge',
        `${account.username || 'ID:' + accountId} @ ${site.name}: ${result.message}`,
        'warning',
      );
    }
    // NOTE: a Cloudflare failure that is neither unsupported nor manual
    // verification sends both the warning above and this error notification.
    if (!unsupportedCheckin && !manualVerificationRequired) {
      await sendNotification(
        'checkin failed',
        `${account.username || 'ID:' + accountId} @ ${site.name}: ${result.message}`,
        'error',
      );
    }
  }
  return {
    ...result,
    success: effectiveSuccess,
    status: normalizedStatus,
    ...(normalizedStatus === 'skipped' ? { skipped: true } : {}),
  };
}
/**
 * Runs check-in for every active account that has check-in enabled.
 *
 * Accounts are grouped by site: accounts on the same site run sequentially
 * (to avoid hammering one origin), while different sites run in parallel.
 *
 * Fix: `checkinAccount` awaits the adapter without a try/catch, so a thrown
 * error (e.g. a network failure) previously rejected the whole Promise.all
 * and discarded every collected result. Each per-account call is now wrapped
 * so a throw is recorded as a failed result and the batch keeps going.
 *
 * @returns one entry per processed account with its check-in result
 */
export async function checkinAll() {
  const rows = db
    .select()
    .from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(
      and(
        eq(schema.accounts.checkinEnabled, true),
        eq(schema.accounts.status, 'active'),
      ),
    )
    .all();
  const results: Array<{ accountId: number; username: string | null; site: string; result: any }> = [];
  // Bucket rows by site id so each site is processed serially.
  const grouped = new Map<number, typeof rows>();
  for (const row of rows) {
    const siteId = row.sites.id;
    if (!grouped.has(siteId)) grouped.set(siteId, []);
    grouped.get(siteId)!.push(row);
  }
  const promises = Array.from(grouped.values()).map(async (siteRows) => {
    for (const row of siteRows) {
      let r: any;
      try {
        r = await checkinAccount(row.accounts.id, { skipEvent: true });
      } catch (e: unknown) {
        // Keep the batch alive: record the failure for this account only.
        r = {
          success: false,
          status: 'failed' as const,
          message: e instanceof Error ? e.message : String(e),
        };
      }
      results.push({
        accountId: row.accounts.id,
        username: row.accounts.username,
        site: row.sites.name,
        result: r,
      });
    }
  });
  await Promise.all(promises);
  return results;
}
@@ -0,0 +1,32 @@
import { describe, expect, it } from 'vitest';
import { buildDailySummaryNotification, type DailySummaryMetrics } from './dailySummaryService.js';
describe('dailySummaryService', () => {
  it('builds readable daily summary notification text', () => {
    // Fixed metrics snapshot; the expected lines below must match exactly.
    const sampleMetrics: DailySummaryMetrics = {
      localDay: '2026-02-27',
      generatedAtLocal: '2026-02-27 23:58:00',
      timeZone: 'Asia/Shanghai',
      totalAccounts: 10,
      activeAccounts: 8,
      lowBalanceAccounts: 2,
      checkinTotal: 7,
      checkinSuccess: 5,
      checkinSkipped: 1,
      checkinFailed: 1,
      proxyTotal: 120,
      proxySuccess: 114,
      proxyFailed: 6,
      proxyTotalTokens: 987654,
      todaySpend: 12.345678,
      todayReward: 3.210987,
    };
    const notification = buildDailySummaryNotification(sampleMetrics);
    expect(notification.title).toBe('每日总结 2026-02-27');
    const expectedLines = [
      '生成时间: 2026-02-27 23:58:00 (Asia/Shanghai)',
      '签到统计: 总计 7 | 成功 5 | 跳过 1 | 失败 1',
      '代理统计: 总计 120 | 成功 114 | 失败 6',
      '费用统计: 支出 $12.345678 | 奖励 $3.210987 | 净值 $-9.134691',
    ];
    for (const line of expectedLines) {
      expect(notification.message).toContain(line);
    }
  });
});
+126
View File
@@ -0,0 +1,126 @@
import { and, eq, gte, lt } from 'drizzle-orm';
import { db, schema } from '../db/index.js';
import { getLocalDayRangeUtc, formatLocalDateTime, getResolvedTimeZone } from './localTimeService.js';
import { parseCheckinRewardAmount } from './checkinRewardParser.js';
import { estimateRewardWithTodayIncomeFallback } from './todayIncomeRewardService.js';
/**
 * Aggregated metrics for one local calendar day, produced by
 * collectDailySummaryMetrics and rendered by buildDailySummaryNotification.
 */
export type DailySummaryMetrics = {
  // Local calendar day, e.g. '2026-02-27'.
  localDay: string;
  // Local timestamp at which the summary was generated.
  generatedAtLocal: string;
  // IANA time zone the local day/time were resolved in.
  timeZone: string;
  // Account counts (restricted to accounts on active sites).
  totalAccounts: number;
  activeAccounts: number;
  // Accounts whose balance is below $1.
  lowBalanceAccounts: number;
  // Today's check-in log counts by status.
  checkinTotal: number;
  checkinSuccess: number;
  checkinSkipped: number;
  checkinFailed: number;
  // Today's proxy request counts and token usage.
  proxyTotal: number;
  proxySuccess: number;
  proxyFailed: number;
  proxyTotalTokens: number;
  // Today's estimated spend and check-in reward, rounded to 6 decimals.
  todaySpend: number;
  todayReward: number;
};
/** Rounds a number to 6 decimal places (half away from zero via Math.round). */
function round6(value: number): number {
  const SCALE = 1_000_000;
  return Math.round(value * SCALE) / SCALE;
}
/**
 * Collects all metrics for today's daily summary from the database.
 *
 * Scope: only accounts/logs belonging to sites with status 'active' are
 * counted. "Today" is the local calendar day for the configured time zone,
 * converted to a UTC range for the createdAt comparisons.
 *
 * @param now - reference time; defaults to the current time (injectable for tests)
 * @returns a DailySummaryMetrics snapshot with monetary values rounded to 6 decimals
 */
export function collectDailySummaryMetrics(now = new Date()): DailySummaryMetrics {
  const { localDay, startUtc, endUtc } = getLocalDayRangeUtc(now);
  // Accounts on active sites only.
  const accountRows = db.select().from(schema.accounts)
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(eq(schema.sites.status, 'active'))
    .all();
  const accounts = accountRows.map((row) => row.accounts);
  const activeAccounts = accounts.filter((account) => account.status === 'active').length;
  // "Low balance" threshold is < 1 (missing balance counts as 0, i.e. low).
  const lowBalanceAccounts = accounts.filter((account) => (account.balance || 0) < 1).length;
  // Today's check-in logs, joined back to accounts/sites to scope by site status.
  const todayCheckinRows = db.select().from(schema.checkinLogs)
    .innerJoin(schema.accounts, eq(schema.checkinLogs.accountId, schema.accounts.id))
    .innerJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(and(
      gte(schema.checkinLogs.createdAt, startUtc),
      lt(schema.checkinLogs.createdAt, endUtc),
      eq(schema.sites.status, 'active'),
    ))
    .all();
  const todayCheckins = todayCheckinRows.map((row) => row.checkin_logs);
  const checkinSkipped = todayCheckins.filter((checkin) => checkin.status === 'skipped').length;
  const checkinFailed = todayCheckins.filter((checkin) => checkin.status === 'failed').length;
  // Success is derived as the remainder; clamped to >= 0 in the return below.
  const checkinSuccess = todayCheckins.length - checkinSkipped - checkinFailed;
  // Per-account reward aggregation over today's *successful* check-ins:
  // rewardByAccount sums parseable rewards; the two count maps feed the
  // today-income fallback estimator.
  const rewardByAccount: Record<number, number> = {};
  const successCountByAccount: Record<number, number> = {};
  const parsedRewardCountByAccount: Record<number, number> = {};
  for (const row of todayCheckinRows) {
    const checkin = row.checkin_logs;
    if (checkin.status !== 'success') continue;
    const accountId = row.accounts.id;
    successCountByAccount[accountId] = (successCountByAccount[accountId] || 0) + 1;
    // Prefer the explicit reward column; fall back to parsing the message.
    const rewardValue = parseCheckinRewardAmount(checkin.reward) || parseCheckinRewardAmount(checkin.message);
    if (rewardValue <= 0) continue;
    rewardByAccount[accountId] = (rewardByAccount[accountId] || 0) + rewardValue;
    parsedRewardCountByAccount[accountId] = (parsedRewardCountByAccount[accountId] || 0) + 1;
  }
  // NOTE(review): these are leftJoins, but the where() condition on
  // sites.status filters out rows where the join produced NULL — proxy logs
  // with no account (or an account on no site) are therefore excluded.
  // Confirm orphan logs are intentionally dropped from the summary.
  const todayProxyRows = db.select().from(schema.proxyLogs)
    .leftJoin(schema.accounts, eq(schema.proxyLogs.accountId, schema.accounts.id))
    .leftJoin(schema.sites, eq(schema.accounts.siteId, schema.sites.id))
    .where(and(
      gte(schema.proxyLogs.createdAt, startUtc),
      lt(schema.proxyLogs.createdAt, endUtc),
      eq(schema.sites.status, 'active'),
    ))
    .all();
  const todayProxyLogs = todayProxyRows.map((row) => row.proxy_logs);
  const proxySuccess = todayProxyLogs.filter((log) => log.status === 'success').length;
  const proxyFailed = todayProxyLogs.filter((log) => log.status === 'failed').length;
  const proxyTotalTokens = todayProxyLogs.reduce((sum, log) => sum + (log.totalTokens || 0), 0);
  const todaySpend = todayProxyLogs.reduce((sum, log) => sum + (typeof log.estimatedCost === 'number' ? log.estimatedCost : 0), 0);
  // Per-account reward estimate with a today-income fallback for accounts
  // whose successful check-ins had no parseable reward.
  const todayReward = accounts.reduce((sum, account) => sum + estimateRewardWithTodayIncomeFallback({
    day: localDay,
    successCount: successCountByAccount[account.id] || 0,
    parsedRewardCount: parsedRewardCountByAccount[account.id] || 0,
    rewardSum: rewardByAccount[account.id] || 0,
    extraConfig: account.extraConfig,
  }), 0);
  return {
    localDay,
    generatedAtLocal: formatLocalDateTime(now),
    timeZone: getResolvedTimeZone(),
    totalAccounts: accounts.length,
    activeAccounts,
    lowBalanceAccounts,
    checkinTotal: todayCheckins.length,
    checkinSuccess: Math.max(0, checkinSuccess),
    checkinSkipped,
    checkinFailed,
    proxyTotal: todayProxyLogs.length,
    proxySuccess,
    proxyFailed,
    proxyTotalTokens,
    todaySpend: round6(todaySpend),
    todayReward: round6(todayReward),
  };
}
/**
 * Renders a DailySummaryMetrics snapshot into a notification payload.
 * The message is a fixed-order set of newline-joined summary lines; net
 * value is reward minus spend, rounded to 6 decimals before formatting.
 */
export function buildDailySummaryNotification(metrics: DailySummaryMetrics): { title: string; message: string } {
  const netValue = round6(metrics.todayReward - metrics.todaySpend);
  const lines: string[] = [];
  lines.push(`日期: ${metrics.localDay}`);
  lines.push(`生成时间: ${metrics.generatedAtLocal} (${metrics.timeZone})`);
  lines.push('');
  lines.push(`账号概览: 总计 ${metrics.totalAccounts} | 活跃 ${metrics.activeAccounts} | 低余额(<$1) ${metrics.lowBalanceAccounts}`);
  lines.push(`签到统计: 总计 ${metrics.checkinTotal} | 成功 ${metrics.checkinSuccess} | 跳过 ${metrics.checkinSkipped} | 失败 ${metrics.checkinFailed}`);
  lines.push(`代理统计: 总计 ${metrics.proxyTotal} | 成功 ${metrics.proxySuccess} | 失败 ${metrics.proxyFailed} | Tokens ${metrics.proxyTotalTokens.toLocaleString()}`);
  lines.push(`费用统计: 支出 $${metrics.todaySpend.toFixed(6)} | 奖励 $${metrics.todayReward.toFixed(6)} | 净值 $${netValue.toFixed(6)}`);
  return { title: `每日总结 ${metrics.localDay}`, message: lines.join('\n') };
}

Some files were not shown because too many files have changed in this diff Show More