Prepare protocol compatibility release

This commit is contained in:
cita-777
2026-05-08 00:32:50 +08:00
28 changed files with 2205 additions and 108 deletions
+4 -3
View File
@@ -310,8 +310,9 @@ helper 端使用:
补充一点:
- `Docker Hub` 里的**自动发现**仍然优先 `latest` / `main` / 稳定 SemVer 标签
- 如果你要部署 `dev`、分支名、临时标签或 `sha-*` 这类非稳定标签,直接在 Docker Hub 卡片底部用“手动部署 Docker Hub 标签”填写即可
- `Docker Hub` 里的**主候选**仍然优先 `latest` / `main` / 稳定 SemVer 标签
- 页面还会自动列出最近推送的 `dev`、分支名、临时标签或 `sha-*` 这类**非稳定标签**,并一起带出 digest
- 如果你要部署一个更老或更特殊、没出现在自动列表里的标签,仍然可以在 Docker Hub 卡片底部用“手动部署 Docker Hub 标签”填写
对已经跑起来的 K3s / Helm 用户来说,更新中心的日常配置主要就在这一页:
@@ -330,7 +331,7 @@ helper 端使用:
- 版本来源发现了可部署版本
- 如果 Docker Hub 显示的是 `latest @ sha256:...`,说明页面已经识别到 alias tag 当前指向的具体镜像 digest
- Deploy Helper 显示健康
4. 如果你要跟随稳定候选,直接点部署按钮;如果你要切到 `dev` / 分支 / 临时标签,就在 Docker Hub 卡片底部手动填写 tag,必要时连 digest 一起填
4. 如果你要跟随稳定候选,直接点部署按钮;如果你要切到最近的 `dev` / 分支 / 临时标签,可以直接点自动列出的那一项;只有更特殊的 tag 才需要手动填写,必要时连 digest 一起填
5. 在页面下方看部署日志
6. 如果升级后发现问题,可以直接在“回退历史”里点旧 revision 回滚;只要该 revision 当时记录了 digest,就会跟着一起回到对应镜像
+1
View File
@@ -142,6 +142,7 @@ export function buildConfig(env: NodeJS.ProcessEnv) {
proxyDebugTargetModel: (env.PROXY_DEBUG_TARGET_MODEL || '').trim(),
proxyDebugRetentionHours: Math.max(1, Math.trunc(parseNumber(env.PROXY_DEBUG_RETENTION_HOURS, 24))),
proxyDebugMaxBodyBytes: Math.max(1024, Math.trunc(parseNumber(env.PROXY_DEBUG_MAX_BODY_BYTES, 262_144))),
openAiServiceTierRules: parseJsonValue(env.OPENAI_SERVICE_TIER_RULES_JSON || env.OPENAI_SERVICE_TIER_RULES),
modelAvailabilityProbeEnabled: parseBoolean(env.MODEL_AVAILABILITY_PROBE_ENABLED, false),
modelAvailabilityProbeIntervalMs: Math.max(60_000, Math.trunc(parseNumber(env.MODEL_AVAILABILITY_PROBE_INTERVAL_MS, 30 * 60 * 1000))),
modelAvailabilityProbeTimeoutMs: Math.max(3_000, Math.trunc(parseNumber(env.MODEL_AVAILABILITY_PROBE_TIMEOUT_MS, 15_000))),
@@ -0,0 +1,65 @@
import { describe, expect, it } from 'vitest';
import { validateExternalResponsesHttpRequest } from './responsesPreflight.js';
// Preflight validation for the external HTTP /v1/responses surface:
// continuation chaining and unresolved tool outputs must be rejected with
// actionable messages pointing callers at Responses WebSocket v2.
describe('validateExternalResponsesHttpRequest', () => {
  it('rejects external HTTP previous_response_id and explains msg ids separately', () => {
    // A structurally valid resp_* id is still rejected: HTTP has no continuation state.
    const responseIdResult = validateExternalResponsesHttpRequest({
      model: 'gpt-5',
      previous_response_id: 'resp_prev_1',
      input: 'hello',
    });
    expect(responseIdResult.ok).toBe(false);
    if (!responseIdResult.ok) {
      expect(responseIdResult.payload.error.message).toContain('HTTP /v1/responses does not support');
    }
    // msg_* ids get an additional hint: they are message ids, not response ids.
    const messageIdResult = validateExternalResponsesHttpRequest({
      model: 'gpt-5',
      previous_response_id: 'msg_prev_1',
      input: 'hello',
    });
    expect(messageIdResult.ok).toBe(false);
    if (!messageIdResult.ok) {
      expect(messageIdResult.payload.error.message).toContain('response id');
      expect(messageIdResult.payload.error.message).toContain('msg_*');
    }
  });
  it('rejects HTTP function_call_output without call_id or local context, but allows matching item_reference', () => {
    // Output item with no call_id at all → rejected with a call_id-specific message.
    const missingCallId = validateExternalResponsesHttpRequest({
      model: 'gpt-5',
      input: [{ type: 'function_call_output', output: 'done' }],
    });
    expect(missingCallId.ok).toBe(false);
    if (!missingCallId.ok) {
      expect(missingCallId.payload.error.message).toContain('requires call_id');
    }
    // call_id present but no matching call item in the same request → rejected.
    const missingContext = validateExternalResponsesHttpRequest({
      model: 'gpt-5',
      input: [{ type: 'function_call_output', call_id: 'call_1', output: 'done' }],
    });
    expect(missingContext.ok).toBe(false);
    if (!missingContext.ok) {
      expect(missingContext.payload.error.message).toContain('Responses WebSocket v2');
    }
    // Output resolved by a same-request function_call with the same call_id → ok.
    expect(validateExternalResponsesHttpRequest({
      model: 'gpt-5',
      input: [
        { type: 'function_call', id: 'fc_1', call_id: 'call_1', name: 'lookup', arguments: '{}' },
        { type: 'function_call_output', call_id: 'call_1', output: 'done' },
      ],
    })).toEqual({ ok: true });
    // call_id does not match, but item_reference points at a known call item id → ok.
    expect(validateExternalResponsesHttpRequest({
      model: 'gpt-5',
      input: [
        { type: 'function_call', id: 'fc_1', call_id: 'call_other', name: 'lookup', arguments: '{}' },
        { type: 'function_call_output', call_id: 'call_1', item_reference: 'fc_1', output: 'done' },
      ],
    })).toEqual({ ok: true });
  });
});
+166
View File
@@ -0,0 +1,166 @@
// Result of the HTTP /v1/responses preflight: either the request may proceed,
// or a ready-to-serialize 400 body in OpenAI's invalid_request_error envelope.
export type ResponsesPreflightResult =
  | { ok: true }
  | {
      ok: false;
      statusCode: 400;
      payload: {
        error: {
          message: string;
          type: 'invalid_request_error';
        };
      };
    };
/** Narrows `value` to a plain (non-array, non-null) string-keyed object. */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || value === undefined) return false;
  return typeof value === 'object' && !Array.isArray(value);
}
/** Returns the trimmed string value of `value`, or '' for any non-string input. */
function asTrimmedString(value: unknown): string {
  if (typeof value !== 'string') return '';
  return value.trim();
}
/** Builds the standard 400 invalid_request_error preflight failure result. */
function invalidRequest(message: string): Extract<ResponsesPreflightResult, { ok: false }> {
  const payload = {
    error: {
      message,
      type: 'invalid_request_error' as const,
    },
  };
  return { ok: false, statusCode: 400, payload };
}
/**
 * Depth-first traversal: invokes `visitor` on every plain-object node reachable
 * inside `value`. Arrays are descended into but never visited themselves.
 */
function walkRecords(value: unknown, visitor: (item: Record<string, unknown>) => void): void {
  if (Array.isArray(value)) {
    value.forEach((entry) => walkRecords(entry, visitor));
    return;
  }
  if (!isRecord(value)) return;
  visitor(value);
  Object.values(value)
    .filter((entry) => Array.isArray(entry) || isRecord(entry))
    .forEach((entry) => walkRecords(entry, visitor));
}
/**
 * Rejects any user-supplied previous_response_id on the external HTTP surface:
 * continuation chaining is only available over Responses WebSocket v2.
 * msg_* ids get a more specific message since callers often confuse message ids
 * with resp_* response ids.
 */
function validateExternalPreviousResponseId(body: Record<string, unknown>): ResponsesPreflightResult {
  const previousResponseId = asTrimmedString(body.previous_response_id);
  if (previousResponseId === '') return { ok: true };
  if (previousResponseId.startsWith('msg_')) {
    return invalidRequest('previous_response_id must be a response id beginning with resp_; message ids like msg_* are not valid, and HTTP /v1/responses does not support continuation chaining. Use Responses WebSocket v2 for function-call follow-up turns.');
  }
  return invalidRequest('HTTP /v1/responses does not support user-supplied previous_response_id continuation chaining. Use Responses WebSocket v2 for follow-up turns.');
}
/**
 * Validates tool outputs in an external HTTP /v1/responses request.
 *
 * Every function_call_output / custom_tool_call_output must carry a call_id and
 * must be resolvable locally: either a function_call/tool_call with the same
 * call_id appears in the same request body, or the output names a known call
 * item via item_reference. Anything else needs the stateful WebSocket transport.
 *
 * @param body request body; only `input` (when an array) is inspected
 * @param options allowContinuationToolOutput — when true, skip all checks
 *   (the caller is a continuation-capable surface with its own context)
 * @returns `{ ok: true }` or a 400 invalid_request_error result
 */
function validateFunctionCallOutputs(
  body: Record<string, unknown>,
  options: { allowContinuationToolOutput?: boolean } = {},
): ResponsesPreflightResult {
  // Hoisted from after the input scan: when continuation tool output is allowed
  // the result is unconditionally ok, so building the id sets was wasted work.
  if (options.allowContinuationToolOutput) return { ok: true };
  const input = body.input;
  if (!Array.isArray(input)) return { ok: true };
  const knownCallIds = new Set<string>();
  const knownReferenceIds = new Set<string>();
  const outputs: Record<string, unknown>[] = [];
  for (const item of input) {
    if (!isRecord(item)) continue;
    const itemType = asTrimmedString(item.type).toLowerCase();
    if (itemType === 'function_call' || itemType === 'custom_tool_call' || itemType === 'tool_call') {
      // A call item registers its call_id (falling back to id) and its id
      // (as an item_reference target).
      const callId = asTrimmedString(item.call_id ?? item.id);
      if (callId) knownCallIds.add(callId);
      const id = asTrimmedString(item.id);
      if (id) knownReferenceIds.add(id);
      continue;
    }
    if (itemType === 'function_call_output' || itemType === 'custom_tool_call_output') {
      outputs.push(item);
    }
  }
  for (const item of outputs) {
    const callId = asTrimmedString(item.call_id);
    if (!callId) {
      return invalidRequest('function_call_output requires call_id. Use Responses WebSocket v2 for incremental tool-output follow-up turns.');
    }
    // item_reference arrives in several spellings depending on the client.
    const itemReference = asTrimmedString(
      item.item_reference
        ?? item.itemReference
        ?? (isRecord(item.reference) ? item.reference.id : undefined),
    );
    if (!knownCallIds.has(callId) && (!itemReference || !knownReferenceIds.has(itemReference))) {
      return invalidRequest('function_call_output must match a function_call/tool_call in the same HTTP request or include a matching item_reference. Use Responses WebSocket v2 for continuation turns.');
    }
  }
  return { ok: true };
}
/**
 * Full preflight for an external HTTP /v1/responses request: checks
 * previous_response_id first, then function/tool call outputs.
 * Returns the first failure, or `{ ok: true }` when the request may proceed.
 */
export function validateExternalResponsesHttpRequest(
  body: Record<string, unknown>,
  options: { allowContinuationToolOutput?: boolean } = {},
): ResponsesPreflightResult {
  const previous = validateExternalPreviousResponseId(body);
  if (!previous.ok) return previous;
  return validateFunctionCallOutputs(body, options);
}
/** Detects a web-search style tool declaration by its `type` or `name` field. */
function isWebSearchToolRecord(tool: Record<string, unknown>): boolean {
  const searchTypes = ['web_search', 'web_search_preview', 'web_search_20250305', 'google_search'];
  const searchNames = ['web_search', 'google_search'];
  const type = asTrimmedString(tool.type).toLowerCase();
  if (searchTypes.includes(type)) return true;
  const name = asTrimmedString(tool.name).toLowerCase();
  return searchNames.includes(name);
}
/**
 * True when the body declares at least one web-search tool and nothing but
 * web-search tools (non-object entries in `tools` are ignored).
 */
export function hasResponsesWebSearchOnlyRequest(body: Record<string, unknown>): boolean {
  const tools = (Array.isArray(body.tools) ? body.tools : []).filter(isRecord);
  const searchTools = tools.filter(isWebSearchToolRecord);
  return searchTools.length > 0 && searchTools.length === tools.length;
}
/**
 * Extracts the search query from a Responses-shaped body.
 * A non-empty string `input` wins outright; otherwise the LAST non-empty
 * input_text/text item (or message with string content) found by a deep walk
 * of `input` is used. Returns '' when nothing usable exists.
 */
export function extractResponsesWebSearchQuery(body: Record<string, unknown>): string {
  if (typeof body.input === 'string') {
    const direct = body.input.trim();
    if (direct) return direct;
  }
  let latest = '';
  walkRecords(body.input, (item) => {
    const type = asTrimmedString(item.type).toLowerCase();
    if (type === 'input_text' || type === 'text') {
      const text = asTrimmedString(item.text);
      if (text) latest = text;
      return;
    }
    if (type === 'message') {
      const text = asTrimmedString(item.content);
      if (text) latest = text;
    }
  });
  return latest;
}
@@ -0,0 +1,76 @@
import { describe, expect, it } from 'vitest';
import { applyOpenAiServiceTierPolicy } from './serviceTierPolicy.js';
// Policy tests: tier normalization (alias + unknown handling) and rule matching
// across tier / model / platform / account-type dimensions.
describe('applyOpenAiServiceTierPolicy', () => {
  it('normalizes fast to priority and drops unknown or non-string tiers', () => {
    // 'fast' (with surrounding whitespace) is an alias for 'priority' and passes through.
    expect(applyOpenAiServiceTierPolicy({
      body: { model: 'gpt-5', service_tier: ' fast ' },
    })).toMatchObject({
      ok: true,
      body: { model: 'gpt-5', service_tier: 'priority' },
      action: 'pass',
    });
    // Unknown tier strings are stripped from the body (action: filter), not rejected.
    expect(applyOpenAiServiceTierPolicy({
      body: { model: 'gpt-5', service_tier: 'turbo' },
    })).toMatchObject({
      ok: true,
      body: { model: 'gpt-5' },
      action: 'filter',
    });
    // Non-string values are treated the same as unknown tiers.
    expect(applyOpenAiServiceTierPolicy({
      body: { model: 'gpt-5', service_tier: 123 },
    })).toMatchObject({
      ok: true,
      body: { model: 'gpt-5' },
      action: 'filter',
    });
  });
  it('applies pass, filter and block rules by tier, model, platform and account type', () => {
    const rules = [
      {
        action: 'filter',
        tiers: ['flex'],
        platforms: ['sub2api'],
      },
      {
        action: 'block',
        tiers: ['priority'],
        models: ['gpt-5'],
        platforms: ['openai'],
        accountTypes: ['free'],
      },
    ];
    // First rule matches (flex on sub2api): tier is stripped but reported back.
    expect(applyOpenAiServiceTierPolicy({
      body: { model: 'gpt-5', service_tier: 'flex' },
      context: { sitePlatform: 'sub2api', requestedModel: 'gpt-5' },
      rules,
    })).toMatchObject({
      ok: true,
      body: { model: 'gpt-5' },
      serviceTier: 'flex',
      action: 'filter',
    });
    // 'fast' normalizes to 'priority' BEFORE matching, so the block rule fires.
    const blocked = applyOpenAiServiceTierPolicy({
      body: { model: 'gpt-5', service_tier: 'fast' },
      context: {
        sitePlatform: 'openai',
        requestedModel: 'gpt-5',
        accountType: 'free',
      },
      rules,
    });
    expect(blocked.ok).toBe(false);
    if (!blocked.ok) {
      expect(blocked.statusCode).toBe(400);
      expect(blocked.payload.error.message).toContain('priority');
    }
  });
});
+163
View File
@@ -0,0 +1,163 @@
// What to do with a request whose service_tier matches a rule:
// forward unchanged, strip the tier, or reject the request with a 400.
export type ServiceTierAction = 'pass' | 'filter' | 'block';
// One policy rule. An omitted/empty match list means "match anything" on that
// dimension; entries are compared case-insensitively and '*' is a wildcard.
export type ServiceTierRule = {
  action: ServiceTierAction;
  tiers?: string[];
  models?: string[];
  platforms?: string[];
  accountTypes?: string[];
};
// Request-scoped facts rules can match against; all fields optional/nullable.
export type ServiceTierRuleContext = {
  requestedModel?: string | null;
  actualModel?: string | null;
  sitePlatform?: string | null;
  accountType?: string | null;
};
// Outcome of applying the policy: either a (possibly filtered) body to send
// upstream, or a ready-to-serialize 400 rejection in OpenAI error envelope shape.
export type ServiceTierPolicyResult =
  | {
      ok: true;
      body: Record<string, unknown>;
      serviceTier?: string;
      action: 'pass' | 'filter';
    }
  | {
      ok: false;
      statusCode: 400;
      payload: {
        error: {
          message: string;
          type: 'invalid_request_error';
        };
      };
    };
// Tiers OpenAI currently accepts. Anything else is stripped from the body
// ('fast' is first aliased to 'priority' — see normalizeServiceTier below).
const KNOWN_OPENAI_SERVICE_TIERS = new Set([
  'auto',
  'default',
  'flex',
  'priority',
]);
/** Type guard for a plain object record; rejects null, arrays and primitives. */
function isRecord(value: unknown): value is Record<string, unknown> {
  return typeof value === 'object' && value !== null && !Array.isArray(value);
}
/** Trimmed string value of `value`; the empty string for non-strings. */
function asTrimmedString(value: unknown): string {
  if (typeof value === 'string') return value.trim();
  return '';
}
/** Lowercases and trims each pattern entry, dropping blanks and non-strings. */
function normalizeMatchList(value: unknown): string[] {
  if (!Array.isArray(value)) return [];
  const normalized: string[] = [];
  for (const entry of value) {
    const token = asTrimmedString(entry).toLowerCase();
    if (token) normalized.push(token);
  }
  return normalized;
}
/** Parses one raw rule object; null unless it carries a recognized action. */
function normalizeRule(value: unknown): ServiceTierRule | null {
  if (!isRecord(value)) return null;
  const action = asTrimmedString(value.action).toLowerCase();
  if (action !== 'pass' && action !== 'filter' && action !== 'block') return null;
  const rule: ServiceTierRule = {
    action,
    tiers: normalizeMatchList(value.tiers),
    models: normalizeMatchList(value.models),
    platforms: normalizeMatchList(value.platforms),
    // Config JSON may use either camelCase or snake_case for this key.
    accountTypes: normalizeMatchList(value.accountTypes ?? value.account_types),
  };
  return rule;
}
/** Normalizes a raw rules array, silently dropping unparseable entries. */
function normalizeRules(value: unknown): ServiceTierRule[] {
  if (!Array.isArray(value)) return [];
  const rules: ServiceTierRule[] = [];
  for (const entry of value) {
    const rule = normalizeRule(entry);
    if (rule) rules.push(rule);
  }
  return rules;
}
/**
 * True when `patterns` is absent/empty (unconstrained dimension) or any usable
 * candidate equals one of the lowercase patterns ('*' is a wildcard).
 * With a non-empty pattern list but no usable candidates, the rule does NOT match.
 */
function candidateMatches(patterns: string[] | undefined, candidates: Array<string | null | undefined>): boolean {
  if (!patterns || patterns.length === 0) return true;
  const usable = candidates
    .map((candidate) => asTrimmedString(candidate).toLowerCase())
    .filter((candidate) => candidate !== '');
  if (usable.length === 0) return false;
  return patterns.some((pattern) => pattern === '*' || usable.includes(pattern));
}
/** A rule matches only when every one of its four dimensions matches. */
function ruleMatches(
  rule: ServiceTierRule,
  serviceTier: string,
  context: ServiceTierRuleContext,
): boolean {
  if (!candidateMatches(rule.tiers, [serviceTier])) return false;
  if (!candidateMatches(rule.models, [context.actualModel, context.requestedModel])) return false;
  if (!candidateMatches(rule.platforms, [context.sitePlatform])) return false;
  return candidateMatches(rule.accountTypes, [context.accountType]);
}
/**
 * Maps a raw service_tier value onto a known OpenAI tier.
 * 'fast' is a legacy alias for 'priority'; unknown/non-string values → null.
 */
function normalizeServiceTier(raw: unknown): string | null {
  const normalized = typeof raw === 'string' ? raw.trim().toLowerCase() : '';
  if (!normalized) return null;
  if (normalized === 'fast') return 'priority';
  return KNOWN_OPENAI_SERVICE_TIERS.has(normalized) ? normalized : null;
}
/** 400 result returned when a matched rule blocks the requested tier outright. */
function buildBlockedResult(serviceTier: string): Extract<ServiceTierPolicyResult, { ok: false }> {
  const message = `service_tier '${serviceTier}' is not allowed for this upstream policy`;
  return {
    ok: false,
    statusCode: 400,
    payload: { error: { message, type: 'invalid_request_error' } },
  };
}
/**
 * Enforces the configured service_tier policy on an OpenAI request body.
 * The input body is never mutated; a shallow copy is returned.
 *
 * - Unknown or non-string tiers are silently stripped (action 'filter').
 * - Otherwise the first matching rule decides: 'block' → 400 result,
 *   'filter' → tier stripped from the body, anything else → pass through.
 */
export function applyOpenAiServiceTierPolicy(input: {
  body: Record<string, unknown>;
  context?: ServiceTierRuleContext;
  rules?: unknown;
}): ServiceTierPolicyResult {
  const body = { ...input.body };
  const serviceTier = normalizeServiceTier(input.body.service_tier);
  if (serviceTier === null) {
    delete body.service_tier;
    return { ok: true, body, action: 'filter' };
  }
  // Write back the canonical spelling (e.g. ' fast ' → 'priority').
  body.service_tier = serviceTier;
  const matched = normalizeRules(input.rules)
    .find((rule) => ruleMatches(rule, serviceTier, input.context ?? {}));
  switch (matched?.action) {
    case 'block':
      return buildBlockedResult(serviceTier);
    case 'filter':
      delete body.service_tier;
      return { ok: true, body, serviceTier, action: 'filter' };
    default:
      return { ok: true, body, serviceTier, action: 'pass' };
  }
}
+69 -1
View File
@@ -52,6 +52,8 @@ import { getRuntimeResponseReader, readRuntimeResponseText } from '../executors/
import { detectDownstreamClientContext } from '../downstreamClientContext.js';
import { getProxyMaxChannelRetries } from '../../services/proxyChannelRetry.js';
import { shouldAbortSameSiteEndpointFallback } from '../../services/proxyRetryPolicy.js';
import { applyOpenAiServiceTierPolicy } from '../serviceTierPolicy.js';
import { maybeHandleWebSearchOnlySimulation } from '../webSearchSimulation.js';
import {
acquireSurfaceChannelLease,
bindSurfaceStickyChannel,
@@ -144,6 +146,17 @@ export async function handleChatSurfaceRequest(
upstreamBody,
claudeOriginalBody,
} = requestEnvelope.parsed;
if (downstreamFormat === 'claude') {
const handledSearch = await maybeHandleWebSearchOnlySimulation({
app: request.server,
request,
reply,
downstreamFormat: 'claude',
body: (claudeOriginalBody || request.body || {}) as Record<string, unknown>,
openAiBody: upstreamBody,
});
if (handledSearch) return;
}
if (!await ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
const downstreamPolicy = getDownstreamRoutingPolicy(request);
const forcedChannelId = getTesterForcedChannelId({
@@ -328,6 +341,29 @@ export async function handleChatSurfaceRequest(
options: { forceNormalizeClaudeBody?: boolean } = {},
) => {
const upstreamStream = isStream || (forceResponsesUpstreamStream && endpoint === 'responses');
const bodyForEndpoint = endpoint === 'responses'
? (() => {
const policyResult = applyOpenAiServiceTierPolicy({
body: resolvedOpenAiBody,
context: {
requestedModel,
actualModel: modelName,
sitePlatform: selected.site.platform,
accountType: oauth?.planType,
},
rules: (config as any).openAiServiceTierRules,
});
if (!policyResult.ok) {
const error = new SiteApiEndpointRequestError(policyResult.payload.error.message, {
status: policyResult.statusCode,
rawErrText: JSON.stringify(policyResult.payload),
});
(error as SiteApiEndpointRequestError & { serviceTierBlocked?: boolean }).serviceTierBlocked = true;
throw error;
}
return policyResult.body;
})()
: resolvedOpenAiBody;
const endpointRequest = buildUpstreamEndpointRequest({
endpoint,
modelName,
@@ -337,7 +373,7 @@ export async function handleChatSurfaceRequest(
oauthProjectId: oauth?.projectId,
sitePlatform: selected.site.platform,
siteUrl: siteApiBaseUrl,
openaiBody: resolvedOpenAiBody,
openaiBody: bodyForEndpoint,
downstreamFormat,
claudeOriginalBody,
forceNormalizeClaudeBody: options.forceNormalizeClaudeBody,
@@ -959,8 +995,24 @@ export async function handleChatSurfaceRequest(
err instanceof SiteApiEndpointRequestError
|| err?.name === 'SiteApiEndpointRequestError'
|| err?.siteApiEndpointUpstreamFailure === true
|| err?.serviceTierBlocked === true
|| (endpointFailureStatus !== null && endpointFailureStatus >= 500)
);
if (err?.serviceTierBlocked === true) {
let payload: unknown = null;
try {
payload = JSON.parse(err.rawErrText || '');
} catch {
payload = {
error: {
message: err.message || 'service_tier is blocked by policy',
type: 'invalid_request_error',
},
};
}
await finalizeDebugFailure(endpointFailureStatus || 400, payload, null);
return reply.code(endpointFailureStatus || 400).send(payload);
}
if (isSiteApiEndpointFailure) {
const failureOutcome = await failureToolkit.handleUpstreamFailure({
selected,
@@ -1391,8 +1443,24 @@ export async function handleClaudeCountTokensSurfaceRequest(
error instanceof SiteApiEndpointRequestError
|| error?.name === 'SiteApiEndpointRequestError'
|| error?.siteApiEndpointUpstreamFailure === true
|| error?.serviceTierBlocked === true
|| (endpointFailureStatus !== null && endpointFailureStatus >= 500)
);
if (error?.serviceTierBlocked === true) {
let payload: unknown = null;
try {
payload = JSON.parse(error.rawErrText || '');
} catch {
payload = {
error: {
message: error.message || 'service_tier is blocked by policy',
type: 'invalid_request_error',
},
};
}
await finalizeDebugFailure(endpointFailureStatus || 400, payload, null);
return reply.code(endpointFailureStatus || 400).send(payload);
}
if (isSiteApiEndpointFailure) {
const failureOutcome = await failureToolkit.handleUpstreamFailure({
selected,
@@ -66,6 +66,9 @@ import {
shouldFallbackCompactResponsesToResponses,
} from '../capabilities/responsesCompact.js';
import { detectDownstreamClientContext } from '../downstreamClientContext.js';
import { validateExternalResponsesHttpRequest } from '../responsesPreflight.js';
import { applyOpenAiServiceTierPolicy } from '../serviceTierPolicy.js';
import { maybeHandleWebSearchOnlySimulation } from '../webSearchSimulation.js';
import { getProxyMaxChannelRetries } from '../../services/proxyChannelRetry.js';
import { shouldAbortSameSiteEndpointFallback } from '../../services/proxyRetryPolicy.js';
import {
@@ -262,6 +265,14 @@ export async function handleOpenAiResponsesSurfaceRequest(
const defaultEncryptedReasoningInclude = isCodexResponsesSurface(
request.headers as Record<string, unknown>,
);
if (!isResponsesWebsocketTransportRequest(request.headers as Record<string, unknown>)) {
const preflight = validateExternalResponsesHttpRequest(body, {
allowContinuationToolOutput: defaultEncryptedReasoningInclude,
});
if (!preflight.ok) {
return reply.code(preflight.statusCode).send(preflight.payload);
}
}
const parsedRequestEnvelope = openAiResponsesTransformer.transformRequest(body, {
defaultEncryptedReasoningInclude,
});
@@ -280,6 +291,16 @@ export async function handleOpenAiResponsesSurfaceRequest(
},
});
}
if (!isCompactRequest) {
const handledSearch = await maybeHandleWebSearchOnlySimulation({
app: request.server,
request,
reply,
downstreamFormat: 'responses',
body: requestEnvelope.parsed.normalizedBody,
});
if (handledSearch) return;
}
if (!await ensureModelAllowedForDownstreamKey(request, reply, requestedModel)) return;
const downstreamPolicy = getDownstreamRoutingPolicy(request);
const forcedChannelId = getTesterForcedChannelId({
@@ -400,6 +421,21 @@ export async function handleOpenAiResponsesSurfaceRequest(
model: modelName,
stream: isStream,
};
const serviceTierPolicy = applyOpenAiServiceTierPolicy({
body: normalizedResponsesBody,
context: {
requestedModel,
actualModel: modelName,
sitePlatform: selected.site.platform,
accountType: oauth?.planType,
},
rules: (config as any).openAiServiceTierRules,
});
if (!serviceTierPolicy.ok) {
await finalizeDebugFailure(serviceTierPolicy.statusCode, serviceTierPolicy.payload, null);
return reply.code(serviceTierPolicy.statusCode).send(serviceTierPolicy.payload);
}
normalizedResponsesBody = serviceTierPolicy.body;
if (body.generate === false) {
normalizedResponsesBody.generate = false;
}
@@ -0,0 +1,314 @@
import { randomUUID } from 'node:crypto';
import type { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
import { anthropicMessagesTransformer } from '../transformers/anthropic/messages/index.js';
import {
extractResponsesWebSearchQuery,
hasResponsesWebSearchOnlyRequest,
} from './responsesPreflight.js';
/** True for plain objects (string-keyed records); false for null, arrays and primitives. */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (typeof value !== 'object' || value === null) return false;
  return !Array.isArray(value);
}
/** Trimmed string value, or '' when `value` is not a string. */
function asTrimmedString(value: unknown): string {
  if (typeof value !== 'string') return '';
  return value.trim();
}
/** Returns the first web-search tool declaration found in body.tools, or null. */
function findSearchTool(body: Record<string, unknown>): Record<string, unknown> | null {
  const searchTypes = new Set(['web_search', 'web_search_preview', 'web_search_20250305', 'google_search']);
  const searchNames = new Set(['web_search', 'google_search']);
  const tools = Array.isArray(body.tools) ? body.tools : [];
  for (const tool of tools) {
    if (!isRecord(tool)) continue;
    const type = asTrimmedString(tool.type).toLowerCase();
    const name = asTrimmedString(tool.name).toLowerCase();
    if (searchTypes.has(type) || searchNames.has(name)) return tool;
  }
  return null;
}
/**
 * Result-count budget for a simulated search: the first present value among
 * max_uses / max_results / maxResults, truncated and clamped to [1, 20].
 * Defaults to 10 when absent or not a finite number.
 */
function toSearchMaxResults(tool: Record<string, unknown> | null): number {
  const raw = tool?.max_uses ?? tool?.max_results ?? tool?.maxResults;
  if (typeof raw !== 'number' || !Number.isFinite(raw)) return 10;
  const truncated = Math.trunc(raw);
  return Math.min(20, Math.max(1, truncated));
}
/**
 * Copies the inbound request headers for the internal /v1/search inject call,
 * dropping hop-by-hop and body-framing headers (the inject call sets its own
 * content type/length). Header casing of the kept entries is preserved.
 */
function buildSearchInjectHeaders(request: FastifyRequest): Record<string, string | string[]> {
  const skipped = new Set(['host', 'content-length', 'content-type', 'connection', 'transfer-encoding']);
  const headers: Record<string, string | string[]> = {};
  const entries = Object.entries(request.headers as Record<string, string | string[]>);
  for (const [name, value] of entries) {
    if (value === undefined) continue;
    if (skipped.has(name.toLowerCase())) continue;
    headers[name] = value;
  }
  return headers;
}
/**
 * Pulls the search query from the most recent user message of an Anthropic
 * messages body. A non-empty string content wins; otherwise text-ish content
 * parts (plain strings, or text/input_text/untyped blocks) are joined with
 * newlines. Returns '' when no usable text is found.
 */
function extractAnthropicSearchQuery(body: Record<string, unknown>): string {
  const messages = Array.isArray(body.messages) ? body.messages : [];
  for (let i = messages.length - 1; i >= 0; i -= 1) {
    const message = messages[i];
    if (!isRecord(message)) continue;
    if (asTrimmedString(message.role).toLowerCase() !== 'user') continue;
    const content = message.content;
    if (typeof content === 'string') {
      const trimmed = content.trim();
      if (trimmed) return trimmed;
      continue;
    }
    if (!Array.isArray(content)) continue;
    const parts: string[] = [];
    for (const part of content) {
      let text = '';
      if (typeof part === 'string') {
        text = part.trim();
      } else if (isRecord(part)) {
        const type = asTrimmedString(part.type).toLowerCase();
        // Untyped blocks are treated as text; explicitly non-text blocks are skipped.
        if (!type || type === 'text' || type === 'input_text') {
          text = asTrimmedString(part.text ?? part.content);
        }
      }
      if (text) parts.push(text);
    }
    if (parts.length > 0) return parts.join('\n');
  }
  return '';
}
/**
 * Invokes this app's own /v1/search route in-process via fastify's inject and
 * returns the status code plus the JSON-parsed payload (raw body text when the
 * response is not valid JSON).
 */
async function callLocalSearchRoute(input: {
  app: FastifyInstance;
  request: FastifyRequest;
  query: string;
  model: string;
  maxResults: number;
}): Promise<{ statusCode: number; payload: unknown }> {
  const response = await input.app.inject({
    method: 'POST',
    url: '/v1/search',
    headers: buildSearchInjectHeaders(input.request),
    payload: {
      model: input.model,
      query: input.query,
      max_results: input.maxResults,
    },
  });
  let payload: unknown;
  try {
    payload = JSON.parse(response.body);
  } catch {
    payload = response.body;
  }
  return { statusCode: response.statusCode, payload };
}
/** Extracts the result list from a search payload: `data` preferred, else `results`. */
function normalizeSearchResults(payload: unknown): unknown[] {
  if (!isRecord(payload)) return [];
  const data = Array.isArray(payload.data) ? payload.data : [];
  if (data.length > 0) return data;
  return Array.isArray(payload.results) ? payload.results : [];
}
/**
 * Builds a synthetic, already-completed OpenAI Responses payload that mimics a
 * web_search_call followed by an assistant message carrying the raw search
 * results as a JSON string. Usage is reported as zero tokens because no model
 * generation happened.
 */
function buildSyntheticResponsesPayload(input: {
  body: Record<string, unknown>;
  query: string;
  searchPayload: unknown;
}) {
  const results = normalizeSearchResults(input.searchPayload);
  const searchCallId = `ws_${randomUUID()}`;
  const searchCall = {
    id: searchCallId,
    type: 'web_search_call',
    status: 'completed',
    action: {
      type: 'search',
      query: input.query,
    },
  };
  const assistantMessage = {
    id: `msg_${searchCallId}`,
    type: 'message',
    role: 'assistant',
    status: 'completed',
    content: [{
      type: 'output_text',
      text: results.length > 0 ? JSON.stringify(results) : '[]',
    }],
  };
  return {
    id: `resp_web_search_${randomUUID()}`,
    object: 'response',
    created_at: Math.floor(Date.now() / 1000),
    model: asTrimmedString(input.body.model) || 'unknown',
    status: 'completed',
    output: [searchCall, assistantMessage],
    usage: {
      input_tokens: 0,
      output_tokens: 0,
      total_tokens: 0,
    },
  };
}
/** Renders a synthetic response as a minimal SSE stream: one completed event plus [DONE]. */
function serializeResponsesSse(payload: Record<string, unknown>): string[] {
  const completed = JSON.stringify({
    type: 'response.completed',
    response: payload,
  });
  return [
    `event: response.completed\ndata: ${completed}\n\n`,
    'data: [DONE]\n\n',
  ];
}
// Simulates a web-search-only request arriving on the Claude (Anthropic
// messages) surface: runs the local /v1/search route, wraps the results in a
// synthetic Responses payload, then converts it to Anthropic wire format
// (streaming SSE or a single message). Returns true when a reply was sent,
// false when the caller should continue normal upstream handling.
async function sendAnthropicSearchSimulation(input: {
  app: FastifyInstance;
  request: FastifyRequest;
  reply: FastifyReply;
  body: Record<string, unknown>;        // raw Claude request body
  openAiBody: Record<string, unknown>;  // same request translated to OpenAI shape
  searchBody: Record<string, unknown>;  // whichever body carried the search-only tools
}): Promise<boolean> {
  const tool = findSearchTool(input.searchBody) || findSearchTool(input.openAiBody);
  // Prefer the query from the raw Claude messages; fall back to the translated body.
  const query = extractAnthropicSearchQuery(input.body) || extractResponsesWebSearchQuery(input.openAiBody);
  if (!query) return false;
  const search = await callLocalSearchRoute({
    app: input.app,
    request: input.request,
    query,
    model: asTrimmedString(input.body.model) || asTrimmedString(input.openAiBody.model) || '__search',
    maxResults: toSearchMaxResults(tool),
  });
  // Non-2xx from the local search route is relayed to the client as-is.
  if (search.statusCode < 200 || search.statusCode >= 300) {
    input.reply.code(search.statusCode).send(search.payload);
    return true;
  }
  const responsesPayload = buildSyntheticResponsesPayload({
    body: input.openAiBody,
    query,
    searchPayload: search.payload,
  });
  if (input.body.stream === true) {
    // Streaming path: serialize the synthetic final response as Anthropic SSE events.
    const streamContext = anthropicMessagesTransformer.createStreamContext(asTrimmedString(input.body.model) || 'unknown');
    const claudeContext = anthropicMessagesTransformer.createDownstreamContext();
    const lines = anthropicMessagesTransformer.serializeUpstreamFinalAsStream(
      responsesPayload,
      asTrimmedString(input.body.model) || 'unknown',
      '',
      streamContext,
      claudeContext,
    );
    input.reply
      .code(200)
      .header('Content-Type', 'text/event-stream; charset=utf-8')
      .header('Cache-Control', 'no-cache, no-transform')
      .send(lines.join(''));
    return true;
  }
  // Non-streaming path: convert to a single Anthropic messages response.
  // Token usage is reported as zero since no model generation occurred.
  const normalized = anthropicMessagesTransformer.transformFinalResponse(
    responsesPayload,
    asTrimmedString(input.body.model) || 'unknown',
    '',
  );
  input.reply.code(200).send(anthropicMessagesTransformer.serializeFinalResponse(normalized, {
    promptTokens: 0,
    completionTokens: 0,
    totalTokens: 0,
  }));
  return true;
}
/**
 * Handles a Responses-surface request that declares only web-search tools:
 * runs the local /v1/search route and replies with a synthetic completed
 * response (SSE when stream: true). Returns false when the request does not
 * qualify and should continue upstream; true when a reply was sent.
 */
async function sendResponsesSearchSimulation(input: {
  app: FastifyInstance;
  request: FastifyRequest;
  reply: FastifyReply;
  body: Record<string, unknown>;
}): Promise<boolean> {
  if (!hasResponsesWebSearchOnlyRequest(input.body)) return false;
  const tool = findSearchTool(input.body);
  const query = extractResponsesWebSearchQuery(input.body);
  if (!query) return false;
  const search = await callLocalSearchRoute({
    app: input.app,
    request: input.request,
    query,
    model: asTrimmedString(input.body.model) || '__search',
    maxResults: toSearchMaxResults(tool),
  });
  const succeeded = search.statusCode >= 200 && search.statusCode < 300;
  if (!succeeded) {
    // Relay the local search route's failure verbatim.
    input.reply.code(search.statusCode).send(search.payload);
    return true;
  }
  const payload = buildSyntheticResponsesPayload({
    body: input.body,
    query,
    searchPayload: search.payload,
  });
  if (input.body.stream !== true) {
    input.reply.code(200).send(payload);
    return true;
  }
  input.reply
    .code(200)
    .header('Content-Type', 'text/event-stream; charset=utf-8')
    .header('Cache-Control', 'no-cache, no-transform')
    .send(serializeResponsesSse(payload).join(''));
  return true;
}
/**
 * Entry point: short-circuits web-search-only requests with a locally
 * simulated search response instead of forwarding them upstream.
 * Returns true when the reply has already been sent and the caller must stop.
 */
export async function maybeHandleWebSearchOnlySimulation(input: {
  app: FastifyInstance;
  request: FastifyRequest;
  reply: FastifyReply;
  downstreamFormat: 'responses' | 'claude';
  body: Record<string, unknown>;
  openAiBody?: Record<string, unknown>;
}): Promise<boolean> {
  const { app, request, reply, body, openAiBody } = input;
  if (input.downstreamFormat === 'responses') {
    return sendResponsesSearchSimulation({ app, request, reply, body });
  }
  // Claude surface: requires the translated OpenAI body, and a search-only
  // tool set in either the raw Claude body or the translated body.
  if (!openAiBody) return false;
  const rawIsSearchOnly = hasResponsesWebSearchOnlyRequest(body);
  if (!rawIsSearchOnly && !hasResponsesWebSearchOnlyRequest(openAiBody)) return false;
  return sendAnthropicSearchSimulation({
    app,
    request,
    reply,
    body,
    openAiBody,
    searchBody: rawIsSearchOnly ? body : openAiBody,
  });
}
/** Public re-export of the search-only check, kept for API stability. */
export function isResponsesWebSearchOnlyRequest(body: Record<string, unknown>): boolean {
  const searchOnly = hasResponsesWebSearchOnlyRequest(body);
  return searchOnly;
}
+89 -21
View File
@@ -9,13 +9,13 @@ import { waitForBackgroundTaskToReachTerminalState } from '../../test-fixtures/b
const {
fetchLatestStableGitHubReleaseMock,
fetchLatestDockerHubTagMock,
fetchDockerHubTagCandidatesMock,
getUpdateCenterHelperStatusMock,
streamUpdateCenterDeployMock,
streamUpdateCenterRollbackMock,
} = vi.hoisted(() => ({
fetchLatestStableGitHubReleaseMock: vi.fn(),
fetchLatestDockerHubTagMock: vi.fn(),
fetchDockerHubTagCandidatesMock: vi.fn(),
getUpdateCenterHelperStatusMock: vi.fn(),
streamUpdateCenterDeployMock: vi.fn(),
streamUpdateCenterRollbackMock: vi.fn(),
@@ -26,7 +26,7 @@ vi.mock('../../services/updateCenterVersionService.js', async () => {
return {
...actual,
fetchLatestStableGitHubRelease: (...args: unknown[]) => fetchLatestStableGitHubReleaseMock(...args),
fetchLatestDockerHubTag: (...args: unknown[]) => fetchLatestDockerHubTagMock(...args),
fetchDockerHubTagCandidates: (...args: unknown[]) => fetchDockerHubTagCandidatesMock(...args),
};
});
@@ -103,7 +103,7 @@ describe('update center routes', () => {
beforeEach(async () => {
fetchLatestStableGitHubReleaseMock.mockReset();
fetchLatestDockerHubTagMock.mockReset();
fetchDockerHubTagCandidatesMock.mockReset();
getUpdateCenterHelperStatusMock.mockReset();
streamUpdateCenterDeployMock.mockReset();
streamUpdateCenterRollbackMock.mockReset();
@@ -139,6 +139,18 @@ describe('update center routes', () => {
publishedAt: '2026-03-29T11:54:35.591877Z',
url: null,
} as const;
const dockerHubRecentTags = [
{
source: 'docker-hub-tag',
rawVersion: 'dev',
normalizedVersion: 'dev',
tagName: 'dev',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
publishedAt: '2026-03-29T12:54:35.591877Z',
url: null,
},
] as const;
const helperStatus = {
ok: true,
releaseName: 'metapi',
@@ -161,7 +173,10 @@ describe('update center routes', () => {
],
} as const;
fetchLatestStableGitHubReleaseMock.mockResolvedValue(githubRelease);
fetchLatestDockerHubTagMock.mockResolvedValue(dockerHubTag);
fetchDockerHubTagCandidatesMock.mockResolvedValue({
primary: dockerHubTag,
recentNonStable: dockerHubRecentTags,
});
getUpdateCenterHelperStatusMock.mockResolvedValue(helperStatus);
const saveResponse = await app.inject({
@@ -216,6 +231,13 @@ describe('update center routes', () => {
displayVersion: 'latest @ sha256:efb2ee655386',
digest: 'sha256:efb2ee6553866bd3268dcc54c02fa5f9789728c51ed4af63328aaba6da67df35',
},
dockerHubRecentTags: [
{
normalizedVersion: 'dev',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
},
],
helper: {
ok: true,
healthy: true,
@@ -242,11 +264,14 @@ describe('update center routes', () => {
it('returns partial status when a single version source lookup fails', async () => {
fetchLatestStableGitHubReleaseMock.mockRejectedValue(new Error('GitHub releases lookup timed out'));
fetchLatestDockerHubTagMock.mockResolvedValue({
source: 'docker-hub-tag',
rawVersion: '1.3.1',
normalizedVersion: '1.3.1',
url: null,
fetchDockerHubTagCandidatesMock.mockResolvedValue({
primary: {
source: 'docker-hub-tag',
rawVersion: '1.3.1',
normalizedVersion: '1.3.1',
url: null,
},
recentNonStable: [],
});
getUpdateCenterHelperStatusMock.mockResolvedValue({
ok: true,
@@ -426,6 +451,18 @@ describe('update center routes', () => {
publishedAt: '2026-03-31T09:00:00Z',
url: null,
},
dockerHubRecentTags: [
{
source: 'docker-hub-tag',
rawVersion: 'dev',
normalizedVersion: 'dev',
tagName: 'dev',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
publishedAt: '2026-03-31T09:05:00Z',
url: null,
},
],
helper: {
ok: true,
releaseName: 'metapi',
@@ -454,6 +491,12 @@ describe('update center routes', () => {
dockerHubTag: {
displayVersion: 'latest @ sha256:efb2ee655386',
},
dockerHubRecentTags: [
{
normalizedVersion: 'dev',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
},
],
helper: {
imageDigest: 'sha256:cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
},
@@ -462,7 +505,7 @@ describe('update center routes', () => {
},
});
expect(fetchLatestStableGitHubReleaseMock).not.toHaveBeenCalled();
expect(fetchLatestDockerHubTagMock).not.toHaveBeenCalled();
expect(fetchDockerHubTagCandidatesMock).not.toHaveBeenCalled();
expect(getUpdateCenterHelperStatusMock).not.toHaveBeenCalled();
});
@@ -477,15 +520,29 @@ describe('update center routes', () => {
publishedAt: '2026-03-31T10:00:00Z',
url: 'https://github.com/cita-777/metapi/releases/tag/v1.3.1',
});
fetchLatestDockerHubTagMock.mockResolvedValue({
source: 'docker-hub-tag',
rawVersion: 'latest',
normalizedVersion: 'latest',
tagName: 'latest',
digest: 'sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd',
displayVersion: 'latest @ sha256:dddddddddddd',
publishedAt: '2026-03-31T10:00:00Z',
url: null,
fetchDockerHubTagCandidatesMock.mockResolvedValue({
primary: {
source: 'docker-hub-tag',
rawVersion: 'latest',
normalizedVersion: 'latest',
tagName: 'latest',
digest: 'sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd',
displayVersion: 'latest @ sha256:dddddddddddd',
publishedAt: '2026-03-31T10:00:00Z',
url: null,
},
recentNonStable: [
{
source: 'docker-hub-tag',
rawVersion: 'dev-20260417-f67ade2',
normalizedVersion: 'dev-20260417-f67ade2',
tagName: 'dev-20260417-f67ade2',
digest: 'sha256:abababababababababababababababababababababababababababababababab',
displayVersion: 'dev-20260417-f67ade2 @ sha256:abababababab',
publishedAt: '2026-03-31T10:05:00Z',
url: null,
},
],
});
getUpdateCenterHelperStatusMock.mockResolvedValue({
ok: true,
@@ -512,6 +569,12 @@ describe('update center routes', () => {
dockerHubTag: {
digest: 'sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd',
},
dockerHubRecentTags: [
{
normalizedVersion: 'dev-20260417-f67ade2',
digest: 'sha256:abababababababababababababababababababababababababababababababab',
},
],
helper: {
revision: '13',
},
@@ -520,7 +583,7 @@ describe('update center routes', () => {
},
});
expect(fetchLatestStableGitHubReleaseMock).toHaveBeenCalledTimes(1);
expect(fetchLatestDockerHubTagMock).toHaveBeenCalledTimes(1);
expect(fetchDockerHubTagCandidatesMock).toHaveBeenCalledTimes(1);
expect(getUpdateCenterHelperStatusMock).toHaveBeenCalledTimes(1);
expect(await loadUpdateCenterRuntimeState()).toEqual(expect.objectContaining({
lastResolvedSource: 'github-release',
@@ -532,6 +595,11 @@ describe('update center routes', () => {
dockerHubTag: expect.objectContaining({
digest: 'sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd',
}),
dockerHubRecentTags: [
expect.objectContaining({
normalizedVersion: 'dev-20260417-f67ade2',
}),
],
helper: expect.objectContaining({
revision: '13',
}),
+194 -55
View File
@@ -112,10 +112,12 @@ describe('chat proxy stream behavior', () => {
beforeAll(async () => {
const { chatProxyRoute, claudeMessagesProxyRoute } = await import('./chat.js');
const { responsesProxyRoute } = await import('./responses.js');
const { searchProxyRoute } = await import('./search.js');
app = Fastify();
await app.register(chatProxyRoute);
await app.register(claudeMessagesProxyRoute);
await app.register(responsesProxyRoute);
await app.register(searchProxyRoute);
});
beforeEach(() => {
@@ -156,6 +158,7 @@ describe('chat proxy stream behavior', () => {
filter: [],
};
(config as any).disableCrossProtocolFallback = false;
(config as any).openAiServiceTierRules = undefined;
config.proxyEmptyContentFailEnabled = false;
config.proxyErrorKeywords = [];
});
@@ -193,7 +196,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.body).toContain('data: ');
expect(response.body).toContain('"chat.completion.chunk"');
@@ -233,7 +236,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.json()?.choices?.[0]?.message?.content).toBe('你好,来自 zstd 非流式响应');
});
@@ -268,7 +271,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.body).toContain('"chat.completion.chunk"');
expect(response.body).toContain('你好,来自 zstd 流式回退');
@@ -300,7 +303,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.body).toContain('"chat.completion.chunk"');
expect(response.body).toContain('你好,来自 zstd 原生 SSE');
@@ -513,7 +516,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.body).toContain('visible answer despite zero output usage');
expect(response.body).toContain('data: [DONE]');
@@ -563,7 +566,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.headers['cache-control']).toContain('no-transform');
expect(response.headers['x-accel-buffering']).toBe('no');
@@ -600,7 +603,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.body).toContain('"reasoning_content":"plan quietly"');
expect(response.body).toContain('"content":"visible answer"');
expect(response.body).not.toContain('<think>');
@@ -639,7 +642,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.body).toContain('"reasoning_content":"plan "');
expect(response.body).toContain('"reasoning_content":"quietly"');
expect(response.body).toContain('"content":"visible "');
@@ -710,7 +713,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.body).toContain('"chat.completion.chunk"');
expect(response.body).toContain('"delta":{"content":"hello"}');
@@ -777,7 +780,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
const body = response.json();
expect(body.type).toBe('message');
expect(body.role).toBe('assistant');
@@ -815,7 +818,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.body).toContain('event: message_start');
expect(response.body).toContain('event: content_block_delta');
@@ -2018,7 +2021,7 @@ describe('chat proxy stream behavior', () => {
});
});
it('preserves sub2api responses semantics across compatibility retries without using a strict field-dropping body', async () => {
it('rejects external HTTP previous_response_id before sub2api compatibility retries', async () => {
selectChannelMock.mockReturnValue({
channel: { id: 11, routeId: 22 },
site: { name: 'sub2api-site', url: 'https://sub2api.example.com', platform: 'sub2api' },
@@ -2076,48 +2079,9 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(fetchMock).toHaveBeenCalledTimes(2);
const [, firstOptions] = fetchMock.mock.calls[0] as [string, any];
const [, secondOptions] = fetchMock.mock.calls[1] as [string, any];
const firstBody = JSON.parse(firstOptions.body);
const secondBody = JSON.parse(secondOptions.body);
expect(firstOptions.headers.accept).toBe('text/event-stream');
expect(secondOptions.headers.accept).toBe('text/event-stream');
expect(firstOptions.headers['openai-beta']).toBe('responses-2025-03-11');
expect(secondOptions.headers['openai-beta']).toBe('responses-2025-03-11');
expect(firstOptions.headers.originator).toBe('codex_cli_rs');
expect(secondOptions.headers.originator).toBe('codex_cli_rs');
expect(secondOptions.headers['accept-language']).toBe('zh-CN');
expect(secondOptions.headers.session_id).toBe('session-123');
expect(secondOptions.headers.conversation_id).toBe('conversation-123');
expect(secondOptions.headers['x-codex-turn-state']).toBe('turn-state');
expect(secondOptions.headers['x-codex-turn-metadata']).toBe('turn-metadata');
expect(secondOptions.headers['x-stainless-lang']).toBeUndefined();
expect(secondOptions.headers.version).toBeUndefined();
expect(secondOptions.headers['user-agent']).toBe('lightMyRequest');
expect(firstBody).toMatchObject({
stream: true,
store: false,
previous_response_id: 'resp_prev_1',
include: ['reasoning.encrypted_content'],
reasoning: { effort: 'high' },
prompt_cache_key: 'cache-key-1',
service_tier: 'priority',
background: true,
});
expect(secondBody).toMatchObject({
stream: true,
store: false,
previous_response_id: 'resp_prev_1',
include: ['reasoning.encrypted_content'],
reasoning: { effort: 'high' },
prompt_cache_key: 'cache-key-1',
service_tier: 'priority',
background: true,
});
expect(response.statusCode).toBe(400);
expect(response.json().error.message).toContain('HTTP /v1/responses');
expect(fetchMock).not.toHaveBeenCalled();
});
it('retries generic 400 /v1/responses with minimal headers for strict compatibility fallback', async () => {
@@ -2863,7 +2827,7 @@ describe('chat proxy stream behavior', () => {
},
});
expect(response.statusCode).toBe(200);
expect(response.statusCode, response.body).toBe(200);
expect(fetchMock).toHaveBeenCalledTimes(1);
const [targetUrl] = fetchMock.mock.calls[0] as [string, any];
expect(targetUrl).toContain('/v1/responses');
@@ -4697,6 +4661,181 @@ describe('chat proxy stream behavior', () => {
expect(forwardedBody.tool_choice).toEqual({ type: 'function', name: 'Glob' });
});
it('forwards legacy functions/function_call when /v1/chat/completions is routed to /v1/responses upstream', async () => {
fetchModelPricingCatalogMock.mockResolvedValue({
models: [
{
modelName: 'upstream-gpt',
supportedEndpointTypes: ['/v1/responses'],
},
],
groupRatio: {},
});
fetchMock.mockResolvedValue(new Response(JSON.stringify({
id: 'resp_legacy_chat',
object: 'response',
model: 'upstream-gpt',
status: 'completed',
output: [],
output_text: 'ok',
usage: { input_tokens: 3, output_tokens: 2, total_tokens: 5 },
}), {
status: 200,
headers: { 'content-type': 'application/json' },
}));
const response = await app.inject({
method: 'POST',
url: '/v1/chat/completions',
payload: {
model: 'gpt-4o-mini',
stream: false,
functions: [{
name: 'legacy_lookup',
parameters: { type: 'object' },
}],
function_call: { name: 'legacy_lookup' },
messages: [
{
role: 'function',
name: 'legacy_lookup',
content: '{"ok":true}',
},
{
role: 'user',
content: 'continue',
},
],
},
});
expect(response.statusCode).toBe(200);
const [targetUrl, options] = fetchMock.mock.calls[0] as [string, any];
expect(targetUrl).toContain('/v1/responses');
const forwardedBody = JSON.parse(options.body);
expect(forwardedBody.tools).toEqual([
{ type: 'function', name: 'legacy_lookup', parameters: { type: 'object' } },
]);
expect(forwardedBody.tool_choice).toEqual({ type: 'function', name: 'legacy_lookup' });
expect(forwardedBody.input).toContainEqual({
type: 'function_call_output',
call_id: 'legacy_lookup',
output: '{"ok":true}',
});
});
it('returns synthetic Anthropic web_search server tool results without adding a new search dependency', async () => {
fetchMock.mockResolvedValue(new Response(JSON.stringify({
object: 'search.result',
data: [{ title: 'Metapi', url: 'https://example.com/metapi' }],
}), {
status: 200,
headers: { 'content-type': 'application/json' },
}));
const response = await app.inject({
method: 'POST',
url: '/v1/messages',
payload: {
model: 'claude-opus-4-6',
max_tokens: 256,
stream: false,
tools: [{ type: 'web_search_20250305', max_uses: 2 }],
messages: [{ role: 'user', content: 'metapi protocol compatibility' }],
},
});
expect(response.statusCode, response.body).toBe(200);
expect(fetchMock).toHaveBeenCalledTimes(1);
const [targetUrl, options] = fetchMock.mock.calls[0] as [string, any];
expect(targetUrl).toBe('https://upstream.example.com/v1/search');
expect(JSON.parse(options.body)).toMatchObject({
query: 'metapi protocol compatibility',
max_results: 2,
});
const body = response.json();
expect(body.content?.[0]).toMatchObject({
type: 'server_tool_use',
name: 'web_search',
});
expect(body.content?.[1]).toMatchObject({
type: 'web_search_tool_result',
});
});
it('streams synthetic Anthropic web_search server tool results over SSE', async () => {
fetchMock.mockResolvedValue(new Response(JSON.stringify({
object: 'search.result',
data: [{ title: 'Metapi SSE' }],
}), {
status: 200,
headers: { 'content-type': 'application/json' },
}));
const response = await app.inject({
method: 'POST',
url: '/v1/messages',
payload: {
model: 'claude-opus-4-6',
max_tokens: 256,
stream: true,
tools: [{ type: 'web_search_20250305' }],
messages: [{ role: 'user', content: 'metapi sse search' }],
},
});
expect(response.statusCode, response.body).toBe(200);
expect(response.headers['content-type']).toContain('text/event-stream');
expect(response.body).toContain('"type":"server_tool_use"');
expect(response.body).toContain('"type":"web_search_tool_result"');
expect(response.body).toContain('message_stop');
});
it('returns synthetic Responses web_search results without touching completions upstreams', async () => {
fetchMock.mockResolvedValue(new Response(JSON.stringify({
object: 'search.result',
data: [{ title: 'Metapi Responses', url: 'https://example.com/responses' }],
}), {
status: 200,
headers: { 'content-type': 'application/json' },
}));
const response = await app.inject({
method: 'POST',
url: '/v1/responses',
payload: {
model: 'gpt-4.1',
stream: false,
tools: [{ type: 'web_search', name: 'web_search', max_results: 3 }],
input: 'metapi responses web search',
},
});
expect(response.statusCode, response.body).toBe(200);
expect(fetchMock).toHaveBeenCalledTimes(1);
const [targetUrl, options] = fetchMock.mock.calls[0] as [string, any];
expect(targetUrl).toBe('https://upstream.example.com/v1/search');
expect(JSON.parse(options.body)).toMatchObject({
query: 'metapi responses web search',
max_results: 3,
});
const body = response.json();
expect(body.object).toBe('response');
expect(body.output?.[0]).toMatchObject({
type: 'web_search_call',
status: 'completed',
action: {
type: 'search',
query: 'metapi responses web search',
},
});
expect(body.output?.[1]?.content?.[0]?.text).toContain('Metapi Responses');
});
it('routes gemini platform to OpenAI-compatible upstream endpoint path', async () => {
selectChannelMock.mockReturnValue({
channel: { id: 11, routeId: 22 },
@@ -190,6 +190,7 @@ describe('responses proxy codex oauth refresh', () => {
config.proxyStickySessionEnabled = originalProxyStickySessionEnabled;
config.proxySessionChannelConcurrencyLimit = originalProxySessionChannelConcurrencyLimit;
config.proxySessionChannelQueueWaitMs = originalProxySessionChannelQueueWaitMs;
(config as any).openAiServiceTierRules = undefined;
fetchMock.mockReset();
selectChannelMock.mockReset();
selectNextChannelMock.mockReset();
@@ -685,6 +686,137 @@ describe('responses proxy codex oauth refresh', () => {
]);
});
it('rejects external HTTP previous_response_id without touching upstream', async () => {
const response = await app.inject({
method: 'POST',
url: '/v1/responses',
payload: {
model: 'gpt-5.2-codex',
previous_response_id: 'msg_wrong_1',
input: 'hello',
},
});
expect(response.statusCode).toBe(400);
expect(response.json().error.message).toContain('msg_*');
expect(response.json().error.message).toContain('HTTP /v1/responses');
expect(fetchMock).not.toHaveBeenCalled();
});
it('rejects orphan external HTTP function_call_output but keeps codex session inference allowed', async () => {
const rejected = await app.inject({
method: 'POST',
url: '/v1/responses',
payload: {
model: 'gpt-5.2-codex',
input: [
{
type: 'function_call_output',
call_id: 'call_orphan',
output: '{"ok":true}',
},
],
},
});
expect(rejected.statusCode).toBe(400);
expect(rejected.json().error.message).toContain('Responses WebSocket v2');
expect(fetchMock).not.toHaveBeenCalled();
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify({
id: 'resp_codex_preflight_ok',
object: 'response',
model: 'gpt-5.2-codex',
status: 'completed',
output_text: 'accepted',
usage: { input_tokens: 1, output_tokens: 1, total_tokens: 2 },
}), {
status: 200,
headers: { 'content-type': 'application/json' },
}));
const allowed = await app.inject({
method: 'POST',
url: '/v1/responses',
headers: {
session_id: 'session-preflight-codex',
'user-agent': 'CodexClient/1.0',
},
payload: {
model: 'gpt-5.2-codex',
input: [
{
type: 'function_call_output',
call_id: 'call_codex',
output: '{"ok":true}',
},
],
},
});
expect(allowed.statusCode).toBe(200);
expect(fetchMock).toHaveBeenCalledTimes(1);
});
it('filters HTTP service_tier by selected account policy before forwarding upstream', async () => {
(config as any).openAiServiceTierRules = [{
action: 'filter',
tiers: ['priority'],
platforms: ['codex'],
accountTypes: ['plus'],
}];
fetchMock.mockResolvedValueOnce(new Response(JSON.stringify({
id: 'resp_codex_tier_filtered',
object: 'response',
model: 'gpt-5.2-codex',
status: 'completed',
output_text: 'tier filtered',
usage: { input_tokens: 1, output_tokens: 1, total_tokens: 2 },
}), {
status: 200,
headers: { 'content-type': 'application/json' },
}));
const response = await app.inject({
method: 'POST',
url: '/v1/responses',
payload: {
model: 'gpt-5.2-codex',
service_tier: 'fast',
input: 'hello',
},
});
expect(response.statusCode).toBe(200);
expect(fetchMock).toHaveBeenCalledTimes(1);
const [, options] = fetchMock.mock.calls[0] as [string, any];
const forwardedBody = JSON.parse(options.body);
expect(forwardedBody.service_tier).toBeUndefined();
});
it('blocks HTTP service_tier by selected account policy without touching upstream', async () => {
(config as any).openAiServiceTierRules = [{
action: 'block',
tiers: ['priority'],
platforms: ['codex'],
accountTypes: ['plus'],
}];
const response = await app.inject({
method: 'POST',
url: '/v1/responses',
payload: {
model: 'gpt-5.2-codex',
service_tier: 'fast',
input: 'hello',
},
});
expect(response.statusCode).toBe(400);
expect(response.json().error.message).toContain('service_tier');
expect(fetchMock).not.toHaveBeenCalled();
});
it('infers previous_response_id for codex tool-output follow-up turns when the client only sends conversation_id', async () => {
fetchMock
.mockResolvedValueOnce(new Response(JSON.stringify({
@@ -927,17 +1059,9 @@ describe('responses proxy codex oauth refresh', () => {
},
});
expect(response.statusCode).toBe(200);
expect(fetchMock).toHaveBeenCalledTimes(2);
const [, firstOptions] = fetchMock.mock.calls[0] as [string, any];
const [, secondOptions] = fetchMock.mock.calls[1] as [string, any];
const firstBody = JSON.parse(firstOptions.body);
const secondBody = JSON.parse(secondOptions.body);
expect(firstBody.previous_response_id).toBe('resp_stale');
expect(secondBody.previous_response_id).toBeUndefined();
expect(secondBody.input).toEqual(firstBody.input);
expect(response.statusCode).toBe(400);
expect(response.json().error.message).toContain('HTTP /v1/responses');
expect(fetchMock).not.toHaveBeenCalled();
});
it('strips generic downstream headers before forwarding codex responses upstream', async () => {
@@ -395,6 +395,7 @@ describe('responses websocket transport', () => {
upstreamRequests = [];
(config as any).codexResponsesWebsocketBeta = originalCodexResponsesWebsocketBeta;
(config as any).codexUpstreamWebsocketEnabled = true;
(config as any).openAiServiceTierRules = undefined;
rejectedUpgradeStatus = 426;
rejectedUpgradeStatusText = 'Upgrade Required';
rejectedUpgradeBody = 'Upgrade Required';
@@ -1556,6 +1557,79 @@ describe('responses websocket transport', () => {
});
});
it('applies service_tier policy to websocket frames before upstream dispatch', async () => {
(config as any).openAiServiceTierRules = [{
action: 'filter',
tiers: ['priority'],
platforms: ['openai'],
}];
const selectedChannel = createSelectedChannel({
sitePlatform: 'openai',
actualModel: 'gpt-4.1',
});
selectChannelMock.mockReturnValue(selectedChannel);
previewSelectedChannelMock.mockResolvedValue(selectedChannel);
fetchMock.mockResolvedValueOnce(createSseResponse([
'event: response.completed\n',
'data: {"type":"response.completed","response":{"id":"resp_ws_tier","model":"gpt-4.1","status":"completed","output":[],"usage":{"input_tokens":1,"output_tokens":1,"total_tokens":2}}}\n\n',
'data: [DONE]\n\n',
]));
const socket = createClientSocket(baseUrl);
await waitForSocketOpen(socket);
const responsePromise = waitForSocketMessageMatching(
socket,
(message) => message?.type === 'response.completed',
);
socket.send(JSON.stringify({
type: 'response.create',
model: 'gpt-4.1',
service_tier: 'fast',
input: [],
}));
await responsePromise;
socket.close();
const [, options] = fetchMock.mock.calls[0] as [string, RequestInit];
const forwardedBody = JSON.parse(String(options.body));
expect(forwardedBody.service_tier).toBeUndefined();
(config as any).openAiServiceTierRules = undefined;
});
it('blocks websocket service_tier before upstream dispatch', async () => {
(config as any).openAiServiceTierRules = [{
action: 'block',
tiers: ['priority'],
platforms: ['openai'],
}];
const selectedChannel = createSelectedChannel({
sitePlatform: 'openai',
actualModel: 'gpt-4.1',
});
selectChannelMock.mockReturnValue(selectedChannel);
previewSelectedChannelMock.mockResolvedValue(selectedChannel);
const socket = createClientSocket(baseUrl);
await waitForSocketOpen(socket);
const errorPromise = waitForSocketMessageMatching(
socket,
(message) => message?.type === 'error',
);
socket.send(JSON.stringify({
type: 'response.create',
model: 'gpt-4.1',
service_tier: 'fast',
input: [],
}));
const message = await errorPromise;
socket.close();
expect(message.status).toBe(400);
expect(message.error.message).toContain('service_tier');
expect(fetchMock).not.toHaveBeenCalled();
(config as any).openAiServiceTierRules = undefined;
});
it('keeps streamed output items for follow-up turns when the terminal HTTP fallback payload has an empty output array', async () => {
const selectedChannel = createSelectedChannel({
sitePlatform: 'openai',
+54 -1
View File
@@ -14,9 +14,11 @@ import {
import { runWithSiteApiEndpointPool, SiteApiEndpointRequestError } from '../../services/siteApiEndpointService.js';
import { tokenRouter } from '../../services/tokenRouter.js';
import { buildOauthProviderHeaders } from '../../services/oauth/service.js';
import { getOauthInfoFromAccount } from '../../services/oauth/oauthAccount.js';
import { openAiResponsesTransformer } from '../../transformers/openai/responses/index.js';
import { buildUpstreamEndpointRequest } from './upstreamEndpoint.js';
import { config } from '../../config.js';
import { applyOpenAiServiceTierPolicy } from '../../proxy-core/serviceTierPolicy.js';
const installedApps = new WeakSet<FastifyInstance>();
const WS_TURN_STATE_HEADER = 'x-codex-turn-state';
@@ -43,6 +45,10 @@ function isRecord(value: unknown): value is Record<string, unknown> {
return !!value && typeof value === 'object' && !Array.isArray(value);
}
function getServiceTierPolicyRules(): unknown {
return (config as typeof config & { openAiServiceTierRules?: unknown }).openAiServiceTierRules;
}
function asTrimmedString(value: unknown): string {
return typeof value === 'string' ? value.trim() : '';
}
@@ -592,6 +598,24 @@ async function handleResponsesWebsocketConnection(
writeResponsesWebsocketError(socket, 403, 'model is not allowed for this downstream key');
return;
}
const serviceTierPolicy = applyOpenAiServiceTierPolicy({
body: parsed,
context: {
requestedModel: requestModel,
},
rules: getServiceTierPolicyRules(),
});
if (!serviceTierPolicy.ok) {
writeResponsesWebsocketError(
socket,
serviceTierPolicy.statusCode,
serviceTierPolicy.payload.error.message,
serviceTierPolicy.payload,
);
return;
}
parsed.service_tier = serviceTierPolicy.body.service_tier;
if (serviceTierPolicy.body.service_tier === undefined) delete parsed.service_tier;
const supportsIncrementalInput = selectedChannelSupportsIncrementalInput(selectedChannel, requestModel)
|| await supportsResponsesWebsocketIncrementalInput(parsed, lastRequest, authContext);
const shouldHandleLocalPrewarm = shouldHandleResponsesWebsocketPrewarmLocally(
@@ -614,8 +638,8 @@ async function handleResponsesWebsocketConnection(
await consumeManagedKeyRequest(authContext.key.id);
}
lastRequest = normalized.nextRequestSnapshot;
if (shouldHandleLocalPrewarm) {
lastRequest = normalized.nextRequestSnapshot;
lastResponseOutput = [];
for (const payload of synthesizePrewarmResponsePayloads(normalized.request)) {
socket.send(JSON.stringify(payload));
@@ -629,6 +653,35 @@ async function handleResponsesWebsocketConnection(
: null;
}
const selectedServiceTierPolicy = applyOpenAiServiceTierPolicy({
body: normalized.request,
context: {
requestedModel: requestModel,
actualModel: asTrimmedString(selectedChannel?.actualModel),
sitePlatform: asTrimmedString(selectedChannel?.site?.platform),
accountType: getOauthInfoFromAccount(selectedChannel?.account)?.planType,
},
rules: getServiceTierPolicyRules(),
});
if (!selectedServiceTierPolicy.ok) {
writeResponsesWebsocketError(
socket,
selectedServiceTierPolicy.statusCode,
selectedServiceTierPolicy.payload.error.message,
selectedServiceTierPolicy.payload,
);
return;
}
normalized.request = selectedServiceTierPolicy.body;
normalized.nextRequestSnapshot = {
...normalized.nextRequestSnapshot,
service_tier: selectedServiceTierPolicy.body.service_tier,
};
if (selectedServiceTierPolicy.body.service_tier === undefined) {
delete normalized.nextRequestSnapshot.service_tier;
}
lastRequest = normalized.nextRequestSnapshot;
const codexWebsocketChannel = selectedChannelSupportsCodexWebsocketTransport(selectedChannel, requestModel)
? selectedChannel
: null;
@@ -97,6 +97,7 @@ describe('updateCenterPollingService', () => {
publishedAt: '2026-03-31T12:00:00Z',
},
dockerHubTag: null,
dockerHubRecentTags: [],
helper: {
ok: true,
releaseName: 'metapi',
@@ -115,6 +116,7 @@ describe('updateCenterPollingService', () => {
currentVersion: '1.2.3',
githubRelease: runtime.statusSnapshot.githubRelease,
dockerHubTag: null,
dockerHubRecentTags: [],
helper: runtime.statusSnapshot.helper,
runtime,
},
@@ -151,15 +153,16 @@ describe('updateCenterPollingService', () => {
lastResolvedSource: 'github-release',
lastResolvedCandidateKey: 'github-release:v1.3.0',
lastNotifiedCandidateKey: 'github-release:v1.3.0',
statusSnapshot: {
statusSnapshot: expect.objectContaining({
githubRelease: expect.objectContaining({
normalizedVersion: '1.3.0',
}),
dockerHubTag: null,
dockerHubRecentTags: [],
helper: expect.objectContaining({
imageTag: '1.2.3',
}),
},
}),
}));
await vi.advanceTimersByTimeAsync(60_000);
@@ -205,6 +208,7 @@ describe('updateCenterPollingService', () => {
publishedAt: '2026-03-31T12:01:00Z',
},
dockerHubTag: null,
dockerHubRecentTags: [],
helper: {
ok: true,
releaseName: 'metapi',
@@ -223,6 +227,7 @@ describe('updateCenterPollingService', () => {
currentVersion: '1.2.3',
githubRelease: runtime.statusSnapshot.githubRelease,
dockerHubTag: null,
dockerHubRecentTags: [],
helper: runtime.statusSnapshot.helper,
runtime,
},
@@ -251,15 +256,16 @@ describe('updateCenterPollingService', () => {
lastResolvedCandidateKey: 'github-release:v1.3.0',
lastNotifiedCandidateKey: 'github-release:v1.3.0',
lastNotifiedAt: expect.any(String),
statusSnapshot: {
statusSnapshot: expect.objectContaining({
githubRelease: expect.objectContaining({
normalizedVersion: '1.3.0',
}),
dockerHubTag: null,
dockerHubRecentTags: [],
helper: expect.objectContaining({
imageTag: '1.2.3',
}),
},
}),
}));
});
});
@@ -88,6 +88,18 @@ describe('updateCenterRuntimeStateService', () => {
displayVersion: 'latest @ sha256:efb2ee655386',
publishedAt: '2026-03-30T20:30:00Z',
},
dockerHubRecentTags: [
{
source: 'docker-hub-tag',
rawVersion: 'dev',
normalizedVersion: 'dev',
url: null,
tagName: 'dev',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
publishedAt: '2026-03-30T20:35:00Z',
},
],
helper: {
ok: true,
releaseName: 'metapi',
@@ -142,6 +154,18 @@ describe('updateCenterRuntimeStateService', () => {
displayVersion: 'latest @ sha256:efb2ee655386',
publishedAt: '2026-03-30T20:30:00Z',
},
dockerHubRecentTags: [
{
source: 'docker-hub-tag',
rawVersion: 'dev',
normalizedVersion: 'dev',
url: null,
tagName: 'dev',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
publishedAt: '2026-03-30T20:35:00Z',
},
],
helper: {
ok: true,
releaseName: 'metapi',
@@ -8,6 +8,7 @@ import type { UpdateCenterVersionCandidate, UpdateCenterVersionSource } from './
export type UpdateCenterStatusSnapshot = {
githubRelease: UpdateCenterVersionCandidate | null;
dockerHubTag: UpdateCenterVersionCandidate | null;
dockerHubRecentTags: UpdateCenterVersionCandidate[];
helper: UpdateCenterHelperStatus | null;
};
@@ -66,6 +67,13 @@ function normalizeVersionCandidate(input: unknown): UpdateCenterVersionCandidate
};
}
function normalizeVersionCandidates(input: unknown): UpdateCenterVersionCandidate[] {
  // Non-array payloads normalize to an empty list; entries that fail
  // per-candidate normalization are silently dropped.
  if (!Array.isArray(input)) return [];
  const candidates: UpdateCenterVersionCandidate[] = [];
  for (const entry of input) {
    const candidate = normalizeVersionCandidate(entry);
    if (candidate) candidates.push(candidate);
  }
  return candidates;
}
function normalizeHelperHistoryEntry(input: unknown): NonNullable<UpdateCenterHelperStatus['history']>[number] | null {
if (!input || typeof input !== 'object') return null;
const record = input as Record<string, unknown>;
@@ -109,6 +117,7 @@ function normalizeStatusSnapshot(input: unknown): UpdateCenterStatusSnapshot | n
return {
githubRelease: normalizeVersionCandidate(record.githubRelease),
dockerHubTag: normalizeVersionCandidate(record.dockerHubTag),
dockerHubRecentTags: normalizeVersionCandidates(record.dockerHubRecentTags),
helper: normalizeHelperSnapshot(record.helper),
};
}
@@ -2,9 +2,10 @@ import { config as runtimeConfig } from '../config.js';
import { formatUtcSqlDateTime } from './localTimeService.js';
import { listBackgroundTasks } from './backgroundTaskService.js';
import {
fetchLatestDockerHubTag,
fetchDockerHubTagCandidates,
fetchLatestStableGitHubRelease,
getCurrentRuntimeVersion,
type DockerHubTagCandidates,
type UpdateCenterVersionCandidate,
} from './updateCenterVersionService.js';
import {
@@ -71,6 +72,7 @@ export type UpdateCenterStatusResult = {
config: UpdateCenterConfig;
githubRelease: UpdateCenterVersionCandidate | null;
dockerHubTag: UpdateCenterVersionCandidate | null;
dockerHubRecentTags: UpdateCenterVersionCandidate[];
helper: UpdateCenterHelperStatus;
runningTask: ReturnType<typeof getDeployTasks>[number] | null;
lastFinishedTask: ReturnType<typeof getDeployTasks>[number] | null;
@@ -92,16 +94,17 @@ function buildUnavailableHelperStatus(error: string | null = null): UpdateCenter
};
}
function buildStatusSnapshot(status: Pick<UpdateCenterStatusResult, 'githubRelease' | 'dockerHubTag' | 'helper'>): UpdateCenterStatusSnapshot {
function buildStatusSnapshot(status: Pick<UpdateCenterStatusResult, 'githubRelease' | 'dockerHubTag' | 'dockerHubRecentTags' | 'helper'>): UpdateCenterStatusSnapshot {
return {
githubRelease: status.githubRelease || null,
dockerHubTag: status.dockerHubTag || null,
dockerHubRecentTags: status.dockerHubRecentTags || [],
helper: status.helper || null,
};
}
function buildNextRuntimeState(
status: Pick<UpdateCenterStatusResult, 'currentVersion' | 'githubRelease' | 'dockerHubTag' | 'helper'>,
status: Pick<UpdateCenterStatusResult, 'currentVersion' | 'githubRelease' | 'dockerHubTag' | 'dockerHubRecentTags' | 'helper'>,
previousRuntime: UpdateCenterRuntimeState,
checkedAt: string,
): { candidate: UpdateReminderCandidate | null; nextRuntime: UpdateCenterRuntimeState } {
@@ -137,6 +140,7 @@ function buildResponseFromState(config: UpdateCenterConfig, runtime: UpdateCente
config,
githubRelease: snapshot?.githubRelease || null,
dockerHubTag: snapshot?.dockerHubTag || null,
dockerHubRecentTags: snapshot?.dockerHubRecentTags || [],
helper: snapshot?.helper || buildUnavailableHelperStatus(runtime.lastCheckError),
runningTask,
lastFinishedTask,
@@ -150,7 +154,7 @@ export async function buildUpdateCenterStatus(): Promise<UpdateCenterStatusResul
const [githubLookup, dockerLookup, helperLookup, runtime] = await Promise.all([
settleOptional(config.githubReleasesEnabled, async () => await fetchLatestStableGitHubRelease()),
settleOptional(config.dockerHubTagsEnabled, async () => await fetchLatestDockerHubTag()),
settleOptional(config.dockerHubTagsEnabled, async () => await fetchDockerHubTagCandidates()),
settleOptional(!!config.helperBaseUrl, async () => {
if (!helperToken) {
throw new Error('DEPLOY_HELPER_TOKEN is required');
@@ -161,7 +165,9 @@ export async function buildUpdateCenterStatus(): Promise<UpdateCenterStatusResul
]);
const githubRelease = githubLookup.value as UpdateCenterVersionCandidate | null;
const dockerHubTag = dockerLookup.value as UpdateCenterVersionCandidate | null;
const dockerHubCandidates = dockerLookup.value as DockerHubTagCandidates | null;
const dockerHubTag = dockerHubCandidates?.primary || null;
const dockerHubRecentTags = dockerHubCandidates?.recentNonStable || [];
const helper = (helperLookup.value as UpdateCenterHelperStatus | null) || buildUnavailableHelperStatus(helperLookup.error);
const tasks = getDeployTasks();
@@ -173,6 +179,7 @@ export async function buildUpdateCenterStatus(): Promise<UpdateCenterStatusResul
config,
githubRelease,
dockerHubTag,
dockerHubRecentTags,
helper,
runningTask,
lastFinishedTask,
@@ -14,12 +14,15 @@ vi.mock('undici', async () => {
import {
compareStableSemVer,
fetchDockerHubTagCandidates,
fetchLatestDockerHubTag,
fetchLatestStableGitHubRelease,
parseStableSemVer,
resolvePreferredDeploySource,
selectDockerHubTagCandidates,
selectLatestDockerHubTag,
selectLatestStableGitHubRelease,
selectRecentNonStableDockerHubTags,
} from './updateCenterVersionService.js';
describe('update center version service', () => {
@@ -132,6 +135,84 @@ describe('update center version service', () => {
});
});
describe('selectRecentNonStableDockerHubTags', () => {
it('returns recent non-stable docker tags with dev-like tags prioritized ahead of generic branch tags', () => {
const candidates = selectRecentNonStableDockerHubTags([
{
name: 'feature-login',
digest: 'sha256:1111111111111111111111111111111111111111111111111111111111111111',
tag_last_pushed: '2026-04-17T10:00:00Z',
},
{
name: 'sha-a2c2ae6',
digest: 'sha256:2222222222222222222222222222222222222222222222222222222222222222',
tag_last_pushed: '2026-04-17T09:00:00Z',
},
{
name: 'dev',
digest: 'sha256:3333333333333333333333333333333333333333333333333333333333333333',
tag_last_pushed: '2026-04-17T08:00:00Z',
},
{
name: 'dev-20260417-f67ade2',
digest: 'sha256:4444444444444444444444444444444444444444444444444444444444444444',
tag_last_pushed: '2026-04-17T07:00:00Z',
},
{
name: 'latest',
digest: 'sha256:5555555555555555555555555555555555555555555555555555555555555555',
tag_last_pushed: '2026-04-17T11:00:00Z',
},
{
name: '1.10.0',
digest: 'sha256:6666666666666666666666666666666666666666666666666666666666666666',
tag_last_pushed: '2026-04-16T11:00:00Z',
},
]);
expect(candidates).toHaveLength(4);
expect(candidates.map((candidate) => candidate.tagName)).toEqual([
'dev',
'dev-20260417-f67ade2',
'sha-a2c2ae6',
'feature-login',
]);
expect(candidates[0]).toMatchObject({
displayVersion: 'dev @ sha256:333333333333',
});
});
});
// Verifies that a single Docker Hub tag listing is split into the stable
// "primary" candidate (the `latest` alias here) and the non-stable recent
// tags (dev-* and sha-* builds), in one call.
describe('selectDockerHubTagCandidates', () => {
  it('returns both the stable primary candidate and recent non-stable candidates from one tag list', () => {
    const candidates = selectDockerHubTagCandidates([
      {
        name: 'latest',
        digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        tag_last_pushed: '2026-04-17T12:00:00Z',
      },
      {
        name: 'dev-20260417-f67ade2',
        digest: 'sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
        tag_last_pushed: '2026-04-17T11:00:00Z',
      },
      {
        name: 'sha-f67ade2',
        digest: 'sha256:cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
        tag_last_pushed: '2026-04-17T10:00:00Z',
      },
    ]);
    // The alias tag wins the primary slot.
    expect(candidates.primary).toMatchObject({
      tagName: 'latest',
    });
    // Non-stable tags keep their dev-first, then push-recency ordering.
    expect(candidates.recentNonStable.map((candidate) => candidate.tagName)).toEqual([
      'dev-20260417-f67ade2',
      'sha-f67ade2',
    ]);
  });
});
describe('resolvePreferredDeploySource', () => {
it('prefers the configured default source when both channels have valid candidates', () => {
const preferred = resolvePreferredDeploySource({
@@ -263,4 +344,40 @@ describe('update center version service', () => {
});
});
});
describe('fetchDockerHubTagCandidates', () => {
it('returns the stable docker candidate plus recent non-stable tags from one lookup', async () => {
fetchMock.mockResolvedValue(new Response(JSON.stringify({
results: [
{
name: 'latest',
digest: 'sha256:efb2ee6553866bd3268dcc54c02fa5f9789728c51ed4af63328aaba6da67df35',
tag_last_pushed: '2026-03-29T11:54:35.591877Z',
},
{
name: 'dev',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
tag_last_pushed: '2026-03-30T11:54:35.591877Z',
},
{
name: 'sha-f67ade2',
digest: 'sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
tag_last_pushed: '2026-03-30T10:54:35.591877Z',
},
],
}), {
status: 200,
headers: { 'content-type': 'application/json' },
}));
const candidates = await fetchDockerHubTagCandidates();
expect(candidates.primary).toMatchObject({
tagName: 'latest',
});
expect(candidates.recentNonStable.map((candidate) => candidate.tagName)).toEqual([
'dev',
'sha-f67ade2',
]);
});
});
});
@@ -39,11 +39,17 @@ export type DockerHubTagRecord = {
digest?: string | null;
};
export type DockerHubTagCandidates = {
primary: UpdateCenterVersionCandidate | null;
recentNonStable: UpdateCenterVersionCandidate[];
};
const STABLE_SEMVER_PATTERN = /^v?(\d+)\.(\d+)\.(\d+)(?:\+[\w.-]+)?$/i;
const GITHUB_RELEASES_URL = 'https://api.github.com/repos/cita-777/metapi/releases';
const DOCKER_HUB_TAGS_URL = 'https://hub.docker.com/v2/repositories/1467078763/metapi/tags?page_size=100';
const UPDATE_CENTER_VERSION_FETCH_TIMEOUT_MS = 5_000;
const PREFERRED_DOCKER_HUB_TAG_ALIASES = ['latest', 'main'] as const;
const MAX_RECENT_NON_STABLE_DOCKER_HUB_TAGS = 5;
async function fetchJsonWithTimeout(url: string, init: UndiciRequestInit, timeoutLabel: string): Promise<unknown> {
const controller = new AbortController();
@@ -133,6 +139,21 @@ function normalizeDockerHubTagRecord(input: string | DockerHubTagRecord): Docker
return input;
}
// Normalize a possibly-missing Docker Hub tag name to a trimmed string
// (empty string when absent).
function normalizeDockerHubTagName(input: string | null | undefined): string {
  const raw = input ?? '';
  return String(raw).trim();
}
// True when the tag is one of the preferred alias names ('latest' / 'main').
function isPreferredDockerHubAlias(input: string | null | undefined): boolean {
  const tag = normalizeDockerHubTagName(input);
  return PREFERRED_DOCKER_HUB_TAG_ALIASES.some((alias) => alias === tag);
}
// A tag is "stable" when it is a preferred alias or parses as stable SemVer.
function isStableDockerHubTag(input: string | null | undefined): boolean {
  const tag = normalizeDockerHubTagName(input);
  if (!tag) return false;
  if (isPreferredDockerHubAlias(tag)) return true;
  return Boolean(parseStableSemVer(tag));
}
function normalizeDockerDigest(input: string | null | undefined): string | null {
const digest = String(input || '').trim();
return /^sha256:[a-f0-9]{64}$/i.test(digest) ? digest.toLowerCase() : null;
@@ -143,6 +164,22 @@ function getDockerHubTagPublishedAt(record: DockerHubTagRecord): string | null {
return value || null;
}
// Push time as epoch millis; NEGATIVE_INFINITY sorts unknown/unparsable
// timestamps behind every real one.
function getDockerHubTagPublishedTimestamp(record: DockerHubTagRecord): number {
  const publishedAt = getDockerHubTagPublishedAt(record);
  if (publishedAt) {
    const parsed = Date.parse(publishedAt);
    if (Number.isFinite(parsed)) return parsed;
  }
  return Number.NEGATIVE_INFINITY;
}
// Sort weight for non-stable tags: 'dev' first, then 'dev-*' builds, then
// 'sha-*' commit tags, then everything else; 99 for blank names.
function getRecentNonStableDockerHubPriority(input: string | null | undefined): number {
  const tag = normalizeDockerHubTagName(input).toLowerCase();
  if (!tag) return 99;
  if (tag === 'dev') return 0;
  if (tag.startsWith('dev-')) return 1;
  return tag.startsWith('sha-') ? 2 : 3;
}
function toShortDigest(digest: string | null | undefined): string | null {
if (!digest) return null;
return digest.slice(0, 'sha256:'.length + 12);
@@ -194,6 +231,44 @@ export function selectLatestDockerHubTag(tags: Array<string | DockerHubTagRecord
return buildDockerHubVersionCandidate(selected.record, selected.semver.normalized);
}
/**
 * Pick the most interesting recent non-stable Docker Hub tags.
 *
 * Stable tags (aliases and SemVer) are excluded; duplicate tag names keep the
 * most recently pushed record. Results are ordered dev-style first, then by
 * push time (newest first), then by name, truncated to `limit`, and mapped to
 * version candidates (records that fail candidate building are dropped).
 */
export function selectRecentNonStableDockerHubTags(
  tags: Array<string | DockerHubTagRecord>,
  limit = MAX_RECENT_NON_STABLE_DOCKER_HUB_TAGS,
): UpdateCenterVersionCandidate[] {
  // Keep one record per non-stable tag name, preferring the newest push.
  const newestByTag = new Map<string, DockerHubTagRecord>();
  for (const tag of tags) {
    const record = normalizeDockerHubTagRecord(tag);
    const tagName = normalizeDockerHubTagName(record.name);
    if (!tagName || isStableDockerHubTag(record.name)) continue;
    const existing = newestByTag.get(tagName);
    if (!existing || getDockerHubTagPublishedTimestamp(record) > getDockerHubTagPublishedTimestamp(existing)) {
      newestByTag.set(tagName, record);
    }
  }
  const ordered = [...newestByTag.values()].sort((left, right) => {
    const byPriority = getRecentNonStableDockerHubPriority(left.name) - getRecentNonStableDockerHubPriority(right.name);
    if (byPriority !== 0) return byPriority;
    const byPublished = getDockerHubTagPublishedTimestamp(right) - getDockerHubTagPublishedTimestamp(left);
    if (byPublished !== 0) return byPublished;
    return normalizeDockerHubTagName(left.name).localeCompare(normalizeDockerHubTagName(right.name));
  });
  const candidates: UpdateCenterVersionCandidate[] = [];
  for (const record of ordered.slice(0, Math.max(0, limit))) {
    const candidate = buildDockerHubVersionCandidate(record, normalizeDockerHubTagName(record.name));
    if (candidate) candidates.push(candidate);
  }
  return candidates;
}
/** Split one tag listing into the stable primary plus recent non-stable tags. */
export function selectDockerHubTagCandidates(tags: Array<string | DockerHubTagRecord>): DockerHubTagCandidates {
  const primary = selectLatestDockerHubTag(tags);
  const recentNonStable = selectRecentNonStableDockerHubTags(tags);
  return { primary, recentNonStable };
}
export function resolvePreferredDeploySource(input: {
defaultSource: UpdateCenterVersionSource;
githubRelease: UpdateCenterVersionCandidate | null;
@@ -216,13 +291,17 @@ export async function fetchLatestStableGitHubRelease(): Promise<UpdateCenterVers
}
/** Back-compat wrapper: surface only the stable primary Docker Hub candidate. */
export async function fetchLatestDockerHubTag(): Promise<UpdateCenterVersionCandidate | null> {
  const { primary } = await fetchDockerHubTagCandidates();
  return primary;
}
/**
 * Fetch the repository tag listing from Docker Hub and split it into the
 * stable primary candidate plus recent non-stable tags in a single lookup.
 * A missing/non-array `results` payload yields empty candidates rather than
 * throwing.
 */
export async function fetchDockerHubTagCandidates(): Promise<DockerHubTagCandidates> {
  const payload = await fetchJsonWithTimeout(DOCKER_HUB_TAGS_URL, {
    headers: {
      accept: 'application/json',
      'user-agent': 'metapi-update-center/1.0',
    },
  }, 'Docker Hub tag lookup') as { results?: DockerHubTagRecord[] };
  // NOTE: a stale `return selectLatestDockerHubTag(...)` preceded this line in
  // the previous revision, returning only the primary candidate and making the
  // candidates return unreachable; only the combined selection is returned now.
  return selectDockerHubTagCandidates(Array.isArray(payload?.results) ? payload.results : []);
}
export function getCurrentRuntimeVersion(): string {
@@ -0,0 +1,93 @@
import { describe, expect, it } from 'vitest';
import { convertOpenAiBodyToResponsesBody } from '../../transformers/openai/responses/conversion.js';
import {
convertAnthropicToolsToOpenAi,
convertOpenAiBodyToAnthropicMessagesBody,
convertOpenAiToolsToAnthropic,
} from '../../transformers/anthropic/messages/conversion.js';
import { convertClaudeRequestToOpenAiBody } from '../../transformers/shared/chatFormatsCore.js';
import { validateExternalResponsesHttpRequest } from '../../proxy-core/responsesPreflight.js';
import { applyOpenAiServiceTierPolicy } from '../../proxy-core/serviceTierPolicy.js';
describe('non-cache cross-protocol field matrix', () => {
it('audits web_search, legacy functions/function_call, continuation diagnostics, function outputs and service_tier', () => {
const legacyChatToResponses = convertOpenAiBodyToResponsesBody(
{
model: 'gpt-5',
messages: [
{ role: 'user', content: 'hello' },
{ role: 'function', name: 'legacy_tool', content: 'done' },
],
functions: [{ name: 'legacy_tool', parameters: { type: 'object' } }],
function_call: { name: 'legacy_tool' },
},
'gpt-5',
false,
);
expect(legacyChatToResponses).toMatchObject({
tools: [{ type: 'function', name: 'legacy_tool' }],
tool_choice: { type: 'function', name: 'legacy_tool' },
});
expect((legacyChatToResponses.input as any[]).some((item) => item.type === 'function_call_output')).toBe(true);
expect(convertOpenAiToolsToAnthropic([
{ type: 'web_search' },
{ type: 'google_search' },
])).toEqual([
{ type: 'web_search_20250305', name: 'web_search' },
{ type: 'web_search_20250305', name: 'web_search' },
]);
expect(convertAnthropicToolsToOpenAi([
{ type: 'web_search_20250305' },
])).toEqual([
{ type: 'web_search', name: 'web_search' },
]);
const claudeToChat = convertClaudeRequestToOpenAiBody({
model: 'claude-opus',
max_tokens: 256,
tools: [{ type: 'web_search_20250305' }],
messages: [{ role: 'user', content: 'search' }],
});
expect(claudeToChat.payload.tools).toEqual([{ type: 'web_search', name: 'web_search' }]);
const openAiToClaude = convertOpenAiBodyToAnthropicMessagesBody(
{
model: 'gpt-5',
messages: [{ role: 'user', content: 'search' }],
tools: [{ type: 'web_search' }],
},
'claude-opus',
false,
);
expect(openAiToClaude.tools).toEqual([{ type: 'web_search_20250305', name: 'web_search' }]);
const previousResponseDiagnostic = validateExternalResponsesHttpRequest({
model: 'gpt-5',
previous_response_id: 'msg_wrong',
input: 'hello',
});
expect(previousResponseDiagnostic.ok).toBe(false);
if (!previousResponseDiagnostic.ok) {
expect(previousResponseDiagnostic.payload.error.message).toContain('msg_*');
}
const functionOutputDiagnostic = validateExternalResponsesHttpRequest({
model: 'gpt-5',
input: [{ type: 'function_call_output', call_id: 'call_missing', output: 'done' }],
});
expect(functionOutputDiagnostic.ok).toBe(false);
if (!functionOutputDiagnostic.ok) {
expect(functionOutputDiagnostic.payload.error.message).toContain('Responses WebSocket v2');
}
expect(applyOpenAiServiceTierPolicy({
body: { model: 'gpt-5', service_tier: 'fast' },
})).toMatchObject({
ok: true,
body: { model: 'gpt-5', service_tier: 'priority' },
});
});
});
@@ -1,8 +1,10 @@
import { describe, expect, it } from 'vitest';
import {
convertAnthropicToolsToOpenAi,
convertOpenAiBodyToAnthropicMessagesBody,
convertOpenAiToolChoiceToAnthropic,
convertOpenAiToolsToAnthropic,
sanitizeAnthropicMessagesBody,
} from './conversion.js';
import { anthropicMessagesInbound } from './inbound.js';
@@ -29,6 +31,24 @@ describe('sanitizeAnthropicMessagesBody', () => {
});
});
  // Round-trips web_search server tools across the OpenAI <-> Anthropic tool
  // schema boundary: OpenAI 'web_search'/'google_search' both normalize to
  // Anthropic 'web_search_20250305' (extra fields preserved), and plain
  // function tools pass through unchanged in the reverse direction.
  it('maps web_search server tools between OpenAI-compatible and Anthropic Messages requests', () => {
    expect(convertOpenAiToolsToAnthropic([
      { type: 'web_search', max_results: 3 },
      { type: 'google_search' },
    ])).toEqual([
      { type: 'web_search_20250305', max_results: 3, name: 'web_search' },
      { type: 'web_search_20250305', name: 'web_search' },
    ]);
    expect(convertAnthropicToolsToOpenAi([
      { type: 'web_search_20250305', max_uses: 2 },
      { name: 'lookup_weather', input_schema: { type: 'object' } },
    ])).toEqual([
      { type: 'web_search', max_uses: 2, name: 'web_search' },
      { name: 'lookup_weather', input_schema: { type: 'object' } },
    ]);
  });
it('normalizes string system and message content before rebuilding cache anchors', () => {
const result = sanitizeAnthropicMessagesBody({
model: 'claude-opus-4-6',
@@ -12,6 +12,11 @@ const VALID_ANTHROPIC_THINKING_TYPES = new Set(['enabled', 'disabled', 'adaptive
const VALID_ANTHROPIC_EFFORTS = new Set(['low', 'medium', 'high', 'max']);
const MAX_ANTHROPIC_CACHE_CONTROL_BREAKPOINTS = 4;
const ADAPTIVE_ANTHROPIC_CACHE_CONTROL_BLOCK_WINDOW = 20;
const ANTHROPIC_WEB_SEARCH_TOOL_TYPES = new Set([
'web_search',
'web_search_20250305',
'google_search',
]);
function asTrimmedString(value: unknown): string {
return typeof value === 'string' ? value.trim() : '';
@@ -181,6 +186,20 @@ function normalizeOpenAiToolArguments(raw: unknown): string {
return '';
}
// Detect web-search server tools by known type values or conventional names.
function isAnthropicWebSearchTool(value: Record<string, unknown>): boolean {
  const type = asTrimmedString(value.type).toLowerCase();
  if (ANTHROPIC_WEB_SEARCH_TOOL_TYPES.has(type)) return true;
  const name = asTrimmedString(value.name).toLowerCase();
  return name === 'web_search' || name === 'google_search';
}
// Rewrite any web-search-flavored tool record to the OpenAI-compatible
// 'web_search' server tool, preserving all other fields. Every recognized
// variant (web_search_20250305, google_search, ...) maps to the same target,
// so no per-type branching is needed (the previous ternary had identical arms).
function convertAnthropicWebSearchToolToOpenAi(value: Record<string, unknown>): Record<string, unknown> {
  return { ...value, type: 'web_search', name: 'web_search' };
}
export function normalizeAnthropicToolInput(raw: unknown): unknown {
if (raw === undefined || raw === null) return {};
if (isRecord(raw) || Array.isArray(raw)) return raw;
@@ -802,6 +821,14 @@ export function convertOpenAiToolsToAnthropic(rawTools: unknown): unknown {
if (!isRecord(item)) return null;
const type = asTrimmedString(item.type).toLowerCase();
if (isAnthropicWebSearchTool(item)) {
return {
...item,
type: 'web_search_20250305',
name: 'web_search',
};
}
if (type === 'function' && isRecord(item.function)) {
const fn = item.function;
const name = asTrimmedString(fn.name);
@@ -826,6 +853,16 @@ export function convertOpenAiToolsToAnthropic(rawTools: unknown): unknown {
return converted.length > 0 ? converted : rawTools;
}
/**
 * Convert Anthropic Messages tools to OpenAI-compatible tools. Only
 * web-search server tools are rewritten; every other entry (including
 * non-object entries) passes through untouched. Non-array input is returned
 * as-is.
 */
export function convertAnthropicToolsToOpenAi(rawTools: unknown): unknown {
  if (!Array.isArray(rawTools)) return rawTools;
  const converted: unknown[] = [];
  for (const item of rawTools) {
    if (isRecord(item) && isAnthropicWebSearchTool(item)) {
      converted.push(convertAnthropicWebSearchToolToOpenAi(item));
    } else {
      converted.push(item);
    }
  }
  return converted;
}
export function convertOpenAiToolChoiceToAnthropic(rawToolChoice: unknown): unknown {
if (rawToolChoice === undefined) return undefined;
@@ -1059,6 +1059,79 @@ describe('convertOpenAiBodyToResponsesBody', () => {
});
});
it('maps legacy chat functions, function_call and role:function messages into Responses request fields', () => {
const result = convertOpenAiBodyToResponsesBody(
{
model: 'gpt-5',
messages: [
{ role: 'user', content: 'lookup weather' },
{ role: 'function', name: 'legacy_weather', content: '{"temp":21}' },
],
functions: [
{
name: 'legacy_weather',
description: 'Lookup weather',
parameters: {
type: 'object',
properties: { city: { type: 'string' } },
},
},
],
function_call: { name: 'legacy_weather' },
},
'gpt-5',
false,
);
expect(result.tools).toEqual([
{
type: 'function',
name: 'legacy_weather',
description: 'Lookup weather',
parameters: {
type: 'object',
properties: { city: { type: 'string' } },
},
},
]);
expect(result.tool_choice).toEqual({
type: 'function',
name: 'legacy_weather',
});
expect(result.input).toEqual([
{
type: 'message',
role: 'user',
content: [{ type: 'input_text', text: 'lookup weather' }],
},
{
type: 'function_call_output',
call_id: 'legacy_weather',
output: '{"temp":21}',
},
]);
});
  // Server-side search tools survive the chat -> Responses body conversion:
  // 'web_search_preview' keeps its specific type while 'google_search' is
  // normalized to 'web_search'; both gain the canonical 'web_search' name and
  // keep their extra options.
  it('preserves OpenAI web_search and google_search server tools in Responses request bodies', () => {
    const result = convertOpenAiBodyToResponsesBody(
      {
        model: 'gpt-5',
        messages: [{ role: 'user', content: 'search docs' }],
        tools: [
          { type: 'web_search_preview', search_context_size: 'low' },
          { type: 'google_search', max_results: 3 },
        ],
      },
      'gpt-5',
      false,
    );
    expect(result.tools).toEqual([
      { type: 'web_search_preview', search_context_size: 'low', name: 'web_search' },
      { type: 'web_search', max_results: 3, name: 'web_search' },
    ]);
  });
it('drops function tools with blank names when converting OpenAI-compatible input into Responses bodies', () => {
const result = convertOpenAiBodyToResponsesBody(
{
@@ -71,6 +71,19 @@ function cloneJsonValue<T>(value: T): T {
return value;
}
const OPENAI_WEB_SEARCH_TOOL_TYPES = new Set([
'web_search',
'web_search_preview',
'web_search_preview_2025_03_11',
'google_search',
]);
// Recognize OpenAI server-side search tools by type or conventional name.
function isOpenAiWebSearchToolRecord(item: Record<string, unknown>): boolean {
  if (OPENAI_WEB_SEARCH_TOOL_TYPES.has(asTrimmedString(item.type).toLowerCase())) return true;
  const name = asTrimmedString(item.name).toLowerCase();
  return name === 'web_search' || name === 'google_search';
}
function normalizeOptionalTrimmedString(value: unknown): string | undefined {
const trimmed = asTrimmedString(value);
return trimmed || undefined;
@@ -287,11 +300,22 @@ function collectOpenAiToolNames(body: Record<string, unknown>): string[] {
}
}
const rawFunctions = Array.isArray(body.functions) ? body.functions : [];
for (const item of rawFunctions) {
if (!isRecord(item)) continue;
pushName(item.name);
}
const toolChoice = isRecord(body.tool_choice) ? body.tool_choice : null;
if (toolChoice && asTrimmedString(toolChoice.type).toLowerCase() === 'function') {
pushName(isRecord(toolChoice.function) ? toolChoice.function.name : toolChoice.name);
}
const legacyFunctionCall = isRecord(body.function_call) ? body.function_call : null;
if (legacyFunctionCall) {
pushName(legacyFunctionCall.name);
}
const rawMessages = Array.isArray(body.messages) ? body.messages : [];
for (const message of rawMessages) {
if (!isRecord(message) || asTrimmedString(message.role).toLowerCase() !== 'assistant') continue;
@@ -344,6 +368,14 @@ function convertOpenAiToolsToResponses(
return item;
}
if (isOpenAiWebSearchToolRecord(item)) {
return {
...item,
type: type === 'google_search' ? 'web_search' : (type || 'web_search'),
name: asTrimmedString(item.name) || 'web_search',
};
}
if (type === 'custom' && asTrimmedString(item.name)) {
return item;
}
@@ -355,6 +387,43 @@ function convertOpenAiToolsToResponses(
return converted;
}
/**
 * Convert legacy chat-completions `functions` entries into Responses function
 * tools. Non-object entries and functions without a name are dropped;
 * description/parameters/strict carry over when present, and names go through
 * the shared short-name mapping.
 */
function convertOpenAiFunctionsToResponses(
  rawFunctions: unknown,
  toolNameMap: Record<string, string>,
): Array<Record<string, unknown>> {
  if (!Array.isArray(rawFunctions)) return [];
  const converted: Array<Record<string, unknown>> = [];
  for (const entry of rawFunctions) {
    if (!isRecord(entry)) continue;
    const name = asTrimmedString(entry.name);
    if (!name) continue;
    const tool: Record<string, unknown> = {
      type: 'function',
      name: getShortToolName(name, toolNameMap),
    };
    const description = asTrimmedString(entry.description);
    if (description) tool.description = description;
    if (entry.parameters !== undefined) tool.parameters = entry.parameters;
    if (entry.strict !== undefined) tool.strict = entry.strict;
    converted.push(tool);
  }
  return converted;
}
/**
 * Merge converted `tools` and legacy `functions` into one Responses tool list.
 * When neither channel yields any tools, the raw tools-converter result is
 * returned so callers keep whatever passthrough value it chose.
 */
function mergeOpenAiToolsAndFunctionsToResponses(
  rawTools: unknown,
  rawFunctions: unknown,
  toolNameMap: Record<string, string>,
): unknown {
  const convertedTools = convertOpenAiToolsToResponses(rawTools, toolNameMap);
  const functionTools = convertOpenAiFunctionsToResponses(rawFunctions, toolNameMap);
  const baseTools = Array.isArray(convertedTools) ? convertedTools : [];
  if (baseTools.length === 0 && functionTools.length === 0) return convertedTools;
  return baseTools.concat(functionTools);
}
function convertOpenAiToolChoiceToResponses(
rawToolChoice: unknown,
toolNameMap: Record<string, string>,
@@ -373,6 +442,26 @@ function convertOpenAiToolChoiceToResponses(
return rawToolChoice;
}
/**
 * Map the legacy `function_call` field onto a Responses `tool_choice`.
 * String modes ('none'/'auto'/'required') normalize case; other strings and
 * non-object values pass through. Object form with a name becomes a function
 * tool_choice; a nameless object degrades to 'required'.
 */
function convertOpenAiFunctionCallToResponsesToolChoice(
  rawFunctionCall: unknown,
  toolNameMap: Record<string, string>,
): unknown {
  if (rawFunctionCall === undefined) return undefined;
  if (typeof rawFunctionCall === 'string') {
    const mode = rawFunctionCall.trim().toLowerCase();
    return (mode === 'none' || mode === 'auto' || mode === 'required') ? mode : rawFunctionCall;
  }
  if (!isRecord(rawFunctionCall)) return rawFunctionCall;
  const name = asTrimmedString(rawFunctionCall.name);
  if (!name) return 'required';
  return { type: 'function', name: getShortToolName(name, toolNameMap) };
}
function normalizeResponsesBodyForCompatibility(
body: Record<string, unknown>,
): Record<string, unknown> {
@@ -596,6 +685,23 @@ export function convertOpenAiBodyToResponsesBody(
continue;
}
if (role === 'function') {
const callId = asTrimmedString(item.name) || asTrimmedString(item.id);
if (!callId) continue;
const output = normalizeToolOutput(item.content);
inputItems.push({
type: 'function_call_output',
call_id: callId,
output: (
(typeof output === 'string' && output === '')
|| (Array.isArray(output) && output.length === 0)
)
? '(empty)'
: output,
});
continue;
}
const normalizedContent = normalizeResponsesMessageContent('user', item.content);
if (normalizedContent.length <= 0) continue;
inputItems.push({
@@ -639,7 +745,13 @@ export function convertOpenAiBodyToResponsesBody(
if (openaiBody.reasoning_budget !== undefined) body.reasoning_budget = openaiBody.reasoning_budget;
if (openaiBody.reasoning_summary !== undefined) body.reasoning_summary = openaiBody.reasoning_summary;
if (openaiBody.parallel_tool_calls !== undefined) body.parallel_tool_calls = openaiBody.parallel_tool_calls;
if (openaiBody.tools !== undefined) body.tools = convertOpenAiToolsToResponses(openaiBody.tools, toolNameMap);
if (openaiBody.tools !== undefined || openaiBody.functions !== undefined) {
body.tools = mergeOpenAiToolsAndFunctionsToResponses(
openaiBody.tools,
openaiBody.functions,
toolNameMap,
);
}
if (openaiBody.safety_identifier !== undefined) body.safety_identifier = openaiBody.safety_identifier;
if (openaiBody.max_tool_calls !== undefined) body.max_tool_calls = openaiBody.max_tool_calls;
if (openaiBody.prompt_cache_key !== undefined) body.prompt_cache_key = openaiBody.prompt_cache_key;
@@ -665,7 +777,9 @@ export function convertOpenAiBodyToResponsesBody(
body.text = textConfig;
}
const responsesToolChoice = convertOpenAiToolChoiceToResponses(openaiBody.tool_choice, toolNameMap);
const responsesToolChoice = openaiBody.tool_choice !== undefined
? convertOpenAiToolChoiceToResponses(openaiBody.tool_choice, toolNameMap)
: convertOpenAiFunctionCallToResponsesToolChoice(openaiBody.function_call, toolNameMap);
if (responsesToolChoice !== undefined) body.tool_choice = responsesToolChoice;
if (Array.isArray(body.tools) && body.tools.length === 0) {
delete body.tool_choice;
@@ -973,11 +973,25 @@ function convertClaudeToolsToOpenAiChat(rawTools: unknown): unknown {
if (!isRecord(item)) return item;
const type = asTrimmedString(item.type).toLowerCase();
const name = asTrimmedString(item.name);
if (
type === 'web_search'
|| type === 'web_search_20250305'
|| type === 'google_search'
|| name === 'web_search'
|| name === 'google_search'
) {
return {
...item,
type: 'web_search',
name: 'web_search',
};
}
if (type === 'function' || type === 'custom' || type === 'image_generation') {
return item;
}
const name = asTrimmedString(item.name);
if (!name) return item;
return {
@@ -73,6 +73,22 @@ describe('UpdateCenterSection', () => {
displayVersion: 'latest @ sha256:efb2ee655386',
publishedAt: '2026-03-29T11:54:35.591877Z',
},
dockerHubRecentTags: [
{
normalizedVersion: 'dev',
tagName: 'dev',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
publishedAt: '2026-03-30T11:54:35.591877Z',
},
{
normalizedVersion: 'dev-20260417-f67ade2',
tagName: 'dev-20260417-f67ade2',
digest: 'sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
displayVersion: 'dev-20260417-f67ade2 @ sha256:bbbbbbbbbbbb',
publishedAt: '2026-03-30T10:54:35.591877Z',
},
],
helper: {
ok: true,
healthy: true,
@@ -159,6 +175,15 @@ describe('UpdateCenterSection', () => {
displayVersion: 'latest @ sha256:efb2ee655386',
publishedAt: '2026-03-29T11:54:35.591877Z',
},
dockerHubRecentTags: [
{
normalizedVersion: 'dev',
tagName: 'dev',
digest: 'sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
displayVersion: 'dev @ sha256:aaaaaaaaaaaa',
publishedAt: '2026-03-30T11:54:35.591877Z',
},
],
helper: {
ok: true,
healthy: true,
@@ -431,6 +456,50 @@ describe('UpdateCenterSection', () => {
}
});
it('deploys auto-discovered recent non-stable Docker Hub tags without manual input', async () => {
let root!: ReactTestRenderer;
try {
await act(async () => {
root = create(
<MemoryRouter>
<ToastProvider>
<UpdateCenterSection />
</ToastProvider>
</MemoryRouter>,
);
});
await flushMicrotasks();
const recentTagCard = root.root.find((node) => (
typeof node.props?.style === 'object'
&& collectText(node).includes('dev-20260417-f67ade2 @ sha256:bbbbbbbbbbbb')
));
expect(recentTagCard).toBeTruthy();
const deployRecentButton = root.root.find((node) => (
node.type === 'button'
&& typeof node.props.onClick === 'function'
&& collectText(node).includes('部署 dev-20260417-f67ade2')
));
expect(deployRecentButton.props.disabled).toBe(false);
await act(async () => {
await deployRecentButton.props.onClick();
});
await flushMicrotasks();
expect(apiMock.deployUpdateCenter).toHaveBeenCalledWith({
source: 'docker-hub-tag',
targetTag: 'dev-20260417-f67ade2',
targetDigest: 'sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
});
expect(apiMock.streamUpdateCenterTaskLogs).toHaveBeenCalledWith('task-1', expect.any(Object));
} finally {
root?.unmount();
}
});
it('keeps rollback history compact and opens the full revision list in a centered modal', async () => {
let root!: ReactTestRenderer;
try {
+90 -3
View File
@@ -39,6 +39,13 @@ type UpdateCenterStatus = {
digest?: string | null;
publishedAt?: string | null;
} | null;
dockerHubRecentTags?: Array<{
normalizedVersion?: string;
displayVersion?: string;
tagName?: string;
digest?: string | null;
publishedAt?: string | null;
}> | null;
helper?: {
ok?: boolean;
healthy?: boolean;
@@ -214,6 +221,11 @@ function formatImageTarget(tag?: string | null, digest?: string | null) {
return '';
}
/**
 * Reduces the raw `dockerHubRecentTags` payload to entries that carry a usable
 * identity: a truthy `tagName`, or failing that a truthy `normalizedVersion`,
 * that is still non-empty after trimming whitespace.
 *
 * Anything that is not an array (missing, null, or malformed status payload)
 * yields an empty list so callers can render unconditionally.
 */
function normalizeRecentDockerCandidates(input?: UpdateCenterStatus['dockerHubRecentTags'] | null) {
  if (!Array.isArray(input)) return [];
  return input.filter((entry) => {
    // `||` (not `??`) on purpose: an empty-string tagName falls through to
    // normalizedVersion, matching the display fallback used elsewhere.
    const identity = String(entry?.tagName || entry?.normalizedVersion || '');
    return identity.trim().length > 0;
  });
}
export default function UpdateCenterSection() {
const toast = useToast();
const isMobile = useIsMobile();
@@ -432,6 +444,11 @@ export default function UpdateCenterSection() {
const canDeployDocker = !deploying && dockerDeployState.canDeploy;
const manualDockerTag = String(manualDockerTarget.tag || '').trim();
const manualDockerDigest = String(manualDockerTarget.digest || '').trim();
const recentDockerCandidates = normalizeRecentDockerCandidates(status?.dockerHubRecentTags);
const canDeployDockerByTag = !deploying
&& config.enabled
&& config.dockerHubTagsEnabled
&& helperHealthy;
const canDeployManualDocker = !deploying
&& config.enabled
&& config.dockerHubTagsEnabled
@@ -570,7 +587,7 @@ export default function UpdateCenterSection() {
/>
<span style={{ display: 'grid', gap: 4 }}>
<span style={{ fontSize: 13, fontWeight: 600, color: 'var(--color-text-primary)' }}>Docker Hub</span>
<span style={fieldHintStyle}>dev / / </span>
<span style={fieldHintStyle}> dev / / sha </span>
</span>
</label>
</div>
@@ -731,7 +748,7 @@ export default function UpdateCenterSection() {
) : null}
</div>
</div>
<div style={{ ...fieldHintStyle, marginBottom: 10 }}> latest / main / SemVerdev / / sha </div>
<div style={{ ...fieldHintStyle, marginBottom: 10 }}> latest / main / SemVer dev / / sha </div>
<div className={dockerDeployState.highlight ? 'stat-value-glow' : ''} style={{ ...summaryValueStyle, fontFamily: 'var(--font-mono)', marginBottom: 8 }}>
{status?.dockerHubTag?.displayVersion || status?.dockerHubTag?.normalizedVersion || '未发现'}
</div>
@@ -773,12 +790,82 @@ export default function UpdateCenterSection() {
</button>
) : null}
</div>
<div style={{ borderTop: '1px dashed var(--color-border-light)', marginTop: 4, paddingTop: 12, marginBottom: 12 }}>
<div style={{ fontSize: 12, color: 'var(--color-text-primary)', fontWeight: 600, marginBottom: 6 }}>
Docker
</div>
<div style={{ ...fieldHintStyle, marginBottom: 10 }}>
dev / / sha 使 tag digest
</div>
{recentDockerCandidates.length ? (
<div style={{ display: 'grid', gap: 8 }}>
{recentDockerCandidates.map((candidate) => {
const candidateTag = String(candidate.tagName || candidate.normalizedVersion || '').trim();
const candidateDigest = String(candidate.digest || '').trim();
const candidateLabel = candidate.displayVersion || candidate.normalizedVersion || candidateTag;
return (
<div
key={`${candidateTag}:${candidateDigest || 'no-digest'}`}
style={{
border: '1px solid var(--color-border-light)',
borderRadius: 'var(--radius-sm)',
padding: 10,
display: 'grid',
gap: 8,
}}
>
<div style={{ ...summaryValueStyle, fontFamily: 'var(--font-mono)', marginBottom: 0 }}>
{candidateLabel}
</div>
<div style={fieldHintStyle}>
{formatTaskTime(candidate.publishedAt)}
</div>
<div style={{ display: 'flex', gap: 8, flexWrap: 'wrap' }}>
<button
type="button"
className="btn btn-ghost"
style={{ border: '1px solid var(--color-border)' }}
disabled={!canDeployDockerByTag || !candidateTag}
onClick={() => {
if (!canDeployDockerByTag || !candidateTag) return;
void runDeploy('docker-hub-tag', {
tag: candidateTag,
digest: candidateDigest || null,
});
}}
>
{candidateTag}
</button>
<button
type="button"
className="btn btn-ghost"
style={{ border: '1px solid var(--color-border)' }}
onClick={() => {
setManualDockerTarget({
tag: candidateTag,
digest: candidateDigest,
});
}}
>
</button>
</div>
</div>
);
})}
</div>
) : (
<div style={fieldHintStyle}>
dev / / sha Docker
</div>
)}
</div>
<div style={{ borderTop: '1px dashed var(--color-border-light)', marginTop: 4, paddingTop: 12 }}>
<div style={{ fontSize: 12, color: 'var(--color-text-primary)', fontWeight: 600, marginBottom: 6 }}>
Docker Hub
</div>
<div style={{ ...fieldHintStyle, marginBottom: 10 }}>
devfeature sha
tag
</div>
<div
style={{