Tighten alias and header edge-cases called out in review
The latest review pass found a few more concrete edge-cases that were still cheap to close: invalid /responses aliases could still rewrite typos, websocket session headers should prefer proxy-safe dashed forms, contributor rendering needed an input guard, and several upstream request/response helpers still had small contract mismatches around Accept/beta handling and synthesized tool ids. These fixes keep the release branch behavior explicit while the PR remains open. Constraint: Keep changes surgical and well-covered so the review cleanup does not destabilize the already-green release branch Rejected: Leave these for post-merge cleanup | they were still active review threads with straightforward fixes Confidence: high Scope-risk: narrow Directive: When alias routes exist only for ergonomics, reject unknown subpaths loudly instead of silently rewriting them Tested: Focused vitest for contributor updater, route aliases, upstream request builder, openai responses response bridge; typecheck; git diff --check Not-tested: Full npm test; the remaining Gemini review thread that likely needs a broader canonical-tool-result contract decision
This commit is contained in:
@@ -110,6 +110,10 @@ describe('README contributors updater', () => {
|
||||
expect((html.match(/\n <a href=/g) || []).length).toBe(2);
|
||||
});
|
||||
|
||||
it('rejects invalid per-line values instead of looping forever', () => {
|
||||
expect(() => renderContributorsBlock([], 0)).toThrow('perLine must be a positive integer');
|
||||
});
|
||||
|
||||
it('parses concatenated paginated gh api arrays without requiring newlines between pages', () => {
|
||||
expect(parsePaginatedJson('[{"login":"a"}][{"login":"b"}]')).toEqual([
|
||||
{ login: 'a' },
|
||||
|
||||
@@ -64,6 +64,10 @@ export function renderContributorsBlock(
|
||||
contributors: ReadmeContributor[],
|
||||
perLine = PER_LINE,
|
||||
): string {
|
||||
if (!Number.isInteger(perLine) || perLine <= 0) {
|
||||
throw new RangeError('perLine must be a positive integer');
|
||||
}
|
||||
|
||||
if (contributors.length === 0) {
|
||||
return '<p align="left">\n <sub>No public contributors found yet.</sub>\n</p>';
|
||||
}
|
||||
|
||||
@@ -4,12 +4,22 @@ import { ensureResponsesWebsocketTransport } from './responsesWebsocket.js';
|
||||
|
||||
function resolveAliasedResponsesDownstreamPath(
|
||||
request: FastifyRequest,
|
||||
): '/v1/responses' | '/v1/responses/compact' {
|
||||
): '/v1/responses' | '/v1/responses/compact' | null {
|
||||
const rawUrl = request.raw.url || request.url || '';
|
||||
const pathname = rawUrl.split('?')[0] || rawUrl;
|
||||
if (pathname === '/responses') return '/v1/responses';
|
||||
return pathname.endsWith('/compact')
|
||||
? '/v1/responses/compact'
|
||||
: '/v1/responses';
|
||||
: null;
|
||||
}
|
||||
|
||||
async function replyUnsupportedAliasedResponsesPath(reply: FastifyReply) {
|
||||
return reply.code(404).send({
|
||||
error: {
|
||||
message: 'Unknown /responses alias path',
|
||||
type: 'invalid_request_error',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export async function responsesProxyRoute(app: FastifyInstance) {
|
||||
@@ -29,8 +39,13 @@ export async function responsesProxyRoute(app: FastifyInstance) {
|
||||
|
||||
app.post('/responses', async (request: FastifyRequest, reply: FastifyReply) =>
|
||||
handleOpenAiResponsesSurfaceRequest(request, reply, '/v1/responses'));
|
||||
-  app.post('/responses/*', async (request: FastifyRequest, reply: FastifyReply) =>
-    handleOpenAiResponsesSurfaceRequest(request, reply, resolveAliasedResponsesDownstreamPath(request)));
+  app.post('/responses/*', async (request: FastifyRequest, reply: FastifyReply) => {
+    const downstreamPath = resolveAliasedResponsesDownstreamPath(request);
+    if (!downstreamPath) {
+      return replyUnsupportedAliasedResponsesPath(reply);
+    }
+    return handleOpenAiResponsesSurfaceRequest(request, reply, downstreamPath);
+  });
|
||||
app.get('/responses', async (_request: FastifyRequest, reply: FastifyReply) =>
|
||||
reply.code(426).send({
|
||||
error: {
|
||||
@@ -38,4 +53,16 @@ export async function responsesProxyRoute(app: FastifyInstance) {
|
||||
type: 'invalid_request_error',
|
||||
},
|
||||
}));
|
||||
app.get('/responses/*', async (request: FastifyRequest, reply: FastifyReply) => {
|
||||
const downstreamPath = resolveAliasedResponsesDownstreamPath(request);
|
||||
if (!downstreamPath) {
|
||||
return replyUnsupportedAliasedResponsesPath(reply);
|
||||
}
|
||||
return reply.code(426).send({
|
||||
error: {
|
||||
message: `WebSocket upgrade required for GET ${downstreamPath}`,
|
||||
type: 'invalid_request_error',
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -554,8 +554,8 @@ async function handleResponsesWebsocketConnection(
|
||||
) {
|
||||
const websocketSessionId = headerValueToTrimmedString(request.headers['session_id'])
|
||||
|| headerValueToTrimmedString(request.headers['session-id'])
|
||||
-    || headerValueToTrimmedString(request.headers['conversation_id'])
     || headerValueToTrimmedString(request.headers['conversation-id'])
+    || headerValueToTrimmedString(request.headers['conversation_id'])
|
||||
|| randomUUID();
|
||||
const runtimeSessionKeys = new Set<string>();
|
||||
let lastRequest: Record<string, unknown> | null = null;
|
||||
|
||||
@@ -95,6 +95,39 @@ describe('proxy route aliases', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('rejects unknown /responses alias subpaths instead of silently rewriting them', async () => {
|
||||
const response = await app.inject({
|
||||
method: 'POST',
|
||||
url: '/responses/other',
|
||||
payload: { model: 'gpt-5.2', input: 'hello' },
|
||||
});
|
||||
|
||||
expect(response.statusCode).toBe(404);
|
||||
expect(handleOpenAiResponsesSurfaceRequestMock).not.toHaveBeenCalled();
|
||||
expect(response.json()).toEqual({
|
||||
error: {
|
||||
message: 'Unknown /responses alias path',
|
||||
type: 'invalid_request_error',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('keeps GET /responses/compact aligned with the websocket upgrade path', async () => {
|
||||
const response = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/responses/compact',
|
||||
});
|
||||
|
||||
expect(response.statusCode).toBe(426);
|
||||
expect(handleOpenAiResponsesSurfaceRequestMock).not.toHaveBeenCalled();
|
||||
expect(response.json()).toEqual({
|
||||
error: {
|
||||
message: 'WebSocket upgrade required for GET /v1/responses/compact',
|
||||
type: 'invalid_request_error',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('registers bare chat completions alias against the openai chat handler', async () => {
|
||||
const response = await app.inject({
|
||||
method: 'POST',
|
||||
|
||||
@@ -63,6 +63,27 @@ describe('upstreamRequestBuilder', () => {
|
||||
expect(request.body.store).toBe(false);
|
||||
});
|
||||
|
||||
it('overrides downstream Accept so responses transport mode wins', () => {
|
||||
const request = buildUpstreamEndpointRequest({
|
||||
endpoint: 'responses',
|
||||
modelName: 'upstream-gpt',
|
||||
stream: true,
|
||||
tokenValue: 'sk-test',
|
||||
sitePlatform: 'openai',
|
||||
siteUrl: 'https://example.com',
|
||||
openaiBody: {
|
||||
model: 'gpt-5.2',
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
},
|
||||
downstreamFormat: 'openai',
|
||||
downstreamHeaders: {
|
||||
accept: 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
expect(request.headers.accept).toBe('text/event-stream');
|
||||
});
|
||||
|
||||
it('applies a sub2api-style allowlist to generic passthrough headers', () => {
|
||||
const request = buildUpstreamEndpointRequest({
|
||||
endpoint: 'chat',
|
||||
@@ -134,4 +155,23 @@ describe('upstreamRequestBuilder', () => {
|
||||
expect(request.body).not.toHaveProperty('max_tokens');
|
||||
expect(request.body).not.toHaveProperty('maxTokens');
|
||||
});
|
||||
|
||||
it('merges body betas with existing anthropic-beta headers for Claude count_tokens', () => {
|
||||
const request = buildClaudeCountTokensUpstreamRequest({
|
||||
modelName: 'claude-opus-4-6',
|
||||
tokenValue: 'sk-test',
|
||||
sitePlatform: 'claude',
|
||||
claudeBody: {
|
||||
model: 'claude-opus-4-6',
|
||||
betas: ['beta-from-body'],
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
},
|
||||
downstreamHeaders: {
|
||||
'anthropic-beta': 'header-beta',
|
||||
},
|
||||
});
|
||||
|
||||
expect(request.headers['anthropic-beta']).toContain('header-beta');
|
||||
expect(request.headers['anthropic-beta']).toContain('beta-from-body');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -267,20 +267,19 @@ function ensureResponsesAcceptHeader(
|
||||
sitePlatform?: string;
|
||||
},
|
||||
): Record<string, string> {
|
||||
const existingAccept = (
|
||||
headerValueToString(headers.accept)
|
||||
|| headerValueToString((headers as Record<string, unknown>).Accept)
|
||||
);
|
||||
if (existingAccept) return headers;
|
||||
const nextHeaders = { ...headers };
|
||||
delete (nextHeaders as Record<string, unknown>).Accept;
|
||||
delete (nextHeaders as Record<string, unknown>).accept;
|
||||
|
||||
if (input.stream) {
|
||||
return {
|
||||
-      ...headers,
+      ...nextHeaders,
|
||||
accept: 'text/event-stream',
|
||||
};
|
||||
}
|
||||
if (normalizePlatformName(input.sitePlatform) === 'sub2api') {
|
||||
return {
|
||||
-      ...headers,
+      ...nextHeaders,
|
||||
accept: 'application/json',
|
||||
};
|
||||
}
|
||||
@@ -765,9 +764,18 @@ export function buildClaudeCountTokensUpstreamRequest(input: {
|
||||
delete sanitizedBody.maxTokens;
|
||||
delete sanitizedBody.stream;
|
||||
const providerProfile = resolveProviderProfile(sitePlatform);
|
||||
const mergedBetas = [
|
||||
...asTrimmedString(claudeHeaders['anthropic-beta'])
|
||||
.split(',')
|
||||
.map((entry) => entry.trim())
|
||||
.filter(Boolean),
|
||||
...betas,
|
||||
];
|
||||
const effectiveClaudeHeaders = {
|
||||
...claudeHeaders,
|
||||
-    ...(betas.length > 0 ? { 'anthropic-beta': betas.join(',') } : {}),
+    ...(mergedBetas.length > 0
+      ? { 'anthropic-beta': Array.from(new Set(mergedBetas)).join(',') }
+      : {}),
|
||||
};
|
||||
|
||||
if (providerProfile?.id === 'claude') {
|
||||
|
||||
@@ -292,6 +292,41 @@ describe('openai responses response bridge', () => {
|
||||
}
|
||||
});
|
||||
|
||||
it('uses fc_* item ids while keeping call_* ids for synthesized function_call outputs', () => {
|
||||
const payload = buildNormalizedFinalToOpenAiResponsesPayload({
|
||||
upstreamPayload: {
|
||||
id: 'opaque_tool_call_ids',
|
||||
model: 'gpt-5',
|
||||
},
|
||||
normalized: {
|
||||
id: 'opaque_tool_call_ids',
|
||||
model: 'gpt-5',
|
||||
created: 1700000000,
|
||||
content: '',
|
||||
reasoningContent: '',
|
||||
finishReason: 'tool_calls',
|
||||
toolCalls: [{
|
||||
id: 'call_weather_1',
|
||||
name: 'lookup_weather',
|
||||
arguments: '{"city":"Paris"}',
|
||||
}],
|
||||
},
|
||||
usage: {
|
||||
promptTokens: 1,
|
||||
completionTokens: 1,
|
||||
totalTokens: 2,
|
||||
},
|
||||
});
|
||||
|
||||
expect(payload.output).toEqual([
|
||||
expect.objectContaining({
|
||||
id: 'fc_weather_1',
|
||||
type: 'function_call',
|
||||
call_id: 'call_weather_1',
|
||||
}),
|
||||
]);
|
||||
});
|
||||
|
||||
it('keeps the outbound facade pointed at the response bridge object', () => {
|
||||
expect(openAiResponsesOutbound).toBe(openAiResponsesResponseBridge);
|
||||
});
|
||||
|
||||
@@ -20,6 +20,15 @@ function createSyntheticId(prefix: 'resp' | 'msg' | 'call'): string {
|
||||
return `${prefix}_${Date.now()}_${syntheticIdCounter}`;
|
||||
}
|
||||
|
||||
function toFunctionCallItemId(callId: string): string {
|
||||
const trimmed = callId.trim();
|
||||
if (!trimmed) {
|
||||
const syntheticCallId = createSyntheticId('call');
|
||||
return `fc_${syntheticCallId.slice('call_'.length)}`;
|
||||
}
|
||||
return trimmed.startsWith('call_') ? `fc_${trimmed.slice('call_'.length)}` : `fc_${trimmed}`;
|
||||
}
|
||||
|
||||
function cloneJson<T>(value: T): T {
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((item) => cloneJson(item)) as T;
|
||||
@@ -413,7 +422,7 @@ export function buildNormalizedFinalToOpenAiResponsesPayload(input: {
|
||||
}
|
||||
|
||||
output.push({
|
||||
-        id: toolCall.id,
+        id: toFunctionCallItemId(toolCall.id),
|
||||
type: 'function_call',
|
||||
status: 'completed',
|
||||
call_id: toolCall.id,
|
||||
|
||||
Reference in New Issue
Block a user