[codex] add harness engineering guardrails (#284)

* refactor: add harness engineering guardrails

* fix: address harness review feedback

---------

Co-authored-by: apple <apple@appledeMacBook-Pro.local>
This commit is contained in:
Cita
2026-03-26 21:22:36 +08:00
committed by GitHub
parent b3387dfba7
commit 8ebfc39745
16 changed files with 737 additions and 22 deletions
+21 -9
View File
@@ -50,7 +50,6 @@ jobs:
test-core:
name: Test Core
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 20
@@ -74,7 +73,6 @@ jobs:
build-web:
name: Build Web
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -96,7 +94,6 @@ jobs:
build-server:
name: Build Server
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -118,7 +115,6 @@ jobs:
build-desktop:
name: Build Desktop
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -140,7 +136,6 @@ jobs:
typecheck:
name: Typecheck
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -160,9 +155,29 @@ jobs:
- name: Run typecheck
run: npm run typecheck
repo-drift:
name: Repo Drift Check
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: npm
- name: Install dependencies
run: npm ci --prefer-offline --no-audit --no-fund
- name: Run repo drift check
run: npm run repo:drift-check
schema-sqlite:
name: Schema Check (SQLite)
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 15
@@ -187,7 +202,6 @@ jobs:
schema-mysql:
name: Schema Check (MySQL)
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 20
services:
@@ -231,7 +245,6 @@ jobs:
schema-postgres:
name: Schema Check (Postgres)
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 20
services:
@@ -299,7 +312,6 @@ jobs:
audit:
name: Audit Production Dependencies
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
timeout-minutes: 10
# Keep PR audit informational so build/test/schema remain the merge gate.
@@ -0,0 +1,44 @@
name: Harness Drift Report
on:
schedule:
- cron: '37 17 * * 1'
workflow_dispatch:
permissions:
contents: read
env:
NODE_VERSION: 22
jobs:
drift-report:
name: Generate Drift Report
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: npm
- name: Install dependencies
run: npm ci --prefer-offline --no-audit --no-fund
- name: Generate repo drift report
run: npm run repo:drift-check -- --format markdown --output tmp/repo-drift-report.md --report-only
- name: Publish report to step summary
run: cat tmp/repo-drift-report.md >> "$GITHUB_STEP_SUMMARY"
- name: Upload report artifact
uses: actions/upload-artifact@v4
with:
name: repo-drift-report
path: tmp/repo-drift-report.md
retention-days: 30
+74
View File
@@ -0,0 +1,74 @@
# Metapi Engineering Rules
These rules apply to the whole repository unless a deeper `AGENTS.md` overrides
them. They are intentionally opinionated and mechanical so humans and agents can
make small, consistent changes without re-learning the codebase each time.
## Golden Principles
- Prefer one source of truth. If a helper, contract, or workflow already owns
an invariant, extend it instead of creating a parallel implementation.
- Fix the family, not just the symptom. When a bug comes from a repeated
pattern, sweep adjacent paths in the same subsystem before calling the work
done.
- Keep changes narrow and reviewable. Land one coherent slice at a time and
avoid bundling unrelated cleanup into the same patch.
## Server Layers
- `src/server/routes/**` are adapters, not owners. Route files may register
Fastify endpoints, parse request context, and delegate. They must not own
protocol conversion, retry policy, stream lifecycle, billing, or
persistence.
- If a helper is imported by anything outside one route file, it does not
belong under `src/server/routes/proxy/`.
- `src/server/proxy-core/**` owns proxy orchestration. Endpoint fallback should
flow through `executeEndpointFlow()`. Channel/session bookkeeping should flow
through `sharedSurface.ts`.
- `src/server/transformers/**` are protocol-pure. Do not import from
`src/server/routes/**`, Fastify, OAuth services, token router, or runtime
dispatch modules. If a transformer needs a shared contract, move it to a
neutral module first.
- Whole-body upstream reads in proxy orchestration should use
`readRuntimeResponseText()` instead of direct `.text()` reads.
## Platform And Routing Rules
- Platform behavior must be explicit. Detection, endpoint preference, discovery
transport, and management capability should come from one declared capability
story, not scattered `if platform === ...` branches.
- Thin adapters must stay honest. Do not let a platform look feature-complete
through inherited defaults if the underlying upstream does not support the
feature.
- Retry classification and routing health classification should share the same
failure vocabulary whenever possible.
## Database Rules
- One schema change requires three synchronized outputs: update the Drizzle
schema, update SQLite migration history, and regenerate checked-in schema
artifacts together.
- Cross-dialect bootstrap and upgrade SQL must be generated from the schema
contract. Do not hand-write new MySQL/Postgres schema patches in feature
code.
- Legacy schema compatibility is temporary and spec-owned. Additive startup
shims should stay narrow and trace back to a feature compatibility spec.
## Web Rules
- Pages are orchestration surfaces, not shared utility libraries. Do not import
one top-level page from another top-level page.
- Mobile behavior should reuse existing shared primitives first:
`ResponsiveFilterPanel`, `ResponsiveBatchActionBar`, `MobileCard`,
`useIsMobile`, and `mobileLayout.ts`.
- When a page grows a second complex modal, drawer, or panel family, extract it
into a domain subfolder before adding more inline state and rendering logic.
## Guardrails
- Run `npm run repo:drift-check` before finishing changes that touch shared
architecture boundaries.
- If you add a new boundary-heavy module, add or extend an architecture test in
the same area so the rule becomes executable.
- Keep local planning files under `docs/plans/`. They are intentionally ignored
by git and should not be treated as published documentation.
+2
View File
@@ -153,10 +153,12 @@ metapi/
- Documentation changes: `npm run docs:build` / 文档更改:`npm run docs:build`
- Code changes: `npm test && npm run build` / 代码更改:`npm test && npm run build`
- Database changes: `npm run smoke:db` / 数据库更改:`npm run smoke:db`
- Architecture / repo drift changes: `npm run repo:drift-check` / 架构与仓库漂移检查:`npm run repo:drift-check`
5. **Follow code style** / **遵循代码风格**
- Use TypeScript for type safety / 使用 TypeScript 确保类型安全
- Follow existing code patterns / 遵循现有代码模式
- Follow repo-level engineering rules in `AGENTS.md` / 遵循仓库根目录 `AGENTS.md` 中的工程规则
- Keep functions small and focused / 保持函数小而专注
### Commit Messages / 提交信息
+1
View File
@@ -35,6 +35,7 @@ npm run docs:build
| 运维排障与日常维护 | [运维手册](./operations.md) / [常见问题](./faq.md) | 新排障案例、备份恢复、健康检查、典型报错变化时 |
| FAQ / 教程协作沉淀 | [FAQ/教程贡献规范](./community/faq-tutorial-guidelines.md) | 需要新增教程、FAQ 模板、内容提交流程时 |
| 仓库目录与组织约定 | [目录规范](./project-structure.md) | 目录结构、归档策略或命名约定变化时 |
| 工程守则与漂移治理 | [Harness Engineering](./engineering/harness-engineering.md) | 需要更新仓库级黄金原则、自动巡检范围或垃圾回收流程时 |
## 维护约定
+140
View File
@@ -0,0 +1,140 @@
# Metapi Harness Engineering
This document captures the repo-level engineering taste that should remain true
even when future work is performed by autonomous agents. The goal is not to
replace feature tests. The goal is to keep the repository readable,
review-friendly, and hard to drift.
## Why This Exists
`metapi` already has strong local discipline in several areas:
- architecture tests that pin important boundaries
- schema parity and runtime bootstrap verification across databases
- shared protocol helpers that deliberately replace parallel implementations
What was still missing was one place that states the mechanical rules behind
those choices and a lightweight loop that continuously checks for drift. This
document is that shared contract.
## Golden Principles
### 1. One Invariant, One Source Of Truth
If a helper, contract, or workflow already owns an invariant, new work should
reuse that home instead of creating a second implementation.
Examples:
- protocol shaping and normalization belong in `src/server/transformers/**`
- endpoint fallback belongs in `src/server/routes/proxy/endpointFlow.ts`
- proxy success/failure bookkeeping belongs in
`src/server/proxy-core/surfaces/sharedSurface.ts`
- schema heuristics belong in `src/server/db/schemaMetadata.ts`
### 2. Routes Are Adapters, Not Owners
`src/server/routes/**` should remain thin. Route files may register endpoints,
read Fastify request state, and delegate to shared orchestration. They should
not become the place where retry loops, protocol conversion, stream lifecycle,
or billing logic lives.
If a helper is imported outside a single route file, that helper should move to
`proxy-core`, `services`, `transformers`, or another neutral home.
### 3. Transformers Must Stay Protocol-Pure
Transformers are the protocol boundary. They may depend on canonical/shared
contracts, but they should not reach back into route files, Fastify handlers,
OAuth services, token routing, or runtime dispatch details.
In practice this means:
- `canonical` is the request truth source
- `shared/normalized` is the response truth source
- compatibility layers may orchestrate retries or fallback bodies, but they do
not redefine protocol semantics
### 4. Proxy-Core Follows The Golden Path
Proxy orchestration should prefer shared, tested paths instead of bespoke
surface-local logic.
Current preferred path:
- endpoint ranking and request shaping via `upstreamEndpoint.ts`
- endpoint-attempt loops via `executeEndpointFlow()`
- whole-body response decoding via `readRuntimeResponseText()`
- OAuth refresh, sticky/lease behavior, billing, and proxy logging via
`sharedSurface.ts`
- Codex header/session semantics via provider profiles and header utils
- Codex websocket transport via the dedicated websocket runtime, not generic
fetch executors
### 5. Platform Capability Must Be Explicit
Platform behavior is easy to let drift because it spans adapters, discovery,
endpoint preference, and routing. Any platform-specific behavior that matters
at runtime should be stated once and reused, not re-inferred in multiple
subsystems.
Thin adapters should stay honest. Feature-complete adapters should be tested as
feature-complete. Do not silently “upgrade” support through generic defaults.
### 6. Database Changes Must Stay Contract-Driven
Schema work already has one of the strongest harnesses in the repo. Keep it
that way.
- update Drizzle schema and SQLite migration history together
- regenerate checked-in contract/artifacts together
- keep cross-dialect bootstrap/upgrade generation contract-driven
- keep legacy startup compatibility narrow and spec-owned
### 7. Web Pages Are Orchestration Surfaces
Top-level pages should not be reused as shared component libraries.
- no page-to-page imports for reusable UI
- mobile behavior should reuse shared primitives first
- extract repeated modals, drawers, and panels into domain subfolders early
## First-Wave Drift Checks
The first repo-level drift loop is intentionally small and high-signal. It
checks for:
- transformer imports from `routes/proxy`
- new proxy-core surface body reads that bypass `readRuntimeResponseText()`
- new imports from `routes/proxy` inside `proxy-core` beyond the current debt
baseline
- new top-level page-to-page imports in the admin UI beyond the current debt
baseline
These checks live in `scripts/dev/repo-drift-check.ts` and are wired into CI.
## Tracked Debt Vs New Violations
The repository already contains some known architectural debt, especially where
`proxy-core` still imports helpers from `routes/proxy` and where one admin page
reuses another page's export.
The harness uses a ratchet:
- tracked debt is reported so it stays visible
- new violations fail CI
This keeps the repo moving forward without forcing a risky “rewrite
everything first” migration.
## Garbage Collection Loop
The harness loop is intentionally conservative:
1. rules are encoded in repo docs and executable checks
2. CI blocks new drift
3. a scheduled workflow generates a drift report artifact
4. targeted cleanup PRs can remove tracked debt in small slices
The goal is steady principal payments on technical debt, not occasional heroic
cleanup sprints.
+1
View File
@@ -90,6 +90,7 @@ scripts/
docs/
├── .vitepress/ # 文档站导航与主题配置
├── community/ # 社区贡献规范
├── engineering/ # 仓库级工程规则、harness 与漂移治理说明
├── public/ # 文档站公开静态资源
├── logos/ # 可编辑 Logo 源文件与草稿
├── screenshots/ # 文档截图
+1
View File
@@ -52,6 +52,7 @@
"db:migrate": "tsx src/server/db/migrate.ts",
"schema:contract": "tsx scripts/dev/generate-schema-contract.ts",
"schema:generate": "npm run db:generate && npm run schema:contract",
"repo:drift-check": "tsx scripts/dev/repo-drift-check.ts",
"test:schema:unit": "vitest run --root . src/server/db/schemaContract.test.ts src/server/db/schemaArtifactGenerator.test.ts src/server/db/schemaIntrospection.test.ts src/server/db/schemaParity.test.ts",
"test:schema:parity": "vitest run --root . src/server/db/schemaParity.live.test.ts",
"test:schema:upgrade": "vitest run --root . src/server/db/schemaUpgrade.live.test.ts",
+22
View File
@@ -0,0 +1,22 @@
import { describe, expect, it } from 'vitest';
import { readFileSync } from 'node:fs';
import { resolve } from 'node:path';
// Guardrail: CI and the scheduled drift-report workflow must stay wired to
// the repo drift checker. These string probes intentionally pin the exact
// workflow fragment spellings so accidental renames are caught in review.
describe('harness workflows', () => {
  it('keeps repo drift checks wired into ci and scheduled reporting', () => {
    const loadWorkflow = (relativePath: string): string =>
      readFileSync(resolve(process.cwd(), relativePath), 'utf8');
    const ciWorkflow = loadWorkflow('.github/workflows/ci.yml');
    const driftWorkflow = loadWorkflow('.github/workflows/harness-drift-report.yml');

    const ciFragments = [
      'name: Repo Drift Check',
      'npm run repo:drift-check',
      'name: Test Core',
      'name: Build Web',
      'name: Typecheck',
    ];
    for (const fragment of ciFragments) {
      expect(ciWorkflow).toContain(fragment);
    }

    const driftFragments = [
      'schedule:',
      'workflow_dispatch:',
      'npm run repo:drift-check -- --format markdown --output tmp/repo-drift-report.md --report-only',
      'actions/upload-artifact@v4',
      'repo-drift-report',
    ];
    for (const fragment of driftFragments) {
      expect(driftWorkflow).toContain(fragment);
    }
  });
});
+64
View File
@@ -0,0 +1,64 @@
import { mkdtempSync, mkdirSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { dirname, join } from 'node:path';
import { describe, expect, it } from 'vitest';
import { formatRepoDriftReport, runRepoDriftCheck } from './repo-drift-check.js';
/** Materialize a map of relative path -> file contents under `root`,
 *  creating intermediate directories as needed. */
function writeWorkspaceFiles(root: string, files: Record<string, string>): void {
  for (const relativePath of Object.keys(files)) {
    const destination = join(root, relativePath);
    mkdirSync(dirname(destination), { recursive: true });
    writeFileSync(destination, files[relativePath]);
  }
}
// Behavioral coverage for scripts/dev/repo-drift-check.ts.
describe('repo drift check', () => {
  it('separates new violations from tracked debt', () => {
    // Synthetic workspace with one offender per rule. chatSurface.ts and
    // routeCompatibility.ts are NOT allowlisted for the rules they trip,
    // so they surface as violations; sharedSurface.ts and Accounts.tsx
    // are in the ratchet allowlists, so they surface as tracked debt.
    const root = mkdtempSync(join(tmpdir(), 'metapi-repo-drift-'));
    writeWorkspaceFiles(root, {
      'src/server/transformers/openai/responses/routeCompatibility.ts': "import type { EndpointAttemptContext } from '../../../routes/proxy/endpointFlow.js';\n",
      'src/server/proxy-core/surfaces/chatSurface.ts': 'const payload = await upstream.text();\n',
      'src/server/proxy-core/surfaces/sharedSurface.ts': "import { dispatchRuntimeRequest } from '../../routes/proxy/runtimeExecutor.js';\n",
      'src/web/pages/Accounts.tsx': "import { TokensPanel } from './Tokens.js';\n",
      'src/web/pages/Tokens.tsx': 'export const TokensPanel = () => null;\n',
    });
    const report = runRepoDriftCheck({ root });
    expect(report.violations).toEqual(expect.arrayContaining([
      expect.objectContaining({
        ruleId: 'proxy-surface-body-read',
        file: 'src/server/proxy-core/surfaces/chatSurface.ts',
      }),
      expect.objectContaining({
        ruleId: 'transformers-route-blind',
        file: 'src/server/transformers/openai/responses/routeCompatibility.ts',
      }),
    ]));
    expect(report.trackedDebt).toEqual(expect.arrayContaining([
      expect.objectContaining({
        ruleId: 'proxy-core-routes-proxy-import',
        file: 'src/server/proxy-core/surfaces/sharedSurface.ts',
      }),
      expect.objectContaining({
        ruleId: 'web-page-to-page-import',
        file: 'src/web/pages/Accounts.tsx',
      }),
    ]));
  });
  it('keeps the current repository within the first-wave ratchet', () => {
    // Ratchet test against the real checked-in tree: any NEW violation
    // fails; tracked debt may exist but must stay visible (an array).
    const report = runRepoDriftCheck({ root: process.cwd() });
    expect(report.violations).toEqual([]);
    expect(report.trackedDebt).toEqual(expect.any(Array));
  });
  it('can render markdown reports for scheduled cleanup jobs', () => {
    // Smoke-check the markdown renderer used by the scheduled drift-report
    // workflow; the section headers must exist for the step summary.
    const report = runRepoDriftCheck({ root: process.cwd() });
    const markdown = formatRepoDriftReport(report, 'markdown');
    expect(markdown).toContain('# Repo Drift Report');
    expect(markdown).toContain('## Violations');
    expect(markdown).toContain('## Tracked Debt');
  });
});
+338
View File
@@ -0,0 +1,338 @@
import { lstatSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'node:fs';
import { dirname, extname, relative, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import process from 'node:process';
// Output format accepted by the report renderer / --format flag.
type ReportFormat = 'text' | 'markdown';
// A finding is either a hard failure ("violation") or allowlisted,
// pre-existing debt that is reported but does not fail CI ("tracked_debt").
type Severity = 'violation' | 'tracked_debt';
// One matched line in one file, attributed to the rule that flagged it.
type DriftFinding = {
  ruleId: string;
  severity: Severity;
  file: string;     // repo-relative, forward-slash normalized
  line: number;     // 1-indexed
  message: string;
  excerpt: string;  // trimmed source line that triggered the rule
};
// Aggregate result of one scan of the workspace.
type DriftReport = {
  root: string;
  generatedAt: string; // ISO-8601 timestamp of the scan
  violations: DriftFinding[];
  trackedDebt: DriftFinding[];
};
// Declarative description of a single drift rule.
type RuleSpec = {
  id: string;
  description: string;
  // Which repo-relative files the rule applies to.
  fileFilter: (file: string) => boolean;
  // Whether one source line trips the rule for a given file.
  lineMatch: (line: string, file: string) => boolean;
  // Static message, or a builder given the offending file and line text.
  message: string | ((file: string, line: string) => string);
  // Files whose matches are downgraded to tracked debt (ratchet baseline).
  allowlistedFiles?: Set<string>;
};
type RunOptions = {
  root: string;
};
// Only these extensions are scanned; everything else (yaml, md, css, ...)
// is ignored by the walker.
const SOURCE_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx']);
// Ratchet baseline: proxy-core surface files already known to import from
// routes/proxy. Matches in these files are reported as tracked debt rather
// than CI-failing violations; do not add new entries casually.
const ROUTES_PROXY_IMPORT_ALLOWLIST = new Set([
  'src/server/proxy-core/surfaces/chatSurface.ts',
  'src/server/proxy-core/surfaces/filesSurface.ts',
  'src/server/proxy-core/surfaces/geminiSurface.ts',
  'src/server/proxy-core/surfaces/openAiResponsesSurface.ts',
  'src/server/proxy-core/surfaces/sharedSurface.ts',
]);
// Ratchet baseline: top-level admin pages that currently import another
// top-level page.
const TOP_LEVEL_PAGE_IMPORT_ALLOWLIST = new Set([
  'src/web/pages/Accounts.tsx',
]);
/** Convert an absolute path into a repo-relative, forward-slash path
 *  (Windows separators are normalized so rule paths compare portably). */
function normalizeRelativePath(root: string, fullPath: string): string {
  const relativePath = relative(root, fullPath);
  return relativePath.split('\\').join('/');
}
/**
 * Recursively list scannable source files under `root`.
 *
 * Skips VCS/build-output directories (.git, node_modules, dist, coverage)
 * and symbolic links (never followed, to avoid cycles and escapes), and
 * returns absolute paths in a deterministic, locale-stable order.
 */
function walkFiles(root: string, currentDir = root): string[] {
  const skippedDirs = new Set(['.git', 'node_modules', 'dist', 'coverage']);
  const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx']);
  const collected: string[] = [];
  const sortedEntries = readdirSync(currentDir)
    .sort((a, b) => a.localeCompare(b, 'en'));
  for (const name of sortedEntries) {
    if (skippedDirs.has(name)) continue;
    const absolute = resolve(currentDir, name);
    const info = lstatSync(absolute);
    if (info.isSymbolicLink()) continue;
    if (info.isDirectory()) {
      collected.push(...walkFiles(root, absolute));
    } else if (sourceExtensions.has(extname(name))) {
      collected.push(absolute);
    }
  }
  return collected;
}
/** Read one workspace file and return its lines with 1-based numbers.
 *  CRLF endings are normalized so Windows checkouts match the same rules. */
function readWorkspaceLines(root: string, file: string): Array<{ lineNumber: number; text: string }> {
  const raw = readFileSync(resolve(root, file), 'utf8');
  const normalized = raw.split('\r\n').join('\n');
  const result: Array<{ lineNumber: number; text: string }> = [];
  normalized.split('\n').forEach((text, index) => {
    result.push({ lineNumber: index + 1, text });
  });
  return result;
}
/** True for production sources; false for *.test.{ts,tsx,js,jsx} files. */
function isNonTestSource(file: string): boolean {
  const testSuffixes = ['.test.ts', '.test.tsx', '.test.js', '.test.jsx'];
  return !testSuffixes.some((suffix) => file.endsWith(suffix));
}
/**
 * True for a non-test source file sitting directly in src/web/pages/
 * (files nested in page subfolders are intentionally out of scope).
 */
function isTopLevelPageFile(file: string): boolean {
  if (!/^src\/web\/pages\/[^/]+\.(ts|tsx|js|jsx)$/.test(file)) {
    return false;
  }
  // Test-file exclusion, inlined from isNonTestSource.
  return !/\.test\.(ts|tsx|js|jsx)$/.test(file);
}
/**
 * First-wave drift rules. Each rule is applied line-by-line to the files
 * selected by its fileFilter; a match inside a rule's allowlisted file is
 * downgraded from violation to tracked debt (the ratchet baseline).
 */
function createRules(): RuleSpec[] {
  return [
    {
      id: 'transformers-route-blind',
      description: 'Transformers must not import route-layer proxy helpers',
      // Production transformer sources only; their tests are exempt.
      fileFilter: (file) => file.startsWith('src/server/transformers/')
        && isNonTestSource(file),
      // Any module specifier that reaches into routes/proxy trips the rule.
      lineMatch: (line) => /from\s+['"][^'"]*routes\/proxy\//.test(line),
      message: 'transformer imports route-layer proxy code',
    },
    {
      id: 'proxy-surface-body-read',
      description: 'Proxy-core surfaces should use readRuntimeResponseText() for whole-body reads',
      fileFilter: (file) => file.startsWith('src/server/proxy-core/surfaces/')
        && isNonTestSource(file),
      // Broad on purpose: any `.text(` call counts as a direct body read.
      lineMatch: (line) => /\.text\(/.test(line),
      message: 'proxy-core surface reads a full upstream body via .text()',
    },
    {
      id: 'proxy-core-routes-proxy-import',
      description: 'Proxy-core imports from routes/proxy are tracked debt and should not grow',
      fileFilter: (file) => file.startsWith('src/server/proxy-core/')
        && isNonTestSource(file),
      lineMatch: (line) => /from\s+['"][^'"]*routes\/proxy\//.test(line),
      message: 'proxy-core imports a helper from routes/proxy',
      // Existing offenders stay visible as tracked debt; new files fail CI.
      allowlistedFiles: ROUTES_PROXY_IMPORT_ALLOWLIST,
    },
    {
      id: 'web-page-to-page-import',
      description: 'Top-level route pages should not import other top-level route pages',
      fileFilter: (file) => isTopLevelPageFile(file),
      lineMatch: (line, file) => {
        // Matches sibling-relative imports like `./Tokens.js`; the optional
        // trailing group strips a known source extension from the capture.
        const match = line.match(/from\s+['"]\.\/([^/'"]+?)(?:\.(?:js|ts|tsx|jsx))?['"]/);
        if (!match) return false;
        const importedPage = match[1];
        // Self-imports (a page referencing its own module name) are fine.
        const currentPage = file.replace(/^src\/web\/pages\//, '').replace(/\.(ts|tsx|js|jsx)$/, '');
        return importedPage !== currentPage;
      },
      message: (file, line) => {
        const match = line.match(/from\s+['"](\.\/[^'"]+)['"]/);
        const imported = match?.[1] ?? 'another page file';
        return `top-level page imports ${imported}`;
      },
      allowlistedFiles: TOP_LEVEL_PAGE_IMPORT_ALLOWLIST,
    },
  ];
}
/**
 * Scan the workspace rooted at `options.root` (default: cwd) against the
 * first-wave drift rules, bucketing every matched line into either new
 * violations or allowlisted tracked debt.
 */
export function runRepoDriftCheck(options: Partial<RunOptions> = {}): DriftReport {
  const root = resolve(options.root ?? process.cwd());
  const violations: DriftFinding[] = [];
  const trackedDebt: DriftFinding[] = [];
  const relativeFiles = walkFiles(root).map((absolute) => normalizeRelativePath(root, absolute));
  for (const rule of createRules()) {
    const candidates = relativeFiles.filter((file) => rule.fileFilter(file));
    for (const file of candidates) {
      // Allowlisted files downgrade every match to tracked debt (ratchet).
      const isDebtFile = rule.allowlistedFiles?.has(file) === true;
      for (const { lineNumber, text } of readWorkspaceLines(root, file)) {
        if (!rule.lineMatch(text, file)) continue;
        const message = typeof rule.message === 'function'
          ? rule.message(file, text)
          : rule.message;
        const finding: DriftFinding = {
          ruleId: rule.id,
          severity: isDebtFile ? 'tracked_debt' : 'violation',
          file,
          line: lineNumber,
          message,
          excerpt: text.trim(),
        };
        (isDebtFile ? trackedDebt : violations).push(finding);
      }
    }
  }
  return {
    root,
    generatedAt: new Date().toISOString(),
    violations,
    trackedDebt,
  };
}
/** Render one finding as a two-line plain-text bullet:
 *  header with rule/location/message, then the indented source excerpt. */
function formatFindingText(finding: DriftFinding): string {
  const header = `- [${finding.ruleId}] ${finding.file}:${finding.line} ${finding.message}`;
  return `${header}\n  ${finding.excerpt}`;
}
function formatFindingMarkdownRow(finding: DriftFinding): string {
const escapedMessage = finding.message.replaceAll('|', '\\|');
const escapedExcerpt = finding.excerpt.replaceAll('|', '\\|');
return `| \`${finding.ruleId}\` | \`${finding.file}:${finding.line}\` | ${escapedMessage} | \`${escapedExcerpt}\` |`;
}
export function formatRepoDriftReport(report: DriftReport, format: ReportFormat = 'text'): string {
if (format === 'markdown') {
const lines: string[] = [
'# Repo Drift Report',
'',
`- Root: \`${report.root}\``,
`- Generated at: \`${report.generatedAt}\``,
`- Violations: **${report.violations.length}**`,
`- Tracked debt: **${report.trackedDebt.length}**`,
'',
];
if (report.violations.length > 0) {
lines.push('## Violations', '', '| Rule | Location | Message | Excerpt |', '| --- | --- | --- | --- |');
for (const finding of report.violations) {
lines.push(formatFindingMarkdownRow(finding));
}
lines.push('');
} else {
lines.push('## Violations', '', 'No new violations found.', '');
}
if (report.trackedDebt.length > 0) {
lines.push('## Tracked Debt', '', '| Rule | Location | Message | Excerpt |', '| --- | --- | --- | --- |');
for (const finding of report.trackedDebt) {
lines.push(formatFindingMarkdownRow(finding));
}
lines.push('');
} else {
lines.push('## Tracked Debt', '', 'No tracked debt entries were observed.', '');
}
return lines.join('\n');
}
const lines: string[] = [
`Repo drift report for ${report.root}`,
`Generated at ${report.generatedAt}`,
'',
`Violations: ${report.violations.length}`,
];
if (report.violations.length > 0) {
lines.push(...report.violations.map(formatFindingText));
} else {
lines.push('- none');
}
lines.push('', `Tracked debt: ${report.trackedDebt.length}`);
if (report.trackedDebt.length > 0) {
lines.push(...report.trackedDebt.map(formatFindingText));
} else {
lines.push('- none');
}
return lines.join('\n');
}
// Parsed CLI flags for the drift-check entry point.
type CliOptions = {
  format: ReportFormat;  // --format text|markdown (default: text)
  output?: string;       // --output <path>: also write the report to disk
  reportOnly: boolean;   // --report-only: never exit non-zero on violations
  root?: string;         // --root <dir>: scan a different workspace root
};
/**
 * Parse CLI arguments for the drift-check entry point.
 *
 * Supported flags:
 *   --format text|markdown   report format (default: text)
 *   --output <path>          also write the report to this file
 *   --report-only            report findings without failing the process
 *   --root <dir>             workspace root to scan (default: cwd)
 *
 * @throws Error when a flag is missing its value, --format has an invalid
 *   value, or an unrecognized `--` flag is passed. (Previously unknown
 *   flags were silently ignored, so a typo such as `--fromat markdown`
 *   quietly fell back to defaults and masked misconfigured CI runs.)
 *   Bare positional arguments are still tolerated for backward
 *   compatibility with wrapper scripts.
 */
function parseCliOptions(argv: string[]): CliOptions {
  const options: CliOptions = {
    format: 'text',
    reportOnly: false,
  };
  const readRequiredValue = (flag: string, index: number): string => {
    const value = argv[index + 1];
    if (!value || value.startsWith('--')) {
      throw new Error(`${flag} requires a value`);
    }
    return value;
  };
  for (let index = 0; index < argv.length; index += 1) {
    const arg = argv[index];
    if (arg === '--format') {
      const value = readRequiredValue(arg, index);
      if (value !== 'text' && value !== 'markdown') {
        throw new Error(`--format must be one of: text, markdown`);
      }
      options.format = value;
      index += 1; // skip the consumed value
      continue;
    }
    if (arg === '--output') {
      options.output = readRequiredValue(arg, index);
      index += 1;
      continue;
    }
    if (arg === '--report-only') {
      options.reportOnly = true;
      continue;
    }
    if (arg === '--root') {
      options.root = readRequiredValue(arg, index);
      index += 1;
      continue;
    }
    // Fail fast on anything that looks like a flag but is not recognized.
    if (arg.startsWith('--')) {
      throw new Error(`unknown flag: ${arg}`);
    }
  }
  return options;
}
/**
 * Write the rendered report to `outputPath` when one was requested,
 * creating parent directories as needed. A missing/empty path is a no-op.
 */
function maybeWriteReport(outputPath: string | undefined, contents: string): void {
  if (outputPath === undefined || outputPath === '') return;
  const target = resolve(outputPath);
  mkdirSync(dirname(target), { recursive: true });
  writeFileSync(target, contents);
}
// Detect direct CLI execution (vs. being imported by the vitest suite).
// Compares this module's resolved file path against process.argv[1].
// Wrapped in try/catch because fileURLToPath can throw for non-file URLs
// (e.g. when the module is loaded through an in-memory loader).
const isMainModule = (() => {
  try {
    return process.argv[1] != null && fileURLToPath(import.meta.url) === resolve(process.argv[1]);
  } catch {
    return false;
  }
})();
// CLI entry point: scan, render, optionally persist, print, then exit.
// Exit code 1 only when NEW violations exist and --report-only was not
// passed — so CI fails on fresh drift while the scheduled report job
// stays green even when tracked debt or violations are present.
if (isMainModule) {
  try {
    const options = parseCliOptions(process.argv.slice(2));
    const report = runRepoDriftCheck({ root: options.root });
    const contents = formatRepoDriftReport(report, options.format);
    maybeWriteReport(options.output, contents);
    console.log(contents);
    process.exit(report.violations.length > 0 && !options.reportOnly ? 1 : 0);
  } catch (error) {
    // Surface parse/scan failures as a non-zero exit with a short message.
    const message = error instanceof Error ? error.message : String(error);
    console.error(message);
    process.exit(1);
  }
}
+6 -2
View File
@@ -119,9 +119,13 @@ function decodeRuntimeResponseBuffer(buffer: Buffer, contentEncoding: string | n
export async function readRuntimeResponseText(
response: RuntimeResponse,
): Promise<string> {
const contentEncoding = response.headers.get('content-encoding');
const contentEncoding = typeof response.headers?.get === 'function'
? response.headers.get('content-encoding')
: null;
if (!hasZstdContentEncoding(contentEncoding)) {
return response.text().catch(() => '');
return typeof response.text === 'function'
? response.text().catch(() => '')
: '';
}
const rawBuffer = Buffer.from(await response.arrayBuffer());
@@ -872,7 +872,7 @@ export async function handleClaudeCountTokensSurfaceRequest(
const latency = Date.now() - startTime;
const contentType = upstream.headers.get('content-type') || 'application/json';
const text = await upstream.text();
const text = await readRuntimeResponseText(upstream);
let payload: unknown = text;
try {
payload = JSON.parse(text);
@@ -18,6 +18,7 @@ import { buildUpstreamUrl } from '../../routes/proxy/upstreamUrl.js';
import { recordOauthQuotaResetHint } from '../../services/oauth/quota.js';
import { refreshOauthAccessTokenSingleflight } from '../../services/oauth/refreshSingleflight.js';
import { proxyChannelCoordinator } from '../../services/proxyChannelCoordinator.js';
import { readRuntimeResponseText } from '../executors/types.js';
type SelectedChannel = Awaited<ReturnType<typeof tokenRouter.selectChannel>>;
type SurfaceWarningScope = 'chat' | 'responses';
@@ -333,7 +334,8 @@ export async function trySurfaceOauthRefreshRecovery<TRequest extends BuiltEndpo
input.ctx.request = refreshedRequest;
input.ctx.response = refreshedResponse;
if (input.captureFailureBody !== false) {
input.ctx.rawErrText = await refreshedResponse.text().catch(() => 'unknown error');
const failureBody = await readRuntimeResponseText(refreshedResponse).catch(() => '');
input.ctx.rawErrText = failureBody.trim() || 'unknown error';
}
} catch {
return null;
@@ -61,14 +61,10 @@ describe('final transformer hard-cut architecture', () => {
for (const file of files) {
const source = readFileSync(file, 'utf8');
expect(source).not.toContain("from '../../routes/proxy/");
expect(source).not.toContain("from '../routes/proxy/");
expect(source).not.toContain("from '../../services/oauth/");
expect(source).not.toContain("from '../services/oauth/");
expect(source).not.toContain("from '../../services/tokenRouter.js'");
expect(source).not.toContain("from '../services/tokenRouter.js'");
expect(source).not.toContain("from '../../routes/proxy/runtimeExecutor.js'");
expect(source).not.toContain("from '../routes/proxy/runtimeExecutor.js'");
expect(source).not.toMatch(/(?:^|\n)\s*(?:import|export)\s+(?:type\s+)?(?:[^'"\n]*?\s+from\s+)?['"][^'"]*routes\/proxy\//m);
expect(source).not.toMatch(/(?:^|\n)\s*(?:import|export)\s+(?:type\s+)?(?:[^'"\n]*?\s+from\s+)?['"][^'"]*services\/oauth\//m);
expect(source).not.toMatch(/(?:^|\n)\s*(?:import|export)\s+(?:type\s+)?(?:[^'"\n]*?\s+from\s+)?['"][^'"]*services\/tokenRouter\.js['"]/m);
expect(source).not.toMatch(/(?:^|\n)\s*(?:import|export)\s+(?:type\s+)?(?:[^'"\n]*?\s+from\s+)?['"][^'"]*routes\/proxy\/runtimeExecutor\.js['"]/m);
expect(source).not.toContain("from 'fastify'");
}
});
@@ -1,10 +1,10 @@
import type { Response as UndiciResponse } from 'undici';
import {
buildMinimalJsonHeadersForCompatibility,
isEndpointDowngradeError,
isUnsupportedMediaTypeError,
type CompatibilityEndpoint,
} from '../../shared/endpointCompatibility.js';
import type { EndpointAttemptContext, EndpointRecoverResult } from '../../../routes/proxy/endpointFlow.js';
import {
buildResponsesCompatibilityBodies,
buildResponsesCompatibilityHeaderCandidates,
@@ -19,6 +19,20 @@ type CompatibilityRequest = {
body: Record<string, unknown>;
};
type EndpointAttemptContext = {
request: CompatibilityRequest;
targetUrl: string;
response: UndiciResponse;
rawErrText: string;
};
type EndpointRecoverResult = {
upstream: UndiciResponse;
upstreamPath: string;
request?: CompatibilityRequest;
targetUrl?: string;
} | null;
type UpstreamResponse = Exclude<EndpointRecoverResult, null>['upstream'];
type CreateResponsesEndpointStrategyInput = {