fix(lint): resolve biome violations across src modules

This commit is contained in:
jeffusion
2026-03-24 10:00:13 +08:00
committed by 路遥知码力
parent 28d86aff16
commit 3c1d616dc1
15 changed files with 91 additions and 65 deletions

View File

@@ -29,12 +29,12 @@ describe('secrets — AES-256-GCM encryption', () => {
beforeEach(() => { beforeEach(() => {
savedEncryptionKey = process.env.ENCRYPTION_KEY; savedEncryptionKey = process.env.ENCRYPTION_KEY;
delete process.env.ENCRYPTION_KEY; Reflect.deleteProperty(process.env, 'ENCRYPTION_KEY');
}); });
afterEach(() => { afterEach(() => {
if (savedEncryptionKey === undefined) { if (savedEncryptionKey === undefined) {
delete process.env.ENCRYPTION_KEY; Reflect.deleteProperty(process.env, 'ENCRYPTION_KEY');
} else { } else {
process.env.ENCRYPTION_KEY = savedEncryptionKey; process.env.ENCRYPTION_KEY = savedEncryptionKey;
} }
@@ -53,7 +53,7 @@ describe('secrets — AES-256-GCM encryption', () => {
}); });
test('throws if ENCRYPTION_KEY is not set', async () => { test('throws if ENCRYPTION_KEY is not set', async () => {
delete process.env.ENCRYPTION_KEY; Reflect.deleteProperty(process.env, 'ENCRYPTION_KEY');
const secrets = await importFresh(); const secrets = await importFresh();
expect(() => secrets.initMasterKey()).toThrow('ENCRYPTION_KEY env var is required'); expect(() => secrets.initMasterKey()).toThrow('ENCRYPTION_KEY env var is required');

View File

@@ -32,7 +32,7 @@ describe('provider-repo', () => {
afterEach(() => { afterEach(() => {
closeDatabase(); closeDatabase();
if (savedDbPath === undefined) { if (savedDbPath === undefined) {
delete process.env.DATABASE_PATH; Reflect.deleteProperty(process.env, 'DATABASE_PATH');
} else { } else {
process.env.DATABASE_PATH = savedDbPath; process.env.DATABASE_PATH = savedDbPath;
} }
@@ -212,7 +212,7 @@ describe('provider-repo', () => {
}); });
test('deleting provider does not affect other providers', () => { test('deleting provider does not affect other providers', () => {
const p1 = providerRepo.create({ ...sampleInput, name: 'Keep' }); providerRepo.create({ ...sampleInput, name: 'Keep' });
const p2 = providerRepo.create({ ...sampleInput, name: 'Delete' }); const p2 = providerRepo.create({ ...sampleInput, name: 'Delete' });
providerRepo.delete(p2.id); providerRepo.delete(p2.id);

View File

@@ -37,7 +37,9 @@ describe('secret-repo', () => {
mkdirSync(tmpDir, { recursive: true }); mkdirSync(tmpDir, { recursive: true });
dbPath = join(tmpDir, 'test.db'); dbPath = join(tmpDir, 'test.db');
process.env.DATABASE_PATH = dbPath; process.env.DATABASE_PATH = dbPath;
process.env.ENCRYPTION_KEY = Buffer.from(crypto.getRandomValues(new Uint8Array(32))).toString('hex'); process.env.ENCRYPTION_KEY = Buffer.from(crypto.getRandomValues(new Uint8Array(32))).toString(
'hex'
);
initMasterKey(); initMasterKey();
initDatabase(); initDatabase();
@@ -49,12 +51,12 @@ describe('secret-repo', () => {
afterEach(() => { afterEach(() => {
closeDatabase(); closeDatabase();
if (savedDbPath === undefined) { if (savedDbPath === undefined) {
delete process.env.DATABASE_PATH; Reflect.deleteProperty(process.env, 'DATABASE_PATH');
} else { } else {
process.env.DATABASE_PATH = savedDbPath; process.env.DATABASE_PATH = savedDbPath;
} }
if (savedEncryptionKey === undefined) { if (savedEncryptionKey === undefined) {
delete process.env.ENCRYPTION_KEY; Reflect.deleteProperty(process.env, 'ENCRYPTION_KEY');
} else { } else {
process.env.ENCRYPTION_KEY = savedEncryptionKey; process.env.ENCRYPTION_KEY = savedEncryptionKey;
} }

View File

@@ -9,10 +9,10 @@ import { llmConfigRouter } from './controllers/llm-config';
import { handleGiteaWebhook } from './controllers/review'; import { handleGiteaWebhook } from './controllers/review';
import { initMasterKey } from './crypto/secrets'; import { initMasterKey } from './crypto/secrets';
import { initDatabase } from './db/database'; import { initDatabase } from './db/database';
import { cleanupScheduler } from './review/cleanup-scheduler';
import { codexEngine } from './review/codex/codex-engine'; import { codexEngine } from './review/codex/codex-engine';
import { mcpRouter } from './review/codex/mcp-handler'; import { mcpRouter } from './review/codex/mcp-handler';
import { reviewEngine } from './review/engine'; import { reviewEngine } from './review/engine';
import { cleanupScheduler } from './review/cleanup-scheduler';
initMasterKey(); initMasterKey();
initDatabase(); initDatabase();

View File

@@ -118,8 +118,8 @@ describe('LLMSemaphore', () => {
expect(sem.activeCount).toBe(2); expect(sem.activeCount).toBe(2);
expect(sem.pendingCount).toBe(0); expect(sem.pendingCount).toBe(0);
const p3 = sem.acquire().then(() => sequence.push('acquire3')); sem.acquire().then(() => sequence.push('acquire3'));
const p4 = sem.acquire().then(() => sequence.push('acquire4')); sem.acquire().then(() => sequence.push('acquire4'));
const p5 = sem.acquire().then(() => sequence.push('acquire5')); const p5 = sem.acquire().then(() => sequence.push('acquire5'));
await new Promise((resolve) => setTimeout(resolve, 10)); await new Promise((resolve) => setTimeout(resolve, 10));
@@ -344,7 +344,7 @@ describe('retryWithBackoff', () => {
try { try {
await retryWithBackoff(fn, { maxAttempts: 2 }); await retryWithBackoff(fn, { maxAttempts: 2 });
expect(true).toBe(false); expect(true).toBe(false);
} catch (e: any) { } catch {
expect(callCount).toBe(2); expect(callCount).toBe(2);
} }
}); });
@@ -509,7 +509,7 @@ describe('withResilience', () => {
}, },
{ maxAttempts: 3, baseDelayMs: 10, jitter: false } { maxAttempts: 3, baseDelayMs: 10, jitter: false }
); );
} catch (e: any) { } catch {
expect(callCount).toBe(3); expect(callCount).toBe(3);
} }
}); });

View File

@@ -40,10 +40,7 @@ export class LLMGateway {
private semaphore: LLMSemaphore; private semaphore: LLMSemaphore;
private retryOptions: Partial<RetryOptions>; private retryOptions: Partial<RetryOptions>;
constructor( constructor(maxConcurrent = 4, retryOptions?: Partial<RetryOptions>) {
maxConcurrent = 4,
retryOptions?: Partial<RetryOptions>
) {
this.semaphore = new LLMSemaphore(maxConcurrent); this.semaphore = new LLMSemaphore(maxConcurrent);
this.retryOptions = retryOptions ?? {}; this.retryOptions = retryOptions ?? {};
} }
@@ -105,7 +102,10 @@ export class LLMGateway {
() => { () => {
const provider = this.getOrCreateProvider(assignment.provider_id); const provider = this.getOrCreateProvider(assignment.provider_id);
if (!provider.embed) { if (!provider.embed) {
throw new LLMError(`Provider '${provider.type}' does not support embeddings`, provider.type); throw new LLMError(
`Provider '${provider.type}' does not support embeddings`,
provider.type
);
} }
return provider.embed(texts); return provider.embed(texts);
}, },

View File

@@ -32,7 +32,6 @@ export interface LLMProvider {
/** Optional: embedding interface (only for providers that support it). */ /** Optional: embedding interface (only for providers that support it). */
embed?(texts: string[]): Promise<number[][]>; embed?(texts: string[]): Promise<number[][]>;
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@@ -50,7 +50,6 @@ class CodexEngine {
return; return;
} }
await this.store.init(); await this.store.init();
const recovered = await this.store.recoverInterruptedRuns(); const recovered = await this.store.recoverInterruptedRuns();
if (recovered > 0) { if (recovered > 0) {

View File

@@ -218,12 +218,12 @@ export class CodexRunner {
`name = "OpenAI"`, `name = "OpenAI"`,
`base_url = "${apiUrl}"`, `base_url = "${apiUrl}"`,
`env_key = "OPENAI_API_KEY"`, `env_key = "OPENAI_API_KEY"`,
`requires_openai_auth = false`, 'requires_openai_auth = false',
'', '',
'[mcp_servers.gitea-review]', '[mcp_servers.gitea-review]',
`url = "http://127.0.0.1:${port}/mcp/gitea-review"`, `url = "http://127.0.0.1:${port}/mcp/gitea-review"`,
`http_headers = { "X-Review-Run-Id" = "${runId}" }`, `http_headers = { "X-Review-Run-Id" = "${runId}" }`,
`required = true`, 'required = true',
'', '',
]; ];
@@ -276,9 +276,7 @@ export class CodexRunner {
sections.push(`## 项目级审查要求\n\n${projectPrompt}`); sections.push(`## 项目级审查要求\n\n${projectPrompt}`);
} }
sections.push( sections.push('当要求冲突时,优先级为:项目级审查要求 > 全局审查要求 > 审查原则。');
'当要求冲突时,优先级为:项目级审查要求 > 全局审查要求 > 审查原则。'
);
const contextLines: string[] = ['## 当前审查目标']; const contextLines: string[] = ['## 当前审查目标'];
@@ -288,7 +286,7 @@ export class CodexRunner {
if (run.baseSha) contextLines.push(`- Base SHA:${run.baseSha}`); if (run.baseSha) contextLines.push(`- Base SHA:${run.baseSha}`);
if (run.headSha) contextLines.push(`- Head SHA:${run.headSha}`); if (run.headSha) contextLines.push(`- Head SHA:${run.headSha}`);
if (lastReviewedHead) { if (lastReviewedHead) {
contextLines.push(`- 增量审查模式:仅审查上次审查后的新变更`); contextLines.push('- 增量审查模式:仅审查上次审查后的新变更');
contextLines.push(`- 上次审查 SHA:${lastReviewedHead}`); contextLines.push(`- 上次审查 SHA:${lastReviewedHead}`);
contextLines.push(`- 请使用 \`git diff ${lastReviewedHead}..${run.headSha}\` 获取增量差异`); contextLines.push(`- 请使用 \`git diff ${lastReviewedHead}..${run.headSha}\` 获取增量差异`);
} else { } else {
@@ -314,7 +312,11 @@ export class CodexRunner {
/** /**
* 执行 codex exec 子进程(自定义 prompt + MCP 工具) * 执行 codex exec 子进程(自定义 prompt + MCP 工具)
*/ */
private async runCodexProcess(workspacePath: string, run: ReviewRun, lastReviewedHead?: string): Promise<void> { private async runCodexProcess(
workspacePath: string,
run: ReviewRun,
lastReviewedHead?: string
): Promise<void> {
const timeoutMs = config.review.codexTimeoutMs; const timeoutMs = config.review.codexTimeoutMs;
const codexHome = path.join(workspacePath, '.codex'); const codexHome = path.join(workspacePath, '.codex');

View File

@@ -166,14 +166,21 @@ export class McpToolExecutor {
if (!prNumber) { if (!prNumber) {
prNumber = ctx.relatedPrNumber; prNumber = ctx.relatedPrNumber;
if (!prNumber && ctx.commitSha) { if (!prNumber && ctx.commitSha) {
const related = await giteaService.getRelatedPullRequest(ctx.owner, ctx.repo, ctx.commitSha); const related = await giteaService.getRelatedPullRequest(
ctx.owner,
ctx.repo,
ctx.commitSha
);
prNumber = related?.number; prNumber = related?.number;
} }
} }
if (prNumber) { if (prNumber) {
await giteaService.addPullRequestComment(ctx.owner, ctx.repo, prNumber, body); await giteaService.addPullRequestComment(ctx.owner, ctx.repo, prNumber, body);
logger.info('Codex MCP: \u5df2\u53d1\u5e03 PR \u5ba1\u67e5\u603b\u7ed3', { runId: ctx.runId, prNumber }); logger.info('Codex MCP: \u5df2\u53d1\u5e03 PR \u5ba1\u67e5\u603b\u7ed3', {
runId: ctx.runId,
prNumber,
});
} else if (ctx.commitSha) { } else if (ctx.commitSha) {
await giteaService.addCommitComment(ctx.owner, ctx.repo, ctx.commitSha, body); await giteaService.addCommitComment(ctx.owner, ctx.repo, ctx.commitSha, body);
logger.info('Codex MCP: \u5df2\u53d1\u5e03 Commit \u5ba1\u67e5\u603b\u7ed3', { logger.info('Codex MCP: \u5df2\u53d1\u5e03 Commit \u5ba1\u67e5\u603b\u7ed3', {
@@ -182,7 +189,12 @@ export class McpToolExecutor {
}); });
} else { } else {
return { return {
content: [{ type: 'text', text: '\u65e0\u6cd5\u53d1\u5e03\uff1a\u7f3a\u5c11 PR number \u6216 commit SHA' }], content: [
{
type: 'text',
text: '\u65e0\u6cd5\u53d1\u5e03\uff1a\u7f3a\u5c11 PR number \u6216 commit SHA',
},
],
isError: true, isError: true,
}; };
} }

View File

@@ -112,7 +112,16 @@ export class LocalRepoManager {
// fetch,使用认证参数 // fetch,使用认证参数
await this.sandboxExec.run( await this.sandboxExec.run(
'git', 'git',
[...authArgs, '--git-dir', mirrorPath, 'fetch', '--prune', 'origin', '+refs/*:refs/*', '^refs/reviewed/*'], [
...authArgs,
'--git-dir',
mirrorPath,
'fetch',
'--prune',
'origin',
'+refs/*:refs/*',
'^refs/reviewed/*',
],
{ {
cwd: this.workDir, cwd: this.workDir,
timeoutMs: this.commandTimeoutMs, timeoutMs: this.commandTimeoutMs,
@@ -252,27 +261,24 @@ export class LocalRepoManager {
* 保存审查快照 ref,记录 PR 最后一次成功审查的 baseSha 和 headSha * 保存审查快照 ref,记录 PR 最后一次成功审查的 baseSha 和 headSha
* 存储在 mirror 的 refs/reviewed/pr/{prNumber}/head 和 refs/reviewed/pr/{prNumber}/base * 存储在 mirror 的 refs/reviewed/pr/{prNumber}/head 和 refs/reviewed/pr/{prNumber}/base
*/ */
async saveReviewedRef(mirrorPath: string, prNumber: number, baseSha: string, headSha: string): Promise<void> { async saveReviewedRef(
mirrorPath: string,
prNumber: number,
baseSha: string,
headSha: string
): Promise<void> {
const unlock = await this.acquireMirrorLock(mirrorPath); const unlock = await this.acquireMirrorLock(mirrorPath);
try { try {
const headRef = `refs/reviewed/pr/${prNumber}/head`; const headRef = `refs/reviewed/pr/${prNumber}/head`;
const baseRef = `refs/reviewed/pr/${prNumber}/base`; const baseRef = `refs/reviewed/pr/${prNumber}/base`;
await this.sandboxExec.run( await this.sandboxExec.run('git', ['--git-dir', mirrorPath, 'update-ref', headRef, headSha], {
'git', cwd: this.workDir,
['--git-dir', mirrorPath, 'update-ref', headRef, headSha], timeoutMs: this.commandTimeoutMs,
{ });
cwd: this.workDir, await this.sandboxExec.run('git', ['--git-dir', mirrorPath, 'update-ref', baseRef, baseSha], {
timeoutMs: this.commandTimeoutMs, cwd: this.workDir,
} timeoutMs: this.commandTimeoutMs,
); });
await this.sandboxExec.run(
'git',
['--git-dir', mirrorPath, 'update-ref', baseRef, baseSha],
{
cwd: this.workDir,
timeoutMs: this.commandTimeoutMs,
}
);
logger.info('已保存审查快照 ref', { mirrorPath, prNumber, baseSha, headSha }); logger.info('已保存审查快照 ref', { mirrorPath, prNumber, baseSha, headSha });
} finally { } finally {
unlock(); unlock();
@@ -283,7 +289,10 @@ export class LocalRepoManager {
* 解析上次审查的快照:baseSha + headSha * 解析上次审查的快照:baseSha + headSha
* 如果任一 ref 不存在,返回 null * 如果任一 ref 不存在,返回 null
*/ */
async resolveReviewedRef(mirrorPath: string, prNumber: number): Promise<{ baseSha: string; headSha: string } | null> { async resolveReviewedRef(
mirrorPath: string,
prNumber: number
): Promise<{ baseSha: string; headSha: string } | null> {
try { try {
const headRef = `refs/reviewed/pr/${prNumber}/head`; const headRef = `refs/reviewed/pr/${prNumber}/head`;
const baseRef = `refs/reviewed/pr/${prNumber}/base`; const baseRef = `refs/reviewed/pr/${prNumber}/base`;
@@ -375,7 +384,10 @@ export class LocalRepoManager {
if (now - lastActive > maxAgeMs) { if (now - lastActive > maxAgeMs) {
await rm(mirrorPath, { recursive: true, force: true }); await rm(mirrorPath, { recursive: true, force: true });
cleaned++; cleaned++;
logger.info('已清理过期 mirror 目录', { mirrorPath, lastActiveDaysAgo: Math.floor((now - lastActive) / (24 * 60 * 60 * 1000)) }); logger.info('已清理过期 mirror 目录', {
mirrorPath,
lastActiveDaysAgo: Math.floor((now - lastActive) / (24 * 60 * 60 * 1000)),
});
} }
} catch (error) { } catch (error) {
logger.warn('检查/清理 mirror 目录失败', { logger.warn('检查/清理 mirror 目录失败', {

View File

@@ -243,7 +243,11 @@ export class TokenCounter {
}); });
}, CATALOG_TTL_MS); }, CATALOG_TTL_MS);
// Don't prevent process exit // Don't prevent process exit
if (this.refreshTimer && typeof this.refreshTimer === 'object' && 'unref' in this.refreshTimer) { if (
this.refreshTimer &&
typeof this.refreshTimer === 'object' &&
'unref' in this.refreshTimer
) {
(this.refreshTimer as NodeJS.Timeout).unref(); (this.refreshTimer as NodeJS.Timeout).unref();
} }
} }

View File

@@ -42,7 +42,10 @@ class ReviewEngine {
} }
/** Fresh DiffExtractor that reads current config values. */ /** Fresh DiffExtractor that reads current config values. */
private createDiffExtractor(sandboxExec: SandboxExec, localRepoManager: LocalRepoManager): DiffExtractor { private createDiffExtractor(
sandboxExec: SandboxExec,
localRepoManager: LocalRepoManager
): DiffExtractor {
return new DiffExtractor( return new DiffExtractor(
sandboxExec, sandboxExec,
localRepoManager, localRepoManager,
@@ -66,13 +69,10 @@ class ReviewEngine {
} }
// Configure LLM Gateway resilience from current config // Configure LLM Gateway resilience from current config
llmGateway.updateResilienceConfig( llmGateway.updateResilienceConfig(config.review.llmMaxConcurrentCalls, {
config.review.llmMaxConcurrentCalls, maxAttempts: config.review.llmRetryMaxAttempts,
{ baseDelayMs: config.review.llmRetryBaseDelayMs,
maxAttempts: config.review.llmRetryMaxAttempts, });
baseDelayMs: config.review.llmRetryBaseDelayMs,
}
);
// Preload dynamic model catalog from models.dev (non-blocking) // Preload dynamic model catalog from models.dev (non-blocking)
tokenCounter.refreshCatalog().catch((error) => { tokenCounter.refreshCatalog().catch((error) => {

View File

@@ -1,5 +1,5 @@
import type { LLMMessage } from '../../llm/types';
import config from '../../config'; import config from '../../config';
import type { LLMMessage } from '../../llm/types';
import { logger } from '../../utils/logger'; import { logger } from '../../utils/logger';
import { VectorMemoryStore } from '../memory/vector-store'; import { VectorMemoryStore } from '../memory/vector-store';
import { FileReviewStore } from '../store/file-review-store'; import { FileReviewStore } from '../store/file-review-store';

View File

@@ -78,11 +78,7 @@ export function createFunctionReferenceSearchTool(sandbox: SandboxExec): Tool {
for (const task of tasks) { for (const task of tasks) {
const pattern = task.patterns.join('|'); const pattern = task.patterns.join('|');
const args = [ const args = ['--json', '--max-count', String(max_results || 30)];
'--json',
'--max-count',
String(max_results || 30),
];
if (file_types && file_types.length > 0) { if (file_types && file_types.length > 0) {
args.push('--type-add', `custom:*.{${file_types.join(',')}}`); args.push('--type-add', `custom:*.{${file_types.join(',')}}`);