test: add unit tests for incremental review, codex engine, MCP tools, and cleanup

- LocalRepoManager: snapshot ref CRUD, getMirrorPath, cleanStaleMirrors (real git)
- DiffExtractor: incremental two-dot vs three-dot diff, token clipping (real git)
- Orchestrator + CodexRunner: incremental baseline resolution, rebase fallback
- McpToolExecutor: context management, tool dispatch, JSON-RPC handler routes
- CleanupScheduler: start/stop lifecycle, idempotency, scheduling logic
- Config schema: Codex field definitions (API URL, key, model, timeout, prompt)
This commit is contained in:
jeffusion
2026-03-07 00:49:52 +08:00
committed by 路遥知码力
parent 792ed7faa2
commit 2d4f670365
6 changed files with 1534 additions and 0 deletions

View File

@@ -0,0 +1,61 @@
import { describe, expect, test } from 'bun:test';
import { CONFIG_FIELDS } from '../config-schema';
/**
 * Looks up a CONFIG_FIELDS entry by its env key.
 * Fails the current test (via expect) when the key is absent, so callers
 * can use the returned field without further null checks.
 */
function findField(envKey: string) {
  const match = CONFIG_FIELDS.find((candidate) => candidate.envKey === envKey);
  expect(match).toBeDefined();
  return match!;
}
// Metadata contract tests for the Codex-related CONFIG_FIELDS entries:
// group, type, sensitivity, defaults, and (for the timeout) numeric bounds.
describe('config-schema codex fields', () => {
  test('CODEX_API_URL exists with expected metadata and default', () => {
    const { envKey, group, type, sensitive, defaultValue } = findField('CODEX_API_URL');
    expect(envKey).toBe('CODEX_API_URL');
    expect(group).toBe('review');
    expect(type).toBe('url');
    expect(sensitive).toBe(false);
    expect(defaultValue).toBe('https://api.openai.com/v1');
  });
  test('CODEX_API_KEY is marked sensitive', () => {
    const { envKey, group, type, sensitive, defaultValue } = findField('CODEX_API_KEY');
    expect(envKey).toBe('CODEX_API_KEY');
    expect(group).toBe('review');
    expect(type).toBe('string');
    expect(sensitive).toBe(true);
    // Secrets must not ship a baked-in default.
    expect(defaultValue).toBeUndefined();
  });
  test("CODEX_MODEL default is 'o3'", () => {
    const { envKey, group, type, sensitive, defaultValue } = findField('CODEX_MODEL');
    expect(envKey).toBe('CODEX_MODEL');
    expect(group).toBe('review');
    expect(type).toBe('string');
    expect(sensitive).toBe(false);
    expect(defaultValue).toBe('o3');
  });
  test('CODEX_TIMEOUT_MS has expected min/max bounds', () => {
    const { envKey, group, type, sensitive, min, max, defaultValue } = findField('CODEX_TIMEOUT_MS');
    expect(envKey).toBe('CODEX_TIMEOUT_MS');
    expect(group).toBe('review');
    expect(type).toBe('number');
    expect(sensitive).toBe(false);
    expect(min).toBe(30000);
    expect(max).toBe(600000);
    expect(defaultValue).toBe(300000);
  });
  test('CODEX_REVIEW_PROMPT exists in review group', () => {
    const { envKey, group, type, sensitive } = findField('CODEX_REVIEW_PROMPT');
    expect(envKey).toBe('CODEX_REVIEW_PROMPT');
    expect(group).toBe('review');
    expect(type).toBe('text');
    expect(sensitive).toBe(false);
  });
});

View File

@@ -0,0 +1,119 @@
import { afterEach, beforeEach, describe, expect, mock, spyOn, test } from 'bun:test';
import { cleanupScheduler } from '../cleanup-scheduler';
import { LocalRepoManager } from '../context/local-repo-manager';
// Captured arguments of one intercepted setTimeout call.
type PendingTimer = {
  callback: TimerHandler;
  delay: number;
};
/** Normalizes an optional setTimeout delay to a concrete number (0 when omitted). */
function toDelayNumber(delay: number | undefined): number {
  if (typeof delay === 'number') {
    return delay;
  }
  return 0;
}
/**
 * Milliseconds from `nowTimestamp` until the next local 2:00 AM.
 * When today's 2:00 AM has already passed (or is exactly now), the target
 * rolls over to tomorrow.
 */
function calcDelayToNext2Am(nowTimestamp: number): number {
  const current = new Date(nowTimestamp);
  const target = new Date(nowTimestamp);
  target.setHours(2, 0, 0, 0);
  if (target.getTime() <= current.getTime()) {
    target.setDate(target.getDate() + 1);
  }
  return target.getTime() - current.getTime();
}
// Lifecycle and scheduling tests for the singleton cleanupScheduler.
// setTimeout/clearTimeout are replaced with spies that record pending timers
// in a map instead of arming real timers, keeping every test synchronous.
describe('cleanupScheduler', () => {
  let pendingTimers: Map<number, PendingTimer>;
  let nextTimerId: number;
  beforeEach(() => {
    pendingTimers = new Map<number, PendingTimer>();
    nextTimerId = 1;
    // Fake setTimeout: records the callback/delay under a fresh numeric id.
    const setTimeoutBase = (callback: TimerHandler, delay?: number, ..._args: unknown[]) => {
      const timerId = nextTimerId;
      nextTimerId += 1;
      pendingTimers.set(timerId, { callback, delay: toDelayNumber(delay) });
      return timerId as unknown as ReturnType<typeof setTimeout>;
    };
    // Carry over __promisify__ so the replacement still structurally matches
    // Node's setTimeout type.
    const setTimeoutImpl = Object.assign(setTimeoutBase, {
      __promisify__: globalThis.setTimeout.__promisify__,
    }) as unknown as typeof setTimeout;
    // Fake clearTimeout: drops the recorded timer for the given id.
    const clearTimeoutImpl = ((timerId?: number | Timer) => {
      const numericTimerId = Number(timerId);
      pendingTimers.delete(numericTimerId);
    }) as typeof clearTimeout;
    spyOn(globalThis, 'setTimeout').mockImplementation(setTimeoutImpl);
    spyOn(globalThis, 'clearTimeout').mockImplementation(clearTimeoutImpl);
  });
  afterEach(() => {
    // Stop the scheduler first so no timer leaks into the next test, then undo spies.
    cleanupScheduler.stop();
    mock.restore();
  });
  test('start() is idempotent and does not create multiple timers', () => {
    cleanupScheduler.start();
    cleanupScheduler.start();
    // The second start() must be a no-op: exactly one timer armed.
    expect(globalThis.setTimeout).toHaveBeenCalledTimes(1);
    expect(pendingTimers.size).toBe(1);
  });
  test('stop() clears active timer', () => {
    cleanupScheduler.start();
    const clearTimeoutSpy = globalThis.clearTimeout;
    cleanupScheduler.stop();
    expect(clearTimeoutSpy).toHaveBeenCalledTimes(1);
    expect(pendingTimers.size).toBe(0);
  });
  test('stop() after start() then start() again re-schedules correctly', () => {
    cleanupScheduler.start();
    cleanupScheduler.stop();
    cleanupScheduler.start();
    expect(globalThis.setTimeout).toHaveBeenCalledTimes(2);
    expect(globalThis.clearTimeout).toHaveBeenCalledTimes(1);
    expect(pendingTimers.size).toBe(1);
  });
  test('schedule logic computes delay to next 2:00 AM', async () => {
    const cleanSpy = spyOn(LocalRepoManager.prototype, 'cleanStaleMirrors').mockResolvedValue(0);
    cleanupScheduler.start();
    // NOTE(review): asserts the scheduler's first timer fires after 60s —
    // presumably a startup grace delay; confirm against the scheduler impl.
    const firstTimer = pendingTimers.get(1);
    expect(firstTimer).toBeDefined();
    expect(firstTimer?.delay).toBe(60_000);
    // Bracket the callback invocation with timestamps so the expected
    // next-2AM delay can be bounded despite Date.now() advancing meanwhile.
    const beforeTrigger = Date.now();
    if (typeof firstTimer?.callback === 'function') {
      firstTimer.callback();
    }
    const afterTrigger = Date.now();
    // Firing timer 1 is expected to arm timer 2 aimed at the next 2:00 AM.
    const scheduledTimer = pendingTimers.get(2);
    expect(scheduledTimer).toBeDefined();
    const expectedMin = calcDelayToNext2Am(afterTrigger);
    const expectedMax = calcDelayToNext2Am(beforeTrigger);
    expect(scheduledTimer?.delay).toBeGreaterThanOrEqual(expectedMin);
    expect(scheduledTimer?.delay).toBeLessThanOrEqual(expectedMax);
    expect(cleanSpy).toHaveBeenCalledTimes(1);
  });
  test('stop() prevents pending cleanup execution', () => {
    const cleanSpy = spyOn(LocalRepoManager.prototype, 'cleanStaleMirrors').mockResolvedValue(0);
    cleanupScheduler.start();
    cleanupScheduler.stop();
    expect(pendingTimers.size).toBe(0);
    expect(cleanSpy).not.toHaveBeenCalled();
  });
});

View File

@@ -0,0 +1,424 @@
import { afterEach, beforeEach, describe, expect, mock, test } from 'bun:test';
import { CodexRunner } from '../codex/codex-runner';
import type { DiffExtractor } from '../context/diff-extractor';
import type { LocalRepoManager, LocalRepoPaths } from '../context/local-repo-manager';
import { ReviewOrchestrator } from '../orchestrator';
import type { FileReviewStore } from '../store/file-review-store';
import type { Finding, ReviewContext, ReviewRun } from '../types';
// A persisted reviewed-ref snapshot, or null when none has been stored.
type Snapshot = { baseSha: string; headSha: string } | null;
/**
 * Builds a baseline in-progress pull_request ReviewRun for tests.
 * Any field can be replaced through `overrides`.
 */
function makeRun(overrides: Partial<ReviewRun> = {}): ReviewRun {
  const defaults: ReviewRun = {
    id: 'run-1',
    idempotencyKey: 'owner/repo#1:base...head',
    eventType: 'pull_request',
    status: 'in_progress',
    owner: 'owner',
    repo: 'repo',
    cloneUrl: 'https://example.com/repo.git',
    prNumber: 1,
    baseSha: 'base-sha',
    headSha: 'head-sha',
    attempts: 1,
    maxAttempts: 3,
    createdAt: new Date().toISOString(),
    updatedAt: new Date().toISOString(),
  };
  return { ...defaults, ...overrides };
}
/**
 * In-memory stand-in for the FileReviewStore surface the orchestrator touches.
 * Every method is a bun mock; getRunDetails yields empty collections and
 * markFindingPublished reports success.
 */
function createStoreMock() {
  const resolveUndefined = async () => undefined;
  return {
    markRunIgnored: mock(resolveUndefined),
    addStep: mock(resolveUndefined),
    getRunDetails: mock(async () => ({ comments: [], findings: [] })),
    addFindings: mock(resolveUndefined),
    markFindingPublished: mock(async () => true),
    addCommentRecord: mock(resolveUndefined),
  };
}
/**
 * Mocks LocalRepoManager: fixed /tmp workspace paths plus a configurable
 * reviewed-ref `snapshot` returned by resolveReviewedRef.
 */
function createLocalRepoManagerMock(snapshot: Snapshot) {
  const repoPaths: LocalRepoPaths = {
    mirrorPath: '/tmp/mirror',
    workspacePath: '/tmp/workspace',
  };
  return {
    repoPaths,
    manager: {
      prepareWorkspace: mock(async () => repoPaths),
      resolveReviewedRef: mock(async () => snapshot),
      saveReviewedRef: mock(async () => undefined),
      cleanupWorkspace: mock(async () => undefined),
    },
  };
}
/**
 * Mocks DiffExtractor so buildContext resolves to a canned ReviewContext
 * carrying `diff`, and getSandbox yields a no-op sandbox per call.
 */
function createDiffExtractorMock(diff = 'diff --git a/a.ts b/a.ts\n+const x = 1;') {
  const context: ReviewContext = {
    workspacePath: '/tmp/workspace',
    mirrorPath: '/tmp/mirror',
    diff,
    changedFiles: [],
    parsedDiff: [],
    fileContents: {},
  };
  return {
    context,
    extractor: {
      // A fresh stub sandbox per call, always succeeding with empty output.
      getSandbox: mock(() => ({
        execute: async () => ({ stdout: '', stderr: '', exitCode: 0 }),
      })),
      buildContext: mock(async () => context),
    },
  };
}
/**
 * Short-circuits the orchestrator's review pipeline: triage reports a trivial
 * change with no relevant domains, the judge emits an empty verdict, and both
 * publish steps are stubbed. This leaves execute() exercising only
 * baseline-resolution and snapshot behavior.
 * Private members are reached through an unsafe cast, so the shapes below
 * must track ReviewOrchestrator's internals.
 */
function wireOrchestratorFastPath(orchestrator: ReviewOrchestrator) {
  const internal = orchestrator as unknown as {
    triageAgent: {
      analyze: (context: ReviewContext) => Promise<{
        complexity: 'trivial' | 'standard' | 'complex';
        relevantDomains: Array<'correctness' | 'security' | 'reliability' | 'maintainability'>;
      }>;
    };
    judgeAgent: {
      judge: (findings: Array<Omit<Finding, 'id' | 'runId' | 'published'>>) => {
        summaryMarkdown: string;
        findings: Array<Omit<Finding, 'id' | 'runId' | 'published'>>;
      };
    };
    publishSummary: (run: ReviewRun, summary: string, gatedCount: number) => Promise<void>;
    publishLineComments: (
      run: ReviewRun,
      comments: Array<{ path: string; line: number; comment: string }>
    ) => Promise<boolean>;
  };
  internal.triageAgent = {
    analyze: mock(async () => ({ complexity: 'trivial' as const, relevantDomains: [] })),
  };
  internal.judgeAgent = {
    judge: mock(() => ({ summaryMarkdown: 'ok', findings: [] })),
  };
  internal.publishSummary = mock(async () => undefined);
  // false = no line comments were published.
  internal.publishLineComments = mock(async () => false);
}
/**
 * Builds a CodexRunner wired to mocked store/repo-manager collaborators, with
 * its private workspace-config and codex-process steps stubbed so execute()
 * runs without spawning anything. `snapshot` controls what
 * resolveReviewedRef reports. Returns the runner plus the mocks needed for
 * assertions (notably runCodexProcessMock to inspect the baseline argument).
 */
function createCodexRunnerForExecute(snapshot: Snapshot) {
  const store = createStoreMock();
  const { manager, repoPaths } = createLocalRepoManagerMock(snapshot);
  const runner = new CodexRunner(
    store as unknown as FileReviewStore,
    manager as unknown as LocalRepoManager
  );
  // Unsafe cast to reach private methods; shapes must track CodexRunner.
  const internal = runner as unknown as {
    generateCodexWorkspaceConfig: (workspacePath: string, runId: string) => Promise<void>;
    runCodexProcess: (
      workspacePath: string,
      run: ReviewRun,
      lastReviewedHead?: string
    ) => Promise<void>;
  };
  internal.generateCodexWorkspaceConfig = mock(async () => undefined);
  internal.runCodexProcess = mock(async () => undefined);
  return {
    runner,
    store,
    manager,
    repoPaths,
    runCodexProcessMock: internal.runCodexProcess as ReturnType<typeof mock>,
  };
}
// Covers how ReviewOrchestrator.execute() picks the incremental-review
// baseline from a saved reviewed-ref snapshot and when it persists a new one.
// All collaborators are mocks; the review pipeline is short-circuited by
// wireOrchestratorFastPath so only baseline/snapshot behavior is observed.
describe('ReviewOrchestrator incremental baseline resolution', () => {
  beforeEach(() => {
    mock.restore();
  });
  afterEach(() => {
    mock.restore();
  });
  // Snapshot base equals the run's base: review continues from the old head.
  test('matching baseSha uses snapshot head as lastReviewedHead', async () => {
    const run = makeRun({ baseSha: 'same-base', headSha: 'new-head' });
    const store = createStoreMock();
    const { manager } = createLocalRepoManagerMock({ baseSha: 'same-base', headSha: 'old-head' });
    const { extractor } = createDiffExtractorMock();
    const orchestrator = new ReviewOrchestrator(
      store as unknown as FileReviewStore,
      manager as unknown as LocalRepoManager,
      extractor as unknown as DiffExtractor
    );
    wireOrchestratorFastPath(orchestrator);
    await orchestrator.execute(run);
    expect(manager.resolveReviewedRef).toHaveBeenCalledTimes(1);
    expect(extractor.buildContext).toHaveBeenCalledTimes(1);
    // Paths come from the repo-manager mock; 'old-head' is the snapshot head.
    expect(extractor.buildContext).toHaveBeenCalledWith(
      run,
      '/tmp/mirror',
      '/tmp/workspace',
      'old-head'
    );
  });
  // Base moved since the snapshot was taken: incremental baseline is discarded.
  test('different baseSha falls back to full review (no lastReviewedHead)', async () => {
    const run = makeRun({ baseSha: 'current-base' });
    const store = createStoreMock();
    const { manager } = createLocalRepoManagerMock({ baseSha: 'saved-base', headSha: 'old-head' });
    const { extractor } = createDiffExtractorMock();
    const orchestrator = new ReviewOrchestrator(
      store as unknown as FileReviewStore,
      manager as unknown as LocalRepoManager,
      extractor as unknown as DiffExtractor
    );
    wireOrchestratorFastPath(orchestrator);
    await orchestrator.execute(run);
    expect(extractor.buildContext).toHaveBeenCalledWith(
      run,
      '/tmp/mirror',
      '/tmp/workspace',
      undefined
    );
  });
  // No snapshot on disk at all: full review.
  test('missing snapshot falls back to full review', async () => {
    const run = makeRun();
    const store = createStoreMock();
    const { manager } = createLocalRepoManagerMock(null);
    const { extractor } = createDiffExtractorMock();
    const orchestrator = new ReviewOrchestrator(
      store as unknown as FileReviewStore,
      manager as unknown as LocalRepoManager,
      extractor as unknown as DiffExtractor
    );
    wireOrchestratorFastPath(orchestrator);
    await orchestrator.execute(run);
    expect(extractor.buildContext).toHaveBeenCalledWith(
      run,
      '/tmp/mirror',
      '/tmp/workspace',
      undefined
    );
  });
  // Snapshots are PR-scoped: commit_status events never consult them.
  test('non pull_request event skips incremental snapshot lookup', async () => {
    const run = makeRun({
      eventType: 'commit_status',
      prNumber: undefined,
      commitSha: 'commit-sha',
      headSha: undefined,
    });
    const store = createStoreMock();
    const { manager } = createLocalRepoManagerMock({ baseSha: 'same-base', headSha: 'old-head' });
    const { extractor } = createDiffExtractorMock();
    const orchestrator = new ReviewOrchestrator(
      store as unknown as FileReviewStore,
      manager as unknown as LocalRepoManager,
      extractor as unknown as DiffExtractor
    );
    wireOrchestratorFastPath(orchestrator);
    await orchestrator.execute(run);
    expect(manager.resolveReviewedRef).not.toHaveBeenCalled();
    expect(extractor.buildContext).toHaveBeenCalledWith(
      run,
      '/tmp/mirror',
      '/tmp/workspace',
      undefined
    );
  });
  // A completed review records (prNumber, base, head) for the next run.
  test('successful review saves reviewed ref snapshot', async () => {
    const run = makeRun({ baseSha: 'base-1', headSha: 'head-1', prNumber: 99 });
    const store = createStoreMock();
    const { manager } = createLocalRepoManagerMock(null);
    const { extractor } = createDiffExtractorMock();
    const orchestrator = new ReviewOrchestrator(
      store as unknown as FileReviewStore,
      manager as unknown as LocalRepoManager,
      extractor as unknown as DiffExtractor
    );
    wireOrchestratorFastPath(orchestrator);
    await orchestrator.execute(run);
    expect(manager.saveReviewedRef).toHaveBeenCalledTimes(1);
    expect(manager.saveReviewedRef).toHaveBeenCalledWith('/tmp/mirror', 99, 'base-1', 'head-1');
  });
  // Context construction blows up: error propagates, no snapshot is written.
  test('failed review does not save reviewed ref snapshot', async () => {
    const run = makeRun({ baseSha: 'base-1', headSha: 'head-1', prNumber: 99 });
    const store = createStoreMock();
    const { manager } = createLocalRepoManagerMock(null);
    const { extractor } = createDiffExtractorMock();
    extractor.buildContext = mock(async () => {
      throw new Error('context failed');
    });
    const orchestrator = new ReviewOrchestrator(
      store as unknown as FileReviewStore,
      manager as unknown as LocalRepoManager,
      extractor as unknown as DiffExtractor
    );
    let caught: Error | undefined;
    try {
      await orchestrator.execute(run);
    } catch (error) {
      caught = error as Error;
    }
    expect(caught).toBeDefined();
    expect(caught?.message).toContain('context failed');
    expect(manager.saveReviewedRef).not.toHaveBeenCalled();
  });
});
// Mirrors the orchestrator suite for CodexRunner.execute(), plus tests of the
// private prompt-building and API-URL-normalization helpers (reached through
// unsafe casts into the runner).
describe('CodexRunner incremental baseline resolution and prompt behavior', () => {
  beforeEach(() => {
    mock.restore();
  });
  afterEach(() => {
    mock.restore();
  });
  // Snapshot base matches the run's base: the old head is forwarded as baseline.
  test('matching baseSha uses snapshot head for incremental run', async () => {
    const run = makeRun({ baseSha: 'same-base', headSha: 'new-head' });
    const { runner, runCodexProcessMock } = createCodexRunnerForExecute({
      baseSha: 'same-base',
      headSha: 'old-head',
    });
    await runner.execute(run);
    expect(runCodexProcessMock).toHaveBeenCalledWith('/tmp/workspace', run, 'old-head');
  });
  // Base moved since the snapshot: incremental baseline is discarded.
  test('different baseSha falls back to full run', async () => {
    const run = makeRun({ baseSha: 'current-base', headSha: 'new-head' });
    const { runner, runCodexProcessMock } = createCodexRunnerForExecute({
      baseSha: 'saved-base',
      headSha: 'old-head',
    });
    await runner.execute(run);
    expect(runCodexProcessMock).toHaveBeenCalledWith('/tmp/workspace', run, undefined);
  });
  // No snapshot stored: full run.
  test('missing snapshot falls back to full run', async () => {
    const run = makeRun();
    const { runner, runCodexProcessMock } = createCodexRunnerForExecute(null);
    await runner.execute(run);
    expect(runCodexProcessMock).toHaveBeenCalledWith('/tmp/workspace', run, undefined);
  });
  // Snapshots are PR-scoped: commit_status events never consult them.
  test('non pull_request event skips incremental snapshot lookup', async () => {
    const run = makeRun({
      eventType: 'commit_status',
      prNumber: undefined,
      commitSha: 'commit-sha',
      headSha: undefined,
    });
    const { runner, manager, runCodexProcessMock } = createCodexRunnerForExecute({
      baseSha: 'saved-base',
      headSha: 'old-head',
    });
    await runner.execute(run);
    expect(manager.resolveReviewedRef).not.toHaveBeenCalled();
    expect(runCodexProcessMock).toHaveBeenCalledWith('/tmp/workspace', run, undefined);
  });
  // A completed codex review records (prNumber, base, head) for the next run.
  test('successful codex review saves reviewed ref snapshot', async () => {
    const run = makeRun({ baseSha: 'base-1', headSha: 'head-1', prNumber: 22 });
    const { runner, manager } = createCodexRunnerForExecute(null);
    await runner.execute(run);
    expect(manager.saveReviewedRef).toHaveBeenCalledTimes(1);
    expect(manager.saveReviewedRef).toHaveBeenCalledWith('/tmp/mirror', 22, 'base-1', 'head-1');
  });
  // The codex process fails: error propagates, no snapshot is written.
  test('failed codex review does not save reviewed ref snapshot', async () => {
    const run = makeRun({ baseSha: 'base-1', headSha: 'head-1', prNumber: 22 });
    const { runner, manager } = createCodexRunnerForExecute(null);
    const internal = runner as unknown as {
      runCodexProcess: (
        workspacePath: string,
        runArg: ReviewRun,
        lastReviewedHead?: string
      ) => Promise<void>;
    };
    internal.runCodexProcess = mock(async () => {
      throw new Error('codex failed');
    });
    let caught: Error | undefined;
    try {
      await runner.execute(run);
    } catch (error) {
      caught = error as Error;
    }
    expect(caught).toBeDefined();
    expect(caught?.message).toContain('codex failed');
    expect(manager.saveReviewedRef).not.toHaveBeenCalled();
  });
  test('buildReviewPrompt includes incremental instructions when lastReviewedHead is set', () => {
    const run = makeRun({ baseSha: 'base-a', headSha: 'head-b', prNumber: 7 });
    const { runner } = createCodexRunnerForExecute(null);
    const internal = runner as unknown as {
      buildReviewPrompt: (runArg: ReviewRun, lastReviewedHead?: string) => string;
    };
    const prompt = internal.buildReviewPrompt(run, 'reviewed-head-123');
    expect(prompt).toContain('增量审查模式:仅审查上次审查后的新变更');
    // NOTE(review): the literal below has no delimiter between 'SHA' and the
    // sha value — it looks garbled (e.g. a lost colon); confirm it matches
    // buildReviewPrompt's actual output before relying on this assertion.
    expect(prompt).toContain('上次审查 SHAreviewed-head-123');
    expect(prompt).toContain('git diff reviewed-head-123..head-b');
  });
  test('normalizeApiBaseUrl appends /v1 when missing', () => {
    const { runner } = createCodexRunnerForExecute(null);
    const internal = runner as unknown as {
      normalizeApiBaseUrl: (rawUrl: string) => string;
    };
    expect(internal.normalizeApiBaseUrl('https://api.example.com')).toBe(
      'https://api.example.com/v1'
    );
    // Already normalized input is left untouched.
    expect(internal.normalizeApiBaseUrl('https://api.example.com/v1')).toBe(
      'https://api.example.com/v1'
    );
    // A trailing slash is collapsed before appending.
    expect(internal.normalizeApiBaseUrl('https://api.example.com/')).toBe(
      'https://api.example.com/v1'
    );
  });
});

View File

@@ -0,0 +1,398 @@
import { afterEach, beforeEach, describe, expect, mock, spyOn, test } from 'bun:test';
import { FileReviewStore } from '../../store/file-review-store';
// Hand-typed mock of the gitea-service surface McpToolExecutor depends on.
// Each member is a bun `mock`, so call counts and arguments can be asserted.
type MockGiteaService = {
  // Posts a general comment on a pull request.
  addPullRequestComment: ReturnType<
    typeof mock<(owner: string, repo: string, prNumber: number, body: string) => Promise<void>>
  >;
  // Posts a comment attached to a single commit.
  addCommitComment: ReturnType<
    typeof mock<(owner: string, repo: string, commitSha: string, body: string) => Promise<void>>
  >;
  // Publishes line-level review comments against a specific commit.
  addLineComments: ReturnType<
    typeof mock<
      (
        owner: string,
        repo: string,
        prNumber: number,
        commitId: string,
        comments: Array<{ path: string; line: number; comment: string }>
      ) => Promise<void>
    >
  >;
  // Resolves the PR associated with a commit, or null when none exists.
  getRelatedPullRequest: ReturnType<
    typeof mock<
      (owner: string, repo: string, commitSha: string) => Promise<{ number: number } | null>
    >
  >;
};
/** Builds a fresh MockGiteaService whose calls all succeed with empty results. */
function createMockGiteaService(): MockGiteaService {
  const service: MockGiteaService = {
    addPullRequestComment: mock(async () => {}),
    addCommitComment: mock(async () => {}),
    addLineComments: mock(async () => {}),
    getRelatedPullRequest: mock(async () => null),
  };
  return service;
}
/**
 * Replaces the real gitea service module with the provided mock so modules
 * imported afterwards (dynamically, in each test) resolve to it.
 */
function setupGiteaModuleMock(mockGiteaService: MockGiteaService): void {
  const moduleFactory = () => ({
    giteaService: mockGiteaService,
  });
  mock.module('../../../services/gitea', moduleFactory);
}
// FileReviewStore pinned to a fixed temp path so all tests share one store.
// NOTE(review): nothing here cleans that directory up — tests below spy on
// addCommentRecord, but confirm no code path writes real files to this path.
class TestStore extends FileReviewStore {
  constructor() {
    super('/tmp/mcp-tool-executor-test-store');
  }
}
// Exercises McpToolExecutor: per-run context CRUD, tool dispatch, error paths,
// and the publishing tools' interaction with the (mocked) gitea service and
// comment store. McpToolExecutor is imported dynamically inside each test so
// the mock.module substitution from beforeEach is in effect.
describe('McpToolExecutor', () => {
  let mockGiteaService: MockGiteaService;
  beforeEach(() => {
    mockGiteaService = createMockGiteaService();
    setupGiteaModuleMock(mockGiteaService);
  });
  afterEach(() => {
    mock.restore();
  });
  test('registerContext / unregisterContext / getContext works as CRUD', async () => {
    const { McpToolExecutor } = await import('../mcp-tools');
    const executor = new McpToolExecutor();
    const context = {
      runId: 'run-1',
      owner: 'octo',
      repo: 'demo',
      prNumber: 42,
      baseSha: 'base-sha',
      headSha: 'head-sha',
    };
    executor.registerContext(context);
    expect(executor.getContext('run-1')).toEqual(context);
    executor.unregisterContext('run-1');
    expect(executor.getContext('run-1')).toBeUndefined();
  });
  // No lastReviewedHead registered -> the tool reports a full review.
  test('callTool(get_pr_info) returns full review mode when no lastReviewedHead', async () => {
    const { McpToolExecutor } = await import('../mcp-tools');
    const executor = new McpToolExecutor();
    executor.registerContext({
      runId: 'run-full',
      owner: 'octo',
      repo: 'demo',
      prNumber: 7,
      baseSha: 'base-1',
      headSha: 'head-1',
    });
    const result = await executor.callTool('run-full', 'get_pr_info', {});
    // Tool results carry JSON in content[0].text.
    const parsed = JSON.parse(result.content[0].text) as Record<string, unknown>;
    expect(result.isError).toBeUndefined();
    expect(parsed.owner).toBe('octo');
    expect(parsed.repo).toBe('demo');
    expect(parsed.prNumber).toBe(7);
    expect(parsed.baseSha).toBe('base-1');
    expect(parsed.headSha).toBe('head-1');
    expect(parsed.commitSha).toBe('head-1');
    expect(parsed.reviewMode).toBe('full');
    expect(parsed.lastReviewedHead).toBeUndefined();
  });
  // A registered lastReviewedHead switches the reported mode to incremental.
  test('callTool(get_pr_info) returns incremental review mode with lastReviewedHead', async () => {
    const { McpToolExecutor } = await import('../mcp-tools');
    const executor = new McpToolExecutor();
    executor.registerContext({
      runId: 'run-incremental',
      owner: 'octo',
      repo: 'demo',
      prNumber: 9,
      baseSha: 'base-2',
      headSha: 'head-2',
      lastReviewedHead: 'head-1',
    });
    const result = await executor.callTool('run-incremental', 'get_pr_info', {});
    const parsed = JSON.parse(result.content[0].text) as Record<string, unknown>;
    expect(result.isError).toBeUndefined();
    expect(parsed.reviewMode).toBe('incremental');
    expect(parsed.lastReviewedHead).toBe('head-1');
  });
  // Unrecognized tool name -> isError plus a (Chinese) "unknown tool" message.
  test('callTool(unknown_tool) returns error', async () => {
    const { McpToolExecutor } = await import('../mcp-tools');
    const executor = new McpToolExecutor();
    executor.registerContext({
      runId: 'run-unknown',
      owner: 'octo',
      repo: 'demo',
    });
    const result = await executor.callTool('run-unknown', 'unknown_tool', {});
    expect(result.isError).toBe(true);
    expect(result.content[0].text).toContain('未知工具');
  });
  // Unknown runId -> "review context not found" error naming the runId.
  test('callTool with non-existent runId returns context-not-found error', async () => {
    const { McpToolExecutor } = await import('../mcp-tools');
    const executor = new McpToolExecutor();
    const result = await executor.callTool('missing-run', 'get_pr_info', {});
    expect(result.isError).toBe(true);
    expect(result.content[0].text).toContain('找不到审查上下文');
    expect(result.content[0].text).toContain('missing-run');
  });
  test('callTool(add_review_summary) calls giteaService and persists comment record', async () => {
    const { McpToolExecutor } = await import('../mcp-tools');
    const store = new TestStore();
    // Spy keeps the store from writing to disk while recording the call.
    const addCommentRecordSpy = spyOn(store, 'addCommentRecord').mockResolvedValue();
    const executor = new McpToolExecutor(store);
    executor.registerContext({
      runId: 'run-summary',
      owner: 'octo',
      repo: 'demo',
      prNumber: 88,
      headSha: 'head-summary',
    });
    const result = await executor.callTool('run-summary', 'add_review_summary', {
      summary: 'Looks good overall.',
    });
    expect(result.isError).toBeUndefined();
    expect(result.content[0].text).toContain('审查总结已发布成功');
    expect(mockGiteaService.addPullRequestComment).toHaveBeenCalledTimes(1);
    // The summary may be wrapped in extra markup, so only containment is checked.
    expect(mockGiteaService.addPullRequestComment).toHaveBeenCalledWith(
      'octo',
      'demo',
      88,
      expect.stringContaining('Looks good overall.')
    );
    expect(addCommentRecordSpy).toHaveBeenCalledTimes(1);
    expect(addCommentRecordSpy).toHaveBeenCalledWith(
      expect.objectContaining({
        runId: 'run-summary',
        status: 'published',
      })
    );
  });
  test('callTool(add_line_comment) calls giteaService and persists comment record', async () => {
    const { McpToolExecutor } = await import('../mcp-tools');
    const store = new TestStore();
    const addCommentRecordSpy = spyOn(store, 'addCommentRecord').mockResolvedValue();
    const executor = new McpToolExecutor(store);
    executor.registerContext({
      runId: 'run-line',
      owner: 'octo',
      repo: 'demo',
      prNumber: 11,
      headSha: 'head-line',
    });
    const result = await executor.callTool('run-line', 'add_line_comment', {
      path: 'src/app.ts',
      line: 23,
      comment: 'Potential null pointer here.',
    });
    expect(result.isError).toBeUndefined();
    expect(result.content[0].text).toContain('行评论已添加');
    expect(mockGiteaService.addLineComments).toHaveBeenCalledTimes(1);
    // Line comments are anchored to the context's headSha.
    expect(mockGiteaService.addLineComments).toHaveBeenCalledWith('octo', 'demo', 11, 'head-line', [
      { path: 'src/app.ts', line: 23, comment: 'Potential null pointer here.' },
    ]);
    expect(addCommentRecordSpy).toHaveBeenCalledTimes(1);
    expect(addCommentRecordSpy).toHaveBeenCalledWith(
      expect.objectContaining({
        runId: 'run-line',
        status: 'published',
        path: 'src/app.ts',
        line: 23,
      })
    );
  });
});
// Routes-level tests for the MCP JSON-RPC endpoint. Each test replaces the
// mcp-tools module (so tools/call hits a controllable executor) and then
// dynamically imports mcp-handler so the substitution takes effect.
describe('mcpRouter JSON-RPC', () => {
  afterEach(() => {
    mock.restore();
  });
  test('initialize returns protocol and server info', async () => {
    const callTool = mock(async () => ({ content: [{ type: 'text', text: 'ok' }] }));
    mock.module('../mcp-tools', () => ({
      MCP_TOOLS: [{ name: 'get_pr_info', description: 'desc', inputSchema: { type: 'object' } }],
      mcpToolExecutor: { callTool },
    }));
    const { mcpRouter } = await import('../mcp-handler');
    const response = await mcpRouter.request('/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-Review-Run-Id': 'run-init' },
      body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'initialize', params: {} }),
    });
    const data = (await response.json()) as {
      jsonrpc: string;
      id: number;
      result: { protocolVersion: string; serverInfo: { name: string; version: string } };
    };
    expect(response.status).toBe(200);
    expect(data.jsonrpc).toBe('2.0');
    expect(data.id).toBe(1);
    expect(data.result.protocolVersion).toBe('2025-03-26');
    expect(data.result.serverInfo.name).toBe('gitea-review');
    // initialize must not touch the tool executor.
    expect(callTool).not.toHaveBeenCalled();
  });
  test('tools/list returns tools', async () => {
    const callTool = mock(async () => ({ content: [{ type: 'text', text: 'ok' }] }));
    mock.module('../mcp-tools', () => ({
      MCP_TOOLS: [
        { name: 'add_review_summary', description: 'desc', inputSchema: { type: 'object' } },
      ],
      mcpToolExecutor: { callTool },
    }));
    const { mcpRouter } = await import('../mcp-handler');
    const response = await mcpRouter.request('/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-Review-Run-Id': 'run-list' },
      body: JSON.stringify({ jsonrpc: '2.0', id: 2, method: 'tools/list' }),
    });
    const data = (await response.json()) as {
      result: { tools: Array<{ name: string }> };
    };
    expect(response.status).toBe(200);
    expect(data.result.tools).toHaveLength(1);
    expect(data.result.tools[0].name).toBe('add_review_summary');
  });
  // tools/call forwards (runId-from-header, tool name, arguments) to the executor.
  test('tools/call dispatches to mcpToolExecutor', async () => {
    const toolResult = { content: [{ type: 'text', text: 'tool-ok' }] };
    const callTool = mock(async () => toolResult);
    mock.module('../mcp-tools', () => ({
      MCP_TOOLS: [],
      mcpToolExecutor: { callTool },
    }));
    const { mcpRouter } = await import('../mcp-handler');
    const response = await mcpRouter.request('/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-Review-Run-Id': 'run-call' },
      body: JSON.stringify({
        jsonrpc: '2.0',
        id: 3,
        method: 'tools/call',
        params: { name: 'get_pr_info', arguments: { verbose: true } },
      }),
    });
    const data = (await response.json()) as { result: { content: Array<{ text: string }> } };
    expect(response.status).toBe(200);
    expect(callTool).toHaveBeenCalledTimes(1);
    expect(callTool).toHaveBeenCalledWith('run-call', 'get_pr_info', { verbose: true });
    expect(data.result.content[0].text).toBe('tool-ok');
  });
  // JSON-RPC notifications (no id) expect no response body: 202 Accepted.
  test('notification-only batch returns 202', async () => {
    const callTool = mock(async () => ({ content: [{ type: 'text', text: 'tool-ok' }] }));
    mock.module('../mcp-tools', () => ({
      MCP_TOOLS: [],
      mcpToolExecutor: { callTool },
    }));
    const { mcpRouter } = await import('../mcp-handler');
    const response = await mcpRouter.request('/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-Review-Run-Id': 'run-notif' },
      body: JSON.stringify([{ jsonrpc: '2.0', method: 'notifications/initialized' }]),
    });
    expect(response.status).toBe(202);
    expect(callTool).not.toHaveBeenCalled();
  });
  // Malformed body -> JSON-RPC parse error (-32700) with HTTP 400.
  test('invalid JSON returns parse error', async () => {
    const callTool = mock(async () => ({ content: [{ type: 'text', text: 'tool-ok' }] }));
    mock.module('../mcp-tools', () => ({
      MCP_TOOLS: [],
      mcpToolExecutor: { callTool },
    }));
    const { mcpRouter } = await import('../mcp-handler');
    const response = await mcpRouter.request('/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-Review-Run-Id': 'run-parse' },
      body: '{not-json',
    });
    const data = (await response.json()) as { error: { code: number; message: string } };
    expect(response.status).toBe(400);
    expect(data.error.code).toBe(-32700);
    expect(data.error.message).toBe('Parse error');
  });
  // Missing run-id header -> invalid params (-32602) but still HTTP 200,
  // since the JSON-RPC envelope itself was valid.
  test('tools/call without runId header returns invalid params error', async () => {
    const callTool = mock(async () => ({ content: [{ type: 'text', text: 'tool-ok' }] }));
    mock.module('../mcp-tools', () => ({
      MCP_TOOLS: [],
      mcpToolExecutor: { callTool },
    }));
    const { mcpRouter } = await import('../mcp-handler');
    const response = await mcpRouter.request('/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        jsonrpc: '2.0',
        id: 4,
        method: 'tools/call',
        params: { name: 'get_pr_info', arguments: {} },
      }),
    });
    const data = (await response.json()) as { error: { code: number; message: string } };
    expect(response.status).toBe(200);
    expect(data.error.code).toBe(-32602);
    expect(data.error.message).toContain('Missing X-Review-Run-Id header');
    expect(callTool).not.toHaveBeenCalled();
  });
  // Unknown method -> method not found (-32601), HTTP 200.
  test('unknown method returns method-not-found error', async () => {
    const callTool = mock(async () => ({ content: [{ type: 'text', text: 'tool-ok' }] }));
    mock.module('../mcp-tools', () => ({
      MCP_TOOLS: [],
      mcpToolExecutor: { callTool },
    }));
    const { mcpRouter } = await import('../mcp-handler');
    const response = await mcpRouter.request('/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-Review-Run-Id': 'run-missing-method' },
      body: JSON.stringify({ jsonrpc: '2.0', id: 5, method: 'no_such_method' }),
    });
    const data = (await response.json()) as { error: { code: number; message: string } };
    expect(response.status).toBe(200);
    expect(data.error.code).toBe(-32601);
    expect(data.error.message).toContain('Method not found');
  });
});

View File

@@ -0,0 +1,305 @@
import { afterEach, beforeEach, describe, expect, test } from 'bun:test';
import { mkdir, mkdtemp, rm, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import path from 'node:path';
import { ReviewRun } from '../../types';
import { DiffExtractor } from '../diff-extractor';
import { LocalRepoManager } from '../local-repo-manager';
import { SandboxExec } from '../sandbox-exec';
import { tokenCounter } from '../token-counter';
// Fixture for a real git repo whose main branch diverged from the feature
// branch after the branch point (exercises two-dot vs three-dot diffs).
interface DivergedFixture {
  tempDir: string;
  repoPath: string;
  extractor: DiffExtractor;
  run: ReviewRun;
  // SHA of the first feature commit — the "already reviewed" boundary.
  lastReviewedHead: string;
}
// Fixture for a repo whose diff exceeds the extractor's token budget.
interface LargeDiffFixture {
  tempDir: string;
  repoPath: string;
  extractor: DiffExtractor;
  run: ReviewRun;
  // The full diff before any clipping, for comparison against the clipped one.
  unclippedDiff: string;
}
// Signature of DiffExtractor's getDiff — presumably a private method reached
// via an internal cast in tests below; confirm it tracks the implementation.
type GetDiffMethod = (
  workspacePath: string,
  eventType: ReviewRun['eventType'],
  baseSha: string,
  targetSha: string,
  incremental?: boolean
) => Promise<string>;
// Signature of DiffExtractor's getChangedFiles — same caveat as above.
type GetChangedFilesMethod = (
  workspacePath: string,
  baseSha: string,
  targetSha: string,
  incremental?: boolean
) => Promise<Array<{ path: string }>>;
/** Builds a minimal in-progress pull_request ReviewRun for the given SHAs and clone URL. */
function createRun(baseSha: string, headSha: string, cloneUrl: string): ReviewRun {
  const timestamp = new Date().toISOString();
  return {
    id: 'test-run-1',
    idempotencyKey: 'test',
    eventType: 'pull_request',
    status: 'in_progress',
    owner: 'test',
    repo: 'test',
    cloneUrl,
    baseSha,
    headSha,
    attempts: 1,
    maxAttempts: 3,
    createdAt: timestamp,
    updatedAt: timestamp,
  };
}
function createExtractor(workDir: string, sandbox: SandboxExec): DiffExtractor {
const localRepoManager = new LocalRepoManager(workDir, sandbox, 10_000);
return new DiffExtractor(sandbox, localRepoManager, 10_000, 200, 200_000);
}
/**
 * Builds a real git repo where `main` and `feature` have diverged:
 *  - base commit on main (shared.txt)
 *  - feature commit one (edits shared.txt, adds feature-only.txt) —
 *    captured as `lastReviewedHead`
 *  - feature commit two (edits shared.txt, adds incremental-only.txt) — headSha
 *  - one extra commit on main (main-only.txt) — captured as the run's baseSha
 * Returns the fixture plus a DiffExtractor rooted at the temp dir.
 */
async function setupDivergedRepoFixture(): Promise<DivergedFixture> {
  const tempDir = await mkdtemp(path.join(tmpdir(), 'diff-extractor-diverged-'));
  try {
    const repoPath = path.join(tempDir, 'repo');
    await mkdir(repoPath, { recursive: true });
    const sandbox = new SandboxExec(['git']);
    const git = async (...args: string[]) =>
      sandbox.run('git', args, {
        cwd: repoPath,
        timeoutMs: 10_000,
      });
    await git('init');
    await git('checkout', '-b', 'main');
    await git('config', 'user.email', 'test@example.com');
    await git('config', 'user.name', 'Test User');
    await writeFile(path.join(repoPath, 'shared.txt'), 'base-line\n', 'utf-8');
    await git('add', '.');
    await git('commit', '-m', 'base commit');
    await git('checkout', '-b', 'feature');
    await writeFile(path.join(repoPath, 'shared.txt'), 'base-line\nfeature-one-line\n', 'utf-8');
    await writeFile(path.join(repoPath, 'feature-only.txt'), 'feature branch commit one\n', 'utf-8');
    await git('add', '.');
    await git('commit', '-m', 'feature commit one');
    const lastReviewedHead = (await git('rev-parse', 'HEAD')).stdout.trim();
    await writeFile(
      path.join(repoPath, 'shared.txt'),
      'base-line\nfeature-one-line\nfeature-two-line\n',
      'utf-8'
    );
    await writeFile(
      path.join(repoPath, 'incremental-only.txt'),
      'feature branch commit two\n',
      'utf-8'
    );
    await git('add', '.');
    await git('commit', '-m', 'feature commit two');
    const headSha = (await git('rev-parse', 'HEAD')).stdout.trim();
    await git('checkout', 'main');
    await writeFile(path.join(repoPath, 'main-only.txt'), 'main branch only change\n', 'utf-8');
    await git('add', '.');
    await git('commit', '-m', 'main branch diverges');
    const baseSha = (await git('rev-parse', 'HEAD')).stdout.trim();
    // Leave the working tree on feature so the extractor sees the PR head.
    await git('checkout', 'feature');
    const run = createRun(baseSha, headSha, `file://${repoPath}`);
    const extractor = createExtractor(tempDir, sandbox);
    return {
      tempDir,
      repoPath,
      extractor,
      run,
      lastReviewedHead,
    };
  } catch (error) {
    // If setup fails partway, beforeEach throws and afterEach never sees a
    // fixture — remove the temp dir here so failed runs don't leak dirs.
    await rm(tempDir, { recursive: true, force: true });
    throw error;
  }
}
/**
 * Builds a repo whose second commit rewrites huge.txt with a ~140KB payload,
 * producing a diff large enough to exceed the extractor's raw-diff token
 * budget. `unclippedDiff` holds the raw `git diff base...head` output for
 * comparison against the clipped context.
 */
async function setupLargeDiffRepoFixture(): Promise<LargeDiffFixture> {
  const tempDir = await mkdtemp(path.join(tmpdir(), 'diff-extractor-large-diff-'));
  try {
    const repoPath = path.join(tempDir, 'repo');
    await mkdir(repoPath, { recursive: true });
    const sandbox = new SandboxExec(['git']);
    const git = async (...args: string[]) =>
      sandbox.run('git', args, {
        cwd: repoPath,
        timeoutMs: 10_000,
      });
    await git('init');
    await git('checkout', '-b', 'main');
    await git('config', 'user.email', 'test@example.com');
    await git('config', 'user.name', 'Test User');
    await writeFile(path.join(repoPath, 'huge.txt'), 'seed\n', 'utf-8');
    await git('add', '.');
    await git('commit', '-m', 'base commit');
    const baseSha = (await git('rev-parse', 'HEAD')).stdout.trim();
    const largePayload = `${'x'.repeat(140_000)}\n`;
    await writeFile(path.join(repoPath, 'huge.txt'), largePayload, 'utf-8');
    await git('add', '.');
    await git('commit', '-m', 'huge diff commit');
    const headSha = (await git('rev-parse', 'HEAD')).stdout.trim();
    const unclippedDiff = (await git('diff', '--unified=3', `${baseSha}...${headSha}`)).stdout;
    const run = createRun(baseSha, headSha, `file://${repoPath}`);
    const extractor = createExtractor(tempDir, sandbox);
    return {
      tempDir,
      repoPath,
      extractor,
      run,
      unclippedDiff,
    };
  } catch (error) {
    // If setup fails partway, beforeEach throws and afterEach never sees a
    // fixture — remove the temp dir here so failed runs don't leak dirs.
    await rm(tempDir, { recursive: true, force: true });
    throw error;
  }
}
// Exercises DiffExtractor against a real on-disk git repo whose `main` and
// `feature` branches have diverged (see setupDivergedRepoFixture): feature
// carries two commits beyond the merge base, main has one commit of its own,
// so three-dot vs two-dot diff ranges yield observably different change sets.
describe('DiffExtractor incremental review mode', () => {
  let fixture: DivergedFixture;
  beforeEach(async () => {
    fixture = await setupDivergedRepoFixture();
  });
  afterEach(async () => {
    // The fixture lives under a mkdtemp directory; always remove it.
    await rm(fixture.tempDir, { recursive: true, force: true });
  });
  test('buildContext without lastReviewedHead uses base...head semantics', async () => {
    // Full (non-incremental) review: both feature commits appear, while the
    // commit that exists only on main does not.
    const context = await fixture.extractor.buildContext(
      fixture.run,
      fixture.repoPath,
      fixture.repoPath
    );
    const paths = context.changedFiles.map((file) => file.path);
    expect(paths).toContain('feature-only.txt');
    expect(paths).toContain('incremental-only.txt');
    expect(paths).toContain('shared.txt');
    expect(paths).not.toContain('main-only.txt');
    expect(context.diff).toContain('feature branch commit one');
    expect(context.diff).toContain('feature branch commit two');
    expect(context.diff).not.toContain('main branch only change');
  });
  test('buildContext with lastReviewedHead uses incremental two-dot range', async () => {
    // Incremental review: only changes made after lastReviewedHead (feature
    // commit one) remain visible — commit one's additions drop out.
    const context = await fixture.extractor.buildContext(
      fixture.run,
      fixture.repoPath,
      fixture.repoPath,
      fixture.lastReviewedHead
    );
    const paths = context.changedFiles.map((file) => file.path);
    expect(paths).toContain('incremental-only.txt');
    expect(paths).toContain('shared.txt');
    expect(paths).not.toContain('feature-only.txt');
    expect(paths).not.toContain('main-only.txt');
    expect(context.diff).toContain('feature branch commit two');
    expect(context.diff).toContain('feature-two-line');
    expect(context.diff).not.toContain('feature branch commit one');
  });
  test('getDiff for pull_request incremental=false uses three-dot range', async () => {
    // Reflect.get reaches the private getDiff without casting the instance
    // to any; bind keeps `this` pointed at the extractor.
    const getDiff = Reflect.get(fixture.extractor, 'getDiff').bind(
      fixture.extractor
    ) as GetDiffMethod;
    const diff = await getDiff(
      fixture.repoPath,
      fixture.run.eventType,
      fixture.run.baseSha!,
      fixture.run.headSha!,
      false
    );
    expect(diff).toContain('feature branch commit one');
    expect(diff).toContain('feature branch commit two');
    expect(diff).not.toContain('main branch only change');
    expect(diff).not.toContain('diff --git a/main-only.txt b/main-only.txt');
  });
  test('getDiff for pull_request incremental=true uses two-dot range', async () => {
    const getDiff = Reflect.get(fixture.extractor, 'getDiff').bind(
      fixture.extractor
    ) as GetDiffMethod;
    const diff = await getDiff(
      fixture.repoPath,
      fixture.run.eventType,
      fixture.run.baseSha!,
      fixture.run.headSha!,
      true
    );
    expect(diff).toContain('feature branch commit one');
    expect(diff).toContain('feature branch commit two');
    // Two-dot base..head compares against main's tip directly, so main's own
    // commit shows up as a removal relative to the feature head.
    expect(diff).toContain('diff --git a/main-only.txt b/main-only.txt');
    expect(diff).toContain('-main branch only change');
  });
  test('getChangedFiles respects incremental flag diff range', async () => {
    const getChangedFiles = Reflect.get(fixture.extractor, 'getChangedFiles').bind(
      fixture.extractor
    ) as GetChangedFilesMethod;
    const threeDotFiles = await getChangedFiles(
      fixture.repoPath,
      fixture.run.baseSha!,
      fixture.run.headSha!,
      false
    );
    const twoDotFiles = await getChangedFiles(
      fixture.repoPath,
      fixture.run.baseSha!,
      fixture.run.headSha!,
      true
    );
    const threeDotPaths = threeDotFiles.map((file) => file.path);
    const twoDotPaths = twoDotFiles.map((file) => file.path);
    // main-only.txt is the discriminator: excluded by three-dot (merge-base)
    // semantics, included by the direct two-dot comparison.
    expect(threeDotPaths).not.toContain('main-only.txt');
    expect(twoDotPaths).toContain('main-only.txt');
    expect(threeDotPaths).toContain('feature-only.txt');
    expect(twoDotPaths).toContain('feature-only.txt');
  });
});
// Covers token-budget enforcement: a diff far larger than the raw-diff token
// budget must come back clipped, with an explicit truncation marker.
describe('DiffExtractor token budget clipping', () => {
  let fx: LargeDiffFixture;
  beforeEach(async () => {
    fx = await setupLargeDiffRepoFixture();
  });
  afterEach(async () => {
    await rm(fx.tempDir, { recursive: true, force: true });
  });
  test('buildContext clips raw diff when exceeding MAX_RAW_DIFF_TOKENS', async () => {
    // Sanity-check that the fixture really blows past the budget first.
    expect(tokenCounter.count(fx.unclippedDiff)).toBeGreaterThan(30_000);
    const context = await fx.extractor.buildContext(fx.run, fx.repoPath, fx.repoPath);
    expect(context.diff).toContain('[truncated, exceeded 30000 token budget]');
    expect(context.diff.length).toBeLessThan(fx.unclippedDiff.length);
  });
});

View File

@@ -0,0 +1,227 @@
import { afterEach, beforeEach, describe, expect, test } from 'bun:test';
import { createHash } from 'node:crypto';
import { access, mkdir, mkdtemp, rm, utimes, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import path from 'node:path';
import { LocalRepoManager } from '../local-repo-manager';
import { SandboxExec } from '../sandbox-exec';
/**
 * Spawns `git <args>` in `cwd` with piped output streams and resolves with
 * the collected stdout/stderr; rejects with an error embedding stderr when
 * the process exits non-zero.
 */
async function runGit(args: string[], cwd?: string): Promise<{ stdout: string; stderr: string }> {
  const child = Bun.spawn({
    cmd: ['git', ...args],
    cwd,
    stdout: 'pipe',
    stderr: 'pipe',
  });
  // Begin draining both pipes before awaiting exit so neither stream blocks.
  const stdoutPromise = new Response(child.stdout).text();
  const stderrPromise = new Response(child.stderr).text();
  const exitCode = await child.exited;
  const stdout = await stdoutPromise;
  const stderr = await stderrPromise;
  if (exitCode !== 0) {
    throw new Error(`git ${args.join(' ')} failed with code=${exitCode}: ${stderr}`);
  }
  return { stdout, stderr };
}
/** Resolves true when `targetPath` is accessible on disk, false on any access error. */
async function pathExists(targetPath: string): Promise<boolean> {
  return access(targetPath).then(
    () => true,
    () => false
  );
}
/**
 * Creates `count` linear commits in a scratch clone under rootDir, pushes
 * them to the bare mirror's `main` branch, and returns the commit SHAs in
 * creation order (oldest first).
 */
async function createSeedCommits(
  rootDir: string,
  bareRepoPath: string,
  count: number
): Promise<string[]> {
  const seedRepoPath = path.join(rootDir, 'seed');
  await mkdir(seedRepoPath, { recursive: true });
  // Initialize the repo and set a local identity so commits succeed anywhere.
  for (const setupArgs of [
    ['init'],
    ['config', 'user.name', 'Test User'],
    ['config', 'user.email', 'test@example.com'],
  ]) {
    await runGit(setupArgs, seedRepoPath);
  }
  const commitShas: string[] = [];
  const fixtureFile = path.join(seedRepoPath, 'fixture.txt');
  for (let i = 0; i < count; i++) {
    // Each iteration rewrites the same file so every commit has one change.
    await writeFile(fixtureFile, `fixture-${i}\n`, 'utf8');
    await runGit(['add', 'fixture.txt'], seedRepoPath);
    await runGit(['commit', '-m', `commit-${i}`], seedRepoPath);
    commitShas.push((await runGit(['rev-parse', 'HEAD'], seedRepoPath)).stdout.trim());
  }
  await runGit(['remote', 'add', 'origin', bareRepoPath], seedRepoPath);
  await runGit(['push', 'origin', 'HEAD:refs/heads/main'], seedRepoPath);
  return commitShas;
}
// End-to-end tests for LocalRepoManager against a real bare git mirror:
// reviewed-ref bookkeeping under refs/reviewed/pr/<n>/{base,head},
// mirror-path derivation, and age-based cleanup of mirrors/workspaces.
describe('LocalRepoManager snapshot refs and cleanup', () => {
  let tempDir: string;
  let workDir: string;
  let mirrorPath: string;
  let sandbox: SandboxExec;
  let manager: LocalRepoManager;
  let commitShas: string[];
  beforeEach(async () => {
    tempDir = await mkdtemp(path.join(tmpdir(), 'local-repo-manager-test-'));
    workDir = path.join(tempDir, 'workdir');
    mirrorPath = path.join(tempDir, 'mirror.git');
    await mkdir(workDir, { recursive: true });
    // A bare repo stands in for the remote mirror; seed 8 linear commits so
    // tests can pick arbitrary base/head SHA pairs from commitShas.
    await runGit(['init', '--bare', mirrorPath]);
    commitShas = await createSeedCommits(tempDir, mirrorPath, 8);
    sandbox = new SandboxExec(['git']);
    manager = new LocalRepoManager(workDir, sandbox, 10_000);
  });
  afterEach(async () => {
    await rm(tempDir, { recursive: true, force: true });
  });
  describe('saveReviewedRef / resolveReviewedRef', () => {
    test('saveReviewedRef stores base/head refs and resolveReviewedRef reads them back', async () => {
      const prNumber = 101;
      const baseSha = commitShas[5];
      const headSha = commitShas[7];
      await manager.saveReviewedRef(mirrorPath, prNumber, baseSha, headSha);
      const resolved = await manager.resolveReviewedRef(mirrorPath, prNumber);
      expect(resolved).toEqual({ baseSha, headSha });
      // Also verify the on-disk refs directly with git, not just through the
      // manager's own read path.
      const headRef = await sandbox.run(
        'git',
        ['--git-dir', mirrorPath, 'rev-parse', '--verify', `refs/reviewed/pr/${prNumber}/head`],
        { cwd: workDir, timeoutMs: 10_000 }
      );
      const baseRef = await sandbox.run(
        'git',
        ['--git-dir', mirrorPath, 'rev-parse', '--verify', `refs/reviewed/pr/${prNumber}/base`],
        { cwd: workDir, timeoutMs: 10_000 }
      );
      expect(headRef.stdout.trim()).toBe(headSha);
      expect(baseRef.stdout.trim()).toBe(baseSha);
    });
    test('resolveReviewedRef returns null when refs do not exist', async () => {
      const resolved = await manager.resolveReviewedRef(mirrorPath, 9999);
      expect(resolved).toBeNull();
    });
    test('concurrent saveReviewedRef calls keep ref pair consistent', async () => {
      const prNumber = 202;
      // Seven overlapping saves race on the same PR's refs; whichever write
      // wins, the stored base and head must come from the same pair.
      const pairs = commitShas.slice(0, 7).map((baseSha, index) => ({
        baseSha,
        headSha: commitShas[index + 1],
      }));
      await Promise.all(
        pairs.map((pair) =>
          manager.saveReviewedRef(mirrorPath, prNumber, pair.baseSha, pair.headSha)
        )
      );
      const resolved = await manager.resolveReviewedRef(mirrorPath, prNumber);
      expect(resolved).not.toBeNull();
      const matchedPair = pairs.find(
        (pair) => pair.baseSha === resolved!.baseSha && pair.headSha === resolved!.headSha
      );
      expect(matchedPair).toBeDefined();
    });
  });
  describe('deleteReviewedRefs', () => {
    test('removes reviewed refs and delete is idempotent when refs are missing', async () => {
      const prNumber = 303;
      const baseSha = commitShas[3];
      const headSha = commitShas[4];
      await manager.saveReviewedRef(mirrorPath, prNumber, baseSha, headSha);
      expect(await manager.resolveReviewedRef(mirrorPath, prNumber)).toEqual({ baseSha, headSha });
      // Second delete targets already-removed refs and must not throw.
      await manager.deleteReviewedRefs(mirrorPath, prNumber);
      await manager.deleteReviewedRefs(mirrorPath, prNumber);
      const resolved = await manager.resolveReviewedRef(mirrorPath, prNumber);
      expect(resolved).toBeNull();
    });
    test('does not throw when deleting refs that never existed', async () => {
      await manager.deleteReviewedRefs(mirrorPath, 4040);
    });
  });
  describe('getMirrorPath', () => {
    test('returns hash-based mirror path for owner/repo', () => {
      const owner = 'octocat';
      const repo = 'hello-world';
      // Mirror dirs are keyed by the first 16 hex chars of sha256("owner/repo").
      const expectedHash = createHash('sha256')
        .update(`${owner}/${repo}`)
        .digest('hex')
        .slice(0, 16);
      const expectedPath = path.join(workDir, 'repos', `${expectedHash}.git`);
      expect(manager.getMirrorPath(owner, repo)).toBe(expectedPath);
    });
  });
  describe('cleanStaleMirrors', () => {
    test('cleans stale mirrors/workspaces and keeps recent ones', async () => {
      const reposRoot = path.join(workDir, 'repos');
      const workspacesRoot = path.join(workDir, 'workspaces');
      const staleMirror = path.join(reposRoot, 'stale.git');
      const recentMirror = path.join(reposRoot, 'recent.git');
      const staleWorkspace = path.join(workspacesRoot, 'stale-ws');
      const recentWorkspace = path.join(workspacesRoot, 'recent-ws');
      await mkdir(staleMirror, { recursive: true });
      await mkdir(recentMirror, { recursive: true });
      await mkdir(staleWorkspace, { recursive: true });
      await mkdir(recentWorkspace, { recursive: true });
      // Backdate mtimes: 40 days is past the 30-day threshold, 2 days is not.
      const now = Date.now();
      const staleDate = new Date(now - 40 * 24 * 60 * 60 * 1000);
      const recentDate = new Date(now - 2 * 24 * 60 * 60 * 1000);
      await utimes(staleMirror, staleDate, staleDate);
      await utimes(recentMirror, recentDate, recentDate);
      await utimes(staleWorkspace, staleDate, staleDate);
      await utimes(recentWorkspace, recentDate, recentDate);
      const cleaned = await manager.cleanStaleMirrors(30);
      // One stale mirror + one stale workspace removed.
      expect(cleaned).toBe(2);
      expect(await pathExists(staleMirror)).toBe(false);
      expect(await pathExists(staleWorkspace)).toBe(false);
      expect(await pathExists(recentMirror)).toBe(true);
      expect(await pathExists(recentWorkspace)).toBe(true);
    });
    test('returns 0 when repos directory does not exist', async () => {
      const cleanWorkDir = path.join(tempDir, 'clean-workdir');
      await mkdir(cleanWorkDir, { recursive: true });
      const cleanManager = new LocalRepoManager(cleanWorkDir, sandbox, 10_000);
      const cleaned = await cleanManager.cleanStaleMirrors(30);
      expect(cleaned).toBe(0);
    });
  });
});