codereview-aia 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -164,12 +164,30 @@ async function runDirect(options: DirectOptions): Promise<void> {
   switch (stage) {
     case 'collecting':
       if (info?.filesFound !== undefined) {
-        console.log(pc.green(`✓ Found ${info.filesFound} files`));
+        const uncommitted = info.uncommittedFiles ?? info.filesFound;
+        const context = info.contextFiles ?? 0;
+        const breakdown = context ? ` (${uncommitted} uncommitted + ${context} context)` : '';
+        console.log(pc.green(`✓ Found ${info.filesFound} files${breakdown}`));
       }
       break;
-    case 'reviewing':
-      console.log(pc.blue('Running AI review...'));
+    case 'reviewing': {
+      const context = info?.contextFiles ?? 0;
+      const base = info?.uncommittedFiles ?? info?.filesFound;
+      const suffix =
+        base || context
+          ? ` (scope: ${base ?? '?'} uncommitted${context ? ` + ${context} context` : ''})`
+          : '';
+      const defaultLine = `Running AI review${suffix ? suffix : ''}...`;
+      const detailLine =
+        typeof info?.fileIndex === 'number' &&
+        typeof info.totalFiles === 'number' &&
+        info.totalFiles > 0 &&
+        info.currentFile
+          ? `Running AI review ▸ ${info.fileIndex}/${info.totalFiles} ${info.currentFile}${suffix}`
+          : defaultLine;
+      console.log(pc.blue(detailLine));
       break;
+    }
     case 'merging':
       console.log(pc.blue('Merging reports...'));
       break;
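
For orientation, the progress payload read above appears to carry per-stage counters. A minimal sketch of that shape, inferred only from the fields accessed in this hunk (the interface name and the optionality of every field are assumptions, not part of the package):

    interface ProgressInfo {
      filesFound?: number;       // total files reported for the 'collecting' stage
      uncommittedFiles?: number; // subset that is uncommitted work
      contextFiles?: number;     // extra context files pulled in alongside them
      fileIndex?: number;        // 1-based index of the file currently under review
      totalFiles?: number;       // total files in the review scope
      currentFile?: string;      // path of the file currently being reviewed
    }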
@@ -0,0 +1,37 @@
+import { mkdirSync } from 'node:fs';
+import { join, resolve } from 'node:path';
+import { disableFileLogging, enableFileLogging } from '../../utils/logger';
+
+export interface DebugLogSession {
+  filePath: string;
+  stop: () => void;
+}
+
+function formatTimestamp(date: Date): string {
+  const pad = (value: number) => value.toString().padStart(2, '0');
+  const year = date.getFullYear();
+  const month = pad(date.getMonth() + 1);
+  const day = pad(date.getDate());
+  const hours = pad(date.getHours());
+  const minutes = pad(date.getMinutes());
+  const seconds = pad(date.getSeconds());
+  return `${year}${month}${day}-${hours}${minutes}${seconds}`;
+}
+
+export function startDebugLogSession(workspaceRoot: string): DebugLogSession {
+  const logsDir = resolve(workspaceRoot, 'logs');
+  mkdirSync(logsDir, { recursive: true });
+
+  const timestamp = formatTimestamp(new Date());
+  const fileName = `cr-debug-${timestamp}.log`;
+  const filePath = join(logsDir, fileName);
+
+  enableFileLogging(filePath);
+
+  return {
+    filePath,
+    stop: () => {
+      disableFileLogging();
+    },
+  };
+}
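
The new module above exposes a start/stop pair around file logging. A minimal usage sketch, assuming it is invoked from the CLI entry point (the surrounding code is illustrative, not from the package):

    const session = startDebugLogSession(process.cwd());
    try {
      // ... run the review with verbose output captured to disk ...
    } finally {
      session.stop();
    }
    console.log(`Debug log written to ${session.filePath}`);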
@@ -1,9 +1,32 @@
-import { existsSync, statSync } from 'node:fs';
-import { relative as relativePath, resolve as resolvePath, isAbsolute as isAbsolutePath, join as joinPath } from 'node:path';
+import { existsSync, readFileSync, readdirSync, statSync } from 'node:fs';
+import {
+  relative as relativePath,
+  resolve as resolvePath,
+  isAbsolute as isAbsolutePath,
+  join as joinPath,
+  dirname as dirnamePath,
+} from 'node:path';
 import { execa } from 'execa';
 import { MissingCrIgnoreError } from './errors';
 
-export async function collectFiles(): Promise<string[]> {
+export interface FileCollectionStats {
+  uncommitted: number;
+  context: number;
+}
+
+export interface FileCollectionResult {
+  targets: string[];
+  context: string[];
+  stats: FileCollectionStats;
+}
+
+export type FileCollectionMode = 'uncommitted' | 'preprod';
+
+export interface CollectFilesOptions {
+  mode?: FileCollectionMode;
+}
+
+export async function collectFiles(options: CollectFilesOptions = {}): Promise<FileCollectionResult> {
   try {
     const { stdout: rootOut } = await execa('git', ['rev-parse', '--show-toplevel']);
     const repoRoot = resolvePath(rootOut.trim());
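
With this change, collectFiles returns a structured result instead of a flat string array. A hedged sketch of how a caller might consume the new shape (the logging is illustrative only):

    const { targets, context, stats } = await collectFiles({ mode: 'uncommitted' });
    console.log(`${stats.uncommitted} uncommitted file(s), ${stats.context} context file(s)`);
    // 'preprod' widens the scope to every tracked and untracked file in the workspace:
    const fullScope = await collectFiles({ mode: 'preprod' });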
@@ -58,45 +81,73 @@ export async function collectFiles(): Promise<string[]> {
       return absolutePath;
     };
 
-    const gitWorkspaceOptions = { cwd: workspaceRoot } as const;
-    const gitPathSpec = ['--', '.'] as const;
+    const mode: FileCollectionMode = options.mode ?? 'uncommitted';
 
-    const { stdout: statusOut } = await execa('git', ['status', '--porcelain=1', '-z', ...gitPathSpec], gitWorkspaceOptions);
-    const { stdout: diffOut } = await execa('git', ['diff', '--name-only', '-z', ...gitPathSpec], gitWorkspaceOptions);
+    const collectUncommittedFiles = async (): Promise<string[]> => {
+      const gitWorkspaceOptions = { cwd: workspaceRoot } as const;
+      const gitPathSpec = ['--', '.'] as const;
 
-    const statusEntries = statusOut.split('\0').filter(Boolean);
-    const statusFiles: string[] = [];
+      const { stdout: statusOut } = await execa('git', ['status', '--porcelain=1', '-z', ...gitPathSpec], gitWorkspaceOptions);
+      const { stdout: diffOut } = await execa('git', ['diff', '--name-only', '-z', ...gitPathSpec], gitWorkspaceOptions);
 
-    for (let i = 0; i < statusEntries.length; i += 1) {
-      const entry = statusEntries[i];
-      const statusPrefix = entry.slice(0, 3);
-      const pathPart = entry.slice(3).trim();
+      const statusEntries = statusOut.split('\0').filter(Boolean);
+      const statusFiles: string[] = [];
 
-      if (!pathPart) {
-        continue;
-      }
+      for (let i = 0; i < statusEntries.length; i += 1) {
+        const entry = statusEntries[i];
+        const statusPrefix = entry.slice(0, 3);
+        const pathPart = entry.slice(3).trim();
 
-      if (statusPrefix.trim().startsWith('R')) {
-        const renamedTarget = statusEntries[i + 1];
-        if (renamedTarget) {
-          statusFiles.push(renamedTarget);
-          i += 1;
+        if (!pathPart) {
           continue;
         }
+
+        if (statusPrefix.trim().startsWith('R')) {
+          const renamedTarget = statusEntries[i + 1];
+          if (renamedTarget) {
+            statusFiles.push(renamedTarget);
+            i += 1;
+            continue;
+          }
+        }
+
+        statusFiles.push(pathPart);
       }
 
-      statusFiles.push(pathPart);
-    }
+      const diffFiles = diffOut.split('\0').filter(Boolean);
+      const unique = new Set([...statusFiles, ...diffFiles]);
+      return Array.from(unique);
+    };
 
-    const diffFiles = diffOut.split('\0').filter(Boolean);
-    const unique = new Set([...statusFiles, ...diffFiles]);
+    const collectWorkspaceFiles = async (): Promise<string[]> => {
+      const pathSpec = workspaceRelative === '' ? '.' : workspaceRelative;
+      const tracked = await listTrackedFiles(repoRoot, pathSpec || '.');
+      const untracked = await listUntrackedFiles(repoRoot, pathSpec || '.');
+      return Array.from(new Set([...tracked, ...untracked]));
+    };
+
+    const pendingFiles = mode === 'preprod' ? await collectWorkspaceFiles() : await collectUncommittedFiles();
 
-    const resolved = Array.from(unique)
+    const resolved = pendingFiles
       .map((file) => normalize(file))
       .filter((file): file is string => Boolean(file));
 
-    const gitFiltered = await excludeIgnored(repoRoot, resolved);
-    return await excludeCrIgnored(workspaceRoot, crIgnorePath, gitFiltered);
+    const baseSet = new Set(resolved);
+    const { combinedFiles, extras } = await augmentWithContext(resolved, workspaceRoot);
+    const extraSet = new Set(extras);
+
+    const gitFiltered = await excludeIgnored(repoRoot, combinedFiles);
+    const finalFiles = await excludeCrIgnored(workspaceRoot, crIgnorePath, gitFiltered);
+
+    const targets = finalFiles.filter((file) => baseSet.has(file));
+    const contextFiles = finalFiles.filter((file) => extraSet.has(file) && !baseSet.has(file));
+
+    const stats: FileCollectionStats = {
+      uncommitted: targets.length,
+      context: contextFiles.length,
+    };
+
+    return { targets, context: contextFiles, stats };
   } catch (error: any) {
     if (error.message?.includes('not a git repository')) {
       throw new Error('Not a git repository. Please run this command from a git repository root.');
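
For reference, the -z flags above make git emit NUL-separated entries, which the new helpers split on '\0'. A tiny standalone sketch of that parsing with a hypothetical porcelain v1 string (invented for illustration, not taken from the package):

    // Two modified files and one untracked file, NUL-separated.
    const statusOut = ' M src/index.ts\0 M src/cli.ts\0?? notes.md\0';
    const entries = statusOut.split('\0').filter(Boolean);
    const paths = entries.map((entry) => entry.slice(3).trim());
    // paths → ['src/index.ts', 'src/cli.ts', 'notes.md']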
@@ -105,6 +156,262 @@ export async function collectFiles(): Promise<string[]> {
   }
 }
 
+const CONTEXT_FILENAMES = [
+  'README.md',
+  'readme.md',
+  'README',
+  'README.mdx',
+  'CHANGELOG.md',
+  'changelog.md',
+  'HISTORY.md',
+  'history.md',
+  'package.json',
+  '.env.example',
+  '.env.sample',
+  '.env.template',
+  '.env.local.example',
+  '.env.production.example',
+];
+
+const DEPENDENCY_EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs', '.mts', '.cts'];
+const MAX_CONTEXT_FILE_BYTES = 200 * 1024; // 200KB guardrail
+
+async function augmentWithContext(
+  files: string[],
+  workspaceRoot: string,
+): Promise<{ combinedFiles: string[]; extras: string[] }> {
+  if (files.length === 0) {
+    return { combinedFiles: files, extras: [] };
+  }
+
+  const baseSet = new Set(files);
+  const contextFiles = collectContextFiles(files, workspaceRoot);
+  const dependencyFiles = collectDependencyFiles(files, workspaceRoot);
+  const localMarkdownFiles = collectLocalMarkdownFiles(files);
+  const globalNamedFiles = await collectGlobalNamedContext(workspaceRoot);
+
+  const extras = Array.from(
+    new Set(
+      [...contextFiles, ...dependencyFiles, ...localMarkdownFiles, ...globalNamedFiles].filter(
+        (file) => !baseSet.has(file),
+      ),
+    ),
+  );
+
+  const combinedFiles = Array.from(new Set([...files, ...extras]));
+  return { combinedFiles, extras };
+}
+
+function collectContextFiles(files: string[], workspaceRoot: string): string[] {
+  const extras = new Set<string>();
+  const candidateDirs = new Set<string>();
+  files.forEach((file) => candidateDirs.add(dirnamePath(file)));
+  candidateDirs.add(workspaceRoot);
+
+  for (const dir of candidateDirs) {
+    for (const filename of CONTEXT_FILENAMES) {
+      const candidate = joinPath(dir, filename);
+      if (!candidate.startsWith(workspaceRoot)) {
+        continue;
+      }
+      try {
+        const stats = statSync(candidate);
+        if (stats.isFile() && stats.size <= MAX_CONTEXT_FILE_BYTES && isTextFile(candidate)) {
+          extras.add(candidate);
+        }
+      } catch {
+        // ignored
+      }
+    }
+  }
+
+  return Array.from(extras);
+}
+
+function collectDependencyFiles(files: string[], workspaceRoot: string): string[] {
+  const extras = new Set<string>();
+
+  for (const file of files) {
+    if (!isDependencyCandidate(file)) {
+      continue;
+    }
+
+    let content: string;
+    try {
+      content = readFileSync(file, 'utf-8');
+    } catch {
+      continue;
+    }
+
+    const specifiers = extractImportSpecifiers(content);
+    if (specifiers.length === 0) {
+      continue;
+    }
+
+    const baseDir = dirnamePath(file);
+    for (const specifier of specifiers) {
+      const resolved = resolveImportSpecifier(specifier, baseDir, workspaceRoot);
+      if (resolved) {
+        extras.add(resolved);
+      }
+    }
+  }
+
+  return Array.from(extras);
+}
+
+function isDependencyCandidate(filePath: string): boolean {
+  const lower = filePath.toLowerCase();
+  return DEPENDENCY_EXTENSIONS.some((ext) => lower.endsWith(ext));
+}
+
+function extractImportSpecifiers(content: string): string[] {
+  const specifiers = new Set<string>();
+  const patterns = [
+    /import\s+[^;]*?from\s+['"]([^'"\n]+)['"]/g,
+    /import\s+['"]([^'"\n]+)['"]/g,
+    /export\s+[^;]*?from\s+['"]([^'"\n]+)['"]/g,
+    /require\(\s*['"]([^'"\n]+)['"]\s*\)/g,
+    /import\(\s*['"]([^'"\n]+)['"]\s*\)/g,
+  ];
+
+  for (const pattern of patterns) {
+    let match: RegExpExecArray | null;
+    while ((match = pattern.exec(content)) !== null) {
+      const spec = match[1];
+      if (spec && spec.startsWith('.')) {
+        specifiers.add(spec);
+      }
+    }
+  }
+
+  return Array.from(specifiers);
+}
+
+function resolveImportSpecifier(
+  specifier: string,
+  baseDir: string,
+  workspaceRoot: string,
+): string | null {
+  const cleaned = specifier.replace(/[#?].*$/, '');
+  const resolvedBase = resolvePath(baseDir, cleaned);
+  const candidates = new Set<string>([resolvedBase]);
+
+  const hasExtension = DEPENDENCY_EXTENSIONS.some((ext) => resolvedBase.endsWith(ext));
+  if (!hasExtension) {
+    for (const ext of DEPENDENCY_EXTENSIONS) {
+      candidates.add(`${resolvedBase}${ext}`);
+      candidates.add(joinPath(resolvedBase, `index${ext}`));
+    }
+  }
+
+  for (const candidate of candidates) {
+    if (!candidate.startsWith(workspaceRoot)) {
+      continue;
+    }
+    try {
+      const stats = statSync(candidate);
+      if (stats.isFile()) {
+        return candidate;
+      }
+    } catch {
+      continue;
+    }
+  }
+
+  return null;
+}
+
+function collectLocalMarkdownFiles(files: string[]): string[] {
+  const extras = new Set<string>();
+  const dirs = new Set<string>();
+  files.forEach((file) => dirs.add(dirnamePath(file)));
+
+  for (const dir of dirs) {
+    let entries;
+    try {
+      entries = readdirSync(dir, { withFileTypes: true });
+    } catch {
+      continue;
+    }
+
+    for (const entry of entries) {
+      if (!entry.isFile()) {
+        continue;
+      }
+
+      const lower = entry.name.toLowerCase();
+      if (!lower.endsWith('.md') && !lower.endsWith('.mdx')) {
+        continue;
+      }
+
+      const fullPath = joinPath(dir, entry.name);
+      try {
+        const stats = statSync(fullPath);
+        if (stats.isFile() && stats.size <= MAX_CONTEXT_FILE_BYTES && isTextFile(fullPath)) {
+          extras.add(fullPath);
+        }
+      } catch {
+        continue;
+      }
+    }
+  }
+
+  return Array.from(extras);
+}
+
+async function collectGlobalNamedContext(workspaceRoot: string): Promise<string[]> {
+  try {
+    const pathspecs = CONTEXT_FILENAMES.map((name) => `:(glob)**/${name}`);
+    const { stdout } = await execa('git', ['ls-files', '-z', '--', ...pathspecs], {
+      cwd: workspaceRoot,
+      reject: false,
+    });
+    if (!stdout) {
+      return [];
+    }
+
+    return stdout
+      .split('\0')
+      .filter(Boolean)
+      .map((rel) => resolvePath(workspaceRoot, rel))
+      .filter((abs) => {
+        try {
+          const stats = statSync(abs);
+          return stats.isFile() && stats.size <= MAX_CONTEXT_FILE_BYTES && isTextFile(abs);
+        } catch {
+          return false;
+        }
+      });
+  } catch {
+    return [];
+  }
+}
+
+function isTextFile(filePath: string): boolean {
+  try {
+    const buffer = readFileSync(filePath);
+    const sample = buffer.subarray(0, 4096);
+    return !sample.includes(0);
+  } catch {
+    return false;
+  }
+}
+
+async function listTrackedFiles(repoRoot: string, pathSpec: string): Promise<string[]> {
+  const { stdout } = await execa('git', ['ls-files', '-z', '--', pathSpec], {
+    cwd: repoRoot,
+  });
+  return stdout.split('\0').filter(Boolean);
+}
+
+async function listUntrackedFiles(repoRoot: string, pathSpec: string): Promise<string[]> {
+  const { stdout } = await execa('git', ['ls-files', '-z', '--others', '--exclude-standard', '--', pathSpec], {
+    cwd: repoRoot,
+  });
+  return stdout.split('\0').filter(Boolean);
+}
+
 async function excludeIgnored(repoRoot: string, files: string[]): Promise<string[]> {
   if (files.length === 0) {
     return [];
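
The dependency scan above only follows relative specifiers. A short sketch of what extractImportSpecifiers would return for a hypothetical source string (example input invented for illustration):

    const sample = [
      "import { a } from './a';",
      "import pc from 'picocolors';",
      "const b = require('../b');",
    ].join('\n');
    // Only relative specifiers survive the startsWith('.') filter:
    // extractImportSpecifiers(sample) → ['./a', '../b']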
@@ -0,0 +1,104 @@
+const DEFAULT_AIA_BASE_URL = 'https://ai.enki.si';
+
+function resolveAiaBaseUrl(): string {
+  const configured = process.env.AIA_BASE_URL || process.env.CR_AIA_AIA_BASE_URL;
+  const base = (configured || DEFAULT_AIA_BASE_URL).trim();
+  return base.replace(/\/$/, '');
+}
+
+function summarizeErrorBody(body: string): string {
+  if (!body.trim()) {
+    return 'No response body';
+  }
+
+  if (/<html/i.test(body)) {
+    const sanitized = body
+      .replace(/<script[\s\S]*?<\/script>/gi, ' ')
+      .replace(/<style[\s\S]*?<\/style>/gi, ' ')
+      .replace(/<[^>]+>/g, ' ')
+      .replace(/\s+/g, ' ')
+      .trim();
+    return sanitized ? `${sanitized.slice(0, 200)}${sanitized.length > 200 ? '…' : ''}` : 'HTML error response';
+  }
+
+  const normalized = body.replace(/\s+/g, ' ').trim();
+  if (!normalized) {
+    return 'Empty error body';
+  }
+  return normalized.length > 200 ? `${normalized.slice(0, 200)}…` : normalized;
+}
+
+class WebCheckRequestError extends Error {
+  status?: number;
+  detail?: string;
+
+  constructor(message: string, options?: { status?: number; detail?: string; cause?: unknown }) {
+    super(message, options?.cause ? { cause: options.cause } : undefined);
+    this.name = 'WebCheckRequestError';
+    this.status = options?.status;
+    this.detail = options?.detail;
+  }
+}
+
+async function extractErrorDetail(response: Response): Promise<string | undefined> {
+  const contentType = response.headers.get('content-type') || '';
+  if (contentType.includes('application/json')) {
+    try {
+      const payload = (await response.json()) as { error?: { message?: string }; message?: string };
+      return payload?.error?.message || payload?.message || JSON.stringify(payload);
+    } catch {
+      return undefined;
+    }
+  }
+
+  try {
+    const textBody = await response.text();
+    return summarizeErrorBody(textBody);
+  } catch {
+    return undefined;
+  }
+}
+
+async function sendWebCheckRequest(endpoint: string, payload: Record<string, unknown>): Promise<void> {
+  let response: Response;
+  try {
+    response = await fetch(endpoint, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify(payload),
+    });
+  } catch (networkError) {
+    const message =
+      networkError instanceof Error ? networkError.message : 'Website check request failed due to an unknown error';
+    throw new WebCheckRequestError(`Website check request failed: ${message}`, { cause: networkError });
+  }
+
+  if (!response.ok) {
+    const detail = await extractErrorDetail(response);
+    const suffix = detail ? ` ${detail}` : '';
+    throw new WebCheckRequestError(`Website check failed (${response.status}).${suffix}`, {
+      status: response.status,
+      detail,
+    });
+  }
+}
+
+export async function triggerManualWebCheck(targetUrl: string): Promise<void> {
+  const baseUrl = resolveAiaBaseUrl();
+  const manualEndpoint = `${baseUrl}/api/manual/check`;
+  const legacyEndpoint = `${baseUrl}/api/check-website`;
+  const missingEndpointStatuses = new Set([404, 405, 501]);
+
+  try {
+    await sendWebCheckRequest(manualEndpoint, { url: targetUrl });
+    return;
+  } catch (error) {
+    if (!(error instanceof WebCheckRequestError) || !error.status || !missingEndpointStatuses.has(error.status)) {
+      throw error;
+    }
+  }
+
+  await sendWebCheckRequest(legacyEndpoint, { url: targetUrl, options: {} });
+}