@neurcode-ai/cli 0.9.31 → 0.9.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. package/README.md +22 -0
  2. package/dist/commands/apply.d.ts.map +1 -1
  3. package/dist/commands/apply.js +45 -3
  4. package/dist/commands/apply.js.map +1 -1
  5. package/dist/commands/map.d.ts.map +1 -1
  6. package/dist/commands/map.js +78 -1
  7. package/dist/commands/map.js.map +1 -1
  8. package/dist/commands/plan-slo.d.ts +7 -0
  9. package/dist/commands/plan-slo.d.ts.map +1 -0
  10. package/dist/commands/plan-slo.js +205 -0
  11. package/dist/commands/plan-slo.js.map +1 -0
  12. package/dist/commands/plan.d.ts.map +1 -1
  13. package/dist/commands/plan.js +665 -29
  14. package/dist/commands/plan.js.map +1 -1
  15. package/dist/commands/repo.d.ts +3 -0
  16. package/dist/commands/repo.d.ts.map +1 -0
  17. package/dist/commands/repo.js +166 -0
  18. package/dist/commands/repo.js.map +1 -0
  19. package/dist/commands/ship.d.ts.map +1 -1
  20. package/dist/commands/ship.js +29 -0
  21. package/dist/commands/ship.js.map +1 -1
  22. package/dist/commands/verify.d.ts.map +1 -1
  23. package/dist/commands/verify.js +261 -9
  24. package/dist/commands/verify.js.map +1 -1
  25. package/dist/index.js +17 -0
  26. package/dist/index.js.map +1 -1
  27. package/dist/services/mapper/ProjectScanner.d.ts +76 -2
  28. package/dist/services/mapper/ProjectScanner.d.ts.map +1 -1
  29. package/dist/services/mapper/ProjectScanner.js +545 -40
  30. package/dist/services/mapper/ProjectScanner.js.map +1 -1
  31. package/dist/services/security/SecurityGuard.d.ts +21 -2
  32. package/dist/services/security/SecurityGuard.d.ts.map +1 -1
  33. package/dist/services/security/SecurityGuard.js +130 -27
  34. package/dist/services/security/SecurityGuard.js.map +1 -1
  35. package/dist/utils/governance.d.ts +2 -0
  36. package/dist/utils/governance.d.ts.map +1 -1
  37. package/dist/utils/governance.js +2 -0
  38. package/dist/utils/governance.js.map +1 -1
  39. package/dist/utils/plan-slo.d.ts +73 -0
  40. package/dist/utils/plan-slo.d.ts.map +1 -0
  41. package/dist/utils/plan-slo.js +271 -0
  42. package/dist/utils/plan-slo.js.map +1 -0
  43. package/dist/utils/project-root.d.ts +5 -4
  44. package/dist/utils/project-root.d.ts.map +1 -1
  45. package/dist/utils/project-root.js +82 -7
  46. package/dist/utils/project-root.js.map +1 -1
  47. package/dist/utils/repo-links.d.ts +17 -0
  48. package/dist/utils/repo-links.d.ts.map +1 -0
  49. package/dist/utils/repo-links.js +136 -0
  50. package/dist/utils/repo-links.js.map +1 -0
  51. package/package.json +3 -3
@@ -2,52 +2,137 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.ProjectScanner = void 0;
4
4
  const ts_morph_1 = require("ts-morph");
5
- const glob_1 = require("glob");
5
+ const crypto_1 = require("crypto");
6
6
  const path_1 = require("path");
7
+ const fs_1 = require("fs");
8
// ---- Size-aware scan limits -------------------------------------------------
const DEFAULT_MAX_SOURCE_FILES = 1200; // hard cap on indexed files per scan
const DEFAULT_MAX_FILE_BYTES = 1024 * 1024; // 1MB: larger files are shallow-indexed
const DEFAULT_SHALLOW_SCAN_BYTES = 256 * 1024; // 256KB sampled per oversized file
const DEFAULT_SHALLOW_SCAN_WINDOWS = 5; // stratified sample windows per oversized file
// ---- Adaptive deepening budgets (full AST re-parse of shallow files) --------
const DEFAULT_MAX_ADAPTIVE_DEEPEN_FILES = 3;
const DEFAULT_MAX_ADAPTIVE_DEEPEN_TOTAL_BYTES = 2 * 1024 * 1024; // 2MB
const DEFAULT_ENABLE_ADAPTIVE_ESCALATION = true;
const DEFAULT_ADAPTIVE_ESCALATION_SHALLOW_RATIO_THRESHOLD = 0.35;
const DEFAULT_ADAPTIVE_ESCALATION_MIN_CANDIDATES = 3;
const DEFAULT_MAX_ADAPTIVE_ESCALATION_FILES = 2;
const DEFAULT_MAX_ADAPTIVE_ESCALATION_TOTAL_BYTES = 1024 * 1024; // 1MB
const MAX_SHALLOW_EXPORTS_PER_FILE = 120;
// ---- Directory-walk filters -------------------------------------------------
const SOURCE_EXTENSIONS = new Set(['ts', 'tsx', 'js', 'jsx']);
const IGNORED_DIR_NAMES = new Set([
    'node_modules', '.git', '.next', 'dist', 'build', '.turbo',
    '.cache', '.pnpm-store', '.yarn', '.idea', '.vscode',
]);
7
34
  class ProjectScanner {
8
35
  project;
9
36
  rootDir;
10
- ignorePatterns;
11
- constructor(rootDir = process.cwd()) {
37
+ maxSourceFiles;
38
+ maxFileBytes;
39
+ shallowScanBytes;
40
+ shallowScanWindows;
41
+ adaptiveDeepenIntent;
42
+ maxAdaptiveDeepenFiles;
43
+ maxAdaptiveDeepenTotalBytes;
44
+ enableAdaptiveEscalation;
45
+ adaptiveEscalationShallowRatioThreshold;
46
+ adaptiveEscalationMinCandidates;
47
+ maxAdaptiveEscalationFiles;
48
+ maxAdaptiveEscalationTotalBytes;
49
+ scanStats;
50
+ deepenedShallowFiles = new Set();
51
+ constructor(rootDir = process.cwd(), options) {
12
52
  this.rootDir = (0, path_1.resolve)(rootDir);
13
53
  this.project = new ts_morph_1.Project({
14
54
  tsConfigFilePath: undefined, // We'll add files manually
15
55
  skipAddingFilesFromTsConfig: true,
16
56
  skipFileDependencyResolution: true,
17
57
  });
18
- this.ignorePatterns = [
19
- '**/node_modules/**',
20
- '**/dist/**',
21
- '**/.git/**',
22
- '**/build/**',
23
- '**/.next/**',
24
- '**/.turbo/**',
25
- '**/.cache/**',
26
- '**/*.map',
27
- '**/*.log',
28
- ];
58
+ this.maxSourceFiles = Math.max(1, Math.floor(options?.maxSourceFiles || DEFAULT_MAX_SOURCE_FILES));
59
+ this.maxFileBytes = Math.max(1, Math.floor(options?.maxFileBytes || DEFAULT_MAX_FILE_BYTES));
60
+ this.shallowScanBytes = Math.max(1024, Math.floor(options?.shallowScanBytes || DEFAULT_SHALLOW_SCAN_BYTES));
61
+ this.shallowScanWindows = Math.max(1, Math.floor(options?.shallowScanWindows || DEFAULT_SHALLOW_SCAN_WINDOWS));
62
+ this.adaptiveDeepenIntent = (options?.adaptiveDeepenIntent || '').trim();
63
+ this.maxAdaptiveDeepenFiles = Math.max(0, Math.floor(options?.maxAdaptiveDeepenFiles ?? DEFAULT_MAX_ADAPTIVE_DEEPEN_FILES));
64
+ this.maxAdaptiveDeepenTotalBytes = Math.max(0, Math.floor(options?.maxAdaptiveDeepenTotalBytes ?? DEFAULT_MAX_ADAPTIVE_DEEPEN_TOTAL_BYTES));
65
+ this.enableAdaptiveEscalation = options?.enableAdaptiveEscalation ?? DEFAULT_ENABLE_ADAPTIVE_ESCALATION;
66
+ const adaptiveEscalationRatioRaw = options?.adaptiveEscalationShallowRatioThreshold;
67
+ this.adaptiveEscalationShallowRatioThreshold = Number.isFinite(adaptiveEscalationRatioRaw)
68
+ ? Math.min(1, Math.max(0, Number(adaptiveEscalationRatioRaw)))
69
+ : DEFAULT_ADAPTIVE_ESCALATION_SHALLOW_RATIO_THRESHOLD;
70
+ this.adaptiveEscalationMinCandidates = Math.max(1, Math.floor(options?.adaptiveEscalationMinCandidates ?? DEFAULT_ADAPTIVE_ESCALATION_MIN_CANDIDATES));
71
+ this.maxAdaptiveEscalationFiles = Math.max(0, Math.floor(options?.maxAdaptiveEscalationFiles ?? DEFAULT_MAX_ADAPTIVE_ESCALATION_FILES));
72
+ this.maxAdaptiveEscalationTotalBytes = Math.max(0, Math.floor(options?.maxAdaptiveEscalationTotalBytes ?? DEFAULT_MAX_ADAPTIVE_ESCALATION_TOTAL_BYTES));
73
+ this.scanStats = this.createEmptyScanStats();
74
+ }
75
+ createEmptyScanStats() {
76
+ return {
77
+ indexedSourceFiles: 0,
78
+ parsedSourceFiles: 0,
79
+ parseFailures: 0,
80
+ shallowIndexedSourceFiles: 0,
81
+ shallowIndexFailures: 0,
82
+ adaptiveDeepenCandidates: 0,
83
+ adaptiveDeepenedFiles: 0,
84
+ adaptiveDeepenFailures: 0,
85
+ adaptiveDeepenSkippedBudget: 0,
86
+ adaptiveEscalationTriggered: false,
87
+ adaptiveEscalationReason: null,
88
+ adaptiveEscalationDeepenedFiles: 0,
89
+ adaptiveEscalationSkippedBudget: 0,
90
+ maxSourceFiles: this.maxSourceFiles,
91
+ maxFileBytes: this.maxFileBytes,
92
+ shallowScanBytes: this.shallowScanBytes,
93
+ shallowScanWindows: this.shallowScanWindows,
94
+ maxAdaptiveDeepenFiles: this.maxAdaptiveDeepenFiles,
95
+ maxAdaptiveDeepenTotalBytes: this.maxAdaptiveDeepenTotalBytes,
96
+ maxAdaptiveEscalationFiles: this.maxAdaptiveEscalationFiles,
97
+ maxAdaptiveEscalationTotalBytes: this.maxAdaptiveEscalationTotalBytes,
98
+ cappedByMaxSourceFiles: false,
99
+ skippedByIgnoredDirectory: 0,
100
+ skippedBySymlink: 0,
101
+ skippedByExtension: 0,
102
+ skippedBySize: 0,
103
+ skippedUnreadable: 0,
104
+ };
29
105
  }
30
106
  /**
31
107
  * Scan the project and extract exports and imports
32
108
  */
33
109
  async scan() {
34
- // Find all TypeScript/JavaScript files
35
- const files = await this.findSourceFiles();
36
- // Add files to ts-morph project
110
+ this.scanStats = this.createEmptyScanStats();
111
+ this.deepenedShallowFiles.clear();
112
+ const adaptiveIntentTokens = this.getIntentTokens(this.adaptiveDeepenIntent);
113
+ const adaptiveIntentFingerprint = this.computeIntentFingerprint(adaptiveIntentTokens);
114
+ // Find all TypeScript/JavaScript files (size-aware mode: full AST or shallow)
115
+ const files = this.findSourceFiles();
116
+ this.scanStats.indexedSourceFiles = files.length;
117
+ // Add full-mode files to ts-morph project
37
118
  const sourceFiles = [];
38
- for (const filePath of files) {
119
+ for (const file of files) {
120
+ if (file.mode !== 'full')
121
+ continue;
39
122
  try {
40
- const sourceFile = this.project.addSourceFileAtPath(filePath);
123
+ const sourceFile = this.project.addSourceFileAtPath(file.fullPath);
41
124
  sourceFiles.push(sourceFile);
42
125
  }
43
126
  catch (error) {
44
127
  // Skip files that can't be parsed
128
+ this.scanStats.parseFailures += 1;
45
129
  continue;
46
130
  }
47
131
  }
132
+ this.scanStats.parsedSourceFiles = sourceFiles.length;
133
+ this.scanStats.shallowIndexedSourceFiles = files.filter((file) => file.mode === 'shallow').length;
48
134
  // Extract metadata from each file
49
135
  const fileMetadata = {};
50
- const globalExports = [];
51
136
  for (const sourceFile of sourceFiles) {
52
137
  const filePath = (0, path_1.relative)(this.rootDir, sourceFile.getFilePath());
53
138
  try {
@@ -58,8 +143,6 @@ class ProjectScanner {
58
143
  exports,
59
144
  imports,
60
145
  };
61
- // Add to global exports list
62
- globalExports.push(...exports);
63
146
  }
64
147
  catch (error) {
65
148
  // If extraction fails for a file, continue with others
@@ -70,39 +153,461 @@ class ProjectScanner {
70
153
  };
71
154
  }
72
155
  }
156
+ // Process oversized files using bounded shallow symbol/import extraction.
157
+ for (const file of files) {
158
+ if (file.mode !== 'shallow')
159
+ continue;
160
+ const relativePath = (0, path_1.relative)(this.rootDir, file.fullPath);
161
+ const shallowMetadata = this.extractShallowMetadata(file.fullPath, relativePath);
162
+ if (!shallowMetadata) {
163
+ this.scanStats.shallowIndexFailures += 1;
164
+ continue;
165
+ }
166
+ fileMetadata[relativePath] = shallowMetadata;
167
+ }
168
+ // Deepen a small, intent-relevant subset of oversized files with full AST parsing.
169
+ const baseDeepen = this.adaptiveDeepenShallowFiles(files, fileMetadata, adaptiveIntentTokens, {
170
+ maxFiles: this.maxAdaptiveDeepenFiles,
171
+ maxTotalBytes: this.maxAdaptiveDeepenTotalBytes,
172
+ });
173
+ this.maybeRunAdaptiveEscalation(files, fileMetadata, adaptiveIntentTokens, baseDeepen);
174
+ const globalExports = [];
175
+ for (const metadata of Object.values(fileMetadata)) {
176
+ globalExports.push(...metadata.exports);
177
+ }
73
178
  return {
74
179
  files: fileMetadata,
75
180
  globalExports,
76
181
  scannedAt: new Date().toISOString(),
182
+ scanStats: { ...this.scanStats },
183
+ scanContext: {
184
+ adaptiveIntentFingerprint,
185
+ },
77
186
  };
78
187
  }
79
188
  /**
80
189
  * Find all TypeScript/JavaScript source files
81
190
  */
82
- async findSourceFiles() {
83
- const patterns = [
84
- '**/*.ts',
85
- '**/*.tsx',
86
- '**/*.js',
87
- '**/*.jsx',
88
- ];
89
- const allFiles = [];
90
- for (const pattern of patterns) {
191
+ findSourceFiles() {
192
+ const results = [];
193
+ const stack = [this.rootDir];
194
+ let capped = false;
195
+ while (stack.length > 0 && results.length < this.maxSourceFiles) {
196
+ const currentDir = stack.pop();
197
+ let entries;
91
198
  try {
92
- const files = await (0, glob_1.glob)(pattern, {
93
- cwd: this.rootDir,
94
- ignore: this.ignorePatterns,
95
- absolute: true,
199
+ entries = (0, fs_1.readdirSync)(currentDir, { withFileTypes: true, encoding: 'utf8' });
200
+ }
201
+ catch {
202
+ this.scanStats.skippedUnreadable += 1;
203
+ continue;
204
+ }
205
+ for (const entry of entries) {
206
+ if (results.length >= this.maxSourceFiles) {
207
+ capped = true;
208
+ break;
209
+ }
210
+ const fullPath = (0, path_1.join)(currentDir, entry.name);
211
+ if (entry.isSymbolicLink()) {
212
+ // Skip symlinks so we never traverse outside repo boundaries.
213
+ this.scanStats.skippedBySymlink += 1;
214
+ continue;
215
+ }
216
+ if (entry.isDirectory()) {
217
+ if (this.shouldSkipDirectory(entry.name)) {
218
+ this.scanStats.skippedByIgnoredDirectory += 1;
219
+ continue;
220
+ }
221
+ stack.push(fullPath);
222
+ continue;
223
+ }
224
+ if (!entry.isFile()) {
225
+ this.scanStats.skippedByExtension += 1;
226
+ continue;
227
+ }
228
+ const decision = this.getFileScanDecision(entry.name, fullPath);
229
+ if (decision.mode === 'skip') {
230
+ if (decision.reason === 'read')
231
+ this.scanStats.skippedUnreadable += 1;
232
+ else
233
+ this.scanStats.skippedByExtension += 1;
234
+ continue;
235
+ }
236
+ if (decision.mode === 'shallow') {
237
+ this.scanStats.skippedBySize += 1; // skipped from full AST parse due to size; handled by shallow indexing
238
+ }
239
+ results.push({ fullPath, mode: decision.mode, size: decision.size || 0 });
240
+ }
241
+ }
242
+ if (!capped && results.length >= this.maxSourceFiles && stack.length > 0) {
243
+ capped = true;
244
+ }
245
+ this.scanStats.cappedByMaxSourceFiles = capped;
246
+ return results.sort((a, b) => a.fullPath.localeCompare(b.fullPath));
247
+ }
248
+ shouldSkipDirectory(name) {
249
+ if (IGNORED_DIR_NAMES.has(name))
250
+ return true;
251
+ // Skip hidden directories except ".neurcode" when someone intentionally stores JS there.
252
+ if (name.startsWith('.') && name !== '.neurcode')
253
+ return true;
254
+ return false;
255
+ }
256
+ getFileScanDecision(name, fullPath) {
257
+ if (name.endsWith('.map') || name.endsWith('.log'))
258
+ return { mode: 'skip', reason: 'ext' };
259
+ const ext = name.includes('.') ? name.split('.').pop()?.toLowerCase() || '' : '';
260
+ if (!SOURCE_EXTENSIONS.has(ext))
261
+ return { mode: 'skip', reason: 'ext' };
262
+ try {
263
+ const stat = (0, fs_1.statSync)(fullPath);
264
+ if (!stat.isFile())
265
+ return { mode: 'skip', reason: 'ext' };
266
+ if (stat.size > this.maxFileBytes)
267
+ return { mode: 'shallow', size: stat.size };
268
+ return { mode: 'full', size: stat.size };
269
+ }
270
+ catch {
271
+ return { mode: 'skip', reason: 'read' };
272
+ }
273
+ }
274
+ extractShallowMetadata(fullPath, filePath) {
275
+ const sample = this.readShallowTextSample(fullPath);
276
+ if (sample === null)
277
+ return null;
278
+ const exports = this.extractShallowExports(sample, filePath);
279
+ const imports = this.extractShallowImports(sample);
280
+ return {
281
+ filePath,
282
+ exports,
283
+ imports,
284
+ };
285
+ }
286
+ readShallowTextSample(fullPath) {
287
+ let fd = null;
288
+ try {
289
+ const stats = (0, fs_1.statSync)(fullPath);
290
+ if (!stats.isFile())
291
+ return null;
292
+ if (stats.size <= 0)
293
+ return '';
294
+ const budget = Math.min(stats.size, this.shallowScanBytes);
295
+ const desiredWindows = Math.max(1, this.shallowScanWindows);
296
+ const effectiveWindows = Math.max(1, Math.min(desiredWindows, Math.floor(budget / 1024) || 1));
297
+ const windowBytes = Math.max(1, Math.floor(budget / effectiveWindows));
298
+ fd = (0, fs_1.openSync)(fullPath, 'r');
299
+ const maxStart = Math.max(0, stats.size - windowBytes);
300
+ const starts = [];
301
+ if (effectiveWindows <= 1 || maxStart <= 0) {
302
+ starts.push(0);
303
+ }
304
+ else {
305
+ const stride = maxStart / (effectiveWindows - 1);
306
+ for (let index = 0; index < effectiveWindows; index += 1) {
307
+ starts.push(Math.min(maxStart, Math.max(0, Math.round(index * stride))));
308
+ }
309
+ }
310
+ const uniqueStarts = Array.from(new Set(starts)).sort((a, b) => a - b);
311
+ const windows = [];
312
+ for (const start of uniqueStarts) {
313
+ const bytesToRead = Math.min(windowBytes, stats.size - start);
314
+ if (bytesToRead <= 0)
315
+ continue;
316
+ const buffer = Buffer.alloc(bytesToRead);
317
+ const read = (0, fs_1.readSync)(fd, buffer, 0, bytesToRead, start);
318
+ if (read <= 0)
319
+ continue;
320
+ const end = start + read;
321
+ windows.push(`/* neurcode-shallow-scan-window:${start}-${end} */\n${buffer.subarray(0, read).toString('utf8')}`);
322
+ }
323
+ if (windows.length === 0) {
324
+ return null;
325
+ }
326
+ return windows.join('\n/* neurcode-shallow-scan-split */\n');
327
+ }
328
+ catch {
329
+ return null;
330
+ }
331
+ finally {
332
+ if (typeof fd === 'number') {
333
+ try {
334
+ (0, fs_1.closeSync)(fd);
335
+ }
336
+ catch {
337
+ // ignore close errors
338
+ }
339
+ }
340
+ }
341
+ }
342
+ extractShallowExports(sample, filePath) {
343
+ const exports = [];
344
+ const seen = new Set();
345
+ const addExport = (name, type, signature) => {
346
+ const cleaned = name.trim();
347
+ if (!cleaned)
348
+ return;
349
+ const key = `${type}:${cleaned}`;
350
+ if (seen.has(key))
351
+ return;
352
+ seen.add(key);
353
+ exports.push({
354
+ name: cleaned,
355
+ filePath,
356
+ type,
357
+ signature,
358
+ });
359
+ };
360
+ const functionPattern = /export\s+(?:async\s+)?function\s+([A-Za-z_$][\w$]*)\s*\(/g;
361
+ let match;
362
+ while ((match = functionPattern.exec(sample)) !== null) {
363
+ addExport(match[1], 'function', `function ${match[1]}(...)`);
364
+ }
365
+ const classPattern = /export\s+class\s+([A-Za-z_$][\w$]*)/g;
366
+ while ((match = classPattern.exec(sample)) !== null) {
367
+ addExport(match[1], 'class', `class ${match[1]}`);
368
+ }
369
+ const interfacePattern = /export\s+interface\s+([A-Za-z_$][\w$]*)/g;
370
+ while ((match = interfacePattern.exec(sample)) !== null) {
371
+ addExport(match[1], 'interface', `interface ${match[1]}`);
372
+ }
373
+ const typePattern = /export\s+type\s+([A-Za-z_$][\w$]*)/g;
374
+ while ((match = typePattern.exec(sample)) !== null) {
375
+ addExport(match[1], 'type', `type ${match[1]} = ...`);
376
+ }
377
+ const enumPattern = /export\s+enum\s+([A-Za-z_$][\w$]*)/g;
378
+ while ((match = enumPattern.exec(sample)) !== null) {
379
+ addExport(match[1], 'enum', `enum ${match[1]}`);
380
+ }
381
+ const constPattern = /export\s+(?:const|let|var)\s+([A-Za-z_$][\w$]*)/g;
382
+ while ((match = constPattern.exec(sample)) !== null) {
383
+ addExport(match[1], 'const', `const ${match[1]} = ...`);
384
+ }
385
+ const namedExportPattern = /export\s*\{([^}]+)\}/g;
386
+ while ((match = namedExportPattern.exec(sample)) !== null) {
387
+ const rawGroup = match[1] || '';
388
+ const parts = rawGroup.split(',').map((value) => value.trim()).filter(Boolean);
389
+ for (const part of parts) {
390
+ const aliasParts = part.split(/\s+as\s+/i).map((value) => value.trim()).filter(Boolean);
391
+ const exportedName = aliasParts.length > 1 ? aliasParts[1] : aliasParts[0];
392
+ if (!exportedName || exportedName === 'default')
393
+ continue;
394
+ addExport(exportedName, 'variable');
395
+ }
396
+ }
397
+ const namespaceExportPattern = /export\s+\*\s+from\s+['"][^'"]+['"]/g;
398
+ while ((match = namespaceExportPattern.exec(sample)) !== null) {
399
+ addExport('*', 'namespace', 'export * from ...');
400
+ }
401
+ const defaultExportPattern = /export\s+default\b/g;
402
+ if (defaultExportPattern.test(sample)) {
403
+ addExport('default', 'default', 'export default ...');
404
+ }
405
+ return exports.slice(0, MAX_SHALLOW_EXPORTS_PER_FILE);
406
+ }
407
+ extractShallowImports(sample) {
408
+ const importsByModule = new Map();
409
+ const upsertImport = (moduleName, symbolName, isTypeOnly) => {
410
+ const normalized = moduleName.trim();
411
+ if (!normalized)
412
+ return;
413
+ const existing = importsByModule.get(normalized);
414
+ if (!existing) {
415
+ importsByModule.set(normalized, {
416
+ from: normalized,
417
+ imports: symbolName ? [symbolName] : [],
418
+ isTypeOnly,
96
419
  });
97
- allFiles.push(...files);
420
+ return;
98
421
  }
99
- catch (error) {
100
- // Continue with other patterns if one fails
422
+ existing.isTypeOnly = existing.isTypeOnly && isTypeOnly;
423
+ if (symbolName && !existing.imports.includes(symbolName)) {
424
+ existing.imports.push(symbolName);
425
+ }
426
+ };
427
+ const importFromPattern = /import\s+(type\s+)?([\s\S]*?)\s+from\s+['"]([^'"]+)['"]/g;
428
+ let match;
429
+ while ((match = importFromPattern.exec(sample)) !== null) {
430
+ const moduleName = match[3];
431
+ const importBody = (match[2] || '').trim();
432
+ const isTypeOnly = Boolean(match[1]);
433
+ if (!importBody) {
434
+ upsertImport(moduleName, '', isTypeOnly);
435
+ continue;
436
+ }
437
+ upsertImport(moduleName, importBody.slice(0, 120), isTypeOnly);
438
+ }
439
+ const sideEffectImportPattern = /import\s+['"]([^'"]+)['"]/g;
440
+ while ((match = sideEffectImportPattern.exec(sample)) !== null) {
441
+ upsertImport(match[1], '', false);
442
+ }
443
+ const requirePattern = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
444
+ while ((match = requirePattern.exec(sample)) !== null) {
445
+ upsertImport(match[1], 'require', false);
446
+ }
447
+ const dynamicImportPattern = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
448
+ while ((match = dynamicImportPattern.exec(sample)) !== null) {
449
+ upsertImport(match[1], 'dynamic', false);
450
+ }
451
+ return Array.from(importsByModule.values());
452
+ }
453
+ maybeRunAdaptiveEscalation(files, fileMetadata, intentTokens, baseDeepen) {
454
+ if (!this.enableAdaptiveEscalation)
455
+ return;
456
+ if (intentTokens.length === 0)
457
+ return;
458
+ if (this.scanStats.shallowIndexedSourceFiles <= 0)
459
+ return;
460
+ if (this.maxAdaptiveEscalationFiles <= 0 || this.maxAdaptiveEscalationTotalBytes <= 0)
461
+ return;
462
+ const indexedSourceFiles = Math.max(1, this.scanStats.indexedSourceFiles);
463
+ const shallowRatio = this.scanStats.shallowIndexedSourceFiles / indexedSourceFiles;
464
+ const noInitialDeepening = baseDeepen.deepened === 0 && baseDeepen.candidates > 0;
465
+ const shallowPressure = shallowRatio >= this.adaptiveEscalationShallowRatioThreshold;
466
+ const candidateFloorMet = baseDeepen.candidates >= this.adaptiveEscalationMinCandidates || noInitialDeepening;
467
+ if (!candidateFloorMet || (!shallowPressure && !noInitialDeepening)) {
468
+ return;
469
+ }
470
+ this.scanStats.adaptiveEscalationTriggered = true;
471
+ this.scanStats.adaptiveEscalationReason = noInitialDeepening
472
+ ? 'no_initial_deepening'
473
+ : 'shallow_pressure';
474
+ this.adaptiveDeepenShallowFiles(files, fileMetadata, intentTokens, {
475
+ maxFiles: this.maxAdaptiveEscalationFiles,
476
+ maxTotalBytes: this.maxAdaptiveEscalationTotalBytes,
477
+ trackAsEscalation: true,
478
+ });
479
+ }
480
+ adaptiveDeepenShallowFiles(files, fileMetadata, intentTokens, budget) {
481
+ if (intentTokens.length === 0) {
482
+ return { candidates: 0, deepened: 0, skippedBudget: 0 };
483
+ }
484
+ const candidates = files
485
+ .filter((file) => file.mode === 'shallow')
486
+ .map((file) => {
487
+ const filePath = (0, path_1.relative)(this.rootDir, file.fullPath);
488
+ const metadata = fileMetadata[filePath];
489
+ if (!metadata)
490
+ return null;
491
+ const score = this.computeAdaptiveDeepenScore(intentTokens, filePath, metadata);
492
+ if (score <= 0)
493
+ return null;
494
+ return {
495
+ fullPath: file.fullPath,
496
+ filePath,
497
+ size: file.size,
498
+ score,
499
+ };
500
+ })
501
+ .filter((candidate) => Boolean(candidate))
502
+ .filter((candidate) => !this.deepenedShallowFiles.has(candidate.filePath))
503
+ .sort((a, b) => {
504
+ if (b.score !== a.score)
505
+ return b.score - a.score;
506
+ return a.size - b.size;
507
+ });
508
+ this.scanStats.adaptiveDeepenCandidates = Math.max(this.scanStats.adaptiveDeepenCandidates, candidates.length);
509
+ if (candidates.length === 0)
510
+ return { candidates: 0, deepened: 0, skippedBudget: 0 };
511
+ if (budget.maxFiles <= 0 || budget.maxTotalBytes <= 0) {
512
+ return { candidates: candidates.length, deepened: 0, skippedBudget: 0 };
513
+ }
514
+ let remainingFiles = budget.maxFiles;
515
+ let remainingBytes = budget.maxTotalBytes;
516
+ let deepened = 0;
517
+ let skippedBudget = 0;
518
+ for (const candidate of candidates) {
519
+ if (remainingFiles <= 0)
520
+ break;
521
+ if (candidate.size > remainingBytes) {
522
+ skippedBudget += 1;
523
+ if (budget.trackAsEscalation) {
524
+ this.scanStats.adaptiveEscalationSkippedBudget += 1;
525
+ }
526
+ else {
527
+ this.scanStats.adaptiveDeepenSkippedBudget += 1;
528
+ }
101
529
  continue;
102
530
  }
531
+ let sourceFile;
532
+ try {
533
+ sourceFile = this.project.addSourceFileAtPath(candidate.fullPath);
534
+ const exports = this.extractExports(sourceFile, candidate.filePath);
535
+ const imports = this.extractImports(sourceFile);
536
+ fileMetadata[candidate.filePath] = {
537
+ filePath: candidate.filePath,
538
+ exports,
539
+ imports,
540
+ };
541
+ this.deepenedShallowFiles.add(candidate.filePath);
542
+ this.scanStats.adaptiveDeepenedFiles += 1;
543
+ if (budget.trackAsEscalation) {
544
+ this.scanStats.adaptiveEscalationDeepenedFiles += 1;
545
+ }
546
+ deepened += 1;
547
+ remainingFiles -= 1;
548
+ remainingBytes -= candidate.size;
549
+ }
550
+ catch {
551
+ this.scanStats.adaptiveDeepenFailures += 1;
552
+ }
553
+ finally {
554
+ if (sourceFile) {
555
+ try {
556
+ this.project.removeSourceFile(sourceFile);
557
+ }
558
+ catch {
559
+ // ignore remove errors
560
+ }
561
+ }
562
+ }
563
+ }
564
+ return {
565
+ candidates: candidates.length,
566
+ deepened,
567
+ skippedBudget,
568
+ };
569
+ }
570
+ getIntentTokens(intent) {
571
+ const stopWords = new Set([
572
+ 'the', 'and', 'for', 'with', 'that', 'from', 'into', 'this', 'your', 'will', 'have', 'should',
573
+ 'about', 'where', 'when', 'what', 'which', 'plan', 'code', 'repo', 'file', 'files', 'create',
574
+ 'build', 'add', 'update', 'change', 'make',
575
+ ]);
576
+ const raw = intent.toLowerCase().match(/[a-z0-9_]{3,}/g) || [];
577
+ const deduped = new Set();
578
+ for (const token of raw) {
579
+ if (stopWords.has(token))
580
+ continue;
581
+ deduped.add(token);
582
+ if (deduped.size >= 24)
583
+ break;
584
+ }
585
+ return Array.from(deduped);
586
+ }
587
+ computeIntentFingerprint(tokens) {
588
+ if (tokens.length === 0)
589
+ return null;
590
+ const normalized = [...tokens].sort().join('|');
591
+ return (0, crypto_1.createHash)('sha1').update(normalized, 'utf-8').digest('hex');
592
+ }
593
+ computeAdaptiveDeepenScore(tokens, filePath, metadata) {
594
+ const lowerPath = filePath.toLowerCase();
595
+ const exportText = metadata.exports
596
+ .map((item) => `${item.name} ${item.signature || ''}`.toLowerCase())
597
+ .join(' ');
598
+ const importText = metadata.imports
599
+ .map((item) => `${item.from} ${(item.imports || []).join(' ')}`.toLowerCase())
600
+ .join(' ');
601
+ let score = 0;
602
+ for (const token of tokens) {
603
+ if (lowerPath.includes(token))
604
+ score += 3;
605
+ if (exportText.includes(token))
606
+ score += 2;
607
+ if (importText.includes(token))
608
+ score += 1;
103
609
  }
104
- // Remove duplicates and sort
105
- return Array.from(new Set(allFiles)).sort();
610
+ return score;
106
611
  }
107
612
  /**
108
613
  * Extract all exports from a source file