@snapback/cli 1.1.14 → 1.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. package/README.md +70 -9
  2. package/dist/SkippedTestDetector-AXTMWWHC.js +5 -0
  3. package/dist/SkippedTestDetector-QLSQV7K7.js +5 -0
  4. package/dist/analysis-6WTBZJH3.js +6 -0
  5. package/dist/analysis-C472LUGW.js +2475 -0
  6. package/dist/auth-HFJRXXG2.js +1446 -0
  7. package/dist/auto-provision-organization-SF6XM7X4.js +161 -0
  8. package/dist/chunk-23G5VYA3.js +4259 -0
  9. package/dist/{chunk-QAKFE3NE.js → chunk-4YTE4JEW.js} +3 -4
  10. package/dist/chunk-5EOPYJ4Y.js +12 -0
  11. package/dist/{chunk-OJNDAPC2.js → chunk-5SQA44V7.js} +1086 -19
  12. package/dist/{chunk-BW7RALUZ.js → chunk-7ADPL4Q3.js} +11 -4
  13. package/dist/chunk-CBGOC6RV.js +293 -0
  14. package/dist/chunk-DNEADD2G.js +3499 -0
  15. package/dist/{chunk-Q5XZ3DCB.js → chunk-DPWFZNMY.js} +122 -15
  16. package/dist/chunk-GQ73B37K.js +314 -0
  17. package/dist/chunk-HR34NJP7.js +6133 -0
  18. package/dist/chunk-ICKSHS3A.js +2264 -0
  19. package/dist/{chunk-2TOJVUVJ.js → chunk-OI2HNNT6.js} +285 -33
  20. package/dist/chunk-PL4HF4M2.js +593 -0
  21. package/dist/chunk-WS36HDEU.js +3735 -0
  22. package/dist/chunk-XYU5FFE3.js +111 -0
  23. package/dist/chunk-ZBQDE6WJ.js +108 -0
  24. package/dist/client-WIO6W447.js +8 -0
  25. package/dist/dist-E7E2T3DQ.js +9 -0
  26. package/dist/dist-TEWNOZYS.js +5 -0
  27. package/dist/dist-YZBJAYEJ.js +12 -0
  28. package/dist/index.js +63852 -39806
  29. package/dist/local-service-adapter-3JHN6G4O.js +6 -0
  30. package/dist/pioneer-oauth-hook-V2JKEXM7.js +12 -0
  31. package/dist/{secure-credentials-A4QHHOE2.js → secure-credentials-UEPG7GWW.js} +3 -4
  32. package/dist/snapback-dir-MG7DTRMF.js +6 -0
  33. package/package.json +8 -41
  34. package/scripts/postinstall.mjs +2 -3
  35. package/dist/SkippedTestDetector-B3JZUE5G.js +0 -5
  36. package/dist/SkippedTestDetector-B3JZUE5G.js.map +0 -1
  37. package/dist/analysis-C6XVLBAL.js +0 -6
  38. package/dist/analysis-C6XVLBAL.js.map +0 -1
  39. package/dist/chunk-2TOJVUVJ.js.map +0 -1
  40. package/dist/chunk-5EQLSU5B.js +0 -385
  41. package/dist/chunk-5EQLSU5B.js.map +0 -1
  42. package/dist/chunk-6MR2TINI.js +0 -27
  43. package/dist/chunk-6MR2TINI.js.map +0 -1
  44. package/dist/chunk-A3TUM7U4.js +0 -13002
  45. package/dist/chunk-A3TUM7U4.js.map +0 -1
  46. package/dist/chunk-BW7RALUZ.js.map +0 -1
  47. package/dist/chunk-LEXNOXPV.js +0 -21621
  48. package/dist/chunk-LEXNOXPV.js.map +0 -1
  49. package/dist/chunk-OJNDAPC2.js.map +0 -1
  50. package/dist/chunk-Q5XZ3DCB.js.map +0 -1
  51. package/dist/chunk-QAKFE3NE.js.map +0 -1
  52. package/dist/chunk-QLCHTUT5.js +0 -1067
  53. package/dist/chunk-QLCHTUT5.js.map +0 -1
  54. package/dist/dist-D2SHOZMS.js +0 -8
  55. package/dist/dist-D2SHOZMS.js.map +0 -1
  56. package/dist/dist-L76VXYJ5.js +0 -5
  57. package/dist/dist-L76VXYJ5.js.map +0 -1
  58. package/dist/dist-RPM72FHJ.js +0 -5
  59. package/dist/dist-RPM72FHJ.js.map +0 -1
  60. package/dist/index.js.map +0 -1
  61. package/dist/learning-pruner-YSZSOOOC.js +0 -7
  62. package/dist/learning-pruner-YSZSOOOC.js.map +0 -1
  63. package/dist/secure-credentials-A4QHHOE2.js.map +0 -1
  64. package/dist/snapback-dir-6QUSO6Y3.js +0 -6
  65. package/dist/snapback-dir-6QUSO6Y3.js.map +0 -1
  66. package/dist/storage-H366UNAR.js +0 -6
  67. package/dist/storage-H366UNAR.js.map +0 -1
@@ -1,10 +1,900 @@
1
- #!/usr/bin/env node
2
- import { __name } from './chunk-BW7RALUZ.js';
1
+ #!/usr/bin/env node --no-warnings=ExperimentalWarning
2
+ import { __name } from './chunk-7ADPL4Q3.js';
3
+ import { parseSync } from 'oxc-parser';
4
+ import { dirname, resolve, relative, basename } from 'path';
3
5
  import * as eslintParser from '@typescript-eslint/parser';
4
6
  import { parse } from '@babel/parser';
5
7
  import traverse from '@babel/traverse';
6
- import { dirname, relative, basename } from 'path';
7
8
 
9
+ process.env.SNAPBACK_CLI='true';
10
+ var TS_EXTENSIONS = /* @__PURE__ */ new Set([
11
+ ".ts",
12
+ ".tsx",
13
+ ".mts",
14
+ ".cts"
15
+ ]);
16
+ var JSX_EXTENSIONS = /* @__PURE__ */ new Set([
17
+ ".tsx",
18
+ ".jsx"
19
+ ]);
20
+ var ALL_EXTENSIONS = /* @__PURE__ */ new Set([
21
+ ".ts",
22
+ ".tsx",
23
+ ".js",
24
+ ".jsx",
25
+ ".mts",
26
+ ".cts",
27
+ ".mjs",
28
+ ".cjs"
29
+ ]);
30
+ function isSupportedFile(filePath) {
31
+ const ext = getExtension(filePath);
32
+ return ALL_EXTENSIONS.has(ext);
33
+ }
34
+ __name(isSupportedFile, "isSupportedFile");
35
+ function parseSource(content, filePath) {
36
+ const ext = getExtension(filePath);
37
+ try {
38
+ const lang = JSX_EXTENSIONS.has(ext) ? TS_EXTENSIONS.has(ext) ? "tsx" : "jsx" : TS_EXTENSIONS.has(ext) ? "ts" : "js";
39
+ const result = parseSync(filePath, content, {
40
+ sourceType: "module",
41
+ lang
42
+ });
43
+ const errors = Array.isArray(result.errors) ? result.errors.map((e) => normalizeError(e)) : [];
44
+ return {
45
+ program: result.program,
46
+ errors,
47
+ success: errors.length === 0
48
+ };
49
+ } catch (error) {
50
+ return {
51
+ program: {
52
+ type: "Program",
53
+ body: [],
54
+ sourceType: "module"
55
+ },
56
+ errors: [
57
+ {
58
+ message: error instanceof Error ? error.message : String(error),
59
+ severity: "error"
60
+ }
61
+ ],
62
+ success: false
63
+ };
64
+ }
65
+ }
66
+ __name(parseSource, "parseSource");
67
+ function walkAST(node, visitor, parent) {
68
+ if (!node || typeof node !== "object") {
69
+ return;
70
+ }
71
+ const n = node;
72
+ if (typeof n.type === "string") {
73
+ visitor(n, parent);
74
+ }
75
+ for (const key of Object.keys(n)) {
76
+ if (key === "type" || key === "start" || key === "end" || key === "loc") {
77
+ continue;
78
+ }
79
+ const value = n[key];
80
+ if (Array.isArray(value)) {
81
+ for (const item of value) {
82
+ if (item && typeof item === "object" && typeof item.type === "string") {
83
+ walkAST(item, visitor, n);
84
+ }
85
+ }
86
+ } else if (value && typeof value === "object" && typeof value.type === "string") {
87
+ walkAST(value, visitor, n);
88
+ }
89
+ }
90
+ }
91
+ __name(walkAST, "walkAST");
92
+ function countASTNodes(program) {
93
+ let count = 0;
94
+ walkAST(program, () => {
95
+ count++;
96
+ });
97
+ return count;
98
+ }
99
+ __name(countASTNodes, "countASTNodes");
100
+ function offsetToLine(source, offset) {
101
+ if (offset < 0 || offset > source.length) {
102
+ return 1;
103
+ }
104
+ let line = 1;
105
+ for (let i = 0; i < offset; i++) {
106
+ if (source[i] === "\n") {
107
+ line++;
108
+ }
109
+ }
110
+ return line;
111
+ }
112
+ __name(offsetToLine, "offsetToLine");
113
+ function getExtension(filePath) {
114
+ const lastDot = filePath.lastIndexOf(".");
115
+ return lastDot >= 0 ? filePath.substring(lastDot).toLowerCase() : "";
116
+ }
117
+ __name(getExtension, "getExtension");
118
+ function normalizeError(e) {
119
+ if (typeof e === "object" && e !== null) {
120
+ const err = e;
121
+ return {
122
+ message: String(err.message ?? err),
123
+ severity: String(err.severity ?? "error"),
124
+ labels: Array.isArray(err.labels) ? err.labels : void 0
125
+ };
126
+ }
127
+ return {
128
+ message: String(e),
129
+ severity: "error"
130
+ };
131
+ }
132
+ __name(normalizeError, "normalizeError");
133
+
134
+ // ../../packages/core/dist/analysis/ast/ComplexityAnalyzer.js
135
+ var THRESHOLDS = {
136
+ /** Cyclomatic complexity per function */
137
+ maxCyclomaticPerFunction: 15,
138
+ /** Maximum nesting depth per function */
139
+ maxNestingDepth: 5,
140
+ /** Maximum parameters per function */
141
+ maxParameters: 5,
142
+ /** Maximum functions per file */
143
+ maxFunctionsPerFile: 30,
144
+ /** File-level aggregate cyclomatic complexity */
145
+ maxCyclomaticPerFile: 50
146
+ };
147
+ var BRANCH_NODES = /* @__PURE__ */ new Set([
148
+ "IfStatement",
149
+ "ConditionalExpression",
150
+ "SwitchCase",
151
+ "ForStatement",
152
+ "ForInStatement",
153
+ "ForOfStatement",
154
+ "WhileStatement",
155
+ "DoWhileStatement",
156
+ "CatchClause"
157
+ ]);
158
+ var LOGICAL_OPERATORS = /* @__PURE__ */ new Set([
159
+ "&&",
160
+ "||",
161
+ "??"
162
+ ]);
163
+ var FUNCTION_NODES = /* @__PURE__ */ new Set([
164
+ "FunctionDeclaration",
165
+ "FunctionExpression",
166
+ "ArrowFunctionExpression",
167
+ "MethodDefinition"
168
+ ]);
169
+ var ComplexityAnalyzer = class {
170
+ static {
171
+ __name(this, "ComplexityAnalyzer");
172
+ }
173
+ id = "complexity";
174
+ name = "Complexity Analysis";
175
+ filePatterns = [
176
+ "*.ts",
177
+ "*.tsx",
178
+ "*.js",
179
+ "*.jsx"
180
+ ];
181
+ async analyze(context) {
182
+ const startTime = performance.now();
183
+ const issues = [];
184
+ let filesAnalyzed = 0;
185
+ let totalNodesVisited = 0;
186
+ const parseErrors = [];
187
+ for (const [file, content] of context.contents) {
188
+ if (!this.shouldAnalyzeFile(file)) {
189
+ continue;
190
+ }
191
+ filesAnalyzed++;
192
+ const fileComplexity = this.analyzeFile(content, file);
193
+ if (!fileComplexity) {
194
+ parseErrors.push(`${file}: Failed to parse`);
195
+ continue;
196
+ }
197
+ totalNodesVisited += fileComplexity.functions.length;
198
+ for (const fn of fileComplexity.functions) {
199
+ if (fn.cyclomatic > THRESHOLDS.maxCyclomaticPerFunction) {
200
+ issues.push({
201
+ id: `complexity/cyclomatic/${file}/${fn.line}`,
202
+ severity: fn.cyclomatic > THRESHOLDS.maxCyclomaticPerFunction * 2 ? "high" : "medium",
203
+ type: "HIGH_CYCLOMATIC_COMPLEXITY",
204
+ message: `Function "${fn.name}" has cyclomatic complexity ${fn.cyclomatic} (max: ${THRESHOLDS.maxCyclomaticPerFunction})`,
205
+ file,
206
+ line: fn.line,
207
+ fix: "Extract helper functions or simplify branching logic"
208
+ });
209
+ }
210
+ if (fn.maxNesting > THRESHOLDS.maxNestingDepth) {
211
+ issues.push({
212
+ id: `complexity/nesting/${file}/${fn.line}`,
213
+ severity: "medium",
214
+ type: "DEEP_NESTING",
215
+ message: `Function "${fn.name}" has nesting depth ${fn.maxNesting} (max: ${THRESHOLDS.maxNestingDepth})`,
216
+ file,
217
+ line: fn.line,
218
+ fix: "Use early returns or extract nested logic into helper functions"
219
+ });
220
+ }
221
+ if (fn.parameters > THRESHOLDS.maxParameters) {
222
+ issues.push({
223
+ id: `complexity/parameters/${file}/${fn.line}`,
224
+ severity: "low",
225
+ type: "TOO_MANY_PARAMETERS",
226
+ message: `Function "${fn.name}" has ${fn.parameters} parameters (max: ${THRESHOLDS.maxParameters})`,
227
+ file,
228
+ line: fn.line,
229
+ fix: "Use an options object pattern to reduce parameter count"
230
+ });
231
+ }
232
+ }
233
+ if (fileComplexity.functionCount > THRESHOLDS.maxFunctionsPerFile) {
234
+ issues.push({
235
+ id: `complexity/function-count/${file}`,
236
+ severity: "low",
237
+ type: "TOO_MANY_FUNCTIONS",
238
+ message: `${file} has ${fileComplexity.functionCount} functions (max: ${THRESHOLDS.maxFunctionsPerFile})`,
239
+ file,
240
+ fix: "Split into multiple focused modules"
241
+ });
242
+ }
243
+ if (fileComplexity.totalCyclomatic > THRESHOLDS.maxCyclomaticPerFile) {
244
+ issues.push({
245
+ id: `complexity/file-complexity/${file}`,
246
+ severity: "medium",
247
+ type: "HIGH_FILE_COMPLEXITY",
248
+ message: `${file} has total cyclomatic complexity ${fileComplexity.totalCyclomatic} (max: ${THRESHOLDS.maxCyclomaticPerFile})`,
249
+ file,
250
+ fix: "Consider splitting this file into smaller modules"
251
+ });
252
+ }
253
+ }
254
+ return {
255
+ analyzer: this.id,
256
+ success: true,
257
+ issues,
258
+ coverage: filesAnalyzed / Math.max(context.files.length, 1),
259
+ duration: performance.now() - startTime,
260
+ metadata: {
261
+ filesAnalyzed,
262
+ nodesVisited: totalNodesVisited,
263
+ patternsChecked: [
264
+ "HIGH_CYCLOMATIC_COMPLEXITY",
265
+ "DEEP_NESTING",
266
+ "TOO_MANY_PARAMETERS",
267
+ "TOO_MANY_FUNCTIONS",
268
+ "HIGH_FILE_COMPLEXITY"
269
+ ],
270
+ parseErrors
271
+ }
272
+ };
273
+ }
274
+ shouldRun(context) {
275
+ return context.files.some((f) => this.shouldAnalyzeFile(f));
276
+ }
277
+ /**
278
+ * Analyze a single file and return complexity metrics.
279
+ * Useful for external callers that just want metrics, not issues.
280
+ */
281
+ analyzeFile(content, filePath) {
282
+ if (!isSupportedFile(filePath)) {
283
+ return null;
284
+ }
285
+ const { program, success } = parseSource(content, filePath);
286
+ if (!success && program.body.length === 0) {
287
+ return null;
288
+ }
289
+ const functions = [];
290
+ let fileMaxNesting = 0;
291
+ walkAST(program, (node) => {
292
+ if (!FUNCTION_NODES.has(node.type)) {
293
+ return;
294
+ }
295
+ const fn = this.analyzeFunctionNode(node);
296
+ functions.push(fn);
297
+ if (fn.maxNesting > fileMaxNesting) {
298
+ fileMaxNesting = fn.maxNesting;
299
+ }
300
+ });
301
+ const totalCyclomatic = functions.reduce((sum, fn) => sum + fn.cyclomatic, 0);
302
+ return {
303
+ filePath,
304
+ functions,
305
+ totalCyclomatic,
306
+ maxNesting: fileMaxNesting,
307
+ functionCount: functions.length,
308
+ averageCyclomatic: functions.length > 0 ? totalCyclomatic / functions.length : 0
309
+ };
310
+ }
311
+ // -----------------------------------------------------------------------
312
+ // Per-function analysis
313
+ // -----------------------------------------------------------------------
314
+ analyzeFunctionNode(node) {
315
+ const name = this.getFunctionName(node);
316
+ const line = node.start ?? 0;
317
+ const params = this.getParameterCount(node);
318
+ let cyclomatic = 1;
319
+ let maxNesting = 0;
320
+ const body = this.getFunctionBody(node);
321
+ if (body) {
322
+ this.walkForComplexity(body, (n, depth) => {
323
+ if (BRANCH_NODES.has(n.type)) {
324
+ cyclomatic++;
325
+ }
326
+ if (n.type === "LogicalExpression") {
327
+ const operator = n.operator;
328
+ if (LOGICAL_OPERATORS.has(operator)) {
329
+ cyclomatic++;
330
+ }
331
+ }
332
+ if (depth > maxNesting) {
333
+ maxNesting = depth;
334
+ }
335
+ });
336
+ }
337
+ return {
338
+ name,
339
+ line,
340
+ cyclomatic,
341
+ maxNesting,
342
+ parameters: params
343
+ };
344
+ }
345
+ /**
346
+ * Walk AST nodes counting nesting depth for complexity metrics
347
+ */
348
+ walkForComplexity(node, callback, depth = 0) {
349
+ if (!node || typeof node !== "object") {
350
+ return;
351
+ }
352
+ const n = node;
353
+ if (typeof n.type !== "string") {
354
+ return;
355
+ }
356
+ const nestingNodes = /* @__PURE__ */ new Set([
357
+ "IfStatement",
358
+ "ForStatement",
359
+ "ForInStatement",
360
+ "ForOfStatement",
361
+ "WhileStatement",
362
+ "DoWhileStatement",
363
+ "SwitchStatement",
364
+ "TryStatement"
365
+ ]);
366
+ const newDepth = nestingNodes.has(n.type) ? depth + 1 : depth;
367
+ callback(n, newDepth);
368
+ for (const key of Object.keys(n)) {
369
+ if (key === "type" || key === "start" || key === "end" || key === "loc") {
370
+ continue;
371
+ }
372
+ const value = n[key];
373
+ if (Array.isArray(value)) {
374
+ for (const item of value) {
375
+ this.walkForComplexity(item, callback, newDepth);
376
+ }
377
+ } else if (value && typeof value === "object" && typeof value.type === "string") {
378
+ this.walkForComplexity(value, callback, newDepth);
379
+ }
380
+ }
381
+ }
382
+ // -----------------------------------------------------------------------
383
+ // Helpers
384
+ // -----------------------------------------------------------------------
385
+ getFunctionName(node) {
386
+ if (node.id && typeof node.id === "object") {
387
+ const id = node.id;
388
+ if (typeof id.name === "string") {
389
+ return id.name;
390
+ }
391
+ }
392
+ if (node.key && typeof node.key === "object") {
393
+ const key = node.key;
394
+ if (typeof key.name === "string") {
395
+ return key.name;
396
+ }
397
+ if (typeof key.value === "string") {
398
+ return key.value;
399
+ }
400
+ }
401
+ return "<anonymous>";
402
+ }
403
+ getParameterCount(node) {
404
+ if (node.type === "MethodDefinition") {
405
+ const value = node.value;
406
+ if (value && Array.isArray(value.params)) {
407
+ return value.params.length;
408
+ }
409
+ }
410
+ if (Array.isArray(node.params)) {
411
+ return node.params.length;
412
+ }
413
+ return 0;
414
+ }
415
+ getFunctionBody(node) {
416
+ if (node.type === "MethodDefinition") {
417
+ const value = node.value;
418
+ if (value?.body) {
419
+ return value.body;
420
+ }
421
+ }
422
+ if (node.body) {
423
+ return node.body;
424
+ }
425
+ return null;
426
+ }
427
+ shouldAnalyzeFile(file) {
428
+ const ext = file.split(".").pop()?.toLowerCase();
429
+ return [
430
+ "ts",
431
+ "tsx",
432
+ "js",
433
+ "jsx",
434
+ "mts",
435
+ "cts"
436
+ ].includes(ext ?? "");
437
+ }
438
+ };
439
+
440
+ // ../../packages/core/dist/analysis/ast/import-extractor.js
441
+ function extractImports(content, filePath) {
442
+ if (!isSupportedFile(filePath)) {
443
+ return {
444
+ filePath,
445
+ imports: [],
446
+ parseSuccess: false,
447
+ parseErrors: [
448
+ `Unsupported file type: ${filePath}`
449
+ ]
450
+ };
451
+ }
452
+ const { program, errors, success } = parseSource(content, filePath);
453
+ const imports = [];
454
+ walkAST(program, (node) => {
455
+ switch (node.type) {
456
+ // import ... from 'source'
457
+ case "ImportDeclaration":
458
+ handleImportDeclaration(node, imports);
459
+ break;
460
+ // export { ... } from 'source' OR export * from 'source'
461
+ case "ExportNamedDeclaration":
462
+ case "ExportAllDeclaration":
463
+ handleExportDeclaration(node, imports);
464
+ break;
465
+ // import('source') — ESTree ImportExpression
466
+ case "ImportExpression":
467
+ handleImportExpression(node, imports);
468
+ break;
469
+ // require('source') OR legacy import('source') as CallExpression
470
+ case "CallExpression":
471
+ handleCallExpression(node, imports);
472
+ break;
473
+ }
474
+ });
475
+ return {
476
+ filePath,
477
+ imports,
478
+ parseSuccess: success,
479
+ parseErrors: errors.map((e) => e.message)
480
+ };
481
+ }
482
+ __name(extractImports, "extractImports");
483
+ function extractImportSources(content, filePath) {
484
+ const result = extractImports(content, filePath);
485
+ const sources = /* @__PURE__ */ new Set();
486
+ for (const imp of result.imports) {
487
+ sources.add(imp.source);
488
+ }
489
+ return [
490
+ ...sources
491
+ ];
492
+ }
493
+ __name(extractImportSources, "extractImportSources");
494
+ function extractImportsBatch(files) {
495
+ const results = /* @__PURE__ */ new Map();
496
+ for (const [filePath, content] of files) {
497
+ results.set(filePath, extractImports(content, filePath));
498
+ }
499
+ return results;
500
+ }
501
+ __name(extractImportsBatch, "extractImportsBatch");
502
+ function handleImportDeclaration(node, imports) {
503
+ const source = getStringValue(node.source);
504
+ if (!source) {
505
+ return;
506
+ }
507
+ const specifiers = [];
508
+ if (Array.isArray(node.specifiers)) {
509
+ for (const spec of node.specifiers) {
510
+ const s = spec;
511
+ if (s.type === "ImportSpecifier") {
512
+ const imported = s.imported;
513
+ specifiers.push(getIdentifierName(imported) ?? "unknown");
514
+ } else if (s.type === "ImportDefaultSpecifier") {
515
+ specifiers.push("default");
516
+ } else if (s.type === "ImportNamespaceSpecifier") {
517
+ specifiers.push("*");
518
+ }
519
+ }
520
+ }
521
+ imports.push({
522
+ source,
523
+ kind: "static",
524
+ typeOnly: node.importKind === "type" || Boolean(node.isTypeOnly),
525
+ line: node.start != null ? node.start : void 0,
526
+ specifiers
527
+ });
528
+ }
529
+ __name(handleImportDeclaration, "handleImportDeclaration");
530
+ function handleExportDeclaration(node, imports) {
531
+ const source = getStringValue(node.source);
532
+ if (!source) {
533
+ return;
534
+ }
535
+ const specifiers = [];
536
+ if (node.type === "ExportAllDeclaration") {
537
+ specifiers.push("*");
538
+ } else if (Array.isArray(node.specifiers)) {
539
+ for (const spec of node.specifiers) {
540
+ const s = spec;
541
+ const local = s.local;
542
+ specifiers.push(getIdentifierName(local) ?? "unknown");
543
+ }
544
+ }
545
+ imports.push({
546
+ source,
547
+ kind: "re-export",
548
+ typeOnly: node.exportKind === "type" || Boolean(node.isTypeOnly),
549
+ line: node.start != null ? node.start : void 0,
550
+ specifiers
551
+ });
552
+ }
553
+ __name(handleExportDeclaration, "handleExportDeclaration");
554
+ function handleImportExpression(node, imports) {
555
+ const source = getStringValue(node.source);
556
+ if (source) {
557
+ imports.push({
558
+ source,
559
+ kind: "dynamic",
560
+ typeOnly: false,
561
+ line: node.start != null ? node.start : void 0,
562
+ specifiers: []
563
+ });
564
+ }
565
+ }
566
+ __name(handleImportExpression, "handleImportExpression");
567
+ function handleCallExpression(node, imports) {
568
+ const callee = node.callee;
569
+ if (!callee) {
570
+ return;
571
+ }
572
+ if (node.type === "CallExpression" && callee.type === "Import") {
573
+ const args = node.arguments;
574
+ if (args && args.length > 0) {
575
+ const source = getStringValue(args[0]);
576
+ if (source) {
577
+ imports.push({
578
+ source,
579
+ kind: "dynamic",
580
+ typeOnly: false,
581
+ line: node.start != null ? node.start : void 0,
582
+ specifiers: []
583
+ });
584
+ }
585
+ }
586
+ return;
587
+ }
588
+ if (callee.type === "Identifier" && callee.name === "require") {
589
+ const args = node.arguments;
590
+ if (args && args.length > 0) {
591
+ const source = getStringValue(args[0]);
592
+ if (source) {
593
+ imports.push({
594
+ source,
595
+ kind: "require",
596
+ typeOnly: false,
597
+ line: node.start != null ? node.start : void 0,
598
+ specifiers: []
599
+ });
600
+ }
601
+ }
602
+ }
603
+ }
604
+ __name(handleCallExpression, "handleCallExpression");
605
+ function getStringValue(node) {
606
+ if (!node || typeof node !== "object") {
607
+ return void 0;
608
+ }
609
+ const n = node;
610
+ if (n.type === "StringLiteral" || n.type === "Literal") {
611
+ return typeof n.value === "string" ? n.value : void 0;
612
+ }
613
+ return void 0;
614
+ }
615
+ __name(getStringValue, "getStringValue");
616
+ function getIdentifierName(node) {
617
+ if (!node || typeof node !== "object") {
618
+ return void 0;
619
+ }
620
+ const n = node;
621
+ if (n.type === "Identifier" || n.type === "IdentifierName" || n.type === "IdentifierReference") {
622
+ return typeof n.name === "string" ? n.name : void 0;
623
+ }
624
+ return void 0;
625
+ }
626
+ __name(getIdentifierName, "getIdentifierName");
627
+
628
+ // ../../packages/core/dist/analysis/ast/ImportGraphAnalyzer.js
629
+ var THRESHOLDS2 = {
630
+ /** Max files that can import a single file before it's flagged as high fan-in */
631
+ highFanIn: 15,
632
+ /** Max imports a single file can have before it's flagged as high fan-out */
633
+ highFanOut: 20,
634
+ /** Minimum cycle length to report (avoids noise from self-imports) */
635
+ minCycleLength: 2
636
+ };
637
+ var ImportGraphAnalyzer = class {
638
+ static {
639
+ __name(this, "ImportGraphAnalyzer");
640
+ }
641
+ id = "import-graph";
642
+ name = "Import Graph Analysis";
643
+ filePatterns = [
644
+ "*.ts",
645
+ "*.tsx",
646
+ "*.js",
647
+ "*.jsx"
648
+ ];
649
+ async analyze(context) {
650
+ const startTime = performance.now();
651
+ const issues = [];
652
+ let filesAnalyzed = 0;
653
+ const parseErrors = [];
654
+ const extractions = /* @__PURE__ */ new Map();
655
+ for (const [file, content] of context.contents) {
656
+ if (!this.shouldAnalyzeFile(file)) {
657
+ continue;
658
+ }
659
+ filesAnalyzed++;
660
+ const result = extractImports(content, file);
661
+ extractions.set(file, result);
662
+ if (!result.parseSuccess) {
663
+ parseErrors.push(...result.parseErrors);
664
+ }
665
+ }
666
+ const graph = this.buildGraph(extractions, context.workspaceRoot);
667
+ const cycles = this.detectCycles(graph.edges);
668
+ graph.cycles = cycles;
669
+ for (const cycle of cycles) {
670
+ issues.push({
671
+ id: `import-graph/circular/${cycle.join("->")}`,
672
+ severity: "high",
673
+ type: "CIRCULAR_DEPENDENCY",
674
+ message: `Circular dependency: ${cycle.join(" \u2192 ")}`,
675
+ file: cycle[0],
676
+ fix: "Break the cycle by extracting shared code or using dependency injection"
677
+ });
678
+ }
679
+ for (const [file, node] of graph.nodes) {
680
+ if (node.importedBy.length > THRESHOLDS2.highFanIn) {
681
+ issues.push({
682
+ id: `import-graph/high-fan-in/${file}`,
683
+ severity: "medium",
684
+ type: "HIGH_FAN_IN",
685
+ message: `${file} is imported by ${node.importedBy.length} files \u2014 changes here have high blast radius`,
686
+ file,
687
+ fix: "Consider splitting into smaller, more focused modules"
688
+ });
689
+ }
690
+ }
691
+ for (const [file, node] of graph.nodes) {
692
+ const runtimeImports = node.imports.filter((imp) => !node.typeOnlyImports.includes(imp));
693
+ if (runtimeImports.length > THRESHOLDS2.highFanOut) {
694
+ issues.push({
695
+ id: `import-graph/high-fan-out/${file}`,
696
+ severity: "low",
697
+ type: "HIGH_FAN_OUT",
698
+ message: `${file} imports ${runtimeImports.length} modules \u2014 high coupling`,
699
+ file,
700
+ fix: "Consider using a facade or consolidating related imports"
701
+ });
702
+ }
703
+ }
704
+ for (const [file, node] of graph.nodes) {
705
+ if (node.importedBy.length === 0 && !this.isEntryPoint(file)) {
706
+ issues.push({
707
+ id: `import-graph/orphan/${file}`,
708
+ severity: "info",
709
+ type: "ORPHAN_FILE",
710
+ message: `${file} is not imported by any other analyzed file`,
711
+ file,
712
+ fix: "Verify this file is needed \u2014 it may be dead code"
713
+ });
714
+ }
715
+ }
716
+ return {
717
+ analyzer: this.id,
718
+ success: true,
719
+ issues,
720
+ coverage: filesAnalyzed / Math.max(context.files.length, 1),
721
+ duration: performance.now() - startTime,
722
+ metadata: {
723
+ filesAnalyzed,
724
+ nodesVisited: graph.nodes.size,
725
+ patternsChecked: [
726
+ "CIRCULAR_DEPENDENCY",
727
+ "HIGH_FAN_IN",
728
+ "HIGH_FAN_OUT",
729
+ "ORPHAN_FILE"
730
+ ],
731
+ parseErrors
732
+ }
733
+ };
734
+ }
735
+ shouldRun(context) {
736
+ return context.files.some((f) => this.shouldAnalyzeFile(f));
737
+ }
738
+ /**
739
+ * Build the import graph and return it for external consumption.
740
+ * Useful for other tools (momentum scoring, risk propagation, etc.).
741
+ */
742
+ buildGraphFromContext(context) {
743
+ const extractions = /* @__PURE__ */ new Map();
744
+ for (const [file, content] of context.contents) {
745
+ if (this.shouldAnalyzeFile(file)) {
746
+ extractions.set(file, extractImports(content, file));
747
+ }
748
+ }
749
+ const graph = this.buildGraph(extractions, context.workspaceRoot);
750
+ graph.cycles = this.detectCycles(graph.edges);
751
+ return graph;
752
+ }
753
+ // -----------------------------------------------------------------------
754
+ // Graph construction
755
+ // -----------------------------------------------------------------------
756
+ buildGraph(extractions, workspaceRoot) {
757
+ const nodes = /* @__PURE__ */ new Map();
758
+ const edges = /* @__PURE__ */ new Map();
759
+ const reverseEdges = /* @__PURE__ */ new Map();
760
+ for (const filePath of extractions.keys()) {
761
+ const normalized = this.normalizePath(filePath);
762
+ nodes.set(normalized, {
763
+ filePath: normalized,
764
+ imports: [],
765
+ importedBy: [],
766
+ typeOnlyImports: []
767
+ });
768
+ edges.set(normalized, /* @__PURE__ */ new Set());
769
+ }
770
+ for (const [filePath, extraction] of extractions) {
771
+ const normalized = this.normalizePath(filePath);
772
+ for (const imp of extraction.imports) {
773
+ const resolved = this.resolveImport(imp.source, filePath, workspaceRoot);
774
+ if (!resolved) {
775
+ continue;
776
+ }
777
+ const resolvedNorm = this.normalizePath(resolved);
778
+ edges.get(normalized)?.add(resolvedNorm);
779
+ const node = nodes.get(normalized);
780
+ if (node && !node.imports.includes(resolvedNorm)) {
781
+ node.imports.push(resolvedNorm);
782
+ if (imp.typeOnly) {
783
+ node.typeOnlyImports.push(resolvedNorm);
784
+ }
785
+ }
786
+ if (!reverseEdges.has(resolvedNorm)) {
787
+ reverseEdges.set(resolvedNorm, /* @__PURE__ */ new Set());
788
+ }
789
+ reverseEdges.get(resolvedNorm)?.add(normalized);
790
+ if (!nodes.has(resolvedNorm)) {
791
+ nodes.set(resolvedNorm, {
792
+ filePath: resolvedNorm,
793
+ imports: [],
794
+ importedBy: [],
795
+ typeOnlyImports: []
796
+ });
797
+ }
798
+ }
799
+ }
800
+ for (const [file, importers] of reverseEdges) {
801
+ const node = nodes.get(file);
802
+ if (node) {
803
+ node.importedBy = [
804
+ ...importers
805
+ ];
806
+ }
807
+ }
808
+ return {
809
+ nodes,
810
+ edges,
811
+ reverseEdges,
812
+ cycles: []
813
+ };
814
+ }
815
+ // -----------------------------------------------------------------------
816
+ // Cycle detection (Tarjan's SCC adapted for cycles)
817
+ // -----------------------------------------------------------------------
818
+ detectCycles(edges) {
819
+ const cycles = [];
820
+ const visited = /* @__PURE__ */ new Set();
821
+ const inStack = /* @__PURE__ */ new Set();
822
+ const stack = [];
823
+ const dfs = /* @__PURE__ */ __name((node) => {
824
+ if (inStack.has(node)) {
825
+ const cycleStart = stack.indexOf(node);
826
+ if (cycleStart >= 0) {
827
+ const cycle = stack.slice(cycleStart);
828
+ if (cycle.length >= THRESHOLDS2.minCycleLength) {
829
+ cycles.push([
830
+ ...cycle,
831
+ node
832
+ ]);
833
+ }
834
+ }
835
+ return;
836
+ }
837
+ if (visited.has(node)) {
838
+ return;
839
+ }
840
+ visited.add(node);
841
+ inStack.add(node);
842
+ stack.push(node);
843
+ const neighbors = edges.get(node) ?? /* @__PURE__ */ new Set();
844
+ for (const neighbor of neighbors) {
845
+ dfs(neighbor);
846
+ }
847
+ stack.pop();
848
+ inStack.delete(node);
849
+ }, "dfs");
850
+ for (const node of edges.keys()) {
851
+ dfs(node);
852
+ }
853
+ return cycles;
854
+ }
855
+ // -----------------------------------------------------------------------
856
+ // Import resolution
857
+ // -----------------------------------------------------------------------
858
+ resolveImport(importSource, fromFile, _workspaceRoot) {
859
+ if (!importSource.startsWith(".") && !importSource.startsWith("/")) {
860
+ if (importSource.startsWith("@")) {
861
+ const parts = importSource.split("/");
862
+ if (parts.length >= 2) {
863
+ const pkg = parts[1];
864
+ return `packages/${pkg}/src/index.ts`;
865
+ }
866
+ }
867
+ return null;
868
+ }
869
+ const fromDir = dirname(fromFile);
870
+ let resolved = resolve(fromDir, importSource);
871
+ if (!resolved.match(/\.(ts|tsx|js|jsx|mts|cts|mjs|cjs)$/)) {
872
+ resolved += ".ts";
873
+ }
874
+ resolved = resolved.replace(/\.js$/, ".ts").replace(/\.jsx$/, ".tsx");
875
+ return resolved;
876
+ }
877
+ // -----------------------------------------------------------------------
878
+ // Helpers
879
+ // -----------------------------------------------------------------------
880
+ shouldAnalyzeFile(file) {
881
+ const ext = file.split(".").pop()?.toLowerCase();
882
+ return [
883
+ "ts",
884
+ "tsx",
885
+ "js",
886
+ "jsx",
887
+ "mts",
888
+ "cts"
889
+ ].includes(ext ?? "");
890
+ }
891
+ isEntryPoint(file) {
892
+ return file.includes("index.") || file.includes("main.") || file.includes("entry.") || file.includes("server.") || file.includes("app.") || file.endsWith("/page.tsx") || file.endsWith("/layout.tsx") || file.endsWith("/route.ts") || file.includes("__tests__") || file.includes(".test.") || file.includes(".spec.");
893
+ }
894
+ normalizePath(filePath) {
895
+ return filePath.replace(/\\/g, "/").replace(/^\.\//, "");
896
+ }
897
+ };
8
898
  var SyntaxAnalyzer = class {
9
899
  static {
10
900
  __name(this, "SyntaxAnalyzer");
@@ -694,21 +1584,18 @@ var ChangeImpactAnalyzer = class {
694
1584
  }
695
1585
  }
696
1586
  /**
697
- * Extract import statements from file content
1587
+ * Extract import statements from file content using AST analysis.
1588
+ *
1589
+ * UPGRADED (11b): Replaces regex-based import extraction with proper AST walking
1590
+ * via packages/core/src/analysis/ast/import-extractor.ts (oxc-parser based).
1591
+ * This eliminates false positives from imports in strings/comments and correctly
1592
+ * handles dynamic imports, re-exports, and type-only imports.
698
1593
  */
699
1594
  extractImports(content, fromFile) {
1595
+ const rawSources = extractImportSources(content, fromFile);
700
1596
  const imports = [];
701
- const importRegex = /import\s+(?:.*?\s+from\s+)?['"]([^'"]+)['"]/g;
702
- const requireRegex = /require\s*\(['"]([^'"]+)['"]\)/g;
703
- let match;
704
- while ((match = importRegex.exec(content)) !== null) {
705
- const importPath = this.resolveImportPath(match[1], fromFile);
706
- if (importPath) {
707
- imports.push(importPath);
708
- }
709
- }
710
- while ((match = requireRegex.exec(content)) !== null) {
711
- const importPath = this.resolveImportPath(match[1], fromFile);
1597
+ for (const source of rawSources) {
1598
+ const importPath = this.resolveImportPath(source, fromFile);
712
1599
  if (importPath) {
713
1600
  imports.push(importPath);
714
1601
  }
@@ -1274,6 +2161,188 @@ var SecurityAnalyzer = class {
1274
2161
  }
1275
2162
  };
1276
2163
 
2164
// ../../packages/core/dist/analysis/pipeline.js
// Maps each analyzer id onto the boolean coverage flag it satisfies in the
// record assembled by buildCoverageInfo ("change-impact" feeds the
// architecture check).
var ANALYZER_COVERAGE_MAP = {
  syntax: "astParsed",
  security: "securityChecked",
  completeness: "completenessChecked",
  "change-impact": "architectureChecked",
  "import-graph": "importGraphChecked",
  complexity: "complexityChecked"
};
2173
// Per-analyzer weights for the aggregate confidence score; values sum to
// 1.0. calculateConfidence iterates this with Object.entries, so key order
// also fixes the insertion order of the confidence breakdown it returns.
var CONFIDENCE_WEIGHTS = {
  syntax: 0.2,
  security: 0.25,
  completeness: 0.15,
  "change-impact": 0.1,
  "import-graph": 0.15,
  complexity: 0.15
};
2181
/**
 * Runs every applicable analyzer over the context and aggregates their
 * results into a single report (issues, coverage, confidence, timing).
 *
 * @param {object} context - Analysis context (files, workspaceRoot, ...).
 * @param {object} [config] - Optional settings: `parallel` (default true),
 *   `timeout` in ms (default 30000), `analyzers` (ids to restrict to).
 * @returns {Promise<object>} Aggregated pipeline result.
 */
async function runAnalysisPipeline(context, config) {
  const startedAt = Date.now();
  const runInParallel = config?.parallel ?? true;
  const timeoutMs = config?.timeout ?? 3e4;
  const candidates = createAnalyzers(context.workspaceRoot);
  const analyzers = filterAnalyzers(candidates, context, config?.analyzers);
  let results;
  if (runInParallel) {
    results = await runParallel(analyzers, context, timeoutMs);
  } else {
    results = await runSequential(analyzers, context, timeoutMs);
  }
  const coverage = buildCoverageInfo(results, context);
  const confidence = calculateConfidence(results, coverage);
  const issues = results.flatMap((result) => result.issues);
  return {
    results,
    totalIssues: issues.length,
    issuesBySeverity: groupBySeverity(issues),
    coverage,
    confidence,
    duration: Date.now() - startedAt
  };
}
__name(runAnalysisPipeline, "runAnalysisPipeline");
2202
/**
 * Instantiates the full analyzer suite. Only ChangeImpactAnalyzer takes
 * the workspace root; the others are self-contained.
 */
function createAnalyzers(workspaceRoot) {
  const suite = [];
  suite.push(new SyntaxAnalyzer());
  suite.push(new SecurityAnalyzer());
  suite.push(new CompletenessAnalyzer());
  suite.push(new ComplexityAnalyzer());
  suite.push(new ImportGraphAnalyzer());
  suite.push(new ChangeImpactAnalyzer(workspaceRoot));
  return suite;
}
__name(createAnalyzers, "createAnalyzers");
2213
/**
 * Narrows the analyzer suite: first by an optional id allow-list, then by
 * each analyzer's own shouldRun(context) predicate. shouldRun is only
 * invoked on analyzers that survived the id filter.
 */
function filterAnalyzers(analyzers, context, selectedIds) {
  const allowed = selectedIds && selectedIds.length > 0 ? new Set(selectedIds) : null;
  return analyzers.filter((analyzer) => {
    if (allowed !== null && !allowed.has(analyzer.id)) {
      return false;
    }
    return analyzer.shouldRun(context);
  });
}
__name(filterAnalyzers, "filterAnalyzers");
2222
/**
 * Runs all analyzers concurrently. runWithTimeout converts failures into
 * error results instead of rejecting, so Promise.all cannot fail here.
 */
async function runParallel(analyzers, context, timeout) {
  return Promise.all(
    analyzers.map((analyzer) => runWithTimeout(analyzer, context, timeout))
  );
}
__name(runParallel, "runParallel");
2227
/**
 * Runs the analyzers one at a time, in order, collecting each result
 * (including error results produced by runWithTimeout) as it completes.
 */
async function runSequential(analyzers, context, timeout) {
  const collected = [];
  for (const analyzer of analyzers) {
    collected.push(await runWithTimeout(analyzer, context, timeout));
  }
  return collected;
}
__name(runSequential, "runSequential");
2236
/**
 * Runs one analyzer with a hard deadline. Never rejects: a timeout or a
 * thrown error is converted into a failed AnalyzerResult carrying a single
 * high-severity ANALYZER_ERROR issue, so callers (runParallel/runSequential)
 * always get one result per analyzer.
 *
 * @param {object} analyzer - Analyzer with `id` and async `analyze(context)`.
 * @param {object} context - Analysis context forwarded to the analyzer.
 * @param {number} timeout - Deadline in milliseconds.
 * @returns {Promise<object>} The analyzer's result, or an error result.
 */
async function runWithTimeout(analyzer, context, timeout) {
  const start = Date.now();
  let deadlineTimer;
  try {
    const result = await Promise.race([
      analyzer.analyze(context),
      new Promise((_, reject) => {
        deadlineTimer = setTimeout(() => reject(new Error(`Analyzer '${analyzer.id}' timed out after ${timeout}ms`)), timeout);
      })
    ]);
    return result;
  } catch (error) {
    return {
      analyzer: analyzer.id,
      success: false,
      issues: [
        {
          id: `pipeline/${analyzer.id}/error`,
          severity: "high",
          type: "ANALYZER_ERROR",
          message: error instanceof Error ? error.message : String(error)
        }
      ],
      coverage: 0,
      duration: Date.now() - start
    };
  } finally {
    // FIX: previously the deadline timer was never cleared, so it kept
    // running (and kept the Node event loop alive) for up to `timeout` ms
    // after every analyzer finished — one lingering timer per analyzer.
    clearTimeout(deadlineTimer);
  }
}
__name(runWithTimeout, "runWithTimeout");
2264
/**
 * Builds the coverage record from analyzer results: one boolean flag per
 * analyzer (true when that analyzer succeeded), plus filesCoverage, the
 * mean coverage across successful analyzers (0 when the context has no
 * files or no analyzer succeeded).
 */
function buildCoverageInfo(results, context) {
  const info = {
    astParsed: false,
    securityChecked: false,
    completenessChecked: false,
    architectureChecked: false,
    importGraphChecked: false,
    complexityChecked: false,
    filesCoverage: 0
  };
  // Flip each flag whose analyzer completed successfully.
  for (const { analyzer, success } of results) {
    const flag = ANALYZER_COVERAGE_MAP[analyzer];
    if (flag && flag !== "filesCoverage" && success) {
      info[flag] = true;
    }
  }
  if (context.files.length > 0) {
    const succeeded = results.filter((result) => result.success);
    if (succeeded.length > 0) {
      const totalCoverage = succeeded.reduce((sum, result) => sum + result.coverage, 0);
      info.filesCoverage = totalCoverage / succeeded.length;
    }
  }
  return info;
}
__name(buildCoverageInfo, "buildCoverageInfo");
2289
/**
 * Computes the weighted confidence score. Each analyzer listed in
 * CONFIDENCE_WEIGHTS contributes weight * coverage when it succeeded and 0
 * when it failed or did not run; maxPossibleConfidence is the fraction of
 * total weight belonging to analyzers that ran at all. Also builds a
 * human-readable explanation of what ran, failed, and was covered.
 */
function calculateConfidence(results, coverage) {
  const breakdown = {};
  let weightedSum = 0;
  let totalWeight = 0;
  let achievableWeight = 0;
  // Key order of CONFIDENCE_WEIGHTS fixes the breakdown's insertion order.
  for (const [analyzerId, weight] of Object.entries(CONFIDENCE_WEIGHTS)) {
    totalWeight += weight;
    const result = results.find((r) => r.analyzer === analyzerId);
    if (!result) {
      breakdown[analyzerId] = 0;
      continue;
    }
    const score = result.success ? result.coverage : 0;
    breakdown[analyzerId] = score;
    weightedSum += weight * score;
    achievableWeight += weight;
  }
  const overall = totalWeight > 0 ? weightedSum / totalWeight : 0;
  const ceiling = totalWeight > 0 ? achievableWeight / totalWeight : 0;
  const succeededCount = results.filter((r) => r.success).length;
  const totalAnalyzers = Object.keys(CONFIDENCE_WEIGHTS).length;
  const explanationParts = [
    `${succeededCount}/${totalAnalyzers} analyzers ran successfully`,
    `Files coverage: ${(coverage.filesCoverage * 100).toFixed(0)}%`
  ];
  const failed = results.filter((r) => !r.success);
  if (failed.length > 0) {
    explanationParts.push(`Failed: ${failed.map((r) => r.analyzer).join(", ")}`);
  }
  return {
    confidence: Math.round(overall * 100) / 100,
    breakdown,
    explanation: explanationParts.join(". "),
    maxPossibleConfidence: Math.round(ceiling * 100) / 100
  };
}
__name(calculateConfidence, "calculateConfidence");
2326
/**
 * Buckets issues by severity. Issues with a falsy or unrecognized
 * severity land in the "info" bucket; every bucket is always present,
 * possibly empty.
 */
function groupBySeverity(issues) {
  const buckets = {
    critical: [],
    high: [],
    medium: [],
    low: [],
    info: []
  };
  for (const issue of issues) {
    const key = issue.severity || "info";
    const target = key in buckets ? buckets[key] : buckets.info;
    target.push(issue);
  }
  return buckets;
}
__name(groupBySeverity, "groupBySeverity");
2345
+
1277
2346
  // ../../packages/core/dist/analysis/static/OrphanDetector.js
1278
2347
  var DEFAULT_OPTIONS = {
1279
2348
  fileExtensions: [
@@ -1376,7 +2445,7 @@ async function runStaticAnalysis(files, _workspaceRoot, options = {}) {
1376
2445
  };
1377
2446
  if (!options.skipTestDetection) {
1378
2447
  try {
1379
- const { analyzeSkippedTests: analyzeSkippedTests2 } = await import('./SkippedTestDetector-B3JZUE5G.js');
2448
+ const { analyzeSkippedTests: analyzeSkippedTests2 } = await import('./SkippedTestDetector-AXTMWWHC.js');
1380
2449
  const testResults = analyzeSkippedTests2(files);
1381
2450
  for (const testResult of testResults) {
1382
2451
  if (!testResult.parsed && testResult.error) {
@@ -1402,6 +2471,4 @@ async function runStaticAnalysis(files, _workspaceRoot, options = {}) {
1402
2471
  }
1403
2472
  __name(runStaticAnalysis, "runStaticAnalysis");
1404
2473
 
1405
- export { ChangeImpactAnalyzer, CompletenessAnalyzer, SecurityAnalyzer, SyntaxAnalyzer, checkFilesForOrphanStatus, createChangeImpactAnalyzer, detectOrphans, filterOrphansToFiles, runStaticAnalysis };
1406
- //# sourceMappingURL=chunk-OJNDAPC2.js.map
1407
- //# sourceMappingURL=chunk-OJNDAPC2.js.map
2474
+ export { ChangeImpactAnalyzer, CompletenessAnalyzer, ComplexityAnalyzer, ImportGraphAnalyzer, SecurityAnalyzer, SyntaxAnalyzer, checkFilesForOrphanStatus, countASTNodes, createChangeImpactAnalyzer, detectOrphans, extractImportSources, extractImports, extractImportsBatch, filterOrphansToFiles, isSupportedFile, offsetToLine, parseSource, runAnalysisPipeline, runStaticAnalysis, walkAST };