arcvision 0.2.7 → 0.2.12

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
Files changed (3)
  1. package/README.md +15 -11
  2. package/dist/index.js +683 -81
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -1,6 +1,7 @@
  # ArcVision System Context Artifact
 
- ## What This Is
+
+ ## What This Is [arcvision.context.json](./arcvision.context.json)
 
  This file is the **canonical structural context** of this codebase.
  It represents how the system actually works — not how it is described.
@@ -35,9 +36,9 @@ Use this artifact when:
 
  ## Determinism & Trust
 
- - Generated from commit: b63e62db4aedf6ffb21055dd88d13c8a947a7422
- - Generation timestamp: 2026-01-13T03:50:34.571Z
- - Tool version: 0.2.5
+ - Generated from commit: baeeb76e6d823a22126f930b1aa9b4064ed6ab24
+ - Generation timestamp: 2026-01-15T03:47:02.411Z
+ - Tool version: 0.2.11
  - Deterministic: same input → same output
  - Explicit assumptions listed inside the artifact
 
@@ -47,16 +48,16 @@ If this artifact conflicts with human memory, **trust the artifact**.
 
  The following files have the highest blast radius and represent critical structural hubs in the system:
 
- - **src/lib/utils.ts**
- - Blast Radius: 62 files (14.73% of codebase)
+ - **src/core/scanner.js**
+ - Blast Radius: 3 files (6% of codebase)
  - Risk: Changes here may silently propagate across the system.
 
- - **src/components/ui/button.tsx**
- - Blast Radius: 49 files (11.64% of codebase)
+ - **src/core/tsconfig-utils.js**
+ - Blast Radius: 3 files (6% of codebase)
  - Risk: Acts as a coordination layer between components.
 
- - **src/lib/supabase/client.ts**
- - Blast Radius: 45 files (10.69% of codebase)
+ - **src/core/blastRadius.js**
+ - Blast Radius: 2 files (4% of codebase)
  - Risk: Modifications can cause widespread inconsistencies.
 
 
@@ -82,4 +83,7 @@ arcvision scan --upload
  ## Source of Truth
 
  This artifact is the **source of truth** for system structure.
- All explanations, decisions, and AI reasoning should reference it.
+ All explanations, decisions, and AI reasoning should reference it.
+
+ Some execution script invocations are dynamically assembled at runtime and may not be statically traceable; such scripts are included
+ as execution boundaries without guaranteed call-site resolution
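
The regenerated README reports blast radius as a file count plus a percentage of the codebase. A minimal sketch of how such a figure can be derived: count every file that transitively depends on a target by walking a reverse dependency graph. The graph below is hypothetical, and arcvision's actual traversal in src/core/blastRadius.js may differ.

```js
// BFS over a reverse dependency graph: reverseGraph[f] lists the files
// that import f directly. The graph here is a made-up illustration.
function blastRadius(reverseGraph, target) {
  const seen = new Set();
  const queue = [...(reverseGraph[target] ?? [])];
  while (queue.length > 0) {
    const file = queue.shift();
    if (seen.has(file)) continue;
    seen.add(file);
    queue.push(...(reverseGraph[file] ?? []));
  }
  return seen.size;
}

const reverseGraph = { "a.js": ["b.js", "c.js"], "b.js": ["c.js"], "c.js": [] };
const radius = blastRadius(reverseGraph, "a.js");
const totalFiles = Object.keys(reverseGraph).length;
console.log(`${radius} files (${((radius / totalFiles) * 100).toFixed(0)}% of codebase)`); // "2 files (67% of codebase)"
```
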
package/dist/index.js CHANGED
@@ -56393,6 +56393,7 @@ var require_parser = __commonJS({
  function parseFile(filePath) {
  const content = fs2.readFileSync(filePath, "utf-8");
  let ast;
+ const isDeclarationFile = filePath.endsWith(".d.ts");
  const parserOptions = {
  sourceType: "unambiguous",
  // Auto-detect between script and module
@@ -56432,26 +56433,80 @@
  "decoratorAutoAccessors"
  ]
  };
+ if (isDeclarationFile) {
+ parserOptions.plugins.push("typescript");
+ }
  try {
  ast = parser.parse(content, parserOptions);
  } catch (error) {
  const fallbackParserOptions = {
  ...parserOptions,
  // Allow more flexible parsing
- errorRecovery: true
+ errorRecovery: true,
  // Allow parser to continue after errors
+ plugins: [
+ "jsx",
+ "typescript",
+ "decorators-legacy",
+ "classProperties",
+ "exportDefaultFrom",
+ "exportNamespaceFrom",
+ "dynamicImport",
+ "functionBind",
+ "nullishCoalescingOperator",
+ "optionalChaining",
+ "numericSeparator",
+ "bigInt",
+ "importMeta",
+ "asyncGenerators",
+ "optionalCatchBinding",
+ "throwExpressions",
+ "logicalAssignment",
+ "nullishCoalescingOperator",
+ "doExpressions",
+ "pipeOperator",
+ "recordAndTuple",
+ "moduleBlocks",
+ "topLevelAwait",
+ "v8intrinsic",
+ "partialApplication",
+ "regexpUnicodeSets",
+ "destructuringPrivate",
+ "decoratorAutoAccessors",
+ "typescript",
+ "decorators"
+ ]
  };
+ if (isDeclarationFile) {
+ fallbackParserOptions.plugins.push("typescript");
+ }
  try {
  ast = parser.parse(content, fallbackParserOptions);
  } catch (fallbackError) {
- console.warn(`\u26A0\uFE0F Failed to parse ${filePath}: ${error.message}`);
- return {
- id: filePath,
- imports: [],
- exports: [],
- functions: [],
- apiCalls: []
+ const ultraFallbackParserOptions = {
+ sourceType: "unambiguous",
+ allowImportExportEverywhere: true,
+ allowReturnOutsideFunction: true,
+ allowSuperOutsideMethod: true,
+ errorRecovery: true,
+ plugins: ["typescript", "jsx", "decorators-legacy", "classProperties"]
  };
+ if (isDeclarationFile) {
+ ultraFallbackParserOptions.plugins.push("typescript");
+ }
+ try {
+ ast = parser.parse(content, ultraFallbackParserOptions);
+ } catch (ultraFallbackError) {
+ console.warn(`\u26A0\uFE0F Failed to parse ${filePath}: ${error.message}`);
+ return {
+ id: filePath,
+ imports: [],
+ exports: [],
+ functions: [],
+ apiCalls: [],
+ potentialInvocations: extractPotentialLuaInvocations(content)
+ };
+ }
  }
  }
  const metadata = {
@@ -56732,7 +56787,13 @@ var require_parser = __commonJS({
  }
  },
  VariableDeclarator({ node }) {
- if (node.init && node.init.type === "CallExpression" && node.init.callee.name === "require" && node.init.arguments.length > 0 && node.init.arguments[0].type === "StringLiteral") {
+ if (node.init && node.init.type === "CallExpression" && (node.init.callee.name === "require" || node.init.callee.property && node.init.callee.property.name === "require" || node.init.callee.type === "MemberExpression" && node.init.callee.property.name === "import") && node.init.arguments.length > 0 && (node.init.arguments[0].type === "StringLiteral" || node.init.arguments[0].type === "TemplateLiteral")) {
+ let sourceValue = "";
+ if (node.init.arguments[0].type === "StringLiteral") {
+ sourceValue = node.init.arguments[0].value;
+ } else if (node.init.arguments[0].type === "TemplateLiteral") {
+ sourceValue = node.init.arguments[0].quasis.map((q) => q.value.cooked).join("");
+ }
  const specifiers = [];
  if (node.id.type === "ObjectPattern") {
  node.id.properties.forEach((prop) => {
@@ -56750,9 +56811,18 @@ var require_parser = __commonJS({
  imported: "default",
  local: node.id.name
  });
+ } else if (node.id.type === "ArrayPattern") {
+ node.id.elements.forEach((element, index) => {
+ if (element && element.name) {
+ specifiers.push({
+ imported: String(index),
+ local: element.name
+ });
+ }
+ });
  }
  metadata.imports.push({
- source: node.init.arguments[0].value,
+ source: sourceValue,
  specifiers,
  type: "require-assignment"
  });
@@ -56763,6 +56833,30 @@ var require_parser = __commonJS({
  });
  return metadata;
  }
+ function extractPotentialLuaInvocations(content) {
+ const invocations = [];
+ const patterns = [
+ /defineCommand\(\s*['"].*?['"]\s*,\s*\{[^}]*lua\s*:[^}]*(?:\.lua)?[^}]*\}/gi,
+ /defineCommand\(\s*[^,)]+\s*,\s*\{[^}]*lua\s*:[^}]*(?:\.lua)?[^}]*\}/gi,
+ /defineCommand\(\s*\{[^}]*lua\s*:[^}]*(?:\.lua)?[^}]*\}/gi,
+ /readFileSync\(['"].*\.lua['"].*\)/gi,
+ /require\(['"].*\.lua['"].*\)/gi,
+ /fs\.readFileSync\(['"].*\.lua['"].*\)/gi,
+ /loadScript\(['"].*\.lua['"].*\)/gi,
+ /loadLua\(['"].*\.lua['"].*\)/gi,
+ /load\(['"].*\.lua['"].*\)/gi
+ ];
+ patterns.forEach((pattern) => {
+ let match;
+ while ((match = pattern.exec(content)) !== null) {
+ invocations.push({
+ pattern: match[0],
+ type: "lua_invocation"
+ });
+ }
+ });
+ return invocations;
+ }
  module2.exports = { parseFile };
  }
  });
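
The new extractPotentialLuaInvocations helper is a last-resort, regex-based scan that only runs when all three parse attempts fail. A small illustration of what it flags; the source snippet below is hypothetical, and the two regexes are copied from the list above.

```js
// Hypothetical unparseable source containing Lua execution boundaries.
const source = `
  const script = fs.readFileSync("scripts/rate-limit.lua", "utf-8");
  client.defineCommand("rateLimit", { numberOfKeys: 1, lua: script });
`;

const patterns = [
  /defineCommand\(\s*['"].*?['"]\s*,\s*\{[^}]*lua\s*:[^}]*(?:\.lua)?[^}]*\}/gi,
  /fs\.readFileSync\(['"].*\.lua['"].*\)/gi
];

for (const pattern of patterns) {
  let match;
  while ((match = pattern.exec(source)) !== null) {
    // Each hit is recorded without call-site resolution, matching the
    // README's note about dynamically assembled script invocations.
    console.log({ pattern: match[0], type: "lua_invocation" });
  }
}
```
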
@@ -57418,6 +57512,10 @@ var require_parser_enhanced = __commonJS({
  }
  const ast = metadata.ast;
  const fileContent = fs2.readFileSync(filePath, "utf-8");
+ if (filePath.endsWith(".md")) {
+ delete metadata.ast;
+ return metadata;
+ }
  try {
  const diPatterns = detectDependencyInjection(ast);
  metadata.constructorInjections = diPatterns.constructorInjections || [];
@@ -57524,15 +57622,21 @@ var require_pass1_facts = __commonJS({
  if (fs2.existsSync(pluginDir)) {
  pluginManager.loadPluginsFromDirectory(pluginDir);
  }
- const files = await glob("**/*.{js,jsx,ts,tsx,json,lua}", {
+ const files = await glob("**/*.{js,jsx,ts,tsx,cjs,mjs,json,lua,md}", {
  ...scanOptions,
- ignore: [...scanOptions.ignore, "**/*.d.ts", "**/.next/**", "**/coverage/**", "**/arcvision.context.json"]
+ ignore: [...scanOptions.ignore, "**/*.d.ts", "**/.next/**", "**/coverage/**", "**/arcvision.context.json", "**/package-lock.json", "**/yarn.lock"]
  });
+ const dtsFiles = await glob("**/*.d.ts", {
+ ...scanOptions,
+ ignore: [...scanOptions.ignore, "**/node_modules/**", "**/.git/**", "**/dist/**", "**/build/**"]
+ });
+ const allFiles = [...files, ...dtsFiles];
+ console.log(` Identified ${allFiles.length} files for analysis (${files.length} code files, ${dtsFiles.length} declaration files)`);
  const CONCURRENCY = 100;
  const rawNodes = [];
  let totalFacts = 0;
- for (let i = 0; i < files.length; i += CONCURRENCY) {
- const batch = files.slice(i, i + CONCURRENCY);
+ for (let i = 0; i < allFiles.length; i += CONCURRENCY) {
+ const batch = allFiles.slice(i, i + CONCURRENCY);
  const promises = batch.map(async (file) => {
  try {
  const relativePath = path2.relative(directory, file);
@@ -57607,7 +57711,7 @@ var require_pass1_facts = __commonJS({
  totalFacts += res.factCount;
  });
  if (i % (CONCURRENCY * 4) === 0 && i > 0) {
- console.log(` ... processed ${Math.min(i + CONCURRENCY, files.length)} / ${files.length} files`);
+ console.log(` ... processed ${Math.min(i + CONCURRENCY, allFiles.length)} / ${allFiles.length} files`);
  }
  }
  console.log(` \u2713 Scanned ${rawNodes.length} files`);
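
Declaration files are now excluded from the main scan and collected in a second pass with a narrower ignore list, then merged into one work queue. A sketch of that two-pass pattern, assuming the glob package's promise API; the option names and file sets here are illustrative, not the shipped scanner.

```js
const { glob } = require("glob");

// Two-pass collection: application code first (skipping *.d.ts), then
// declaration files with their own, narrower ignore list.
async function collectFiles(cwd, baseIgnore = []) {
  const files = await glob("**/*.{js,jsx,ts,tsx,cjs,mjs,json,lua,md}", {
    cwd,
    ignore: [...baseIgnore, "**/*.d.ts", "**/.next/**", "**/coverage/**"]
  });
  const dtsFiles = await glob("**/*.d.ts", {
    cwd,
    ignore: [...baseIgnore, "**/node_modules/**", "**/dist/**", "**/build/**"]
  });
  return [...files, ...dtsFiles];
}
```
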
@@ -57990,6 +58094,21 @@ var require_call_resolver = __commonJS({
  if (this.symbolIndex.has(candidateId))
  return candidateId;
  }
+ const commonPatterns = [
+ `${targetFileId}::use${targetSymbolName.charAt(0).toUpperCase() + targetSymbolName.slice(1)}`,
+ `${targetFileId}::get${targetSymbolName.charAt(0).toUpperCase() + targetSymbolName.slice(1)}`,
+ `${targetFileId}::create${targetSymbolName.charAt(0).toUpperCase() + targetSymbolName.slice(1)}`,
+ `${targetFileId}::_${targetSymbolName}`,
+ // underscore prefix
+ `${targetFileId}::${targetSymbolName}Impl`,
+ // implementation suffix
+ `${targetFileId}::${targetSymbolName}Handler`
+ // handler suffix
+ ];
+ for (const patternId of commonPatterns) {
+ if (this.symbolIndex.has(patternId))
+ return patternId;
+ }
  }
  }
  }
@@ -57998,6 +58117,21 @@
  if (this.symbolIndex.has(localId)) {
  return localId;
  }
+ const localPatterns = [
+ `${fileId}::use${symbolName.charAt(0).toUpperCase() + symbolName.slice(1)}`,
+ `${fileId}::get${symbolName.charAt(0).toUpperCase() + symbolName.slice(1)}`,
+ `${fileId}::create${symbolName.charAt(0).toUpperCase() + symbolName.slice(1)}`,
+ `${fileId}::_${symbolName}`,
+ // underscore prefix
+ `${fileId}::${symbolName}Impl`,
+ // implementation suffix
+ `${fileId}::${symbolName}Handler`
+ // handler suffix
+ ];
+ for (const patternId of localPatterns) {
+ if (this.symbolIndex.has(patternId))
+ return patternId;
+ }
  return null;
  };
  if (functionCalls) {
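
Both resolution paths now fall back to naming-convention guesses (use/get/create prefixes, an underscore prefix, and Impl/Handler suffixes). A self-contained sketch of that heuristic; the symbolIndex contents below are hypothetical, and symbol IDs are assumed to take the "fileId::symbolName" form seen above.

```js
// Hypothetical symbol index: one hook-style export registered.
const symbolIndex = new Map([["src/auth.js::useLogin", {}]]);

function resolveByConvention(fileId, name) {
  const cap = name.charAt(0).toUpperCase() + name.slice(1);
  const candidates = [
    `${fileId}::${name}`,
    `${fileId}::use${cap}`,
    `${fileId}::get${cap}`,
    `${fileId}::create${cap}`,
    `${fileId}::_${name}`,       // underscore prefix
    `${fileId}::${name}Impl`,    // implementation suffix
    `${fileId}::${name}Handler`  // handler suffix
  ];
  return candidates.find((id) => symbolIndex.has(id)) ?? null;
}

console.log(resolveByConvention("src/auth.js", "login")); // "src/auth.js::useLogin"
```
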
@@ -58207,11 +58341,13 @@ var require_pass2_semantics = __commonJS({
  if (!content)
  continue;
  const luaInvocationPatterns = [
- /readFileSync\(['"].*\.lua['"].*\)/gi,
- /defineCommand\(\s*[^,]*,\s*\{[^}]*lua\s*:.+?\}/gi,
+ /defineCommand\(\s*['"].*?['"]\s*,\s*\{[^}]*lua\s*:[^}]*\}/gi,
+ // Matches defineCommand("commandName", { ... lua: ... })
+ /defineCommand\(\s*[^,)]+\s*,\s*\{[^}]*lua\s*:[^}]*\}/gi,
  // Matches defineCommand(commandName, { ... lua: ... })
- /defineCommand\(\s*\{\s*lua\s*:.*?\}/gi,
+ /defineCommand\(\s*\{[^}]*lua\s*:[^}]*\}/gi,
  // Original pattern
+ /readFileSync\(['"].*\.lua['"].*\)/gi,
  /require\(['"].*\.lua['"].*\)/gi,
  /fs\.readFileSync\(['"].*\.lua['"].*\)/gi,
  /import\s+.*?from\s+['"].*\.lua['"].*\)?/gi,
@@ -58485,8 +58621,44 @@ var require_blastRadius = __commonJS({
  const dependencyStrength = calculateDependencyStrength(reverseGraph, file);
  let criticalityScore = blastRadius;
  criticalityScore += dependencyStrength * 0.5;
- if (node && node.metadata && node.metadata.functions && node.metadata.functions.length > 0) {
- criticalityScore += 1;
+ if (node && node.metadata) {
+ if (node.metadata.functions && node.metadata.functions.length > 0) {
+ criticalityScore += 1;
+ }
+ if (node.metadata.classes && node.metadata.classes.length > 0) {
+ criticalityScore += 1.5;
+ }
+ if (node.metadata.types && node.metadata.types.length > 0) {
+ criticalityScore += 0.5;
+ }
+ if (node.metadata.exports && node.metadata.exports.length > 0) {
+ criticalityScore += 0.7;
+ }
+ if (node.metadata.apiCalls && node.metadata.apiCalls.length > 0) {
+ criticalityScore += 1.2;
+ }
+ }
+ if (file.toLowerCase().includes("auth")) {
+ criticalityScore += 2;
+ } else if (file.toLowerCase().includes("core") || file.toLowerCase().includes("util") || file.toLowerCase().includes("utils")) {
+ criticalityScore += 1.5;
+ } else if (file.toLowerCase().includes("config") || file.toLowerCase().includes("index")) {
+ criticalityScore += 0.8;
+ }
+ if (blastRadius > 10) {
+ criticalityScore *= 1.8;
+ } else if (blastRadius > 5) {
+ criticalityScore *= 1.5;
+ } else if (blastRadius > 0) {
+ criticalityScore *= 1.1;
+ }
+ if (node && node.dependencies && node.dependencies.length > 5) {
+ criticalityScore += 2;
+ } else if (node && node.dependencies && node.dependencies.length > 10) {
+ criticalityScore += 5;
+ }
+ if (file.includes("/shared/") || file.includes("/common/") || file.includes("/utils/") || file.includes("/lib/")) {
+ criticalityScore += 1.5;
  }
  criticalityAnalysis.push({
  file,
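
The criticality score now layers metadata bonuses, path-keyword bonuses, a blast-radius multiplier, a dependency-count bonus, and a shared-directory bonus on top of the base score. Note that, as written, the `dependencies.length > 10` branch can never fire, because any count above 10 already satisfies the preceding `> 5` check. A worked example with hypothetical inputs, applying the weights in the order the code does:

```js
// Hypothetical file: src/utils/format.js with blastRadius 6,
// dependencyStrength 2, functions and exports present, 3 dependencies.
const blastRadius = 6;
const dependencyStrength = 2;

let score = blastRadius;            // 6
score += dependencyStrength * 0.5;  // 7    (dependency strength)
score += 1;                         // 8    (has functions)
score += 0.7;                       // 8.7  (has exports)
score += 1.5;                       // 10.2 (path contains "utils")
score *= 1.5;                       // 15.3 (blastRadius > 5 tier)
score += 1.5;                       // 16.8 (lives under /utils/)
console.log(score.toFixed(1));      // "16.8"
```
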
@@ -58642,6 +58814,18 @@ var require_context_builder = __commonJS({
  if (file.intelligence && file.intelligence.connections) {
  file.intelligence.connections.forEach((c) => addDep(c.target));
  }
+ if (!file.intelligence && file.metadata && file.metadata.imports) {
+ file.metadata.imports.forEach((imp) => {
+ if (imp.source) {
+ let depPath = imp.source;
+ if (depPath.startsWith("./") || depPath.startsWith("../")) {
+ depPath = path2.join(path2.dirname(file.id), depPath);
+ depPath = path2.normalize(depPath);
+ }
+ addDep(depPath);
+ }
+ });
+ }
  const nodeObj = {
  id: stableId(file.id),
  type: "file",
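
When a file has raw metadata but no resolved intelligence, dependencies are now recovered from import sources directly, normalizing relative specifiers against the importing file's directory. A quick illustration with hypothetical paths:

```js
const path = require("path");

// Mirrors the fallback above: resolve "./utils" imported from src/core/scanner.js.
const fileId = "src/core/scanner.js";
let depPath = "./utils";
if (depPath.startsWith("./") || depPath.startsWith("../")) {
  depPath = path.normalize(path.join(path.dirname(fileId), depPath));
}
console.log(depPath); // "src/core/utils"
```
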
@@ -65849,6 +66033,325 @@ var require_diff_analyzer = __commonJS({
  }
  });
 
+ // src/core/chunked-uploader.js
+ var require_chunked_uploader = __commonJS({
+ "src/core/chunked-uploader.js"(exports2, module2) {
+ var chalk2 = require_source();
+ var { Buffer: Buffer2 } = require("buffer");
+ var ChunkedUploader2 = class {
+ constructor(chunkSize = 1024 * 1024 * 4) {
+ this.chunkSize = chunkSize;
+ }
+ /**
+ * Calculate approximate byte size of a JSON object
+ */
+ calculateByteSize(obj) {
+ return Buffer2.byteLength(JSON.stringify(obj));
+ }
+ /**
+ * Split a large JSON object into chunks
+ */
+ splitIntoChunks(data, maxChunkSize = this.chunkSize) {
+ const jsonString = JSON.stringify(data);
+ const chunks = [];
+ for (let i = 0; i < jsonString.length; i += maxChunkSize) {
+ const chunk = jsonString.slice(i, i + maxChunkSize);
+ chunks.push({
+ data: chunk,
+ index: Math.floor(i / maxChunkSize),
+ totalChunks: Math.ceil(jsonString.length / maxChunkSize)
+ });
+ }
+ return chunks;
+ }
+ /**
+ * Reconstruct data from chunks
+ */
+ reconstructFromChunks(chunks) {
+ chunks.sort((a, b) => a.index - b.index);
+ const reconstructedString = chunks.map((chunk) => chunk.data).join("");
+ return JSON.parse(reconstructedString);
+ }
+ /**
+ * Upload data in chunks to the server
+ */
+ async uploadInChunks(data, token, apiUrl) {
+ const totalSize = this.calculateByteSize(data);
+ const chunks = this.splitIntoChunks(data);
+ console.log(chalk2.blue(`Preparing to upload ${chunks.length} chunks (${Math.round(totalSize / 1024 / 1024)} MB total)`));
+ const sessionId = await this.initiateSession(chunks.length, token, apiUrl);
+ if (!sessionId) {
+ throw new Error("Failed to initiate upload session");
+ }
+ const uploadPromises = chunks.map(async (chunk, index) => {
+ const progress = (index + 1) / chunks.length * 100;
+ console.log(chalk2.yellow(`Uploading chunk ${index + 1}/${chunks.length} (${Math.round(progress)}%)`));
+ return this.uploadChunk(chunk, sessionId, token, apiUrl);
+ });
+ const results = await Promise.all(uploadPromises);
+ const allSuccessful = results.every((result) => result.success);
+ if (!allSuccessful) {
+ throw new Error("Some chunks failed to upload");
+ }
+ const finalResult = await this.finalizeUpload(sessionId, token, apiUrl);
+ return finalResult;
+ }
+ /**
+ * Initiate a chunked upload session
+ */
+ async initiateSession(totalChunks, token, apiUrl) {
+ try {
+ const response = await fetch(`${apiUrl}/api/upload/initiate`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ "Authorization": `Bearer ${token}`
+ },
+ body: JSON.stringify({
+ totalChunks,
+ timestamp: Date.now()
+ })
+ });
+ if (!response.ok) {
+ throw new Error(`Session initiation failed: ${response.status}`);
+ }
+ const result = await response.json();
+ return result.sessionId;
+ } catch (error) {
+ console.error(chalk2.red(`Failed to initiate upload session: ${error.message}`));
+ return null;
+ }
+ }
+ /**
+ * Upload a single chunk
+ */
+ async uploadChunk(chunk, sessionId, token, apiUrl) {
+ try {
+ const response = await fetch(`${apiUrl}/api/upload/chunk`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ "Authorization": `Bearer ${token}`
+ },
+ body: JSON.stringify({
+ sessionId,
+ chunkIndex: chunk.index,
+ chunkData: chunk.data,
+ totalChunks: chunk.totalChunks
+ })
+ });
+ const result = await response.json();
+ if (!response.ok) {
+ console.error(chalk2.red(`Chunk ${chunk.index} upload failed: ${response.status}`));
+ return { success: false, error: result.error || response.statusText };
+ }
+ return { success: true, chunkIndex: chunk.index };
+ } catch (error) {
+ console.error(chalk2.red(`Chunk ${chunk.index} upload error: ${error.message}`));
+ return { success: false, error: error.message };
+ }
+ }
+ /**
+ * Finalize the chunked upload
+ */
+ async finalizeUpload(sessionId, token, apiUrl) {
+ try {
+ console.log(chalk2.yellow("Finalizing upload..."));
+ const response = await fetch(`${apiUrl}/api/upload/finalize`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ "Authorization": `Bearer ${token}`
+ },
+ body: JSON.stringify({
+ sessionId
+ })
+ });
+ if (!response.ok) {
+ throw new Error(`Finalization failed: ${response.status}`);
+ }
+ const result = await response.json();
+ return result;
+ } catch (error) {
+ console.error(chalk2.red(`Upload finalization failed: ${error.message}`));
+ throw error;
+ }
+ }
+ };
+ module2.exports = {
+ ChunkedUploader: ChunkedUploader2
+ };
+ }
+ });
+
+ // src/core/compression.js
+ var require_compression = __commonJS({
+ "src/core/compression.js"(exports2, module2) {
+ var zlib2 = require("zlib");
+ var { promisify } = require("util");
+ var compressAsync = promisify(zlib2.deflate);
+ var decompressAsync = promisify(zlib2.inflate);
+ var CompressionUtil2 = class {
+ /**
+ * Compress JSON data using zlib deflate
+ */
+ static async compress(data) {
+ try {
+ const jsonString = JSON.stringify(data);
+ const compressedBuffer = await compressAsync(jsonString, {
+ level: 6
+ // Balance between speed and compression ratio
+ });
+ return compressedBuffer.toString("base64");
+ } catch (error) {
+ throw new Error(`Compression failed: ${error.message}`);
+ }
+ }
+ /**
+ * Decompress base64 encoded compressed data
+ */
+ static async decompress(compressedBase64) {
+ try {
+ const compressedBuffer = Buffer.from(compressedBase64, "base64");
+ const decompressedBuffer = await decompressAsync(compressedBuffer);
+ const jsonString = decompressedBuffer.toString();
+ return JSON.parse(jsonString);
+ } catch (error) {
+ throw new Error(`Decompression failed: ${error.message}`);
+ }
+ }
+ /**
+ * Get the compression ratio for data
+ */
+ static async getCompressionRatio(data) {
+ const originalSize = Buffer.byteLength(JSON.stringify(data));
+ const compressedData = await this.compress(data);
+ const compressedSize = Buffer.byteLength(compressedData);
+ return {
+ originalSize,
+ compressedSize,
+ ratio: originalSize / compressedSize,
+ percentage: Math.round((originalSize - compressedSize) / originalSize * 100)
+ };
+ }
+ /**
+ * Check if data needs compression based on size
+ */
+ static needsCompression(data, threshold = 1024 * 1024) {
+ return Buffer.byteLength(JSON.stringify(data)) > threshold;
+ }
+ };
+ module2.exports = CompressionUtil2;
+ }
+ });
+
+ // src/core/progress-tracker.js
+ var require_progress_tracker = __commonJS({
+ "src/core/progress-tracker.js"(exports2, module2) {
+ var chalk2 = require_source();
+ var ProgressTracker2 = class {
+ constructor(totalSteps) {
+ this.totalSteps = totalSteps;
+ this.currentStep = 0;
+ this.startTime = Date.now();
+ }
+ update(step, message) {
+ this.currentStep = step;
+ const progress = this.currentStep / this.totalSteps * 100;
+ const elapsed = Date.now() - this.startTime;
+ const elapsedSeconds = Math.floor(elapsed / 1e3);
+ const minutes = Math.floor(elapsedSeconds / 60);
+ const seconds = elapsedSeconds % 60;
+ const timeString = `${minutes}:${seconds.toString().padStart(2, "0")}`;
+ console.log(
+ chalk2.blue(`[${timeString}] `) + chalk2.yellow(`Progress: ${this.currentStep}/${this.totalSteps} (${Math.round(progress)}%) - ${message}`)
+ );
+ }
+ updateSimple(current, total, message) {
+ const progress = current / total * 100;
+ const elapsed = Date.now() - this.startTime;
+ const elapsedSeconds = Math.floor(elapsed / 1e3);
+ const minutes = Math.floor(elapsedSeconds / 60);
+ const seconds = elapsedSeconds % 60;
+ const timeString = `${minutes}:${seconds.toString().padStart(2, "0")}`;
+ console.log(
+ chalk2.blue(`[${timeString}] `) + chalk2.yellow(`Progress: ${current}/${total} (${Math.round(progress)}%) - ${message}`)
+ );
+ }
+ complete(message = "Operation completed") {
+ const elapsed = Date.now() - this.startTime;
+ const elapsedSeconds = Math.floor(elapsed / 1e3);
+ const minutes = Math.floor(elapsedSeconds / 60);
+ const seconds = elapsedSeconds % 60;
+ const timeString = `${minutes}:${seconds.toString().padStart(2, "0")}`;
+ console.log(
+ chalk2.green(`[${timeString}] \u2713 ${message}`)
+ );
+ }
+ estimateRemaining(current, total) {
+ const elapsed = Date.now() - this.startTime;
+ const progress = current / total;
+ const estimatedTotalTime = elapsed / progress;
+ const remainingTime = estimatedTotalTime - elapsed;
+ const remainingSeconds = Math.floor(remainingTime / 1e3);
+ const minutes = Math.floor(remainingSeconds / 60);
+ const seconds = remainingSeconds % 60;
+ return `${minutes}:${seconds.toString().padStart(2, "0")}`;
+ }
+ };
+ module2.exports = ProgressTracker2;
+ }
+ });
+
+ // src/core/retry-handler.js
+ var require_retry_handler = __commonJS({
+ "src/core/retry-handler.js"(exports2, module2) {
+ var chalk2 = require_source();
+ var RetryHandler2 = class {
+ constructor(maxRetries = 3, baseDelay = 1e3, backoffMultiplier = 2) {
+ this.maxRetries = maxRetries;
+ this.baseDelay = baseDelay;
+ this.backoffMultiplier = backoffMultiplier;
+ }
+ async executeWithRetry(operation, context = "") {
+ let lastError;
+ for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
+ try {
+ if (attempt > 0) {
+ const delay = this.baseDelay * Math.pow(this.backoffMultiplier, attempt - 1);
+ console.log(chalk2.yellow(`Retry attempt ${attempt}/${this.maxRetries} for ${context} after ${delay}ms delay...`));
+ await this.sleep(delay);
+ }
+ const result = await operation(attempt);
+ return { success: true, result, attempts: attempt + 1 };
+ } catch (error) {
+ lastError = error;
+ if (attempt < this.maxRetries) {
+ console.log(chalk2.red(`Attempt ${attempt + 1} failed for ${context}: ${error.message}`));
+ }
+ }
+ }
+ return { success: false, error: lastError, attempts: this.maxRetries + 1 };
+ }
+ sleep(ms) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+ // Specific retry for network requests
+ async executeNetworkRequest(url, options, context = "network request") {
+ return this.executeWithRetry(async (attempt) => {
+ const response = await fetch(url, options);
+ if (!response.ok) {
+ const errorText = await response.text().catch(() => "Unknown error");
+ throw new Error(`HTTP ${response.status}: ${errorText}`);
+ }
+ return await response.json();
+ }, context);
+ }
+ };
+ module2.exports = RetryHandler2;
+ }
+ });
+
  // src/core/readme-generator.js
  var require_readme_generator = __commonJS({
  "src/core/readme-generator.js"(exports2, module2) {
@@ -65967,7 +66470,10 @@ arcvision scan --upload
  ## Source of Truth
 
  This artifact is the **source of truth** for system structure.
- All explanations, decisions, and AI reasoning should reference it.`;
+ All explanations, decisions, and AI reasoning should reference it.
+
+ Some execution script invocations are dynamically assembled at runtime and may not be statically traceable; such scripts are included
+ as execution boundaries without guaranteed call-site resolution`;
  return content;
  }
  function generateReadme(outputDir, toolVersion, blastRadiusData = null) {
@@ -65988,8 +66494,13 @@ var chalk = require_source();
  var path = require("path");
  var fs = require("fs");
  var os = require("os");
+ var zlib = require("zlib");
  var scanner = require_scanner();
  var { generateDiffSummary } = require_diff_analyzer();
+ var { ChunkedUploader } = require_chunked_uploader();
+ var CompressionUtil = require_compression();
+ var ProgressTracker = require_progress_tracker();
+ var RetryHandler = require_retry_handler();
  var version = "1.0.0";
  try {
  const packageJsonPath = path.join(__dirname, "../package.json");
@@ -66071,68 +66582,159 @@ async function uploadToDatabase(jsonData) {
  process.exit(1);
  }
  try {
- console.log(chalk.blue(`Uploading to ${API_URL}/api/upload...`));
- const controller = new AbortController();
- const progress30s = setTimeout(() => {
- console.log(chalk.yellow("Upload taking longer than expected, please wait while the process continues..."));
- }, 3e4);
- const progress60s = setTimeout(() => {
- console.log(chalk.yellow("File size is large and may take additional time, process still working..."));
- }, 6e4);
- const timeoutId = setTimeout(() => {
- controller.abort();
- clearTimeout(progress30s);
- clearTimeout(progress60s);
- }, 12e4);
- const response = await fetch(`${API_URL}/api/upload`, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- "Authorization": `Bearer ${token}`
- },
- body: JSON.stringify({
- graph: jsonData
- }),
- signal: controller.signal
- });
- clearTimeout(timeoutId);
- clearTimeout(progress30s);
- clearTimeout(progress60s);
- if (response.status === 401) {
- console.error(chalk.red("\u274C Invalid or revoked token."));
- console.error(chalk.yellow("The token may be invalid, revoked, or the associated project may have been deleted."));
- console.log(chalk.yellow("\u{1F4A1} Please create a new project on the dashboard and generate a new token."));
- process.exit(1);
- }
- if (response.status === 404) {
- console.error(chalk.red("\u274C Project not found."));
- console.error(chalk.yellow("The project associated with this token may have been deleted."));
- console.log(chalk.yellow("\u{1F4A1} Please create a new project on the dashboard and generate a new token."));
- process.exit(1);
- }
- if (response.status === 429) {
- console.error(chalk.red("\u274C Rate limit exceeded. Please wait before trying again."));
- process.exit(1);
- }
- if (!response.ok) {
- console.error(chalk.red(`\u274C Upload failed: ${response.status} ${response.statusText}`));
- const text = await response.text();
- try {
- const errorJson = JSON.parse(text);
- if (errorJson.error)
- console.error(chalk.red(`Server Error: ${errorJson.error}`));
- } catch (e) {
- console.error(chalk.red(`Server Error: ${text}`));
+ console.log(chalk.blue(`Starting upload to ${API_URL}/api/upload...`));
+ const originalSize = Buffer.byteLength(JSON.stringify(jsonData));
+ const sizeInMB = Math.round(originalSize / (1024 * 1024));
+ console.log(chalk.yellow(`Data size: ~${sizeInMB} MB`));
+ const needsChunkedUpload = sizeInMB > 3;
+ const needsCompression = CompressionUtil.needsCompression(jsonData, 1024 * 1024 * 2);
+ if (needsCompression) {
+ console.log(chalk.yellow("Compressing data to reduce payload size..."));
+ const compressionRatio = await CompressionUtil.getCompressionRatio(jsonData);
+ console.log(chalk.yellow(`Compression ratio: ${compressionRatio.ratio.toFixed(2)}x (${compressionRatio.percentage}% reduction)`));
+ const compressedData = await CompressionUtil.compress(jsonData);
+ const compressedSize = Buffer.byteLength(compressedData);
+ const compressedSizeInMB = Math.round(compressedSize / (1024 * 1024));
+ console.log(chalk.yellow(`Compressed size: ~${compressedSizeInMB} MB`));
+ if (compressedSizeInMB > 3 || needsChunkedUpload) {
+ console.log(chalk.blue("Using chunked upload for large compressed data..."));
+ const chunkedUploader = new ChunkedUploader();
+ const retryHandler = new RetryHandler(2, 2e3, 2);
+ const result = await retryHandler.executeWithRetry(async () => {
+ return await chunkedUploader.uploadInChunks(
+ { graph: compressedData, compressed: true },
+ token,
+ API_URL
+ );
+ }, "chunked upload");
+ if (result.success) {
+ console.log(chalk.green("\u2705 Graph uploaded successfully using chunked upload!"));
+ } else {
+ console.error(chalk.red("\u274C Chunked upload failed after retries:", result.error.message));
+ process.exit(1);
+ }
+ } else {
+ console.log(chalk.blue("Sending compressed data in single request..."));
+ const retryHandler = new RetryHandler(3, 1e3, 2);
+ const result = await retryHandler.executeWithRetry(async () => {
+ const controller = new AbortController();
+ const progress30s = setTimeout(() => {
+ console.log(chalk.yellow("Upload taking longer than expected, please wait while the process continues..."));
+ }, 3e4);
+ const progress60s = setTimeout(() => {
+ console.log(chalk.yellow("File size is large and may take additional time, process still working..."));
+ }, 6e4);
+ const timeoutId = setTimeout(() => {
+ controller.abort();
+ clearTimeout(progress30s);
+ clearTimeout(progress60s);
+ }, 18e4);
+ const response = await fetch(`${API_URL}/api/upload`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ "Authorization": `Bearer ${token}`
+ },
+ body: JSON.stringify({
+ graph: compressedData,
+ compressed: true
+ }),
+ signal: controller.signal
+ });
+ clearTimeout(timeoutId);
+ clearTimeout(progress30s);
+ clearTimeout(progress60s);
+ if (response.status === 401) {
+ throw new Error("Invalid or revoked token");
+ }
+ if (response.status === 404) {
+ throw new Error("Project not found");
+ }
+ if (response.status === 429) {
+ throw new Error("Rate limit exceeded");
+ }
+ if (!response.ok) {
+ const text = await response.text().catch(() => "Unknown error");
+ throw new Error(`${response.status} ${response.statusText}: ${text}`);
+ }
+ return await response.json();
+ }, "compressed upload");
+ if (result.success && result.result.success) {
+ console.log(chalk.green("\u2705 Compressed graph uploaded successfully!"));
+ } else {
+ console.error(chalk.red("\u274C Compressed upload failed:", result.error?.message || "Unknown error"));
+ process.exit(1);
+ }
  }
- process.exit(1);
- }
- const result = await response.json();
- if (result.success) {
- console.log(chalk.green("\u2705 Graph uploaded successfully!"));
  } else {
- console.log(chalk.red("\u274C Upload reported failure despite 200 OK."));
- if (result.error) {
- console.error(chalk.red(`Error details: ${result.error}`));
+ if (needsChunkedUpload) {
+ console.log(chalk.blue("Using chunked upload for large data..."));
+ const chunkedUploader = new ChunkedUploader();
+ const retryHandler = new RetryHandler(2, 2e3, 2);
+ const result = await retryHandler.executeWithRetry(async () => {
+ return await chunkedUploader.uploadInChunks(
+ { graph: jsonData },
+ token,
+ API_URL
+ );
+ }, "chunked upload");
+ if (result.success) {
+ console.log(chalk.green("\u2705 Graph uploaded successfully using chunked upload!"));
+ } else {
+ console.error(chalk.red("\u274C Chunked upload failed after retries:", result.error.message));
+ process.exit(1);
+ }
+ } else {
+ console.log(chalk.blue("Sending data in single request..."));
+ const retryHandler = new RetryHandler(3, 1e3, 2);
+ const result = await retryHandler.executeWithRetry(async () => {
+ const controller = new AbortController();
+ const progress30s = setTimeout(() => {
+ console.log(chalk.yellow("Upload taking longer than expected, please wait while the process continues..."));
+ }, 3e4);
+ const progress60s = setTimeout(() => {
+ console.log(chalk.yellow("File size is large and may take additional time, process still working..."));
+ }, 6e4);
+ const timeoutId = setTimeout(() => {
+ controller.abort();
+ clearTimeout(progress30s);
+ clearTimeout(progress60s);
+ }, 12e4);
+ const response = await fetch(`${API_URL}/api/upload`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ "Authorization": `Bearer ${token}`
+ },
+ body: JSON.stringify({
+ graph: jsonData
+ }),
+ signal: controller.signal
+ });
+ clearTimeout(timeoutId);
+ clearTimeout(progress30s);
+ clearTimeout(progress60s);
+ if (response.status === 401) {
+ throw new Error("Invalid or revoked token");
+ }
+ if (response.status === 404) {
+ throw new Error("Project not found");
+ }
+ if (response.status === 429) {
+ throw new Error("Rate limit exceeded");
+ }
+ if (!response.ok) {
+ const text = await response.text().catch(() => "Unknown error");
+ throw new Error(`${response.status} ${response.statusText}: ${text}`);
+ }
+ return await response.json();
+ }, "standard upload");
+ if (result.success && result.result.success) {
+ console.log(chalk.green("\u2705 Graph uploaded successfully!"));
+ } else {
+ console.error(chalk.red("\u274C Upload failed:", result.error?.message || "Unknown error"));
+ process.exit(1);
+ }
+ }
  }
  }
  } catch (error) {
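
The rewritten uploadToDatabase picks a strategy from two thresholds: compression kicks in above 2 MB of raw JSON, chunking above 3 MB (raw or compressed). Reduced to its decision logic, it looks roughly like the sketch below; this is an illustration, not the shipped code. Note the uncompressed-chunked branch is effectively unreachable, since anything over 3 MB raw is also over the 2 MB compression threshold.

```js
// Sizes in MB; thresholds taken from the diff above (Math.round rounding omitted).
function chooseUploadStrategy(rawMB, compressedMB) {
  const needsCompression = rawMB > 2;
  const needsChunking = rawMB > 3;
  if (needsCompression) {
    return compressedMB > 3 || needsChunking
      ? "compressed + chunked"
      : "compressed, single request";
  }
  return needsChunking ? "chunked" : "single request"; // "chunked" never reached
}

console.log(chooseUploadStrategy(1, 1));   // "single request"
console.log(chooseUploadStrategy(2.5, 1)); // "compressed, single request"
console.log(chooseUploadStrategy(8, 2));   // "compressed + chunked"
```
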
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "arcvision",
- "version": "0.2.7",
+ "version": "0.2.12",
  "description": "Architecture scanner for modern codebases",
  "bin": {
  "arcvision": "./dist/index.js"