arcvision 0.2.12 → 0.2.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. package/ARCVISION_DIRECTORY_STRUCTURE.md +104 -0
  2. package/CLI_STRUCTURE.md +110 -0
  3. package/CONFIGURATION.md +119 -0
  4. package/IMPLEMENTATION_SUMMARY.md +99 -0
  5. package/README.md +149 -89
  6. package/architecture.authority.ledger.json +46 -0
  7. package/arcvision-0.2.3.tgz +0 -0
  8. package/arcvision-0.2.4.tgz +0 -0
  9. package/arcvision-0.2.5.tgz +0 -0
  10. package/arcvision.context.diff.json +2181 -0
  11. package/arcvision.context.json +1021 -0
  12. package/arcvision.context.v1.json +2163 -0
  13. package/arcvision.context.v2.json +2173 -0
  14. package/arcvision_context/README.md +93 -0
  15. package/arcvision_context/architecture.authority.ledger.json +83 -0
  16. package/arcvision_context/arcvision.context.json +6884 -0
  17. package/debug-cycle-detection.js +56 -0
  18. package/dist/index.js +1626 -25
  19. package/docs/ENHANCED_ACCURACY_SAFETY_PROTOCOL.md +172 -0
  20. package/docs/accuracy-enhancement-artifacts/enhanced-validation-config.json +98 -0
  21. package/docs/acig-robustness-guide.md +164 -0
  22. package/docs/authoritative-gate-implementation.md +168 -0
  23. package/docs/cli-strengthening-summary.md +232 -0
  24. package/docs/invariant-system-summary.md +100 -0
  25. package/docs/invariant-system.md +112 -0
  26. package/generate_large_test.js +42 -0
  27. package/large_test_repo.json +1 -0
  28. package/output1.json +2163 -0
  29. package/output2.json +2163 -0
  30. package/package.json +46 -36
  31. package/scan_calcom_report.txt +0 -0
  32. package/scan_leafmint_report.txt +0 -0
  33. package/scan_output.txt +0 -0
  34. package/scan_trigger_report.txt +0 -0
  35. package/schema/arcvision_context_schema_v1.json +136 -1
  36. package/src/arcvision-guard.js +433 -0
  37. package/src/core/authority-core-detector.js +382 -0
  38. package/src/core/authority-ledger.js +300 -0
  39. package/src/core/blastRadius.js +299 -0
  40. package/src/core/call-resolver.js +196 -0
  41. package/src/core/change-evaluator.js +509 -0
  42. package/src/core/change-evaluator.js.backup +424 -0
  43. package/src/core/change-evaluator.ts +285 -0
  44. package/src/core/chunked-uploader.js +180 -0
  45. package/src/core/circular-dependency-detector.js +404 -0
  46. package/src/core/cli-error-handler.js +458 -0
  47. package/src/core/cli-validator.js +458 -0
  48. package/src/core/compression.js +64 -0
  49. package/src/core/context_builder.js +741 -0
  50. package/src/core/dependency-manager.js +134 -0
  51. package/src/core/di-detector.js +202 -0
  52. package/src/core/diff-analyzer.js +76 -0
  53. package/src/core/example-invariants.js +135 -0
  54. package/src/core/failure-mode-synthesizer.js +341 -0
  55. package/src/core/invariant-analyzer.js +294 -0
  56. package/src/core/invariant-detector.js +548 -0
  57. package/src/core/invariant-enforcer.js +171 -0
  58. package/src/core/invariant-evaluation-utils.js +172 -0
  59. package/src/core/invariant-hooks.js +152 -0
  60. package/src/core/invariant-integration-example.js +186 -0
  61. package/src/core/invariant-registry.js +298 -0
  62. package/src/core/invariant-registry.ts +100 -0
  63. package/src/core/invariant-types.js +66 -0
  64. package/src/core/invariants-index.js +88 -0
  65. package/src/core/method-tracker.js +170 -0
  66. package/src/core/override-handler.js +304 -0
  67. package/src/core/ownership-resolver.js +227 -0
  68. package/src/core/parser-enhanced.js +80 -0
  69. package/src/core/parser.js +610 -0
  70. package/src/core/path-resolver.js +240 -0
  71. package/src/core/pattern-matcher.js +246 -0
  72. package/src/core/progress-tracker.js +71 -0
  73. package/src/core/react-nextjs-detector.js +245 -0
  74. package/src/core/readme-generator.js +167 -0
  75. package/src/core/retry-handler.js +57 -0
  76. package/src/core/scanner.js +289 -0
  77. package/src/core/semantic-analyzer.js +204 -0
  78. package/src/core/structural-context-owner.js +442 -0
  79. package/src/core/symbol-indexer.js +164 -0
  80. package/src/core/tsconfig-utils.js +73 -0
  81. package/src/core/type-analyzer.js +272 -0
  82. package/src/core/watcher.js +18 -0
  83. package/src/core/workspace-scanner.js +88 -0
  84. package/src/engine/context_builder.js +280 -0
  85. package/src/engine/context_sorter.js +59 -0
  86. package/src/engine/context_validator.js +200 -0
  87. package/src/engine/id-generator.js +16 -0
  88. package/src/engine/pass1_facts.js +260 -0
  89. package/src/engine/pass2_semantics.js +333 -0
  90. package/src/engine/pass3_lifter.js +99 -0
  91. package/src/engine/pass4_signals.js +201 -0
  92. package/src/index.js +830 -0
  93. package/src/plugins/express-plugin.js +48 -0
  94. package/src/plugins/plugin-manager.js +58 -0
  95. package/src/plugins/react-plugin.js +54 -0
  96. package/temp_original.js +0 -0
  97. package/test/determinism-test.js +83 -0
  98. package/test-authoritative-context.js +53 -0
  99. package/test-real-authoritative-context.js +118 -0
  100. package/test-upload-enhancements.js +111 -0
  101. package/test_repos/allowed-clean-architecture/.arcvision/invariants.json +57 -0
  102. package/test_repos/allowed-clean-architecture/adapters/controllers/UserController.js +95 -0
  103. package/test_repos/allowed-clean-architecture/adapters/http/HttpServer.js +78 -0
  104. package/test_repos/allowed-clean-architecture/application/dtos/CreateUserRequest.js +37 -0
  105. package/test_repos/allowed-clean-architecture/application/services/UserService.js +61 -0
  106. package/test_repos/allowed-clean-architecture/arcvision_context/README.md +93 -0
  107. package/test_repos/allowed-clean-architecture/arcvision_context/arcvision.context.json +2796 -0
  108. package/test_repos/allowed-clean-architecture/domain/interfaces/UserRepository.js +25 -0
  109. package/test_repos/allowed-clean-architecture/domain/models/User.js +39 -0
  110. package/test_repos/allowed-clean-architecture/index.js +45 -0
  111. package/test_repos/allowed-clean-architecture/infrastructure/database/DatabaseConnection.js +56 -0
  112. package/test_repos/allowed-clean-architecture/infrastructure/repositories/InMemoryUserRepository.js +61 -0
  113. package/test_repos/allowed-clean-architecture/package.json +15 -0
  114. package/test_repos/blocked-legacy-monolith/.arcvision/invariants.json +78 -0
  115. package/test_repos/blocked-legacy-monolith/arcvision_context/README.md +93 -0
  116. package/test_repos/blocked-legacy-monolith/arcvision_context/arcvision.context.json +2882 -0
  117. package/test_repos/blocked-legacy-monolith/database/dbConnection.js +35 -0
  118. package/test_repos/blocked-legacy-monolith/index.js +38 -0
  119. package/test_repos/blocked-legacy-monolith/modules/emailService.js +31 -0
  120. package/test_repos/blocked-legacy-monolith/modules/paymentProcessor.js +37 -0
  121. package/test_repos/blocked-legacy-monolith/package.json +15 -0
  122. package/test_repos/blocked-legacy-monolith/shared/utils.js +19 -0
  123. package/test_repos/blocked-legacy-monolith/utils/helpers.js +23 -0
  124. package/test_repos/risky-microservices-concerns/.arcvision/invariants.json +69 -0
  125. package/test_repos/risky-microservices-concerns/arcvision_context/README.md +93 -0
  126. package/test_repos/risky-microservices-concerns/arcvision_context/arcvision.context.json +3070 -0
  127. package/test_repos/risky-microservices-concerns/common/utils.js +77 -0
  128. package/test_repos/risky-microservices-concerns/gateways/apiGateway.js +84 -0
  129. package/test_repos/risky-microservices-concerns/index.js +20 -0
  130. package/test_repos/risky-microservices-concerns/libs/deprecatedHelper.js +36 -0
  131. package/test_repos/risky-microservices-concerns/package.json +15 -0
  132. package/test_repos/risky-microservices-concerns/services/orderService.js +42 -0
  133. package/test_repos/risky-microservices-concerns/services/userService.js +48 -0
  134. package/verify_engine.js +116 -0
package/src/index.js ADDED
@@ -0,0 +1,830 @@
1
+ #!/usr/bin/env node
2
+
3
+ const { Command } = require('commander');
4
+ const chalk = require('chalk');
5
+ const path = require('path');
6
+ const fs = require('fs');
7
+ const os = require('os');
8
+ const zlib = require('zlib');
9
+ const scanner = require('./core/scanner');
10
+ const { generateDiffSummary } = require('./core/diff-analyzer');
11
+ const { ChunkedUploader } = require('./core/chunked-uploader');
12
+ const CompressionUtil = require('./core/compression');
13
+ const ProgressTracker = require('./core/progress-tracker');
14
+ const RetryHandler = require('./core/retry-handler');
15
+ // Import new ACIG components
16
+ const { invariantRegistry } = require('./core/invariant-registry');
17
+ const { evaluateChange } = require('./core/change-evaluator');
18
+ // Import authority gate components
19
+ const { authorityLedger } = require('./core/authority-ledger');
20
+ const { overrideHandler } = require('./core/override-handler');
21
+ // Import robust validation and error handling
22
+ const { cliValidator } = require('./core/cli-validator');
23
+ const { cliErrorHandler } = require('./core/cli-error-handler');
24
+
25
// Resolve the CLI version from the adjacent package.json.
// Bundled builds may not ship package.json next to the entry point,
// so any failure falls back to a default version string.
let version = '1.0.0'; // fallback version
try {
  const manifest = JSON.parse(
    fs.readFileSync(path.join(__dirname, '../package.json'), 'utf8')
  );
  version = manifest.version;
} catch (error) {
  // Happens in bundled environments where package.json is not present.
  console.warn('Warning: Could not load version from package.json, using default');
}

// Per-user CLI configuration file (stores the upload token).
const CONFIG_FILE = path.join(os.homedir(), '.arcvisionrc');
// Backend endpoint; override with ARCVISION_API_URL for self-hosted setups.
const API_URL = process.env.ARCVISION_API_URL || 'https://arcvisiondev.vercel.app';
41
+
42
+ // Blast radius analysis
43
+ const { findHighestBlastRadius, getTopBlastRadiusFiles, computeBlastRadiusWithPercentage } = require('./core/blastRadius');
44
+
45
/**
 * Extract per-file blast-radius values from an architecture map and
 * return the top three files with percentage figures.
 *
 * Supports several schema generations: blast_radius as a direct node
 * property, under node.metadata, under node.signals, or listed in
 * contextSurface.topBlastRadiusFiles.
 *
 * @param {object} architectureMap - parsed context graph (nodes, contextSurface)
 * @returns {{topFiles: Array, totalFiles: number}}
 */
function analyzeBlastRadius(architectureMap) {
  const nodes = architectureMap.nodes || [];
  const blastRadiusMap = {};

  for (const node of nodes) {
    let blastRadius = 0;

    if (node.blast_radius !== undefined) {
      // Current format: blast_radius is a direct property.
      blastRadius = node.blast_radius;
    } else if (node.metadata && node.metadata.blast_radius !== undefined) {
      // Old format.
      blastRadius = node.metadata.blast_radius;
    } else if (node.signals && node.signals.blast_radius !== undefined) {
      // New format: blast_radius lives in the signals object.
      blastRadius = node.signals.blast_radius;
    } else if (architectureMap.contextSurface && architectureMap.contextSurface.topBlastRadiusFiles) {
      // Last resort: look the file up in the precomputed context surface.
      const foundFile = architectureMap.contextSurface.topBlastRadiusFiles.find(
        f => f.file === node.path || f.file === node.id
      );
      if (foundFile) {
        blastRadius = foundFile.blastRadius || 0;
      }
    }

    // Key by path when available, otherwise by node id.
    blastRadiusMap[node.path || node.id] = blastRadius;
  }

  const totalFiles = nodes.length;

  // Rank files by blast radius and attach percent-of-graph figures.
  const rankedFiles = computeBlastRadiusWithPercentage(blastRadiusMap, totalFiles);

  return {
    topFiles: rankedFiles.slice(0, 3),
    totalFiles: totalFiles
  };
}
95
+
96
/**
 * Persist the upload token to the per-user config file.
 * Fatal-errors (and exits) via cliErrorHandler on any failure.
 *
 * @param {string} token - upload token; must be a string of >= 10 chars
 */
function saveToken(token) {
  try {
    // Reject anything that is not a plausible token before writing.
    const looksValid = typeof token === 'string' && token.length >= 10;
    if (!looksValid) {
      throw new Error('Invalid token format: Token must be a string with at least 10 characters');
    }

    fs.writeFileSync(CONFIG_FILE, JSON.stringify({ token }));
    console.log(chalk.green('✅ Token saved successfully!'));
  } catch (error) {
    cliErrorHandler.handleFatalError(error, {
      operation: 'save_token',
      file: CONFIG_FILE
    });
  }
}
112
+
113
/**
 * Read the saved upload token from the per-user config file.
 *
 * @returns {string|null} the token, or null when the file is missing
 *   or cannot be parsed (a corruption hint is printed in that case)
 */
function getToken() {
  try {
    if (!fs.existsSync(CONFIG_FILE)) {
      return null;
    }
    const config = JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
    return config.token;
  } catch (error) {
    console.error(chalk.red('❌ Failed to read token configuration:'), error.message);
    console.error(chalk.yellow('Token file may be corrupted. Run `arcvision link <TOKEN>` to reset.'));
    return null;
  }
}
126
+
127
/**
 * POST the graph payload once to the upload endpoint with an abort
 * timeout and periodic "still working" progress notices.
 *
 * FIX: the original cleared its three timers only on the success path,
 * so a thrown fetch/HTTP error leaked them and a stale abort or progress
 * message could fire during a later retry. All timers are now cleared
 * in `finally`.
 *
 * @param {object} body - JSON body, e.g. { graph, compressed? }
 * @param {string} token - bearer token
 * @param {number} timeoutMs - abort timeout in milliseconds
 * @returns {Promise<object>} parsed JSON response
 * @throws {Error} on 401/404/429 or any non-OK response
 */
async function postGraphOnce(body, token, timeoutMs) {
  const controller = new AbortController();

  // Progress notices so long uploads don't look hung.
  const progress30s = setTimeout(() => {
    console.log(chalk.yellow('Upload taking longer than expected, please wait while the process continues...'));
  }, 30000); // 30 seconds
  const progress60s = setTimeout(() => {
    console.log(chalk.yellow('File size is large and may take additional time, process still working...'));
  }, 60000); // 60 seconds
  const timeoutId = setTimeout(() => {
    controller.abort();
  }, timeoutMs);

  try {
    const response = await fetch(`${API_URL}/api/upload`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${token}`
      },
      body: JSON.stringify(body),
      signal: controller.signal
    });

    if (response.status === 401) {
      throw new Error('Invalid or revoked token');
    }
    if (response.status === 404) {
      throw new Error('Project not found');
    }
    if (response.status === 429) {
      throw new Error('Rate limit exceeded');
    }
    if (!response.ok) {
      const text = await response.text().catch(() => 'Unknown error');
      throw new Error(`${response.status} ${response.statusText}: ${text}`);
    }

    return await response.json();
  } finally {
    // Always release timers, on success AND on error (leak fix).
    clearTimeout(timeoutId);
    clearTimeout(progress30s);
    clearTimeout(progress60s);
  }
}

/**
 * Upload a (possibly compressed) payload via the chunked uploader with
 * retries, reporting fatally on failure.
 *
 * @param {object} payload - e.g. { graph } or { graph, compressed: true }
 * @param {string} token - bearer token
 */
async function uploadChunked(payload, token) {
  const chunkedUploader = new ChunkedUploader();
  const retryHandler = new RetryHandler(2, 2000, 2); // 2 retries, 2s base delay, 2x multiplier

  // Execute with retry mechanism.
  const result = await retryHandler.executeWithRetry(async () => {
    return await chunkedUploader.uploadInChunks(payload, token, API_URL);
  }, 'chunked upload');

  if (result.success) {
    console.log(chalk.green('✅ Graph uploaded successfully using chunked upload!'));
  } else {
    cliErrorHandler.handleFatalError(result.error, {
      operation: 'chunked_upload',
      retryCount: 2
    });
  }
}

/**
 * Upload a payload in a single request with retries.
 *
 * @param {object} body - JSON body to POST
 * @param {string} token - bearer token
 * @param {object} opts
 * @param {number} opts.timeoutMs - abort timeout for each attempt
 * @param {string} opts.label - label used by the retry handler logs
 * @param {string} opts.operation - operation name for error reporting
 * @param {string} opts.successMessage - message printed on success
 */
async function uploadSingle(body, token, { timeoutMs, label, operation, successMessage }) {
  const retryHandler = new RetryHandler(3, 1000, 2); // 3 retries, 1s base delay, 2x multiplier

  const result = await retryHandler.executeWithRetry(
    () => postGraphOnce(body, token, timeoutMs),
    label
  );

  if (result.success && result.result.success) {
    console.log(chalk.green(successMessage));
  } else {
    cliErrorHandler.handleFatalError(result.error, {
      operation,
      retryCount: 3
    });
  }
}

/**
 * Upload the scanned architecture graph to the dashboard backend.
 *
 * Strategy: compress when the payload exceeds 2 MB; use chunked upload
 * when the (compressed) payload exceeds ~3 MB; otherwise send a single
 * request. Exits the process when no token is linked; any other failure
 * is routed through cliErrorHandler.
 *
 * REFACTOR NOTE: the original duplicated ~100 lines of fetch/timeout/
 * retry plumbing across four branches; that is now shared via
 * postGraphOnce / uploadChunked / uploadSingle.
 *
 * @param {object} jsonData - the architecture map to upload
 */
async function uploadToDatabase(jsonData) {
  const token = getToken();
  if (!token) {
    console.log(chalk.red('❌ No upload token found.'));
    console.log(chalk.yellow('Run `arcvision link <TOKEN>` first to connect to a project.'));
    process.exit(1);
  }

  try {
    // Pre-flight validation for upload.
    await cliValidator.preFlightValidation('upload', {
      contextData: jsonData,
      token: token
    });

    console.log(chalk.blue(`Starting upload to ${API_URL}/api/upload...`));

    // Check the size of the data and decide on strategy.
    const originalSize = Buffer.byteLength(JSON.stringify(jsonData));
    const sizeInMB = Math.round(originalSize / (1024 * 1024));
    console.log(chalk.yellow(`Data size: ~${sizeInMB} MB`));

    const needsChunkedUpload = sizeInMB > 3; // Use chunked upload for data > 3MB
    const needsCompression = CompressionUtil.needsCompression(jsonData, 1024 * 1024 * 2); // Compress if > 2MB

    if (needsCompression) {
      console.log(chalk.yellow('Compressing data to reduce payload size...'));
      const compressionRatio = await CompressionUtil.getCompressionRatio(jsonData);
      console.log(chalk.yellow(`Compression ratio: ${compressionRatio.ratio.toFixed(2)}x (${compressionRatio.percentage}% reduction)`));

      const compressedData = await CompressionUtil.compress(jsonData);
      const compressedSize = Buffer.byteLength(compressedData);
      const compressedSizeInMB = Math.round(compressedSize / (1024 * 1024));
      console.log(chalk.yellow(`Compressed size: ~${compressedSizeInMB} MB`));

      if (compressedSizeInMB > 3 || needsChunkedUpload) {
        console.log(chalk.blue('Using chunked upload for large compressed data...'));
        await uploadChunked({ graph: compressedData, compressed: true }, token);
      } else {
        console.log(chalk.blue('Sending compressed data in single request...'));
        await uploadSingle({ graph: compressedData, compressed: true }, token, {
          timeoutMs: 180000, // 180 second timeout for larger files
          label: 'compressed upload',
          operation: 'compressed_upload',
          successMessage: '✅ Compressed graph uploaded successfully!'
        });
      }
    } else if (needsChunkedUpload) {
      console.log(chalk.blue('Using chunked upload for large data...'));
      await uploadChunked({ graph: jsonData }, token);
    } else {
      // Standard upload for small data.
      console.log(chalk.blue('Sending data in single request...'));
      await uploadSingle({ graph: jsonData }, token, {
        timeoutMs: 120000, // 120 second timeout
        label: 'standard upload',
        operation: 'standard_upload',
        successMessage: '✅ Graph uploaded successfully!'
      });
    }
  } catch (error) {
    cliErrorHandler.handleFatalError(error, {
      operation: 'upload_to_database'
    });
  }
}
365
+
366
// Root CLI definition. CLI_DESCRIPTION (env) overrides the default help text.
const program = new Command();

program
  .name('arcvision')
  .version(version)
  .description(process.env.CLI_DESCRIPTION || `CLI to visualize codebase architecture

Quick Start:
1. Sign up at the ArcVision dashboard
2. Create a project and name it
3. Generate a CLI token
4. Run: arcvision link <token>
5. Run: arcvision scan --upload
6. Open dashboard to see results
`);
381
+
382
// `arcvision link <token>` — store the dashboard upload token locally.
program
  .command('link <token>')
  .description('Link this CLI to a project via upload token')
  .action((token) => {
    try {
      saveToken(token);
    } catch (error) {
      // saveToken reports its own failures; ensure a non-zero exit anyway.
      process.exit(1);
    }
  });
393
+
394
// `arcvision scan [directory]` — scan a repo, report blast-radius hubs,
// validate the resulting context, then either upload it (--upload) or
// save it under <dir>/arcvision_context/.
//
// FIXES: removed a redundant `const fs = require('fs')` inside the save
// branch that shadowed the top-level `fs` import, and hoisted the
// readme-generator require that was duplicated in both branches.
program
  .command('scan')
  .description('Scan the current directory and generate architecture map')
  .argument('[directory]', 'Directory to scan', '.')
  .option('-u, --upload', 'Upload to database')
  .action(async (directory, options) => {
    const targetDir = path.resolve(directory);

    try {
      // Pre-flight validation.
      await cliValidator.preFlightValidation('scan', {
        directory: targetDir
      });

      console.log(chalk.blue(`Scanning directory: ${targetDir}`));

      const map = await scanner.scan(targetDir);
      console.log(chalk.green('Scan complete!'));

      // Analyze and print blast radius insight.
      const blastRadiusAnalysis = analyzeBlastRadius(map);
      if (blastRadiusAnalysis && blastRadiusAnalysis.topFiles && blastRadiusAnalysis.topFiles.length > 0) {
        console.log('\n⚠️ Top Structural Context Hubs Detected:\n');

        blastRadiusAnalysis.topFiles.forEach((item, index) => {
          // Rank-specific warning copy for the top three hubs.
          let warningMessage = '';
          if (index === 0) {
            warningMessage = 'Changes here may silently propagate across the system.';
          } else if (index === 1) {
            warningMessage = 'Acts as a coordination layer between components.';
          } else {
            warningMessage = 'Modifications can cause widespread inconsistencies.';
          }

          console.log(`${index + 1}. ${item.file}`);
          console.log(`   Blast Radius: ${item.blastRadius} files (${item.percentOfGraph}%)`);
          console.log(`   Warning: ${warningMessage}\n`);
        });
      } else {
        console.log('\nNo high-structure files detected based on import dependencies.');
      }

      // Validate the map before saving or uploading; validation problems
      // are reported but deliberately non-fatal.
      const { validateContext } = require('./engine/context_validator');
      const validation = validateContext(map);

      if (!validation.valid) {
        console.warn('⚠️ VALIDATION ISSUES FOUND:');
        validation.errors.forEach(e => console.warn('  -', e));
        console.warn('⚠️ Proceeding with output despite validation issues to see results');
      }

      // Create dedicated arcvision_context directory.
      const arcvisionDir = path.join(targetDir, 'arcvision_context');
      if (!fs.existsSync(arcvisionDir)) {
        fs.mkdirSync(arcvisionDir, { recursive: true });
        console.log(chalk.green(`📁 Created arcvision_context directory: ${arcvisionDir}`));
      }

      // README generator is needed in both branches below.
      const { generateReadme } = require('./core/readme-generator');

      if (options.upload) {
        await uploadToDatabase(map);

        // Generate README for system context when uploading.
        generateReadme(arcvisionDir, version, blastRadiusAnalysis);
      } else {
        // Save context to the dedicated directory (top-level fs import).
        const outputFileName = path.join(arcvisionDir, 'arcvision.context.json');
        fs.writeFileSync(outputFileName, JSON.stringify(map, null, 2));
        console.log(chalk.green(`✅ Structural context saved to ${outputFileName}`));

        // Generate README for system context in dedicated directory.
        generateReadme(arcvisionDir, version, blastRadiusAnalysis);

        console.log(chalk.dim('\nUse --upload to send to dashboard.'));
      }
    } catch (error) {
      cliErrorHandler.handleFatalError(error, {
        operation: 'scan',
        directory: targetDir
      });
    }
  });
480
+
481
// `arcvision diff <old> <new>` — compare two context artifacts, embed the
// diff summary in the new context, and write it to arcvision_context/.
program
  .command('diff')
  .description('Compare two context artifacts and generate diff summary')
  .argument('<old-file>', 'Path to the old context artifact')
  .argument('<new-file>', 'Path to the new context artifact')
  .action(async (oldFile, newFile) => {
    try {
      // Pre-flight validation.
      await cliValidator.preFlightValidation('diff', {
        oldFile,
        newFile
      });

      console.log(chalk.blue(`Comparing context artifacts:`));
      console.log(chalk.blue(`  Old: ${oldFile}`));
      console.log(chalk.blue(`  New: ${newFile}`));

      // Load both artifacts.
      const previousContext = JSON.parse(fs.readFileSync(oldFile, 'utf8'));
      const currentContext = JSON.parse(fs.readFileSync(newFile, 'utf8'));

      // Compute the structural diff and attach it to the new context.
      const diffResult = generateDiffSummary(previousContext, currentContext);
      currentContext.diff_summary = diffResult.diff_summary;

      // Persist the annotated context next to the new file.
      const arcvisionDir = path.join(path.dirname(newFile), 'arcvision_context');
      if (!fs.existsSync(arcvisionDir)) {
        fs.mkdirSync(arcvisionDir, { recursive: true });
      }
      const outputFileName = path.join(arcvisionDir, 'arcvision.context.diff.json');
      fs.writeFileSync(outputFileName, JSON.stringify(currentContext, null, 2));

      console.log(chalk.green('✅ Structural diff completed!'));
      console.log(chalk.green(`✅ Diff summary saved to ${outputFileName}`));
      console.log(chalk.yellow('\nDiff Summary:'));

      // Table-driven printout of the summary counters.
      const summary = diffResult.diff_summary;
      const rows = [
        ['Nodes Added', summary.nodes_added],
        ['Nodes Removed', summary.nodes_removed],
        ['Edges Added', summary.edges_added],
        ['Edges Removed', summary.edges_removed],
        ['Roles Changed', summary.roles_changed],
        ['Blast Radius Changes', summary.blast_radius_changes]
      ];
      for (const [label, value] of rows) {
        console.log(chalk.yellow(`  ${label}: ${value}`));
      }

    } catch (error) {
      cliErrorHandler.handleFatalError(error, {
        operation: 'diff',
        oldFile,
        newFile
      });
    }
  });
535
+
536
+ // Add the new Authoritative Change Impact Gate command
537
+ program
538
+ .command('evaluate')
539
+ .alias('acig')
540
+ .description('Evaluate changes against authoritative invariants (Authoritative Change Impact Gate)')
541
+ .argument('[directory]', 'Directory to evaluate', '.')
542
+ .option('-c, --context-file <file>', 'Context file to evaluate against (default: arcvision.context.json)')
543
+ .option('-i, --invariants-file <file>', 'Invariants file to use (default: .arcvision/invariants.json)')
544
+ .option('-v, --verbose', 'Show detailed evaluation output')
545
+ .option('--simulate-changes <files>', 'Simulate changes to specific files (comma-separated)')
546
+ .option('--fail-on-blocked', 'Exit with error code when decision is BLOCKED (default: false)')
547
+ .action(async (directory, options) => {
548
+ const targetDir = path.resolve(directory);
549
+
550
+ try {
551
+ // Pre-flight validation
552
+ await cliValidator.preFlightValidation('evaluate', {
553
+ directory: targetDir,
554
+ contextFile: options.contextFile,
555
+ invariantsFile: options.invariantsFile
556
+ });
557
+
558
+ console.log(chalk.blue('🔒 Authoritative Change Impact Gate (ACIG)'));
559
+ console.log(chalk.blue('Evaluating changes against canonical system invariants...\n'));
560
+
561
+ // Load current context from arcvision_context directory
562
+ const contextFile = options.contextFile || path.join(targetDir, 'arcvision_context', 'arcvision.context.json');
563
+ if (!fs.existsSync(contextFile)) {
564
+ console.error(chalk.red(`❌ Context file not found: ${contextFile}`));
565
+ console.error(chalk.yellow('Run `arcvision scan` first to generate context file'));
566
+ process.exit(1);
567
+ }
568
+
569
+ let context;
570
+ try {
571
+ const contextContent = fs.readFileSync(contextFile, 'utf8');
572
+ context = JSON.parse(contextContent);
573
+ } catch (parseError) {
574
+ console.error(chalk.red(`❌ Failed to parse context file: ${parseError.message}`));
575
+ process.exit(1);
576
+ }
577
+
578
+ console.log(chalk.green(`✅ Loaded context from: ${contextFile}`));
579
+
580
+ // Load invariants
581
+ const invariantsFile = options.invariantsFile || path.join(targetDir, '.arcvision', 'invariants.json');
582
+ let loadSuccess = false;
583
+
584
+ if (fs.existsSync(invariantsFile)) {
585
+ loadSuccess = invariantRegistry.loadFromFile(invariantsFile);
586
+ if (loadSuccess) {
587
+ console.log(chalk.green(`✅ Loaded invariants from: ${invariantsFile}`));
588
+ } else {
589
+ console.log(chalk.yellow(`⚠️ Failed to load invariants from: ${invariantsFile}`));
590
+ console.log(chalk.yellow('Using default invariants...'));
591
+ }
592
+ } else {
593
+ console.log(chalk.yellow(`⚠️ Invariants file not found: ${invariantsFile}`));
594
+ console.log(chalk.yellow('Using default invariants...'));
595
+ }
596
+
597
+ // If invariants couldn't be loaded or file doesn't exist, use defaults
598
+ if (!loadSuccess) {
599
+ const defaultInvariants = JSON.parse(process.env.DEFAULT_INVARIANTS_JSON || JSON.stringify([
600
+ {
601
+ id: 'generic-architecture-rules',
602
+ system: 'general',
603
+ description: 'Generic architectural rules - define your own invariants.json',
604
+ severity: 'risk',
605
+ scope: { files: ['**/*.js', '**/*.ts', '**/*.jsx', '**/*.tsx'] },
606
+ rule: { type: 'dependency', condition: {} },
607
+ createdAt: new Date().toISOString()
608
+ }
609
+ ]));
610
+ // Temporarily override the registry for this evaluation
611
+ const originalGetAll = invariantRegistry.getAll.bind(invariantRegistry);
612
+ invariantRegistry.getAll = () => defaultInvariants;
613
+ }
614
+
615
+ // Get list of changed files
616
+ let changedFiles = [];
617
+
618
+ if (options.simulateChanges) {
619
+ // Use simulated changes if provided
620
+ changedFiles = options.simulateChanges.split(',').map(f => f.trim()).filter(f => f);
621
+ console.log(chalk.blue(`Using simulated changes: ${changedFiles.join(', ')}`));
622
+ } else {
623
+ // Otherwise, use all files from the context as "changed" to demonstrate the evaluation
624
+ changedFiles = context.nodes ? context.nodes.map(node => node.path || node.id).filter(Boolean) : [];
625
+ }
626
+
627
+ if (changedFiles.length === 0) {
628
+ console.log(chalk.yellow('No files found in context to evaluate'));
629
+ process.exit(0);
630
+ }
631
+
632
+ console.log(chalk.blue(`Evaluating ${changedFiles.length} files against ${invariantRegistry.getAll().length} invariants...`));
633
+
634
+ // Evaluate changes
635
+ const evaluation = evaluateChange({
636
+ changedFiles,
637
+ dependencyGraph: context,
638
+ invariants: invariantRegistry.getAll(),
639
+ context: { blastRadiusAnalysis: analyzeBlastRadius(context) }
640
+ });
641
+
642
+ // Record BLOCKED decisions in authority ledger
643
+ let eventId = null;
644
+ if (evaluation.decision === 'BLOCKED') {
645
+ eventId = authorityLedger.recordBlocked(evaluation, {
646
+ commit: options.commit || 'HEAD',
647
+ branch: options.branch || 'unknown',
648
+ author: options.author || 'unknown'
649
+ });
650
+ }
651
+
652
+ // Display results
653
+ console.log('\n' + '='.repeat(60));
654
+ console.log(chalk.bold(`DECISION: ${evaluation.decision}`));
655
+ console.log('='.repeat(60));
656
+
657
+ if (evaluation.decision === 'BLOCKED') {
658
+ console.log(chalk.red('❌ CHANGE BLOCKED - Invariant violations detected'));
659
+ console.log(chalk.red(`❌ ${evaluation.violations.length} invariant(s) violated:`));
660
+ evaluation.violations.forEach((violation, index) => {
661
+ console.log(chalk.red(` ${index + 1}. ${violation.description}`));
662
+ console.log(chalk.red(` System: ${violation.system}, Severity: ${violation.severity}`));
663
+ });
664
+
665
+ if (options.verbose) {
666
+ console.log(chalk.yellow('\nDetailed Impact:'));
667
+ console.log(chalk.yellow(` Affected Nodes: ${evaluation.details.affectedNodes}`));
668
+ console.log(chalk.yellow(` Blast Radius Impact: ${evaluation.details.blastRadiusImpact}`));
669
+ console.log(chalk.yellow(` Authority Core Changes: ${evaluation.details.authorityCoreChanges ? 'YES' : 'NO'}`));
670
+ }
671
+
672
+ // Show override instructions if event was recorded
673
+ if (eventId) {
674
+ console.log(chalk.red('\n[!] AUTHORITATIVE GATE BLOCKED THIS CHANGE'));
675
+ console.log(chalk.yellow('To override with permanent record, run:'));
676
+ console.log(chalk.cyan(process.env.BLOCKED_OVERRIDE_COMMAND_FORMAT || ('arcvision override --event-id ' + eventId)));
677
+ console.log(chalk.cyan(process.env.BLOCKED_OVERRIDE_REASON_PLACEHOLDER || ' --reason "<detailed explanation>"'));
678
+ console.log(chalk.cyan(process.env.BLOCKED_OVERRIDE_OWNER_PLACEHOLDER || ' --owner "<your-identifier>"'));
679
+ console.log(chalk.red(process.env.BLOCKED_OVERRIDE_WARNING || '\n[WARNING] Override creates permanent architectural scar'));
680
+ }
681
+
682
+ console.log(chalk.red('\n🚨 ACTION REQUIRED: Fix violations before merging'));
683
+ const exitCode = options.failOnBlocked ? 1 : 0;
684
+ process.exit(exitCode);
685
+ } else if (evaluation.decision === 'RISKY') {
686
+ console.log(chalk.yellow('⚠️ CHANGE IS RISKY - Warnings detected'));
687
+ console.log(chalk.yellow(`⚠️ ${evaluation.violations.length} warning(s) issued:`));
688
+ evaluation.violations.forEach((violation, index) => {
689
+ console.log(chalk.yellow(` ${index + 1}. ${violation.description}`));
690
+ console.log(chalk.yellow(` System: ${violation.system}, Severity: ${violation.severity}`));
691
+ });
692
+
693
+ if (options.verbose) {
694
+ console.log(chalk.yellow('\nDetailed Impact:'));
695
+ console.log(chalk.yellow(` Affected Nodes: ${evaluation.details.affectedNodes}`));
696
+ console.log(chalk.yellow(` Blast Radius Impact: ${evaluation.details.blastRadiusImpact}`));
697
+ console.log(chalk.yellow(` Authority Core Changes: ${evaluation.details.authorityCoreChanges ? 'YES' : 'NO'}`));
698
+ }
699
+
700
+ console.log(chalk.yellow('\n⚠️ REVIEW RECOMMENDED: Check warnings before merging'));
701
+ process.exit(0); // Exit with success code but warn user
702
+ } else if (evaluation.decision === 'ERROR') {
703
+ console.log(chalk.red('💥 EVALUATION ERROR - Could not complete evaluation'));
704
+ console.log(chalk.red(`💥 ${evaluation.reasons.join('\n')}`));
705
+ process.exit(1);
706
+ } else {
707
+ console.log(chalk.green('✅ CHANGE ALLOWED - No critical violations detected'));
708
+ console.log(chalk.green(`✅ ${evaluation.reasons.join(', ')}`));
709
+
710
+ if (options.verbose) {
711
+ console.log(chalk.green('\nImpact Summary:'));
712
+ console.log(chalk.green(` Affected Nodes: ${evaluation.details.affectedNodes}`));
713
+ console.log(chalk.green(` Blast Radius Impact: ${evaluation.details.blastRadiusImpact}`));
714
+ console.log(chalk.green(` Authority Core Changes: ${evaluation.details.authorityCoreChanges ? 'YES' : 'NO'}`));
715
+ }
716
+
717
+ console.log(chalk.green('\n✅ Ready to merge'));
718
+ process.exit(0);
719
+ }
720
+
721
+ } catch (error) {
722
+ cliErrorHandler.handleFatalError(error, {
723
+ operation: 'evaluate',
724
+ directory: targetDir
725
+ });
726
+ }
727
+ });
728
+
729
// Add override command for Authoritative Gate
program
  .command('override')
  .description('Override a BLOCKED decision with permanent record (Authoritative Gate)')
  .requiredOption('--event-id <id>', 'Event ID of the BLOCKED decision to override')
  .requiredOption('--reason <reason>', 'Detailed reason for override (minimum 10 characters)')
  .requiredOption('--owner <owner>', 'Person responsible for this override')
  .option('--commit <hash>', 'Commit hash being overridden (optional)')
  .option('--branch <name>', 'Branch name (optional)')
  .action(async (options) => {
    try {
      console.log(chalk.blue('🔐 Processing Authoritative Gate Override'));
      console.log(chalk.blue('Creating permanent architectural scar...\n'));

      // Delegate validation and ledger writing to the override handler.
      const result = await overrideHandler.processOverride({
        eventId: options.eventId,
        reason: options.reason,
        owner: options.owner,
        commit: options.commit || 'HEAD',
        branch: options.branch || 'unknown'
      });

      if (result.success) {
        console.log(chalk.green('\n✅ OVERRIDE SUCCESSFUL'));
        console.log(chalk.green('='.repeat(40)));
        console.log(chalk.green(`Override ID: ${result.overrideEventId}`));
        console.log(chalk.green(`Original Event: ${result.blockedEventId}`));
        console.log(chalk.green(`Reason: ${result.reason}`));
        console.log(chalk.green(`Owner: ${result.owner}`));
        console.log(chalk.red(process.env.OVERRIDE_PERMANENT_RECORD_MESSAGE || '\n[PERMANENT RECORD] This override is now in the architectural ledger'));
        console.log(chalk.yellow(process.env.OVERRIDE_FUTURE_SCRUTINY_MESSAGE || 'Future violations will face increased scrutiny'));
        process.exit(0);
      }

      // BUG FIX: previously a handler that resolved with success === false
      // fell through here silently — no message and an implicit exit code 0,
      // so a rejected override looked successful to callers/CI. Route it
      // into the shared failure path below instead.
      throw new Error(result.message || result.error || 'Override was rejected by the handler');
    } catch (error) {
      console.error(chalk.red(process.env.OVERRIDE_FAILURE_HEADER || '\n❌ OVERRIDE FAILED'));
      console.error(chalk.red(`${process.env.OVERRIDE_FAILURE_ERROR_PREFIX || 'Error:'} ${error.message}`));
      console.error(chalk.yellow(process.env.OVERRIDE_FAILURE_REQUIREMENTS_HEADER || '\nRequirements:'));
      console.error(chalk.yellow(process.env.OVERRIDE_FAILURE_BLOCKED_REQUIREMENT || '• Event must be BLOCKED'));
      console.error(chalk.yellow(process.env.OVERRIDE_FAILURE_REASON_REQUIREMENT || '• Reason must be detailed (>10 chars)'));
      console.error(chalk.yellow(process.env.OVERRIDE_FAILURE_OWNER_REQUIREMENT || '• Owner must be specified'));
      console.error(chalk.yellow(process.env.OVERRIDE_FAILURE_DUPLICATE_REQUIREMENT || '• Event must not already be overridden'));
      process.exit(1);
    }
  });
773
+
774
// Add ledger inspection command
program
  .command('ledger')
  .description('Inspect authority ledger records')
  .option('--stats', 'Show ledger statistics')
  .option('--recent-blocks', 'Show recent unresolved BLOCKED events')
  .option('--export <format>', 'Export ledger (json|csv)')
  .action(async (opts) => {
    try {
      // --stats: aggregate counters from the ledger.
      if (opts.stats) {
        const summary = authorityLedger.getStats();
        console.log(chalk.blue('📊 Authority Ledger Statistics'));
        console.log(chalk.blue('=============================='));
        console.log(chalk.green(`Total Events: ${summary.total_events}`));
        console.log(chalk.red(`Blocked Decisions: ${summary.blocked_decisions}`));
        console.log(chalk.yellow(`Overridden Decisions: ${summary.overridden_decisions}`));
        console.log(chalk.gray(`Last Event: ${summary.last_event || 'None'}`));
        process.exit(0);
      }

      // --recent-blocks: list up to 10 BLOCKED events without an override.
      if (opts.recentBlocks) {
        const unresolved = authorityLedger.getRecentUnresolvedBlocks(10);
        console.log(chalk.blue('🚨 Recent Unresolved BLOCKED Events'));
        console.log(chalk.blue('====================================='));

        if (unresolved.length === 0) {
          console.log(chalk.green('No unresolved blocked events found'));
        } else {
          for (const [idx, entry] of unresolved.entries()) {
            console.log(chalk.red(`${idx + 1}. Event: ${entry.event_id}`));
            console.log(chalk.red(`   Commit: ${entry.commit}`));
            console.log(chalk.red(`   Timestamp: ${entry.timestamp}`));
            console.log(chalk.red(`   Violations: ${entry.violations?.length || 0}`));
            console.log('');
          }
        }
        process.exit(0);
      }

      // --export <format>: dump the raw ledger to stdout.
      if (opts.export) {
        console.log(authorityLedger.exportLedger(opts.export));
        process.exit(0);
      }

      // No flag given: point the user at the ledger file and available flags.
      console.log(chalk.blue('Authority Ledger Location: ./architecture.authority.ledger.json'));
      console.log(chalk.yellow('Use --stats, --recent-blocks, or --export for detailed information'));
      process.exit(0);

    } catch (error) {
      console.error(chalk.red('❌ Ledger command failed:'), error.message);
      process.exit(1);
    }
  });
829
+
830
+ program.parse();