arcvision 0.2.21 → 0.2.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/bin/arcvision.js +1 -1
  2. package/package.json +10 -3
  3. package/src/core/change-evaluator.js +38 -9
  4. package/src/index.js +208 -97
  5. package/.arcvision/logs/errors.log +0 -7
  6. package/arcvision_context/architecture.authority.ledger.json +0 -26
  7. package/dist/index.js +0 -68468
  8. package/schema/arcvision_context_schema_v1.json +0 -356
  9. package/test-block-functionality.js +0 -40
  10. package/test-dev-project/.arcvision/invariants.json +0 -19
  11. package/test-dev-project/arcvision_context/README.md +0 -93
  12. package/test-dev-project/arcvision_context/architecture.authority.ledger.json +0 -45
  13. package/test-dev-project/arcvision_context/arcvision.context.json +0 -1023
  14. package/test-dev-project/src/core/data-service.js +0 -0
  15. package/test-dev-project/src/ui/user-profile.js +0 -0
  16. package/test-dev-project/src/utils/helpers.js +0 -0
  17. package/test_repos/allowed-clean-architecture/.arcvision/invariants.json +0 -57
  18. package/test_repos/allowed-clean-architecture/adapters/controllers/UserController.js +0 -95
  19. package/test_repos/allowed-clean-architecture/adapters/http/HttpServer.js +0 -78
  20. package/test_repos/allowed-clean-architecture/application/dtos/CreateUserRequest.js +0 -37
  21. package/test_repos/allowed-clean-architecture/application/services/UserService.js +0 -61
  22. package/test_repos/allowed-clean-architecture/arcvision_context/README.md +0 -93
  23. package/test_repos/allowed-clean-architecture/arcvision_context/arcvision.context.json +0 -2796
  24. package/test_repos/allowed-clean-architecture/domain/interfaces/UserRepository.js +0 -25
  25. package/test_repos/allowed-clean-architecture/domain/models/User.js +0 -39
  26. package/test_repos/allowed-clean-architecture/index.js +0 -45
  27. package/test_repos/allowed-clean-architecture/infrastructure/database/DatabaseConnection.js +0 -56
  28. package/test_repos/allowed-clean-architecture/infrastructure/repositories/InMemoryUserRepository.js +0 -61
  29. package/test_repos/allowed-clean-architecture/package.json +0 -15
  30. package/test_repos/blocked-legacy-monolith/.arcvision/invariants.json +0 -78
  31. package/test_repos/blocked-legacy-monolith/arcvision_context/README.md +0 -93
  32. package/test_repos/blocked-legacy-monolith/arcvision_context/arcvision.context.json +0 -2882
  33. package/test_repos/blocked-legacy-monolith/database/dbConnection.js +0 -35
  34. package/test_repos/blocked-legacy-monolith/index.js +0 -38
  35. package/test_repos/blocked-legacy-monolith/modules/emailService.js +0 -31
  36. package/test_repos/blocked-legacy-monolith/modules/paymentProcessor.js +0 -37
  37. package/test_repos/blocked-legacy-monolith/package.json +0 -15
  38. package/test_repos/blocked-legacy-monolith/shared/utils.js +0 -19
  39. package/test_repos/blocked-legacy-monolith/utils/helpers.js +0 -23
  40. package/test_repos/risky-microservices-concerns/.arcvision/invariants.json +0 -69
  41. package/test_repos/risky-microservices-concerns/arcvision_context/README.md +0 -93
  42. package/test_repos/risky-microservices-concerns/arcvision_context/arcvision.context.json +0 -3070
  43. package/test_repos/risky-microservices-concerns/common/utils.js +0 -77
  44. package/test_repos/risky-microservices-concerns/gateways/apiGateway.js +0 -84
  45. package/test_repos/risky-microservices-concerns/index.js +0 -20
  46. package/test_repos/risky-microservices-concerns/libs/deprecatedHelper.js +0 -36
  47. package/test_repos/risky-microservices-concerns/package.json +0 -15
  48. package/test_repos/risky-microservices-concerns/services/orderService.js +0 -42
  49. package/test_repos/risky-microservices-concerns/services/userService.js +0 -48
package/src/index.js CHANGED
@@ -99,7 +99,7 @@ function saveToken(token) {
99
99
  if (!token || typeof token !== 'string' || token.length < 10) {
100
100
  throw new Error('Invalid token format: Token must be a string with at least 10 characters');
101
101
  }
102
-
102
+
103
103
  fs.writeFileSync(CONFIG_FILE, JSON.stringify({ token }));
104
104
  console.log(chalk.green('✅ Token saved successfully!'));
105
105
  } catch (error) {
@@ -125,7 +125,7 @@ function getToken() {
125
125
  }
126
126
 
127
127
  // Function to upload JSON to database via Token
128
- async function uploadToDatabase(jsonData) {
128
+ async function uploadToDatabase(jsonData, contextDir) {
129
129
  const token = getToken();
130
130
  if (!token) {
131
131
  console.log(chalk.red('❌ No upload token found.'));
@@ -141,13 +141,45 @@ async function uploadToDatabase(jsonData) {
141
141
  });
142
142
 
143
143
  console.log(chalk.blue(`Starting upload to ${API_URL}/api/upload...`));
144
-
144
+
145
+ // Collect all JSON files from arcvision_context directory
146
+ console.log(chalk.blue('Collecting all files from arcvision_context directory...'));
147
+ const contextFiles = {};
148
+
149
+ if (contextDir && fs.existsSync(contextDir)) {
150
+ const files = fs.readdirSync(contextDir)
151
+ .filter(file => file.endsWith('.json'));
152
+
153
+ files.forEach(file => {
154
+ try {
155
+ const filePath = path.join(contextDir, file);
156
+ const fileContent = fs.readFileSync(filePath, 'utf8');
157
+ contextFiles[file] = JSON.parse(fileContent);
158
+ console.log(chalk.green(`✅ Collected: ${file}`));
159
+ } catch (fileError) {
160
+ console.warn(chalk.yellow(`⚠️ Skipping invalid JSON file: ${file}`));
161
+ }
162
+ });
163
+
164
+ console.log(chalk.blue(`Collected ${Object.keys(contextFiles).length} context files`));
165
+ } else {
166
+ console.log(chalk.yellow('No arcvision_context directory found, uploading main context only'));
167
+ }
168
+
169
+ // Package all data together
170
+ const uploadPayload = {
171
+ graph: jsonData,
172
+ contextFiles: contextFiles,
173
+ timestamp: new Date().toISOString(),
174
+ uploadType: 'full_context_package'
175
+ };
176
+
145
177
  // Check the size of the data and decide on strategy
146
- const originalSize = Buffer.byteLength(JSON.stringify(jsonData));
178
+ const originalSize = Buffer.byteLength(JSON.stringify(uploadPayload));
147
179
  const sizeInMB = Math.round(originalSize / (1024 * 1024));
148
-
149
- console.log(chalk.yellow(`Data size: ~${sizeInMB} MB`));
150
-
180
+
181
+ console.log(chalk.yellow(`Total package size: ~${sizeInMB} MB`));
182
+
151
183
  // Determine if we need to use compression or chunked upload
152
184
  const needsChunkedUpload = sizeInMB > 3; // Use chunked upload for data > 3MB
153
185
  const needsCompression = CompressionUtil.needsCompression(jsonData, 1024 * 1024 * 2); // Compress if > 2MB
@@ -156,31 +188,31 @@ async function uploadToDatabase(jsonData) {
156
188
  console.log(chalk.yellow('Compressing data to reduce payload size...'));
157
189
  const compressionRatio = await CompressionUtil.getCompressionRatio(jsonData);
158
190
  console.log(chalk.yellow(`Compression ratio: ${compressionRatio.ratio.toFixed(2)}x (${compressionRatio.percentage}% reduction)`));
159
-
191
+
160
192
  const compressedData = await CompressionUtil.compress(jsonData);
161
-
193
+
162
194
  // Check if compressed data still needs chunked upload
163
195
  const compressedSize = Buffer.byteLength(compressedData);
164
196
  const compressedSizeInMB = Math.round(compressedSize / (1024 * 1024));
165
-
197
+
166
198
  console.log(chalk.yellow(`Compressed size: ~${compressedSizeInMB} MB`));
167
-
199
+
168
200
  if (compressedSizeInMB > 3 || needsChunkedUpload) {
169
201
  // Use chunked upload for compressed data
170
202
  console.log(chalk.blue('Using chunked upload for large compressed data...'));
171
-
203
+
172
204
  const chunkedUploader = new ChunkedUploader();
173
205
  const retryHandler = new RetryHandler(2, 2000, 2); // 2 retries, 2s base delay, 2x multiplier
174
-
206
+
175
207
  // Execute with retry mechanism
176
208
  const result = await retryHandler.executeWithRetry(async () => {
177
209
  return await chunkedUploader.uploadInChunks(
178
- { graph: compressedData, compressed: true },
210
+ { graph: compressedData, contextFiles: contextFiles, compressed: true, timestamp: new Date().toISOString() },
179
211
  token,
180
212
  API_URL
181
213
  );
182
214
  }, 'chunked upload');
183
-
215
+
184
216
  if (result.success) {
185
217
  console.log(chalk.green('✅ Graph uploaded successfully using chunked upload!'));
186
218
  } else {
@@ -192,27 +224,27 @@ async function uploadToDatabase(jsonData) {
192
224
  } else {
193
225
  // Send compressed data in single request
194
226
  console.log(chalk.blue('Sending compressed data in single request...'));
195
-
227
+
196
228
  const retryHandler = new RetryHandler(3, 1000, 2); // 3 retries, 1s base delay, 2x multiplier
197
-
229
+
198
230
  const result = await retryHandler.executeWithRetry(async () => {
199
231
  const controller = new AbortController();
200
-
232
+
201
233
  // Set up progress indicators
202
234
  const progress30s = setTimeout(() => {
203
235
  console.log(chalk.yellow('Upload taking longer than expected, please wait while the process continues...'));
204
236
  }, 30000); // 30 seconds
205
-
237
+
206
238
  const progress60s = setTimeout(() => {
207
239
  console.log(chalk.yellow('File size is large and may take additional time, process still working...'));
208
240
  }, 60000); // 60 seconds
209
-
241
+
210
242
  const timeoutId = setTimeout(() => {
211
243
  controller.abort();
212
244
  clearTimeout(progress30s);
213
245
  clearTimeout(progress60s);
214
246
  }, 180000); // 180 second timeout for larger files
215
-
247
+
216
248
  const response = await fetch(`${API_URL}/api/upload`, {
217
249
  method: 'POST',
218
250
  headers: {
@@ -221,36 +253,65 @@ async function uploadToDatabase(jsonData) {
221
253
  },
222
254
  body: JSON.stringify({
223
255
  graph: compressedData,
224
- compressed: true
256
+ contextFiles: contextFiles,
257
+ compressed: true,
258
+ timestamp: new Date().toISOString()
225
259
  }),
226
260
  signal: controller.signal
227
261
  });
228
-
262
+
229
263
  // Clear all timeouts
230
264
  clearTimeout(timeoutId);
231
265
  clearTimeout(progress30s);
232
266
  clearTimeout(progress60s);
233
-
267
+
234
268
  if (response.status === 401) {
235
269
  throw new Error('Invalid or revoked token');
236
270
  }
237
-
271
+
238
272
  if (response.status === 404) {
239
273
  throw new Error('Project not found');
240
274
  }
241
-
275
+
242
276
  if (response.status === 429) {
243
277
  throw new Error('Rate limit exceeded');
244
278
  }
245
-
279
+
246
280
  if (!response.ok) {
247
281
  const text = await response.text().catch(() => 'Unknown error');
282
+
283
+ // Handle database schema error specifically
284
+ if (response.status === 500 && text.includes('context_files') && text.includes('column')) {
285
+ console.log(chalk.yellow('Database schema error detected, attempting fallback upload without context files...'));
286
+
287
+ // Retry with minimal payload
288
+ const fallbackResponse = await fetch(`${API_URL}/api/upload`, {
289
+ method: 'POST',
290
+ headers: {
291
+ 'Content-Type': 'application/json',
292
+ 'Authorization': `Bearer ${token}`
293
+ },
294
+ body: JSON.stringify({
295
+ graph: jsonData,
296
+ timestamp: new Date().toISOString()
297
+ })
298
+ });
299
+
300
+ if (fallbackResponse.ok) {
301
+ console.log(chalk.green('✅ Fallback upload successful! (context files not stored due to database schema)'));
302
+ return await fallbackResponse.json();
303
+ } else {
304
+ const fallbackText = await fallbackResponse.text().catch(() => 'Unknown error');
305
+ throw new Error(`${fallbackResponse.status} ${fallbackResponse.statusText}: ${fallbackText}`);
306
+ }
307
+ }
308
+
248
309
  throw new Error(`${response.status} ${response.statusText}: ${text}`);
249
310
  }
250
-
311
+
251
312
  return await response.json();
252
313
  }, 'compressed upload');
253
-
314
+
254
315
  if (result.success && result.result.success) {
255
316
  console.log(chalk.green('✅ Compressed graph uploaded successfully!'));
256
317
  } else {
@@ -264,19 +325,19 @@ async function uploadToDatabase(jsonData) {
264
325
  // Original upload method for smaller data
265
326
  if (needsChunkedUpload) {
266
327
  console.log(chalk.blue('Using chunked upload for large data...'));
267
-
328
+
268
329
  const chunkedUploader = new ChunkedUploader();
269
330
  const retryHandler = new RetryHandler(2, 2000, 2); // 2 retries, 2s base delay, 2x multiplier
270
-
331
+
271
332
  // Execute with retry mechanism
272
333
  const result = await retryHandler.executeWithRetry(async () => {
273
334
  return await chunkedUploader.uploadInChunks(
274
- { graph: jsonData },
335
+ { graph: jsonData, contextFiles: contextFiles, timestamp: new Date().toISOString() },
275
336
  token,
276
337
  API_URL
277
338
  );
278
339
  }, 'chunked upload');
279
-
340
+
280
341
  if (result.success) {
281
342
  console.log(chalk.green('✅ Graph uploaded successfully using chunked upload!'));
282
343
  } else {
@@ -288,30 +349,30 @@ async function uploadToDatabase(jsonData) {
288
349
  } else {
289
350
  // Standard upload for small data
290
351
  console.log(chalk.blue('Sending data in single request...'));
291
-
352
+
292
353
  const retryHandler = new RetryHandler(3, 1000, 2); // 3 retries, 1s base delay, 2x multiplier
293
-
354
+
294
355
  const result = await retryHandler.executeWithRetry(async () => {
295
356
  const controller = new AbortController();
296
-
357
+
297
358
  // Set up progress indicators
298
359
  const progress30s = setTimeout(() => {
299
360
  console.log(chalk.yellow('Upload taking longer than expected, please wait while the process continues...'));
300
361
  }, 30000); // 30 seconds
301
-
362
+
302
363
  const progress60s = setTimeout(() => {
303
364
  console.log(chalk.yellow('File size is large and may take additional time, process still working...'));
304
365
  }, 60000); // 60 seconds
305
-
366
+
306
367
  const timeoutId = setTimeout(() => {
307
368
  controller.abort();
308
369
  clearTimeout(progress30s);
309
370
  clearTimeout(progress60s);
310
- console.error(chalk.red('\n❌ Upload timeout after 120 seconds'));
371
+ console.error(chalk.red('\n❌ Upload timeout after 120 seconds'));
311
372
  console.error(chalk.red('This may indicate network issues or server problems'));
312
373
  console.error(chalk.yellow('Try again later or check your internet connection'));
313
374
  }, 120000); // 120 second timeout
314
-
375
+
315
376
  const response = await fetch(`${API_URL}/api/upload`, {
316
377
  method: 'POST',
317
378
  headers: {
@@ -319,7 +380,9 @@ async function uploadToDatabase(jsonData) {
319
380
  'Authorization': `Bearer ${token}`
320
381
  },
321
382
  body: JSON.stringify({
322
- graph: jsonData
383
+ graph: jsonData,
384
+ contextFiles: contextFiles,
385
+ timestamp: new Date().toISOString()
323
386
  }),
324
387
  signal: controller.signal
325
388
  }).catch(error => {
@@ -331,32 +394,59 @@ async function uploadToDatabase(jsonData) {
331
394
  }
332
395
  throw error;
333
396
  });
334
-
397
+
335
398
  // Clear all timeouts
336
399
  clearTimeout(timeoutId);
337
400
  clearTimeout(progress30s);
338
401
  clearTimeout(progress60s);
339
-
402
+
340
403
  if (response.status === 401) {
341
404
  throw new Error('Invalid or revoked token');
342
405
  }
343
-
406
+
344
407
  if (response.status === 404) {
345
408
  throw new Error('Project not found');
346
409
  }
347
-
410
+
348
411
  if (response.status === 429) {
349
412
  throw new Error('Rate limit exceeded');
350
413
  }
351
-
414
+
352
415
  if (!response.ok) {
353
416
  const text = await response.text().catch(() => 'Unknown error');
417
+
418
+ // Handle database schema error specifically
419
+ if (response.status === 500 && text.includes('context_files') && text.includes('column')) {
420
+ console.log(chalk.yellow('Database schema error detected, attempting fallback upload without context files...'));
421
+
422
+ // Retry with minimal payload
423
+ const fallbackResponse = await fetch(`${API_URL}/api/upload`, {
424
+ method: 'POST',
425
+ headers: {
426
+ 'Content-Type': 'application/json',
427
+ 'Authorization': `Bearer ${token}`
428
+ },
429
+ body: JSON.stringify({
430
+ graph: jsonData,
431
+ timestamp: new Date().toISOString()
432
+ })
433
+ });
434
+
435
+ if (fallbackResponse.ok) {
436
+ console.log(chalk.green('✅ Fallback upload successful! (context files not stored due to database schema)'));
437
+ return await fallbackResponse.json();
438
+ } else {
439
+ const fallbackText = await fallbackResponse.text().catch(() => 'Unknown error');
440
+ throw new Error(`${fallbackResponse.status} ${fallbackResponse.statusText}: ${fallbackText}`);
441
+ }
442
+ }
443
+
354
444
  throw new Error(`${response.status} ${response.statusText}: ${text}`);
355
445
  }
356
-
446
+
357
447
  return await response.json();
358
448
  }, 'standard upload');
359
-
449
+
360
450
  if (result.success && result.result.success) {
361
451
  console.log(chalk.green('✅ Graph uploaded successfully!'));
362
452
  } else {
@@ -409,7 +499,7 @@ program
409
499
  .option('-u, --upload', 'Upload to database')
410
500
  .action(async (directory, options) => {
411
501
  const targetDir = path.resolve(directory);
412
-
502
+
413
503
  try {
414
504
  // Pre-flight validation
415
505
  await cliValidator.preFlightValidation('scan', {
@@ -419,10 +509,10 @@ program
419
509
  // Initialize Artifact Manager
420
510
  const { ArtifactManager } = require('./core/artifact-manager');
421
511
  const artifactManager = new ArtifactManager(targetDir);
422
-
512
+
423
513
  // Ensure all required artifacts exist
424
514
  artifactManager.ensureArtifacts();
425
-
515
+
426
516
  console.log(chalk.blue(`Scanning directory: ${targetDir}`));
427
517
 
428
518
  const map = await scanner.scan(targetDir);
@@ -467,11 +557,11 @@ program
467
557
  fs.mkdirSync(arcvisionDir, { recursive: true });
468
558
  console.log(chalk.green(`📁 Created arcvision_context directory: ${arcvisionDir}`));
469
559
  }
470
-
560
+
471
561
  // Upload to database if requested
472
562
  if (options.upload) {
473
- await uploadToDatabase(map);
474
-
563
+ await uploadToDatabase(map, arcvisionDir);
564
+
475
565
  // Generate README for system context when uploading
476
566
  const { generateReadme } = require('./core/readme-generator');
477
567
  generateReadme(arcvisionDir, version, blastRadiusAnalysis);
@@ -481,11 +571,11 @@ program
481
571
  const outputFileName = path.join(arcvisionDir, 'arcvision.context.json');
482
572
  fs.writeFileSync(outputFileName, JSON.stringify(map, null, 2));
483
573
  console.log(chalk.green(`✅ Structural context saved to ${outputFileName}`));
484
-
574
+
485
575
  // Generate README for system context in dedicated directory
486
576
  const { generateReadme } = require('./core/readme-generator');
487
577
  generateReadme(arcvisionDir, version, blastRadiusAnalysis);
488
-
578
+
489
579
  console.log(chalk.dim('\nUse --upload to send to dashboard.'));
490
580
  }
491
581
  } catch (error) {
@@ -513,17 +603,17 @@ program
513
603
  console.log(chalk.blue(`Comparing context artifacts:`));
514
604
  console.log(chalk.blue(` Old: ${oldFile}`));
515
605
  console.log(chalk.blue(` New: ${newFile}`));
516
-
606
+
517
607
  // Read both files
518
608
  const oldContext = JSON.parse(fs.readFileSync(oldFile, 'utf8'));
519
609
  const newContext = JSON.parse(fs.readFileSync(newFile, 'utf8'));
520
-
610
+
521
611
  // Generate diff summary
522
612
  const diffResult = generateDiffSummary(oldContext, newContext);
523
-
613
+
524
614
  // Add diff summary to the new context
525
615
  newContext.diff_summary = diffResult.diff_summary;
526
-
616
+
527
617
  // Save updated context with diff summary in arcvision_context directory
528
618
  const arcvisionDir = path.join(path.dirname(newFile), 'arcvision_context');
529
619
  if (!fs.existsSync(arcvisionDir)) {
@@ -531,7 +621,7 @@ program
531
621
  }
532
622
  const outputFileName = path.join(arcvisionDir, 'arcvision.context.diff.json');
533
623
  fs.writeFileSync(outputFileName, JSON.stringify(newContext, null, 2));
534
-
624
+
535
625
  console.log(chalk.green('✅ Structural diff completed!'));
536
626
  console.log(chalk.green(`✅ Diff summary saved to ${outputFileName}`));
537
627
  console.log(chalk.yellow('\nDiff Summary:'));
@@ -541,7 +631,7 @@ program
541
631
  console.log(chalk.yellow(` Edges Removed: ${diffResult.diff_summary.edges_removed}`));
542
632
  console.log(chalk.yellow(` Roles Changed: ${diffResult.diff_summary.roles_changed}`));
543
633
  console.log(chalk.yellow(` Blast Radius Changes: ${diffResult.diff_summary.blast_radius_changes}`));
544
-
634
+
545
635
  } catch (error) {
546
636
  cliErrorHandler.handleFatalError(error, {
547
637
  operation: 'diff',
@@ -564,7 +654,7 @@ program
564
654
  .option('--fail-on-blocked', 'Exit with error code when decision is BLOCKED (default: false)')
565
655
  .action(async (directory, options) => {
566
656
  const targetDir = path.resolve(directory);
567
-
657
+
568
658
  try {
569
659
  // Pre-flight validation
570
660
  await cliValidator.preFlightValidation('evaluate', {
@@ -572,20 +662,20 @@ program
572
662
  contextFile: options.contextFile,
573
663
  invariantsFile: options.invariantsFile
574
664
  });
575
-
665
+
576
666
  // Initialize Artifact Manager
577
667
  const { ArtifactManager } = require('./core/artifact-manager');
578
668
  const artifactManager = new ArtifactManager(targetDir);
579
-
669
+
580
670
  // Ensure all required artifacts exist
581
671
  artifactManager.ensureArtifacts();
582
-
672
+
583
673
  // Initialize Authority Ledger with target directory
584
674
  const authorityLedger = new AuthorityLedger(path.join(targetDir, 'arcvision_context', 'architecture.authority.ledger.json'));
585
-
675
+
586
676
  console.log(chalk.blue('🔒 Authoritative Change Impact Gate (ACIG)'));
587
677
  console.log(chalk.blue('Evaluating changes against canonical system invariants...\n'));
588
-
678
+
589
679
  // Load current context from arcvision_context directory
590
680
  const contextFile = options.contextFile || path.join(targetDir, 'arcvision_context', 'arcvision.context.json');
591
681
  if (!fs.existsSync(contextFile)) {
@@ -593,7 +683,7 @@ program
593
683
  console.error(chalk.yellow('Run \`arcvision scan\` first to generate context file'));
594
684
  process.exit(1);
595
685
  }
596
-
686
+
597
687
  let context;
598
688
  try {
599
689
  const contextContent = fs.readFileSync(contextFile, 'utf8');
@@ -602,31 +692,34 @@ program
602
692
  console.error(chalk.red(`❌ Failed to parse context file: ${parseError.message}`));
603
693
  process.exit(1);
604
694
  }
605
-
695
+
606
696
  console.log(chalk.green(`✅ Loaded context from: ${contextFile}`));
607
-
697
+
608
698
  // Load invariants from both sources for backward compatibility
609
699
  const invariantsFile = options.invariantsFile || path.join(targetDir, '.arcvision', 'invariants.json');
610
700
  let loadSuccess = false;
611
-
701
+ let loadedInvariants = [];
702
+
612
703
  // First, try to load auto-detected invariants from the main context file
613
704
  const contextFilePath = options.contextFile || path.join(targetDir, 'arcvision_context', 'arcvision.context.json');
614
705
  if (fs.existsSync(contextFilePath)) {
615
706
  try {
616
707
  const contextContent = fs.readFileSync(contextFilePath, 'utf8');
617
708
  const contextData = JSON.parse(contextContent);
618
-
709
+
619
710
  // Load auto-detected invariants from the context file if they exist
620
711
  if (contextData.invariants && Array.isArray(contextData.invariants) && contextData.invariants.length > 0) {
621
712
  console.log(chalk.blue(`Loading ${contextData.invariants.length} auto-detected invariants from context file...`));
622
-
713
+
623
714
  // Load each invariant individually to the registry
624
715
  for (const invariant of contextData.invariants) {
625
- invariantRegistry.register(invariant);
716
+ if (invariantRegistry.register(invariant)) {
717
+ loadedInvariants.push(invariant);
718
+ }
626
719
  }
627
-
628
- console.log(chalk.green(`✅ Loaded ${contextData.invariants.length} auto-detected invariants from context file`));
629
- loadSuccess = true; // Mark as successful if we loaded from context
720
+
721
+ console.log(chalk.green(`✅ Loaded ${loadedInvariants.length} auto-detected invariants from context file`));
722
+ loadSuccess = loadedInvariants.length > 0;
630
723
  } else {
631
724
  console.log(chalk.yellow('No auto-detected invariants found in context file'));
632
725
  }
@@ -634,13 +727,18 @@ program
634
727
  console.log(chalk.yellow(`⚠️ Could not load invariants from context file: ${contextError.message}`));
635
728
  }
636
729
  }
637
-
730
+
638
731
  // Then, try to load project-specific invariants from the separate file (for backward compatibility)
639
732
  if (fs.existsSync(invariantsFile)) {
733
+ console.log(chalk.blue(`Attempting to load project-specific invariants from: ${invariantsFile}`));
640
734
  // We'll load the separate file to supplement the context file invariants
641
735
  const fileLoadSuccess = invariantRegistry.loadFromFile(invariantsFile);
642
736
  if (fileLoadSuccess) {
643
- console.log(chalk.green(`✅ Loaded project-specific invariants from: ${invariantsFile}`));
737
+ const fileInvariants = invariantRegistry.getAll().filter(inv =>
738
+ !loadedInvariants.some(loaded => loaded.id === inv.id)
739
+ );
740
+ console.log(chalk.green(`✅ Loaded ${fileInvariants.length} project-specific invariants from: ${invariantsFile}`));
741
+ loadedInvariants = [...loadedInvariants, ...fileInvariants];
644
742
  loadSuccess = true;
645
743
  } else {
646
744
  console.log(chalk.yellow(`⚠️ Failed to load project-specific invariants from: ${invariantsFile}`));
@@ -649,8 +747,18 @@ program
649
747
  console.log(chalk.yellow(`⚠️ No project-specific invariants file found: ${invariantsFile}`));
650
748
  }
651
749
 
750
+ // Debug output to show what invariants were loaded
751
+ console.log(chalk.blue(`📊 Total invariants loaded: ${loadedInvariants.length}`));
752
+ if (loadedInvariants.length > 0) {
753
+ console.log(chalk.blue('📋 Loaded invariants:'));
754
+ loadedInvariants.forEach((inv, index) => {
755
+ console.log(chalk.blue(` ${index + 1}. ${inv.id} (${inv.severity}) - ${inv.description}`));
756
+ });
757
+ }
758
+
652
759
  // If invariants couldn't be loaded or file doesn't exist, use defaults
653
- if (!loadSuccess) {
760
+ if (!loadSuccess || loadedInvariants.length === 0) {
761
+ console.log(chalk.yellow('⚠️ No invariants loaded, using default invariants...'));
654
762
  const defaultInvariants = JSON.parse(process.env.DEFAULT_INVARIANTS_JSON || JSON.stringify([
655
763
  {
656
764
  id: 'generic-architecture-rules',
@@ -662,14 +770,17 @@ program
662
770
  createdAt: new Date().toISOString()
663
771
  }
664
772
  ]));
665
- // Temporarily override the registry for this evaluation
666
- const originalGetAll = invariantRegistry.getAll.bind(invariantRegistry);
667
- invariantRegistry.getAll = () => defaultInvariants;
773
+
774
+ // Clear registry and load defaults
775
+ invariantRegistry.clear();
776
+ defaultInvariants.forEach(inv => invariantRegistry.register(inv));
777
+ loadedInvariants = defaultInvariants;
778
+ console.log(chalk.yellow(`✅ Loaded ${defaultInvariants.length} default invariants`));
668
779
  }
669
780
 
670
781
  // Get list of changed files
671
782
  let changedFiles = [];
672
-
783
+
673
784
  if (options.simulateChanges) {
674
785
  // Use simulated changes if provided
675
786
  changedFiles = options.simulateChanges.split(',').map(f => f.trim()).filter(f => f);
@@ -678,19 +789,19 @@ program
678
789
  // Otherwise, use all files from the context as "changed" to demonstrate the evaluation
679
790
  changedFiles = context.nodes ? context.nodes.map(node => node.path || node.id).filter(Boolean) : [];
680
791
  }
681
-
792
+
682
793
  if (changedFiles.length === 0) {
683
794
  console.log(chalk.yellow('No files found in context to evaluate'));
684
795
  process.exit(0);
685
796
  }
686
797
 
687
- console.log(chalk.blue(`Evaluating ${changedFiles.length} files against ${invariantRegistry.getAll().length} invariants...`));
798
+ console.log(chalk.blue(`Evaluating ${changedFiles.length} files against ${loadedInvariants.length} invariants...`));
688
799
 
689
800
  // Evaluate changes
690
801
  const evaluation = evaluateChange({
691
802
  changedFiles,
692
803
  dependencyGraph: context,
693
- invariants: invariantRegistry.getAll(),
804
+ invariants: loadedInvariants,
694
805
  context: { blastRadiusAnalysis: analyzeBlastRadius(context) }
695
806
  });
696
807
 
@@ -716,14 +827,14 @@ program
716
827
  console.log(chalk.red(` ${index + 1}. ${violation.description}`));
717
828
  console.log(chalk.red(` System: ${violation.system}, Severity: ${violation.severity}`));
718
829
  });
719
-
830
+
720
831
  if (options.verbose) {
721
832
  console.log(chalk.yellow('\nDetailed Impact:'));
722
833
  console.log(chalk.yellow(` Affected Nodes: ${evaluation.details.affectedNodes}`));
723
834
  console.log(chalk.yellow(` Blast Radius Impact: ${evaluation.details.blastRadiusImpact}`));
724
835
  console.log(chalk.yellow(` Authority Core Changes: ${evaluation.details.authorityCoreChanges ? 'YES' : 'NO'}`));
725
836
  }
726
-
837
+
727
838
  // Show override instructions if event was recorded
728
839
  if (eventId) {
729
840
  console.log(chalk.red('\n[!] AUTHORITATIVE GATE BLOCKED THIS CHANGE'));
@@ -733,7 +844,7 @@ program
733
844
  console.log(chalk.cyan(process.env.BLOCKED_OVERRIDE_OWNER_PLACEHOLDER || ' --owner "<your-identifier>"'));
734
845
  console.log(chalk.red(process.env.BLOCKED_OVERRIDE_WARNING || '\n[WARNING] Override creates permanent architectural scar'));
735
846
  }
736
-
847
+
737
848
  console.log(chalk.red('\n🚨 ACTION REQUIRED: Fix violations before merging'));
738
849
  const exitCode = options.failOnBlocked ? 1 : 0;
739
850
  process.exit(exitCode);
@@ -744,14 +855,14 @@ program
744
855
  console.log(chalk.yellow(` ${index + 1}. ${violation.description}`));
745
856
  console.log(chalk.yellow(` System: ${violation.system}, Severity: ${violation.severity}`));
746
857
  });
747
-
858
+
748
859
  if (options.verbose) {
749
860
  console.log(chalk.yellow('\nDetailed Impact:'));
750
861
  console.log(chalk.yellow(` Affected Nodes: ${evaluation.details.affectedNodes}`));
751
862
  console.log(chalk.yellow(` Blast Radius Impact: ${evaluation.details.blastRadiusImpact}`));
752
863
  console.log(chalk.yellow(` Authority Core Changes: ${evaluation.details.authorityCoreChanges ? 'YES' : 'NO'}`));
753
864
  }
754
-
865
+
755
866
  console.log(chalk.yellow('\n⚠️ REVIEW RECOMMENDED: Check warnings before merging'));
756
867
  process.exit(0); // Exit with success code but warn user
757
868
  } else if (evaluation.decision === 'ERROR') {
@@ -761,14 +872,14 @@ program
761
872
  } else {
762
873
  console.log(chalk.green('✅ CHANGE ALLOWED - No critical violations detected'));
763
874
  console.log(chalk.green(`✅ ${evaluation.reasons.join(', ')}`));
764
-
875
+
765
876
  if (options.verbose) {
766
877
  console.log(chalk.green('\nImpact Summary:'));
767
878
  console.log(chalk.green(` Affected Nodes: ${evaluation.details.affectedNodes}`));
768
879
  console.log(chalk.green(` Blast Radius Impact: ${evaluation.details.blastRadiusImpact}`));
769
880
  console.log(chalk.green(` Authority Core Changes: ${evaluation.details.authorityCoreChanges ? 'YES' : 'NO'}`));
770
881
  }
771
-
882
+
772
883
  console.log(chalk.green('\n✅ Ready to merge'));
773
884
  process.exit(0);
774
885
  }
@@ -837,7 +948,7 @@ program
837
948
  try {
838
949
  // Initialize Authority Ledger with default path
839
950
  const authorityLedger = new AuthorityLedger(path.join(process.cwd(), 'arcvision_context', 'architecture.authority.ledger.json'));
840
-
951
+
841
952
  if (options.stats) {
842
953
  const stats = authorityLedger.getStats();
843
954
  console.log(chalk.blue('📊 Authority Ledger Statistics'));
@@ -853,7 +964,7 @@ program
853
964
  const blocks = authorityLedger.getRecentUnresolvedBlocks(10);
854
965
  console.log(chalk.blue('🚨 Recent Unresolved BLOCKED Events'));
855
966
  console.log(chalk.blue('====================================='));
856
-
967
+
857
968
  if (blocks.length === 0) {
858
969
  console.log(chalk.green('No unresolved blocked events found'));
859
970
  } else {