@soulcraft/brainy 3.37.5 → 3.37.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -146,6 +146,12 @@ export class GcsStorage extends BaseStorage {
146
146
  });
147
147
  // Initialize counts from storage
148
148
  await this.initializeCounts();
149
+ // CRITICAL FIX (v3.37.7): Clear any stale cache entries from previous runs
150
+ // This prevents cache poisoning from causing silent failures on container restart
151
+ prodLog.info('🧹 Clearing cache from previous run to prevent cache poisoning');
152
+ this.nounCacheManager.clear();
153
+ this.verbCacheManager.clear();
154
+ prodLog.info('✅ Cache cleared - starting fresh');
149
155
  this.isInitialized = true;
150
156
  }
151
157
  catch (error) {
@@ -380,12 +386,27 @@ export class GcsStorage extends BaseStorage {
380
386
  */
381
387
  async getNode(id) {
382
388
  await this.ensureInitialized();
383
- // Check cache first
389
+ // Check cache first WITH LOGGING
384
390
  const cached = this.nounCacheManager.get(id);
385
- if (cached) {
391
+ // DIAGNOSTIC LOGGING: Reveal cache poisoning
392
+ prodLog.info(`[getNode] 🔍 Cache check for ${id.substring(0, 8)}...:`, {
393
+ hasCached: cached !== undefined,
394
+ isNull: cached === null,
395
+ isObject: cached !== null && typeof cached === 'object',
396
+ type: typeof cached
397
+ });
398
+ // CRITICAL FIX: Only return cached value if it's valid (not null/undefined)
399
+ if (cached !== undefined && cached !== null) {
400
+ prodLog.info(`[getNode] ✅ Cache HIT - returning cached node for ${id.substring(0, 8)}...`);
386
401
  this.logger.trace(`Cache hit for noun ${id}`);
387
402
  return cached;
388
403
  }
404
+ else if (cached === null) {
405
+ prodLog.warn(`[getNode] ⚠️ Cache contains NULL for ${id.substring(0, 8)}... - ignoring and loading from GCS`);
406
+ }
407
+ else {
408
+ prodLog.info(`[getNode] ❌ Cache MISS - loading from GCS for ${id.substring(0, 8)}...`);
409
+ }
389
410
  // Apply backpressure
390
411
  const requestId = await this.applyBackpressure();
391
412
  try {
@@ -393,12 +414,19 @@ export class GcsStorage extends BaseStorage {
393
414
  // Get the GCS key with UUID-based sharding
394
415
  const key = this.getNounKey(id);
395
416
  // DIAGNOSTIC LOGGING: Show exact path being accessed
396
- this.logger.trace(`Computed GCS key: ${key}`);
417
+ prodLog.info(`[getNode] 🔍 Attempting to load:`);
418
+ prodLog.info(`[getNode] UUID: ${id}`);
419
+ prodLog.info(`[getNode] Path: ${key}`);
420
+ prodLog.info(`[getNode] Bucket: ${this.bucketName}`);
397
421
  // Download from GCS
398
422
  const file = this.bucket.file(key);
423
+ prodLog.info(`[getNode] 📥 Downloading file...`);
399
424
  const [contents] = await file.download();
425
+ prodLog.info(`[getNode] ✅ Download successful: ${contents.length} bytes`);
400
426
  // Parse JSON
427
+ prodLog.info(`[getNode] 🔧 Parsing JSON...`);
401
428
  const data = JSON.parse(contents.toString());
429
+ prodLog.info(`[getNode] ✅ JSON parsed successfully, id: ${data.id}`);
402
430
  // Convert serialized connections back to Map<number, Set<string>>
403
431
  const connections = new Map();
404
432
  for (const [level, nounIds] of Object.entries(data.connections || {})) {
@@ -413,29 +441,44 @@ export class GcsStorage extends BaseStorage {
413
441
  level: data.level || 0
414
442
  // NO metadata field - retrieved separately for scalability
415
443
  };
416
- // Update cache
417
- this.nounCacheManager.set(id, node);
444
+ // CRITICAL FIX: Only cache valid nodes (never cache null)
445
+ if (node && node.id && node.vector && Array.isArray(node.vector)) {
446
+ this.nounCacheManager.set(id, node);
447
+ prodLog.info(`[getNode] 💾 Cached node ${id.substring(0, 8)}... successfully`);
448
+ }
449
+ else {
450
+ prodLog.warn(`[getNode] ⚠️ NOT caching invalid node for ${id.substring(0, 8)}...`);
451
+ }
418
452
  this.logger.trace(`Successfully retrieved node ${id}`);
419
453
  this.releaseBackpressure(true, requestId);
420
454
  return node;
421
455
  }
422
456
  catch (error) {
423
457
  this.releaseBackpressure(false, requestId);
458
+ // DIAGNOSTIC LOGGING: Log EVERY error before any conditional checks
459
+ const key = this.getNounKey(id);
460
+ prodLog.error(`[getNode] ❌ EXCEPTION CAUGHT:`);
461
+ prodLog.error(`[getNode] UUID: ${id}`);
462
+ prodLog.error(`[getNode] Path: ${key}`);
463
+ prodLog.error(`[getNode] Bucket: ${this.bucketName}`);
464
+ prodLog.error(`[getNode] Error type: ${error?.constructor?.name || typeof error}`);
465
+ prodLog.error(`[getNode] Error code: ${JSON.stringify(error?.code)}`);
466
+ prodLog.error(`[getNode] Error message: ${error?.message || String(error)}`);
467
+ prodLog.error(`[getNode] Error object:`, JSON.stringify(error, null, 2));
424
468
  // Check if this is a "not found" error
425
469
  if (error.code === 404) {
426
- // DIAGNOSTIC LOGGING: Upgrade 404 errors to WARN level with full details
427
- const key = this.getNounKey(id);
428
- prodLog.warn(`[getNode] ❌ 404 NOT FOUND: File does not exist at GCS path: ${key}`);
429
- prodLog.warn(`[getNode] UUID: ${id}`);
430
- prodLog.warn(`[getNode] Bucket: ${this.bucketName}`);
431
- prodLog.warn(`[getNode] This suggests a path mismatch or the file was not written correctly`);
470
+ prodLog.warn(`[getNode] Identified as 404 error - returning null WITHOUT caching`);
471
+ // CRITICAL FIX: Do NOT cache null values
432
472
  return null;
433
473
  }
434
474
  // Handle throttling
435
475
  if (this.isThrottlingError(error)) {
476
+ prodLog.warn(`[getNode] Identified as throttling error - rethrowing`);
436
477
  await this.handleThrottling(error);
437
478
  throw error;
438
479
  }
480
+ // All other errors should throw, not return null
481
+ prodLog.error(`[getNode] Unhandled error - rethrowing`);
439
482
  this.logger.error(`Failed to get node ${id}:`, error);
440
483
  throw BrainyError.fromError(error, `getNoun(${id})`);
441
484
  }
@@ -237,6 +237,16 @@ export class S3CompatibleStorage extends BaseStorage {
237
237
  await this.cleanupLegacyIndexFolder();
238
238
  // Initialize counts from storage
239
239
  await this.initializeCounts();
240
+ // CRITICAL FIX (v3.37.7): Clear any stale cache entries from previous runs
241
+ // This prevents cache poisoning from causing silent failures on container restart
242
+ const nodeCacheSize = this.nodeCache?.size || 0;
243
+ if (nodeCacheSize > 0) {
244
+ prodLog.info(`🧹 Clearing ${nodeCacheSize} cached node entries from previous run`);
245
+ this.nodeCache.clear();
246
+ }
247
+ else {
248
+ prodLog.info('🧹 Node cache is empty - starting fresh');
249
+ }
240
250
  this.isInitialized = true;
241
251
  this.logger.info(`Initialized ${this.serviceType} storage with bucket ${this.bucketName}`);
242
252
  }
@@ -808,60 +818,117 @@ export class S3CompatibleStorage extends BaseStorage {
808
818
  */
809
819
  async getNode(id) {
810
820
  await this.ensureInitialized();
821
+ // Check cache first WITH LOGGING
822
+ const cached = this.nodeCache.get(id);
823
+ // DIAGNOSTIC LOGGING: Reveal cache poisoning
824
+ prodLog.info(`[getNode] 🔍 Cache check for ${id.substring(0, 8)}...:`, {
825
+ hasCached: cached !== undefined,
826
+ isNull: cached === null,
827
+ isObject: cached !== null && typeof cached === 'object',
828
+ type: typeof cached
829
+ });
830
+ // CRITICAL FIX: Only return cached value if it's valid (not null/undefined)
831
+ if (cached !== undefined && cached !== null) {
832
+ prodLog.info(`[getNode] ✅ Cache HIT - returning cached node for ${id.substring(0, 8)}...`);
833
+ this.logger.trace(`Cache hit for node ${id}`);
834
+ return cached;
835
+ }
836
+ else if (cached === null) {
837
+ prodLog.warn(`[getNode] ⚠️ Cache contains NULL for ${id.substring(0, 8)}... - ignoring and loading from S3`);
838
+ }
839
+ else {
840
+ prodLog.info(`[getNode] ❌ Cache MISS - loading from S3 for ${id.substring(0, 8)}...`);
841
+ }
811
842
  try {
812
843
  // Import the GetObjectCommand only when needed
813
844
  const { GetObjectCommand } = await import('@aws-sdk/client-s3');
814
845
  // Use getNounKey() to properly handle sharding
815
846
  const key = this.getNounKey(id);
816
- this.logger.trace(`Getting node ${id} from key: ${key}`);
847
+ // DIAGNOSTIC LOGGING: Show exact path being accessed
848
+ prodLog.info(`[getNode] 🔍 Attempting to load:`);
849
+ prodLog.info(`[getNode] UUID: ${id}`);
850
+ prodLog.info(`[getNode] Path: ${key}`);
851
+ prodLog.info(`[getNode] Bucket: ${this.bucketName}`);
817
852
  // Try to get the node from the nouns directory
853
+ prodLog.info(`[getNode] 📥 Downloading file...`);
818
854
  const response = await this.s3Client.send(new GetObjectCommand({
819
855
  Bucket: this.bucketName,
820
856
  Key: key
821
857
  }));
822
858
  // Check if response is null or undefined
823
859
  if (!response || !response.Body) {
824
- this.logger.trace(`No node found for ${id}`);
860
+ prodLog.warn(`[getNode] ❌ Response or Body is null/undefined`);
825
861
  return null;
826
862
  }
827
863
  // Convert the response body to a string
828
864
  const bodyContents = await response.Body.transformToString();
829
- this.logger.trace(`Retrieved node body for ${id}`);
865
+ prodLog.info(`[getNode] ✅ Download successful: ${bodyContents.length} bytes`);
830
866
  // Parse the JSON string
831
- try {
832
- const parsedNode = JSON.parse(bodyContents);
833
- this.logger.trace(`Parsed node data for ${id}`);
834
- // Ensure the parsed node has the expected properties
835
- if (!parsedNode ||
836
- !parsedNode.id ||
837
- !parsedNode.vector ||
838
- !parsedNode.connections) {
839
- this.logger.warn(`Invalid node data for ${id}`);
840
- return null;
841
- }
842
- // Convert serialized connections back to Map<number, Set<string>>
843
- const connections = new Map();
844
- for (const [level, nodeIds] of Object.entries(parsedNode.connections)) {
845
- connections.set(Number(level), new Set(nodeIds));
846
- }
847
- const node = {
848
- id: parsedNode.id,
849
- vector: parsedNode.vector,
850
- connections,
851
- level: parsedNode.level || 0
852
- };
853
- this.logger.trace(`Successfully retrieved node ${id}`);
854
- return node;
855
- }
856
- catch (parseError) {
857
- this.logger.error(`Failed to parse node data for ${id}:`, parseError);
867
+ prodLog.info(`[getNode] 🔧 Parsing JSON...`);
868
+ const parsedNode = JSON.parse(bodyContents);
869
+ prodLog.info(`[getNode] ✅ JSON parsed successfully, id: ${parsedNode.id}`);
870
+ // Ensure the parsed node has the expected properties
871
+ if (!parsedNode ||
872
+ !parsedNode.id ||
873
+ !parsedNode.vector ||
874
+ !parsedNode.connections) {
875
+ prodLog.error(`[getNode] ❌ Invalid node data structure for ${id}`);
876
+ prodLog.error(`[getNode] Has id: ${!!parsedNode?.id}`);
877
+ prodLog.error(`[getNode] Has vector: ${!!parsedNode?.vector}`);
878
+ prodLog.error(`[getNode] Has connections: ${!!parsedNode?.connections}`);
858
879
  return null;
859
880
  }
881
+ // Convert serialized connections back to Map<number, Set<string>>
882
+ const connections = new Map();
883
+ for (const [level, nodeIds] of Object.entries(parsedNode.connections)) {
884
+ connections.set(Number(level), new Set(nodeIds));
885
+ }
886
+ const node = {
887
+ id: parsedNode.id,
888
+ vector: parsedNode.vector,
889
+ connections,
890
+ level: parsedNode.level || 0
891
+ };
892
+ // CRITICAL FIX: Only cache valid nodes (never cache null)
893
+ if (node && node.id && node.vector && Array.isArray(node.vector)) {
894
+ this.nodeCache.set(id, node);
895
+ prodLog.info(`[getNode] 💾 Cached node ${id.substring(0, 8)}... successfully`);
896
+ }
897
+ else {
898
+ prodLog.warn(`[getNode] ⚠️ NOT caching invalid node for ${id.substring(0, 8)}...`);
899
+ }
900
+ this.logger.trace(`Successfully retrieved node ${id}`);
901
+ return node;
860
902
  }
861
903
  catch (error) {
862
- // Node not found or other error
863
- this.logger.trace(`Node not found for ${id}`);
864
- return null;
904
+ // DIAGNOSTIC LOGGING: Log EVERY error before any conditional checks
905
+ const key = this.getNounKey(id);
906
+ prodLog.error(`[getNode] ❌ EXCEPTION CAUGHT:`);
907
+ prodLog.error(`[getNode] UUID: ${id}`);
908
+ prodLog.error(`[getNode] Path: ${key}`);
909
+ prodLog.error(`[getNode] Bucket: ${this.bucketName}`);
910
+ prodLog.error(`[getNode] Error type: ${error?.constructor?.name || typeof error}`);
911
+ prodLog.error(`[getNode] Error name: ${error?.name}`);
912
+ prodLog.error(`[getNode] Error code: ${JSON.stringify(error?.Code || error?.code)}`);
913
+ prodLog.error(`[getNode] Error message: ${error?.message || String(error)}`);
914
+ prodLog.error(`[getNode] HTTP status: ${error?.$metadata?.httpStatusCode}`);
915
+ prodLog.error(`[getNode] Error object:`, JSON.stringify(error, null, 2));
916
+ // Check if this is a "not found" error (S3 uses "NoSuchKey")
917
+ if (error?.name === 'NoSuchKey' || error?.Code === 'NoSuchKey' || error?.$metadata?.httpStatusCode === 404) {
918
+ prodLog.warn(`[getNode] Identified as 404/NoSuchKey error - returning null WITHOUT caching`);
919
+ // CRITICAL FIX: Do NOT cache null values
920
+ return null;
921
+ }
922
+ // Handle throttling
923
+ if (this.isThrottlingError(error)) {
924
+ prodLog.warn(`[getNode] Identified as throttling error - rethrowing`);
925
+ await this.handleThrottling(error);
926
+ throw error;
927
+ }
928
+ // All other errors should throw, not return null
929
+ prodLog.error(`[getNode] Unhandled error - rethrowing`);
930
+ this.logger.error(`Failed to get node ${id}:`, error);
931
+ throw BrainyError.fromError(error, `getNoun(${id})`);
865
932
  }
866
933
  }
867
934
  /**
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@soulcraft/brainy",
3
- "version": "3.37.5",
3
+ "version": "3.37.7",
4
4
  "description": "Universal Knowledge Protocol™ - World's first Triple Intelligence database unifying vector, graph, and document search in one API. 31 nouns × 40 verbs for infinite expressiveness.",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.js",