@soulcraft/brainy 3.37.4 → 3.37.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -392,11 +392,20 @@ export class GcsStorage extends BaseStorage {
392
392
  this.logger.trace(`Getting node ${id}`);
393
393
  // Get the GCS key with UUID-based sharding
394
394
  const key = this.getNounKey(id);
395
+ // DIAGNOSTIC LOGGING: Show exact path being accessed
396
+ prodLog.info(`[getNode] πŸ” Attempting to load:`);
397
+ prodLog.info(`[getNode] UUID: ${id}`);
398
+ prodLog.info(`[getNode] Path: ${key}`);
399
+ prodLog.info(`[getNode] Bucket: ${this.bucketName}`);
395
400
  // Download from GCS
396
401
  const file = this.bucket.file(key);
402
+ prodLog.info(`[getNode] πŸ“₯ Downloading file...`);
397
403
  const [contents] = await file.download();
404
+ prodLog.info(`[getNode] βœ… Download successful: ${contents.length} bytes`);
398
405
  // Parse JSON
406
+ prodLog.info(`[getNode] πŸ”§ Parsing JSON...`);
399
407
  const data = JSON.parse(contents.toString());
408
+ prodLog.info(`[getNode] βœ… JSON parsed successfully, id: ${data.id}`);
400
409
  // Convert serialized connections back to Map<number, Set<string>>
401
410
  const connections = new Map();
402
411
  for (const [level, nounIds] of Object.entries(data.connections || {})) {
@@ -419,16 +428,29 @@ export class GcsStorage extends BaseStorage {
419
428
  }
420
429
  catch (error) {
421
430
  this.releaseBackpressure(false, requestId);
431
+ // DIAGNOSTIC LOGGING: Log EVERY error before any conditional checks
432
+ const key = this.getNounKey(id);
433
+ prodLog.error(`[getNode] ❌ EXCEPTION CAUGHT:`);
434
+ prodLog.error(`[getNode] UUID: ${id}`);
435
+ prodLog.error(`[getNode] Path: ${key}`);
436
+ prodLog.error(`[getNode] Bucket: ${this.bucketName}`);
437
+ prodLog.error(`[getNode] Error type: ${error?.constructor?.name || typeof error}`);
438
+ prodLog.error(`[getNode] Error code: ${JSON.stringify(error?.code)}`);
439
+ prodLog.error(`[getNode] Error message: ${error?.message || String(error)}`);
440
+ prodLog.error(`[getNode] Error object:`, JSON.stringify(error, null, 2));
422
441
  // Check if this is a "not found" error
423
442
  if (error.code === 404) {
424
- this.logger.trace(`Node not found: ${id}`);
443
+ prodLog.warn(`[getNode] Identified as 404 error - returning null`);
425
444
  return null;
426
445
  }
427
446
  // Handle throttling
428
447
  if (this.isThrottlingError(error)) {
448
+ prodLog.warn(`[getNode] Identified as throttling error - rethrowing`);
429
449
  await this.handleThrottling(error);
430
450
  throw error;
431
451
  }
452
+ // All other errors should throw, not return null
453
+ prodLog.error(`[getNode] Unhandled error - rethrowing`);
432
454
  this.logger.error(`Failed to get node ${id}:`, error);
433
455
  throw BrainyError.fromError(error, `getNoun(${id})`);
434
456
  }
@@ -785,6 +807,13 @@ export class GcsStorage extends BaseStorage {
785
807
  await this.ensureInitialized(); // CRITICAL: Must initialize before using this.bucket
786
808
  const limit = options.limit || 100;
787
809
  const useCache = options.useCache !== false;
810
+ // DIAGNOSTIC LOGGING: Track pagination performance
811
+ prodLog.info(`[getNodesWithPagination] Starting pagination: limit=${limit}, cursor=${options.cursor || 'none'}`);
812
+ const startTime = Date.now();
813
+ let shardsChecked = 0;
814
+ let filesFound = 0;
815
+ let nodesLoaded = 0;
816
+ let nodesFailed = 0;
788
817
  try {
789
818
  const nodes = [];
790
819
  // Parse cursor (format: "shardIndex:gcsPageToken")
@@ -799,6 +828,7 @@ export class GcsStorage extends BaseStorage {
799
828
  for (let shardIndex = startShardIndex; shardIndex < TOTAL_SHARDS; shardIndex++) {
800
829
  const shardId = getShardIdByIndex(shardIndex);
801
830
  const shardPrefix = `${this.nounPrefix}${shardId}/`;
831
+ shardsChecked++;
802
832
  // List objects in this shard
803
833
  // Cap maxResults to GCS API limit to prevent "Invalid unsigned integer" errors
804
834
  const requestedPageSize = limit - nodes.length;
@@ -808,6 +838,12 @@ export class GcsStorage extends BaseStorage {
808
838
  maxResults: cappedPageSize,
809
839
  pageToken: shardIndex === startShardIndex ? gcsPageToken : undefined
810
840
  });
841
+ // DIAGNOSTIC LOGGING: Show files found per shard (only log non-empty shards)
842
+ if (files && files.length > 0) {
843
+ filesFound += files.length;
844
+ prodLog.info(`[Shard ${shardId}] Found ${files.length} files in "${shardPrefix}"`);
845
+ prodLog.info(`[Shard ${shardId}] Sample file names: ${files.slice(0, 3).map((f) => f.name).join(', ')}`);
846
+ }
811
847
  // Extract node IDs from file names
812
848
  if (files && files.length > 0) {
813
849
  const nodeIds = files
@@ -824,11 +860,21 @@ export class GcsStorage extends BaseStorage {
824
860
  return name;
825
861
  })
826
862
  .filter((id) => id && id.length > 0);
863
+ // DIAGNOSTIC LOGGING: Show extracted UUIDs
864
+ prodLog.info(`[Shard ${shardId}] Extracted ${nodeIds.length} UUIDs: ${nodeIds.slice(0, 3).join(', ')}...`);
827
865
  // Load nodes
828
866
  for (const id of nodeIds) {
867
+ // DIAGNOSTIC LOGGING: Show each getNode() attempt
868
+ prodLog.info(`[Shard ${shardId}] Calling getNode("${id}")...`);
829
869
  const node = await this.getNode(id);
830
870
  if (node) {
831
871
  nodes.push(node);
872
+ nodesLoaded++;
873
+ prodLog.info(`[Shard ${shardId}] βœ… Successfully loaded node ${id}`);
874
+ }
875
+ else {
876
+ nodesFailed++;
877
+ prodLog.warn(`[Shard ${shardId}] ❌ getNode("${id}") returned null!`);
832
878
  }
833
879
  if (nodes.length >= limit) {
834
880
  break;
@@ -861,6 +907,14 @@ export class GcsStorage extends BaseStorage {
861
907
  // Continue to next shard
862
908
  }
863
909
  // No more shards or nodes
910
+ // DIAGNOSTIC LOGGING: Final summary
911
+ const elapsedTime = Date.now() - startTime;
912
+ prodLog.info(`[getNodesWithPagination] COMPLETED in ${elapsedTime}ms:`);
913
+ prodLog.info(` - Shards checked: ${shardsChecked}/${TOTAL_SHARDS}`);
914
+ prodLog.info(` - Files found: ${filesFound}`);
915
+ prodLog.info(` - Nodes loaded: ${nodesLoaded}`);
916
+ prodLog.info(` - Nodes failed: ${nodesFailed}`);
917
+ prodLog.info(` - Success rate: ${filesFound > 0 ? ((nodesLoaded / filesFound) * 100).toFixed(1) : 'N/A'}%`);
864
918
  return {
865
919
  nodes,
866
920
  totalCount: this.totalNounCount,
@@ -813,55 +813,82 @@ export class S3CompatibleStorage extends BaseStorage {
813
813
  const { GetObjectCommand } = await import('@aws-sdk/client-s3');
814
814
  // Use getNounKey() to properly handle sharding
815
815
  const key = this.getNounKey(id);
816
- this.logger.trace(`Getting node ${id} from key: ${key}`);
816
+ // DIAGNOSTIC LOGGING: Show exact path being accessed
817
+ prodLog.info(`[getNode] πŸ” Attempting to load:`);
818
+ prodLog.info(`[getNode] UUID: ${id}`);
819
+ prodLog.info(`[getNode] Path: ${key}`);
820
+ prodLog.info(`[getNode] Bucket: ${this.bucketName}`);
817
821
  // Try to get the node from the nouns directory
822
+ prodLog.info(`[getNode] πŸ“₯ Downloading file...`);
818
823
  const response = await this.s3Client.send(new GetObjectCommand({
819
824
  Bucket: this.bucketName,
820
825
  Key: key
821
826
  }));
822
827
  // Check if response is null or undefined
823
828
  if (!response || !response.Body) {
824
- this.logger.trace(`No node found for ${id}`);
829
+ prodLog.warn(`[getNode] ❌ Response or Body is null/undefined`);
825
830
  return null;
826
831
  }
827
832
  // Convert the response body to a string
828
833
  const bodyContents = await response.Body.transformToString();
829
- this.logger.trace(`Retrieved node body for ${id}`);
834
+ prodLog.info(`[getNode] βœ… Download successful: ${bodyContents.length} bytes`);
830
835
  // Parse the JSON string
831
- try {
832
- const parsedNode = JSON.parse(bodyContents);
833
- this.logger.trace(`Parsed node data for ${id}`);
834
- // Ensure the parsed node has the expected properties
835
- if (!parsedNode ||
836
- !parsedNode.id ||
837
- !parsedNode.vector ||
838
- !parsedNode.connections) {
839
- this.logger.warn(`Invalid node data for ${id}`);
840
- return null;
841
- }
842
- // Convert serialized connections back to Map<number, Set<string>>
843
- const connections = new Map();
844
- for (const [level, nodeIds] of Object.entries(parsedNode.connections)) {
845
- connections.set(Number(level), new Set(nodeIds));
846
- }
847
- const node = {
848
- id: parsedNode.id,
849
- vector: parsedNode.vector,
850
- connections,
851
- level: parsedNode.level || 0
852
- };
853
- this.logger.trace(`Successfully retrieved node ${id}`);
854
- return node;
855
- }
856
- catch (parseError) {
857
- this.logger.error(`Failed to parse node data for ${id}:`, parseError);
836
+ prodLog.info(`[getNode] πŸ”§ Parsing JSON...`);
837
+ const parsedNode = JSON.parse(bodyContents);
838
+ prodLog.info(`[getNode] βœ… JSON parsed successfully, id: ${parsedNode.id}`);
839
+ // Ensure the parsed node has the expected properties
840
+ if (!parsedNode ||
841
+ !parsedNode.id ||
842
+ !parsedNode.vector ||
843
+ !parsedNode.connections) {
844
+ prodLog.error(`[getNode] ❌ Invalid node data structure for ${id}`);
845
+ prodLog.error(`[getNode] Has id: ${!!parsedNode?.id}`);
846
+ prodLog.error(`[getNode] Has vector: ${!!parsedNode?.vector}`);
847
+ prodLog.error(`[getNode] Has connections: ${!!parsedNode?.connections}`);
858
848
  return null;
859
849
  }
850
+ // Convert serialized connections back to Map<number, Set<string>>
851
+ const connections = new Map();
852
+ for (const [level, nodeIds] of Object.entries(parsedNode.connections)) {
853
+ connections.set(Number(level), new Set(nodeIds));
854
+ }
855
+ const node = {
856
+ id: parsedNode.id,
857
+ vector: parsedNode.vector,
858
+ connections,
859
+ level: parsedNode.level || 0
860
+ };
861
+ this.logger.trace(`Successfully retrieved node ${id}`);
862
+ return node;
860
863
  }
861
864
  catch (error) {
862
- // Node not found or other error
863
- this.logger.trace(`Node not found for ${id}`);
864
- return null;
865
+ // DIAGNOSTIC LOGGING: Log EVERY error before any conditional checks
866
+ const key = this.getNounKey(id);
867
+ prodLog.error(`[getNode] ❌ EXCEPTION CAUGHT:`);
868
+ prodLog.error(`[getNode] UUID: ${id}`);
869
+ prodLog.error(`[getNode] Path: ${key}`);
870
+ prodLog.error(`[getNode] Bucket: ${this.bucketName}`);
871
+ prodLog.error(`[getNode] Error type: ${error?.constructor?.name || typeof error}`);
872
+ prodLog.error(`[getNode] Error name: ${error?.name}`);
873
+ prodLog.error(`[getNode] Error code: ${JSON.stringify(error?.Code || error?.code)}`);
874
+ prodLog.error(`[getNode] Error message: ${error?.message || String(error)}`);
875
+ prodLog.error(`[getNode] HTTP status: ${error?.$metadata?.httpStatusCode}`);
876
+ prodLog.error(`[getNode] Error object:`, JSON.stringify(error, null, 2));
877
+ // Check if this is a "not found" error (S3 uses "NoSuchKey")
878
+ if (error?.name === 'NoSuchKey' || error?.Code === 'NoSuchKey' || error?.$metadata?.httpStatusCode === 404) {
879
+ prodLog.warn(`[getNode] Identified as 404/NoSuchKey error - returning null`);
880
+ return null;
881
+ }
882
+ // Handle throttling
883
+ if (this.isThrottlingError(error)) {
884
+ prodLog.warn(`[getNode] Identified as throttling error - rethrowing`);
885
+ await this.handleThrottling(error);
886
+ throw error;
887
+ }
888
+ // All other errors should throw, not return null
889
+ prodLog.error(`[getNode] Unhandled error - rethrowing`);
890
+ this.logger.error(`Failed to get node ${id}:`, error);
891
+ throw BrainyError.fromError(error, `getNoun(${id})`);
865
892
  }
866
893
  }
867
894
  /**
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@soulcraft/brainy",
3
- "version": "3.37.4",
3
+ "version": "3.37.6",
4
4
  "description": "Universal Knowledge Protocol™ - World's first Triple Intelligence database unifying vector, graph, and document search in one API. 31 nouns Γ— 40 verbs for infinite expressiveness.",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.js",