@soulcraft/brainy 5.7.3 → 5.7.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,11 @@
 
  All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+ ### [5.7.4](https://github.com/soulcraftlabs/brainy/compare/v5.7.3...v5.7.4) (2025-11-12)
+
+ - fix: resolve v5.7.3 race condition by persisting write-through cache (v5.7.4) (6e19ec8)
+
+
  ### [5.7.3](https://github.com/soulcraftlabs/brainy/compare/v5.7.2...v5.7.3) (2025-11-12)
 
 
package/dist/brainy.js CHANGED
@@ -1613,21 +1613,6 @@ export class Brainy {
  lastBatchTime = Date.now();
  }
  }
- // v5.7.3: Ensure nounTypeCache is populated for all successful entities
- // This prevents cache misses that trigger expensive 42-type searches
- // when entities are immediately queried (e.g., during brain.relate())
- const cacheWarmingNeeded = result.successful.filter(id => !this.storage.nounTypeCache?.has(id));
- if (cacheWarmingNeeded.length > 0) {
- // Warm the cache by fetching metadata for entities not in cache
- await Promise.all(cacheWarmingNeeded.map(async (id) => {
- try {
- await this.storage.getNounMetadata(id);
- }
- catch (error) {
- // Ignore errors during cache warming (entity may be invalid)
- }
- }));
- }
  result.duration = Date.now() - startTime;
  return result;
  }
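
The block removed above was v5.7.3's cache-warming pass, added so entities created in a batch would not miss the nounTypeCache when queried immediately afterwards. A hypothetical sketch of what such a miss costs (names and signatures are illustrative, not brainy's actual internals):

```ts
// Hypothetical sketch, not brainy's internals: resolving an id without a
// cached type can mean probing every one of the 42 canonical noun types.
async function resolveNounType(
  id: string,
  cache: Map<string, string>,
  allTypes: string[],                                         // e.g. the 42 canonical noun types
  fetchByType: (type: string, id: string) => Promise<unknown | null>
): Promise<string | null> {
  const cached = cache.get(id)
  if (cached !== undefined) return cached                     // cache hit: a single lookup
  for (const type of allTypes) {                              // cache miss: up to 42 probes
    if ((await fetchByType(type, id)) !== null) {
      cache.set(id, type)
      return type
    }
  }
  return null
}
```

v5.7.4/v5.7.5 drop the warming pass because the write-through cache (next hunk) is no longer cleared after flush, so freshly written entities stay readable without the extra metadata fetches.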
@@ -3094,15 +3079,15 @@ export class Brainy {
  // 3. Flush graph adjacency index (relationship cache)
  // Note: Graph structure is already persisted via storage.saveVerb() calls
  // This just flushes the in-memory cache for performance
- this.graphIndex.flush(),
- // 4. v5.7.3: Clear write-through cache after flush
- // Cache persists during batch operations for read-after-write consistency
- // Cleared here after all writes are guaranteed flushed to disk
- (async () => {
- if (this.storage && typeof this.storage.writeCache !== 'undefined') {
- this.storage.writeCache.clear();
- }
- })()
+ this.graphIndex.flush()
+ // Note: Write-through cache (storage.writeCache) is NOT cleared here
+ // Cache persists indefinitely for read-after-write consistency
+ // Provides safety net for:
+ // - Cloud storage eventual consistency (S3, GCS, Azure, R2)
+ // - Filesystem buffer cache timing
+ // - Type cache warming period (nounTypeCache population)
+ // Cache entries are removed only when explicitly deleted (deleteObjectFromBranch)
+ // Memory footprint is negligible for typical workloads (<10MB for 100k entities)
  ]);
  const elapsed = Date.now() - startTime;
  console.log(`✅ All indexes flushed to disk in ${elapsed}ms`);
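
The replacement comments describe a write-through cache that is populated on every write and evicted only on explicit delete. A minimal sketch of that pattern, with illustrative names rather than brainy's actual storage API:

```ts
// Minimal sketch of the write-through pattern described above: writes land in
// the in-memory map and the backing store, reads prefer the map, flush-style
// operations persist pending work but do NOT clear the map, and only an
// explicit delete evicts an entry.
interface Backend<V> {
  put(key: string, value: V): Promise<void>
  get(key: string): Promise<V | null>
  delete(key: string): Promise<void>
}

class WriteThroughCache<V> {
  private entries = new Map<string, V>()

  constructor(private backend: Backend<V>) {}

  async put(key: string, value: V): Promise<void> {
    this.entries.set(key, value)        // immediately visible to readers
    await this.backend.put(key, value)  // written through to durable storage
  }

  async get(key: string): Promise<V | null> {
    // A cache hit masks eventual consistency or buffering in the backend
    return this.entries.get(key) ?? (await this.backend.get(key))
  }

  async delete(key: string): Promise<void> {
    this.entries.delete(key)            // the only place entries are evicted
    await this.backend.delete(key)
  }
}
```

Because the map holds entries until an explicit delete, reads stay consistent even over eventually consistent backends such as S3 or GCS, at the cost of the small memory footprint noted in the comments above.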
@@ -643,10 +643,6 @@ export class ImportCoordinator {
  if (addResult.failed.length > 0) {
  console.warn(`⚠️ ${addResult.failed.length} entities failed to create`);
  }
- // v5.7.3: Ensure all writes are flushed before creating relationships
- // Fixes "Source entity not found" error in v5.7.0/v5.7.1/v5.7.2
- // Guarantees entities are fully persisted and queryable before brain.relate() is called
- await this.brain.flush();
  // Create provenance links in batch
  if (documentEntityId && options.createProvenanceLinks !== false && entities.length > 0) {
  const provenanceParams = entities.map((entity, idx) => {
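
With the write-through cache persisting, the explicit flush that v5.7.3 inserted between entity creation and relationship creation is no longer needed. An illustrative import flow; the method names (addBatch, relate, flush) are assumptions inferred from the diff comments, not a verified brainy API:

```ts
// Illustrative only: method names are assumptions based on the diff comments.
async function importAndLink(brain: any, sourceId: string, targetId: string): Promise<void> {
  const addResult = await brain.addBatch([{ id: sourceId }, { id: targetId }]) // assumed batch-add API
  if (addResult.failed?.length > 0) {
    console.warn(`⚠️ ${addResult.failed.length} entities failed to create`)
  }
  // v5.7.3 required `await brain.flush()` here to avoid "Source entity not found";
  // the persisted write-through cache now keeps the new entities queryable immediately.
  await brain.relate(sourceId, targetId, 'references') // relate() is referenced in the diff comments
}
```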
@@ -238,6 +238,11 @@ export class BaseStorage extends BaseStorageAdapter {
  if (Buffer.isBuffer(data)) {
  return data;
  }
+ // v5.7.5: Unwrap binary data stored as {_binary: true, data: "base64..."}
+ // Fixes "Blob integrity check failed" - hash must be calculated on original content
+ if (data._binary && typeof data.data === 'string') {
+ return Buffer.from(data.data, 'base64');
+ }
  return Buffer.from(JSON.stringify(data));
  }
  catch (error) {
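
The added branch decodes payloads serialized as `{_binary: true, data: "<base64>"}` back to raw bytes before hashing. A small runnable sketch of why that matters for the integrity check; the wrapper shape comes from the diff, while the surrounding storage plumbing is assumed:

```ts
import { createHash } from 'node:crypto'

// The blob hash must be computed over the original bytes. Hashing the JSON
// wrapper instead produces a different digest and trips the integrity check.
const original = Buffer.from([0xde, 0xad, 0xbe, 0xef])
const wrapped = { _binary: true, data: original.toString('base64') }

const unwrapped = Buffer.from(wrapped.data, 'base64')
const sha256 = (buf: Buffer) => createHash('sha256').update(buf).digest('hex')

console.log(sha256(unwrapped) === sha256(original))                             // true: check passes
console.log(sha256(Buffer.from(JSON.stringify(wrapped))) === sha256(original))  // false: the pre-5.7.5 failure mode
```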
@@ -84,6 +84,7 @@ export declare class BlobStorage {
  private stats;
  private zstdCompress?;
  private zstdDecompress?;
+ private compressionReady;
  private readonly CACHE_MAX_SIZE;
  private readonly MULTIPART_THRESHOLD;
  private readonly COMPRESSION_THRESHOLD;
@@ -96,6 +97,11 @@ export declare class BlobStorage {
  * (Avoids loading if not needed)
  */
  private initCompression;
+ /**
+ * v5.7.5: Ensure compression is ready before write operations
+ * Fixes race condition where write happens before async compression init completes
+ */
+ private ensureCompressionReady;
  /**
  * Compute SHA-256 hash of data
  *
@@ -29,6 +29,7 @@ import { NULL_HASH, isNullHash } from './constants.js';
  */
  export class BlobStorage {
  constructor(adapter, options) {
+ this.compressionReady = false;
  // Configuration
  this.CACHE_MAX_SIZE = 100 * 1024 * 1024; // 100MB default
  this.MULTIPART_THRESHOLD = 5 * 1024 * 1024; // 5MB
@@ -74,6 +75,16 @@ export class BlobStorage {
  this.zstdDecompress = undefined;
  }
  }
+ /**
+ * v5.7.5: Ensure compression is ready before write operations
+ * Fixes race condition where write happens before async compression init completes
+ */
+ async ensureCompressionReady() {
+ if (this.compressionReady)
+ return;
+ await this.initCompression();
+ this.compressionReady = true;
+ }
  /**
  * Compute SHA-256 hash of data
  *
@@ -107,6 +118,9 @@ export class BlobStorage {
  this.stats.dedupSavings += data.length;
  return hash;
  }
+ // v5.7.5: Ensure compression is initialized before writing
+ // Fixes race condition where write happens before async init completes
+ await this.ensureCompressionReady();
  // Determine compression strategy
  const compression = this.selectCompression(data, options);
  // Compress if needed
@@ -117,11 +131,14 @@ export class BlobStorage {
  compressedSize = finalData.length;
  }
  // Create metadata
+ // v5.7.5: Store ACTUAL compression state, not intended
+ // Prevents corruption if compression failed to initialize
+ const actualCompression = finalData === data ? 'none' : compression;
  const metadata = {
  hash,
  size: data.length,
  compressedSize,
- compression,
+ compression: actualCompression,
  type: options.type || 'blob', // CRITICAL FIX: Use 'blob' default to match storage prefix
  createdAt: Date.now(),
  refCount: 1
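
The compression changes combine two guards: ensureCompressionReady() makes the first write wait for the asynchronous codec initialization, and actualCompression records what actually happened to the bytes rather than what was intended. A generic sketch of the init-guard pattern; this is not the package's exact code, and the memoized promise is an added assumption to also cover two writes racing through the first call:

```ts
// Generic sketch of a lazy async init guard in front of a write path.
class LazyCompressor {
  private ready = false
  private initPromise?: Promise<void>

  private async init(): Promise<void> {
    // ...load the zstd codec, allocate contexts, etc...
  }

  private async ensureReady(): Promise<void> {
    if (this.ready) return
    this.initPromise ??= this.init()   // concurrent callers await the same init
    await this.initPromise
    this.ready = true
  }

  async write(data: Buffer): Promise<Buffer> {
    await this.ensureReady()           // mirrors ensureCompressionReady() before the write
    // If the codec is still unavailable, store the data uncompressed and record
    // the actual state in metadata, as the actualCompression fix above does.
    return data
  }
}
```

Recording the actual compression state matters because a blob written uncompressed but labeled 'zstd' would be decompressed incorrectly on read, which is the corruption path the metadata fix closes.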
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@soulcraft/brainy",
- "version": "5.7.3",
+ "version": "5.7.5",
  "description": "Universal Knowledge Protocol™ - World's first Triple Intelligence database unifying vector, graph, and document search in one API. Stage 3 CANONICAL: 42 nouns × 127 verbs covering 96-97% of all human knowledge.",
  "main": "dist/index.js",
  "module": "dist/index.js",