@soulcraft/brainy 5.0.0 → 5.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/brainy.js CHANGED
@@ -97,6 +97,12 @@ export class Brainy {
  // Setup and initialize storage
  this.storage = await this.setupStorage();
  await this.storage.init();
+ // Enable COW immediately after storage init (v5.0.1)
+ // This ensures ALL data is stored in branch-scoped paths from the start
+ // Lightweight: just sets cowEnabled=true and currentBranch, no RefManager/BlobStorage yet
+ if (typeof this.storage.enableCOWLightweight === 'function') {
+ this.storage.enableCOWLightweight(this.config.storage?.branch || 'main');
+ }
  // Setup index now that we have storage
  this.index = this.setupIndex();
  // Initialize core metadata index
@@ -135,7 +141,13 @@ export class Brainy {
  this.registerShutdownHooks();
  Brainy.shutdownHooksRegisteredGlobally = true;
  }
+ // Mark as initialized BEFORE VFS init (v5.0.1)
+ // VFS.init() needs brain to be marked initialized to call brain methods
  this.initialized = true;
+ // Initialize VFS (v5.0.1): Ensure VFS is ready when accessed as property
+ // This eliminates need for separate vfs.init() calls - zero additional complexity
+ this._vfs = new VirtualFileSystem(this);
+ await this._vfs.init();
  }
  catch (error) {
  throw new Error(`Failed to initialize Brainy: ${error}`);
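
Taken together, the two init-path changes above mean a single `brain.init()` call now leaves both branch-scoped (lightweight COW) storage and the VFS ready to use. A minimal usage sketch of the new startup flow, assuming the package's root export exposes the `Brainy` class (the file path and content mirror the JSDoc examples later in this diff):

```typescript
import { Brainy } from '@soulcraft/brainy'

const brain = new Brainy()
await brain.init() // enables lightweight COW paths and auto-initializes the VFS

// The VFS is usable immediately; no separate vfs.init() call is needed
await brain.vfs.writeFile('/docs/readme.md', 'Hello World')
const content = await brain.vfs.readFile('/docs/readme.md')
```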
@@ -307,14 +319,16 @@ export class Brainy {
  ...(params.weight !== undefined && { weight: params.weight }),
  ...(params.createdBy && { createdBy: params.createdBy })
  };
- // v4.0.0: Save vector and metadata separately
+ // v5.0.1: Save metadata FIRST so TypeAwareStorage can cache the type
+ // This prevents the race condition where saveNoun() defaults to 'thing'
+ await this.storage.saveNounMetadata(id, storageMetadata);
+ // Then save vector
  await this.storage.saveNoun({
  id,
  vector,
  connections: new Map(),
  level: 0
  });
- await this.storage.saveNounMetadata(id, storageMetadata);
  // v4.8.0: Build entity structure for indexing (NEW - with top-level fields)
  const entityForIndexing = {
  id,
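
The reordering above matters because of write-time type resolution: if the vector is saved before its metadata, a type-aware storage layer has no type to associate with the id and falls back to `'thing'`. A rough sketch of that race, using a hypothetical type cache (the names below are illustrative, not the package's internals):

```typescript
// Illustrative only: why metadata must be written before the vector
const typeCache = new Map<string, string>()

async function saveNounMetadata(id: string, metadata: { type: string }) {
  typeCache.set(id, metadata.type) // from here on, the type is known
}

async function saveNoun(noun: { id: string }) {
  // Before the fix, this ran first, found no cached type, and defaulted to 'thing'
  const type = typeCache.get(noun.id) ?? 'thing'
  // ...store the vector under `type`...
}
```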
@@ -1820,11 +1834,22 @@ export class Brainy {
  await this.ensureInitialized();
  return this.augmentationRegistry.execute('fork', { branch, options }, async () => {
  const branchName = branch || `fork-${Date.now()}`;
- // Check if storage has RefManager (COW enabled)
- if (!('refManager' in this.storage)) {
- throw new Error('Fork requires COW-enabled storage. ' +
- 'This storage adapter does not support branching. ' +
- 'Please use v5.0.0+ storage adapters.');
+ // v5.0.1: Lazy COW initialization - enable automatically on first fork()
+ // This is zero-config and transparent to users
+ if (!('refManager' in this.storage) || !this.storage.refManager) {
+ // Storage supports COW but isn't initialized yet - initialize now
+ if (typeof this.storage.initializeCOW === 'function') {
+ await this.storage.initializeCOW({
+ branch: this.config.storage?.branch || 'main',
+ enableCompression: true
+ });
+ }
+ else {
+ // Storage adapter doesn't support COW at all
+ throw new Error('Fork requires COW-enabled storage. ' +
+ 'This storage adapter does not support branching. ' +
+ 'Please use v5.0.0+ storage adapters.');
+ }
  }
  const refManager = this.storage.refManager;
  const currentBranch = this.storage.currentBranch || 'main';
@@ -1839,9 +1864,11 @@ export class Brainy {
  }
  };
  const clone = new Brainy(forkConfig);
- // Step 3: TRUE INSTANT FORK - Shallow copy indexes (O(1), <10ms)
- // Share storage reference (already COW-enabled)
- clone.storage = this.storage;
+ // Step 3: Clone storage with separate currentBranch
+ // Share RefManager/BlobStorage/CommitLog but maintain separate branch context
+ clone.storage = Object.create(this.storage);
+ clone.storage.currentBranch = branchName;
+ // isInitialized inherited from prototype
  // Shallow copy HNSW index (INSTANT - just copies Map references)
  clone.index = this.setupIndex();
  // Enable COW (handle both HNSWIndex and TypeAwareHNSWIndex)
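
With the lazy COW initialization and the per-fork branch context above, `fork()` needs no prior configuration: the first call initializes COW on an adapter that supports it (and still throws on one that cannot branch), and each clone writes under its own branch while sharing the parent's RefManager/BlobStorage. A usage sketch built from the calls shown elsewhere in this diff (`'experiment'` is an arbitrary branch name):

```typescript
const brain = new Brainy()
await brain.init()
await brain.vfs.writeFile('/config.json', '{"v": 1}')

// First fork() transparently enables copy-on-write storage
const fork = await brain.fork('experiment')

// The fork inherits the parent's data but writes in isolation
const config = await fork.vfs.readFile('/config.json')
await fork.vfs.writeFile('/test.txt', 'Fork only')

const branches = await brain.listBranches() // e.g. ['main', 'experiment']
```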
@@ -1891,8 +1918,8 @@ export class Brainy {
  const refs = await refManager.listRefs();
  // Filter to branches only (exclude tags)
  return refs
- .filter((ref) => ref.startsWith('heads/'))
- .map((ref) => ref.replace('heads/', ''));
+ .filter((ref) => ref.name.startsWith('refs/heads/'))
+ .map((ref) => ref.name.replace('refs/heads/', ''));
  });
  }
  /**
@@ -2163,6 +2190,208 @@ export class Brainy {
  return { added, modified, deleted, conflicts };
  });
  }
+ /**
+ * Compare differences between two branches (like git diff)
+ * @param sourceBranch - Branch to compare from (defaults to current branch)
+ * @param targetBranch - Branch to compare to (defaults to 'main')
+ * @returns Diff result showing added, modified, and deleted entities/relationships
+ *
+ * @example
+ * ```typescript
+ * // Compare current branch with main
+ * const diff = await brain.diff()
+ *
+ * // Compare two specific branches
+ * const diff = await brain.diff('experiment', 'main')
+ * console.log(diff)
+ * // {
+ * // entities: { added: 5, modified: 3, deleted: 1 },
+ * // relationships: { added: 10, modified: 2, deleted: 0 }
+ * // }
+ * ```
+ */
+ async diff(sourceBranch, targetBranch) {
+ await this.ensureInitialized();
+ return this.augmentationRegistry.execute('diff', { sourceBranch, targetBranch }, async () => {
+ // Default branches
+ const source = sourceBranch || (await this.getCurrentBranch());
+ const target = targetBranch || 'main';
+ const currentBranch = await this.getCurrentBranch();
+ // If source is current branch, use this instance directly (no fork needed)
+ let sourceFork;
+ let sourceForkCreated = false;
+ if (source === currentBranch) {
+ sourceFork = this;
+ }
+ else {
+ sourceFork = await this.fork(`temp-diff-source-${Date.now()}`);
+ sourceForkCreated = true;
+ try {
+ await sourceFork.checkout(source);
+ }
+ catch (err) {
+ // If checkout fails, branch may not exist - just use current state
+ }
+ }
+ // If target is current branch, use this instance directly (no fork needed)
+ let targetFork;
+ let targetForkCreated = false;
+ if (target === currentBranch) {
+ targetFork = this;
+ }
+ else {
+ targetFork = await this.fork(`temp-diff-target-${Date.now()}`);
+ targetForkCreated = true;
+ try {
+ await targetFork.checkout(target);
+ }
+ catch (err) {
+ // If checkout fails, branch may not exist - just use current state
+ }
+ }
+ try {
+ // Get all entities from both branches
+ const sourceResults = await sourceFork.find({});
+ const targetResults = await targetFork.find({});
+ // Create maps for lookup
+ const sourceMap = new Map(sourceResults.map(r => [r.entity.id, r.entity]));
+ const targetMap = new Map(targetResults.map(r => [r.entity.id, r.entity]));
+ // Track differences
+ const entitiesAdded = [];
+ const entitiesModified = [];
+ const entitiesDeleted = [];
+ // Find added and modified entities
+ for (const [id, sourceEntity] of sourceMap.entries()) {
+ const targetEntity = targetMap.get(id);
+ if (!targetEntity) {
+ // Entity exists in source but not target = ADDED
+ entitiesAdded.push({
+ id: sourceEntity.id,
+ type: sourceEntity.type,
+ data: sourceEntity.data
+ });
+ }
+ else {
+ // Entity exists in both - check for modifications
+ const changes = [];
+ if (sourceEntity.data !== targetEntity.data) {
+ changes.push('data');
+ }
+ if ((sourceEntity.updatedAt || 0) !== (targetEntity.updatedAt || 0)) {
+ changes.push('updatedAt');
+ }
+ if (changes.length > 0) {
+ entitiesModified.push({
+ id: sourceEntity.id,
+ type: sourceEntity.type,
+ changes
+ });
+ }
+ }
+ }
+ // Find deleted entities (in target but not in source)
+ for (const [id, targetEntity] of targetMap.entries()) {
+ if (!sourceMap.has(id)) {
+ entitiesDeleted.push({
+ id: targetEntity.id,
+ type: targetEntity.type
+ });
+ }
+ }
+ // Compare relationships
+ const sourceVerbsResult = await sourceFork.storage.getVerbs({});
+ const targetVerbsResult = await targetFork.storage.getVerbs({});
+ const sourceVerbs = sourceVerbsResult.items || [];
+ const targetVerbs = targetVerbsResult.items || [];
+ const sourceRelMap = new Map(sourceVerbs.map((v) => [`${v.sourceId}-${v.verb}-${v.targetId}`, v]));
+ const targetRelMap = new Map(targetVerbs.map((v) => [`${v.sourceId}-${v.verb}-${v.targetId}`, v]));
+ const relationshipsAdded = [];
+ const relationshipsModified = [];
+ const relationshipsDeleted = [];
+ // Find added and modified relationships
+ for (const [key, sourceVerb] of sourceRelMap.entries()) {
+ const targetVerb = targetRelMap.get(key);
+ if (!targetVerb) {
+ // Relationship exists in source but not target = ADDED
+ relationshipsAdded.push({
+ from: sourceVerb.sourceId,
+ to: sourceVerb.targetId,
+ type: sourceVerb.verb
+ });
+ }
+ else {
+ // Relationship exists in both - check for modifications
+ const changes = [];
+ if ((sourceVerb.weight || 0) !== (targetVerb.weight || 0)) {
+ changes.push('weight');
+ }
+ if (JSON.stringify(sourceVerb.metadata) !== JSON.stringify(targetVerb.metadata)) {
+ changes.push('metadata');
+ }
+ if (changes.length > 0) {
+ relationshipsModified.push({
+ from: sourceVerb.sourceId,
+ to: sourceVerb.targetId,
+ type: sourceVerb.verb,
+ changes
+ });
+ }
+ }
+ }
+ // Find deleted relationships
+ for (const [key, targetVerb] of targetRelMap.entries()) {
+ if (!sourceRelMap.has(key)) {
+ relationshipsDeleted.push({
+ from: targetVerb.sourceId,
+ to: targetVerb.targetId,
+ type: targetVerb.verb
+ });
+ }
+ }
+ return {
+ entities: {
+ added: entitiesAdded,
+ modified: entitiesModified,
+ deleted: entitiesDeleted
+ },
+ relationships: {
+ added: relationshipsAdded,
+ modified: relationshipsModified,
+ deleted: relationshipsDeleted
+ },
+ summary: {
+ entitiesAdded: entitiesAdded.length,
+ entitiesModified: entitiesModified.length,
+ entitiesDeleted: entitiesDeleted.length,
+ relationshipsAdded: relationshipsAdded.length,
+ relationshipsModified: relationshipsModified.length,
+ relationshipsDeleted: relationshipsDeleted.length
+ }
+ };
+ }
+ finally {
+ // Clean up temporary forks (only if we created them)
+ try {
+ const branches = await this.listBranches();
+ if (sourceForkCreated && sourceFork !== this) {
+ const sourceBranchName = await sourceFork.getCurrentBranch();
+ if (branches.includes(sourceBranchName)) {
+ await this.deleteBranch(sourceBranchName);
+ }
+ }
+ if (targetForkCreated && targetFork !== this) {
+ const targetBranchName = await targetFork.getCurrentBranch();
+ if (branches.includes(targetBranchName)) {
+ await this.deleteBranch(targetBranchName);
+ }
+ }
+ }
+ catch (err) {
+ // Ignore cleanup errors
+ }
+ }
+ });
+ }
  /**
  * Delete a branch/fork
  * @param branch - Branch name to delete
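
The new `diff()` method returns full added/modified/deleted lists for entities and relationships plus a count summary, so callers can report totals or walk individual changes. A small consumption sketch based on the return shape above (branch names and counts are illustrative):

```typescript
const result = await brain.diff('experiment', 'main')

console.log(result.summary)
// e.g. { entitiesAdded: 5, entitiesModified: 3, entitiesDeleted: 1,
//        relationshipsAdded: 10, relationshipsModified: 2, relationshipsDeleted: 0 }

for (const entity of result.entities.added) {
  console.log(`+ ${entity.type} ${entity.id}`)
}
for (const rel of result.relationships.deleted) {
  console.log(`- ${rel.from} -[${rel.type}]-> ${rel.to}`)
}
```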
@@ -2422,36 +2651,46 @@ export class Brainy {
  return await coordinator.import(source, options);
  }
  /**
- * Virtual File System API - Knowledge Operating System
+ * Virtual File System API - Knowledge Operating System (v5.0.1+)
  *
- * Returns a cached VFS instance. You must call vfs.init() before use:
+ * Returns a cached VFS instance that is auto-initialized during brain.init().
+ * No separate initialization needed!
  *
  * @example After import
  * ```typescript
  * await brain.import('./data.xlsx', { vfsPath: '/imports/data' })
- *
- * const vfs = brain.vfs()
- * await vfs.init() // Required! (safe to call multiple times)
- * const files = await vfs.readdir('/imports/data')
+ * // VFS ready immediately - no init() call needed!
+ * const files = await brain.vfs.readdir('/imports/data')
  * ```
  *
  * @example Direct VFS usage
  * ```typescript
- * const vfs = brain.vfs()
- * await vfs.init() // Always required before first use
- * await vfs.writeFile('/docs/readme.md', 'Hello World')
- * const content = await vfs.readFile('/docs/readme.md')
+ * await brain.init() // VFS auto-initialized here!
+ * await brain.vfs.writeFile('/docs/readme.md', 'Hello World')
+ * const content = await brain.vfs.readFile('/docs/readme.md')
  * ```
  *
- * **Note:** brain.import() automatically initializes the VFS, so after
- * an import you can call vfs.init() again (it's idempotent) and immediately
- * query the imported files.
+ * @example With fork (COW isolation)
+ * ```typescript
+ * await brain.init()
+ * await brain.vfs.writeFile('/config.json', '{"v": 1}')
  *
- * **Pattern:** The VFS instance is cached, so multiple calls to brain.vfs()
+ * const fork = await brain.fork('experiment')
+ * // Fork inherits parent's files
+ * const config = await fork.vfs.readFile('/config.json')
+ * // Fork modifications are isolated
+ * await fork.vfs.writeFile('/test.txt', 'Fork only')
+ * ```
+ *
+ * **Pattern:** The VFS instance is cached, so multiple calls to brain.vfs
  * return the same instance. This ensures import and user code share state.
+ *
+ * @since v5.0.1 - Auto-initialization during brain.init()
  */
- vfs() {
+ get vfs() {
  if (!this._vfs) {
+ // VFS is initialized during brain.init() (v5.0.1)
+ // If not initialized yet, create instance but user should call brain.init() first
  this._vfs = new VirtualFileSystem(this);
  }
  return this._vfs;
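
Because `vfs` changed from a method to a getter in this release, existing call sites need a small migration; both forms are visible in the CLI changes later in this diff. A before/after sketch:

```typescript
// Before: method call plus explicit init
const vfs = brain.vfs()
await vfs.init()
const files = await vfs.readdir('/imports/data')

// After: property access, initialized as part of brain.init()
await brain.init()
const sameFiles = await brain.vfs.readdir('/imports/data')
```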
@@ -416,7 +416,7 @@ export const importCommands = {
  spinner = ora('Initializing VFS import...').start();
  const brain = getBrainy();
  // Get VFS
- const vfs = await brain.vfs();
+ const vfs = await brain.vfs;
  // Load DirectoryImporter
  const { DirectoryImporter } = await import('../../vfs/importers/DirectoryImporter.js');
  const importer = new DirectoryImporter(vfs, brain);
@@ -9,9 +9,10 @@ import Table from 'cli-table3';
  import { readFileSync, writeFileSync } from 'node:fs';
  import { Brainy } from '../../brainy.js';
  let brainyInstance = null;
- const getBrainy = () => {
+ const getBrainy = async () => {
  if (!brainyInstance) {
  brainyInstance = new Brainy();
+ await brainyInstance.init(); // v5.0.1: Initialize brain (VFS auto-initialized here!)
  }
  return brainyInstance;
  };
@@ -38,10 +39,9 @@ export const vfsCommands = {
  async read(path, options) {
  const spinner = ora('Reading file...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- const buffer = await vfs.readFile(path, {
+ const brain = await getBrainy(); // v5.0.1: Await async getBrainy
+ // v5.0.1: VFS auto-initialized, no need for vfs.init()
+ const buffer = await brain.vfs.readFile(path, {
  encoding: options.encoding
  });
  spinner.succeed('File read successfully');
@@ -70,9 +70,7 @@ export const vfsCommands = {
  async write(path, options) {
  const spinner = ora('Writing file...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
+ const brain = await getBrainy();
  let data;
  if (options.file) {
  // Read from local file
@@ -85,7 +83,7 @@ export const vfsCommands = {
  spinner.fail('Must provide --content or --file');
  process.exit(1);
  }
- await vfs.writeFile(path, data, {
+ await brain.vfs.writeFile(path, data, {
  encoding: options.encoding
  });
  spinner.succeed('File written successfully');
@@ -109,10 +107,8 @@ export const vfsCommands = {
  async ls(path, options) {
  const spinner = ora('Listing directory...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- const entries = await vfs.readdir(path, { withFileTypes: true });
+ const brain = await getBrainy();
+ const entries = await brain.vfs.readdir(path, { withFileTypes: true });
  spinner.succeed(`Found ${Array.isArray(entries) ? entries.length : 0} items`);
  if (!options.json) {
  if (!Array.isArray(entries) || entries.length === 0) {
@@ -128,7 +124,7 @@ export const vfsCommands = {
  for (const entry of entries) {
  if (!options.all && entry.name.startsWith('.'))
  continue;
- const stat = await vfs.stat(`${path}/${entry.name}`);
+ const stat = await brain.vfs.stat(`${path}/${entry.name}`);
  table.push([
  entry.isDirectory() ? chalk.blue('DIR') : 'FILE',
  entry.isDirectory() ? '-' : formatBytes(stat.size),
@@ -169,10 +165,8 @@ export const vfsCommands = {
  async stat(path, options) {
  const spinner = ora('Getting file stats...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- const stats = await vfs.stat(path);
+ const brain = await getBrainy();
+ const stats = await brain.vfs.stat(path);
  spinner.succeed('Stats retrieved');
  if (!options.json) {
  console.log(chalk.cyan('\nFile Statistics:'));
@@ -199,10 +193,8 @@ export const vfsCommands = {
  async mkdir(path, options) {
  const spinner = ora('Creating directory...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- await vfs.mkdir(path, { recursive: options.parents });
+ const brain = await getBrainy();
+ await brain.vfs.mkdir(path, { recursive: options.parents });
  spinner.succeed('Directory created');
  if (!options.json) {
  console.log(chalk.green(`✓ Created: ${path}`));
@@ -223,15 +215,13 @@ export const vfsCommands = {
  async rm(path, options) {
  const spinner = ora('Removing...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- const stats = await vfs.stat(path);
+ const brain = await getBrainy();
+ const stats = await brain.vfs.stat(path);
  if (stats.isDirectory()) {
- await vfs.rmdir(path, { recursive: options.recursive });
+ await brain.vfs.rmdir(path, { recursive: options.recursive });
  }
  else {
- await vfs.unlink(path);
+ await brain.vfs.unlink(path);
  }
  spinner.succeed('Removed successfully');
  if (!options.json) {
@@ -255,10 +245,8 @@ export const vfsCommands = {
  async search(query, options) {
  const spinner = ora('Searching files...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- const results = await vfs.search(query, {
+ const brain = await getBrainy();
+ const results = await brain.vfs.search(query, {
  path: options.path,
  limit: options.limit ? parseInt(options.limit) : 10
  });
@@ -297,10 +285,8 @@ export const vfsCommands = {
  async similar(path, options) {
  const spinner = ora('Finding similar files...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- const results = await vfs.findSimilar(path, {
+ const brain = await getBrainy();
+ const results = await brain.vfs.findSimilar(path, {
  limit: options.limit ? parseInt(options.limit) : 10,
  threshold: options.threshold ? parseFloat(options.threshold) : 0.7
  });
@@ -336,10 +322,8 @@ export const vfsCommands = {
  async tree(path, options) {
  const spinner = ora('Building tree...').start();
  try {
- const brain = getBrainy();
- const vfs = brain.vfs();
- await vfs.init();
- const tree = await vfs.getTreeStructure(path, {
+ const brain = await getBrainy();
+ const tree = await brain.vfs.getTreeStructure(path, {
  maxDepth: options.depth ? parseInt(options.depth) : 3
  });
  spinner.succeed('Tree built');
@@ -23,7 +23,7 @@ export class ImportHistory {
  */
  async init() {
  try {
- const vfs = this.brain.vfs();
+ const vfs = this.brain.vfs;
  await vfs.init();
  // Try to load existing history
  const content = await vfs.readFile(this.historyFile);
@@ -102,7 +102,7 @@ export class ImportHistory {
  }
  // Delete VFS files
  try {
- const vfs = this.brain.vfs();
+ const vfs = this.brain.vfs;
  await vfs.init();
  for (const vfsPath of entry.vfsPaths) {
  try {
@@ -160,7 +160,7 @@ export class ImportHistory {
  */
  async persist() {
  try {
- const vfs = this.brain.vfs();
+ const vfs = this.brain.vfs;
  await vfs.init();
  // Ensure directory exists
  const dir = this.historyFile.substring(0, this.historyFile.lastIndexOf('/'));
@@ -65,7 +65,7 @@ export declare class VFSStructureGenerator {
  * Initialize the generator
  *
  * CRITICAL: Gets brain's VFS instance and initializes it if needed.
- * This ensures that after import, brain.vfs() returns an initialized instance.
+ * This ensures that after import, brain.vfs returns an initialized instance.
  */
  init(): Promise<void>;
  /**
@@ -15,7 +15,7 @@ import { NounType } from '../types/graphTypes.js';
  export class VFSStructureGenerator {
  constructor(brain) {
  this.brain = brain;
- // CRITICAL FIX: Use brain.vfs() instead of creating separate instance
+ // CRITICAL FIX: Use brain.vfs instead of creating separate instance
  // This ensures VFSStructureGenerator and user code share the same VFS instance
  // Before: Created separate instance that wasn't accessible to users
  // After: Uses brain's cached instance, making VFS queryable after import
@@ -24,11 +24,11 @@ export class VFSStructureGenerator {
  * Initialize the generator
  *
  * CRITICAL: Gets brain's VFS instance and initializes it if needed.
- * This ensures that after import, brain.vfs() returns an initialized instance.
+ * This ensures that after import, brain.vfs returns an initialized instance.
  */
  async init() {
  // Get brain's cached VFS instance (creates if doesn't exist)
- this.vfs = this.brain.vfs();
+ this.vfs = this.brain.vfs;
  // CRITICAL FIX (v4.10.2): Always call vfs.init() explicitly
  // The previous code tried to check if initialized via stat('/') but this was unreliable
  // vfs.init() is idempotent, so calling it multiple times is safe
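
The ImportHistory and VFSStructureGenerator changes lean on two properties stated in the comments above: `brain.vfs` always returns the same cached instance, and `vfs.init()` is idempotent. That makes the defensive pattern below safe even when `brain.init()` has already set the VFS up (paths mirror the JSDoc examples and are illustrative):

```typescript
const vfs = brain.vfs   // cached: import code and user code share this instance
await vfs.init()        // idempotent, so calling it again here is harmless
await vfs.writeFile('/docs/readme.md', 'Hello World')
```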
@@ -37,11 +37,13 @@ export declare class MemoryStorage extends BaseStorage {
  init(): Promise<void>;
  /**
  * Save a noun to storage (v4.0.0: pure vector only, no metadata)
+ * v5.0.1: COW-aware - uses branch-prefixed paths for fork isolation
  */
  protected saveNoun_internal(noun: HNSWNoun): Promise<void>;
  /**
  * Get a noun from storage (v4.0.0: returns pure vector only)
  * Base class handles combining with metadata
+ * v5.0.1: COW-aware - reads from branch-prefixed paths with inheritance
  */
  protected getNoun_internal(id: string): Promise<HNSWNoun | null>;
  /**
@@ -90,15 +92,18 @@ export declare class MemoryStorage extends BaseStorage {
  protected getNounsByNounType_internal(nounType: string): Promise<HNSWNoun[]>;
  /**
  * Delete a noun from storage (v4.0.0)
+ * v5.0.1: COW-aware - deletes from branch-prefixed paths
  */
  protected deleteNoun_internal(id: string): Promise<void>;
  /**
  * Save a verb to storage (v4.0.0: pure vector + core fields, no metadata)
+ * v5.0.1: COW-aware - uses branch-prefixed paths for fork isolation
  */
  protected saveVerb_internal(verb: HNSWVerb): Promise<void>;
  /**
  * Get a verb from storage (v4.0.0: returns pure vector + core fields)
  * Base class handles combining with metadata
+ * v5.0.1: COW-aware - reads from branch-prefixed paths with inheritance
  */
  protected getVerb_internal(id: string): Promise<HNSWVerb | null>;
  /**
@@ -143,6 +148,7 @@ export declare class MemoryStorage extends BaseStorage {
  protected getVerbsByType_internal(type: string): Promise<HNSWVerbWithMetadata[]>;
  /**
  * Delete a verb from storage
+ * v5.0.1: COW-aware - deletes from branch-prefixed paths
  */
  protected deleteVerb_internal(id: string): Promise<void>;
  /**
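
The MemoryStorage declarations above only state that reads and writes became COW-aware via branch-prefixed paths with inheritance; the key layout itself is not part of this diff. As a rough mental model only (the key format and helper names below are invented for illustration):

```typescript
// Illustrative sketch of branch-scoped keys with fallback to a parent branch.
const store = new Map<string, unknown>()

function branchKey(branch: string, id: string): string {
  return `branches/${branch}/nouns/${id}` // hypothetical key format
}

function readNoun(branch: string, id: string, parent = 'main'): unknown {
  // A branch-local value wins; otherwise inherit from the parent branch
  return store.get(branchKey(branch, id)) ?? store.get(branchKey(parent, id))
}

function writeNoun(branch: string, id: string, noun: unknown): void {
  store.set(branchKey(branch, id), noun) // writes never touch the parent branch
}
```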