@soulcraft/brainy 5.0.0 → 5.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +148 -0
- package/README.md +1 -1
- package/dist/augmentations/cacheAugmentation.js +9 -4
- package/dist/brainy.d.ts +87 -15
- package/dist/brainy.js +279 -33
- package/dist/cli/commands/import.js +1 -1
- package/dist/cli/commands/vfs.js +24 -40
- package/dist/import/ImportHistory.js +3 -3
- package/dist/importers/VFSStructureGenerator.d.ts +1 -1
- package/dist/importers/VFSStructureGenerator.js +3 -3
- package/dist/storage/adapters/memoryStorage.d.ts +6 -0
- package/dist/storage/adapters/memoryStorage.js +39 -14
- package/dist/storage/adapters/typeAwareStorageAdapter.d.ts +31 -1
- package/dist/storage/adapters/typeAwareStorageAdapter.js +272 -43
- package/dist/storage/baseStorage.d.ts +44 -1
- package/dist/storage/baseStorage.js +148 -16
- package/dist/storage/cow/BlobStorage.d.ts +1 -0
- package/dist/storage/cow/BlobStorage.js +5 -3
- package/dist/storage/storageFactory.d.ts +1 -2
- package/dist/storage/storageFactory.js +7 -6
- package/dist/types/brainy.types.d.ts +0 -1
- package/dist/vfs/FSCompat.d.ts +1 -1
- package/dist/vfs/FSCompat.js +1 -1
- package/dist/vfs/VirtualFileSystem.js +5 -6
- package/package.json +1 -1
package/dist/brainy.js
CHANGED
@@ -97,6 +97,12 @@ export class Brainy {
             // Setup and initialize storage
             this.storage = await this.setupStorage();
             await this.storage.init();
+            // Enable COW immediately after storage init (v5.0.1)
+            // This ensures ALL data is stored in branch-scoped paths from the start
+            // Lightweight: just sets cowEnabled=true and currentBranch, no RefManager/BlobStorage yet
+            if (typeof this.storage.enableCOWLightweight === 'function') {
+                this.storage.enableCOWLightweight(this.config.storage?.branch || 'main');
+            }
             // Setup index now that we have storage
             this.index = this.setupIndex();
             // Initialize core metadata index
@@ -135,7 +141,13 @@ export class Brainy {
                 this.registerShutdownHooks();
                 Brainy.shutdownHooksRegisteredGlobally = true;
             }
+            // Mark as initialized BEFORE VFS init (v5.0.1)
+            // VFS.init() needs brain to be marked initialized to call brain methods
             this.initialized = true;
+            // Initialize VFS (v5.0.1): Ensure VFS is ready when accessed as property
+            // This eliminates need for separate vfs.init() calls - zero additional complexity
+            this._vfs = new VirtualFileSystem(this);
+            await this._vfs.init();
         }
         catch (error) {
             throw new Error(`Failed to initialize Brainy: ${error}`);
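Taken together, these two hunks make `init()` zero-config for branching and the VFS. A minimal consumer-side sketch of the new flow, assuming the `storage.branch` config option read above and the `brain.vfs` file API shown later in this diff:

```typescript
import { Brainy } from '@soulcraft/brainy'

// Assumed config shape: `storage.branch` mirrors the option init() reads above
const brain = new Brainy({ storage: { branch: 'main' } })

// init() now enables branch-scoped (COW) paths and initializes the VFS,
// so no separate vfs.init() call is needed afterwards
await brain.init()

await brain.vfs.writeFile('/notes/hello.md', '# Hello')
console.log(await brain.vfs.readFile('/notes/hello.md'))
```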
@@ -307,14 +319,16 @@ export class Brainy {
                 ...(params.weight !== undefined && { weight: params.weight }),
                 ...(params.createdBy && { createdBy: params.createdBy })
             };
-            //
+            // v5.0.1: Save metadata FIRST so TypeAwareStorage can cache the type
+            // This prevents the race condition where saveNoun() defaults to 'thing'
+            await this.storage.saveNounMetadata(id, storageMetadata);
+            // Then save vector
             await this.storage.saveNoun({
                 id,
                 vector,
                 connections: new Map(),
                 level: 0
             });
-            await this.storage.saveNounMetadata(id, storageMetadata);
             // v4.8.0: Build entity structure for indexing (NEW - with top-level fields)
             const entityForIndexing = {
                 id,
@@ -497,15 +511,18 @@ export class Brainy {
             if (!existing) {
                 throw new Error(`Entity ${params.id} not found`);
             }
-            // Update vector if data changed
+            // Update vector if data changed OR if type changed (need to re-index with new type)
             let vector = existing.vector;
-
-
+            const newType = params.type || existing.type;
+            if (params.data || params.type) {
+                if (params.data) {
+                    vector = params.vector || (await this.embed(params.data));
+                }
                 // Update in index (remove and re-add since no update method)
                 // Phase 2: pass type for TypeAwareHNSWIndex
                 if (this.index instanceof TypeAwareHNSWIndex) {
                     await this.index.removeItem(params.id, existing.type);
-                    await this.index.addItem({ id: params.id, vector },
+                    await this.index.addItem({ id: params.id, vector }, newType); // v5.1.0: use new type
                 }
                 else {
                     await this.index.removeItem(params.id);
@@ -535,14 +552,18 @@ export class Brainy {
                 ...(params.confidence === undefined && existing.confidence !== undefined && { confidence: existing.confidence }),
                 ...(params.weight === undefined && existing.weight !== undefined && { weight: existing.weight })
             };
-            // v4.0.0: Save
+            // v4.0.0: Save metadata FIRST (v5.1.0 fix: updates type cache for TypeAwareStorage)
+            // v5.1.0: saveNounMetadata must be called before saveNoun so that the type cache
+            // is updated before determining the shard path. Otherwise type changes cause
+            // entities to be saved in the wrong shard and become unfindable.
+            await this.storage.saveNounMetadata(params.id, updatedMetadata);
+            // Then save vector (will use updated type cache)
             await this.storage.saveNoun({
                 id: params.id,
                 vector,
                 connections: new Map(),
                 level: 0
             });
-            await this.storage.saveNounMetadata(params.id, updatedMetadata);
             // v4.8.0: Build entity structure for metadata index (with top-level fields)
             const entityForIndexing = {
                 id: params.id,
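Both `add()` and `update()` now persist metadata before the vector. The sketch below is illustrative only (not the adapter's actual code) and shows why the ordering matters for a type-aware adapter that shards by entity type, as the comments above describe:

```typescript
// Illustrative sketch: a type-aware adapter that shards vectors by entity type.
// saveNounMetadata() fills the type cache; saveNoun() reads it to pick the shard.
class TypeAwareStorageSketch {
  private typeCache = new Map<string, string>()

  async saveNounMetadata(id: string, metadata: { type?: string }): Promise<void> {
    this.typeCache.set(id, metadata.type ?? 'thing')
    // ...persist metadata
  }

  async saveNoun(noun: { id: string }): Promise<void> {
    // Without the metadata-first ordering this falls back to 'thing'
    const type = this.typeCache.get(noun.id) ?? 'thing'
    const shardPath = `nouns/${type}/${noun.id}.json`
    // ...persist vector under shardPath
  }
}
```

Calling `saveNoun()` first would file the vector under the default `'thing'` shard; saving metadata first keeps it in the type-specific shard, which is exactly the unfindable-entity problem the comments describe.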
@@ -1820,11 +1841,22 @@ export class Brainy {
         await this.ensureInitialized();
         return this.augmentationRegistry.execute('fork', { branch, options }, async () => {
             const branchName = branch || `fork-${Date.now()}`;
-            //
-
-
-
-
+            // v5.0.1: Lazy COW initialization - enable automatically on first fork()
+            // This is zero-config and transparent to users
+            if (!('refManager' in this.storage) || !this.storage.refManager) {
+                // Storage supports COW but isn't initialized yet - initialize now
+                if (typeof this.storage.initializeCOW === 'function') {
+                    await this.storage.initializeCOW({
+                        branch: this.config.storage?.branch || 'main',
+                        enableCompression: true
+                    });
+                }
+                else {
+                    // Storage adapter doesn't support COW at all
+                    throw new Error('Fork requires COW-enabled storage. ' +
+                        'This storage adapter does not support branching. ' +
+                        'Please use v5.0.0+ storage adapters.');
+                }
             }
             const refManager = this.storage.refManager;
             const currentBranch = this.storage.currentBranch || 'main';
@@ -1839,9 +1871,11 @@ export class Brainy {
                 }
             };
             const clone = new Brainy(forkConfig);
-            // Step 3:
-            // Share
-            clone.storage = this.storage;
+            // Step 3: Clone storage with separate currentBranch
+            // Share RefManager/BlobStorage/CommitLog but maintain separate branch context
+            clone.storage = Object.create(this.storage);
+            clone.storage.currentBranch = branchName;
+            // isInitialized inherited from prototype
             // Shallow copy HNSW index (INSTANT - just copies Map references)
             clone.index = this.setupIndex();
             // Enable COW (handle both HNSWIndex and TypeAwareHNSWIndex)
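With lazy COW initialization and a per-clone `currentBranch`, forking becomes a one-call operation. A short usage sketch (branch and file names are examples), consistent with the `@example With fork` block later in this diff:

```typescript
await brain.init()
await brain.vfs.writeFile('/config.json', '{"v": 1}')

// The first fork() call initializes COW automatically if the adapter supports it
const fork = await brain.fork('experiment')

// The fork shares RefManager/BlobStorage with the parent but tracks its own branch,
// so its writes stay isolated from 'main'
await fork.vfs.writeFile('/test.txt', 'Fork only')
console.log(await brain.vfs.readFile('/config.json')) // parent branch unchanged
```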
@@ -1891,8 +1925,8 @@ export class Brainy {
             const refs = await refManager.listRefs();
             // Filter to branches only (exclude tags)
             return refs
-                .filter((ref) => ref.startsWith('heads/'))
-                .map((ref) => ref.replace('heads/', ''));
+                .filter((ref) => ref.name.startsWith('refs/heads/'))
+                .map((ref) => ref.name.replace('refs/heads/', ''));
         });
     }
     /**
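The `listBranches()` fix reflects that `listRefs()` yields ref objects whose `name` holds the fully qualified path. A small standalone illustration of the mapping (the `Ref` shape is inferred from the filter/map calls, not a documented type):

```typescript
// Inferred shape: each ref carries a fully qualified name
type Ref = { name: string }

const refs: Ref[] = [
  { name: 'refs/heads/main' },
  { name: 'refs/heads/experiment' },
  { name: 'refs/tags/v5.1.0' } // tags are excluded by the filter
]

const branches = refs
  .filter((ref) => ref.name.startsWith('refs/heads/'))
  .map((ref) => ref.name.replace('refs/heads/', ''))
// ['main', 'experiment']
```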
@@ -2163,6 +2197,208 @@ export class Brainy {
             return { added, modified, deleted, conflicts };
         });
     }
+    /**
+     * Compare differences between two branches (like git diff)
+     * @param sourceBranch - Branch to compare from (defaults to current branch)
+     * @param targetBranch - Branch to compare to (defaults to 'main')
+     * @returns Diff result showing added, modified, and deleted entities/relationships
+     *
+     * @example
+     * ```typescript
+     * // Compare current branch with main
+     * const diff = await brain.diff()
+     *
+     * // Compare two specific branches
+     * const diff = await brain.diff('experiment', 'main')
+     * console.log(diff)
+     * // {
+     * //   entities: { added: 5, modified: 3, deleted: 1 },
+     * //   relationships: { added: 10, modified: 2, deleted: 0 }
+     * // }
+     * ```
+     */
+    async diff(sourceBranch, targetBranch) {
+        await this.ensureInitialized();
+        return this.augmentationRegistry.execute('diff', { sourceBranch, targetBranch }, async () => {
+            // Default branches
+            const source = sourceBranch || (await this.getCurrentBranch());
+            const target = targetBranch || 'main';
+            const currentBranch = await this.getCurrentBranch();
+            // If source is current branch, use this instance directly (no fork needed)
+            let sourceFork;
+            let sourceForkCreated = false;
+            if (source === currentBranch) {
+                sourceFork = this;
+            }
+            else {
+                sourceFork = await this.fork(`temp-diff-source-${Date.now()}`);
+                sourceForkCreated = true;
+                try {
+                    await sourceFork.checkout(source);
+                }
+                catch (err) {
+                    // If checkout fails, branch may not exist - just use current state
+                }
+            }
+            // If target is current branch, use this instance directly (no fork needed)
+            let targetFork;
+            let targetForkCreated = false;
+            if (target === currentBranch) {
+                targetFork = this;
+            }
+            else {
+                targetFork = await this.fork(`temp-diff-target-${Date.now()}`);
+                targetForkCreated = true;
+                try {
+                    await targetFork.checkout(target);
+                }
+                catch (err) {
+                    // If checkout fails, branch may not exist - just use current state
+                }
+            }
+            try {
+                // Get all entities from both branches
+                const sourceResults = await sourceFork.find({});
+                const targetResults = await targetFork.find({});
+                // Create maps for lookup
+                const sourceMap = new Map(sourceResults.map(r => [r.entity.id, r.entity]));
+                const targetMap = new Map(targetResults.map(r => [r.entity.id, r.entity]));
+                // Track differences
+                const entitiesAdded = [];
+                const entitiesModified = [];
+                const entitiesDeleted = [];
+                // Find added and modified entities
+                for (const [id, sourceEntity] of sourceMap.entries()) {
+                    const targetEntity = targetMap.get(id);
+                    if (!targetEntity) {
+                        // Entity exists in source but not target = ADDED
+                        entitiesAdded.push({
+                            id: sourceEntity.id,
+                            type: sourceEntity.type,
+                            data: sourceEntity.data
+                        });
+                    }
+                    else {
+                        // Entity exists in both - check for modifications
+                        const changes = [];
+                        if (sourceEntity.data !== targetEntity.data) {
+                            changes.push('data');
+                        }
+                        if ((sourceEntity.updatedAt || 0) !== (targetEntity.updatedAt || 0)) {
+                            changes.push('updatedAt');
+                        }
+                        if (changes.length > 0) {
+                            entitiesModified.push({
+                                id: sourceEntity.id,
+                                type: sourceEntity.type,
+                                changes
+                            });
+                        }
+                    }
+                }
+                // Find deleted entities (in target but not in source)
+                for (const [id, targetEntity] of targetMap.entries()) {
+                    if (!sourceMap.has(id)) {
+                        entitiesDeleted.push({
+                            id: targetEntity.id,
+                            type: targetEntity.type
+                        });
+                    }
+                }
+                // Compare relationships
+                const sourceVerbsResult = await sourceFork.storage.getVerbs({});
+                const targetVerbsResult = await targetFork.storage.getVerbs({});
+                const sourceVerbs = sourceVerbsResult.items || [];
+                const targetVerbs = targetVerbsResult.items || [];
+                const sourceRelMap = new Map(sourceVerbs.map((v) => [`${v.sourceId}-${v.verb}-${v.targetId}`, v]));
+                const targetRelMap = new Map(targetVerbs.map((v) => [`${v.sourceId}-${v.verb}-${v.targetId}`, v]));
+                const relationshipsAdded = [];
+                const relationshipsModified = [];
+                const relationshipsDeleted = [];
+                // Find added and modified relationships
+                for (const [key, sourceVerb] of sourceRelMap.entries()) {
+                    const targetVerb = targetRelMap.get(key);
+                    if (!targetVerb) {
+                        // Relationship exists in source but not target = ADDED
+                        relationshipsAdded.push({
+                            from: sourceVerb.sourceId,
+                            to: sourceVerb.targetId,
+                            type: sourceVerb.verb
+                        });
+                    }
+                    else {
+                        // Relationship exists in both - check for modifications
+                        const changes = [];
+                        if ((sourceVerb.weight || 0) !== (targetVerb.weight || 0)) {
+                            changes.push('weight');
+                        }
+                        if (JSON.stringify(sourceVerb.metadata) !== JSON.stringify(targetVerb.metadata)) {
+                            changes.push('metadata');
+                        }
+                        if (changes.length > 0) {
+                            relationshipsModified.push({
+                                from: sourceVerb.sourceId,
+                                to: sourceVerb.targetId,
+                                type: sourceVerb.verb,
+                                changes
+                            });
+                        }
+                    }
+                }
+                // Find deleted relationships
+                for (const [key, targetVerb] of targetRelMap.entries()) {
+                    if (!sourceRelMap.has(key)) {
+                        relationshipsDeleted.push({
+                            from: targetVerb.sourceId,
+                            to: targetVerb.targetId,
+                            type: targetVerb.verb
+                        });
+                    }
+                }
+                return {
+                    entities: {
+                        added: entitiesAdded,
+                        modified: entitiesModified,
+                        deleted: entitiesDeleted
+                    },
+                    relationships: {
+                        added: relationshipsAdded,
+                        modified: relationshipsModified,
+                        deleted: relationshipsDeleted
+                    },
+                    summary: {
+                        entitiesAdded: entitiesAdded.length,
+                        entitiesModified: entitiesModified.length,
+                        entitiesDeleted: entitiesDeleted.length,
+                        relationshipsAdded: relationshipsAdded.length,
+                        relationshipsModified: relationshipsModified.length,
+                        relationshipsDeleted: relationshipsDeleted.length
+                    }
+                };
+            }
+            finally {
+                // Clean up temporary forks (only if we created them)
+                try {
+                    const branches = await this.listBranches();
+                    if (sourceForkCreated && sourceFork !== this) {
+                        const sourceBranchName = await sourceFork.getCurrentBranch();
+                        if (branches.includes(sourceBranchName)) {
+                            await this.deleteBranch(sourceBranchName);
+                        }
+                    }
+                    if (targetForkCreated && targetFork !== this) {
+                        const targetBranchName = await targetFork.getCurrentBranch();
+                        if (branches.includes(targetBranchName)) {
+                            await this.deleteBranch(targetBranchName);
+                        }
+                    }
+                }
+                catch (err) {
+                    // Ignore cleanup errors
+                }
+            }
+        });
+    }
     /**
      * Delete a branch/fork
      * @param branch - Branch name to delete
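Beyond the JSDoc example, the return value carries both the detail arrays and the `summary` counts, so a caller can build a report directly from the fields in the `return` statement above:

```typescript
const result = await brain.diff('experiment', 'main')

console.log(
  `entities: +${result.summary.entitiesAdded} ~${result.summary.entitiesModified} -${result.summary.entitiesDeleted}`
)
for (const e of result.entities.modified) {
  console.log(`modified ${e.type}:${e.id} (${e.changes.join(', ')})`)
}
for (const r of result.relationships.added) {
  console.log(`new ${r.type}: ${r.from} -> ${r.to}`)
}
```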
@@ -2422,36 +2658,46 @@ export class Brainy {
         return await coordinator.import(source, options);
     }
     /**
-     * Virtual File System API - Knowledge Operating System
+     * Virtual File System API - Knowledge Operating System (v5.0.1+)
      *
-     * Returns a cached VFS instance
+     * Returns a cached VFS instance that is auto-initialized during brain.init().
+     * No separate initialization needed!
      *
      * @example After import
      * ```typescript
      * await brain.import('./data.xlsx', { vfsPath: '/imports/data' })
-     *
-     * const
-     * await vfs.init() // Required! (safe to call multiple times)
-     * const files = await vfs.readdir('/imports/data')
+     * // VFS ready immediately - no init() call needed!
+     * const files = await brain.vfs.readdir('/imports/data')
      * ```
      *
      * @example Direct VFS usage
      * ```typescript
-     *
-     * await vfs.
-     * await vfs.
-     * const content = await vfs.readFile('/docs/readme.md')
+     * await brain.init() // VFS auto-initialized here!
+     * await brain.vfs.writeFile('/docs/readme.md', 'Hello World')
+     * const content = await brain.vfs.readFile('/docs/readme.md')
      * ```
      *
-     *
-     *
-     *
+     * @example With fork (COW isolation)
+     * ```typescript
+     * await brain.init()
+     * await brain.vfs.writeFile('/config.json', '{"v": 1}')
      *
-     *
+     * const fork = await brain.fork('experiment')
+     * // Fork inherits parent's files
+     * const config = await fork.vfs.readFile('/config.json')
+     * // Fork modifications are isolated
+     * await fork.vfs.writeFile('/test.txt', 'Fork only')
+     * ```
+     *
+     * **Pattern:** The VFS instance is cached, so multiple calls to brain.vfs
      * return the same instance. This ensures import and user code share state.
+     *
+     * @since v5.0.1 - Auto-initialization during brain.init()
      */
-    vfs() {
+    get vfs() {
         if (!this._vfs) {
+            // VFS is initialized during brain.init() (v5.0.1)
+            // If not initialized yet, create instance but user should call brain.init() first
             this._vfs = new VirtualFileSystem(this);
         }
         return this._vfs;
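Because `vfs` changes from a method to a getter, existing call sites drop both the parentheses and the explicit init. A before/after sketch based on the removed and added lines above:

```typescript
// Before (v5.0.0): method call plus explicit init
// const vfs = brain.vfs()
// await vfs.init()
// const files = await vfs.readdir('/imports/data')

// After (v5.0.1+): property access, initialized by brain.init()
await brain.init()
const files = await brain.vfs.readdir('/imports/data')
```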

package/dist/cli/commands/import.js
CHANGED
@@ -416,7 +416,7 @@ export const importCommands = {
         spinner = ora('Initializing VFS import...').start();
         const brain = getBrainy();
         // Get VFS
-        const vfs = await brain.vfs
+        const vfs = await brain.vfs;
         // Load DirectoryImporter
         const { DirectoryImporter } = await import('../../vfs/importers/DirectoryImporter.js');
         const importer = new DirectoryImporter(vfs, brain);
package/dist/cli/commands/vfs.js
CHANGED
@@ -9,9 +9,10 @@ import Table from 'cli-table3';
 import { readFileSync, writeFileSync } from 'node:fs';
 import { Brainy } from '../../brainy.js';
 let brainyInstance = null;
-const getBrainy = () => {
+const getBrainy = async () => {
     if (!brainyInstance) {
         brainyInstance = new Brainy();
+        await brainyInstance.init(); // v5.0.1: Initialize brain (VFS auto-initialized here!)
     }
     return brainyInstance;
 };
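`getBrainy()` becomes an async lazy singleton so every CLI subcommand shares one fully initialized instance. A self-contained restatement of the pattern (TypeScript, standalone rather than the CLI's exact code):

```typescript
import { Brainy } from '@soulcraft/brainy'

let instance: Brainy | null = null

const getBrainy = async (): Promise<Brainy> => {
  if (!instance) {
    instance = new Brainy()
    await instance.init() // also brings up the VFS as of v5.0.1
  }
  return instance
}

// Each command handler awaits the shared, initialized instance:
const brain = await getBrainy()
const entries = await brain.vfs.readdir('/')
```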
@@ -38,10 +39,9 @@ export const vfsCommands = {
     async read(path, options) {
         const spinner = ora('Reading file...').start();
         try {
-            const brain = getBrainy();
-
-            await vfs.
-            const buffer = await vfs.readFile(path, {
+            const brain = await getBrainy(); // v5.0.1: Await async getBrainy
+            // v5.0.1: VFS auto-initialized, no need for vfs.init()
+            const buffer = await brain.vfs.readFile(path, {
                 encoding: options.encoding
             });
             spinner.succeed('File read successfully');
@@ -70,9 +70,7 @@ export const vfsCommands = {
     async write(path, options) {
         const spinner = ora('Writing file...').start();
         try {
-            const brain = getBrainy();
-            const vfs = brain.vfs();
-            await vfs.init();
+            const brain = await getBrainy();
             let data;
             if (options.file) {
                 // Read from local file
@@ -85,7 +83,7 @@ export const vfsCommands = {
                 spinner.fail('Must provide --content or --file');
                 process.exit(1);
             }
-            await vfs.writeFile(path, data, {
+            await brain.vfs.writeFile(path, data, {
                 encoding: options.encoding
             });
             spinner.succeed('File written successfully');
@@ -109,10 +107,8 @@ export const vfsCommands = {
     async ls(path, options) {
         const spinner = ora('Listing directory...').start();
         try {
-            const brain = getBrainy();
-            const
-            await vfs.init();
-            const entries = await vfs.readdir(path, { withFileTypes: true });
+            const brain = await getBrainy();
+            const entries = await brain.vfs.readdir(path, { withFileTypes: true });
             spinner.succeed(`Found ${Array.isArray(entries) ? entries.length : 0} items`);
             if (!options.json) {
                 if (!Array.isArray(entries) || entries.length === 0) {
@@ -128,7 +124,7 @@ export const vfsCommands = {
                 for (const entry of entries) {
                     if (!options.all && entry.name.startsWith('.'))
                         continue;
-                    const stat = await vfs.stat(`${path}/${entry.name}`);
+                    const stat = await brain.vfs.stat(`${path}/${entry.name}`);
                     table.push([
                         entry.isDirectory() ? chalk.blue('DIR') : 'FILE',
                         entry.isDirectory() ? '-' : formatBytes(stat.size),
@@ -169,10 +165,8 @@ export const vfsCommands = {
     async stat(path, options) {
         const spinner = ora('Getting file stats...').start();
         try {
-            const brain = getBrainy();
-            const
-            await vfs.init();
-            const stats = await vfs.stat(path);
+            const brain = await getBrainy();
+            const stats = await brain.vfs.stat(path);
             spinner.succeed('Stats retrieved');
             if (!options.json) {
                 console.log(chalk.cyan('\nFile Statistics:'));
@@ -199,10 +193,8 @@ export const vfsCommands = {
     async mkdir(path, options) {
         const spinner = ora('Creating directory...').start();
         try {
-            const brain = getBrainy();
-
-            await vfs.init();
-            await vfs.mkdir(path, { recursive: options.parents });
+            const brain = await getBrainy();
+            await brain.vfs.mkdir(path, { recursive: options.parents });
             spinner.succeed('Directory created');
             if (!options.json) {
                 console.log(chalk.green(`✓ Created: ${path}`));
@@ -223,15 +215,13 @@ export const vfsCommands = {
     async rm(path, options) {
         const spinner = ora('Removing...').start();
         try {
-            const brain = getBrainy();
-            const
-            await vfs.init();
-            const stats = await vfs.stat(path);
+            const brain = await getBrainy();
+            const stats = await brain.vfs.stat(path);
             if (stats.isDirectory()) {
-                await vfs.rmdir(path, { recursive: options.recursive });
+                await brain.vfs.rmdir(path, { recursive: options.recursive });
             }
             else {
-                await vfs.unlink(path);
+                await brain.vfs.unlink(path);
             }
             spinner.succeed('Removed successfully');
             if (!options.json) {
@@ -255,10 +245,8 @@ export const vfsCommands = {
     async search(query, options) {
         const spinner = ora('Searching files...').start();
         try {
-            const brain = getBrainy();
-            const
-            await vfs.init();
-            const results = await vfs.search(query, {
+            const brain = await getBrainy();
+            const results = await brain.vfs.search(query, {
                 path: options.path,
                 limit: options.limit ? parseInt(options.limit) : 10
             });
@@ -297,10 +285,8 @@ export const vfsCommands = {
     async similar(path, options) {
         const spinner = ora('Finding similar files...').start();
         try {
-            const brain = getBrainy();
-            const
-            await vfs.init();
-            const results = await vfs.findSimilar(path, {
+            const brain = await getBrainy();
+            const results = await brain.vfs.findSimilar(path, {
                 limit: options.limit ? parseInt(options.limit) : 10,
                 threshold: options.threshold ? parseFloat(options.threshold) : 0.7
             });
@@ -336,10 +322,8 @@ export const vfsCommands = {
     async tree(path, options) {
         const spinner = ora('Building tree...').start();
         try {
-            const brain = getBrainy();
-            const
-            await vfs.init();
-            const tree = await vfs.getTreeStructure(path, {
+            const brain = await getBrainy();
+            const tree = await brain.vfs.getTreeStructure(path, {
                 maxDepth: options.depth ? parseInt(options.depth) : 3
             });
             spinner.succeed('Tree built');

package/dist/import/ImportHistory.js
CHANGED
@@ -23,7 +23,7 @@ export class ImportHistory {
      */
     async init() {
         try {
-            const vfs = this.brain.vfs
+            const vfs = this.brain.vfs;
             await vfs.init();
             // Try to load existing history
             const content = await vfs.readFile(this.historyFile);
@@ -102,7 +102,7 @@ export class ImportHistory {
         }
         // Delete VFS files
         try {
-            const vfs = this.brain.vfs
+            const vfs = this.brain.vfs;
             await vfs.init();
             for (const vfsPath of entry.vfsPaths) {
                 try {
@@ -160,7 +160,7 @@ export class ImportHistory {
      */
     async persist() {
         try {
-            const vfs = this.brain.vfs
+            const vfs = this.brain.vfs;
             await vfs.init();
             // Ensure directory exists
             const dir = this.historyFile.substring(0, this.historyFile.lastIndexOf('/'));

package/dist/importers/VFSStructureGenerator.d.ts
CHANGED
@@ -65,7 +65,7 @@ export declare class VFSStructureGenerator {
      * Initialize the generator
      *
      * CRITICAL: Gets brain's VFS instance and initializes it if needed.
-     * This ensures that after import, brain.vfs
+     * This ensures that after import, brain.vfs returns an initialized instance.
      */
     init(): Promise<void>;
     /**

package/dist/importers/VFSStructureGenerator.js
CHANGED
@@ -15,7 +15,7 @@ import { NounType } from '../types/graphTypes.js';
 export class VFSStructureGenerator {
     constructor(brain) {
         this.brain = brain;
-        // CRITICAL FIX: Use brain.vfs
+        // CRITICAL FIX: Use brain.vfs instead of creating separate instance
         // This ensures VFSStructureGenerator and user code share the same VFS instance
         // Before: Created separate instance that wasn't accessible to users
         // After: Uses brain's cached instance, making VFS queryable after import
@@ -24,11 +24,11 @@ export class VFSStructureGenerator {
      * Initialize the generator
      *
      * CRITICAL: Gets brain's VFS instance and initializes it if needed.
-     * This ensures that after import, brain.vfs
+     * This ensures that after import, brain.vfs returns an initialized instance.
      */
     async init() {
         // Get brain's cached VFS instance (creates if doesn't exist)
-        this.vfs = this.brain.vfs
+        this.vfs = this.brain.vfs;
         // CRITICAL FIX (v4.10.2): Always call vfs.init() explicitly
         // The previous code tried to check if initialized via stat('/') but this was unreliable
         // vfs.init() is idempotent, so calling it multiple times is safe