@soulcraft/brainy 4.7.1 → 4.7.3

This diff shows the published contents of these package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,11 @@
 
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+### [4.7.3](https://github.com/soulcraftlabs/brainy/compare/v4.7.2...v4.7.3) (2025-10-27)
+
+- fix(storage): CRITICAL - preserve vectors when updating HNSW connections (v4.7.3) (46e7482)
+
+
 ### [4.4.0](https://github.com/soulcraftlabs/brainy/compare/v4.3.2...v4.4.0) (2025-10-24)
 
 - docs: update CHANGELOG for v4.4.0 release (a3c8a28)
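The hunks below apply the 4.7.3 fix uniformly across the compiled storage adapters (`AzureBlobStorage`, `FileSystemStorage`, `GcsStorage`, `OPFSStorage`, `R2Storage`, `S3CompatibleStorage`). In 4.7.1, each adapter's `saveHNSWData` wrote the graph metadata (`level`, `connections`) over the entire node file, destroying the stored vector; the fix replaces that blind overwrite with a read-merge-write. A minimal sketch of the shared pattern, with hypothetical `readJson`/`writeJson` helpers and an `HNSWNodeRecord` type standing in for each adapter's own I/O and types (none of these names are part of the package's API):

```ts
// Illustrative shape of a stored node; field types are assumptions.
interface HNSWNodeRecord {
  id: string;
  vector: number[];
  level: number;
  connections: Record<number, string[]>;
}

async function saveHNSWData(
  key: string,
  hnswData: Pick<HNSWNodeRecord, 'level' | 'connections'>,
  readJson: (key: string) => Promise<HNSWNodeRecord | null>,
  writeJson: (key: string, value: unknown) => Promise<void>
): Promise<void> {
  const existingNode = await readJson(key); // null when the node is new
  if (existingNode) {
    // Preserve id and vector; update only the HNSW graph metadata.
    await writeJson(key, {
      ...existingNode,
      level: hnswData.level,
      connections: hnswData.connections
    });
  } else {
    // First write for this node: nothing to preserve yet.
    await writeJson(key, hnswData);
  }
}
```

The adapters differ mainly in how a missing node surfaces: `ENOENT` on the filesystem, `statusCode === 404` or `BlobNotFound` on Azure, `code === 404` on GCS, and `NoSuchKey` on S3, while the R2 and OPFS variants fall back to a plain write on any read failure.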
@@ -1275,13 +1275,38 @@ export class AzureBlobStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
             const shard = getShardIdFromUuid(nounId);
             const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
             const blockBlobClient = this.containerClient.getBlockBlobClient(key);
-            const content = JSON.stringify(hnswData, null, 2);
-            await blockBlobClient.upload(content, content.length, {
-                blobHTTPHeaders: { blobContentType: 'application/json' }
-            });
+            try {
+                // Read existing node data
+                const downloadResponse = await blockBlobClient.download(0);
+                const existingData = await this.streamToBuffer(downloadResponse.readableStreamBody);
+                const existingNode = JSON.parse(existingData.toString());
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                const content = JSON.stringify(updatedNode, null, 2);
+                await blockBlobClient.upload(content, content.length, {
+                    blobHTTPHeaders: { blobContentType: 'application/json' }
+                });
+            }
+            catch (error) {
+                // If node doesn't exist yet, create it with just HNSW data
+                if (error.statusCode === 404 || error.code === 'BlobNotFound') {
+                    const content = JSON.stringify(hnswData, null, 2);
+                    await blockBlobClient.upload(content, content.length, {
+                        blobHTTPHeaders: { blobContentType: 'application/json' }
+                    });
+                }
+                else {
+                    throw error;
+                }
+            }
         }
         catch (error) {
             this.logger.error(`Failed to save HNSW data for ${nounId}:`, error);
@@ -24,7 +24,6 @@ export declare class FileSystemStorage extends BaseStorage {
     private indexDir;
     private systemDir;
     private lockDir;
-    private useDualWrite;
     private activeLocks;
     private lockTimers;
     private allTimers;
@@ -262,17 +261,9 @@ export declare class FileSystemStorage extends BaseStorage {
      */
     protected getStatisticsData(): Promise<StatisticsData | null>;
     /**
-     * Save statistics with backward compatibility (dual write)
+     * Save statistics to storage
      */
     private saveStatisticsWithBackwardCompat;
-    /**
-     * Get statistics with backward compatibility (dual read)
-     */
-    private getStatisticsWithBackwardCompat;
-    /**
-     * Merge statistics from multiple sources
-     */
-    private mergeStatistics;
     /**
      * Initialize counts from filesystem storage
      */
@@ -2,8 +2,7 @@
  * File System Storage Adapter
  * File system storage adapter for Node.js environments
  */
-import { BaseStorage, NOUNS_DIR, VERBS_DIR, METADATA_DIR, NOUN_METADATA_DIR, VERB_METADATA_DIR, INDEX_DIR, SYSTEM_DIR, STATISTICS_KEY } from '../baseStorage.js';
-import { StorageCompatibilityLayer } from '../backwardCompatibility.js';
+import { BaseStorage, SYSTEM_DIR, STATISTICS_KEY } from '../baseStorage.js';
 // Node.js modules - dynamically imported to avoid issues in browser environments
 let fs;
 let path;
@@ -50,7 +49,6 @@ export class FileSystemStorage extends BaseStorage {
         this.SHARDING_DEPTH = 1;
         this.MAX_SHARDS = 256; // Hex range: 00-ff
         this.cachedShardingDepth = this.SHARDING_DEPTH; // Always use fixed depth
-        this.useDualWrite = true; // Write to both locations during migration
         this.activeLocks = new Set();
         this.lockTimers = new Map(); // Track timers for cleanup
         this.allTimers = new Set(); // Track all timers for cleanup
@@ -89,13 +87,14 @@ export class FileSystemStorage extends BaseStorage {
         }
         try {
             // Initialize directory paths now that path module is loaded
-            this.nounsDir = path.join(this.rootDir, NOUNS_DIR);
-            this.verbsDir = path.join(this.rootDir, VERBS_DIR);
-            this.metadataDir = path.join(this.rootDir, METADATA_DIR);
-            this.nounMetadataDir = path.join(this.rootDir, NOUN_METADATA_DIR);
-            this.verbMetadataDir = path.join(this.rootDir, VERB_METADATA_DIR);
-            this.indexDir = path.join(this.rootDir, INDEX_DIR); // Legacy
-            this.systemDir = path.join(this.rootDir, SYSTEM_DIR); // New
+            // Clean directory structure (v4.7.2+)
+            this.nounsDir = path.join(this.rootDir, 'entities/nouns/hnsw');
+            this.verbsDir = path.join(this.rootDir, 'entities/verbs/hnsw');
+            this.metadataDir = path.join(this.rootDir, 'entities/nouns/metadata'); // Legacy reference
+            this.nounMetadataDir = path.join(this.rootDir, 'entities/nouns/metadata');
+            this.verbMetadataDir = path.join(this.rootDir, 'entities/verbs/metadata');
+            this.indexDir = path.join(this.rootDir, 'indexes');
+            this.systemDir = path.join(this.rootDir, SYSTEM_DIR);
             this.lockDir = path.join(this.rootDir, 'locks');
             // Create the root directory if it doesn't exist
             await this.ensureDirectoryExists(this.rootDir);
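Together with the `locks` path above, this initialization yields the clean v4.7.2+ layout under `rootDir`. A reference sketch assembled from the literal paths in this hunk (the descriptions are inferred from this diff, not from package documentation):

```ts
// Reference sketch of the v4.7.2+ clean layout, relative to rootDir.
const CLEAN_LAYOUT = {
  'entities/nouns/hnsw': 'HNSW node files (id, vector, level, connections), sharded by UUID prefix',
  'entities/nouns/metadata': 'noun metadata (metadataDir and nounMetadataDir both point here)',
  'entities/verbs/hnsw': 'HNSW data for verbs',
  'entities/verbs/metadata': 'verb metadata',
  'indexes': 'index data (replaces the legacy "index" directory)',
  '_system': 'system data such as _system/statistics.json',
  'locks': 'lock files'
};
```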
@@ -1351,7 +1350,7 @@ export class FileSystemStorage extends BaseStorage {
         }
         try {
             // Get existing statistics to merge with new data
-            const existingStats = await this.getStatisticsWithBackwardCompat();
+            const existingStats = await this.getStatisticsData();
             if (existingStats) {
                 // Merge statistics data
                 const mergedStats = {
@@ -1386,122 +1385,25 @@
      * Get statistics data from storage
      */
     async getStatisticsData() {
-        return this.getStatisticsWithBackwardCompat();
-    }
-    /**
-     * Save statistics with backward compatibility (dual write)
-     */
-    async saveStatisticsWithBackwardCompat(statistics) {
-        // Always write to new location
-        const newPath = path.join(this.systemDir, `${STATISTICS_KEY}.json`);
-        await this.ensureDirectoryExists(this.systemDir);
-        await fs.promises.writeFile(newPath, JSON.stringify(statistics, null, 2));
-        // During migration period, also write to old location if it exists
-        if (this.useDualWrite && await this.directoryExists(this.indexDir)) {
-            const oldPath = path.join(this.indexDir, `${STATISTICS_KEY}.json`);
-            try {
-                await fs.promises.writeFile(oldPath, JSON.stringify(statistics, null, 2));
-            }
-            catch (error) {
-                // Log but don't fail if old location write fails
-                StorageCompatibilityLayer.logMigrationEvent('Failed to write to legacy location', { path: oldPath, error });
-            }
-        }
-    }
-    /**
-     * Get statistics with backward compatibility (dual read)
-     */
-    async getStatisticsWithBackwardCompat() {
-        let newStats = null;
-        let oldStats = null;
-        // Try to read from new location first
         try {
-            const newPath = path.join(this.systemDir, `${STATISTICS_KEY}.json`);
-            const data = await fs.promises.readFile(newPath, 'utf-8');
-            newStats = JSON.parse(data);
+            const statsPath = path.join(this.systemDir, `${STATISTICS_KEY}.json`);
+            const data = await fs.promises.readFile(statsPath, 'utf-8');
+            return JSON.parse(data);
         }
         catch (error) {
             if (error.code !== 'ENOENT') {
-                console.error('Error reading statistics from new location:', error);
-            }
-        }
-        // Try to read from old location as fallback
-        if (!newStats && await this.directoryExists(this.indexDir)) {
-            try {
-                const oldPath = path.join(this.indexDir, `${STATISTICS_KEY}.json`);
-                const data = await fs.promises.readFile(oldPath, 'utf-8');
-                oldStats = JSON.parse(data);
-                // If we found data in old location but not new, migrate it
-                if (oldStats && !newStats) {
-                    StorageCompatibilityLayer.logMigrationEvent('Migrating statistics from legacy location');
-                    await this.saveStatisticsWithBackwardCompat(oldStats);
-                }
-            }
-            catch (error) {
-                if (error.code !== 'ENOENT') {
-                    console.error('Error reading statistics from old location:', error);
-                }
+                console.error('Error reading statistics:', error);
             }
+            return null;
         }
-        // Merge statistics from both locations
-        return this.mergeStatistics(newStats, oldStats);
     }
     /**
-     * Merge statistics from multiple sources
+     * Save statistics to storage
      */
-    mergeStatistics(storageStats, localStats) {
-        // Handle null cases
-        if (!storageStats && !localStats) {
-            // CRITICAL FIX (v3.37.4): Statistics files don't exist yet (first init)
-            // Return minimal stats with counts instead of zeros
-            // This prevents HNSW from seeing entityCount=0 during index rebuild
-            return {
-                nounCount: {},
-                verbCount: {},
-                metadataCount: {},
-                hnswIndexSize: 0,
-                totalNodes: this.totalNounCount,
-                totalEdges: this.totalVerbCount,
-                totalMetadata: 0,
-                lastUpdated: new Date().toISOString()
-            };
-        }
-        if (!storageStats)
-            return localStats;
-        if (!localStats)
-            return storageStats;
-        // Merge noun counts by taking the maximum of each type
-        const mergedNounCount = {
-            ...storageStats.nounCount
-        };
-        for (const [type, count] of Object.entries(localStats.nounCount)) {
-            mergedNounCount[type] = Math.max(mergedNounCount[type] || 0, count);
-        }
-        // Merge verb counts by taking the maximum of each type
-        const mergedVerbCount = {
-            ...storageStats.verbCount
-        };
-        for (const [type, count] of Object.entries(localStats.verbCount)) {
-            mergedVerbCount[type] = Math.max(mergedVerbCount[type] || 0, count);
-        }
-        // Merge metadata counts by taking the maximum of each type
-        const mergedMetadataCount = {
-            ...storageStats.metadataCount
-        };
-        for (const [type, count] of Object.entries(localStats.metadataCount)) {
-            mergedMetadataCount[type] = Math.max(mergedMetadataCount[type] || 0, count);
-        }
-        return {
-            nounCount: mergedNounCount,
-            verbCount: mergedVerbCount,
-            metadataCount: mergedMetadataCount,
-            hnswIndexSize: Math.max(storageStats.hnswIndexSize || 0, localStats.hnswIndexSize || 0),
-            totalNodes: Math.max(storageStats.totalNodes || 0, localStats.totalNodes || 0),
-            totalEdges: Math.max(storageStats.totalEdges || 0, localStats.totalEdges || 0),
-            totalMetadata: Math.max(storageStats.totalMetadata || 0, localStats.totalMetadata || 0),
-            operations: storageStats.operations || localStats.operations,
-            lastUpdated: new Date().toISOString()
-        };
+    async saveStatisticsWithBackwardCompat(statistics) {
+        const statsPath = path.join(this.systemDir, `${STATISTICS_KEY}.json`);
+        await this.ensureDirectoryExists(this.systemDir);
+        await fs.promises.writeFile(statsPath, JSON.stringify(statistics, null, 2));
     }
     // =============================================
     // Count Management for O(1) Scalability
@@ -2254,12 +2156,34 @@ export class FileSystemStorage extends BaseStorage {
      */
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
-        // Use sharded path for HNSW data
-        const shard = nounId.substring(0, 2).toLowerCase();
-        const hnswDir = path.join(this.rootDir, 'entities', 'nouns', 'hnsw', shard);
-        await this.ensureDirectoryExists(hnswDir);
-        const filePath = path.join(hnswDir, `${nounId}.json`);
-        await fs.promises.writeFile(filePath, JSON.stringify(hnswData, null, 2));
+        // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
+        // Previous implementation overwrote the entire file, destroying vector data
+        // Now we READ the existing node, UPDATE only connections/level, then WRITE back the complete node
+        const filePath = this.getNodePath(nounId);
+        try {
+            // Read existing node data
+            const existingData = await fs.promises.readFile(filePath, 'utf-8');
+            const existingNode = JSON.parse(existingData);
+            // Preserve id and vector, update only HNSW graph metadata
+            const updatedNode = {
+                ...existingNode, // Preserve all existing fields (id, vector, etc.)
+                level: hnswData.level,
+                connections: hnswData.connections
+            };
+            // Write back the COMPLETE node with updated HNSW data
+            await fs.promises.writeFile(filePath, JSON.stringify(updatedNode, null, 2));
+        }
+        catch (error) {
+            // If node doesn't exist yet, create it with just HNSW data
+            // This should only happen during initial node creation
+            if (error.code === 'ENOENT') {
+                await this.ensureDirectoryExists(path.dirname(filePath));
+                await fs.promises.writeFile(filePath, JSON.stringify(hnswData, null, 2));
+            }
+            else {
+                throw error;
+            }
+        }
     }
     /**
      * Get HNSW graph data for a noun
@@ -1469,14 +1469,41 @@ export class GcsStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
-            // Use sharded path for HNSW data
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
+            // Previous implementation overwrote the entire file, destroying vector data
+            // Now we READ the existing node, UPDATE only connections/level, then WRITE back the complete node
             const shard = getShardIdFromUuid(nounId);
             const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
             const file = this.bucket.file(key);
-            await file.save(JSON.stringify(hnswData, null, 2), {
-                contentType: 'application/json',
-                resumable: false
-            });
+            try {
+                // Read existing node data
+                const [existingData] = await file.download();
+                const existingNode = JSON.parse(existingData.toString());
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode, // Preserve all existing fields (id, vector, etc.)
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                // Write back the COMPLETE node with updated HNSW data
+                await file.save(JSON.stringify(updatedNode, null, 2), {
+                    contentType: 'application/json',
+                    resumable: false
+                });
+            }
+            catch (error) {
+                // If node doesn't exist yet, create it with just HNSW data
+                // This should only happen during initial node creation
+                if (error.code === 404) {
+                    await file.save(JSON.stringify(hnswData, null, 2), {
+                        contentType: 'application/json',
+                        resumable: false
+                    });
+                }
+                else {
+                    throw error;
+                }
+            }
         }
         catch (error) {
             this.logger.error(`Failed to save HNSW data for ${nounId}:`, error);
@@ -1693,17 +1693,32 @@ export class OPFSStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
-            // Get or create the hnsw directory under nouns
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
            const hnswDir = await this.nounsDir.getDirectoryHandle('hnsw', { create: true });
-            // Use sharded path for HNSW data
            const shard = getShardIdFromUuid(nounId);
            const shardDir = await hnswDir.getDirectoryHandle(shard, { create: true });
-            // Create or get the file in the shard directory
            const fileHandle = await shardDir.getFileHandle(`${nounId}.json`, { create: true });
-            // Write the HNSW data to the file
-            const writable = await fileHandle.createWritable();
-            await writable.write(JSON.stringify(hnswData, null, 2));
-            await writable.close();
+            try {
+                // Read existing node data
+                const file = await fileHandle.getFile();
+                const existingData = await file.text();
+                const existingNode = JSON.parse(existingData);
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                const writable = await fileHandle.createWritable();
+                await writable.write(JSON.stringify(updatedNode, null, 2));
+                await writable.close();
+            }
+            catch (error) {
+                // If node doesn't exist or read fails, create with just HNSW data
+                const writable = await fileHandle.createWritable();
+                await writable.write(JSON.stringify(hnswData, null, 2));
+                await writable.close();
+            }
        }
        catch (error) {
            console.error(`Failed to save HNSW data for ${nounId}:`, error);
@@ -753,9 +753,30 @@ export class R2Storage extends BaseStorage {
     }
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
+        // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
         const shard = getShardIdFromUuid(nounId);
         const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
-        await this.writeObjectToPath(key, hnswData);
+        try {
+            // Read existing node data
+            const existingNode = await this.readObjectFromPath(key);
+            if (existingNode) {
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                await this.writeObjectToPath(key, updatedNode);
+            }
+            else {
+                // Node doesn't exist yet, create with just HNSW data
+                await this.writeObjectToPath(key, hnswData);
+            }
+        }
+        catch (error) {
+            // If read fails, create with just HNSW data
+            await this.writeObjectToPath(key, hnswData);
+        }
     }
     async getHNSWData(nounId) {
         await this.ensureInitialized();
@@ -3055,16 +3055,45 @@ export class S3CompatibleStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
-            const { PutObjectCommand } = await import('@aws-sdk/client-s3');
-            // Use sharded path for HNSW data
+            const { PutObjectCommand, GetObjectCommand } = await import('@aws-sdk/client-s3');
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
             const shard = getShardIdFromUuid(nounId);
             const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
-            await this.s3Client.send(new PutObjectCommand({
-                Bucket: this.bucketName,
-                Key: key,
-                Body: JSON.stringify(hnswData, null, 2),
-                ContentType: 'application/json'
-            }));
+            try {
+                // Read existing node data
+                const getResponse = await this.s3Client.send(new GetObjectCommand({
+                    Bucket: this.bucketName,
+                    Key: key
+                }));
+                const existingData = await getResponse.Body.transformToString();
+                const existingNode = JSON.parse(existingData);
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                await this.s3Client.send(new PutObjectCommand({
+                    Bucket: this.bucketName,
+                    Key: key,
+                    Body: JSON.stringify(updatedNode, null, 2),
+                    ContentType: 'application/json'
+                }));
+            }
+            catch (error) {
+                // If node doesn't exist yet, create it with just HNSW data
+                if (error.name === 'NoSuchKey' || error.Code === 'NoSuchKey') {
+                    await this.s3Client.send(new PutObjectCommand({
+                        Bucket: this.bucketName,
+                        Key: key,
+                        Body: JSON.stringify(hnswData, null, 2),
+                        ContentType: 'application/json'
+                    }));
+                }
+                else {
+                    throw error;
+                }
+            }
         }
         catch (error) {
             this.logger.error(`Failed to save HNSW data for ${nounId}:`, error);
@@ -1,5 +1,6 @@
 /**
- * Storage backward compatibility layer for legacy data migrations
+ * DEPRECATED (v4.7.2): Backward compatibility stubs
+ * TODO: Remove in v4.7.3 after migrating s3CompatibleStorage
  */
 export declare class StorageCompatibilityLayer {
     static logMigrationEvent(event: string, details?: any): void;
@@ -1,26 +1,23 @@
 /**
- * Storage backward compatibility layer for legacy data migrations
+ * DEPRECATED (v4.7.2): Backward compatibility stubs
+ * TODO: Remove in v4.7.3 after migrating s3CompatibleStorage
  */
 export class StorageCompatibilityLayer {
     static logMigrationEvent(event, details) {
-        // Simplified logging for migration events
-        if (process.env.DEBUG_MIGRATION) {
-            console.log(`[Migration] ${event}`, details);
-        }
+        // No-op
     }
     static async migrateIfNeeded(storagePath) {
-        // No-op for now - can be extended later if needed
+        // No-op
     }
 }
-// Helper to get default paths
 export function getDefaultStoragePaths(basePath) {
     return {
-        nouns: `${basePath}/nouns`,
-        verbs: `${basePath}/verbs`,
-        metadata: `${basePath}/metadata`,
-        index: `${basePath}/index`,
-        system: `${basePath}/system`,
-        statistics: `${basePath}/statistics.json`
+        nouns: `${basePath}/entities/nouns/hnsw`,
+        verbs: `${basePath}/entities/verbs/hnsw`,
+        metadata: `${basePath}/entities/nouns/metadata`,
+        index: `${basePath}/indexes`,
+        system: `${basePath}/_system`,
+        statistics: `${basePath}/_system/statistics.json`
     };
 }
 //# sourceMappingURL=backwardCompatibility.js.map
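After this rewrite, `getDefaultStoragePaths` resolves entirely to the clean layout. A usage sketch (the import specifier is illustrative; use whatever path resolves to this module in your setup):

```ts
import { getDefaultStoragePaths } from './backwardCompatibility.js';

const paths = getDefaultStoragePaths('/data/brainy');
console.log(paths.nouns);      // '/data/brainy/entities/nouns/hnsw'   (was '/data/brainy/nouns')
console.log(paths.system);     // '/data/brainy/_system'               (was '/data/brainy/system')
console.log(paths.statistics); // '/data/brainy/_system/statistics.json'
```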
@@ -5,26 +5,16 @@
 import { GraphAdjacencyIndex } from '../graph/graphAdjacencyIndex.js';
 import { GraphVerb, HNSWNoun, HNSWVerb, NounMetadata, VerbMetadata, HNSWNounWithMetadata, HNSWVerbWithMetadata, StatisticsData } from '../coreTypes.js';
 import { BaseStorageAdapter } from './adapters/baseStorageAdapter.js';
-export declare const ENTITIES_DIR = "entities";
-export declare const NOUNS_VECTOR_DIR = "entities/nouns/vectors";
 export declare const NOUNS_METADATA_DIR = "entities/nouns/metadata";
-export declare const VERBS_VECTOR_DIR = "entities/verbs/vectors";
 export declare const VERBS_METADATA_DIR = "entities/verbs/metadata";
-export declare const INDEXES_DIR = "indexes";
-export declare const METADATA_INDEX_DIR = "indexes/metadata";
-export declare const NOUNS_DIR = "nouns";
-export declare const VERBS_DIR = "verbs";
-export declare const METADATA_DIR = "metadata";
-export declare const NOUN_METADATA_DIR = "noun-metadata";
-export declare const VERB_METADATA_DIR = "verb-metadata";
-export declare const INDEX_DIR = "index";
 export declare const SYSTEM_DIR = "_system";
 export declare const STATISTICS_KEY = "statistics";
-export declare const STORAGE_SCHEMA_VERSION = 3;
-export declare const USE_ENTITY_BASED_STRUCTURE = true;
-/**
- * Get the appropriate directory path based on configuration
- */
+export declare const NOUNS_DIR = "entities/nouns/hnsw";
+export declare const VERBS_DIR = "entities/verbs/hnsw";
+export declare const METADATA_DIR = "entities/nouns/metadata";
+export declare const NOUN_METADATA_DIR = "entities/nouns/metadata";
+export declare const VERB_METADATA_DIR = "entities/verbs/metadata";
+export declare const INDEX_DIR = "indexes";
 export declare function getDirectoryPath(entityType: 'noun' | 'verb', dataType: 'vector' | 'metadata'): string;
 /**
  * Base storage adapter that implements common functionality
@@ -6,49 +6,26 @@ import { GraphAdjacencyIndex } from '../graph/graphAdjacencyIndex.js';
 import { BaseStorageAdapter } from './adapters/baseStorageAdapter.js';
 import { validateNounType, validateVerbType } from '../utils/typeValidation.js';
 import { getShardIdFromUuid } from './sharding.js';
-// Common directory/prefix names
-// Option A: Entity-Based Directory Structure
-export const ENTITIES_DIR = 'entities';
-export const NOUNS_VECTOR_DIR = 'entities/nouns/vectors';
+// Clean directory structure (v4.7.2+)
+// All storage adapters use this consistent structure
 export const NOUNS_METADATA_DIR = 'entities/nouns/metadata';
-export const VERBS_VECTOR_DIR = 'entities/verbs/vectors';
 export const VERBS_METADATA_DIR = 'entities/verbs/metadata';
-export const INDEXES_DIR = 'indexes';
-export const METADATA_INDEX_DIR = 'indexes/metadata';
-// Legacy paths - kept for backward compatibility during migration
-export const NOUNS_DIR = 'nouns'; // Legacy: now maps to entities/nouns/vectors
-export const VERBS_DIR = 'verbs'; // Legacy: now maps to entities/verbs/vectors
-export const METADATA_DIR = 'metadata'; // Legacy: now maps to entities/nouns/metadata
-export const NOUN_METADATA_DIR = 'noun-metadata'; // Legacy: now maps to entities/nouns/metadata
-export const VERB_METADATA_DIR = 'verb-metadata'; // Legacy: now maps to entities/verbs/metadata
-export const INDEX_DIR = 'index'; // Legacy - kept for backward compatibility
-export const SYSTEM_DIR = '_system'; // System config & metadata indexes
+export const SYSTEM_DIR = '_system';
 export const STATISTICS_KEY = 'statistics';
-// Migration version to track compatibility
-export const STORAGE_SCHEMA_VERSION = 3; // v3: Entity-Based Directory Structure (Option A)
-// Configuration flag to enable new directory structure
-export const USE_ENTITY_BASED_STRUCTURE = true; // Set to true to use Option A structure
-/**
- * Get the appropriate directory path based on configuration
- */
+// DEPRECATED (v4.7.2): Temporary stubs for adapters not yet migrated
+// TODO: Remove in v4.7.3 after migrating remaining adapters
+export const NOUNS_DIR = 'entities/nouns/hnsw';
+export const VERBS_DIR = 'entities/verbs/hnsw';
+export const METADATA_DIR = 'entities/nouns/metadata';
+export const NOUN_METADATA_DIR = 'entities/nouns/metadata';
+export const VERB_METADATA_DIR = 'entities/verbs/metadata';
+export const INDEX_DIR = 'indexes';
 export function getDirectoryPath(entityType, dataType) {
-    if (USE_ENTITY_BASED_STRUCTURE) {
-        // Option A: Entity-Based Structure
-        if (entityType === 'noun') {
-            return dataType === 'vector' ? NOUNS_VECTOR_DIR : NOUNS_METADATA_DIR;
-        }
-        else {
-            return dataType === 'vector' ? VERBS_VECTOR_DIR : VERBS_METADATA_DIR;
-        }
+    if (entityType === 'noun') {
+        return dataType === 'vector' ? NOUNS_DIR : NOUNS_METADATA_DIR;
     }
     else {
-        // Legacy structure
-        if (entityType === 'noun') {
-            return dataType === 'vector' ? NOUNS_DIR : METADATA_DIR;
-        }
-        else {
-            return dataType === 'vector' ? VERBS_DIR : VERB_METADATA_DIR;
-        }
+        return dataType === 'vector' ? VERBS_DIR : VERBS_METADATA_DIR;
     }
 }
 /**
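With the legacy branch and `USE_ENTITY_BASED_STRUCTURE` flag removed, `getDirectoryPath` is now a pure lookup into the constants above. A usage sketch (import specifier illustrative); note that `'vector'` resolves to the hnsw directories, consistent with the `saveHNSWData` fix above, which keeps vectors inside the HNSW node files:

```ts
import { getDirectoryPath } from './baseStorage.js';

console.log(getDirectoryPath('noun', 'vector'));   // 'entities/nouns/hnsw'   (NOUNS_DIR)
console.log(getDirectoryPath('noun', 'metadata')); // 'entities/nouns/metadata'
console.log(getDirectoryPath('verb', 'vector'));   // 'entities/verbs/hnsw'   (VERBS_DIR)
console.log(getDirectoryPath('verb', 'metadata')); // 'entities/verbs/metadata'
```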
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@soulcraft/brainy",
-  "version": "4.7.1",
+  "version": "4.7.3",
   "description": "Universal Knowledge Protocol™ - World's first Triple Intelligence database unifying vector, graph, and document search in one API. 31 nouns × 40 verbs for infinite expressiveness.",
   "main": "dist/index.js",
   "module": "dist/index.js",