@soulcraft/brainy 4.7.2 → 4.7.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +5 -0
- package/dist/storage/adapters/azureBlobStorage.js +29 -4
- package/dist/storage/adapters/fileSystemStorage.js +28 -6
- package/dist/storage/adapters/gcsStorage.js +32 -5
- package/dist/storage/adapters/opfsStorage.js +22 -7
- package/dist/storage/adapters/r2Storage.js +22 -1
- package/dist/storage/adapters/s3CompatibleStorage.js +37 -8
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,11 @@
 
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+### [4.7.3](https://github.com/soulcraftlabs/brainy/compare/v4.7.2...v4.7.3) (2025-10-27)
+
+- fix(storage): CRITICAL - preserve vectors when updating HNSW connections (v4.7.3) (46e7482)
+
+
 ### [4.4.0](https://github.com/soulcraftlabs/brainy/compare/v4.3.2...v4.4.0) (2025-10-24)
 
 - docs: update CHANGELOG for v4.4.0 release (a3c8a28)
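Every adapter below follows the same read-merge-write cycle: read the stored node JSON, replace only the HNSW fields (level, connections), and write the complete object back so id and vector survive; before 4.7.3 the adapters wrote hnswData directly to the node path, discarding the stored vector. A minimal standalone sketch of that cycle against a plain JSON file, assuming the node shape shown in the diffs below (the function name and the use of node:fs/promises are illustrative, not Brainy's storage API):

import { readFile, writeFile } from 'node:fs/promises';

// Read-merge-write: preserve every stored field (id, vector, ...) and update
// only the HNSW graph metadata carried in hnswData.
async function saveHNSWDataPreserving(filePath, hnswData) {
  let existingNode = {};
  try {
    existingNode = JSON.parse(await readFile(filePath, 'utf-8'));
  } catch (error) {
    // Missing file: fall through and create the node with just HNSW data,
    // mirroring the adapters' not-found branches.
    if (error.code !== 'ENOENT') throw error;
  }
  const updatedNode = {
    ...existingNode,                      // keeps id, vector, and any other stored fields
    level: hnswData.level,                // HNSW layer of the node
    connections: hnswData.connections     // HNSW neighbor lists
  };
  await writeFile(filePath, JSON.stringify(updatedNode, null, 2));
}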
package/dist/storage/adapters/azureBlobStorage.js
CHANGED

@@ -1275,13 +1275,38 @@ export class AzureBlobStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
             const shard = getShardIdFromUuid(nounId);
             const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
             const blockBlobClient = this.containerClient.getBlockBlobClient(key);
-
-
-
-
+            try {
+                // Read existing node data
+                const downloadResponse = await blockBlobClient.download(0);
+                const existingData = await this.streamToBuffer(downloadResponse.readableStreamBody);
+                const existingNode = JSON.parse(existingData.toString());
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                const content = JSON.stringify(updatedNode, null, 2);
+                await blockBlobClient.upload(content, content.length, {
+                    blobHTTPHeaders: { blobContentType: 'application/json' }
+                });
+            }
+            catch (error) {
+                // If node doesn't exist yet, create it with just HNSW data
+                if (error.statusCode === 404 || error.code === 'BlobNotFound') {
+                    const content = JSON.stringify(hnswData, null, 2);
+                    await blockBlobClient.upload(content, content.length, {
+                        blobHTTPHeaders: { blobContentType: 'application/json' }
+                    });
+                }
+                else {
+                    throw error;
+                }
+            }
         }
         catch (error) {
             this.logger.error(`Failed to save HNSW data for ${nounId}:`, error);
package/dist/storage/adapters/fileSystemStorage.js
CHANGED

@@ -2156,12 +2156,34 @@ export class FileSystemStorage extends BaseStorage {
      */
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
-        //
-
-
-
-
-
+        // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
+        // Previous implementation overwrote the entire file, destroying vector data
+        // Now we READ the existing node, UPDATE only connections/level, then WRITE back the complete node
+        const filePath = this.getNodePath(nounId);
+        try {
+            // Read existing node data
+            const existingData = await fs.promises.readFile(filePath, 'utf-8');
+            const existingNode = JSON.parse(existingData);
+            // Preserve id and vector, update only HNSW graph metadata
+            const updatedNode = {
+                ...existingNode, // Preserve all existing fields (id, vector, etc.)
+                level: hnswData.level,
+                connections: hnswData.connections
+            };
+            // Write back the COMPLETE node with updated HNSW data
+            await fs.promises.writeFile(filePath, JSON.stringify(updatedNode, null, 2));
+        }
+        catch (error) {
+            // If node doesn't exist yet, create it with just HNSW data
+            // This should only happen during initial node creation
+            if (error.code === 'ENOENT') {
+                await this.ensureDirectoryExists(path.dirname(filePath));
+                await fs.promises.writeFile(filePath, JSON.stringify(hnswData, null, 2));
+            }
+            else {
+                throw error;
+            }
+        }
     }
     /**
      * Get HNSW graph data for a noun
package/dist/storage/adapters/gcsStorage.js
CHANGED

@@ -1469,14 +1469,41 @@ export class GcsStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
-            //
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
+            // Previous implementation overwrote the entire file, destroying vector data
+            // Now we READ the existing node, UPDATE only connections/level, then WRITE back the complete node
             const shard = getShardIdFromUuid(nounId);
             const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
             const file = this.bucket.file(key);
-
-
-
-
+            try {
+                // Read existing node data
+                const [existingData] = await file.download();
+                const existingNode = JSON.parse(existingData.toString());
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode, // Preserve all existing fields (id, vector, etc.)
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                // Write back the COMPLETE node with updated HNSW data
+                await file.save(JSON.stringify(updatedNode, null, 2), {
+                    contentType: 'application/json',
+                    resumable: false
+                });
+            }
+            catch (error) {
+                // If node doesn't exist yet, create it with just HNSW data
+                // This should only happen during initial node creation
+                if (error.code === 404) {
+                    await file.save(JSON.stringify(hnswData, null, 2), {
+                        contentType: 'application/json',
+                        resumable: false
+                    });
+                }
+                else {
+                    throw error;
+                }
+            }
         }
         catch (error) {
             this.logger.error(`Failed to save HNSW data for ${nounId}:`, error);
package/dist/storage/adapters/opfsStorage.js
CHANGED

@@ -1693,17 +1693,32 @@ export class OPFSStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
-            //
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
             const hnswDir = await this.nounsDir.getDirectoryHandle('hnsw', { create: true });
-            // Use sharded path for HNSW data
             const shard = getShardIdFromUuid(nounId);
             const shardDir = await hnswDir.getDirectoryHandle(shard, { create: true });
-            // Create or get the file in the shard directory
             const fileHandle = await shardDir.getFileHandle(`${nounId}.json`, { create: true });
-
-
-
-
+            try {
+                // Read existing node data
+                const file = await fileHandle.getFile();
+                const existingData = await file.text();
+                const existingNode = JSON.parse(existingData);
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                const writable = await fileHandle.createWritable();
+                await writable.write(JSON.stringify(updatedNode, null, 2));
+                await writable.close();
+            }
+            catch (error) {
+                // If node doesn't exist or read fails, create with just HNSW data
+                const writable = await fileHandle.createWritable();
+                await writable.write(JSON.stringify(hnswData, null, 2));
+                await writable.close();
+            }
         }
         catch (error) {
             console.error(`Failed to save HNSW data for ${nounId}:`, error);
package/dist/storage/adapters/r2Storage.js
CHANGED

@@ -753,9 +753,30 @@ export class R2Storage extends BaseStorage {
     }
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
+        // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
         const shard = getShardIdFromUuid(nounId);
         const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
-
+        try {
+            // Read existing node data
+            const existingNode = await this.readObjectFromPath(key);
+            if (existingNode) {
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                await this.writeObjectToPath(key, updatedNode);
+            }
+            else {
+                // Node doesn't exist yet, create with just HNSW data
+                await this.writeObjectToPath(key, hnswData);
+            }
+        }
+        catch (error) {
+            // If read fails, create with just HNSW data
+            await this.writeObjectToPath(key, hnswData);
+        }
     }
     async getHNSWData(nounId) {
         await this.ensureInitialized();
package/dist/storage/adapters/s3CompatibleStorage.js
CHANGED

@@ -3055,16 +3055,45 @@ export class S3CompatibleStorage extends BaseStorage {
     async saveHNSWData(nounId, hnswData) {
         await this.ensureInitialized();
         try {
-            const { PutObjectCommand } = await import('@aws-sdk/client-s3');
-            //
+            const { PutObjectCommand, GetObjectCommand } = await import('@aws-sdk/client-s3');
+            // CRITICAL FIX (v4.7.3): Must preserve existing node data (id, vector) when updating HNSW metadata
             const shard = getShardIdFromUuid(nounId);
             const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
-
-
-
-
-
-
+            try {
+                // Read existing node data
+                const getResponse = await this.s3Client.send(new GetObjectCommand({
+                    Bucket: this.bucketName,
+                    Key: key
+                }));
+                const existingData = await getResponse.Body.transformToString();
+                const existingNode = JSON.parse(existingData);
+                // Preserve id and vector, update only HNSW graph metadata
+                const updatedNode = {
+                    ...existingNode,
+                    level: hnswData.level,
+                    connections: hnswData.connections
+                };
+                await this.s3Client.send(new PutObjectCommand({
+                    Bucket: this.bucketName,
+                    Key: key,
+                    Body: JSON.stringify(updatedNode, null, 2),
+                    ContentType: 'application/json'
+                }));
+            }
+            catch (error) {
+                // If node doesn't exist yet, create it with just HNSW data
+                if (error.name === 'NoSuchKey' || error.Code === 'NoSuchKey') {
+                    await this.s3Client.send(new PutObjectCommand({
+                        Bucket: this.bucketName,
+                        Key: key,
+                        Body: JSON.stringify(hnswData, null, 2),
+                        ContentType: 'application/json'
+                    }));
+                }
+                else {
+                    throw error;
+                }
+            }
         }
         catch (error) {
             this.logger.error(`Failed to save HNSW data for ${nounId}:`, error);
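The adapters differ only in how they detect that a node does not yet exist before falling back to writing hnswData as a fresh node: the filesystem adapter checks ENOENT, Azure checks a 404 status or BlobNotFound code, GCS checks error code 404, and the S3-compatible adapter checks NoSuchKey, while OPFS and R2 treat any failed read as "create fresh". A hedged sketch that consolidates those checks into one hypothetical helper (the adapters inline them; the helper and backend labels are illustrative only):

// Hypothetical helper; backend labels are illustrative, the error shapes are
// the ones the v4.7.3 adapters check before rethrowing other errors.
function isNodeNotFound(error, backend) {
  switch (backend) {
    case 'filesystem':
      return error.code === 'ENOENT';
    case 'azure-blob':
      return error.statusCode === 404 || error.code === 'BlobNotFound';
    case 'gcs':
      return error.code === 404;
    case 's3-compatible':
      return error.name === 'NoSuchKey' || error.Code === 'NoSuchKey';
    default:
      // OPFS and R2 skip the distinction and always fall back to hnswData.
      return true;
  }
}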
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@soulcraft/brainy",
-  "version": "4.7.2",
+  "version": "4.7.3",
   "description": "Universal Knowledge Protocol™ - World's first Triple Intelligence database unifying vector, graph, and document search in one API. 31 nouns × 40 verbs for infinite expressiveness.",
   "main": "dist/index.js",
   "module": "dist/index.js",