@soulcraft/brainy 2.1.0 → 3.0.0

Files changed (55)
  1. package/dist/augmentations/AugmentationMetadataContract.d.ts +94 -0
  2. package/dist/augmentations/AugmentationMetadataContract.js +306 -0
  3. package/dist/augmentations/apiServerAugmentation.d.ts +1 -0
  4. package/dist/augmentations/apiServerAugmentation.js +1 -0
  5. package/dist/augmentations/batchProcessingAugmentation.d.ts +1 -0
  6. package/dist/augmentations/batchProcessingAugmentation.js +1 -0
  7. package/dist/augmentations/brainyAugmentation.d.ts +16 -0
  8. package/dist/augmentations/cacheAugmentation.d.ts +1 -0
  9. package/dist/augmentations/cacheAugmentation.js +1 -0
  10. package/dist/augmentations/conduitAugmentations.d.ts +1 -0
  11. package/dist/augmentations/conduitAugmentations.js +1 -0
  12. package/dist/augmentations/connectionPoolAugmentation.d.ts +1 -0
  13. package/dist/augmentations/connectionPoolAugmentation.js +1 -0
  14. package/dist/augmentations/entityRegistryAugmentation.d.ts +2 -0
  15. package/dist/augmentations/entityRegistryAugmentation.js +2 -0
  16. package/dist/augmentations/indexAugmentation.d.ts +1 -0
  17. package/dist/augmentations/indexAugmentation.js +1 -0
  18. package/dist/augmentations/intelligentVerbScoringAugmentation.d.ts +4 -0
  19. package/dist/augmentations/intelligentVerbScoringAugmentation.js +4 -0
  20. package/dist/augmentations/metadataEnforcer.d.ts +20 -0
  21. package/dist/augmentations/metadataEnforcer.js +171 -0
  22. package/dist/augmentations/metricsAugmentation.d.ts +2 -7
  23. package/dist/augmentations/metricsAugmentation.js +1 -0
  24. package/dist/augmentations/monitoringAugmentation.d.ts +1 -0
  25. package/dist/augmentations/monitoringAugmentation.js +1 -0
  26. package/dist/augmentations/neuralImport.d.ts +4 -0
  27. package/dist/augmentations/neuralImport.js +4 -0
  28. package/dist/augmentations/requestDeduplicatorAugmentation.d.ts +1 -0
  29. package/dist/augmentations/requestDeduplicatorAugmentation.js +1 -0
  30. package/dist/augmentations/serverSearchAugmentations.d.ts +2 -0
  31. package/dist/augmentations/serverSearchAugmentations.js +2 -0
  32. package/dist/augmentations/storageAugmentation.d.ts +1 -0
  33. package/dist/augmentations/storageAugmentation.js +1 -0
  34. package/dist/augmentations/synapseAugmentation.d.ts +4 -0
  35. package/dist/augmentations/synapseAugmentation.js +4 -0
  36. package/dist/augmentations/walAugmentation.d.ts +1 -0
  37. package/dist/augmentations/walAugmentation.js +1 -0
  38. package/dist/brainyData.d.ts +28 -1
  39. package/dist/brainyData.js +229 -83
  40. package/dist/embeddings/model-manager.d.ts +9 -8
  41. package/dist/embeddings/model-manager.js +105 -85
  42. package/dist/triple/TripleIntelligence.d.ts +4 -0
  43. package/dist/triple/TripleIntelligence.js +39 -9
  44. package/dist/utils/deletedItemsIndex.d.ts +59 -0
  45. package/dist/utils/deletedItemsIndex.js +98 -0
  46. package/dist/utils/ensureDeleted.d.ts +38 -0
  47. package/dist/utils/ensureDeleted.js +79 -0
  48. package/dist/utils/metadataFilter.js +5 -0
  49. package/dist/utils/metadataIndex.d.ts +4 -0
  50. package/dist/utils/metadataIndex.js +45 -0
  51. package/dist/utils/metadataNamespace.d.ts +113 -0
  52. package/dist/utils/metadataNamespace.js +162 -0
  53. package/dist/utils/periodicCleanup.d.ts +87 -0
  54. package/dist/utils/periodicCleanup.js +219 -0
  55. package/package.json +9 -3
package/dist/embeddings/model-manager.js
@@ -1,47 +1,43 @@
  /**
  * Model Manager - Ensures transformer models are available at runtime
  *
- * Strategy:
- * 1. Check local cache first
- * 2. Try GitHub releases (our backup)
- * 3. Fall back to Hugging Face
- * 4. Future: CDN at models.soulcraft.com
+ * Strategy (in order):
+ * 1. Check local cache first (instant)
+ * 2. Try Soulcraft CDN (fastest when available)
+ * 3. Try GitHub release tar.gz with extraction (reliable backup)
+ * 4. Fall back to Hugging Face (always works)
+ *
+ * NO USER CONFIGURATION REQUIRED - Everything is automatic!
  */
  import { existsSync } from 'fs';
- import { join, dirname } from 'path';
+ import { mkdir, writeFile } from 'fs/promises';
+ import { join } from 'path';
  import { env } from '@huggingface/transformers';
  // Model sources in order of preference
  const MODEL_SOURCES = {
- // GitHub Release - our controlled backup
- github: 'https://github.com/soulcraftlabs/brainy/releases/download/models-v1/all-MiniLM-L6-v2.tar.gz',
- // Future CDN - fastest option when available
- cdn: 'https://models.soulcraft.com/brainy/all-MiniLM-L6-v2.tar.gz',
- // Original Hugging Face - fallback
- huggingface: 'default' // Uses transformers.js default
- };
- // Expected model files and their hashes
- const MODEL_MANIFEST = {
- 'Xenova/all-MiniLM-L6-v2': {
- files: {
- 'onnx/model.onnx': {
- size: 90555481,
- sha256: null // Will be computed from actual model
- },
- 'tokenizer.json': {
- size: 711661,
- sha256: null
- },
- 'config.json': {
- size: 650,
- sha256: null
- },
- 'tokenizer_config.json': {
- size: 366,
- sha256: null
- }
- }
+ // CDN - Fastest when available (currently active)
+ cdn: {
+ host: 'https://models.soulcraft.com/models',
+ pathTemplate: '{model}/', // e.g., Xenova/all-MiniLM-L6-v2/
+ testFile: 'config.json' // File to test availability
+ },
+ // GitHub Release - tar.gz fallback (already exists and works)
+ githubRelease: {
+ tarUrl: 'https://github.com/soulcraftlabs/brainy/releases/download/models-v1/all-MiniLM-L6-v2.tar.gz'
+ },
+ // Original Hugging Face - final fallback (always works)
+ huggingface: {
+ host: 'https://huggingface.co',
+ pathTemplate: '{model}/resolve/{revision}/' // Default transformers.js pattern
  }
  };
+ // Model verification files - minimal set needed for transformers.js
+ const MODEL_FILES = [
+ 'config.json',
+ 'tokenizer.json',
+ 'tokenizer_config.json',
+ 'onnx/model.onnx'
+ ];
  export class ModelManager {
  constructor() {
  this.isInitialized = false;
@@ -76,96 +72,120 @@ export class ModelManager {
  if (this.isInitialized) {
  return true;
  }
- const modelPath = join(this.modelsPath, ...modelName.split('/'));
+ // Configure transformers.js environment
+ env.cacheDir = this.modelsPath;
+ env.allowLocalModels = true;
+ env.useFSCache = true;
  // Check if model already exists locally
- if (await this.verifyModelFiles(modelPath, modelName)) {
+ const modelPath = join(this.modelsPath, ...modelName.split('/'));
+ if (await this.verifyModelFiles(modelPath)) {
  console.log('✅ Models found in cache:', modelPath);
- this.configureTransformers(modelPath);
+ env.allowRemoteModels = false; // Use local only
  this.isInitialized = true;
  return true;
  }
  // Try to download from our sources
  console.log('📥 Downloading transformer models...');
- // Try GitHub first (our backup)
- if (await this.downloadFromGitHub(modelName)) {
+ // Try CDN first (fastest when available)
+ if (await this.tryModelSource('Soulcraft CDN', MODEL_SOURCES.cdn, modelName)) {
  this.isInitialized = true;
  return true;
  }
- // Try CDN (when available)
- if (await this.downloadFromCDN(modelName)) {
+ // Try GitHub release with tar.gz extraction (reliable backup)
+ if (await this.downloadAndExtractFromGitHub(modelName)) {
  this.isInitialized = true;
  return true;
  }
- // Fall back to Hugging Face (default transformers.js behavior)
+ // Fall back to Hugging Face (always works)
  console.log('⚠️ Using Hugging Face fallback for models');
+ env.remoteHost = MODEL_SOURCES.huggingface.host;
+ env.remotePathTemplate = MODEL_SOURCES.huggingface.pathTemplate;
  env.allowRemoteModels = true;
  this.isInitialized = true;
  return true;
  }
- async verifyModelFiles(modelPath, modelName) {
- const manifest = MODEL_MANIFEST[modelName];
- if (!manifest)
- return false;
- for (const [filePath, info] of Object.entries(manifest.files)) {
- const fullPath = join(modelPath, filePath);
+ async verifyModelFiles(modelPath) {
+ // Check if essential model files exist
+ for (const file of MODEL_FILES) {
+ const fullPath = join(modelPath, file);
  if (!existsSync(fullPath)) {
  return false;
  }
- // Optionally verify size
- if (process.env.VERIFY_MODEL_SIZE === 'true') {
- const stats = await import('fs').then(fs => fs.promises.stat(fullPath));
- if (stats.size !== info.size) {
- console.warn(`⚠️ Model file size mismatch: ${filePath}`);
- return false;
- }
- }
  }
  return true;
  }
- async downloadFromGitHub(modelName) {
+ async tryModelSource(name, source, modelName) {
  try {
- const url = MODEL_SOURCES.github;
- console.log('📥 Downloading from GitHub releases...');
- // Download tar.gz file
- const response = await fetch(url);
- if (!response.ok) {
- throw new Error(`GitHub download failed: ${response.status}`);
+ console.log(`📥 Trying ${name}...`);
+ // Test if the source is accessible by trying to fetch a test file
+ const testFile = source.testFile || 'config.json';
+ const modelPath = source.pathTemplate.replace('{model}', modelName).replace('{revision}', 'main');
+ const testUrl = `${source.host}/${modelPath}${testFile}`;
+ const response = await fetch(testUrl).catch(() => null);
+ if (response && response.ok) {
+ console.log(`✅ ${name} is available`);
+ // Configure transformers.js to use this source
+ env.remoteHost = source.host;
+ env.remotePathTemplate = source.pathTemplate;
+ env.allowRemoteModels = true;
+ // The model will be downloaded automatically by transformers.js when needed
+ return true;
+ }
+ else {
+ console.log(`⚠️ ${name} not available (${response?.status || 'unreachable'})`);
+ return false;
  }
- const buffer = await response.arrayBuffer();
- // Extract tar.gz (would need tar library in production)
- // For now, return false to fall back to other methods
- console.log('⚠️ GitHub model extraction not yet implemented');
- return false;
  }
  catch (error) {
- console.log('⚠️ GitHub download failed:', error.message);
+ console.log(`⚠️ ${name} check failed:`, error.message);
  return false;
  }
  }
- async downloadFromCDN(modelName) {
+ async downloadAndExtractFromGitHub(modelName) {
  try {
- const url = MODEL_SOURCES.cdn;
- console.log('📥 Downloading from Soulcraft CDN...');
- // Try to fetch from CDN
- const response = await fetch(url);
+ console.log('📥 Trying GitHub Release (tar.gz)...');
+ // Download tar.gz file
+ const response = await fetch(MODEL_SOURCES.githubRelease.tarUrl);
  if (!response.ok) {
- throw new Error(`CDN download failed: ${response.status}`);
+ console.log(`⚠️ GitHub Release not available (${response.status})`);
+ return false;
+ }
+ // Since we can't use tar-stream, we'll use Node's built-in child_process
+ // to extract using system tar command (available on all Unix systems)
+ const buffer = await response.arrayBuffer();
+ const modelPath = join(this.modelsPath, ...modelName.split('/'));
+ // Create model directory
+ await mkdir(modelPath, { recursive: true });
+ // Write tar.gz to temp file and extract
+ const tempFile = join(this.modelsPath, 'temp-model.tar.gz');
+ await writeFile(tempFile, Buffer.from(buffer));
+ // Extract using system tar command
+ const { exec } = await import('child_process');
+ const { promisify } = await import('util');
+ const execAsync = promisify(exec);
+ try {
+ // Extract and strip the first directory component
+ await execAsync(`tar -xzf ${tempFile} -C ${modelPath} --strip-components=1`, {
+ cwd: this.modelsPath
+ });
+ // Clean up temp file
+ const { unlink } = await import('fs/promises');
+ await unlink(tempFile);
+ console.log('✅ GitHub Release models extracted and cached locally');
+ // Configure to use local models now
+ env.allowRemoteModels = false;
+ return true;
+ }
+ catch (extractError) {
+ console.log('⚠️ Tar extraction failed, trying alternative method');
+ return false;
  }
- // Would extract files here
- console.log('⚠️ CDN not yet available');
- return false;
  }
  catch (error) {
- console.log('⚠️ CDN download failed:', error.message);
+ console.log('⚠️ GitHub Release download failed:', error.message);
  return false;
  }
  }
- configureTransformers(modelPath) {
- // Configure transformers.js to use our local models
- env.localModelPath = dirname(modelPath);
- env.allowRemoteModels = false;
- console.log('🔧 Configured transformers.js to use local models');
- }
  /**
  * Pre-download models for deployment
  * This is what npm run download-models calls
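
The changes above replace the per-source download methods with a probe-and-configure flow: each remote source is checked for a small test file, and the transformers.js env settings are pointed at the first source that responds; only the GitHub tar.gz path still downloads and extracts bytes itself. A minimal sketch of that probe-then-configure pattern, written against the same env fields the diff uses (the resolveModelSource helper and RemoteSource type are illustrative, not part of the package):

import { env } from '@huggingface/transformers';

interface RemoteSource {
  name: string;
  host: string;
  pathTemplate: string; // e.g. '{model}/' or '{model}/resolve/{revision}/', as in MODEL_SOURCES
  testFile?: string;
}

// Illustrative helper: probe sources in order and point transformers.js at the
// first one that serves the test file; model files are then fetched lazily by
// transformers.js itself.
async function resolveModelSource(model: string, sources: RemoteSource[]): Promise<string | null> {
  for (const source of sources) {
    const path = source.pathTemplate.replace('{model}', model).replace('{revision}', 'main');
    const testUrl = `${source.host}/${path}${source.testFile ?? 'config.json'}`;
    const response = await fetch(testUrl).catch(() => null);
    if (response?.ok) {
      env.remoteHost = source.host;           // same env fields the ModelManager sets
      env.remotePathTemplate = source.pathTemplate;
      env.allowRemoteModels = true;
      return source.name;
    }
  }
  return null; // caller falls back to the GitHub tar.gz download or Hugging Face defaults
}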
package/dist/triple/TripleIntelligence.d.ts
@@ -83,6 +83,10 @@ export declare class TripleIntelligenceEngine {
  * Field-based filtering
  */
  private fieldFilter;
+ /**
+ * Fallback manual metadata filtering when index is not available
+ */
+ private manualMetadataFilter;
  /**
  * Fusion ranking combines all signals
  */
package/dist/triple/TripleIntelligence.js
@@ -171,25 +171,30 @@ export class TripleIntelligenceEngine {
  }
  break;
  case 'vector':
- if (candidates.length === 0) {
- // Initial vector search
+ // CRITICAL: If we have a previous step that returned 0 candidates,
+ // we must respect that and not do a fresh search
+ if (candidates.length === 0 && plan.steps[0].type === 'vector') {
+ // This is the first step - do initial vector search
  const results = await this.vectorSearch(query.like || query.similar, query.limit);
  candidates = results;
  }
- else {
- // Vector search within candidates
+ else if (candidates.length > 0) {
+ // Vector search within existing candidates
  candidates = await this.vectorSearchWithin(query.like || query.similar, candidates);
  }
+ // If candidates.length === 0 and this isn't the first step, keep empty candidates
  break;
  case 'graph':
- if (candidates.length === 0) {
- // Initial graph traversal
+ // CRITICAL: Same logic as vector - respect empty candidates from previous steps
+ if (candidates.length === 0 && plan.steps[0].type === 'graph') {
+ // This is the first step - do initial graph traversal
  candidates = await this.graphTraversal(query.connected);
  }
- else {
- // Graph expansion from candidates
+ else if (candidates.length > 0) {
+ // Graph expansion from existing candidates
  candidates = await this.graphExpand(candidates, query.connected);
  }
+ // If candidates.length === 0 and this isn't the first step, keep empty candidates
  break;
  case 'fusion':
  // Final fusion ranking
@@ -248,7 +253,13 @@ export class TripleIntelligenceEngine {
  // Use the MetadataIndex directly for FAST field queries!
  // This uses B-tree indexes for O(log n) range queries
  // and hash indexes for O(1) exact matches
- const matchingIds = await this.brain.metadataIndex?.getIdsForFilter(where) || [];
+ const metadataIndex = this.brain.metadataIndex;
+ // Check if metadata index is properly initialized
+ if (!metadataIndex || typeof metadataIndex.getIdsForFilter !== 'function') {
+ // Fallback to manual filtering - slower but works
+ return this.manualMetadataFilter(where);
+ }
+ const matchingIds = await metadataIndex.getIdsForFilter(where) || [];
  // Convert to result format with metadata
  const results = [];
  for (const id of matchingIds.slice(0, 1000)) {
@@ -263,6 +274,25 @@ export class TripleIntelligenceEngine {
  }
  return results;
  }
+ /**
+ * Fallback manual metadata filtering when index is not available
+ */
+ async manualMetadataFilter(where) {
+ const { matchesMetadataFilter } = await import('../utils/metadataFilter.js');
+ const results = [];
+ // Get all nouns and manually filter them
+ const allNouns = this.brain.index.getNouns();
+ for (const [id, noun] of Array.from(allNouns.entries()).slice(0, 1000)) {
+ if (noun && matchesMetadataFilter(noun.metadata || {}, where)) {
+ results.push({
+ id,
+ score: 1.0,
+ metadata: noun.metadata || {}
+ });
+ }
+ }
+ return results;
+ }
  /**
  * Fusion ranking combines all signals
  */
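
The two changes above follow one pattern: prefer the metadata index's getIdsForFilter when the index is initialized, and otherwise fall back to a bounded manual scan of nouns. A condensed sketch of that "indexed lookup with a scan fallback" pattern, with simplified, hypothetical types standing in for Brainy's internals:

interface MetadataIndexLike {
  getIdsForFilter(where: Record<string, unknown>): Promise<string[] | undefined>;
}

// Illustrative: fast indexed lookup when available, bounded manual scan otherwise.
async function idsForFilter(
  index: MetadataIndexLike | undefined,
  where: Record<string, unknown>,
  allItems: Map<string, { metadata?: Record<string, unknown> }>,
  matches: (metadata: Record<string, unknown>, where: Record<string, unknown>) => boolean,
  cap = 1000
): Promise<string[]> {
  // Fast path: B-tree/hash-backed index, as in the fieldFilter change above
  if (index && typeof index.getIdsForFilter === 'function') {
    return (await index.getIdsForFilter(where)) ?? [];
  }
  // Slow path: bounded linear scan, as in manualMetadataFilter above
  const out: string[] = [];
  for (const [id, item] of allItems) {
    if (out.length >= cap) break;
    if (matches(item.metadata ?? {}, where)) out.push(id);
  }
  return out;
}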
package/dist/utils/deletedItemsIndex.d.ts
@@ -0,0 +1,59 @@
+ /**
+ * Dedicated index for tracking soft-deleted items
+ * This is MUCH more efficient than checking every item in the database
+ *
+ * Performance characteristics:
+ * - Add deleted item: O(1)
+ * - Remove deleted item: O(1)
+ * - Check if deleted: O(1)
+ * - Get all deleted: O(d) where d = number of deleted items << total items
+ */
+ export declare class DeletedItemsIndex {
+ private deletedIds;
+ private deletedCount;
+ /**
+ * Mark an item as deleted
+ */
+ markDeleted(id: string): void;
+ /**
+ * Mark an item as not deleted (restored)
+ */
+ markRestored(id: string): void;
+ /**
+ * Check if an item is deleted - O(1)
+ */
+ isDeleted(id: string): boolean;
+ /**
+ * Get all deleted item IDs - O(d)
+ */
+ getAllDeleted(): string[];
+ /**
+ * Filter out deleted items from results - O(k) where k = result count
+ */
+ filterDeleted<T extends {
+ id?: string;
+ }>(items: T[]): T[];
+ /**
+ * Get statistics
+ */
+ getStats(): {
+ deletedCount: number;
+ memoryUsage: number;
+ };
+ /**
+ * Clear all deleted items (for testing)
+ */
+ clear(): void;
+ /**
+ * Serialize for persistence
+ */
+ serialize(): string;
+ /**
+ * Deserialize from persistence
+ */
+ deserialize(data: string): void;
+ }
+ /**
+ * Global singleton for deleted items tracking
+ */
+ export declare const deletedItemsIndex: DeletedItemsIndex;
package/dist/utils/deletedItemsIndex.js
@@ -0,0 +1,98 @@
+ /**
+ * Dedicated index for tracking soft-deleted items
+ * This is MUCH more efficient than checking every item in the database
+ *
+ * Performance characteristics:
+ * - Add deleted item: O(1)
+ * - Remove deleted item: O(1)
+ * - Check if deleted: O(1)
+ * - Get all deleted: O(d) where d = number of deleted items << total items
+ */
+ export class DeletedItemsIndex {
+ constructor() {
+ this.deletedIds = new Set();
+ this.deletedCount = 0;
+ }
+ /**
+ * Mark an item as deleted
+ */
+ markDeleted(id) {
+ if (!this.deletedIds.has(id)) {
+ this.deletedIds.add(id);
+ this.deletedCount++;
+ }
+ }
+ /**
+ * Mark an item as not deleted (restored)
+ */
+ markRestored(id) {
+ if (this.deletedIds.delete(id)) {
+ this.deletedCount--;
+ }
+ }
+ /**
+ * Check if an item is deleted - O(1)
+ */
+ isDeleted(id) {
+ return this.deletedIds.has(id);
+ }
+ /**
+ * Get all deleted item IDs - O(d)
+ */
+ getAllDeleted() {
+ return Array.from(this.deletedIds);
+ }
+ /**
+ * Filter out deleted items from results - O(k) where k = result count
+ */
+ filterDeleted(items) {
+ if (this.deletedCount === 0) {
+ // Fast path - no deleted items
+ return items;
+ }
+ return items.filter(item => {
+ const id = item.id;
+ return id ? !this.deletedIds.has(id) : true;
+ });
+ }
+ /**
+ * Get statistics
+ */
+ getStats() {
+ return {
+ deletedCount: this.deletedCount,
+ memoryUsage: this.deletedCount * 100 // Rough estimate: 100 bytes per ID
+ };
+ }
+ /**
+ * Clear all deleted items (for testing)
+ */
+ clear() {
+ this.deletedIds.clear();
+ this.deletedCount = 0;
+ }
+ /**
+ * Serialize for persistence
+ */
+ serialize() {
+ return JSON.stringify(Array.from(this.deletedIds));
+ }
+ /**
+ * Deserialize from persistence
+ */
+ deserialize(data) {
+ try {
+ const ids = JSON.parse(data);
+ this.deletedIds = new Set(ids);
+ this.deletedCount = this.deletedIds.size;
+ }
+ catch (e) {
+ console.warn('Failed to deserialize deleted items index');
+ }
+ }
+ }
+ /**
+ * Global singleton for deleted items tracking
+ */
+ export const deletedItemsIndex = new DeletedItemsIndex();
+ //# sourceMappingURL=deletedItemsIndex.js.map
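
A short usage sketch of the new index, based only on the API introduced above; the import path is illustrative, not a documented entry point:

// Illustrative deep import; within the package this lives at dist/utils/deletedItemsIndex.js
import { deletedItemsIndex } from '@soulcraft/brainy/dist/utils/deletedItemsIndex.js';

// Soft delete is O(1) to record and O(k) to strip from a result set
deletedItemsIndex.markDeleted('noun-123');

const results = [{ id: 'noun-123' }, { id: 'noun-456' }];
const visible = deletedItemsIndex.filterDeleted(results); // [{ id: 'noun-456' }]

// Restore and re-check
deletedItemsIndex.markRestored('noun-123');
console.log(deletedItemsIndex.isDeleted('noun-123')); // false

// Persistence round-trip through the string form
const snapshot = deletedItemsIndex.serialize();
deletedItemsIndex.deserialize(snapshot);
console.log(visible, deletedItemsIndex.getStats());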
package/dist/utils/ensureDeleted.d.ts
@@ -0,0 +1,38 @@
+ /**
+ * Utility to ensure all metadata has the deleted field set properly
+ * This is CRITICAL for O(1) soft delete filtering performance
+ *
+ * Uses _brainy namespace to avoid conflicts with user metadata
+ */
+ /**
+ * Ensure metadata has internal Brainy fields set
+ * @param metadata The metadata object (could be null/undefined)
+ * @param preserveExisting If true, preserve existing deleted value
+ * @returns Metadata with internal fields guaranteed
+ */
+ export declare function ensureDeletedField(metadata: any, preserveExisting?: boolean): any;
+ /**
+ * Mark an item as soft deleted
+ * @param metadata The metadata object
+ * @returns Metadata with _brainy.deleted=true
+ */
+ export declare function markAsDeleted(metadata: any): any;
+ /**
+ * Mark an item as restored (not deleted)
+ * @param metadata The metadata object
+ * @returns Metadata with _brainy.deleted=false
+ */
+ export declare function markAsRestored(metadata: any): any;
+ /**
+ * Check if an item is deleted
+ * @param metadata The metadata object
+ * @returns true if deleted, false otherwise (including if field missing)
+ */
+ export declare function isDeleted(metadata: any): boolean;
+ /**
+ * Check if an item is active (not deleted)
+ * @param metadata The metadata object
+ * @returns true if not deleted (default), false if deleted
+ */
+ export declare function isActive(metadata: any): boolean;
+ export declare const BRAINY_DELETED_FIELD = "_brainy.deleted";
package/dist/utils/ensureDeleted.js
@@ -0,0 +1,79 @@
+ /**
+ * Utility to ensure all metadata has the deleted field set properly
+ * This is CRITICAL for O(1) soft delete filtering performance
+ *
+ * Uses _brainy namespace to avoid conflicts with user metadata
+ */
+ const BRAINY_NAMESPACE = '_brainy';
+ /**
+ * Ensure metadata has internal Brainy fields set
+ * @param metadata The metadata object (could be null/undefined)
+ * @param preserveExisting If true, preserve existing deleted value
+ * @returns Metadata with internal fields guaranteed
+ */
+ export function ensureDeletedField(metadata, preserveExisting = true) {
+ // Handle null/undefined metadata
+ if (!metadata) {
+ return {
+ [BRAINY_NAMESPACE]: {
+ deleted: false,
+ version: 1
+ }
+ };
+ }
+ // Clone to avoid mutation
+ const result = { ...metadata };
+ // Ensure _brainy namespace exists
+ if (!result[BRAINY_NAMESPACE]) {
+ result[BRAINY_NAMESPACE] = {};
+ }
+ // Set deleted field if not present
+ if (!('deleted' in result[BRAINY_NAMESPACE])) {
+ result[BRAINY_NAMESPACE].deleted = false;
+ }
+ else if (!preserveExisting) {
+ // Force to false if not preserving
+ result[BRAINY_NAMESPACE].deleted = false;
+ }
+ return result;
+ }
+ /**
+ * Mark an item as soft deleted
+ * @param metadata The metadata object
+ * @returns Metadata with _brainy.deleted=true
+ */
+ export function markAsDeleted(metadata) {
+ const result = ensureDeletedField(metadata);
+ result[BRAINY_NAMESPACE].deleted = true;
+ return result;
+ }
+ /**
+ * Mark an item as restored (not deleted)
+ * @param metadata The metadata object
+ * @returns Metadata with _brainy.deleted=false
+ */
+ export function markAsRestored(metadata) {
+ const result = ensureDeletedField(metadata);
+ result[BRAINY_NAMESPACE].deleted = false;
+ return result;
+ }
+ /**
+ * Check if an item is deleted
+ * @param metadata The metadata object
+ * @returns true if deleted, false otherwise (including if field missing)
+ */
+ export function isDeleted(metadata) {
+ return metadata?.[BRAINY_NAMESPACE]?.deleted === true;
+ }
+ /**
+ * Check if an item is active (not deleted)
+ * @param metadata The metadata object
+ * @returns true if not deleted (default), false if deleted
+ */
+ export function isActive(metadata) {
+ // If no deleted field or deleted=false, item is active
+ return !isDeleted(metadata);
+ }
+ // Export the namespace constant for use in queries
+ export const BRAINY_DELETED_FIELD = `${BRAINY_NAMESPACE}.deleted`;
+ //# sourceMappingURL=ensureDeleted.js.map
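
A brief sketch of how these helpers compose, grounded in the implementation above (the import path is illustrative, not a documented entry point):

// Illustrative deep import; within the package this lives at dist/utils/ensureDeleted.js
import {
  ensureDeletedField,
  markAsDeleted,
  isDeleted,
  isActive,
  BRAINY_DELETED_FIELD
} from '@soulcraft/brainy/dist/utils/ensureDeleted.js';

// User metadata is untouched; internal state lives under the _brainy namespace
const meta = ensureDeletedField({ title: 'hello' });
// => { title: 'hello', _brainy: { deleted: false } }

const gone = markAsDeleted(meta);
console.log(isDeleted(gone)); // true
console.log(isActive(gone));  // false

// A missing or false _brainy.deleted means the item counts as active
console.log(isDeleted({ title: 'no internal fields yet' })); // false

// BRAINY_DELETED_FIELD === '_brainy.deleted', exported for building deleted-aware queries
console.log(BRAINY_DELETED_FIELD);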
package/dist/utils/metadataFilter.js
@@ -24,8 +24,13 @@ function matchesQuery(value, query) {
  case 'notEquals':
  case 'isNot':
  case 'ne':
+ // Special handling: if value is undefined and operand is not undefined,
+ // they are not equal (so the condition passes)
+ // This ensures items without a 'deleted' field match 'deleted !== true'
  if (value === operand)
  return false;
+ // If value is undefined and operand is not, they're not equal (pass)
+ // If both are undefined, they're equal (fail, handled above)
  break;
  // Comparison operators
  case 'greaterThan':
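
The added comments pin down the intended semantics: a missing value is considered not equal to any defined operand, so items that never had a deleted flag still pass a deleted !== true filter. A tiny standalone illustration of that rule (a hypothetical helper, not the package's matchesQuery function):

// Mirrors the notEquals/isNot/ne branch above: only a strict match fails the condition
function notEquals(value: unknown, operand: unknown): boolean {
  return value !== operand;
}

notEquals(undefined, true);      // true  - item without a 'deleted' field passes
notEquals(true, true);           // false - explicitly deleted item is filtered out
notEquals(undefined, undefined); // false - both missing counts as equal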
package/dist/utils/metadataIndex.d.ts
@@ -107,6 +107,10 @@ export declare class MetadataIndexManager {
  * Remove item from metadata indexes
  */
  removeFromIndex(id: string, metadata?: any): Promise<void>;
+ /**
+ * Get all IDs in the index
+ */
+ getAllIds(): Promise<string[]>;
  /**
  * Get IDs for a specific field-value combination with caching
  */