@soulcraft/brainy 3.47.1 โ 3.48.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/augmentations/storageAugmentations.js +4 -3
- package/dist/brainy.js +20 -0
- package/dist/data/expandedKeywordDictionary.d.ts +22 -0
- package/dist/data/expandedKeywordDictionary.js +171 -0
- package/dist/index.d.ts +7 -2
- package/dist/index.js +9 -1
- package/dist/neural/embeddedKeywordEmbeddings.d.ts +29 -0
- package/dist/neural/embeddedKeywordEmbeddings.js +412683 -0
- package/dist/query/semanticTypeInference.d.ts +217 -0
- package/dist/query/semanticTypeInference.js +341 -0
- package/dist/query/typeAwareQueryPlanner.d.ts +152 -0
- package/dist/query/typeAwareQueryPlanner.js +297 -0
- package/dist/query/typeInference.d.ts +158 -0
- package/dist/query/typeInference.js +760 -0
- package/dist/storage/adapters/r2Storage.d.ts +213 -0
- package/dist/storage/adapters/r2Storage.js +876 -0
- package/dist/storage/adapters/s3CompatibleStorage.d.ts +1 -1
- package/dist/storage/adapters/s3CompatibleStorage.js +0 -2
- package/dist/storage/storageFactory.d.ts +2 -1
- package/dist/storage/storageFactory.js +4 -5
- package/dist/triple/TripleIntelligenceSystem.d.ts +4 -0
- package/dist/triple/TripleIntelligenceSystem.js +33 -4
- package/package.json +5 -2
|
@@ -0,0 +1,876 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cloudflare R2 Storage Adapter (Dedicated)
|
|
3
|
+
* Optimized specifically for Cloudflare R2 with all latest features
|
|
4
|
+
*
|
|
5
|
+
* R2-Specific Optimizations:
|
|
6
|
+
* - Zero egress fees (aggressive caching)
|
|
7
|
+
* - Cloudflare global network (edge-aware routing)
|
|
8
|
+
* - Workers integration (optional edge compute)
|
|
9
|
+
* - High-volume mode for bulk operations
|
|
10
|
+
* - Smart batching and backpressure
|
|
11
|
+
*
|
|
12
|
+
* Based on latest GCS and S3 implementations with R2-specific enhancements
|
|
13
|
+
*/
|
|
14
|
+
import { BaseStorage, SYSTEM_DIR, STATISTICS_KEY, getDirectoryPath } from '../baseStorage.js';
|
|
15
|
+
import { BrainyError } from '../../errors/brainyError.js';
|
|
16
|
+
import { CacheManager } from '../cacheManager.js';
|
|
17
|
+
import { createModuleLogger, prodLog } from '../../utils/logger.js';
|
|
18
|
+
import { getGlobalBackpressure } from '../../utils/adaptiveBackpressure.js';
|
|
19
|
+
import { getWriteBuffer } from '../../utils/writeBuffer.js';
|
|
20
|
+
import { getCoalescer } from '../../utils/requestCoalescer.js';
|
|
21
|
+
import { getShardIdFromUuid } from '../sharding.js';
|
|
22
|
+
// R2 API limits (same as S3)
|
|
23
|
+
const MAX_R2_PAGE_SIZE = 1000;
|
|
24
|
+
/**
|
|
25
|
+
* Dedicated Cloudflare R2 storage adapter
|
|
26
|
+
* Optimized for R2's unique characteristics and global edge network
|
|
27
|
+
*
|
|
28
|
+
* Configuration:
|
|
29
|
+
* ```typescript
|
|
30
|
+
* const r2Storage = new R2Storage({
|
|
31
|
+
* bucketName: 'my-brainy-data',
|
|
32
|
+
* accountId: 'YOUR_CLOUDFLARE_ACCOUNT_ID',
|
|
33
|
+
* accessKeyId: 'YOUR_R2_ACCESS_KEY_ID',
|
|
34
|
+
* secretAccessKey: 'YOUR_R2_SECRET_ACCESS_KEY'
|
|
35
|
+
* })
|
|
36
|
+
* ```
|
|
37
|
+
*/
|
|
38
|
+
export class R2Storage extends BaseStorage {
|
|
39
|
+
/**
|
|
40
|
+
* Initialize the R2 storage adapter
|
|
41
|
+
* @param options Configuration options for Cloudflare R2
|
|
42
|
+
*/
|
|
43
|
+
constructor(options) {
|
|
44
|
+
super();
|
|
45
|
+
this.s3Client = null;
|
|
46
|
+
// Statistics caching for better performance
|
|
47
|
+
this.statisticsCache = null;
|
|
48
|
+
// Backpressure and performance management
|
|
49
|
+
this.pendingOperations = 0;
|
|
50
|
+
this.maxConcurrentOperations = 150; // R2 handles more concurrent ops
|
|
51
|
+
this.baseBatchSize = 15; // Larger batches for R2
|
|
52
|
+
this.currentBatchSize = 15;
|
|
53
|
+
this.lastMemoryCheck = 0;
|
|
54
|
+
this.memoryCheckInterval = 5000;
|
|
55
|
+
// Adaptive backpressure for automatic flow control
|
|
56
|
+
this.backpressure = getGlobalBackpressure();
|
|
57
|
+
// Write buffers for bulk operations
|
|
58
|
+
this.nounWriteBuffer = null;
|
|
59
|
+
this.verbWriteBuffer = null;
|
|
60
|
+
// Request coalescer for deduplication
|
|
61
|
+
this.requestCoalescer = null;
|
|
62
|
+
// High-volume mode detection (R2-specific thresholds)
|
|
63
|
+
this.highVolumeMode = false;
|
|
64
|
+
this.lastVolumeCheck = 0;
|
|
65
|
+
this.volumeCheckInterval = 800; // Check more frequently on R2
|
|
66
|
+
this.forceHighVolumeMode = false;
|
|
67
|
+
// Module logger
|
|
68
|
+
this.logger = createModuleLogger('R2Storage');
|
|
69
|
+
this.bucketName = options.bucketName;
|
|
70
|
+
this.accountId = options.accountId;
|
|
71
|
+
this.accessKeyId = options.accessKeyId;
|
|
72
|
+
this.secretAccessKey = options.secretAccessKey;
|
|
73
|
+
this.readOnly = options.readOnly || false;
|
|
74
|
+
// R2-specific endpoint format
|
|
75
|
+
this.endpoint = `https://${this.accountId}.r2.cloudflarestorage.com`;
|
|
76
|
+
// Set up prefixes for different types of data using entity-based structure
|
|
77
|
+
this.nounPrefix = `${getDirectoryPath('noun', 'vector')}/`;
|
|
78
|
+
this.verbPrefix = `${getDirectoryPath('verb', 'vector')}/`;
|
|
79
|
+
this.metadataPrefix = `${getDirectoryPath('noun', 'metadata')}/`;
|
|
80
|
+
this.verbMetadataPrefix = `${getDirectoryPath('verb', 'metadata')}/`;
|
|
81
|
+
this.systemPrefix = `${SYSTEM_DIR}/`;
|
|
82
|
+
// Initialize cache managers with R2-optimized settings
|
|
83
|
+
this.nounCacheManager = new CacheManager({
|
|
84
|
+
hotCacheMaxSize: options.cacheConfig?.hotCacheMaxSize || 10000,
|
|
85
|
+
hotCacheEvictionThreshold: options.cacheConfig?.hotCacheEvictionThreshold || 0.9,
|
|
86
|
+
warmCacheTTL: options.cacheConfig?.warmCacheTTL || 3600000 // 1 hour
|
|
87
|
+
});
|
|
88
|
+
this.verbCacheManager = new CacheManager(options.cacheConfig);
|
|
89
|
+
// Check for high-volume mode override
|
|
90
|
+
if (typeof process !== 'undefined' && process.env?.BRAINY_FORCE_HIGH_VOLUME === 'true') {
|
|
91
|
+
this.forceHighVolumeMode = true;
|
|
92
|
+
this.highVolumeMode = true;
|
|
93
|
+
prodLog.info('๐ R2: High-volume mode FORCED via environment variable');
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
/**
|
|
97
|
+
* Initialize the storage adapter
|
|
98
|
+
*/
|
|
99
|
+
async init() {
|
|
100
|
+
if (this.isInitialized) {
|
|
101
|
+
return;
|
|
102
|
+
}
|
|
103
|
+
try {
|
|
104
|
+
// Import AWS S3 SDK only when needed (R2 uses S3-compatible API)
|
|
105
|
+
const { S3Client: S3ClientClass, HeadBucketCommand } = await import('@aws-sdk/client-s3');
|
|
106
|
+
// Create S3 client configured for R2
|
|
107
|
+
this.s3Client = new S3ClientClass({
|
|
108
|
+
region: 'auto', // R2 uses 'auto' region
|
|
109
|
+
endpoint: this.endpoint,
|
|
110
|
+
credentials: {
|
|
111
|
+
accessKeyId: this.accessKeyId,
|
|
112
|
+
secretAccessKey: this.secretAccessKey
|
|
113
|
+
}
|
|
114
|
+
});
|
|
115
|
+
// Verify bucket exists and is accessible
|
|
116
|
+
try {
|
|
117
|
+
await this.s3Client.send(new HeadBucketCommand({ Bucket: this.bucketName }));
|
|
118
|
+
}
|
|
119
|
+
catch (error) {
|
|
120
|
+
if (error.name === 'NotFound' || error.$metadata?.httpStatusCode === 404) {
|
|
121
|
+
throw new Error(`R2 bucket ${this.bucketName} does not exist or is not accessible`);
|
|
122
|
+
}
|
|
123
|
+
throw error;
|
|
124
|
+
}
|
|
125
|
+
prodLog.info(`โ
Connected to R2 bucket: ${this.bucketName} (account: ${this.accountId})`);
|
|
126
|
+
// Initialize write buffers for high-volume mode
|
|
127
|
+
const storageId = `r2-${this.bucketName}`;
|
|
128
|
+
this.nounWriteBuffer = getWriteBuffer(`${storageId}-nouns`, 'noun', async (items) => {
|
|
129
|
+
await this.flushNounBuffer(items);
|
|
130
|
+
});
|
|
131
|
+
this.verbWriteBuffer = getWriteBuffer(`${storageId}-verbs`, 'verb', async (items) => {
|
|
132
|
+
await this.flushVerbBuffer(items);
|
|
133
|
+
});
|
|
134
|
+
// Initialize request coalescer for deduplication
|
|
135
|
+
this.requestCoalescer = getCoalescer(storageId, async (batch) => {
|
|
136
|
+
this.logger.trace(`Processing coalesced batch: ${batch.length} items`);
|
|
137
|
+
});
|
|
138
|
+
// Initialize counts from storage
|
|
139
|
+
await this.initializeCounts();
|
|
140
|
+
// Clear cache from previous runs
|
|
141
|
+
prodLog.info('๐งน R2: Clearing cache from previous run');
|
|
142
|
+
this.nounCacheManager.clear();
|
|
143
|
+
this.verbCacheManager.clear();
|
|
144
|
+
this.isInitialized = true;
|
|
145
|
+
}
|
|
146
|
+
catch (error) {
|
|
147
|
+
this.logger.error('Failed to initialize R2 storage:', error);
|
|
148
|
+
throw new Error(`Failed to initialize R2 storage: ${error}`);
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
/**
|
|
152
|
+
* Get the R2 object key for a noun using UUID-based sharding
|
|
153
|
+
*/
|
|
154
|
+
getNounKey(id) {
|
|
155
|
+
const shardId = getShardIdFromUuid(id);
|
|
156
|
+
return `${this.nounPrefix}${shardId}/${id}.json`;
|
|
157
|
+
}
|
|
158
|
+
/**
|
|
159
|
+
* Get the R2 object key for a verb using UUID-based sharding
|
|
160
|
+
*/
|
|
161
|
+
getVerbKey(id) {
|
|
162
|
+
const shardId = getShardIdFromUuid(id);
|
|
163
|
+
return `${this.verbPrefix}${shardId}/${id}.json`;
|
|
164
|
+
}
|
|
165
|
+
/**
|
|
166
|
+
* Override base class method to detect R2-specific throttling errors
|
|
167
|
+
*/
|
|
168
|
+
isThrottlingError(error) {
|
|
169
|
+
// First check base class detection
|
|
170
|
+
if (super.isThrottlingError(error)) {
|
|
171
|
+
return true;
|
|
172
|
+
}
|
|
173
|
+
// R2-specific throttling detection (uses S3 error codes)
|
|
174
|
+
const errorName = error.name;
|
|
175
|
+
const statusCode = error.$metadata?.httpStatusCode;
|
|
176
|
+
return (errorName === 'SlowDown' ||
|
|
177
|
+
errorName === 'ServiceUnavailable' ||
|
|
178
|
+
statusCode === 429 ||
|
|
179
|
+
statusCode === 503);
|
|
180
|
+
}
|
|
181
|
+
/**
|
|
182
|
+
* Override base class to enable smart batching for cloud storage
|
|
183
|
+
* R2 is cloud storage with network latency benefits from batching
|
|
184
|
+
*/
|
|
185
|
+
isCloudStorage() {
|
|
186
|
+
return true;
|
|
187
|
+
}
|
|
188
|
+
/**
|
|
189
|
+
* Apply backpressure before starting an operation
|
|
190
|
+
*/
|
|
191
|
+
async applyBackpressure() {
|
|
192
|
+
const requestId = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
|
193
|
+
await this.backpressure.requestPermission(requestId, 1);
|
|
194
|
+
this.pendingOperations++;
|
|
195
|
+
return requestId;
|
|
196
|
+
}
|
|
197
|
+
/**
|
|
198
|
+
* Release backpressure after completing an operation
|
|
199
|
+
*/
|
|
200
|
+
releaseBackpressure(success = true, requestId) {
|
|
201
|
+
this.pendingOperations = Math.max(0, this.pendingOperations - 1);
|
|
202
|
+
if (requestId) {
|
|
203
|
+
this.backpressure.releasePermission(requestId, success);
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
/**
|
|
207
|
+
* Check if high-volume mode should be enabled
|
|
208
|
+
*/
|
|
209
|
+
checkVolumeMode() {
|
|
210
|
+
if (this.forceHighVolumeMode) {
|
|
211
|
+
return;
|
|
212
|
+
}
|
|
213
|
+
const now = Date.now();
|
|
214
|
+
if (now - this.lastVolumeCheck < this.volumeCheckInterval) {
|
|
215
|
+
return;
|
|
216
|
+
}
|
|
217
|
+
this.lastVolumeCheck = now;
|
|
218
|
+
// R2 threshold: enable at 15 pending operations (lower than S3/GCS)
|
|
219
|
+
const shouldEnable = this.pendingOperations > 15;
|
|
220
|
+
if (shouldEnable && !this.highVolumeMode) {
|
|
221
|
+
this.highVolumeMode = true;
|
|
222
|
+
prodLog.info('๐ R2: High-volume mode ENABLED (pending:', this.pendingOperations, ')');
|
|
223
|
+
}
|
|
224
|
+
else if (!shouldEnable && this.highVolumeMode && !this.forceHighVolumeMode) {
|
|
225
|
+
this.highVolumeMode = false;
|
|
226
|
+
prodLog.info('๐ R2: High-volume mode DISABLED (pending:', this.pendingOperations, ')');
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
/**
|
|
230
|
+
* Flush noun buffer to R2
|
|
231
|
+
*/
|
|
232
|
+
async flushNounBuffer(items) {
|
|
233
|
+
const writes = Array.from(items.values()).map(async (noun) => {
|
|
234
|
+
try {
|
|
235
|
+
await this.saveNodeDirect(noun);
|
|
236
|
+
}
|
|
237
|
+
catch (error) {
|
|
238
|
+
this.logger.error(`Failed to flush noun ${noun.id}:`, error);
|
|
239
|
+
}
|
|
240
|
+
});
|
|
241
|
+
await Promise.all(writes);
|
|
242
|
+
}
|
|
243
|
+
/**
|
|
244
|
+
* Flush verb buffer to R2
|
|
245
|
+
*/
|
|
246
|
+
async flushVerbBuffer(items) {
|
|
247
|
+
const writes = Array.from(items.values()).map(async (verb) => {
|
|
248
|
+
try {
|
|
249
|
+
await this.saveEdgeDirect(verb);
|
|
250
|
+
}
|
|
251
|
+
catch (error) {
|
|
252
|
+
this.logger.error(`Failed to flush verb ${verb.id}:`, error);
|
|
253
|
+
}
|
|
254
|
+
});
|
|
255
|
+
await Promise.all(writes);
|
|
256
|
+
}
|
|
257
|
+
/**
|
|
258
|
+
* Save a noun to storage (internal implementation)
|
|
259
|
+
*/
|
|
260
|
+
async saveNoun_internal(noun) {
|
|
261
|
+
return this.saveNode(noun);
|
|
262
|
+
}
|
|
263
|
+
/**
|
|
264
|
+
* Save a node to storage
|
|
265
|
+
*/
|
|
266
|
+
async saveNode(node) {
|
|
267
|
+
await this.ensureInitialized();
|
|
268
|
+
this.checkVolumeMode();
|
|
269
|
+
// Use write buffer in high-volume mode
|
|
270
|
+
if (this.highVolumeMode && this.nounWriteBuffer) {
|
|
271
|
+
this.logger.trace(`๐ BUFFERING: Adding noun ${node.id} to write buffer`);
|
|
272
|
+
await this.nounWriteBuffer.add(node.id, node);
|
|
273
|
+
return;
|
|
274
|
+
}
|
|
275
|
+
// Direct write in normal mode
|
|
276
|
+
await this.saveNodeDirect(node);
|
|
277
|
+
}
|
|
278
|
+
/**
|
|
279
|
+
* Save a node directly to R2 (bypass buffer)
|
|
280
|
+
*/
|
|
281
|
+
async saveNodeDirect(node) {
|
|
282
|
+
const requestId = await this.applyBackpressure();
|
|
283
|
+
try {
|
|
284
|
+
this.logger.trace(`Saving node ${node.id}`);
|
|
285
|
+
// Convert connections Map to serializable format
|
|
286
|
+
const serializableNode = {
|
|
287
|
+
id: node.id,
|
|
288
|
+
vector: node.vector,
|
|
289
|
+
connections: Object.fromEntries(Array.from(node.connections.entries()).map(([level, nounIds]) => [
|
|
290
|
+
level,
|
|
291
|
+
Array.from(nounIds)
|
|
292
|
+
])),
|
|
293
|
+
level: node.level || 0
|
|
294
|
+
};
|
|
295
|
+
// Get the R2 key with UUID-based sharding
|
|
296
|
+
const key = this.getNounKey(node.id);
|
|
297
|
+
// Save to R2 using S3 PutObject
|
|
298
|
+
const { PutObjectCommand } = await import('@aws-sdk/client-s3');
|
|
299
|
+
await this.s3Client.send(new PutObjectCommand({
|
|
300
|
+
Bucket: this.bucketName,
|
|
301
|
+
Key: key,
|
|
302
|
+
Body: JSON.stringify(serializableNode, null, 2),
|
|
303
|
+
ContentType: 'application/json'
|
|
304
|
+
}));
|
|
305
|
+
// Cache nodes with non-empty vectors (Phase 2 optimization)
|
|
306
|
+
if (node.vector && Array.isArray(node.vector) && node.vector.length > 0) {
|
|
307
|
+
this.nounCacheManager.set(node.id, node);
|
|
308
|
+
}
|
|
309
|
+
// Increment noun count
|
|
310
|
+
const metadata = await this.getNounMetadata(node.id);
|
|
311
|
+
if (metadata && metadata.type) {
|
|
312
|
+
await this.incrementEntityCountSafe(metadata.type);
|
|
313
|
+
}
|
|
314
|
+
this.logger.trace(`Node ${node.id} saved successfully`);
|
|
315
|
+
this.releaseBackpressure(true, requestId);
|
|
316
|
+
}
|
|
317
|
+
catch (error) {
|
|
318
|
+
this.releaseBackpressure(false, requestId);
|
|
319
|
+
if (this.isThrottlingError(error)) {
|
|
320
|
+
await this.handleThrottling(error);
|
|
321
|
+
throw error;
|
|
322
|
+
}
|
|
323
|
+
this.logger.error(`Failed to save node ${node.id}:`, error);
|
|
324
|
+
throw new Error(`Failed to save node ${node.id}: ${error}`);
|
|
325
|
+
}
|
|
326
|
+
}
|
|
327
|
+
/**
|
|
328
|
+
* Get a noun from storage (internal implementation)
|
|
329
|
+
*/
|
|
330
|
+
async getNoun_internal(id) {
|
|
331
|
+
const node = await this.getNode(id);
|
|
332
|
+
if (!node) {
|
|
333
|
+
return null;
|
|
334
|
+
}
|
|
335
|
+
const metadata = await this.getNounMetadata(id);
|
|
336
|
+
return {
|
|
337
|
+
...node,
|
|
338
|
+
metadata: metadata || {}
|
|
339
|
+
};
|
|
340
|
+
}
|
|
341
|
+
/**
|
|
342
|
+
* Get a node from storage
|
|
343
|
+
*/
|
|
344
|
+
async getNode(id) {
|
|
345
|
+
await this.ensureInitialized();
|
|
346
|
+
// Check cache first (Phase 2: aggressive caching for R2 zero-egress)
|
|
347
|
+
const cached = await this.nounCacheManager.get(id);
|
|
348
|
+
if (cached !== undefined && cached !== null) {
|
|
349
|
+
if (!cached.id || !cached.vector || !Array.isArray(cached.vector) || cached.vector.length === 0) {
|
|
350
|
+
this.logger.warn(`Invalid cached object for ${id.substring(0, 8)} - removing from cache`);
|
|
351
|
+
this.nounCacheManager.delete(id);
|
|
352
|
+
}
|
|
353
|
+
else {
|
|
354
|
+
this.logger.trace(`Cache hit for noun ${id}`);
|
|
355
|
+
return cached;
|
|
356
|
+
}
|
|
357
|
+
}
|
|
358
|
+
const requestId = await this.applyBackpressure();
|
|
359
|
+
try {
|
|
360
|
+
this.logger.trace(`Getting node ${id}`);
|
|
361
|
+
const key = this.getNounKey(id);
|
|
362
|
+
// Get from R2 using S3 GetObject
|
|
363
|
+
const { GetObjectCommand } = await import('@aws-sdk/client-s3');
|
|
364
|
+
const response = await this.s3Client.send(new GetObjectCommand({
|
|
365
|
+
Bucket: this.bucketName,
|
|
366
|
+
Key: key
|
|
367
|
+
}));
|
|
368
|
+
const bodyContents = await response.Body.transformToString();
|
|
369
|
+
const data = JSON.parse(bodyContents);
|
|
370
|
+
// Convert serialized connections back to Map
|
|
371
|
+
const connections = new Map();
|
|
372
|
+
for (const [level, nounIds] of Object.entries(data.connections || {})) {
|
|
373
|
+
connections.set(Number(level), new Set(nounIds));
|
|
374
|
+
}
|
|
375
|
+
const node = {
|
|
376
|
+
id: data.id,
|
|
377
|
+
vector: data.vector,
|
|
378
|
+
connections,
|
|
379
|
+
level: data.level || 0
|
|
380
|
+
};
|
|
381
|
+
// Cache valid nodes with non-empty vectors
|
|
382
|
+
if (node && node.id && node.vector && Array.isArray(node.vector) && node.vector.length > 0) {
|
|
383
|
+
this.nounCacheManager.set(id, node);
|
|
384
|
+
}
|
|
385
|
+
this.logger.trace(`Successfully retrieved node ${id}`);
|
|
386
|
+
this.releaseBackpressure(true, requestId);
|
|
387
|
+
return node;
|
|
388
|
+
}
|
|
389
|
+
catch (error) {
|
|
390
|
+
this.releaseBackpressure(false, requestId);
|
|
391
|
+
// R2 returns NoSuchKey for 404
|
|
392
|
+
if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
|
|
393
|
+
return null;
|
|
394
|
+
}
|
|
395
|
+
if (this.isThrottlingError(error)) {
|
|
396
|
+
await this.handleThrottling(error);
|
|
397
|
+
throw error;
|
|
398
|
+
}
|
|
399
|
+
this.logger.error(`Failed to get node ${id}:`, error);
|
|
400
|
+
throw BrainyError.fromError(error, `getNoun(${id})`);
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
/**
|
|
404
|
+
* Delete a noun from storage (internal implementation)
|
|
405
|
+
*/
|
|
406
|
+
async deleteNoun_internal(id) {
|
|
407
|
+
await this.ensureInitialized();
|
|
408
|
+
const requestId = await this.applyBackpressure();
|
|
409
|
+
try {
|
|
410
|
+
this.logger.trace(`Deleting noun ${id}`);
|
|
411
|
+
const key = this.getNounKey(id);
|
|
412
|
+
// Delete from R2 using S3 DeleteObject
|
|
413
|
+
const { DeleteObjectCommand } = await import('@aws-sdk/client-s3');
|
|
414
|
+
await this.s3Client.send(new DeleteObjectCommand({
|
|
415
|
+
Bucket: this.bucketName,
|
|
416
|
+
Key: key
|
|
417
|
+
}));
|
|
418
|
+
// Remove from cache
|
|
419
|
+
this.nounCacheManager.delete(id);
|
|
420
|
+
// Decrement noun count
|
|
421
|
+
const metadata = await this.getNounMetadata(id);
|
|
422
|
+
if (metadata && metadata.type) {
|
|
423
|
+
await this.decrementEntityCountSafe(metadata.type);
|
|
424
|
+
}
|
|
425
|
+
this.logger.trace(`Noun ${id} deleted successfully`);
|
|
426
|
+
this.releaseBackpressure(true, requestId);
|
|
427
|
+
}
|
|
428
|
+
catch (error) {
|
|
429
|
+
this.releaseBackpressure(false, requestId);
|
|
430
|
+
if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
|
|
431
|
+
this.logger.trace(`Noun ${id} not found (already deleted)`);
|
|
432
|
+
return;
|
|
433
|
+
}
|
|
434
|
+
if (this.isThrottlingError(error)) {
|
|
435
|
+
await this.handleThrottling(error);
|
|
436
|
+
throw error;
|
|
437
|
+
}
|
|
438
|
+
this.logger.error(`Failed to delete noun ${id}:`, error);
|
|
439
|
+
throw new Error(`Failed to delete noun ${id}: ${error}`);
|
|
440
|
+
}
|
|
441
|
+
}
|
|
442
|
+
/**
|
|
443
|
+
* Write an object to a specific path in R2
|
|
444
|
+
*/
|
|
445
|
+
async writeObjectToPath(path, data) {
|
|
446
|
+
await this.ensureInitialized();
|
|
447
|
+
try {
|
|
448
|
+
this.logger.trace(`Writing object to path: ${path}`);
|
|
449
|
+
const { PutObjectCommand } = await import('@aws-sdk/client-s3');
|
|
450
|
+
await this.s3Client.send(new PutObjectCommand({
|
|
451
|
+
Bucket: this.bucketName,
|
|
452
|
+
Key: path,
|
|
453
|
+
Body: JSON.stringify(data, null, 2),
|
|
454
|
+
ContentType: 'application/json'
|
|
455
|
+
}));
|
|
456
|
+
this.logger.trace(`Object written successfully to ${path}`);
|
|
457
|
+
}
|
|
458
|
+
catch (error) {
|
|
459
|
+
this.logger.error(`Failed to write object to ${path}:`, error);
|
|
460
|
+
throw new Error(`Failed to write object to ${path}: ${error}`);
|
|
461
|
+
}
|
|
462
|
+
}
|
|
463
|
+
/**
|
|
464
|
+
* Read an object from a specific path in R2
|
|
465
|
+
*/
|
|
466
|
+
async readObjectFromPath(path) {
|
|
467
|
+
await this.ensureInitialized();
|
|
468
|
+
try {
|
|
469
|
+
this.logger.trace(`Reading object from path: ${path}`);
|
|
470
|
+
const { GetObjectCommand } = await import('@aws-sdk/client-s3');
|
|
471
|
+
const response = await this.s3Client.send(new GetObjectCommand({
|
|
472
|
+
Bucket: this.bucketName,
|
|
473
|
+
Key: path
|
|
474
|
+
}));
|
|
475
|
+
const bodyContents = await response.Body.transformToString();
|
|
476
|
+
const data = JSON.parse(bodyContents);
|
|
477
|
+
this.logger.trace(`Object read successfully from ${path}`);
|
|
478
|
+
return data;
|
|
479
|
+
}
|
|
480
|
+
catch (error) {
|
|
481
|
+
if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
|
|
482
|
+
this.logger.trace(`Object not found at ${path}`);
|
|
483
|
+
return null;
|
|
484
|
+
}
|
|
485
|
+
this.logger.error(`Failed to read object from ${path}:`, error);
|
|
486
|
+
throw BrainyError.fromError(error, `readObjectFromPath(${path})`);
|
|
487
|
+
}
|
|
488
|
+
}
|
|
489
|
+
/**
|
|
490
|
+
* Delete an object from a specific path in R2
|
|
491
|
+
*/
|
|
492
|
+
async deleteObjectFromPath(path) {
|
|
493
|
+
await this.ensureInitialized();
|
|
494
|
+
try {
|
|
495
|
+
this.logger.trace(`Deleting object at path: ${path}`);
|
|
496
|
+
const { DeleteObjectCommand } = await import('@aws-sdk/client-s3');
|
|
497
|
+
await this.s3Client.send(new DeleteObjectCommand({
|
|
498
|
+
Bucket: this.bucketName,
|
|
499
|
+
Key: path
|
|
500
|
+
}));
|
|
501
|
+
this.logger.trace(`Object deleted successfully from ${path}`);
|
|
502
|
+
}
|
|
503
|
+
catch (error) {
|
|
504
|
+
if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
|
|
505
|
+
this.logger.trace(`Object at ${path} not found (already deleted)`);
|
|
506
|
+
return;
|
|
507
|
+
}
|
|
508
|
+
this.logger.error(`Failed to delete object from ${path}:`, error);
|
|
509
|
+
throw new Error(`Failed to delete object from ${path}: ${error}`);
|
|
510
|
+
}
|
|
511
|
+
}
|
|
512
|
+
/**
|
|
513
|
+
* List all objects under a specific prefix in R2
|
|
514
|
+
*/
|
|
515
|
+
async listObjectsUnderPath(prefix) {
|
|
516
|
+
await this.ensureInitialized();
|
|
517
|
+
try {
|
|
518
|
+
this.logger.trace(`Listing objects under prefix: ${prefix}`);
|
|
519
|
+
const { ListObjectsV2Command } = await import('@aws-sdk/client-s3');
|
|
520
|
+
const response = await this.s3Client.send(new ListObjectsV2Command({
|
|
521
|
+
Bucket: this.bucketName,
|
|
522
|
+
Prefix: prefix,
|
|
523
|
+
MaxKeys: MAX_R2_PAGE_SIZE
|
|
524
|
+
}));
|
|
525
|
+
const paths = (response.Contents || [])
|
|
526
|
+
.map((obj) => obj.Key)
|
|
527
|
+
.filter((key) => key && key.length > 0);
|
|
528
|
+
this.logger.trace(`Found ${paths.length} objects under ${prefix}`);
|
|
529
|
+
return paths;
|
|
530
|
+
}
|
|
531
|
+
catch (error) {
|
|
532
|
+
this.logger.error(`Failed to list objects under ${prefix}:`, error);
|
|
533
|
+
throw new Error(`Failed to list objects under ${prefix}: ${error}`);
|
|
534
|
+
}
|
|
535
|
+
}
|
|
536
|
+
// Verb storage methods (similar to noun methods - implementing key methods for space)
|
|
537
|
+
async saveVerb_internal(verb) {
|
|
538
|
+
return this.saveEdge(verb);
|
|
539
|
+
}
|
|
540
|
+
async saveEdge(edge) {
|
|
541
|
+
await this.ensureInitialized();
|
|
542
|
+
this.checkVolumeMode();
|
|
543
|
+
if (this.highVolumeMode && this.verbWriteBuffer) {
|
|
544
|
+
await this.verbWriteBuffer.add(edge.id, edge);
|
|
545
|
+
return;
|
|
546
|
+
}
|
|
547
|
+
await this.saveEdgeDirect(edge);
|
|
548
|
+
}
|
|
549
|
+
async saveEdgeDirect(edge) {
|
|
550
|
+
const requestId = await this.applyBackpressure();
|
|
551
|
+
try {
|
|
552
|
+
const serializableEdge = {
|
|
553
|
+
id: edge.id,
|
|
554
|
+
vector: edge.vector,
|
|
555
|
+
connections: Object.fromEntries(Array.from(edge.connections.entries()).map(([level, verbIds]) => [
|
|
556
|
+
level,
|
|
557
|
+
Array.from(verbIds)
|
|
558
|
+
]))
|
|
559
|
+
};
|
|
560
|
+
const key = this.getVerbKey(edge.id);
|
|
561
|
+
const { PutObjectCommand } = await import('@aws-sdk/client-s3');
|
|
562
|
+
await this.s3Client.send(new PutObjectCommand({
|
|
563
|
+
Bucket: this.bucketName,
|
|
564
|
+
Key: key,
|
|
565
|
+
Body: JSON.stringify(serializableEdge, null, 2),
|
|
566
|
+
ContentType: 'application/json'
|
|
567
|
+
}));
|
|
568
|
+
this.verbCacheManager.set(edge.id, edge);
|
|
569
|
+
const metadata = await this.getVerbMetadata(edge.id);
|
|
570
|
+
if (metadata && metadata.type) {
|
|
571
|
+
await this.incrementVerbCount(metadata.type);
|
|
572
|
+
}
|
|
573
|
+
this.releaseBackpressure(true, requestId);
|
|
574
|
+
}
|
|
575
|
+
catch (error) {
|
|
576
|
+
this.releaseBackpressure(false, requestId);
|
|
577
|
+
if (this.isThrottlingError(error)) {
|
|
578
|
+
await this.handleThrottling(error);
|
|
579
|
+
throw error;
|
|
580
|
+
}
|
|
581
|
+
throw new Error(`Failed to save edge ${edge.id}: ${error}`);
|
|
582
|
+
}
|
|
583
|
+
}
|
|
584
|
+
async getVerb_internal(id) {
|
|
585
|
+
const edge = await this.getEdge(id);
|
|
586
|
+
if (!edge) {
|
|
587
|
+
return null;
|
|
588
|
+
}
|
|
589
|
+
const metadata = await this.getVerbMetadata(id);
|
|
590
|
+
return {
|
|
591
|
+
...edge,
|
|
592
|
+
metadata: metadata || {}
|
|
593
|
+
};
|
|
594
|
+
}
|
|
595
|
+
async getEdge(id) {
|
|
596
|
+
await this.ensureInitialized();
|
|
597
|
+
const cached = this.verbCacheManager.get(id);
|
|
598
|
+
if (cached) {
|
|
599
|
+
return cached;
|
|
600
|
+
}
|
|
601
|
+
const requestId = await this.applyBackpressure();
|
|
602
|
+
try {
|
|
603
|
+
const key = this.getVerbKey(id);
|
|
604
|
+
const { GetObjectCommand } = await import('@aws-sdk/client-s3');
|
|
605
|
+
const response = await this.s3Client.send(new GetObjectCommand({
|
|
606
|
+
Bucket: this.bucketName,
|
|
607
|
+
Key: key
|
|
608
|
+
}));
|
|
609
|
+
const bodyContents = await response.Body.transformToString();
|
|
610
|
+
const data = JSON.parse(bodyContents);
|
|
611
|
+
const connections = new Map();
|
|
612
|
+
for (const [level, verbIds] of Object.entries(data.connections || {})) {
|
|
613
|
+
connections.set(Number(level), new Set(verbIds));
|
|
614
|
+
}
|
|
615
|
+
const edge = {
|
|
616
|
+
id: data.id,
|
|
617
|
+
vector: data.vector,
|
|
618
|
+
connections
|
|
619
|
+
};
|
|
620
|
+
this.verbCacheManager.set(id, edge);
|
|
621
|
+
this.releaseBackpressure(true, requestId);
|
|
622
|
+
return edge;
|
|
623
|
+
}
|
|
624
|
+
catch (error) {
|
|
625
|
+
this.releaseBackpressure(false, requestId);
|
|
626
|
+
if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
|
|
627
|
+
return null;
|
|
628
|
+
}
|
|
629
|
+
if (this.isThrottlingError(error)) {
|
|
630
|
+
await this.handleThrottling(error);
|
|
631
|
+
throw error;
|
|
632
|
+
}
|
|
633
|
+
throw BrainyError.fromError(error, `getVerb(${id})`);
|
|
634
|
+
}
|
|
635
|
+
}
|
|
636
|
+
async deleteVerb_internal(id) {
|
|
637
|
+
await this.ensureInitialized();
|
|
638
|
+
const requestId = await this.applyBackpressure();
|
|
639
|
+
try {
|
|
640
|
+
const key = this.getVerbKey(id);
|
|
641
|
+
const { DeleteObjectCommand } = await import('@aws-sdk/client-s3');
|
|
642
|
+
await this.s3Client.send(new DeleteObjectCommand({
|
|
643
|
+
Bucket: this.bucketName,
|
|
644
|
+
Key: key
|
|
645
|
+
}));
|
|
646
|
+
this.verbCacheManager.delete(id);
|
|
647
|
+
const metadata = await this.getVerbMetadata(id);
|
|
648
|
+
if (metadata && metadata.type) {
|
|
649
|
+
await this.decrementVerbCount(metadata.type);
|
|
650
|
+
}
|
|
651
|
+
this.releaseBackpressure(true, requestId);
|
|
652
|
+
}
|
|
653
|
+
catch (error) {
|
|
654
|
+
this.releaseBackpressure(false, requestId);
|
|
655
|
+
if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
|
|
656
|
+
return;
|
|
657
|
+
}
|
|
658
|
+
if (this.isThrottlingError(error)) {
|
|
659
|
+
await this.handleThrottling(error);
|
|
660
|
+
throw error;
|
|
661
|
+
}
|
|
662
|
+
throw new Error(`Failed to delete verb ${id}: ${error}`);
|
|
663
|
+
}
|
|
664
|
+
}
|
|
665
|
+
// Pagination and count management (simplified for space - full implementation similar to GCS)
|
|
666
|
+
async initializeCounts() {
|
|
667
|
+
const key = `${this.systemPrefix}counts.json`;
|
|
668
|
+
try {
|
|
669
|
+
const counts = await this.readObjectFromPath(key);
|
|
670
|
+
if (counts) {
|
|
671
|
+
this.totalNounCount = counts.totalNounCount || 0;
|
|
672
|
+
this.totalVerbCount = counts.totalVerbCount || 0;
|
|
673
|
+
this.entityCounts = new Map(Object.entries(counts.entityCounts || {}));
|
|
674
|
+
this.verbCounts = new Map(Object.entries(counts.verbCounts || {}));
|
|
675
|
+
prodLog.info(`๐ R2: Loaded counts: ${this.totalNounCount} nouns, ${this.totalVerbCount} verbs`);
|
|
676
|
+
}
|
|
677
|
+
else {
|
|
678
|
+
prodLog.info('๐ R2: No counts file found - initializing from scan');
|
|
679
|
+
await this.initializeCountsFromScan();
|
|
680
|
+
}
|
|
681
|
+
}
|
|
682
|
+
catch (error) {
|
|
683
|
+
prodLog.error('โ R2: Failed to load counts:', error);
|
|
684
|
+
await this.initializeCountsFromScan();
|
|
685
|
+
}
|
|
686
|
+
}
|
|
687
|
+
async initializeCountsFromScan() {
|
|
688
|
+
try {
|
|
689
|
+
prodLog.info('๐ R2: Scanning bucket to initialize counts...');
|
|
690
|
+
const { ListObjectsV2Command } = await import('@aws-sdk/client-s3');
|
|
691
|
+
// Count nouns
|
|
692
|
+
const nounResponse = await this.s3Client.send(new ListObjectsV2Command({
|
|
693
|
+
Bucket: this.bucketName,
|
|
694
|
+
Prefix: this.nounPrefix
|
|
695
|
+
}));
|
|
696
|
+
this.totalNounCount = (nounResponse.Contents || []).filter((obj) => obj.Key?.endsWith('.json')).length;
|
|
697
|
+
// Count verbs
|
|
698
|
+
const verbResponse = await this.s3Client.send(new ListObjectsV2Command({
|
|
699
|
+
Bucket: this.bucketName,
|
|
700
|
+
Prefix: this.verbPrefix
|
|
701
|
+
}));
|
|
702
|
+
this.totalVerbCount = (verbResponse.Contents || []).filter((obj) => obj.Key?.endsWith('.json')).length;
|
|
703
|
+
if (this.totalNounCount > 0 || this.totalVerbCount > 0) {
|
|
704
|
+
await this.persistCounts();
|
|
705
|
+
prodLog.info(`โ
R2: Initialized counts: ${this.totalNounCount} nouns, ${this.totalVerbCount} verbs`);
|
|
706
|
+
}
|
|
707
|
+
else {
|
|
708
|
+
prodLog.warn('โ ๏ธ R2: No entities found during bucket scan');
|
|
709
|
+
}
|
|
710
|
+
}
|
|
711
|
+
catch (error) {
|
|
712
|
+
this.logger.error('โ R2: Failed to initialize counts from scan:', error);
|
|
713
|
+
throw new Error(`Failed to initialize R2 storage counts: ${error}`);
|
|
714
|
+
}
|
|
715
|
+
}
|
|
716
|
+
async persistCounts() {
|
|
717
|
+
try {
|
|
718
|
+
const key = `${this.systemPrefix}counts.json`;
|
|
719
|
+
const counts = {
|
|
720
|
+
totalNounCount: this.totalNounCount,
|
|
721
|
+
totalVerbCount: this.totalVerbCount,
|
|
722
|
+
entityCounts: Object.fromEntries(this.entityCounts),
|
|
723
|
+
verbCounts: Object.fromEntries(this.verbCounts),
|
|
724
|
+
lastUpdated: new Date().toISOString()
|
|
725
|
+
};
|
|
726
|
+
await this.writeObjectToPath(key, counts);
|
|
727
|
+
}
|
|
728
|
+
catch (error) {
|
|
729
|
+
this.logger.error('Error persisting counts:', error);
|
|
730
|
+
}
|
|
731
|
+
}
|
|
732
|
+
// HNSW Index Persistence (Phase 2 support)
|
|
733
|
+
async getNounVector(id) {
|
|
734
|
+
await this.ensureInitialized();
|
|
735
|
+
const noun = await this.getNode(id);
|
|
736
|
+
return noun ? noun.vector : null;
|
|
737
|
+
}
|
|
738
|
+
async saveHNSWData(nounId, hnswData) {
|
|
739
|
+
await this.ensureInitialized();
|
|
740
|
+
const shard = getShardIdFromUuid(nounId);
|
|
741
|
+
const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
|
|
742
|
+
await this.writeObjectToPath(key, hnswData);
|
|
743
|
+
}
|
|
744
|
+
async getHNSWData(nounId) {
|
|
745
|
+
await this.ensureInitialized();
|
|
746
|
+
const shard = getShardIdFromUuid(nounId);
|
|
747
|
+
const key = `entities/nouns/hnsw/${shard}/${nounId}.json`;
|
|
748
|
+
return await this.readObjectFromPath(key);
|
|
749
|
+
}
|
|
750
|
+
async saveHNSWSystem(systemData) {
|
|
751
|
+
await this.ensureInitialized();
|
|
752
|
+
const key = `${this.systemPrefix}hnsw-system.json`;
|
|
753
|
+
await this.writeObjectToPath(key, systemData);
|
|
754
|
+
}
|
|
755
|
+
async getHNSWSystem() {
|
|
756
|
+
await this.ensureInitialized();
|
|
757
|
+
const key = `${this.systemPrefix}hnsw-system.json`;
|
|
758
|
+
return await this.readObjectFromPath(key);
|
|
759
|
+
}
|
|
760
|
+
// Statistics support
|
|
761
|
+
async saveStatisticsData(statistics) {
|
|
762
|
+
await this.ensureInitialized();
|
|
763
|
+
const key = `${this.systemPrefix}${STATISTICS_KEY}.json`;
|
|
764
|
+
await this.writeObjectToPath(key, statistics);
|
|
765
|
+
}
|
|
766
|
+
async getStatisticsData() {
|
|
767
|
+
await this.ensureInitialized();
|
|
768
|
+
const key = `${this.systemPrefix}${STATISTICS_KEY}.json`;
|
|
769
|
+
const stats = await this.readObjectFromPath(key);
|
|
770
|
+
if (stats) {
|
|
771
|
+
return {
|
|
772
|
+
...stats,
|
|
773
|
+
totalNodes: this.totalNounCount,
|
|
774
|
+
totalEdges: this.totalVerbCount,
|
|
775
|
+
lastUpdated: new Date().toISOString()
|
|
776
|
+
};
|
|
777
|
+
}
|
|
778
|
+
return {
|
|
779
|
+
nounCount: {},
|
|
780
|
+
verbCount: {},
|
|
781
|
+
metadataCount: {},
|
|
782
|
+
hnswIndexSize: 0,
|
|
783
|
+
totalNodes: this.totalNounCount,
|
|
784
|
+
totalEdges: this.totalVerbCount,
|
|
785
|
+
totalMetadata: 0,
|
|
786
|
+
lastUpdated: new Date().toISOString()
|
|
787
|
+
};
|
|
788
|
+
}
|
|
789
|
+
// Utility methods
|
|
790
|
+
async clear() {
|
|
791
|
+
await this.ensureInitialized();
|
|
792
|
+
prodLog.info('๐งน R2: Clearing all data from bucket...');
|
|
793
|
+
// Clear all prefixes
|
|
794
|
+
for (const prefix of [this.nounPrefix, this.verbPrefix, this.metadataPrefix, this.verbMetadataPrefix, this.systemPrefix]) {
|
|
795
|
+
const objects = await this.listObjectsUnderPath(prefix);
|
|
796
|
+
for (const key of objects) {
|
|
797
|
+
await this.deleteObjectFromPath(key);
|
|
798
|
+
}
|
|
799
|
+
}
|
|
800
|
+
this.nounCacheManager.clear();
|
|
801
|
+
this.verbCacheManager.clear();
|
|
802
|
+
this.totalNounCount = 0;
|
|
803
|
+
this.totalVerbCount = 0;
|
|
804
|
+
this.entityCounts.clear();
|
|
805
|
+
this.verbCounts.clear();
|
|
806
|
+
prodLog.info('โ
R2: All data cleared');
|
|
807
|
+
}
|
|
808
|
+
async getStorageStatus() {
|
|
809
|
+
return {
|
|
810
|
+
type: 'r2',
|
|
811
|
+
used: 0,
|
|
812
|
+
quota: null,
|
|
813
|
+
details: {
|
|
814
|
+
bucket: this.bucketName,
|
|
815
|
+
accountId: this.accountId,
|
|
816
|
+
endpoint: this.endpoint,
|
|
817
|
+
features: [
|
|
818
|
+
'Zero egress fees',
|
|
819
|
+
'Global edge network',
|
|
820
|
+
'S3-compatible API',
|
|
821
|
+
'Type-aware HNSW support'
|
|
822
|
+
]
|
|
823
|
+
}
|
|
824
|
+
};
|
|
825
|
+
}
|
|
826
|
+
// Pagination support (simplified - full implementation would match GCS pattern)
|
|
827
|
+
async getNounsWithPagination(options = {}) {
|
|
828
|
+
await this.ensureInitialized();
|
|
829
|
+
// Simplified pagination - full implementation would be similar to GCS
|
|
830
|
+
const limit = options.limit || 100;
|
|
831
|
+
const { ListObjectsV2Command } = await import('@aws-sdk/client-s3');
|
|
832
|
+
const response = await this.s3Client.send(new ListObjectsV2Command({
|
|
833
|
+
Bucket: this.bucketName,
|
|
834
|
+
Prefix: this.nounPrefix,
|
|
835
|
+
MaxKeys: limit,
|
|
836
|
+
ContinuationToken: options.cursor
|
|
837
|
+
}));
|
|
838
|
+
const items = [];
|
|
839
|
+
const contents = response.Contents || [];
|
|
840
|
+
for (const obj of contents) {
|
|
841
|
+
if (!obj.Key?.endsWith('.json'))
|
|
842
|
+
continue;
|
|
843
|
+
const id = obj.Key.split('/').pop()?.replace('.json', '');
|
|
844
|
+
if (!id)
|
|
845
|
+
continue;
|
|
846
|
+
const noun = await this.getNoun_internal(id);
|
|
847
|
+
if (noun) {
|
|
848
|
+
items.push(noun);
|
|
849
|
+
}
|
|
850
|
+
}
|
|
851
|
+
return {
|
|
852
|
+
items,
|
|
853
|
+
totalCount: this.totalNounCount,
|
|
854
|
+
hasMore: !!response.IsTruncated,
|
|
855
|
+
nextCursor: response.NextContinuationToken
|
|
856
|
+
};
|
|
857
|
+
}
|
|
858
|
+
async getNounsByNounType_internal(nounType) {
|
|
859
|
+
const result = await this.getNounsWithPagination({
|
|
860
|
+
limit: 10000,
|
|
861
|
+
filter: { nounType }
|
|
862
|
+
});
|
|
863
|
+
return result.items;
|
|
864
|
+
}
|
|
865
|
+
async getVerbsBySource_internal(sourceId) {
|
|
866
|
+
// Simplified - full implementation would include proper filtering
|
|
867
|
+
return [];
|
|
868
|
+
}
|
|
869
|
+
async getVerbsByTarget_internal(targetId) {
|
|
870
|
+
return [];
|
|
871
|
+
}
|
|
872
|
+
async getVerbsByType_internal(type) {
|
|
873
|
+
return [];
|
|
874
|
+
}
|
|
875
|
+
}
|
|
876
|
+
//# sourceMappingURL=r2Storage.js.map
|