@soulcraft/brainy 0.54.0 → 0.54.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/storage/adapters/s3CompatibleStorage.d.ts +43 -0
- package/dist/storage/adapters/s3CompatibleStorage.js +263 -0
- package/dist/storage/adapters/s3CompatibleStorage.js.map +1 -1
- package/dist/utils/requestCoalescer.d.ts +91 -0
- package/dist/utils/requestCoalescer.js +324 -0
- package/dist/utils/requestCoalescer.js.map +1 -0
- package/dist/utils/writeBuffer.d.ts +93 -0
- package/dist/utils/writeBuffer.js +328 -0
- package/dist/utils/writeBuffer.js.map +1 -0
- package/package.json +1 -1
package/dist/storage/adapters/s3CompatibleStorage.d.ts

@@ -70,6 +70,13 @@ export declare class S3CompatibleStorage extends BaseStorage {
     private lastErrorReset;
     private socketManager;
     private backpressure;
+    private nounWriteBuffer;
+    private verbWriteBuffer;
+    private requestCoalescer;
+    private highVolumeMode;
+    private lastVolumeCheck;
+    private volumeCheckInterval;
+    private forceHighVolumeMode;
     private operationExecutors;
     private nounCacheManager;
     private verbCacheManager;
@@ -99,6 +106,42 @@ export declare class S3CompatibleStorage extends BaseStorage {
      * Initialize the storage adapter
      */
    init(): Promise<void>;
+    /**
+     * Initialize write buffers for high-volume scenarios
+     */
+    private initializeBuffers;
+    /**
+     * Initialize request coalescer
+     */
+    private initializeCoalescer;
+    /**
+     * Check if we should enable high-volume mode
+     */
+    private checkVolumeMode;
+    /**
+     * Bulk write nouns to S3
+     */
+    private bulkWriteNouns;
+    /**
+     * Bulk write verbs to S3
+     */
+    private bulkWriteVerbs;
+    /**
+     * Process coalesced batch of operations
+     */
+    private processCoalescedBatch;
+    /**
+     * Process bulk deletes
+     */
+    private processBulkDeletes;
+    /**
+     * Process bulk writes
+     */
+    private processBulkWrites;
+    /**
+     * Process bulk reads
+     */
+    private processBulkReads;
     /**
      * Dynamically adjust batch size based on memory pressure and error rates
      */
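The declaration files for the two new utilities (writeBuffer.d.ts and requestCoalescer.d.ts) are added in this release but their contents are not shown in this diff. The sketch below is inferred from the call sites in s3CompatibleStorage.js further down; the names and shapes are assumptions, not the published declarations, and the real utilities almost certainly expose more than what is visible here.

// Rough TypeScript sketch of the surfaces implied by the call sites below.
// Inferred, not copied from the package; enqueue/flush internals are not visible in this diff.
interface WriteBufferStats {
    bufferSize: number;     // items currently buffered
    totalWrites: number;    // items flushed so far
}

interface WriteBuffer<T> {
    add(id: string, item: T): Promise<void>;      // buffer one item for a later bulk flush
    adjustForLoad(queueLength: number): void;     // retune flush size/interval under load
    getStats(): WriteBufferStats;
}

interface CoalescedOp {
    type: 'read' | 'write' | 'delete';
    key: string;
    data?: unknown;                               // filled in by bulk reads, consumed by bulk writes
}

interface RequestCoalescer {
    adjustParameters(queueLength: number): void;
    getQueueSizes(): { total: number };
    // methods for enqueueing operations exist in the real utility but do not appear in this diff
}

declare function getWriteBuffer<T>(
    id: string,
    kind: 'noun' | 'verb',
    flush: (items: Map<string, T>) => Promise<void>
): WriteBuffer<T>;

declare function getCoalescer(
    id: string,
    process: (batch: CoalescedOp[]) => Promise<void>
): RequestCoalescer;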
package/dist/storage/adapters/s3CompatibleStorage.js

@@ -11,6 +11,8 @@ import { CacheManager } from '../cacheManager.js';
 import { createModuleLogger } from '../../utils/logger.js';
 import { getGlobalSocketManager } from '../../utils/adaptiveSocketManager.js';
 import { getGlobalBackpressure } from '../../utils/adaptiveBackpressure.js';
+import { getWriteBuffer } from '../../utils/writeBuffer.js';
+import { getCoalescer } from '../../utils/requestCoalescer.js';
 // Export R2Storage as an alias for S3CompatibleStorage
 export { S3CompatibleStorage as R2Storage };
 /**
@@ -64,6 +66,16 @@ export class S3CompatibleStorage extends BaseStorage {
         this.socketManager = getGlobalSocketManager();
         // Adaptive backpressure for automatic flow control
         this.backpressure = getGlobalBackpressure();
+        // Write buffers for bulk operations
+        this.nounWriteBuffer = null;
+        this.verbWriteBuffer = null;
+        // Request coalescer for deduplication
+        this.requestCoalescer = null;
+        // High-volume mode detection - MUCH more aggressive
+        this.highVolumeMode = false;
+        this.lastVolumeCheck = 0;
+        this.volumeCheckInterval = 1000; // Check every second, not 5
+        this.forceHighVolumeMode = false; // Environment variable override
         // Module logger
         this.logger = createModuleLogger('S3Storage');
         // Node cache to avoid redundant API calls
@@ -214,6 +226,10 @@ export class S3CompatibleStorage extends BaseStorage {
             // Set storage adapters for cache managers
             this.nounCacheManager.setStorageAdapters(nounStorageAdapter, nounStorageAdapter);
             this.verbCacheManager.setStorageAdapters(verbStorageAdapter, verbStorageAdapter);
+            // Initialize write buffers for high-volume scenarios
+            this.initializeBuffers();
+            // Initialize request coalescer
+            this.initializeCoalescer();
             this.isInitialized = true;
             this.logger.info(`Initialized ${this.serviceType} storage with bucket ${this.bucketName}`);
         }
@@ -222,6 +238,231 @@ export class S3CompatibleStorage extends BaseStorage {
             throw new Error(`Failed to initialize ${this.serviceType} storage: ${error}`);
         }
     }
+    /**
+     * Initialize write buffers for high-volume scenarios
+     */
+    initializeBuffers() {
+        const storageId = `${this.serviceType}-${this.bucketName}`;
+        // Create noun write buffer
+        this.nounWriteBuffer = getWriteBuffer(`${storageId}-nouns`, 'noun', async (items) => {
+            // Bulk write nouns to S3
+            await this.bulkWriteNouns(items);
+        });
+        // Create verb write buffer
+        this.verbWriteBuffer = getWriteBuffer(`${storageId}-verbs`, 'verb', async (items) => {
+            // Bulk write verbs to S3
+            await this.bulkWriteVerbs(items);
+        });
+    }
+    /**
+     * Initialize request coalescer
+     */
+    initializeCoalescer() {
+        const storageId = `${this.serviceType}-${this.bucketName}`;
+        this.requestCoalescer = getCoalescer(storageId, async (batch) => {
+            // Process coalesced operations
+            await this.processCoalescedBatch(batch);
+        });
+    }
+    /**
+     * Check if we should enable high-volume mode
+     */
+    checkVolumeMode() {
+        const now = Date.now();
+        if (now - this.lastVolumeCheck < this.volumeCheckInterval) {
+            return;
+        }
+        this.lastVolumeCheck = now;
+        // Check environment variable override
+        const envThreshold = process.env.BRAINY_BUFFER_THRESHOLD;
+        const threshold = envThreshold ? parseInt(envThreshold) : 1; // Default to 1!
+        // Force enable from environment
+        if (process.env.BRAINY_FORCE_BUFFERING === 'true') {
+            this.forceHighVolumeMode = true;
+        }
+        // Get metrics
+        const backpressureStatus = this.backpressure.getStatus();
+        const socketMetrics = this.socketManager.getMetrics();
+        // MUCH more aggressive detection - trigger on almost any load
+        const shouldEnableHighVolume = this.forceHighVolumeMode || // Environment override
+            backpressureStatus.queueLength > threshold || // Configurable threshold
+            socketMetrics.pendingRequests > threshold || // Socket pressure
+            this.pendingOperations > threshold || // Any pending ops
+            socketMetrics.socketUtilization > 0.1 || // Even 10% socket usage
+            (socketMetrics.requestsPerSecond > 10) || // High request rate
+            (this.consecutiveErrors > 0); // Any errors at all
+        if (shouldEnableHighVolume && !this.highVolumeMode) {
+            this.highVolumeMode = true;
+            this.logger.warn(`🚨 HIGH-VOLUME MODE ACTIVATED 🚨`);
+            this.logger.warn(`  Queue Length: ${backpressureStatus.queueLength}`);
+            this.logger.warn(`  Pending Requests: ${socketMetrics.pendingRequests}`);
+            this.logger.warn(`  Pending Operations: ${this.pendingOperations}`);
+            this.logger.warn(`  Socket Utilization: ${(socketMetrics.socketUtilization * 100).toFixed(1)}%`);
+            this.logger.warn(`  Requests/sec: ${socketMetrics.requestsPerSecond}`);
+            this.logger.warn(`  Consecutive Errors: ${this.consecutiveErrors}`);
+            this.logger.warn(`  Threshold: ${threshold}`);
+            // Adjust buffer parameters for high volume
+            const queueLength = Math.max(backpressureStatus.queueLength, socketMetrics.pendingRequests, 100);
+            if (this.nounWriteBuffer) {
+                this.nounWriteBuffer.adjustForLoad(queueLength);
+                const stats = this.nounWriteBuffer.getStats();
+                this.logger.warn(`  Noun Buffer: ${stats.bufferSize} items, ${stats.totalWrites} total writes`);
+            }
+            if (this.verbWriteBuffer) {
+                this.verbWriteBuffer.adjustForLoad(queueLength);
+                const stats = this.verbWriteBuffer.getStats();
+                this.logger.warn(`  Verb Buffer: ${stats.bufferSize} items, ${stats.totalWrites} total writes`);
+            }
+            if (this.requestCoalescer) {
+                this.requestCoalescer.adjustParameters(queueLength);
+                const sizes = this.requestCoalescer.getQueueSizes();
+                this.logger.warn(`  Coalescer: ${sizes.total} queued operations`);
+            }
+        }
+        else if (!shouldEnableHighVolume && this.highVolumeMode && !this.forceHighVolumeMode) {
+            this.highVolumeMode = false;
+            this.logger.info('✅ High-volume mode deactivated - load normalized');
+        }
+        // Log current status every 10 checks when in high-volume mode
+        if (this.highVolumeMode && (now % 10000) < this.volumeCheckInterval) {
+            this.logger.info(`📊 High-volume mode status: Queue=${backpressureStatus.queueLength}, Pending=${socketMetrics.pendingRequests}, Sockets=${(socketMetrics.socketUtilization * 100).toFixed(1)}%`);
+        }
+    }
+    /**
+     * Bulk write nouns to S3
+     */
+    async bulkWriteNouns(items) {
+        const { PutObjectCommand } = await import('@aws-sdk/client-s3');
+        // Process in parallel with limited concurrency
+        const promises = [];
+        const batchSize = 10; // Process 10 at a time
+        const entries = Array.from(items.entries());
+        for (let i = 0; i < entries.length; i += batchSize) {
+            const batch = entries.slice(i, i + batchSize);
+            const batchPromise = Promise.all(batch.map(async ([id, node]) => {
+                const serializableNode = {
+                    ...node,
+                    connections: this.mapToObject(node.connections, (set) => Array.from(set))
+                };
+                const key = `${this.nounPrefix}${id}.json`;
+                const body = JSON.stringify(serializableNode, null, 2);
+                await this.s3Client.send(new PutObjectCommand({
+                    Bucket: this.bucketName,
+                    Key: key,
+                    Body: body,
+                    ContentType: 'application/json'
+                }));
+            })).then(() => { }); // Convert Promise<void[]> to Promise<void>
+            promises.push(batchPromise);
+        }
+        await Promise.all(promises);
+    }
+    /**
+     * Bulk write verbs to S3
+     */
+    async bulkWriteVerbs(items) {
+        const { PutObjectCommand } = await import('@aws-sdk/client-s3');
+        // Process in parallel with limited concurrency
+        const promises = [];
+        const batchSize = 10;
+        const entries = Array.from(items.entries());
+        for (let i = 0; i < entries.length; i += batchSize) {
+            const batch = entries.slice(i, i + batchSize);
+            const batchPromise = Promise.all(batch.map(async ([id, edge]) => {
+                const serializableEdge = {
+                    ...edge,
+                    connections: this.mapToObject(edge.connections, (set) => Array.from(set))
+                };
+                const key = `${this.verbPrefix}${id}.json`;
+                const body = JSON.stringify(serializableEdge, null, 2);
+                await this.s3Client.send(new PutObjectCommand({
+                    Bucket: this.bucketName,
+                    Key: key,
+                    Body: body,
+                    ContentType: 'application/json'
+                }));
+            })).then(() => { }); // Convert Promise<void[]> to Promise<void>
+            promises.push(batchPromise);
+        }
+        await Promise.all(promises);
+    }
+    /**
+     * Process coalesced batch of operations
+     */
+    async processCoalescedBatch(batch) {
+        // Group operations by type
+        const writes = [];
+        const reads = [];
+        const deletes = [];
+        for (const op of batch) {
+            if (op.type === 'write') {
+                writes.push(op);
+            }
+            else if (op.type === 'read') {
+                reads.push(op);
+            }
+            else if (op.type === 'delete') {
+                deletes.push(op);
+            }
+        }
+        // Process in order: deletes, writes, reads
+        if (deletes.length > 0) {
+            await this.processBulkDeletes(deletes);
+        }
+        if (writes.length > 0) {
+            await this.processBulkWrites(writes);
+        }
+        if (reads.length > 0) {
+            await this.processBulkReads(reads);
+        }
+    }
+    /**
+     * Process bulk deletes
+     */
+    async processBulkDeletes(deletes) {
+        const { DeleteObjectCommand } = await import('@aws-sdk/client-s3');
+        await Promise.all(deletes.map(async (op) => {
+            await this.s3Client.send(new DeleteObjectCommand({
+                Bucket: this.bucketName,
+                Key: op.key
+            }));
+        }));
+    }
+    /**
+     * Process bulk writes
+     */
+    async processBulkWrites(writes) {
+        const { PutObjectCommand } = await import('@aws-sdk/client-s3');
+        await Promise.all(writes.map(async (op) => {
+            await this.s3Client.send(new PutObjectCommand({
+                Bucket: this.bucketName,
+                Key: op.key,
+                Body: JSON.stringify(op.data),
+                ContentType: 'application/json'
+            }));
+        }));
+    }
+    /**
+     * Process bulk reads
+     */
+    async processBulkReads(reads) {
+        const { GetObjectCommand } = await import('@aws-sdk/client-s3');
+        await Promise.all(reads.map(async (op) => {
+            try {
+                const response = await this.s3Client.send(new GetObjectCommand({
+                    Bucket: this.bucketName,
+                    Key: op.key
+                }));
+                if (response.Body) {
+                    const data = await response.Body.transformToString();
+                    op.data = JSON.parse(data);
+                }
+            }
+            catch (error) {
+                op.data = null;
+            }
+        }));
+    }
     /**
      * Dynamically adjust batch size based on memory pressure and error rates
      */
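The checkVolumeMode method above reads two environment variables, BRAINY_FORCE_BUFFERING and BRAINY_BUFFER_THRESHOLD. A minimal sketch of setting them before the adapter starts handling writes; the variable names come from the diff, the values are illustrative only:

// Illustrative values; set before the storage adapter begins saving nodes/edges.
process.env.BRAINY_FORCE_BUFFERING = 'true';   // always route saves through the write buffers
process.env.BRAINY_BUFFER_THRESHOLD = '50';    // otherwise enter high-volume mode once more than 50 operations are queued

With neither variable set, the threshold defaults to 1, so almost any concurrent load (or a single consecutive error) flips the adapter into high-volume mode.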
@@ -298,6 +539,17 @@ export class S3CompatibleStorage extends BaseStorage {
      */
     async saveNode(node) {
         await this.ensureInitialized();
+        // ALWAYS check if we should use high-volume mode (critical for detection)
+        this.checkVolumeMode();
+        // Use write buffer in high-volume mode
+        if (this.highVolumeMode && this.nounWriteBuffer) {
+            this.logger.trace(`📝 BUFFERING: Adding noun ${node.id} to write buffer (high-volume mode active)`);
+            await this.nounWriteBuffer.add(node.id, node);
+            return;
+        }
+        else if (!this.highVolumeMode) {
+            this.logger.trace(`📝 DIRECT WRITE: Saving noun ${node.id} directly (high-volume mode inactive)`);
+        }
         // Apply backpressure before starting operation
         const requestId = await this.applyBackpressure();
         try {
@@ -633,6 +885,17 @@ export class S3CompatibleStorage extends BaseStorage {
      */
     async saveEdge(edge) {
         await this.ensureInitialized();
+        // ALWAYS check if we should use high-volume mode (critical for detection)
+        this.checkVolumeMode();
+        // Use write buffer in high-volume mode
+        if (this.highVolumeMode && this.verbWriteBuffer) {
+            this.logger.trace(`📝 BUFFERING: Adding verb ${edge.id} to write buffer (high-volume mode active)`);
+            await this.verbWriteBuffer.add(edge.id, edge);
+            return;
+        }
+        else if (!this.highVolumeMode) {
+            this.logger.trace(`📝 DIRECT WRITE: Saving verb ${edge.id} directly (high-volume mode inactive)`);
+        }
         // Apply backpressure before starting operation
         const requestId = await this.applyBackpressure();
         try {
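Taken together, these hunks turn saveNode and saveEdge into routing points: under load a save is enqueued into a write buffer and the buffer later flushes via bulkWriteNouns/bulkWriteVerbs in parallel batches of 10 PutObject calls, while the low-volume path keeps the 0.54.0 backpressure-plus-direct-write behavior. A condensed flow sketch, illustrative only and not package code (`storage` stands in for an initialized S3CompatibleStorage instance; the buffer's flush timing lives inside the writeBuffer utility and is assumed here):

// Condensed routing sketch for a single noun save after 0.54.2.
async function saveNodeFlow(storage: any, node: { id: string }): Promise<void> {
    storage.checkVolumeMode();                             // may flip highVolumeMode based on metrics/env
    if (storage.highVolumeMode && storage.nounWriteBuffer) {
        // High-volume path: enqueue and return; the buffer later calls
        // bulkWriteNouns(items), which issues PutObject in batches of 10.
        await storage.nounWriteBuffer.add(node.id, node);
        return;
    }
    // Low-volume path: backpressure + direct PutObject, unchanged from 0.54.0.
}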