s3db.js 18.0.11-next.1534f717 → 18.0.11-next.e8e71b5b
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/clients/recker-http-handler.js +56 -8
- package/dist/clients/recker-http-handler.js.map +1 -1
- package/dist/concerns/high-performance-inserter.js +6 -34
- package/dist/concerns/high-performance-inserter.js.map +1 -1
- package/dist/concerns/id/alphabets.js +150 -0
- package/dist/concerns/id/alphabets.js.map +1 -0
- package/dist/concerns/id/entropy.js +243 -0
- package/dist/concerns/id/entropy.js.map +1 -0
- package/dist/concerns/id/generators/nanoid.js +74 -0
- package/dist/concerns/id/generators/nanoid.js.map +1 -0
- package/dist/concerns/id/generators/sid.js +73 -0
- package/dist/concerns/id/generators/sid.js.map +1 -0
- package/dist/concerns/id/generators/ulid.js +208 -0
- package/dist/concerns/id/generators/ulid.js.map +1 -0
- package/dist/concerns/id/generators/uuid-v7.js +150 -0
- package/dist/concerns/id/generators/uuid-v7.js.map +1 -0
- package/dist/concerns/id/index.js +74 -0
- package/dist/concerns/id/index.js.map +1 -0
- package/dist/concerns/plugin-storage.js +114 -0
- package/dist/concerns/plugin-storage.js.map +1 -1
- package/dist/concerns/s3-errors.js +72 -0
- package/dist/concerns/s3-errors.js.map +1 -0
- package/dist/concerns/s3-key.js +54 -0
- package/dist/concerns/s3-key.js.map +1 -0
- package/dist/concerns/safe-merge.js +47 -0
- package/dist/concerns/safe-merge.js.map +1 -0
- package/dist/core/resource-config-validator.js +12 -2
- package/dist/core/resource-config-validator.js.map +1 -1
- package/dist/core/resource-partitions.class.js +12 -1
- package/dist/core/resource-partitions.class.js.map +1 -1
- package/dist/core/resource-persistence.class.js +41 -12
- package/dist/core/resource-persistence.class.js.map +1 -1
- package/dist/core/resource-query.class.js +21 -47
- package/dist/core/resource-query.class.js.map +1 -1
- package/dist/database/database-connection.class.js +3 -6
- package/dist/database/database-connection.class.js.map +1 -1
- package/dist/database/database-plugins.class.js +7 -13
- package/dist/database/database-plugins.class.js.map +1 -1
- package/dist/plugins/concerns/s3-mutex.class.js +155 -0
- package/dist/plugins/concerns/s3-mutex.class.js.map +1 -0
- package/dist/plugins/eventual-consistency/consolidation.js +4 -7
- package/dist/plugins/eventual-consistency/consolidation.js.map +1 -1
- package/dist/plugins/eventual-consistency/garbage-collection.js +3 -6
- package/dist/plugins/eventual-consistency/garbage-collection.js.map +1 -1
- package/dist/plugins/queue-consumer.plugin.js +10 -16
- package/dist/plugins/queue-consumer.plugin.js.map +1 -1
- package/dist/plugins/recon/managers/scheduler-manager.js +3 -5
- package/dist/plugins/recon/managers/scheduler-manager.js.map +1 -1
- package/dist/plugins/recon/stages/recker-asn-stage.js +279 -0
- package/dist/plugins/recon/stages/recker-asn-stage.js.map +1 -0
- package/dist/plugins/recon/stages/recker-dns-stage.js +227 -0
- package/dist/plugins/recon/stages/recker-dns-stage.js.map +1 -0
- package/dist/plugins/recon/stages/recker-scrape-stage.js +369 -0
- package/dist/plugins/recon/stages/recker-scrape-stage.js.map +1 -0
- package/dist/plugins/replicator.plugin.js +13 -31
- package/dist/plugins/replicator.plugin.js.map +1 -1
- package/dist/plugins/replicators/base-replicator.class.js +10 -23
- package/dist/plugins/replicators/base-replicator.class.js.map +1 -1
- package/dist/plugins/spider/recker-link-discoverer.js +544 -0
- package/dist/plugins/spider/recker-link-discoverer.js.map +1 -0
- package/dist/plugins/spider/recker-llms-validator.js +334 -0
- package/dist/plugins/spider/recker-llms-validator.js.map +1 -0
- package/dist/plugins/spider/recker-robots-validator.js +336 -0
- package/dist/plugins/spider/recker-robots-validator.js.map +1 -0
- package/dist/plugins/spider/recker-security-adapter.js +325 -0
- package/dist/plugins/spider/recker-security-adapter.js.map +1 -0
- package/dist/plugins/spider/recker-seo-adapter.js +399 -0
- package/dist/plugins/spider/recker-seo-adapter.js.map +1 -0
- package/dist/plugins/spider/recker-sitemap-validator.js +406 -0
- package/dist/plugins/spider/recker-sitemap-validator.js.map +1 -0
- package/dist/resource.class.js +2 -0
- package/dist/resource.class.js.map +1 -1
- package/dist/s3db.cjs +444 -219
- package/dist/s3db.cjs.map +1 -1
- package/dist/s3db.es.js +445 -220
- package/dist/s3db.es.js.map +1 -1
- package/dist/stream/resource-reader.class.js +5 -7
- package/dist/stream/resource-reader.class.js.map +1 -1
- package/dist/stream/resource-writer.class.js +5 -7
- package/dist/stream/resource-writer.class.js.map +1 -1
- package/dist/tasks/tasks-pool.class.js +31 -0
- package/dist/tasks/tasks-pool.class.js.map +1 -1
- package/dist/types/clients/recker-http-handler.d.ts +1 -0
- package/dist/types/clients/recker-http-handler.d.ts.map +1 -1
- package/dist/types/clients/types.d.ts +14 -0
- package/dist/types/clients/types.d.ts.map +1 -1
- package/dist/types/concerns/high-performance-inserter.d.ts.map +1 -1
- package/dist/types/concerns/id/alphabets.d.ts +125 -0
- package/dist/types/concerns/id/alphabets.d.ts.map +1 -0
- package/dist/types/concerns/id/entropy.d.ts +84 -0
- package/dist/types/concerns/id/entropy.d.ts.map +1 -0
- package/dist/types/concerns/id/generators/nanoid.d.ts +46 -0
- package/dist/types/concerns/id/generators/nanoid.d.ts.map +1 -0
- package/dist/types/concerns/id/generators/sid.d.ts +45 -0
- package/dist/types/concerns/id/generators/sid.d.ts.map +1 -0
- package/dist/types/concerns/id/generators/ulid.d.ts +71 -0
- package/dist/types/concerns/id/generators/ulid.d.ts.map +1 -0
- package/dist/types/concerns/id/generators/uuid-v7.d.ts +60 -0
- package/dist/types/concerns/id/generators/uuid-v7.d.ts.map +1 -0
- package/dist/types/concerns/id/index.d.ts +51 -0
- package/dist/types/concerns/id/index.d.ts.map +1 -0
- package/dist/types/concerns/plugin-storage.d.ts +25 -0
- package/dist/types/concerns/plugin-storage.d.ts.map +1 -1
- package/dist/types/concerns/s3-errors.d.ts +20 -0
- package/dist/types/concerns/s3-errors.d.ts.map +1 -0
- package/dist/types/concerns/s3-key.d.ts +30 -0
- package/dist/types/concerns/s3-key.d.ts.map +1 -0
- package/dist/types/concerns/safe-merge.d.ts +22 -0
- package/dist/types/concerns/safe-merge.d.ts.map +1 -0
- package/dist/types/core/resource-config-validator.d.ts.map +1 -1
- package/dist/types/core/resource-partitions.class.d.ts.map +1 -1
- package/dist/types/core/resource-persistence.class.d.ts.map +1 -1
- package/dist/types/core/resource-query.class.d.ts.map +1 -1
- package/dist/types/database/database-connection.class.d.ts.map +1 -1
- package/dist/types/database/database-plugins.class.d.ts.map +1 -1
- package/dist/types/plugins/concerns/s3-mutex.class.d.ts +30 -0
- package/dist/types/plugins/concerns/s3-mutex.class.d.ts.map +1 -0
- package/dist/types/plugins/eventual-consistency/consolidation.d.ts.map +1 -1
- package/dist/types/plugins/eventual-consistency/garbage-collection.d.ts.map +1 -1
- package/dist/types/plugins/queue-consumer.plugin.d.ts.map +1 -1
- package/dist/types/plugins/recon/managers/scheduler-manager.d.ts.map +1 -1
- package/dist/types/plugins/recon/stages/recker-asn-stage.d.ts +90 -0
- package/dist/types/plugins/recon/stages/recker-asn-stage.d.ts.map +1 -0
- package/dist/types/plugins/recon/stages/recker-dns-stage.d.ts +125 -0
- package/dist/types/plugins/recon/stages/recker-dns-stage.d.ts.map +1 -0
- package/dist/types/plugins/recon/stages/recker-scrape-stage.d.ts +96 -0
- package/dist/types/plugins/recon/stages/recker-scrape-stage.d.ts.map +1 -0
- package/dist/types/plugins/replicator.plugin.d.ts.map +1 -1
- package/dist/types/plugins/replicators/base-replicator.class.d.ts.map +1 -1
- package/dist/types/plugins/spider/recker-link-discoverer.d.ts +54 -0
- package/dist/types/plugins/spider/recker-link-discoverer.d.ts.map +1 -0
- package/dist/types/plugins/spider/recker-llms-validator.d.ts +105 -0
- package/dist/types/plugins/spider/recker-llms-validator.d.ts.map +1 -0
- package/dist/types/plugins/spider/recker-robots-validator.d.ts +92 -0
- package/dist/types/plugins/spider/recker-robots-validator.d.ts.map +1 -0
- package/dist/types/plugins/spider/recker-security-adapter.d.ts +83 -0
- package/dist/types/plugins/spider/recker-security-adapter.d.ts.map +1 -0
- package/dist/types/plugins/spider/recker-seo-adapter.d.ts +187 -0
- package/dist/types/plugins/spider/recker-seo-adapter.d.ts.map +1 -0
- package/dist/types/plugins/spider/recker-sitemap-validator.d.ts +121 -0
- package/dist/types/plugins/spider/recker-sitemap-validator.d.ts.map +1 -0
- package/dist/types/resource.class.d.ts.map +1 -1
- package/dist/types/stream/resource-reader.class.d.ts.map +1 -1
- package/dist/types/stream/resource-writer.class.d.ts.map +1 -1
- package/dist/types/tasks/tasks-pool.class.d.ts +23 -0
- package/dist/types/tasks/tasks-pool.class.d.ts.map +1 -1
- package/mcp/prompts/index.ts +275 -0
- package/mcp/resources/index.ts +322 -0
- package/mcp/tools/plugins.ts +1137 -0
- package/mcp/tools/streams.ts +340 -0
- package/package.json +20 -22
- package/src/clients/recker-http-handler.ts +74 -8
- package/src/clients/types.ts +14 -0
- package/src/concerns/high-performance-inserter.ts +18 -57
- package/src/concerns/id/alphabets.ts +175 -0
- package/src/concerns/id/entropy.ts +286 -0
- package/src/concerns/id/generators/sid.ts +90 -0
- package/src/concerns/id/generators/ulid.ts +249 -0
- package/src/concerns/id/generators/uuid-v7.ts +179 -0
- package/src/concerns/id/index.ts +167 -0
- package/src/concerns/plugin-storage.ts +144 -0
- package/src/concerns/s3-errors.ts +97 -0
- package/src/concerns/s3-key.ts +62 -0
- package/src/concerns/safe-merge.ts +60 -0
- package/src/core/resource-config-validator.ts +9 -2
- package/src/core/resource-partitions.class.ts +14 -1
- package/src/core/resource-persistence.class.ts +47 -13
- package/src/core/resource-query.class.ts +21 -46
- package/src/database/database-connection.class.ts +7 -6
- package/src/database/database-plugins.class.ts +15 -13
- package/src/plugins/concerns/s3-mutex.class.ts +228 -0
- package/src/plugins/eventual-consistency/consolidation.ts +8 -7
- package/src/plugins/eventual-consistency/garbage-collection.ts +7 -6
- package/src/plugins/queue-consumer.plugin.ts +21 -19
- package/src/plugins/recon/managers/scheduler-manager.ts +7 -5
- package/src/plugins/recon/stages/recker-asn-stage.ts +385 -0
- package/src/plugins/recon/stages/recker-dns-stage.ts +360 -0
- package/src/plugins/recon/stages/recker-scrape-stage.ts +509 -0
- package/src/plugins/replicator.plugin.ts +41 -35
- package/src/plugins/replicators/base-replicator.class.ts +17 -23
- package/src/plugins/spider/recker-link-discoverer.ts +645 -0
- package/src/plugins/spider/recker-llms-validator.ts +500 -0
- package/src/plugins/spider/recker-robots-validator.ts +473 -0
- package/src/plugins/spider/recker-security-adapter.ts +489 -0
- package/src/plugins/spider/recker-seo-adapter.ts +605 -0
- package/src/plugins/spider/recker-sitemap-validator.ts +621 -0
- package/src/resource.class.ts +2 -0
- package/src/stream/resource-reader.class.ts +10 -8
- package/src/stream/resource-writer.class.ts +10 -8
- package/src/tasks/tasks-pool.class.ts +46 -0
package/dist/s3db.cjs
CHANGED
|
@@ -9,15 +9,14 @@ var recker = require('recker');
|
|
|
9
9
|
var node_stream = require('node:stream');
|
|
10
10
|
var clientS3 = require('@aws-sdk/client-s3');
|
|
11
11
|
var crypto$2 = require('crypto');
|
|
12
|
-
var os$2 = require('os');
|
|
13
|
-
var promises = require('timers/promises');
|
|
14
12
|
var nanoid = require('nanoid');
|
|
15
13
|
var fs$2 = require('fs/promises');
|
|
16
14
|
var stream$2 = require('stream');
|
|
17
15
|
var fs$3 = require('fs');
|
|
16
|
+
var os$2 = require('os');
|
|
18
17
|
var zlib$2 = require('zlib');
|
|
19
|
-
var promisePool = require('@supercharge/promise-pool');
|
|
20
18
|
var web = require('node:stream/web');
|
|
19
|
+
var promises = require('timers/promises');
|
|
21
20
|
var jsonStableStringify = require('json-stable-stringify');
|
|
22
21
|
var FastestValidatorModule = require('fastest-validator');
|
|
23
22
|
var v8$2 = require('v8');
|
|
@@ -260,6 +259,15 @@ function calculateRetryDelay(attempt, baseDelay, maxDelay, useJitter = true) {
|
|
|
260
259
|
}
|
|
261
260
|
function isRetryableError(error, statusCode) {
|
|
262
261
|
if (error) {
|
|
262
|
+
// Check for HTTP/2 specific errors first
|
|
263
|
+
const h2Error = recker.parseHttp2Error(error);
|
|
264
|
+
if (h2Error) {
|
|
265
|
+
return h2Error.retriable;
|
|
266
|
+
}
|
|
267
|
+
// Check for native HTTP/2 errors
|
|
268
|
+
if (error instanceof recker.Http2Error) {
|
|
269
|
+
return error.retriable;
|
|
270
|
+
}
|
|
263
271
|
const code = error.code;
|
|
264
272
|
if (code === 'ECONNRESET' || code === 'ETIMEDOUT' || code === 'ENOTFOUND' ||
|
|
265
273
|
code === 'ECONNREFUSED' || code === 'EPIPE' || code === 'UND_ERR_SOCKET' ||
|
|
@@ -276,6 +284,17 @@ function isRetryableError(error, statusCode) {
|
|
|
276
284
|
}
|
|
277
285
|
return false;
|
|
278
286
|
}
|
|
287
|
+
/**
|
|
288
|
+
* Get additional retry delay for HTTP/2 errors like ENHANCE_YOUR_CALM
|
|
289
|
+
*/
|
|
290
|
+
function getHttp2RetryDelay(error) {
|
|
291
|
+
const h2Error = recker.parseHttp2Error(error);
|
|
292
|
+
if (h2Error && h2Error.errorCode === 'ENHANCE_YOUR_CALM') {
|
|
293
|
+
// Server is rate limiting, wait longer (5-10 seconds)
|
|
294
|
+
return 5000 + Math.random() * 5000;
|
|
295
|
+
}
|
|
296
|
+
return undefined;
|
|
297
|
+
}
|
|
279
298
|
function parseRetryAfter$1(headerValue) {
|
|
280
299
|
if (!headerValue)
|
|
281
300
|
return undefined;
|
|
@@ -297,6 +316,7 @@ class ReckerHttpHandler {
|
|
|
297
316
|
deduplicator;
|
|
298
317
|
circuitBreaker;
|
|
299
318
|
metrics;
|
|
319
|
+
http2MetricsEnabled;
|
|
300
320
|
constructor(options = {}) {
|
|
301
321
|
this.options = {
|
|
302
322
|
connectTimeout: 10000,
|
|
@@ -308,6 +328,9 @@ class ReckerHttpHandler {
|
|
|
308
328
|
pipelining: 10,
|
|
309
329
|
http2: true,
|
|
310
330
|
http2MaxConcurrentStreams: 100,
|
|
331
|
+
http2Preset: 'performance', // Default to performance preset for S3 workloads
|
|
332
|
+
expectContinue: 2 * 1024 * 1024, // 2MB threshold for Expect: 100-Continue
|
|
333
|
+
enableHttp2Metrics: false,
|
|
311
334
|
enableDedup: true,
|
|
312
335
|
enableCircuitBreaker: true,
|
|
313
336
|
circuitBreakerThreshold: 5,
|
|
@@ -320,6 +343,15 @@ class ReckerHttpHandler {
|
|
|
320
343
|
respectRetryAfter: true,
|
|
321
344
|
...options,
|
|
322
345
|
};
|
|
346
|
+
this.http2MetricsEnabled = this.options.enableHttp2Metrics;
|
|
347
|
+
// Build HTTP/2 configuration using presets
|
|
348
|
+
const http2Config = this.options.http2
|
|
349
|
+
? this.options.http2Preset
|
|
350
|
+
? recker.expandHTTP2Options(this.options.http2Preset)
|
|
351
|
+
: { enabled: true, maxConcurrentStreams: this.options.http2MaxConcurrentStreams }
|
|
352
|
+
: false;
|
|
353
|
+
// Build hooks for HTTP/2 observability
|
|
354
|
+
const hooks = this.http2MetricsEnabled ? recker.createHttp2MetricsHooks() : undefined;
|
|
323
355
|
this.client = recker.createClient({
|
|
324
356
|
timeout: {
|
|
325
357
|
lookup: 5000,
|
|
@@ -328,10 +360,8 @@ class ReckerHttpHandler {
|
|
|
328
360
|
response: this.options.headersTimeout,
|
|
329
361
|
request: this.options.bodyTimeout,
|
|
330
362
|
},
|
|
331
|
-
http2:
|
|
332
|
-
|
|
333
|
-
maxConcurrentStreams: this.options.http2MaxConcurrentStreams,
|
|
334
|
-
} : false,
|
|
363
|
+
http2: http2Config,
|
|
364
|
+
expectContinue: this.options.expectContinue,
|
|
335
365
|
concurrency: {
|
|
336
366
|
max: this.options.connections * 10,
|
|
337
367
|
agent: {
|
|
@@ -342,7 +372,8 @@ class ReckerHttpHandler {
|
|
|
342
372
|
keepAliveMaxTimeout: this.options.keepAliveMaxTimeout,
|
|
343
373
|
},
|
|
344
374
|
},
|
|
345
|
-
|
|
375
|
+
hooks,
|
|
376
|
+
observability: this.http2MetricsEnabled,
|
|
346
377
|
});
|
|
347
378
|
this.deduplicator = this.options.enableDedup ? new RequestDeduplicator() : null;
|
|
348
379
|
this.circuitBreaker = this.options.enableCircuitBreaker ? new CircuitBreaker({
|
|
@@ -432,7 +463,11 @@ class ReckerHttpHandler {
|
|
|
432
463
|
}
|
|
433
464
|
if (this.options.enableRetry && attempt < maxAttempts && isRetryableError(error)) {
|
|
434
465
|
this.metrics.retries++;
|
|
435
|
-
|
|
466
|
+
// Check for HTTP/2 specific retry delay (e.g., ENHANCE_YOUR_CALM)
|
|
467
|
+
const h2Delay = getHttp2RetryDelay(error);
|
|
468
|
+
const delay = h2Delay !== undefined
|
|
469
|
+
? Math.min(h2Delay, this.options.maxRetryDelay)
|
|
470
|
+
: calculateRetryDelay(attempt, this.options.retryDelay, this.options.maxRetryDelay, this.options.retryJitter);
|
|
436
471
|
await new Promise(resolve => setTimeout(resolve, delay));
|
|
437
472
|
continue;
|
|
438
473
|
}
|
|
@@ -458,13 +493,25 @@ class ReckerHttpHandler {
|
|
|
458
493
|
return { ...this.options };
|
|
459
494
|
}
|
|
460
495
|
getMetrics() {
|
|
461
|
-
|
|
496
|
+
const metrics = {
|
|
462
497
|
...this.metrics,
|
|
463
498
|
circuitStates: this.circuitBreaker
|
|
464
499
|
? Object.fromEntries(this.circuitBreaker.circuits)
|
|
465
500
|
: {},
|
|
466
501
|
pendingDeduped: this.deduplicator?.size || 0,
|
|
467
502
|
};
|
|
503
|
+
// Include HTTP/2 metrics if enabled
|
|
504
|
+
if (this.http2MetricsEnabled) {
|
|
505
|
+
const h2Summary = recker.getGlobalHttp2Metrics().getSummary();
|
|
506
|
+
metrics.http2 = {
|
|
507
|
+
sessions: h2Summary.totals.sessions,
|
|
508
|
+
activeSessions: h2Summary.totals.activeSessions,
|
|
509
|
+
streams: h2Summary.totals.streams,
|
|
510
|
+
activeStreams: h2Summary.totals.activeStreams,
|
|
511
|
+
errors: h2Summary.totals.errors,
|
|
512
|
+
};
|
|
513
|
+
}
|
|
514
|
+
return metrics;
|
|
468
515
|
}
|
|
469
516
|
resetMetrics() {
|
|
470
517
|
this.metrics = {
|
|
@@ -3559,6 +3606,37 @@ class TasksPool extends EventEmitter.EventEmitter {
|
|
|
3559
3606
|
});
|
|
3560
3607
|
return { results: orderedResults, errors, batchId };
|
|
3561
3608
|
}
|
|
3609
|
+
/**
|
|
3610
|
+
* Process an array of items with controlled concurrency.
|
|
3611
|
+
* This is a convenience method that mimics PromisePool.for().process() API.
|
|
3612
|
+
*
|
|
3613
|
+
* @example
|
|
3614
|
+
* const { results, errors } = await TasksPool.map(
|
|
3615
|
+
* users,
|
|
3616
|
+
* async (user) => fetchUserData(user.id),
|
|
3617
|
+
* { concurrency: 10 }
|
|
3618
|
+
* );
|
|
3619
|
+
*/
|
|
3620
|
+
static async map(items, processor, options = {}) {
|
|
3621
|
+
const { concurrency = 10, onItemComplete, onItemError } = options;
|
|
3622
|
+
const pool = new TasksPool({
|
|
3623
|
+
concurrency,
|
|
3624
|
+
features: { profile: 'bare', emitEvents: false }
|
|
3625
|
+
});
|
|
3626
|
+
const fns = items.map((item, index) => async () => processor(item, index));
|
|
3627
|
+
const batchOptions = {
|
|
3628
|
+
onItemComplete: onItemComplete,
|
|
3629
|
+
onItemError: onItemError
|
|
3630
|
+
? (error, index) => onItemError(error, items[index], index)
|
|
3631
|
+
: undefined
|
|
3632
|
+
};
|
|
3633
|
+
const { results, errors } = await pool.addBatch(fns, batchOptions);
|
|
3634
|
+
await pool.destroy();
|
|
3635
|
+
return {
|
|
3636
|
+
results: results.filter((r) => r !== null),
|
|
3637
|
+
errors: errors.map(e => ({ error: e.error, item: items[e.index], index: e.index }))
|
|
3638
|
+
};
|
|
3639
|
+
}
|
|
3562
3640
|
processNext() {
|
|
3563
3641
|
if (this.lightMode) {
|
|
3564
3642
|
this._processLightQueue();
|
|
@@ -9437,15 +9515,13 @@ class ResourceReader extends EventEmitter {
|
|
|
9437
9515
|
}
|
|
9438
9516
|
async _transform(chunk, _encoding, callback) {
|
|
9439
9517
|
const [, err] = await tryFn(async () => {
|
|
9440
|
-
await
|
|
9441
|
-
.withConcurrency(this.concurrency)
|
|
9442
|
-
.handleError(async (error, content) => {
|
|
9443
|
-
this.emit("error", error, content);
|
|
9444
|
-
})
|
|
9445
|
-
.process(async (id) => {
|
|
9518
|
+
await TasksPool.map(chunk, async (id) => {
|
|
9446
9519
|
const data = await this.resource.get(id);
|
|
9447
9520
|
this.transform.push(data);
|
|
9448
9521
|
return data;
|
|
9522
|
+
}, {
|
|
9523
|
+
concurrency: this.concurrency,
|
|
9524
|
+
onItemError: (error, id) => this.emit("error", error, id)
|
|
9449
9525
|
});
|
|
9450
9526
|
});
|
|
9451
9527
|
callback(err);
|
|
@@ -9509,12 +9585,7 @@ class ResourceWriter extends EventEmitter {
|
|
|
9509
9585
|
while (this.buffer.length > 0) {
|
|
9510
9586
|
const batch = this.buffer.splice(0, this.batchSize);
|
|
9511
9587
|
const [ok, err] = await tryFn(async () => {
|
|
9512
|
-
await
|
|
9513
|
-
.withConcurrency(this.concurrency)
|
|
9514
|
-
.handleError(async (error, content) => {
|
|
9515
|
-
this.emit("error", error, content);
|
|
9516
|
-
})
|
|
9517
|
-
.process(async (item) => {
|
|
9588
|
+
await TasksPool.map(batch, async (item) => {
|
|
9518
9589
|
const [insertOk, insertErr, result] = await tryFn(async () => {
|
|
9519
9590
|
const res = await this.resource.insert(item);
|
|
9520
9591
|
return res;
|
|
@@ -9524,6 +9595,9 @@ class ResourceWriter extends EventEmitter {
|
|
|
9524
9595
|
return null;
|
|
9525
9596
|
}
|
|
9526
9597
|
return result;
|
|
9598
|
+
}, {
|
|
9599
|
+
concurrency: this.concurrency,
|
|
9600
|
+
onItemError: (error, item) => this.emit("error", error, item)
|
|
9527
9601
|
});
|
|
9528
9602
|
});
|
|
9529
9603
|
if (!ok) {
|
|
@@ -10055,10 +10129,7 @@ class DatabasePlugins {
|
|
|
10055
10129
|
}
|
|
10056
10130
|
}
|
|
10057
10131
|
const concurrency = Math.max(1, Number.isFinite(db.executorPool?.concurrency) ? db.executorPool.concurrency : 5);
|
|
10058
|
-
const installResult = await
|
|
10059
|
-
.withConcurrency(concurrency)
|
|
10060
|
-
.for(plugins)
|
|
10061
|
-
.process(async (plugin) => {
|
|
10132
|
+
const installResult = await TasksPool.map(plugins, async (plugin) => {
|
|
10062
10133
|
const pluginName = this._getPluginName(plugin);
|
|
10063
10134
|
if (typeof plugin.setInstanceName === 'function') {
|
|
10064
10135
|
plugin.setInstanceName(pluginName);
|
|
@@ -10074,11 +10145,11 @@ class DatabasePlugins {
|
|
|
10074
10145
|
});
|
|
10075
10146
|
db.pluginRegistry[pluginName] = plugin;
|
|
10076
10147
|
return pluginName;
|
|
10077
|
-
});
|
|
10148
|
+
}, { concurrency });
|
|
10078
10149
|
if (installResult.errors.length > 0) {
|
|
10079
10150
|
const errorInfo = installResult.errors[0];
|
|
10080
10151
|
const failedPlugin = errorInfo.item;
|
|
10081
|
-
const error = errorInfo.
|
|
10152
|
+
const error = errorInfo.error;
|
|
10082
10153
|
const failedName = this._getPluginName(failedPlugin);
|
|
10083
10154
|
throw new DatabaseError(`Failed to install plugin '${failedName}': ${error?.message || error}`, {
|
|
10084
10155
|
operation: 'startPlugins.install',
|
|
@@ -10086,10 +10157,7 @@ class DatabasePlugins {
|
|
|
10086
10157
|
original: error
|
|
10087
10158
|
});
|
|
10088
10159
|
}
|
|
10089
|
-
const startResult = await
|
|
10090
|
-
.withConcurrency(concurrency)
|
|
10091
|
-
.for(plugins)
|
|
10092
|
-
.process(async (plugin) => {
|
|
10160
|
+
const startResult = await TasksPool.map(plugins, async (plugin) => {
|
|
10093
10161
|
const pluginName = this._getPluginName(plugin);
|
|
10094
10162
|
await plugin.start();
|
|
10095
10163
|
db.emit('db:plugin:metrics', {
|
|
@@ -10098,11 +10166,11 @@ class DatabasePlugins {
|
|
|
10098
10166
|
...this.coordinators.collectMemorySnapshot()
|
|
10099
10167
|
});
|
|
10100
10168
|
return plugin;
|
|
10101
|
-
});
|
|
10169
|
+
}, { concurrency });
|
|
10102
10170
|
if (startResult.errors.length > 0) {
|
|
10103
10171
|
const errorInfo = startResult.errors[0];
|
|
10104
10172
|
const failedPlugin = errorInfo.item;
|
|
10105
|
-
const error = errorInfo.
|
|
10173
|
+
const error = errorInfo.error;
|
|
10106
10174
|
const failedName = this._getPluginName(failedPlugin);
|
|
10107
10175
|
throw new DatabaseError(`Failed to start plugin '${failedName}': ${error?.message || error}`, {
|
|
10108
10176
|
operation: 'startPlugins.start',
|
|
@@ -14185,6 +14253,40 @@ class ResourceMiddleware {
|
|
|
14185
14253
|
}
|
|
14186
14254
|
}
|
|
14187
14255
|
|
|
14256
|
+
/**
|
|
14257
|
+
* S3 Key Utilities
|
|
14258
|
+
*
|
|
14259
|
+
* S3 keys always use POSIX-style forward slashes regardless of the operating system.
|
|
14260
|
+
* These utilities ensure consistent key construction across all platforms.
|
|
14261
|
+
*/
|
|
14262
|
+
const UNSAFE_KEY_CHARS = /[\\\/=%]/;
|
|
14263
|
+
/**
|
|
14264
|
+
* Validates that a value is safe for use in S3 keys.
|
|
14265
|
+
* IDs and partition values must be URL-friendly (no /, \, =, or %).
|
|
14266
|
+
* Returns true if valid, false if contains unsafe characters.
|
|
14267
|
+
*/
|
|
14268
|
+
function isValidS3KeySegment(value) {
|
|
14269
|
+
return !UNSAFE_KEY_CHARS.test(value);
|
|
14270
|
+
}
|
|
14271
|
+
/**
|
|
14272
|
+
* Validates a value for S3 key usage, throwing ValidationError if invalid.
|
|
14273
|
+
* Use this for IDs and partition values.
|
|
14274
|
+
* Accepts any value type - coerces to string for validation.
|
|
14275
|
+
*/
|
|
14276
|
+
function validateS3KeySegment(value, context) {
|
|
14277
|
+
const strValue = String(value);
|
|
14278
|
+
if (UNSAFE_KEY_CHARS.test(strValue)) {
|
|
14279
|
+
const invalidChars = strValue.match(UNSAFE_KEY_CHARS);
|
|
14280
|
+
throw new ValidationError(`Invalid ${context}: contains unsafe character '${invalidChars?.[0]}'`, {
|
|
14281
|
+
field: context,
|
|
14282
|
+
value: strValue,
|
|
14283
|
+
constraint: 'url-safe',
|
|
14284
|
+
statusCode: 400,
|
|
14285
|
+
suggestion: 'IDs and partition values must be URL-friendly (no /, \\, =, or %). Use alphanumeric characters, hyphens, or underscores.'
|
|
14286
|
+
});
|
|
14287
|
+
}
|
|
14288
|
+
}
|
|
14289
|
+
|
|
14188
14290
|
class ResourcePartitions {
|
|
14189
14291
|
resource;
|
|
14190
14292
|
_strictValidation;
|
|
@@ -14397,7 +14499,11 @@ class ResourcePartitions {
|
|
|
14397
14499
|
return `resource=${this.resource.name}/partition=${partition}`;
|
|
14398
14500
|
}
|
|
14399
14501
|
extractValuesFromKey(id, keys, sortedFields) {
|
|
14400
|
-
const
|
|
14502
|
+
const idSegment = `id=${id}`;
|
|
14503
|
+
const keyForId = keys.find(key => {
|
|
14504
|
+
const segments = key.split('/');
|
|
14505
|
+
return segments.some(segment => segment === idSegment);
|
|
14506
|
+
});
|
|
14401
14507
|
if (!keyForId) {
|
|
14402
14508
|
throw new PartitionError(`Partition key not found for ID ${id}`, {
|
|
14403
14509
|
resourceName: this.resource.name,
|
|
@@ -14554,6 +14660,12 @@ class ResourcePartitions {
|
|
|
14554
14660
|
}
|
|
14555
14661
|
}
|
|
14556
14662
|
async getFromPartition({ id, partitionName, partitionValues = {} }) {
|
|
14663
|
+
validateS3KeySegment(id, 'id');
|
|
14664
|
+
for (const [fieldName, value] of Object.entries(partitionValues)) {
|
|
14665
|
+
if (value !== undefined && value !== null) {
|
|
14666
|
+
validateS3KeySegment(value, `partitionValues.${fieldName}`);
|
|
14667
|
+
}
|
|
14668
|
+
}
|
|
14557
14669
|
const partitions = this.getPartitions();
|
|
14558
14670
|
if (!partitions || !partitions[partitionName]) {
|
|
14559
14671
|
throw new PartitionError(`Partition '${partitionName}' not found`, {
|
|
@@ -14841,58 +14953,32 @@ class ResourceQuery {
|
|
|
14841
14953
|
return results;
|
|
14842
14954
|
}
|
|
14843
14955
|
async page({ offset = 0, size = 100, partition = null, partitionValues = {}, skipCount = false } = {}) {
|
|
14844
|
-
const
|
|
14845
|
-
|
|
14846
|
-
|
|
14847
|
-
|
|
14848
|
-
|
|
14849
|
-
|
|
14850
|
-
|
|
14851
|
-
|
|
14852
|
-
|
|
14853
|
-
|
|
14854
|
-
|
|
14855
|
-
|
|
14856
|
-
|
|
14857
|
-
|
|
14858
|
-
|
|
14859
|
-
|
|
14860
|
-
const pageResult = {
|
|
14861
|
-
items,
|
|
14862
|
-
totalItems,
|
|
14863
|
-
page,
|
|
14864
|
-
pageSize: size,
|
|
14865
|
-
totalPages,
|
|
14866
|
-
hasMore: items.length === size && (offset + size) < (totalItems || Infinity),
|
|
14867
|
-
_debug: {
|
|
14868
|
-
requestedSize: size,
|
|
14869
|
-
requestedOffset: offset,
|
|
14870
|
-
actualItemsReturned: items.length,
|
|
14871
|
-
skipCount,
|
|
14872
|
-
hasTotalItems: totalItems !== null
|
|
14873
|
-
}
|
|
14874
|
-
};
|
|
14875
|
-
this.resource._emitStandardized('paginated', pageResult);
|
|
14876
|
-
return pageResult;
|
|
14877
|
-
});
|
|
14878
|
-
if (ok && result)
|
|
14879
|
-
return result;
|
|
14880
|
-
return {
|
|
14881
|
-
items: [],
|
|
14882
|
-
totalItems: null,
|
|
14883
|
-
page: Math.floor(offset / size),
|
|
14884
|
-
pageSize: size,
|
|
14885
|
-
totalPages: null,
|
|
14886
|
-
hasMore: false,
|
|
14956
|
+
const effectiveSize = size > 0 ? size : 100;
|
|
14957
|
+
let totalItems = null;
|
|
14958
|
+
let totalPages = null;
|
|
14959
|
+
if (!skipCount) {
|
|
14960
|
+
totalItems = await this.count({ partition, partitionValues });
|
|
14961
|
+
totalPages = Math.ceil(totalItems / effectiveSize);
|
|
14962
|
+
}
|
|
14963
|
+
const page = Math.floor(offset / effectiveSize);
|
|
14964
|
+
const items = await this.list({ partition, partitionValues, limit: effectiveSize, offset });
|
|
14965
|
+
const pageResult = {
|
|
14966
|
+
items,
|
|
14967
|
+
totalItems,
|
|
14968
|
+
page,
|
|
14969
|
+
pageSize: effectiveSize,
|
|
14970
|
+
totalPages,
|
|
14971
|
+
hasMore: items.length === effectiveSize && (offset + effectiveSize) < (totalItems || Infinity),
|
|
14887
14972
|
_debug: {
|
|
14888
14973
|
requestedSize: size,
|
|
14889
14974
|
requestedOffset: offset,
|
|
14890
|
-
actualItemsReturned:
|
|
14891
|
-
skipCount
|
|
14892
|
-
hasTotalItems:
|
|
14893
|
-
error: err.message
|
|
14975
|
+
actualItemsReturned: items.length,
|
|
14976
|
+
skipCount,
|
|
14977
|
+
hasTotalItems: totalItems !== null
|
|
14894
14978
|
}
|
|
14895
14979
|
};
|
|
14980
|
+
this.resource._emitStandardized('paginated', pageResult);
|
|
14981
|
+
return pageResult;
|
|
14896
14982
|
}
|
|
14897
14983
|
async query(filter = {}, { limit = 100, offset = 0, partition = null, partitionValues = {} } = {}) {
|
|
14898
14984
|
await this.resource.executeHooks('beforeQuery', { filter, limit, offset, partition, partitionValues });
|
|
@@ -15613,6 +15699,71 @@ function getBehavior(behaviorName) {
|
|
|
15613
15699
|
const AVAILABLE_BEHAVIORS = Object.keys(behaviors);
|
|
15614
15700
|
const DEFAULT_BEHAVIOR = 'user-managed';
|
|
15615
15701
|
|
|
15702
|
+
/**
|
|
15703
|
+
* S3 Error Classification Utilities
|
|
15704
|
+
*
|
|
15705
|
+
* Provides consistent error classification across all S3 operations.
|
|
15706
|
+
* Handles differences between AWS SDK v3, MinIO, and other S3-compatible clients.
|
|
15707
|
+
*/
|
|
15708
|
+
/**
|
|
15709
|
+
* Checks if an error indicates the object/resource was not found.
|
|
15710
|
+
* Handles various S3 client error formats (AWS SDK v3, MinIO, etc.)
|
|
15711
|
+
*/
|
|
15712
|
+
function isNotFoundError$1(error) {
|
|
15713
|
+
if (!error)
|
|
15714
|
+
return false;
|
|
15715
|
+
const err = error;
|
|
15716
|
+
return (err.name === 'NoSuchKey' ||
|
|
15717
|
+
err.name === 'NotFound' ||
|
|
15718
|
+
err.code === 'NoSuchKey' ||
|
|
15719
|
+
err.code === 'NotFound' ||
|
|
15720
|
+
err.Code === 'NoSuchKey' ||
|
|
15721
|
+
err.Code === 'NotFound' ||
|
|
15722
|
+
err.statusCode === 404 ||
|
|
15723
|
+
err.$metadata?.httpStatusCode === 404 ||
|
|
15724
|
+
(typeof err.message === 'string' && err.message.includes('NoSuchKey')));
|
|
15725
|
+
}
|
|
15726
|
+
|
|
15727
|
+
/**
|
|
15728
|
+
* Safe Merge Utilities
|
|
15729
|
+
*
|
|
15730
|
+
* Provides functions to sanitize object keys before merging,
|
|
15731
|
+
* preventing prototype pollution attacks via __proto__, constructor, or prototype keys.
|
|
15732
|
+
*/
|
|
15733
|
+
// Keys that enable prototype pollution when assigned onto a plain object.
const DANGEROUS_KEYS = ['__proto__', 'constructor', 'prototype'];
/**
 * Determine whether a property key (or any segment of a dot-notation path)
 * is unsafe for object property assignment.
 *
 * @param {string} key - A simple key or a dot-separated path like "a.b.c".
 * @returns {boolean} true when the key must not be merged/assigned.
 */
function isDangerousKey(key) {
    if (DANGEROUS_KEYS.includes(key)) {
        return true;
    }
    if (!key.includes('.')) {
        return false;
    }
    // A dot-notation path is dangerous when ANY of its segments is forbidden.
    return key.split('.').some(segment => DANGEROUS_KEYS.includes(segment));
}
|
|
15747
|
+
/**
 * Recursively copy a value, dropping dangerous (prototype-pollution) keys
 * at every nesting level. Intended for use before deep merge operations.
 *
 * Primitives and null pass through untouched; arrays and objects are
 * rebuilt element-by-element / key-by-key.
 *
 * @param {*} obj - The value to sanitize.
 * @returns {*} A sanitized copy (or the original value for non-objects).
 */
function sanitizeDeep(obj) {
    // Primitives (and null) carry no keys and need no sanitizing.
    if (typeof obj !== 'object' || obj === null) {
        return obj;
    }
    if (Array.isArray(obj)) {
        return obj.map(item => sanitizeDeep(item));
    }
    const clean = {};
    for (const key of Object.keys(obj)) {
        if (isDangerousKey(key)) {
            continue;
        }
        clean[key] = sanitizeDeep(obj[key]);
    }
    return clean;
}
|
|
15766
|
+
|
|
15616
15767
|
class ResourcePersistence {
|
|
15617
15768
|
resource;
|
|
15618
15769
|
constructor(resource) {
|
|
@@ -15638,10 +15789,10 @@ class ResourcePersistence {
|
|
|
15638
15789
|
attributes.updatedAt = new Date().toISOString();
|
|
15639
15790
|
}
|
|
15640
15791
|
const attributesWithDefaults = this.validator.applyDefaults(attributes);
|
|
15641
|
-
const completeData = id$1 !== undefined
|
|
15792
|
+
const completeData = sanitizeDeep(id$1 !== undefined
|
|
15642
15793
|
? { id: id$1, ...attributesWithDefaults }
|
|
15643
|
-
: { ...attributesWithDefaults };
|
|
15644
|
-
const preProcessedData = await this.resource.executeHooks('beforeInsert', completeData);
|
|
15794
|
+
: { ...attributesWithDefaults });
|
|
15795
|
+
const preProcessedData = sanitizeDeep(await this.resource.executeHooks('beforeInsert', completeData));
|
|
15645
15796
|
const extraProps = Object.keys(preProcessedData).filter(k => !(k in completeData) || preProcessedData[k] !== completeData[k]);
|
|
15646
15797
|
const extraData = {};
|
|
15647
15798
|
for (const k of extraProps)
|
|
@@ -15865,7 +16016,7 @@ class ResourcePersistence {
|
|
|
15865
16016
|
}
|
|
15866
16017
|
async getOrNull(id) {
|
|
15867
16018
|
const [ok, err, data] = await tryFn(() => this.get(id));
|
|
15868
|
-
if (!ok && err && (err
|
|
16019
|
+
if (!ok && err && isNotFoundError$1(err)) {
|
|
15869
16020
|
return null;
|
|
15870
16021
|
}
|
|
15871
16022
|
if (!ok || !data)
|
|
@@ -15874,7 +16025,7 @@ class ResourcePersistence {
|
|
|
15874
16025
|
}
|
|
15875
16026
|
async getOrThrow(id) {
|
|
15876
16027
|
const [ok, err, data] = await tryFn(() => this.get(id));
|
|
15877
|
-
if (!ok && err && (err
|
|
16028
|
+
if (!ok && err && isNotFoundError$1(err)) {
|
|
15878
16029
|
throw new ResourceError(`Resource '${this.name}' with id '${id}' not found`, {
|
|
15879
16030
|
resourceName: this.name,
|
|
15880
16031
|
operation: 'getOrThrow',
|
|
@@ -15889,7 +16040,12 @@ class ResourcePersistence {
|
|
|
15889
16040
|
async exists(id) {
|
|
15890
16041
|
await this.resource.executeHooks('beforeExists', { id });
|
|
15891
16042
|
const key = this.resource.getResourceKey(id);
|
|
15892
|
-
const [ok] = await tryFn(() => this.client.headObject(key));
|
|
16043
|
+
const [ok, err] = await tryFn(() => this.client.headObject(key));
|
|
16044
|
+
if (!ok && err) {
|
|
16045
|
+
if (!isNotFoundError$1(err)) {
|
|
16046
|
+
throw err;
|
|
16047
|
+
}
|
|
16048
|
+
}
|
|
15893
16049
|
await this.resource.executeHooks('afterExists', { id, exists: ok });
|
|
15894
16050
|
return ok;
|
|
15895
16051
|
}
|
|
@@ -16070,7 +16226,8 @@ class ResourcePersistence {
|
|
|
16070
16226
|
mergedData.metadata = { ...mergedData.metadata };
|
|
16071
16227
|
mergedData.metadata.updatedAt = now;
|
|
16072
16228
|
}
|
|
16073
|
-
|
|
16229
|
+
mergedData = sanitizeDeep(mergedData);
|
|
16230
|
+
const preProcessedData = sanitizeDeep(await this.resource.executeHooks('beforeUpdate', mergedData));
|
|
16074
16231
|
const completeData = { ...originalData, ...preProcessedData, id };
|
|
16075
16232
|
const { isValid, errors, data } = await this.resource.validate(completeData, { includeId: true });
|
|
16076
16233
|
if (!isValid) {
|
|
@@ -16128,9 +16285,6 @@ class ResourcePersistence {
|
|
|
16128
16285
|
if (okParse)
|
|
16129
16286
|
finalContentType = 'application/json';
|
|
16130
16287
|
}
|
|
16131
|
-
if (this.versioningEnabled && originalData._v !== this.version) {
|
|
16132
|
-
await this.resource.createHistoricalVersion(id, originalData);
|
|
16133
|
-
}
|
|
16134
16288
|
const [ok, err] = await tryFn(() => this.client.putObject({
|
|
16135
16289
|
key,
|
|
16136
16290
|
body: finalBody,
|
|
@@ -16175,6 +16329,17 @@ class ResourcePersistence {
|
|
|
16175
16329
|
id
|
|
16176
16330
|
});
|
|
16177
16331
|
}
|
|
16332
|
+
if (this.versioningEnabled && originalData._v !== this.version) {
|
|
16333
|
+
const [okHistory, errHistory] = await tryFn(() => this.resource.createHistoricalVersion(id, originalData));
|
|
16334
|
+
if (!okHistory) {
|
|
16335
|
+
this.resource.emit('historyError', {
|
|
16336
|
+
operation: 'update',
|
|
16337
|
+
id,
|
|
16338
|
+
error: errHistory,
|
|
16339
|
+
message: errHistory.message
|
|
16340
|
+
});
|
|
16341
|
+
}
|
|
16342
|
+
}
|
|
16178
16343
|
const updatedData = await this.resource.composeFullObjectFromWrite({
|
|
16179
16344
|
id,
|
|
16180
16345
|
metadata: finalMetadata,
|
|
@@ -16297,6 +16462,7 @@ class ResourcePersistence {
|
|
|
16297
16462
|
if (this.config.timestamps) {
|
|
16298
16463
|
mergedData.updatedAt = new Date().toISOString();
|
|
16299
16464
|
}
|
|
16465
|
+
mergedData = sanitizeDeep(mergedData);
|
|
16300
16466
|
const { isValid, errors } = await this.validator.validate(mergedData);
|
|
16301
16467
|
if (!isValid) {
|
|
16302
16468
|
throw new ValidationError('Validation failed during patch', {
|
|
@@ -16360,7 +16526,7 @@ class ResourcePersistence {
|
|
|
16360
16526
|
}
|
|
16361
16527
|
attributesWithDefaults.updatedAt = new Date().toISOString();
|
|
16362
16528
|
}
|
|
16363
|
-
const completeData = { id, ...attributesWithDefaults };
|
|
16529
|
+
const completeData = sanitizeDeep({ id, ...attributesWithDefaults });
|
|
16364
16530
|
const { errors, isValid, data: validated, } = await this.resource.validate(completeData, { includeId: true });
|
|
16365
16531
|
if (!isValid) {
|
|
16366
16532
|
const errorMsg = (errors && errors.length && errors[0]?.message) ? errors[0].message : 'Replace failed';
|
|
@@ -16510,7 +16676,8 @@ class ResourcePersistence {
|
|
|
16510
16676
|
mergedData.metadata = { ...mergedData.metadata };
|
|
16511
16677
|
mergedData.metadata.updatedAt = now;
|
|
16512
16678
|
}
|
|
16513
|
-
|
|
16679
|
+
mergedData = sanitizeDeep(mergedData);
|
|
16680
|
+
const preProcessedData = sanitizeDeep(await this.resource.executeHooks('beforeUpdate', mergedData));
|
|
16514
16681
|
const completeData = { ...originalData, ...preProcessedData, id };
|
|
16515
16682
|
const { isValid, errors, data } = await this.resource.validate(completeData, { includeId: true });
|
|
16516
16683
|
if (!isValid) {
|
|
@@ -16572,6 +16739,17 @@ class ResourcePersistence {
|
|
|
16572
16739
|
error: err.message || 'Update failed'
|
|
16573
16740
|
};
|
|
16574
16741
|
}
|
|
16742
|
+
if (this.versioningEnabled && originalData._v !== this.version) {
|
|
16743
|
+
const [okHistory, errHistory] = await tryFn(() => this.resource.createHistoricalVersion(id, originalData));
|
|
16744
|
+
if (!okHistory) {
|
|
16745
|
+
this.resource.emit('historyError', {
|
|
16746
|
+
operation: 'updateConditional',
|
|
16747
|
+
id,
|
|
16748
|
+
error: errHistory,
|
|
16749
|
+
message: errHistory.message
|
|
16750
|
+
});
|
|
16751
|
+
}
|
|
16752
|
+
}
|
|
16575
16753
|
const updatedData = await this.resource.composeFullObjectFromWrite({
|
|
16576
16754
|
id,
|
|
16577
16755
|
metadata: processedMetadata,
|
|
@@ -16687,6 +16865,9 @@ function validateResourceConfig(config) {
|
|
|
16687
16865
|
else if (config.name.trim() === '') {
|
|
16688
16866
|
errors.push("Resource 'name' cannot be empty");
|
|
16689
16867
|
}
|
|
16868
|
+
else if (!isValidS3KeySegment(config.name)) {
|
|
16869
|
+
errors.push(`Resource 'name' must be URL-friendly (no /, \\, =, or %). Got: '${config.name}'`);
|
|
16870
|
+
}
|
|
16690
16871
|
if (!config.client) {
|
|
16691
16872
|
errors.push("S3 'client' is required");
|
|
16692
16873
|
}
|
|
@@ -16746,7 +16927,10 @@ function validateResourceConfig(config) {
|
|
|
16746
16927
|
}
|
|
16747
16928
|
else {
|
|
16748
16929
|
for (const [partitionName, partitionDef] of Object.entries(config.partitions)) {
|
|
16749
|
-
if (
|
|
16930
|
+
if (!isValidS3KeySegment(partitionName)) {
|
|
16931
|
+
errors.push(`Partition name '${partitionName}' must be URL-friendly (no /, \\, =, or %)`);
|
|
16932
|
+
}
|
|
16933
|
+
else if (typeof partitionDef !== 'object' || Array.isArray(partitionDef)) {
|
|
16750
16934
|
errors.push(`Partition '${partitionName}' must be an object`);
|
|
16751
16935
|
}
|
|
16752
16936
|
else if (!partitionDef.fields) {
|
|
@@ -16757,7 +16941,10 @@ function validateResourceConfig(config) {
|
|
|
16757
16941
|
}
|
|
16758
16942
|
else {
|
|
16759
16943
|
for (const [fieldName, fieldType] of Object.entries(partitionDef.fields)) {
|
|
16760
|
-
if (
|
|
16944
|
+
if (!isValidS3KeySegment(fieldName)) {
|
|
16945
|
+
errors.push(`Partition field '${fieldName}' must be URL-friendly (no /, \\, =, or %)`);
|
|
16946
|
+
}
|
|
16947
|
+
else if (typeof fieldType !== 'string') {
|
|
16761
16948
|
errors.push(`Partition '${partitionName}.fields.${fieldName}' must be a string`);
|
|
16762
16949
|
}
|
|
16763
16950
|
}
|
|
@@ -17205,6 +17392,7 @@ class Resource extends AsyncEventEmitter {
|
|
|
17205
17392
|
return this._partitions.applyRule(value, rule);
|
|
17206
17393
|
}
|
|
17207
17394
|
getResourceKey(id) {
|
|
17395
|
+
validateS3KeySegment(id, 'id');
|
|
17208
17396
|
const key = path$2.join('resource=' + this.name, 'data', `id=${id}`);
|
|
17209
17397
|
return key;
|
|
17210
17398
|
}
|
|
@@ -17974,16 +18162,13 @@ class DatabaseConnection {
|
|
|
17974
18162
|
}
|
|
17975
18163
|
}
|
|
17976
18164
|
const stopConcurrency = Math.max(1, Number.isFinite(db.executorPool?.concurrency) ? db.executorPool.concurrency : 5);
|
|
17977
|
-
await
|
|
17978
|
-
.withConcurrency(stopConcurrency)
|
|
17979
|
-
.for(db.pluginList)
|
|
17980
|
-
.process(async (plugin) => {
|
|
18165
|
+
await TasksPool.map(db.pluginList, async (plugin) => {
|
|
17981
18166
|
await tryFn(async () => {
|
|
17982
18167
|
if (plugin && typeof plugin.stop === 'function') {
|
|
17983
18168
|
await plugin.stop();
|
|
17984
18169
|
}
|
|
17985
18170
|
});
|
|
17986
|
-
});
|
|
18171
|
+
}, { concurrency: stopConcurrency });
|
|
17987
18172
|
}
|
|
17988
18173
|
if (db.resources && Object.keys(db.resources).length > 0) {
|
|
17989
18174
|
for (const [, resource] of Object.entries(db.resources)) {
|
|
@@ -18087,8 +18272,8 @@ class Database extends SafeEventEmitter {
|
|
|
18087
18272
|
})();
|
|
18088
18273
|
this.version = '1';
|
|
18089
18274
|
this.s3dbVersion = (() => {
|
|
18090
|
-
const [ok, , version] = tryFnSync(() => (typeof globalThis['18.0.11-next.
|
|
18091
|
-
? globalThis['18.0.11-next.
|
|
18275
|
+
const [ok, , version] = tryFnSync(() => (typeof globalThis['18.0.11-next.e8e71b5b'] !== 'undefined' && globalThis['18.0.11-next.e8e71b5b'] !== '18.0.11-next.e8e71b5b'
|
|
18276
|
+
? globalThis['18.0.11-next.e8e71b5b']
|
|
18092
18277
|
: 'latest'));
|
|
18093
18278
|
return ok ? version : 'latest';
|
|
18094
18279
|
})();
|
|
@@ -19718,6 +19903,120 @@ class PluginStorage {
|
|
|
19718
19903
|
});
|
|
19719
19904
|
return Promise.all(promises);
|
|
19720
19905
|
}
|
|
19906
|
+
/**
|
|
19907
|
+
* Set data only if the key does not exist (conditional PUT).
|
|
19908
|
+
* Uses ifNoneMatch: '*' to ensure atomicity.
|
|
19909
|
+
* @returns The ETag (version) if set succeeded, null if key already exists.
|
|
19910
|
+
*/
|
|
19911
|
+
async setIfNotExists(key, data, options = {}) {
|
|
19912
|
+
const [ok, err, response] = await tryFn(() => this.set(key, data, { ...options, ifNoneMatch: '*' }));
|
|
19913
|
+
if (!ok) {
|
|
19914
|
+
const error = err;
|
|
19915
|
+
// PreconditionFailed (412) or similar means key already exists
|
|
19916
|
+
if (error?.name === 'PreconditionFailed' ||
|
|
19917
|
+
error?.code === 'PreconditionFailed' ||
|
|
19918
|
+
error?.statusCode === 412) {
|
|
19919
|
+
return null;
|
|
19920
|
+
}
|
|
19921
|
+
throw err;
|
|
19922
|
+
}
|
|
19923
|
+
return response?.ETag ?? null;
|
|
19924
|
+
}
|
|
19925
|
+
/**
 * Get data along with its version (ETag) for conditional updates.
 *
 * Flow: read the object, merge parsed metadata with the JSON body (body keys
 * win), honor soft-expiry markers, then fetch the ETag via headObject.
 *
 * @returns Object with data and version, or { data: null, version: null } if not found.
 * @throws {PluginStorageError} on non-404 read failures or a corrupted JSON body.
 */
async getWithVersion(key) {
    const [ok, err, response] = await tryFn(() => this.client.getObject(key));
    if (!ok || !response) {
        const error = err;
        // "Not found" is an expected outcome, not an error — return nulls.
        if (error?.name === 'NoSuchKey' ||
            error?.code === 'NoSuchKey' ||
            error?.Code === 'NoSuchKey' ||
            error?.statusCode === 404) {
            return { data: null, version: null };
        }
        throw new PluginStorageError(`Failed to retrieve plugin data with version`, {
            pluginSlug: this.pluginSlug,
            key,
            operation: 'getWithVersion',
            original: err,
            suggestion: 'Check if the key exists and S3 permissions are correct'
        });
    }
    const metadata = response.Metadata || {};
    const parsedMetadata = this._parseMetadataValues(metadata);
    // Metadata alone is the fallback payload when the body is empty/absent.
    let data = parsedMetadata;
    if (response.Body) {
        const [parseOk, parseErr, result] = await tryFn(async () => {
            const bodyContent = await response.Body.transformToString();
            if (bodyContent && bodyContent.trim()) {
                const body = JSON.parse(bodyContent);
                // Body values override metadata values on key collisions.
                return { ...parsedMetadata, ...body };
            }
            return parsedMetadata;
        });
        if (!parseOk || !result) {
            throw new PluginStorageError(`Failed to parse JSON body`, {
                pluginSlug: this.pluginSlug,
                key,
                operation: 'getWithVersion',
                original: parseErr,
                suggestion: 'Body content may be corrupted'
            });
        }
        data = result;
    }
    // Check expiration — lazy TTL: expired entries are deleted on read.
    // (metadata keys are lowercased by S3, hence the _expiresat variant)
    const expiresAt = (data._expiresat || data._expiresAt);
    if (expiresAt && Date.now() > expiresAt) {
        await this.delete(key);
        return { data: null, version: null };
    }
    // Clean up internal fields
    delete data._expiresat;
    delete data._expiresAt;
    // Extract ETag from response - need to get it from headObject since getObject may not return it
    const [headOk, , headResponse] = await tryFn(() => this.client.headObject(key));
    const version = headOk && headResponse ? headResponse.ETag ?? null : null;
    return { data, version };
}
|
|
19984
|
+
/**
|
|
19985
|
+
* Set data only if the current version matches (conditional PUT).
|
|
19986
|
+
* Uses ifMatch to ensure no concurrent modifications.
|
|
19987
|
+
* @returns The new ETag (version) if set succeeded, null if version mismatch.
|
|
19988
|
+
*/
|
|
19989
|
+
async setIfVersion(key, data, version, options = {}) {
|
|
19990
|
+
const [ok, err, response] = await tryFn(() => this.set(key, data, { ...options, ifMatch: version }));
|
|
19991
|
+
if (!ok) {
|
|
19992
|
+
const error = err;
|
|
19993
|
+
// PreconditionFailed (412) means version mismatch
|
|
19994
|
+
if (error?.name === 'PreconditionFailed' ||
|
|
19995
|
+
error?.code === 'PreconditionFailed' ||
|
|
19996
|
+
error?.statusCode === 412) {
|
|
19997
|
+
return null;
|
|
19998
|
+
}
|
|
19999
|
+
throw err;
|
|
20000
|
+
}
|
|
20001
|
+
return response?.ETag ?? null;
|
|
20002
|
+
}
|
|
20003
|
+
/**
 * Delete data only if the current version matches (conditional DELETE).
 * @returns true if deleted, false if version mismatch or key not found.
 */
async deleteIfVersion(key, version) {
    // First verify the version matches
    // NOTE(review): this is a read-then-delete, not an atomic conditional
    // delete — another writer can modify the object between the headObject
    // and deleteObject calls. Confirm whether the client supports a true
    // `ifMatch` delete if stronger guarantees are required.
    const [headOk, , headResponse] = await tryFn(() => this.client.headObject(key));
    if (!headOk || !headResponse) {
        // Missing key (or head failure) counts as "nothing to delete".
        return false;
    }
    const currentVersion = headResponse.ETag;
    if (currentVersion !== version) {
        return false;
    }
    const [deleteOk] = await tryFn(() => this.client.deleteObject(key));
    return deleteOk;
}
|
|
19721
20020
|
async acquireLock(lockName, options = {}) {
|
|
19722
20021
|
return this._lock.acquire(lockName, options);
|
|
19723
20022
|
}
|
|
@@ -20110,19 +20409,6 @@ class InMemoryPersistence {
|
|
|
20110
20409
|
}
|
|
20111
20410
|
}
|
|
20112
20411
|
|
|
20113
|
-
let PromisePoolCache = null;
|
|
20114
|
-
async function loadPromisePool() {
|
|
20115
|
-
if (PromisePoolCache)
|
|
20116
|
-
return PromisePoolCache;
|
|
20117
|
-
try {
|
|
20118
|
-
const module = await import('@supercharge/promise-pool');
|
|
20119
|
-
PromisePoolCache = module;
|
|
20120
|
-
return PromisePoolCache;
|
|
20121
|
-
}
|
|
20122
|
-
catch {
|
|
20123
|
-
throw new Error('Failed to load @supercharge/promise-pool. Please install it: pnpm add @supercharge/promise-pool');
|
|
20124
|
-
}
|
|
20125
|
-
}
|
|
20126
20412
|
class HighPerformanceInserter {
|
|
20127
20413
|
resource;
|
|
20128
20414
|
batchSize;
|
|
@@ -20188,13 +20474,7 @@ class HighPerformanceInserter {
|
|
|
20188
20474
|
const batch = this.insertBuffer.splice(0, this.batchSize);
|
|
20189
20475
|
const startTime = Date.now();
|
|
20190
20476
|
const [ok] = await tryFn(async () => {
|
|
20191
|
-
const {
|
|
20192
|
-
const { results, errors } = await PromisePool
|
|
20193
|
-
.for(batch)
|
|
20194
|
-
.withConcurrency(this.concurrency)
|
|
20195
|
-
.process(async (item) => {
|
|
20196
|
-
return await this.performInsert(item);
|
|
20197
|
-
});
|
|
20477
|
+
const { results, errors } = await TasksPool.map(batch, async (item) => this.performInsert(item), { concurrency: this.concurrency });
|
|
20198
20478
|
const duration = Date.now() - startTime;
|
|
20199
20479
|
this.stats.inserted += results.filter(r => r.success).length;
|
|
20200
20480
|
this.stats.failed += errors.length;
|
|
@@ -20246,11 +20526,7 @@ class HighPerformanceInserter {
|
|
|
20246
20526
|
this.partitionProcessor = null;
|
|
20247
20527
|
return;
|
|
20248
20528
|
}
|
|
20249
|
-
|
|
20250
|
-
await PromisePool
|
|
20251
|
-
.for(batch)
|
|
20252
|
-
.withConcurrency(10)
|
|
20253
|
-
.process(async (item) => {
|
|
20529
|
+
await TasksPool.map(batch, async (item) => {
|
|
20254
20530
|
const [ok, err] = await tryFn(() => this.resource.createPartitionReferences(item.data));
|
|
20255
20531
|
if (ok) {
|
|
20256
20532
|
this.stats.partitionsPending--;
|
|
@@ -20261,7 +20537,7 @@ class HighPerformanceInserter {
|
|
|
20261
20537
|
error: err
|
|
20262
20538
|
});
|
|
20263
20539
|
}
|
|
20264
|
-
});
|
|
20540
|
+
}, { concurrency: 10 });
|
|
20265
20541
|
if (this.partitionQueue.length > 0) {
|
|
20266
20542
|
this.processPartitionsAsync();
|
|
20267
20543
|
}
|
|
@@ -20324,17 +20600,11 @@ class StreamInserter {
|
|
|
20324
20600
|
return { id, inserted: true };
|
|
20325
20601
|
}
|
|
20326
20602
|
async bulkInsert(items) {
|
|
20327
|
-
const {
|
|
20328
|
-
const { results, errors } = await PromisePool
|
|
20329
|
-
.for(items)
|
|
20330
|
-
.withConcurrency(this.concurrency)
|
|
20331
|
-
.process(async (item) => {
|
|
20332
|
-
return await this.fastInsert(item);
|
|
20333
|
-
});
|
|
20603
|
+
const { results, errors } = await TasksPool.map(items, async (item) => this.fastInsert(item), { concurrency: this.concurrency });
|
|
20334
20604
|
return {
|
|
20335
20605
|
success: results.length,
|
|
20336
20606
|
failed: errors.length,
|
|
20337
|
-
errors: errors.slice(0, 10)
|
|
20607
|
+
errors: errors.map(e => e.error).slice(0, 10)
|
|
20338
20608
|
};
|
|
20339
20609
|
}
|
|
20340
20610
|
}
|
|
@@ -46079,19 +46349,16 @@ async function runConsolidation(handler, storage, config, emitFn) {
|
|
|
46079
46349
|
return result;
|
|
46080
46350
|
}
|
|
46081
46351
|
const byOriginalId = groupByOriginalId(allTransactions);
|
|
46082
|
-
const { results, errors } = await
|
|
46083
|
-
.for(Object.entries(byOriginalId))
|
|
46084
|
-
.withConcurrency(10)
|
|
46085
|
-
.process(async ([originalId, transactions]) => {
|
|
46352
|
+
const { results, errors } = await TasksPool.map(Object.entries(byOriginalId), async ([originalId, transactions]) => {
|
|
46086
46353
|
return consolidateRecord(handler, originalId, transactions, config);
|
|
46087
|
-
});
|
|
46354
|
+
}, { concurrency: 10 });
|
|
46088
46355
|
for (const recordResult of results) {
|
|
46089
46356
|
if (recordResult) {
|
|
46090
46357
|
result.recordsProcessed++;
|
|
46091
46358
|
result.transactionsApplied += recordResult.transactionsApplied;
|
|
46092
46359
|
}
|
|
46093
46360
|
}
|
|
46094
|
-
result.errors = errors;
|
|
46361
|
+
result.errors = errors.map(e => e.error);
|
|
46095
46362
|
result.success = errors.length === 0;
|
|
46096
46363
|
if (config.enableAnalytics && handler.analyticsResource) {
|
|
46097
46364
|
const analyticsConfig = {
|
|
@@ -46444,13 +46711,10 @@ async function runGarbageCollection(transactionResource, storage, config, emitFn
|
|
|
46444
46711
|
if (!oldTransactions || oldTransactions.length === 0) {
|
|
46445
46712
|
return;
|
|
46446
46713
|
}
|
|
46447
|
-
const { results, errors } = await
|
|
46448
|
-
.for(oldTransactions)
|
|
46449
|
-
.withConcurrency(10)
|
|
46450
|
-
.process(async (txn) => {
|
|
46714
|
+
const { results, errors } = await TasksPool.map(oldTransactions, async (txn) => {
|
|
46451
46715
|
const [deleted] = await tryFn(() => transactionResource.delete(txn.id));
|
|
46452
46716
|
return deleted;
|
|
46453
|
-
});
|
|
46717
|
+
}, { concurrency: 10 });
|
|
46454
46718
|
if (emitFn) {
|
|
46455
46719
|
emitFn('plg:eventual-consistency:gc-completed', {
|
|
46456
46720
|
resource: config.resource,
|
|
@@ -80655,9 +80919,7 @@ class SchedulerManager {
|
|
|
80655
80919
|
targetCount: activeTargets.length,
|
|
80656
80920
|
targets: activeTargets.map(t => t.id)
|
|
80657
80921
|
});
|
|
80658
|
-
await
|
|
80659
|
-
.for(activeTargets)
|
|
80660
|
-
.process(async (targetEntry) => {
|
|
80922
|
+
await TasksPool.map(activeTargets, async (targetEntry) => {
|
|
80661
80923
|
try {
|
|
80662
80924
|
const report = await this.plugin.runDiagnostics(targetEntry.target, {
|
|
80663
80925
|
behavior: targetEntry.behavior,
|
|
@@ -80682,7 +80944,7 @@ class SchedulerManager {
|
|
|
80682
80944
|
error
|
|
80683
80945
|
});
|
|
80684
80946
|
}
|
|
80685
|
-
});
|
|
80947
|
+
}, { concurrency: this.plugin.config.concurrency || 1 });
|
|
80686
80948
|
this.plugin.emit('recon:sweep-completed', {
|
|
80687
80949
|
reason,
|
|
80688
80950
|
targetCount: activeTargets.length
|
|
@@ -87464,29 +87726,16 @@ class BaseReplicator extends EventEmitter {
|
|
|
87464
87726
|
});
|
|
87465
87727
|
}
|
|
87466
87728
|
const limit = Math.max(1, concurrency ?? this.batchConcurrency ?? 5);
|
|
87467
|
-
const
|
|
87468
|
-
|
|
87469
|
-
|
|
87470
|
-
|
|
87471
|
-
|
|
87472
|
-
.process(async (record) => {
|
|
87473
|
-
try {
|
|
87474
|
-
const result = await handler(record);
|
|
87475
|
-
results.push(result);
|
|
87476
|
-
}
|
|
87477
|
-
catch (error) {
|
|
87478
|
-
if (typeof mapError === 'function') {
|
|
87479
|
-
const mapped = mapError(error, record);
|
|
87480
|
-
if (mapped !== undefined) {
|
|
87481
|
-
errors.push(mapped);
|
|
87482
|
-
}
|
|
87483
|
-
}
|
|
87484
|
-
else {
|
|
87485
|
-
errors.push({ record, error: error });
|
|
87486
|
-
}
|
|
87487
|
-
}
|
|
87729
|
+
const poolResult = await TasksPool.map(records, async (record) => handler(record), {
|
|
87730
|
+
concurrency: limit,
|
|
87731
|
+
onItemError: mapError
|
|
87732
|
+
? (error, record) => mapError(error, record)
|
|
87733
|
+
: undefined
|
|
87488
87734
|
});
|
|
87489
|
-
|
|
87735
|
+
const errors = mapError
|
|
87736
|
+
? poolResult.errors.map(e => mapError(e.error, e.item)).filter(m => m !== undefined)
|
|
87737
|
+
: poolResult.errors.map(e => ({ record: e.item, error: e.error }));
|
|
87738
|
+
return { results: poolResult.results, errors };
|
|
87490
87739
|
}
|
|
87491
87740
|
createError(message, details = {}) {
|
|
87492
87741
|
return new ReplicationError(message, {
|
|
@@ -89190,10 +89439,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89190
89439
|
}
|
|
89191
89440
|
const entries = applicableReplicators.map((replicator, index) => ({ replicator, index }));
|
|
89192
89441
|
const outcomes = new Array(entries.length);
|
|
89193
|
-
const poolResult = await
|
|
89194
|
-
.withConcurrency(this.config.replicatorConcurrency)
|
|
89195
|
-
.for(entries)
|
|
89196
|
-
.process(async ({ replicator, index }) => {
|
|
89442
|
+
const poolResult = await TasksPool.map(entries, async ({ replicator, index }) => {
|
|
89197
89443
|
const [ok, error, replicationResult] = await tryFn(async () => {
|
|
89198
89444
|
const result = await this.retryWithBackoff(() => replicator.replicate(resourceName, operation, sanitizedData, recordId, sanitizedBefore), this.config.maxRetries);
|
|
89199
89445
|
this.emit('plg:replicator:replicated', {
|
|
@@ -89225,7 +89471,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89225
89471
|
}
|
|
89226
89472
|
outcomes[index] = { status: 'rejected', reason: error };
|
|
89227
89473
|
throw error;
|
|
89228
|
-
});
|
|
89474
|
+
}, { concurrency: this.config.replicatorConcurrency });
|
|
89229
89475
|
if (poolResult.errors.length > 0) {
|
|
89230
89476
|
for (const poolError of poolResult.errors) {
|
|
89231
89477
|
const { item, error } = poolError;
|
|
@@ -89246,10 +89492,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89246
89492
|
}
|
|
89247
89493
|
const entries = applicableReplicators.map((replicator, index) => ({ replicator, index }));
|
|
89248
89494
|
const outcomes = new Array(entries.length);
|
|
89249
|
-
await
|
|
89250
|
-
.withConcurrency(this.config.replicatorConcurrency)
|
|
89251
|
-
.for(entries)
|
|
89252
|
-
.process(async ({ replicator, index }) => {
|
|
89495
|
+
await TasksPool.map(entries, async ({ replicator, index }) => {
|
|
89253
89496
|
const [wrapperOk, wrapperError] = await tryFn(async () => {
|
|
89254
89497
|
const preparedData = item.data ? this.filterInternalFields(item.data) : null;
|
|
89255
89498
|
const preparedBefore = item.beforeData ? this.filterInternalFields(item.beforeData) : null;
|
|
@@ -89299,7 +89542,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89299
89542
|
const failure = { success: false, error: wrapperError.message };
|
|
89300
89543
|
outcomes[index] = { status: 'fulfilled', value: failure };
|
|
89301
89544
|
return failure;
|
|
89302
|
-
});
|
|
89545
|
+
}, { concurrency: this.config.replicatorConcurrency });
|
|
89303
89546
|
return outcomes;
|
|
89304
89547
|
}
|
|
89305
89548
|
async logReplicator(item) {
|
|
@@ -89352,10 +89595,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89352
89595
|
async getReplicatorStats() {
|
|
89353
89596
|
const entries = this.replicators.map((replicator, index) => ({ replicator, index }));
|
|
89354
89597
|
const replicatorStats = new Array(entries.length);
|
|
89355
|
-
const poolResult = await
|
|
89356
|
-
.withConcurrency(this.config.replicatorConcurrency)
|
|
89357
|
-
.for(entries)
|
|
89358
|
-
.process(async ({ replicator, index }) => {
|
|
89598
|
+
const poolResult = await TasksPool.map(entries, async ({ replicator, index }) => {
|
|
89359
89599
|
const status = await replicator.getStatus();
|
|
89360
89600
|
const info = {
|
|
89361
89601
|
id: replicator.id,
|
|
@@ -89365,7 +89605,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89365
89605
|
};
|
|
89366
89606
|
replicatorStats[index] = info;
|
|
89367
89607
|
return info;
|
|
89368
|
-
});
|
|
89608
|
+
}, { concurrency: this.config.replicatorConcurrency });
|
|
89369
89609
|
if (poolResult.errors.length > 0) {
|
|
89370
89610
|
const poolError = poolResult.errors[0];
|
|
89371
89611
|
const { item, error } = poolError;
|
|
@@ -89410,10 +89650,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89410
89650
|
status: 'failed'
|
|
89411
89651
|
});
|
|
89412
89652
|
let retried = 0;
|
|
89413
|
-
const processResult = await
|
|
89414
|
-
.withConcurrency(this.config.replicatorConcurrency)
|
|
89415
|
-
.for(failedLogs || [])
|
|
89416
|
-
.process(async (log) => {
|
|
89653
|
+
const processResult = await TasksPool.map(failedLogs || [], async (log) => {
|
|
89417
89654
|
const sanitizedData = log.data ? this.filterInternalFields(log.data) : null;
|
|
89418
89655
|
const sanitizedBefore = log.beforeData ? this.filterInternalFields(log.beforeData) : null;
|
|
89419
89656
|
const [ok, err, results] = await tryFn(async () => {
|
|
@@ -89464,7 +89701,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89464
89701
|
error: failureMessage,
|
|
89465
89702
|
retryCount: (Number(log.retryCount) || 0) + 1
|
|
89466
89703
|
});
|
|
89467
|
-
});
|
|
89704
|
+
}, { concurrency: this.config.replicatorConcurrency });
|
|
89468
89705
|
if (processResult.errors.length) {
|
|
89469
89706
|
for (const poolError of processResult.errors) {
|
|
89470
89707
|
const { item, error } = poolError;
|
|
@@ -89502,10 +89739,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89502
89739
|
const records = Array.isArray(page) ? page : (page.items || []);
|
|
89503
89740
|
if (records.length === 0)
|
|
89504
89741
|
break;
|
|
89505
|
-
const poolResult = await
|
|
89506
|
-
.withConcurrency(this.config.replicatorConcurrency)
|
|
89507
|
-
.for(records)
|
|
89508
|
-
.process(async (record) => {
|
|
89742
|
+
const poolResult = await TasksPool.map(records, async (record) => {
|
|
89509
89743
|
const sanitizedRecord = this.filterInternalFields(record);
|
|
89510
89744
|
const [replicateOk, replicateError, result] = await tryFn(() => replicator.replicate(resourceName, 'insert', sanitizedRecord, sanitizedRecord.id));
|
|
89511
89745
|
if (!replicateOk) {
|
|
@@ -89533,7 +89767,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89533
89767
|
success: true
|
|
89534
89768
|
});
|
|
89535
89769
|
return result;
|
|
89536
|
-
});
|
|
89770
|
+
}, { concurrency: this.config.replicatorConcurrency });
|
|
89537
89771
|
if (poolResult.errors.length > 0) {
|
|
89538
89772
|
const poolError = poolResult.errors[0];
|
|
89539
89773
|
throw poolError.error;
|
|
@@ -89547,10 +89781,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89547
89781
|
async stop() {
|
|
89548
89782
|
const [ok, error] = await tryFn(async () => {
|
|
89549
89783
|
if (this.replicators && this.replicators.length > 0) {
|
|
89550
|
-
await
|
|
89551
|
-
.withConcurrency(this.config.stopConcurrency)
|
|
89552
|
-
.for(this.replicators)
|
|
89553
|
-
.process(async (replicator) => {
|
|
89784
|
+
await TasksPool.map(this.replicators, async (replicator) => {
|
|
89554
89785
|
const [replicatorOk, replicatorError] = await tryFn(async () => {
|
|
89555
89786
|
if (replicator && typeof replicator.stop === 'function') {
|
|
89556
89787
|
await replicator.stop();
|
|
@@ -89564,7 +89795,7 @@ class ReplicatorPlugin extends Plugin {
|
|
|
89564
89795
|
error: replicatorError.message
|
|
89565
89796
|
});
|
|
89566
89797
|
}
|
|
89567
|
-
});
|
|
89798
|
+
}, { concurrency: this.config.stopConcurrency });
|
|
89568
89799
|
}
|
|
89569
89800
|
this.removeDatabaseHooks();
|
|
89570
89801
|
if (this.database && this.database.resources) {
|
|
@@ -89914,17 +90145,14 @@ class QueueConsumerPlugin extends Plugin {
|
|
|
89914
90145
|
if (startTasks.length === 0) {
|
|
89915
90146
|
return;
|
|
89916
90147
|
}
|
|
89917
|
-
const { errors } = await
|
|
89918
|
-
.withConcurrency(this.startConcurrency)
|
|
89919
|
-
.for(startTasks)
|
|
89920
|
-
.process(async (task) => {
|
|
90148
|
+
const { errors } = await TasksPool.map(startTasks, async (task) => {
|
|
89921
90149
|
await task.start();
|
|
89922
90150
|
return `${task.driver}:${task.resource}`;
|
|
89923
|
-
});
|
|
90151
|
+
}, { concurrency: this.startConcurrency });
|
|
89924
90152
|
if (errors.length > 0) {
|
|
89925
|
-
const messages = errors.map((
|
|
89926
|
-
const task =
|
|
89927
|
-
const reason =
|
|
90153
|
+
const messages = errors.map((errorInfo) => {
|
|
90154
|
+
const task = errorInfo.item;
|
|
90155
|
+
const reason = errorInfo.error;
|
|
89928
90156
|
const identifier = task ? `${task.driver || 'unknown'}:${task.resource || 'unknown'}` : 'unknown';
|
|
89929
90157
|
return `[${identifier}] ${reason?.message || reason}`;
|
|
89930
90158
|
});
|
|
@@ -89947,16 +90175,13 @@ class QueueConsumerPlugin extends Plugin {
|
|
|
89947
90175
|
consumer,
|
|
89948
90176
|
stop: () => consumer.stop()
|
|
89949
90177
|
}));
|
|
89950
|
-
const { errors } = await
|
|
89951
|
-
.withConcurrency(this.stopConcurrency)
|
|
89952
|
-
.for(stopTasks)
|
|
89953
|
-
.process(async (task) => {
|
|
90178
|
+
const { errors } = await TasksPool.map(stopTasks, async (task) => {
|
|
89954
90179
|
await task.stop();
|
|
89955
90180
|
return task.consumer;
|
|
89956
|
-
});
|
|
90181
|
+
}, { concurrency: this.stopConcurrency });
|
|
89957
90182
|
if (errors.length > 0) {
|
|
89958
|
-
errors.forEach((
|
|
89959
|
-
const reason =
|
|
90183
|
+
errors.forEach((errorInfo) => {
|
|
90184
|
+
const reason = errorInfo.error;
|
|
89960
90185
|
this.logger.warn({ error: reason?.message || reason }, `Failed to stop consumer: ${reason?.message || reason}`);
|
|
89961
90186
|
});
|
|
89962
90187
|
}
|