s3db.js 11.2.3 → 11.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/s3db-cli.js +588 -74
- package/dist/s3db.cjs.js +2472 -150
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +2464 -151
- package/dist/s3db.es.js.map +1 -1
- package/package.json +2 -1
- package/src/behaviors/enforce-limits.js +28 -4
- package/src/behaviors/index.js +6 -1
- package/src/client.class.js +11 -1
- package/src/concerns/base62.js +70 -0
- package/src/concerns/partition-queue.js +7 -1
- package/src/concerns/plugin-storage.js +75 -13
- package/src/database.class.js +19 -4
- package/src/errors.js +306 -27
- package/src/partition-drivers/base-partition-driver.js +12 -2
- package/src/partition-drivers/index.js +7 -1
- package/src/partition-drivers/memory-partition-driver.js +20 -5
- package/src/partition-drivers/sqs-partition-driver.js +6 -1
- package/src/plugins/audit.errors.js +46 -0
- package/src/plugins/backup/base-backup-driver.class.js +36 -6
- package/src/plugins/backup/filesystem-backup-driver.class.js +55 -7
- package/src/plugins/backup/index.js +40 -9
- package/src/plugins/backup/multi-backup-driver.class.js +69 -9
- package/src/plugins/backup/s3-backup-driver.class.js +48 -6
- package/src/plugins/backup.errors.js +45 -0
- package/src/plugins/cache/cache.class.js +8 -1
- package/src/plugins/cache.errors.js +47 -0
- package/src/plugins/cache.plugin.js +8 -1
- package/src/plugins/fulltext.errors.js +46 -0
- package/src/plugins/fulltext.plugin.js +15 -3
- package/src/plugins/index.js +1 -0
- package/src/plugins/metrics.errors.js +46 -0
- package/src/plugins/queue-consumer.plugin.js +31 -4
- package/src/plugins/queue.errors.js +46 -0
- package/src/plugins/replicator.errors.js +46 -0
- package/src/plugins/replicator.plugin.js +40 -5
- package/src/plugins/replicators/base-replicator.class.js +19 -3
- package/src/plugins/replicators/index.js +9 -3
- package/src/plugins/replicators/s3db-replicator.class.js +38 -8
- package/src/plugins/scheduler.errors.js +46 -0
- package/src/plugins/scheduler.plugin.js +79 -19
- package/src/plugins/state-machine.errors.js +47 -0
- package/src/plugins/state-machine.plugin.js +86 -17
- package/src/plugins/vector/distances.js +173 -0
- package/src/plugins/vector/kmeans.js +367 -0
- package/src/plugins/vector/metrics.js +369 -0
- package/src/plugins/vector/vector-error.js +43 -0
- package/src/plugins/vector.plugin.js +687 -0
- package/src/schema.class.js +232 -41
- package/src/stream/index.js +6 -1
- package/src/stream/resource-reader.class.js +6 -1
- package/src/validator.class.js +8 -0
package/dist/s3db.es.js
CHANGED

@@ -77,6 +77,41 @@ const decodeDecimal = (s) => {
 const num = decPart ? Number(decodedInt + "." + decPart) : decodedInt;
 return negative ? -num : num;
 };
+const encodeFixedPoint = (n, precision = 6) => {
+if (typeof n !== "number" || isNaN(n)) return "undefined";
+if (!isFinite(n)) return "undefined";
+const scale = Math.pow(10, precision);
+const scaled = Math.round(n * scale);
+if (scaled === 0) return "^0";
+const negative = scaled < 0;
+let num = Math.abs(scaled);
+let s = "";
+while (num > 0) {
+s = alphabet[num % base] + s;
+num = Math.floor(num / base);
+}
+return "^" + (negative ? "-" : "") + s;
+};
+const decodeFixedPoint = (s, precision = 6) => {
+if (typeof s !== "string") return NaN;
+if (!s.startsWith("^")) return NaN;
+s = s.slice(1);
+if (s === "0") return 0;
+let negative = false;
+if (s[0] === "-") {
+negative = true;
+s = s.slice(1);
+}
+let r = 0;
+for (let i = 0; i < s.length; i++) {
+const idx = charToValue[s[i]];
+if (idx === void 0) return NaN;
+r = r * base + idx;
+}
+const scale = Math.pow(10, precision);
+const scaled = negative ? -r : r;
+return scaled / scale;
+};

 const utf8BytesMemory = /* @__PURE__ */ new Map();
 const UTF8_MEMORY_MAX_SIZE = 1e4;
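The hunk above adds a fixed-point companion to the existing base62 helpers: a number is scaled by 10^precision, rounded to an integer, base62-encoded, and prefixed with a "^" sentinel; decoding reverses the steps. The standalone sketch below illustrates the round trip. It assumes a plain base62 alphabet for `alphabet`, `base`, and `charToValue` (the bundle defines its own), so the exact encoded strings may differ from what s3db.js produces.

// Standalone sketch of the fixed-point round trip (assumed base62 alphabet, see note above).
const alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
const base = alphabet.length;
const charToValue = Object.fromEntries([...alphabet].map((c, i) => [c, i]));

const encodeFixedPoint = (n, precision = 6) => {
  if (typeof n !== "number" || !isFinite(n)) return "undefined";
  const scaled = Math.round(n * Math.pow(10, precision)); // shift the decimal point away
  if (scaled === 0) return "^0";
  let num = Math.abs(scaled), s = "";
  while (num > 0) { s = alphabet[num % base] + s; num = Math.floor(num / base); }
  return "^" + (scaled < 0 ? "-" : "") + s;                // "^" marks a fixed-point value
};

const decodeFixedPoint = (s, precision = 6) => {
  if (typeof s !== "string" || !s.startsWith("^")) return NaN;
  let body = s.slice(1);
  if (body === "0") return 0;
  const negative = body.startsWith("-");
  if (negative) body = body.slice(1);
  let r = 0;
  for (const ch of body) {
    const idx = charToValue[ch];
    if (idx === void 0) return NaN;
    r = r * base + idx;
  }
  return (negative ? -r : r) / Math.pow(10, precision);    // shift the decimal point back
};

console.log(encodeFixedPoint(3.141593));                    // compact "^..." base62 string
console.log(decodeFixedPoint(encodeFixedPoint(3.141593)));  // 3.141593
console.log(decodeFixedPoint(encodeFixedPoint(-0.5)));      // -0.5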
@@ -218,7 +253,7 @@ function calculateEffectiveLimit(config = {}) {
 }

 class BaseError extends Error {
-constructor({ verbose, bucket, key, message, code, statusCode, requestId, awsMessage, original, commandName, commandInput, metadata,
+constructor({ verbose, bucket, key, message, code, statusCode, requestId, awsMessage, original, commandName, commandInput, metadata, description, ...rest }) {
 if (verbose) message = message + `

 Verbose:

@@ -243,7 +278,6 @@ ${JSON.stringify(rest, null, 2)}`;
 this.commandName = commandName;
 this.commandInput = commandInput;
 this.metadata = metadata;
-this.suggestion = suggestion;
 this.description = description;
 this.data = { bucket, key, ...rest, verbose, message };
 }

@@ -261,7 +295,6 @@ ${JSON.stringify(rest, null, 2)}`;
 commandName: this.commandName,
 commandInput: this.commandInput,
 metadata: this.metadata,
-suggestion: this.suggestion,
 description: this.description,
 data: this.data,
 original: this.original,

@@ -402,26 +435,26 @@ function mapAwsError(err, context = {}) {
 const metadata = err.$metadata ? { ...err.$metadata } : void 0;
 const commandName = context.commandName;
 const commandInput = context.commandInput;
-let
+let description;
 if (code === "NoSuchKey" || code === "NotFound") {
-
-return new NoSuchKey({ ...context, original: err, metadata, commandName, commandInput,
+description = "The specified key does not exist in the bucket. Check if the key exists and if your credentials have permission to access it.";
+return new NoSuchKey({ ...context, original: err, metadata, commandName, commandInput, description });
 }
 if (code === "NoSuchBucket") {
-
-return new NoSuchBucket({ ...context, original: err, metadata, commandName, commandInput,
+description = "The specified bucket does not exist. Check if the bucket name is correct and if your credentials have permission to access it.";
+return new NoSuchBucket({ ...context, original: err, metadata, commandName, commandInput, description });
 }
 if (code === "AccessDenied" || err.statusCode === 403 || code === "Forbidden") {
-
-return new PermissionError("Access denied", { ...context, original: err, metadata, commandName, commandInput,
+description = "Access denied. Check your AWS credentials, IAM permissions, and bucket policy.";
+return new PermissionError("Access denied", { ...context, original: err, metadata, commandName, commandInput, description });
 }
 if (code === "ValidationError" || err.statusCode === 400) {
-
-return new ValidationError("Validation error", { ...context, original: err, metadata, commandName, commandInput,
+description = "Validation error. Check the request parameters and payload format.";
+return new ValidationError("Validation error", { ...context, original: err, metadata, commandName, commandInput, description });
 }
 if (code === "MissingMetadata") {
-
-return new MissingMetadata({ ...context, original: err, metadata, commandName, commandInput,
+description = "Object metadata is missing or invalid. Check if the object was uploaded correctly.";
+return new MissingMetadata({ ...context, original: err, metadata, commandName, commandInput, description });
 }
 const errorDetails = [
 `Unknown error: ${err.message || err.toString()}`,

@@ -429,27 +462,31 @@ function mapAwsError(err, context = {}) {
 err.statusCode && `Status: ${err.statusCode}`,
 err.stack && `Stack: ${err.stack.split("\n")[0]}`
 ].filter(Boolean).join(" | ");
-
-return new UnknownError(errorDetails, { ...context, original: err, metadata, commandName, commandInput,
+description = `Check the error details and AWS documentation. Original error: ${err.message || err.toString()}`;
+return new UnknownError(errorDetails, { ...context, original: err, metadata, commandName, commandInput, description });
 }
 class ConnectionStringError extends S3dbError {
 constructor(message, details = {}) {
-
+const description = details.description || "Invalid connection string format. Check the connection string syntax and credentials.";
+super(message, { ...details, description });
 }
 }
 class CryptoError extends S3dbError {
 constructor(message, details = {}) {
-
+const description = details.description || "Cryptography operation failed. Check if the crypto library is available and input is valid.";
+super(message, { ...details, description });
 }
 }
 class SchemaError extends S3dbError {
 constructor(message, details = {}) {
-
+const description = details.description || "Schema validation failed. Check schema definition and input data format.";
+super(message, { ...details, description });
 }
 }
 class ResourceError extends S3dbError {
 constructor(message, details = {}) {
-
+const description = details.description || "Resource operation failed. Check resource configuration, attributes, and operation context.";
+super(message, { ...details, description });
 Object.assign(this, details);
 }
 }
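Downstream effect of the hunks above: mapped AWS errors now carry a `description` string with remediation hints, and `suggestion` is no longer set on BaseError. A hedged sketch of consuming it follows; the resource access pattern (`db.resources.users.getMany`) mirrors calls visible elsewhere in this diff, but the exact public API surface is assumed rather than shown here.

// Hedged sketch; `db` is a connected s3db.js Database instance (setup not shown).
async function readUsersSafely(db, ids) {
  try {
    return await db.resources.users.getMany(ids);
  } catch (err) {
    // In 11.2.5, mapAwsError attaches a human-readable description (e.g. for NoSuchKey,
    // NoSuchBucket, AccessDenied); the old `suggestion` field was dropped from BaseError.
    console.error(err.message);
    if (err.description) console.error(err.description);
    throw err;
  }
}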
@@ -478,13 +515,12 @@ ${details.strictValidation === false ? " \u2022 Update partition definition to
 \u2022 Update partition definition to use existing fields, OR
 \u2022 Use strictValidation: false to skip this check during testing`}

-Docs: https://
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#partitions
 `.trim();
 }
 super(message, {
 ...details,
-description
-suggestion: details.suggestion || "Check partition definition, fields, and input values."
+description
 });
 }
 }

@@ -543,7 +579,7 @@ Example fix:
 await db.connect(); // Plugin initialized here
 await db.createResource({ name: '${resourceName}', ... }); // Analytics resource created here

-Docs: https://
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/eventual-consistency.md
 `.trim();
 super(message, {
 ...rest,

@@ -553,8 +589,260 @@ Docs: https://docs.s3db.js.org/plugins/eventual-consistency#troubleshooting
 configuredResources,
 registeredResources,
 pluginInitialized,
-description
-
+description
+});
+}
+}
+class PluginError extends S3dbError {
+constructor(message, details = {}) {
+const {
+pluginName = "Unknown",
+operation = "unknown",
+...rest
+} = details;
+let description = details.description;
+if (!description) {
+description = `
+Plugin Error
+
+Plugin: ${pluginName}
+Operation: ${operation}
+
+Possible causes:
+1. Plugin not properly initialized
+2. Plugin configuration is invalid
+3. Plugin dependencies not met
+4. Plugin method called before installation
+
+Solution:
+Ensure plugin is added to database and connect() is called before usage.
+
+Example:
+const db = new Database({
+bucket: 'my-bucket',
+plugins: [new ${pluginName}({ /* config */ })]
+});
+
+await db.connect(); // Plugin installed here
+// Now plugin methods are available
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/README.md
+`.trim();
+}
+super(message, {
+...rest,
+pluginName,
+operation,
+description
+});
+}
+}
+class PluginStorageError extends S3dbError {
+constructor(message, details = {}) {
+const {
+pluginSlug = "unknown",
+key = "",
+operation = "unknown",
+...rest
+} = details;
+let description = details.description;
+if (!description) {
+description = `
+Plugin Storage Error
+
+Plugin: ${pluginSlug}
+Key: ${key}
+Operation: ${operation}
+
+Possible causes:
+1. Storage not initialized (plugin not installed)
+2. Invalid key format
+3. S3 operation failed
+4. Permissions issue
+
+Solution:
+Ensure plugin has access to storage and key is valid.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/README.md#plugin-storage
+`.trim();
+}
+super(message, {
+...rest,
+pluginSlug,
+key,
+operation,
+description
+});
+}
+}
+class PartitionDriverError extends S3dbError {
+constructor(message, details = {}) {
+const {
+driver = "unknown",
+operation = "unknown",
+queueSize,
+maxQueueSize,
+...rest
+} = details;
+let description = details.description;
+if (!description && queueSize !== void 0 && maxQueueSize !== void 0) {
+description = `
+Partition Driver Error
+
+Driver: ${driver}
+Operation: ${operation}
+Queue Status: ${queueSize}/${maxQueueSize}
+
+Possible causes:
+1. Queue is full (backpressure)
+2. Driver not properly configured
+3. SQS permissions issue (if using SQS driver)
+
+Solution:
+${queueSize >= maxQueueSize ? "Wait for queue to drain or increase maxQueueSize" : "Check driver configuration and permissions"}
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#partition-drivers
+`.trim();
+} else if (!description) {
+description = `
+Partition Driver Error
+
+Driver: ${driver}
+Operation: ${operation}
+
+Check driver configuration and permissions.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#partition-drivers
+`.trim();
+}
+super(message, {
+...rest,
+driver,
+operation,
+queueSize,
+maxQueueSize,
+description
+});
+}
+}
+class BehaviorError extends S3dbError {
+constructor(message, details = {}) {
+const {
+behavior = "unknown",
+availableBehaviors = [],
+...rest
+} = details;
+let description = details.description;
+if (!description) {
+description = `
+Behavior Error
+
+Requested: ${behavior}
+Available: ${availableBehaviors.join(", ") || "body-overflow, body-only, truncate-data, enforce-limits, user-managed"}
+
+Possible causes:
+1. Behavior name misspelled
+2. Custom behavior not registered
+
+Solution:
+Use one of the available behaviors or register custom behavior.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#behaviors
+`.trim();
+}
+super(message, {
+...rest,
+behavior,
+availableBehaviors,
+description
+});
+}
+}
+class StreamError extends S3dbError {
+constructor(message, details = {}) {
+const {
+operation = "unknown",
+resource,
+...rest
+} = details;
+let description = details.description;
+if (!description) {
+description = `
+Stream Error
+
+Operation: ${operation}
+${resource ? `Resource: ${resource}` : ""}
+
+Possible causes:
+1. Stream not properly initialized
+2. Resource not available
+3. Network error during streaming
+
+Solution:
+Check stream configuration and resource availability.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#streaming
+`.trim();
+}
+super(message, {
+...rest,
+operation,
+resource,
+description
+});
+}
+}
+class MetadataLimitError extends S3dbError {
+constructor(message, details = {}) {
+const {
+totalSize,
+effectiveLimit,
+absoluteLimit = 2047,
+excess,
+resourceName,
+operation,
+...rest
+} = details;
+let description = details.description;
+if (!description && totalSize && effectiveLimit) {
+description = `
+S3 Metadata Size Limit Exceeded
+
+Current Size: ${totalSize} bytes
+Effective Limit: ${effectiveLimit} bytes
+Absolute Limit: ${absoluteLimit} bytes
+${excess ? `Excess: ${excess} bytes` : ""}
+${resourceName ? `Resource: ${resourceName}` : ""}
+${operation ? `Operation: ${operation}` : ""}
+
+S3 has a hard limit of 2KB (2047 bytes) for object metadata.
+
+Solutions:
+1. Use 'body-overflow' behavior to store excess in body
+2. Use 'body-only' behavior to store everything in body
+3. Reduce number of fields
+4. Use shorter field values
+5. Enable advanced metadata encoding
+
+Example:
+await db.createResource({
+name: '${resourceName || "myResource"}',
+behavior: 'body-overflow', // Automatically handles overflow
+attributes: { ... }
+});
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#metadata-size-limits
+`.trim();
+}
+super(message, {
+...rest,
+totalSize,
+effectiveLimit,
+absoluteLimit,
+excess,
+resourceName,
+operation,
+description
 });
 }
 }

@@ -898,10 +1186,17 @@ class PluginStorage {
 */
 constructor(client, pluginSlug) {
 if (!client) {
-throw new
+throw new PluginStorageError("PluginStorage requires a client instance", {
+operation: "constructor",
+pluginSlug,
+suggestion: "Pass a valid S3db Client instance when creating PluginStorage"
+});
 }
 if (!pluginSlug) {
-throw new
+throw new PluginStorageError("PluginStorage requires a pluginSlug", {
+operation: "constructor",
+suggestion: 'Provide a plugin slug (e.g., "eventual-consistency", "cache", "audit")'
+});
 }
 this.client = client;
 this.pluginSlug = pluginSlug;

@@ -954,7 +1249,15 @@ class PluginStorage {
 }
 const [ok, err] = await tryFn(() => this.client.putObject(putParams));
 if (!ok) {
-throw new
+throw new PluginStorageError(`Failed to save plugin data`, {
+pluginSlug: this.pluginSlug,
+key,
+operation: "set",
+behavior,
+ttl,
+original: err,
+suggestion: "Check S3 permissions and key format"
+});
 }
 }
 /**

@@ -976,7 +1279,13 @@ class PluginStorage {
 if (err.name === "NoSuchKey" || err.Code === "NoSuchKey") {
 return null;
 }
-throw new
+throw new PluginStorageError(`Failed to retrieve plugin data`, {
+pluginSlug: this.pluginSlug,
+key,
+operation: "get",
+original: err,
+suggestion: "Check if the key exists and S3 permissions are correct"
+});
 }
 const metadata = response.Metadata || {};
 const parsedMetadata = this._parseMetadataValues(metadata);

@@ -989,7 +1298,13 @@ class PluginStorage {
 data = { ...parsedMetadata, ...body };
 }
 } catch (parseErr) {
-throw new
+throw new PluginStorageError(`Failed to parse JSON body`, {
+pluginSlug: this.pluginSlug,
+key,
+operation: "get",
+original: parseErr,
+suggestion: "Body content may be corrupted. Check S3 object integrity"
+});
 }
 }
 const expiresAt = data._expiresat || data._expiresAt;

@@ -1050,7 +1365,15 @@ class PluginStorage {
 () => this.client.listObjects({ prefix: fullPrefix, maxKeys: limit })
 );
 if (!ok) {
-throw new
+throw new PluginStorageError(`Failed to list plugin data`, {
+pluginSlug: this.pluginSlug,
+operation: "list",
+prefix,
+fullPrefix,
+limit,
+original: err,
+suggestion: "Check S3 permissions and bucket configuration"
+});
 }
 const keys = result.Contents?.map((item) => item.Key) || [];
 return this._removeKeyPrefix(keys);

@@ -1070,7 +1393,16 @@ class PluginStorage {
 () => this.client.listObjects({ prefix: fullPrefix, maxKeys: limit })
 );
 if (!ok) {
-throw new
+throw new PluginStorageError(`Failed to list resource data`, {
+pluginSlug: this.pluginSlug,
+operation: "listForResource",
+resourceName,
+subPrefix,
+fullPrefix,
+limit,
+original: err,
+suggestion: "Check resource name and S3 permissions"
+});
 }
 const keys = result.Contents?.map((item) => item.Key) || [];
 return this._removeKeyPrefix(keys);

@@ -1210,7 +1542,13 @@ class PluginStorage {
 async delete(key) {
 const [ok, err] = await tryFn(() => this.client.deleteObject(key));
 if (!ok) {
-throw new
+throw new PluginStorageError(`Failed to delete plugin data`, {
+pluginSlug: this.pluginSlug,
+key,
+operation: "delete",
+original: err,
+suggestion: "Check S3 delete permissions"
+});
 }
 }
 /**

@@ -1397,16 +1735,28 @@ class PluginStorage {
 const valueSize = calculateUTF8Bytes(encoded);
 currentSize += keySize + valueSize;
 if (currentSize > effectiveLimit) {
-throw new
-
-
+throw new MetadataLimitError(`Data exceeds metadata limit with enforce-limits behavior`, {
+totalSize: currentSize,
+effectiveLimit,
+absoluteLimit: S3_METADATA_LIMIT,
+excess: currentSize - effectiveLimit,
+operation: "PluginStorage.set",
+pluginSlug: this.pluginSlug,
+suggestion: "Use 'body-overflow' or 'body-only' behavior to handle large data"
+});
 }
 metadata[key] = jsonValue;
 }
 break;
 }
 default:
-throw new
+throw new BehaviorError(`Unknown behavior: ${behavior}`, {
+behavior,
+availableBehaviors: ["body-overflow", "body-only", "enforce-limits"],
+operation: "PluginStorage._applyBehavior",
+pluginSlug: this.pluginSlug,
+suggestion: "Use 'body-overflow', 'body-only', or 'enforce-limits'"
+});
 }
 return { metadata, body };
 }
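The enforce-limits branch above now fails with a MetadataLimitError carrying the measured size, the effective limit, and the excess, while an unrecognized behavior raises BehaviorError. A hedged sketch of reacting to the size error follows; the `storage.set(key, value, { behavior })` call shape and whether the size fields sit on the error instance or under `err.data` (that depends on S3dbError, which this diff does not show) are assumptions.

// Hypothetical plugin code; `storage` stands for a PluginStorage instance.
async function persistSummary(storage, summary) {
  try {
    await storage.set("state/summary", summary, { behavior: "enforce-limits" });
  } catch (err) {
    const info = err.data ?? err; // size details may live under .data, see note above
    if (info.effectiveLimit != null && info.excess != null) {
      // Over the S3 metadata budget: retry with the overflow behavior instead of failing.
      console.warn(`metadata over limit by ${info.excess} bytes (limit ${info.effectiveLimit}); retrying with body-overflow`);
      await storage.set("state/summary", summary, { behavior: "body-overflow" });
    } else {
      throw err;
    }
  }
}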
@@ -1971,6 +2321,35 @@ class AuditPlugin extends Plugin {
 }
 }

+class BackupError extends S3dbError {
+constructor(message, details = {}) {
+const { driver = "unknown", operation = "unknown", backupId, ...rest } = details;
+let description = details.description;
+if (!description) {
+description = `
+Backup Operation Error
+
+Driver: ${driver}
+Operation: ${operation}
+${backupId ? `Backup ID: ${backupId}` : ""}
+
+Common causes:
+1. Invalid backup driver configuration
+2. Destination storage not accessible
+3. Insufficient permissions
+4. Network connectivity issues
+5. Invalid backup file format
+
+Solution:
+Check driver configuration and ensure destination storage is accessible.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/backup.md
+`.trim();
+}
+super(message, { ...rest, driver, operation, backupId, description });
+}
+}
+
 class BaseBackupDriver {
 constructor(config = {}) {
 this.config = {
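BackupError standardizes what backup failures carry: driver, operation, an optional backupId, and a multi-line description. A hedged sketch of logging those fields around a backup run follows; the `backupPlugin.backup()` call and whether the fields land on the instance or under `err.data` are assumptions, not shown in this diff.

// Hedged sketch; `backupPlugin` stands for a configured BackupPlugin instance.
async function runBackupWithLogging(backupPlugin) {
  try {
    return await backupPlugin.backup();
  } catch (err) {
    const info = err.data ?? err; // field location depends on S3dbError (not in this diff)
    console.error(`backup failed: driver=${info.driver} operation=${info.operation}` +
      (info.backupId ? ` backupId=${info.backupId}` : ""));
    if (err.description) console.error(err.description); // troubleshooting text built above
    throw err;
  }
}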
@@ -2001,7 +2380,12 @@ class BaseBackupDriver {
 * @returns {Object} Upload result with destination info
 */
 async upload(filePath, backupId, manifest) {
-throw new
+throw new BackupError("upload() method must be implemented by subclass", {
+operation: "upload",
+driver: this.constructor.name,
+backupId,
+suggestion: "Extend BaseBackupDriver and implement the upload() method"
+});
 }
 /**
 * Download a backup file from the destination

@@ -2011,7 +2395,12 @@ class BaseBackupDriver {
 * @returns {string} Path to downloaded file
 */
 async download(backupId, targetPath, metadata) {
-throw new
+throw new BackupError("download() method must be implemented by subclass", {
+operation: "download",
+driver: this.constructor.name,
+backupId,
+suggestion: "Extend BaseBackupDriver and implement the download() method"
+});
 }
 /**
 * Delete a backup from the destination

@@ -2019,7 +2408,12 @@ class BaseBackupDriver {
 * @param {Object} metadata - Backup metadata
 */
 async delete(backupId, metadata) {
-throw new
+throw new BackupError("delete() method must be implemented by subclass", {
+operation: "delete",
+driver: this.constructor.name,
+backupId,
+suggestion: "Extend BaseBackupDriver and implement the delete() method"
+});
 }
 /**
 * List backups available in the destination

@@ -2027,7 +2421,11 @@ class BaseBackupDriver {
 * @returns {Array} List of backup metadata
 */
 async list(options = {}) {
-throw new
+throw new BackupError("list() method must be implemented by subclass", {
+operation: "list",
+driver: this.constructor.name,
+suggestion: "Extend BaseBackupDriver and implement the list() method"
+});
 }
 /**
 * Verify backup integrity

@@ -2037,14 +2435,23 @@ class BaseBackupDriver {
 * @returns {boolean} True if backup is valid
 */
 async verify(backupId, expectedChecksum, metadata) {
-throw new
+throw new BackupError("verify() method must be implemented by subclass", {
+operation: "verify",
+driver: this.constructor.name,
+backupId,
+suggestion: "Extend BaseBackupDriver and implement the verify() method"
+});
 }
 /**
 * Get driver type identifier
 * @returns {string} Driver type
 */
 getType() {
-throw new
+throw new BackupError("getType() method must be implemented by subclass", {
+operation: "getType",
+driver: this.constructor.name,
+suggestion: "Extend BaseBackupDriver and implement the getType() method"
+});
 }
 /**
 * Get driver-specific storage info

@@ -2086,7 +2493,11 @@ class FilesystemBackupDriver extends BaseBackupDriver {
 }
 async onSetup() {
 if (!this.config.path) {
-throw new
+throw new BackupError("FilesystemBackupDriver: path configuration is required", {
+operation: "onSetup",
+driver: "filesystem",
+suggestion: 'Provide a path in config: new FilesystemBackupDriver({ path: "/path/to/backups" })'
+});
 }
 this.log(`Initialized with path: ${this.config.path}`);
 }

@@ -2110,11 +2521,26 @@ class FilesystemBackupDriver extends BaseBackupDriver {
 () => mkdir(targetDir, { recursive: true, mode: this.config.directoryPermissions })
 );
 if (!createDirOk) {
-throw new
+throw new BackupError("Failed to create backup directory", {
+operation: "upload",
+driver: "filesystem",
+backupId,
+targetDir,
+original: createDirErr,
+suggestion: "Check directory permissions and disk space"
+});
 }
 const [copyOk, copyErr] = await tryFn(() => copyFile(filePath, targetPath));
 if (!copyOk) {
-throw new
+throw new BackupError("Failed to copy backup file", {
+operation: "upload",
+driver: "filesystem",
+backupId,
+filePath,
+targetPath,
+original: copyErr,
+suggestion: "Check file permissions and disk space"
+});
 }
 const [manifestOk, manifestErr] = await tryFn(
 () => import('fs/promises').then((fs) => fs.writeFile(

@@ -2125,7 +2551,14 @@ class FilesystemBackupDriver extends BaseBackupDriver {
 );
 if (!manifestOk) {
 await tryFn(() => unlink(targetPath));
-throw new
+throw new BackupError("Failed to write manifest file", {
+operation: "upload",
+driver: "filesystem",
+backupId,
+manifestPath,
+original: manifestErr,
+suggestion: "Check directory permissions and disk space"
+});
 }
 const [statOk, , stats] = await tryFn(() => stat(targetPath));
 const size = statOk ? stats.size : 0;

@@ -2144,13 +2577,27 @@ class FilesystemBackupDriver extends BaseBackupDriver {
 );
 const [existsOk] = await tryFn(() => access(sourcePath));
 if (!existsOk) {
-throw new
+throw new BackupError("Backup file not found", {
+operation: "download",
+driver: "filesystem",
+backupId,
+sourcePath,
+suggestion: "Check if backup exists using list() method"
+});
 }
 const targetDir = path.dirname(targetPath);
 await tryFn(() => mkdir(targetDir, { recursive: true }));
 const [copyOk, copyErr] = await tryFn(() => copyFile(sourcePath, targetPath));
 if (!copyOk) {
-throw new
+throw new BackupError("Failed to download backup", {
+operation: "download",
+driver: "filesystem",
+backupId,
+sourcePath,
+targetPath,
+original: copyErr,
+suggestion: "Check file permissions and disk space"
+});
 }
 this.log(`Downloaded backup ${backupId} from ${sourcePath} to ${targetPath}`);
 return targetPath;

@@ -2167,7 +2614,14 @@ class FilesystemBackupDriver extends BaseBackupDriver {
 const [deleteBackupOk] = await tryFn(() => unlink(backupPath));
 const [deleteManifestOk] = await tryFn(() => unlink(manifestPath));
 if (!deleteBackupOk && !deleteManifestOk) {
-throw new
+throw new BackupError("Failed to delete backup files", {
+operation: "delete",
+driver: "filesystem",
+backupId,
+backupPath,
+manifestPath,
+suggestion: "Check file permissions"
+});
 }
 this.log(`Deleted backup ${backupId}`);
 }

@@ -2272,10 +2726,18 @@ class S3BackupDriver extends BaseBackupDriver {
 this.config.bucket = this.database.bucket;
 }
 if (!this.config.client) {
-throw new
+throw new BackupError("S3BackupDriver: client is required", {
+operation: "onSetup",
+driver: "s3",
+suggestion: "Provide a client in config or ensure database has a client configured"
+});
 }
 if (!this.config.bucket) {
-throw new
+throw new BackupError("S3BackupDriver: bucket is required", {
+operation: "onSetup",
+driver: "s3",
+suggestion: "Provide a bucket in config or ensure database has a bucket configured"
+});
 }
 this.log(`Initialized with bucket: ${this.config.bucket}, path: ${this.config.path}`);
 }

@@ -2317,7 +2779,15 @@ class S3BackupDriver extends BaseBackupDriver {
 });
 });
 if (!uploadOk) {
-throw new
+throw new BackupError("Failed to upload backup file to S3", {
+operation: "upload",
+driver: "s3",
+backupId,
+bucket: this.config.bucket,
+key: backupKey,
+original: uploadErr,
+suggestion: "Check S3 permissions and bucket configuration"
+});
 }
 const [manifestOk, manifestErr] = await tryFn(
 () => this.config.client.uploadObject({

@@ -2338,7 +2808,15 @@ class S3BackupDriver extends BaseBackupDriver {
 bucket: this.config.bucket,
 key: backupKey
 }));
-throw new
+throw new BackupError("Failed to upload manifest to S3", {
+operation: "upload",
+driver: "s3",
+backupId,
+bucket: this.config.bucket,
+manifestKey,
+original: manifestErr,
+suggestion: "Check S3 permissions and bucket configuration"
+});
 }
 this.log(`Uploaded backup ${backupId} to s3://${this.config.bucket}/${backupKey} (${fileSize} bytes)`);
 return {

@@ -2361,7 +2839,16 @@ class S3BackupDriver extends BaseBackupDriver {
 })
 );
 if (!downloadOk) {
-throw new
+throw new BackupError("Failed to download backup from S3", {
+operation: "download",
+driver: "s3",
+backupId,
+bucket: this.config.bucket,
+key: backupKey,
+targetPath,
+original: downloadErr,
+suggestion: "Check if backup exists and S3 permissions are correct"
+});
 }
 this.log(`Downloaded backup ${backupId} from s3://${this.config.bucket}/${backupKey} to ${targetPath}`);
 return targetPath;

@@ -2382,7 +2869,15 @@ class S3BackupDriver extends BaseBackupDriver {
 })
 );
 if (!deleteBackupOk && !deleteManifestOk) {
-throw new
+throw new BackupError("Failed to delete backup from S3", {
+operation: "delete",
+driver: "s3",
+backupId,
+bucket: this.config.bucket,
+backupKey,
+manifestKey,
+suggestion: "Check S3 delete permissions"
+});
 }
 this.log(`Deleted backup ${backupId} from S3`);
 }

@@ -2495,11 +2990,22 @@ class MultiBackupDriver extends BaseBackupDriver {
 }
 async onSetup() {
 if (!Array.isArray(this.config.destinations) || this.config.destinations.length === 0) {
-throw new
+throw new BackupError("MultiBackupDriver requires non-empty destinations array", {
+operation: "onSetup",
+driver: "multi",
+destinationsProvided: this.config.destinations,
+suggestion: 'Provide destinations array: { destinations: [{ driver: "s3", config: {...} }, { driver: "filesystem", config: {...} }] }'
+});
 }
 for (const [index, destConfig] of this.config.destinations.entries()) {
 if (!destConfig.driver) {
-throw new
+throw new BackupError(`Destination ${index} missing driver type`, {
+operation: "onSetup",
+driver: "multi",
+destinationIndex: index,
+destination: destConfig,
+suggestion: 'Each destination must have a driver property: { driver: "s3", config: {...} } or { driver: "filesystem", config: {...} }'
+});
 }
 try {
 const driver = createBackupDriver(destConfig.driver, destConfig.config || {});

@@ -2511,7 +3017,15 @@ class MultiBackupDriver extends BaseBackupDriver {
 });
 this.log(`Setup destination ${index}: ${destConfig.driver}`);
 } catch (error) {
-throw new
+throw new BackupError(`Failed to setup destination ${index}`, {
+operation: "onSetup",
+driver: "multi",
+destinationIndex: index,
+destinationDriver: destConfig.driver,
+destinationConfig: destConfig.config,
+original: error,
+suggestion: "Check destination driver configuration and ensure dependencies are available"
+});
 }
 }
 if (this.config.requireAll === false) {

@@ -2540,7 +3054,15 @@ class MultiBackupDriver extends BaseBackupDriver {
 this.log(`Priority upload failed to destination ${index}: ${err.message}`);
 }
 }
-throw new
+throw new BackupError("All priority destinations failed", {
+operation: "upload",
+driver: "multi",
+strategy: "priority",
+backupId,
+totalDestinations: this.drivers.length,
+failures: errors,
+suggestion: "Check destination configurations and ensure at least one destination is accessible"
+});
 }
 const uploadPromises = this.drivers.map(async ({ driver, config, index }) => {
 const [ok, err, result] = await tryFn(

@@ -2570,10 +3092,28 @@ class MultiBackupDriver extends BaseBackupDriver {
 const successResults = allResults.filter((r) => r.status === "success");
 const failedResults = allResults.filter((r) => r.status === "failed");
 if (strategy === "all" && failedResults.length > 0) {
-throw new
+throw new BackupError('Some destinations failed with strategy "all"', {
+operation: "upload",
+driver: "multi",
+strategy: "all",
+backupId,
+totalDestinations: this.drivers.length,
+successCount: successResults.length,
+failedCount: failedResults.length,
+failures: failedResults,
+suggestion: 'All destinations must succeed with "all" strategy. Use "any" strategy to tolerate failures, or fix failing destinations.'
+});
 }
 if (strategy === "any" && successResults.length === 0) {
-throw new
+throw new BackupError('All destinations failed with strategy "any"', {
+operation: "upload",
+driver: "multi",
+strategy: "any",
+backupId,
+totalDestinations: this.drivers.length,
+failures: failedResults,
+suggestion: 'At least one destination must succeed with "any" strategy. Check all destination configurations.'
+});
 }
 return allResults;
 }

@@ -2593,7 +3133,14 @@ class MultiBackupDriver extends BaseBackupDriver {
 this.log(`Download failed from destination ${destMetadata.destination}: ${err.message}`);
 }
 }
-throw new
+throw new BackupError("Failed to download backup from any destination", {
+operation: "download",
+driver: "multi",
+backupId,
+targetPath,
+attemptedDestinations: destinations.length,
+suggestion: "Check if backup exists in at least one destination and destinations are accessible"
+});
 }
 async delete(backupId, metadata) {
 const destinations = Array.isArray(metadata.destinations) ? metadata.destinations : [metadata];

@@ -2615,7 +3162,14 @@ class MultiBackupDriver extends BaseBackupDriver {
 }
 }
 if (successCount === 0 && errors.length > 0) {
-throw new
+throw new BackupError("Failed to delete from any destination", {
+operation: "delete",
+driver: "multi",
+backupId,
+attemptedDestinations: destinations.length,
+failures: errors,
+suggestion: "Check if backup exists in destinations and destinations are accessible with delete permissions"
+});
 }
 if (errors.length > 0) {
 this.log(`Partial delete success, some errors: ${errors.join("; ")}`);

@@ -2715,32 +3269,62 @@ const BACKUP_DRIVERS = {
 function createBackupDriver(driver, config = {}) {
 const DriverClass = BACKUP_DRIVERS[driver];
 if (!DriverClass) {
-throw new
+throw new BackupError(`Unknown backup driver: ${driver}`, {
+operation: "createBackupDriver",
+driver,
+availableDrivers: Object.keys(BACKUP_DRIVERS),
+suggestion: `Use one of the available drivers: ${Object.keys(BACKUP_DRIVERS).join(", ")}`
+});
 }
 return new DriverClass(config);
 }
 function validateBackupConfig(driver, config = {}) {
 if (!driver || typeof driver !== "string") {
-throw new
+throw new BackupError("Driver type must be a non-empty string", {
+operation: "validateBackupConfig",
+driver,
+suggestion: "Provide a valid driver type string (filesystem, s3, or multi)"
+});
 }
 if (!BACKUP_DRIVERS[driver]) {
-throw new
+throw new BackupError(`Unknown backup driver: ${driver}`, {
+operation: "validateBackupConfig",
+driver,
+availableDrivers: Object.keys(BACKUP_DRIVERS),
+suggestion: `Use one of the available drivers: ${Object.keys(BACKUP_DRIVERS).join(", ")}`
+});
 }
 switch (driver) {
 case "filesystem":
 if (!config.path) {
-throw new
+throw new BackupError('FilesystemBackupDriver requires "path" configuration', {
+operation: "validateBackupConfig",
+driver: "filesystem",
+config,
+suggestion: 'Provide a "path" property in config: { path: "/path/to/backups" }'
+});
 }
 break;
 case "s3":
 break;
 case "multi":
 if (!Array.isArray(config.destinations) || config.destinations.length === 0) {
-throw new
+throw new BackupError('MultiBackupDriver requires non-empty "destinations" array', {
+operation: "validateBackupConfig",
+driver: "multi",
+config,
+suggestion: 'Provide destinations array: { destinations: [{ driver: "s3", config: {...} }] }'
+});
 }
 config.destinations.forEach((dest, index) => {
 if (!dest.driver) {
-throw new
+throw new BackupError(`Destination ${index} must have a "driver" property`, {
+operation: "validateBackupConfig",
+driver: "multi",
+destinationIndex: index,
+destination: dest,
+suggestion: 'Each destination must have a driver property: { driver: "s3", config: {...} }'
+});
 }
 if (dest.driver !== "multi") {
 validateBackupConfig(dest.driver, dest.config || {});
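validateBackupConfig above spells out the shape the drivers expect: a driver name from BACKUP_DRIVERS, a "path" for the filesystem driver, and for "multi" a non-empty destinations array whose entries each name a driver. The hedged sketch below shows a configuration that passes those checks; whether BackupPlugin accepts exactly this option object is an assumption, only the validation rules themselves come from the diff.

// Hedged sketch of a backup configuration that satisfies validateBackupConfig above.
const backupDriver = "multi";
const backupConfig = {
  // "all" must succeed everywhere, "any" tolerates partial failure, "priority" tries in
  // order; the strategy handling is visible in MultiBackupDriver.upload above, but where
  // it is configured is assumed here.
  strategy: "any",
  destinations: [
    { driver: "s3", config: { path: "backups/" } },
    { driver: "filesystem", config: { path: "/var/backups/s3db" } },
  ],
};

// validateBackupConfig("filesystem", {}) would throw a BackupError asking for "path";
// the object above passes for "multi" because every destination names a known driver.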
@@ -3396,6 +3980,36 @@ class BackupPlugin extends Plugin {
|
|
|
3396
3980
|
}
|
|
3397
3981
|
}
|
|
3398
3982
|
|
|
3983
|
+
class CacheError extends S3dbError {
|
|
3984
|
+
constructor(message, details = {}) {
|
|
3985
|
+
const { driver = "unknown", operation = "unknown", resourceName, key, ...rest } = details;
|
|
3986
|
+
let description = details.description;
|
|
3987
|
+
if (!description) {
|
|
3988
|
+
description = `
|
|
3989
|
+
Cache Operation Error
|
|
3990
|
+
|
|
3991
|
+
Driver: ${driver}
|
|
3992
|
+
Operation: ${operation}
|
|
3993
|
+
${resourceName ? `Resource: ${resourceName}` : ""}
|
|
3994
|
+
${key ? `Key: ${key}` : ""}
|
|
3995
|
+
|
|
3996
|
+
Common causes:
|
|
3997
|
+
1. Invalid cache key format
|
|
3998
|
+
2. Cache driver not properly initialized
|
|
3999
|
+
3. Resource not found or not cached
|
|
4000
|
+
4. Memory limits exceeded
|
|
4001
|
+
5. Filesystem permissions issues
|
|
4002
|
+
|
|
4003
|
+
Solution:
|
|
4004
|
+
Check cache configuration and ensure the cache driver is properly initialized.
|
|
4005
|
+
|
|
4006
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/cache.md
|
|
4007
|
+
`.trim();
|
|
4008
|
+
}
|
|
4009
|
+
super(message, { ...rest, driver, operation, resourceName, key, description });
|
|
4010
|
+
}
|
|
4011
|
+
}
|
|
4012
|
+
|
|
3399
4013
|
class Cache extends EventEmitter {
|
|
3400
4014
|
constructor(config = {}) {
|
|
3401
4015
|
super();
|
|
@@ -3412,7 +4026,13 @@ class Cache extends EventEmitter {
|
|
|
3412
4026
|
}
|
|
3413
4027
|
validateKey(key) {
|
|
3414
4028
|
if (key === null || key === void 0 || typeof key !== "string" || !key) {
|
|
3415
|
-
throw new
|
|
4029
|
+
throw new CacheError("Invalid cache key", {
|
|
4030
|
+
operation: "validateKey",
|
|
4031
|
+
driver: this.constructor.name,
|
|
4032
|
+
key,
|
|
4033
|
+
keyType: typeof key,
|
|
4034
|
+
suggestion: "Cache key must be a non-empty string"
|
|
4035
|
+
});
|
|
3416
4036
|
}
|
|
3417
4037
|
}
|
|
3418
4038
|
// generic class methods
|
|
@@ -3499,7 +4119,11 @@ class ResourceReader extends EventEmitter {
|
|
|
3499
4119
|
constructor({ resource, batchSize = 10, concurrency = 5 }) {
|
|
3500
4120
|
super();
|
|
3501
4121
|
if (!resource) {
|
|
3502
|
-
throw new
|
|
4122
|
+
throw new StreamError("Resource is required for ResourceReader", {
|
|
4123
|
+
operation: "constructor",
|
|
4124
|
+
resource: resource?.name,
|
|
4125
|
+
suggestion: "Pass a valid Resource instance when creating ResourceReader"
|
|
4126
|
+
});
|
|
3503
4127
|
}
|
|
3504
4128
|
this.resource = resource;
|
|
3505
4129
|
this.client = resource.client;
|
|
@@ -3623,7 +4247,10 @@ class ResourceWriter extends EventEmitter {
|
|
|
3623
4247
|
function streamToString(stream) {
|
|
3624
4248
|
return new Promise((resolve, reject) => {
|
|
3625
4249
|
if (!stream) {
|
|
3626
|
-
return reject(new
|
|
4250
|
+
return reject(new StreamError("Stream is undefined", {
|
|
4251
|
+
operation: "streamToString",
|
|
4252
|
+
suggestion: "Ensure a valid stream is passed to streamToString()"
|
|
4253
|
+
}));
|
|
3627
4254
|
}
|
|
3628
4255
|
const chunks = [];
|
|
3629
4256
|
stream.on("data", (chunk) => chunks.push(chunk));
|
|
@@ -5117,7 +5744,13 @@ class CachePlugin extends Plugin {
|
|
|
5117
5744
|
async warmCache(resourceName, options = {}) {
|
|
5118
5745
|
const resource = this.database.resources[resourceName];
|
|
5119
5746
|
if (!resource) {
|
|
5120
|
-
throw new
|
|
5747
|
+
throw new CacheError("Resource not found for cache warming", {
|
|
5748
|
+
operation: "warmCache",
|
|
5749
|
+
driver: this.driver?.constructor.name,
|
|
5750
|
+
resourceName,
|
|
5751
|
+
availableResources: Object.keys(this.database.resources),
|
|
5752
|
+
suggestion: "Check resource name spelling or ensure resource has been created"
|
|
5753
|
+
});
|
|
5121
5754
|
}
|
|
5122
5755
|
const { includePartitions = true, sampleSize = 100 } = options;
|
|
5123
5756
|
if (this.driver instanceof PartitionAwareFilesystemCache && resource.warmPartitionCache) {
|
|
@@ -8234,6 +8867,35 @@ class EventualConsistencyPlugin extends Plugin {
|
|
|
8234
8867
|
}
|
|
8235
8868
|
}
|
|
8236
8869
|
|
|
8870
|
+
class FulltextError extends S3dbError {
|
|
8871
|
+
constructor(message, details = {}) {
|
|
8872
|
+
const { resourceName, query, operation = "unknown", ...rest } = details;
|
|
8873
|
+
let description = details.description;
|
|
8874
|
+
if (!description) {
|
|
8875
|
+
description = `
|
|
8876
|
+
Fulltext Search Operation Error
|
|
8877
|
+
|
|
8878
|
+
Operation: ${operation}
|
|
8879
|
+
${resourceName ? `Resource: ${resourceName}` : ""}
|
|
8880
|
+
${query ? `Query: ${query}` : ""}
|
|
8881
|
+
|
|
8882
|
+
Common causes:
|
|
8883
|
+
1. Resource not indexed for fulltext search
|
|
8884
|
+
2. Invalid query syntax
|
|
8885
|
+
3. Index not built yet
|
|
8886
|
+
4. Search configuration missing
|
|
8887
|
+
5. Field not indexed
|
|
8888
|
+
|
|
8889
|
+
Solution:
|
|
8890
|
+
+Ensure resource is configured for fulltext search and index is built.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/fulltext.md
+      `.trim();
+    }
+    super(message, { ...rest, resourceName, query, operation, description });
+  }
+}
+
 class FullTextPlugin extends Plugin {
   constructor(options = {}) {
     super();
@@ -8540,7 +9202,13 @@ class FullTextPlugin extends Plugin {
     }
     const resource = this.database.resources[resourceName];
     if (!resource) {
-      throw new
+      throw new FulltextError(`Resource '${resourceName}' not found`, {
+        operation: "searchRecords",
+        resourceName,
+        query,
+        availableResources: Object.keys(this.database.resources),
+        suggestion: "Check resource name or ensure resource is created before searching"
+      });
     }
     const recordIds = searchResults.map((result2) => result2.recordId);
     const records = await resource.getMany(recordIds);
@@ -8557,7 +9225,12 @@ class FullTextPlugin extends Plugin {
   async rebuildIndex(resourceName) {
     const resource = this.database.resources[resourceName];
     if (!resource) {
-      throw new
+      throw new FulltextError(`Resource '${resourceName}' not found`, {
+        operation: "rebuildIndex",
+        resourceName,
+        availableResources: Object.keys(this.database.resources),
+        suggestion: "Check resource name or ensure resource is created before rebuilding index"
+      });
     }
     for (const [key] of this.indexes.entries()) {
       if (key.startsWith(`${resourceName}:`)) {
@@ -9342,6 +10015,35 @@ function createConsumer(driver, config) {
   return new ConsumerClass(config);
 }
 
+class QueueError extends S3dbError {
+  constructor(message, details = {}) {
+    const { queueName, operation = "unknown", messageId, ...rest } = details;
+    let description = details.description;
+    if (!description) {
+      description = `
+Queue Operation Error
+
+Operation: ${operation}
+${queueName ? `Queue: ${queueName}` : ""}
+${messageId ? `Message ID: ${messageId}` : ""}
+
+Common causes:
+1. Queue not properly configured
+2. Message handler not registered
+3. Queue resource not found
+4. SQS/RabbitMQ connection failed
+5. Message processing timeout
+
+Solution:
+Check queue configuration and message handler registration.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/queue.md
+      `.trim();
+    }
+    super(message, { ...rest, queueName, operation, messageId, description });
+  }
+}
+
 class QueueConsumerPlugin extends Plugin {
   constructor(options = {}) {
     super(options);
@@ -9402,13 +10104,32 @@ class QueueConsumerPlugin extends Plugin {
     let action = body.action || msg.action;
     let data = body.data || msg.data;
     if (!resource) {
-      throw new
+      throw new QueueError("Resource not found in message", {
+        operation: "handleMessage",
+        queueName: configuredResource,
+        messageBody: body,
+        suggestion: 'Ensure message includes a "resource" field specifying the target resource name'
+      });
     }
     if (!action) {
-      throw new
+      throw new QueueError("Action not found in message", {
+        operation: "handleMessage",
+        queueName: configuredResource,
+        resource,
+        messageBody: body,
+        suggestion: 'Ensure message includes an "action" field (insert, update, or delete)'
+      });
     }
     const resourceObj = this.database.resources[resource];
-    if (!resourceObj)
+    if (!resourceObj) {
+      throw new QueueError(`Resource '${resource}' not found`, {
+        operation: "handleMessage",
+        queueName: configuredResource,
+        resource,
+        availableResources: Object.keys(this.database.resources),
+        suggestion: "Check resource name or ensure resource is created before consuming messages"
+      });
+    }
     let result;
     const [ok, err, res] = await tryFn(async () => {
       if (action === "insert") {
@@ -9419,7 +10140,14 @@ class QueueConsumerPlugin extends Plugin {
       } else if (action === "delete") {
         result = await resourceObj.delete(data.id);
       } else {
-        throw new
+        throw new QueueError(`Unsupported action '${action}'`, {
+          operation: "handleMessage",
+          queueName: configuredResource,
+          resource,
+          action,
+          supportedActions: ["insert", "update", "delete"],
+          suggestion: "Use one of the supported actions: insert, update, or delete"
+        });
       }
       return result;
     });
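For reference, the handleMessage changes above expect consumed messages to carry resource, action, and data fields; the new QueueError suggestions spell this shape out. A minimal sketch of a message body that would pass these checks (resource name and payload are placeholders, not from the package):

// Hypothetical message published to a queue consumed by QueueConsumerPlugin
const message = {
  resource: "users",                      // must match a resource created in the database
  action: "insert",                       // one of: insert, update, delete
  data: { id: "u1", name: "Alice" }       // delete only needs data.id
};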
@@ -9432,6 +10160,35 @@ class QueueConsumerPlugin extends Plugin {
   }
 }
 
+class ReplicationError extends S3dbError {
+  constructor(message, details = {}) {
+    const { replicatorClass = "unknown", operation = "unknown", resourceName, ...rest } = details;
+    let description = details.description;
+    if (!description) {
+      description = `
+Replication Operation Error
+
+Replicator: ${replicatorClass}
+Operation: ${operation}
+${resourceName ? `Resource: ${resourceName}` : ""}
+
+Common causes:
+1. Invalid replicator configuration
+2. Target system not accessible
+3. Resource not configured for replication
+4. Invalid operation type
+5. Transformation function errors
+
+Solution:
+Check replicator configuration and ensure target system is accessible.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/replicator.md
+      `.trim();
+    }
+    super(message, { ...rest, replicatorClass, operation, resourceName, description });
+  }
+}
+
 class BaseReplicator extends EventEmitter {
   constructor(config = {}) {
     super();
@@ -9457,7 +10214,12 @@ class BaseReplicator extends EventEmitter {
    * @returns {Promise<Object>} replicator result
    */
   async replicate(resourceName, operation, data, id) {
-    throw new
+    throw new ReplicationError("replicate() method must be implemented by subclass", {
+      operation: "replicate",
+      replicatorClass: this.name,
+      resourceName,
+      suggestion: "Extend BaseReplicator and implement the replicate() method"
+    });
   }
   /**
    * Replicate multiple records in batch
@@ -9466,14 +10228,24 @@ class BaseReplicator extends EventEmitter {
    * @returns {Promise<Object>} Batch replicator result
    */
   async replicateBatch(resourceName, records) {
-    throw new
+    throw new ReplicationError("replicateBatch() method must be implemented by subclass", {
+      operation: "replicateBatch",
+      replicatorClass: this.name,
+      resourceName,
+      batchSize: records?.length,
+      suggestion: "Extend BaseReplicator and implement the replicateBatch() method"
+    });
   }
   /**
    * Test the connection to the target
    * @returns {Promise<boolean>} True if connection is successful
    */
   async testConnection() {
-    throw new
+    throw new ReplicationError("testConnection() method must be implemented by subclass", {
+      operation: "testConnection",
+      replicatorClass: this.name,
+      suggestion: "Extend BaseReplicator and implement the testConnection() method"
+    });
   }
   /**
    * Get replicator status and statistics
@@ -10645,7 +11417,17 @@ class Client extends EventEmitter {
     });
     this.emit("moveAllObjects", { results, errors }, { prefixFrom, prefixTo });
     if (errors.length > 0) {
-      throw new
+      throw new UnknownError("Some objects could not be moved", {
+        bucket: this.config.bucket,
+        operation: "moveAllObjects",
+        prefixFrom,
+        prefixTo,
+        totalKeys: keys.length,
+        failedCount: errors.length,
+        successCount: results.length,
+        errors: errors.map((e) => ({ message: e.message, raw: e.raw })),
+        suggestion: "Check S3 permissions and retry failed objects individually"
+      });
     }
     return results;
   }
@@ -10754,6 +11536,11 @@ class Validator extends FastestValidator {
       type: "any",
       custom: this.autoEncrypt ? jsonHandler : void 0
     });
+    this.alias("embedding", {
+      type: "array",
+      items: "number",
+      empty: false
+    });
   }
 }
 const ValidatorManager = new Proxy(Validator, {
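The new "embedding" validator alias, together with the schema preprocessing further below, lets a resource declare a vector attribute with a shorthand type. A minimal sketch of what that declaration could look like (the resource and attribute names are illustrative; the "embedding:1536" shorthand is expanded into "array|items:number|length:1536|empty:false" by preprocessAttributesForValidation):

// Hypothetical resource using the new embedding attribute type
const products = await db.createResource({
  name: "products",
  attributes: {
    title: "string",
    vector: "embedding:1536"
  }
});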
@@ -11002,6 +11789,59 @@ const SchemaActions = {
       }
       return NaN;
     });
+  },
+  fromArrayOfEmbeddings: (value, { separator, precision = 6 }) => {
+    if (value === null || value === void 0 || !Array.isArray(value)) {
+      return value;
+    }
+    if (value.length === 0) {
+      return "";
+    }
+    const encodedItems = value.map((item) => {
+      if (typeof item === "number" && !isNaN(item)) {
+        return encodeFixedPoint(item, precision);
+      }
+      const n = Number(item);
+      return isNaN(n) ? "" : encodeFixedPoint(n, precision);
+    });
+    return encodedItems.join(separator);
+  },
+  toArrayOfEmbeddings: (value, { separator, precision = 6 }) => {
+    if (Array.isArray(value)) {
+      return value.map((v) => typeof v === "number" ? v : decodeFixedPoint(v, precision));
+    }
+    if (value === null || value === void 0) {
+      return value;
+    }
+    if (value === "") {
+      return [];
+    }
+    const str = String(value);
+    const items = [];
+    let current = "";
+    let i = 0;
+    while (i < str.length) {
+      if (str[i] === "\\" && i + 1 < str.length) {
+        current += str[i + 1];
+        i += 2;
+      } else if (str[i] === separator) {
+        items.push(current);
+        current = "";
+        i++;
+      } else {
+        current += str[i];
+        i++;
+      }
+    }
+    items.push(current);
+    return items.map((v) => {
+      if (typeof v === "number") return v;
+      if (typeof v === "string" && v !== "") {
+        const n = decodeFixedPoint(v, precision);
+        return isNaN(n) ? NaN : n;
+      }
+      return NaN;
+    });
   }
 };
 class Schema {
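The two new SchemaActions above round-trip a numeric vector through a compact fixed-point string so it fits in S3 metadata. A minimal sketch of that round trip, assuming direct access to SchemaActions (the "|" separator value is illustrative; the real separator is supplied by the schema when it registers the hooks):

// Encode an embedding for storage, then decode it back
const stored = SchemaActions.fromArrayOfEmbeddings([0.12, -0.5, 1], { separator: "|", precision: 6 });
// stored is a separator-joined string of fixed-point encoded values
const restored = SchemaActions.toArrayOfEmbeddings(stored, { separator: "|", precision: 6 });
// restored is approximately [0.12, -0.5, 1], within the 6-decimal precision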
@@ -11071,18 +11911,89 @@ class Schema {
     }
     return objectKeys;
   }
+  _generateHooksFromOriginalAttributes(attributes, prefix = "") {
+    for (const [key, value] of Object.entries(attributes)) {
+      if (key.startsWith("$$")) continue;
+      const fullKey = prefix ? `${prefix}.${key}` : key;
+      if (typeof value === "object" && value !== null && !Array.isArray(value) && value.type) {
+        if (value.type === "array" && value.items) {
+          const itemsType = value.items;
+          const arrayLength = typeof value.length === "number" ? value.length : null;
+          if (itemsType === "string" || typeof itemsType === "string" && itemsType.includes("string")) {
+            this.addHook("beforeMap", fullKey, "fromArray");
+            this.addHook("afterUnmap", fullKey, "toArray");
+          } else if (itemsType === "number" || typeof itemsType === "string" && itemsType.includes("number")) {
+            const isIntegerArray = typeof itemsType === "string" && itemsType.includes("integer");
+            const isEmbedding = !isIntegerArray && arrayLength !== null && arrayLength >= 256;
+            if (isIntegerArray) {
+              this.addHook("beforeMap", fullKey, "fromArrayOfNumbers");
+              this.addHook("afterUnmap", fullKey, "toArrayOfNumbers");
+            } else if (isEmbedding) {
+              this.addHook("beforeMap", fullKey, "fromArrayOfEmbeddings");
+              this.addHook("afterUnmap", fullKey, "toArrayOfEmbeddings");
+            } else {
+              this.addHook("beforeMap", fullKey, "fromArrayOfDecimals");
+              this.addHook("afterUnmap", fullKey, "toArrayOfDecimals");
+            }
+          }
+        }
+      } else if (typeof value === "object" && value !== null && !Array.isArray(value) && !value.type) {
+        this._generateHooksFromOriginalAttributes(value, fullKey);
+      }
+    }
+  }
   generateAutoHooks() {
+    this._generateHooksFromOriginalAttributes(this.attributes);
     const schema = flatten(cloneDeep(this.attributes), { safe: true });
     for (const [name, definition] of Object.entries(schema)) {
-      if (
-
+      if (name.includes("$$")) continue;
+      if (this.options.hooks.beforeMap[name] || this.options.hooks.afterUnmap[name]) {
+        continue;
+      }
+      const defStr = typeof definition === "string" ? definition : "";
+      const defType = typeof definition === "object" && definition !== null ? definition.type : null;
+      const isEmbeddingType = defStr.includes("embedding") || defType === "embedding";
+      if (isEmbeddingType) {
+        const lengthMatch = defStr.match(/embedding:(\d+)/);
+        if (lengthMatch) {
+          parseInt(lengthMatch[1], 10);
+        } else if (defStr.includes("length:")) {
+          const match = defStr.match(/length:(\d+)/);
+          if (match) parseInt(match[1], 10);
+        }
+        this.addHook("beforeMap", name, "fromArrayOfEmbeddings");
+        this.addHook("afterUnmap", name, "toArrayOfEmbeddings");
+        continue;
+      }
+      const isArray = defStr.includes("array") || defType === "array";
+      if (isArray) {
+        let itemsType = null;
+        if (typeof definition === "object" && definition !== null && definition.items) {
+          itemsType = definition.items;
+        } else if (defStr.includes("items:string")) {
+          itemsType = "string";
+        } else if (defStr.includes("items:number")) {
+          itemsType = "number";
+        }
+        if (itemsType === "string" || typeof itemsType === "string" && itemsType.includes("string")) {
          this.addHook("beforeMap", name, "fromArray");
          this.addHook("afterUnmap", name, "toArray");
-        } else if (
-          const isIntegerArray =
+        } else if (itemsType === "number" || typeof itemsType === "string" && itemsType.includes("number")) {
+          const isIntegerArray = defStr.includes("integer:true") || defStr.includes("|integer:") || defStr.includes("|integer") || typeof itemsType === "string" && itemsType.includes("integer");
+          let arrayLength = null;
+          if (typeof definition === "object" && definition !== null && typeof definition.length === "number") {
+            arrayLength = definition.length;
+          } else if (defStr.includes("length:")) {
+            const match = defStr.match(/length:(\d+)/);
+            if (match) arrayLength = parseInt(match[1], 10);
+          }
+          const isEmbedding = !isIntegerArray && arrayLength !== null && arrayLength >= 256;
          if (isIntegerArray) {
            this.addHook("beforeMap", name, "fromArrayOfNumbers");
            this.addHook("afterUnmap", name, "toArrayOfNumbers");
+          } else if (isEmbedding) {
+            this.addHook("beforeMap", name, "fromArrayOfEmbeddings");
+            this.addHook("afterUnmap", name, "toArrayOfEmbeddings");
          } else {
            this.addHook("beforeMap", name, "fromArrayOfDecimals");
            this.addHook("afterUnmap", name, "toArrayOfDecimals");
@@ -11090,7 +12001,7 @@ class Schema {
        }
        continue;
      }
-      if (
+      if (defStr.includes("secret") || defType === "secret") {
        if (this.options.autoEncrypt) {
          this.addHook("beforeMap", name, "encrypt");
        }
@@ -11099,8 +12010,8 @@ class Schema {
        }
        continue;
      }
-      if (
-        const isInteger =
+      if (defStr.includes("number") || defType === "number") {
+        const isInteger = defStr.includes("integer:true") || defStr.includes("|integer:") || defStr.includes("|integer");
        if (isInteger) {
          this.addHook("beforeMap", name, "toBase62");
          this.addHook("afterUnmap", name, "fromBase62");
@@ -11110,17 +12021,17 @@ class Schema {
        }
        continue;
      }
-      if (
+      if (defStr.includes("boolean") || defType === "boolean") {
        this.addHook("beforeMap", name, "fromBool");
        this.addHook("afterUnmap", name, "toBool");
        continue;
      }
-      if (
+      if (defStr.includes("json") || defType === "json") {
        this.addHook("beforeMap", name, "toJSON");
        this.addHook("afterUnmap", name, "fromJSON");
        continue;
      }
-      if (definition === "object" ||
+      if (definition === "object" || defStr.includes("object") || defType === "object") {
        this.addHook("beforeMap", name, "toJSON");
        this.addHook("afterUnmap", name, "fromJSON");
        continue;
@@ -11262,7 +12173,8 @@ class Schema {
      const originalKey = reversedMap && reversedMap[key] ? reversedMap[key] : key;
      let parsedValue = value;
      const attrDef = this.getAttributeDefinition(originalKey);
-
+      const hasAfterUnmapHook = this.options.hooks?.afterUnmap?.[originalKey];
+      if (!hasAfterUnmapHook && typeof attrDef === "string" && attrDef.includes("number") && !attrDef.includes("array") && !attrDef.includes("decimal")) {
        if (typeof parsedValue === "string" && parsedValue !== "") {
          parsedValue = decode(parsedValue);
        } else if (typeof parsedValue === "number") ; else {
@@ -11327,18 +12239,38 @@ class Schema {
  preprocessAttributesForValidation(attributes) {
    const processed = {};
    for (const [key, value] of Object.entries(attributes)) {
-      if (typeof value === "
-
-
-
-
-
-
-
-
-
+      if (typeof value === "string") {
+        if (value.startsWith("embedding:")) {
+          const lengthMatch = value.match(/embedding:(\d+)/);
+          if (lengthMatch) {
+            const length = lengthMatch[1];
+            const rest = value.substring(`embedding:${length}`.length);
+            processed[key] = `array|items:number|length:${length}|empty:false${rest}`;
+            continue;
+          }
+        }
+        if (value.startsWith("embedding|") || value === "embedding") {
+          processed[key] = value.replace(/^embedding/, "array|items:number|empty:false");
+          continue;
+        }
+        processed[key] = value;
+      } else if (typeof value === "object" && value !== null && !Array.isArray(value)) {
+        const hasValidatorType = value.type !== void 0 && key !== "$$type";
+        if (hasValidatorType) {
+          processed[key] = value;
+        } else {
+          const isExplicitRequired = value.$$type && value.$$type.includes("required");
+          const isExplicitOptional = value.$$type && value.$$type.includes("optional");
+          const objectConfig = {
+            type: "object",
+            properties: this.preprocessAttributesForValidation(value),
+            strict: false
+          };
+          if (isExplicitRequired) ; else if (isExplicitOptional || this.allNestedObjectsOptional) {
+            objectConfig.optional = true;
+          }
+          processed[key] = objectConfig;
        }
-        processed[key] = objectConfig;
      } else {
        processed[key] = value;
      }
@@ -11359,7 +12291,14 @@ async function handleInsert$4({ resource, data, mappedData, originalData }) {
    }
  });
  if (totalSize > effectiveLimit) {
-    throw new
+    throw new MetadataLimitError("Metadata size exceeds 2KB limit on insert", {
+      totalSize,
+      effectiveLimit,
+      absoluteLimit: S3_METADATA_LIMIT_BYTES,
+      excess: totalSize - effectiveLimit,
+      resourceName: resource.name,
+      operation: "insert"
+    });
  }
  return { mappedData, body: "" };
}
@@ -11374,7 +12313,15 @@ async function handleUpdate$4({ resource, id, data, mappedData, originalData })
    }
  });
  if (totalSize > effectiveLimit) {
-    throw new
+    throw new MetadataLimitError("Metadata size exceeds 2KB limit on update", {
+      totalSize,
+      effectiveLimit,
+      absoluteLimit: S3_METADATA_LIMIT_BYTES,
+      excess: totalSize - effectiveLimit,
+      resourceName: resource.name,
+      operation: "update",
+      id
+    });
  }
  return { mappedData, body: JSON.stringify(mappedData) };
}
@@ -11389,7 +12336,15 @@ async function handleUpsert$4({ resource, id, data, mappedData }) {
    }
  });
  if (totalSize > effectiveLimit) {
-    throw new
+    throw new MetadataLimitError("Metadata size exceeds 2KB limit on upsert", {
+      totalSize,
+      effectiveLimit,
+      absoluteLimit: S3_METADATA_LIMIT_BYTES,
+      excess: totalSize - effectiveLimit,
+      resourceName: resource.name,
+      operation: "upsert",
+      id
+    });
  }
  return { mappedData, body: "" };
}
@@ -11731,7 +12686,11 @@ const behaviors = {
 function getBehavior(behaviorName) {
  const behavior = behaviors[behaviorName];
  if (!behavior) {
-    throw new
+    throw new BehaviorError(`Unknown behavior: ${behaviorName}`, {
+      behavior: behaviorName,
+      availableBehaviors: Object.keys(behaviors),
+      operation: "getBehavior"
+    });
  }
  return behavior;
}
@@ -14255,7 +15214,7 @@ class Database extends EventEmitter {
    this.id = idGenerator(7);
    this.version = "1";
    this.s3dbVersion = (() => {
-      const [ok, err, version] = tryFn(() => true ? "11.2.
+      const [ok, err, version] = tryFn(() => true ? "11.2.5" : "latest");
      return ok ? version : "latest";
    })();
    this.resources = {};
@@ -14600,7 +15559,12 @@ class Database extends EventEmitter {
    const pluginName = name.toLowerCase().replace("plugin", "");
    const plugin = this.plugins[pluginName] || this.pluginRegistry[pluginName];
    if (!plugin) {
-      throw new
+      throw new DatabaseError(`Plugin '${name}' not found`, {
+        operation: "uninstallPlugin",
+        pluginName: name,
+        availablePlugins: Object.keys(this.pluginRegistry),
+        suggestion: "Check plugin name or list available plugins using Object.keys(db.pluginRegistry)"
+      });
    }
    if (plugin.stop) {
      await plugin.stop();
@@ -15233,10 +16197,20 @@ class Database extends EventEmitter {
  addHook(event, fn) {
    if (!this._hooks) this._initHooks();
    if (!this._hooks.has(event)) {
-      throw new
+      throw new DatabaseError(`Unknown hook event: ${event}`, {
+        operation: "addHook",
+        invalidEvent: event,
+        availableEvents: this._hookEvents,
+        suggestion: `Use one of the available hook events: ${this._hookEvents.join(", ")}`
+      });
    }
    if (typeof fn !== "function") {
-      throw new
+      throw new DatabaseError("Hook function must be a function", {
+        operation: "addHook",
+        event,
+        receivedType: typeof fn,
+        suggestion: "Provide a function that will be called when the hook event occurs"
+      });
    }
    this._hooks.get(event).push(fn);
  }
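The hardened addHook above now validates both the event name (against db._hookEvents) and the handler type before registering. A minimal sketch of a call that passes these checks; the event name here is a placeholder, the valid names are whatever db._hookEvents contains and are listed in the DatabaseError suggestion when a wrong one is used:

// Hypothetical hook registration; "beforeConnect" is an assumed event name
db.addHook("beforeConnect", async (ctx) => {
  console.log("about to connect", ctx);
});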
@@ -15374,7 +16348,11 @@ class S3dbReplicator extends BaseReplicator {
      this.targetDatabase = new S3db(targetConfig);
      await this.targetDatabase.connect();
    } else {
-      throw new
+      throw new ReplicationError("S3dbReplicator requires client or connectionString", {
+        operation: "initialize",
+        replicatorClass: "S3dbReplicator",
+        suggestion: 'Provide either a client instance or connectionString in config: { client: db } or { connectionString: "s3://..." }'
+      });
    }
    this.emit("connected", {
      replicator: this.name,
@@ -15405,7 +16383,13 @@ class S3dbReplicator extends BaseReplicator {
    const normResource = normalizeResourceName$1(resource);
    const entry = this.resourcesMap[normResource];
    if (!entry) {
-      throw new
+      throw new ReplicationError("Resource not configured for replication", {
+        operation: "replicate",
+        replicatorClass: "S3dbReplicator",
+        resourceName: resource,
+        configuredResources: Object.keys(this.resourcesMap),
+        suggestion: 'Add resource to replicator resources map: { resources: { [resourceName]: "destination" } }'
+      });
    }
    if (Array.isArray(entry)) {
      const results = [];
@@ -15473,7 +16457,14 @@ class S3dbReplicator extends BaseReplicator {
    } else if (operation === "delete") {
      result = await destResourceObj.delete(recordId);
    } else {
-      throw new
+      throw new ReplicationError(`Invalid replication operation: ${operation}`, {
+        operation: "replicate",
+        replicatorClass: "S3dbReplicator",
+        invalidOperation: operation,
+        supportedOperations: ["insert", "update", "delete"],
+        resourceName: sourceResource,
+        suggestion: "Use one of the supported operations: insert, update, delete"
+      });
    }
    return result;
  }
@@ -15541,7 +16532,13 @@ class S3dbReplicator extends BaseReplicator {
    const norm = normalizeResourceName$1(resource);
    const found = available.find((r) => normalizeResourceName$1(r) === norm);
    if (!found) {
-      throw new
+      throw new ReplicationError("Destination resource not found in target database", {
+        operation: "_getDestResourceObj",
+        replicatorClass: "S3dbReplicator",
+        destinationResource: resource,
+        availableResources: available,
+        suggestion: "Create the resource in target database or check resource name spelling"
+      });
    }
    return db.resources[found];
  }
@@ -15590,7 +16587,13 @@ class S3dbReplicator extends BaseReplicator {
  }
  async testConnection() {
    const [ok, err] = await tryFn(async () => {
-      if (!this.targetDatabase)
+      if (!this.targetDatabase) {
+        throw new ReplicationError("No target database configured for connection test", {
+          operation: "testConnection",
+          replicatorClass: "S3dbReplicator",
+          suggestion: "Initialize replicator with client or connectionString before testing connection"
+        });
+      }
      if (typeof this.targetDatabase.connect === "function") {
        await this.targetDatabase.connect();
      }
@@ -15977,7 +16980,12 @@ const REPLICATOR_DRIVERS = {
 function createReplicator(driver, config = {}, resources = [], client = null) {
  const ReplicatorClass = REPLICATOR_DRIVERS[driver];
  if (!ReplicatorClass) {
-    throw new
+    throw new ReplicationError(`Unknown replicator driver: ${driver}`, {
+      operation: "createReplicator",
+      driver,
+      availableDrivers: Object.keys(REPLICATOR_DRIVERS),
+      suggestion: `Use one of the available drivers: ${Object.keys(REPLICATOR_DRIVERS).join(", ")}`
+    });
  }
  return new ReplicatorClass(config, resources, client);
}
@@ -15989,12 +16997,40 @@ class ReplicatorPlugin extends Plugin {
  constructor(options = {}) {
    super();
    if (!options.replicators || !Array.isArray(options.replicators)) {
-      throw new
+      throw new ReplicationError("ReplicatorPlugin requires replicators array", {
+        operation: "constructor",
+        pluginName: "ReplicatorPlugin",
+        providedOptions: Object.keys(options),
+        suggestion: 'Provide replicators array: new ReplicatorPlugin({ replicators: [{ driver: "s3db", resources: [...] }] })'
+      });
    }
    for (const rep of options.replicators) {
-      if (!rep.driver)
-
-
+      if (!rep.driver) {
+        throw new ReplicationError("Each replicator must have a driver", {
+          operation: "constructor",
+          pluginName: "ReplicatorPlugin",
+          replicatorConfig: rep,
+          suggestion: 'Each replicator entry must specify a driver: { driver: "s3db", resources: {...} }'
+        });
+      }
+      if (!rep.resources || typeof rep.resources !== "object") {
+        throw new ReplicationError("Each replicator must have resources config", {
+          operation: "constructor",
+          pluginName: "ReplicatorPlugin",
+          driver: rep.driver,
+          replicatorConfig: rep,
+          suggestion: 'Provide resources as object or array: { driver: "s3db", resources: ["users"] } or { resources: { users: "people" } }'
+        });
+      }
+      if (Object.keys(rep.resources).length === 0) {
+        throw new ReplicationError("Each replicator must have at least one resource configured", {
+          operation: "constructor",
+          pluginName: "ReplicatorPlugin",
+          driver: rep.driver,
+          replicatorConfig: rep,
+          suggestion: 'Add at least one resource to replicate: { driver: "s3db", resources: ["users"] }'
+        });
+      }
    }
    this.config = {
      replicators: options.replicators || [],
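The stricter ReplicatorPlugin constructor above requires a replicators array where each entry has a driver and a non-empty resources config. A minimal sketch of a configuration that satisfies these checks, built from the shapes suggested in the new error messages (the connection string and resource names are placeholders):

// Hypothetical ReplicatorPlugin setup
const replicatorPlugin = new ReplicatorPlugin({
  replicators: [
    {
      driver: "s3db",
      connectionString: "s3://key:secret@bucket/prefix",
      resources: { users: "people" }   // or simply ["users"]
    }
  ]
});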
@@ -16420,7 +17456,13 @@ class ReplicatorPlugin extends Plugin {
  async syncAllData(replicatorId) {
    const replicator = this.replicators.find((r) => r.id === replicatorId);
    if (!replicator) {
-      throw new
+      throw new ReplicationError("Replicator not found", {
+        operation: "syncAllData",
+        pluginName: "ReplicatorPlugin",
+        replicatorId,
+        availableReplicators: this.replicators.map((r) => r.id),
+        suggestion: "Check replicator ID or use getReplicatorStats() to list available replicators"
+      });
    }
    this.stats.lastSync = (/* @__PURE__ */ new Date()).toISOString();
    for (const resourceName in this.database.resources) {
@@ -16950,6 +17992,35 @@ class S3QueuePlugin extends Plugin {
  }
 }
 
+class SchedulerError extends S3dbError {
+  constructor(message, details = {}) {
+    const { taskId, operation = "unknown", cronExpression, ...rest } = details;
+    let description = details.description;
+    if (!description) {
+      description = `
+Scheduler Operation Error
+
+Operation: ${operation}
+${taskId ? `Task ID: ${taskId}` : ""}
+${cronExpression ? `Cron: ${cronExpression}` : ""}
+
+Common causes:
+1. Invalid cron expression format
+2. Task not found or already exists
+3. Scheduler not properly initialized
+4. Job execution failure
+5. Resource conflicts
+
+Solution:
+Check task configuration and ensure scheduler is properly initialized.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/scheduler.md
+      `.trim();
+    }
+    super(message, { ...rest, taskId, operation, cronExpression, description });
+  }
+}
+
 class SchedulerPlugin extends Plugin {
  constructor(options = {}) {
    super();
@@ -16983,17 +18054,36 @@ class SchedulerPlugin extends Plugin {
  }
  _validateConfiguration() {
    if (Object.keys(this.config.jobs).length === 0) {
-      throw new
+      throw new SchedulerError("At least one job must be defined", {
+        operation: "validateConfiguration",
+        jobCount: 0,
+        suggestion: 'Provide at least one job in the jobs configuration: { jobs: { myJob: { schedule: "* * * * *", action: async () => {...} } } }'
+      });
    }
    for (const [jobName, job] of Object.entries(this.config.jobs)) {
      if (!job.schedule) {
-        throw new
+        throw new SchedulerError(`Job '${jobName}' must have a schedule`, {
+          operation: "validateConfiguration",
+          taskId: jobName,
+          providedConfig: Object.keys(job),
+          suggestion: 'Add a schedule property with a valid cron expression: { schedule: "0 * * * *", action: async () => {...} }'
+        });
      }
      if (!job.action || typeof job.action !== "function") {
-        throw new
+        throw new SchedulerError(`Job '${jobName}' must have an action function`, {
+          operation: "validateConfiguration",
+          taskId: jobName,
+          actionType: typeof job.action,
+          suggestion: 'Provide an action function: { schedule: "...", action: async (db, ctx) => {...} }'
+        });
      }
      if (!this._isValidCronExpression(job.schedule)) {
-        throw new
+        throw new SchedulerError(`Job '${jobName}' has invalid cron expression`, {
+          operation: "validateConfiguration",
+          taskId: jobName,
+          cronExpression: job.schedule,
+          suggestion: "Use valid cron format (5 fields: minute hour day month weekday) or shortcuts (@hourly, @daily, @weekly, @monthly, @yearly)"
+        });
      }
    }
  }
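The scheduler validation above requires every job to carry a schedule (5-field cron or an @shortcut) and an action function. A minimal sketch of a configuration that passes it, following the shape given in the SchedulerError suggestions (job name and action body are placeholders):

// Hypothetical SchedulerPlugin job definition
const scheduler = new SchedulerPlugin({
  jobs: {
    nightlyCleanup: {
      schedule: "0 3 * * *",            // or a shortcut such as "@daily"
      action: async (db, ctx) => {
        // placeholder work executed on each run
      }
    }
  }
});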
@@ -17291,10 +18381,20 @@ class SchedulerPlugin extends Plugin {
  async runJob(jobName, context = {}) {
    const job = this.jobs.get(jobName);
    if (!job) {
-      throw new
+      throw new SchedulerError(`Job '${jobName}' not found`, {
+        operation: "runJob",
+        taskId: jobName,
+        availableJobs: Array.from(this.jobs.keys()),
+        suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
+      });
    }
    if (this.activeJobs.has(jobName)) {
-      throw new
+      throw new SchedulerError(`Job '${jobName}' is already running`, {
+        operation: "runJob",
+        taskId: jobName,
+        executionId: this.activeJobs.get(jobName),
+        suggestion: "Wait for current execution to complete or check job status with getJobStatus()"
+      });
    }
    await this._executeJob(jobName);
  }
@@ -17304,7 +18404,12 @@ class SchedulerPlugin extends Plugin {
  enableJob(jobName) {
    const job = this.jobs.get(jobName);
    if (!job) {
-      throw new
+      throw new SchedulerError(`Job '${jobName}' not found`, {
+        operation: "enableJob",
+        taskId: jobName,
+        availableJobs: Array.from(this.jobs.keys()),
+        suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
+      });
    }
    job.enabled = true;
    this._scheduleNextExecution(jobName);
@@ -17316,7 +18421,12 @@ class SchedulerPlugin extends Plugin {
  disableJob(jobName) {
    const job = this.jobs.get(jobName);
    if (!job) {
-      throw new
+      throw new SchedulerError(`Job '${jobName}' not found`, {
+        operation: "disableJob",
+        taskId: jobName,
+        availableJobs: Array.from(this.jobs.keys()),
+        suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
+      });
    }
    job.enabled = false;
    const timer = this.timers.get(jobName);
@@ -17415,13 +18525,28 @@ class SchedulerPlugin extends Plugin {
   */
  addJob(jobName, jobConfig) {
    if (this.jobs.has(jobName)) {
-      throw new
+      throw new SchedulerError(`Job '${jobName}' already exists`, {
+        operation: "addJob",
+        taskId: jobName,
+        existingJobs: Array.from(this.jobs.keys()),
+        suggestion: "Use a different job name or remove the existing job first with removeJob()"
+      });
    }
    if (!jobConfig.schedule || !jobConfig.action) {
-      throw new
+      throw new SchedulerError("Job must have schedule and action", {
+        operation: "addJob",
+        taskId: jobName,
+        providedConfig: Object.keys(jobConfig),
+        suggestion: 'Provide both schedule and action: { schedule: "0 * * * *", action: async (db, ctx) => {...} }'
+      });
    }
    if (!this._isValidCronExpression(jobConfig.schedule)) {
-      throw new
+      throw new SchedulerError("Invalid cron expression", {
+        operation: "addJob",
+        taskId: jobName,
+        cronExpression: jobConfig.schedule,
+        suggestion: "Use valid cron format (5 fields) or shortcuts (@hourly, @daily, @weekly, @monthly, @yearly)"
+      });
    }
    const job = {
      ...jobConfig,
@@ -17455,7 +18580,12 @@ class SchedulerPlugin extends Plugin {
  removeJob(jobName) {
    const job = this.jobs.get(jobName);
    if (!job) {
-      throw new
+      throw new SchedulerError(`Job '${jobName}' not found`, {
+        operation: "removeJob",
+        taskId: jobName,
+        availableJobs: Array.from(this.jobs.keys()),
+        suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
+      });
    }
    const timer = this.timers.get(jobName);
    if (timer) {
@@ -17509,6 +18639,36 @@ class SchedulerPlugin extends Plugin {
  }
 }
 
+class StateMachineError extends S3dbError {
+  constructor(message, details = {}) {
+    const { currentState, targetState, resourceName, operation = "unknown", ...rest } = details;
+    let description = details.description;
+    if (!description) {
+      description = `
+State Machine Operation Error
+
+Operation: ${operation}
+${currentState ? `Current State: ${currentState}` : ""}
+${targetState ? `Target State: ${targetState}` : ""}
+${resourceName ? `Resource: ${resourceName}` : ""}
+
+Common causes:
+1. Invalid state transition
+2. State machine not configured
+3. Transition conditions not met
+4. State not defined in configuration
+5. Missing transition handler
+
+Solution:
+Check state machine configuration and valid transitions.
+
+Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/state-machine.md
+      `.trim();
+    }
+    super(message, { ...rest, currentState, targetState, resourceName, operation, description });
+  }
+}
+
 class StateMachinePlugin extends Plugin {
  constructor(options = {}) {
    super();
@@ -17529,17 +18689,36 @@ class StateMachinePlugin extends Plugin {
  }
  _validateConfiguration() {
    if (!this.config.stateMachines || Object.keys(this.config.stateMachines).length === 0) {
-      throw new
+      throw new StateMachineError("At least one state machine must be defined", {
+        operation: "validateConfiguration",
+        machineCount: 0,
+        suggestion: "Provide at least one state machine in the stateMachines configuration"
+      });
    }
    for (const [machineName, machine] of Object.entries(this.config.stateMachines)) {
      if (!machine.states || Object.keys(machine.states).length === 0) {
-        throw new
+        throw new StateMachineError(`Machine '${machineName}' must have states defined`, {
+          operation: "validateConfiguration",
+          machineId: machineName,
+          suggestion: "Define at least one state in the states configuration"
+        });
      }
      if (!machine.initialState) {
-        throw new
+        throw new StateMachineError(`Machine '${machineName}' must have an initialState`, {
+          operation: "validateConfiguration",
+          machineId: machineName,
+          availableStates: Object.keys(machine.states),
+          suggestion: "Specify an initialState property matching one of the defined states"
+        });
      }
      if (!machine.states[machine.initialState]) {
-        throw new
+        throw new StateMachineError(`Initial state '${machine.initialState}' not found in machine '${machineName}'`, {
+          operation: "validateConfiguration",
+          machineId: machineName,
+          initialState: machine.initialState,
+          availableStates: Object.keys(machine.states),
+          suggestion: "Set initialState to one of the defined states"
+        });
      }
    }
  }
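The state machine validation above requires at least one machine, with states defined and an initialState that exists among them; the send() changes further down resolve transitions via states[current].on[event]. A minimal sketch of a configuration consistent with those checks (machine, state, and event names are placeholders):

// Hypothetical StateMachinePlugin configuration
const stateMachine = new StateMachinePlugin({
  stateMachines: {
    orders: {
      initialState: "pending",
      states: {
        pending:   { on: { approve: "approved", cancel: "cancelled" } },
        approved:  { on: { ship: "shipped" } },
        shipped:   {},
        cancelled: {}
      }
    }
  }
});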
@@ -17596,12 +18775,25 @@ class StateMachinePlugin extends Plugin {
|
|
|
17596
18775
|
async send(machineId, entityId, event, context = {}) {
|
|
17597
18776
|
const machine = this.machines.get(machineId);
|
|
17598
18777
|
if (!machine) {
|
|
17599
|
-
throw new
|
|
18778
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
18779
|
+
operation: "send",
|
|
18780
|
+
machineId,
|
|
18781
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
18782
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
18783
|
+
});
|
|
17600
18784
|
}
|
|
17601
18785
|
const currentState = await this.getState(machineId, entityId);
|
|
17602
18786
|
const stateConfig = machine.config.states[currentState];
|
|
17603
18787
|
if (!stateConfig || !stateConfig.on || !stateConfig.on[event]) {
|
|
17604
|
-
throw new
|
|
18788
|
+
throw new StateMachineError(`Event '${event}' not valid for state '${currentState}' in machine '${machineId}'`, {
|
|
18789
|
+
operation: "send",
|
|
18790
|
+
machineId,
|
|
18791
|
+
entityId,
|
|
18792
|
+
event,
|
|
18793
|
+
currentState,
|
|
18794
|
+
validEvents: stateConfig && stateConfig.on ? Object.keys(stateConfig.on) : [],
|
|
18795
|
+
suggestion: "Use getValidEvents() to check which events are valid for the current state"
|
|
18796
|
+
});
|
|
17605
18797
|
}
|
|
17606
18798
|
const targetState = stateConfig.on[event];
|
|
17607
18799
|
if (stateConfig.guards && stateConfig.guards[event]) {
|
|
@@ -17612,7 +18804,16 @@ class StateMachinePlugin extends Plugin {
|
|
|
17612
18804
|
() => guard(context, event, { database: this.database, machineId, entityId })
|
|
17613
18805
|
);
|
|
17614
18806
|
if (!guardOk || !guardResult) {
|
|
17615
|
-
throw new
|
|
18807
|
+
throw new StateMachineError(`Transition blocked by guard '${guardName}'`, {
|
|
18808
|
+
operation: "send",
|
|
18809
|
+
machineId,
|
|
18810
|
+
entityId,
|
|
18811
|
+
event,
|
|
18812
|
+
currentState,
|
|
18813
|
+
guardName,
|
|
18814
|
+
guardError: guardErr?.message || "Guard returned false",
|
|
18815
|
+
suggestion: "Check guard conditions or modify the context to satisfy guard requirements"
|
|
18816
|
+
});
|
|
17616
18817
|
}
|
|
17617
18818
|
}
|
|
17618
18819
|
}
|
|
@@ -17722,7 +18923,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17722
18923
|
async getState(machineId, entityId) {
|
|
17723
18924
|
const machine = this.machines.get(machineId);
|
|
17724
18925
|
if (!machine) {
|
|
17725
|
-
throw new
|
|
18926
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
18927
|
+
operation: "getState",
|
|
18928
|
+
machineId,
|
|
18929
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
18930
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
18931
|
+
});
|
|
17726
18932
|
}
|
|
17727
18933
|
if (machine.currentStates.has(entityId)) {
|
|
17728
18934
|
return machine.currentStates.get(entityId);
|
|
@@ -17748,7 +18954,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17748
18954
|
async getValidEvents(machineId, stateOrEntityId) {
|
|
17749
18955
|
const machine = this.machines.get(machineId);
|
|
17750
18956
|
if (!machine) {
|
|
17751
|
-
throw new
|
|
18957
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
18958
|
+
operation: "getValidEvents",
|
|
18959
|
+
machineId,
|
|
18960
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
18961
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
18962
|
+
});
|
|
17752
18963
|
}
|
|
17753
18964
|
let state;
|
|
17754
18965
|
if (machine.config.states[stateOrEntityId]) {
|
|
@@ -17797,7 +19008,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17797
19008
|
async initializeEntity(machineId, entityId, context = {}) {
|
|
17798
19009
|
const machine = this.machines.get(machineId);
|
|
17799
19010
|
if (!machine) {
|
|
17800
|
-
throw new
|
|
19011
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
19012
|
+
operation: "initializeEntity",
|
|
19013
|
+
machineId,
|
|
19014
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
19015
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
19016
|
+
});
|
|
17801
19017
|
}
|
|
17802
19018
|
const initialState = machine.config.initialState;
|
|
17803
19019
|
machine.currentStates.set(entityId, initialState);
|
|
@@ -17816,7 +19032,14 @@ class StateMachinePlugin extends Plugin {
|
|
|
17816
19032
|
})
|
|
17817
19033
|
);
|
|
17818
19034
|
if (!ok && err && !err.message?.includes("already exists")) {
|
|
17819
|
-
throw new
|
|
19035
|
+
throw new StateMachineError("Failed to initialize entity state", {
|
|
19036
|
+
operation: "initializeEntity",
|
|
19037
|
+
machineId,
|
|
19038
|
+
entityId,
|
|
19039
|
+
initialState,
|
|
19040
|
+
original: err,
|
|
19041
|
+
suggestion: "Check state resource configuration and database permissions"
|
|
19042
|
+
});
|
|
17820
19043
|
}
|
|
17821
19044
|
}
|
|
17822
19045
|
const initialStateConfig = machine.config.states[initialState];
|
|
@@ -17845,7 +19068,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17845
19068
|
visualize(machineId) {
|
|
17846
19069
|
const machine = this.machines.get(machineId);
|
|
17847
19070
|
if (!machine) {
|
|
17848
|
-
throw new
|
|
19071
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
19072
|
+
operation: "visualize",
|
|
19073
|
+
machineId,
|
|
19074
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
19075
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
19076
|
+
});
|
|
17849
19077
|
}
|
|
17850
19078
|
let dot = `digraph ${machineId} {
|
|
17851
19079
|
`;
|
|
@@ -17889,5 +19117,1090 @@ class StateMachinePlugin extends Plugin {
|
|
|
17889
19117
|
}
|
|
17890
19118
|
}
|
|
17891
19119
|
|
|
17892
|
-
|
|
19120
|
+
function cosineDistance(a, b) {
|
|
19121
|
+
if (a.length !== b.length) {
|
|
19122
|
+
throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
|
|
19123
|
+
}
|
|
19124
|
+
let dotProduct2 = 0;
|
|
19125
|
+
let normA = 0;
|
|
19126
|
+
let normB = 0;
|
|
19127
|
+
for (let i = 0; i < a.length; i++) {
|
|
19128
|
+
dotProduct2 += a[i] * b[i];
|
|
19129
|
+
normA += a[i] * a[i];
|
|
19130
|
+
normB += b[i] * b[i];
|
|
19131
|
+
}
|
|
19132
|
+
const denominator = Math.sqrt(normA) * Math.sqrt(normB);
|
|
19133
|
+
if (denominator === 0) {
|
|
19134
|
+
return a.every((v) => v === 0) && b.every((v) => v === 0) ? 0 : 1;
|
|
19135
|
+
}
|
|
19136
|
+
const similarity = dotProduct2 / denominator;
|
|
19137
|
+
return 1 - similarity;
|
|
19138
|
+
}
|
|
19139
|
+
function euclideanDistance(a, b) {
|
|
19140
|
+
if (a.length !== b.length) {
|
|
19141
|
+
throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
|
|
19142
|
+
}
|
|
19143
|
+
let sum = 0;
|
|
19144
|
+
for (let i = 0; i < a.length; i++) {
|
|
19145
|
+
const diff = a[i] - b[i];
|
|
19146
|
+
sum += diff * diff;
|
|
19147
|
+
}
|
|
19148
|
+
return Math.sqrt(sum);
|
|
19149
|
+
}
|
|
19150
|
+
function manhattanDistance(a, b) {
|
|
19151
|
+
if (a.length !== b.length) {
|
|
19152
|
+
throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
|
|
19153
|
+
}
|
|
19154
|
+
let sum = 0;
|
|
19155
|
+
for (let i = 0; i < a.length; i++) {
|
|
19156
|
+
sum += Math.abs(a[i] - b[i]);
|
|
19157
|
+
}
|
|
19158
|
+
return sum;
|
|
19159
|
+
}
|
|
19160
|
+
function dotProduct(a, b) {
|
|
19161
|
+
if (a.length !== b.length) {
|
|
19162
|
+
throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
|
|
19163
|
+
}
|
|
19164
|
+
let sum = 0;
|
|
19165
|
+
for (let i = 0; i < a.length; i++) {
|
|
19166
|
+
sum += a[i] * b[i];
|
|
19167
|
+
}
|
|
19168
|
+
return sum;
|
|
19169
|
+
}
|
|
19170
|
+
function normalize(vector) {
|
|
19171
|
+
const magnitude2 = Math.sqrt(
|
|
19172
|
+
vector.reduce((sum, val) => sum + val * val, 0)
|
|
19173
|
+
);
|
|
19174
|
+
if (magnitude2 === 0) {
|
|
19175
|
+
return vector.slice();
|
|
19176
|
+
}
|
|
19177
|
+
return vector.map((val) => val / magnitude2);
|
|
19178
|
+
}
|
|
19179
|
+
|
|
19180
|
+
function kmeans(vectors, k, options = {}) {
|
|
19181
|
+
const {
|
|
19182
|
+
maxIterations = 100,
|
|
19183
|
+
tolerance = 1e-4,
|
|
19184
|
+
distanceFn = euclideanDistance,
|
|
19185
|
+
seed = null,
|
|
19186
|
+
onIteration = null
|
|
19187
|
+
} = options;
|
|
19188
|
+
if (vectors.length === 0) {
|
|
19189
|
+
throw new Error("Cannot cluster empty vector array");
|
|
19190
|
+
}
|
|
19191
|
+
if (k < 1) {
|
|
19192
|
+
throw new Error(`k must be at least 1, got ${k}`);
|
|
19193
|
+
}
|
|
19194
|
+
if (k > vectors.length) {
|
|
19195
|
+
throw new Error(`k (${k}) cannot be greater than number of vectors (${vectors.length})`);
|
|
19196
|
+
}
|
|
19197
|
+
const dimensions = vectors[0].length;
|
|
19198
|
+
for (let i = 1; i < vectors.length; i++) {
|
|
19199
|
+
if (vectors[i].length !== dimensions) {
|
|
19200
|
+
throw new Error(`All vectors must have same dimensions. Expected ${dimensions}, got ${vectors[i].length} at index ${i}`);
|
|
19201
|
+
}
|
|
19202
|
+
}
|
|
19203
|
+
const centroids = initializeCentroidsKMeansPlusPlus(vectors, k, distanceFn, seed);
|
|
19204
|
+
let assignments = new Array(vectors.length);
|
|
19205
|
+
let iterations = 0;
|
|
19206
|
+
let converged = false;
|
|
19207
|
+
let previousInertia = Infinity;
|
|
19208
|
+
while (!converged && iterations < maxIterations) {
|
|
19209
|
+
const newAssignments = vectors.map((vector) => {
|
|
19210
|
+
let minDist = Infinity;
|
|
19211
|
+
let nearestCluster = 0;
|
|
19212
|
+
for (let i = 0; i < k; i++) {
|
|
19213
|
+
const dist = distanceFn(vector, centroids[i]);
|
|
19214
|
+
if (dist < minDist) {
|
|
19215
|
+
minDist = dist;
|
|
19216
|
+
nearestCluster = i;
|
|
19217
|
+
}
|
|
19218
|
+
}
|
|
19219
|
+
return nearestCluster;
|
|
19220
|
+
});
|
|
19221
|
+
let inertia2 = 0;
|
|
19222
|
+
vectors.forEach((vector, i) => {
|
|
19223
|
+
const dist = distanceFn(vector, centroids[newAssignments[i]]);
|
|
19224
|
+
inertia2 += dist * dist;
|
|
19225
|
+
});
|
|
19226
|
+
const inertiaChange = Math.abs(previousInertia - inertia2);
|
|
19227
|
+
converged = inertiaChange < tolerance;
|
|
19228
|
+
assignments = newAssignments;
|
|
19229
|
+
previousInertia = inertia2;
|
|
19230
|
+
if (onIteration) {
|
|
19231
|
+
onIteration(iterations + 1, inertia2, converged);
|
|
19232
|
+
}
|
|
19233
|
+
if (!converged) {
|
|
19234
|
+
const clusterSums = Array(k).fill(null).map(() => new Array(dimensions).fill(0));
|
|
19235
|
+
const clusterCounts = new Array(k).fill(0);
|
|
19236
|
+
vectors.forEach((vector, i) => {
|
|
19237
|
+
const cluster = assignments[i];
|
|
19238
|
+
clusterCounts[cluster]++;
|
|
19239
|
+
vector.forEach((val, j) => {
|
|
19240
|
+
clusterSums[cluster][j] += val;
|
|
19241
|
+
});
|
|
19242
|
+
});
|
|
19243
|
+
for (let i = 0; i < k; i++) {
|
|
19244
|
+
if (clusterCounts[i] > 0) {
|
|
19245
|
+
centroids[i] = clusterSums[i].map((sum) => sum / clusterCounts[i]);
|
|
19246
|
+
} else {
|
|
19247
|
+
const randomIdx = Math.floor(Math.random() * vectors.length);
|
|
19248
|
+
centroids[i] = [...vectors[randomIdx]];
|
|
19249
|
+
}
|
|
19250
|
+
}
|
|
19251
|
+
}
|
|
19252
|
+
iterations++;
|
|
19253
|
+
}
|
|
19254
|
+
let inertia = 0;
|
|
19255
|
+
vectors.forEach((vector, i) => {
|
|
19256
|
+
const dist = distanceFn(vector, centroids[assignments[i]]);
|
|
19257
|
+
inertia += dist * dist;
|
|
19258
|
+
});
|
|
19259
|
+
return {
|
|
19260
|
+
centroids,
|
|
19261
|
+
assignments,
|
|
19262
|
+
iterations,
|
|
19263
|
+
converged,
|
|
19264
|
+
inertia
|
|
19265
|
+
};
|
|
19266
|
+
}
|
|
19267
|
+
function initializeCentroidsKMeansPlusPlus(vectors, k, distanceFn, seed) {
|
|
19268
|
+
const centroids = [];
|
|
19269
|
+
const n = vectors.length;
|
|
19270
|
+
const firstIndex = seed !== null ? seed % n : Math.floor(Math.random() * n);
|
|
19271
|
+
centroids.push([...vectors[firstIndex]]);
|
|
19272
|
+
for (let i = 1; i < k; i++) {
|
|
19273
|
+
const distances = vectors.map((vector) => {
|
|
19274
|
+
return Math.min(...centroids.map((c) => distanceFn(vector, c)));
|
|
19275
|
+
});
|
|
19276
|
+
const squaredDistances = distances.map((d) => d * d);
|
|
19277
|
+
const totalSquared = squaredDistances.reduce((a, b) => a + b, 0);
|
|
19278
|
+
if (totalSquared === 0) {
|
|
19279
|
+
const randomIdx = Math.floor(Math.random() * n);
|
|
19280
|
+
centroids.push([...vectors[randomIdx]]);
|
|
19281
|
+
continue;
|
|
19282
|
+
}
|
|
19283
|
+
let threshold = Math.random() * totalSquared;
|
|
19284
|
+
let cumulativeSum = 0;
|
|
19285
|
+
for (let j = 0; j < n; j++) {
|
|
19286
|
+
cumulativeSum += squaredDistances[j];
|
|
19287
|
+
if (cumulativeSum >= threshold) {
|
|
19288
|
+
centroids.push([...vectors[j]]);
|
|
19289
|
+
break;
|
|
19290
|
+
}
|
|
19291
|
+
}
|
|
19292
|
+
}
|
|
19293
|
+
return centroids;
|
|
19294
|
+
}
|
|
19295
|
+
+async function findOptimalK(vectors, options = {}) {
+  const {
+    minK = 2,
+    maxK = Math.min(10, Math.floor(Math.sqrt(vectors.length / 2))),
+    distanceFn = euclideanDistance,
+    nReferences = 10,
+    stabilityRuns = 5,
+    ...kmeansOptions
+  } = options;
+  const metricsModule = await Promise.resolve().then(function () { return metrics; });
+  const {
+    silhouetteScore,
+    daviesBouldinIndex,
+    calinskiHarabaszIndex,
+    gapStatistic,
+    clusteringStability
+  } = metricsModule;
+  const results = [];
+  for (let k = minK; k <= maxK; k++) {
+    const kmeansResult = kmeans(vectors, k, { ...kmeansOptions, distanceFn });
+    const silhouette = silhouetteScore(
+      vectors,
+      kmeansResult.assignments,
+      kmeansResult.centroids,
+      distanceFn
+    );
+    const daviesBouldin = daviesBouldinIndex(
+      vectors,
+      kmeansResult.assignments,
+      kmeansResult.centroids,
+      distanceFn
+    );
+    const calinskiHarabasz = calinskiHarabaszIndex(
+      vectors,
+      kmeansResult.assignments,
+      kmeansResult.centroids,
+      distanceFn
+    );
+    const gap = await gapStatistic(
+      vectors,
+      kmeansResult.assignments,
+      kmeansResult.centroids,
+      distanceFn,
+      nReferences
+    );
+    const stability = clusteringStability(
+      vectors,
+      k,
+      { ...kmeansOptions, distanceFn, nRuns: stabilityRuns }
+    );
+    results.push({
+      k,
+      inertia: kmeansResult.inertia,
+      silhouette,
+      daviesBouldin,
+      calinskiHarabasz,
+      gap: gap.gap,
+      gapSk: gap.sk,
+      stability: stability.stability,
+      cvInertia: stability.cvInertia,
+      iterations: kmeansResult.iterations,
+      converged: kmeansResult.converged
+    });
+  }
+  const elbowK = findElbowPoint(results.map((r) => r.inertia));
+  const recommendations = {
+    elbow: minK + elbowK,
+    silhouette: results.reduce(
+      (best, curr) => curr.silhouette > best.silhouette ? curr : best
+    ).k,
+    daviesBouldin: results.reduce(
+      (best, curr) => curr.daviesBouldin < best.daviesBouldin ? curr : best
+    ).k,
+    calinskiHarabasz: results.reduce(
+      (best, curr) => curr.calinskiHarabasz > best.calinskiHarabasz ? curr : best
+    ).k,
+    gap: results.reduce(
+      (best, curr) => curr.gap > best.gap ? curr : best
+    ).k,
+    stability: results.reduce(
+      (best, curr) => curr.stability > best.stability ? curr : best
+    ).k
+  };
+  const votes = Object.values(recommendations);
+  const consensus = votes.reduce((acc, k) => {
+    acc[k] = (acc[k] || 0) + 1;
+    return acc;
+  }, {});
+  const consensusK = parseInt(
+    Object.entries(consensus).reduce((a, b) => b[1] > a[1] ? b : a)[0]
+  );
+  return {
+    results,
+    recommendations,
+    consensus: consensusK,
+    summary: {
+      analysisRange: `${minK}-${maxK}`,
+      totalVectors: vectors.length,
+      dimensions: vectors[0].length,
+      recommendation: consensusK,
+      confidence: consensus[consensusK] / votes.length
+    }
+  };
+}
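findOptimalK is exposed publicly through the static VectorPlugin.findOptimalK wrapper further down. A hedged usage sketch with made-up vectors (data and option values are illustrative):

  const vectors = [[0, 0], [0, 1], [10, 10], [10, 11], [20, 20], [20, 21]];
  const analysis = await VectorPlugin.findOptimalK(vectors, { minK: 2, maxK: 4 });
  // analysis.recommendations holds one k per criterion (elbow, silhouette, daviesBouldin, calinskiHarabasz, gap, stability);
  // analysis.consensus is the most-voted k, and analysis.summary.confidence is the share of criteria that agree on it.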
+function findElbowPoint(inertias) {
+  const n = inertias.length;
+  if (n < 3) return 0;
+  let maxCurvature = -Infinity;
+  let elbowIndex = 0;
+  for (let i = 1; i < n - 1; i++) {
+    const curvature = inertias[i - 1] - 2 * inertias[i] + inertias[i + 1];
+    if (curvature > maxCurvature) {
+      maxCurvature = curvature;
+      elbowIndex = i;
+    }
+  }
+  return elbowIndex;
+}
+
+class VectorError extends PluginError {
+  constructor(message, details = {}) {
+    super(message, {
+      pluginName: "VectorPlugin",
+      ...details,
+      description: details.description || `
+Vector Plugin Error
+
+Operation: ${details.operation || "unknown"}
+
+Common causes:
+1. Vector dimension mismatch between vectors
+2. Invalid distance metric specified (must be: cosine, euclidean, manhattan)
+3. Empty vector array provided for clustering
+4. k value larger than number of available vectors
+5. Vector field not found or invalid in resource
+6. Large vectors without proper behavior (use 'body-overflow' or 'body-only')
+
+Available distance metrics:
+- cosine: Best for normalized vectors, semantic similarity. Range: [0, 2]
+- euclidean: Standard L2 distance, geometric proximity. Range: [0, \u221E)
+- manhattan: L1 distance, faster computation. Range: [0, \u221E)
+
+Storage considerations:
+- Vectors > 250 dimensions may exceed S3 metadata limit (2KB)
+- Use behavior: 'body-overflow' or 'body-only' for large vectors
+- OpenAI ada-002 (1536 dims): ~10KB, requires body storage
+- Sentence Transformers (384 dims): ~2.7KB, requires body storage
+`.trim()
+    });
+  }
+}
+
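VectorError is exported from the bundle, so callers can branch on it; a minimal, illustrative sketch (the products resource and queryEmbedding are assumptions, only the error type and message format come from the code):

  try {
    await products.vectorSearch(queryEmbedding, { distanceMetric: "hamming" }); // not a supported metric
  } catch (err) {
    if (err instanceof VectorError) console.error(err.message); // "Invalid distance metric: hamming"
  }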
+class VectorPlugin extends Plugin {
+  constructor(options = {}) {
+    super(options);
+    this.config = {
+      dimensions: 1536,
+      // Default to OpenAI text-embedding-3-small/3-large
+      distanceMetric: "cosine",
+      // Default metric
+      storageThreshold: 1500,
+      // Bytes - warn if vectors exceed this
+      autoFixBehavior: false,
+      // Automatically set body-overflow
+      autoDetectVectorField: true,
+      // Auto-detect embedding:XXX fields
+      emitEvents: true,
+      // Emit events for monitoring
+      verboseEvents: false,
+      // Emit detailed progress events
+      eventThrottle: 100,
+      // Throttle progress events (ms)
+      ...options
+    };
+    this.distanceFunctions = {
+      cosine: cosineDistance,
+      euclidean: euclideanDistance,
+      manhattan: manhattanDistance
+    };
+    this._vectorFieldCache = /* @__PURE__ */ new Map();
+    this._throttleState = /* @__PURE__ */ new Map();
+  }
+  async onInstall() {
+    this.emit("installed", { plugin: "VectorPlugin" });
+    this.validateVectorStorage();
+    this.installResourceMethods();
+  }
+  async onStart() {
+    this.emit("started", { plugin: "VectorPlugin" });
+  }
+  async onStop() {
+    this.emit("stopped", { plugin: "VectorPlugin" });
+  }
+  async onUninstall(options) {
+    for (const resource of Object.values(this.database.resources)) {
+      delete resource.vectorSearch;
+      delete resource.cluster;
+      delete resource.vectorDistance;
+      delete resource.similarTo;
+      delete resource.findSimilar;
+      delete resource.distance;
+    }
+    this.emit("uninstalled", { plugin: "VectorPlugin" });
+  }
+  /**
+   * Validate vector storage configuration for all resources
+   *
+   * Detects large vector fields and warns if proper behavior is not set.
+   * Can optionally auto-fix by setting body-overflow behavior.
+   */
+  validateVectorStorage() {
+    for (const resource of Object.values(this.database.resources)) {
+      const vectorFields = this.findVectorFields(resource.schema.attributes);
+      if (vectorFields.length === 0) continue;
+      const totalVectorSize = vectorFields.reduce((sum, f) => sum + f.estimatedBytes, 0);
+      if (totalVectorSize > this.config.storageThreshold) {
+        const hasCorrectBehavior = ["body-overflow", "body-only"].includes(resource.behavior);
+        if (!hasCorrectBehavior) {
+          const warning = {
+            resource: resource.name,
+            vectorFields: vectorFields.map((f) => ({
+              field: f.name,
+              dimensions: f.length,
+              estimatedBytes: f.estimatedBytes
+            })),
+            totalEstimatedBytes: totalVectorSize,
+            metadataLimit: 2047,
+            currentBehavior: resource.behavior || "default",
+            recommendation: "body-overflow"
+          };
+          this.emit("vector:storage-warning", warning);
+          if (this.config.autoFixBehavior) {
+            resource.behavior = "body-overflow";
+            this.emit("vector:behavior-fixed", {
+              resource: resource.name,
+              newBehavior: "body-overflow"
+            });
+          } else {
+            console.warn(`\u26A0\uFE0F VectorPlugin: Resource '${resource.name}' has large vector fields (${totalVectorSize} bytes estimated)`);
+            console.warn(` Current behavior: '${resource.behavior || "default"}'`);
+            console.warn(` Recommendation: Add behavior: 'body-overflow' or 'body-only' to resource configuration`);
+            console.warn(` Large vectors will exceed S3 metadata limit (2047 bytes) and cause errors.`);
+          }
+        }
+      }
+    }
+  }
+  /**
+   * Auto-detect vector field from resource schema
+   *
+   * Looks for fields with type 'embedding:XXX' pattern.
+   * Caches result per resource for performance.
+   *
+   * @param {Resource} resource - Resource instance
+   * @returns {string|null} Detected vector field name or null
+   */
+  detectVectorField(resource) {
+    if (this._vectorFieldCache.has(resource.name)) {
+      return this._vectorFieldCache.get(resource.name);
+    }
+    const vectorField = this._findEmbeddingField(resource.schema.attributes);
+    this._vectorFieldCache.set(resource.name, vectorField);
+    if (vectorField && this.config.emitEvents) {
+      this.emit("vector:field-detected", {
+        resource: resource.name,
+        vectorField,
+        timestamp: Date.now()
+      });
+    }
+    return vectorField;
+  }
+  /**
+   * Recursively find embedding:XXX field in attributes
+   *
+   * @param {Object} attributes - Resource attributes
+   * @param {string} path - Current path (for nested objects)
+   * @returns {string|null} Field path or null
+   */
+  _findEmbeddingField(attributes, path = "") {
+    for (const [key, attr] of Object.entries(attributes)) {
+      const fullPath = path ? `${path}.${key}` : key;
+      if (typeof attr === "string" && attr.startsWith("embedding:")) {
+        return fullPath;
+      }
+      if (attr.type === "array" && attr.items === "number" && attr.length) {
+        return fullPath;
+      }
+      if (attr.type === "object" && attr.props) {
+        const nested = this._findEmbeddingField(attr.props, fullPath);
+        if (nested) return nested;
+      }
+    }
+    return null;
+  }
+  /**
+   * Emit event with throttling support
+   *
+   * @param {string} eventName - Event name
+   * @param {Object} data - Event data
+   * @param {string} throttleKey - Unique key for throttling (optional)
+   */
+  _emitEvent(eventName, data, throttleKey = null) {
+    if (!this.config.emitEvents) return;
+    if (throttleKey) {
+      const now = Date.now();
+      const lastEmit = this._throttleState.get(throttleKey);
+      if (lastEmit && now - lastEmit < this.config.eventThrottle) {
+        return;
+      }
+      this._throttleState.set(throttleKey, now);
+    }
+    this.emit(eventName, data);
+  }
+  /**
+   * Find vector fields in resource attributes
+   *
+   * @param {Object} attributes - Resource attributes
+   * @param {string} path - Current path (for nested objects)
+   * @returns {Array} Array of vector field info
+   */
+  findVectorFields(attributes, path = "") {
+    const vectors = [];
+    for (const [key, attr] of Object.entries(attributes)) {
+      const fullPath = path ? `${path}.${key}` : key;
+      if (attr.type === "array" && attr.items === "number" && attr.length) {
+        vectors.push({
+          name: fullPath,
+          length: attr.length,
+          estimatedBytes: this.estimateVectorBytes(attr.length)
+        });
+      }
+      if (attr.type === "object" && attr.props) {
+        vectors.push(...this.findVectorFields(attr.props, fullPath));
+      }
+    }
+    return vectors;
+  }
+  /**
+   * Estimate bytes required to store a vector in JSON format
+   *
+   * Conservative estimate: ~7 bytes per number + array overhead
+   *
+   * @param {number} dimensions - Number of dimensions
+   * @returns {number} Estimated bytes
+   */
+  estimateVectorBytes(dimensions) {
+    return dimensions * 7 + 50;
+  }
+  /**
+   * Install vector methods on all resources
+   */
+  installResourceMethods() {
+    for (const resource of Object.values(this.database.resources)) {
+      const searchMethod = this.createVectorSearchMethod(resource);
+      const clusterMethod = this.createClusteringMethod(resource);
+      const distanceMethod = this.createDistanceMethod();
+      resource.vectorSearch = searchMethod;
+      resource.cluster = clusterMethod;
+      resource.vectorDistance = distanceMethod;
+      resource.similarTo = searchMethod;
+      resource.findSimilar = searchMethod;
+      resource.distance = distanceMethod;
+    }
+  }
+  /**
+   * Create vector search method for a resource
+   *
+   * Performs K-nearest neighbors search to find similar vectors.
+   *
+   * @param {Resource} resource - Resource instance
+   * @returns {Function} Vector search method
+   */
+  createVectorSearchMethod(resource) {
+    return async (queryVector, options = {}) => {
+      const startTime = Date.now();
+      let vectorField = options.vectorField;
+      if (!vectorField && this.config.autoDetectVectorField) {
+        vectorField = this.detectVectorField(resource);
+        if (!vectorField) {
+          vectorField = "vector";
+        }
+      } else if (!vectorField) {
+        vectorField = "vector";
+      }
+      const {
+        limit = 10,
+        distanceMetric = this.config.distanceMetric,
+        threshold = null,
+        partition = null
+      } = options;
+      const distanceFn = this.distanceFunctions[distanceMetric];
+      if (!distanceFn) {
+        const error = new VectorError(`Invalid distance metric: ${distanceMetric}`, {
+          operation: "vectorSearch",
+          availableMetrics: Object.keys(this.distanceFunctions),
+          providedMetric: distanceMetric
+        });
+        this._emitEvent("vector:search-error", {
+          resource: resource.name,
+          error: error.message,
+          timestamp: Date.now()
+        });
+        throw error;
+      }
+      this._emitEvent("vector:search-start", {
+        resource: resource.name,
+        vectorField,
+        limit,
+        distanceMetric,
+        partition,
+        threshold,
+        queryDimensions: queryVector.length,
+        timestamp: startTime
+      });
+      try {
+        let allRecords;
+        if (partition) {
+          this._emitEvent("vector:partition-filter", {
+            resource: resource.name,
+            partition,
+            timestamp: Date.now()
+          });
+          allRecords = await resource.list({ partition, partitionValues: partition });
+        } else {
+          allRecords = await resource.getAll();
+        }
+        const totalRecords = allRecords.length;
+        let processedRecords = 0;
+        let dimensionMismatches = 0;
+        const results = allRecords.filter((record) => record[vectorField] && Array.isArray(record[vectorField])).map((record, index) => {
+          try {
+            const distance = distanceFn(queryVector, record[vectorField]);
+            processedRecords++;
+            if (this.config.verboseEvents && processedRecords % 100 === 0) {
+              this._emitEvent("vector:search-progress", {
+                resource: resource.name,
+                processed: processedRecords,
+                total: totalRecords,
+                progress: processedRecords / totalRecords * 100,
+                timestamp: Date.now()
+              }, `search-${resource.name}`);
+            }
+            return { record, distance };
+          } catch (err) {
+            dimensionMismatches++;
+            if (this.config.verboseEvents) {
+              this._emitEvent("vector:dimension-mismatch", {
+                resource: resource.name,
+                recordIndex: index,
+                expected: queryVector.length,
+                got: record[vectorField]?.length,
+                timestamp: Date.now()
+              });
+            }
+            return null;
+          }
+        }).filter((result) => result !== null).filter((result) => threshold === null || result.distance <= threshold).sort((a, b) => a.distance - b.distance).slice(0, limit);
+        const duration = Date.now() - startTime;
+        const throughput = totalRecords / (duration / 1e3);
+        this._emitEvent("vector:search-complete", {
+          resource: resource.name,
+          vectorField,
+          resultsCount: results.length,
+          totalRecords,
+          processedRecords,
+          dimensionMismatches,
+          duration,
+          throughput: throughput.toFixed(2),
+          timestamp: Date.now()
+        });
+        if (this.config.verboseEvents) {
+          this._emitEvent("vector:performance", {
+            operation: "search",
+            resource: resource.name,
+            duration,
+            throughput: throughput.toFixed(2),
+            recordsPerSecond: (processedRecords / (duration / 1e3)).toFixed(2),
+            timestamp: Date.now()
+          });
+        }
+        return results;
+      } catch (error) {
+        this._emitEvent("vector:search-error", {
+          resource: resource.name,
+          error: error.message,
+          stack: error.stack,
+          timestamp: Date.now()
+        });
+        throw error;
+      }
+    };
+  }
+  /**
+   * Create clustering method for a resource
+   *
+   * Performs k-means clustering on resource vectors.
+   *
+   * @param {Resource} resource - Resource instance
+   * @returns {Function} Clustering method
+   */
+  createClusteringMethod(resource) {
+    return async (options = {}) => {
+      const startTime = Date.now();
+      let vectorField = options.vectorField;
+      if (!vectorField && this.config.autoDetectVectorField) {
+        vectorField = this.detectVectorField(resource);
+        if (!vectorField) {
+          vectorField = "vector";
+        }
+      } else if (!vectorField) {
+        vectorField = "vector";
+      }
+      const {
+        k = 5,
+        distanceMetric = this.config.distanceMetric,
+        partition = null,
+        ...kmeansOptions
+      } = options;
+      const distanceFn = this.distanceFunctions[distanceMetric];
+      if (!distanceFn) {
+        const error = new VectorError(`Invalid distance metric: ${distanceMetric}`, {
+          operation: "cluster",
+          availableMetrics: Object.keys(this.distanceFunctions),
+          providedMetric: distanceMetric
+        });
+        this._emitEvent("vector:cluster-error", {
+          resource: resource.name,
+          error: error.message,
+          timestamp: Date.now()
+        });
+        throw error;
+      }
+      this._emitEvent("vector:cluster-start", {
+        resource: resource.name,
+        vectorField,
+        k,
+        distanceMetric,
+        partition,
+        maxIterations: kmeansOptions.maxIterations || 100,
+        timestamp: startTime
+      });
+      try {
+        let allRecords;
+        if (partition) {
+          this._emitEvent("vector:partition-filter", {
+            resource: resource.name,
+            partition,
+            timestamp: Date.now()
+          });
+          allRecords = await resource.list({ partition, partitionValues: partition });
+        } else {
+          allRecords = await resource.getAll();
+        }
+        const recordsWithVectors = allRecords.filter(
+          (record) => record[vectorField] && Array.isArray(record[vectorField])
+        );
+        if (recordsWithVectors.length === 0) {
+          const error = new VectorError("No vectors found in resource", {
+            operation: "cluster",
+            resourceName: resource.name,
+            vectorField
+          });
+          this._emitEvent("vector:empty-dataset", {
+            resource: resource.name,
+            vectorField,
+            totalRecords: allRecords.length,
+            timestamp: Date.now()
+          });
+          throw error;
+        }
+        const vectors = recordsWithVectors.map((record) => record[vectorField]);
+        const result = kmeans(vectors, k, {
+          ...kmeansOptions,
+          distanceFn,
+          onIteration: this.config.verboseEvents ? (iteration, inertia, converged) => {
+            this._emitEvent("vector:cluster-iteration", {
+              resource: resource.name,
+              k,
+              iteration,
+              inertia,
+              converged,
+              timestamp: Date.now()
+            }, `cluster-${resource.name}`);
+          } : void 0
+        });
+        if (result.converged) {
+          this._emitEvent("vector:cluster-converged", {
+            resource: resource.name,
+            k,
+            iterations: result.iterations,
+            inertia: result.inertia,
+            timestamp: Date.now()
+          });
+        }
+        const clusters = Array(k).fill(null).map(() => []);
+        recordsWithVectors.forEach((record, i) => {
+          const clusterIndex = result.assignments[i];
+          clusters[clusterIndex].push(record);
+        });
+        const duration = Date.now() - startTime;
+        const clusterSizes = clusters.map((c) => c.length);
+        this._emitEvent("vector:cluster-complete", {
+          resource: resource.name,
+          vectorField,
+          k,
+          vectorCount: vectors.length,
+          iterations: result.iterations,
+          converged: result.converged,
+          inertia: result.inertia,
+          clusterSizes,
+          duration,
+          timestamp: Date.now()
+        });
+        if (this.config.verboseEvents) {
+          this._emitEvent("vector:performance", {
+            operation: "clustering",
+            resource: resource.name,
+            k,
+            duration,
+            iterationsPerSecond: (result.iterations / (duration / 1e3)).toFixed(2),
+            vectorsPerSecond: (vectors.length / (duration / 1e3)).toFixed(2),
+            timestamp: Date.now()
+          });
+        }
+        return {
+          clusters,
+          centroids: result.centroids,
+          inertia: result.inertia,
+          iterations: result.iterations,
+          converged: result.converged
+        };
+      } catch (error) {
+        this._emitEvent("vector:cluster-error", {
+          resource: resource.name,
+          error: error.message,
+          stack: error.stack,
+          timestamp: Date.now()
+        });
+        throw error;
+      }
+    };
+  }
+  /**
+   * Create distance calculation method
+   *
+   * @returns {Function} Distance method
+   */
+  createDistanceMethod() {
+    return (vector1, vector2, metric = this.config.distanceMetric) => {
+      const distanceFn = this.distanceFunctions[metric];
+      if (!distanceFn) {
+        throw new VectorError(`Invalid distance metric: ${metric}`, {
+          operation: "vectorDistance",
+          availableMetrics: Object.keys(this.distanceFunctions),
+          providedMetric: metric
+        });
+      }
+      return distanceFn(vector1, vector2);
+    };
+  }
+  /**
+   * Static utility: Normalize vector
+   *
+   * @param {number[]} vector - Input vector
+   * @returns {number[]} Normalized vector
+   */
+  static normalize(vector) {
+    return normalize(vector);
+  }
+  /**
+   * Static utility: Calculate dot product
+   *
+   * @param {number[]} vector1 - First vector
+   * @param {number[]} vector2 - Second vector
+   * @returns {number} Dot product
+   */
+  static dotProduct(vector1, vector2) {
+    return dotProduct(vector1, vector2);
+  }
+  /**
+   * Static utility: Find optimal K for clustering
+   *
+   * Analyzes clustering quality across a range of K values using
+   * multiple evaluation metrics.
+   *
+   * @param {number[][]} vectors - Vectors to analyze
+   * @param {Object} options - Configuration options
+   * @returns {Promise<Object>} Analysis results with recommendations
+   */
+  static async findOptimalK(vectors, options) {
+    return findOptimalK(vectors, options);
+  }
+}
+
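Taken together, installResourceMethods decorates every resource with vectorSearch (aliases similarTo and findSimilar), cluster, and vectorDistance (alias distance). A hedged usage sketch; the plugin registration step and the products resource are assumptions, only the method names and options come from the code above:

  const plugin = new VectorPlugin({ dimensions: 384, distanceMetric: "cosine", autoFixBehavior: true });
  // ...once the plugin is installed on a database that defines a products resource:
  const hits = await products.vectorSearch(queryEmbedding, { limit: 5, threshold: 0.35 }); // [{ record, distance }, ...]
  const { clusters, centroids, converged } = await products.cluster({ k: 3, maxIterations: 50 });
  const d = products.vectorDistance(vecA, vecB, "euclidean"); // vecA/vecB: any two vectors of equal length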
+function silhouetteScore(vectors, assignments, centroids, distanceFn = euclideanDistance) {
+  const k = centroids.length;
+  const n = vectors.length;
+  const clusters = Array(k).fill(null).map(() => []);
+  vectors.forEach((vector, i) => {
+    clusters[assignments[i]].push(i);
+  });
+  let totalScore = 0;
+  let validPoints = 0;
+  if (clusters.every((c) => c.length <= 1)) {
+    return 0;
+  }
+  for (let i = 0; i < n; i++) {
+    const clusterIdx = assignments[i];
+    const cluster = clusters[clusterIdx];
+    if (cluster.length === 1) continue;
+    let a = 0;
+    for (const j of cluster) {
+      if (i !== j) {
+        a += distanceFn(vectors[i], vectors[j]);
+      }
+    }
+    a /= cluster.length - 1;
+    let b = Infinity;
+    for (let otherCluster = 0; otherCluster < k; otherCluster++) {
+      if (otherCluster === clusterIdx) continue;
+      const otherPoints = clusters[otherCluster];
+      if (otherPoints.length === 0) continue;
+      let avgDist = 0;
+      for (const j of otherPoints) {
+        avgDist += distanceFn(vectors[i], vectors[j]);
+      }
+      avgDist /= otherPoints.length;
+      b = Math.min(b, avgDist);
+    }
+    if (b === Infinity) continue;
+    const maxAB = Math.max(a, b);
+    const s = maxAB === 0 ? 0 : (b - a) / maxAB;
+    totalScore += s;
+    validPoints++;
+  }
+  return validPoints > 0 ? totalScore / validPoints : 0;
+}
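silhouetteScore averages s(i) = (b - a) / max(a, b) over every point whose cluster has more than one member, where a is the mean distance to the point's own cluster and b the smallest mean distance to any other cluster; values near 1 indicate tight, well-separated clusters, values near 0 indicate overlap, and the function returns 0 outright when every cluster is a singleton.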
+function daviesBouldinIndex(vectors, assignments, centroids, distanceFn = euclideanDistance) {
+  const k = centroids.length;
+  const scatters = new Array(k).fill(0);
+  const clusterCounts = new Array(k).fill(0);
+  vectors.forEach((vector, i) => {
+    const cluster = assignments[i];
+    scatters[cluster] += distanceFn(vector, centroids[cluster]);
+    clusterCounts[cluster]++;
+  });
+  for (let i = 0; i < k; i++) {
+    if (clusterCounts[i] > 0) {
+      scatters[i] /= clusterCounts[i];
+    }
+  }
+  let dbIndex = 0;
+  let validClusters = 0;
+  for (let i = 0; i < k; i++) {
+    if (clusterCounts[i] === 0) continue;
+    let maxRatio = 0;
+    for (let j = 0; j < k; j++) {
+      if (i === j || clusterCounts[j] === 0) continue;
+      const centroidDist = distanceFn(centroids[i], centroids[j]);
+      if (centroidDist === 0) continue;
+      const ratio = (scatters[i] + scatters[j]) / centroidDist;
+      maxRatio = Math.max(maxRatio, ratio);
+    }
+    dbIndex += maxRatio;
+    validClusters++;
+  }
+  return validClusters > 0 ? dbIndex / validClusters : 0;
+}
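daviesBouldinIndex first computes each cluster's mean scatter around its centroid, then averages over clusters the worst-case ratio (scatter_i + scatter_j) / d(centroid_i, centroid_j) against every other cluster; lower is better, and empty clusters or coincident centroids are skipped rather than producing divisions by zero.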
+function calinskiHarabaszIndex(vectors, assignments, centroids, distanceFn = euclideanDistance) {
+  const n = vectors.length;
+  const k = centroids.length;
+  if (k === 1 || k === n) return 0;
+  const dimensions = vectors[0].length;
+  const overallCentroid = new Array(dimensions).fill(0);
+  vectors.forEach((vector) => {
+    vector.forEach((val, dim) => {
+      overallCentroid[dim] += val;
+    });
+  });
+  overallCentroid.forEach((val, dim, arr) => {
+    arr[dim] = val / n;
+  });
+  const clusterCounts = new Array(k).fill(0);
+  vectors.forEach((vector, i) => {
+    clusterCounts[assignments[i]]++;
+  });
+  let bgss = 0;
+  for (let i = 0; i < k; i++) {
+    if (clusterCounts[i] === 0) continue;
+    const dist = distanceFn(centroids[i], overallCentroid);
+    bgss += clusterCounts[i] * dist * dist;
+  }
+  let wcss = 0;
+  vectors.forEach((vector, i) => {
+    const cluster = assignments[i];
+    const dist = distanceFn(vector, centroids[cluster]);
+    wcss += dist * dist;
+  });
+  if (wcss === 0) return 0;
+  return bgss / (k - 1) / (wcss / (n - k));
+}
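calinskiHarabaszIndex is the variance-ratio criterion: between-group dispersion BGSS (cluster sizes times squared centroid-to-grand-centroid distances) over within-group dispersion WCSS, each scaled by its degrees of freedom, i.e. (BGSS / (k - 1)) / (WCSS / (n - k)); higher is better, and the degenerate cases k = 1 and k = n return 0.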
+async function gapStatistic(vectors, assignments, centroids, distanceFn = euclideanDistance, nReferences = 10) {
+  const n = vectors.length;
+  const k = centroids.length;
+  const dimensions = vectors[0].length;
+  let wk = 0;
+  vectors.forEach((vector, i) => {
+    const dist = distanceFn(vector, centroids[assignments[i]]);
+    wk += dist * dist;
+  });
+  wk = Math.log(wk + 1e-10);
+  const referenceWks = [];
+  const mins = new Array(dimensions).fill(Infinity);
+  const maxs = new Array(dimensions).fill(-Infinity);
+  vectors.forEach((vector) => {
+    vector.forEach((val, dim) => {
+      mins[dim] = Math.min(mins[dim], val);
+      maxs[dim] = Math.max(maxs[dim], val);
+    });
+  });
+  for (let ref = 0; ref < nReferences; ref++) {
+    const refVectors = [];
+    for (let i = 0; i < n; i++) {
+      const refVector = new Array(dimensions);
+      for (let dim = 0; dim < dimensions; dim++) {
+        refVector[dim] = mins[dim] + Math.random() * (maxs[dim] - mins[dim]);
+      }
+      refVectors.push(refVector);
+    }
+    const refResult = kmeans(refVectors, k, { maxIterations: 50, distanceFn });
+    let refWk = 0;
+    refVectors.forEach((vector, i) => {
+      const dist = distanceFn(vector, refResult.centroids[refResult.assignments[i]]);
+      refWk += dist * dist;
+    });
+    referenceWks.push(Math.log(refWk + 1e-10));
+  }
+  const expectedWk = referenceWks.reduce((a, b) => a + b, 0) / nReferences;
+  const gap = expectedWk - wk;
+  const sdk = Math.sqrt(
+    referenceWks.reduce((sum, wk2) => sum + Math.pow(wk2 - expectedWk, 2), 0) / nReferences
+  );
+  const sk = sdk * Math.sqrt(1 + 1 / nReferences);
+  return { gap, sk, expectedWk, actualWk: wk };
+}
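gapStatistic follows the usual gap-statistic construction: it compares log(W_k) of the actual clustering against the average log(W_k) obtained by clustering nReferences uniform samples drawn from the data's axis-aligned bounding box, returning gap = E[log(W_k,ref)] - log(W_k) together with sk = sd_ref * sqrt(1 + 1 / nReferences); a larger gap means the structure is stronger than what random data of the same extent would produce.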
+function clusteringStability(vectors, k, options = {}) {
+  const {
+    nRuns = 10,
+    distanceFn = euclideanDistance,
+    ...kmeansOptions
+  } = options;
+  const inertias = [];
+  const allAssignments = [];
+  for (let run = 0; run < nRuns; run++) {
+    const result = kmeans(vectors, k, {
+      ...kmeansOptions,
+      distanceFn,
+      seed: run
+      // Different seed for each run
+    });
+    inertias.push(result.inertia);
+    allAssignments.push(result.assignments);
+  }
+  const assignmentSimilarities = [];
+  for (let i = 0; i < nRuns - 1; i++) {
+    for (let j = i + 1; j < nRuns; j++) {
+      const similarity = calculateAssignmentSimilarity(allAssignments[i], allAssignments[j]);
+      assignmentSimilarities.push(similarity);
+    }
+  }
+  const avgInertia = inertias.reduce((a, b) => a + b, 0) / nRuns;
+  const stdInertia = Math.sqrt(
+    inertias.reduce((sum, val) => sum + Math.pow(val - avgInertia, 2), 0) / nRuns
+  );
+  const avgSimilarity = assignmentSimilarities.length > 0 ? assignmentSimilarities.reduce((a, b) => a + b, 0) / assignmentSimilarities.length : 1;
+  return {
+    avgInertia,
+    stdInertia,
+    cvInertia: avgInertia !== 0 ? stdInertia / avgInertia : 0,
+    // Coefficient of variation
+    avgSimilarity,
+    stability: avgSimilarity
+    // Higher is more stable
+  };
+}
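clusteringStability re-runs k-means nRuns times with seeds 0..nRuns-1 and summarizes two things: the coefficient of variation of the inertia across runs (cvInertia) and the average pairwise agreement between the resulting assignments (stability, in [0, 1], higher meaning the partition is reproducible); findOptimalK passes its stabilityRuns option through as nRuns.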
+function calculateAssignmentSimilarity(assignments1, assignments2) {
+  const n = assignments1.length;
+  let matches = 0;
+  for (let i = 0; i < n; i++) {
+    for (let j = i + 1; j < n; j++) {
+      const sameCluster1 = assignments1[i] === assignments1[j];
+      const sameCluster2 = assignments2[i] === assignments2[j];
+      if (sameCluster1 === sameCluster2) {
+        matches++;
+      }
+    }
+  }
+  const totalPairs = n * (n - 1) / 2;
+  return totalPairs > 0 ? matches / totalPairs : 1;
+}
+
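calculateAssignmentSimilarity is a Rand-index-style score: over all pairs of points it counts how often two runs agree on whether the pair shares a cluster and divides by the number of pairs, so identical partitions score 1 even if the cluster labels are permuted between runs.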
+var metrics = /*#__PURE__*/Object.freeze({
+  __proto__: null,
+  calinskiHarabaszIndex: calinskiHarabaszIndex,
+  clusteringStability: clusteringStability,
+  daviesBouldinIndex: daviesBouldinIndex,
+  gapStatistic: gapStatistic,
+  silhouetteScore: silhouetteScore
+});
+
+export { AVAILABLE_BEHAVIORS, AnalyticsNotEnabledError, AuditPlugin, AuthenticationError, BackupPlugin, BaseError, BehaviorError, CachePlugin, Client, ConnectionString, ConnectionStringError, CostsPlugin, CryptoError, DEFAULT_BEHAVIOR, Database, DatabaseError, EncryptionError, ErrorMap, EventualConsistencyPlugin, FullTextPlugin, InvalidResourceItem, MetadataLimitError, MetricsPlugin, MissingMetadata, NoSuchBucket, NoSuchKey, NotFound, PartitionDriverError, PartitionError, PermissionError, Plugin, PluginError, PluginObject, PluginStorageError, QueueConsumerPlugin, ReplicatorPlugin, Resource, ResourceError, ResourceIdsPageReader, ResourceIdsReader, ResourceNotFound, ResourceReader, ResourceWriter, S3QueuePlugin, Database as S3db, S3dbError, SchedulerPlugin, Schema, SchemaError, StateMachinePlugin, StreamError, UnknownError, ValidationError, Validator, VectorPlugin, behaviors, calculateAttributeNamesSize, calculateAttributeSizes, calculateEffectiveLimit, calculateSystemOverhead, calculateTotalSize, calculateUTF8Bytes, clearUTF8Cache, clearUTF8Memo, clearUTF8Memory, decode, decodeDecimal, decodeFixedPoint, decrypt, S3db as default, encode, encodeDecimal, encodeFixedPoint, encrypt, getBehavior, getSizeBreakdown, idGenerator, mapAwsError, md5, passwordGenerator, sha256, streamToString, transformValue, tryFn, tryFnSync };
 //# sourceMappingURL=s3db.es.js.map
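The export list above now includes VectorPlugin alongside the fixed-point codec helpers (encodeFixedPoint, decodeFixedPoint). A minimal import sketch; the bare package specifier is an assumption about how the published bundle is consumed:

  import { S3db, VectorPlugin, encodeFixedPoint, decodeFixedPoint } from "s3db.js";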