s3db.js 11.2.3 → 11.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/s3db-cli.js +588 -74
- package/dist/s3db.cjs.js +2472 -150
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +2464 -151
- package/dist/s3db.es.js.map +1 -1
- package/package.json +2 -1
- package/src/behaviors/enforce-limits.js +28 -4
- package/src/behaviors/index.js +6 -1
- package/src/client.class.js +11 -1
- package/src/concerns/base62.js +70 -0
- package/src/concerns/partition-queue.js +7 -1
- package/src/concerns/plugin-storage.js +75 -13
- package/src/database.class.js +19 -4
- package/src/errors.js +306 -27
- package/src/partition-drivers/base-partition-driver.js +12 -2
- package/src/partition-drivers/index.js +7 -1
- package/src/partition-drivers/memory-partition-driver.js +20 -5
- package/src/partition-drivers/sqs-partition-driver.js +6 -1
- package/src/plugins/audit.errors.js +46 -0
- package/src/plugins/backup/base-backup-driver.class.js +36 -6
- package/src/plugins/backup/filesystem-backup-driver.class.js +55 -7
- package/src/plugins/backup/index.js +40 -9
- package/src/plugins/backup/multi-backup-driver.class.js +69 -9
- package/src/plugins/backup/s3-backup-driver.class.js +48 -6
- package/src/plugins/backup.errors.js +45 -0
- package/src/plugins/cache/cache.class.js +8 -1
- package/src/plugins/cache.errors.js +47 -0
- package/src/plugins/cache.plugin.js +8 -1
- package/src/plugins/fulltext.errors.js +46 -0
- package/src/plugins/fulltext.plugin.js +15 -3
- package/src/plugins/index.js +1 -0
- package/src/plugins/metrics.errors.js +46 -0
- package/src/plugins/queue-consumer.plugin.js +31 -4
- package/src/plugins/queue.errors.js +46 -0
- package/src/plugins/replicator.errors.js +46 -0
- package/src/plugins/replicator.plugin.js +40 -5
- package/src/plugins/replicators/base-replicator.class.js +19 -3
- package/src/plugins/replicators/index.js +9 -3
- package/src/plugins/replicators/s3db-replicator.class.js +38 -8
- package/src/plugins/scheduler.errors.js +46 -0
- package/src/plugins/scheduler.plugin.js +79 -19
- package/src/plugins/state-machine.errors.js +47 -0
- package/src/plugins/state-machine.plugin.js +86 -17
- package/src/plugins/vector/distances.js +173 -0
- package/src/plugins/vector/kmeans.js +367 -0
- package/src/plugins/vector/metrics.js +369 -0
- package/src/plugins/vector/vector-error.js +43 -0
- package/src/plugins/vector.plugin.js +687 -0
- package/src/schema.class.js +232 -41
- package/src/stream/index.js +6 -1
- package/src/stream/resource-reader.class.js +6 -1
- package/src/validator.class.js +8 -0
package/dist/s3db.cjs.js
CHANGED
|
@@ -81,6 +81,41 @@ const decodeDecimal = (s) => {
|
|
|
81
81
|
const num = decPart ? Number(decodedInt + "." + decPart) : decodedInt;
|
|
82
82
|
return negative ? -num : num;
|
|
83
83
|
};
|
|
84
|
+
const encodeFixedPoint = (n, precision = 6) => {
|
|
85
|
+
if (typeof n !== "number" || isNaN(n)) return "undefined";
|
|
86
|
+
if (!isFinite(n)) return "undefined";
|
|
87
|
+
const scale = Math.pow(10, precision);
|
|
88
|
+
const scaled = Math.round(n * scale);
|
|
89
|
+
if (scaled === 0) return "^0";
|
|
90
|
+
const negative = scaled < 0;
|
|
91
|
+
let num = Math.abs(scaled);
|
|
92
|
+
let s = "";
|
|
93
|
+
while (num > 0) {
|
|
94
|
+
s = alphabet[num % base] + s;
|
|
95
|
+
num = Math.floor(num / base);
|
|
96
|
+
}
|
|
97
|
+
return "^" + (negative ? "-" : "") + s;
|
|
98
|
+
};
|
|
99
|
+
const decodeFixedPoint = (s, precision = 6) => {
|
|
100
|
+
if (typeof s !== "string") return NaN;
|
|
101
|
+
if (!s.startsWith("^")) return NaN;
|
|
102
|
+
s = s.slice(1);
|
|
103
|
+
if (s === "0") return 0;
|
|
104
|
+
let negative = false;
|
|
105
|
+
if (s[0] === "-") {
|
|
106
|
+
negative = true;
|
|
107
|
+
s = s.slice(1);
|
|
108
|
+
}
|
|
109
|
+
let r = 0;
|
|
110
|
+
for (let i = 0; i < s.length; i++) {
|
|
111
|
+
const idx = charToValue[s[i]];
|
|
112
|
+
if (idx === void 0) return NaN;
|
|
113
|
+
r = r * base + idx;
|
|
114
|
+
}
|
|
115
|
+
const scale = Math.pow(10, precision);
|
|
116
|
+
const scaled = negative ? -r : r;
|
|
117
|
+
return scaled / scale;
|
|
118
|
+
};
|
|
84
119
|
|
|
85
120
|
const utf8BytesMemory = /* @__PURE__ */ new Map();
|
|
86
121
|
const UTF8_MEMORY_MAX_SIZE = 1e4;
|
|
@@ -222,7 +257,7 @@ function calculateEffectiveLimit(config = {}) {
|
|
|
222
257
|
}
|
|
223
258
|
|
|
224
259
|
class BaseError extends Error {
|
|
225
|
-
constructor({ verbose, bucket, key, message, code, statusCode, requestId, awsMessage, original, commandName, commandInput, metadata,
|
|
260
|
+
constructor({ verbose, bucket, key, message, code, statusCode, requestId, awsMessage, original, commandName, commandInput, metadata, description, ...rest }) {
|
|
226
261
|
if (verbose) message = message + `
|
|
227
262
|
|
|
228
263
|
Verbose:
|
|
@@ -247,7 +282,6 @@ ${JSON.stringify(rest, null, 2)}`;
|
|
|
247
282
|
this.commandName = commandName;
|
|
248
283
|
this.commandInput = commandInput;
|
|
249
284
|
this.metadata = metadata;
|
|
250
|
-
this.suggestion = suggestion;
|
|
251
285
|
this.description = description;
|
|
252
286
|
this.data = { bucket, key, ...rest, verbose, message };
|
|
253
287
|
}
|
|
@@ -265,7 +299,6 @@ ${JSON.stringify(rest, null, 2)}`;
|
|
|
265
299
|
commandName: this.commandName,
|
|
266
300
|
commandInput: this.commandInput,
|
|
267
301
|
metadata: this.metadata,
|
|
268
|
-
suggestion: this.suggestion,
|
|
269
302
|
description: this.description,
|
|
270
303
|
data: this.data,
|
|
271
304
|
original: this.original,
|
|
@@ -406,26 +439,26 @@ function mapAwsError(err, context = {}) {
|
|
|
406
439
|
const metadata = err.$metadata ? { ...err.$metadata } : void 0;
|
|
407
440
|
const commandName = context.commandName;
|
|
408
441
|
const commandInput = context.commandInput;
|
|
409
|
-
let
|
|
442
|
+
let description;
|
|
410
443
|
if (code === "NoSuchKey" || code === "NotFound") {
|
|
411
|
-
|
|
412
|
-
return new NoSuchKey({ ...context, original: err, metadata, commandName, commandInput,
|
|
444
|
+
description = "The specified key does not exist in the bucket. Check if the key exists and if your credentials have permission to access it.";
|
|
445
|
+
return new NoSuchKey({ ...context, original: err, metadata, commandName, commandInput, description });
|
|
413
446
|
}
|
|
414
447
|
if (code === "NoSuchBucket") {
|
|
415
|
-
|
|
416
|
-
return new NoSuchBucket({ ...context, original: err, metadata, commandName, commandInput,
|
|
448
|
+
description = "The specified bucket does not exist. Check if the bucket name is correct and if your credentials have permission to access it.";
|
|
449
|
+
return new NoSuchBucket({ ...context, original: err, metadata, commandName, commandInput, description });
|
|
417
450
|
}
|
|
418
451
|
if (code === "AccessDenied" || err.statusCode === 403 || code === "Forbidden") {
|
|
419
|
-
|
|
420
|
-
return new PermissionError("Access denied", { ...context, original: err, metadata, commandName, commandInput,
|
|
452
|
+
description = "Access denied. Check your AWS credentials, IAM permissions, and bucket policy.";
|
|
453
|
+
return new PermissionError("Access denied", { ...context, original: err, metadata, commandName, commandInput, description });
|
|
421
454
|
}
|
|
422
455
|
if (code === "ValidationError" || err.statusCode === 400) {
|
|
423
|
-
|
|
424
|
-
return new ValidationError("Validation error", { ...context, original: err, metadata, commandName, commandInput,
|
|
456
|
+
description = "Validation error. Check the request parameters and payload format.";
|
|
457
|
+
return new ValidationError("Validation error", { ...context, original: err, metadata, commandName, commandInput, description });
|
|
425
458
|
}
|
|
426
459
|
if (code === "MissingMetadata") {
|
|
427
|
-
|
|
428
|
-
return new MissingMetadata({ ...context, original: err, metadata, commandName, commandInput,
|
|
460
|
+
description = "Object metadata is missing or invalid. Check if the object was uploaded correctly.";
|
|
461
|
+
return new MissingMetadata({ ...context, original: err, metadata, commandName, commandInput, description });
|
|
429
462
|
}
|
|
430
463
|
const errorDetails = [
|
|
431
464
|
`Unknown error: ${err.message || err.toString()}`,
|
|
@@ -433,27 +466,31 @@ function mapAwsError(err, context = {}) {
|
|
|
433
466
|
err.statusCode && `Status: ${err.statusCode}`,
|
|
434
467
|
err.stack && `Stack: ${err.stack.split("\n")[0]}`
|
|
435
468
|
].filter(Boolean).join(" | ");
|
|
436
|
-
|
|
437
|
-
return new UnknownError(errorDetails, { ...context, original: err, metadata, commandName, commandInput,
|
|
469
|
+
description = `Check the error details and AWS documentation. Original error: ${err.message || err.toString()}`;
|
|
470
|
+
return new UnknownError(errorDetails, { ...context, original: err, metadata, commandName, commandInput, description });
|
|
438
471
|
}
|
|
439
472
|
class ConnectionStringError extends S3dbError {
|
|
440
473
|
constructor(message, details = {}) {
|
|
441
|
-
|
|
474
|
+
const description = details.description || "Invalid connection string format. Check the connection string syntax and credentials.";
|
|
475
|
+
super(message, { ...details, description });
|
|
442
476
|
}
|
|
443
477
|
}
|
|
444
478
|
class CryptoError extends S3dbError {
|
|
445
479
|
constructor(message, details = {}) {
|
|
446
|
-
|
|
480
|
+
const description = details.description || "Cryptography operation failed. Check if the crypto library is available and input is valid.";
|
|
481
|
+
super(message, { ...details, description });
|
|
447
482
|
}
|
|
448
483
|
}
|
|
449
484
|
class SchemaError extends S3dbError {
|
|
450
485
|
constructor(message, details = {}) {
|
|
451
|
-
|
|
486
|
+
const description = details.description || "Schema validation failed. Check schema definition and input data format.";
|
|
487
|
+
super(message, { ...details, description });
|
|
452
488
|
}
|
|
453
489
|
}
|
|
454
490
|
class ResourceError extends S3dbError {
|
|
455
491
|
constructor(message, details = {}) {
|
|
456
|
-
|
|
492
|
+
const description = details.description || "Resource operation failed. Check resource configuration, attributes, and operation context.";
|
|
493
|
+
super(message, { ...details, description });
|
|
457
494
|
Object.assign(this, details);
|
|
458
495
|
}
|
|
459
496
|
}
|
|
@@ -482,13 +519,12 @@ ${details.strictValidation === false ? " \u2022 Update partition definition to
|
|
|
482
519
|
\u2022 Update partition definition to use existing fields, OR
|
|
483
520
|
\u2022 Use strictValidation: false to skip this check during testing`}
|
|
484
521
|
|
|
485
|
-
Docs: https://
|
|
522
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#partitions
|
|
486
523
|
`.trim();
|
|
487
524
|
}
|
|
488
525
|
super(message, {
|
|
489
526
|
...details,
|
|
490
|
-
description
|
|
491
|
-
suggestion: details.suggestion || "Check partition definition, fields, and input values."
|
|
527
|
+
description
|
|
492
528
|
});
|
|
493
529
|
}
|
|
494
530
|
}
|
|
@@ -547,7 +583,7 @@ Example fix:
|
|
|
547
583
|
await db.connect(); // Plugin initialized here
|
|
548
584
|
await db.createResource({ name: '${resourceName}', ... }); // Analytics resource created here
|
|
549
585
|
|
|
550
|
-
Docs: https://
|
|
586
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/eventual-consistency.md
|
|
551
587
|
`.trim();
|
|
552
588
|
super(message, {
|
|
553
589
|
...rest,
|
|
@@ -557,8 +593,260 @@ Docs: https://docs.s3db.js.org/plugins/eventual-consistency#troubleshooting
|
|
|
557
593
|
configuredResources,
|
|
558
594
|
registeredResources,
|
|
559
595
|
pluginInitialized,
|
|
560
|
-
description
|
|
561
|
-
|
|
596
|
+
description
|
|
597
|
+
});
|
|
598
|
+
}
|
|
599
|
+
}
|
|
600
|
+
class PluginError extends S3dbError {
|
|
601
|
+
constructor(message, details = {}) {
|
|
602
|
+
const {
|
|
603
|
+
pluginName = "Unknown",
|
|
604
|
+
operation = "unknown",
|
|
605
|
+
...rest
|
|
606
|
+
} = details;
|
|
607
|
+
let description = details.description;
|
|
608
|
+
if (!description) {
|
|
609
|
+
description = `
|
|
610
|
+
Plugin Error
|
|
611
|
+
|
|
612
|
+
Plugin: ${pluginName}
|
|
613
|
+
Operation: ${operation}
|
|
614
|
+
|
|
615
|
+
Possible causes:
|
|
616
|
+
1. Plugin not properly initialized
|
|
617
|
+
2. Plugin configuration is invalid
|
|
618
|
+
3. Plugin dependencies not met
|
|
619
|
+
4. Plugin method called before installation
|
|
620
|
+
|
|
621
|
+
Solution:
|
|
622
|
+
Ensure plugin is added to database and connect() is called before usage.
|
|
623
|
+
|
|
624
|
+
Example:
|
|
625
|
+
const db = new Database({
|
|
626
|
+
bucket: 'my-bucket',
|
|
627
|
+
plugins: [new ${pluginName}({ /* config */ })]
|
|
628
|
+
});
|
|
629
|
+
|
|
630
|
+
await db.connect(); // Plugin installed here
|
|
631
|
+
// Now plugin methods are available
|
|
632
|
+
|
|
633
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/README.md
|
|
634
|
+
`.trim();
|
|
635
|
+
}
|
|
636
|
+
super(message, {
|
|
637
|
+
...rest,
|
|
638
|
+
pluginName,
|
|
639
|
+
operation,
|
|
640
|
+
description
|
|
641
|
+
});
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
class PluginStorageError extends S3dbError {
|
|
645
|
+
constructor(message, details = {}) {
|
|
646
|
+
const {
|
|
647
|
+
pluginSlug = "unknown",
|
|
648
|
+
key = "",
|
|
649
|
+
operation = "unknown",
|
|
650
|
+
...rest
|
|
651
|
+
} = details;
|
|
652
|
+
let description = details.description;
|
|
653
|
+
if (!description) {
|
|
654
|
+
description = `
|
|
655
|
+
Plugin Storage Error
|
|
656
|
+
|
|
657
|
+
Plugin: ${pluginSlug}
|
|
658
|
+
Key: ${key}
|
|
659
|
+
Operation: ${operation}
|
|
660
|
+
|
|
661
|
+
Possible causes:
|
|
662
|
+
1. Storage not initialized (plugin not installed)
|
|
663
|
+
2. Invalid key format
|
|
664
|
+
3. S3 operation failed
|
|
665
|
+
4. Permissions issue
|
|
666
|
+
|
|
667
|
+
Solution:
|
|
668
|
+
Ensure plugin has access to storage and key is valid.
|
|
669
|
+
|
|
670
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/README.md#plugin-storage
|
|
671
|
+
`.trim();
|
|
672
|
+
}
|
|
673
|
+
super(message, {
|
|
674
|
+
...rest,
|
|
675
|
+
pluginSlug,
|
|
676
|
+
key,
|
|
677
|
+
operation,
|
|
678
|
+
description
|
|
679
|
+
});
|
|
680
|
+
}
|
|
681
|
+
}
|
|
682
|
+
class PartitionDriverError extends S3dbError {
|
|
683
|
+
constructor(message, details = {}) {
|
|
684
|
+
const {
|
|
685
|
+
driver = "unknown",
|
|
686
|
+
operation = "unknown",
|
|
687
|
+
queueSize,
|
|
688
|
+
maxQueueSize,
|
|
689
|
+
...rest
|
|
690
|
+
} = details;
|
|
691
|
+
let description = details.description;
|
|
692
|
+
if (!description && queueSize !== void 0 && maxQueueSize !== void 0) {
|
|
693
|
+
description = `
|
|
694
|
+
Partition Driver Error
|
|
695
|
+
|
|
696
|
+
Driver: ${driver}
|
|
697
|
+
Operation: ${operation}
|
|
698
|
+
Queue Status: ${queueSize}/${maxQueueSize}
|
|
699
|
+
|
|
700
|
+
Possible causes:
|
|
701
|
+
1. Queue is full (backpressure)
|
|
702
|
+
2. Driver not properly configured
|
|
703
|
+
3. SQS permissions issue (if using SQS driver)
|
|
704
|
+
|
|
705
|
+
Solution:
|
|
706
|
+
${queueSize >= maxQueueSize ? "Wait for queue to drain or increase maxQueueSize" : "Check driver configuration and permissions"}
|
|
707
|
+
|
|
708
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#partition-drivers
|
|
709
|
+
`.trim();
|
|
710
|
+
} else if (!description) {
|
|
711
|
+
description = `
|
|
712
|
+
Partition Driver Error
|
|
713
|
+
|
|
714
|
+
Driver: ${driver}
|
|
715
|
+
Operation: ${operation}
|
|
716
|
+
|
|
717
|
+
Check driver configuration and permissions.
|
|
718
|
+
|
|
719
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#partition-drivers
|
|
720
|
+
`.trim();
|
|
721
|
+
}
|
|
722
|
+
super(message, {
|
|
723
|
+
...rest,
|
|
724
|
+
driver,
|
|
725
|
+
operation,
|
|
726
|
+
queueSize,
|
|
727
|
+
maxQueueSize,
|
|
728
|
+
description
|
|
729
|
+
});
|
|
730
|
+
}
|
|
731
|
+
}
|
|
732
|
+
class BehaviorError extends S3dbError {
|
|
733
|
+
constructor(message, details = {}) {
|
|
734
|
+
const {
|
|
735
|
+
behavior = "unknown",
|
|
736
|
+
availableBehaviors = [],
|
|
737
|
+
...rest
|
|
738
|
+
} = details;
|
|
739
|
+
let description = details.description;
|
|
740
|
+
if (!description) {
|
|
741
|
+
description = `
|
|
742
|
+
Behavior Error
|
|
743
|
+
|
|
744
|
+
Requested: ${behavior}
|
|
745
|
+
Available: ${availableBehaviors.join(", ") || "body-overflow, body-only, truncate-data, enforce-limits, user-managed"}
|
|
746
|
+
|
|
747
|
+
Possible causes:
|
|
748
|
+
1. Behavior name misspelled
|
|
749
|
+
2. Custom behavior not registered
|
|
750
|
+
|
|
751
|
+
Solution:
|
|
752
|
+
Use one of the available behaviors or register custom behavior.
|
|
753
|
+
|
|
754
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#behaviors
|
|
755
|
+
`.trim();
|
|
756
|
+
}
|
|
757
|
+
super(message, {
|
|
758
|
+
...rest,
|
|
759
|
+
behavior,
|
|
760
|
+
availableBehaviors,
|
|
761
|
+
description
|
|
762
|
+
});
|
|
763
|
+
}
|
|
764
|
+
}
|
|
765
|
+
class StreamError extends S3dbError {
|
|
766
|
+
constructor(message, details = {}) {
|
|
767
|
+
const {
|
|
768
|
+
operation = "unknown",
|
|
769
|
+
resource,
|
|
770
|
+
...rest
|
|
771
|
+
} = details;
|
|
772
|
+
let description = details.description;
|
|
773
|
+
if (!description) {
|
|
774
|
+
description = `
|
|
775
|
+
Stream Error
|
|
776
|
+
|
|
777
|
+
Operation: ${operation}
|
|
778
|
+
${resource ? `Resource: ${resource}` : ""}
|
|
779
|
+
|
|
780
|
+
Possible causes:
|
|
781
|
+
1. Stream not properly initialized
|
|
782
|
+
2. Resource not available
|
|
783
|
+
3. Network error during streaming
|
|
784
|
+
|
|
785
|
+
Solution:
|
|
786
|
+
Check stream configuration and resource availability.
|
|
787
|
+
|
|
788
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#streaming
|
|
789
|
+
`.trim();
|
|
790
|
+
}
|
|
791
|
+
super(message, {
|
|
792
|
+
...rest,
|
|
793
|
+
operation,
|
|
794
|
+
resource,
|
|
795
|
+
description
|
|
796
|
+
});
|
|
797
|
+
}
|
|
798
|
+
}
|
|
799
|
+
class MetadataLimitError extends S3dbError {
|
|
800
|
+
constructor(message, details = {}) {
|
|
801
|
+
const {
|
|
802
|
+
totalSize,
|
|
803
|
+
effectiveLimit,
|
|
804
|
+
absoluteLimit = 2047,
|
|
805
|
+
excess,
|
|
806
|
+
resourceName,
|
|
807
|
+
operation,
|
|
808
|
+
...rest
|
|
809
|
+
} = details;
|
|
810
|
+
let description = details.description;
|
|
811
|
+
if (!description && totalSize && effectiveLimit) {
|
|
812
|
+
description = `
|
|
813
|
+
S3 Metadata Size Limit Exceeded
|
|
814
|
+
|
|
815
|
+
Current Size: ${totalSize} bytes
|
|
816
|
+
Effective Limit: ${effectiveLimit} bytes
|
|
817
|
+
Absolute Limit: ${absoluteLimit} bytes
|
|
818
|
+
${excess ? `Excess: ${excess} bytes` : ""}
|
|
819
|
+
${resourceName ? `Resource: ${resourceName}` : ""}
|
|
820
|
+
${operation ? `Operation: ${operation}` : ""}
|
|
821
|
+
|
|
822
|
+
S3 has a hard limit of 2KB (2047 bytes) for object metadata.
|
|
823
|
+
|
|
824
|
+
Solutions:
|
|
825
|
+
1. Use 'body-overflow' behavior to store excess in body
|
|
826
|
+
2. Use 'body-only' behavior to store everything in body
|
|
827
|
+
3. Reduce number of fields
|
|
828
|
+
4. Use shorter field values
|
|
829
|
+
5. Enable advanced metadata encoding
|
|
830
|
+
|
|
831
|
+
Example:
|
|
832
|
+
await db.createResource({
|
|
833
|
+
name: '${resourceName || "myResource"}',
|
|
834
|
+
behavior: 'body-overflow', // Automatically handles overflow
|
|
835
|
+
attributes: { ... }
|
|
836
|
+
});
|
|
837
|
+
|
|
838
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/README.md#metadata-size-limits
|
|
839
|
+
`.trim();
|
|
840
|
+
}
|
|
841
|
+
super(message, {
|
|
842
|
+
...rest,
|
|
843
|
+
totalSize,
|
|
844
|
+
effectiveLimit,
|
|
845
|
+
absoluteLimit,
|
|
846
|
+
excess,
|
|
847
|
+
resourceName,
|
|
848
|
+
operation,
|
|
849
|
+
description
|
|
562
850
|
});
|
|
563
851
|
}
|
|
564
852
|
}
|
|
@@ -902,10 +1190,17 @@ class PluginStorage {
|
|
|
902
1190
|
*/
|
|
903
1191
|
constructor(client, pluginSlug) {
|
|
904
1192
|
if (!client) {
|
|
905
|
-
throw new
|
|
1193
|
+
throw new PluginStorageError("PluginStorage requires a client instance", {
|
|
1194
|
+
operation: "constructor",
|
|
1195
|
+
pluginSlug,
|
|
1196
|
+
suggestion: "Pass a valid S3db Client instance when creating PluginStorage"
|
|
1197
|
+
});
|
|
906
1198
|
}
|
|
907
1199
|
if (!pluginSlug) {
|
|
908
|
-
throw new
|
|
1200
|
+
throw new PluginStorageError("PluginStorage requires a pluginSlug", {
|
|
1201
|
+
operation: "constructor",
|
|
1202
|
+
suggestion: 'Provide a plugin slug (e.g., "eventual-consistency", "cache", "audit")'
|
|
1203
|
+
});
|
|
909
1204
|
}
|
|
910
1205
|
this.client = client;
|
|
911
1206
|
this.pluginSlug = pluginSlug;
|
|
@@ -958,7 +1253,15 @@ class PluginStorage {
|
|
|
958
1253
|
}
|
|
959
1254
|
const [ok, err] = await tryFn(() => this.client.putObject(putParams));
|
|
960
1255
|
if (!ok) {
|
|
961
|
-
throw new
|
|
1256
|
+
throw new PluginStorageError(`Failed to save plugin data`, {
|
|
1257
|
+
pluginSlug: this.pluginSlug,
|
|
1258
|
+
key,
|
|
1259
|
+
operation: "set",
|
|
1260
|
+
behavior,
|
|
1261
|
+
ttl,
|
|
1262
|
+
original: err,
|
|
1263
|
+
suggestion: "Check S3 permissions and key format"
|
|
1264
|
+
});
|
|
962
1265
|
}
|
|
963
1266
|
}
|
|
964
1267
|
/**
|
|
@@ -980,7 +1283,13 @@ class PluginStorage {
|
|
|
980
1283
|
if (err.name === "NoSuchKey" || err.Code === "NoSuchKey") {
|
|
981
1284
|
return null;
|
|
982
1285
|
}
|
|
983
|
-
throw new
|
|
1286
|
+
throw new PluginStorageError(`Failed to retrieve plugin data`, {
|
|
1287
|
+
pluginSlug: this.pluginSlug,
|
|
1288
|
+
key,
|
|
1289
|
+
operation: "get",
|
|
1290
|
+
original: err,
|
|
1291
|
+
suggestion: "Check if the key exists and S3 permissions are correct"
|
|
1292
|
+
});
|
|
984
1293
|
}
|
|
985
1294
|
const metadata = response.Metadata || {};
|
|
986
1295
|
const parsedMetadata = this._parseMetadataValues(metadata);
|
|
@@ -993,7 +1302,13 @@ class PluginStorage {
|
|
|
993
1302
|
data = { ...parsedMetadata, ...body };
|
|
994
1303
|
}
|
|
995
1304
|
} catch (parseErr) {
|
|
996
|
-
throw new
|
|
1305
|
+
throw new PluginStorageError(`Failed to parse JSON body`, {
|
|
1306
|
+
pluginSlug: this.pluginSlug,
|
|
1307
|
+
key,
|
|
1308
|
+
operation: "get",
|
|
1309
|
+
original: parseErr,
|
|
1310
|
+
suggestion: "Body content may be corrupted. Check S3 object integrity"
|
|
1311
|
+
});
|
|
997
1312
|
}
|
|
998
1313
|
}
|
|
999
1314
|
const expiresAt = data._expiresat || data._expiresAt;
|
|
@@ -1054,7 +1369,15 @@ class PluginStorage {
|
|
|
1054
1369
|
() => this.client.listObjects({ prefix: fullPrefix, maxKeys: limit })
|
|
1055
1370
|
);
|
|
1056
1371
|
if (!ok) {
|
|
1057
|
-
throw new
|
|
1372
|
+
throw new PluginStorageError(`Failed to list plugin data`, {
|
|
1373
|
+
pluginSlug: this.pluginSlug,
|
|
1374
|
+
operation: "list",
|
|
1375
|
+
prefix,
|
|
1376
|
+
fullPrefix,
|
|
1377
|
+
limit,
|
|
1378
|
+
original: err,
|
|
1379
|
+
suggestion: "Check S3 permissions and bucket configuration"
|
|
1380
|
+
});
|
|
1058
1381
|
}
|
|
1059
1382
|
const keys = result.Contents?.map((item) => item.Key) || [];
|
|
1060
1383
|
return this._removeKeyPrefix(keys);
|
|
@@ -1074,7 +1397,16 @@ class PluginStorage {
|
|
|
1074
1397
|
() => this.client.listObjects({ prefix: fullPrefix, maxKeys: limit })
|
|
1075
1398
|
);
|
|
1076
1399
|
if (!ok) {
|
|
1077
|
-
throw new
|
|
1400
|
+
throw new PluginStorageError(`Failed to list resource data`, {
|
|
1401
|
+
pluginSlug: this.pluginSlug,
|
|
1402
|
+
operation: "listForResource",
|
|
1403
|
+
resourceName,
|
|
1404
|
+
subPrefix,
|
|
1405
|
+
fullPrefix,
|
|
1406
|
+
limit,
|
|
1407
|
+
original: err,
|
|
1408
|
+
suggestion: "Check resource name and S3 permissions"
|
|
1409
|
+
});
|
|
1078
1410
|
}
|
|
1079
1411
|
const keys = result.Contents?.map((item) => item.Key) || [];
|
|
1080
1412
|
return this._removeKeyPrefix(keys);
|
|
@@ -1214,7 +1546,13 @@ class PluginStorage {
|
|
|
1214
1546
|
async delete(key) {
|
|
1215
1547
|
const [ok, err] = await tryFn(() => this.client.deleteObject(key));
|
|
1216
1548
|
if (!ok) {
|
|
1217
|
-
throw new
|
|
1549
|
+
throw new PluginStorageError(`Failed to delete plugin data`, {
|
|
1550
|
+
pluginSlug: this.pluginSlug,
|
|
1551
|
+
key,
|
|
1552
|
+
operation: "delete",
|
|
1553
|
+
original: err,
|
|
1554
|
+
suggestion: "Check S3 delete permissions"
|
|
1555
|
+
});
|
|
1218
1556
|
}
|
|
1219
1557
|
}
|
|
1220
1558
|
/**
|
|
@@ -1401,16 +1739,28 @@ class PluginStorage {
|
|
|
1401
1739
|
const valueSize = calculateUTF8Bytes(encoded);
|
|
1402
1740
|
currentSize += keySize + valueSize;
|
|
1403
1741
|
if (currentSize > effectiveLimit) {
|
|
1404
|
-
throw new
|
|
1405
|
-
|
|
1406
|
-
|
|
1742
|
+
throw new MetadataLimitError(`Data exceeds metadata limit with enforce-limits behavior`, {
|
|
1743
|
+
totalSize: currentSize,
|
|
1744
|
+
effectiveLimit,
|
|
1745
|
+
absoluteLimit: S3_METADATA_LIMIT,
|
|
1746
|
+
excess: currentSize - effectiveLimit,
|
|
1747
|
+
operation: "PluginStorage.set",
|
|
1748
|
+
pluginSlug: this.pluginSlug,
|
|
1749
|
+
suggestion: "Use 'body-overflow' or 'body-only' behavior to handle large data"
|
|
1750
|
+
});
|
|
1407
1751
|
}
|
|
1408
1752
|
metadata[key] = jsonValue;
|
|
1409
1753
|
}
|
|
1410
1754
|
break;
|
|
1411
1755
|
}
|
|
1412
1756
|
default:
|
|
1413
|
-
throw new
|
|
1757
|
+
throw new BehaviorError(`Unknown behavior: ${behavior}`, {
|
|
1758
|
+
behavior,
|
|
1759
|
+
availableBehaviors: ["body-overflow", "body-only", "enforce-limits"],
|
|
1760
|
+
operation: "PluginStorage._applyBehavior",
|
|
1761
|
+
pluginSlug: this.pluginSlug,
|
|
1762
|
+
suggestion: "Use 'body-overflow', 'body-only', or 'enforce-limits'"
|
|
1763
|
+
});
|
|
1414
1764
|
}
|
|
1415
1765
|
return { metadata, body };
|
|
1416
1766
|
}
|
|
@@ -1975,6 +2325,35 @@ class AuditPlugin extends Plugin {
|
|
|
1975
2325
|
}
|
|
1976
2326
|
}
|
|
1977
2327
|
|
|
2328
|
+
class BackupError extends S3dbError {
|
|
2329
|
+
constructor(message, details = {}) {
|
|
2330
|
+
const { driver = "unknown", operation = "unknown", backupId, ...rest } = details;
|
|
2331
|
+
let description = details.description;
|
|
2332
|
+
if (!description) {
|
|
2333
|
+
description = `
|
|
2334
|
+
Backup Operation Error
|
|
2335
|
+
|
|
2336
|
+
Driver: ${driver}
|
|
2337
|
+
Operation: ${operation}
|
|
2338
|
+
${backupId ? `Backup ID: ${backupId}` : ""}
|
|
2339
|
+
|
|
2340
|
+
Common causes:
|
|
2341
|
+
1. Invalid backup driver configuration
|
|
2342
|
+
2. Destination storage not accessible
|
|
2343
|
+
3. Insufficient permissions
|
|
2344
|
+
4. Network connectivity issues
|
|
2345
|
+
5. Invalid backup file format
|
|
2346
|
+
|
|
2347
|
+
Solution:
|
|
2348
|
+
Check driver configuration and ensure destination storage is accessible.
|
|
2349
|
+
|
|
2350
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/backup.md
|
|
2351
|
+
`.trim();
|
|
2352
|
+
}
|
|
2353
|
+
super(message, { ...rest, driver, operation, backupId, description });
|
|
2354
|
+
}
|
|
2355
|
+
}
|
|
2356
|
+
|
|
1978
2357
|
class BaseBackupDriver {
|
|
1979
2358
|
constructor(config = {}) {
|
|
1980
2359
|
this.config = {
|
|
@@ -2005,7 +2384,12 @@ class BaseBackupDriver {
|
|
|
2005
2384
|
* @returns {Object} Upload result with destination info
|
|
2006
2385
|
*/
|
|
2007
2386
|
async upload(filePath, backupId, manifest) {
|
|
2008
|
-
throw new
|
|
2387
|
+
throw new BackupError("upload() method must be implemented by subclass", {
|
|
2388
|
+
operation: "upload",
|
|
2389
|
+
driver: this.constructor.name,
|
|
2390
|
+
backupId,
|
|
2391
|
+
suggestion: "Extend BaseBackupDriver and implement the upload() method"
|
|
2392
|
+
});
|
|
2009
2393
|
}
|
|
2010
2394
|
/**
|
|
2011
2395
|
* Download a backup file from the destination
|
|
@@ -2015,7 +2399,12 @@ class BaseBackupDriver {
|
|
|
2015
2399
|
* @returns {string} Path to downloaded file
|
|
2016
2400
|
*/
|
|
2017
2401
|
async download(backupId, targetPath, metadata) {
|
|
2018
|
-
throw new
|
|
2402
|
+
throw new BackupError("download() method must be implemented by subclass", {
|
|
2403
|
+
operation: "download",
|
|
2404
|
+
driver: this.constructor.name,
|
|
2405
|
+
backupId,
|
|
2406
|
+
suggestion: "Extend BaseBackupDriver and implement the download() method"
|
|
2407
|
+
});
|
|
2019
2408
|
}
|
|
2020
2409
|
/**
|
|
2021
2410
|
* Delete a backup from the destination
|
|
@@ -2023,7 +2412,12 @@ class BaseBackupDriver {
|
|
|
2023
2412
|
* @param {Object} metadata - Backup metadata
|
|
2024
2413
|
*/
|
|
2025
2414
|
async delete(backupId, metadata) {
|
|
2026
|
-
throw new
|
|
2415
|
+
throw new BackupError("delete() method must be implemented by subclass", {
|
|
2416
|
+
operation: "delete",
|
|
2417
|
+
driver: this.constructor.name,
|
|
2418
|
+
backupId,
|
|
2419
|
+
suggestion: "Extend BaseBackupDriver and implement the delete() method"
|
|
2420
|
+
});
|
|
2027
2421
|
}
|
|
2028
2422
|
/**
|
|
2029
2423
|
* List backups available in the destination
|
|
@@ -2031,7 +2425,11 @@ class BaseBackupDriver {
|
|
|
2031
2425
|
* @returns {Array} List of backup metadata
|
|
2032
2426
|
*/
|
|
2033
2427
|
async list(options = {}) {
|
|
2034
|
-
throw new
|
|
2428
|
+
throw new BackupError("list() method must be implemented by subclass", {
|
|
2429
|
+
operation: "list",
|
|
2430
|
+
driver: this.constructor.name,
|
|
2431
|
+
suggestion: "Extend BaseBackupDriver and implement the list() method"
|
|
2432
|
+
});
|
|
2035
2433
|
}
|
|
2036
2434
|
/**
|
|
2037
2435
|
* Verify backup integrity
|
|
@@ -2041,14 +2439,23 @@ class BaseBackupDriver {
|
|
|
2041
2439
|
* @returns {boolean} True if backup is valid
|
|
2042
2440
|
*/
|
|
2043
2441
|
async verify(backupId, expectedChecksum, metadata) {
|
|
2044
|
-
throw new
|
|
2442
|
+
throw new BackupError("verify() method must be implemented by subclass", {
|
|
2443
|
+
operation: "verify",
|
|
2444
|
+
driver: this.constructor.name,
|
|
2445
|
+
backupId,
|
|
2446
|
+
suggestion: "Extend BaseBackupDriver and implement the verify() method"
|
|
2447
|
+
});
|
|
2045
2448
|
}
|
|
2046
2449
|
/**
|
|
2047
2450
|
* Get driver type identifier
|
|
2048
2451
|
* @returns {string} Driver type
|
|
2049
2452
|
*/
|
|
2050
2453
|
getType() {
|
|
2051
|
-
throw new
|
|
2454
|
+
throw new BackupError("getType() method must be implemented by subclass", {
|
|
2455
|
+
operation: "getType",
|
|
2456
|
+
driver: this.constructor.name,
|
|
2457
|
+
suggestion: "Extend BaseBackupDriver and implement the getType() method"
|
|
2458
|
+
});
|
|
2052
2459
|
}
|
|
2053
2460
|
/**
|
|
2054
2461
|
* Get driver-specific storage info
|
|
@@ -2090,7 +2497,11 @@ class FilesystemBackupDriver extends BaseBackupDriver {
|
|
|
2090
2497
|
}
|
|
2091
2498
|
async onSetup() {
|
|
2092
2499
|
if (!this.config.path) {
|
|
2093
|
-
throw new
|
|
2500
|
+
throw new BackupError("FilesystemBackupDriver: path configuration is required", {
|
|
2501
|
+
operation: "onSetup",
|
|
2502
|
+
driver: "filesystem",
|
|
2503
|
+
suggestion: 'Provide a path in config: new FilesystemBackupDriver({ path: "/path/to/backups" })'
|
|
2504
|
+
});
|
|
2094
2505
|
}
|
|
2095
2506
|
this.log(`Initialized with path: ${this.config.path}`);
|
|
2096
2507
|
}
|
|
@@ -2114,11 +2525,26 @@ class FilesystemBackupDriver extends BaseBackupDriver {
|
|
|
2114
2525
|
() => promises.mkdir(targetDir, { recursive: true, mode: this.config.directoryPermissions })
|
|
2115
2526
|
);
|
|
2116
2527
|
if (!createDirOk) {
|
|
2117
|
-
throw new
|
|
2528
|
+
throw new BackupError("Failed to create backup directory", {
|
|
2529
|
+
operation: "upload",
|
|
2530
|
+
driver: "filesystem",
|
|
2531
|
+
backupId,
|
|
2532
|
+
targetDir,
|
|
2533
|
+
original: createDirErr,
|
|
2534
|
+
suggestion: "Check directory permissions and disk space"
|
|
2535
|
+
});
|
|
2118
2536
|
}
|
|
2119
2537
|
const [copyOk, copyErr] = await tryFn(() => promises.copyFile(filePath, targetPath));
|
|
2120
2538
|
if (!copyOk) {
|
|
2121
|
-
throw new
|
|
2539
|
+
throw new BackupError("Failed to copy backup file", {
|
|
2540
|
+
operation: "upload",
|
|
2541
|
+
driver: "filesystem",
|
|
2542
|
+
backupId,
|
|
2543
|
+
filePath,
|
|
2544
|
+
targetPath,
|
|
2545
|
+
original: copyErr,
|
|
2546
|
+
suggestion: "Check file permissions and disk space"
|
|
2547
|
+
});
|
|
2122
2548
|
}
|
|
2123
2549
|
const [manifestOk, manifestErr] = await tryFn(
|
|
2124
2550
|
() => import('fs/promises').then((fs) => fs.writeFile(
|
|
@@ -2129,7 +2555,14 @@ class FilesystemBackupDriver extends BaseBackupDriver {
|
|
|
2129
2555
|
);
|
|
2130
2556
|
if (!manifestOk) {
|
|
2131
2557
|
await tryFn(() => promises.unlink(targetPath));
|
|
2132
|
-
throw new
|
|
2558
|
+
throw new BackupError("Failed to write manifest file", {
|
|
2559
|
+
operation: "upload",
|
|
2560
|
+
driver: "filesystem",
|
|
2561
|
+
backupId,
|
|
2562
|
+
manifestPath,
|
|
2563
|
+
original: manifestErr,
|
|
2564
|
+
suggestion: "Check directory permissions and disk space"
|
|
2565
|
+
});
|
|
2133
2566
|
}
|
|
2134
2567
|
const [statOk, , stats] = await tryFn(() => promises.stat(targetPath));
|
|
2135
2568
|
const size = statOk ? stats.size : 0;
|
|
@@ -2148,13 +2581,27 @@ class FilesystemBackupDriver extends BaseBackupDriver {
|
|
|
2148
2581
|
);
|
|
2149
2582
|
const [existsOk] = await tryFn(() => promises.access(sourcePath));
|
|
2150
2583
|
if (!existsOk) {
|
|
2151
|
-
throw new
|
|
2584
|
+
throw new BackupError("Backup file not found", {
|
|
2585
|
+
operation: "download",
|
|
2586
|
+
driver: "filesystem",
|
|
2587
|
+
backupId,
|
|
2588
|
+
sourcePath,
|
|
2589
|
+
suggestion: "Check if backup exists using list() method"
|
|
2590
|
+
});
|
|
2152
2591
|
}
|
|
2153
2592
|
const targetDir = path.dirname(targetPath);
|
|
2154
2593
|
await tryFn(() => promises.mkdir(targetDir, { recursive: true }));
|
|
2155
2594
|
const [copyOk, copyErr] = await tryFn(() => promises.copyFile(sourcePath, targetPath));
|
|
2156
2595
|
if (!copyOk) {
|
|
2157
|
-
throw new
|
|
2596
|
+
throw new BackupError("Failed to download backup", {
|
|
2597
|
+
operation: "download",
|
|
2598
|
+
driver: "filesystem",
|
|
2599
|
+
backupId,
|
|
2600
|
+
sourcePath,
|
|
2601
|
+
targetPath,
|
|
2602
|
+
original: copyErr,
|
|
2603
|
+
suggestion: "Check file permissions and disk space"
|
|
2604
|
+
});
|
|
2158
2605
|
}
|
|
2159
2606
|
this.log(`Downloaded backup ${backupId} from ${sourcePath} to ${targetPath}`);
|
|
2160
2607
|
return targetPath;
|
|
@@ -2171,7 +2618,14 @@ class FilesystemBackupDriver extends BaseBackupDriver {
|
|
|
2171
2618
|
const [deleteBackupOk] = await tryFn(() => promises.unlink(backupPath));
|
|
2172
2619
|
const [deleteManifestOk] = await tryFn(() => promises.unlink(manifestPath));
|
|
2173
2620
|
if (!deleteBackupOk && !deleteManifestOk) {
|
|
2174
|
-
throw new
|
|
2621
|
+
throw new BackupError("Failed to delete backup files", {
|
|
2622
|
+
operation: "delete",
|
|
2623
|
+
driver: "filesystem",
|
|
2624
|
+
backupId,
|
|
2625
|
+
backupPath,
|
|
2626
|
+
manifestPath,
|
|
2627
|
+
suggestion: "Check file permissions"
|
|
2628
|
+
});
|
|
2175
2629
|
}
|
|
2176
2630
|
this.log(`Deleted backup ${backupId}`);
|
|
2177
2631
|
}
|
|
@@ -2276,10 +2730,18 @@ class S3BackupDriver extends BaseBackupDriver {
|
|
|
2276
2730
|
this.config.bucket = this.database.bucket;
|
|
2277
2731
|
}
|
|
2278
2732
|
if (!this.config.client) {
|
|
2279
|
-
throw new
|
|
2733
|
+
throw new BackupError("S3BackupDriver: client is required", {
|
|
2734
|
+
operation: "onSetup",
|
|
2735
|
+
driver: "s3",
|
|
2736
|
+
suggestion: "Provide a client in config or ensure database has a client configured"
|
|
2737
|
+
});
|
|
2280
2738
|
}
|
|
2281
2739
|
if (!this.config.bucket) {
|
|
2282
|
-
throw new
|
|
2740
|
+
throw new BackupError("S3BackupDriver: bucket is required", {
|
|
2741
|
+
operation: "onSetup",
|
|
2742
|
+
driver: "s3",
|
|
2743
|
+
suggestion: "Provide a bucket in config or ensure database has a bucket configured"
|
|
2744
|
+
});
|
|
2283
2745
|
}
|
|
2284
2746
|
this.log(`Initialized with bucket: ${this.config.bucket}, path: ${this.config.path}`);
|
|
2285
2747
|
}
|
|
@@ -2321,7 +2783,15 @@ class S3BackupDriver extends BaseBackupDriver {
|
|
|
2321
2783
|
});
|
|
2322
2784
|
});
|
|
2323
2785
|
if (!uploadOk) {
|
|
2324
|
-
throw new
|
|
2786
|
+
throw new BackupError("Failed to upload backup file to S3", {
|
|
2787
|
+
operation: "upload",
|
|
2788
|
+
driver: "s3",
|
|
2789
|
+
backupId,
|
|
2790
|
+
bucket: this.config.bucket,
|
|
2791
|
+
key: backupKey,
|
|
2792
|
+
original: uploadErr,
|
|
2793
|
+
suggestion: "Check S3 permissions and bucket configuration"
|
|
2794
|
+
});
|
|
2325
2795
|
}
|
|
2326
2796
|
const [manifestOk, manifestErr] = await tryFn(
|
|
2327
2797
|
() => this.config.client.uploadObject({
|
|
@@ -2342,7 +2812,15 @@ class S3BackupDriver extends BaseBackupDriver {
|
|
|
2342
2812
|
bucket: this.config.bucket,
|
|
2343
2813
|
key: backupKey
|
|
2344
2814
|
}));
|
|
2345
|
-
throw new
|
|
2815
|
+
throw new BackupError("Failed to upload manifest to S3", {
|
|
2816
|
+
operation: "upload",
|
|
2817
|
+
driver: "s3",
|
|
2818
|
+
backupId,
|
|
2819
|
+
bucket: this.config.bucket,
|
|
2820
|
+
manifestKey,
|
|
2821
|
+
original: manifestErr,
|
|
2822
|
+
suggestion: "Check S3 permissions and bucket configuration"
|
|
2823
|
+
});
|
|
2346
2824
|
}
|
|
2347
2825
|
this.log(`Uploaded backup ${backupId} to s3://${this.config.bucket}/${backupKey} (${fileSize} bytes)`);
|
|
2348
2826
|
return {
|
|
@@ -2365,7 +2843,16 @@ class S3BackupDriver extends BaseBackupDriver {
|
|
|
2365
2843
|
})
|
|
2366
2844
|
);
|
|
2367
2845
|
if (!downloadOk) {
|
|
2368
|
-
throw new
|
|
2846
|
+
throw new BackupError("Failed to download backup from S3", {
|
|
2847
|
+
operation: "download",
|
|
2848
|
+
driver: "s3",
|
|
2849
|
+
backupId,
|
|
2850
|
+
bucket: this.config.bucket,
|
|
2851
|
+
key: backupKey,
|
|
2852
|
+
targetPath,
|
|
2853
|
+
original: downloadErr,
|
|
2854
|
+
suggestion: "Check if backup exists and S3 permissions are correct"
|
|
2855
|
+
});
|
|
2369
2856
|
}
|
|
2370
2857
|
this.log(`Downloaded backup ${backupId} from s3://${this.config.bucket}/${backupKey} to ${targetPath}`);
|
|
2371
2858
|
return targetPath;
|
|
@@ -2386,7 +2873,15 @@ class S3BackupDriver extends BaseBackupDriver {
|
|
|
2386
2873
|
})
|
|
2387
2874
|
);
|
|
2388
2875
|
if (!deleteBackupOk && !deleteManifestOk) {
|
|
2389
|
-
throw new
|
|
2876
|
+
throw new BackupError("Failed to delete backup from S3", {
|
|
2877
|
+
operation: "delete",
|
|
2878
|
+
driver: "s3",
|
|
2879
|
+
backupId,
|
|
2880
|
+
bucket: this.config.bucket,
|
|
2881
|
+
backupKey,
|
|
2882
|
+
manifestKey,
|
|
2883
|
+
suggestion: "Check S3 delete permissions"
|
|
2884
|
+
});
|
|
2390
2885
|
}
|
|
2391
2886
|
this.log(`Deleted backup ${backupId} from S3`);
|
|
2392
2887
|
}
|
|
@@ -2499,11 +2994,22 @@ class MultiBackupDriver extends BaseBackupDriver {
|
|
|
2499
2994
|
}
|
|
2500
2995
|
async onSetup() {
|
|
2501
2996
|
if (!Array.isArray(this.config.destinations) || this.config.destinations.length === 0) {
|
|
2502
|
-
throw new
|
|
2997
|
+
throw new BackupError("MultiBackupDriver requires non-empty destinations array", {
|
|
2998
|
+
operation: "onSetup",
|
|
2999
|
+
driver: "multi",
|
|
3000
|
+
destinationsProvided: this.config.destinations,
|
|
3001
|
+
suggestion: 'Provide destinations array: { destinations: [{ driver: "s3", config: {...} }, { driver: "filesystem", config: {...} }] }'
|
|
3002
|
+
});
|
|
2503
3003
|
}
|
|
2504
3004
|
for (const [index, destConfig] of this.config.destinations.entries()) {
|
|
2505
3005
|
if (!destConfig.driver) {
|
|
2506
|
-
throw new
|
|
3006
|
+
throw new BackupError(`Destination ${index} missing driver type`, {
|
|
3007
|
+
operation: "onSetup",
|
|
3008
|
+
driver: "multi",
|
|
3009
|
+
destinationIndex: index,
|
|
3010
|
+
destination: destConfig,
|
|
3011
|
+
suggestion: 'Each destination must have a driver property: { driver: "s3", config: {...} } or { driver: "filesystem", config: {...} }'
|
|
3012
|
+
});
|
|
2507
3013
|
}
|
|
2508
3014
|
try {
|
|
2509
3015
|
const driver = createBackupDriver(destConfig.driver, destConfig.config || {});
|
|
@@ -2515,7 +3021,15 @@ class MultiBackupDriver extends BaseBackupDriver {
|
|
|
2515
3021
|
});
|
|
2516
3022
|
this.log(`Setup destination ${index}: ${destConfig.driver}`);
|
|
2517
3023
|
} catch (error) {
|
|
2518
|
-
throw new
|
|
3024
|
+
throw new BackupError(`Failed to setup destination ${index}`, {
|
|
3025
|
+
operation: "onSetup",
|
|
3026
|
+
driver: "multi",
|
|
3027
|
+
destinationIndex: index,
|
|
3028
|
+
destinationDriver: destConfig.driver,
|
|
3029
|
+
destinationConfig: destConfig.config,
|
|
3030
|
+
original: error,
|
|
3031
|
+
suggestion: "Check destination driver configuration and ensure dependencies are available"
|
|
3032
|
+
});
|
|
2519
3033
|
}
|
|
2520
3034
|
}
|
|
2521
3035
|
if (this.config.requireAll === false) {
|
|
@@ -2544,7 +3058,15 @@ class MultiBackupDriver extends BaseBackupDriver {
|
|
|
2544
3058
|
this.log(`Priority upload failed to destination ${index}: ${err.message}`);
|
|
2545
3059
|
}
|
|
2546
3060
|
}
|
|
2547
|
-
throw new
|
|
3061
|
+
throw new BackupError("All priority destinations failed", {
|
|
3062
|
+
operation: "upload",
|
|
3063
|
+
driver: "multi",
|
|
3064
|
+
strategy: "priority",
|
|
3065
|
+
backupId,
|
|
3066
|
+
totalDestinations: this.drivers.length,
|
|
3067
|
+
failures: errors,
|
|
3068
|
+
suggestion: "Check destination configurations and ensure at least one destination is accessible"
|
|
3069
|
+
});
|
|
2548
3070
|
}
|
|
2549
3071
|
const uploadPromises = this.drivers.map(async ({ driver, config, index }) => {
|
|
2550
3072
|
const [ok, err, result] = await tryFn(
|
|
@@ -2574,10 +3096,28 @@ class MultiBackupDriver extends BaseBackupDriver {
|
|
|
2574
3096
|
const successResults = allResults.filter((r) => r.status === "success");
|
|
2575
3097
|
const failedResults = allResults.filter((r) => r.status === "failed");
|
|
2576
3098
|
if (strategy === "all" && failedResults.length > 0) {
|
|
2577
|
-
throw new
|
|
3099
|
+
throw new BackupError('Some destinations failed with strategy "all"', {
|
|
3100
|
+
operation: "upload",
|
|
3101
|
+
driver: "multi",
|
|
3102
|
+
strategy: "all",
|
|
3103
|
+
backupId,
|
|
3104
|
+
totalDestinations: this.drivers.length,
|
|
3105
|
+
successCount: successResults.length,
|
|
3106
|
+
failedCount: failedResults.length,
|
|
3107
|
+
failures: failedResults,
|
|
3108
|
+
suggestion: 'All destinations must succeed with "all" strategy. Use "any" strategy to tolerate failures, or fix failing destinations.'
|
|
3109
|
+
});
|
|
2578
3110
|
}
|
|
2579
3111
|
if (strategy === "any" && successResults.length === 0) {
|
|
2580
|
-
throw new
|
|
3112
|
+
throw new BackupError('All destinations failed with strategy "any"', {
|
|
3113
|
+
operation: "upload",
|
|
3114
|
+
driver: "multi",
|
|
3115
|
+
strategy: "any",
|
|
3116
|
+
backupId,
|
|
3117
|
+
totalDestinations: this.drivers.length,
|
|
3118
|
+
failures: failedResults,
|
|
3119
|
+
suggestion: 'At least one destination must succeed with "any" strategy. Check all destination configurations.'
|
|
3120
|
+
});
|
|
2581
3121
|
}
|
|
2582
3122
|
return allResults;
|
|
2583
3123
|
}
|
|
@@ -2597,7 +3137,14 @@ class MultiBackupDriver extends BaseBackupDriver {
|
|
|
2597
3137
|
this.log(`Download failed from destination ${destMetadata.destination}: ${err.message}`);
|
|
2598
3138
|
}
|
|
2599
3139
|
}
|
|
2600
|
-
throw new
|
|
3140
|
+
throw new BackupError("Failed to download backup from any destination", {
|
|
3141
|
+
operation: "download",
|
|
3142
|
+
driver: "multi",
|
|
3143
|
+
backupId,
|
|
3144
|
+
targetPath,
|
|
3145
|
+
attemptedDestinations: destinations.length,
|
|
3146
|
+
suggestion: "Check if backup exists in at least one destination and destinations are accessible"
|
|
3147
|
+
});
|
|
2601
3148
|
}
|
|
2602
3149
|
async delete(backupId, metadata) {
|
|
2603
3150
|
const destinations = Array.isArray(metadata.destinations) ? metadata.destinations : [metadata];
|
|
@@ -2619,7 +3166,14 @@ class MultiBackupDriver extends BaseBackupDriver {
|
|
|
2619
3166
|
}
|
|
2620
3167
|
}
|
|
2621
3168
|
if (successCount === 0 && errors.length > 0) {
|
|
2622
|
-
throw new
|
|
3169
|
+
throw new BackupError("Failed to delete from any destination", {
|
|
3170
|
+
operation: "delete",
|
|
3171
|
+
driver: "multi",
|
|
3172
|
+
backupId,
|
|
3173
|
+
attemptedDestinations: destinations.length,
|
|
3174
|
+
failures: errors,
|
|
3175
|
+
suggestion: "Check if backup exists in destinations and destinations are accessible with delete permissions"
|
|
3176
|
+
});
|
|
2623
3177
|
}
|
|
2624
3178
|
if (errors.length > 0) {
|
|
2625
3179
|
this.log(`Partial delete success, some errors: ${errors.join("; ")}`);
|
|
@@ -2719,32 +3273,62 @@ const BACKUP_DRIVERS = {
|
|
|
2719
3273
|
function createBackupDriver(driver, config = {}) {
|
|
2720
3274
|
const DriverClass = BACKUP_DRIVERS[driver];
|
|
2721
3275
|
if (!DriverClass) {
|
|
2722
|
-
throw new
|
|
3276
|
+
throw new BackupError(`Unknown backup driver: ${driver}`, {
|
|
3277
|
+
operation: "createBackupDriver",
|
|
3278
|
+
driver,
|
|
3279
|
+
availableDrivers: Object.keys(BACKUP_DRIVERS),
|
|
3280
|
+
suggestion: `Use one of the available drivers: ${Object.keys(BACKUP_DRIVERS).join(", ")}`
|
|
3281
|
+
});
|
|
2723
3282
|
}
|
|
2724
3283
|
return new DriverClass(config);
|
|
2725
3284
|
}
|
|
2726
3285
|
function validateBackupConfig(driver, config = {}) {
|
|
2727
3286
|
if (!driver || typeof driver !== "string") {
|
|
2728
|
-
throw new
|
|
3287
|
+
throw new BackupError("Driver type must be a non-empty string", {
|
|
3288
|
+
operation: "validateBackupConfig",
|
|
3289
|
+
driver,
|
|
3290
|
+
suggestion: "Provide a valid driver type string (filesystem, s3, or multi)"
|
|
3291
|
+
});
|
|
2729
3292
|
}
|
|
2730
3293
|
if (!BACKUP_DRIVERS[driver]) {
|
|
2731
|
-
throw new
|
|
3294
|
+
throw new BackupError(`Unknown backup driver: ${driver}`, {
|
|
3295
|
+
operation: "validateBackupConfig",
|
|
3296
|
+
driver,
|
|
3297
|
+
availableDrivers: Object.keys(BACKUP_DRIVERS),
|
|
3298
|
+
suggestion: `Use one of the available drivers: ${Object.keys(BACKUP_DRIVERS).join(", ")}`
|
|
3299
|
+
});
|
|
2732
3300
|
}
|
|
2733
3301
|
switch (driver) {
|
|
2734
3302
|
case "filesystem":
|
|
2735
3303
|
if (!config.path) {
|
|
2736
|
-
throw new
|
|
3304
|
+
throw new BackupError('FilesystemBackupDriver requires "path" configuration', {
|
|
3305
|
+
operation: "validateBackupConfig",
|
|
3306
|
+
driver: "filesystem",
|
|
3307
|
+
config,
|
|
3308
|
+
suggestion: 'Provide a "path" property in config: { path: "/path/to/backups" }'
|
|
3309
|
+
});
|
|
2737
3310
|
}
|
|
2738
3311
|
break;
|
|
2739
3312
|
case "s3":
|
|
2740
3313
|
break;
|
|
2741
3314
|
case "multi":
|
|
2742
3315
|
if (!Array.isArray(config.destinations) || config.destinations.length === 0) {
|
|
2743
|
-
throw new
|
|
3316
|
+
throw new BackupError('MultiBackupDriver requires non-empty "destinations" array', {
|
|
3317
|
+
operation: "validateBackupConfig",
|
|
3318
|
+
driver: "multi",
|
|
3319
|
+
config,
|
|
3320
|
+
suggestion: 'Provide destinations array: { destinations: [{ driver: "s3", config: {...} }] }'
|
|
3321
|
+
});
|
|
2744
3322
|
}
|
|
2745
3323
|
config.destinations.forEach((dest, index) => {
|
|
2746
3324
|
if (!dest.driver) {
|
|
2747
|
-
throw new
|
|
3325
|
+
throw new BackupError(`Destination ${index} must have a "driver" property`, {
|
|
3326
|
+
operation: "validateBackupConfig",
|
|
3327
|
+
driver: "multi",
|
|
3328
|
+
destinationIndex: index,
|
|
3329
|
+
destination: dest,
|
|
3330
|
+
suggestion: 'Each destination must have a driver property: { driver: "s3", config: {...} }'
|
|
3331
|
+
});
|
|
2748
3332
|
}
|
|
2749
3333
|
if (dest.driver !== "multi") {
|
|
2750
3334
|
validateBackupConfig(dest.driver, dest.config || {});
|
|
@@ -3400,6 +3984,36 @@ class BackupPlugin extends Plugin {
|
|
|
3400
3984
|
}
|
|
3401
3985
|
}
|
|
3402
3986
|
|
|
3987
|
+
class CacheError extends S3dbError {
|
|
3988
|
+
constructor(message, details = {}) {
|
|
3989
|
+
const { driver = "unknown", operation = "unknown", resourceName, key, ...rest } = details;
|
|
3990
|
+
let description = details.description;
|
|
3991
|
+
if (!description) {
|
|
3992
|
+
description = `
|
|
3993
|
+
Cache Operation Error
|
|
3994
|
+
|
|
3995
|
+
Driver: ${driver}
|
|
3996
|
+
Operation: ${operation}
|
|
3997
|
+
${resourceName ? `Resource: ${resourceName}` : ""}
|
|
3998
|
+
${key ? `Key: ${key}` : ""}
|
|
3999
|
+
|
|
4000
|
+
Common causes:
|
|
4001
|
+
1. Invalid cache key format
|
|
4002
|
+
2. Cache driver not properly initialized
|
|
4003
|
+
3. Resource not found or not cached
|
|
4004
|
+
4. Memory limits exceeded
|
|
4005
|
+
5. Filesystem permissions issues
|
|
4006
|
+
|
|
4007
|
+
Solution:
|
|
4008
|
+
Check cache configuration and ensure the cache driver is properly initialized.
|
|
4009
|
+
|
|
4010
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/cache.md
|
|
4011
|
+
`.trim();
|
|
4012
|
+
}
|
|
4013
|
+
super(message, { ...rest, driver, operation, resourceName, key, description });
|
|
4014
|
+
}
|
|
4015
|
+
}
|
|
4016
|
+
|
|
3403
4017
|
class Cache extends EventEmitter {
|
|
3404
4018
|
constructor(config = {}) {
|
|
3405
4019
|
super();
|
|
@@ -3416,7 +4030,13 @@ class Cache extends EventEmitter {
|
|
|
3416
4030
|
}
|
|
3417
4031
|
validateKey(key) {
|
|
3418
4032
|
if (key === null || key === void 0 || typeof key !== "string" || !key) {
|
|
3419
|
-
throw new
|
|
4033
|
+
throw new CacheError("Invalid cache key", {
|
|
4034
|
+
operation: "validateKey",
|
|
4035
|
+
driver: this.constructor.name,
|
|
4036
|
+
key,
|
|
4037
|
+
keyType: typeof key,
|
|
4038
|
+
suggestion: "Cache key must be a non-empty string"
|
|
4039
|
+
});
|
|
3420
4040
|
}
|
|
3421
4041
|
}
|
|
3422
4042
|
// generic class methods
|
|
@@ -3503,7 +4123,11 @@ class ResourceReader extends EventEmitter {
|
|
|
3503
4123
|
constructor({ resource, batchSize = 10, concurrency = 5 }) {
|
|
3504
4124
|
super();
|
|
3505
4125
|
if (!resource) {
|
|
3506
|
-
throw new
|
|
4126
|
+
throw new StreamError("Resource is required for ResourceReader", {
|
|
4127
|
+
operation: "constructor",
|
|
4128
|
+
resource: resource?.name,
|
|
4129
|
+
suggestion: "Pass a valid Resource instance when creating ResourceReader"
|
|
4130
|
+
});
|
|
3507
4131
|
}
|
|
3508
4132
|
this.resource = resource;
|
|
3509
4133
|
this.client = resource.client;
|
|
@@ -3627,7 +4251,10 @@ class ResourceWriter extends EventEmitter {
|
|
|
3627
4251
|
function streamToString(stream) {
|
|
3628
4252
|
return new Promise((resolve, reject) => {
|
|
3629
4253
|
if (!stream) {
|
|
3630
|
-
return reject(new
|
|
4254
|
+
return reject(new StreamError("Stream is undefined", {
|
|
4255
|
+
operation: "streamToString",
|
|
4256
|
+
suggestion: "Ensure a valid stream is passed to streamToString()"
|
|
4257
|
+
}));
|
|
3631
4258
|
}
|
|
3632
4259
|
const chunks = [];
|
|
3633
4260
|
stream.on("data", (chunk) => chunks.push(chunk));
|
|
@@ -5121,7 +5748,13 @@ class CachePlugin extends Plugin {
|
|
|
5121
5748
|
async warmCache(resourceName, options = {}) {
|
|
5122
5749
|
const resource = this.database.resources[resourceName];
|
|
5123
5750
|
if (!resource) {
|
|
5124
|
-
throw new
|
|
5751
|
+
throw new CacheError("Resource not found for cache warming", {
|
|
5752
|
+
operation: "warmCache",
|
|
5753
|
+
driver: this.driver?.constructor.name,
|
|
5754
|
+
resourceName,
|
|
5755
|
+
availableResources: Object.keys(this.database.resources),
|
|
5756
|
+
suggestion: "Check resource name spelling or ensure resource has been created"
|
|
5757
|
+
});
|
|
5125
5758
|
}
|
|
5126
5759
|
const { includePartitions = true, sampleSize = 100 } = options;
|
|
5127
5760
|
if (this.driver instanceof PartitionAwareFilesystemCache && resource.warmPartitionCache) {
|
|
@@ -8238,6 +8871,35 @@ class EventualConsistencyPlugin extends Plugin {
|
|
|
8238
8871
|
}
|
|
8239
8872
|
}
|
|
8240
8873
|
|
|
8874
|
+
class FulltextError extends S3dbError {
|
|
8875
|
+
constructor(message, details = {}) {
|
|
8876
|
+
const { resourceName, query, operation = "unknown", ...rest } = details;
|
|
8877
|
+
let description = details.description;
|
|
8878
|
+
if (!description) {
|
|
8879
|
+
description = `
|
|
8880
|
+
Fulltext Search Operation Error
|
|
8881
|
+
|
|
8882
|
+
Operation: ${operation}
|
|
8883
|
+
${resourceName ? `Resource: ${resourceName}` : ""}
|
|
8884
|
+
${query ? `Query: ${query}` : ""}
|
|
8885
|
+
|
|
8886
|
+
Common causes:
|
|
8887
|
+
1. Resource not indexed for fulltext search
|
|
8888
|
+
2. Invalid query syntax
|
|
8889
|
+
3. Index not built yet
|
|
8890
|
+
4. Search configuration missing
|
|
8891
|
+
5. Field not indexed
|
|
8892
|
+
|
|
8893
|
+
Solution:
|
|
8894
|
+
Ensure resource is configured for fulltext search and index is built.
|
|
8895
|
+
|
|
8896
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/fulltext.md
|
|
8897
|
+
`.trim();
|
|
8898
|
+
}
|
|
8899
|
+
super(message, { ...rest, resourceName, query, operation, description });
|
|
8900
|
+
}
|
|
8901
|
+
}
|
|
8902
|
+
|
|
8241
8903
|
class FullTextPlugin extends Plugin {
|
|
8242
8904
|
constructor(options = {}) {
|
|
8243
8905
|
super();
|
|
@@ -8544,7 +9206,13 @@ class FullTextPlugin extends Plugin {
|
|
|
8544
9206
|
}
|
|
8545
9207
|
const resource = this.database.resources[resourceName];
|
|
8546
9208
|
if (!resource) {
|
|
8547
|
-
throw new
|
|
9209
|
+
throw new FulltextError(`Resource '${resourceName}' not found`, {
|
|
9210
|
+
operation: "searchRecords",
|
|
9211
|
+
resourceName,
|
|
9212
|
+
query,
|
|
9213
|
+
availableResources: Object.keys(this.database.resources),
|
|
9214
|
+
suggestion: "Check resource name or ensure resource is created before searching"
|
|
9215
|
+
});
|
|
8548
9216
|
}
|
|
8549
9217
|
const recordIds = searchResults.map((result2) => result2.recordId);
|
|
8550
9218
|
const records = await resource.getMany(recordIds);
|
|
@@ -8561,7 +9229,12 @@ class FullTextPlugin extends Plugin {
|
|
|
8561
9229
|
async rebuildIndex(resourceName) {
|
|
8562
9230
|
const resource = this.database.resources[resourceName];
|
|
8563
9231
|
if (!resource) {
|
|
8564
|
-
throw new
|
|
9232
|
+
throw new FulltextError(`Resource '${resourceName}' not found`, {
|
|
9233
|
+
operation: "rebuildIndex",
|
|
9234
|
+
resourceName,
|
|
9235
|
+
availableResources: Object.keys(this.database.resources),
|
|
9236
|
+
suggestion: "Check resource name or ensure resource is created before rebuilding index"
|
|
9237
|
+
});
|
|
8565
9238
|
}
|
|
8566
9239
|
for (const [key] of this.indexes.entries()) {
|
|
8567
9240
|
if (key.startsWith(`${resourceName}:`)) {
|
|
@@ -9346,6 +10019,35 @@ function createConsumer(driver, config) {
|
|
|
9346
10019
|
return new ConsumerClass(config);
|
|
9347
10020
|
}
|
|
9348
10021
|
|
|
10022
|
+
class QueueError extends S3dbError {
|
|
10023
|
+
constructor(message, details = {}) {
|
|
10024
|
+
const { queueName, operation = "unknown", messageId, ...rest } = details;
|
|
10025
|
+
let description = details.description;
|
|
10026
|
+
if (!description) {
|
|
10027
|
+
description = `
|
|
10028
|
+
Queue Operation Error
|
|
10029
|
+
|
|
10030
|
+
Operation: ${operation}
|
|
10031
|
+
${queueName ? `Queue: ${queueName}` : ""}
|
|
10032
|
+
${messageId ? `Message ID: ${messageId}` : ""}
|
|
10033
|
+
|
|
10034
|
+
Common causes:
|
|
10035
|
+
1. Queue not properly configured
|
|
10036
|
+
2. Message handler not registered
|
|
10037
|
+
3. Queue resource not found
|
|
10038
|
+
4. SQS/RabbitMQ connection failed
|
|
10039
|
+
5. Message processing timeout
|
|
10040
|
+
|
|
10041
|
+
Solution:
|
|
10042
|
+
Check queue configuration and message handler registration.
|
|
10043
|
+
|
|
10044
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/queue.md
|
|
10045
|
+
`.trim();
|
|
10046
|
+
}
|
|
10047
|
+
super(message, { ...rest, queueName, operation, messageId, description });
|
|
10048
|
+
}
|
|
10049
|
+
}
|
|
10050
|
+
|
|
9349
10051
|
class QueueConsumerPlugin extends Plugin {
|
|
9350
10052
|
constructor(options = {}) {
|
|
9351
10053
|
super(options);
|
|
@@ -9406,13 +10108,32 @@ class QueueConsumerPlugin extends Plugin {
|
|
|
9406
10108
|
let action = body.action || msg.action;
|
|
9407
10109
|
let data = body.data || msg.data;
|
|
9408
10110
|
if (!resource) {
|
|
9409
|
-
throw new
|
|
10111
|
+
throw new QueueError("Resource not found in message", {
|
|
10112
|
+
operation: "handleMessage",
|
|
10113
|
+
queueName: configuredResource,
|
|
10114
|
+
messageBody: body,
|
|
10115
|
+
suggestion: 'Ensure message includes a "resource" field specifying the target resource name'
|
|
10116
|
+
});
|
|
9410
10117
|
}
|
|
9411
10118
|
if (!action) {
|
|
9412
|
-
throw new
|
|
10119
|
+
throw new QueueError("Action not found in message", {
|
|
10120
|
+
operation: "handleMessage",
|
|
10121
|
+
queueName: configuredResource,
|
|
10122
|
+
resource,
|
|
10123
|
+
messageBody: body,
|
|
10124
|
+
suggestion: 'Ensure message includes an "action" field (insert, update, or delete)'
|
|
10125
|
+
});
|
|
9413
10126
|
}
|
|
9414
10127
|
const resourceObj = this.database.resources[resource];
|
|
9415
|
-
if (!resourceObj)
|
|
10128
|
+
if (!resourceObj) {
|
|
10129
|
+
throw new QueueError(`Resource '${resource}' not found`, {
|
|
10130
|
+
operation: "handleMessage",
|
|
10131
|
+
queueName: configuredResource,
|
|
10132
|
+
resource,
|
|
10133
|
+
availableResources: Object.keys(this.database.resources),
|
|
10134
|
+
suggestion: "Check resource name or ensure resource is created before consuming messages"
|
|
10135
|
+
});
|
|
10136
|
+
}
|
|
9416
10137
|
let result;
|
|
9417
10138
|
const [ok, err, res] = await tryFn(async () => {
|
|
9418
10139
|
if (action === "insert") {
|
|
@@ -9423,7 +10144,14 @@ class QueueConsumerPlugin extends Plugin {
|
|
|
9423
10144
|
} else if (action === "delete") {
|
|
9424
10145
|
result = await resourceObj.delete(data.id);
|
|
9425
10146
|
} else {
|
|
9426
|
-
throw new
|
|
10147
|
+
throw new QueueError(`Unsupported action '${action}'`, {
|
|
10148
|
+
operation: "handleMessage",
|
|
10149
|
+
queueName: configuredResource,
|
|
10150
|
+
resource,
|
|
10151
|
+
action,
|
|
10152
|
+
supportedActions: ["insert", "update", "delete"],
|
|
10153
|
+
suggestion: "Use one of the supported actions: insert, update, or delete"
|
|
10154
|
+
});
|
|
9427
10155
|
}
|
|
9428
10156
|
return result;
|
|
9429
10157
|
});
|
|
@@ -9436,6 +10164,35 @@ class QueueConsumerPlugin extends Plugin {
|
|
|
9436
10164
|
}
|
|
9437
10165
|
}
|
|
9438
10166
|
|
|
10167
|
+
class ReplicationError extends S3dbError {
|
|
10168
|
+
constructor(message, details = {}) {
|
|
10169
|
+
const { replicatorClass = "unknown", operation = "unknown", resourceName, ...rest } = details;
|
|
10170
|
+
let description = details.description;
|
|
10171
|
+
if (!description) {
|
|
10172
|
+
description = `
|
|
10173
|
+
Replication Operation Error
|
|
10174
|
+
|
|
10175
|
+
Replicator: ${replicatorClass}
|
|
10176
|
+
Operation: ${operation}
|
|
10177
|
+
${resourceName ? `Resource: ${resourceName}` : ""}
|
|
10178
|
+
|
|
10179
|
+
Common causes:
|
|
10180
|
+
1. Invalid replicator configuration
|
|
10181
|
+
2. Target system not accessible
|
|
10182
|
+
3. Resource not configured for replication
|
|
10183
|
+
4. Invalid operation type
|
|
10184
|
+
5. Transformation function errors
|
|
10185
|
+
|
|
10186
|
+
Solution:
|
|
10187
|
+
Check replicator configuration and ensure target system is accessible.
|
|
10188
|
+
|
|
10189
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/replicator.md
|
|
10190
|
+
`.trim();
|
|
10191
|
+
}
|
|
10192
|
+
super(message, { ...rest, replicatorClass, operation, resourceName, description });
|
|
10193
|
+
}
|
|
10194
|
+
}
|
|
10195
|
+
|
|
9439
10196
|
class BaseReplicator extends EventEmitter {
|
|
9440
10197
|
constructor(config = {}) {
|
|
9441
10198
|
super();
|
|
@@ -9461,7 +10218,12 @@ class BaseReplicator extends EventEmitter {
|
|
|
9461
10218
|
* @returns {Promise<Object>} replicator result
|
|
9462
10219
|
*/
|
|
9463
10220
|
async replicate(resourceName, operation, data, id) {
|
|
9464
|
-
throw new
|
|
10221
|
+
throw new ReplicationError("replicate() method must be implemented by subclass", {
|
|
10222
|
+
operation: "replicate",
|
|
10223
|
+
replicatorClass: this.name,
|
|
10224
|
+
resourceName,
|
|
10225
|
+
suggestion: "Extend BaseReplicator and implement the replicate() method"
|
|
10226
|
+
});
|
|
9465
10227
|
}
|
|
9466
10228
|
/**
|
|
9467
10229
|
* Replicate multiple records in batch
|
|
@@ -9470,14 +10232,24 @@ class BaseReplicator extends EventEmitter {
|
|
|
9470
10232
|
* @returns {Promise<Object>} Batch replicator result
|
|
9471
10233
|
*/
|
|
9472
10234
|
async replicateBatch(resourceName, records) {
|
|
9473
|
-
throw new
|
|
10235
|
+
throw new ReplicationError("replicateBatch() method must be implemented by subclass", {
|
|
10236
|
+
operation: "replicateBatch",
|
|
10237
|
+
replicatorClass: this.name,
|
|
10238
|
+
resourceName,
|
|
10239
|
+
batchSize: records?.length,
|
|
10240
|
+
suggestion: "Extend BaseReplicator and implement the replicateBatch() method"
|
|
10241
|
+
});
|
|
9474
10242
|
}
|
|
9475
10243
|
/**
|
|
9476
10244
|
* Test the connection to the target
|
|
9477
10245
|
* @returns {Promise<boolean>} True if connection is successful
|
|
9478
10246
|
*/
|
|
9479
10247
|
async testConnection() {
|
|
9480
|
-
throw new
|
|
10248
|
+
throw new ReplicationError("testConnection() method must be implemented by subclass", {
|
|
10249
|
+
operation: "testConnection",
|
|
10250
|
+
replicatorClass: this.name,
|
|
10251
|
+
suggestion: "Extend BaseReplicator and implement the testConnection() method"
|
|
10252
|
+
});
|
|
9481
10253
|
}
|
|
9482
10254
|
/**
|
|
9483
10255
|
* Get replicator status and statistics
|
|
@@ -10649,7 +11421,17 @@ class Client extends EventEmitter {
|
|
|
10649
11421
|
});
|
|
10650
11422
|
this.emit("moveAllObjects", { results, errors }, { prefixFrom, prefixTo });
|
|
10651
11423
|
if (errors.length > 0) {
|
|
10652
|
-
throw new
|
|
11424
|
+
throw new UnknownError("Some objects could not be moved", {
|
|
11425
|
+
bucket: this.config.bucket,
|
|
11426
|
+
operation: "moveAllObjects",
|
|
11427
|
+
prefixFrom,
|
|
11428
|
+
prefixTo,
|
|
11429
|
+
totalKeys: keys.length,
|
|
11430
|
+
failedCount: errors.length,
|
|
11431
|
+
successCount: results.length,
|
|
11432
|
+
errors: errors.map((e) => ({ message: e.message, raw: e.raw })),
|
|
11433
|
+
suggestion: "Check S3 permissions and retry failed objects individually"
|
|
11434
|
+
});
|
|
10653
11435
|
}
|
|
10654
11436
|
return results;
|
|
10655
11437
|
}
|
|
@@ -10758,6 +11540,11 @@ class Validator extends FastestValidator {
|
|
|
10758
11540
|
type: "any",
|
|
10759
11541
|
custom: this.autoEncrypt ? jsonHandler : void 0
|
|
10760
11542
|
});
|
|
11543
|
+
this.alias("embedding", {
|
|
11544
|
+
type: "array",
|
|
11545
|
+
items: "number",
|
|
11546
|
+
empty: false
|
|
11547
|
+
});
|
|
10761
11548
|
}
|
|
10762
11549
|
}
|
|
10763
11550
|
const ValidatorManager = new Proxy(Validator, {
|
|
@@ -11006,6 +11793,59 @@ const SchemaActions = {
|
|
|
11006
11793
|
}
|
|
11007
11794
|
return NaN;
|
|
11008
11795
|
});
|
|
11796
|
+
},
|
|
11797
|
+
fromArrayOfEmbeddings: (value, { separator, precision = 6 }) => {
|
|
11798
|
+
if (value === null || value === void 0 || !Array.isArray(value)) {
|
|
11799
|
+
return value;
|
|
11800
|
+
}
|
|
11801
|
+
if (value.length === 0) {
|
|
11802
|
+
return "";
|
|
11803
|
+
}
|
|
11804
|
+
const encodedItems = value.map((item) => {
|
|
11805
|
+
if (typeof item === "number" && !isNaN(item)) {
|
|
11806
|
+
return encodeFixedPoint(item, precision);
|
|
11807
|
+
}
|
|
11808
|
+
const n = Number(item);
|
|
11809
|
+
return isNaN(n) ? "" : encodeFixedPoint(n, precision);
|
|
11810
|
+
});
|
|
11811
|
+
return encodedItems.join(separator);
|
|
11812
|
+
},
|
|
11813
|
+
toArrayOfEmbeddings: (value, { separator, precision = 6 }) => {
|
|
11814
|
+
if (Array.isArray(value)) {
|
|
11815
|
+
return value.map((v) => typeof v === "number" ? v : decodeFixedPoint(v, precision));
|
|
11816
|
+
}
|
|
11817
|
+
if (value === null || value === void 0) {
|
|
11818
|
+
return value;
|
|
11819
|
+
}
|
|
11820
|
+
if (value === "") {
|
|
11821
|
+
return [];
|
|
11822
|
+
}
|
|
11823
|
+
const str = String(value);
|
|
11824
|
+
const items = [];
|
|
11825
|
+
let current = "";
|
|
11826
|
+
let i = 0;
|
|
11827
|
+
while (i < str.length) {
|
|
11828
|
+
if (str[i] === "\\" && i + 1 < str.length) {
|
|
11829
|
+
current += str[i + 1];
|
|
11830
|
+
i += 2;
|
|
11831
|
+
} else if (str[i] === separator) {
|
|
11832
|
+
items.push(current);
|
|
11833
|
+
current = "";
|
|
11834
|
+
i++;
|
|
11835
|
+
} else {
|
|
11836
|
+
current += str[i];
|
|
11837
|
+
i++;
|
|
11838
|
+
}
|
|
11839
|
+
}
|
|
11840
|
+
items.push(current);
|
|
11841
|
+
return items.map((v) => {
|
|
11842
|
+
if (typeof v === "number") return v;
|
|
11843
|
+
if (typeof v === "string" && v !== "") {
|
|
11844
|
+
const n = decodeFixedPoint(v, precision);
|
|
11845
|
+
return isNaN(n) ? NaN : n;
|
|
11846
|
+
}
|
|
11847
|
+
return NaN;
|
|
11848
|
+
});
|
|
11009
11849
|
}
|
|
11010
11850
|
};
|
|
11011
11851
|
class Schema {
|
|
@@ -11075,18 +11915,89 @@ class Schema {
|
|
|
11075
11915
|
}
|
|
11076
11916
|
return objectKeys;
|
|
11077
11917
|
}
|
|
11918
|
+
_generateHooksFromOriginalAttributes(attributes, prefix = "") {
|
|
11919
|
+
for (const [key, value] of Object.entries(attributes)) {
|
|
11920
|
+
if (key.startsWith("$$")) continue;
|
|
11921
|
+
const fullKey = prefix ? `${prefix}.${key}` : key;
|
|
11922
|
+
if (typeof value === "object" && value !== null && !Array.isArray(value) && value.type) {
|
|
11923
|
+
if (value.type === "array" && value.items) {
|
|
11924
|
+
const itemsType = value.items;
|
|
11925
|
+
const arrayLength = typeof value.length === "number" ? value.length : null;
|
|
11926
|
+
if (itemsType === "string" || typeof itemsType === "string" && itemsType.includes("string")) {
|
|
11927
|
+
this.addHook("beforeMap", fullKey, "fromArray");
|
|
11928
|
+
this.addHook("afterUnmap", fullKey, "toArray");
|
|
11929
|
+
} else if (itemsType === "number" || typeof itemsType === "string" && itemsType.includes("number")) {
|
|
11930
|
+
const isIntegerArray = typeof itemsType === "string" && itemsType.includes("integer");
|
|
11931
|
+
const isEmbedding = !isIntegerArray && arrayLength !== null && arrayLength >= 256;
|
|
11932
|
+
if (isIntegerArray) {
|
|
11933
|
+
this.addHook("beforeMap", fullKey, "fromArrayOfNumbers");
|
|
11934
|
+
this.addHook("afterUnmap", fullKey, "toArrayOfNumbers");
|
|
11935
|
+
} else if (isEmbedding) {
|
|
11936
|
+
this.addHook("beforeMap", fullKey, "fromArrayOfEmbeddings");
|
|
11937
|
+
this.addHook("afterUnmap", fullKey, "toArrayOfEmbeddings");
|
|
11938
|
+
} else {
|
|
11939
|
+
this.addHook("beforeMap", fullKey, "fromArrayOfDecimals");
|
|
11940
|
+
this.addHook("afterUnmap", fullKey, "toArrayOfDecimals");
|
|
11941
|
+
}
|
|
11942
|
+
}
|
|
11943
|
+
}
|
|
11944
|
+
} else if (typeof value === "object" && value !== null && !Array.isArray(value) && !value.type) {
|
|
11945
|
+
this._generateHooksFromOriginalAttributes(value, fullKey);
|
|
11946
|
+
}
|
|
11947
|
+
}
|
|
11948
|
+
}
|
|
11078
11949
|
generateAutoHooks() {
|
|
11950
|
+
this._generateHooksFromOriginalAttributes(this.attributes);
|
|
11079
11951
|
const schema = flat.flatten(lodashEs.cloneDeep(this.attributes), { safe: true });
|
|
11080
11952
|
for (const [name, definition] of Object.entries(schema)) {
|
|
11081
|
-
if (
|
|
11082
|
-
|
|
11953
|
+
if (name.includes("$$")) continue;
|
|
11954
|
+
if (this.options.hooks.beforeMap[name] || this.options.hooks.afterUnmap[name]) {
|
|
11955
|
+
continue;
|
|
11956
|
+
}
|
|
11957
|
+
const defStr = typeof definition === "string" ? definition : "";
|
|
11958
|
+
const defType = typeof definition === "object" && definition !== null ? definition.type : null;
|
|
11959
|
+
const isEmbeddingType = defStr.includes("embedding") || defType === "embedding";
|
|
11960
|
+
if (isEmbeddingType) {
|
|
11961
|
+
const lengthMatch = defStr.match(/embedding:(\d+)/);
|
|
11962
|
+
if (lengthMatch) {
|
|
11963
|
+
parseInt(lengthMatch[1], 10);
|
|
11964
|
+
} else if (defStr.includes("length:")) {
|
|
11965
|
+
const match = defStr.match(/length:(\d+)/);
|
|
11966
|
+
if (match) parseInt(match[1], 10);
|
|
11967
|
+
}
|
|
11968
|
+
this.addHook("beforeMap", name, "fromArrayOfEmbeddings");
|
|
11969
|
+
this.addHook("afterUnmap", name, "toArrayOfEmbeddings");
|
|
11970
|
+
continue;
|
|
11971
|
+
}
|
|
11972
|
+
const isArray = defStr.includes("array") || defType === "array";
|
|
11973
|
+
if (isArray) {
|
|
11974
|
+
let itemsType = null;
|
|
11975
|
+
if (typeof definition === "object" && definition !== null && definition.items) {
|
|
11976
|
+
itemsType = definition.items;
|
|
11977
|
+
} else if (defStr.includes("items:string")) {
|
|
11978
|
+
itemsType = "string";
|
|
11979
|
+
} else if (defStr.includes("items:number")) {
|
|
11980
|
+
itemsType = "number";
|
|
11981
|
+
}
|
|
11982
|
+
if (itemsType === "string" || typeof itemsType === "string" && itemsType.includes("string")) {
|
|
11083
11983
|
this.addHook("beforeMap", name, "fromArray");
|
|
11084
11984
|
this.addHook("afterUnmap", name, "toArray");
|
|
11085
|
-
} else if (
|
|
11086
|
-
const isIntegerArray =
|
|
11985
|
+
} else if (itemsType === "number" || typeof itemsType === "string" && itemsType.includes("number")) {
|
|
11986
|
+
const isIntegerArray = defStr.includes("integer:true") || defStr.includes("|integer:") || defStr.includes("|integer") || typeof itemsType === "string" && itemsType.includes("integer");
|
|
11987
|
+
let arrayLength = null;
|
|
11988
|
+
if (typeof definition === "object" && definition !== null && typeof definition.length === "number") {
|
|
11989
|
+
arrayLength = definition.length;
|
|
11990
|
+
} else if (defStr.includes("length:")) {
|
|
11991
|
+
const match = defStr.match(/length:(\d+)/);
|
|
11992
|
+
if (match) arrayLength = parseInt(match[1], 10);
|
|
11993
|
+
}
|
|
11994
|
+
const isEmbedding = !isIntegerArray && arrayLength !== null && arrayLength >= 256;
|
|
11087
11995
|
if (isIntegerArray) {
|
|
11088
11996
|
this.addHook("beforeMap", name, "fromArrayOfNumbers");
|
|
11089
11997
|
this.addHook("afterUnmap", name, "toArrayOfNumbers");
|
|
11998
|
+
} else if (isEmbedding) {
|
|
11999
|
+
this.addHook("beforeMap", name, "fromArrayOfEmbeddings");
|
|
12000
|
+
this.addHook("afterUnmap", name, "toArrayOfEmbeddings");
|
|
11090
12001
|
} else {
|
|
11091
12002
|
this.addHook("beforeMap", name, "fromArrayOfDecimals");
|
|
11092
12003
|
this.addHook("afterUnmap", name, "toArrayOfDecimals");
|
|
@@ -11094,7 +12005,7 @@ class Schema {
|
|
|
11094
12005
|
}
|
|
11095
12006
|
continue;
|
|
11096
12007
|
}
|
|
11097
|
-
if (
|
|
12008
|
+
if (defStr.includes("secret") || defType === "secret") {
|
|
11098
12009
|
if (this.options.autoEncrypt) {
|
|
11099
12010
|
this.addHook("beforeMap", name, "encrypt");
|
|
11100
12011
|
}
|
|
@@ -11103,8 +12014,8 @@ class Schema {
|
|
|
11103
12014
|
}
|
|
11104
12015
|
continue;
|
|
11105
12016
|
}
|
|
11106
|
-
if (
|
|
11107
|
-
const isInteger =
|
|
12017
|
+
if (defStr.includes("number") || defType === "number") {
|
|
12018
|
+
const isInteger = defStr.includes("integer:true") || defStr.includes("|integer:") || defStr.includes("|integer");
|
|
11108
12019
|
if (isInteger) {
|
|
11109
12020
|
this.addHook("beforeMap", name, "toBase62");
|
|
11110
12021
|
this.addHook("afterUnmap", name, "fromBase62");
|
|
@@ -11114,17 +12025,17 @@ class Schema {
|
|
|
11114
12025
|
}
|
|
11115
12026
|
continue;
|
|
11116
12027
|
}
|
|
11117
|
-
if (
|
|
12028
|
+
if (defStr.includes("boolean") || defType === "boolean") {
|
|
11118
12029
|
this.addHook("beforeMap", name, "fromBool");
|
|
11119
12030
|
this.addHook("afterUnmap", name, "toBool");
|
|
11120
12031
|
continue;
|
|
11121
12032
|
}
|
|
11122
|
-
if (
|
|
12033
|
+
if (defStr.includes("json") || defType === "json") {
|
|
11123
12034
|
this.addHook("beforeMap", name, "toJSON");
|
|
11124
12035
|
this.addHook("afterUnmap", name, "fromJSON");
|
|
11125
12036
|
continue;
|
|
11126
12037
|
}
|
|
11127
|
-
if (definition === "object" ||
|
|
12038
|
+
if (definition === "object" || defStr.includes("object") || defType === "object") {
|
|
11128
12039
|
this.addHook("beforeMap", name, "toJSON");
|
|
11129
12040
|
this.addHook("afterUnmap", name, "fromJSON");
|
|
11130
12041
|
continue;
|
|
@@ -11266,7 +12177,8 @@ class Schema {
|
|
|
11266
12177
|
const originalKey = reversedMap && reversedMap[key] ? reversedMap[key] : key;
|
|
11267
12178
|
let parsedValue = value;
|
|
11268
12179
|
const attrDef = this.getAttributeDefinition(originalKey);
|
|
11269
|
-
|
|
12180
|
+
const hasAfterUnmapHook = this.options.hooks?.afterUnmap?.[originalKey];
|
|
12181
|
+
if (!hasAfterUnmapHook && typeof attrDef === "string" && attrDef.includes("number") && !attrDef.includes("array") && !attrDef.includes("decimal")) {
|
|
11270
12182
|
if (typeof parsedValue === "string" && parsedValue !== "") {
|
|
11271
12183
|
parsedValue = decode(parsedValue);
|
|
11272
12184
|
} else if (typeof parsedValue === "number") ; else {
|
|
@@ -11331,18 +12243,38 @@ class Schema {
|
|
|
11331
12243
|
preprocessAttributesForValidation(attributes) {
|
|
11332
12244
|
const processed = {};
|
|
11333
12245
|
for (const [key, value] of Object.entries(attributes)) {
|
|
11334
|
-
if (typeof value === "
|
|
11335
|
-
|
|
11336
|
-
|
|
11337
|
-
|
|
11338
|
-
|
|
11339
|
-
|
|
11340
|
-
|
|
11341
|
-
|
|
11342
|
-
|
|
11343
|
-
|
|
12246
|
+
if (typeof value === "string") {
|
|
12247
|
+
if (value.startsWith("embedding:")) {
|
|
12248
|
+
const lengthMatch = value.match(/embedding:(\d+)/);
|
|
12249
|
+
if (lengthMatch) {
|
|
12250
|
+
const length = lengthMatch[1];
|
|
12251
|
+
const rest = value.substring(`embedding:${length}`.length);
|
|
12252
|
+
processed[key] = `array|items:number|length:${length}|empty:false${rest}`;
|
|
12253
|
+
continue;
|
|
12254
|
+
}
|
|
12255
|
+
}
|
|
12256
|
+
if (value.startsWith("embedding|") || value === "embedding") {
|
|
12257
|
+
processed[key] = value.replace(/^embedding/, "array|items:number|empty:false");
|
|
12258
|
+
continue;
|
|
12259
|
+
}
|
|
12260
|
+
processed[key] = value;
|
|
12261
|
+
} else if (typeof value === "object" && value !== null && !Array.isArray(value)) {
|
|
12262
|
+
const hasValidatorType = value.type !== void 0 && key !== "$$type";
|
|
12263
|
+
if (hasValidatorType) {
|
|
12264
|
+
processed[key] = value;
|
|
12265
|
+
} else {
|
|
12266
|
+
const isExplicitRequired = value.$$type && value.$$type.includes("required");
|
|
12267
|
+
const isExplicitOptional = value.$$type && value.$$type.includes("optional");
|
|
12268
|
+
const objectConfig = {
|
|
12269
|
+
type: "object",
|
|
12270
|
+
properties: this.preprocessAttributesForValidation(value),
|
|
12271
|
+
strict: false
|
|
12272
|
+
};
|
|
12273
|
+
if (isExplicitRequired) ; else if (isExplicitOptional || this.allNestedObjectsOptional) {
|
|
12274
|
+
objectConfig.optional = true;
|
|
12275
|
+
}
|
|
12276
|
+
processed[key] = objectConfig;
|
|
11344
12277
|
}
|
|
11345
|
-
processed[key] = objectConfig;
|
|
11346
12278
|
} else {
|
|
11347
12279
|
processed[key] = value;
|
|
11348
12280
|
}
|
|
@@ -11363,7 +12295,14 @@ async function handleInsert$4({ resource, data, mappedData, originalData }) {
|
|
|
11363
12295
|
}
|
|
11364
12296
|
});
|
|
11365
12297
|
if (totalSize > effectiveLimit) {
|
|
11366
|
-
throw new
|
|
12298
|
+
throw new MetadataLimitError("Metadata size exceeds 2KB limit on insert", {
|
|
12299
|
+
totalSize,
|
|
12300
|
+
effectiveLimit,
|
|
12301
|
+
absoluteLimit: S3_METADATA_LIMIT_BYTES,
|
|
12302
|
+
excess: totalSize - effectiveLimit,
|
|
12303
|
+
resourceName: resource.name,
|
|
12304
|
+
operation: "insert"
|
|
12305
|
+
});
|
|
11367
12306
|
}
|
|
11368
12307
|
return { mappedData, body: "" };
|
|
11369
12308
|
}
|
|
@@ -11378,7 +12317,15 @@ async function handleUpdate$4({ resource, id, data, mappedData, originalData })
|
|
|
11378
12317
|
}
|
|
11379
12318
|
});
|
|
11380
12319
|
if (totalSize > effectiveLimit) {
|
|
11381
|
-
throw new
|
|
12320
|
+
throw new MetadataLimitError("Metadata size exceeds 2KB limit on update", {
|
|
12321
|
+
totalSize,
|
|
12322
|
+
effectiveLimit,
|
|
12323
|
+
absoluteLimit: S3_METADATA_LIMIT_BYTES,
|
|
12324
|
+
excess: totalSize - effectiveLimit,
|
|
12325
|
+
resourceName: resource.name,
|
|
12326
|
+
operation: "update",
|
|
12327
|
+
id
|
|
12328
|
+
});
|
|
11382
12329
|
}
|
|
11383
12330
|
return { mappedData, body: JSON.stringify(mappedData) };
|
|
11384
12331
|
}
|
|
@@ -11393,7 +12340,15 @@ async function handleUpsert$4({ resource, id, data, mappedData }) {
|
|
|
11393
12340
|
}
|
|
11394
12341
|
});
|
|
11395
12342
|
if (totalSize > effectiveLimit) {
|
|
11396
|
-
throw new
|
|
12343
|
+
throw new MetadataLimitError("Metadata size exceeds 2KB limit on upsert", {
|
|
12344
|
+
totalSize,
|
|
12345
|
+
effectiveLimit,
|
|
12346
|
+
absoluteLimit: S3_METADATA_LIMIT_BYTES,
|
|
12347
|
+
excess: totalSize - effectiveLimit,
|
|
12348
|
+
resourceName: resource.name,
|
|
12349
|
+
operation: "upsert",
|
|
12350
|
+
id
|
|
12351
|
+
});
|
|
11397
12352
|
}
|
|
11398
12353
|
return { mappedData, body: "" };
|
|
11399
12354
|
}
|
|
@@ -11735,7 +12690,11 @@ const behaviors = {
|
|
|
11735
12690
|
function getBehavior(behaviorName) {
|
|
11736
12691
|
const behavior = behaviors[behaviorName];
|
|
11737
12692
|
if (!behavior) {
|
|
11738
|
-
throw new
|
|
12693
|
+
throw new BehaviorError(`Unknown behavior: ${behaviorName}`, {
|
|
12694
|
+
behavior: behaviorName,
|
|
12695
|
+
availableBehaviors: Object.keys(behaviors),
|
|
12696
|
+
operation: "getBehavior"
|
|
12697
|
+
});
|
|
11739
12698
|
}
|
|
11740
12699
|
return behavior;
|
|
11741
12700
|
}
|
|
@@ -14259,7 +15218,7 @@ class Database extends EventEmitter {
|
|
|
14259
15218
|
this.id = idGenerator(7);
|
|
14260
15219
|
this.version = "1";
|
|
14261
15220
|
this.s3dbVersion = (() => {
|
|
14262
|
-
const [ok, err, version] = tryFn(() => true ? "11.2.
|
|
15221
|
+
const [ok, err, version] = tryFn(() => true ? "11.2.5" : "latest");
|
|
14263
15222
|
return ok ? version : "latest";
|
|
14264
15223
|
})();
|
|
14265
15224
|
this.resources = {};
|
|
@@ -14604,7 +15563,12 @@ class Database extends EventEmitter {
|
|
|
14604
15563
|
const pluginName = name.toLowerCase().replace("plugin", "");
|
|
14605
15564
|
const plugin = this.plugins[pluginName] || this.pluginRegistry[pluginName];
|
|
14606
15565
|
if (!plugin) {
|
|
14607
|
-
throw new
|
|
15566
|
+
throw new DatabaseError(`Plugin '${name}' not found`, {
|
|
15567
|
+
operation: "uninstallPlugin",
|
|
15568
|
+
pluginName: name,
|
|
15569
|
+
availablePlugins: Object.keys(this.pluginRegistry),
|
|
15570
|
+
suggestion: "Check plugin name or list available plugins using Object.keys(db.pluginRegistry)"
|
|
15571
|
+
});
|
|
14608
15572
|
}
|
|
14609
15573
|
if (plugin.stop) {
|
|
14610
15574
|
await plugin.stop();
|
|
@@ -15237,10 +16201,20 @@ class Database extends EventEmitter {
|
|
|
15237
16201
|
addHook(event, fn) {
|
|
15238
16202
|
if (!this._hooks) this._initHooks();
|
|
15239
16203
|
if (!this._hooks.has(event)) {
|
|
15240
|
-
throw new
|
|
16204
|
+
throw new DatabaseError(`Unknown hook event: ${event}`, {
|
|
16205
|
+
operation: "addHook",
|
|
16206
|
+
invalidEvent: event,
|
|
16207
|
+
availableEvents: this._hookEvents,
|
|
16208
|
+
suggestion: `Use one of the available hook events: ${this._hookEvents.join(", ")}`
|
|
16209
|
+
});
|
|
15241
16210
|
}
|
|
15242
16211
|
if (typeof fn !== "function") {
|
|
15243
|
-
throw new
|
|
16212
|
+
throw new DatabaseError("Hook function must be a function", {
|
|
16213
|
+
operation: "addHook",
|
|
16214
|
+
event,
|
|
16215
|
+
receivedType: typeof fn,
|
|
16216
|
+
suggestion: "Provide a function that will be called when the hook event occurs"
|
|
16217
|
+
});
|
|
15244
16218
|
}
|
|
15245
16219
|
this._hooks.get(event).push(fn);
|
|
15246
16220
|
}
|
|
@@ -15378,7 +16352,11 @@ class S3dbReplicator extends BaseReplicator {
|
|
|
15378
16352
|
this.targetDatabase = new S3db(targetConfig);
|
|
15379
16353
|
await this.targetDatabase.connect();
|
|
15380
16354
|
} else {
|
|
15381
|
-
throw new
|
|
16355
|
+
throw new ReplicationError("S3dbReplicator requires client or connectionString", {
|
|
16356
|
+
operation: "initialize",
|
|
16357
|
+
replicatorClass: "S3dbReplicator",
|
|
16358
|
+
suggestion: 'Provide either a client instance or connectionString in config: { client: db } or { connectionString: "s3://..." }'
|
|
16359
|
+
});
|
|
15382
16360
|
}
|
|
15383
16361
|
this.emit("connected", {
|
|
15384
16362
|
replicator: this.name,
|
|
@@ -15409,7 +16387,13 @@ class S3dbReplicator extends BaseReplicator {
|
|
|
15409
16387
|
const normResource = normalizeResourceName$1(resource);
|
|
15410
16388
|
const entry = this.resourcesMap[normResource];
|
|
15411
16389
|
if (!entry) {
|
|
15412
|
-
throw new
|
|
16390
|
+
throw new ReplicationError("Resource not configured for replication", {
|
|
16391
|
+
operation: "replicate",
|
|
16392
|
+
replicatorClass: "S3dbReplicator",
|
|
16393
|
+
resourceName: resource,
|
|
16394
|
+
configuredResources: Object.keys(this.resourcesMap),
|
|
16395
|
+
suggestion: 'Add resource to replicator resources map: { resources: { [resourceName]: "destination" } }'
|
|
16396
|
+
});
|
|
15413
16397
|
}
|
|
15414
16398
|
if (Array.isArray(entry)) {
|
|
15415
16399
|
const results = [];
|
|
@@ -15477,7 +16461,14 @@ class S3dbReplicator extends BaseReplicator {
|
|
|
15477
16461
|
} else if (operation === "delete") {
|
|
15478
16462
|
result = await destResourceObj.delete(recordId);
|
|
15479
16463
|
} else {
|
|
15480
|
-
throw new
|
|
16464
|
+
throw new ReplicationError(`Invalid replication operation: ${operation}`, {
|
|
16465
|
+
operation: "replicate",
|
|
16466
|
+
replicatorClass: "S3dbReplicator",
|
|
16467
|
+
invalidOperation: operation,
|
|
16468
|
+
supportedOperations: ["insert", "update", "delete"],
|
|
16469
|
+
resourceName: sourceResource,
|
|
16470
|
+
suggestion: "Use one of the supported operations: insert, update, delete"
|
|
16471
|
+
});
|
|
15481
16472
|
}
|
|
15482
16473
|
return result;
|
|
15483
16474
|
}
|
|
@@ -15545,7 +16536,13 @@ class S3dbReplicator extends BaseReplicator {
|
|
|
15545
16536
|
const norm = normalizeResourceName$1(resource);
|
|
15546
16537
|
const found = available.find((r) => normalizeResourceName$1(r) === norm);
|
|
15547
16538
|
if (!found) {
|
|
15548
|
-
throw new
|
|
16539
|
+
throw new ReplicationError("Destination resource not found in target database", {
|
|
16540
|
+
operation: "_getDestResourceObj",
|
|
16541
|
+
replicatorClass: "S3dbReplicator",
|
|
16542
|
+
destinationResource: resource,
|
|
16543
|
+
availableResources: available,
|
|
16544
|
+
suggestion: "Create the resource in target database or check resource name spelling"
|
|
16545
|
+
});
|
|
15549
16546
|
}
|
|
15550
16547
|
return db.resources[found];
|
|
15551
16548
|
}
|
|
@@ -15594,7 +16591,13 @@ class S3dbReplicator extends BaseReplicator {
|
|
|
15594
16591
|
}
|
|
15595
16592
|
async testConnection() {
|
|
15596
16593
|
const [ok, err] = await tryFn(async () => {
|
|
15597
|
-
if (!this.targetDatabase)
|
|
16594
|
+
if (!this.targetDatabase) {
|
|
16595
|
+
throw new ReplicationError("No target database configured for connection test", {
|
|
16596
|
+
operation: "testConnection",
|
|
16597
|
+
replicatorClass: "S3dbReplicator",
|
|
16598
|
+
suggestion: "Initialize replicator with client or connectionString before testing connection"
|
|
16599
|
+
});
|
|
16600
|
+
}
|
|
15598
16601
|
if (typeof this.targetDatabase.connect === "function") {
|
|
15599
16602
|
await this.targetDatabase.connect();
|
|
15600
16603
|
}
|
|
@@ -15981,7 +16984,12 @@ const REPLICATOR_DRIVERS = {
|
|
|
15981
16984
|
function createReplicator(driver, config = {}, resources = [], client = null) {
|
|
15982
16985
|
const ReplicatorClass = REPLICATOR_DRIVERS[driver];
|
|
15983
16986
|
if (!ReplicatorClass) {
|
|
15984
|
-
throw new
|
|
16987
|
+
throw new ReplicationError(`Unknown replicator driver: ${driver}`, {
|
|
16988
|
+
operation: "createReplicator",
|
|
16989
|
+
driver,
|
|
16990
|
+
availableDrivers: Object.keys(REPLICATOR_DRIVERS),
|
|
16991
|
+
suggestion: `Use one of the available drivers: ${Object.keys(REPLICATOR_DRIVERS).join(", ")}`
|
|
16992
|
+
});
|
|
15985
16993
|
}
|
|
15986
16994
|
return new ReplicatorClass(config, resources, client);
|
|
15987
16995
|
}
|
|
@@ -15993,12 +17001,40 @@ class ReplicatorPlugin extends Plugin {
|
|
|
15993
17001
|
constructor(options = {}) {
|
|
15994
17002
|
super();
|
|
15995
17003
|
if (!options.replicators || !Array.isArray(options.replicators)) {
|
|
15996
|
-
throw new
|
|
17004
|
+
throw new ReplicationError("ReplicatorPlugin requires replicators array", {
|
|
17005
|
+
operation: "constructor",
|
|
17006
|
+
pluginName: "ReplicatorPlugin",
|
|
17007
|
+
providedOptions: Object.keys(options),
|
|
17008
|
+
suggestion: 'Provide replicators array: new ReplicatorPlugin({ replicators: [{ driver: "s3db", resources: [...] }] })'
|
|
17009
|
+
});
|
|
15997
17010
|
}
|
|
15998
17011
|
for (const rep of options.replicators) {
|
|
15999
|
-
if (!rep.driver)
|
|
16000
|
-
|
|
16001
|
-
|
|
17012
|
+
if (!rep.driver) {
|
|
17013
|
+
throw new ReplicationError("Each replicator must have a driver", {
|
|
17014
|
+
operation: "constructor",
|
|
17015
|
+
pluginName: "ReplicatorPlugin",
|
|
17016
|
+
replicatorConfig: rep,
|
|
17017
|
+
suggestion: 'Each replicator entry must specify a driver: { driver: "s3db", resources: {...} }'
|
|
17018
|
+
});
|
|
17019
|
+
}
|
|
17020
|
+
if (!rep.resources || typeof rep.resources !== "object") {
|
|
17021
|
+
throw new ReplicationError("Each replicator must have resources config", {
|
|
17022
|
+
operation: "constructor",
|
|
17023
|
+
pluginName: "ReplicatorPlugin",
|
|
17024
|
+
driver: rep.driver,
|
|
17025
|
+
replicatorConfig: rep,
|
|
17026
|
+
suggestion: 'Provide resources as object or array: { driver: "s3db", resources: ["users"] } or { resources: { users: "people" } }'
|
|
17027
|
+
});
|
|
17028
|
+
}
|
|
17029
|
+
if (Object.keys(rep.resources).length === 0) {
|
|
17030
|
+
throw new ReplicationError("Each replicator must have at least one resource configured", {
|
|
17031
|
+
operation: "constructor",
|
|
17032
|
+
pluginName: "ReplicatorPlugin",
|
|
17033
|
+
driver: rep.driver,
|
|
17034
|
+
replicatorConfig: rep,
|
|
17035
|
+
suggestion: 'Add at least one resource to replicate: { driver: "s3db", resources: ["users"] }'
|
|
17036
|
+
});
|
|
17037
|
+
}
|
|
16002
17038
|
}
|
|
16003
17039
|
this.config = {
|
|
16004
17040
|
replicators: options.replicators || [],
|
|
@@ -16424,7 +17460,13 @@ class ReplicatorPlugin extends Plugin {
|
|
|
16424
17460
|
async syncAllData(replicatorId) {
|
|
16425
17461
|
const replicator = this.replicators.find((r) => r.id === replicatorId);
|
|
16426
17462
|
if (!replicator) {
|
|
16427
|
-
throw new
|
|
17463
|
+
throw new ReplicationError("Replicator not found", {
|
|
17464
|
+
operation: "syncAllData",
|
|
17465
|
+
pluginName: "ReplicatorPlugin",
|
|
17466
|
+
replicatorId,
|
|
17467
|
+
availableReplicators: this.replicators.map((r) => r.id),
|
|
17468
|
+
suggestion: "Check replicator ID or use getReplicatorStats() to list available replicators"
|
|
17469
|
+
});
|
|
16428
17470
|
}
|
|
16429
17471
|
this.stats.lastSync = (/* @__PURE__ */ new Date()).toISOString();
|
|
16430
17472
|
for (const resourceName in this.database.resources) {
|
|
@@ -16954,6 +17996,35 @@ class S3QueuePlugin extends Plugin {
|
|
|
16954
17996
|
}
|
|
16955
17997
|
}
|
|
16956
17998
|
|
|
17999
|
+
class SchedulerError extends S3dbError {
|
|
18000
|
+
constructor(message, details = {}) {
|
|
18001
|
+
const { taskId, operation = "unknown", cronExpression, ...rest } = details;
|
|
18002
|
+
let description = details.description;
|
|
18003
|
+
if (!description) {
|
|
18004
|
+
description = `
|
|
18005
|
+
Scheduler Operation Error
|
|
18006
|
+
|
|
18007
|
+
Operation: ${operation}
|
|
18008
|
+
${taskId ? `Task ID: ${taskId}` : ""}
|
|
18009
|
+
${cronExpression ? `Cron: ${cronExpression}` : ""}
|
|
18010
|
+
|
|
18011
|
+
Common causes:
|
|
18012
|
+
1. Invalid cron expression format
|
|
18013
|
+
2. Task not found or already exists
|
|
18014
|
+
3. Scheduler not properly initialized
|
|
18015
|
+
4. Job execution failure
|
|
18016
|
+
5. Resource conflicts
|
|
18017
|
+
|
|
18018
|
+
Solution:
|
|
18019
|
+
Check task configuration and ensure scheduler is properly initialized.
|
|
18020
|
+
|
|
18021
|
+
Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/scheduler.md
|
|
18022
|
+
`.trim();
|
|
18023
|
+
}
|
|
18024
|
+
super(message, { ...rest, taskId, operation, cronExpression, description });
|
|
18025
|
+
}
|
|
18026
|
+
}
|
|
18027
|
+
|
|
16957
18028
|
class SchedulerPlugin extends Plugin {
|
|
16958
18029
|
constructor(options = {}) {
|
|
16959
18030
|
super();
|
|
@@ -16987,17 +18058,36 @@ class SchedulerPlugin extends Plugin {
|
|
|
16987
18058
|
}
|
|
16988
18059
|
_validateConfiguration() {
|
|
16989
18060
|
if (Object.keys(this.config.jobs).length === 0) {
|
|
16990
|
-
throw new
|
|
18061
|
+
throw new SchedulerError("At least one job must be defined", {
|
|
18062
|
+
operation: "validateConfiguration",
|
|
18063
|
+
jobCount: 0,
|
|
18064
|
+
suggestion: 'Provide at least one job in the jobs configuration: { jobs: { myJob: { schedule: "* * * * *", action: async () => {...} } } }'
|
|
18065
|
+
});
|
|
16991
18066
|
}
|
|
16992
18067
|
for (const [jobName, job] of Object.entries(this.config.jobs)) {
|
|
16993
18068
|
if (!job.schedule) {
|
|
16994
|
-
throw new
|
|
18069
|
+
throw new SchedulerError(`Job '${jobName}' must have a schedule`, {
|
|
18070
|
+
operation: "validateConfiguration",
|
|
18071
|
+
taskId: jobName,
|
|
18072
|
+
providedConfig: Object.keys(job),
|
|
18073
|
+
suggestion: 'Add a schedule property with a valid cron expression: { schedule: "0 * * * *", action: async () => {...} }'
|
|
18074
|
+
});
|
|
16995
18075
|
}
|
|
16996
18076
|
if (!job.action || typeof job.action !== "function") {
|
|
16997
|
-
throw new
|
|
18077
|
+
throw new SchedulerError(`Job '${jobName}' must have an action function`, {
|
|
18078
|
+
operation: "validateConfiguration",
|
|
18079
|
+
taskId: jobName,
|
|
18080
|
+
actionType: typeof job.action,
|
|
18081
|
+
suggestion: 'Provide an action function: { schedule: "...", action: async (db, ctx) => {...} }'
|
|
18082
|
+
});
|
|
16998
18083
|
}
|
|
16999
18084
|
if (!this._isValidCronExpression(job.schedule)) {
|
|
17000
|
-
throw new
|
|
18085
|
+
throw new SchedulerError(`Job '${jobName}' has invalid cron expression`, {
|
|
18086
|
+
operation: "validateConfiguration",
|
|
18087
|
+
taskId: jobName,
|
|
18088
|
+
cronExpression: job.schedule,
|
|
18089
|
+
suggestion: "Use valid cron format (5 fields: minute hour day month weekday) or shortcuts (@hourly, @daily, @weekly, @monthly, @yearly)"
|
|
18090
|
+
});
|
|
17001
18091
|
}
|
|
17002
18092
|
}
|
|
17003
18093
|
}
|
|
@@ -17295,10 +18385,20 @@ class SchedulerPlugin extends Plugin {
|
|
|
17295
18385
|
async runJob(jobName, context = {}) {
|
|
17296
18386
|
const job = this.jobs.get(jobName);
|
|
17297
18387
|
if (!job) {
|
|
17298
|
-
throw new
|
|
18388
|
+
throw new SchedulerError(`Job '${jobName}' not found`, {
|
|
18389
|
+
operation: "runJob",
|
|
18390
|
+
taskId: jobName,
|
|
18391
|
+
availableJobs: Array.from(this.jobs.keys()),
|
|
18392
|
+
suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
|
|
18393
|
+
});
|
|
17299
18394
|
}
|
|
17300
18395
|
if (this.activeJobs.has(jobName)) {
|
|
17301
|
-
throw new
|
|
18396
|
+
throw new SchedulerError(`Job '${jobName}' is already running`, {
|
|
18397
|
+
operation: "runJob",
|
|
18398
|
+
taskId: jobName,
|
|
18399
|
+
executionId: this.activeJobs.get(jobName),
|
|
18400
|
+
suggestion: "Wait for current execution to complete or check job status with getJobStatus()"
|
|
18401
|
+
});
|
|
17302
18402
|
}
|
|
17303
18403
|
await this._executeJob(jobName);
|
|
17304
18404
|
}
|
|
@@ -17308,7 +18408,12 @@ class SchedulerPlugin extends Plugin {
|
|
|
17308
18408
|
enableJob(jobName) {
|
|
17309
18409
|
const job = this.jobs.get(jobName);
|
|
17310
18410
|
if (!job) {
|
|
17311
|
-
throw new
|
|
18411
|
+
throw new SchedulerError(`Job '${jobName}' not found`, {
|
|
18412
|
+
operation: "enableJob",
|
|
18413
|
+
taskId: jobName,
|
|
18414
|
+
availableJobs: Array.from(this.jobs.keys()),
|
|
18415
|
+
suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
|
|
18416
|
+
});
|
|
17312
18417
|
}
|
|
17313
18418
|
job.enabled = true;
|
|
17314
18419
|
this._scheduleNextExecution(jobName);
|
|
@@ -17320,7 +18425,12 @@ class SchedulerPlugin extends Plugin {
|
|
|
17320
18425
|
disableJob(jobName) {
|
|
17321
18426
|
const job = this.jobs.get(jobName);
|
|
17322
18427
|
if (!job) {
|
|
17323
|
-
throw new
|
|
18428
|
+
throw new SchedulerError(`Job '${jobName}' not found`, {
|
|
18429
|
+
operation: "disableJob",
|
|
18430
|
+
taskId: jobName,
|
|
18431
|
+
availableJobs: Array.from(this.jobs.keys()),
|
|
18432
|
+
suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
|
|
18433
|
+
});
|
|
17324
18434
|
}
|
|
17325
18435
|
job.enabled = false;
|
|
17326
18436
|
const timer = this.timers.get(jobName);
|
|
@@ -17419,13 +18529,28 @@ class SchedulerPlugin extends Plugin {
|
|
|
17419
18529
|
*/
|
|
17420
18530
|
addJob(jobName, jobConfig) {
|
|
17421
18531
|
if (this.jobs.has(jobName)) {
|
|
17422
|
-
throw new
|
|
18532
|
+
throw new SchedulerError(`Job '${jobName}' already exists`, {
|
|
18533
|
+
operation: "addJob",
|
|
18534
|
+
taskId: jobName,
|
|
18535
|
+
existingJobs: Array.from(this.jobs.keys()),
|
|
18536
|
+
suggestion: "Use a different job name or remove the existing job first with removeJob()"
|
|
18537
|
+
});
|
|
17423
18538
|
}
|
|
17424
18539
|
if (!jobConfig.schedule || !jobConfig.action) {
|
|
17425
|
-
throw new
|
|
18540
|
+
throw new SchedulerError("Job must have schedule and action", {
|
|
18541
|
+
operation: "addJob",
|
|
18542
|
+
taskId: jobName,
|
|
18543
|
+
providedConfig: Object.keys(jobConfig),
|
|
18544
|
+
suggestion: 'Provide both schedule and action: { schedule: "0 * * * *", action: async (db, ctx) => {...} }'
|
|
18545
|
+
});
|
|
17426
18546
|
}
|
|
17427
18547
|
if (!this._isValidCronExpression(jobConfig.schedule)) {
|
|
17428
|
-
throw new
|
|
18548
|
+
throw new SchedulerError("Invalid cron expression", {
|
|
18549
|
+
operation: "addJob",
|
|
18550
|
+
taskId: jobName,
|
|
18551
|
+
cronExpression: jobConfig.schedule,
|
|
18552
|
+
suggestion: "Use valid cron format (5 fields) or shortcuts (@hourly, @daily, @weekly, @monthly, @yearly)"
|
|
18553
|
+
});
|
|
17429
18554
|
}
|
|
17430
18555
|
const job = {
|
|
17431
18556
|
...jobConfig,
|
|
@@ -17459,7 +18584,12 @@ class SchedulerPlugin extends Plugin {
|
|
|
17459
18584
|
removeJob(jobName) {
|
|
17460
18585
|
const job = this.jobs.get(jobName);
|
|
17461
18586
|
if (!job) {
|
|
17462
|
-
throw new
|
|
18587
|
+
throw new SchedulerError(`Job '${jobName}' not found`, {
|
|
18588
|
+
operation: "removeJob",
|
|
18589
|
+
taskId: jobName,
|
|
18590
|
+
availableJobs: Array.from(this.jobs.keys()),
|
|
18591
|
+
suggestion: "Check job name or use getAllJobsStatus() to list available jobs"
|
|
18592
|
+
});
|
|
17463
18593
|
}
|
|
17464
18594
|
const timer = this.timers.get(jobName);
|
|
17465
18595
|
if (timer) {
|
|
@@ -17513,6 +18643,36 @@ class SchedulerPlugin extends Plugin {
|
|
|
17513
18643
|
}
|
|
17514
18644
|
}
|
|
17515
18645
|
|
|
18646
|
+
/**
 * Error type raised by StateMachinePlugin for invalid transitions,
 * unknown machines/states and configuration problems.
 *
 * Recognized detail keys: `currentState`, `targetState`, `resourceName`,
 * `operation` (defaults to "unknown"). All other keys are forwarded to
 * S3dbError untouched. A human-readable `description` is synthesized
 * from the details unless the caller supplies a truthy one.
 */
class StateMachineError extends S3dbError {
  constructor(message, details = {}) {
    const { currentState, targetState, resourceName, operation = "unknown", ...rest } = details;
    // Falsy-check on purpose: an empty-string description also gets replaced.
    const description = details.description || `
State Machine Operation Error

Operation: ${operation}
${currentState ? `Current State: ${currentState}` : ""}
${targetState ? `Target State: ${targetState}` : ""}
${resourceName ? `Resource: ${resourceName}` : ""}

Common causes:
1. Invalid state transition
2. State machine not configured
3. Transition conditions not met
4. State not defined in configuration
5. Missing transition handler

Solution:
Check state machine configuration and valid transitions.

Docs: https://github.com/forattini-dev/s3db.js/blob/main/docs/plugins/state-machine.md
`.trim();
    super(message, { ...rest, currentState, targetState, resourceName, operation, description });
  }
}
|
|
18675
|
+
|
|
17516
18676
|
class StateMachinePlugin extends Plugin {
|
|
17517
18677
|
constructor(options = {}) {
|
|
17518
18678
|
super();
|
|
@@ -17533,17 +18693,36 @@ class StateMachinePlugin extends Plugin {
|
|
|
17533
18693
|
}
|
|
17534
18694
|
_validateConfiguration() {
|
|
17535
18695
|
if (!this.config.stateMachines || Object.keys(this.config.stateMachines).length === 0) {
|
|
17536
|
-
throw new
|
|
18696
|
+
throw new StateMachineError("At least one state machine must be defined", {
|
|
18697
|
+
operation: "validateConfiguration",
|
|
18698
|
+
machineCount: 0,
|
|
18699
|
+
suggestion: "Provide at least one state machine in the stateMachines configuration"
|
|
18700
|
+
});
|
|
17537
18701
|
}
|
|
17538
18702
|
for (const [machineName, machine] of Object.entries(this.config.stateMachines)) {
|
|
17539
18703
|
if (!machine.states || Object.keys(machine.states).length === 0) {
|
|
17540
|
-
throw new
|
|
18704
|
+
throw new StateMachineError(`Machine '${machineName}' must have states defined`, {
|
|
18705
|
+
operation: "validateConfiguration",
|
|
18706
|
+
machineId: machineName,
|
|
18707
|
+
suggestion: "Define at least one state in the states configuration"
|
|
18708
|
+
});
|
|
17541
18709
|
}
|
|
17542
18710
|
if (!machine.initialState) {
|
|
17543
|
-
throw new
|
|
18711
|
+
throw new StateMachineError(`Machine '${machineName}' must have an initialState`, {
|
|
18712
|
+
operation: "validateConfiguration",
|
|
18713
|
+
machineId: machineName,
|
|
18714
|
+
availableStates: Object.keys(machine.states),
|
|
18715
|
+
suggestion: "Specify an initialState property matching one of the defined states"
|
|
18716
|
+
});
|
|
17544
18717
|
}
|
|
17545
18718
|
if (!machine.states[machine.initialState]) {
|
|
17546
|
-
throw new
|
|
18719
|
+
throw new StateMachineError(`Initial state '${machine.initialState}' not found in machine '${machineName}'`, {
|
|
18720
|
+
operation: "validateConfiguration",
|
|
18721
|
+
machineId: machineName,
|
|
18722
|
+
initialState: machine.initialState,
|
|
18723
|
+
availableStates: Object.keys(machine.states),
|
|
18724
|
+
suggestion: "Set initialState to one of the defined states"
|
|
18725
|
+
});
|
|
17547
18726
|
}
|
|
17548
18727
|
}
|
|
17549
18728
|
}
|
|
@@ -17600,12 +18779,25 @@ class StateMachinePlugin extends Plugin {
|
|
|
17600
18779
|
async send(machineId, entityId, event, context = {}) {
|
|
17601
18780
|
const machine = this.machines.get(machineId);
|
|
17602
18781
|
if (!machine) {
|
|
17603
|
-
throw new
|
|
18782
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
18783
|
+
operation: "send",
|
|
18784
|
+
machineId,
|
|
18785
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
18786
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
18787
|
+
});
|
|
17604
18788
|
}
|
|
17605
18789
|
const currentState = await this.getState(machineId, entityId);
|
|
17606
18790
|
const stateConfig = machine.config.states[currentState];
|
|
17607
18791
|
if (!stateConfig || !stateConfig.on || !stateConfig.on[event]) {
|
|
17608
|
-
throw new
|
|
18792
|
+
throw new StateMachineError(`Event '${event}' not valid for state '${currentState}' in machine '${machineId}'`, {
|
|
18793
|
+
operation: "send",
|
|
18794
|
+
machineId,
|
|
18795
|
+
entityId,
|
|
18796
|
+
event,
|
|
18797
|
+
currentState,
|
|
18798
|
+
validEvents: stateConfig && stateConfig.on ? Object.keys(stateConfig.on) : [],
|
|
18799
|
+
suggestion: "Use getValidEvents() to check which events are valid for the current state"
|
|
18800
|
+
});
|
|
17609
18801
|
}
|
|
17610
18802
|
const targetState = stateConfig.on[event];
|
|
17611
18803
|
if (stateConfig.guards && stateConfig.guards[event]) {
|
|
@@ -17616,7 +18808,16 @@ class StateMachinePlugin extends Plugin {
|
|
|
17616
18808
|
() => guard(context, event, { database: this.database, machineId, entityId })
|
|
17617
18809
|
);
|
|
17618
18810
|
if (!guardOk || !guardResult) {
|
|
17619
|
-
throw new
|
|
18811
|
+
throw new StateMachineError(`Transition blocked by guard '${guardName}'`, {
|
|
18812
|
+
operation: "send",
|
|
18813
|
+
machineId,
|
|
18814
|
+
entityId,
|
|
18815
|
+
event,
|
|
18816
|
+
currentState,
|
|
18817
|
+
guardName,
|
|
18818
|
+
guardError: guardErr?.message || "Guard returned false",
|
|
18819
|
+
suggestion: "Check guard conditions or modify the context to satisfy guard requirements"
|
|
18820
|
+
});
|
|
17620
18821
|
}
|
|
17621
18822
|
}
|
|
17622
18823
|
}
|
|
@@ -17726,7 +18927,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17726
18927
|
async getState(machineId, entityId) {
|
|
17727
18928
|
const machine = this.machines.get(machineId);
|
|
17728
18929
|
if (!machine) {
|
|
17729
|
-
throw new
|
|
18930
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
18931
|
+
operation: "getState",
|
|
18932
|
+
machineId,
|
|
18933
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
18934
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
18935
|
+
});
|
|
17730
18936
|
}
|
|
17731
18937
|
if (machine.currentStates.has(entityId)) {
|
|
17732
18938
|
return machine.currentStates.get(entityId);
|
|
@@ -17752,7 +18958,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17752
18958
|
async getValidEvents(machineId, stateOrEntityId) {
|
|
17753
18959
|
const machine = this.machines.get(machineId);
|
|
17754
18960
|
if (!machine) {
|
|
17755
|
-
throw new
|
|
18961
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
18962
|
+
operation: "getValidEvents",
|
|
18963
|
+
machineId,
|
|
18964
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
18965
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
18966
|
+
});
|
|
17756
18967
|
}
|
|
17757
18968
|
let state;
|
|
17758
18969
|
if (machine.config.states[stateOrEntityId]) {
|
|
@@ -17801,7 +19012,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17801
19012
|
async initializeEntity(machineId, entityId, context = {}) {
|
|
17802
19013
|
const machine = this.machines.get(machineId);
|
|
17803
19014
|
if (!machine) {
|
|
17804
|
-
throw new
|
|
19015
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
19016
|
+
operation: "initializeEntity",
|
|
19017
|
+
machineId,
|
|
19018
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
19019
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
19020
|
+
});
|
|
17805
19021
|
}
|
|
17806
19022
|
const initialState = machine.config.initialState;
|
|
17807
19023
|
machine.currentStates.set(entityId, initialState);
|
|
@@ -17820,7 +19036,14 @@ class StateMachinePlugin extends Plugin {
|
|
|
17820
19036
|
})
|
|
17821
19037
|
);
|
|
17822
19038
|
if (!ok && err && !err.message?.includes("already exists")) {
|
|
17823
|
-
throw new
|
|
19039
|
+
throw new StateMachineError("Failed to initialize entity state", {
|
|
19040
|
+
operation: "initializeEntity",
|
|
19041
|
+
machineId,
|
|
19042
|
+
entityId,
|
|
19043
|
+
initialState,
|
|
19044
|
+
original: err,
|
|
19045
|
+
suggestion: "Check state resource configuration and database permissions"
|
|
19046
|
+
});
|
|
17824
19047
|
}
|
|
17825
19048
|
}
|
|
17826
19049
|
const initialStateConfig = machine.config.states[initialState];
|
|
@@ -17849,7 +19072,12 @@ class StateMachinePlugin extends Plugin {
|
|
|
17849
19072
|
visualize(machineId) {
|
|
17850
19073
|
const machine = this.machines.get(machineId);
|
|
17851
19074
|
if (!machine) {
|
|
17852
|
-
throw new
|
|
19075
|
+
throw new StateMachineError(`State machine '${machineId}' not found`, {
|
|
19076
|
+
operation: "visualize",
|
|
19077
|
+
machineId,
|
|
19078
|
+
availableMachines: Array.from(this.machines.keys()),
|
|
19079
|
+
suggestion: "Check machine ID or use getMachines() to list available machines"
|
|
19080
|
+
});
|
|
17853
19081
|
}
|
|
17854
19082
|
let dot = `digraph ${machineId} {
|
|
17855
19083
|
`;
|
|
@@ -17893,12 +19121,1098 @@ class StateMachinePlugin extends Plugin {
|
|
|
17893
19121
|
}
|
|
17894
19122
|
}
|
|
17895
19123
|
|
|
19124
|
+
/**
 * Cosine distance between two numeric vectors: 1 - cosine similarity.
 * Range: [0, 2]; 0 means same direction, 2 means opposite direction.
 * Degenerate zero-magnitude cases: two all-zero vectors are treated as
 * identical (0); a zero vector against anything else yields 1.
 *
 * @param {number[]} a - First vector.
 * @param {number[]} b - Second vector (same length as `a`).
 * @returns {number} Cosine distance.
 * @throws {Error} When the vectors differ in length.
 */
function cosineDistance(a, b) {
  if (a.length !== b.length) {
    throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
  }
  let dot = 0;
  let sqSumA = 0;
  let sqSumB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    sqSumA += a[i] * a[i];
    sqSumB += b[i] * b[i];
  }
  const magnitudeProduct = Math.sqrt(sqSumA) * Math.sqrt(sqSumB);
  if (magnitudeProduct === 0) {
    // Cosine similarity is undefined for zero vectors; pick a convention.
    const bothZero = a.every((v) => v === 0) && b.every((v) => v === 0);
    return bothZero ? 0 : 1;
  }
  return 1 - dot / magnitudeProduct;
}
|
|
19143
|
+
/**
 * Euclidean (L2) distance between two numeric vectors. Range: [0, Infinity).
 *
 * @param {number[]} a - First vector.
 * @param {number[]} b - Second vector (same length as `a`).
 * @returns {number} Euclidean distance.
 * @throws {Error} When the vectors differ in length.
 */
function euclideanDistance(a, b) {
  if (a.length !== b.length) {
    throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
  }
  const sumOfSquares = a.reduce(
    (acc, value, i) => acc + (value - b[i]) * (value - b[i]),
    0
  );
  return Math.sqrt(sumOfSquares);
}
|
|
19154
|
+
/**
 * Manhattan (L1) distance between two numeric vectors: the sum of
 * absolute coordinate differences. Range: [0, Infinity).
 *
 * @param {number[]} a - First vector.
 * @param {number[]} b - Second vector (same length as `a`).
 * @returns {number} Manhattan distance.
 * @throws {Error} When the vectors differ in length.
 */
function manhattanDistance(a, b) {
  if (a.length !== b.length) {
    throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
  }
  return a.reduce((acc, value, i) => acc + Math.abs(value - b[i]), 0);
}
|
|
19164
|
+
/**
 * Dot (inner) product of two numeric vectors.
 *
 * @param {number[]} a - First vector.
 * @param {number[]} b - Second vector (same length as `a`).
 * @returns {number} Sum of element-wise products.
 * @throws {Error} When the vectors differ in length.
 */
function dotProduct(a, b) {
  if (a.length !== b.length) {
    throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
  }
  return a.reduce((acc, value, i) => acc + value * b[i], 0);
}
|
|
19174
|
+
/**
 * Scale a vector to unit length (L2 norm of 1).
 * Always returns a new array; the input is never mutated. A zero vector
 * cannot be normalized, so a copy of it is returned unchanged.
 *
 * @param {number[]} vector - Vector to normalize.
 * @returns {number[]} New unit-length vector (or a copy of the zero vector).
 */
function normalize(vector) {
  let sumOfSquares = 0;
  for (const value of vector) {
    sumOfSquares += value * value;
  }
  const magnitude = Math.sqrt(sumOfSquares);
  if (magnitude === 0) {
    return vector.slice();
  }
  return vector.map((value) => value / magnitude);
}
|
|
19183
|
+
|
|
19184
|
+
function kmeans(vectors, k, options = {}) {
|
|
19185
|
+
const {
|
|
19186
|
+
maxIterations = 100,
|
|
19187
|
+
tolerance = 1e-4,
|
|
19188
|
+
distanceFn = euclideanDistance,
|
|
19189
|
+
seed = null,
|
|
19190
|
+
onIteration = null
|
|
19191
|
+
} = options;
|
|
19192
|
+
if (vectors.length === 0) {
|
|
19193
|
+
throw new Error("Cannot cluster empty vector array");
|
|
19194
|
+
}
|
|
19195
|
+
if (k < 1) {
|
|
19196
|
+
throw new Error(`k must be at least 1, got ${k}`);
|
|
19197
|
+
}
|
|
19198
|
+
if (k > vectors.length) {
|
|
19199
|
+
throw new Error(`k (${k}) cannot be greater than number of vectors (${vectors.length})`);
|
|
19200
|
+
}
|
|
19201
|
+
const dimensions = vectors[0].length;
|
|
19202
|
+
for (let i = 1; i < vectors.length; i++) {
|
|
19203
|
+
if (vectors[i].length !== dimensions) {
|
|
19204
|
+
throw new Error(`All vectors must have same dimensions. Expected ${dimensions}, got ${vectors[i].length} at index ${i}`);
|
|
19205
|
+
}
|
|
19206
|
+
}
|
|
19207
|
+
const centroids = initializeCentroidsKMeansPlusPlus(vectors, k, distanceFn, seed);
|
|
19208
|
+
let assignments = new Array(vectors.length);
|
|
19209
|
+
let iterations = 0;
|
|
19210
|
+
let converged = false;
|
|
19211
|
+
let previousInertia = Infinity;
|
|
19212
|
+
while (!converged && iterations < maxIterations) {
|
|
19213
|
+
const newAssignments = vectors.map((vector) => {
|
|
19214
|
+
let minDist = Infinity;
|
|
19215
|
+
let nearestCluster = 0;
|
|
19216
|
+
for (let i = 0; i < k; i++) {
|
|
19217
|
+
const dist = distanceFn(vector, centroids[i]);
|
|
19218
|
+
if (dist < minDist) {
|
|
19219
|
+
minDist = dist;
|
|
19220
|
+
nearestCluster = i;
|
|
19221
|
+
}
|
|
19222
|
+
}
|
|
19223
|
+
return nearestCluster;
|
|
19224
|
+
});
|
|
19225
|
+
let inertia2 = 0;
|
|
19226
|
+
vectors.forEach((vector, i) => {
|
|
19227
|
+
const dist = distanceFn(vector, centroids[newAssignments[i]]);
|
|
19228
|
+
inertia2 += dist * dist;
|
|
19229
|
+
});
|
|
19230
|
+
const inertiaChange = Math.abs(previousInertia - inertia2);
|
|
19231
|
+
converged = inertiaChange < tolerance;
|
|
19232
|
+
assignments = newAssignments;
|
|
19233
|
+
previousInertia = inertia2;
|
|
19234
|
+
if (onIteration) {
|
|
19235
|
+
onIteration(iterations + 1, inertia2, converged);
|
|
19236
|
+
}
|
|
19237
|
+
if (!converged) {
|
|
19238
|
+
const clusterSums = Array(k).fill(null).map(() => new Array(dimensions).fill(0));
|
|
19239
|
+
const clusterCounts = new Array(k).fill(0);
|
|
19240
|
+
vectors.forEach((vector, i) => {
|
|
19241
|
+
const cluster = assignments[i];
|
|
19242
|
+
clusterCounts[cluster]++;
|
|
19243
|
+
vector.forEach((val, j) => {
|
|
19244
|
+
clusterSums[cluster][j] += val;
|
|
19245
|
+
});
|
|
19246
|
+
});
|
|
19247
|
+
for (let i = 0; i < k; i++) {
|
|
19248
|
+
if (clusterCounts[i] > 0) {
|
|
19249
|
+
centroids[i] = clusterSums[i].map((sum) => sum / clusterCounts[i]);
|
|
19250
|
+
} else {
|
|
19251
|
+
const randomIdx = Math.floor(Math.random() * vectors.length);
|
|
19252
|
+
centroids[i] = [...vectors[randomIdx]];
|
|
19253
|
+
}
|
|
19254
|
+
}
|
|
19255
|
+
}
|
|
19256
|
+
iterations++;
|
|
19257
|
+
}
|
|
19258
|
+
let inertia = 0;
|
|
19259
|
+
vectors.forEach((vector, i) => {
|
|
19260
|
+
const dist = distanceFn(vector, centroids[assignments[i]]);
|
|
19261
|
+
inertia += dist * dist;
|
|
19262
|
+
});
|
|
19263
|
+
return {
|
|
19264
|
+
centroids,
|
|
19265
|
+
assignments,
|
|
19266
|
+
iterations,
|
|
19267
|
+
converged,
|
|
19268
|
+
inertia
|
|
19269
|
+
};
|
|
19270
|
+
}
|
|
19271
|
+
function initializeCentroidsKMeansPlusPlus(vectors, k, distanceFn, seed) {
|
|
19272
|
+
const centroids = [];
|
|
19273
|
+
const n = vectors.length;
|
|
19274
|
+
const firstIndex = seed !== null ? seed % n : Math.floor(Math.random() * n);
|
|
19275
|
+
centroids.push([...vectors[firstIndex]]);
|
|
19276
|
+
for (let i = 1; i < k; i++) {
|
|
19277
|
+
const distances = vectors.map((vector) => {
|
|
19278
|
+
return Math.min(...centroids.map((c) => distanceFn(vector, c)));
|
|
19279
|
+
});
|
|
19280
|
+
const squaredDistances = distances.map((d) => d * d);
|
|
19281
|
+
const totalSquared = squaredDistances.reduce((a, b) => a + b, 0);
|
|
19282
|
+
if (totalSquared === 0) {
|
|
19283
|
+
const randomIdx = Math.floor(Math.random() * n);
|
|
19284
|
+
centroids.push([...vectors[randomIdx]]);
|
|
19285
|
+
continue;
|
|
19286
|
+
}
|
|
19287
|
+
let threshold = Math.random() * totalSquared;
|
|
19288
|
+
let cumulativeSum = 0;
|
|
19289
|
+
for (let j = 0; j < n; j++) {
|
|
19290
|
+
cumulativeSum += squaredDistances[j];
|
|
19291
|
+
if (cumulativeSum >= threshold) {
|
|
19292
|
+
centroids.push([...vectors[j]]);
|
|
19293
|
+
break;
|
|
19294
|
+
}
|
|
19295
|
+
}
|
|
19296
|
+
}
|
|
19297
|
+
return centroids;
|
|
19298
|
+
}
|
|
19299
|
+
/**
 * Evaluate k-means clusterings for every k in [minK, maxK] and recommend
 * an optimal k by majority vote across five criteria: elbow (inertia
 * curvature), silhouette, Davies-Bouldin, Calinski-Harabasz and the gap
 * statistic, plus a repeated-run stability analysis.
 *
 * @param {number[][]} vectors - Points to cluster.
 * @param {Object} [options]
 * @param {number} [options.minK=2] - Smallest k to evaluate.
 * @param {number} [options.maxK] - Largest k; defaults to min(10, floor(sqrt(n/2))).
 * @param {Function} [options.distanceFn=euclideanDistance] - Distance metric.
 * @param {number} [options.nReferences=10] - Reference sets for the gap statistic.
 * @param {number} [options.stabilityRuns=5] - Runs for the stability analysis.
 * @returns {Promise<{results: Array, recommendations: Object, consensus: number, summary: Object}>}
 * @throws {Error} When the k range is empty (too few vectors for the defaults,
 *   or explicit minK > maxK).
 */
async function findOptimalK(vectors, options = {}) {
  const {
    minK = 2,
    maxK = Math.min(10, Math.floor(Math.sqrt(vectors.length / 2))),
    distanceFn = euclideanDistance,
    nReferences = 10,
    stabilityRuns = 5,
    ...kmeansOptions
  } = options;
  // Guard: with fewer than 8 vectors the default maxK drops below minK, the
  // k-loop never runs and the reductions below used to die with a cryptic
  // "Reduce of empty array with no initial value" TypeError. Fail clearly.
  if (maxK < minK) {
    throw new Error(`Invalid k range: minK (${minK}) exceeds maxK (${maxK}). Provide more vectors or pass explicit minK/maxK options.`);
  }
  // Lazy-load the clustering quality metrics (bundler-generated module).
  const metricsModule = await Promise.resolve().then(function () { return metrics; });
  const {
    silhouetteScore,
    daviesBouldinIndex,
    calinskiHarabaszIndex,
    gapStatistic,
    clusteringStability
  } = metricsModule;
  const results = [];
  for (let k = minK; k <= maxK; k++) {
    const kmeansResult = kmeans(vectors, k, { ...kmeansOptions, distanceFn });
    const silhouette = silhouetteScore(
      vectors,
      kmeansResult.assignments,
      kmeansResult.centroids,
      distanceFn
    );
    const daviesBouldin = daviesBouldinIndex(
      vectors,
      kmeansResult.assignments,
      kmeansResult.centroids,
      distanceFn
    );
    const calinskiHarabasz = calinskiHarabaszIndex(
      vectors,
      kmeansResult.assignments,
      kmeansResult.centroids,
      distanceFn
    );
    const gap = await gapStatistic(
      vectors,
      kmeansResult.assignments,
      kmeansResult.centroids,
      distanceFn,
      nReferences
    );
    const stability = clusteringStability(
      vectors,
      k,
      { ...kmeansOptions, distanceFn, nRuns: stabilityRuns }
    );
    results.push({
      k,
      inertia: kmeansResult.inertia,
      silhouette,
      daviesBouldin,
      calinskiHarabasz,
      gap: gap.gap,
      gapSk: gap.sk,
      stability: stability.stability,
      cvInertia: stability.cvInertia,
      iterations: kmeansResult.iterations,
      converged: kmeansResult.converged
    });
  }
  // Per-criterion picks: silhouette, Calinski-Harabasz, gap and stability
  // prefer the maximum; Davies-Bouldin prefers the minimum; the elbow is
  // the point of maximum curvature on the inertia curve.
  const elbowK = findElbowPoint(results.map((r) => r.inertia));
  const recommendations = {
    elbow: minK + elbowK,
    silhouette: results.reduce(
      (best, curr) => curr.silhouette > best.silhouette ? curr : best
    ).k,
    daviesBouldin: results.reduce(
      (best, curr) => curr.daviesBouldin < best.daviesBouldin ? curr : best
    ).k,
    calinskiHarabasz: results.reduce(
      (best, curr) => curr.calinskiHarabasz > best.calinskiHarabasz ? curr : best
    ).k,
    gap: results.reduce(
      (best, curr) => curr.gap > best.gap ? curr : best
    ).k,
    stability: results.reduce(
      (best, curr) => curr.stability > best.stability ? curr : best
    ).k
  };
  // Majority vote across the criteria; on tied vote counts the smallest k
  // wins (integer-like object keys iterate in ascending numeric order and
  // the strict `>` keeps the earlier entry).
  const votes = Object.values(recommendations);
  const consensus = votes.reduce((acc, k) => {
    acc[k] = (acc[k] || 0) + 1;
    return acc;
  }, {});
  const consensusK = parseInt(
    Object.entries(consensus).reduce((a, b) => b[1] > a[1] ? b : a)[0]
  );
  return {
    results,
    recommendations,
    consensus: consensusK,
    summary: {
      analysisRange: `${minK}-${maxK}`,
      totalVectors: vectors.length,
      dimensions: vectors[0].length,
      recommendation: consensusK,
      confidence: consensus[consensusK] / votes.length
    }
  };
}
|
|
19403
|
+
/**
 * Locate the "elbow" of a (typically decreasing) inertia curve using the
 * discrete second difference as a curvature estimate. Returns the index
 * of the interior point with maximum curvature, or 0 when fewer than
 * three points are supplied (no interior point exists).
 *
 * @param {number[]} inertias - Inertia values in order of increasing k.
 * @returns {number} Index into `inertias` of the elbow point.
 */
function findElbowPoint(inertias) {
  const count = inertias.length;
  if (count < 3) return 0;
  let bestIndex = 0;
  let bestCurvature = -Infinity;
  for (let i = 1; i < count - 1; i++) {
    // Second difference approximates curvature at interior point i.
    const curvature = inertias[i - 1] - 2 * inertias[i] + inertias[i + 1];
    if (curvature > bestCurvature) {
      bestCurvature = curvature;
      bestIndex = i;
    }
  }
  return bestIndex;
}
|
|
19417
|
+
|
|
19418
|
+
/**
 * Error type raised by VectorPlugin operations (search, clustering,
 * distance computation, storage validation).
 *
 * Always tags the error with `pluginName: "VectorPlugin"` (overridable
 * via `details`) and synthesizes a verbose troubleshooting `description`
 * unless the caller supplies a truthy one.
 */
class VectorError extends PluginError {
  constructor(message, details = {}) {
    // Falsy-check on purpose: an empty-string description is replaced too.
    const description = details.description || `
Vector Plugin Error

Operation: ${details.operation || "unknown"}

Common causes:
1. Vector dimension mismatch between vectors
2. Invalid distance metric specified (must be: cosine, euclidean, manhattan)
3. Empty vector array provided for clustering
4. k value larger than number of available vectors
5. Vector field not found or invalid in resource
6. Large vectors without proper behavior (use 'body-overflow' or 'body-only')

Available distance metrics:
- cosine: Best for normalized vectors, semantic similarity. Range: [0, 2]
- euclidean: Standard L2 distance, geometric proximity. Range: [0, \u221E)
- manhattan: L1 distance, faster computation. Range: [0, \u221E)

Storage considerations:
- Vectors > 250 dimensions may exceed S3 metadata limit (2KB)
- Use behavior: 'body-overflow' or 'body-only' for large vectors
- OpenAI ada-002 (1536 dims): ~10KB, requires body storage
- Sentence Transformers (384 dims): ~2.7KB, requires body storage
`.trim();
    super(message, {
      pluginName: "VectorPlugin",
      ...details,
      description
    });
  }
}
|
|
19450
|
+
|
|
19451
|
+
class VectorPlugin extends Plugin {
|
|
19452
|
+
constructor(options = {}) {
|
|
19453
|
+
super(options);
|
|
19454
|
+
this.config = {
|
|
19455
|
+
dimensions: 1536,
|
|
19456
|
+
// Default to OpenAI text-embedding-3-small/3-large
|
|
19457
|
+
distanceMetric: "cosine",
|
|
19458
|
+
// Default metric
|
|
19459
|
+
storageThreshold: 1500,
|
|
19460
|
+
// Bytes - warn if vectors exceed this
|
|
19461
|
+
autoFixBehavior: false,
|
|
19462
|
+
// Automatically set body-overflow
|
|
19463
|
+
autoDetectVectorField: true,
|
|
19464
|
+
// Auto-detect embedding:XXX fields
|
|
19465
|
+
emitEvents: true,
|
|
19466
|
+
// Emit events for monitoring
|
|
19467
|
+
verboseEvents: false,
|
|
19468
|
+
// Emit detailed progress events
|
|
19469
|
+
eventThrottle: 100,
|
|
19470
|
+
// Throttle progress events (ms)
|
|
19471
|
+
...options
|
|
19472
|
+
};
|
|
19473
|
+
this.distanceFunctions = {
|
|
19474
|
+
cosine: cosineDistance,
|
|
19475
|
+
euclidean: euclideanDistance,
|
|
19476
|
+
manhattan: manhattanDistance
|
|
19477
|
+
};
|
|
19478
|
+
this._vectorFieldCache = /* @__PURE__ */ new Map();
|
|
19479
|
+
this._throttleState = /* @__PURE__ */ new Map();
|
|
19480
|
+
}
|
|
19481
|
+
async onInstall() {
|
|
19482
|
+
this.emit("installed", { plugin: "VectorPlugin" });
|
|
19483
|
+
this.validateVectorStorage();
|
|
19484
|
+
this.installResourceMethods();
|
|
19485
|
+
}
|
|
19486
|
+
async onStart() {
|
|
19487
|
+
this.emit("started", { plugin: "VectorPlugin" });
|
|
19488
|
+
}
|
|
19489
|
+
async onStop() {
|
|
19490
|
+
this.emit("stopped", { plugin: "VectorPlugin" });
|
|
19491
|
+
}
|
|
19492
|
+
async onUninstall(options) {
|
|
19493
|
+
for (const resource of Object.values(this.database.resources)) {
|
|
19494
|
+
delete resource.vectorSearch;
|
|
19495
|
+
delete resource.cluster;
|
|
19496
|
+
delete resource.vectorDistance;
|
|
19497
|
+
delete resource.similarTo;
|
|
19498
|
+
delete resource.findSimilar;
|
|
19499
|
+
delete resource.distance;
|
|
19500
|
+
}
|
|
19501
|
+
this.emit("uninstalled", { plugin: "VectorPlugin" });
|
|
19502
|
+
}
|
|
19503
|
+
/**
|
|
19504
|
+
* Validate vector storage configuration for all resources
|
|
19505
|
+
*
|
|
19506
|
+
* Detects large vector fields and warns if proper behavior is not set.
|
|
19507
|
+
* Can optionally auto-fix by setting body-overflow behavior.
|
|
19508
|
+
*/
|
|
19509
|
+
validateVectorStorage() {
|
|
19510
|
+
for (const resource of Object.values(this.database.resources)) {
|
|
19511
|
+
const vectorFields = this.findVectorFields(resource.schema.attributes);
|
|
19512
|
+
if (vectorFields.length === 0) continue;
|
|
19513
|
+
const totalVectorSize = vectorFields.reduce((sum, f) => sum + f.estimatedBytes, 0);
|
|
19514
|
+
if (totalVectorSize > this.config.storageThreshold) {
|
|
19515
|
+
const hasCorrectBehavior = ["body-overflow", "body-only"].includes(resource.behavior);
|
|
19516
|
+
if (!hasCorrectBehavior) {
|
|
19517
|
+
const warning = {
|
|
19518
|
+
resource: resource.name,
|
|
19519
|
+
vectorFields: vectorFields.map((f) => ({
|
|
19520
|
+
field: f.name,
|
|
19521
|
+
dimensions: f.length,
|
|
19522
|
+
estimatedBytes: f.estimatedBytes
|
|
19523
|
+
})),
|
|
19524
|
+
totalEstimatedBytes: totalVectorSize,
|
|
19525
|
+
metadataLimit: 2047,
|
|
19526
|
+
currentBehavior: resource.behavior || "default",
|
|
19527
|
+
recommendation: "body-overflow"
|
|
19528
|
+
};
|
|
19529
|
+
this.emit("vector:storage-warning", warning);
|
|
19530
|
+
if (this.config.autoFixBehavior) {
|
|
19531
|
+
resource.behavior = "body-overflow";
|
|
19532
|
+
this.emit("vector:behavior-fixed", {
|
|
19533
|
+
resource: resource.name,
|
|
19534
|
+
newBehavior: "body-overflow"
|
|
19535
|
+
});
|
|
19536
|
+
} else {
|
|
19537
|
+
console.warn(`\u26A0\uFE0F VectorPlugin: Resource '${resource.name}' has large vector fields (${totalVectorSize} bytes estimated)`);
|
|
19538
|
+
console.warn(` Current behavior: '${resource.behavior || "default"}'`);
|
|
19539
|
+
console.warn(` Recommendation: Add behavior: 'body-overflow' or 'body-only' to resource configuration`);
|
|
19540
|
+
console.warn(` Large vectors will exceed S3 metadata limit (2047 bytes) and cause errors.`);
|
|
19541
|
+
}
|
|
19542
|
+
}
|
|
19543
|
+
}
|
|
19544
|
+
}
|
|
19545
|
+
}
|
|
19546
|
+
/**
|
|
19547
|
+
* Auto-detect vector field from resource schema
|
|
19548
|
+
*
|
|
19549
|
+
* Looks for fields with type 'embedding:XXX' pattern.
|
|
19550
|
+
* Caches result per resource for performance.
|
|
19551
|
+
*
|
|
19552
|
+
* @param {Resource} resource - Resource instance
|
|
19553
|
+
* @returns {string|null} Detected vector field name or null
|
|
19554
|
+
*/
|
|
19555
|
+
detectVectorField(resource) {
|
|
19556
|
+
if (this._vectorFieldCache.has(resource.name)) {
|
|
19557
|
+
return this._vectorFieldCache.get(resource.name);
|
|
19558
|
+
}
|
|
19559
|
+
const vectorField = this._findEmbeddingField(resource.schema.attributes);
|
|
19560
|
+
this._vectorFieldCache.set(resource.name, vectorField);
|
|
19561
|
+
if (vectorField && this.config.emitEvents) {
|
|
19562
|
+
this.emit("vector:field-detected", {
|
|
19563
|
+
resource: resource.name,
|
|
19564
|
+
vectorField,
|
|
19565
|
+
timestamp: Date.now()
|
|
19566
|
+
});
|
|
19567
|
+
}
|
|
19568
|
+
return vectorField;
|
|
19569
|
+
}
|
|
19570
|
+
/**
|
|
19571
|
+
* Recursively find embedding:XXX field in attributes
|
|
19572
|
+
*
|
|
19573
|
+
* @param {Object} attributes - Resource attributes
|
|
19574
|
+
* @param {string} path - Current path (for nested objects)
|
|
19575
|
+
* @returns {string|null} Field path or null
|
|
19576
|
+
*/
|
|
19577
|
+
_findEmbeddingField(attributes, path = "") {
|
|
19578
|
+
for (const [key, attr] of Object.entries(attributes)) {
|
|
19579
|
+
const fullPath = path ? `${path}.${key}` : key;
|
|
19580
|
+
if (typeof attr === "string" && attr.startsWith("embedding:")) {
|
|
19581
|
+
return fullPath;
|
|
19582
|
+
}
|
|
19583
|
+
if (attr.type === "array" && attr.items === "number" && attr.length) {
|
|
19584
|
+
return fullPath;
|
|
19585
|
+
}
|
|
19586
|
+
if (attr.type === "object" && attr.props) {
|
|
19587
|
+
const nested = this._findEmbeddingField(attr.props, fullPath);
|
|
19588
|
+
if (nested) return nested;
|
|
19589
|
+
}
|
|
19590
|
+
}
|
|
19591
|
+
return null;
|
|
19592
|
+
}
|
|
19593
|
+
/**
|
|
19594
|
+
* Emit event with throttling support
|
|
19595
|
+
*
|
|
19596
|
+
* @param {string} eventName - Event name
|
|
19597
|
+
* @param {Object} data - Event data
|
|
19598
|
+
* @param {string} throttleKey - Unique key for throttling (optional)
|
|
19599
|
+
*/
|
|
19600
|
+
_emitEvent(eventName, data, throttleKey = null) {
|
|
19601
|
+
if (!this.config.emitEvents) return;
|
|
19602
|
+
if (throttleKey) {
|
|
19603
|
+
const now = Date.now();
|
|
19604
|
+
const lastEmit = this._throttleState.get(throttleKey);
|
|
19605
|
+
if (lastEmit && now - lastEmit < this.config.eventThrottle) {
|
|
19606
|
+
return;
|
|
19607
|
+
}
|
|
19608
|
+
this._throttleState.set(throttleKey, now);
|
|
19609
|
+
}
|
|
19610
|
+
this.emit(eventName, data);
|
|
19611
|
+
}
|
|
19612
|
+
/**
|
|
19613
|
+
* Find vector fields in resource attributes
|
|
19614
|
+
*
|
|
19615
|
+
* @param {Object} attributes - Resource attributes
|
|
19616
|
+
* @param {string} path - Current path (for nested objects)
|
|
19617
|
+
* @returns {Array} Array of vector field info
|
|
19618
|
+
*/
|
|
19619
|
+
findVectorFields(attributes, path = "") {
|
|
19620
|
+
const vectors = [];
|
|
19621
|
+
for (const [key, attr] of Object.entries(attributes)) {
|
|
19622
|
+
const fullPath = path ? `${path}.${key}` : key;
|
|
19623
|
+
if (attr.type === "array" && attr.items === "number" && attr.length) {
|
|
19624
|
+
vectors.push({
|
|
19625
|
+
name: fullPath,
|
|
19626
|
+
length: attr.length,
|
|
19627
|
+
estimatedBytes: this.estimateVectorBytes(attr.length)
|
|
19628
|
+
});
|
|
19629
|
+
}
|
|
19630
|
+
if (attr.type === "object" && attr.props) {
|
|
19631
|
+
vectors.push(...this.findVectorFields(attr.props, fullPath));
|
|
19632
|
+
}
|
|
19633
|
+
}
|
|
19634
|
+
return vectors;
|
|
19635
|
+
}
|
|
19636
|
+
/**
|
|
19637
|
+
* Estimate bytes required to store a vector in JSON format
|
|
19638
|
+
*
|
|
19639
|
+
* Conservative estimate: ~7 bytes per number + array overhead
|
|
19640
|
+
*
|
|
19641
|
+
* @param {number} dimensions - Number of dimensions
|
|
19642
|
+
* @returns {number} Estimated bytes
|
|
19643
|
+
*/
|
|
19644
|
+
estimateVectorBytes(dimensions) {
|
|
19645
|
+
return dimensions * 7 + 50;
|
|
19646
|
+
}
|
|
19647
|
+
/**
|
|
19648
|
+
* Install vector methods on all resources
|
|
19649
|
+
*/
|
|
19650
|
+
installResourceMethods() {
|
|
19651
|
+
for (const resource of Object.values(this.database.resources)) {
|
|
19652
|
+
const searchMethod = this.createVectorSearchMethod(resource);
|
|
19653
|
+
const clusterMethod = this.createClusteringMethod(resource);
|
|
19654
|
+
const distanceMethod = this.createDistanceMethod();
|
|
19655
|
+
resource.vectorSearch = searchMethod;
|
|
19656
|
+
resource.cluster = clusterMethod;
|
|
19657
|
+
resource.vectorDistance = distanceMethod;
|
|
19658
|
+
resource.similarTo = searchMethod;
|
|
19659
|
+
resource.findSimilar = searchMethod;
|
|
19660
|
+
resource.distance = distanceMethod;
|
|
19661
|
+
}
|
|
19662
|
+
}
|
|
19663
|
+
/**
|
|
19664
|
+
* Create vector search method for a resource
|
|
19665
|
+
*
|
|
19666
|
+
* Performs K-nearest neighbors search to find similar vectors.
|
|
19667
|
+
*
|
|
19668
|
+
* @param {Resource} resource - Resource instance
|
|
19669
|
+
* @returns {Function} Vector search method
|
|
19670
|
+
*/
|
|
19671
|
+
createVectorSearchMethod(resource) {
  // Returns an async closure bound to `resource` that performs a brute-force
  // K-nearest-neighbors scan: load records, compute distances, filter by
  // threshold, sort ascending, truncate to `limit`.
  return async (queryVector, options = {}) => {
    const startTime = Date.now();
    // Resolve the vector field: explicit option wins, then auto-detection
    // (when enabled), then the literal field name "vector" as fallback.
    let vectorField = options.vectorField;
    if (!vectorField && this.config.autoDetectVectorField) {
      vectorField = this.detectVectorField(resource);
      if (!vectorField) {
        vectorField = "vector";
      }
    } else if (!vectorField) {
      vectorField = "vector";
    }
    const {
      limit = 10,
      distanceMetric = this.config.distanceMetric,
      threshold = null,
      partition = null
    } = options;
    const distanceFn = this.distanceFunctions[distanceMetric];
    // Unknown metric: emit an error event and throw before any I/O happens.
    if (!distanceFn) {
      const error = new VectorError(`Invalid distance metric: ${distanceMetric}`, {
        operation: "vectorSearch",
        availableMetrics: Object.keys(this.distanceFunctions),
        providedMetric: distanceMetric
      });
      this._emitEvent("vector:search-error", {
        resource: resource.name,
        error: error.message,
        timestamp: Date.now()
      });
      throw error;
    }
    this._emitEvent("vector:search-start", {
      resource: resource.name,
      vectorField,
      limit,
      distanceMetric,
      partition,
      threshold,
      queryDimensions: queryVector.length,
      timestamp: startTime
    });
    try {
      // Load candidates: a partition-scoped listing when requested,
      // otherwise the full resource contents.
      let allRecords;
      if (partition) {
        this._emitEvent("vector:partition-filter", {
          resource: resource.name,
          partition,
          timestamp: Date.now()
        });
        allRecords = await resource.list({ partition, partitionValues: partition });
      } else {
        allRecords = await resource.getAll();
      }
      const totalRecords = allRecords.length;
      let processedRecords = 0;
      let dimensionMismatches = 0;
      // Score every record that carries an array in the vector field.
      // Records whose distance computation throws (e.g. mismatched
      // dimensions) are counted and dropped via the null filter below.
      const results = allRecords.filter((record) => record[vectorField] && Array.isArray(record[vectorField])).map((record, index) => {
        try {
          const distance = distanceFn(queryVector, record[vectorField]);
          processedRecords++;
          // Verbose mode: progress event every 100 processed records,
          // throttled per resource via the `search-<name>` key.
          if (this.config.verboseEvents && processedRecords % 100 === 0) {
            this._emitEvent("vector:search-progress", {
              resource: resource.name,
              processed: processedRecords,
              total: totalRecords,
              progress: processedRecords / totalRecords * 100,
              timestamp: Date.now()
            }, `search-${resource.name}`);
          }
          return { record, distance };
        } catch (err) {
          dimensionMismatches++;
          if (this.config.verboseEvents) {
            this._emitEvent("vector:dimension-mismatch", {
              resource: resource.name,
              recordIndex: index,
              expected: queryVector.length,
              got: record[vectorField]?.length,
              timestamp: Date.now()
            });
          }
          return null;
        }
      }).filter((result) => result !== null).filter((result) => threshold === null || result.distance <= threshold).sort((a, b) => a.distance - b.distance).slice(0, limit);
      const duration = Date.now() - startTime;
      // NOTE(review): when duration is 0 ms this yields Infinity, which
      // toFixed renders as "Infinity" in the events below.
      const throughput = totalRecords / (duration / 1e3);
      this._emitEvent("vector:search-complete", {
        resource: resource.name,
        vectorField,
        resultsCount: results.length,
        totalRecords,
        processedRecords,
        dimensionMismatches,
        duration,
        throughput: throughput.toFixed(2),
        timestamp: Date.now()
      });
      if (this.config.verboseEvents) {
        this._emitEvent("vector:performance", {
          operation: "search",
          resource: resource.name,
          duration,
          throughput: throughput.toFixed(2),
          recordsPerSecond: (processedRecords / (duration / 1e3)).toFixed(2),
          timestamp: Date.now()
        });
      }
      // Each result is { record, distance }, ordered nearest-first.
      return results;
    } catch (error) {
      // Surface any failure as an event, then rethrow unchanged.
      this._emitEvent("vector:search-error", {
        resource: resource.name,
        error: error.message,
        stack: error.stack,
        timestamp: Date.now()
      });
      throw error;
    }
  };
}
|
|
19791
|
+
/**
|
|
19792
|
+
* Create clustering method for a resource
|
|
19793
|
+
*
|
|
19794
|
+
* Performs k-means clustering on resource vectors.
|
|
19795
|
+
*
|
|
19796
|
+
* @param {Resource} resource - Resource instance
|
|
19797
|
+
* @returns {Function} Clustering method
|
|
19798
|
+
*/
|
|
19799
|
+
createClusteringMethod(resource) {
  // Returns an async closure bound to `resource` that runs k-means over the
  // records' vector field and groups the full records by cluster assignment.
  return async (options = {}) => {
    const startTime = Date.now();
    // Resolve the vector field exactly like vectorSearch: explicit option,
    // then auto-detection (when enabled), then the literal name "vector".
    let vectorField = options.vectorField;
    if (!vectorField && this.config.autoDetectVectorField) {
      vectorField = this.detectVectorField(resource);
      if (!vectorField) {
        vectorField = "vector";
      }
    } else if (!vectorField) {
      vectorField = "vector";
    }
    // Remaining options (maxIterations, seed, ...) pass straight to kmeans.
    const {
      k = 5,
      distanceMetric = this.config.distanceMetric,
      partition = null,
      ...kmeansOptions
    } = options;
    const distanceFn = this.distanceFunctions[distanceMetric];
    // Unknown metric: emit an error event and throw before any I/O happens.
    if (!distanceFn) {
      const error = new VectorError(`Invalid distance metric: ${distanceMetric}`, {
        operation: "cluster",
        availableMetrics: Object.keys(this.distanceFunctions),
        providedMetric: distanceMetric
      });
      this._emitEvent("vector:cluster-error", {
        resource: resource.name,
        error: error.message,
        timestamp: Date.now()
      });
      throw error;
    }
    this._emitEvent("vector:cluster-start", {
      resource: resource.name,
      vectorField,
      k,
      distanceMetric,
      partition,
      maxIterations: kmeansOptions.maxIterations || 100,
      timestamp: startTime
    });
    try {
      // Load candidates: partition-scoped listing or the whole resource.
      let allRecords;
      if (partition) {
        this._emitEvent("vector:partition-filter", {
          resource: resource.name,
          partition,
          timestamp: Date.now()
        });
        allRecords = await resource.list({ partition, partitionValues: partition });
      } else {
        allRecords = await resource.getAll();
      }
      const recordsWithVectors = allRecords.filter(
        (record) => record[vectorField] && Array.isArray(record[vectorField])
      );
      // Nothing to cluster: emit an empty-dataset event and throw.
      if (recordsWithVectors.length === 0) {
        const error = new VectorError("No vectors found in resource", {
          operation: "cluster",
          resourceName: resource.name,
          vectorField
        });
        this._emitEvent("vector:empty-dataset", {
          resource: resource.name,
          vectorField,
          totalRecords: allRecords.length,
          timestamp: Date.now()
        });
        throw error;
      }
      const vectors = recordsWithVectors.map((record) => record[vectorField]);
      // Run k-means; in verbose mode wire a per-iteration callback that
      // emits throttled progress events keyed by `cluster-<name>`.
      const result = kmeans(vectors, k, {
        ...kmeansOptions,
        distanceFn,
        onIteration: this.config.verboseEvents ? (iteration, inertia, converged) => {
          this._emitEvent("vector:cluster-iteration", {
            resource: resource.name,
            k,
            iteration,
            inertia,
            converged,
            timestamp: Date.now()
          }, `cluster-${resource.name}`);
        } : void 0
      });
      if (result.converged) {
        this._emitEvent("vector:cluster-converged", {
          resource: resource.name,
          k,
          iterations: result.iterations,
          inertia: result.inertia,
          timestamp: Date.now()
        });
      }
      // Group the original records by their assigned cluster index
      // (same ordering as `vectors`, so indices line up).
      const clusters = Array(k).fill(null).map(() => []);
      recordsWithVectors.forEach((record, i) => {
        const clusterIndex = result.assignments[i];
        clusters[clusterIndex].push(record);
      });
      const duration = Date.now() - startTime;
      const clusterSizes = clusters.map((c) => c.length);
      this._emitEvent("vector:cluster-complete", {
        resource: resource.name,
        vectorField,
        k,
        vectorCount: vectors.length,
        iterations: result.iterations,
        converged: result.converged,
        inertia: result.inertia,
        clusterSizes,
        duration,
        timestamp: Date.now()
      });
      if (this.config.verboseEvents) {
        // NOTE(review): duration of 0 ms yields "Infinity" rates here.
        this._emitEvent("vector:performance", {
          operation: "clustering",
          resource: resource.name,
          k,
          duration,
          iterationsPerSecond: (result.iterations / (duration / 1e3)).toFixed(2),
          vectorsPerSecond: (vectors.length / (duration / 1e3)).toFixed(2),
          timestamp: Date.now()
        });
      }
      return {
        clusters,
        centroids: result.centroids,
        inertia: result.inertia,
        iterations: result.iterations,
        converged: result.converged
      };
    } catch (error) {
      // Surface any failure as an event, then rethrow unchanged.
      this._emitEvent("vector:cluster-error", {
        resource: resource.name,
        error: error.message,
        stack: error.stack,
        timestamp: Date.now()
      });
      throw error;
    }
  };
}
|
|
19941
|
+
/**
|
|
19942
|
+
* Create distance calculation method
|
|
19943
|
+
*
|
|
19944
|
+
* @returns {Function} Distance method
|
|
19945
|
+
*/
|
|
19946
|
+
createDistanceMethod() {
|
|
19947
|
+
return (vector1, vector2, metric = this.config.distanceMetric) => {
|
|
19948
|
+
const distanceFn = this.distanceFunctions[metric];
|
|
19949
|
+
if (!distanceFn) {
|
|
19950
|
+
throw new VectorError(`Invalid distance metric: ${metric}`, {
|
|
19951
|
+
operation: "vectorDistance",
|
|
19952
|
+
availableMetrics: Object.keys(this.distanceFunctions),
|
|
19953
|
+
providedMetric: metric
|
|
19954
|
+
});
|
|
19955
|
+
}
|
|
19956
|
+
return distanceFn(vector1, vector2);
|
|
19957
|
+
};
|
|
19958
|
+
}
|
|
19959
|
+
/**
|
|
19960
|
+
* Static utility: Normalize vector
|
|
19961
|
+
*
|
|
19962
|
+
* @param {number[]} vector - Input vector
|
|
19963
|
+
* @returns {number[]} Normalized vector
|
|
19964
|
+
*/
|
|
19965
|
+
static normalize(vector) {
  // Thin static facade over the module-level normalize() helper so callers
  // can reach it through the plugin class without extra imports.
  return normalize(vector);
}
|
|
19968
|
+
/**
|
|
19969
|
+
* Static utility: Calculate dot product
|
|
19970
|
+
*
|
|
19971
|
+
* @param {number[]} vector1 - First vector
|
|
19972
|
+
* @param {number[]} vector2 - Second vector
|
|
19973
|
+
* @returns {number} Dot product
|
|
19974
|
+
*/
|
|
19975
|
+
static dotProduct(vector1, vector2) {
  // Thin static facade over the module-level dotProduct() helper.
  return dotProduct(vector1, vector2);
}
|
|
19978
|
+
/**
|
|
19979
|
+
* Static utility: Find optimal K for clustering
|
|
19980
|
+
*
|
|
19981
|
+
* Analyzes clustering quality across a range of K values using
|
|
19982
|
+
* multiple evaluation metrics.
|
|
19983
|
+
*
|
|
19984
|
+
* @param {number[][]} vectors - Vectors to analyze
|
|
19985
|
+
* @param {Object} options - Configuration options
|
|
19986
|
+
* @returns {Promise<Object>} Analysis results with recommendations
|
|
19987
|
+
*/
|
|
19988
|
+
static async findOptimalK(vectors, options) {
  // Thin static facade over the module-level findOptimalK() helper;
  // forwards both arguments and returns its promise unchanged.
  return findOptimalK(vectors, options);
}
|
|
19991
|
+
}
|
|
19992
|
+
|
|
19993
|
+
/**
 * Mean silhouette coefficient of a clustering, in [-1, 1].
 *
 * For each point: a = mean distance to the other members of its own cluster,
 * b = smallest mean distance to any other non-empty cluster; the point's
 * silhouette is (b - a) / max(a, b). Points in singleton clusters are
 * skipped, and 0 is returned when every cluster has at most one member.
 *
 * @param {number[][]} vectors - Data points
 * @param {number[]} assignments - Cluster index per point
 * @param {number[][]} centroids - Cluster centroids (only the count is used)
 * @param {Function} [distanceFn] - Pairwise distance function
 * @returns {number} Average silhouette over scorable points
 */
function silhouetteScore(vectors, assignments, centroids, distanceFn = euclideanDistance) {
  const numClusters = centroids.length;
  const numPoints = vectors.length;
  // Bucket point indices by cluster.
  const members = Array.from({ length: numClusters }, () => []);
  for (let i = 0; i < numPoints; i++) {
    members[assignments[i]].push(i);
  }
  // Degenerate case: only singletons (or empties) — silhouette undefined.
  if (members.every((m) => m.length <= 1)) {
    return 0;
  }
  let sum = 0;
  let counted = 0;
  for (let i = 0; i < numPoints; i++) {
    const ownCluster = assignments[i];
    const own = members[ownCluster];
    if (own.length === 1) continue;
    // a: mean distance to the rest of the point's own cluster.
    let intra = 0;
    for (const j of own) {
      if (j !== i) {
        intra += distanceFn(vectors[i], vectors[j]);
      }
    }
    intra /= own.length - 1;
    // b: mean distance to the nearest other non-empty cluster.
    let nearest = Infinity;
    for (let c = 0; c < numClusters; c++) {
      if (c === ownCluster) continue;
      const other = members[c];
      if (other.length === 0) continue;
      let total = 0;
      for (const j of other) {
        total += distanceFn(vectors[i], vectors[j]);
      }
      nearest = Math.min(nearest, total / other.length);
    }
    if (nearest === Infinity) continue;
    const denom = Math.max(intra, nearest);
    sum += denom === 0 ? 0 : (nearest - intra) / denom;
    counted++;
  }
  return counted > 0 ? sum / counted : 0;
}
|
|
20036
|
+
/**
 * Davies-Bouldin index of a clustering (lower is better).
 *
 * Each cluster's scatter is the mean distance of its points to its centroid;
 * the index averages, over non-empty clusters, the worst ratio
 * (scatter_i + scatter_j) / distance(centroid_i, centroid_j).
 *
 * @param {number[][]} vectors - Data points
 * @param {number[]} assignments - Cluster index per point
 * @param {number[][]} centroids - Cluster centroids
 * @param {Function} [distanceFn] - Pairwise distance function
 * @returns {number} Davies-Bouldin index (0 when no valid clusters)
 */
function daviesBouldinIndex(vectors, assignments, centroids, distanceFn = euclideanDistance) {
  const k = centroids.length;
  const spread = new Array(k).fill(0);
  const counts = new Array(k).fill(0);
  // Accumulate per-cluster scatter and membership counts.
  for (let i = 0; i < vectors.length; i++) {
    const c = assignments[i];
    spread[c] += distanceFn(vectors[i], centroids[c]);
    counts[c] += 1;
  }
  for (let c = 0; c < k; c++) {
    if (counts[c] > 0) {
      spread[c] /= counts[c];
    }
  }
  let total = 0;
  let nonEmpty = 0;
  for (let i = 0; i < k; i++) {
    if (counts[i] === 0) continue;
    // Worst similarity ratio against every other non-empty cluster;
    // coincident centroids (zero separation) are skipped.
    let worst = 0;
    for (let j = 0; j < k; j++) {
      if (i === j || counts[j] === 0) continue;
      const separation = distanceFn(centroids[i], centroids[j]);
      if (separation === 0) continue;
      worst = Math.max(worst, (spread[i] + spread[j]) / separation);
    }
    total += worst;
    nonEmpty++;
  }
  return nonEmpty > 0 ? total / nonEmpty : 0;
}
|
|
20067
|
+
/**
 * Calinski-Harabasz index (variance-ratio criterion); higher is better.
 *
 * Ratio of between-cluster dispersion (per cluster: size * dist(centroid,
 * grand mean)^2, normalized by k-1) to within-cluster dispersion (squared
 * point-to-centroid distances, normalized by n-k).
 *
 * @param {number[][]} vectors - Data points
 * @param {number[]} assignments - Cluster index per point
 * @param {number[][]} centroids - Cluster centroids
 * @param {Function} [distanceFn] - Pairwise distance function
 * @returns {number} CH index; 0 when k is 1, k equals n, or dispersion is 0
 */
function calinskiHarabaszIndex(vectors, assignments, centroids, distanceFn = euclideanDistance) {
  const n = vectors.length;
  const k = centroids.length;
  // Undefined for a single cluster or one point per cluster.
  if (k === 1 || k === n) return 0;
  const dims = vectors[0].length;
  // Grand mean of all points, dimension by dimension.
  const grandMean = new Array(dims).fill(0);
  for (const v of vectors) {
    for (let d = 0; d < dims; d++) {
      grandMean[d] += v[d];
    }
  }
  for (let d = 0; d < dims; d++) {
    grandMean[d] /= n;
  }
  const sizes = new Array(k).fill(0);
  for (let i = 0; i < n; i++) {
    sizes[assignments[i]]++;
  }
  // Between-group sum of squares.
  let between = 0;
  for (let c = 0; c < k; c++) {
    if (sizes[c] === 0) continue;
    const dist = distanceFn(centroids[c], grandMean);
    between += sizes[c] * dist * dist;
  }
  // Within-group sum of squares.
  let within = 0;
  for (let i = 0; i < n; i++) {
    const dist = distanceFn(vectors[i], centroids[assignments[i]]);
    within += dist * dist;
  }
  if (within === 0) return 0;
  return between / (k - 1) / (within / (n - k));
}
|
|
20100
|
+
async function gapStatistic(vectors, assignments, centroids, distanceFn = euclideanDistance, nReferences = 10) {
|
|
20101
|
+
const n = vectors.length;
|
|
20102
|
+
const k = centroids.length;
|
|
20103
|
+
const dimensions = vectors[0].length;
|
|
20104
|
+
let wk = 0;
|
|
20105
|
+
vectors.forEach((vector, i) => {
|
|
20106
|
+
const dist = distanceFn(vector, centroids[assignments[i]]);
|
|
20107
|
+
wk += dist * dist;
|
|
20108
|
+
});
|
|
20109
|
+
wk = Math.log(wk + 1e-10);
|
|
20110
|
+
const referenceWks = [];
|
|
20111
|
+
const mins = new Array(dimensions).fill(Infinity);
|
|
20112
|
+
const maxs = new Array(dimensions).fill(-Infinity);
|
|
20113
|
+
vectors.forEach((vector) => {
|
|
20114
|
+
vector.forEach((val, dim) => {
|
|
20115
|
+
mins[dim] = Math.min(mins[dim], val);
|
|
20116
|
+
maxs[dim] = Math.max(maxs[dim], val);
|
|
20117
|
+
});
|
|
20118
|
+
});
|
|
20119
|
+
for (let ref = 0; ref < nReferences; ref++) {
|
|
20120
|
+
const refVectors = [];
|
|
20121
|
+
for (let i = 0; i < n; i++) {
|
|
20122
|
+
const refVector = new Array(dimensions);
|
|
20123
|
+
for (let dim = 0; dim < dimensions; dim++) {
|
|
20124
|
+
refVector[dim] = mins[dim] + Math.random() * (maxs[dim] - mins[dim]);
|
|
20125
|
+
}
|
|
20126
|
+
refVectors.push(refVector);
|
|
20127
|
+
}
|
|
20128
|
+
const refResult = kmeans(refVectors, k, { maxIterations: 50, distanceFn });
|
|
20129
|
+
let refWk = 0;
|
|
20130
|
+
refVectors.forEach((vector, i) => {
|
|
20131
|
+
const dist = distanceFn(vector, refResult.centroids[refResult.assignments[i]]);
|
|
20132
|
+
refWk += dist * dist;
|
|
20133
|
+
});
|
|
20134
|
+
referenceWks.push(Math.log(refWk + 1e-10));
|
|
20135
|
+
}
|
|
20136
|
+
const expectedWk = referenceWks.reduce((a, b) => a + b, 0) / nReferences;
|
|
20137
|
+
const gap = expectedWk - wk;
|
|
20138
|
+
const sdk = Math.sqrt(
|
|
20139
|
+
referenceWks.reduce((sum, wk2) => sum + Math.pow(wk2 - expectedWk, 2), 0) / nReferences
|
|
20140
|
+
);
|
|
20141
|
+
const sk = sdk * Math.sqrt(1 + 1 / nReferences);
|
|
20142
|
+
return { gap, sk, expectedWk, actualWk: wk };
|
|
20143
|
+
}
|
|
20144
|
+
/**
 * Stability analysis of k-means for a given k: runs the algorithm nRuns
 * times with different seeds and reports inertia statistics plus the mean
 * pairwise assignment similarity across runs.
 *
 * @param {number[][]} vectors - Data points
 * @param {number} k - Number of clusters
 * @param {Object} [options] - nRuns, distanceFn, plus kmeans pass-through
 * @returns {Object} { avgInertia, stdInertia, cvInertia, avgSimilarity, stability }
 */
function clusteringStability(vectors, k, options = {}) {
  const {
    nRuns = 10,
    distanceFn = euclideanDistance,
    ...passThrough
  } = options;
  const inertias = [];
  const runAssignments = [];
  for (let seed = 0; seed < nRuns; seed++) {
    // A distinct seed per run so initial centroids differ.
    const result = kmeans(vectors, k, { ...passThrough, distanceFn, seed });
    inertias.push(result.inertia);
    runAssignments.push(result.assignments);
  }
  // Pairwise agreement between every pair of runs.
  const pairScores = [];
  for (let i = 0; i < nRuns - 1; i++) {
    for (let j = i + 1; j < nRuns; j++) {
      pairScores.push(calculateAssignmentSimilarity(runAssignments[i], runAssignments[j]));
    }
  }
  const avgInertia = inertias.reduce((a, b) => a + b, 0) / nRuns;
  const variance = inertias.reduce((sum, v) => sum + Math.pow(v - avgInertia, 2), 0) / nRuns;
  const stdInertia = Math.sqrt(variance);
  const avgSimilarity = pairScores.length > 0
    ? pairScores.reduce((a, b) => a + b, 0) / pairScores.length
    : 1;
  return {
    avgInertia,
    stdInertia,
    // Coefficient of variation of inertia across runs.
    cvInertia: avgInertia !== 0 ? stdInertia / avgInertia : 0,
    avgSimilarity,
    // Higher means the clustering is more reproducible.
    stability: avgSimilarity
  };
}
|
|
20184
|
+
/**
 * Pairwise agreement between two cluster assignments (Rand-index style).
 *
 * For every unordered pair of points, the two assignments agree when they
 * both place the pair in the same cluster or both in different clusters —
 * so the score is invariant to cluster relabeling. Returns 1 for fewer
 * than two points.
 *
 * @param {number[]} assignments1 - First assignment vector
 * @param {number[]} assignments2 - Second assignment vector (same length)
 * @returns {number} Fraction of agreeing pairs, in [0, 1]
 */
function calculateAssignmentSimilarity(assignments1, assignments2) {
  const n = assignments1.length;
  const totalPairs = n * (n - 1) / 2;
  if (totalPairs === 0) return 1;
  let agreements = 0;
  for (let i = 0; i < n - 1; i++) {
    for (let j = i + 1; j < n; j++) {
      const together1 = assignments1[i] === assignments1[j];
      const together2 = assignments2[i] === assignments2[j];
      if (together1 === together2) {
        agreements++;
      }
    }
  }
  return agreements / totalPairs;
}
|
|
20199
|
+
|
|
20200
|
+
// Bundler-generated namespace object grouping the clustering-quality metric
// functions; frozen (with a null prototype) so consumers cannot mutate it.
// The #__PURE__ annotation lets minifiers drop it when unused.
var metrics = /*#__PURE__*/Object.freeze({
  __proto__: null,
  calinskiHarabaszIndex: calinskiHarabaszIndex,
  clusteringStability: clusteringStability,
  daviesBouldinIndex: daviesBouldinIndex,
  gapStatistic: gapStatistic,
  silhouetteScore: silhouetteScore
});
|
|
20208
|
+
|
|
17896
20209
|
exports.AVAILABLE_BEHAVIORS = AVAILABLE_BEHAVIORS;
|
|
17897
20210
|
exports.AnalyticsNotEnabledError = AnalyticsNotEnabledError;
|
|
17898
20211
|
exports.AuditPlugin = AuditPlugin;
|
|
17899
20212
|
exports.AuthenticationError = AuthenticationError;
|
|
17900
20213
|
exports.BackupPlugin = BackupPlugin;
|
|
17901
20214
|
exports.BaseError = BaseError;
|
|
20215
|
+
exports.BehaviorError = BehaviorError;
|
|
17902
20216
|
exports.CachePlugin = CachePlugin;
|
|
17903
20217
|
exports.Client = Client;
|
|
17904
20218
|
exports.ConnectionString = ConnectionString;
|
|
@@ -17913,15 +20227,19 @@ exports.ErrorMap = ErrorMap;
|
|
|
17913
20227
|
exports.EventualConsistencyPlugin = EventualConsistencyPlugin;
|
|
17914
20228
|
exports.FullTextPlugin = FullTextPlugin;
|
|
17915
20229
|
exports.InvalidResourceItem = InvalidResourceItem;
|
|
20230
|
+
exports.MetadataLimitError = MetadataLimitError;
|
|
17916
20231
|
exports.MetricsPlugin = MetricsPlugin;
|
|
17917
20232
|
exports.MissingMetadata = MissingMetadata;
|
|
17918
20233
|
exports.NoSuchBucket = NoSuchBucket;
|
|
17919
20234
|
exports.NoSuchKey = NoSuchKey;
|
|
17920
20235
|
exports.NotFound = NotFound;
|
|
20236
|
+
exports.PartitionDriverError = PartitionDriverError;
|
|
17921
20237
|
exports.PartitionError = PartitionError;
|
|
17922
20238
|
exports.PermissionError = PermissionError;
|
|
17923
20239
|
exports.Plugin = Plugin;
|
|
20240
|
+
exports.PluginError = PluginError;
|
|
17924
20241
|
exports.PluginObject = PluginObject;
|
|
20242
|
+
exports.PluginStorageError = PluginStorageError;
|
|
17925
20243
|
exports.QueueConsumerPlugin = QueueConsumerPlugin;
|
|
17926
20244
|
exports.ReplicatorPlugin = ReplicatorPlugin;
|
|
17927
20245
|
exports.Resource = Resource;
|
|
@@ -17938,9 +20256,11 @@ exports.SchedulerPlugin = SchedulerPlugin;
|
|
|
17938
20256
|
exports.Schema = Schema;
|
|
17939
20257
|
exports.SchemaError = SchemaError;
|
|
17940
20258
|
exports.StateMachinePlugin = StateMachinePlugin;
|
|
20259
|
+
exports.StreamError = StreamError;
|
|
17941
20260
|
exports.UnknownError = UnknownError;
|
|
17942
20261
|
exports.ValidationError = ValidationError;
|
|
17943
20262
|
exports.Validator = Validator;
|
|
20263
|
+
exports.VectorPlugin = VectorPlugin;
|
|
17944
20264
|
exports.behaviors = behaviors;
|
|
17945
20265
|
exports.calculateAttributeNamesSize = calculateAttributeNamesSize;
|
|
17946
20266
|
exports.calculateAttributeSizes = calculateAttributeSizes;
|
|
@@ -17953,10 +20273,12 @@ exports.clearUTF8Memo = clearUTF8Memo;
|
|
|
17953
20273
|
exports.clearUTF8Memory = clearUTF8Memory;
|
|
17954
20274
|
exports.decode = decode;
|
|
17955
20275
|
exports.decodeDecimal = decodeDecimal;
|
|
20276
|
+
exports.decodeFixedPoint = decodeFixedPoint;
|
|
17956
20277
|
exports.decrypt = decrypt;
|
|
17957
20278
|
exports.default = S3db;
|
|
17958
20279
|
exports.encode = encode;
|
|
17959
20280
|
exports.encodeDecimal = encodeDecimal;
|
|
20281
|
+
exports.encodeFixedPoint = encodeFixedPoint;
|
|
17960
20282
|
exports.encrypt = encrypt;
|
|
17961
20283
|
exports.getBehavior = getBehavior;
|
|
17962
20284
|
exports.getSizeBreakdown = getSizeBreakdown;
|