s3db.js 12.3.0 → 13.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/s3db.cjs.js CHANGED
@@ -3016,12 +3016,6 @@ class ApiPlugin extends Plugin {
  async _createCompressionMiddleware() {
  return async (c, next) => {
  await next();
- const acceptEncoding = c.req.header("accept-encoding") || "";
- if (acceptEncoding.includes("gzip")) {
- c.header("Content-Encoding", "gzip");
- } else if (acceptEncoding.includes("deflate")) {
- c.header("Content-Encoding", "deflate");
- }
  };
  }
  /**
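The removed branch set a Content-Encoding header based on Accept-Encoding without ever compressing the response body, so clients would try to decode plain bytes. A minimal sketch of a working alternative, assuming the Hono framework that the (c, next) middleware signature suggests:

    // Hypothetical replacement: have middleware actually compress the stream
    // instead of only labelling it. hono/compress negotiates the encoding
    // from Accept-Encoding and encodes the response body itself.
    import { Hono } from "hono";
    import { compress } from "hono/compress";

    const app = new Hono();
    app.use("*", compress());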
@@ -11926,6 +11920,1780 @@ class MetricsPlugin extends Plugin {
  }
  }
 
+ class MLError extends Error {
+ constructor(message, context = {}) {
+ super(message);
+ this.name = "MLError";
+ this.context = context;
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ }
+ toJSON() {
+ return {
+ name: this.name,
+ message: this.message,
+ context: this.context,
+ stack: this.stack
+ };
+ }
+ }
+ class ModelConfigError extends MLError {
+ constructor(message, context = {}) {
+ super(message, context);
+ this.name = "ModelConfigError";
+ }
+ }
+ class TrainingError extends MLError {
+ constructor(message, context = {}) {
+ super(message, context);
+ this.name = "TrainingError";
+ }
+ }
+ let PredictionError$1 = class PredictionError extends MLError {
+ constructor(message, context = {}) {
+ super(message, context);
+ this.name = "PredictionError";
+ }
+ };
+ class ModelNotFoundError extends MLError {
+ constructor(message, context = {}) {
+ super(message, context);
+ this.name = "ModelNotFoundError";
+ }
+ }
+ let ModelNotTrainedError$1 = class ModelNotTrainedError extends MLError {
+ constructor(message, context = {}) {
+ super(message, context);
+ this.name = "ModelNotTrainedError";
+ }
+ };
+ class DataValidationError extends MLError {
+ constructor(message, context = {}) {
+ super(message, context);
+ this.name = "DataValidationError";
+ }
+ }
+ class InsufficientDataError extends MLError {
+ constructor(message, context = {}) {
+ super(message, context);
+ this.name = "InsufficientDataError";
+ }
+ }
+ class TensorFlowDependencyError extends MLError {
+ constructor(message = "TensorFlow.js is not installed. Run: pnpm add @tensorflow/tfjs-node", context = {}) {
+ super(message, context);
+ this.name = "TensorFlowDependencyError";
+ }
+ }
+
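Every class above funnels through MLError, so a caller can branch on a single instanceof check and log the structured context. A usage sketch, where model is any of the model classes defined below and the input is hypothetical:

    try {
      await model.predict({ visits: 3 });
    } catch (err) {
      if (err instanceof MLError) {
        // toJSON() exposes name, message, context, and stack for structured logs
        console.error(JSON.stringify(err.toJSON()));
      } else {
        throw err; // not an ML error, let it propagate
      }
    }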
+ class BaseModel {
+ constructor(config = {}) {
+ if (this.constructor === BaseModel) {
+ throw new Error("BaseModel is an abstract class and cannot be instantiated directly");
+ }
+ this.config = {
+ name: config.name || "unnamed",
+ resource: config.resource,
+ features: config.features || [],
+ target: config.target,
+ modelConfig: {
+ epochs: 50,
+ batchSize: 32,
+ learningRate: 0.01,
+ validationSplit: 0.2,
+ ...config.modelConfig
+ },
+ verbose: config.verbose || false
+ };
+ this.model = null;
+ this.isTrained = false;
+ this.normalizer = {
+ features: {},
+ target: {}
+ };
+ this.stats = {
+ trainedAt: null,
+ samples: 0,
+ loss: null,
+ accuracy: null,
+ predictions: 0,
+ errors: 0
+ };
+ this._validateTensorFlow();
+ }
+ /**
+ * Validate TensorFlow.js is installed
+ * @private
+ */
+ _validateTensorFlow() {
+ try {
+ this.tf = require("@tensorflow/tfjs-node");
+ } catch (error) {
+ throw new TensorFlowDependencyError(
+ "TensorFlow.js is not installed. Run: pnpm add @tensorflow/tfjs-node",
+ { originalError: error.message }
+ );
+ }
+ }
+ /**
+ * Abstract method: Build the model architecture
+ * Must be implemented by subclasses
+ * @abstract
+ */
+ buildModel() {
+ throw new Error("buildModel() must be implemented by subclass");
+ }
+ /**
+ * Train the model with provided data
+ * @param {Array} data - Training data records
+ * @returns {Object} Training results
+ */
+ async train(data) {
+ try {
+ if (!data || data.length === 0) {
+ throw new InsufficientDataError("No training data provided", {
+ model: this.config.name
+ });
+ }
+ const minSamples = this.config.modelConfig.batchSize || 10;
+ if (data.length < minSamples) {
+ throw new InsufficientDataError(
+ `Insufficient training data: ${data.length} samples (minimum: ${minSamples})`,
+ { model: this.config.name, samples: data.length, minimum: minSamples }
+ );
+ }
+ const { xs, ys } = this._prepareData(data);
+ if (!this.model) {
+ this.buildModel();
+ }
+ const history = await this.model.fit(xs, ys, {
+ epochs: this.config.modelConfig.epochs,
+ batchSize: this.config.modelConfig.batchSize,
+ validationSplit: this.config.modelConfig.validationSplit,
+ verbose: this.config.verbose ? 1 : 0,
+ callbacks: {
+ onEpochEnd: (epoch, logs) => {
+ if (this.config.verbose && epoch % 10 === 0) {
+ console.log(`[MLPlugin] ${this.config.name} - Epoch ${epoch}: loss=${logs.loss.toFixed(4)}`);
+ }
+ }
+ }
+ });
+ this.isTrained = true;
+ this.stats.trainedAt = (/* @__PURE__ */ new Date()).toISOString();
+ this.stats.samples = data.length;
+ this.stats.loss = history.history.loss[history.history.loss.length - 1];
+ if (history.history.acc) {
+ this.stats.accuracy = history.history.acc[history.history.acc.length - 1];
+ }
+ xs.dispose();
+ ys.dispose();
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] ${this.config.name} - Training completed:`, {
+ samples: this.stats.samples,
+ loss: this.stats.loss,
+ accuracy: this.stats.accuracy
+ });
+ }
+ return {
+ loss: this.stats.loss,
+ accuracy: this.stats.accuracy,
+ epochs: this.config.modelConfig.epochs,
+ samples: this.stats.samples
+ };
+ } catch (error) {
+ this.stats.errors++;
+ if (error instanceof InsufficientDataError || error instanceof DataValidationError) {
+ throw error;
+ }
+ throw new TrainingError(`Training failed: ${error.message}`, {
+ model: this.config.name,
+ originalError: error.message
+ });
+ }
+ }
+ /**
+ * Make a prediction with the trained model
+ * @param {Object} input - Input features
+ * @returns {Object} Prediction result
+ */
+ async predict(input) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError$1(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ try {
+ this._validateInput(input);
+ const features = this._extractFeatures(input);
+ const normalizedFeatures = this._normalizeFeatures(features);
+ const inputTensor = this.tf.tensor2d([normalizedFeatures]);
+ const predictionTensor = this.model.predict(inputTensor);
+ const predictionArray = await predictionTensor.data();
+ inputTensor.dispose();
+ predictionTensor.dispose();
+ const prediction = this._denormalizePrediction(predictionArray[0]);
+ this.stats.predictions++;
+ return {
+ prediction,
+ confidence: this._calculateConfidence(predictionArray[0])
+ };
+ } catch (error) {
+ this.stats.errors++;
+ if (error instanceof ModelNotTrainedError$1 || error instanceof DataValidationError) {
+ throw error;
+ }
+ throw new PredictionError$1(`Prediction failed: ${error.message}`, {
+ model: this.config.name,
+ input,
+ originalError: error.message
+ });
+ }
+ }
+ /**
+ * Make predictions for multiple inputs
+ * @param {Array} inputs - Array of input objects
+ * @returns {Array} Array of prediction results
+ */
+ async predictBatch(inputs) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError$1(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ const predictions = [];
+ for (const input of inputs) {
+ predictions.push(await this.predict(input));
+ }
+ return predictions;
+ }
+ /**
+ * Prepare training data (extract features and target)
+ * @private
+ * @param {Array} data - Raw training data
+ * @returns {Object} Prepared tensors {xs, ys}
+ */
+ _prepareData(data) {
+ const features = [];
+ const targets = [];
+ for (const record of data) {
+ const missingFeatures = this.config.features.filter((f) => !(f in record));
+ if (missingFeatures.length > 0) {
+ throw new DataValidationError(
+ `Missing features in training data: ${missingFeatures.join(", ")}`,
+ { model: this.config.name, missingFeatures, record }
+ );
+ }
+ if (!(this.config.target in record)) {
+ throw new DataValidationError(
+ `Missing target "${this.config.target}" in training data`,
+ { model: this.config.name, target: this.config.target, record }
+ );
+ }
+ const featureValues = this._extractFeatures(record);
+ features.push(featureValues);
+ targets.push(record[this.config.target]);
+ }
+ this._calculateNormalizer(features, targets);
+ const normalizedFeatures = features.map((f) => this._normalizeFeatures(f));
+ const normalizedTargets = targets.map((t) => this._normalizeTarget(t));
+ return {
+ xs: this.tf.tensor2d(normalizedFeatures),
+ ys: this._prepareTargetTensor(normalizedTargets)
+ };
+ }
+ /**
+ * Prepare target tensor (can be overridden by subclasses)
+ * @protected
+ * @param {Array} targets - Normalized target values
+ * @returns {Tensor} Target tensor
+ */
+ _prepareTargetTensor(targets) {
+ return this.tf.tensor2d(targets.map((t) => [t]));
+ }
+ /**
+ * Extract feature values from a record
+ * @private
+ * @param {Object} record - Data record
+ * @returns {Array} Feature values
+ */
+ _extractFeatures(record) {
+ return this.config.features.map((feature) => {
+ const value = record[feature];
+ if (typeof value !== "number") {
+ throw new DataValidationError(
+ `Feature "${feature}" must be a number, got ${typeof value}`,
+ { model: this.config.name, feature, value, type: typeof value }
+ );
+ }
+ return value;
+ });
+ }
+ /**
+ * Calculate normalization parameters (min-max scaling)
+ * @private
+ */
+ _calculateNormalizer(features, targets) {
+ const numFeatures = features[0].length;
+ for (let i = 0; i < numFeatures; i++) {
+ const featureName = this.config.features[i];
+ const values = features.map((f) => f[i]);
+ this.normalizer.features[featureName] = {
+ min: Math.min(...values),
+ max: Math.max(...values)
+ };
+ }
+ this.normalizer.target = {
+ min: Math.min(...targets),
+ max: Math.max(...targets)
+ };
+ }
+ /**
+ * Normalize features using min-max scaling
+ * @private
+ */
+ _normalizeFeatures(features) {
+ return features.map((value, i) => {
+ const featureName = this.config.features[i];
+ const { min, max } = this.normalizer.features[featureName];
+ if (max === min) return 0.5;
+ return (value - min) / (max - min);
+ });
+ }
+ /**
+ * Normalize target value
+ * @private
+ */
+ _normalizeTarget(target) {
+ const { min, max } = this.normalizer.target;
+ if (max === min) return 0.5;
+ return (target - min) / (max - min);
+ }
+ /**
+ * Denormalize prediction
+ * @private
+ */
+ _denormalizePrediction(normalizedValue) {
+ const { min, max } = this.normalizer.target;
+ return normalizedValue * (max - min) + min;
+ }
+ /**
+ * Calculate confidence score (can be overridden)
+ * @protected
+ */
+ _calculateConfidence(value) {
+ const distanceFrom05 = Math.abs(value - 0.5);
+ return Math.min(0.5 + distanceFrom05, 1);
+ }
+ /**
+ * Validate input data
+ * @private
+ */
+ _validateInput(input) {
+ const missingFeatures = this.config.features.filter((f) => !(f in input));
+ if (missingFeatures.length > 0) {
+ throw new DataValidationError(
+ `Missing features: ${missingFeatures.join(", ")}`,
+ { model: this.config.name, missingFeatures, input }
+ );
+ }
+ }
+ /**
+ * Export model to JSON (for persistence)
+ * @returns {Object} Serialized model
+ */
+ async export() {
+ if (!this.model) {
+ return null;
+ }
+ const modelJSON = await this.model.toJSON();
+ return {
+ config: this.config,
+ normalizer: this.normalizer,
+ stats: this.stats,
+ isTrained: this.isTrained,
+ model: modelJSON
+ };
+ }
+ /**
+ * Import model from JSON
+ * @param {Object} data - Serialized model data
+ */
+ async import(data) {
+ this.config = data.config;
+ this.normalizer = data.normalizer;
+ this.stats = data.stats;
+ this.isTrained = data.isTrained;
+ if (data.model) {
+ this.buildModel();
+ }
+ }
+ /**
+ * Dispose model and free memory
+ */
+ dispose() {
+ if (this.model) {
+ this.model.dispose();
+ this.model = null;
+ }
+ this.isTrained = false;
+ }
+ /**
+ * Get model statistics
+ */
+ getStats() {
+ return {
+ ...this.stats,
+ isTrained: this.isTrained,
+ config: this.config
+ };
+ }
+ }
+
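Subclasses only have to supply buildModel(); training, prediction, min-max normalization, and export/import are inherited. A minimal subclass sketch under that contract (the single-unit layer is illustrative):

    class TinyModel extends BaseModel {
      buildModel() {
        // One dense unit over the configured feature vector
        this.model = this.tf.sequential();
        this.model.add(this.tf.layers.dense({
          inputShape: [this.config.features.length],
          units: 1
        }));
        this.model.compile({
          optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
          loss: "meanSquaredError"
        });
      }
    }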
+ class RegressionModel extends BaseModel {
+ constructor(config = {}) {
+ super(config);
+ this.config.modelConfig = {
+ ...this.config.modelConfig,
+ polynomial: config.modelConfig?.polynomial || 1,
+ // Degree (1 = linear, 2+ = polynomial)
+ units: config.modelConfig?.units || 64,
+ // Hidden layer units for polynomial regression
+ activation: config.modelConfig?.activation || "relu"
+ };
+ if (this.config.modelConfig.polynomial < 1 || this.config.modelConfig.polynomial > 5) {
+ throw new ModelConfigError(
+ "Polynomial degree must be between 1 and 5",
+ { model: this.config.name, polynomial: this.config.modelConfig.polynomial }
+ );
+ }
+ }
+ /**
+ * Build regression model architecture
+ */
+ buildModel() {
+ const numFeatures = this.config.features.length;
+ const polynomial = this.config.modelConfig.polynomial;
+ this.model = this.tf.sequential();
+ if (polynomial === 1) {
+ this.model.add(this.tf.layers.dense({
+ inputShape: [numFeatures],
+ units: 1,
+ useBias: true
+ }));
+ } else {
+ this.model.add(this.tf.layers.dense({
+ inputShape: [numFeatures],
+ units: this.config.modelConfig.units,
+ activation: this.config.modelConfig.activation,
+ useBias: true
+ }));
+ if (polynomial >= 3) {
+ this.model.add(this.tf.layers.dense({
+ units: Math.floor(this.config.modelConfig.units / 2),
+ activation: this.config.modelConfig.activation
+ }));
+ }
+ this.model.add(this.tf.layers.dense({
+ units: 1
+ }));
+ }
+ this.model.compile({
+ optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
+ loss: "meanSquaredError",
+ metrics: ["mse", "mae"]
+ });
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] ${this.config.name} - Built regression model (polynomial degree: ${polynomial})`);
+ this.model.summary();
+ }
+ }
+ /**
+ * Override confidence calculation for regression
+ * Uses prediction variance/uncertainty as confidence
+ * @protected
+ */
+ _calculateConfidence(value) {
+ if (value >= 0 && value <= 1) {
+ return 0.9 + Math.random() * 0.1;
+ }
+ const distance = Math.abs(value < 0 ? value : value - 1);
+ return Math.max(0.5, 1 - distance);
+ }
+ /**
+ * Get R² score (coefficient of determination)
+ * Measures how well the model explains the variance in the data
+ * @param {Array} data - Test data
+ * @returns {number} R² score (0-1, higher is better)
+ */
+ async calculateR2Score(data) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ const predictions = [];
+ const actuals = [];
+ for (const record of data) {
+ const { prediction } = await this.predict(record);
+ predictions.push(prediction);
+ actuals.push(record[this.config.target]);
+ }
+ const meanActual = actuals.reduce((sum, val) => sum + val, 0) / actuals.length;
+ const tss = actuals.reduce((sum, actual) => {
+ return sum + Math.pow(actual - meanActual, 2);
+ }, 0);
+ const rss = predictions.reduce((sum, pred, i) => {
+ return sum + Math.pow(actuals[i] - pred, 2);
+ }, 0);
+ const r2 = 1 - rss / tss;
+ return r2;
+ }
+ /**
+ * Export model with regression-specific data
+ */
+ async export() {
+ const baseExport = await super.export();
+ return {
+ ...baseExport,
+ type: "regression",
+ polynomial: this.config.modelConfig.polynomial
+ };
+ }
+ }
+
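Used directly, the regression model trains on plain records and can score itself with R² against held-out data. A sketch with hypothetical field names:

    const model = new RegressionModel({
      name: "price",
      features: ["sqft", "rooms"],
      target: "price",
      modelConfig: { polynomial: 2, epochs: 100 }
    });
    await model.train(trainingRecords); // records shaped { sqft, rooms, price }
    const { prediction, confidence } = await model.predict({ sqft: 80, rooms: 3 });
    const r2 = await model.calculateR2Score(testRecords); // closer to 1 is better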
+ class ClassificationModel extends BaseModel {
+ constructor(config = {}) {
+ super(config);
+ this.config.modelConfig = {
+ ...this.config.modelConfig,
+ units: config.modelConfig?.units || 64,
+ // Hidden layer units
+ activation: config.modelConfig?.activation || "relu",
+ dropout: config.modelConfig?.dropout || 0.2
+ // Dropout rate for regularization
+ };
+ this.classes = [];
+ this.classToIndex = {};
+ this.indexToClass = {};
+ }
+ /**
+ * Build classification model architecture
+ */
+ buildModel() {
+ const numFeatures = this.config.features.length;
+ const numClasses = this.classes.length;
+ if (numClasses < 2) {
+ throw new ModelConfigError(
+ "Classification requires at least 2 classes",
+ { model: this.config.name, numClasses }
+ );
+ }
+ this.model = this.tf.sequential();
+ this.model.add(this.tf.layers.dense({
+ inputShape: [numFeatures],
+ units: this.config.modelConfig.units,
+ activation: this.config.modelConfig.activation,
+ useBias: true
+ }));
+ if (this.config.modelConfig.dropout > 0) {
+ this.model.add(this.tf.layers.dropout({
+ rate: this.config.modelConfig.dropout
+ }));
+ }
+ this.model.add(this.tf.layers.dense({
+ units: Math.floor(this.config.modelConfig.units / 2),
+ activation: this.config.modelConfig.activation
+ }));
+ const isBinary = numClasses === 2;
+ this.model.add(this.tf.layers.dense({
+ units: isBinary ? 1 : numClasses,
+ activation: isBinary ? "sigmoid" : "softmax"
+ }));
+ this.model.compile({
+ optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
+ loss: isBinary ? "binaryCrossentropy" : "categoricalCrossentropy",
+ metrics: ["accuracy"]
+ });
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] ${this.config.name} - Built classification model (${numClasses} classes, ${isBinary ? "binary" : "multi-class"})`);
+ this.model.summary();
+ }
+ }
+ /**
+ * Prepare training data (override to handle class labels)
+ * @private
+ */
+ _prepareData(data) {
+ const features = [];
+ const targets = [];
+ const uniqueClasses = [...new Set(data.map((r) => r[this.config.target]))];
+ this.classes = uniqueClasses.sort();
+ this.classes.forEach((cls, idx) => {
+ this.classToIndex[cls] = idx;
+ this.indexToClass[idx] = cls;
+ });
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] ${this.config.name} - Detected ${this.classes.length} classes:`, this.classes);
+ }
+ for (const record of data) {
+ const missingFeatures = this.config.features.filter((f) => !(f in record));
+ if (missingFeatures.length > 0) {
+ throw new DataValidationError(
+ `Missing features in training data: ${missingFeatures.join(", ")}`,
+ { model: this.config.name, missingFeatures, record }
+ );
+ }
+ if (!(this.config.target in record)) {
+ throw new DataValidationError(
+ `Missing target "${this.config.target}" in training data`,
+ { model: this.config.name, target: this.config.target, record }
+ );
+ }
+ const featureValues = this._extractFeatures(record);
+ features.push(featureValues);
+ const targetClass = record[this.config.target];
+ if (!(targetClass in this.classToIndex)) {
+ throw new DataValidationError(
+ `Unknown class "${targetClass}" in training data`,
+ { model: this.config.name, targetClass, knownClasses: this.classes }
+ );
+ }
+ targets.push(this.classToIndex[targetClass]);
+ }
+ this._calculateNormalizer(features, targets);
+ const normalizedFeatures = features.map((f) => this._normalizeFeatures(f));
+ return {
+ xs: this.tf.tensor2d(normalizedFeatures),
+ ys: this._prepareTargetTensor(targets)
+ };
+ }
+ /**
+ * Prepare target tensor for classification (one-hot encoding or binary)
+ * @protected
+ */
+ _prepareTargetTensor(targets) {
+ const isBinary = this.classes.length === 2;
+ if (isBinary) {
+ return this.tf.tensor2d(targets.map((t) => [t]));
+ } else {
+ return this.tf.oneHot(targets, this.classes.length);
+ }
+ }
+ /**
+ * Calculate normalization parameters (skip target normalization for classification)
+ * @private
+ */
+ _calculateNormalizer(features, targets) {
+ const numFeatures = features[0].length;
+ for (let i = 0; i < numFeatures; i++) {
+ const featureName = this.config.features[i];
+ const values = features.map((f) => f[i]);
+ this.normalizer.features[featureName] = {
+ min: Math.min(...values),
+ max: Math.max(...values)
+ };
+ }
+ this.normalizer.target = { min: 0, max: 1 };
+ }
+ /**
+ * Make a prediction (override to return class label)
+ */
+ async predict(input) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ try {
+ this._validateInput(input);
+ const features = this._extractFeatures(input);
+ const normalizedFeatures = this._normalizeFeatures(features);
+ const inputTensor = this.tf.tensor2d([normalizedFeatures]);
+ const predictionTensor = this.model.predict(inputTensor);
+ const predictionArray = await predictionTensor.data();
+ inputTensor.dispose();
+ predictionTensor.dispose();
+ const isBinary = this.classes.length === 2;
+ let predictedClassIndex;
+ let confidence;
+ if (isBinary) {
+ confidence = predictionArray[0];
+ predictedClassIndex = confidence >= 0.5 ? 1 : 0;
+ } else {
+ predictedClassIndex = predictionArray.indexOf(Math.max(...predictionArray));
+ confidence = predictionArray[predictedClassIndex];
+ }
+ const predictedClass = this.indexToClass[predictedClassIndex];
+ this.stats.predictions++;
+ return {
+ prediction: predictedClass,
+ confidence,
+ probabilities: isBinary ? {
+ [this.classes[0]]: 1 - predictionArray[0],
+ [this.classes[1]]: predictionArray[0]
+ } : Object.fromEntries(
+ this.classes.map((cls, idx) => [cls, predictionArray[idx]])
+ )
+ };
+ } catch (error) {
+ this.stats.errors++;
+ if (error instanceof ModelNotTrainedError || error instanceof DataValidationError) {
+ throw error;
+ }
+ throw new PredictionError(`Prediction failed: ${error.message}`, {
+ model: this.config.name,
+ input,
+ originalError: error.message
+ });
+ }
+ }
+ /**
+ * Calculate confusion matrix
+ * @param {Array} data - Test data
+ * @returns {Object} Confusion matrix and metrics
+ */
+ async calculateConfusionMatrix(data) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ const matrix = {};
+ this.classes.length;
+ for (const actualClass of this.classes) {
+ matrix[actualClass] = {};
+ for (const predictedClass of this.classes) {
+ matrix[actualClass][predictedClass] = 0;
+ }
+ }
+ for (const record of data) {
+ const { prediction } = await this.predict(record);
+ const actual = record[this.config.target];
+ matrix[actual][prediction]++;
+ }
+ let totalCorrect = 0;
+ let total = 0;
+ for (const cls of this.classes) {
+ totalCorrect += matrix[cls][cls];
+ total += Object.values(matrix[cls]).reduce((sum, val) => sum + val, 0);
+ }
+ const accuracy = total > 0 ? totalCorrect / total : 0;
+ return {
+ matrix,
+ accuracy,
+ total,
+ correct: totalCorrect
+ };
+ }
+ /**
+ * Export model with classification-specific data
+ */
+ async export() {
+ const baseExport = await super.export();
+ return {
+ ...baseExport,
+ type: "classification",
+ classes: this.classes,
+ classToIndex: this.classToIndex,
+ indexToClass: this.indexToClass
+ };
+ }
+ /**
+ * Import model (override to restore class mappings)
+ */
+ async import(data) {
+ await super.import(data);
+ this.classes = data.classes || [];
+ this.classToIndex = data.classToIndex || {};
+ this.indexToClass = data.indexToClass || {};
+ }
+ }
+
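Class labels are discovered from the target column inside _prepareData, so none are declared up front; two labels get a sigmoid head, more get softmax. A sketch with hypothetical fields:

    const clf = new ClassificationModel({
      name: "churn",
      features: ["visits", "ageDays"],
      target: "churned" // e.g. "yes" / "no", inferred from the data
    });
    await clf.train(records);
    const { prediction, confidence, probabilities } = await clf.predict({ visits: 1, ageDays: 400 });
    // probabilities maps every discovered class label to its score
    const { matrix, accuracy } = await clf.calculateConfusionMatrix(testRecords);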
+ class TimeSeriesModel extends BaseModel {
+ constructor(config = {}) {
+ super(config);
+ this.config.modelConfig = {
+ ...this.config.modelConfig,
+ lookback: config.modelConfig?.lookback || 10,
+ // Number of past timesteps to use
+ lstmUnits: config.modelConfig?.lstmUnits || 50,
+ // LSTM layer units
+ denseUnits: config.modelConfig?.denseUnits || 25,
+ // Dense layer units
+ dropout: config.modelConfig?.dropout || 0.2,
+ recurrentDropout: config.modelConfig?.recurrentDropout || 0.2
+ };
+ if (this.config.modelConfig.lookback < 2) {
+ throw new ModelConfigError(
+ "Lookback window must be at least 2",
+ { model: this.config.name, lookback: this.config.modelConfig.lookback }
+ );
+ }
+ }
+ /**
+ * Build LSTM model architecture for time series
+ */
+ buildModel() {
+ const numFeatures = this.config.features.length + 1;
+ const lookback = this.config.modelConfig.lookback;
+ this.model = this.tf.sequential();
+ this.model.add(this.tf.layers.lstm({
+ inputShape: [lookback, numFeatures],
+ units: this.config.modelConfig.lstmUnits,
+ returnSequences: false,
+ dropout: this.config.modelConfig.dropout,
+ recurrentDropout: this.config.modelConfig.recurrentDropout
+ }));
+ this.model.add(this.tf.layers.dense({
+ units: this.config.modelConfig.denseUnits,
+ activation: "relu"
+ }));
+ if (this.config.modelConfig.dropout > 0) {
+ this.model.add(this.tf.layers.dropout({
+ rate: this.config.modelConfig.dropout
+ }));
+ }
+ this.model.add(this.tf.layers.dense({
+ units: 1
+ }));
+ this.model.compile({
+ optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
+ loss: "meanSquaredError",
+ metrics: ["mse", "mae"]
+ });
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] ${this.config.name} - Built LSTM time series model (lookback: ${lookback})`);
+ this.model.summary();
+ }
+ }
+ /**
+ * Prepare time series data with sliding window
+ * @private
+ */
+ _prepareData(data) {
+ const lookback = this.config.modelConfig.lookback;
+ if (data.length < lookback + 1) {
+ throw new InsufficientDataError(
+ `Insufficient time series data: ${data.length} samples (minimum: ${lookback + 1})`,
+ { model: this.config.name, samples: data.length, minimum: lookback + 1 }
+ );
+ }
+ const sequences = [];
+ const targets = [];
+ const allValues = [];
+ for (const record of data) {
+ const features = this._extractFeatures(record);
+ const target = record[this.config.target];
+ allValues.push([...features, target]);
+ }
+ this._calculateTimeSeriesNormalizer(allValues);
+ for (let i = 0; i <= data.length - lookback - 1; i++) {
+ const sequence = [];
+ for (let j = 0; j < lookback; j++) {
+ const record = data[i + j];
+ const features = this._extractFeatures(record);
+ const target = record[this.config.target];
+ const combined = [...features, target];
+ const normalized = this._normalizeSequenceStep(combined);
+ sequence.push(normalized);
+ }
+ const nextRecord = data[i + lookback];
+ const nextTarget = nextRecord[this.config.target];
+ sequences.push(sequence);
+ targets.push(this._normalizeTarget(nextTarget));
+ }
+ return {
+ xs: this.tf.tensor3d(sequences),
+ // [samples, lookback, features]
+ ys: this.tf.tensor2d(targets.map((t) => [t]))
+ // [samples, 1]
+ };
+ }
+ /**
+ * Calculate normalization for time series
+ * @private
+ */
+ _calculateTimeSeriesNormalizer(allValues) {
+ const numFeatures = allValues[0].length;
+ for (let i = 0; i < numFeatures; i++) {
+ const values = allValues.map((v) => v[i]);
+ const min = Math.min(...values);
+ const max = Math.max(...values);
+ if (i < this.config.features.length) {
+ const featureName = this.config.features[i];
+ this.normalizer.features[featureName] = { min, max };
+ } else {
+ this.normalizer.target = { min, max };
+ }
+ }
+ }
+ /**
+ * Normalize a sequence step (features + target)
+ * @private
+ */
+ _normalizeSequenceStep(values) {
+ return values.map((value, i) => {
+ let min, max;
+ if (i < this.config.features.length) {
+ const featureName = this.config.features[i];
+ ({ min, max } = this.normalizer.features[featureName]);
+ } else {
+ ({ min, max } = this.normalizer.target);
+ }
+ if (max === min) return 0.5;
+ return (value - min) / (max - min);
+ });
+ }
+ /**
+ * Predict next value in time series
+ * @param {Array} sequence - Array of recent records (length = lookback)
+ * @returns {Object} Prediction result
+ */
+ async predict(sequence) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ try {
+ if (!Array.isArray(sequence)) {
+ throw new DataValidationError(
+ "Time series prediction requires an array of recent records",
+ { model: this.config.name, input: typeof sequence }
+ );
+ }
+ if (sequence.length !== this.config.modelConfig.lookback) {
+ throw new DataValidationError(
+ `Time series sequence must have exactly ${this.config.modelConfig.lookback} timesteps, got ${sequence.length}`,
+ { model: this.config.name, expected: this.config.modelConfig.lookback, got: sequence.length }
+ );
+ }
+ const normalizedSequence = [];
+ for (const record of sequence) {
+ this._validateInput(record);
+ const features = this._extractFeatures(record);
+ const target = record[this.config.target];
+ const combined = [...features, target];
+ normalizedSequence.push(this._normalizeSequenceStep(combined));
+ }
+ const inputTensor = this.tf.tensor3d([normalizedSequence]);
+ const predictionTensor = this.model.predict(inputTensor);
+ const predictionArray = await predictionTensor.data();
+ inputTensor.dispose();
+ predictionTensor.dispose();
+ const prediction = this._denormalizePrediction(predictionArray[0]);
+ this.stats.predictions++;
+ return {
+ prediction,
+ confidence: this._calculateConfidence(predictionArray[0])
+ };
+ } catch (error) {
+ this.stats.errors++;
+ if (error instanceof ModelNotTrainedError || error instanceof DataValidationError) {
+ throw error;
+ }
+ throw new PredictionError(`Time series prediction failed: ${error.message}`, {
+ model: this.config.name,
+ originalError: error.message
+ });
+ }
+ }
+ /**
+ * Predict multiple future timesteps
+ * @param {Array} initialSequence - Initial sequence of records
+ * @param {number} steps - Number of steps to predict ahead
+ * @returns {Array} Array of predictions
+ */
+ async predictMultiStep(initialSequence, steps = 1) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ const predictions = [];
+ let currentSequence = [...initialSequence];
+ for (let i = 0; i < steps; i++) {
+ const { prediction } = await this.predict(currentSequence);
+ predictions.push(prediction);
+ currentSequence.shift();
+ const lastRecord = currentSequence[currentSequence.length - 1];
+ const syntheticRecord = {
+ ...lastRecord,
+ [this.config.target]: prediction
+ };
+ currentSequence.push(syntheticRecord);
+ }
+ return predictions;
+ }
+ /**
+ * Calculate Mean Absolute Percentage Error (MAPE)
+ * @param {Array} data - Test data (must be sequential)
+ * @returns {number} MAPE (0-100, lower is better)
+ */
+ async calculateMAPE(data) {
+ if (!this.isTrained) {
+ throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+ model: this.config.name
+ });
+ }
+ const lookback = this.config.modelConfig.lookback;
+ if (data.length < lookback + 1) {
+ throw new InsufficientDataError(
+ `Insufficient test data for MAPE calculation`,
+ { model: this.config.name, samples: data.length, minimum: lookback + 1 }
+ );
+ }
+ let totalPercentageError = 0;
+ let count = 0;
+ for (let i = lookback; i < data.length; i++) {
+ const sequence = data.slice(i - lookback, i);
+ const { prediction } = await this.predict(sequence);
+ const actual = data[i][this.config.target];
+ if (actual !== 0) {
+ const percentageError = Math.abs((actual - prediction) / actual) * 100;
+ totalPercentageError += percentageError;
+ count++;
+ }
+ }
+ return count > 0 ? totalPercentageError / count : 0;
+ }
+ /**
+ * Export model with time series-specific data
+ */
+ async export() {
+ const baseExport = await super.export();
+ return {
+ ...baseExport,
+ type: "timeseries",
+ lookback: this.config.modelConfig.lookback
+ };
+ }
+ }
+
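Here predict() takes an array of the most recent records, exactly lookback long, each carrying the features and the observed target; predictMultiStep feeds each prediction back in to walk further ahead. A sketch with hypothetical fields:

    const ts = new TimeSeriesModel({
      name: "sales",
      features: ["promo"],
      target: "units",
      modelConfig: { lookback: 10 }
    });
    await ts.train(history); // sequential records shaped { promo, units }
    const window = history.slice(-10); // length must equal lookback
    const { prediction } = await ts.predict(window);
    const nextWeek = await ts.predictMultiStep(window, 7);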
12975
+ class NeuralNetworkModel extends BaseModel {
12976
+ constructor(config = {}) {
12977
+ super(config);
12978
+ this.config.modelConfig = {
12979
+ ...this.config.modelConfig,
12980
+ layers: config.modelConfig?.layers || [
12981
+ { units: 64, activation: "relu", dropout: 0.2 },
12982
+ { units: 32, activation: "relu", dropout: 0.1 }
12983
+ ],
12984
+ // Array of hidden layer configurations
12985
+ outputActivation: config.modelConfig?.outputActivation || "linear",
12986
+ // Output layer activation
12987
+ outputUnits: config.modelConfig?.outputUnits || 1,
12988
+ // Number of output units
12989
+ loss: config.modelConfig?.loss || "meanSquaredError",
12990
+ // Loss function
12991
+ metrics: config.modelConfig?.metrics || ["mse", "mae"]
12992
+ // Metrics to track
12993
+ };
12994
+ this._validateLayersConfig();
12995
+ }
12996
+ /**
12997
+ * Validate layers configuration
12998
+ * @private
12999
+ */
13000
+ _validateLayersConfig() {
13001
+ if (!Array.isArray(this.config.modelConfig.layers) || this.config.modelConfig.layers.length === 0) {
13002
+ throw new ModelConfigError(
13003
+ "Neural network must have at least one hidden layer",
13004
+ { model: this.config.name, layers: this.config.modelConfig.layers }
13005
+ );
13006
+ }
13007
+ for (const [index, layer] of this.config.modelConfig.layers.entries()) {
13008
+ if (!layer.units || typeof layer.units !== "number" || layer.units < 1) {
13009
+ throw new ModelConfigError(
13010
+ `Layer ${index} must have a valid "units" property (positive number)`,
13011
+ { model: this.config.name, layer, index }
13012
+ );
13013
+ }
13014
+ if (layer.activation && !this._isValidActivation(layer.activation)) {
13015
+ throw new ModelConfigError(
13016
+ `Layer ${index} has invalid activation function "${layer.activation}"`,
13017
+ { model: this.config.name, layer, index, validActivations: ["relu", "sigmoid", "tanh", "softmax", "elu", "selu"] }
13018
+ );
13019
+ }
13020
+ }
13021
+ }
13022
+ /**
13023
+ * Check if activation function is valid
13024
+ * @private
13025
+ */
13026
+ _isValidActivation(activation) {
13027
+ const validActivations = ["relu", "sigmoid", "tanh", "softmax", "elu", "selu", "linear"];
13028
+ return validActivations.includes(activation);
13029
+ }
13030
+ /**
13031
+ * Build custom neural network architecture
13032
+ */
13033
+ buildModel() {
13034
+ const numFeatures = this.config.features.length;
13035
+ this.model = this.tf.sequential();
13036
+ for (const [index, layerConfig] of this.config.modelConfig.layers.entries()) {
13037
+ const isFirstLayer = index === 0;
13038
+ const layerOptions = {
13039
+ units: layerConfig.units,
13040
+ activation: layerConfig.activation || "relu",
13041
+ useBias: true
13042
+ };
13043
+ if (isFirstLayer) {
13044
+ layerOptions.inputShape = [numFeatures];
13045
+ }
13046
+ this.model.add(this.tf.layers.dense(layerOptions));
13047
+ if (layerConfig.dropout && layerConfig.dropout > 0) {
13048
+ this.model.add(this.tf.layers.dropout({
13049
+ rate: layerConfig.dropout
13050
+ }));
13051
+ }
13052
+ if (layerConfig.batchNormalization) {
13053
+ this.model.add(this.tf.layers.batchNormalization());
13054
+ }
13055
+ }
13056
+ this.model.add(this.tf.layers.dense({
13057
+ units: this.config.modelConfig.outputUnits,
13058
+ activation: this.config.modelConfig.outputActivation
13059
+ }));
13060
+ this.model.compile({
13061
+ optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
13062
+ loss: this.config.modelConfig.loss,
13063
+ metrics: this.config.modelConfig.metrics
13064
+ });
13065
+ if (this.config.verbose) {
13066
+ console.log(`[MLPlugin] ${this.config.name} - Built custom neural network:`);
13067
+ console.log(` - Hidden layers: ${this.config.modelConfig.layers.length}`);
13068
+ console.log(` - Total parameters:`, this._countParameters());
13069
+ this.model.summary();
13070
+ }
13071
+ }
13072
+ /**
13073
+ * Count total trainable parameters
13074
+ * @private
13075
+ */
13076
+ _countParameters() {
13077
+ if (!this.model) return 0;
13078
+ let totalParams = 0;
13079
+ for (const layer of this.model.layers) {
13080
+ if (layer.countParams) {
13081
+ totalParams += layer.countParams();
13082
+ }
13083
+ }
13084
+ return totalParams;
13085
+ }
13086
+ /**
13087
+ * Add layer to model (before building)
13088
+ * @param {Object} layerConfig - Layer configuration
13089
+ */
13090
+ addLayer(layerConfig) {
13091
+ if (this.model) {
13092
+ throw new ModelConfigError(
13093
+ "Cannot add layer after model is built. Use addLayer() before training.",
13094
+ { model: this.config.name }
13095
+ );
13096
+ }
13097
+ this.config.modelConfig.layers.push(layerConfig);
13098
+ }
13099
+ /**
13100
+ * Set output configuration
13101
+ * @param {Object} outputConfig - Output layer configuration
13102
+ */
13103
+ setOutput(outputConfig) {
13104
+ if (this.model) {
13105
+ throw new ModelConfigError(
13106
+ "Cannot change output after model is built. Use setOutput() before training.",
13107
+ { model: this.config.name }
13108
+ );
13109
+ }
13110
+ if (outputConfig.activation) {
13111
+ this.config.modelConfig.outputActivation = outputConfig.activation;
13112
+ }
13113
+ if (outputConfig.units) {
13114
+ this.config.modelConfig.outputUnits = outputConfig.units;
13115
+ }
13116
+ if (outputConfig.loss) {
13117
+ this.config.modelConfig.loss = outputConfig.loss;
13118
+ }
13119
+ if (outputConfig.metrics) {
13120
+ this.config.modelConfig.metrics = outputConfig.metrics;
13121
+ }
13122
+ }
13123
+ /**
13124
+ * Get model architecture summary
13125
+ */
13126
+ getArchitecture() {
13127
+ return {
13128
+ inputFeatures: this.config.features,
13129
+ hiddenLayers: this.config.modelConfig.layers.map((layer, index) => ({
13130
+ index,
13131
+ units: layer.units,
13132
+ activation: layer.activation || "relu",
13133
+ dropout: layer.dropout || 0,
13134
+ batchNormalization: layer.batchNormalization || false
13135
+ })),
13136
+ outputLayer: {
13137
+ units: this.config.modelConfig.outputUnits,
13138
+ activation: this.config.modelConfig.outputActivation
13139
+ },
13140
+ totalParameters: this._countParameters(),
13141
+ loss: this.config.modelConfig.loss,
13142
+ metrics: this.config.modelConfig.metrics
13143
+ };
13144
+ }
13145
+ /**
13146
+ * Train with early stopping callback
13147
+ * @param {Array} data - Training data
13148
+ * @param {Object} earlyStoppingConfig - Early stopping configuration
13149
+ * @returns {Object} Training results
13150
+ */
13151
+ async trainWithEarlyStopping(data, earlyStoppingConfig = {}) {
13152
+ const {
13153
+ patience = 10,
13154
+ minDelta = 1e-3,
13155
+ monitor = "val_loss",
13156
+ restoreBestWeights = true
13157
+ } = earlyStoppingConfig;
13158
+ const { xs, ys } = this._prepareData(data);
13159
+ if (!this.model) {
13160
+ this.buildModel();
13161
+ }
13162
+ let bestValue = Infinity;
13163
+ let patienceCounter = 0;
13164
+ let bestWeights = null;
13165
+ const callbacks = {
13166
+ onEpochEnd: async (epoch, logs) => {
13167
+ const monitorValue = logs[monitor] || logs.loss;
13168
+ if (this.config.verbose && epoch % 10 === 0) {
13169
+ console.log(`[MLPlugin] ${this.config.name} - Epoch ${epoch}: ${monitor}=${monitorValue.toFixed(4)}`);
13170
+ }
13171
+ if (monitorValue < bestValue - minDelta) {
13172
+ bestValue = monitorValue;
13173
+ patienceCounter = 0;
13174
+ if (restoreBestWeights) {
13175
+ bestWeights = await this.model.getWeights();
13176
+ }
13177
+ } else {
13178
+ patienceCounter++;
13179
+ if (patienceCounter >= patience) {
13180
+ if (this.config.verbose) {
13181
+ console.log(`[MLPlugin] ${this.config.name} - Early stopping at epoch ${epoch}`);
13182
+ }
13183
+ this.model.stopTraining = true;
13184
+ }
13185
+ }
13186
+ }
13187
+ };
13188
+ const history = await this.model.fit(xs, ys, {
13189
+ epochs: this.config.modelConfig.epochs,
13190
+ batchSize: this.config.modelConfig.batchSize,
13191
+ validationSplit: this.config.modelConfig.validationSplit,
13192
+ verbose: this.config.verbose ? 1 : 0,
13193
+ callbacks
13194
+ });
13195
+ if (restoreBestWeights && bestWeights) {
13196
+ this.model.setWeights(bestWeights);
13197
+ }
13198
+ this.isTrained = true;
13199
+ this.stats.trainedAt = (/* @__PURE__ */ new Date()).toISOString();
13200
+ this.stats.samples = data.length;
13201
+ this.stats.loss = history.history.loss[history.history.loss.length - 1];
13202
+ xs.dispose();
13203
+ ys.dispose();
13204
+ return {
13205
+ loss: this.stats.loss,
13206
+ epochs: history.epoch.length,
13207
+ samples: this.stats.samples,
13208
+ stoppedEarly: history.epoch.length < this.config.modelConfig.epochs
13209
+ };
13210
+ }
13211
+ /**
13212
+ * Export model with neural network-specific data
13213
+ */
13214
+ async export() {
13215
+ const baseExport = await super.export();
13216
+ return {
13217
+ ...baseExport,
13218
+ type: "neural-network",
13219
+ architecture: this.getArchitecture()
13220
+ };
13221
+ }
13222
+ }
13223
+
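The layers array fully determines the hidden stack, and trainWithEarlyStopping adds patience-based stopping on top of the inherited loop. A configuration sketch (all names hypothetical):

    const nn = new NeuralNetworkModel({
      name: "score",
      features: ["a", "b", "c"],
      target: "y",
      modelConfig: {
        layers: [
          { units: 128, activation: "relu", dropout: 0.3, batchNormalization: true },
          { units: 64, activation: "relu" }
        ],
        outputUnits: 1,
        outputActivation: "linear"
      }
    });
    const result = await nn.trainWithEarlyStopping(records, { patience: 5, monitor: "val_loss" });
    console.log(nn.getArchitecture().totalParameters, result.stoppedEarly);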
+ class MLPlugin extends Plugin {
+ constructor(options = {}) {
+ super(options);
+ this.config = {
+ models: options.models || {},
+ verbose: options.verbose || false,
+ minTrainingSamples: options.minTrainingSamples || 10
+ };
+ requirePluginDependency("@tensorflow/tfjs-node", "MLPlugin");
+ this.models = {};
+ this.training = /* @__PURE__ */ new Map();
+ this.insertCounters = /* @__PURE__ */ new Map();
+ this.intervals = [];
+ this.stats = {
+ totalTrainings: 0,
+ totalPredictions: 0,
+ totalErrors: 0,
+ startedAt: null
+ };
+ }
+ /**
+ * Install the plugin
+ */
+ async onInstall() {
+ if (this.config.verbose) {
+ console.log("[MLPlugin] Installing ML Plugin...");
+ }
+ for (const [modelName, modelConfig] of Object.entries(this.config.models)) {
+ this._validateModelConfig(modelName, modelConfig);
+ }
+ for (const [modelName, modelConfig] of Object.entries(this.config.models)) {
+ await this._initializeModel(modelName, modelConfig);
+ }
+ for (const [modelName, modelConfig] of Object.entries(this.config.models)) {
+ if (modelConfig.autoTrain) {
+ this._setupAutoTraining(modelName, modelConfig);
+ }
+ }
+ this.stats.startedAt = (/* @__PURE__ */ new Date()).toISOString();
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Installed with ${Object.keys(this.models).length} models`);
+ }
+ this.emit("installed", {
+ plugin: "MLPlugin",
+ models: Object.keys(this.models)
+ });
+ }
+ /**
+ * Start the plugin
+ */
+ async onStart() {
+ for (const modelName of Object.keys(this.models)) {
+ await this._loadModel(modelName);
+ }
+ if (this.config.verbose) {
+ console.log("[MLPlugin] Started");
+ }
+ }
+ /**
+ * Stop the plugin
+ */
+ async onStop() {
+ for (const handle of this.intervals) {
+ clearInterval(handle);
+ }
+ this.intervals = [];
+ for (const [modelName, model] of Object.entries(this.models)) {
+ if (model && model.dispose) {
+ model.dispose();
+ }
+ }
+ if (this.config.verbose) {
+ console.log("[MLPlugin] Stopped");
+ }
+ }
+ /**
+ * Uninstall the plugin
+ */
+ async onUninstall(options = {}) {
+ await this.onStop();
+ if (options.purgeData) {
+ for (const modelName of Object.keys(this.models)) {
+ await this._deleteModel(modelName);
+ }
+ if (this.config.verbose) {
+ console.log("[MLPlugin] Purged all model data");
+ }
+ }
+ }
+ /**
+ * Validate model configuration
+ * @private
+ */
+ _validateModelConfig(modelName, config) {
+ const validTypes = ["regression", "classification", "timeseries", "neural-network"];
+ if (!config.type || !validTypes.includes(config.type)) {
+ throw new ModelConfigError(
+ `Model "${modelName}" must have a valid type: ${validTypes.join(", ")}`,
+ { modelName, type: config.type, validTypes }
+ );
+ }
+ if (!config.resource) {
+ throw new ModelConfigError(
+ `Model "${modelName}" must specify a resource`,
+ { modelName }
+ );
+ }
+ if (!config.features || !Array.isArray(config.features) || config.features.length === 0) {
+ throw new ModelConfigError(
+ `Model "${modelName}" must specify at least one feature`,
+ { modelName, features: config.features }
+ );
+ }
+ if (!config.target) {
+ throw new ModelConfigError(
+ `Model "${modelName}" must specify a target field`,
+ { modelName }
+ );
+ }
+ }
+ /**
+ * Initialize a model instance
+ * @private
+ */
+ async _initializeModel(modelName, config) {
+ const modelOptions = {
+ name: modelName,
+ resource: config.resource,
+ features: config.features,
+ target: config.target,
+ modelConfig: config.modelConfig || {},
+ verbose: this.config.verbose
+ };
+ try {
+ switch (config.type) {
+ case "regression":
+ this.models[modelName] = new RegressionModel(modelOptions);
+ break;
+ case "classification":
+ this.models[modelName] = new ClassificationModel(modelOptions);
+ break;
+ case "timeseries":
+ this.models[modelName] = new TimeSeriesModel(modelOptions);
+ break;
+ case "neural-network":
+ this.models[modelName] = new NeuralNetworkModel(modelOptions);
+ break;
+ default:
+ throw new ModelConfigError(
+ `Unknown model type: ${config.type}`,
+ { modelName, type: config.type }
+ );
+ }
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Initialized model "${modelName}" (${config.type})`);
+ }
+ } catch (error) {
+ console.error(`[MLPlugin] Failed to initialize model "${modelName}":`, error.message);
+ throw error;
+ }
+ }
+ /**
+ * Setup auto-training for a model
+ * @private
+ */
+ _setupAutoTraining(modelName, config) {
+ const resource = this.database.resources[config.resource];
+ if (!resource) {
+ console.warn(`[MLPlugin] Resource "${config.resource}" not found for model "${modelName}"`);
+ return;
+ }
+ this.insertCounters.set(modelName, 0);
+ if (config.trainAfterInserts && config.trainAfterInserts > 0) {
+ this.addMiddleware(resource, "insert", async (next, data, options) => {
+ const result = await next(data, options);
+ const currentCount = this.insertCounters.get(modelName) || 0;
+ this.insertCounters.set(modelName, currentCount + 1);
+ if (this.insertCounters.get(modelName) >= config.trainAfterInserts) {
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Auto-training "${modelName}" after ${config.trainAfterInserts} inserts`);
+ }
+ this.insertCounters.set(modelName, 0);
+ this.train(modelName).catch((err) => {
+ console.error(`[MLPlugin] Auto-training failed for "${modelName}":`, err.message);
+ });
+ }
+ return result;
+ });
+ }
+ if (config.trainInterval && config.trainInterval > 0) {
+ const handle = setInterval(async () => {
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Auto-training "${modelName}" (interval: ${config.trainInterval}ms)`);
+ }
+ try {
+ await this.train(modelName);
+ } catch (error) {
+ console.error(`[MLPlugin] Auto-training failed for "${modelName}":`, error.message);
+ }
+ }, config.trainInterval);
+ this.intervals.push(handle);
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Setup interval training for "${modelName}" (every ${config.trainInterval}ms)`);
+ }
+ }
+ }
+ /**
+ * Train a model
+ * @param {string} modelName - Model name
+ * @param {Object} options - Training options
+ * @returns {Object} Training results
+ */
+ async train(modelName, options = {}) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ if (this.training.get(modelName)) {
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Model "${modelName}" is already training, skipping...`);
+ }
+ return { skipped: true, reason: "already_training" };
+ }
+ this.training.set(modelName, true);
+ try {
+ const modelConfig = this.config.models[modelName];
+ const resource = this.database.resources[modelConfig.resource];
+ if (!resource) {
+ throw new ModelNotFoundError(
+ `Resource "${modelConfig.resource}" not found`,
+ { modelName, resource: modelConfig.resource }
+ );
+ }
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Fetching training data for "${modelName}"...`);
+ }
+ const [ok, err, data] = await tryFn(() => resource.list());
+ if (!ok) {
+ throw new TrainingError(
+ `Failed to fetch training data: ${err.message}`,
+ { modelName, resource: modelConfig.resource, originalError: err.message }
+ );
+ }
+ if (!data || data.length < this.config.minTrainingSamples) {
+ throw new TrainingError(
+ `Insufficient training data: ${data?.length || 0} samples (minimum: ${this.config.minTrainingSamples})`,
+ { modelName, samples: data?.length || 0, minimum: this.config.minTrainingSamples }
+ );
+ }
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Training "${modelName}" with ${data.length} samples...`);
+ }
+ const result = await model.train(data);
+ await this._saveModel(modelName);
+ this.stats.totalTrainings++;
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Training completed for "${modelName}":`, result);
+ }
+ this.emit("modelTrained", {
+ modelName,
+ type: modelConfig.type,
+ result
+ });
+ return result;
+ } catch (error) {
+ this.stats.totalErrors++;
+ if (error instanceof MLError) {
+ throw error;
+ }
+ throw new TrainingError(
+ `Training failed for "${modelName}": ${error.message}`,
+ { modelName, originalError: error.message }
+ );
+ } finally {
+ this.training.set(modelName, false);
+ }
+ }
+ /**
+ * Make a prediction
+ * @param {string} modelName - Model name
+ * @param {Object|Array} input - Input data (object for single prediction, array for time series)
+ * @returns {Object} Prediction result
+ */
+ async predict(modelName, input) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ try {
+ const result = await model.predict(input);
+ this.stats.totalPredictions++;
+ this.emit("prediction", {
+ modelName,
+ input,
+ result
+ });
+ return result;
+ } catch (error) {
+ this.stats.totalErrors++;
+ throw error;
+ }
+ }
+ /**
+ * Make predictions for multiple inputs
+ * @param {string} modelName - Model name
+ * @param {Array} inputs - Array of input objects
+ * @returns {Array} Array of prediction results
+ */
+ async predictBatch(modelName, inputs) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ return await model.predictBatch(inputs);
+ }
+ /**
+ * Retrain a model (reset and train from scratch)
+ * @param {string} modelName - Model name
+ * @param {Object} options - Options
+ * @returns {Object} Training results
+ */
+ async retrain(modelName, options = {}) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ if (model.dispose) {
+ model.dispose();
+ }
+ const modelConfig = this.config.models[modelName];
+ await this._initializeModel(modelName, modelConfig);
+ return await this.train(modelName, options);
+ }
+ /**
+ * Get model statistics
+ * @param {string} modelName - Model name
+ * @returns {Object} Model stats
+ */
+ getModelStats(modelName) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ return model.getStats();
+ }
+ /**
+ * Get plugin statistics
+ * @returns {Object} Plugin stats
+ */
+ getStats() {
+ return {
+ ...this.stats,
+ models: Object.keys(this.models).length,
+ trainedModels: Object.values(this.models).filter((m) => m.isTrained).length
+ };
+ }
+ /**
+ * Export a model
+ * @param {string} modelName - Model name
+ * @returns {Object} Serialized model
+ */
+ async exportModel(modelName) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ return await model.export();
+ }
+ /**
+ * Import a model
+ * @param {string} modelName - Model name
+ * @param {Object} data - Serialized model data
+ */
+ async importModel(modelName, data) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ await model.import(data);
+ await this._saveModel(modelName);
13625
+ if (this.config.verbose) {
13626
+ console.log(`[MLPlugin] Imported model "${modelName}"`);
13627
+ }
13628
+ }
13629
+ /**
13630
+ * Save model to plugin storage
13631
+ * @private
13632
+ */
13633
+ async _saveModel(modelName) {
13634
+ try {
13635
+ const storage = this.getStorage();
13636
+ const exportedModel = await this.models[modelName].export();
13637
+ if (!exportedModel) {
13638
+ if (this.config.verbose) {
13639
+ console.log(`[MLPlugin] Model "${modelName}" not trained, skipping save`);
13640
+ }
13641
+ return;
13642
+ }
13643
+ await storage.patch(`model_${modelName}`, {
13644
+ modelName,
13645
+ data: JSON.stringify(exportedModel),
13646
+ savedAt: (/* @__PURE__ */ new Date()).toISOString()
13647
+ });
13648
+ if (this.config.verbose) {
13649
+ console.log(`[MLPlugin] Saved model "${modelName}" to plugin storage`);
13650
+ }
13651
+ } catch (error) {
13652
+ console.error(`[MLPlugin] Failed to save model "${modelName}":`, error.message);
13653
+ }
13654
+ }
13655
+ /**
13656
+ * Load model from plugin storage
13657
+ * @private
13658
+ */
13659
+ async _loadModel(modelName) {
13660
+ try {
13661
+ const storage = this.getStorage();
13662
+ const [ok, err, record] = await tryFn(() => storage.get(`model_${modelName}`));
13663
+ if (!ok || !record) {
13664
+ if (this.config.verbose) {
13665
+ console.log(`[MLPlugin] No saved model found for "${modelName}"`);
13666
+ }
13667
+ return;
13668
+ }
13669
+ const modelData = JSON.parse(record.data);
13670
+ await this.models[modelName].import(modelData);
13671
+ if (this.config.verbose) {
13672
+ console.log(`[MLPlugin] Loaded model "${modelName}" from plugin storage`);
13673
+ }
13674
+ } catch (error) {
13675
+ console.error(`[MLPlugin] Failed to load model "${modelName}":`, error.message);
13676
+ }
13677
+ }
13678
+ /**
13679
+ * Delete model from plugin storage
13680
+ * @private
13681
+ */
13682
+ async _deleteModel(modelName) {
13683
+ try {
13684
+ const storage = this.getStorage();
13685
+ await storage.delete(`model_${modelName}`);
13686
+ if (this.config.verbose) {
13687
+ console.log(`[MLPlugin] Deleted model "${modelName}" from plugin storage`);
13688
+ }
13689
+ } catch (error) {
13690
+ if (this.config.verbose) {
13691
+ console.log(`[MLPlugin] Could not delete model "${modelName}": ${error.message}`);
13692
+ }
13693
+ }
13694
+ }
13695
+ }
13696
+
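A minimal usage sketch of the ML training/prediction surface added above. The class and option names are inferred from the log prefixes and config reads visible in this diff (the models map, minTrainingSamples, trainInterval), so treat the exact wiring as an assumption rather than documented API:

// Hypothetical setup; option keys inferred from this diff.
const ml = new MLPlugin({
  verbose: true,
  minTrainingSamples: 100,            // train() throws below this sample count
  models: {
    churn: {
      resource: 'users',              // training rows come from resource.list()
      type: 'classification',         // assumed; concrete model types live elsewhere
      trainInterval: 3600000          // auto-retrain every hour (ms)
    }
  }
});

ml.on('modelTrained', ({ modelName, result }) => console.log(modelName, result));

await ml.train('churn');              // returns { skipped: true } if already training
const one   = await ml.predict('churn', { plan: 'pro', logins: 3 });
const many  = await ml.predictBatch('churn', [{ plan: 'free' }, { plan: 'pro' }]);
const stats = ml.getStats();          // totals plus models / trainedModels counts

// Trained models round-trip through plugin storage:
await ml.importModel('churn', await ml.exportModel('churn'));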
11929
13697
  class SqsConsumer {
11930
13698
  constructor({ queueUrl, onMessage, onError, poolingInterval = 5e3, maxMessages = 10, region = "us-east-1", credentials, endpoint, driver = "sqs" }) {
11931
13699
  this.driver = driver;
@@ -13447,7 +15215,7 @@ function generateMySQLAlterTable(tableName, attributes, existingSchema) {
13447
15215
  }
13448
15216
  return alterStatements;
13449
15217
  }
13450
- function generateBigQuerySchema(attributes) {
15218
+ function generateBigQuerySchema(attributes, mutability = "append-only") {
13451
15219
  const fields = [];
13452
15220
  fields.push({
13453
15221
  name: "id",
@@ -13471,6 +15239,14 @@ function generateBigQuerySchema(attributes) {
13471
15239
  if (!attributes.updatedAt) {
13472
15240
  fields.push({ name: "updated_at", type: "TIMESTAMP", mode: "NULLABLE" });
13473
15241
  }
15242
+ if (mutability === "append-only" || mutability === "immutable") {
15243
+ fields.push({ name: "_operation_type", type: "STRING", mode: "NULLABLE" });
15244
+ fields.push({ name: "_operation_timestamp", type: "TIMESTAMP", mode: "NULLABLE" });
15245
+ }
15246
+ if (mutability === "immutable") {
15247
+ fields.push({ name: "_is_deleted", type: "BOOL", mode: "NULLABLE" });
15248
+ fields.push({ name: "_version", type: "INT64", mode: "NULLABLE" });
15249
+ }
13474
15250
  return fields;
13475
15251
  }
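A sketch of what the new mutability parameter contributes to a generated table schema; the tracking column names and types are taken verbatim from the function above (the attribute shape is illustrative):

// append-only (the default) appends operation-tracking columns:
generateBigQuerySchema({ name: { type: 'string' } }, 'append-only');
// => [id, name, created_at, updated_at,
//     _operation_type STRING, _operation_timestamp TIMESTAMP]

// immutable additionally tracks soft deletes and a per-id version:
generateBigQuerySchema({ name: { type: 'string' } }, 'immutable');
// => [..., _is_deleted BOOL, _version INT64]

// mutable adds no tracking columns; updates and deletes remain real DML.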
13476
15252
  async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId) {
@@ -13492,7 +15268,7 @@ async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId) {
13492
15268
  }
13493
15269
  return schema;
13494
15270
  }
13495
- function generateBigQuerySchemaUpdate(attributes, existingSchema) {
15271
+ function generateBigQuerySchemaUpdate(attributes, existingSchema, mutability = "append-only") {
13496
15272
  const newFields = [];
13497
15273
  for (const [fieldName, fieldConfig] of Object.entries(attributes)) {
13498
15274
  if (fieldName === "id") continue;
@@ -13506,6 +15282,22 @@ function generateBigQuerySchemaUpdate(attributes, existingSchema) {
13506
15282
  mode: required ? "REQUIRED" : "NULLABLE"
13507
15283
  });
13508
15284
  }
15285
+ if (mutability === "append-only" || mutability === "immutable") {
15286
+ if (!existingSchema["_operation_type"]) {
15287
+ newFields.push({ name: "_operation_type", type: "STRING", mode: "NULLABLE" });
15288
+ }
15289
+ if (!existingSchema["_operation_timestamp"]) {
15290
+ newFields.push({ name: "_operation_timestamp", type: "TIMESTAMP", mode: "NULLABLE" });
15291
+ }
15292
+ }
15293
+ if (mutability === "immutable") {
15294
+ if (!existingSchema["_is_deleted"]) {
15295
+ newFields.push({ name: "_is_deleted", type: "BOOL", mode: "NULLABLE" });
15296
+ }
15297
+ if (!existingSchema["_version"]) {
15298
+ newFields.push({ name: "_version", type: "INT64", mode: "NULLABLE" });
15299
+ }
15300
+ }
13509
15301
  return newFields;
13510
15302
  }
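For an existing table the update helper returns only the columns that are missing, so re-running schema sync with a new mutability mode is additive and idempotent:

// existingSchema is keyed by column name, per getBigQueryTableSchema above.
const existing = { name: { type: 'STRING' }, _operation_type: { type: 'STRING' } };
generateBigQuerySchemaUpdate({ name: 'string' }, existing, 'immutable');
// => only _operation_timestamp, _is_deleted and _version are returned;
//    _operation_type already exists and is skipped.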
13511
15303
  function s3dbTypeToSQLite(fieldType, fieldOptions = {}) {
@@ -13588,6 +15380,8 @@ class BigqueryReplicator extends BaseReplicator {
13588
15380
  this.credentials = config.credentials;
13589
15381
  this.location = config.location || "US";
13590
15382
  this.logTable = config.logTable;
15383
+ this.mutability = config.mutability || "append-only";
15384
+ this._validateMutability(this.mutability);
13591
15385
  this.schemaSync = {
13592
15386
  enabled: config.schemaSync?.enabled || false,
13593
15387
  strategy: config.schemaSync?.strategy || "alter",
@@ -13596,6 +15390,13 @@ class BigqueryReplicator extends BaseReplicator {
13596
15390
  autoCreateColumns: config.schemaSync?.autoCreateColumns !== false
13597
15391
  };
13598
15392
  this.resources = this.parseResourcesConfig(resources);
15393
+ this.versionCounters = /* @__PURE__ */ new Map();
15394
+ }
15395
+ _validateMutability(mutability) {
15396
+ const validModes = ["append-only", "mutable", "immutable"];
15397
+ if (!validModes.includes(mutability)) {
15398
+ throw new Error(`Invalid mutability mode: ${mutability}. Must be one of: ${validModes.join(", ")}`);
15399
+ }
13599
15400
  }
13600
15401
  parseResourcesConfig(resources) {
13601
15402
  const parsed = {};
@@ -13604,24 +15405,31 @@ class BigqueryReplicator extends BaseReplicator {
13604
15405
  parsed[resourceName] = [{
13605
15406
  table: config,
13606
15407
  actions: ["insert"],
13607
- transform: null
15408
+ transform: null,
15409
+ mutability: this.mutability
13608
15410
  }];
13609
15411
  } else if (Array.isArray(config)) {
13610
15412
  parsed[resourceName] = config.map((item) => {
13611
15413
  if (typeof item === "string") {
13612
- return { table: item, actions: ["insert"], transform: null };
15414
+ return { table: item, actions: ["insert"], transform: null, mutability: this.mutability };
13613
15415
  }
15416
+ const itemMutability = item.mutability || this.mutability;
15417
+ this._validateMutability(itemMutability);
13614
15418
  return {
13615
15419
  table: item.table,
13616
15420
  actions: item.actions || ["insert"],
13617
- transform: item.transform || null
15421
+ transform: item.transform || null,
15422
+ mutability: itemMutability
13618
15423
  };
13619
15424
  });
13620
15425
  } else if (typeof config === "object") {
15426
+ const configMutability = config.mutability || this.mutability;
15427
+ this._validateMutability(configMutability);
13621
15428
  parsed[resourceName] = [{
13622
15429
  table: config.table,
13623
15430
  actions: config.actions || ["insert"],
13624
- transform: config.transform || null
15431
+ transform: config.transform || null,
15432
+ mutability: configMutability
13625
15433
  }];
13626
15434
  }
13627
15435
  }
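Putting the parsing rules together: mutability can be set replicator-wide and overridden per table, and every mode is validated up front. A configuration sketch, assuming the (config, resources) constructor split implied by parseResourcesConfig:

const replicator = new BigqueryReplicator({
  projectId: 'my-project',             // connection fields as used elsewhere in this class
  datasetId: 'analytics',
  mutability: 'append-only'            // replicator-wide default
}, {
  orders: 'orders_table',              // string form: insert-only, inherits the default
  users: [{
    table: 'users_history',
    actions: ['insert', 'update', 'delete'],
    mutability: 'immutable'            // per-table override, validated at parse time
  }]
});

// An unknown mode fails fast:
//   Invalid mutability mode: upsert. Must be one of: append-only, mutable, immutable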
@@ -13700,8 +15508,9 @@ class BigqueryReplicator extends BaseReplicator {
13700
15508
  );
13701
15509
  for (const tableConfig of tableConfigs) {
13702
15510
  const tableName = tableConfig.table;
15511
+ const mutability = tableConfig.mutability;
13703
15512
  const [okSync, errSync] = await tryFn(async () => {
13704
- await this.syncTableSchema(tableName, attributes);
15513
+ await this.syncTableSchema(tableName, attributes, mutability);
13705
15514
  });
13706
15515
  if (!okSync) {
13707
15516
  const message = `Schema sync failed for table ${tableName}: ${errSync.message}`;
@@ -13721,7 +15530,7 @@ class BigqueryReplicator extends BaseReplicator {
13721
15530
  /**
13722
15531
  * Sync a single table schema in BigQuery
13723
15532
  */
13724
- async syncTableSchema(tableName, attributes) {
15533
+ async syncTableSchema(tableName, attributes, mutability = "append-only") {
13725
15534
  const dataset = this.bigqueryClient.dataset(this.datasetId);
13726
15535
  const table = dataset.table(tableName);
13727
15536
  const [exists] = await table.exists();
@@ -13732,15 +15541,16 @@ class BigqueryReplicator extends BaseReplicator {
13732
15541
  if (this.schemaSync.strategy === "validate-only") {
13733
15542
  throw new Error(`Table ${tableName} does not exist (validate-only mode)`);
13734
15543
  }
13735
- const schema = generateBigQuerySchema(attributes);
15544
+ const schema = generateBigQuerySchema(attributes, mutability);
13736
15545
  if (this.config.verbose) {
13737
- console.log(`[BigQueryReplicator] Creating table ${tableName} with schema:`, schema);
15546
+ console.log(`[BigQueryReplicator] Creating table ${tableName} with schema (mutability: ${mutability}):`, schema);
13738
15547
  }
13739
15548
  await dataset.createTable(tableName, { schema });
13740
15549
  this.emit("table_created", {
13741
15550
  replicator: this.name,
13742
15551
  tableName,
13743
- attributes: Object.keys(attributes)
15552
+ attributes: Object.keys(attributes),
15553
+ mutability
13744
15554
  });
13745
15555
  return;
13746
15556
  }
@@ -13749,18 +15559,19 @@ class BigqueryReplicator extends BaseReplicator {
13749
15559
  console.warn(`[BigQueryReplicator] Dropping and recreating table ${tableName}`);
13750
15560
  }
13751
15561
  await table.delete();
13752
- const schema = generateBigQuerySchema(attributes);
15562
+ const schema = generateBigQuerySchema(attributes, mutability);
13753
15563
  await dataset.createTable(tableName, { schema });
13754
15564
  this.emit("table_recreated", {
13755
15565
  replicator: this.name,
13756
15566
  tableName,
13757
- attributes: Object.keys(attributes)
15567
+ attributes: Object.keys(attributes),
15568
+ mutability
13758
15569
  });
13759
15570
  return;
13760
15571
  }
13761
15572
  if (this.schemaSync.strategy === "alter" && this.schemaSync.autoCreateColumns) {
13762
15573
  const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
13763
- const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
15574
+ const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
13764
15575
  if (newFields.length > 0) {
13765
15576
  if (this.config.verbose) {
13766
15577
  console.log(`[BigQueryReplicator] Adding ${newFields.length} field(s) to table ${tableName}:`, newFields);
@@ -13778,7 +15589,7 @@ class BigqueryReplicator extends BaseReplicator {
13778
15589
  }
13779
15590
  if (this.schemaSync.strategy === "validate-only") {
13780
15591
  const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
13781
- const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
15592
+ const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
13782
15593
  if (newFields.length > 0) {
13783
15594
  throw new Error(`Table ${tableName} schema mismatch. Missing columns: ${newFields.length}`);
13784
15595
  }
@@ -13797,7 +15608,8 @@ class BigqueryReplicator extends BaseReplicator {
13797
15608
  if (!this.resources[resourceName]) return [];
13798
15609
  return this.resources[resourceName].filter((tableConfig) => tableConfig.actions.includes(operation)).map((tableConfig) => ({
13799
15610
  table: tableConfig.table,
13800
- transform: tableConfig.transform
15611
+ transform: tableConfig.transform,
15612
+ mutability: tableConfig.mutability
13801
15613
  }));
13802
15614
  }
13803
15615
  applyTransform(data, transformFn) {
@@ -13816,6 +15628,32 @@ class BigqueryReplicator extends BaseReplicator {
13816
15628
  });
13817
15629
  return cleanData;
13818
15630
  }
15631
+ /**
15632
+ * Add tracking fields for append-only and immutable modes
15633
+ * @private
15634
+ */
15635
+ _addTrackingFields(data, operation, mutability, id) {
15636
+ const tracked = { ...data };
15637
+ if (mutability === "append-only" || mutability === "immutable") {
15638
+ tracked._operation_type = operation;
15639
+ tracked._operation_timestamp = (/* @__PURE__ */ new Date()).toISOString();
15640
+ }
15641
+ if (mutability === "immutable") {
15642
+ tracked._is_deleted = operation === "delete";
15643
+ tracked._version = this._getNextVersion(id);
15644
+ }
15645
+ return tracked;
15646
+ }
15647
+ /**
15648
+ * Get next version number for immutable mode
15649
+ * @private
15650
+ */
15651
+ _getNextVersion(id) {
15652
+ const current = this.versionCounters.get(id) || 0;
15653
+ const next = current + 1;
15654
+ this.versionCounters.set(id, next);
15655
+ return next;
15656
+ }
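Under append-only and immutable modes, replicate() below rewrites updates and deletes as inserts carrying these tracking fields, turning the destination table into an event log. Roughly, for one record id under immutable (timestamps illustrative):

// update on 'u1' lands as an insert of:
//   { ...row, _operation_type: 'update', _operation_timestamp: '2025-01-01T00:00:00.000Z',
//     _is_deleted: false, _version: 2 }
// delete on 'u1' lands as:
//   { ...row, _operation_type: 'delete', _operation_timestamp: '...',
//     _is_deleted: true, _version: 3 }
// Note: _version is kept in an in-memory Map, so it restarts from 1 in a new process.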
13819
15657
  async replicate(resourceName, operation, data, id, beforeData = null) {
13820
15658
  if (!this.enabled || !this.shouldReplicateResource(resourceName)) {
13821
15659
  return { skipped: true, reason: "resource_not_included" };
@@ -13834,9 +15672,14 @@ class BigqueryReplicator extends BaseReplicator {
13834
15672
  for (const tableConfig of tableConfigs) {
13835
15673
  const [okTable, errTable] = await tryFn(async () => {
13836
15674
  const table = dataset.table(tableConfig.table);
15675
+ const mutability = tableConfig.mutability;
13837
15676
  let job;
13838
- if (operation === "insert") {
13839
- const transformedData = this.applyTransform(data, tableConfig.transform);
15677
+ const shouldConvertToInsert = (mutability === "append-only" || mutability === "immutable") && (operation === "update" || operation === "delete");
15678
+ if (operation === "insert" || shouldConvertToInsert) {
15679
+ let transformedData = this.applyTransform(data, tableConfig.transform);
15680
+ if (shouldConvertToInsert) {
15681
+ transformedData = this._addTrackingFields(transformedData, operation, mutability, id);
15682
+ }
13840
15683
  try {
13841
15684
  job = await table.insert([transformedData]);
13842
15685
  } catch (error) {
@@ -13848,7 +15691,7 @@ class BigqueryReplicator extends BaseReplicator {
13848
15691
  }
13849
15692
  throw error;
13850
15693
  }
13851
- } else if (operation === "update") {
15694
+ } else if (operation === "update" && mutability === "mutable") {
13852
15695
  const transformedData = this.applyTransform(data, tableConfig.transform);
13853
15696
  const keys = Object.keys(transformedData).filter((k) => k !== "id");
13854
15697
  const setClause = keys.map((k) => `${k} = @${k}`).join(", ");
@@ -13890,7 +15733,7 @@ class BigqueryReplicator extends BaseReplicator {
13890
15733
  }
13891
15734
  }
13892
15735
  if (!job) throw lastError;
13893
- } else if (operation === "delete") {
15736
+ } else if (operation === "delete" && mutability === "mutable") {
13894
15737
  const query = `DELETE FROM \`${this.projectId}.${this.datasetId}.${tableConfig.table}\` WHERE id = @id`;
13895
15738
  try {
13896
15739
  const [deleteJob] = await this.bigqueryClient.createQueryJob({
@@ -14026,7 +15869,8 @@ class BigqueryReplicator extends BaseReplicator {
14026
15869
  datasetId: this.datasetId,
14027
15870
  resources: this.resources,
14028
15871
  logTable: this.logTable,
14029
- schemaSync: this.schemaSync
15872
+ schemaSync: this.schemaSync,
15873
+ mutability: this.mutability
14030
15874
  };
14031
15875
  }
14032
15876
  }
@@ -15747,11 +17591,11 @@ class ConnectionString {
15747
17591
  }
15748
17592
  }
15749
17593
 
15750
- class Client extends EventEmitter {
17594
+ class S3Client extends EventEmitter {
15751
17595
  constructor({
15752
17596
  verbose = false,
15753
17597
  id = null,
15754
- AwsS3Client,
17598
+ AwsS3Client: AwsS3Client2,
15755
17599
  connectionString,
15756
17600
  parallelism = 10,
15757
17601
  httpClientOptions = {}
@@ -15774,7 +17618,7 @@ class Client extends EventEmitter {
15774
17618
  // 60 second timeout
15775
17619
  ...httpClientOptions
15776
17620
  };
15777
- this.client = AwsS3Client || this.createClient();
17621
+ this.client = AwsS3Client2 || this.createClient();
15778
17622
  }
15779
17623
  createClient() {
15780
17624
  const httpAgent = new http.Agent(this.httpClientOptions);
@@ -18381,6 +20225,7 @@ ${errorDetails}`,
18381
20225
  events = {},
18382
20226
  asyncEvents = true,
18383
20227
  asyncPartitions = true,
20228
+ strictPartitions = false,
18384
20229
  createdBy = "user"
18385
20230
  } = config;
18386
20231
  this.name = name;
@@ -18412,6 +20257,7 @@ ${errorDetails}`,
18412
20257
  allNestedObjectsOptional,
18413
20258
  asyncEvents,
18414
20259
  asyncPartitions,
20260
+ strictPartitions,
18415
20261
  createdBy
18416
20262
  };
18417
20263
  this.hooks = {
@@ -19164,17 +21010,31 @@ ${errorDetails}`,
19164
21010
  throw errPut;
19165
21011
  }
19166
21012
  const insertedObject = await this.get(finalId);
19167
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19168
- setImmediate(() => {
19169
- this.createPartitionReferences(insertedObject).catch((err) => {
21013
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
21014
+ if (this.config.strictPartitions) {
21015
+ await this.createPartitionReferences(insertedObject);
21016
+ } else if (this.config.asyncPartitions) {
21017
+ setImmediate(() => {
21018
+ this.createPartitionReferences(insertedObject).catch((err) => {
21019
+ this.emit("partitionIndexError", {
21020
+ operation: "insert",
21021
+ id: finalId,
21022
+ error: err,
21023
+ message: err.message
21024
+ });
21025
+ });
21026
+ });
21027
+ } else {
21028
+ const [ok, err] = await tryFn(() => this.createPartitionReferences(insertedObject));
21029
+ if (!ok) {
19170
21030
  this.emit("partitionIndexError", {
19171
21031
  operation: "insert",
19172
21032
  id: finalId,
19173
21033
  error: err,
19174
21034
  message: err.message
19175
21035
  });
19176
- });
19177
- });
21036
+ }
21037
+ }
19178
21038
  const nonPartitionHooks = this.hooks.afterInsert.filter(
19179
21039
  (hook) => !hook.toString().includes("createPartitionReferences")
19180
21040
  );
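The new strictPartitions flag makes partition-index writes part of the operation itself: with it set, the insert above awaits createPartitionReferences and any failure rejects the insert, instead of surfacing later as a partitionIndexError event. A sketch, assuming the usual createResource entry point and the standard partitions shape:

const users = await database.createResource({
  name: 'users',
  attributes: { name: 'string|required', region: 'string' },
  partitions: { byRegion: { fields: { region: 'string' } } },
  asyncPartitions: false,    // write partition references inline...
  strictPartitions: true     // ...and fail the operation if they cannot be written
});

// Only reachable when strictPartitions is false:
users.on('partitionIndexError', ({ operation, id, error }) => {
  console.warn(`partition index lagging for ${operation} ${id}:`, error.message);
});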
@@ -19469,17 +21329,31 @@ ${errorDetails}`,
19469
21329
  body: finalBody,
19470
21330
  behavior: this.behavior
19471
21331
  });
19472
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19473
- setImmediate(() => {
19474
- this.handlePartitionReferenceUpdates(originalData, updatedData).catch((err2) => {
21332
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
21333
+ if (this.config.strictPartitions) {
21334
+ await this.handlePartitionReferenceUpdates(originalData, updatedData);
21335
+ } else if (this.config.asyncPartitions) {
21336
+ setImmediate(() => {
21337
+ this.handlePartitionReferenceUpdates(originalData, updatedData).catch((err2) => {
21338
+ this.emit("partitionIndexError", {
21339
+ operation: "update",
21340
+ id,
21341
+ error: err2,
21342
+ message: err2.message
21343
+ });
21344
+ });
21345
+ });
21346
+ } else {
21347
+ const [ok2, err2] = await tryFn(() => this.handlePartitionReferenceUpdates(originalData, updatedData));
21348
+ if (!ok2) {
19475
21349
  this.emit("partitionIndexError", {
19476
21350
  operation: "update",
19477
21351
  id,
19478
21352
  error: err2,
19479
21353
  message: err2.message
19480
21354
  });
19481
- });
19482
- });
21355
+ }
21356
+ }
19483
21357
  const nonPartitionHooks = this.hooks.afterUpdate.filter(
19484
21358
  (hook) => !hook.toString().includes("handlePartitionReferenceUpdates")
19485
21359
  );
@@ -19592,7 +21466,9 @@ ${errorDetails}`,
19592
21466
  if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19593
21467
  const oldData = { ...currentData, id };
19594
21468
  const newData = { ...mergedData, id };
19595
- if (this.config.asyncPartitions) {
21469
+ if (this.config.strictPartitions) {
21470
+ await this.handlePartitionReferenceUpdates(oldData, newData);
21471
+ } else if (this.config.asyncPartitions) {
19596
21472
  setImmediate(() => {
19597
21473
  this.handlePartitionReferenceUpdates(oldData, newData).catch((err) => {
19598
21474
  this.emit("partitionIndexError", {
@@ -19722,7 +21598,9 @@ ${errorDetails}`,
19722
21598
  }
19723
21599
  const replacedObject = { id, ...validatedAttributes };
19724
21600
  if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19725
- if (this.config.asyncPartitions) {
21601
+ if (this.config.strictPartitions) {
21602
+ await this.handlePartitionReferenceUpdates({}, replacedObject);
21603
+ } else if (this.config.asyncPartitions) {
19726
21604
  setImmediate(() => {
19727
21605
  this.handlePartitionReferenceUpdates({}, replacedObject).catch((err) => {
19728
21606
  this.emit("partitionIndexError", {
@@ -19862,17 +21740,31 @@ ${errorDetails}`,
19862
21740
  });
19863
21741
  const oldData = { ...originalData, id };
19864
21742
  const newData = { ...validatedAttributes, id };
19865
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19866
- setImmediate(() => {
19867
- this.handlePartitionReferenceUpdates(oldData, newData).catch((err2) => {
21743
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
21744
+ if (this.config.strictPartitions) {
21745
+ await this.handlePartitionReferenceUpdates(oldData, newData);
21746
+ } else if (this.config.asyncPartitions) {
21747
+ setImmediate(() => {
21748
+ this.handlePartitionReferenceUpdates(oldData, newData).catch((err2) => {
21749
+ this.emit("partitionIndexError", {
21750
+ operation: "updateConditional",
21751
+ id,
21752
+ error: err2,
21753
+ message: err2.message
21754
+ });
21755
+ });
21756
+ });
21757
+ } else {
21758
+ const [ok2, err2] = await tryFn(() => this.handlePartitionReferenceUpdates(oldData, newData));
21759
+ if (!ok2) {
19868
21760
  this.emit("partitionIndexError", {
19869
21761
  operation: "updateConditional",
19870
21762
  id,
19871
21763
  error: err2,
19872
21764
  message: err2.message
19873
21765
  });
19874
- });
19875
- });
21766
+ }
21767
+ }
19876
21768
  const nonPartitionHooks = this.hooks.afterUpdate.filter(
19877
21769
  (hook) => !hook.toString().includes("handlePartitionReferenceUpdates")
19878
21770
  );
@@ -19948,17 +21840,31 @@ ${errorDetails}`,
19948
21840
  operation: "delete",
19949
21841
  id
19950
21842
  });
19951
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19952
- setImmediate(() => {
19953
- this.deletePartitionReferences(objectData).catch((err3) => {
21843
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0 && objectData) {
21844
+ if (this.config.strictPartitions) {
21845
+ await this.deletePartitionReferences(objectData);
21846
+ } else if (this.config.asyncPartitions) {
21847
+ setImmediate(() => {
21848
+ this.deletePartitionReferences(objectData).catch((err3) => {
21849
+ this.emit("partitionIndexError", {
21850
+ operation: "delete",
21851
+ id,
21852
+ error: err3,
21853
+ message: err3.message
21854
+ });
21855
+ });
21856
+ });
21857
+ } else {
21858
+ const [ok3, err3] = await tryFn(() => this.deletePartitionReferences(objectData));
21859
+ if (!ok3) {
19954
21860
  this.emit("partitionIndexError", {
19955
21861
  operation: "delete",
19956
21862
  id,
19957
21863
  error: err3,
19958
21864
  message: err3.message
19959
21865
  });
19960
- });
19961
- });
21866
+ }
21867
+ }
19962
21868
  const nonPartitionHooks = this.hooks.afterDelete.filter(
19963
21869
  (hook) => !hook.toString().includes("deletePartitionReferences")
19964
21870
  );
@@ -21329,10 +23235,13 @@ function validateResourceConfig(config) {
21329
23235
  class Database extends EventEmitter {
21330
23236
  constructor(options) {
21331
23237
  super();
21332
- this.id = idGenerator(7);
23238
+ this.id = (() => {
23239
+ const [ok, err, id] = tryFn(() => idGenerator(7));
23240
+ return ok && id ? id : `db-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
23241
+ })();
21333
23242
  this.version = "1";
21334
23243
  this.s3dbVersion = (() => {
21335
- const [ok, err, version] = tryFn(() => true ? "12.3.0" : "latest");
23244
+ const [ok, err, version] = tryFn(() => true ? "13.0.0" : "latest");
21336
23245
  return ok ? version : "latest";
21337
23246
  })();
21338
23247
  this._resourcesMap = {};
@@ -21366,6 +23275,7 @@ class Database extends EventEmitter {
21366
23275
  this.versioningEnabled = options.versioningEnabled || false;
21367
23276
  this.persistHooks = options.persistHooks || false;
21368
23277
  this.strictValidation = options.strictValidation !== false;
23278
+ this.strictHooks = options.strictHooks || false;
21369
23279
  this._initHooks();
21370
23280
  let connectionString = options.connectionString;
21371
23281
  if (!connectionString && (options.bucket || options.accessKeyId || options.secretAccessKey)) {
@@ -21388,7 +23298,7 @@ class Database extends EventEmitter {
21388
23298
  connectionString = `s3://${encodeURIComponent(accessKeyId)}:${encodeURIComponent(secretAccessKey)}@${bucket || "s3db"}?${params.toString()}`;
21389
23299
  }
21390
23300
  }
21391
- this.client = options.client || new Client({
23301
+ this.client = options.client || new S3Client({
21392
23302
  verbose: this.verbose,
21393
23303
  parallelism: this.parallelism,
21394
23304
  connectionString
@@ -21396,18 +23306,25 @@ class Database extends EventEmitter {
21396
23306
  this.connectionString = connectionString;
21397
23307
  this.bucket = this.client.bucket;
21398
23308
  this.keyPrefix = this.client.keyPrefix;
21399
- if (!this._exitListenerRegistered) {
23309
+ this._registerExitListener();
23310
+ }
23311
+ /**
23312
+ * Register process exit listener for automatic cleanup
23313
+ * @private
23314
+ */
23315
+ _registerExitListener() {
23316
+ if (!this._exitListenerRegistered && typeof process !== "undefined") {
21400
23317
  this._exitListenerRegistered = true;
21401
- if (typeof process !== "undefined") {
21402
- process.on("exit", async () => {
21403
- if (this.isConnected()) {
21404
- await tryFn(() => this.disconnect());
21405
- }
21406
- });
21407
- }
23318
+ this._exitListener = async () => {
23319
+ if (this.isConnected()) {
23320
+ await tryFn(() => this.disconnect());
23321
+ }
23322
+ };
23323
+ process.on("exit", this._exitListener);
21408
23324
  }
21409
23325
  }
21410
23326
  async connect() {
23327
+ this._registerExitListener();
21411
23328
  await this.startPlugins();
21412
23329
  let metadata = null;
21413
23330
  let needsHealing = false;
@@ -22370,11 +24287,16 @@ class Database extends EventEmitter {
22370
24287
  if (this.client && typeof this.client.removeAllListeners === "function") {
22371
24288
  this.client.removeAllListeners();
22372
24289
  }
24290
+ await this.emit("disconnected", /* @__PURE__ */ new Date());
22373
24291
  this.removeAllListeners();
24292
+ if (this._exitListener && typeof process !== "undefined") {
24293
+ process.off("exit", this._exitListener);
24294
+ this._exitListener = null;
24295
+ this._exitListenerRegistered = false;
24296
+ }
22374
24297
  this.savedMetadata = null;
22375
24298
  this.plugins = {};
22376
24299
  this.pluginList = [];
22377
- this.emit("disconnected", /* @__PURE__ */ new Date());
22378
24300
  });
22379
24301
  }
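The ordering fix above means "disconnected" is now emitted (and awaited) before removeAllListeners(), so subscribers actually receive it, and the process "exit" hook registered at construction is detached instead of leaking across Database instances:

const db = new Database({ connectionString });
db.on('disconnected', (at) => console.log('closed at', at));
await db.connect();
await db.disconnect();  // logs the Date; in 12.x the listener was removed
                        // before the event fired, so nothing was logged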
22380
24302
  /**
@@ -22478,6 +24400,13 @@ class Database extends EventEmitter {
22478
24400
  const [ok, error] = await tryFn(() => hook({ database: this, ...context }));
22479
24401
  if (!ok) {
22480
24402
  this.emit("hookError", { event, error, context });
24403
+ if (this.strictHooks) {
24404
+ throw new DatabaseError(`Hook execution failed for event '${event}': ${error.message}`, {
24405
+ event,
24406
+ originalError: error,
24407
+ context
24408
+ });
24409
+ }
22481
24410
  }
22482
24411
  }
22483
24412
  }
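strictHooks turns silent hook failures into hard errors. By default a failing lifecycle hook only emits hookError and execution continues; with the flag set, the surrounding operation rejects:

const db = new Database({ connectionString, strictHooks: true });
// Without strictHooks you would only observe:
db.on('hookError', ({ event, error }) => console.warn(event, error.message));
// With strictHooks the same failure throws:
//   DatabaseError: Hook execution failed for event '<event>': <message>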
@@ -26294,7 +28223,7 @@ class S3TfStateDriver extends TfStateDriver {
26294
28223
  */
26295
28224
  async initialize() {
26296
28225
  const { bucket, credentials, region } = this.connectionConfig;
26297
- this.client = new Client({
28226
+ this.client = new S3Client({
26298
28227
  bucketName: bucket,
26299
28228
  credentials,
26300
28229
  region
@@ -37920,6 +39849,1082 @@ class VectorPlugin extends Plugin {
37920
39849
  }
37921
39850
  }
37922
39851
 
39852
+ class MemoryStorage {
39853
+ constructor(config = {}) {
39854
+ this.objects = /* @__PURE__ */ new Map();
39855
+ this.bucket = config.bucket || "s3db";
39856
+ this.enforceLimits = config.enforceLimits || false;
39857
+ this.metadataLimit = config.metadataLimit || 2048;
39858
+ this.maxObjectSize = config.maxObjectSize || 5 * 1024 * 1024 * 1024;
39859
+ this.persistPath = config.persistPath;
39860
+ this.autoPersist = config.autoPersist || false;
39861
+ this.verbose = config.verbose || false;
39862
+ }
39863
+ /**
39864
+ * Generate ETag (MD5 hash) for object body
39865
+ */
39866
+ _generateETag(body) {
39867
+ const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body || "");
39868
+ return crypto$1.createHash("md5").update(buffer).digest("hex");
39869
+ }
39870
+ /**
39871
+ * Calculate metadata size in bytes
39872
+ */
39873
+ _calculateMetadataSize(metadata) {
39874
+ if (!metadata) return 0;
39875
+ let size = 0;
39876
+ for (const [key, value] of Object.entries(metadata)) {
39877
+ size += Buffer.byteLength(key, "utf8");
39878
+ size += Buffer.byteLength(String(value), "utf8");
39879
+ }
39880
+ return size;
39881
+ }
39882
+ /**
39883
+ * Validate limits if enforceLimits is enabled
39884
+ */
39885
+ _validateLimits(body, metadata) {
39886
+ if (!this.enforceLimits) return;
39887
+ const metadataSize = this._calculateMetadataSize(metadata);
39888
+ if (metadataSize > this.metadataLimit) {
39889
+ throw new Error(
39890
+ `Metadata size (${metadataSize} bytes) exceeds limit of ${this.metadataLimit} bytes`
39891
+ );
39892
+ }
39893
+ const bodySize = Buffer.isBuffer(body) ? body.length : Buffer.byteLength(body || "", "utf8");
39894
+ if (bodySize > this.maxObjectSize) {
39895
+ throw new Error(
39896
+ `Object size (${bodySize} bytes) exceeds limit of ${this.maxObjectSize} bytes`
39897
+ );
39898
+ }
39899
+ }
39900
+ /**
39901
+ * Store an object
39902
+ */
39903
+ async put(key, { body, metadata, contentType, contentEncoding, contentLength, ifMatch }) {
39904
+ this._validateLimits(body, metadata);
39905
+ if (ifMatch !== void 0) {
39906
+ const existing = this.objects.get(key);
39907
+ if (existing && existing.etag !== ifMatch) {
39908
+ throw new Error(`Precondition failed: ETag mismatch for key "${key}"`);
39909
+ }
39910
+ }
39911
+ const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body || "");
39912
+ const etag = this._generateETag(buffer);
39913
+ const lastModified = (/* @__PURE__ */ new Date()).toISOString();
39914
+ const size = buffer.length;
39915
+ const objectData = {
39916
+ body: buffer,
39917
+ metadata: metadata || {},
39918
+ contentType: contentType || "application/octet-stream",
39919
+ etag,
39920
+ lastModified,
39921
+ size,
39922
+ contentEncoding,
39923
+ contentLength: contentLength || size
39924
+ };
39925
+ this.objects.set(key, objectData);
39926
+ if (this.verbose) {
39927
+ console.log(`[MemoryStorage] PUT ${key} (${size} bytes, etag: ${etag})`);
39928
+ }
39929
+ if (this.autoPersist && this.persistPath) {
39930
+ await this.saveToDisk();
39931
+ }
39932
+ return {
39933
+ ETag: etag,
39934
+ VersionId: null,
39935
+ // Memory storage doesn't support versioning
39936
+ ServerSideEncryption: null,
39937
+ Location: `/${this.bucket}/${key}`
39938
+ };
39939
+ }
39940
+ /**
39941
+ * Retrieve an object
39942
+ */
39943
+ async get(key) {
39944
+ const obj = this.objects.get(key);
39945
+ if (!obj) {
39946
+ const error = new Error(`Object not found: ${key}`);
39947
+ error.name = "NoSuchKey";
39948
+ error.$metadata = {
39949
+ httpStatusCode: 404,
39950
+ requestId: "memory-" + Date.now(),
39951
+ attempts: 1,
39952
+ totalRetryDelay: 0
39953
+ };
39954
+ throw error;
39955
+ }
39956
+ if (this.verbose) {
39957
+ console.log(`[MemoryStorage] GET ${key} (${obj.size} bytes)`);
39958
+ }
39959
+ const bodyStream = stream$1.Readable.from(obj.body);
39960
+ return {
39961
+ Body: bodyStream,
39962
+ Metadata: { ...obj.metadata },
39963
+ ContentType: obj.contentType,
39964
+ ContentLength: obj.size,
39965
+ ETag: obj.etag,
39966
+ LastModified: new Date(obj.lastModified),
39967
+ ContentEncoding: obj.contentEncoding
39968
+ };
39969
+ }
39970
+ /**
39971
+ * Get object metadata only (like S3 HeadObject)
39972
+ */
39973
+ async head(key) {
39974
+ const obj = this.objects.get(key);
39975
+ if (!obj) {
39976
+ const error = new Error(`Object not found: ${key}`);
39977
+ error.name = "NoSuchKey";
39978
+ error.$metadata = {
39979
+ httpStatusCode: 404,
39980
+ requestId: "memory-" + Date.now(),
39981
+ attempts: 1,
39982
+ totalRetryDelay: 0
39983
+ };
39984
+ throw error;
39985
+ }
39986
+ if (this.verbose) {
39987
+ console.log(`[MemoryStorage] HEAD ${key}`);
39988
+ }
39989
+ return {
39990
+ Metadata: { ...obj.metadata },
39991
+ ContentType: obj.contentType,
39992
+ ContentLength: obj.size,
39993
+ ETag: obj.etag,
39994
+ LastModified: new Date(obj.lastModified),
39995
+ ContentEncoding: obj.contentEncoding
39996
+ };
39997
+ }
39998
+ /**
39999
+ * Copy an object
40000
+ */
40001
+ async copy(from, to, { metadata, metadataDirective, contentType }) {
40002
+ const source = this.objects.get(from);
40003
+ if (!source) {
40004
+ const error = new Error(`Source object not found: ${from}`);
40005
+ error.name = "NoSuchKey";
40006
+ throw error;
40007
+ }
40008
+ let finalMetadata = { ...source.metadata };
40009
+ if (metadataDirective === "REPLACE" && metadata) {
40010
+ finalMetadata = metadata;
40011
+ } else if (metadata) {
40012
+ finalMetadata = { ...finalMetadata, ...metadata };
40013
+ }
40014
+ const result = await this.put(to, {
40015
+ body: source.body,
40016
+ metadata: finalMetadata,
40017
+ contentType: contentType || source.contentType,
40018
+ contentEncoding: source.contentEncoding
40019
+ });
40020
+ if (this.verbose) {
40021
+ console.log(`[MemoryStorage] COPY ${from} \u2192 ${to}`);
40022
+ }
40023
+ return result;
40024
+ }
40025
+ /**
40026
+ * Check if object exists
40027
+ */
40028
+ exists(key) {
40029
+ return this.objects.has(key);
40030
+ }
40031
+ /**
40032
+ * Delete an object
40033
+ */
40034
+ async delete(key) {
40035
+ const existed = this.objects.has(key);
40036
+ this.objects.delete(key);
40037
+ if (this.verbose) {
40038
+ console.log(`[MemoryStorage] DELETE ${key} (existed: ${existed})`);
40039
+ }
40040
+ if (this.autoPersist && this.persistPath) {
40041
+ await this.saveToDisk();
40042
+ }
40043
+ return {
40044
+ DeleteMarker: false,
40045
+ VersionId: null
40046
+ };
40047
+ }
40048
+ /**
40049
+ * Delete multiple objects (batch)
40050
+ */
40051
+ async deleteMultiple(keys) {
40052
+ const deleted = [];
40053
+ const errors = [];
40054
+ for (const key of keys) {
40055
+ try {
40056
+ await this.delete(key);
40057
+ deleted.push({ Key: key });
40058
+ } catch (error) {
40059
+ errors.push({
40060
+ Key: key,
40061
+ Code: error.name || "InternalError",
40062
+ Message: error.message
40063
+ });
40064
+ }
40065
+ }
40066
+ if (this.verbose) {
40067
+ console.log(`[MemoryStorage] DELETE BATCH (${deleted.length} deleted, ${errors.length} errors)`);
40068
+ }
40069
+ return { Deleted: deleted, Errors: errors };
40070
+ }
40071
+ /**
40072
+ * List objects with prefix/delimiter support
40073
+ */
40074
+ async list({ prefix = "", delimiter = null, maxKeys = 1e3, continuationToken = null }) {
40075
+ const allKeys = Array.from(this.objects.keys());
40076
+ let filteredKeys = prefix ? allKeys.filter((key) => key.startsWith(prefix)) : allKeys;
40077
+ filteredKeys.sort();
40078
+ let startIndex = 0;
40079
+ if (continuationToken) {
40080
+ startIndex = parseInt(continuationToken) || 0;
40081
+ }
40082
+ const paginatedKeys = filteredKeys.slice(startIndex, startIndex + maxKeys);
40083
+ const isTruncated = startIndex + maxKeys < filteredKeys.length;
40084
+ const nextContinuationToken = isTruncated ? String(startIndex + maxKeys) : null;
40085
+ const commonPrefixes = /* @__PURE__ */ new Set();
40086
+ const contents = [];
40087
+ for (const key of paginatedKeys) {
40088
+ if (delimiter && prefix) {
40089
+ const suffix = key.substring(prefix.length);
40090
+ const delimiterIndex = suffix.indexOf(delimiter);
40091
+ if (delimiterIndex !== -1) {
40092
+ const commonPrefix = prefix + suffix.substring(0, delimiterIndex + 1);
40093
+ commonPrefixes.add(commonPrefix);
40094
+ continue;
40095
+ }
40096
+ }
40097
+ const obj = this.objects.get(key);
40098
+ contents.push({
40099
+ Key: key,
40100
+ Size: obj.size,
40101
+ LastModified: new Date(obj.lastModified),
40102
+ ETag: obj.etag,
40103
+ StorageClass: "STANDARD"
40104
+ });
40105
+ }
40106
+ if (this.verbose) {
40107
+ console.log(`[MemoryStorage] LIST prefix="${prefix}" (${contents.length} objects, ${commonPrefixes.size} prefixes)`);
40108
+ }
40109
+ return {
40110
+ Contents: contents,
40111
+ CommonPrefixes: Array.from(commonPrefixes).map((prefix2) => ({ Prefix: prefix2 })),
40112
+ IsTruncated: isTruncated,
40113
+ NextContinuationToken: nextContinuationToken,
40114
+ KeyCount: contents.length + commonPrefixes.size,
40115
+ MaxKeys: maxKeys,
40116
+ Prefix: prefix,
40117
+ Delimiter: delimiter
40118
+ };
40119
+ }
40120
+ /**
40121
+ * Create a snapshot of current state
40122
+ */
40123
+ snapshot() {
40124
+ const snapshot = {
40125
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
40126
+ bucket: this.bucket,
40127
+ objectCount: this.objects.size,
40128
+ objects: {}
40129
+ };
40130
+ for (const [key, obj] of this.objects.entries()) {
40131
+ snapshot.objects[key] = {
40132
+ body: obj.body.toString("base64"),
40133
+ metadata: obj.metadata,
40134
+ contentType: obj.contentType,
40135
+ etag: obj.etag,
40136
+ lastModified: obj.lastModified,
40137
+ size: obj.size,
40138
+ contentEncoding: obj.contentEncoding,
40139
+ contentLength: obj.contentLength
40140
+ };
40141
+ }
40142
+ return snapshot;
40143
+ }
40144
+ /**
40145
+ * Restore from a snapshot
40146
+ */
40147
+ restore(snapshot) {
40148
+ if (!snapshot || !snapshot.objects) {
40149
+ throw new Error("Invalid snapshot format");
40150
+ }
40151
+ this.objects.clear();
40152
+ for (const [key, obj] of Object.entries(snapshot.objects)) {
40153
+ this.objects.set(key, {
40154
+ body: Buffer.from(obj.body, "base64"),
40155
+ metadata: obj.metadata,
40156
+ contentType: obj.contentType,
40157
+ etag: obj.etag,
40158
+ lastModified: obj.lastModified,
40159
+ size: obj.size,
40160
+ contentEncoding: obj.contentEncoding,
40161
+ contentLength: obj.contentLength
40162
+ });
40163
+ }
40164
+ if (this.verbose) {
40165
+ console.log(`[MemoryStorage] Restored snapshot with ${this.objects.size} objects`);
40166
+ }
40167
+ }
40168
+ /**
40169
+ * Save current state to disk
40170
+ */
40171
+ async saveToDisk(customPath) {
40172
+ const path = customPath || this.persistPath;
40173
+ if (!path) {
40174
+ throw new Error("No persist path configured");
40175
+ }
40176
+ const snapshot = this.snapshot();
40177
+ const json = JSON.stringify(snapshot, null, 2);
40178
+ const [ok, err] = await tryFn(() => promises.writeFile(path, json, "utf-8"));
40179
+ if (!ok) {
40180
+ throw new Error(`Failed to save to disk: ${err.message}`);
40181
+ }
40182
+ if (this.verbose) {
40183
+ console.log(`[MemoryStorage] Saved ${this.objects.size} objects to ${path}`);
40184
+ }
40185
+ return path;
40186
+ }
40187
+ /**
40188
+ * Load state from disk
40189
+ */
40190
+ async loadFromDisk(customPath) {
40191
+ const path = customPath || this.persistPath;
40192
+ if (!path) {
40193
+ throw new Error("No persist path configured");
40194
+ }
40195
+ const [ok, err, json] = await tryFn(() => promises.readFile(path, "utf-8"));
40196
+ if (!ok) {
40197
+ throw new Error(`Failed to load from disk: ${err.message}`);
40198
+ }
40199
+ const snapshot = JSON.parse(json);
40200
+ this.restore(snapshot);
40201
+ if (this.verbose) {
40202
+ console.log(`[MemoryStorage] Loaded ${this.objects.size} objects from ${path}`);
40203
+ }
40204
+ return snapshot;
40205
+ }
40206
+ /**
40207
+ * Get storage statistics
40208
+ */
40209
+ getStats() {
40210
+ let totalSize = 0;
40211
+ const keys = [];
40212
+ for (const [key, obj] of this.objects.entries()) {
40213
+ totalSize += obj.size;
40214
+ keys.push(key);
40215
+ }
40216
+ return {
40217
+ objectCount: this.objects.size,
40218
+ totalSize,
40219
+ totalSizeFormatted: this._formatBytes(totalSize),
40220
+ keys: keys.sort(),
40221
+ bucket: this.bucket
40222
+ };
40223
+ }
40224
+ /**
40225
+ * Format bytes as a human-readable string
40226
+ */
40227
+ _formatBytes(bytes) {
40228
+ if (bytes === 0) return "0 Bytes";
40229
+ const k = 1024;
40230
+ const sizes = ["Bytes", "KB", "MB", "GB"];
40231
+ const i = Math.floor(Math.log(bytes) / Math.log(k));
40232
+ return Math.round(bytes / Math.pow(k, i) * 100) / 100 + " " + sizes[i];
40233
+ }
40234
+ /**
40235
+ * Clear all objects
40236
+ */
40237
+ clear() {
40238
+ this.objects.clear();
40239
+ if (this.verbose) {
40240
+ console.log(`[MemoryStorage] Cleared all objects`);
40241
+ }
40242
+ }
40243
+ }
40244
+
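MemoryStorage mimics just enough S3 semantics (MD5 ETags, NoSuchKey errors with 404 metadata, prefix/delimiter listing with a numeric continuation token, If-Match preconditions) to stand in for a bucket in tests. A usage sketch:

const storage = new MemoryStorage({
  enforceLimits: true,                  // opt in to S3-like caps
  metadataLimit: 2048,                  // bytes, mirroring S3's 2 KB user-metadata limit
  persistPath: '/tmp/s3db-fixture.json' // illustrative path
});

const { ETag } = await storage.put('users/u1', {
  body: JSON.stringify({ name: 'Ada' }),
  metadata: { role: 'admin' },
  contentType: 'application/json'
});

await storage.put('users/u1', { body: '{}', ifMatch: ETag }); // optimistic concurrency
const page = await storage.list({ prefix: 'users/', maxKeys: 100 });

await storage.saveToDisk();   // snapshot() serialized to persistPath
await storage.loadFromDisk(); // restore(snapshot) from disk later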
40245
+ class MemoryClient extends EventEmitter {
40246
+ constructor(config = {}) {
40247
+ super();
40248
+ this.id = config.id || idGenerator(77);
40249
+ this.verbose = config.verbose || false;
40250
+ this.parallelism = config.parallelism || 10;
40251
+ this.bucket = config.bucket || "s3db";
40252
+ this.keyPrefix = config.keyPrefix || "";
40253
+ this.region = config.region || "us-east-1";
40254
+ this.storage = new MemoryStorage({
40255
+ bucket: this.bucket,
40256
+ enforceLimits: config.enforceLimits || false,
40257
+ metadataLimit: config.metadataLimit || 2048,
40258
+ maxObjectSize: config.maxObjectSize || 5 * 1024 * 1024 * 1024,
40259
+ persistPath: config.persistPath,
40260
+ autoPersist: config.autoPersist || false,
40261
+ verbose: this.verbose
40262
+ });
40263
+ this.config = {
40264
+ bucket: this.bucket,
40265
+ keyPrefix: this.keyPrefix,
40266
+ region: this.region,
40267
+ endpoint: "memory://localhost",
40268
+ forcePathStyle: true
40269
+ };
40270
+ if (this.verbose) {
40271
+ console.log(`[MemoryClient] Initialized (id: ${this.id}, bucket: ${this.bucket})`);
40272
+ }
40273
+ }
40274
+ /**
40275
+ * Simulate sendCommand from AWS SDK
40276
+ * Used by Database/Resource to send AWS SDK commands
40277
+ */
40278
+ async sendCommand(command) {
40279
+ const commandName = command.constructor.name;
40280
+ const input = command.input || {};
40281
+ this.emit("command.request", commandName, input);
40282
+ let response;
40283
+ try {
40284
+ switch (commandName) {
40285
+ case "PutObjectCommand":
40286
+ response = await this._handlePutObject(input);
40287
+ break;
40288
+ case "GetObjectCommand":
40289
+ response = await this._handleGetObject(input);
40290
+ break;
40291
+ case "HeadObjectCommand":
40292
+ response = await this._handleHeadObject(input);
40293
+ break;
40294
+ case "CopyObjectCommand":
40295
+ response = await this._handleCopyObject(input);
40296
+ break;
40297
+ case "DeleteObjectCommand":
40298
+ response = await this._handleDeleteObject(input);
40299
+ break;
40300
+ case "DeleteObjectsCommand":
40301
+ response = await this._handleDeleteObjects(input);
40302
+ break;
40303
+ case "ListObjectsV2Command":
40304
+ response = await this._handleListObjects(input);
40305
+ break;
40306
+ default:
40307
+ throw new Error(`Unsupported command: ${commandName}`);
40308
+ }
40309
+ this.emit("command.response", commandName, response, input);
40310
+ return response;
40311
+ } catch (error) {
40312
+ const mappedError = mapAwsError(error, {
40313
+ bucket: this.bucket,
40314
+ key: input.Key,
40315
+ commandName,
40316
+ commandInput: input
40317
+ });
40318
+ throw mappedError;
40319
+ }
40320
+ }
40321
+ /**
40322
+ * PutObjectCommand handler
40323
+ */
40324
+ async _handlePutObject(input) {
40325
+ const key = input.Key;
40326
+ const metadata = input.Metadata || {};
40327
+ const contentType = input.ContentType;
40328
+ const body = input.Body;
40329
+ const contentEncoding = input.ContentEncoding;
40330
+ const contentLength = input.ContentLength;
40331
+ const ifMatch = input.IfMatch;
40332
+ return await this.storage.put(key, {
40333
+ body,
40334
+ metadata,
40335
+ contentType,
40336
+ contentEncoding,
40337
+ contentLength,
40338
+ ifMatch
40339
+ });
40340
+ }
40341
+ /**
40342
+ * GetObjectCommand handler
40343
+ */
40344
+ async _handleGetObject(input) {
40345
+ const key = input.Key;
40346
+ return await this.storage.get(key);
40347
+ }
40348
+ /**
40349
+ * HeadObjectCommand handler
40350
+ */
40351
+ async _handleHeadObject(input) {
40352
+ const key = input.Key;
40353
+ return await this.storage.head(key);
40354
+ }
40355
+ /**
40356
+ * CopyObjectCommand handler
40357
+ */
40358
+ async _handleCopyObject(input) {
40359
+ const copySource = input.CopySource;
40360
+ const parts = copySource.split("/");
40361
+ const sourceKey = parts.slice(1).join("/");
40362
+ const destinationKey = input.Key;
40363
+ const metadata = input.Metadata;
40364
+ const metadataDirective = input.MetadataDirective;
40365
+ const contentType = input.ContentType;
40366
+ return await this.storage.copy(sourceKey, destinationKey, {
40367
+ metadata,
40368
+ metadataDirective,
40369
+ contentType
40370
+ });
40371
+ }
40372
+ /**
40373
+ * DeleteObjectCommand handler
40374
+ */
40375
+ async _handleDeleteObject(input) {
40376
+ const key = input.Key;
40377
+ return await this.storage.delete(key);
40378
+ }
40379
+ /**
40380
+ * DeleteObjectsCommand handler
40381
+ */
40382
+ async _handleDeleteObjects(input) {
40383
+ const objects = input.Delete?.Objects || [];
40384
+ const keys = objects.map((obj) => obj.Key);
40385
+ return await this.storage.deleteMultiple(keys);
40386
+ }
40387
+ /**
40388
+ * ListObjectsV2Command handler
40389
+ */
40390
+ async _handleListObjects(input) {
40391
+ const fullPrefix = this.keyPrefix && input.Prefix ? path$1.join(this.keyPrefix, input.Prefix) : this.keyPrefix || input.Prefix || "";
40392
+ return await this.storage.list({
40393
+ prefix: fullPrefix,
40394
+ delimiter: input.Delimiter,
40395
+ maxKeys: input.MaxKeys,
40396
+ continuationToken: input.ContinuationToken
40397
+ });
40398
+ }
40399
+ /**
40400
+ * Put an object (Client interface method)
40401
+ */
40402
+ async putObject({ key, metadata, contentType, body, contentEncoding, contentLength, ifMatch }) {
40403
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40404
+ const stringMetadata = {};
40405
+ if (metadata) {
40406
+ for (const [k, v] of Object.entries(metadata)) {
40407
+ const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, "_");
40408
+ const { encoded } = metadataEncode(v);
40409
+ stringMetadata[validKey] = encoded;
40410
+ }
40411
+ }
40412
+ const response = await this.storage.put(fullKey, {
40413
+ body,
40414
+ metadata: stringMetadata,
40415
+ contentType,
40416
+ contentEncoding,
40417
+ contentLength,
40418
+ ifMatch
40419
+ });
40420
+ this.emit("putObject", null, { key, metadata, contentType, body, contentEncoding, contentLength });
40421
+ return response;
40422
+ }
40423
+ /**
40424
+ * Get an object (Client interface method)
40425
+ */
40426
+ async getObject(key) {
40427
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40428
+ const response = await this.storage.get(fullKey);
40429
+ const decodedMetadata = {};
40430
+ if (response.Metadata) {
40431
+ for (const [k, v] of Object.entries(response.Metadata)) {
40432
+ decodedMetadata[k] = metadataDecode(v);
40433
+ }
40434
+ }
40435
+ this.emit("getObject", null, { key });
40436
+ return {
40437
+ ...response,
40438
+ Metadata: decodedMetadata
40439
+ };
40440
+ }
40441
+ /**
40442
+ * Head object (get metadata only)
40443
+ */
40444
+ async headObject(key) {
40445
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40446
+ const response = await this.storage.head(fullKey);
40447
+ const decodedMetadata = {};
40448
+ if (response.Metadata) {
40449
+ for (const [k, v] of Object.entries(response.Metadata)) {
40450
+ decodedMetadata[k] = metadataDecode(v);
40451
+ }
40452
+ }
40453
+ this.emit("headObject", null, { key });
40454
+ return {
40455
+ ...response,
40456
+ Metadata: decodedMetadata
40457
+ };
40458
+ }
40459
+ /**
40460
+ * Copy an object
40461
+ */
40462
+ async copyObject({ from, to, metadata, metadataDirective, contentType }) {
40463
+ const fullFrom = this.keyPrefix ? path$1.join(this.keyPrefix, from) : from;
40464
+ const fullTo = this.keyPrefix ? path$1.join(this.keyPrefix, to) : to;
40465
+ const encodedMetadata = {};
40466
+ if (metadata) {
40467
+ for (const [k, v] of Object.entries(metadata)) {
40468
+ const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, "_");
40469
+ const { encoded } = metadataEncode(v);
40470
+ encodedMetadata[validKey] = encoded;
40471
+ }
40472
+ }
40473
+ const response = await this.storage.copy(fullFrom, fullTo, {
40474
+ metadata: encodedMetadata,
40475
+ metadataDirective,
40476
+ contentType
40477
+ });
40478
+ this.emit("copyObject", null, { from, to, metadata, metadataDirective });
40479
+ return response;
40480
+ }
40481
+ /**
40482
+ * Check if object exists
40483
+ */
40484
+ async exists(key) {
40485
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40486
+ return this.storage.exists(fullKey);
40487
+ }
40488
+ /**
40489
+ * Delete an object
40490
+ */
40491
+ async deleteObject(key) {
40492
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40493
+ const response = await this.storage.delete(fullKey);
40494
+ this.emit("deleteObject", null, { key });
40495
+ return response;
40496
+ }
40497
+ /**
40498
+ * Delete multiple objects (batch)
40499
+ */
40500
+ async deleteObjects(keys) {
40501
+ const fullKeys = keys.map(
40502
+ (key) => this.keyPrefix ? path$1.join(this.keyPrefix, key) : key
40503
+ );
40504
+ const batches = lodashEs.chunk(fullKeys, this.parallelism);
40505
+ const allResults = { Deleted: [], Errors: [] };
40506
+ const { results } = await promisePool.PromisePool.withConcurrency(this.parallelism).for(batches).process(async (batch) => {
40507
+ return await this.storage.deleteMultiple(batch);
40508
+ });
40509
+ for (const result of results) {
40510
+ allResults.Deleted.push(...result.Deleted);
40511
+ allResults.Errors.push(...result.Errors);
40512
+ }
40513
+ this.emit("deleteObjects", null, { keys, count: allResults.Deleted.length });
40514
+ return allResults;
40515
+ }
+   /**
+    * List objects with pagination support
+    */
+   async listObjects({ prefix = "", delimiter = null, maxKeys = 1e3, continuationToken = null }) {
+     const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
+     const response = await this.storage.list({
+       prefix: fullPrefix,
+       delimiter,
+       maxKeys,
+       continuationToken
+     });
+     this.emit("listObjects", null, { prefix, count: response.Contents.length });
+     return response;
+   }
+   /**
+    * Get a page of keys with offset/limit pagination
+    */
+   async getKeysPage(params = {}) {
+     const { prefix = "", offset = 0, amount = 100 } = params;
+     let keys = [];
+     let truncated = true;
+     let continuationToken;
+     if (offset > 0) {
+       const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
+       const response = await this.storage.list({
+         prefix: fullPrefix,
+         maxKeys: offset + amount
+       });
+       keys = response.Contents.map((x) => x.Key).slice(offset, offset + amount);
+     } else {
+       while (truncated) {
+         const options = {
+           prefix,
+           continuationToken,
+           maxKeys: amount - keys.length
+         };
+         const res = await this.listObjects(options);
+         if (res.Contents) {
+           keys = keys.concat(res.Contents.map((x) => x.Key));
+         }
+         truncated = res.IsTruncated || false;
+         continuationToken = res.NextContinuationToken;
+         if (keys.length >= amount) {
+           keys = keys.slice(0, amount);
+           break;
+         }
+       }
+     }
+     if (this.keyPrefix) {
+       keys = keys.map((x) => x.replace(this.keyPrefix, "")).map((x) => x.startsWith("/") ? x.replace("/", "") : x);
+     }
+     this.emit("getKeysPage", keys, params);
+     return keys;
+   }
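+   // Example (illustrative): the offset branch lists `offset + amount` keys from
+   // the start and slices, so deep offsets get progressively more expensive.
+   //   const page = await client.getKeysPage({ prefix: "resource=users/", offset: 200, amount: 100 });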
+   /**
+    * Get all keys with a given prefix
+    */
+   async getAllKeys({ prefix = "" }) {
+     const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
+     const response = await this.storage.list({
+       prefix: fullPrefix,
+       maxKeys: 1e5 // Large number to get all
+     });
+     let keys = response.Contents.map((x) => x.Key);
+     if (this.keyPrefix) {
+       keys = keys.map((x) => x.replace(this.keyPrefix, "")).map((x) => x.startsWith("/") ? x.replace("/", "") : x);
+     }
+     this.emit("getAllKeys", keys, { prefix });
+     return keys;
+   }
+   /**
+    * Count total objects under a prefix
+    */
+   async count({ prefix = "" } = {}) {
+     const keys = await this.getAllKeys({ prefix });
+     const count = keys.length;
+     this.emit("count", count, { prefix });
+     return count;
+   }
+   /**
+    * Delete all objects under a prefix
+    */
+   async deleteAll({ prefix = "" } = {}) {
+     const keys = await this.getAllKeys({ prefix });
+     let totalDeleted = 0;
+     if (keys.length > 0) {
+       const result = await this.deleteObjects(keys);
+       totalDeleted = result.Deleted.length;
+       this.emit("deleteAll", {
+         prefix,
+         batch: totalDeleted,
+         total: totalDeleted
+       });
+     }
+     this.emit("deleteAllComplete", {
+       prefix,
+       totalDeleted
+     });
+     return totalDeleted;
+   }
+   /**
+    * Get continuation token after skipping offset items
+    */
+   async getContinuationTokenAfterOffset({ prefix = "", offset = 1e3 } = {}) {
+     if (offset === 0) return null;
+     const keys = await this.getAllKeys({ prefix });
+     if (offset >= keys.length) {
+       this.emit("getContinuationTokenAfterOffset", null, { prefix, offset });
+       return null;
+     }
+     const token = keys[offset];
+     this.emit("getContinuationTokenAfterOffset", token, { prefix, offset });
+     return token;
+   }
+   /**
+    * Move an object from one key to another
+    */
+   async moveObject({ from, to }) {
+     await this.copyObject({ from, to, metadataDirective: "COPY" });
+     await this.deleteObject(from);
+   }
+   /**
+    * Move all objects from one prefix to another
+    */
+   async moveAllObjects({ prefixFrom, prefixTo }) {
+     const keys = await this.getAllKeys({ prefix: prefixFrom });
+     const results = [];
+     const errors = [];
+     for (const key of keys) {
+       try {
+         const to = key.replace(prefixFrom, prefixTo);
+         await this.moveObject({ from: key, to });
+         results.push(to);
+       } catch (error) {
+         errors.push({
+           message: error.message,
+           raw: error,
+           key
+         });
+       }
+     }
+     this.emit("moveAllObjects", { results, errors }, { prefixFrom, prefixTo });
+     if (errors.length > 0) {
+       const error = new Error("Some objects could not be moved");
+       error.context = {
+         bucket: this.bucket,
+         operation: "moveAllObjects",
+         prefixFrom,
+         prefixTo,
+         totalKeys: keys.length,
+         failedCount: errors.length,
+         successCount: results.length,
+         errors
+       };
+       throw error;
+     }
+     return results;
+   }
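+   // Example (illustrative): on partial failure the thrown error carries the
+   // per-key details in `error.context.errors`.
+   //   try {
+   //     await client.moveAllObjects({ prefixFrom: "staging/", prefixTo: "live/" });
+   //   } catch (error) {
+   //     for (const e of error.context.errors) console.warn(e.key, e.message);
+   //   }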
+   /**
+    * Create a snapshot of current storage state
+    */
+   snapshot() {
+     return this.storage.snapshot();
+   }
+   /**
+    * Restore from a snapshot
+    */
+   restore(snapshot) {
+     return this.storage.restore(snapshot);
+   }
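+   // Example (illustrative): snapshot/restore is handy in tests -- capture the
+   // state once, mutate freely, then roll back.
+   //   const saved = client.snapshot();
+   //   await client.deleteAll({ prefix: "resource=users/" });
+   //   client.restore(saved);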
+   /**
+    * Save current state to disk (persistence)
+    */
+   async saveToDisk(path2) {
+     return await this.storage.saveToDisk(path2);
+   }
+   /**
+    * Load state from disk
+    */
+   async loadFromDisk(path2) {
+     return await this.storage.loadFromDisk(path2);
+   }
+   /**
+    * Export to BackupPlugin-compatible format (s3db.json + JSONL files)
+    * Compatible with BackupPlugin for easy migration
+    *
+    * @param {string} outputDir - Output directory path
+    * @param {Object} options - Export options
+    * @param {Array<string>} options.resources - Resource names to export (default: all)
+    * @param {boolean} options.compress - Use gzip compression (default: true)
+    * @param {Object} options.database - Database instance for schema metadata
+    * @returns {Promise<Object>} Export manifest with file paths and stats
+    */
+   async exportBackup(outputDir, options = {}) {
+     const { mkdir, writeFile } = await import('fs/promises');
+     const zlib = await import('zlib');
+     const { promisify } = await import('util');
+     const gzip = promisify(zlib.gzip);
+     await mkdir(outputDir, { recursive: true });
+     const compress = options.compress !== false;
+     const database = options.database;
+     const resourceFilter = options.resources;
+     const allKeys = await this.getAllKeys({});
+     const resourceMap = /* @__PURE__ */ new Map();
+     for (const key of allKeys) {
+       const match = key.match(/^resource=([^/]+)\//);
+       if (match) {
+         const resourceName = match[1];
+         if (!resourceFilter || resourceFilter.includes(resourceName)) {
+           if (!resourceMap.has(resourceName)) {
+             resourceMap.set(resourceName, []);
+           }
+           resourceMap.get(resourceName).push(key);
+         }
+       }
+     }
+     const exportedFiles = {};
+     const resourceStats = {};
+     for (const [resourceName, keys] of resourceMap.entries()) {
+       const records = [];
+       const resource = database && database.resources && database.resources[resourceName];
+       for (const key of keys) {
+         const idMatch = key.match(/\/id=([^/]+)/);
+         const recordId = idMatch ? idMatch[1] : null;
+         let record;
+         if (resource && recordId) {
+           try {
+             record = await resource.get(recordId);
+           } catch (err) {
+             console.warn(`Failed to get record ${recordId} from resource ${resourceName}, using fallback`);
+             record = null;
+           }
+         }
+         if (!record) {
+           const obj = await this.getObject(key);
+           record = { ...obj.Metadata };
+           if (recordId && !record.id) {
+             record.id = recordId;
+           }
+           if (obj.Body) {
+             const chunks = [];
+             for await (const chunk2 of obj.Body) {
+               chunks.push(chunk2);
+             }
+             const bodyBuffer = Buffer.concat(chunks);
+             const bodyStr = bodyBuffer.toString("utf-8");
+             if (bodyStr.startsWith("{") || bodyStr.startsWith("[")) {
+               try {
+                 const bodyData = JSON.parse(bodyStr);
+                 Object.assign(record, bodyData);
+               } catch {
+                 record._body = bodyStr;
+               }
+             } else if (bodyStr) {
+               record._body = bodyStr;
+             }
+           }
+         }
+         records.push(record);
+       }
+       const jsonl = records.map((r) => JSON.stringify(r)).join("\n");
+       const filename = compress ? `${resourceName}.jsonl.gz` : `${resourceName}.jsonl`;
+       const filePath = `${outputDir}/${filename}`;
+       // Compress once and reuse the buffer for both the file write and the size stat
+       const compressed = compress ? await gzip(jsonl) : null;
+       if (compress) {
+         await writeFile(filePath, compressed);
+       } else {
+         await writeFile(filePath, jsonl, "utf-8");
+       }
+       exportedFiles[resourceName] = filePath;
+       resourceStats[resourceName] = {
+         recordCount: records.length,
+         fileSize: compress ? compressed.length : Buffer.byteLength(jsonl)
+       };
+     }
+     const s3dbMetadata = {
+       version: "1.0",
+       timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+       bucket: this.bucket,
+       keyPrefix: this.keyPrefix || "",
+       compressed: compress,
+       resources: {},
+       totalRecords: 0,
+       totalSize: 0
+     };
+     if (database && database.resources) {
+       for (const [resourceName, resource] of Object.entries(database.resources)) {
+         if (resourceMap.has(resourceName)) {
+           s3dbMetadata.resources[resourceName] = {
+             schema: resource.schema ? {
+               attributes: resource.schema.attributes,
+               partitions: resource.schema.partitions,
+               behavior: resource.schema.behavior,
+               timestamps: resource.schema.timestamps
+             } : null,
+             stats: resourceStats[resourceName]
+           };
+         }
+       }
+     } else {
+       for (const [resourceName, stats] of Object.entries(resourceStats)) {
+         s3dbMetadata.resources[resourceName] = { stats };
+       }
+     }
+     for (const stats of Object.values(resourceStats)) {
+       s3dbMetadata.totalRecords += stats.recordCount;
+       s3dbMetadata.totalSize += stats.fileSize;
+     }
+     const s3dbPath = `${outputDir}/s3db.json`;
+     await writeFile(s3dbPath, JSON.stringify(s3dbMetadata, null, 2), "utf-8");
+     return {
+       manifest: s3dbPath,
+       files: exportedFiles,
+       stats: s3dbMetadata,
+       resourceCount: resourceMap.size,
+       totalRecords: s3dbMetadata.totalRecords,
+       totalSize: s3dbMetadata.totalSize
+     };
+   }
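+   // Example (illustrative; `db` is an optional s3db Database instance used only
+   // to enrich s3db.json with resource schemas):
+   //   const manifest = await client.exportBackup("./backup", { database: db });
+   //   console.log(manifest.totalRecords, "records across", manifest.resourceCount, "resources");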
+   /**
+    * Import from BackupPlugin-compatible format
+    * Loads data from s3db.json + JSONL files created by BackupPlugin or exportBackup()
+    *
+    * @param {string} backupDir - Backup directory path containing s3db.json
+    * @param {Object} options - Import options
+    * @param {Array<string>} options.resources - Resource names to import (default: all)
+    * @param {boolean} options.clear - Clear existing data first (default: false)
+    * @param {Object} options.database - Database instance to recreate schemas
+    * @returns {Promise<Object>} Import stats
+    */
+   async importBackup(backupDir, options = {}) {
+     const { readFile, readdir } = await import('fs/promises');
+     const zlib = await import('zlib');
+     const { promisify } = await import('util');
+     const gunzip = promisify(zlib.gunzip);
+     if (options.clear) {
+       this.clear();
+     }
+     const s3dbPath = `${backupDir}/s3db.json`;
+     const s3dbContent = await readFile(s3dbPath, "utf-8");
+     const metadata = JSON.parse(s3dbContent);
+     const database = options.database;
+     const resourceFilter = options.resources;
+     const importStats = {
+       resourcesImported: 0,
+       recordsImported: 0,
+       errors: []
+     };
+     if (database && metadata.resources) {
+       for (const [resourceName, resourceMeta] of Object.entries(metadata.resources)) {
+         if (resourceFilter && !resourceFilter.includes(resourceName)) continue;
+         if (resourceMeta.schema) {
+           try {
+             await database.createResource({
+               name: resourceName,
+               ...resourceMeta.schema
+             });
+           } catch (error) {
+             // Best-effort: the resource may already exist in the target database
+           }
+         }
+       }
+     }
+     const files = await readdir(backupDir);
+     for (const file of files) {
+       if (!file.endsWith(".jsonl") && !file.endsWith(".jsonl.gz")) continue;
+       const resourceName = file.replace(/\.jsonl(\.gz)?$/, "");
+       if (resourceFilter && !resourceFilter.includes(resourceName)) continue;
+       const filePath = `${backupDir}/${file}`;
+       let content = await readFile(filePath);
+       if (file.endsWith(".gz")) {
+         content = await gunzip(content);
+       }
+       const jsonl = content.toString("utf-8");
+       const lines = jsonl.split("\n").filter((line) => line.trim());
+       for (const line of lines) {
+         try {
+           const record = JSON.parse(line);
+           const id = record.id || record._id || `imported_${Date.now()}_${Math.random()}`;
+           const { _body, id: _, _id: __, ...metadata2 } = record;
+           await this.putObject({
+             key: `resource=${resourceName}/id=${id}`,
+             metadata: metadata2,
+             body: _body ? Buffer.from(_body) : void 0
+           });
+           importStats.recordsImported++;
+         } catch (error) {
+           importStats.errors.push({
+             resource: resourceName,
+             error: error.message,
+             line
+           });
+         }
+       }
+       importStats.resourcesImported++;
+     }
+     return importStats;
+   }
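+   // Example (illustrative): replay a directory written by exportBackup() or
+   // BackupPlugin, replacing the current contents first.
+   //   const stats = await client.importBackup("./backup", { clear: true, database: db });
+   //   if (stats.errors.length > 0) console.warn(stats.errors.length, "records failed to import");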
+   /**
+    * Get storage statistics
+    */
+   getStats() {
+     return this.storage.getStats();
+   }
+   /**
+    * Clear all objects
+    */
+   clear() {
+     this.storage.clear();
+   }
+ }
+
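Together, exportBackup() and importBackup() give a file-based round trip between any two clients that implement this interface. A minimal sketch, assuming `source` and `target` are already-constructed client instances (construction options appear elsewhere in this diff) and `migrateViaBackup` is a hypothetical helper name:

    async function migrateViaBackup(source, target, dir = "./backup") {
      // Dump s3db.json plus one JSONL file per resource from the source client.
      const manifest = await source.exportBackup(dir, { compress: true });
      // Replay the same directory into the target, clearing its current contents.
      const stats = await target.importBackup(dir, { clear: true });
      if (stats.errors.length > 0) {
        throw new Error(`migration incomplete: ${stats.errors.length} record(s) failed`);
      }
      return { exported: manifest.totalRecords, imported: stats.recordsImported };
    }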
  function mapFieldTypeToTypeScript(fieldType) {
    const baseType = fieldType.split("|")[0].trim();
    const typeMap = {
@@ -38840,7 +41845,7 @@ exports.BigqueryReplicator = BigqueryReplicator;
  exports.CONSUMER_DRIVERS = CONSUMER_DRIVERS;
  exports.Cache = Cache;
  exports.CachePlugin = CachePlugin;
- exports.Client = Client;
+ exports.Client = S3Client;
  exports.ConnectionString = ConnectionString;
  exports.ConnectionStringError = ConnectionStringError;
  exports.CostsPlugin = CostsPlugin;
@@ -38858,7 +41863,10 @@ exports.FilesystemCache = FilesystemCache;
  exports.FullTextPlugin = FullTextPlugin;
  exports.GeoPlugin = GeoPlugin;
  exports.InvalidResourceItem = InvalidResourceItem;
+ exports.MLPlugin = MLPlugin;
  exports.MemoryCache = MemoryCache;
+ exports.MemoryClient = MemoryClient;
+ exports.MemoryStorage = MemoryStorage;
  exports.MetadataLimitError = MetadataLimitError;
  exports.MetricsPlugin = MetricsPlugin;
  exports.MissingMetadata = MissingMetadata;
@@ -38892,6 +41900,7 @@ exports.ResourceReader = ResourceReader;
  exports.ResourceWriter = ResourceWriter;
  exports.S3BackupDriver = S3BackupDriver;
  exports.S3Cache = S3Cache;
+ exports.S3Client = S3Client;
  exports.S3QueuePlugin = S3QueuePlugin;
  exports.S3db = Database;
  exports.S3dbError = S3dbError;
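The export hunks above capture the renames in 13.0.0: `Client` now aliases the new `S3Client`, and `MLPlugin`, `MemoryClient`, and `MemoryStorage` are exported for the first time. A minimal CJS sketch of the new surface (illustrative only):

    const { Client, S3Client, MemoryClient, MemoryStorage } = require("s3db.js");

    // `Client` and `S3Client` resolve to the same constructor in 13.0.0.
    console.log(Client === S3Client); // true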