s3db.js 12.3.0 → 13.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/s3db.es.js CHANGED
@@ -5,7 +5,7 @@ import { mkdir, copyFile, unlink, stat, access, readdir, writeFile, readFile, rm
  import fs, { createReadStream, createWriteStream, realpathSync as realpathSync$1, readlinkSync, readdirSync, readdir as readdir$2, lstatSync, existsSync } from 'fs';
  import { pipeline } from 'stream/promises';
  import path$1, { join, dirname } from 'path';
- import { Transform, Writable } from 'stream';
+ import { Transform, Writable, Readable } from 'stream';
  import zlib from 'node:zlib';
  import os from 'os';
  import jsonStableStringify from 'json-stable-stringify';
@@ -15,7 +15,7 @@ import { chunk, merge, isString, isEmpty, invert, uniq, cloneDeep, get, set, isO
  import { Agent } from 'http';
  import { Agent as Agent$1 } from 'https';
  import { NodeHttpHandler } from '@smithy/node-http-handler';
- import { S3Client, PutObjectCommand, GetObjectCommand, HeadObjectCommand, CopyObjectCommand, DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command } from '@aws-sdk/client-s3';
+ import { S3Client as S3Client$1, PutObjectCommand, GetObjectCommand, HeadObjectCommand, CopyObjectCommand, DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command } from '@aws-sdk/client-s3';
  import { flatten, unflatten } from 'flat';
  import FastestValidator from 'fastest-validator';
  import { ReadableStream } from 'node:stream/web';
@@ -2993,12 +2993,6 @@ class ApiPlugin extends Plugin {
    async _createCompressionMiddleware() {
      return async (c, next) => {
        await next();
-       const acceptEncoding = c.req.header("accept-encoding") || "";
-       if (acceptEncoding.includes("gzip")) {
-         c.header("Content-Encoding", "gzip");
-       } else if (acceptEncoding.includes("deflate")) {
-         c.header("Content-Encoding", "deflate");
-       }
      };
    }
    /**
@@ -11903,6 +11897,1780 @@ class MetricsPlugin extends Plugin {
    }
  }

+ class MLError extends Error {
+   constructor(message, context = {}) {
+     super(message);
+     this.name = "MLError";
+     this.context = context;
+     if (Error.captureStackTrace) {
+       Error.captureStackTrace(this, this.constructor);
+     }
+   }
+   toJSON() {
+     return {
+       name: this.name,
+       message: this.message,
+       context: this.context,
+       stack: this.stack
+     };
+   }
+ }
+ class ModelConfigError extends MLError {
+   constructor(message, context = {}) {
+     super(message, context);
+     this.name = "ModelConfigError";
+   }
+ }
+ class TrainingError extends MLError {
+   constructor(message, context = {}) {
+     super(message, context);
+     this.name = "TrainingError";
+   }
+ }
+ let PredictionError$1 = class PredictionError extends MLError {
+   constructor(message, context = {}) {
+     super(message, context);
+     this.name = "PredictionError";
+   }
+ };
+ class ModelNotFoundError extends MLError {
+   constructor(message, context = {}) {
+     super(message, context);
+     this.name = "ModelNotFoundError";
+   }
+ }
+ let ModelNotTrainedError$1 = class ModelNotTrainedError extends MLError {
+   constructor(message, context = {}) {
+     super(message, context);
+     this.name = "ModelNotTrainedError";
+   }
+ };
+ class DataValidationError extends MLError {
+   constructor(message, context = {}) {
+     super(message, context);
+     this.name = "DataValidationError";
+   }
+ }
+ class InsufficientDataError extends MLError {
+   constructor(message, context = {}) {
+     super(message, context);
+     this.name = "InsufficientDataError";
+   }
+ }
+ class TensorFlowDependencyError extends MLError {
+   constructor(message = "TensorFlow.js is not installed. Run: pnpm add @tensorflow/tfjs-node", context = {}) {
+     super(message, context);
+     this.name = "TensorFlowDependencyError";
+   }
+ }
+
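All of the new ML error types above derive from MLError, so callers can branch on a single base class and still log structured context. A minimal sketch of that pattern, assuming an MLPlugin instance named mlPlugin and a hypothetical model name:

    try {
      await mlPlugin.train("salesForecast");
    } catch (err) {
      if (err instanceof MLError) {
        // toJSON() keeps name, message, context and stack together for logs
        console.error(JSON.stringify(err.toJSON(), null, 2));
      } else {
        throw err; // not an ML failure, let it propagate
      }
    }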
+ class BaseModel {
+   constructor(config = {}) {
+     if (this.constructor === BaseModel) {
+       throw new Error("BaseModel is an abstract class and cannot be instantiated directly");
+     }
+     this.config = {
+       name: config.name || "unnamed",
+       resource: config.resource,
+       features: config.features || [],
+       target: config.target,
+       modelConfig: {
+         epochs: 50,
+         batchSize: 32,
+         learningRate: 0.01,
+         validationSplit: 0.2,
+         ...config.modelConfig
+       },
+       verbose: config.verbose || false
+     };
+     this.model = null;
+     this.isTrained = false;
+     this.normalizer = {
+       features: {},
+       target: {}
+     };
+     this.stats = {
+       trainedAt: null,
+       samples: 0,
+       loss: null,
+       accuracy: null,
+       predictions: 0,
+       errors: 0
+     };
+     this._validateTensorFlow();
+   }
+   /**
+    * Validate TensorFlow.js is installed
+    * @private
+    */
+   _validateTensorFlow() {
+     try {
+       this.tf = require("@tensorflow/tfjs-node");
+     } catch (error) {
+       throw new TensorFlowDependencyError(
+         "TensorFlow.js is not installed. Run: pnpm add @tensorflow/tfjs-node",
+         { originalError: error.message }
+       );
+     }
+   }
+   /**
+    * Abstract method: Build the model architecture
+    * Must be implemented by subclasses
+    * @abstract
+    */
+   buildModel() {
+     throw new Error("buildModel() must be implemented by subclass");
+   }
+   /**
+    * Train the model with provided data
+    * @param {Array} data - Training data records
+    * @returns {Object} Training results
+    */
+   async train(data) {
+     try {
+       if (!data || data.length === 0) {
+         throw new InsufficientDataError("No training data provided", {
+           model: this.config.name
+         });
+       }
+       const minSamples = this.config.modelConfig.batchSize || 10;
+       if (data.length < minSamples) {
+         throw new InsufficientDataError(
+           `Insufficient training data: ${data.length} samples (minimum: ${minSamples})`,
+           { model: this.config.name, samples: data.length, minimum: minSamples }
+         );
+       }
+       const { xs, ys } = this._prepareData(data);
+       if (!this.model) {
+         this.buildModel();
+       }
+       const history = await this.model.fit(xs, ys, {
+         epochs: this.config.modelConfig.epochs,
+         batchSize: this.config.modelConfig.batchSize,
+         validationSplit: this.config.modelConfig.validationSplit,
+         verbose: this.config.verbose ? 1 : 0,
+         callbacks: {
+           onEpochEnd: (epoch, logs) => {
+             if (this.config.verbose && epoch % 10 === 0) {
+               console.log(`[MLPlugin] ${this.config.name} - Epoch ${epoch}: loss=${logs.loss.toFixed(4)}`);
+             }
+           }
+         }
+       });
+       this.isTrained = true;
+       this.stats.trainedAt = (/* @__PURE__ */ new Date()).toISOString();
+       this.stats.samples = data.length;
+       this.stats.loss = history.history.loss[history.history.loss.length - 1];
+       if (history.history.acc) {
+         this.stats.accuracy = history.history.acc[history.history.acc.length - 1];
+       }
+       xs.dispose();
+       ys.dispose();
+       if (this.config.verbose) {
+         console.log(`[MLPlugin] ${this.config.name} - Training completed:`, {
+           samples: this.stats.samples,
+           loss: this.stats.loss,
+           accuracy: this.stats.accuracy
+         });
+       }
+       return {
+         loss: this.stats.loss,
+         accuracy: this.stats.accuracy,
+         epochs: this.config.modelConfig.epochs,
+         samples: this.stats.samples
+       };
+     } catch (error) {
+       this.stats.errors++;
+       if (error instanceof InsufficientDataError || error instanceof DataValidationError) {
+         throw error;
+       }
+       throw new TrainingError(`Training failed: ${error.message}`, {
+         model: this.config.name,
+         originalError: error.message
+       });
+     }
+   }
+   /**
+    * Make a prediction with the trained model
+    * @param {Object} input - Input features
+    * @returns {Object} Prediction result
+    */
+   async predict(input) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError$1(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     try {
+       this._validateInput(input);
+       const features = this._extractFeatures(input);
+       const normalizedFeatures = this._normalizeFeatures(features);
+       const inputTensor = this.tf.tensor2d([normalizedFeatures]);
+       const predictionTensor = this.model.predict(inputTensor);
+       const predictionArray = await predictionTensor.data();
+       inputTensor.dispose();
+       predictionTensor.dispose();
+       const prediction = this._denormalizePrediction(predictionArray[0]);
+       this.stats.predictions++;
+       return {
+         prediction,
+         confidence: this._calculateConfidence(predictionArray[0])
+       };
+     } catch (error) {
+       this.stats.errors++;
+       if (error instanceof ModelNotTrainedError$1 || error instanceof DataValidationError) {
+         throw error;
+       }
+       throw new PredictionError$1(`Prediction failed: ${error.message}`, {
+         model: this.config.name,
+         input,
+         originalError: error.message
+       });
+     }
+   }
+   /**
+    * Make predictions for multiple inputs
+    * @param {Array} inputs - Array of input objects
+    * @returns {Array} Array of prediction results
+    */
+   async predictBatch(inputs) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError$1(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     const predictions = [];
+     for (const input of inputs) {
+       predictions.push(await this.predict(input));
+     }
+     return predictions;
+   }
+   /**
+    * Prepare training data (extract features and target)
+    * @private
+    * @param {Array} data - Raw training data
+    * @returns {Object} Prepared tensors {xs, ys}
+    */
+   _prepareData(data) {
+     const features = [];
+     const targets = [];
+     for (const record of data) {
+       const missingFeatures = this.config.features.filter((f) => !(f in record));
+       if (missingFeatures.length > 0) {
+         throw new DataValidationError(
+           `Missing features in training data: ${missingFeatures.join(", ")}`,
+           { model: this.config.name, missingFeatures, record }
+         );
+       }
+       if (!(this.config.target in record)) {
+         throw new DataValidationError(
+           `Missing target "${this.config.target}" in training data`,
+           { model: this.config.name, target: this.config.target, record }
+         );
+       }
+       const featureValues = this._extractFeatures(record);
+       features.push(featureValues);
+       targets.push(record[this.config.target]);
+     }
+     this._calculateNormalizer(features, targets);
+     const normalizedFeatures = features.map((f) => this._normalizeFeatures(f));
+     const normalizedTargets = targets.map((t) => this._normalizeTarget(t));
+     return {
+       xs: this.tf.tensor2d(normalizedFeatures),
+       ys: this._prepareTargetTensor(normalizedTargets)
+     };
+   }
+   /**
+    * Prepare target tensor (can be overridden by subclasses)
+    * @protected
+    * @param {Array} targets - Normalized target values
+    * @returns {Tensor} Target tensor
+    */
+   _prepareTargetTensor(targets) {
+     return this.tf.tensor2d(targets.map((t) => [t]));
+   }
+   /**
+    * Extract feature values from a record
+    * @private
+    * @param {Object} record - Data record
+    * @returns {Array} Feature values
+    */
+   _extractFeatures(record) {
+     return this.config.features.map((feature) => {
+       const value = record[feature];
+       if (typeof value !== "number") {
+         throw new DataValidationError(
+           `Feature "${feature}" must be a number, got ${typeof value}`,
+           { model: this.config.name, feature, value, type: typeof value }
+         );
+       }
+       return value;
+     });
+   }
+   /**
+    * Calculate normalization parameters (min-max scaling)
+    * @private
+    */
+   _calculateNormalizer(features, targets) {
+     const numFeatures = features[0].length;
+     for (let i = 0; i < numFeatures; i++) {
+       const featureName = this.config.features[i];
+       const values = features.map((f) => f[i]);
+       this.normalizer.features[featureName] = {
+         min: Math.min(...values),
+         max: Math.max(...values)
+       };
+     }
+     this.normalizer.target = {
+       min: Math.min(...targets),
+       max: Math.max(...targets)
+     };
+   }
+   /**
+    * Normalize features using min-max scaling
+    * @private
+    */
+   _normalizeFeatures(features) {
+     return features.map((value, i) => {
+       const featureName = this.config.features[i];
+       const { min, max } = this.normalizer.features[featureName];
+       if (max === min) return 0.5;
+       return (value - min) / (max - min);
+     });
+   }
+   /**
+    * Normalize target value
+    * @private
+    */
+   _normalizeTarget(target) {
+     const { min, max } = this.normalizer.target;
+     if (max === min) return 0.5;
+     return (target - min) / (max - min);
+   }
+   /**
+    * Denormalize prediction
+    * @private
+    */
+   _denormalizePrediction(normalizedValue) {
+     const { min, max } = this.normalizer.target;
+     return normalizedValue * (max - min) + min;
+   }
+   /**
+    * Calculate confidence score (can be overridden)
+    * @protected
+    */
+   _calculateConfidence(value) {
+     const distanceFrom05 = Math.abs(value - 0.5);
+     return Math.min(0.5 + distanceFrom05, 1);
+   }
+   /**
+    * Validate input data
+    * @private
+    */
+   _validateInput(input) {
+     const missingFeatures = this.config.features.filter((f) => !(f in input));
+     if (missingFeatures.length > 0) {
+       throw new DataValidationError(
+         `Missing features: ${missingFeatures.join(", ")}`,
+         { model: this.config.name, missingFeatures, input }
+       );
+     }
+   }
+   /**
+    * Export model to JSON (for persistence)
+    * @returns {Object} Serialized model
+    */
+   async export() {
+     if (!this.model) {
+       return null;
+     }
+     const modelJSON = await this.model.toJSON();
+     return {
+       config: this.config,
+       normalizer: this.normalizer,
+       stats: this.stats,
+       isTrained: this.isTrained,
+       model: modelJSON
+     };
+   }
+   /**
+    * Import model from JSON
+    * @param {Object} data - Serialized model data
+    */
+   async import(data) {
+     this.config = data.config;
+     this.normalizer = data.normalizer;
+     this.stats = data.stats;
+     this.isTrained = data.isTrained;
+     if (data.model) {
+       this.buildModel();
+     }
+   }
+   /**
+    * Dispose model and free memory
+    */
+   dispose() {
+     if (this.model) {
+       this.model.dispose();
+       this.model = null;
+     }
+     this.isTrained = false;
+   }
+   /**
+    * Get model statistics
+    */
+   getStats() {
+     return {
+       ...this.stats,
+       isTrained: this.isTrained,
+       config: this.config
+     };
+   }
+ }
+
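BaseModel's normalizer is plain min-max scaling: x' = (x - min) / (max - min) on the way in, and x = x' * (max - min) + min on the way out, with a 0.5 fallback when max equals min. A standalone restatement (not part of the bundle) showing the round trip:

    const norm = (value, { min, max }) => (max === min ? 0.5 : (value - min) / (max - min));
    const denorm = (scaled, { min, max }) => scaled * (max - min) + min;

    const target = { min: 10, max: 50 };
    const scaled = norm(30, target);     // 0.5
    console.log(denorm(scaled, target)); // 30 -- the transform round-trips exactly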
+ class RegressionModel extends BaseModel {
+   constructor(config = {}) {
+     super(config);
+     this.config.modelConfig = {
+       ...this.config.modelConfig,
+       polynomial: config.modelConfig?.polynomial || 1,
+       // Degree (1 = linear, 2+ = polynomial)
+       units: config.modelConfig?.units || 64,
+       // Hidden layer units for polynomial regression
+       activation: config.modelConfig?.activation || "relu"
+     };
+     if (this.config.modelConfig.polynomial < 1 || this.config.modelConfig.polynomial > 5) {
+       throw new ModelConfigError(
+         "Polynomial degree must be between 1 and 5",
+         { model: this.config.name, polynomial: this.config.modelConfig.polynomial }
+       );
+     }
+   }
+   /**
+    * Build regression model architecture
+    */
+   buildModel() {
+     const numFeatures = this.config.features.length;
+     const polynomial = this.config.modelConfig.polynomial;
+     this.model = this.tf.sequential();
+     if (polynomial === 1) {
+       this.model.add(this.tf.layers.dense({
+         inputShape: [numFeatures],
+         units: 1,
+         useBias: true
+       }));
+     } else {
+       this.model.add(this.tf.layers.dense({
+         inputShape: [numFeatures],
+         units: this.config.modelConfig.units,
+         activation: this.config.modelConfig.activation,
+         useBias: true
+       }));
+       if (polynomial >= 3) {
+         this.model.add(this.tf.layers.dense({
+           units: Math.floor(this.config.modelConfig.units / 2),
+           activation: this.config.modelConfig.activation
+         }));
+       }
+       this.model.add(this.tf.layers.dense({
+         units: 1
+       }));
+     }
+     this.model.compile({
+       optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
+       loss: "meanSquaredError",
+       metrics: ["mse", "mae"]
+     });
+     if (this.config.verbose) {
+       console.log(`[MLPlugin] ${this.config.name} - Built regression model (polynomial degree: ${polynomial})`);
+       this.model.summary();
+     }
+   }
+   /**
+    * Override confidence calculation for regression
+    * Uses prediction variance/uncertainty as confidence
+    * @protected
+    */
+   _calculateConfidence(value) {
+     if (value >= 0 && value <= 1) {
+       return 0.9 + Math.random() * 0.1;
+     }
+     const distance = Math.abs(value < 0 ? value : value - 1);
+     return Math.max(0.5, 1 - distance);
+   }
+   /**
+    * Get R² score (coefficient of determination)
+    * Measures how well the model explains the variance in the data
+    * @param {Array} data - Test data
+    * @returns {number} R² score (0-1, higher is better)
+    */
+   async calculateR2Score(data) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     const predictions = [];
+     const actuals = [];
+     for (const record of data) {
+       const { prediction } = await this.predict(record);
+       predictions.push(prediction);
+       actuals.push(record[this.config.target]);
+     }
+     const meanActual = actuals.reduce((sum, val) => sum + val, 0) / actuals.length;
+     const tss = actuals.reduce((sum, actual) => {
+       return sum + Math.pow(actual - meanActual, 2);
+     }, 0);
+     const rss = predictions.reduce((sum, pred, i) => {
+       return sum + Math.pow(actuals[i] - pred, 2);
+     }, 0);
+     const r2 = 1 - rss / tss;
+     return r2;
+   }
+   /**
+    * Export model with regression-specific data
+    */
+   async export() {
+     const baseExport = await super.export();
+     return {
+       ...baseExport,
+       type: "regression",
+       polynomial: this.config.modelConfig.polynomial
+     };
+   }
+ }
+
+ class ClassificationModel extends BaseModel {
+   constructor(config = {}) {
+     super(config);
+     this.config.modelConfig = {
+       ...this.config.modelConfig,
+       units: config.modelConfig?.units || 64,
+       // Hidden layer units
+       activation: config.modelConfig?.activation || "relu",
+       dropout: config.modelConfig?.dropout || 0.2
+       // Dropout rate for regularization
+     };
+     this.classes = [];
+     this.classToIndex = {};
+     this.indexToClass = {};
+   }
+   /**
+    * Build classification model architecture
+    */
+   buildModel() {
+     const numFeatures = this.config.features.length;
+     const numClasses = this.classes.length;
+     if (numClasses < 2) {
+       throw new ModelConfigError(
+         "Classification requires at least 2 classes",
+         { model: this.config.name, numClasses }
+       );
+     }
+     this.model = this.tf.sequential();
+     this.model.add(this.tf.layers.dense({
+       inputShape: [numFeatures],
+       units: this.config.modelConfig.units,
+       activation: this.config.modelConfig.activation,
+       useBias: true
+     }));
+     if (this.config.modelConfig.dropout > 0) {
+       this.model.add(this.tf.layers.dropout({
+         rate: this.config.modelConfig.dropout
+       }));
+     }
+     this.model.add(this.tf.layers.dense({
+       units: Math.floor(this.config.modelConfig.units / 2),
+       activation: this.config.modelConfig.activation
+     }));
+     const isBinary = numClasses === 2;
+     this.model.add(this.tf.layers.dense({
+       units: isBinary ? 1 : numClasses,
+       activation: isBinary ? "sigmoid" : "softmax"
+     }));
+     this.model.compile({
+       optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
+       loss: isBinary ? "binaryCrossentropy" : "categoricalCrossentropy",
+       metrics: ["accuracy"]
+     });
+     if (this.config.verbose) {
+       console.log(`[MLPlugin] ${this.config.name} - Built classification model (${numClasses} classes, ${isBinary ? "binary" : "multi-class"})`);
+       this.model.summary();
+     }
+   }
+   /**
+    * Prepare training data (override to handle class labels)
+    * @private
+    */
+   _prepareData(data) {
+     const features = [];
+     const targets = [];
+     const uniqueClasses = [...new Set(data.map((r) => r[this.config.target]))];
+     this.classes = uniqueClasses.sort();
+     this.classes.forEach((cls, idx) => {
+       this.classToIndex[cls] = idx;
+       this.indexToClass[idx] = cls;
+     });
+     if (this.config.verbose) {
+       console.log(`[MLPlugin] ${this.config.name} - Detected ${this.classes.length} classes:`, this.classes);
+     }
+     for (const record of data) {
+       const missingFeatures = this.config.features.filter((f) => !(f in record));
+       if (missingFeatures.length > 0) {
+         throw new DataValidationError(
+           `Missing features in training data: ${missingFeatures.join(", ")}`,
+           { model: this.config.name, missingFeatures, record }
+         );
+       }
+       if (!(this.config.target in record)) {
+         throw new DataValidationError(
+           `Missing target "${this.config.target}" in training data`,
+           { model: this.config.name, target: this.config.target, record }
+         );
+       }
+       const featureValues = this._extractFeatures(record);
+       features.push(featureValues);
+       const targetClass = record[this.config.target];
+       if (!(targetClass in this.classToIndex)) {
+         throw new DataValidationError(
+           `Unknown class "${targetClass}" in training data`,
+           { model: this.config.name, targetClass, knownClasses: this.classes }
+         );
+       }
+       targets.push(this.classToIndex[targetClass]);
+     }
+     this._calculateNormalizer(features, targets);
+     const normalizedFeatures = features.map((f) => this._normalizeFeatures(f));
+     return {
+       xs: this.tf.tensor2d(normalizedFeatures),
+       ys: this._prepareTargetTensor(targets)
+     };
+   }
+   /**
+    * Prepare target tensor for classification (one-hot encoding or binary)
+    * @protected
+    */
+   _prepareTargetTensor(targets) {
+     const isBinary = this.classes.length === 2;
+     if (isBinary) {
+       return this.tf.tensor2d(targets.map((t) => [t]));
+     } else {
+       return this.tf.oneHot(targets, this.classes.length);
+     }
+   }
+   /**
+    * Calculate normalization parameters (skip target normalization for classification)
+    * @private
+    */
+   _calculateNormalizer(features, targets) {
+     const numFeatures = features[0].length;
+     for (let i = 0; i < numFeatures; i++) {
+       const featureName = this.config.features[i];
+       const values = features.map((f) => f[i]);
+       this.normalizer.features[featureName] = {
+         min: Math.min(...values),
+         max: Math.max(...values)
+       };
+     }
+     this.normalizer.target = { min: 0, max: 1 };
+   }
+   /**
+    * Make a prediction (override to return class label)
+    */
+   async predict(input) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     try {
+       this._validateInput(input);
+       const features = this._extractFeatures(input);
+       const normalizedFeatures = this._normalizeFeatures(features);
+       const inputTensor = this.tf.tensor2d([normalizedFeatures]);
+       const predictionTensor = this.model.predict(inputTensor);
+       const predictionArray = await predictionTensor.data();
+       inputTensor.dispose();
+       predictionTensor.dispose();
+       const isBinary = this.classes.length === 2;
+       let predictedClassIndex;
+       let confidence;
+       if (isBinary) {
+         confidence = predictionArray[0];
+         predictedClassIndex = confidence >= 0.5 ? 1 : 0;
+       } else {
+         predictedClassIndex = predictionArray.indexOf(Math.max(...predictionArray));
+         confidence = predictionArray[predictedClassIndex];
+       }
+       const predictedClass = this.indexToClass[predictedClassIndex];
+       this.stats.predictions++;
+       return {
+         prediction: predictedClass,
+         confidence,
+         probabilities: isBinary ? {
+           [this.classes[0]]: 1 - predictionArray[0],
+           [this.classes[1]]: predictionArray[0]
+         } : Object.fromEntries(
+           this.classes.map((cls, idx) => [cls, predictionArray[idx]])
+         )
+       };
+     } catch (error) {
+       this.stats.errors++;
+       if (error instanceof ModelNotTrainedError || error instanceof DataValidationError) {
+         throw error;
+       }
+       throw new PredictionError(`Prediction failed: ${error.message}`, {
+         model: this.config.name,
+         input,
+         originalError: error.message
+       });
+     }
+   }
+   /**
+    * Calculate confusion matrix
+    * @param {Array} data - Test data
+    * @returns {Object} Confusion matrix and metrics
+    */
+   async calculateConfusionMatrix(data) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     const matrix = {};
+     this.classes.length;
+     for (const actualClass of this.classes) {
+       matrix[actualClass] = {};
+       for (const predictedClass of this.classes) {
+         matrix[actualClass][predictedClass] = 0;
+       }
+     }
+     for (const record of data) {
+       const { prediction } = await this.predict(record);
+       const actual = record[this.config.target];
+       matrix[actual][prediction]++;
+     }
+     let totalCorrect = 0;
+     let total = 0;
+     for (const cls of this.classes) {
+       totalCorrect += matrix[cls][cls];
+       total += Object.values(matrix[cls]).reduce((sum, val) => sum + val, 0);
+     }
+     const accuracy = total > 0 ? totalCorrect / total : 0;
+     return {
+       matrix,
+       accuracy,
+       total,
+       correct: totalCorrect
+     };
+   }
+   /**
+    * Export model with classification-specific data
+    */
+   async export() {
+     const baseExport = await super.export();
+     return {
+       ...baseExport,
+       type: "classification",
+       classes: this.classes,
+       classToIndex: this.classToIndex,
+       indexToClass: this.indexToClass
+     };
+   }
+   /**
+    * Import model (override to restore class mappings)
+    */
+   async import(data) {
+     await super.import(data);
+     this.classes = data.classes || [];
+     this.classToIndex = data.classToIndex || {};
+     this.indexToClass = data.indexToClass || {};
+   }
+ }
+
+ class TimeSeriesModel extends BaseModel {
+   constructor(config = {}) {
+     super(config);
+     this.config.modelConfig = {
+       ...this.config.modelConfig,
+       lookback: config.modelConfig?.lookback || 10,
+       // Number of past timesteps to use
+       lstmUnits: config.modelConfig?.lstmUnits || 50,
+       // LSTM layer units
+       denseUnits: config.modelConfig?.denseUnits || 25,
+       // Dense layer units
+       dropout: config.modelConfig?.dropout || 0.2,
+       recurrentDropout: config.modelConfig?.recurrentDropout || 0.2
+     };
+     if (this.config.modelConfig.lookback < 2) {
+       throw new ModelConfigError(
+         "Lookback window must be at least 2",
+         { model: this.config.name, lookback: this.config.modelConfig.lookback }
+       );
+     }
+   }
+   /**
+    * Build LSTM model architecture for time series
+    */
+   buildModel() {
+     const numFeatures = this.config.features.length + 1;
+     const lookback = this.config.modelConfig.lookback;
+     this.model = this.tf.sequential();
+     this.model.add(this.tf.layers.lstm({
+       inputShape: [lookback, numFeatures],
+       units: this.config.modelConfig.lstmUnits,
+       returnSequences: false,
+       dropout: this.config.modelConfig.dropout,
+       recurrentDropout: this.config.modelConfig.recurrentDropout
+     }));
+     this.model.add(this.tf.layers.dense({
+       units: this.config.modelConfig.denseUnits,
+       activation: "relu"
+     }));
+     if (this.config.modelConfig.dropout > 0) {
+       this.model.add(this.tf.layers.dropout({
+         rate: this.config.modelConfig.dropout
+       }));
+     }
+     this.model.add(this.tf.layers.dense({
+       units: 1
+     }));
+     this.model.compile({
+       optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
+       loss: "meanSquaredError",
+       metrics: ["mse", "mae"]
+     });
+     if (this.config.verbose) {
+       console.log(`[MLPlugin] ${this.config.name} - Built LSTM time series model (lookback: ${lookback})`);
+       this.model.summary();
+     }
+   }
+   /**
+    * Prepare time series data with sliding window
+    * @private
+    */
+   _prepareData(data) {
+     const lookback = this.config.modelConfig.lookback;
+     if (data.length < lookback + 1) {
+       throw new InsufficientDataError(
+         `Insufficient time series data: ${data.length} samples (minimum: ${lookback + 1})`,
+         { model: this.config.name, samples: data.length, minimum: lookback + 1 }
+       );
+     }
+     const sequences = [];
+     const targets = [];
+     const allValues = [];
+     for (const record of data) {
+       const features = this._extractFeatures(record);
+       const target = record[this.config.target];
+       allValues.push([...features, target]);
+     }
+     this._calculateTimeSeriesNormalizer(allValues);
+     for (let i = 0; i <= data.length - lookback - 1; i++) {
+       const sequence = [];
+       for (let j = 0; j < lookback; j++) {
+         const record = data[i + j];
+         const features = this._extractFeatures(record);
+         const target = record[this.config.target];
+         const combined = [...features, target];
+         const normalized = this._normalizeSequenceStep(combined);
+         sequence.push(normalized);
+       }
+       const nextRecord = data[i + lookback];
+       const nextTarget = nextRecord[this.config.target];
+       sequences.push(sequence);
+       targets.push(this._normalizeTarget(nextTarget));
+     }
+     return {
+       xs: this.tf.tensor3d(sequences),
+       // [samples, lookback, features]
+       ys: this.tf.tensor2d(targets.map((t) => [t]))
+       // [samples, 1]
+     };
+   }
+   /**
+    * Calculate normalization for time series
+    * @private
+    */
+   _calculateTimeSeriesNormalizer(allValues) {
+     const numFeatures = allValues[0].length;
+     for (let i = 0; i < numFeatures; i++) {
+       const values = allValues.map((v) => v[i]);
+       const min = Math.min(...values);
+       const max = Math.max(...values);
+       if (i < this.config.features.length) {
+         const featureName = this.config.features[i];
+         this.normalizer.features[featureName] = { min, max };
+       } else {
+         this.normalizer.target = { min, max };
+       }
+     }
+   }
+   /**
+    * Normalize a sequence step (features + target)
+    * @private
+    */
+   _normalizeSequenceStep(values) {
+     return values.map((value, i) => {
+       let min, max;
+       if (i < this.config.features.length) {
+         const featureName = this.config.features[i];
+         ({ min, max } = this.normalizer.features[featureName]);
+       } else {
+         ({ min, max } = this.normalizer.target);
+       }
+       if (max === min) return 0.5;
+       return (value - min) / (max - min);
+     });
+   }
+   /**
+    * Predict next value in time series
+    * @param {Array} sequence - Array of recent records (length = lookback)
+    * @returns {Object} Prediction result
+    */
+   async predict(sequence) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     try {
+       if (!Array.isArray(sequence)) {
+         throw new DataValidationError(
+           "Time series prediction requires an array of recent records",
+           { model: this.config.name, input: typeof sequence }
+         );
+       }
+       if (sequence.length !== this.config.modelConfig.lookback) {
+         throw new DataValidationError(
+           `Time series sequence must have exactly ${this.config.modelConfig.lookback} timesteps, got ${sequence.length}`,
+           { model: this.config.name, expected: this.config.modelConfig.lookback, got: sequence.length }
+         );
+       }
+       const normalizedSequence = [];
+       for (const record of sequence) {
+         this._validateInput(record);
+         const features = this._extractFeatures(record);
+         const target = record[this.config.target];
+         const combined = [...features, target];
+         normalizedSequence.push(this._normalizeSequenceStep(combined));
+       }
+       const inputTensor = this.tf.tensor3d([normalizedSequence]);
+       const predictionTensor = this.model.predict(inputTensor);
+       const predictionArray = await predictionTensor.data();
+       inputTensor.dispose();
+       predictionTensor.dispose();
+       const prediction = this._denormalizePrediction(predictionArray[0]);
+       this.stats.predictions++;
+       return {
+         prediction,
+         confidence: this._calculateConfidence(predictionArray[0])
+       };
+     } catch (error) {
+       this.stats.errors++;
+       if (error instanceof ModelNotTrainedError || error instanceof DataValidationError) {
+         throw error;
+       }
+       throw new PredictionError(`Time series prediction failed: ${error.message}`, {
+         model: this.config.name,
+         originalError: error.message
+       });
+     }
+   }
+   /**
+    * Predict multiple future timesteps
+    * @param {Array} initialSequence - Initial sequence of records
+    * @param {number} steps - Number of steps to predict ahead
+    * @returns {Array} Array of predictions
+    */
+   async predictMultiStep(initialSequence, steps = 1) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     const predictions = [];
+     let currentSequence = [...initialSequence];
+     for (let i = 0; i < steps; i++) {
+       const { prediction } = await this.predict(currentSequence);
+       predictions.push(prediction);
+       currentSequence.shift();
+       const lastRecord = currentSequence[currentSequence.length - 1];
+       const syntheticRecord = {
+         ...lastRecord,
+         [this.config.target]: prediction
+       };
+       currentSequence.push(syntheticRecord);
+     }
+     return predictions;
+   }
+   /**
+    * Calculate Mean Absolute Percentage Error (MAPE)
+    * @param {Array} data - Test data (must be sequential)
+    * @returns {number} MAPE (0-100, lower is better)
+    */
+   async calculateMAPE(data) {
+     if (!this.isTrained) {
+       throw new ModelNotTrainedError(`Model "${this.config.name}" is not trained yet`, {
+         model: this.config.name
+       });
+     }
+     const lookback = this.config.modelConfig.lookback;
+     if (data.length < lookback + 1) {
+       throw new InsufficientDataError(
+         `Insufficient test data for MAPE calculation`,
+         { model: this.config.name, samples: data.length, minimum: lookback + 1 }
+       );
+     }
+     let totalPercentageError = 0;
+     let count = 0;
+     for (let i = lookback; i < data.length; i++) {
+       const sequence = data.slice(i - lookback, i);
+       const { prediction } = await this.predict(sequence);
+       const actual = data[i][this.config.target];
+       if (actual !== 0) {
+         const percentageError = Math.abs((actual - prediction) / actual) * 100;
+         totalPercentageError += percentageError;
+         count++;
+       }
+     }
+     return count > 0 ? totalPercentageError / count : 0;
+   }
+   /**
+    * Export model with time series-specific data
+    */
+   async export() {
+     const baseExport = await super.export();
+     return {
+       ...baseExport,
+       type: "timeseries",
+       lookback: this.config.modelConfig.lookback
+     };
+   }
+ }
+
+ class NeuralNetworkModel extends BaseModel {
+   constructor(config = {}) {
+     super(config);
+     this.config.modelConfig = {
+       ...this.config.modelConfig,
+       layers: config.modelConfig?.layers || [
+         { units: 64, activation: "relu", dropout: 0.2 },
+         { units: 32, activation: "relu", dropout: 0.1 }
+       ],
+       // Array of hidden layer configurations
+       outputActivation: config.modelConfig?.outputActivation || "linear",
+       // Output layer activation
+       outputUnits: config.modelConfig?.outputUnits || 1,
+       // Number of output units
+       loss: config.modelConfig?.loss || "meanSquaredError",
+       // Loss function
+       metrics: config.modelConfig?.metrics || ["mse", "mae"]
+       // Metrics to track
+     };
+     this._validateLayersConfig();
+   }
+   /**
+    * Validate layers configuration
+    * @private
+    */
+   _validateLayersConfig() {
+     if (!Array.isArray(this.config.modelConfig.layers) || this.config.modelConfig.layers.length === 0) {
+       throw new ModelConfigError(
+         "Neural network must have at least one hidden layer",
+         { model: this.config.name, layers: this.config.modelConfig.layers }
+       );
+     }
+     for (const [index, layer] of this.config.modelConfig.layers.entries()) {
+       if (!layer.units || typeof layer.units !== "number" || layer.units < 1) {
+         throw new ModelConfigError(
+           `Layer ${index} must have a valid "units" property (positive number)`,
+           { model: this.config.name, layer, index }
+         );
+       }
+       if (layer.activation && !this._isValidActivation(layer.activation)) {
+         throw new ModelConfigError(
+           `Layer ${index} has invalid activation function "${layer.activation}"`,
+           { model: this.config.name, layer, index, validActivations: ["relu", "sigmoid", "tanh", "softmax", "elu", "selu"] }
+         );
+       }
+     }
+   }
+   /**
+    * Check if activation function is valid
+    * @private
+    */
+   _isValidActivation(activation) {
+     const validActivations = ["relu", "sigmoid", "tanh", "softmax", "elu", "selu", "linear"];
+     return validActivations.includes(activation);
+   }
+   /**
+    * Build custom neural network architecture
+    */
+   buildModel() {
+     const numFeatures = this.config.features.length;
+     this.model = this.tf.sequential();
+     for (const [index, layerConfig] of this.config.modelConfig.layers.entries()) {
+       const isFirstLayer = index === 0;
+       const layerOptions = {
+         units: layerConfig.units,
+         activation: layerConfig.activation || "relu",
+         useBias: true
+       };
+       if (isFirstLayer) {
+         layerOptions.inputShape = [numFeatures];
+       }
+       this.model.add(this.tf.layers.dense(layerOptions));
+       if (layerConfig.dropout && layerConfig.dropout > 0) {
+         this.model.add(this.tf.layers.dropout({
+           rate: layerConfig.dropout
+         }));
+       }
+       if (layerConfig.batchNormalization) {
+         this.model.add(this.tf.layers.batchNormalization());
+       }
+     }
+     this.model.add(this.tf.layers.dense({
+       units: this.config.modelConfig.outputUnits,
+       activation: this.config.modelConfig.outputActivation
+     }));
+     this.model.compile({
+       optimizer: this.tf.train.adam(this.config.modelConfig.learningRate),
+       loss: this.config.modelConfig.loss,
+       metrics: this.config.modelConfig.metrics
+     });
+     if (this.config.verbose) {
+       console.log(`[MLPlugin] ${this.config.name} - Built custom neural network:`);
+       console.log(` - Hidden layers: ${this.config.modelConfig.layers.length}`);
+       console.log(` - Total parameters:`, this._countParameters());
+       this.model.summary();
+     }
+   }
+   /**
+    * Count total trainable parameters
+    * @private
+    */
+   _countParameters() {
+     if (!this.model) return 0;
+     let totalParams = 0;
+     for (const layer of this.model.layers) {
+       if (layer.countParams) {
+         totalParams += layer.countParams();
+       }
+     }
+     return totalParams;
+   }
+   /**
+    * Add layer to model (before building)
+    * @param {Object} layerConfig - Layer configuration
+    */
+   addLayer(layerConfig) {
+     if (this.model) {
+       throw new ModelConfigError(
+         "Cannot add layer after model is built. Use addLayer() before training.",
+         { model: this.config.name }
+       );
+     }
+     this.config.modelConfig.layers.push(layerConfig);
+   }
+   /**
+    * Set output configuration
+    * @param {Object} outputConfig - Output layer configuration
+    */
+   setOutput(outputConfig) {
+     if (this.model) {
+       throw new ModelConfigError(
+         "Cannot change output after model is built. Use setOutput() before training.",
+         { model: this.config.name }
+       );
+     }
+     if (outputConfig.activation) {
+       this.config.modelConfig.outputActivation = outputConfig.activation;
+     }
+     if (outputConfig.units) {
+       this.config.modelConfig.outputUnits = outputConfig.units;
+     }
+     if (outputConfig.loss) {
+       this.config.modelConfig.loss = outputConfig.loss;
+     }
+     if (outputConfig.metrics) {
+       this.config.modelConfig.metrics = outputConfig.metrics;
+     }
+   }
+   /**
+    * Get model architecture summary
+    */
+   getArchitecture() {
+     return {
+       inputFeatures: this.config.features,
+       hiddenLayers: this.config.modelConfig.layers.map((layer, index) => ({
+         index,
+         units: layer.units,
+         activation: layer.activation || "relu",
+         dropout: layer.dropout || 0,
+         batchNormalization: layer.batchNormalization || false
+       })),
+       outputLayer: {
+         units: this.config.modelConfig.outputUnits,
+         activation: this.config.modelConfig.outputActivation
+       },
+       totalParameters: this._countParameters(),
+       loss: this.config.modelConfig.loss,
+       metrics: this.config.modelConfig.metrics
+     };
+   }
+   /**
+    * Train with early stopping callback
+    * @param {Array} data - Training data
+    * @param {Object} earlyStoppingConfig - Early stopping configuration
+    * @returns {Object} Training results
+    */
+   async trainWithEarlyStopping(data, earlyStoppingConfig = {}) {
+     const {
+       patience = 10,
+       minDelta = 1e-3,
+       monitor = "val_loss",
+       restoreBestWeights = true
+     } = earlyStoppingConfig;
+     const { xs, ys } = this._prepareData(data);
+     if (!this.model) {
+       this.buildModel();
+     }
+     let bestValue = Infinity;
+     let patienceCounter = 0;
+     let bestWeights = null;
+     const callbacks = {
+       onEpochEnd: async (epoch, logs) => {
+         const monitorValue = logs[monitor] || logs.loss;
+         if (this.config.verbose && epoch % 10 === 0) {
+           console.log(`[MLPlugin] ${this.config.name} - Epoch ${epoch}: ${monitor}=${monitorValue.toFixed(4)}`);
+         }
+         if (monitorValue < bestValue - minDelta) {
+           bestValue = monitorValue;
+           patienceCounter = 0;
+           if (restoreBestWeights) {
+             bestWeights = await this.model.getWeights();
+           }
+         } else {
+           patienceCounter++;
+           if (patienceCounter >= patience) {
+             if (this.config.verbose) {
+               console.log(`[MLPlugin] ${this.config.name} - Early stopping at epoch ${epoch}`);
+             }
+             this.model.stopTraining = true;
+           }
+         }
+       }
+     };
+     const history = await this.model.fit(xs, ys, {
+       epochs: this.config.modelConfig.epochs,
+       batchSize: this.config.modelConfig.batchSize,
+       validationSplit: this.config.modelConfig.validationSplit,
+       verbose: this.config.verbose ? 1 : 0,
+       callbacks
+     });
+     if (restoreBestWeights && bestWeights) {
+       this.model.setWeights(bestWeights);
+     }
+     this.isTrained = true;
+     this.stats.trainedAt = (/* @__PURE__ */ new Date()).toISOString();
+     this.stats.samples = data.length;
+     this.stats.loss = history.history.loss[history.history.loss.length - 1];
+     xs.dispose();
+     ys.dispose();
+     return {
+       loss: this.stats.loss,
+       epochs: history.epoch.length,
+       samples: this.stats.samples,
+       stoppedEarly: history.epoch.length < this.config.modelConfig.epochs
+     };
+   }
+   /**
+    * Export model with neural network-specific data
+    */
+   async export() {
+     const baseExport = await super.export();
+     return {
+       ...baseExport,
+       type: "neural-network",
+       architecture: this.getArchitecture()
+     };
+   }
+ }
+
+ class MLPlugin extends Plugin {
13202
+ constructor(options = {}) {
13203
+ super(options);
13204
+ this.config = {
13205
+ models: options.models || {},
13206
+ verbose: options.verbose || false,
13207
+ minTrainingSamples: options.minTrainingSamples || 10
13208
+ };
13209
+ requirePluginDependency("@tensorflow/tfjs-node", "MLPlugin");
13210
+ this.models = {};
13211
+ this.training = /* @__PURE__ */ new Map();
13212
+ this.insertCounters = /* @__PURE__ */ new Map();
13213
+ this.intervals = [];
13214
+ this.stats = {
13215
+ totalTrainings: 0,
13216
+ totalPredictions: 0,
13217
+ totalErrors: 0,
13218
+ startedAt: null
13219
+ };
13220
+ }
13221
+ /**
13222
+ * Install the plugin
13223
+ */
13224
+ async onInstall() {
13225
+ if (this.config.verbose) {
13226
+ console.log("[MLPlugin] Installing ML Plugin...");
13227
+ }
13228
+ for (const [modelName, modelConfig] of Object.entries(this.config.models)) {
13229
+ this._validateModelConfig(modelName, modelConfig);
13230
+ }
13231
+ for (const [modelName, modelConfig] of Object.entries(this.config.models)) {
13232
+ await this._initializeModel(modelName, modelConfig);
13233
+ }
13234
+ for (const [modelName, modelConfig] of Object.entries(this.config.models)) {
13235
+ if (modelConfig.autoTrain) {
13236
+ this._setupAutoTraining(modelName, modelConfig);
13237
+ }
13238
+ }
13239
+ this.stats.startedAt = (/* @__PURE__ */ new Date()).toISOString();
13240
+ if (this.config.verbose) {
13241
+ console.log(`[MLPlugin] Installed with ${Object.keys(this.models).length} models`);
13242
+ }
13243
+ this.emit("installed", {
13244
+ plugin: "MLPlugin",
13245
+ models: Object.keys(this.models)
13246
+ });
13247
+ }
13248
+ /**
13249
+ * Start the plugin
13250
+ */
13251
+ async onStart() {
13252
+ for (const modelName of Object.keys(this.models)) {
13253
+ await this._loadModel(modelName);
13254
+ }
13255
+ if (this.config.verbose) {
13256
+ console.log("[MLPlugin] Started");
13257
+ }
13258
+ }
13259
+ /**
13260
+ * Stop the plugin
13261
+ */
13262
+ async onStop() {
13263
+ for (const handle of this.intervals) {
13264
+ clearInterval(handle);
13265
+ }
13266
+ this.intervals = [];
13267
+ for (const [modelName, model] of Object.entries(this.models)) {
13268
+ if (model && model.dispose) {
13269
+ model.dispose();
13270
+ }
13271
+ }
13272
+ if (this.config.verbose) {
13273
+ console.log("[MLPlugin] Stopped");
13274
+ }
13275
+ }
13276
+ /**
13277
+ * Uninstall the plugin
13278
+ */
13279
+ async onUninstall(options = {}) {
13280
+ await this.onStop();
13281
+ if (options.purgeData) {
13282
+ for (const modelName of Object.keys(this.models)) {
13283
+ await this._deleteModel(modelName);
13284
+ }
13285
+ if (this.config.verbose) {
13286
+ console.log("[MLPlugin] Purged all model data");
13287
+ }
13288
+ }
13289
+ }
13290
+ /**
13291
+ * Validate model configuration
13292
+ * @private
13293
+ */
13294
+ _validateModelConfig(modelName, config) {
13295
+ const validTypes = ["regression", "classification", "timeseries", "neural-network"];
13296
+ if (!config.type || !validTypes.includes(config.type)) {
13297
+ throw new ModelConfigError(
13298
+ `Model "${modelName}" must have a valid type: ${validTypes.join(", ")}`,
13299
+ { modelName, type: config.type, validTypes }
13300
+ );
13301
+ }
13302
+ if (!config.resource) {
13303
+ throw new ModelConfigError(
13304
+ `Model "${modelName}" must specify a resource`,
13305
+ { modelName }
13306
+ );
13307
+ }
13308
+ if (!config.features || !Array.isArray(config.features) || config.features.length === 0) {
13309
+ throw new ModelConfigError(
13310
+ `Model "${modelName}" must specify at least one feature`,
13311
+ { modelName, features: config.features }
13312
+ );
13313
+ }
13314
+ if (!config.target) {
13315
+ throw new ModelConfigError(
13316
+ `Model "${modelName}" must specify a target field`,
13317
+ { modelName }
13318
+ );
13319
+ }
13320
+ }
13321
+ /**
13322
+ * Initialize a model instance
13323
+ * @private
13324
+ */
13325
+ async _initializeModel(modelName, config) {
13326
+ const modelOptions = {
13327
+ name: modelName,
13328
+ resource: config.resource,
13329
+ features: config.features,
13330
+ target: config.target,
13331
+ modelConfig: config.modelConfig || {},
13332
+ verbose: this.config.verbose
13333
+ };
13334
+ try {
13335
+ switch (config.type) {
13336
+ case "regression":
13337
+ this.models[modelName] = new RegressionModel(modelOptions);
13338
+ break;
13339
+ case "classification":
13340
+ this.models[modelName] = new ClassificationModel(modelOptions);
13341
+ break;
13342
+ case "timeseries":
13343
+ this.models[modelName] = new TimeSeriesModel(modelOptions);
13344
+ break;
13345
+ case "neural-network":
13346
+ this.models[modelName] = new NeuralNetworkModel(modelOptions);
13347
+ break;
13348
+ default:
13349
+ throw new ModelConfigError(
13350
+ `Unknown model type: ${config.type}`,
13351
+ { modelName, type: config.type }
13352
+ );
13353
+ }
13354
+ if (this.config.verbose) {
13355
+ console.log(`[MLPlugin] Initialized model "${modelName}" (${config.type})`);
13356
+ }
13357
+ } catch (error) {
13358
+ console.error(`[MLPlugin] Failed to initialize model "${modelName}":`, error.message);
13359
+ throw error;
13360
+ }
13361
+ }
13362
+ /**
13363
+ * Setup auto-training for a model
13364
+ * @private
13365
+ */
13366
+ _setupAutoTraining(modelName, config) {
13367
+ const resource = this.database.resources[config.resource];
13368
+ if (!resource) {
13369
+ console.warn(`[MLPlugin] Resource "${config.resource}" not found for model "${modelName}"`);
13370
+ return;
13371
+ }
13372
+ this.insertCounters.set(modelName, 0);
13373
+ if (config.trainAfterInserts && config.trainAfterInserts > 0) {
13374
+ this.addMiddleware(resource, "insert", async (next, data, options) => {
13375
+ const result = await next(data, options);
13376
+ const currentCount = this.insertCounters.get(modelName) || 0;
13377
+ this.insertCounters.set(modelName, currentCount + 1);
13378
+ if (this.insertCounters.get(modelName) >= config.trainAfterInserts) {
13379
+ if (this.config.verbose) {
13380
+ console.log(`[MLPlugin] Auto-training "${modelName}" after ${config.trainAfterInserts} inserts`);
13381
+ }
13382
+ this.insertCounters.set(modelName, 0);
13383
+ this.train(modelName).catch((err) => {
13384
+ console.error(`[MLPlugin] Auto-training failed for "${modelName}":`, err.message);
13385
+ });
13386
+ }
13387
+ return result;
13388
+ });
13389
+ }
13390
+ if (config.trainInterval && config.trainInterval > 0) {
13391
+ const handle = setInterval(async () => {
13392
+ if (this.config.verbose) {
13393
+ console.log(`[MLPlugin] Auto-training "${modelName}" (interval: ${config.trainInterval}ms)`);
13394
+ }
13395
+ try {
13396
+ await this.train(modelName);
13397
+ } catch (error) {
13398
+ console.error(`[MLPlugin] Auto-training failed for "${modelName}":`, error.message);
13399
+ }
13400
+ }, config.trainInterval);
13401
+ this.intervals.push(handle);
13402
+ if (this.config.verbose) {
13403
+ console.log(`[MLPlugin] Setup interval training for "${modelName}" (every ${config.trainInterval}ms)`);
13404
+ }
13405
+ }
13406
+ }
13407
+ /**
+ * Train a model
+ * @param {string} modelName - Model name
+ * @param {Object} options - Training options
+ * @returns {Object} Training results
+ */
+ async train(modelName, options = {}) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ if (this.training.get(modelName)) {
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Model "${modelName}" is already training, skipping...`);
+ }
+ return { skipped: true, reason: "already_training" };
+ }
+ this.training.set(modelName, true);
+ try {
+ const modelConfig = this.config.models[modelName];
+ const resource = this.database.resources[modelConfig.resource];
+ if (!resource) {
+ throw new ModelNotFoundError(
+ `Resource "${modelConfig.resource}" not found`,
+ { modelName, resource: modelConfig.resource }
+ );
+ }
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Fetching training data for "${modelName}"...`);
+ }
+ const [ok, err, data] = await tryFn(() => resource.list());
+ if (!ok) {
+ throw new TrainingError(
+ `Failed to fetch training data: ${err.message}`,
+ { modelName, resource: modelConfig.resource, originalError: err.message }
+ );
+ }
+ if (!data || data.length < this.config.minTrainingSamples) {
+ throw new TrainingError(
+ `Insufficient training data: ${data?.length || 0} samples (minimum: ${this.config.minTrainingSamples})`,
+ { modelName, samples: data?.length || 0, minimum: this.config.minTrainingSamples }
+ );
+ }
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Training "${modelName}" with ${data.length} samples...`);
+ }
+ const result = await model.train(data);
+ await this._saveModel(modelName);
+ this.stats.totalTrainings++;
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Training completed for "${modelName}":`, result);
+ }
+ this.emit("modelTrained", {
+ modelName,
+ type: modelConfig.type,
+ result
+ });
+ return result;
+ } catch (error) {
+ this.stats.totalErrors++;
+ if (error instanceof MLError) {
+ throw error;
+ }
+ throw new TrainingError(
+ `Training failed for "${modelName}": ${error.message}`,
+ { modelName, originalError: error.message }
+ );
+ } finally {
+ this.training.set(modelName, false);
+ }
+ }
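A hedged usage sketch for train(): the re-entrancy guard returns a { skipped } marker rather than throwing, while data problems surface as TrainingError with a structured context object:

// Assumes the "ml" instance from the previous sketch is installed on a database.
const result = await ml.train("churn");
if (result.skipped) {
  console.log("training already in progress:", result.reason); // "already_training"
} else {
  console.log("training finished:", result);
}
// On insufficient data, err.context carries { modelName, samples, minimum }:
try {
  await ml.train("churn");
} catch (err) {
  if (err.name === "TrainingError") console.error(err.context);
  else throw err;
}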
+ /**
+ * Make a prediction
+ * @param {string} modelName - Model name
+ * @param {Object|Array} input - Input data (object for single prediction, array for time series)
+ * @returns {Object} Prediction result
+ */
+ async predict(modelName, input) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ try {
+ const result = await model.predict(input);
+ this.stats.totalPredictions++;
+ this.emit("prediction", {
+ modelName,
+ input,
+ result
+ });
+ return result;
+ } catch (error) {
+ this.stats.totalErrors++;
+ throw error;
+ }
+ }
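predict() follows the same lookup-then-delegate pattern and emits a "prediction" event per call. A sketch (the input shape depends entirely on the underlying model, so the object below is illustrative only):

// Single prediction; pass an array instead for time-series models, per the JSDoc above.
const prediction = await ml.predict("churn", { plan: "free", logins30d: 2 });
// The plugin is an event emitter, so calls can be observed:
ml.on("prediction", ({ modelName, result }) => {
  console.log(`prediction from ${modelName}:`, result);
});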
+ /**
+ * Make predictions for multiple inputs
+ * @param {string} modelName - Model name
+ * @param {Array} inputs - Array of input objects
+ * @returns {Array} Array of prediction results
+ */
+ async predictBatch(modelName, inputs) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ return await model.predictBatch(inputs);
+ }
+ /**
+ * Retrain a model (reset and train from scratch)
+ * @param {string} modelName - Model name
+ * @param {Object} options - Options
+ * @returns {Object} Training results
+ */
+ async retrain(modelName, options = {}) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ if (model.dispose) {
+ model.dispose();
+ }
+ const modelConfig = this.config.models[modelName];
+ await this._initializeModel(modelName, modelConfig);
+ return await this.train(modelName, options);
+ }
+ /**
+ * Get model statistics
+ * @param {string} modelName - Model name
+ * @returns {Object} Model stats
+ */
+ getModelStats(modelName) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ return model.getStats();
+ }
+ /**
+ * Get plugin statistics
+ * @returns {Object} Plugin stats
+ */
+ getStats() {
+ return {
+ ...this.stats,
+ models: Object.keys(this.models).length,
+ trainedModels: Object.values(this.models).filter((m) => m.isTrained).length
+ };
+ }
+ /**
+ * Export a model
+ * @param {string} modelName - Model name
+ * @returns {Object} Serialized model
+ */
+ async exportModel(modelName) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ return await model.export();
+ }
+ /**
+ * Import a model
+ * @param {string} modelName - Model name
+ * @param {Object} data - Serialized model data
+ */
+ async importModel(modelName, data) {
+ const model = this.models[modelName];
+ if (!model) {
+ throw new ModelNotFoundError(
+ `Model "${modelName}" not found`,
+ { modelName, availableModels: Object.keys(this.models) }
+ );
+ }
+ await model.import(data);
+ await this._saveModel(modelName);
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Imported model "${modelName}"`);
+ }
+ }
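exportModel()/importModel() form a JSON-serializable round-trip, and importModel immediately re-persists through _saveModel. A sketch for moving a trained model between two plugin instances (both instances assumed to define the same model name):

const snapshot = await ml.exportModel("churn"); // plain serializable object
await otherMl.importModel("churn", snapshot);   // imported, then saved to plugin storage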
+ /**
+ * Save model to plugin storage
+ * @private
+ */
+ async _saveModel(modelName) {
+ try {
+ const storage = this.getStorage();
+ const exportedModel = await this.models[modelName].export();
+ if (!exportedModel) {
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Model "${modelName}" not trained, skipping save`);
+ }
+ return;
+ }
+ await storage.patch(`model_${modelName}`, {
+ modelName,
+ data: JSON.stringify(exportedModel),
+ savedAt: (/* @__PURE__ */ new Date()).toISOString()
+ });
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Saved model "${modelName}" to plugin storage`);
+ }
+ } catch (error) {
+ console.error(`[MLPlugin] Failed to save model "${modelName}":`, error.message);
+ }
+ }
+ /**
+ * Load model from plugin storage
+ * @private
+ */
+ async _loadModel(modelName) {
+ try {
+ const storage = this.getStorage();
+ const [ok, err, record] = await tryFn(() => storage.get(`model_${modelName}`));
+ if (!ok || !record) {
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] No saved model found for "${modelName}"`);
+ }
+ return;
+ }
+ const modelData = JSON.parse(record.data);
+ await this.models[modelName].import(modelData);
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Loaded model "${modelName}" from plugin storage`);
+ }
+ } catch (error) {
+ console.error(`[MLPlugin] Failed to load model "${modelName}":`, error.message);
+ }
+ }
+ /**
+ * Delete model from plugin storage
+ * @private
+ */
+ async _deleteModel(modelName) {
+ try {
+ const storage = this.getStorage();
+ await storage.delete(`model_${modelName}`);
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Deleted model "${modelName}" from plugin storage`);
+ }
+ } catch (error) {
+ if (this.config.verbose) {
+ console.log(`[MLPlugin] Could not delete model "${modelName}": ${error.message}`);
+ }
+ }
+ }
+ }
+
  class SqsConsumer {
  constructor({ queueUrl, onMessage, onError, poolingInterval = 5e3, maxMessages = 10, region = "us-east-1", credentials, endpoint, driver = "sqs" }) {
  this.driver = driver;
@@ -13424,7 +15192,7 @@ function generateMySQLAlterTable(tableName, attributes, existingSchema) {
  }
  return alterStatements;
  }
- function generateBigQuerySchema(attributes) {
+ function generateBigQuerySchema(attributes, mutability = "append-only") {
  const fields = [];
  fields.push({
  name: "id",
@@ -13448,6 +15216,14 @@ function generateBigQuerySchema(attributes) {
  if (!attributes.updatedAt) {
  fields.push({ name: "updated_at", type: "TIMESTAMP", mode: "NULLABLE" });
  }
+ if (mutability === "append-only" || mutability === "immutable") {
+ fields.push({ name: "_operation_type", type: "STRING", mode: "NULLABLE" });
+ fields.push({ name: "_operation_timestamp", type: "TIMESTAMP", mode: "NULLABLE" });
+ }
+ if (mutability === "immutable") {
+ fields.push({ name: "_is_deleted", type: "BOOL", mode: "NULLABLE" });
+ fields.push({ name: "_version", type: "INT64", mode: "NULLABLE" });
+ }
  return fields;
  }
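For reference, the only effect of the new mutability argument is extra tracking columns; a quick sketch of the difference (the attributes map is illustrative):

const mutable = generateBigQuerySchema({ name: "string" }, "mutable");
const immutable = generateBigQuerySchema({ name: "string" }, "immutable");
// "immutable" adds four fields over "mutable": _operation_type (STRING),
// _operation_timestamp (TIMESTAMP), _is_deleted (BOOL) and _version (INT64);
// "append-only" adds only the first two.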
  async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId) {
@@ -13469,7 +15245,7 @@ async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId) {
  }
  return schema;
  }
- function generateBigQuerySchemaUpdate(attributes, existingSchema) {
+ function generateBigQuerySchemaUpdate(attributes, existingSchema, mutability = "append-only") {
  const newFields = [];
  for (const [fieldName, fieldConfig] of Object.entries(attributes)) {
  if (fieldName === "id") continue;
@@ -13483,6 +15259,22 @@ function generateBigQuerySchemaUpdate(attributes, existingSchema) {
  mode: required ? "REQUIRED" : "NULLABLE"
  });
  }
+ if (mutability === "append-only" || mutability === "immutable") {
+ if (!existingSchema["_operation_type"]) {
+ newFields.push({ name: "_operation_type", type: "STRING", mode: "NULLABLE" });
+ }
+ if (!existingSchema["_operation_timestamp"]) {
+ newFields.push({ name: "_operation_timestamp", type: "TIMESTAMP", mode: "NULLABLE" });
+ }
+ }
+ if (mutability === "immutable") {
+ if (!existingSchema["_is_deleted"]) {
+ newFields.push({ name: "_is_deleted", type: "BOOL", mode: "NULLABLE" });
+ }
+ if (!existingSchema["_version"]) {
+ newFields.push({ name: "_version", type: "INT64", mode: "NULLABLE" });
+ }
+ }
  return newFields;
  }
  function s3dbTypeToSQLite(fieldType, fieldOptions = {}) {
@@ -13565,6 +15357,8 @@ class BigqueryReplicator extends BaseReplicator {
  this.credentials = config.credentials;
  this.location = config.location || "US";
  this.logTable = config.logTable;
+ this.mutability = config.mutability || "append-only";
+ this._validateMutability(this.mutability);
  this.schemaSync = {
  enabled: config.schemaSync?.enabled || false,
  strategy: config.schemaSync?.strategy || "alter",
@@ -13573,6 +15367,13 @@ class BigqueryReplicator extends BaseReplicator {
  autoCreateColumns: config.schemaSync?.autoCreateColumns !== false
  };
  this.resources = this.parseResourcesConfig(resources);
+ this.versionCounters = /* @__PURE__ */ new Map();
+ }
+ _validateMutability(mutability) {
+ const validModes = ["append-only", "mutable", "immutable"];
+ if (!validModes.includes(mutability)) {
+ throw new Error(`Invalid mutability mode: ${mutability}. Must be one of: ${validModes.join(", ")}`);
+ }
  }
  parseResourcesConfig(resources) {
  const parsed = {};
@@ -13581,24 +15382,31 @@ class BigqueryReplicator extends BaseReplicator {
  parsed[resourceName] = [{
  table: config,
  actions: ["insert"],
- transform: null
+ transform: null,
+ mutability: this.mutability
  }];
  } else if (Array.isArray(config)) {
  parsed[resourceName] = config.map((item) => {
  if (typeof item === "string") {
- return { table: item, actions: ["insert"], transform: null };
+ return { table: item, actions: ["insert"], transform: null, mutability: this.mutability };
  }
+ const itemMutability = item.mutability || this.mutability;
+ this._validateMutability(itemMutability);
  return {
  table: item.table,
  actions: item.actions || ["insert"],
- transform: item.transform || null
+ transform: item.transform || null,
+ mutability: itemMutability
  };
  });
  } else if (typeof config === "object") {
+ const configMutability = config.mutability || this.mutability;
+ this._validateMutability(configMutability);
  parsed[resourceName] = [{
  table: config.table,
  actions: config.actions || ["insert"],
- transform: config.transform || null
+ transform: config.transform || null,
+ mutability: configMutability
  }];
  }
  }
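parseResourcesConfig accepts three shapes per resource, and each resolved table entry now carries its own validated mutability. A hedged sketch of a resources map using all three shapes (the field names come straight from the parser above; the surrounding replicator constructor is not shown in this diff):

const resources = {
  users: "users_table", // string shorthand: insert-only, inherits the replicator default
  orders: [{
    table: "orders",
    actions: ["insert", "update", "delete"],
    mutability: "immutable" // per-table override, checked by _validateMutability
  }],
  carts: { table: "carts", mutability: "mutable" } // single-object form
};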
@@ -13677,8 +15485,9 @@ class BigqueryReplicator extends BaseReplicator {
  );
  for (const tableConfig of tableConfigs) {
  const tableName = tableConfig.table;
+ const mutability = tableConfig.mutability;
  const [okSync, errSync] = await tryFn(async () => {
- await this.syncTableSchema(tableName, attributes);
+ await this.syncTableSchema(tableName, attributes, mutability);
  });
  if (!okSync) {
  const message = `Schema sync failed for table ${tableName}: ${errSync.message}`;
@@ -13698,7 +15507,7 @@ class BigqueryReplicator extends BaseReplicator {
  /**
  * Sync a single table schema in BigQuery
  */
- async syncTableSchema(tableName, attributes) {
+ async syncTableSchema(tableName, attributes, mutability = "append-only") {
  const dataset = this.bigqueryClient.dataset(this.datasetId);
  const table = dataset.table(tableName);
  const [exists] = await table.exists();
@@ -13709,15 +15518,16 @@ class BigqueryReplicator extends BaseReplicator {
  if (this.schemaSync.strategy === "validate-only") {
  throw new Error(`Table ${tableName} does not exist (validate-only mode)`);
  }
- const schema = generateBigQuerySchema(attributes);
+ const schema = generateBigQuerySchema(attributes, mutability);
  if (this.config.verbose) {
- console.log(`[BigQueryReplicator] Creating table ${tableName} with schema:`, schema);
+ console.log(`[BigQueryReplicator] Creating table ${tableName} with schema (mutability: ${mutability}):`, schema);
  }
  await dataset.createTable(tableName, { schema });
  this.emit("table_created", {
  replicator: this.name,
  tableName,
- attributes: Object.keys(attributes)
+ attributes: Object.keys(attributes),
+ mutability
  });
  return;
  }
@@ -13726,18 +15536,19 @@ class BigqueryReplicator extends BaseReplicator {
  console.warn(`[BigQueryReplicator] Dropping and recreating table ${tableName}`);
  }
  await table.delete();
- const schema = generateBigQuerySchema(attributes);
+ const schema = generateBigQuerySchema(attributes, mutability);
  await dataset.createTable(tableName, { schema });
  this.emit("table_recreated", {
  replicator: this.name,
  tableName,
- attributes: Object.keys(attributes)
+ attributes: Object.keys(attributes),
+ mutability
  });
  return;
  }
  if (this.schemaSync.strategy === "alter" && this.schemaSync.autoCreateColumns) {
  const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
- const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
+ const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
  if (newFields.length > 0) {
  if (this.config.verbose) {
  console.log(`[BigQueryReplicator] Adding ${newFields.length} field(s) to table ${tableName}:`, newFields);
@@ -13755,7 +15566,7 @@ class BigqueryReplicator extends BaseReplicator {
  }
  if (this.schemaSync.strategy === "validate-only") {
  const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
- const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
+ const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
  if (newFields.length > 0) {
  throw new Error(`Table ${tableName} schema mismatch. Missing columns: ${newFields.length}`);
  }
@@ -13774,7 +15585,8 @@ class BigqueryReplicator extends BaseReplicator {
  if (!this.resources[resourceName]) return [];
  return this.resources[resourceName].filter((tableConfig) => tableConfig.actions.includes(operation)).map((tableConfig) => ({
  table: tableConfig.table,
- transform: tableConfig.transform
+ transform: tableConfig.transform,
+ mutability: tableConfig.mutability
  }));
  }
  applyTransform(data, transformFn) {
@@ -13793,6 +15605,32 @@ class BigqueryReplicator extends BaseReplicator {
  });
  return cleanData;
  }
+ /**
+ * Add tracking fields for append-only and immutable modes
+ * @private
+ */
+ _addTrackingFields(data, operation, mutability, id) {
+ const tracked = { ...data };
+ if (mutability === "append-only" || mutability === "immutable") {
+ tracked._operation_type = operation;
+ tracked._operation_timestamp = (/* @__PURE__ */ new Date()).toISOString();
+ }
+ if (mutability === "immutable") {
+ tracked._is_deleted = operation === "delete";
+ tracked._version = this._getNextVersion(id);
+ }
+ return tracked;
+ }
+ /**
+ * Get next version number for immutable mode
+ * @private
+ */
+ _getNextVersion(id) {
+ const current = this.versionCounters.get(id) || 0;
+ const next = current + 1;
+ this.versionCounters.set(id, next);
+ return next;
+ }
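Illustratively, this is what a converted operation carries into the warehouse; the timestamp below is an example value:

// _addTrackingFields({ id: "u1", status: "paid" }, "update", "immutable", "u1") yields:
// {
//   id: "u1", status: "paid",
//   _operation_type: "update",
//   _operation_timestamp: "2025-06-01T12:00:00.000Z",
//   _is_deleted: false, // true only for converted deletes
//   _version: 1         // 2, 3, ... on subsequent writes for the same id
// }

Worth noting: versionCounters is an in-process Map, so _version numbering restarts when the replicator process restarts; it orders writes within a process lifetime rather than globally.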
  async replicate(resourceName, operation, data, id, beforeData = null) {
  if (!this.enabled || !this.shouldReplicateResource(resourceName)) {
  return { skipped: true, reason: "resource_not_included" };
@@ -13811,9 +15649,14 @@ class BigqueryReplicator extends BaseReplicator {
  for (const tableConfig of tableConfigs) {
  const [okTable, errTable] = await tryFn(async () => {
  const table = dataset.table(tableConfig.table);
+ const mutability = tableConfig.mutability;
  let job;
- if (operation === "insert") {
- const transformedData = this.applyTransform(data, tableConfig.transform);
+ const shouldConvertToInsert = (mutability === "append-only" || mutability === "immutable") && (operation === "update" || operation === "delete");
+ if (operation === "insert" || shouldConvertToInsert) {
+ let transformedData = this.applyTransform(data, tableConfig.transform);
+ if (shouldConvertToInsert) {
+ transformedData = this._addTrackingFields(transformedData, operation, mutability, id);
+ }
  try {
  job = await table.insert([transformedData]);
  } catch (error) {
@@ -13825,7 +15668,7 @@ class BigqueryReplicator extends BaseReplicator {
  }
  throw error;
  }
- } else if (operation === "update") {
+ } else if (operation === "update" && mutability === "mutable") {
  const transformedData = this.applyTransform(data, tableConfig.transform);
  const keys = Object.keys(transformedData).filter((k) => k !== "id");
  const setClause = keys.map((k) => `${k} = @${k}`).join(", ");
@@ -13867,7 +15710,7 @@ class BigqueryReplicator extends BaseReplicator {
  }
  }
  if (!job) throw lastError;
- } else if (operation === "delete") {
+ } else if (operation === "delete" && mutability === "mutable") {
  const query = `DELETE FROM \`${this.projectId}.${this.datasetId}.${tableConfig.table}\` WHERE id = @id`;
  try {
  const [deleteJob] = await this.bigqueryClient.createQueryJob({
@@ -14003,7 +15846,8 @@ class BigqueryReplicator extends BaseReplicator {
  datasetId: this.datasetId,
  resources: this.resources,
  logTable: this.logTable,
- schemaSync: this.schemaSync
+ schemaSync: this.schemaSync,
+ mutability: this.mutability
  };
  }
  }
@@ -15724,11 +17568,11 @@ class ConnectionString {
  }
  }

- class Client extends EventEmitter {
+ class S3Client extends EventEmitter {
  constructor({
  verbose = false,
  id = null,
- AwsS3Client,
+ AwsS3Client: AwsS3Client2,
  connectionString,
  parallelism = 10,
  httpClientOptions = {}
@@ -15751,7 +17595,7 @@ class Client extends EventEmitter {
  // 60 second timeout
  ...httpClientOptions
  };
- this.client = AwsS3Client || this.createClient();
+ this.client = AwsS3Client2 || this.createClient();
  }
  createClient() {
  const httpAgent = new Agent(this.httpClientOptions);
@@ -15772,7 +17616,7 @@ class Client extends EventEmitter {
  secretAccessKey: this.config.secretAccessKey
  };
  }
- const client = new S3Client(options);
+ const client = new S3Client$1(options);
  client.middlewareStack.add(
  (next, context) => async (args) => {
  if (context.commandName === "DeleteObjectsCommand") {
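The 12.x internal Client class becomes S3Client here, with the AWS SDK's own S3Client import aliased to S3Client$1 to avoid the name collision. For code that imported the class directly, the migration presumably looks like this (assuming the public export follows the class rename):

// 12.x: import { Client } from "s3db.js";
// 13.x: import { S3Client } from "s3db.js"; // assumed export name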
@@ -18358,6 +20202,7 @@ ${errorDetails}`,
  events = {},
  asyncEvents = true,
  asyncPartitions = true,
+ strictPartitions = false,
  createdBy = "user"
  } = config;
  this.name = name;
@@ -18389,6 +20234,7 @@ ${errorDetails}`,
  allNestedObjectsOptional,
  asyncEvents,
  asyncPartitions,
+ strictPartitions,
  createdBy
  };
  this.hooks = {
@@ -19141,17 +20987,31 @@ ${errorDetails}`,
  throw errPut;
  }
  const insertedObject = await this.get(finalId);
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
- setImmediate(() => {
- this.createPartitionReferences(insertedObject).catch((err) => {
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
+ if (this.config.strictPartitions) {
+ await this.createPartitionReferences(insertedObject);
+ } else if (this.config.asyncPartitions) {
+ setImmediate(() => {
+ this.createPartitionReferences(insertedObject).catch((err) => {
+ this.emit("partitionIndexError", {
+ operation: "insert",
+ id: finalId,
+ error: err,
+ message: err.message
+ });
+ });
+ });
+ } else {
+ const [ok, err] = await tryFn(() => this.createPartitionReferences(insertedObject));
+ if (!ok) {
  this.emit("partitionIndexError", {
  operation: "insert",
  id: finalId,
  error: err,
  message: err.message
  });
- });
- });
+ }
+ }
  const nonPartitionHooks = this.hooks.afterInsert.filter(
  (hook) => !hook.toString().includes("createPartitionReferences")
  );
19446
21306
  body: finalBody,
19447
21307
  behavior: this.behavior
19448
21308
  });
19449
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19450
- setImmediate(() => {
19451
- this.handlePartitionReferenceUpdates(originalData, updatedData).catch((err2) => {
21309
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
21310
+ if (this.config.strictPartitions) {
21311
+ await this.handlePartitionReferenceUpdates(originalData, updatedData);
21312
+ } else if (this.config.asyncPartitions) {
21313
+ setImmediate(() => {
21314
+ this.handlePartitionReferenceUpdates(originalData, updatedData).catch((err2) => {
21315
+ this.emit("partitionIndexError", {
21316
+ operation: "update",
21317
+ id,
21318
+ error: err2,
21319
+ message: err2.message
21320
+ });
21321
+ });
21322
+ });
21323
+ } else {
21324
+ const [ok2, err2] = await tryFn(() => this.handlePartitionReferenceUpdates(originalData, updatedData));
21325
+ if (!ok2) {
19452
21326
  this.emit("partitionIndexError", {
19453
21327
  operation: "update",
19454
21328
  id,
19455
21329
  error: err2,
19456
21330
  message: err2.message
19457
21331
  });
19458
- });
19459
- });
21332
+ }
21333
+ }
19460
21334
  const nonPartitionHooks = this.hooks.afterUpdate.filter(
19461
21335
  (hook) => !hook.toString().includes("handlePartitionReferenceUpdates")
19462
21336
  );
@@ -19569,7 +21443,9 @@ ${errorDetails}`,
19569
21443
  if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19570
21444
  const oldData = { ...currentData, id };
19571
21445
  const newData = { ...mergedData, id };
19572
- if (this.config.asyncPartitions) {
21446
+ if (this.config.strictPartitions) {
21447
+ await this.handlePartitionReferenceUpdates(oldData, newData);
21448
+ } else if (this.config.asyncPartitions) {
19573
21449
  setImmediate(() => {
19574
21450
  this.handlePartitionReferenceUpdates(oldData, newData).catch((err) => {
19575
21451
  this.emit("partitionIndexError", {
@@ -19699,7 +21575,9 @@ ${errorDetails}`,
19699
21575
  }
19700
21576
  const replacedObject = { id, ...validatedAttributes };
19701
21577
  if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19702
- if (this.config.asyncPartitions) {
21578
+ if (this.config.strictPartitions) {
21579
+ await this.handlePartitionReferenceUpdates({}, replacedObject);
21580
+ } else if (this.config.asyncPartitions) {
19703
21581
  setImmediate(() => {
19704
21582
  this.handlePartitionReferenceUpdates({}, replacedObject).catch((err) => {
19705
21583
  this.emit("partitionIndexError", {
@@ -19839,17 +21717,31 @@ ${errorDetails}`,
19839
21717
  });
19840
21718
  const oldData = { ...originalData, id };
19841
21719
  const newData = { ...validatedAttributes, id };
19842
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19843
- setImmediate(() => {
19844
- this.handlePartitionReferenceUpdates(oldData, newData).catch((err2) => {
21720
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0) {
21721
+ if (this.config.strictPartitions) {
21722
+ await this.handlePartitionReferenceUpdates(oldData, newData);
21723
+ } else if (this.config.asyncPartitions) {
21724
+ setImmediate(() => {
21725
+ this.handlePartitionReferenceUpdates(oldData, newData).catch((err2) => {
21726
+ this.emit("partitionIndexError", {
21727
+ operation: "updateConditional",
21728
+ id,
21729
+ error: err2,
21730
+ message: err2.message
21731
+ });
21732
+ });
21733
+ });
21734
+ } else {
21735
+ const [ok2, err2] = await tryFn(() => this.handlePartitionReferenceUpdates(oldData, newData));
21736
+ if (!ok2) {
19845
21737
  this.emit("partitionIndexError", {
19846
21738
  operation: "updateConditional",
19847
21739
  id,
19848
21740
  error: err2,
19849
21741
  message: err2.message
19850
21742
  });
19851
- });
19852
- });
21743
+ }
21744
+ }
19853
21745
  const nonPartitionHooks = this.hooks.afterUpdate.filter(
19854
21746
  (hook) => !hook.toString().includes("handlePartitionReferenceUpdates")
19855
21747
  );
@@ -19925,17 +21817,31 @@ ${errorDetails}`,
19925
21817
  operation: "delete",
19926
21818
  id
19927
21819
  });
19928
- if (this.config.asyncPartitions && this.config.partitions && Object.keys(this.config.partitions).length > 0) {
19929
- setImmediate(() => {
19930
- this.deletePartitionReferences(objectData).catch((err3) => {
21820
+ if (this.config.partitions && Object.keys(this.config.partitions).length > 0 && objectData) {
21821
+ if (this.config.strictPartitions) {
21822
+ await this.deletePartitionReferences(objectData);
21823
+ } else if (this.config.asyncPartitions) {
21824
+ setImmediate(() => {
21825
+ this.deletePartitionReferences(objectData).catch((err3) => {
21826
+ this.emit("partitionIndexError", {
21827
+ operation: "delete",
21828
+ id,
21829
+ error: err3,
21830
+ message: err3.message
21831
+ });
21832
+ });
21833
+ });
21834
+ } else {
21835
+ const [ok3, err3] = await tryFn(() => this.deletePartitionReferences(objectData));
21836
+ if (!ok3) {
19931
21837
  this.emit("partitionIndexError", {
19932
21838
  operation: "delete",
19933
21839
  id,
19934
21840
  error: err3,
19935
21841
  message: err3.message
19936
21842
  });
19937
- });
19938
- });
21843
+ }
21844
+ }
19939
21845
  const nonPartitionHooks = this.hooks.afterDelete.filter(
19940
21846
  (hook) => !hook.toString().includes("deletePartitionReferences")
19941
21847
  );
@@ -21306,10 +23212,13 @@ function validateResourceConfig(config) {
  class Database extends EventEmitter {
  constructor(options) {
  super();
- this.id = idGenerator(7);
+ this.id = (() => {
+ const [ok, err, id] = tryFn(() => idGenerator(7));
+ return ok && id ? id : `db-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+ })();
  this.version = "1";
  this.s3dbVersion = (() => {
- const [ok, err, version] = tryFn(() => true ? "12.3.0" : "latest");
+ const [ok, err, version] = tryFn(() => true ? "13.0.0" : "latest");
  return ok ? version : "latest";
  })();
  this._resourcesMap = {};
@@ -21343,6 +23252,7 @@ class Database extends EventEmitter {
  this.versioningEnabled = options.versioningEnabled || false;
  this.persistHooks = options.persistHooks || false;
  this.strictValidation = options.strictValidation !== false;
+ this.strictHooks = options.strictHooks || false;
  this._initHooks();
  let connectionString = options.connectionString;
  if (!connectionString && (options.bucket || options.accessKeyId || options.secretAccessKey)) {
@@ -21365,7 +23275,7 @@ class Database extends EventEmitter {
  connectionString = `s3://${encodeURIComponent(accessKeyId)}:${encodeURIComponent(secretAccessKey)}@${bucket || "s3db"}?${params.toString()}`;
  }
  }
- this.client = options.client || new Client({
+ this.client = options.client || new S3Client({
  verbose: this.verbose,
  parallelism: this.parallelism,
  connectionString
@@ -21373,18 +23283,25 @@ class Database extends EventEmitter {
  this.connectionString = connectionString;
  this.bucket = this.client.bucket;
  this.keyPrefix = this.client.keyPrefix;
- if (!this._exitListenerRegistered) {
+ this._registerExitListener();
+ }
+ /**
+ * Register process exit listener for automatic cleanup
+ * @private
+ */
+ _registerExitListener() {
+ if (!this._exitListenerRegistered && typeof process !== "undefined") {
  this._exitListenerRegistered = true;
- if (typeof process !== "undefined") {
- process.on("exit", async () => {
- if (this.isConnected()) {
- await tryFn(() => this.disconnect());
- }
- });
- }
+ this._exitListener = async () => {
+ if (this.isConnected()) {
+ await tryFn(() => this.disconnect());
+ }
+ };
+ process.on("exit", this._exitListener);
  }
  }
  async connect() {
+ this._registerExitListener();
  await this.startPlugins();
  let metadata = null;
  let needsHealing = false;
@@ -22347,11 +24264,16 @@ class Database extends EventEmitter {
  if (this.client && typeof this.client.removeAllListeners === "function") {
  this.client.removeAllListeners();
  }
+ await this.emit("disconnected", /* @__PURE__ */ new Date());
  this.removeAllListeners();
+ if (this._exitListener && typeof process !== "undefined") {
+ process.off("exit", this._exitListener);
+ this._exitListener = null;
+ this._exitListenerRegistered = false;
+ }
  this.savedMetadata = null;
  this.plugins = {};
  this.pluginList = [];
- this.emit("disconnected", /* @__PURE__ */ new Date());
  });
  }
  /**
@@ -22455,6 +24377,13 @@ class Database extends EventEmitter {
  const [ok, error] = await tryFn(() => hook({ database: this, ...context }));
  if (!ok) {
  this.emit("hookError", { event, error, context });
+ if (this.strictHooks) {
+ throw new DatabaseError(`Hook execution failed for event '${event}': ${error.message}`, {
+ event,
+ originalError: error,
+ context
+ });
+ }
  }
  }
  }
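strictHooks upgrades hook failures from an emitted "hookError" event to a thrown DatabaseError (see the hook runner at the end of this hunk). A sketch, with an illustrative connection string:

const database = new Database({
  connectionString: "s3://KEY:SECRET@my-bucket",
  strictHooks: true // hook errors now throw instead of only emitting "hookError"
});
database.on("hookError", ({ event, error }) => console.error(event, error.message));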
@@ -26271,7 +28200,7 @@ class S3TfStateDriver extends TfStateDriver {
  */
  async initialize() {
  const { bucket, credentials, region } = this.connectionConfig;
- this.client = new Client({
+ this.client = new S3Client({
  bucketName: bucket,
  credentials,
  region
@@ -37897,6 +39826,1082 @@ class VectorPlugin extends Plugin {
  }
  }

+ class MemoryStorage {
+ constructor(config = {}) {
+ this.objects = /* @__PURE__ */ new Map();
+ this.bucket = config.bucket || "s3db";
+ this.enforceLimits = config.enforceLimits || false;
+ this.metadataLimit = config.metadataLimit || 2048;
+ this.maxObjectSize = config.maxObjectSize || 5 * 1024 * 1024 * 1024;
+ this.persistPath = config.persistPath;
+ this.autoPersist = config.autoPersist || false;
+ this.verbose = config.verbose || false;
+ }
+ /**
+ * Generate ETag (MD5 hash) for object body
+ */
+ _generateETag(body) {
+ const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body || "");
+ return createHash("md5").update(buffer).digest("hex");
+ }
+ /**
+ * Calculate metadata size in bytes
+ */
+ _calculateMetadataSize(metadata) {
+ if (!metadata) return 0;
+ let size = 0;
+ for (const [key, value] of Object.entries(metadata)) {
+ size += Buffer.byteLength(key, "utf8");
+ size += Buffer.byteLength(String(value), "utf8");
+ }
+ return size;
+ }
+ /**
+ * Validate limits if enforceLimits is enabled
+ */
+ _validateLimits(body, metadata) {
+ if (!this.enforceLimits) return;
+ const metadataSize = this._calculateMetadataSize(metadata);
+ if (metadataSize > this.metadataLimit) {
+ throw new Error(
+ `Metadata size (${metadataSize} bytes) exceeds limit of ${this.metadataLimit} bytes`
+ );
+ }
+ const bodySize = Buffer.isBuffer(body) ? body.length : Buffer.byteLength(body || "", "utf8");
+ if (bodySize > this.maxObjectSize) {
+ throw new Error(
+ `Object size (${bodySize} bytes) exceeds limit of ${this.maxObjectSize} bytes`
+ );
+ }
+ }
+ /**
+ * Store an object
+ */
+ async put(key, { body, metadata, contentType, contentEncoding, contentLength, ifMatch }) {
+ this._validateLimits(body, metadata);
+ if (ifMatch !== void 0) {
+ const existing = this.objects.get(key);
+ if (existing && existing.etag !== ifMatch) {
+ throw new Error(`Precondition failed: ETag mismatch for key "${key}"`);
+ }
+ }
+ const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body || "");
+ const etag = this._generateETag(buffer);
+ const lastModified = (/* @__PURE__ */ new Date()).toISOString();
+ const size = buffer.length;
+ const objectData = {
+ body: buffer,
+ metadata: metadata || {},
+ contentType: contentType || "application/octet-stream",
+ etag,
+ lastModified,
+ size,
+ contentEncoding,
+ contentLength: contentLength || size
+ };
+ this.objects.set(key, objectData);
+ if (this.verbose) {
+ console.log(`[MemoryStorage] PUT ${key} (${size} bytes, etag: ${etag})`);
+ }
+ if (this.autoPersist && this.persistPath) {
+ await this.saveToDisk();
+ }
+ return {
+ ETag: etag,
+ VersionId: null,
+ // Memory storage doesn't support versioning
+ ServerSideEncryption: null,
+ Location: `/${this.bucket}/${key}`
+ };
+ }
+ /**
+ * Retrieve an object
+ */
+ async get(key) {
+ const obj = this.objects.get(key);
+ if (!obj) {
+ const error = new Error(`Object not found: ${key}`);
+ error.name = "NoSuchKey";
+ error.$metadata = {
+ httpStatusCode: 404,
+ requestId: "memory-" + Date.now(),
+ attempts: 1,
+ totalRetryDelay: 0
+ };
+ throw error;
+ }
+ if (this.verbose) {
+ console.log(`[MemoryStorage] GET ${key} (${obj.size} bytes)`);
+ }
+ const bodyStream = Readable.from(obj.body);
+ return {
+ Body: bodyStream,
+ Metadata: { ...obj.metadata },
+ ContentType: obj.contentType,
+ ContentLength: obj.size,
+ ETag: obj.etag,
+ LastModified: new Date(obj.lastModified),
+ ContentEncoding: obj.contentEncoding
+ };
+ }
+ /**
+ * Get object metadata only (like S3 HeadObject)
+ */
+ async head(key) {
+ const obj = this.objects.get(key);
+ if (!obj) {
+ const error = new Error(`Object not found: ${key}`);
+ error.name = "NoSuchKey";
+ error.$metadata = {
+ httpStatusCode: 404,
+ requestId: "memory-" + Date.now(),
+ attempts: 1,
+ totalRetryDelay: 0
+ };
+ throw error;
+ }
+ if (this.verbose) {
+ console.log(`[MemoryStorage] HEAD ${key}`);
+ }
+ return {
+ Metadata: { ...obj.metadata },
+ ContentType: obj.contentType,
+ ContentLength: obj.size,
+ ETag: obj.etag,
+ LastModified: new Date(obj.lastModified),
+ ContentEncoding: obj.contentEncoding
+ };
+ }
+ /**
+ * Copy an object
+ */
+ async copy(from, to, { metadata, metadataDirective, contentType }) {
+ const source = this.objects.get(from);
+ if (!source) {
+ const error = new Error(`Source object not found: ${from}`);
+ error.name = "NoSuchKey";
+ throw error;
+ }
+ let finalMetadata = { ...source.metadata };
+ if (metadataDirective === "REPLACE" && metadata) {
+ finalMetadata = metadata;
+ } else if (metadata) {
+ finalMetadata = { ...finalMetadata, ...metadata };
+ }
+ const result = await this.put(to, {
+ body: source.body,
+ metadata: finalMetadata,
+ contentType: contentType || source.contentType,
+ contentEncoding: source.contentEncoding
+ });
+ if (this.verbose) {
+ console.log(`[MemoryStorage] COPY ${from} \u2192 ${to}`);
+ }
+ return result;
+ }
+ /**
+ * Check if object exists
+ */
+ exists(key) {
+ return this.objects.has(key);
+ }
+ /**
+ * Delete an object
+ */
+ async delete(key) {
+ const existed = this.objects.has(key);
+ this.objects.delete(key);
+ if (this.verbose) {
+ console.log(`[MemoryStorage] DELETE ${key} (existed: ${existed})`);
+ }
+ if (this.autoPersist && this.persistPath) {
+ await this.saveToDisk();
+ }
+ return {
+ DeleteMarker: false,
+ VersionId: null
+ };
+ }
+ /**
+ * Delete multiple objects (batch)
+ */
+ async deleteMultiple(keys) {
+ const deleted = [];
+ const errors = [];
+ for (const key of keys) {
+ try {
+ await this.delete(key);
+ deleted.push({ Key: key });
+ } catch (error) {
+ errors.push({
+ Key: key,
+ Code: error.name || "InternalError",
+ Message: error.message
+ });
+ }
+ }
+ if (this.verbose) {
+ console.log(`[MemoryStorage] DELETE BATCH (${deleted.length} deleted, ${errors.length} errors)`);
+ }
+ return { Deleted: deleted, Errors: errors };
+ }
+ /**
+ * List objects with prefix/delimiter support
+ */
+ async list({ prefix = "", delimiter = null, maxKeys = 1e3, continuationToken = null }) {
+ const allKeys = Array.from(this.objects.keys());
+ let filteredKeys = prefix ? allKeys.filter((key) => key.startsWith(prefix)) : allKeys;
+ filteredKeys.sort();
+ let startIndex = 0;
+ if (continuationToken) {
+ startIndex = parseInt(continuationToken) || 0;
+ }
+ const paginatedKeys = filteredKeys.slice(startIndex, startIndex + maxKeys);
+ const isTruncated = startIndex + maxKeys < filteredKeys.length;
+ const nextContinuationToken = isTruncated ? String(startIndex + maxKeys) : null;
+ const commonPrefixes = /* @__PURE__ */ new Set();
+ const contents = [];
+ for (const key of paginatedKeys) {
+ if (delimiter && prefix) {
+ const suffix = key.substring(prefix.length);
+ const delimiterIndex = suffix.indexOf(delimiter);
+ if (delimiterIndex !== -1) {
+ const commonPrefix = prefix + suffix.substring(0, delimiterIndex + 1);
+ commonPrefixes.add(commonPrefix);
+ continue;
+ }
+ }
+ const obj = this.objects.get(key);
+ contents.push({
+ Key: key,
+ Size: obj.size,
+ LastModified: new Date(obj.lastModified),
+ ETag: obj.etag,
+ StorageClass: "STANDARD"
+ });
+ }
+ if (this.verbose) {
+ console.log(`[MemoryStorage] LIST prefix="${prefix}" (${contents.length} objects, ${commonPrefixes.size} prefixes)`);
+ }
+ return {
+ Contents: contents,
+ CommonPrefixes: Array.from(commonPrefixes).map((prefix2) => ({ Prefix: prefix2 })),
+ IsTruncated: isTruncated,
+ NextContinuationToken: nextContinuationToken,
+ KeyCount: contents.length + commonPrefixes.size,
+ MaxKeys: maxKeys,
+ Prefix: prefix,
+ Delimiter: delimiter
+ };
+ }
+ /**
+ * Create a snapshot of current state
+ */
+ snapshot() {
+ const snapshot = {
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+ bucket: this.bucket,
+ objectCount: this.objects.size,
+ objects: {}
+ };
+ for (const [key, obj] of this.objects.entries()) {
+ snapshot.objects[key] = {
+ body: obj.body.toString("base64"),
+ metadata: obj.metadata,
+ contentType: obj.contentType,
+ etag: obj.etag,
+ lastModified: obj.lastModified,
+ size: obj.size,
+ contentEncoding: obj.contentEncoding,
+ contentLength: obj.contentLength
+ };
+ }
+ return snapshot;
+ }
+ /**
+ * Restore from a snapshot
+ */
+ restore(snapshot) {
+ if (!snapshot || !snapshot.objects) {
+ throw new Error("Invalid snapshot format");
+ }
+ this.objects.clear();
+ for (const [key, obj] of Object.entries(snapshot.objects)) {
+ this.objects.set(key, {
+ body: Buffer.from(obj.body, "base64"),
+ metadata: obj.metadata,
+ contentType: obj.contentType,
+ etag: obj.etag,
+ lastModified: obj.lastModified,
+ size: obj.size,
+ contentEncoding: obj.contentEncoding,
+ contentLength: obj.contentLength
+ });
+ }
+ if (this.verbose) {
+ console.log(`[MemoryStorage] Restored snapshot with ${this.objects.size} objects`);
+ }
+ }
+ /**
+ * Save current state to disk
+ */
+ async saveToDisk(customPath) {
+ const path = customPath || this.persistPath;
+ if (!path) {
+ throw new Error("No persist path configured");
+ }
+ const snapshot = this.snapshot();
+ const json = JSON.stringify(snapshot, null, 2);
+ const [ok, err] = await tryFn(() => writeFile(path, json, "utf-8"));
+ if (!ok) {
+ throw new Error(`Failed to save to disk: ${err.message}`);
+ }
+ if (this.verbose) {
+ console.log(`[MemoryStorage] Saved ${this.objects.size} objects to ${path}`);
+ }
+ return path;
+ }
+ /**
+ * Load state from disk
+ */
+ async loadFromDisk(customPath) {
+ const path = customPath || this.persistPath;
+ if (!path) {
+ throw new Error("No persist path configured");
+ }
+ const [ok, err, json] = await tryFn(() => readFile(path, "utf-8"));
+ if (!ok) {
+ throw new Error(`Failed to load from disk: ${err.message}`);
+ }
+ const snapshot = JSON.parse(json);
+ this.restore(snapshot);
+ if (this.verbose) {
+ console.log(`[MemoryStorage] Loaded ${this.objects.size} objects from ${path}`);
+ }
+ return snapshot;
+ }
+ /**
+ * Get storage statistics
+ */
+ getStats() {
+ let totalSize = 0;
+ const keys = [];
+ for (const [key, obj] of this.objects.entries()) {
+ totalSize += obj.size;
+ keys.push(key);
+ }
+ return {
+ objectCount: this.objects.size,
+ totalSize,
+ totalSizeFormatted: this._formatBytes(totalSize),
+ keys: keys.sort(),
+ bucket: this.bucket
+ };
+ }
+ /**
+ * Format bytes for human reading
+ */
+ _formatBytes(bytes) {
+ if (bytes === 0) return "0 Bytes";
+ const k = 1024;
+ const sizes = ["Bytes", "KB", "MB", "GB"];
+ const i = Math.floor(Math.log(bytes) / Math.log(k));
+ return Math.round(bytes / Math.pow(k, i) * 100) / 100 + " " + sizes[i];
+ }
+ /**
+ * Clear all objects
+ */
+ clear() {
+ this.objects.clear();
+ if (this.verbose) {
+ console.log(`[MemoryStorage] Cleared all objects`);
+ }
+ }
+ }
+
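MemoryStorage imitates S3 response shapes (ETag from an MD5 of the body, a Readable Body, NoSuchKey errors with $metadata.httpStatusCode 404), which is what lets it substitute for the real backend. A standalone sketch using only the options read in its constructor:

const storage = new MemoryStorage({ bucket: "test", persistPath: "/tmp/s3db-snapshot.json" });
await storage.put("users/u1", {
  body: JSON.stringify({ name: "Ada" }),
  metadata: { role: "admin" }
});
const head = await storage.head("users/u1"); // { ETag, ContentLength, Metadata, ... }
await storage.saveToDisk();                  // JSON snapshot; bodies stored base64-encoded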
+ class MemoryClient extends EventEmitter {
+ constructor(config = {}) {
+ super();
+ this.id = config.id || idGenerator(77);
+ this.verbose = config.verbose || false;
+ this.parallelism = config.parallelism || 10;
+ this.bucket = config.bucket || "s3db";
+ this.keyPrefix = config.keyPrefix || "";
+ this.region = config.region || "us-east-1";
+ this.storage = new MemoryStorage({
+ bucket: this.bucket,
+ enforceLimits: config.enforceLimits || false,
+ metadataLimit: config.metadataLimit || 2048,
+ maxObjectSize: config.maxObjectSize || 5 * 1024 * 1024 * 1024,
+ persistPath: config.persistPath,
+ autoPersist: config.autoPersist || false,
+ verbose: this.verbose
+ });
+ this.config = {
+ bucket: this.bucket,
+ keyPrefix: this.keyPrefix,
+ region: this.region,
+ endpoint: "memory://localhost",
+ forcePathStyle: true
+ };
+ if (this.verbose) {
+ console.log(`[MemoryClient] Initialized (id: ${this.id}, bucket: ${this.bucket})`);
+ }
+ }
+ /**
+ * Simulate sendCommand from AWS SDK
+ * Used by Database/Resource to send AWS SDK commands
+ */
+ async sendCommand(command) {
+ const commandName = command.constructor.name;
+ const input = command.input || {};
+ this.emit("command.request", commandName, input);
+ let response;
+ try {
+ switch (commandName) {
+ case "PutObjectCommand":
+ response = await this._handlePutObject(input);
+ break;
+ case "GetObjectCommand":
+ response = await this._handleGetObject(input);
+ break;
+ case "HeadObjectCommand":
+ response = await this._handleHeadObject(input);
+ break;
+ case "CopyObjectCommand":
+ response = await this._handleCopyObject(input);
+ break;
+ case "DeleteObjectCommand":
+ response = await this._handleDeleteObject(input);
+ break;
+ case "DeleteObjectsCommand":
+ response = await this._handleDeleteObjects(input);
+ break;
+ case "ListObjectsV2Command":
+ response = await this._handleListObjects(input);
+ break;
+ default:
+ throw new Error(`Unsupported command: ${commandName}`);
+ }
+ this.emit("command.response", commandName, response, input);
+ return response;
+ } catch (error) {
+ const mappedError = mapAwsError(error, {
+ bucket: this.bucket,
+ key: input.Key,
+ commandName,
+ commandInput: input
+ });
+ throw mappedError;
+ }
+ }
+ /**
+ * PutObjectCommand handler
+ */
+ async _handlePutObject(input) {
+ const key = input.Key;
+ const metadata = input.Metadata || {};
+ const contentType = input.ContentType;
+ const body = input.Body;
+ const contentEncoding = input.ContentEncoding;
+ const contentLength = input.ContentLength;
+ const ifMatch = input.IfMatch;
+ return await this.storage.put(key, {
+ body,
+ metadata,
+ contentType,
+ contentEncoding,
+ contentLength,
+ ifMatch
+ });
+ }
+ /**
+ * GetObjectCommand handler
+ */
+ async _handleGetObject(input) {
+ const key = input.Key;
+ return await this.storage.get(key);
+ }
+ /**
+ * HeadObjectCommand handler
+ */
+ async _handleHeadObject(input) {
+ const key = input.Key;
+ return await this.storage.head(key);
+ }
+ /**
+ * CopyObjectCommand handler
+ */
+ async _handleCopyObject(input) {
+ const copySource = input.CopySource;
+ const parts = copySource.split("/");
+ const sourceKey = parts.slice(1).join("/");
+ const destinationKey = input.Key;
+ const metadata = input.Metadata;
+ const metadataDirective = input.MetadataDirective;
+ const contentType = input.ContentType;
+ return await this.storage.copy(sourceKey, destinationKey, {
+ metadata,
+ metadataDirective,
+ contentType
+ });
+ }
+ /**
+ * DeleteObjectCommand handler
+ */
+ async _handleDeleteObject(input) {
+ const key = input.Key;
+ return await this.storage.delete(key);
+ }
+ /**
+ * DeleteObjectsCommand handler
+ */
+ async _handleDeleteObjects(input) {
+ const objects = input.Delete?.Objects || [];
+ const keys = objects.map((obj) => obj.Key);
+ return await this.storage.deleteMultiple(keys);
+ }
+ /**
+ * ListObjectsV2Command handler
+ */
+ async _handleListObjects(input) {
+ const fullPrefix = this.keyPrefix && input.Prefix ? path$1.join(this.keyPrefix, input.Prefix) : this.keyPrefix || input.Prefix || "";
+ return await this.storage.list({
+ prefix: fullPrefix,
+ delimiter: input.Delimiter,
+ maxKeys: input.MaxKeys,
+ continuationToken: input.ContinuationToken
+ });
+ }
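Because the Database constructor accepts a pre-built client (options.client, earlier in this diff), the in-memory client can stand in for S3 during tests. A hedged wiring sketch:

// MemoryClient covers both surfaces: sendCommand() for raw AWS SDK commands,
// and the putObject/getObject/listObjects methods defined below.
const client = new MemoryClient({ bucket: "test", enforceLimits: true });
const database = new Database({ client });
await database.connect(); // all objects live only in process memory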
40376
+ /**
40377
+ * Put an object (Client interface method)
40378
+ */
40379
+ async putObject({ key, metadata, contentType, body, contentEncoding, contentLength, ifMatch }) {
40380
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40381
+ const stringMetadata = {};
40382
+ if (metadata) {
40383
+ for (const [k, v] of Object.entries(metadata)) {
40384
+ const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, "_");
40385
+ const { encoded } = metadataEncode(v);
40386
+ stringMetadata[validKey] = encoded;
40387
+ }
40388
+ }
40389
+ const response = await this.storage.put(fullKey, {
40390
+ body,
40391
+ metadata: stringMetadata,
40392
+ contentType,
40393
+ contentEncoding,
40394
+ contentLength,
40395
+ ifMatch
40396
+ });
40397
+ this.emit("putObject", null, { key, metadata, contentType, body, contentEncoding, contentLength });
40398
+ return response;
40399
+ }
40400
+ /**
40401
+ * Get an object (Client interface method)
40402
+ */
40403
+ async getObject(key) {
40404
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40405
+ const response = await this.storage.get(fullKey);
40406
+ const decodedMetadata = {};
40407
+ if (response.Metadata) {
40408
+ for (const [k, v] of Object.entries(response.Metadata)) {
40409
+ decodedMetadata[k] = metadataDecode(v);
40410
+ }
40411
+ }
40412
+ this.emit("getObject", null, { key });
40413
+ return {
40414
+ ...response,
40415
+ Metadata: decodedMetadata
40416
+ };
40417
+ }
40418
+ /**
40419
+ * Head object (get metadata only)
40420
+ */
40421
+ async headObject(key) {
40422
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40423
+ const response = await this.storage.head(fullKey);
40424
+ const decodedMetadata = {};
40425
+ if (response.Metadata) {
40426
+ for (const [k, v] of Object.entries(response.Metadata)) {
40427
+ decodedMetadata[k] = metadataDecode(v);
40428
+ }
40429
+ }
40430
+ this.emit("headObject", null, { key });
40431
+ return {
40432
+ ...response,
40433
+ Metadata: decodedMetadata
40434
+ };
40435
+ }
40436
+ /**
40437
+ * Copy an object
40438
+ */
40439
+ async copyObject({ from, to, metadata, metadataDirective, contentType }) {
40440
+ const fullFrom = this.keyPrefix ? path$1.join(this.keyPrefix, from) : from;
40441
+ const fullTo = this.keyPrefix ? path$1.join(this.keyPrefix, to) : to;
40442
+ const encodedMetadata = {};
40443
+ if (metadata) {
40444
+ for (const [k, v] of Object.entries(metadata)) {
40445
+ const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, "_");
40446
+ const { encoded } = metadataEncode(v);
40447
+ encodedMetadata[validKey] = encoded;
40448
+ }
40449
+ }
40450
+ const response = await this.storage.copy(fullFrom, fullTo, {
40451
+ metadata: encodedMetadata,
40452
+ metadataDirective,
40453
+ contentType
40454
+ });
40455
+ this.emit("copyObject", null, { from, to, metadata, metadataDirective });
40456
+ return response;
40457
+ }
40458
+ /**
40459
+ * Check if object exists
40460
+ */
40461
+ async exists(key) {
40462
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40463
+ return this.storage.exists(fullKey);
40464
+ }
40465
+ /**
40466
+ * Delete an object
40467
+ */
40468
+ async deleteObject(key) {
40469
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
40470
+ const response = await this.storage.delete(fullKey);
40471
+ this.emit("deleteObject", null, { key });
40472
+ return response;
40473
+ }
40474
+ /**
40475
+ * Delete multiple objects (batch)
40476
+ */
40477
+ async deleteObjects(keys) {
40478
+ const fullKeys = keys.map(
40479
+ (key) => this.keyPrefix ? path$1.join(this.keyPrefix, key) : key
40480
+ );
40481
+ const batches = chunk(fullKeys, this.parallelism);
40482
+ const allResults = { Deleted: [], Errors: [] };
40483
+ const { results } = await PromisePool.withConcurrency(this.parallelism).for(batches).process(async (batch) => {
40484
+ return await this.storage.deleteMultiple(batch);
40485
+ });
40486
+ for (const result of results) {
40487
+ allResults.Deleted.push(...result.Deleted);
40488
+ allResults.Errors.push(...result.Errors);
40489
+ }
40490
+ this.emit("deleteObjects", null, { keys, count: allResults.Deleted.length });
40491
+ return allResults;
40492
+ }
40493
+   /**
+    * List objects with pagination support
+    */
+   async listObjects({ prefix = "", delimiter = null, maxKeys = 1e3, continuationToken = null }) {
+     const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
+     const response = await this.storage.list({
+       prefix: fullPrefix,
+       delimiter,
+       maxKeys,
+       continuationToken
+     });
+     this.emit("listObjects", null, { prefix, count: response.Contents.length });
+     return response;
+   }
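
listObjects returns the storage backend's ListObjectsV2-style response (Contents, IsTruncated, NextContinuationToken), so the standard token loop works unchanged:

    let token = null;
    do {
      const page = await client.listObjects({ prefix: '', maxKeys: 100, continuationToken: token });
      for (const item of page.Contents) console.log(item.Key);
      token = page.IsTruncated ? page.NextContinuationToken : null;
    } while (token);
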
+   /**
+    * Get a page of keys with offset/limit pagination
+    */
+   async getKeysPage(params = {}) {
+     const { prefix = "", offset = 0, amount = 100 } = params;
+     let keys = [];
+     let truncated = true;
+     let continuationToken;
+     if (offset > 0) {
+       const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
+       const response = await this.storage.list({
+         prefix: fullPrefix,
+         maxKeys: offset + amount
+       });
+       keys = response.Contents.map((x) => x.Key).slice(offset, offset + amount);
+     } else {
+       while (truncated) {
+         const options = {
+           prefix,
+           continuationToken,
+           maxKeys: amount - keys.length
+         };
+         const res = await this.listObjects(options);
+         if (res.Contents) {
+           keys = keys.concat(res.Contents.map((x) => x.Key));
+         }
+         truncated = res.IsTruncated || false;
+         continuationToken = res.NextContinuationToken;
+         if (keys.length >= amount) {
+           keys = keys.slice(0, amount);
+           break;
+         }
+       }
+     }
+     if (this.keyPrefix) {
+       keys = keys.map((x) => x.replace(this.keyPrefix, "")).map((x) => x.startsWith("/") ? x.replace("/", "") : x);
+     }
+     this.emit("getKeysPage", keys, params);
+     return keys;
+   }
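
Note the cost model here: a non-zero offset lists offset + amount keys in a single call and slices in memory, while offset 0 walks continuation tokens until the page fills. Deep offsets over large prefixes are therefore expensive; prefer the token loop above for full scans. Example:

    const pageOne = await client.getKeysPage({ prefix: '', offset: 0, amount: 50 });
    const pageTwo = await client.getKeysPage({ prefix: '', offset: 50, amount: 50 });
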
+   /**
+    * Get all keys with a given prefix
+    */
+   async getAllKeys({ prefix = "" }) {
+     const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
+     const response = await this.storage.list({
+       prefix: fullPrefix,
+       maxKeys: 1e5 // Large number to get all
+     });
+     let keys = response.Contents.map((x) => x.Key);
+     if (this.keyPrefix) {
+       keys = keys.map((x) => x.replace(this.keyPrefix, "")).map((x) => x.startsWith("/") ? x.replace("/", "") : x);
+     }
+     this.emit("getAllKeys", keys, { prefix });
+     return keys;
+   }
+   /**
+    * Count total objects under a prefix
+    */
+   async count({ prefix = "" } = {}) {
+     const keys = await this.getAllKeys({ prefix });
+     const count = keys.length;
+     this.emit("count", count, { prefix });
+     return count;
+   }
+   /**
+    * Delete all objects under a prefix
+    */
+   async deleteAll({ prefix = "" } = {}) {
+     const keys = await this.getAllKeys({ prefix });
+     let totalDeleted = 0;
+     if (keys.length > 0) {
+       const result = await this.deleteObjects(keys);
+       totalDeleted = result.Deleted.length;
+       this.emit("deleteAll", {
+         prefix,
+         batch: totalDeleted,
+         total: totalDeleted
+       });
+     }
+     this.emit("deleteAllComplete", {
+       prefix,
+       totalDeleted
+     });
+     return totalDeleted;
+   }
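
deleteAll composes getAllKeys and deleteObjects and reports progress via the deleteAll/deleteAllComplete events, which makes wiping a partition a one-liner:

    const removed = await client.deleteAll({ prefix: 'resource=sessions' });
    console.log(`${removed} session objects removed`);
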
+   /**
+    * Get continuation token after skipping offset items
+    */
+   async getContinuationTokenAfterOffset({ prefix = "", offset = 1e3 } = {}) {
+     if (offset === 0) return null;
+     const keys = await this.getAllKeys({ prefix });
+     if (offset >= keys.length) {
+       this.emit("getContinuationTokenAfterOffset", null, { prefix, offset });
+       return null;
+     }
+     const token = keys[offset];
+     this.emit("getContinuationTokenAfterOffset", token, { prefix, offset });
+     return token;
+   }
+   /**
+    * Move an object from one key to another
+    */
+   async moveObject({ from, to }) {
+     await this.copyObject({ from, to, metadataDirective: "COPY" });
+     await this.deleteObject(from);
+   }
+   /**
+    * Move all objects from one prefix to another
+    */
+   async moveAllObjects({ prefixFrom, prefixTo }) {
+     const keys = await this.getAllKeys({ prefix: prefixFrom });
+     const results = [];
+     const errors = [];
+     for (const key of keys) {
+       try {
+         const to = key.replace(prefixFrom, prefixTo);
+         await this.moveObject({ from: key, to });
+         results.push(to);
+       } catch (error) {
+         errors.push({
+           message: error.message,
+           raw: error,
+           key
+         });
+       }
+     }
+     this.emit("moveAllObjects", { results, errors }, { prefixFrom, prefixTo });
+     if (errors.length > 0) {
+       const error = new Error("Some objects could not be moved");
+       error.context = {
+         bucket: this.bucket,
+         operation: "moveAllObjects",
+         prefixFrom,
+         prefixTo,
+         totalKeys: keys.length,
+         failedCount: errors.length,
+         successCount: results.length,
+         errors
+       };
+       throw error;
+     }
+     return results;
+   }
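
moveAllObjects is copy-then-delete per key and keeps going after individual failures, throwing only at the end with the full tally attached to error.context. Callers that want partial-success semantics should catch and inspect it:

    try {
      await client.moveAllObjects({ prefixFrom: 'resource=tmp', prefixTo: 'resource=archive' });
    } catch (error) {
      const { successCount, failedCount } = error.context;
      console.warn(`moved ${successCount}, failed ${failedCount}`);
    }
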
+   /**
+    * Create a snapshot of current storage state
+    */
+   snapshot() {
+     return this.storage.snapshot();
+   }
+   /**
+    * Restore from a snapshot
+    */
+   restore(snapshot) {
+     return this.storage.restore(snapshot);
+   }
+   /**
+    * Save current state to disk (persistence)
+    */
+   async saveToDisk(path2) {
+     return await this.storage.saveToDisk(path2);
+   }
+   /**
+    * Load state from disk
+    */
+   async loadFromDisk(path2) {
+     return await this.storage.loadFromDisk(path2);
+   }
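
snapshot/restore give a cheap in-process checkpoint, while saveToDisk/loadFromDisk delegate to the storage backend for persistence across runs. A round-trip sketch (the file path is illustrative, and the on-disk format is whatever the storage backend writes):

    const snap = client.snapshot();
    client.restore(snap); // roll back to the checkpoint
    await client.saveToDisk('/tmp/s3db-memory.json');
    await client.loadFromDisk('/tmp/s3db-memory.json');
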
+   /**
+    * Export to BackupPlugin-compatible format (s3db.json + JSONL files)
+    * Compatible with BackupPlugin for easy migration
+    *
+    * @param {string} outputDir - Output directory path
+    * @param {Object} options - Export options
+    * @param {Array<string>} options.resources - Resource names to export (default: all)
+    * @param {boolean} options.compress - Use gzip compression (default: true)
+    * @param {Object} options.database - Database instance for schema metadata
+    * @returns {Promise<Object>} Export manifest with file paths and stats
+    */
+   async exportBackup(outputDir, options = {}) {
+     const { mkdir, writeFile } = await import('fs/promises');
+     const zlib = await import('zlib');
+     const { promisify } = await import('util');
+     const gzip = promisify(zlib.gzip);
+     await mkdir(outputDir, { recursive: true });
+     const compress = options.compress !== false;
+     const database = options.database;
+     const resourceFilter = options.resources;
+     const allKeys = await this.getAllKeys({});
+     const resourceMap = /* @__PURE__ */ new Map();
+     for (const key of allKeys) {
+       const match = key.match(/^resource=([^/]+)\//);
+       if (match) {
+         const resourceName = match[1];
+         if (!resourceFilter || resourceFilter.includes(resourceName)) {
+           if (!resourceMap.has(resourceName)) {
+             resourceMap.set(resourceName, []);
+           }
+           resourceMap.get(resourceName).push(key);
+         }
+       }
+     }
+     const exportedFiles = {};
+     const resourceStats = {};
+     for (const [resourceName, keys] of resourceMap.entries()) {
+       const records = [];
+       const resource = database && database.resources && database.resources[resourceName];
+       for (const key of keys) {
+         const idMatch = key.match(/\/id=([^/]+)/);
+         const recordId = idMatch ? idMatch[1] : null;
+         let record;
+         if (resource && recordId) {
+           try {
+             record = await resource.get(recordId);
+           } catch (err) {
+             console.warn(`Failed to get record ${recordId} from resource ${resourceName}, using fallback`);
+             record = null;
+           }
+         }
+         if (!record) {
+           const obj = await this.getObject(key);
+           record = { ...obj.Metadata };
+           if (recordId && !record.id) {
+             record.id = recordId;
+           }
+           if (obj.Body) {
+             const chunks = [];
+             for await (const chunk2 of obj.Body) {
+               chunks.push(chunk2);
+             }
+             const bodyBuffer = Buffer.concat(chunks);
+             const bodyStr = bodyBuffer.toString("utf-8");
+             if (bodyStr.startsWith("{") || bodyStr.startsWith("[")) {
+               try {
+                 const bodyData = JSON.parse(bodyStr);
+                 Object.assign(record, bodyData);
+               } catch {
+                 record._body = bodyStr;
+               }
+             } else if (bodyStr) {
+               record._body = bodyStr;
+             }
+           }
+         }
+         records.push(record);
+       }
+       const jsonl = records.map((r) => JSON.stringify(r)).join("\n");
+       const filename = compress ? `${resourceName}.jsonl.gz` : `${resourceName}.jsonl`;
+       const filePath = `${outputDir}/${filename}`;
+       // Compress once and reuse the buffer for both the file write and the size stat
+       let fileSize;
+       if (compress) {
+         const compressed = await gzip(jsonl);
+         await writeFile(filePath, compressed);
+         fileSize = compressed.length;
+       } else {
+         await writeFile(filePath, jsonl, "utf-8");
+         fileSize = Buffer.byteLength(jsonl);
+       }
+       exportedFiles[resourceName] = filePath;
+       resourceStats[resourceName] = {
+         recordCount: records.length,
+         fileSize
+       };
+     }
+     const s3dbMetadata = {
+       version: "1.0",
+       timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+       bucket: this.bucket,
+       keyPrefix: this.keyPrefix || "",
+       compressed: compress,
+       resources: {},
+       totalRecords: 0,
+       totalSize: 0
+     };
+     if (database && database.resources) {
+       for (const [resourceName, resource] of Object.entries(database.resources)) {
+         if (resourceMap.has(resourceName)) {
+           s3dbMetadata.resources[resourceName] = {
+             schema: resource.schema ? {
+               attributes: resource.schema.attributes,
+               partitions: resource.schema.partitions,
+               behavior: resource.schema.behavior,
+               timestamps: resource.schema.timestamps
+             } : null,
+             stats: resourceStats[resourceName]
+           };
+         }
+       }
+     } else {
+       for (const [resourceName, stats] of Object.entries(resourceStats)) {
+         s3dbMetadata.resources[resourceName] = { stats };
+       }
+     }
+     for (const stats of Object.values(resourceStats)) {
+       s3dbMetadata.totalRecords += stats.recordCount;
+       s3dbMetadata.totalSize += stats.fileSize;
+     }
+     const s3dbPath = `${outputDir}/s3db.json`;
+     await writeFile(s3dbPath, JSON.stringify(s3dbMetadata, null, 2), "utf-8");
+     return {
+       manifest: s3dbPath,
+       files: exportedFiles,
+       stats: s3dbMetadata,
+       resourceCount: resourceMap.size,
+       totalRecords: s3dbMetadata.totalRecords,
+       totalSize: s3dbMetadata.totalSize
+     };
+   }
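
exportBackup walks every resource=<name>/... key, prefers hydrated records from a live database when one is supplied, and otherwise falls back to raw metadata plus a best-effort parse of the body. The returned manifest points at s3db.json and one JSONL(.gz) file per resource; a usage sketch, reusing the hypothetical client:

    const manifest = await client.exportBackup('/tmp/backup', {
      database,              // optional: embeds resource schemas in s3db.json
      resources: ['users'],  // optional filter; defaults to every resource found
      compress: true
    });
    console.log(manifest.totalRecords, manifest.files.users);
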
+   /**
+    * Import from BackupPlugin-compatible format
+    * Loads data from s3db.json + JSONL files created by BackupPlugin or exportBackup()
+    *
+    * @param {string} backupDir - Backup directory path containing s3db.json
+    * @param {Object} options - Import options
+    * @param {Array<string>} options.resources - Resource names to import (default: all)
+    * @param {boolean} options.clear - Clear existing data first (default: false)
+    * @param {Object} options.database - Database instance to recreate schemas
+    * @returns {Promise<Object>} Import stats
+    */
+   async importBackup(backupDir, options = {}) {
+     const { readFile, readdir } = await import('fs/promises');
+     const zlib = await import('zlib');
+     const { promisify } = await import('util');
+     const gunzip = promisify(zlib.gunzip);
+     if (options.clear) {
+       this.clear();
+     }
+     const s3dbPath = `${backupDir}/s3db.json`;
+     const s3dbContent = await readFile(s3dbPath, "utf-8");
+     const metadata = JSON.parse(s3dbContent);
+     const database = options.database;
+     const resourceFilter = options.resources;
+     const importStats = {
+       resourcesImported: 0,
+       recordsImported: 0,
+       errors: []
+     };
+     if (database && metadata.resources) {
+       for (const [resourceName, resourceMeta] of Object.entries(metadata.resources)) {
+         if (resourceFilter && !resourceFilter.includes(resourceName)) continue;
+         if (resourceMeta.schema) {
+           try {
+             await database.createResource({
+               name: resourceName,
+               ...resourceMeta.schema
+             });
+           } catch (error) {
+             // Resource may already exist; keep importing its records
+           }
+         }
+       }
+     }
+     const files = await readdir(backupDir);
+     for (const file of files) {
+       if (!file.endsWith(".jsonl") && !file.endsWith(".jsonl.gz")) continue;
+       const resourceName = file.replace(/\.jsonl(\.gz)?$/, "");
+       if (resourceFilter && !resourceFilter.includes(resourceName)) continue;
+       const filePath = `${backupDir}/${file}`;
+       let content = await readFile(filePath);
+       if (file.endsWith(".gz")) {
+         content = await gunzip(content);
+       }
+       const jsonl = content.toString("utf-8");
+       const lines = jsonl.split("\n").filter((line) => line.trim());
+       for (const line of lines) {
+         try {
+           const record = JSON.parse(line);
+           const id = record.id || record._id || `imported_${Date.now()}_${Math.random()}`;
+           const { _body, id: _, _id: __, ...metadata2 } = record;
+           await this.putObject({
+             key: `resource=${resourceName}/id=${id}`,
+             metadata: metadata2,
+             body: _body ? Buffer.from(_body) : void 0
+           });
+           importStats.recordsImported++;
+         } catch (error) {
+           importStats.errors.push({
+             resource: resourceName,
+             error: error.message,
+             line
+           });
+         }
+       }
+       importStats.resourcesImported++;
+     }
+     return importStats;
+   }
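
importBackup is the inverse: it reads s3db.json, optionally recreates resource schemas on the supplied database, then replays each JSONL line as a putObject under resource=<name>/id=<id>. Records without an id get a generated one, so an export/import round trip is only id-stable when the source records carried ids:

    const stats = await client.importBackup('/tmp/backup', { database, clear: true });
    console.log(`${stats.recordsImported} records across ${stats.resourcesImported} resources`);
    if (stats.errors.length) console.warn(stats.errors[0]);
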
+   /**
+    * Get storage statistics
+    */
+   getStats() {
+     return this.storage.getStats();
+   }
+   /**
+    * Clear all objects
+    */
+   clear() {
+     this.storage.clear();
+   }
+ }
+ 
  function mapFieldTypeToTypeScript(fieldType) {
    const baseType = fieldType.split("|")[0].trim();
    const typeMap = {
@@ -38802,5 +41807,5 @@ var metrics = /*#__PURE__*/Object.freeze({
    silhouetteScore: silhouetteScore
  });
 
- export { AVAILABLE_BEHAVIORS, AnalyticsNotEnabledError, ApiPlugin, AuditPlugin, AuthenticationError, BACKUP_DRIVERS, BackupPlugin, BaseBackupDriver, BaseError, BaseReplicator, BehaviorError, BigqueryReplicator, CONSUMER_DRIVERS, Cache, CachePlugin, Client, ConnectionString, ConnectionStringError, CostsPlugin, CryptoError, DEFAULT_BEHAVIOR, Database, DatabaseError, DynamoDBReplicator, EncryptionError, ErrorMap, EventualConsistencyPlugin, Factory, FilesystemBackupDriver, FilesystemCache, FullTextPlugin, GeoPlugin, InvalidResourceItem, MemoryCache, MetadataLimitError, MetricsPlugin, MissingMetadata, MongoDBReplicator, MultiBackupDriver, MySQLReplicator, NoSuchBucket, NoSuchKey, NotFound, PartitionAwareFilesystemCache, PartitionDriverError, PartitionError, PermissionError, PlanetScaleReplicator, Plugin, PluginError, PluginObject, PluginStorageError, PostgresReplicator, QueueConsumerPlugin, REPLICATOR_DRIVERS, RabbitMqConsumer, RelationPlugin, ReplicatorPlugin, Resource, ResourceError, ResourceIdsPageReader, ResourceIdsReader, ResourceNotFound, ResourceReader, ResourceWriter, S3BackupDriver, S3Cache, S3QueuePlugin, Database as S3db, S3dbError, S3dbReplicator, SchedulerPlugin, Schema, SchemaError, Seeder, SqsConsumer, SqsReplicator, StateMachinePlugin, StreamError, TTLPlugin, TfStatePlugin, TursoReplicator, UnknownError, ValidationError, Validator, VectorPlugin, WebhookReplicator, behaviors, calculateAttributeNamesSize, calculateAttributeSizes, calculateEffectiveLimit, calculateSystemOverhead, calculateTotalSize, calculateUTF8Bytes, clearUTF8Memory, createBackupDriver, createConsumer, createReplicator, decode, decodeDecimal, decodeFixedPoint, decodeFixedPointBatch, decrypt, S3db as default, encode, encodeDecimal, encodeFixedPoint, encodeFixedPointBatch, encrypt, generateTypes, getBehavior, getSizeBreakdown, idGenerator, mapAwsError, md5, passwordGenerator, printTypes, sha256, streamToString, transformValue, tryFn, tryFnSync, validateBackupConfig, validateReplicatorConfig };
+ export { AVAILABLE_BEHAVIORS, AnalyticsNotEnabledError, ApiPlugin, AuditPlugin, AuthenticationError, BACKUP_DRIVERS, BackupPlugin, BaseBackupDriver, BaseError, BaseReplicator, BehaviorError, BigqueryReplicator, CONSUMER_DRIVERS, Cache, CachePlugin, S3Client as Client, ConnectionString, ConnectionStringError, CostsPlugin, CryptoError, DEFAULT_BEHAVIOR, Database, DatabaseError, DynamoDBReplicator, EncryptionError, ErrorMap, EventualConsistencyPlugin, Factory, FilesystemBackupDriver, FilesystemCache, FullTextPlugin, GeoPlugin, InvalidResourceItem, MLPlugin, MemoryCache, MemoryClient, MemoryStorage, MetadataLimitError, MetricsPlugin, MissingMetadata, MongoDBReplicator, MultiBackupDriver, MySQLReplicator, NoSuchBucket, NoSuchKey, NotFound, PartitionAwareFilesystemCache, PartitionDriverError, PartitionError, PermissionError, PlanetScaleReplicator, Plugin, PluginError, PluginObject, PluginStorageError, PostgresReplicator, QueueConsumerPlugin, REPLICATOR_DRIVERS, RabbitMqConsumer, RelationPlugin, ReplicatorPlugin, Resource, ResourceError, ResourceIdsPageReader, ResourceIdsReader, ResourceNotFound, ResourceReader, ResourceWriter, S3BackupDriver, S3Cache, S3Client, S3QueuePlugin, Database as S3db, S3dbError, S3dbReplicator, SchedulerPlugin, Schema, SchemaError, Seeder, SqsConsumer, SqsReplicator, StateMachinePlugin, StreamError, TTLPlugin, TfStatePlugin, TursoReplicator, UnknownError, ValidationError, Validator, VectorPlugin, WebhookReplicator, behaviors, calculateAttributeNamesSize, calculateAttributeSizes, calculateEffectiveLimit, calculateSystemOverhead, calculateTotalSize, calculateUTF8Bytes, clearUTF8Memory, createBackupDriver, createConsumer, createReplicator, decode, decodeDecimal, decodeFixedPoint, decodeFixedPointBatch, decrypt, S3db as default, encode, encodeDecimal, encodeFixedPoint, encodeFixedPointBatch, encrypt, generateTypes, getBehavior, getSizeBreakdown, idGenerator, mapAwsError, md5, passwordGenerator, printTypes, sha256, streamToString, transformValue, tryFn, tryFnSync, validateBackupConfig, validateReplicatorConfig };
  //# sourceMappingURL=s3db.es.js.map
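
A compatibility note on the export-list change above: Client is now an alias of S3Client rather than a separate binding, and MemoryClient, MemoryStorage, and MLPlugin are new in 13.0.0. Existing imports keep working:

    // Both names resolve to the same class in 13.0.0
    import { Client, S3Client } from 's3db.js';
    console.log(Client === S3Client); // true
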