s3db.js 12.2.4 → 12.4.0

package/dist/s3db.es.js CHANGED
@@ -5,7 +5,7 @@ import { mkdir, copyFile, unlink, stat, access, readdir, writeFile, readFile, rm
5
5
  import fs, { createReadStream, createWriteStream, realpathSync as realpathSync$1, readlinkSync, readdirSync, readdir as readdir$2, lstatSync, existsSync } from 'fs';
6
6
  import { pipeline } from 'stream/promises';
7
7
  import path$1, { join, dirname } from 'path';
8
- import { Transform, Writable } from 'stream';
8
+ import { Transform, Writable, Readable } from 'stream';
9
9
  import zlib from 'node:zlib';
10
10
  import os from 'os';
11
11
  import jsonStableStringify from 'json-stable-stringify';
@@ -15,7 +15,7 @@ import { chunk, merge, isString, isEmpty, invert, uniq, cloneDeep, get, set, isO
15
15
  import { Agent } from 'http';
16
16
  import { Agent as Agent$1 } from 'https';
17
17
  import { NodeHttpHandler } from '@smithy/node-http-handler';
18
- import { S3Client, PutObjectCommand, GetObjectCommand, HeadObjectCommand, CopyObjectCommand, DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command } from '@aws-sdk/client-s3';
18
+ import { S3Client as S3Client$1, PutObjectCommand, GetObjectCommand, HeadObjectCommand, CopyObjectCommand, DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command } from '@aws-sdk/client-s3';
19
19
  import { flatten, unflatten } from 'flat';
20
20
  import FastestValidator from 'fastest-validator';
21
21
  import { ReadableStream } from 'node:stream/web';
@@ -13424,7 +13424,7 @@ function generateMySQLAlterTable(tableName, attributes, existingSchema) {
13424
13424
  }
13425
13425
  return alterStatements;
13426
13426
  }
13427
- function generateBigQuerySchema(attributes) {
13427
+ function generateBigQuerySchema(attributes, mutability = "append-only") {
13428
13428
  const fields = [];
13429
13429
  fields.push({
13430
13430
  name: "id",
@@ -13448,6 +13448,14 @@ function generateBigQuerySchema(attributes) {
13448
13448
  if (!attributes.updatedAt) {
13449
13449
  fields.push({ name: "updated_at", type: "TIMESTAMP", mode: "NULLABLE" });
13450
13450
  }
13451
+ if (mutability === "append-only" || mutability === "immutable") {
13452
+ fields.push({ name: "_operation_type", type: "STRING", mode: "NULLABLE" });
13453
+ fields.push({ name: "_operation_timestamp", type: "TIMESTAMP", mode: "NULLABLE" });
13454
+ }
13455
+ if (mutability === "immutable") {
13456
+ fields.push({ name: "_is_deleted", type: "BOOL", mode: "NULLABLE" });
13457
+ fields.push({ name: "_version", type: "INT64", mode: "NULLABLE" });
13458
+ }
13451
13459
  return fields;
13452
13460
  }
13453
13461
  async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId) {
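For reference, the effect of the new mutability parameter on the generated BigQuery schema can be sketched as follows (generateBigQuerySchema is internal to this bundle, so the call below is illustrative only and the attribute map is assumed):

// Illustrative sketch, not public API: generateBigQuerySchema lives inside the bundle.
const schema = generateBigQuerySchema({ name: 'string|required' }, 'immutable');
// Besides id, the user fields, and created_at/updated_at, the field list now ends with:
//   { name: '_operation_type',      type: 'STRING',    mode: 'NULLABLE' }   // append-only + immutable
//   { name: '_operation_timestamp', type: 'TIMESTAMP', mode: 'NULLABLE' }   // append-only + immutable
//   { name: '_is_deleted',          type: 'BOOL',      mode: 'NULLABLE' }   // immutable only
//   { name: '_version',             type: 'INT64',     mode: 'NULLABLE' }   // immutable only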
@@ -13469,7 +13477,7 @@ async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId) {
13469
13477
  }
13470
13478
  return schema;
13471
13479
  }
13472
- function generateBigQuerySchemaUpdate(attributes, existingSchema) {
13480
+ function generateBigQuerySchemaUpdate(attributes, existingSchema, mutability = "append-only") {
13473
13481
  const newFields = [];
13474
13482
  for (const [fieldName, fieldConfig] of Object.entries(attributes)) {
13475
13483
  if (fieldName === "id") continue;
@@ -13483,6 +13491,22 @@ function generateBigQuerySchemaUpdate(attributes, existingSchema) {
13483
13491
  mode: required ? "REQUIRED" : "NULLABLE"
13484
13492
  });
13485
13493
  }
13494
+ if (mutability === "append-only" || mutability === "immutable") {
13495
+ if (!existingSchema["_operation_type"]) {
13496
+ newFields.push({ name: "_operation_type", type: "STRING", mode: "NULLABLE" });
13497
+ }
13498
+ if (!existingSchema["_operation_timestamp"]) {
13499
+ newFields.push({ name: "_operation_timestamp", type: "TIMESTAMP", mode: "NULLABLE" });
13500
+ }
13501
+ }
13502
+ if (mutability === "immutable") {
13503
+ if (!existingSchema["_is_deleted"]) {
13504
+ newFields.push({ name: "_is_deleted", type: "BOOL", mode: "NULLABLE" });
13505
+ }
13506
+ if (!existingSchema["_version"]) {
13507
+ newFields.push({ name: "_version", type: "INT64", mode: "NULLABLE" });
13508
+ }
13509
+ }
13486
13510
  return newFields;
13487
13511
  }
13488
13512
  function s3dbTypeToSQLite(fieldType, fieldOptions = {}) {
@@ -13565,6 +13589,8 @@ class BigqueryReplicator extends BaseReplicator {
13565
13589
  this.credentials = config.credentials;
13566
13590
  this.location = config.location || "US";
13567
13591
  this.logTable = config.logTable;
13592
+ this.mutability = config.mutability || "append-only";
13593
+ this._validateMutability(this.mutability);
13568
13594
  this.schemaSync = {
13569
13595
  enabled: config.schemaSync?.enabled || false,
13570
13596
  strategy: config.schemaSync?.strategy || "alter",
@@ -13573,6 +13599,13 @@ class BigqueryReplicator extends BaseReplicator {
13573
13599
  autoCreateColumns: config.schemaSync?.autoCreateColumns !== false
13574
13600
  };
13575
13601
  this.resources = this.parseResourcesConfig(resources);
13602
+ this.versionCounters = /* @__PURE__ */ new Map();
13603
+ }
13604
+ _validateMutability(mutability) {
13605
+ const validModes = ["append-only", "mutable", "immutable"];
13606
+ if (!validModes.includes(mutability)) {
13607
+ throw new Error(`Invalid mutability mode: ${mutability}. Must be one of: ${validModes.join(", ")}`);
13608
+ }
13576
13609
  }
13577
13610
  parseResourcesConfig(resources) {
13578
13611
  const parsed = {};
@@ -13581,24 +13614,31 @@ class BigqueryReplicator extends BaseReplicator {
13581
13614
  parsed[resourceName] = [{
13582
13615
  table: config,
13583
13616
  actions: ["insert"],
13584
- transform: null
13617
+ transform: null,
13618
+ mutability: this.mutability
13585
13619
  }];
13586
13620
  } else if (Array.isArray(config)) {
13587
13621
  parsed[resourceName] = config.map((item) => {
13588
13622
  if (typeof item === "string") {
13589
- return { table: item, actions: ["insert"], transform: null };
13623
+ return { table: item, actions: ["insert"], transform: null, mutability: this.mutability };
13590
13624
  }
13625
+ const itemMutability = item.mutability || this.mutability;
13626
+ this._validateMutability(itemMutability);
13591
13627
  return {
13592
13628
  table: item.table,
13593
13629
  actions: item.actions || ["insert"],
13594
- transform: item.transform || null
13630
+ transform: item.transform || null,
13631
+ mutability: itemMutability
13595
13632
  };
13596
13633
  });
13597
13634
  } else if (typeof config === "object") {
13635
+ const configMutability = config.mutability || this.mutability;
13636
+ this._validateMutability(configMutability);
13598
13637
  parsed[resourceName] = [{
13599
13638
  table: config.table,
13600
13639
  actions: config.actions || ["insert"],
13601
- transform: config.transform || null
13640
+ transform: config.transform || null,
13641
+ mutability: configMutability
13602
13642
  }];
13603
13643
  }
13604
13644
  }
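A minimal sketch of the resource shapes parseResourcesConfig now accepts, with mutability resolved per table (resource and table names below are assumed):

// Assumed example; only the accepted shapes are taken from parseResourcesConfig above.
const resources = {
  users: 'users_table',                                  // string: insert-only, inherits this.mutability
  orders: [
    'orders_raw',                                        // string entry inside an array
    { table: 'orders_log', actions: ['insert', 'update', 'delete'], mutability: 'immutable' }
  ],
  invoices: { table: 'invoices', actions: ['insert', 'update'], mutability: 'mutable' }
};
// Anything outside 'append-only' | 'mutable' | 'immutable' throws via _validateMutability().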
@@ -13670,11 +13710,16 @@ class BigqueryReplicator extends BaseReplicator {
13670
13710
  }
13671
13711
  continue;
13672
13712
  }
13673
- const attributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
13713
+ const allAttributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
13714
+ const pluginAttrNames = resource.schema?._pluginAttributes ? Object.values(resource.schema._pluginAttributes).flat() : [];
13715
+ const attributes = Object.fromEntries(
13716
+ Object.entries(allAttributes).filter(([name]) => !pluginAttrNames.includes(name))
13717
+ );
13674
13718
  for (const tableConfig of tableConfigs) {
13675
13719
  const tableName = tableConfig.table;
13720
+ const mutability = tableConfig.mutability;
13676
13721
  const [okSync, errSync] = await tryFn(async () => {
13677
- await this.syncTableSchema(tableName, attributes);
13722
+ await this.syncTableSchema(tableName, attributes, mutability);
13678
13723
  });
13679
13724
  if (!okSync) {
13680
13725
  const message = `Schema sync failed for table ${tableName}: ${errSync.message}`;
@@ -13694,7 +13739,7 @@ class BigqueryReplicator extends BaseReplicator {
13694
13739
  /**
13695
13740
  * Sync a single table schema in BigQuery
13696
13741
  */
13697
- async syncTableSchema(tableName, attributes) {
13742
+ async syncTableSchema(tableName, attributes, mutability = "append-only") {
13698
13743
  const dataset = this.bigqueryClient.dataset(this.datasetId);
13699
13744
  const table = dataset.table(tableName);
13700
13745
  const [exists] = await table.exists();
@@ -13705,15 +13750,16 @@ class BigqueryReplicator extends BaseReplicator {
13705
13750
  if (this.schemaSync.strategy === "validate-only") {
13706
13751
  throw new Error(`Table ${tableName} does not exist (validate-only mode)`);
13707
13752
  }
13708
- const schema = generateBigQuerySchema(attributes);
13753
+ const schema = generateBigQuerySchema(attributes, mutability);
13709
13754
  if (this.config.verbose) {
13710
- console.log(`[BigQueryReplicator] Creating table ${tableName} with schema:`, schema);
13755
+ console.log(`[BigQueryReplicator] Creating table ${tableName} with schema (mutability: ${mutability}):`, schema);
13711
13756
  }
13712
13757
  await dataset.createTable(tableName, { schema });
13713
13758
  this.emit("table_created", {
13714
13759
  replicator: this.name,
13715
13760
  tableName,
13716
- attributes: Object.keys(attributes)
13761
+ attributes: Object.keys(attributes),
13762
+ mutability
13717
13763
  });
13718
13764
  return;
13719
13765
  }
@@ -13722,18 +13768,19 @@ class BigqueryReplicator extends BaseReplicator {
13722
13768
  console.warn(`[BigQueryReplicator] Dropping and recreating table ${tableName}`);
13723
13769
  }
13724
13770
  await table.delete();
13725
- const schema = generateBigQuerySchema(attributes);
13771
+ const schema = generateBigQuerySchema(attributes, mutability);
13726
13772
  await dataset.createTable(tableName, { schema });
13727
13773
  this.emit("table_recreated", {
13728
13774
  replicator: this.name,
13729
13775
  tableName,
13730
- attributes: Object.keys(attributes)
13776
+ attributes: Object.keys(attributes),
13777
+ mutability
13731
13778
  });
13732
13779
  return;
13733
13780
  }
13734
13781
  if (this.schemaSync.strategy === "alter" && this.schemaSync.autoCreateColumns) {
13735
13782
  const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
13736
- const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
13783
+ const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
13737
13784
  if (newFields.length > 0) {
13738
13785
  if (this.config.verbose) {
13739
13786
  console.log(`[BigQueryReplicator] Adding ${newFields.length} field(s) to table ${tableName}:`, newFields);
@@ -13751,7 +13798,7 @@ class BigqueryReplicator extends BaseReplicator {
13751
13798
  }
13752
13799
  if (this.schemaSync.strategy === "validate-only") {
13753
13800
  const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
13754
- const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
13801
+ const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
13755
13802
  if (newFields.length > 0) {
13756
13803
  throw new Error(`Table ${tableName} schema mismatch. Missing columns: ${newFields.length}`);
13757
13804
  }
@@ -13770,7 +13817,8 @@ class BigqueryReplicator extends BaseReplicator {
13770
13817
  if (!this.resources[resourceName]) return [];
13771
13818
  return this.resources[resourceName].filter((tableConfig) => tableConfig.actions.includes(operation)).map((tableConfig) => ({
13772
13819
  table: tableConfig.table,
13773
- transform: tableConfig.transform
13820
+ transform: tableConfig.transform,
13821
+ mutability: tableConfig.mutability
13774
13822
  }));
13775
13823
  }
13776
13824
  applyTransform(data, transformFn) {
@@ -13789,6 +13837,32 @@ class BigqueryReplicator extends BaseReplicator {
13789
13837
  });
13790
13838
  return cleanData;
13791
13839
  }
13840
+ /**
13841
+ * Add tracking fields for append-only and immutable modes
13842
+ * @private
13843
+ */
13844
+ _addTrackingFields(data, operation, mutability, id) {
13845
+ const tracked = { ...data };
13846
+ if (mutability === "append-only" || mutability === "immutable") {
13847
+ tracked._operation_type = operation;
13848
+ tracked._operation_timestamp = (/* @__PURE__ */ new Date()).toISOString();
13849
+ }
13850
+ if (mutability === "immutable") {
13851
+ tracked._is_deleted = operation === "delete";
13852
+ tracked._version = this._getNextVersion(id);
13853
+ }
13854
+ return tracked;
13855
+ }
13856
+ /**
13857
+ * Get next version number for immutable mode
13858
+ * @private
13859
+ */
13860
+ _getNextVersion(id) {
13861
+ const current = this.versionCounters.get(id) || 0;
13862
+ const next = current + 1;
13863
+ this.versionCounters.set(id, next);
13864
+ return next;
13865
+ }
13792
13866
  async replicate(resourceName, operation, data, id, beforeData = null) {
13793
13867
  if (!this.enabled || !this.shouldReplicateResource(resourceName)) {
13794
13868
  return { skipped: true, reason: "resource_not_included" };
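With append-only or immutable tables, updates and deletes are converted into inserts carrying the tracking fields produced by _addTrackingFields; the row below is an assumed illustration of what gets streamed for a delete in immutable mode:

// Invented values for illustration; see _addTrackingFields and _getNextVersion above.
const row = {
  id: 'user-123',                                   // plus the transformed record data
  _operation_type: 'delete',
  _operation_timestamp: '2025-01-01T12:00:00.000Z',
  _is_deleted: true,
  _version: 3                                       // per-id counter kept in versionCounters
};

Since versionCounters is an in-memory Map on the replicator instance, _version numbering restarts when the process restarts.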
@@ -13807,9 +13881,14 @@ class BigqueryReplicator extends BaseReplicator {
13807
13881
  for (const tableConfig of tableConfigs) {
13808
13882
  const [okTable, errTable] = await tryFn(async () => {
13809
13883
  const table = dataset.table(tableConfig.table);
13884
+ const mutability = tableConfig.mutability;
13810
13885
  let job;
13811
- if (operation === "insert") {
13812
- const transformedData = this.applyTransform(data, tableConfig.transform);
13886
+ const shouldConvertToInsert = (mutability === "append-only" || mutability === "immutable") && (operation === "update" || operation === "delete");
13887
+ if (operation === "insert" || shouldConvertToInsert) {
13888
+ let transformedData = this.applyTransform(data, tableConfig.transform);
13889
+ if (shouldConvertToInsert) {
13890
+ transformedData = this._addTrackingFields(transformedData, operation, mutability, id);
13891
+ }
13813
13892
  try {
13814
13893
  job = await table.insert([transformedData]);
13815
13894
  } catch (error) {
@@ -13821,7 +13900,7 @@ class BigqueryReplicator extends BaseReplicator {
13821
13900
  }
13822
13901
  throw error;
13823
13902
  }
13824
- } else if (operation === "update") {
13903
+ } else if (operation === "update" && mutability === "mutable") {
13825
13904
  const transformedData = this.applyTransform(data, tableConfig.transform);
13826
13905
  const keys = Object.keys(transformedData).filter((k) => k !== "id");
13827
13906
  const setClause = keys.map((k) => `${k} = @${k}`).join(", ");
@@ -13863,7 +13942,7 @@ class BigqueryReplicator extends BaseReplicator {
13863
13942
  }
13864
13943
  }
13865
13944
  if (!job) throw lastError;
13866
- } else if (operation === "delete") {
13945
+ } else if (operation === "delete" && mutability === "mutable") {
13867
13946
  const query = `DELETE FROM \`${this.projectId}.${this.datasetId}.${tableConfig.table}\` WHERE id = @id`;
13868
13947
  try {
13869
13948
  const [deleteJob] = await this.bigqueryClient.createQueryJob({
@@ -13999,7 +14078,8 @@ class BigqueryReplicator extends BaseReplicator {
13999
14078
  datasetId: this.datasetId,
14000
14079
  resources: this.resources,
14001
14080
  logTable: this.logTable,
14002
- schemaSync: this.schemaSync
14081
+ schemaSync: this.schemaSync,
14082
+ mutability: this.mutability
14003
14083
  };
14004
14084
  }
14005
14085
  }
@@ -14688,7 +14768,11 @@ class MySQLReplicator extends BaseReplicator {
14688
14768
  }
14689
14769
  continue;
14690
14770
  }
14691
- const attributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
14771
+ const allAttributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
14772
+ const pluginAttrNames = resource.schema?._pluginAttributes ? Object.values(resource.schema._pluginAttributes).flat() : [];
14773
+ const attributes = Object.fromEntries(
14774
+ Object.entries(allAttributes).filter(([name]) => !pluginAttrNames.includes(name))
14775
+ );
14692
14776
  for (const tableConfig of tableConfigs) {
14693
14777
  const tableName = tableConfig.table;
14694
14778
  const [okSync, errSync] = await tryFn(async () => {
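The same filter is applied in the MySQL, PlanetScale, Postgres and Turso replicators; a runnable sketch of what it does (attribute names assumed):

// Sketch with assumed names: plugin-managed attributes are dropped before the SQL
// schema is synced, so replicated tables only carry user-defined fields.
const allAttributes = { name: 'string', email: 'string', _hasEmbedding: 'boolean|optional' };
const pluginAttrNames = ['_hasEmbedding'];                  // from resource.schema._pluginAttributes
const attributes = Object.fromEntries(
  Object.entries(allAttributes).filter(([name]) => !pluginAttrNames.includes(name))
);
// -> { name: 'string', email: 'string' }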
@@ -15067,7 +15151,11 @@ class PlanetScaleReplicator extends BaseReplicator {
15067
15151
  }
15068
15152
  continue;
15069
15153
  }
15070
- const attributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
15154
+ const allAttributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
15155
+ const pluginAttrNames = resource.schema?._pluginAttributes ? Object.values(resource.schema._pluginAttributes).flat() : [];
15156
+ const attributes = Object.fromEntries(
15157
+ Object.entries(allAttributes).filter(([name]) => !pluginAttrNames.includes(name))
15158
+ );
15071
15159
  for (const tableConfig of tableConfigs) {
15072
15160
  const tableName = tableConfig.table;
15073
15161
  const [okSync, errSync] = await tryFn(async () => {
@@ -15388,7 +15476,11 @@ class PostgresReplicator extends BaseReplicator {
15388
15476
  }
15389
15477
  continue;
15390
15478
  }
15391
- const attributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
15479
+ const allAttributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
15480
+ const pluginAttrNames = resource.schema?._pluginAttributes ? Object.values(resource.schema._pluginAttributes).flat() : [];
15481
+ const attributes = Object.fromEntries(
15482
+ Object.entries(allAttributes).filter(([name]) => !pluginAttrNames.includes(name))
15483
+ );
15392
15484
  for (const tableConfig of tableConfigs) {
15393
15485
  const tableName = tableConfig.table;
15394
15486
  const [okSync, errSync] = await tryFn(async () => {
@@ -15708,11 +15800,11 @@ class ConnectionString {
15708
15800
  }
15709
15801
  }
15710
15802
 
15711
- class Client extends EventEmitter {
15803
+ class S3Client extends EventEmitter {
15712
15804
  constructor({
15713
15805
  verbose = false,
15714
15806
  id = null,
15715
- AwsS3Client,
15807
+ AwsS3Client: AwsS3Client2,
15716
15808
  connectionString,
15717
15809
  parallelism = 10,
15718
15810
  httpClientOptions = {}
@@ -15735,7 +15827,7 @@ class Client extends EventEmitter {
15735
15827
  // 60 second timeout
15736
15828
  ...httpClientOptions
15737
15829
  };
15738
- this.client = AwsS3Client || this.createClient();
15830
+ this.client = AwsS3Client2 || this.createClient();
15739
15831
  }
15740
15832
  createClient() {
15741
15833
  const httpAgent = new Agent(this.httpClientOptions);
@@ -15756,7 +15848,7 @@ class Client extends EventEmitter {
15756
15848
  secretAccessKey: this.config.secretAccessKey
15757
15849
  };
15758
15850
  }
15759
- const client = new S3Client(options);
15851
+ const client = new S3Client$1(options);
15760
15852
  client.middlewareStack.add(
15761
15853
  (next, context) => async (args) => {
15762
15854
  if (context.commandName === "DeleteObjectsCommand") {
@@ -16510,6 +16602,32 @@ function generateBase62Mapping(keys) {
16510
16602
  });
16511
16603
  return { mapping, reversedMapping };
16512
16604
  }
16605
+ function generatePluginAttributeHash(pluginName, attributeName) {
16606
+ const input = `${pluginName}:${attributeName}`;
16607
+ const hash = createHash("sha256").update(input).digest();
16608
+ const num = hash.readUInt32BE(0);
16609
+ const base62Hash = encode(num);
16610
+ const paddedHash = base62Hash.padStart(3, "0").substring(0, 3);
16611
+ return "p" + paddedHash.toLowerCase();
16612
+ }
16613
+ function generatePluginMapping(attributes) {
16614
+ const mapping = {};
16615
+ const reversedMapping = {};
16616
+ const usedHashes = /* @__PURE__ */ new Set();
16617
+ for (const { key, pluginName } of attributes) {
16618
+ let hash = generatePluginAttributeHash(pluginName, key);
16619
+ let counter = 1;
16620
+ let finalHash = hash;
16621
+ while (usedHashes.has(finalHash)) {
16622
+ finalHash = `${hash}${counter}`;
16623
+ counter++;
16624
+ }
16625
+ usedHashes.add(finalHash);
16626
+ mapping[key] = finalHash;
16627
+ reversedMapping[finalHash] = key;
16628
+ }
16629
+ return { mapping, reversedMapping };
16630
+ }
16513
16631
  const SchemaActions = {
16514
16632
  trim: (value) => value == null ? value : value.trim(),
16515
16633
  encrypt: async (value, { passphrase }) => {
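The plugin namespace keys are derived from a SHA-256 of `pluginName:attributeName`, base62-encoded, truncated to three characters and prefixed with `p`; a rough sketch (the resulting key shown is made up, and `encode` is the bundle's internal base62 helper):

// Illustration only; generatePluginAttributeHash is internal to the bundle.
const key = generatePluginAttributeHash('VectorPlugin', '_hasEmbedding');
// -> 'p' followed by 3 lowercase base62 chars, e.g. 'p4k9' (value here is invented)
// generatePluginMapping() de-duplicates collisions by appending a counter ('p4k91', 'p4k92', ...)
// and returns both directions:
//   { mapping: { _hasEmbedding: 'p4k9' }, reversedMapping: { p4k9: '_hasEmbedding' } }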
@@ -16900,11 +17018,14 @@ class Schema {
16900
17018
  constructor(args) {
16901
17019
  const {
16902
17020
  map,
17021
+ pluginMap,
16903
17022
  name,
16904
17023
  attributes,
16905
17024
  passphrase,
16906
17025
  version = 1,
16907
- options = {}
17026
+ options = {},
17027
+ _pluginAttributeMetadata,
17028
+ _pluginAttributes
16908
17029
  } = args;
16909
17030
  this.name = name;
16910
17031
  this.version = version;
@@ -16912,6 +17033,8 @@ class Schema {
16912
17033
  this.passphrase = passphrase ?? "secret";
16913
17034
  this.options = merge({}, this.defaultOptions(), options);
16914
17035
  this.allNestedObjectsOptional = this.options.allNestedObjectsOptional ?? false;
17036
+ this._pluginAttributeMetadata = _pluginAttributeMetadata || {};
17037
+ this._pluginAttributes = _pluginAttributes || {};
16915
17038
  const processedAttributes = this.preprocessAttributesForValidation(this.attributes);
16916
17039
  this.validator = new ValidatorManager({ autoEncrypt: false }).compile(merge(
16917
17040
  { $$async: true, $$strict: false },
@@ -16926,9 +17049,43 @@ class Schema {
16926
17049
  const leafKeys = Object.keys(flatAttrs).filter((k) => !k.includes("$$"));
16927
17050
  const objectKeys = this.extractObjectKeys(this.attributes);
16928
17051
  const allKeys = [.../* @__PURE__ */ new Set([...leafKeys, ...objectKeys])];
16929
- const { mapping, reversedMapping } = generateBase62Mapping(allKeys);
17052
+ const userKeys = [];
17053
+ const pluginAttributes = [];
17054
+ for (const key of allKeys) {
17055
+ const attrDef = this.getAttributeDefinition(key);
17056
+ if (typeof attrDef === "object" && attrDef !== null && attrDef.__plugin__) {
17057
+ pluginAttributes.push({ key, pluginName: attrDef.__plugin__ });
17058
+ } else if (typeof attrDef === "string" && this._pluginAttributeMetadata && this._pluginAttributeMetadata[key]) {
17059
+ const pluginName = this._pluginAttributeMetadata[key].__plugin__;
17060
+ pluginAttributes.push({ key, pluginName });
17061
+ } else {
17062
+ userKeys.push(key);
17063
+ }
17064
+ }
17065
+ const { mapping, reversedMapping } = generateBase62Mapping(userKeys);
16930
17066
  this.map = mapping;
16931
17067
  this.reversedMap = reversedMapping;
17068
+ const { mapping: pMapping, reversedMapping: pReversedMapping } = generatePluginMapping(pluginAttributes);
17069
+ this.pluginMap = pMapping;
17070
+ this.reversedPluginMap = pReversedMapping;
17071
+ this._pluginAttributes = {};
17072
+ for (const { key, pluginName } of pluginAttributes) {
17073
+ if (!this._pluginAttributes[pluginName]) {
17074
+ this._pluginAttributes[pluginName] = [];
17075
+ }
17076
+ this._pluginAttributes[pluginName].push(key);
17077
+ }
17078
+ }
17079
+ if (!isEmpty(pluginMap)) {
17080
+ this.pluginMap = pluginMap;
17081
+ this.reversedPluginMap = invert(pluginMap);
17082
+ }
17083
+ if (!this.pluginMap) {
17084
+ this.pluginMap = {};
17085
+ this.reversedPluginMap = {};
17086
+ }
17087
+ if (!this._pluginAttributes) {
17088
+ this._pluginAttributes = {};
16932
17089
  }
16933
17090
  }
16934
17091
  defaultOptions() {
@@ -17157,6 +17314,8 @@ class Schema {
17157
17314
  static import(data) {
17158
17315
  let {
17159
17316
  map,
17317
+ pluginMap,
17318
+ _pluginAttributeMetadata,
17160
17319
  name,
17161
17320
  options,
17162
17321
  version,
@@ -17167,11 +17326,15 @@ class Schema {
17167
17326
  attributes = attrs;
17168
17327
  const schema = new Schema({
17169
17328
  map,
17329
+ pluginMap: pluginMap || {},
17170
17330
  name,
17171
17331
  options,
17172
17332
  version,
17173
17333
  attributes
17174
17334
  });
17335
+ if (_pluginAttributeMetadata) {
17336
+ schema._pluginAttributeMetadata = _pluginAttributeMetadata;
17337
+ }
17175
17338
  return schema;
17176
17339
  }
17177
17340
  /**
@@ -17209,7 +17372,10 @@ class Schema {
17209
17372
  name: this.name,
17210
17373
  options: this.options,
17211
17374
  attributes: this._exportAttributes(this.attributes),
17212
- map: this.map
17375
+ map: this.map,
17376
+ pluginMap: this.pluginMap || {},
17377
+ _pluginAttributeMetadata: this._pluginAttributeMetadata || {},
17378
+ _pluginAttributes: this._pluginAttributes || {}
17213
17379
  };
17214
17380
  return data;
17215
17381
  }
@@ -17262,7 +17428,7 @@ class Schema {
17262
17428
  const flattenedObj = flatten(obj, { safe: true });
17263
17429
  const rest = { "_v": this.version + "" };
17264
17430
  for (const [key, value] of Object.entries(flattenedObj)) {
17265
- const mappedKey = this.map[key] || key;
17431
+ const mappedKey = this.pluginMap[key] || this.map[key] || key;
17266
17432
  const attrDef = this.getAttributeDefinition(key);
17267
17433
  if (typeof value === "number" && typeof attrDef === "string" && attrDef.includes("number")) {
17268
17434
  rest[mappedKey] = encode(value);
@@ -17283,14 +17449,18 @@ class Schema {
17283
17449
  await this.applyHooksActions(rest, "afterMap");
17284
17450
  return rest;
17285
17451
  }
17286
- async unmapper(mappedResourceItem, mapOverride) {
17452
+ async unmapper(mappedResourceItem, mapOverride, pluginMapOverride) {
17287
17453
  let obj = cloneDeep(mappedResourceItem);
17288
17454
  delete obj._v;
17289
17455
  obj = await this.applyHooksActions(obj, "beforeUnmap");
17290
17456
  const reversedMap = mapOverride ? invert(mapOverride) : this.reversedMap;
17457
+ const reversedPluginMap = pluginMapOverride ? invert(pluginMapOverride) : this.reversedPluginMap;
17291
17458
  const rest = {};
17292
17459
  for (const [key, value] of Object.entries(obj)) {
17293
- const originalKey = reversedMap && reversedMap[key] ? reversedMap[key] : key;
17460
+ let originalKey = reversedPluginMap[key] || reversedMap[key] || key;
17461
+ if (!originalKey) {
17462
+ originalKey = key;
17463
+ }
17294
17464
  let parsedValue = value;
17295
17465
  const attrDef = this.getAttributeDefinition(originalKey);
17296
17466
  const hasAfterUnmapHook = this.options.hooks?.afterUnmap?.[originalKey];
@@ -17357,6 +17527,37 @@ class Schema {
17357
17527
  }
17358
17528
  return def;
17359
17529
  }
17530
+ /**
17531
+ * Regenerate plugin attribute mapping
17532
+ * Called when plugin attributes are added or removed
17533
+ * @returns {void}
17534
+ */
17535
+ regeneratePluginMapping() {
17536
+ const flatAttrs = flatten(this.attributes, { safe: true });
17537
+ const leafKeys = Object.keys(flatAttrs).filter((k) => !k.includes("$$"));
17538
+ const objectKeys = this.extractObjectKeys(this.attributes);
17539
+ const allKeys = [.../* @__PURE__ */ new Set([...leafKeys, ...objectKeys])];
17540
+ const pluginAttributes = [];
17541
+ for (const key of allKeys) {
17542
+ const attrDef = this.getAttributeDefinition(key);
17543
+ if (typeof attrDef === "object" && attrDef !== null && attrDef.__plugin__) {
17544
+ pluginAttributes.push({ key, pluginName: attrDef.__plugin__ });
17545
+ } else if (typeof attrDef === "string" && this._pluginAttributeMetadata && this._pluginAttributeMetadata[key]) {
17546
+ const pluginName = this._pluginAttributeMetadata[key].__plugin__;
17547
+ pluginAttributes.push({ key, pluginName });
17548
+ }
17549
+ }
17550
+ const { mapping, reversedMapping } = generatePluginMapping(pluginAttributes);
17551
+ this.pluginMap = mapping;
17552
+ this.reversedPluginMap = reversedMapping;
17553
+ this._pluginAttributes = {};
17554
+ for (const { key, pluginName } of pluginAttributes) {
17555
+ if (!this._pluginAttributes[pluginName]) {
17556
+ this._pluginAttributes[pluginName] = [];
17557
+ }
17558
+ this._pluginAttributes[pluginName].push(key);
17559
+ }
17560
+ }
17360
17561
  /**
17361
17562
  * Preprocess attributes to convert nested objects into validator-compatible format
17362
17563
  * @param {Object} attributes - Original attributes
@@ -17426,37 +17627,38 @@ class Schema {
17426
17627
  } else if (typeof value === "object" && value !== null && !Array.isArray(value)) {
17427
17628
  const hasValidatorType = value.type !== void 0 && key !== "$$type";
17428
17629
  if (hasValidatorType) {
17429
- if (value.type === "ip4") {
17430
- processed[key] = { ...value, type: "string" };
17431
- } else if (value.type === "ip6") {
17432
- processed[key] = { ...value, type: "string" };
17433
- } else if (value.type === "money" || value.type === "crypto") {
17434
- processed[key] = { ...value, type: "number", min: value.min !== void 0 ? value.min : 0 };
17435
- } else if (value.type === "decimal") {
17436
- processed[key] = { ...value, type: "number" };
17437
- } else if (value.type === "geo:lat" || value.type === "geo-lat") {
17630
+ const { __plugin__, __pluginCreated__, ...cleanValue } = value;
17631
+ if (cleanValue.type === "ip4") {
17632
+ processed[key] = { ...cleanValue, type: "string" };
17633
+ } else if (cleanValue.type === "ip6") {
17634
+ processed[key] = { ...cleanValue, type: "string" };
17635
+ } else if (cleanValue.type === "money" || cleanValue.type === "crypto") {
17636
+ processed[key] = { ...cleanValue, type: "number", min: cleanValue.min !== void 0 ? cleanValue.min : 0 };
17637
+ } else if (cleanValue.type === "decimal") {
17638
+ processed[key] = { ...cleanValue, type: "number" };
17639
+ } else if (cleanValue.type === "geo:lat" || cleanValue.type === "geo-lat") {
17438
17640
  processed[key] = {
17439
- ...value,
17641
+ ...cleanValue,
17440
17642
  type: "number",
17441
- min: value.min !== void 0 ? value.min : -90,
17442
- max: value.max !== void 0 ? value.max : 90
17643
+ min: cleanValue.min !== void 0 ? cleanValue.min : -90,
17644
+ max: cleanValue.max !== void 0 ? cleanValue.max : 90
17443
17645
  };
17444
- } else if (value.type === "geo:lon" || value.type === "geo-lon") {
17646
+ } else if (cleanValue.type === "geo:lon" || cleanValue.type === "geo-lon") {
17445
17647
  processed[key] = {
17446
- ...value,
17648
+ ...cleanValue,
17447
17649
  type: "number",
17448
- min: value.min !== void 0 ? value.min : -180,
17449
- max: value.max !== void 0 ? value.max : 180
17650
+ min: cleanValue.min !== void 0 ? cleanValue.min : -180,
17651
+ max: cleanValue.max !== void 0 ? cleanValue.max : 180
17450
17652
  };
17451
- } else if (value.type === "geo:point" || value.type === "geo-point") {
17452
- processed[key] = { ...value, type: "any" };
17453
- } else if (value.type === "object" && value.properties) {
17653
+ } else if (cleanValue.type === "geo:point" || cleanValue.type === "geo-point") {
17654
+ processed[key] = { ...cleanValue, type: "any" };
17655
+ } else if (cleanValue.type === "object" && cleanValue.properties) {
17454
17656
  processed[key] = {
17455
- ...value,
17456
- properties: this.preprocessAttributesForValidation(value.properties)
17657
+ ...cleanValue,
17658
+ properties: this.preprocessAttributesForValidation(cleanValue.properties)
17457
17659
  };
17458
17660
  } else {
17459
- processed[key] = value;
17661
+ processed[key] = cleanValue;
17460
17662
  }
17461
17663
  } else {
17462
17664
  const isExplicitRequired = value.$$type && value.$$type.includes("required");
@@ -17774,7 +17976,11 @@ async function handleInsert$3({ resource, data, mappedData, originalData }) {
17774
17976
  excess: totalSize - 2047,
17775
17977
  data: originalData || data
17776
17978
  });
17777
- return { mappedData: { _v: mappedData._v }, body: JSON.stringify(mappedData) };
17979
+ const metadataOnly = { _v: mappedData._v };
17980
+ if (resource.schema?.pluginMap && Object.keys(resource.schema.pluginMap).length > 0) {
17981
+ metadataOnly._pluginMap = JSON.stringify(resource.schema.pluginMap);
17982
+ }
17983
+ return { mappedData: metadataOnly, body: JSON.stringify(mappedData) };
17778
17984
  }
17779
17985
  return { mappedData, body: "" };
17780
17986
  }
@@ -17977,6 +18183,12 @@ async function handleInsert$1({ resource, data, mappedData, originalData }) {
17977
18183
  metadataFields._v = mappedData._v;
17978
18184
  currentSize += attributeSizes._v;
17979
18185
  }
18186
+ if (resource.schema?.pluginMap && Object.keys(resource.schema.pluginMap).length > 0) {
18187
+ const pluginMapStr = JSON.stringify(resource.schema.pluginMap);
18188
+ const pluginMapSize = calculateUTF8Bytes("_pluginMap") + calculateUTF8Bytes(pluginMapStr);
18189
+ metadataFields._pluginMap = pluginMapStr;
18190
+ currentSize += pluginMapSize;
18191
+ }
17980
18192
  let reservedLimit = effectiveLimit;
17981
18193
  for (const [fieldName, size] of sortedFields) {
17982
18194
  if (fieldName === "_v") continue;
@@ -18036,6 +18248,9 @@ async function handleInsert({ resource, data, mappedData }) {
18036
18248
  "_v": mappedData._v || String(resource.version)
18037
18249
  };
18038
18250
  metadataOnly._map = JSON.stringify(resource.schema.map);
18251
+ if (resource.schema.pluginMap && Object.keys(resource.schema.pluginMap).length > 0) {
18252
+ metadataOnly._pluginMap = JSON.stringify(resource.schema.pluginMap);
18253
+ }
18039
18254
  const body = JSON.stringify(mappedData);
18040
18255
  return { mappedData: metadataOnly, body };
18041
18256
  }
@@ -18044,6 +18259,9 @@ async function handleUpdate({ resource, id, data, mappedData }) {
18044
18259
  "_v": mappedData._v || String(resource.version)
18045
18260
  };
18046
18261
  metadataOnly._map = JSON.stringify(resource.schema.map);
18262
+ if (resource.schema.pluginMap && Object.keys(resource.schema.pluginMap).length > 0) {
18263
+ metadataOnly._pluginMap = JSON.stringify(resource.schema.pluginMap);
18264
+ }
18047
18265
  const body = JSON.stringify(mappedData);
18048
18266
  return { mappedData: metadataOnly, body };
18049
18267
  }
@@ -18424,6 +18642,118 @@ ${errorDetails}`,
18424
18642
  this.applyConfiguration();
18425
18643
  return { oldAttributes, newAttributes };
18426
18644
  }
18645
+ /**
18646
+ * Add a plugin-created attribute to the resource schema
18647
+ * This ensures plugin attributes don't interfere with user-defined attributes
18648
+ * by using a separate mapping namespace (p0, p1, p2, ...)
18649
+ *
18650
+ * @param {string} name - Attribute name (e.g., '_hasEmbedding', 'clusterId')
18651
+ * @param {Object|string} definition - Attribute definition
18652
+ * @param {string} pluginName - Name of plugin adding the attribute
18653
+ * @returns {void}
18654
+ *
18655
+ * @example
18656
+ * // VectorPlugin adding tracking field
18657
+ * resource.addPluginAttribute('_hasEmbedding', {
18658
+ * type: 'boolean',
18659
+ * optional: true,
18660
+ * default: false
18661
+ * }, 'VectorPlugin');
18662
+ *
18663
+ * // Shorthand notation
18664
+ * resource.addPluginAttribute('clusterId', 'string|optional', 'VectorPlugin');
18665
+ */
18666
+ addPluginAttribute(name, definition, pluginName) {
18667
+ if (!pluginName) {
18668
+ throw new ResourceError(
18669
+ "Plugin name is required when adding plugin attributes",
18670
+ { resource: this.name, attribute: name }
18671
+ );
18672
+ }
18673
+ const existingDef = this.schema.getAttributeDefinition(name);
18674
+ if (existingDef && (!existingDef.__plugin__ || existingDef.__plugin__ !== pluginName)) {
18675
+ throw new ResourceError(
18676
+ `Attribute '${name}' already exists and is not from plugin '${pluginName}'`,
18677
+ { resource: this.name, attribute: name, plugin: pluginName }
18678
+ );
18679
+ }
18680
+ let defObject = definition;
18681
+ if (typeof definition === "object" && definition !== null) {
18682
+ defObject = { ...definition };
18683
+ }
18684
+ if (typeof defObject === "object" && defObject !== null) {
18685
+ defObject.__plugin__ = pluginName;
18686
+ defObject.__pluginCreated__ = Date.now();
18687
+ }
18688
+ this.schema.attributes[name] = defObject;
18689
+ this.attributes[name] = defObject;
18690
+ if (typeof defObject === "string") {
18691
+ if (!this.schema._pluginAttributeMetadata) {
18692
+ this.schema._pluginAttributeMetadata = {};
18693
+ }
18694
+ this.schema._pluginAttributeMetadata[name] = {
18695
+ __plugin__: pluginName,
18696
+ __pluginCreated__: Date.now()
18697
+ };
18698
+ }
18699
+ this.schema.regeneratePluginMapping();
18700
+ if (this.schema.options.generateAutoHooks) {
18701
+ this.schema.generateAutoHooks();
18702
+ }
18703
+ const processedAttributes = this.schema.preprocessAttributesForValidation(this.schema.attributes);
18704
+ this.schema.validator = new ValidatorManager({ autoEncrypt: false }).compile(merge(
18705
+ { $$async: true, $$strict: false },
18706
+ processedAttributes
18707
+ ));
18708
+ if (this.database) {
18709
+ this.database.emit("plugin-attribute-added", {
18710
+ resource: this.name,
18711
+ attribute: name,
18712
+ plugin: pluginName,
18713
+ definition: defObject
18714
+ });
18715
+ }
18716
+ }
18717
+ /**
18718
+ * Remove a plugin-created attribute from the resource schema
18719
+ * Called when a plugin is uninstalled or no longer needs the attribute
18720
+ *
18721
+ * @param {string} name - Attribute name to remove
18722
+ * @param {string} [pluginName] - Optional plugin name for safety check
18723
+ * @returns {boolean} True if attribute was removed, false if not found
18724
+ *
18725
+ * @example
18726
+ * resource.removePluginAttribute('_hasEmbedding', 'VectorPlugin');
18727
+ */
18728
+ removePluginAttribute(name, pluginName = null) {
18729
+ const attrDef = this.schema.getAttributeDefinition(name);
18730
+ const metadata = this.schema._pluginAttributeMetadata?.[name];
18731
+ const isPluginAttr = typeof attrDef === "object" && attrDef?.__plugin__ || metadata;
18732
+ if (!attrDef || !isPluginAttr) {
18733
+ return false;
18734
+ }
18735
+ const actualPlugin = attrDef?.__plugin__ || metadata?.__plugin__;
18736
+ if (pluginName && actualPlugin !== pluginName) {
18737
+ throw new ResourceError(
18738
+ `Attribute '${name}' belongs to plugin '${actualPlugin}', not '${pluginName}'`,
18739
+ { resource: this.name, attribute: name, actualPlugin, requestedPlugin: pluginName }
18740
+ );
18741
+ }
18742
+ delete this.schema.attributes[name];
18743
+ delete this.attributes[name];
18744
+ if (this.schema._pluginAttributeMetadata?.[name]) {
18745
+ delete this.schema._pluginAttributeMetadata[name];
18746
+ }
18747
+ this.schema.regeneratePluginMapping();
18748
+ if (this.database) {
18749
+ this.database.emit("plugin-attribute-removed", {
18750
+ resource: this.name,
18751
+ attribute: name,
18752
+ plugin: actualPlugin
18753
+ });
18754
+ }
18755
+ return true;
18756
+ }
18427
18757
  /**
18428
18758
  * Add a hook function for a specific event
18429
18759
  * @param {string} event - Hook event (beforeInsert, afterInsert, etc.)
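Beyond the JSDoc examples above, the practical effect is that plugin attributes get their own mapping namespace which is persisted next to the user map; a sketch (resource, plugin and slot values assumed):

// Assumed resource/plugin names; mapping values are invented.
resource.addPluginAttribute('_hasEmbedding', 'boolean|optional', 'VectorPlugin');
// resource.schema.map        -> { name: '0', email: '1' }        // user attributes, unchanged
// resource.schema.pluginMap  -> { _hasEmbedding: 'pXXX' }        // plugin attributes, separate namespace
// When a write overflows into the object body, the insert/update handlers above store the
// plugin map as the '_pluginMap' metadata key so unmapper() can still resolve plugin keys on read.
resource.removePluginAttribute('_hasEmbedding', 'VectorPlugin');  // returns true and regenerates the map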
@@ -20725,8 +21055,9 @@ ${errorDetails}`,
20725
21055
  const filterInternalFields = (obj) => {
20726
21056
  if (!obj || typeof obj !== "object") return obj;
20727
21057
  const filtered2 = {};
21058
+ const pluginAttrNames = this.schema._pluginAttributes ? Object.values(this.schema._pluginAttributes).flat() : [];
20728
21059
  for (const [key, value] of Object.entries(obj)) {
20729
- if (!key.startsWith("_") || key === "_geohash" || key.startsWith("_geohash_zoom")) {
21060
+ if (!key.startsWith("_") || key === "_geohash" || key.startsWith("_geohash_zoom") || pluginAttrNames.includes(key)) {
20730
21061
  filtered2[key] = value;
20731
21062
  }
20732
21063
  }
@@ -20752,7 +21083,16 @@ ${errorDetails}`,
20752
21083
  if (hasOverflow && body) {
20753
21084
  const [okBody, errBody, parsedBody] = await tryFn(() => Promise.resolve(JSON.parse(body)));
20754
21085
  if (okBody) {
20755
- const [okUnmap, errUnmap, unmappedBody] = await tryFn(() => this.schema.unmapper(parsedBody));
21086
+ let pluginMapFromMeta = null;
21087
+ if (metadata && metadata._pluginmap) {
21088
+ const [okPluginMap, errPluginMap, parsedPluginMap] = await tryFn(
21089
+ () => Promise.resolve(typeof metadata._pluginmap === "string" ? JSON.parse(metadata._pluginmap) : metadata._pluginmap)
21090
+ );
21091
+ pluginMapFromMeta = okPluginMap ? parsedPluginMap : null;
21092
+ }
21093
+ const [okUnmap, errUnmap, unmappedBody] = await tryFn(
21094
+ () => this.schema.unmapper(parsedBody, void 0, pluginMapFromMeta)
21095
+ );
20756
21096
  bodyData = okUnmap ? unmappedBody : {};
20757
21097
  }
20758
21098
  }
@@ -20769,11 +21109,16 @@ ${errorDetails}`,
20769
21109
  if (behavior === "body-only") {
20770
21110
  const [okBody, errBody, parsedBody] = await tryFn(() => Promise.resolve(body ? JSON.parse(body) : {}));
20771
21111
  let mapFromMeta = this.schema.map;
21112
+ let pluginMapFromMeta = null;
20772
21113
  if (metadata && metadata._map) {
20773
21114
  const [okMap, errMap, parsedMap] = await tryFn(() => Promise.resolve(typeof metadata._map === "string" ? JSON.parse(metadata._map) : metadata._map));
20774
21115
  mapFromMeta = okMap ? parsedMap : this.schema.map;
20775
21116
  }
20776
- const [okUnmap, errUnmap, unmappedBody] = await tryFn(() => this.schema.unmapper(parsedBody, mapFromMeta));
21117
+ if (metadata && metadata._pluginmap) {
21118
+ const [okPluginMap, errPluginMap, parsedPluginMap] = await tryFn(() => Promise.resolve(typeof metadata._pluginmap === "string" ? JSON.parse(metadata._pluginmap) : metadata._pluginmap));
21119
+ pluginMapFromMeta = okPluginMap ? parsedPluginMap : null;
21120
+ }
21121
+ const [okUnmap, errUnmap, unmappedBody] = await tryFn(() => this.schema.unmapper(parsedBody, mapFromMeta, pluginMapFromMeta));
20777
21122
  const result2 = okUnmap ? { ...unmappedBody, id } : { id };
20778
21123
  Object.keys(result2).forEach((k) => {
20779
21124
  result2[k] = fixValue(result2[k]);
@@ -20783,7 +21128,16 @@ ${errorDetails}`,
20783
21128
  if (behavior === "user-managed" && body && body.trim() !== "") {
20784
21129
  const [okBody, errBody, parsedBody] = await tryFn(() => Promise.resolve(JSON.parse(body)));
20785
21130
  if (okBody) {
20786
- const [okUnmap, errUnmap, unmappedBody] = await tryFn(() => this.schema.unmapper(parsedBody));
21131
+ let pluginMapFromMeta = null;
21132
+ if (metadata && metadata._pluginmap) {
21133
+ const [okPluginMap, errPluginMap, parsedPluginMap] = await tryFn(
21134
+ () => Promise.resolve(typeof metadata._pluginmap === "string" ? JSON.parse(metadata._pluginmap) : metadata._pluginmap)
21135
+ );
21136
+ pluginMapFromMeta = okPluginMap ? parsedPluginMap : null;
21137
+ }
21138
+ const [okUnmap, errUnmap, unmappedBody] = await tryFn(
21139
+ () => this.schema.unmapper(parsedBody, void 0, pluginMapFromMeta)
21140
+ );
20787
21141
  const bodyData = okUnmap ? unmappedBody : {};
20788
21142
  const merged = { ...bodyData, ...unmappedMetadata, id };
20789
21143
  Object.keys(merged).forEach((k) => {
@@ -21031,7 +21385,7 @@ class Database extends EventEmitter {
21031
21385
  this.id = idGenerator(7);
21032
21386
  this.version = "1";
21033
21387
  this.s3dbVersion = (() => {
21034
- const [ok, err, version] = tryFn(() => true ? "12.2.4" : "latest");
21388
+ const [ok, err, version] = tryFn(() => true ? "12.4.0" : "latest");
21035
21389
  return ok ? version : "latest";
21036
21390
  })();
21037
21391
  this._resourcesMap = {};
@@ -21087,7 +21441,7 @@ class Database extends EventEmitter {
21087
21441
  connectionString = `s3://${encodeURIComponent(accessKeyId)}:${encodeURIComponent(secretAccessKey)}@${bucket || "s3db"}?${params.toString()}`;
21088
21442
  }
21089
21443
  }
21090
- this.client = options.client || new Client({
21444
+ this.client = options.client || new S3Client({
21091
21445
  verbose: this.verbose,
21092
21446
  parallelism: this.parallelism,
21093
21447
  connectionString
@@ -23021,7 +23375,11 @@ class TursoReplicator extends BaseReplicator {
23021
23375
  }
23022
23376
  continue;
23023
23377
  }
23024
- const attributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
23378
+ const allAttributes = resource.config.versions[resource.config.currentVersion]?.attributes || {};
23379
+ const pluginAttrNames = resource.schema?._pluginAttributes ? Object.values(resource.schema._pluginAttributes).flat() : [];
23380
+ const attributes = Object.fromEntries(
23381
+ Object.entries(allAttributes).filter(([name]) => !pluginAttrNames.includes(name))
23382
+ );
23025
23383
  for (const tableConfig of tableConfigs) {
23026
23384
  const tableName = tableConfig.table;
23027
23385
  const [okSync, errSync] = await tryFn(async () => {
@@ -25989,7 +26347,7 @@ class S3TfStateDriver extends TfStateDriver {
25989
26347
  */
25990
26348
  async initialize() {
25991
26349
  const { bucket, credentials, region } = this.connectionConfig;
25992
- this.client = new Client({
26350
+ this.client = new S3Client({
25993
26351
  bucketName: bucket,
25994
26352
  credentials,
25995
26353
  region
@@ -36930,11 +37288,11 @@ class VectorPlugin extends Plugin {
36930
37288
  }
36931
37289
  };
36932
37290
  if (!resource.schema.attributes[trackingFieldName]) {
36933
- resource.schema.attributes[trackingFieldName] = {
37291
+ resource.addPluginAttribute(trackingFieldName, {
36934
37292
  type: "boolean",
36935
37293
  optional: true,
36936
37294
  default: false
36937
- };
37295
+ }, "VectorPlugin");
36938
37296
  }
36939
37297
  this.emit("vector:partition-created", {
36940
37298
  resource: resource.name,
@@ -37615,6 +37973,1070 @@ class VectorPlugin extends Plugin {
37615
37973
  }
37616
37974
  }
37617
37975
 
37976
+ class MemoryStorage {
37977
+ constructor(config = {}) {
37978
+ this.objects = /* @__PURE__ */ new Map();
37979
+ this.bucket = config.bucket || "s3db";
37980
+ this.enforceLimits = config.enforceLimits || false;
37981
+ this.metadataLimit = config.metadataLimit || 2048;
37982
+ this.maxObjectSize = config.maxObjectSize || 5 * 1024 * 1024 * 1024;
37983
+ this.persistPath = config.persistPath;
37984
+ this.autoPersist = config.autoPersist || false;
37985
+ this.verbose = config.verbose || false;
37986
+ }
37987
+ /**
37988
+ * Generate ETag (MD5 hash) for object body
37989
+ */
37990
+ _generateETag(body) {
37991
+ const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body || "");
37992
+ return createHash("md5").update(buffer).digest("hex");
37993
+ }
37994
+ /**
37995
+ * Calculate metadata size in bytes
37996
+ */
37997
+ _calculateMetadataSize(metadata) {
37998
+ if (!metadata) return 0;
37999
+ let size = 0;
38000
+ for (const [key, value] of Object.entries(metadata)) {
38001
+ size += Buffer.byteLength(key, "utf8");
38002
+ size += Buffer.byteLength(String(value), "utf8");
38003
+ }
38004
+ return size;
38005
+ }
38006
+ /**
38007
+ * Validate limits if enforceLimits is enabled
38008
+ */
38009
+ _validateLimits(body, metadata) {
38010
+ if (!this.enforceLimits) return;
38011
+ const metadataSize = this._calculateMetadataSize(metadata);
38012
+ if (metadataSize > this.metadataLimit) {
38013
+ throw new Error(
38014
+ `Metadata size (${metadataSize} bytes) exceeds limit of ${this.metadataLimit} bytes`
38015
+ );
38016
+ }
38017
+ const bodySize = Buffer.isBuffer(body) ? body.length : Buffer.byteLength(body || "", "utf8");
38018
+ if (bodySize > this.maxObjectSize) {
38019
+ throw new Error(
38020
+ `Object size (${bodySize} bytes) exceeds limit of ${this.maxObjectSize} bytes`
38021
+ );
38022
+ }
38023
+ }
38024
+ /**
38025
+ * Store an object
38026
+ */
38027
+ async put(key, { body, metadata, contentType, contentEncoding, contentLength, ifMatch }) {
38028
+ this._validateLimits(body, metadata);
38029
+ if (ifMatch !== void 0) {
38030
+ const existing = this.objects.get(key);
38031
+ if (existing && existing.etag !== ifMatch) {
38032
+ throw new Error(`Precondition failed: ETag mismatch for key "${key}"`);
38033
+ }
38034
+ }
38035
+ const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body || "");
38036
+ const etag = this._generateETag(buffer);
38037
+ const lastModified = (/* @__PURE__ */ new Date()).toISOString();
38038
+ const size = buffer.length;
38039
+ const objectData = {
38040
+ body: buffer,
38041
+ metadata: metadata || {},
38042
+ contentType: contentType || "application/octet-stream",
38043
+ etag,
38044
+ lastModified,
38045
+ size,
38046
+ contentEncoding,
38047
+ contentLength: contentLength || size
38048
+ };
38049
+ this.objects.set(key, objectData);
38050
+ if (this.verbose) {
38051
+ console.log(`[MemoryStorage] PUT ${key} (${size} bytes, etag: ${etag})`);
38052
+ }
38053
+ if (this.autoPersist && this.persistPath) {
38054
+ await this.saveToDisk();
38055
+ }
38056
+ return {
38057
+ ETag: etag,
38058
+ VersionId: null,
38059
+ // Memory storage doesn't support versioning
38060
+ ServerSideEncryption: null,
38061
+ Location: `/${this.bucket}/${key}`
38062
+ };
38063
+ }
38064
+ /**
38065
+ * Retrieve an object
38066
+ */
38067
+ async get(key) {
38068
+ const obj = this.objects.get(key);
38069
+ if (!obj) {
38070
+ const error = new Error(`Object not found: ${key}`);
38071
+ error.name = "NoSuchKey";
38072
+ error.$metadata = {
38073
+ httpStatusCode: 404,
38074
+ requestId: "memory-" + Date.now(),
38075
+ attempts: 1,
38076
+ totalRetryDelay: 0
38077
+ };
38078
+ throw error;
38079
+ }
38080
+ if (this.verbose) {
38081
+ console.log(`[MemoryStorage] GET ${key} (${obj.size} bytes)`);
38082
+ }
38083
+ const bodyStream = Readable.from(obj.body);
38084
+ return {
38085
+ Body: bodyStream,
38086
+ Metadata: { ...obj.metadata },
38087
+ ContentType: obj.contentType,
38088
+ ContentLength: obj.size,
38089
+ ETag: obj.etag,
38090
+ LastModified: new Date(obj.lastModified),
38091
+ ContentEncoding: obj.contentEncoding
38092
+ };
38093
+ }
38094
+ /**
38095
+ * Get object metadata only (like S3 HeadObject)
38096
+ */
38097
+ async head(key) {
38098
+ const obj = this.objects.get(key);
38099
+ if (!obj) {
38100
+ const error = new Error(`Object not found: ${key}`);
38101
+ error.name = "NoSuchKey";
38102
+ error.$metadata = {
38103
+ httpStatusCode: 404,
38104
+ requestId: "memory-" + Date.now(),
38105
+ attempts: 1,
38106
+ totalRetryDelay: 0
38107
+ };
38108
+ throw error;
38109
+ }
38110
+ if (this.verbose) {
38111
+ console.log(`[MemoryStorage] HEAD ${key}`);
38112
+ }
38113
+ return {
38114
+ Metadata: { ...obj.metadata },
38115
+ ContentType: obj.contentType,
38116
+ ContentLength: obj.size,
38117
+ ETag: obj.etag,
38118
+ LastModified: new Date(obj.lastModified),
38119
+ ContentEncoding: obj.contentEncoding
38120
+ };
38121
+ }
38122
+ /**
38123
+ * Copy an object
38124
+ */
38125
+ async copy(from, to, { metadata, metadataDirective, contentType }) {
38126
+ const source = this.objects.get(from);
38127
+ if (!source) {
38128
+ const error = new Error(`Source object not found: ${from}`);
38129
+ error.name = "NoSuchKey";
38130
+ throw error;
38131
+ }
38132
+ let finalMetadata = { ...source.metadata };
38133
+ if (metadataDirective === "REPLACE" && metadata) {
38134
+ finalMetadata = metadata;
38135
+ } else if (metadata) {
38136
+ finalMetadata = { ...finalMetadata, ...metadata };
38137
+ }
38138
+ const result = await this.put(to, {
38139
+ body: source.body,
38140
+ metadata: finalMetadata,
38141
+ contentType: contentType || source.contentType,
38142
+ contentEncoding: source.contentEncoding
38143
+ });
38144
+ if (this.verbose) {
38145
+ console.log(`[MemoryStorage] COPY ${from} \u2192 ${to}`);
38146
+ }
38147
+ return result;
38148
+ }
38149
+ /**
38150
+ * Check if object exists
38151
+ */
38152
+ exists(key) {
38153
+ return this.objects.has(key);
38154
+ }
38155
+ /**
38156
+ * Delete an object
38157
+ */
38158
+ async delete(key) {
38159
+ const existed = this.objects.has(key);
38160
+ this.objects.delete(key);
38161
+ if (this.verbose) {
38162
+ console.log(`[MemoryStorage] DELETE ${key} (existed: ${existed})`);
38163
+ }
38164
+ if (this.autoPersist && this.persistPath) {
38165
+ await this.saveToDisk();
38166
+ }
38167
+ return {
38168
+ DeleteMarker: false,
38169
+ VersionId: null
38170
+ };
38171
+ }
38172
+ /**
38173
+ * Delete multiple objects (batch)
38174
+ */
38175
+ async deleteMultiple(keys) {
38176
+ const deleted = [];
38177
+ const errors = [];
38178
+ for (const key of keys) {
38179
+ try {
38180
+ await this.delete(key);
38181
+ deleted.push({ Key: key });
38182
+ } catch (error) {
38183
+ errors.push({
38184
+ Key: key,
38185
+ Code: error.name || "InternalError",
38186
+ Message: error.message
38187
+ });
38188
+ }
38189
+ }
38190
+ if (this.verbose) {
38191
+ console.log(`[MemoryStorage] DELETE BATCH (${deleted.length} deleted, ${errors.length} errors)`);
38192
+ }
38193
+ return { Deleted: deleted, Errors: errors };
38194
+ }
38195
+ /**
38196
+ * List objects with prefix/delimiter support
38197
+ */
38198
+ async list({ prefix = "", delimiter = null, maxKeys = 1e3, continuationToken = null }) {
38199
+ const allKeys = Array.from(this.objects.keys());
38200
+ let filteredKeys = prefix ? allKeys.filter((key) => key.startsWith(prefix)) : allKeys;
38201
+ filteredKeys.sort();
38202
+ let startIndex = 0;
38203
+ if (continuationToken) {
38204
+ startIndex = parseInt(continuationToken) || 0;
38205
+ }
38206
+ const paginatedKeys = filteredKeys.slice(startIndex, startIndex + maxKeys);
38207
+ const isTruncated = startIndex + maxKeys < filteredKeys.length;
38208
+ const nextContinuationToken = isTruncated ? String(startIndex + maxKeys) : null;
38209
+ const commonPrefixes = /* @__PURE__ */ new Set();
38210
+ const contents = [];
38211
+ for (const key of paginatedKeys) {
38212
+ if (delimiter && prefix) {
38213
+ const suffix = key.substring(prefix.length);
38214
+ const delimiterIndex = suffix.indexOf(delimiter);
38215
+ if (delimiterIndex !== -1) {
38216
+ const commonPrefix = prefix + suffix.substring(0, delimiterIndex + 1);
38217
+ commonPrefixes.add(commonPrefix);
38218
+ continue;
38219
+ }
38220
+ }
38221
+ const obj = this.objects.get(key);
38222
+ contents.push({
38223
+ Key: key,
38224
+ Size: obj.size,
38225
+ LastModified: new Date(obj.lastModified),
38226
+ ETag: obj.etag,
38227
+ StorageClass: "STANDARD"
38228
+ });
38229
+ }
38230
+ if (this.verbose) {
38231
+ console.log(`[MemoryStorage] LIST prefix="${prefix}" (${contents.length} objects, ${commonPrefixes.size} prefixes)`);
38232
+ }
38233
+ return {
38234
+ Contents: contents,
38235
+ CommonPrefixes: Array.from(commonPrefixes).map((prefix2) => ({ Prefix: prefix2 })),
38236
+ IsTruncated: isTruncated,
38237
+ NextContinuationToken: nextContinuationToken,
38238
+ KeyCount: contents.length + commonPrefixes.size,
38239
+ MaxKeys: maxKeys,
38240
+ Prefix: prefix,
38241
+ Delimiter: delimiter
38242
+ };
38243
+ }
38244
+ /**
38245
+ * Create a snapshot of current state
38246
+ */
38247
+ snapshot() {
38248
+ const snapshot = {
38249
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
38250
+ bucket: this.bucket,
38251
+ objectCount: this.objects.size,
38252
+ objects: {}
38253
+ };
38254
+ for (const [key, obj] of this.objects.entries()) {
38255
+ snapshot.objects[key] = {
38256
+ body: obj.body.toString("base64"),
38257
+ metadata: obj.metadata,
38258
+ contentType: obj.contentType,
38259
+ etag: obj.etag,
38260
+ lastModified: obj.lastModified,
38261
+ size: obj.size,
38262
+ contentEncoding: obj.contentEncoding,
38263
+ contentLength: obj.contentLength
38264
+ };
38265
+ }
38266
+ return snapshot;
38267
+ }
38268
+ /**
38269
+ * Restore from a snapshot
38270
+ */
38271
+ restore(snapshot) {
38272
+ if (!snapshot || !snapshot.objects) {
38273
+ throw new Error("Invalid snapshot format");
38274
+ }
38275
+ this.objects.clear();
38276
+ for (const [key, obj] of Object.entries(snapshot.objects)) {
38277
+ this.objects.set(key, {
38278
+ body: Buffer.from(obj.body, "base64"),
38279
+ metadata: obj.metadata,
38280
+ contentType: obj.contentType,
38281
+ etag: obj.etag,
38282
+ lastModified: obj.lastModified,
38283
+ size: obj.size,
38284
+ contentEncoding: obj.contentEncoding,
38285
+ contentLength: obj.contentLength
38286
+ });
38287
+ }
38288
+ if (this.verbose) {
38289
+ console.log(`[MemoryStorage] Restored snapshot with ${this.objects.size} objects`);
38290
+ }
38291
+ }
38292
+ /**
38293
+ * Save current state to disk
38294
+ */
38295
+ async saveToDisk(customPath) {
38296
+ const path = customPath || this.persistPath;
38297
+ if (!path) {
38298
+ throw new Error("No persist path configured");
38299
+ }
38300
+ const snapshot = this.snapshot();
38301
+ const json = JSON.stringify(snapshot, null, 2);
38302
+ const [ok, err] = await tryFn(() => writeFile(path, json, "utf-8"));
38303
+ if (!ok) {
38304
+ throw new Error(`Failed to save to disk: ${err.message}`);
38305
+ }
38306
+ if (this.verbose) {
38307
+ console.log(`[MemoryStorage] Saved ${this.objects.size} objects to ${path}`);
38308
+ }
38309
+ return path;
38310
+ }
38311
+ /**
38312
+ * Load state from disk
38313
+ */
38314
+ async loadFromDisk(customPath) {
38315
+ const path = customPath || this.persistPath;
38316
+ if (!path) {
38317
+ throw new Error("No persist path configured");
38318
+ }
38319
+ const [ok, err, json] = await tryFn(() => readFile(path, "utf-8"));
38320
+ if (!ok) {
38321
+ throw new Error(`Failed to load from disk: ${err.message}`);
38322
+ }
38323
+ const snapshot = JSON.parse(json);
38324
+ this.restore(snapshot);
38325
+ if (this.verbose) {
38326
+ console.log(`[MemoryStorage] Loaded ${this.objects.size} objects from ${path}`);
38327
+ }
38328
+ return snapshot;
38329
+ }
38330
+ /**
38331
+ * Get storage statistics
38332
+ */
38333
+ getStats() {
38334
+ let totalSize = 0;
38335
+ const keys = [];
38336
+ for (const [key, obj] of this.objects.entries()) {
38337
+ totalSize += obj.size;
38338
+ keys.push(key);
38339
+ }
38340
+ return {
38341
+ objectCount: this.objects.size,
38342
+ totalSize,
38343
+ totalSizeFormatted: this._formatBytes(totalSize),
38344
+ keys: keys.sort(),
38345
+ bucket: this.bucket
38346
+ };
38347
+ }
38348
+ /**
38349
+ * Format a byte count as a human-readable string
38350
+ */
38351
+ _formatBytes(bytes) {
38352
+ if (bytes === 0) return "0 Bytes";
38353
+ const k = 1024;
38354
+ const sizes = ["Bytes", "KB", "MB", "GB"];
38355
+ const i = Math.floor(Math.log(bytes) / Math.log(k));
38356
+ return Math.round(bytes / Math.pow(k, i) * 100) / 100 + " " + sizes[i];
38357
+ }
38358
+ /**
38359
+ * Clear all objects
38360
+ */
38361
+ clear() {
38362
+ this.objects.clear();
38363
+ if (this.verbose) {
38364
+ console.log(`[MemoryStorage] Cleared all objects`);
38365
+ }
38366
+ }
38367
+ }
38368
+
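Editor's note: the snapshot/persistence surface added above can be exercised directly. A minimal sketch, assuming the bundle-local MemoryStorage class and the put() option shape used by the command handlers further below (body/metadata/contentType); values here are illustrative:

    const storage = new MemoryStorage({ bucket: 'test', persistPath: '/tmp/s3db-memory.json' });
    await storage.put('resource=users/id=u1', {
      body: Buffer.from('{"plan":"pro"}'),
      metadata: { name: 'Alice' },
      contentType: 'application/json'
    });
    const snap = storage.snapshot();       // bodies are serialized as base64, metadata kept as-is
    storage.clear();                        // drop everything...
    storage.restore(snap);                  // ...then rebuild the object map from the snapshot
    await storage.saveToDisk();             // writes the snapshot JSON to persistPath
    console.log(storage.getStats().totalSizeFormatted);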
38369
+ class MemoryClient extends EventEmitter {
38370
+ constructor(config = {}) {
38371
+ super();
38372
+ this.id = config.id || idGenerator(77);
38373
+ this.verbose = config.verbose || false;
38374
+ this.parallelism = config.parallelism || 10;
38375
+ this.bucket = config.bucket || "s3db";
38376
+ this.keyPrefix = config.keyPrefix || "";
38377
+ this.region = config.region || "us-east-1";
38378
+ this.storage = new MemoryStorage({
38379
+ bucket: this.bucket,
38380
+ enforceLimits: config.enforceLimits || false,
38381
+ metadataLimit: config.metadataLimit || 2048,
38382
+ maxObjectSize: config.maxObjectSize || 5 * 1024 * 1024 * 1024,
38383
+ persistPath: config.persistPath,
38384
+ autoPersist: config.autoPersist || false,
38385
+ verbose: this.verbose
38386
+ });
38387
+ this.config = {
38388
+ bucket: this.bucket,
38389
+ keyPrefix: this.keyPrefix,
38390
+ region: this.region,
38391
+ endpoint: "memory://localhost",
38392
+ forcePathStyle: true
38393
+ };
38394
+ if (this.verbose) {
38395
+ console.log(`[MemoryClient] Initialized (id: ${this.id}, bucket: ${this.bucket})`);
38396
+ }
38397
+ }
38398
+ /**
38399
+ * Simulate sendCommand from AWS SDK
38400
+ * Used by Database/Resource to send AWS SDK commands
38401
+ */
38402
+ async sendCommand(command) {
38403
+ const commandName = command.constructor.name;
38404
+ const input = command.input || {};
38405
+ this.emit("command.request", commandName, input);
38406
+ let response;
38407
+ try {
38408
+ switch (commandName) {
38409
+ case "PutObjectCommand":
38410
+ response = await this._handlePutObject(input);
38411
+ break;
38412
+ case "GetObjectCommand":
38413
+ response = await this._handleGetObject(input);
38414
+ break;
38415
+ case "HeadObjectCommand":
38416
+ response = await this._handleHeadObject(input);
38417
+ break;
38418
+ case "CopyObjectCommand":
38419
+ response = await this._handleCopyObject(input);
38420
+ break;
38421
+ case "DeleteObjectCommand":
38422
+ response = await this._handleDeleteObject(input);
38423
+ break;
38424
+ case "DeleteObjectsCommand":
38425
+ response = await this._handleDeleteObjects(input);
38426
+ break;
38427
+ case "ListObjectsV2Command":
38428
+ response = await this._handleListObjects(input);
38429
+ break;
38430
+ default:
38431
+ throw new Error(`Unsupported command: ${commandName}`);
38432
+ }
38433
+ this.emit("command.response", commandName, response, input);
38434
+ return response;
38435
+ } catch (error) {
38436
+ const mappedError = mapAwsError(error, {
38437
+ bucket: this.bucket,
38438
+ key: input.Key,
38439
+ commandName,
38440
+ commandInput: input
38441
+ });
38442
+ throw mappedError;
38443
+ }
38444
+ }
38445
+ /**
38446
+ * PutObjectCommand handler
38447
+ */
38448
+ async _handlePutObject(input) {
38449
+ const key = input.Key;
38450
+ const metadata = input.Metadata || {};
38451
+ const contentType = input.ContentType;
38452
+ const body = input.Body;
38453
+ const contentEncoding = input.ContentEncoding;
38454
+ const contentLength = input.ContentLength;
38455
+ const ifMatch = input.IfMatch;
38456
+ return await this.storage.put(key, {
38457
+ body,
38458
+ metadata,
38459
+ contentType,
38460
+ contentEncoding,
38461
+ contentLength,
38462
+ ifMatch
38463
+ });
38464
+ }
38465
+ /**
38466
+ * GetObjectCommand handler
38467
+ */
38468
+ async _handleGetObject(input) {
38469
+ const key = input.Key;
38470
+ return await this.storage.get(key);
38471
+ }
38472
+ /**
38473
+ * HeadObjectCommand handler
38474
+ */
38475
+ async _handleHeadObject(input) {
38476
+ const key = input.Key;
38477
+ return await this.storage.head(key);
38478
+ }
38479
+ /**
38480
+ * CopyObjectCommand handler
38481
+ */
38482
+ async _handleCopyObject(input) {
38483
+ const copySource = input.CopySource;
38484
+ const parts = copySource.split("/");
38485
+ const sourceKey = parts.slice(1).join("/");
38486
+ const destinationKey = input.Key;
38487
+ const metadata = input.Metadata;
38488
+ const metadataDirective = input.MetadataDirective;
38489
+ const contentType = input.ContentType;
38490
+ return await this.storage.copy(sourceKey, destinationKey, {
38491
+ metadata,
38492
+ metadataDirective,
38493
+ contentType
38494
+ });
38495
+ }
38496
+ /**
38497
+ * DeleteObjectCommand handler
38498
+ */
38499
+ async _handleDeleteObject(input) {
38500
+ const key = input.Key;
38501
+ return await this.storage.delete(key);
38502
+ }
38503
+ /**
38504
+ * DeleteObjectsCommand handler
38505
+ */
38506
+ async _handleDeleteObjects(input) {
38507
+ const objects = input.Delete?.Objects || [];
38508
+ const keys = objects.map((obj) => obj.Key);
38509
+ return await this.storage.deleteMultiple(keys);
38510
+ }
38511
+ /**
38512
+ * ListObjectsV2Command handler
38513
+ */
38514
+ async _handleListObjects(input) {
38515
+ const fullPrefix = this.keyPrefix && input.Prefix ? path$1.join(this.keyPrefix, input.Prefix) : this.keyPrefix || input.Prefix || "";
38516
+ return await this.storage.list({
38517
+ prefix: fullPrefix,
38518
+ delimiter: input.Delimiter,
38519
+ maxKeys: input.MaxKeys,
38520
+ continuationToken: input.ContinuationToken
38521
+ });
38522
+ }
38523
+ /**
38524
+ * Put an object (Client interface method)
38525
+ */
38526
+ async putObject({ key, metadata, contentType, body, contentEncoding, contentLength, ifMatch }) {
38527
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
38528
+ const stringMetadata = {};
38529
+ if (metadata) {
38530
+ for (const [k, v] of Object.entries(metadata)) {
38531
+ const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, "_");
38532
+ const { encoded } = metadataEncode(v);
38533
+ stringMetadata[validKey] = encoded;
38534
+ }
38535
+ }
38536
+ const response = await this.storage.put(fullKey, {
38537
+ body,
38538
+ metadata: stringMetadata,
38539
+ contentType,
38540
+ contentEncoding,
38541
+ contentLength,
38542
+ ifMatch
38543
+ });
38544
+ this.emit("putObject", null, { key, metadata, contentType, body, contentEncoding, contentLength });
38545
+ return response;
38546
+ }
38547
+ /**
38548
+ * Get an object (Client interface method)
38549
+ */
38550
+ async getObject(key) {
38551
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
38552
+ const response = await this.storage.get(fullKey);
38553
+ const decodedMetadata = {};
38554
+ if (response.Metadata) {
38555
+ for (const [k, v] of Object.entries(response.Metadata)) {
38556
+ decodedMetadata[k] = metadataDecode(v);
38557
+ }
38558
+ }
38559
+ this.emit("getObject", null, { key });
38560
+ return {
38561
+ ...response,
38562
+ Metadata: decodedMetadata
38563
+ };
38564
+ }
38565
+ /**
38566
+ * Head object (get metadata only)
38567
+ */
38568
+ async headObject(key) {
38569
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
38570
+ const response = await this.storage.head(fullKey);
38571
+ const decodedMetadata = {};
38572
+ if (response.Metadata) {
38573
+ for (const [k, v] of Object.entries(response.Metadata)) {
38574
+ decodedMetadata[k] = metadataDecode(v);
38575
+ }
38576
+ }
38577
+ this.emit("headObject", null, { key });
38578
+ return {
38579
+ ...response,
38580
+ Metadata: decodedMetadata
38581
+ };
38582
+ }
38583
+ /**
38584
+ * Copy an object
38585
+ */
38586
+ async copyObject({ from, to, metadata, metadataDirective, contentType }) {
38587
+ const fullFrom = this.keyPrefix ? path$1.join(this.keyPrefix, from) : from;
38588
+ const fullTo = this.keyPrefix ? path$1.join(this.keyPrefix, to) : to;
38589
+ const encodedMetadata = {};
38590
+ if (metadata) {
38591
+ for (const [k, v] of Object.entries(metadata)) {
38592
+ const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, "_");
38593
+ const { encoded } = metadataEncode(v);
38594
+ encodedMetadata[validKey] = encoded;
38595
+ }
38596
+ }
38597
+ const response = await this.storage.copy(fullFrom, fullTo, {
38598
+ metadata: encodedMetadata,
38599
+ metadataDirective,
38600
+ contentType
38601
+ });
38602
+ this.emit("copyObject", null, { from, to, metadata, metadataDirective });
38603
+ return response;
38604
+ }
38605
+ /**
38606
+ * Check if object exists
38607
+ */
38608
+ async exists(key) {
38609
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
38610
+ return this.storage.exists(fullKey);
38611
+ }
38612
+ /**
38613
+ * Delete an object
38614
+ */
38615
+ async deleteObject(key) {
38616
+ const fullKey = this.keyPrefix ? path$1.join(this.keyPrefix, key) : key;
38617
+ const response = await this.storage.delete(fullKey);
38618
+ this.emit("deleteObject", null, { key });
38619
+ return response;
38620
+ }
38621
+ /**
38622
+ * Delete multiple objects (batch)
38623
+ */
38624
+ async deleteObjects(keys) {
38625
+ const fullKeys = keys.map(
38626
+ (key) => this.keyPrefix ? path$1.join(this.keyPrefix, key) : key
38627
+ );
38628
+ const batches = chunk(fullKeys, this.parallelism);
38629
+ const allResults = { Deleted: [], Errors: [] };
38630
+ const { results } = await PromisePool.withConcurrency(this.parallelism).for(batches).process(async (batch) => {
38631
+ return await this.storage.deleteMultiple(batch);
38632
+ });
38633
+ for (const result of results) {
38634
+ allResults.Deleted.push(...result.Deleted);
38635
+ allResults.Errors.push(...result.Errors);
38636
+ }
38637
+ this.emit("deleteObjects", null, { keys, count: allResults.Deleted.length });
38638
+ return allResults;
38639
+ }
38640
+ /**
38641
+ * List objects with pagination support
38642
+ */
38643
+ async listObjects({ prefix = "", delimiter = null, maxKeys = 1e3, continuationToken = null }) {
38644
+ const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
38645
+ const response = await this.storage.list({
38646
+ prefix: fullPrefix,
38647
+ delimiter,
38648
+ maxKeys,
38649
+ continuationToken
38650
+ });
38651
+ this.emit("listObjects", null, { prefix, count: response.Contents.length });
38652
+ return response;
38653
+ }
38654
+ /**
38655
+ * Get a page of keys with offset/limit pagination
38656
+ */
38657
+ async getKeysPage(params = {}) {
38658
+ const { prefix = "", offset = 0, amount = 100 } = params;
38659
+ let keys = [];
38660
+ let truncated = true;
38661
+ let continuationToken;
38662
+ if (offset > 0) {
38663
+ const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
38664
+ const response = await this.storage.list({
38665
+ prefix: fullPrefix,
38666
+ maxKeys: offset + amount
38667
+ });
38668
+ keys = response.Contents.map((x) => x.Key).slice(offset, offset + amount);
38669
+ } else {
38670
+ while (truncated) {
38671
+ const options = {
38672
+ prefix,
38673
+ continuationToken,
38674
+ maxKeys: amount - keys.length
38675
+ };
38676
+ const res = await this.listObjects(options);
38677
+ if (res.Contents) {
38678
+ keys = keys.concat(res.Contents.map((x) => x.Key));
38679
+ }
38680
+ truncated = res.IsTruncated || false;
38681
+ continuationToken = res.NextContinuationToken;
38682
+ if (keys.length >= amount) {
38683
+ keys = keys.slice(0, amount);
38684
+ break;
38685
+ }
38686
+ }
38687
+ }
38688
+ if (this.keyPrefix) {
38689
+ keys = keys.map((x) => x.replace(this.keyPrefix, "")).map((x) => x.startsWith("/") ? x.replace("/", "") : x);
38690
+ }
38691
+ this.emit("getKeysPage", keys, params);
38692
+ return keys;
38693
+ }
38694
+ /**
38695
+ * Get all keys with a given prefix
38696
+ */
38697
+ async getAllKeys({ prefix = "" }) {
38698
+ const fullPrefix = this.keyPrefix ? path$1.join(this.keyPrefix, prefix) : prefix;
38699
+ const response = await this.storage.list({
38700
+ prefix: fullPrefix,
38701
+ maxKeys: 1e5
38702
+ // Large number to get all
38703
+ });
38704
+ let keys = response.Contents.map((x) => x.Key);
38705
+ if (this.keyPrefix) {
38706
+ keys = keys.map((x) => x.replace(this.keyPrefix, "")).map((x) => x.startsWith("/") ? x.replace("/", "") : x);
38707
+ }
38708
+ this.emit("getAllKeys", keys, { prefix });
38709
+ return keys;
38710
+ }
38711
+ /**
38712
+ * Count total objects under a prefix
38713
+ */
38714
+ async count({ prefix = "" } = {}) {
38715
+ const keys = await this.getAllKeys({ prefix });
38716
+ const count = keys.length;
38717
+ this.emit("count", count, { prefix });
38718
+ return count;
38719
+ }
38720
+ /**
38721
+ * Delete all objects under a prefix
38722
+ */
38723
+ async deleteAll({ prefix = "" } = {}) {
38724
+ const keys = await this.getAllKeys({ prefix });
38725
+ let totalDeleted = 0;
38726
+ if (keys.length > 0) {
38727
+ const result = await this.deleteObjects(keys);
38728
+ totalDeleted = result.Deleted.length;
38729
+ this.emit("deleteAll", {
38730
+ prefix,
38731
+ batch: totalDeleted,
38732
+ total: totalDeleted
38733
+ });
38734
+ }
38735
+ this.emit("deleteAllComplete", {
38736
+ prefix,
38737
+ totalDeleted
38738
+ });
38739
+ return totalDeleted;
38740
+ }
38741
+ /**
38742
+ * Get continuation token after skipping offset items
38743
+ */
38744
+ async getContinuationTokenAfterOffset({ prefix = "", offset = 1e3 } = {}) {
38745
+ if (offset === 0) return null;
38746
+ const keys = await this.getAllKeys({ prefix });
38747
+ if (offset >= keys.length) {
38748
+ this.emit("getContinuationTokenAfterOffset", null, { prefix, offset });
38749
+ return null;
38750
+ }
38751
+ const token = keys[offset];
38752
+ this.emit("getContinuationTokenAfterOffset", token, { prefix, offset });
38753
+ return token;
38754
+ }
38755
+ /**
38756
+ * Move an object from one key to another
38757
+ */
38758
+ async moveObject({ from, to }) {
38759
+ await this.copyObject({ from, to, metadataDirective: "COPY" });
38760
+ await this.deleteObject(from);
38761
+ }
38762
+ /**
38763
+ * Move all objects from one prefix to another
38764
+ */
38765
+ async moveAllObjects({ prefixFrom, prefixTo }) {
38766
+ const keys = await this.getAllKeys({ prefix: prefixFrom });
38767
+ const results = [];
38768
+ const errors = [];
38769
+ for (const key of keys) {
38770
+ try {
38771
+ const to = key.replace(prefixFrom, prefixTo);
38772
+ await this.moveObject({ from: key, to });
38773
+ results.push(to);
38774
+ } catch (error) {
38775
+ errors.push({
38776
+ message: error.message,
38777
+ raw: error,
38778
+ key
38779
+ });
38780
+ }
38781
+ }
38782
+ this.emit("moveAllObjects", { results, errors }, { prefixFrom, prefixTo });
38783
+ if (errors.length > 0) {
38784
+ const error = new Error("Some objects could not be moved");
38785
+ error.context = {
38786
+ bucket: this.bucket,
38787
+ operation: "moveAllObjects",
38788
+ prefixFrom,
38789
+ prefixTo,
38790
+ totalKeys: keys.length,
38791
+ failedCount: errors.length,
38792
+ successCount: results.length,
38793
+ errors
38794
+ };
38795
+ throw error;
38796
+ }
38797
+ return results;
38798
+ }
38799
+ /**
38800
+ * Create a snapshot of current storage state
38801
+ */
38802
+ snapshot() {
38803
+ return this.storage.snapshot();
38804
+ }
38805
+ /**
38806
+ * Restore from a snapshot
38807
+ */
38808
+ restore(snapshot) {
38809
+ return this.storage.restore(snapshot);
38810
+ }
38811
+ /**
38812
+ * Save current state to disk (persistence)
38813
+ */
38814
+ async saveToDisk(path2) {
38815
+ return await this.storage.saveToDisk(path2);
38816
+ }
38817
+ /**
38818
+ * Load state from disk
38819
+ */
38820
+ async loadFromDisk(path2) {
38821
+ return await this.storage.loadFromDisk(path2);
38822
+ }
38823
+ /**
38824
+ * Export to BackupPlugin-compatible format (s3db.json + JSONL files)
38825
+ * Compatible with BackupPlugin for easy migration
38826
+ *
38827
+ * @param {string} outputDir - Output directory path
38828
+ * @param {Object} options - Export options
38829
+ * @param {Array<string>} options.resources - Resource names to export (default: all)
38830
+ * @param {boolean} options.compress - Use gzip compression (default: true)
38831
+ * @param {Object} options.database - Database instance for schema metadata
38832
+ * @returns {Promise<Object>} Export manifest with file paths and stats
38833
+ */
38834
+ async exportBackup(outputDir, options = {}) {
38835
+ const { mkdir, writeFile } = await import('fs/promises');
38836
+ const zlib = await import('zlib');
38837
+ const { promisify } = await import('util');
38838
+ const gzip = promisify(zlib.gzip);
38839
+ await mkdir(outputDir, { recursive: true });
38840
+ const compress = options.compress !== false;
38841
+ const database = options.database;
38842
+ const resourceFilter = options.resources;
38843
+ const allKeys = await this.getAllKeys({});
38844
+ const resourceMap = /* @__PURE__ */ new Map();
38845
+ for (const key of allKeys) {
38846
+ const match = key.match(/^resource=([^/]+)\//);
38847
+ if (match) {
38848
+ const resourceName = match[1];
38849
+ if (!resourceFilter || resourceFilter.includes(resourceName)) {
38850
+ if (!resourceMap.has(resourceName)) {
38851
+ resourceMap.set(resourceName, []);
38852
+ }
38853
+ resourceMap.get(resourceName).push(key);
38854
+ }
38855
+ }
38856
+ }
38857
+ const exportedFiles = {};
38858
+ const resourceStats = {};
38859
+ for (const [resourceName, keys] of resourceMap.entries()) {
38860
+ const records = [];
38861
+ for (const key of keys) {
38862
+ const obj = await this.getObject(key);
38863
+ const idMatch = key.match(/\/id=([^/]+)/);
38864
+ const recordId = idMatch ? idMatch[1] : null;
38865
+ const record = { ...obj.Metadata };
38866
+ if (recordId && !record.id) {
38867
+ record.id = recordId;
38868
+ }
38869
+ if (obj.Body) {
38870
+ const chunks = [];
38871
+ for await (const chunk2 of obj.Body) {
38872
+ chunks.push(chunk2);
38873
+ }
38874
+ const bodyBuffer = Buffer.concat(chunks);
38875
+ const bodyStr = bodyBuffer.toString("utf-8");
38876
+ if (bodyStr.startsWith("{") || bodyStr.startsWith("[")) {
38877
+ try {
38878
+ const bodyData = JSON.parse(bodyStr);
38879
+ Object.assign(record, bodyData);
38880
+ } catch {
38881
+ record._body = bodyStr;
38882
+ }
38883
+ } else if (bodyStr) {
38884
+ record._body = bodyStr;
38885
+ }
38886
+ }
38887
+ records.push(record);
38888
+ }
38889
+ const jsonl = records.map((r) => JSON.stringify(r)).join("\n");
38890
+ const filename = compress ? `${resourceName}.jsonl.gz` : `${resourceName}.jsonl`;
38891
+ const filePath = `${outputDir}/${filename}`;
38892
+ if (compress) {
38893
+ const compressed = await gzip(jsonl);
38894
+ await writeFile(filePath, compressed);
38895
+ } else {
38896
+ await writeFile(filePath, jsonl, "utf-8");
38897
+ }
38898
+ exportedFiles[resourceName] = filePath;
38899
+ resourceStats[resourceName] = {
38900
+ recordCount: records.length,
38901
+ fileSize: compress ? (await gzip(jsonl)).length : Buffer.byteLength(jsonl)
38902
+ };
38903
+ }
38904
+ const s3dbMetadata = {
38905
+ version: "1.0",
38906
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
38907
+ bucket: this.bucket,
38908
+ keyPrefix: this.keyPrefix || "",
38909
+ compressed: compress,
38910
+ resources: {},
38911
+ totalRecords: 0,
38912
+ totalSize: 0
38913
+ };
38914
+ if (database && database.resources) {
38915
+ for (const [resourceName, resource] of Object.entries(database.resources)) {
38916
+ if (resourceMap.has(resourceName)) {
38917
+ s3dbMetadata.resources[resourceName] = {
38918
+ schema: resource.schema ? {
38919
+ attributes: resource.schema.attributes,
38920
+ partitions: resource.schema.partitions,
38921
+ behavior: resource.schema.behavior,
38922
+ timestamps: resource.schema.timestamps
38923
+ } : null,
38924
+ stats: resourceStats[resourceName]
38925
+ };
38926
+ }
38927
+ }
38928
+ } else {
38929
+ for (const [resourceName, stats] of Object.entries(resourceStats)) {
38930
+ s3dbMetadata.resources[resourceName] = { stats };
38931
+ }
38932
+ }
38933
+ for (const stats of Object.values(resourceStats)) {
38934
+ s3dbMetadata.totalRecords += stats.recordCount;
38935
+ s3dbMetadata.totalSize += stats.fileSize;
38936
+ }
38937
+ const s3dbPath = `${outputDir}/s3db.json`;
38938
+ await writeFile(s3dbPath, JSON.stringify(s3dbMetadata, null, 2), "utf-8");
38939
+ return {
38940
+ manifest: s3dbPath,
38941
+ files: exportedFiles,
38942
+ stats: s3dbMetadata,
38943
+ resourceCount: resourceMap.size,
38944
+ totalRecords: s3dbMetadata.totalRecords,
38945
+ totalSize: s3dbMetadata.totalSize
38946
+ };
38947
+ }
38948
+ /**
38949
+ * Import from BackupPlugin-compatible format
38950
+ * Loads data from s3db.json + JSONL files created by BackupPlugin or exportBackup()
38951
+ *
38952
+ * @param {string} backupDir - Backup directory path containing s3db.json
38953
+ * @param {Object} options - Import options
38954
+ * @param {Array<string>} options.resources - Resource names to import (default: all)
38955
+ * @param {boolean} options.clear - Clear existing data first (default: false)
38956
+ * @param {Object} options.database - Database instance to recreate schemas
38957
+ * @returns {Promise<Object>} Import stats
38958
+ */
38959
+ async importBackup(backupDir, options = {}) {
38960
+ const { readFile, readdir } = await import('fs/promises');
38961
+ const zlib = await import('zlib');
38962
+ const { promisify } = await import('util');
38963
+ const gunzip = promisify(zlib.gunzip);
38964
+ if (options.clear) {
38965
+ this.clear();
38966
+ }
38967
+ const s3dbPath = `${backupDir}/s3db.json`;
38968
+ const s3dbContent = await readFile(s3dbPath, "utf-8");
38969
+ const metadata = JSON.parse(s3dbContent);
38970
+ const database = options.database;
38971
+ const resourceFilter = options.resources;
38972
+ const importStats = {
38973
+ resourcesImported: 0,
38974
+ recordsImported: 0,
38975
+ errors: []
38976
+ };
38977
+ if (database && metadata.resources) {
38978
+ for (const [resourceName, resourceMeta] of Object.entries(metadata.resources)) {
38979
+ if (resourceFilter && !resourceFilter.includes(resourceName)) continue;
38980
+ if (resourceMeta.schema) {
38981
+ try {
38982
+ await database.createResource({
38983
+ name: resourceName,
38984
+ ...resourceMeta.schema
38985
+ });
38986
+ } catch (error) {
38987
+ }
38988
+ }
38989
+ }
38990
+ }
38991
+ const files = await readdir(backupDir);
38992
+ for (const file of files) {
38993
+ if (!file.endsWith(".jsonl") && !file.endsWith(".jsonl.gz")) continue;
38994
+ const resourceName = file.replace(/\.jsonl(\.gz)?$/, "");
38995
+ if (resourceFilter && !resourceFilter.includes(resourceName)) continue;
38996
+ const filePath = `${backupDir}/${file}`;
38997
+ let content = await readFile(filePath);
38998
+ if (file.endsWith(".gz")) {
38999
+ content = await gunzip(content);
39000
+ }
39001
+ const jsonl = content.toString("utf-8");
39002
+ const lines = jsonl.split("\n").filter((line) => line.trim());
39003
+ for (const line of lines) {
39004
+ try {
39005
+ const record = JSON.parse(line);
39006
+ const id = record.id || record._id || `imported_${Date.now()}_${Math.random()}`;
39007
+ const { _body, id: _, _id: __, ...metadata2 } = record;
39008
+ await this.putObject({
39009
+ key: `resource=${resourceName}/id=${id}`,
39010
+ metadata: metadata2,
39011
+ body: _body ? Buffer.from(_body) : void 0
39012
+ });
39013
+ importStats.recordsImported++;
39014
+ } catch (error) {
39015
+ importStats.errors.push({
39016
+ resource: resourceName,
39017
+ error: error.message,
39018
+ line
39019
+ });
39020
+ }
39021
+ }
39022
+ importStats.resourcesImported++;
39023
+ }
39024
+ return importStats;
39025
+ }
39026
+ /**
39027
+ * Get storage statistics
39028
+ */
39029
+ getStats() {
39030
+ return this.storage.getStats();
39031
+ }
39032
+ /**
39033
+ * Clear all objects
39034
+ */
39035
+ clear() {
39036
+ this.storage.clear();
39037
+ }
39038
+ }
39039
+
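Editor's note: taken together, MemoryClient mirrors the client surface against the in-memory backend. A minimal usage sketch based on the methods defined above; the bucket, keyPrefix, and record values are illustrative only:

    const client = new MemoryClient({ bucket: 'test-bucket', keyPrefix: 'tenant-a' });
    await client.putObject({
      key: 'resource=users/id=u1',
      metadata: { name: 'Alice' },
      contentType: 'application/json',
      body: Buffer.from(JSON.stringify({ name: 'Alice' }))
    });
    const obj = await client.getObject('resource=users/id=u1');  // Metadata comes back decoded
    console.log(await client.count({ prefix: 'resource=users/' }));
    console.log(await client.getAllKeys({ prefix: 'resource=users/' }));
    await client.deleteAll({ prefix: 'resource=users/' });
    console.log(client.getStats().objectCount);                  // 0 after deleteAll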
37618
39040
  function mapFieldTypeToTypeScript(fieldType) {
37619
39041
  const baseType = fieldType.split("|")[0].trim();
37620
39042
  const typeMap = {
@@ -37720,9 +39142,13 @@ async function generateTypes(database, options = {}) {
37720
39142
  }
37721
39143
  const resourceInterfaces = [];
37722
39144
  for (const [name, resource] of Object.entries(database.resources)) {
37723
- const attributes = resource.config?.attributes || resource.attributes || {};
39145
+ const allAttributes = resource.config?.attributes || resource.attributes || {};
37724
39146
  const timestamps = resource.config?.timestamps || false;
37725
- const interfaceDef = generateResourceInterface(name, attributes, timestamps);
39147
+ const pluginAttrNames = resource.schema?._pluginAttributes ? Object.values(resource.schema._pluginAttributes).flat() : [];
39148
+ const userAttributes = Object.fromEntries(
39149
+ Object.entries(allAttributes).filter(([name2]) => !pluginAttrNames.includes(name2))
39150
+ );
39151
+ const interfaceDef = generateResourceInterface(name, userAttributes, timestamps);
37726
39152
  lines.push(interfaceDef);
37727
39153
  resourceInterfaces.push({
37728
39154
  name,
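Editor's note: the hunk above changes type generation to skip attributes that plugins injected into a resource schema (tracked in schema._pluginAttributes), so generated interfaces only describe user-declared fields. A standalone sketch of the filter; the attribute names are hypothetical, not taken from the package:

    const allAttributes = { name: 'string|required', lat: 'number', lng: 'number' };
    const _pluginAttributes = { geo: ['lat', 'lng'] };           // e.g. fields added by a plugin
    const pluginAttrNames = Object.values(_pluginAttributes).flat();
    const userAttributes = Object.fromEntries(
      Object.entries(allAttributes).filter(([name]) => !pluginAttrNames.includes(name))
    );
    console.log(userAttributes);                                 // { name: 'string|required' }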
@@ -38516,5 +39942,5 @@ var metrics = /*#__PURE__*/Object.freeze({
38516
39942
  silhouetteScore: silhouetteScore
38517
39943
  });
38518
39944
 
38519
- export { AVAILABLE_BEHAVIORS, AnalyticsNotEnabledError, ApiPlugin, AuditPlugin, AuthenticationError, BACKUP_DRIVERS, BackupPlugin, BaseBackupDriver, BaseError, BaseReplicator, BehaviorError, BigqueryReplicator, CONSUMER_DRIVERS, Cache, CachePlugin, Client, ConnectionString, ConnectionStringError, CostsPlugin, CryptoError, DEFAULT_BEHAVIOR, Database, DatabaseError, DynamoDBReplicator, EncryptionError, ErrorMap, EventualConsistencyPlugin, Factory, FilesystemBackupDriver, FilesystemCache, FullTextPlugin, GeoPlugin, InvalidResourceItem, MemoryCache, MetadataLimitError, MetricsPlugin, MissingMetadata, MongoDBReplicator, MultiBackupDriver, MySQLReplicator, NoSuchBucket, NoSuchKey, NotFound, PartitionAwareFilesystemCache, PartitionDriverError, PartitionError, PermissionError, PlanetScaleReplicator, Plugin, PluginError, PluginObject, PluginStorageError, PostgresReplicator, QueueConsumerPlugin, REPLICATOR_DRIVERS, RabbitMqConsumer, RelationPlugin, ReplicatorPlugin, Resource, ResourceError, ResourceIdsPageReader, ResourceIdsReader, ResourceNotFound, ResourceReader, ResourceWriter, S3BackupDriver, S3Cache, S3QueuePlugin, Database as S3db, S3dbError, S3dbReplicator, SchedulerPlugin, Schema, SchemaError, Seeder, SqsConsumer, SqsReplicator, StateMachinePlugin, StreamError, TTLPlugin, TfStatePlugin, TursoReplicator, UnknownError, ValidationError, Validator, VectorPlugin, WebhookReplicator, behaviors, calculateAttributeNamesSize, calculateAttributeSizes, calculateEffectiveLimit, calculateSystemOverhead, calculateTotalSize, calculateUTF8Bytes, clearUTF8Memory, createBackupDriver, createConsumer, createReplicator, decode, decodeDecimal, decodeFixedPoint, decodeFixedPointBatch, decrypt, S3db as default, encode, encodeDecimal, encodeFixedPoint, encodeFixedPointBatch, encrypt, generateTypes, getBehavior, getSizeBreakdown, idGenerator, mapAwsError, md5, passwordGenerator, printTypes, sha256, streamToString, transformValue, tryFn, tryFnSync, validateBackupConfig, validateReplicatorConfig };
39945
+ export { AVAILABLE_BEHAVIORS, AnalyticsNotEnabledError, ApiPlugin, AuditPlugin, AuthenticationError, BACKUP_DRIVERS, BackupPlugin, BaseBackupDriver, BaseError, BaseReplicator, BehaviorError, BigqueryReplicator, CONSUMER_DRIVERS, Cache, CachePlugin, S3Client as Client, ConnectionString, ConnectionStringError, CostsPlugin, CryptoError, DEFAULT_BEHAVIOR, Database, DatabaseError, DynamoDBReplicator, EncryptionError, ErrorMap, EventualConsistencyPlugin, Factory, FilesystemBackupDriver, FilesystemCache, FullTextPlugin, GeoPlugin, InvalidResourceItem, MemoryCache, MemoryClient, MemoryStorage, MetadataLimitError, MetricsPlugin, MissingMetadata, MongoDBReplicator, MultiBackupDriver, MySQLReplicator, NoSuchBucket, NoSuchKey, NotFound, PartitionAwareFilesystemCache, PartitionDriverError, PartitionError, PermissionError, PlanetScaleReplicator, Plugin, PluginError, PluginObject, PluginStorageError, PostgresReplicator, QueueConsumerPlugin, REPLICATOR_DRIVERS, RabbitMqConsumer, RelationPlugin, ReplicatorPlugin, Resource, ResourceError, ResourceIdsPageReader, ResourceIdsReader, ResourceNotFound, ResourceReader, ResourceWriter, S3BackupDriver, S3Cache, S3Client, S3QueuePlugin, Database as S3db, S3dbError, S3dbReplicator, SchedulerPlugin, Schema, SchemaError, Seeder, SqsConsumer, SqsReplicator, StateMachinePlugin, StreamError, TTLPlugin, TfStatePlugin, TursoReplicator, UnknownError, ValidationError, Validator, VectorPlugin, WebhookReplicator, behaviors, calculateAttributeNamesSize, calculateAttributeSizes, calculateEffectiveLimit, calculateSystemOverhead, calculateTotalSize, calculateUTF8Bytes, clearUTF8Memory, createBackupDriver, createConsumer, createReplicator, decode, decodeDecimal, decodeFixedPoint, decodeFixedPointBatch, decrypt, S3db as default, encode, encodeDecimal, encodeFixedPoint, encodeFixedPointBatch, encrypt, generateTypes, getBehavior, getSizeBreakdown, idGenerator, mapAwsError, md5, passwordGenerator, printTypes, sha256, streamToString, transformValue, tryFn, tryFnSync, validateBackupConfig, validateReplicatorConfig };
38520
39946
  //# sourceMappingURL=s3db.es.js.map
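Editor's note: the updated export list re-exports S3Client under both names and adds the in-memory implementations. A minimal consumption sketch, assuming the package is imported by its published name (s3db.js):

    import { Client, S3Client, MemoryClient, MemoryStorage } from 's3db.js';

    console.log(Client === S3Client);                  // both names bind the same exported class
    const mem = new MemoryClient({ bucket: 'test' });
    console.log(mem.storage instanceof MemoryStorage); // MemoryClient wraps a MemoryStorage instance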