s3db.js 12.3.0 → 12.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -27,12 +27,17 @@ import {
  * @param {string} schemaSync.onMismatch - Action on schema mismatch: 'error' | 'warn' | 'ignore' (default: 'error')
  * @param {boolean} schemaSync.autoCreateTable - Auto-create table if not exists (default: true)
  * @param {boolean} schemaSync.autoCreateColumns - Auto-add missing columns (default: true, only with strategy: 'alter')
+ * @param {string} mutability - Global mutability mode: 'append-only' | 'mutable' | 'immutable' (default: 'append-only')
+ * - 'append-only': Updates/deletes become inserts with _operation_type and _operation_timestamp (most performant, no streaming buffer issues)
+ * - 'mutable': Traditional UPDATE/DELETE queries with streaming buffer retry logic
+ * - 'immutable': Full audit trail with _operation_type, _operation_timestamp, _is_deleted, _version fields
  *
  * @example
  * new BigqueryReplicator({
  *   projectId: 'my-gcp-project',
  *   datasetId: 'analytics',
  *   credentials: JSON.parse(Buffer.from(GOOGLE_CREDENTIALS, 'base64').toString()),
+ *   mutability: 'append-only', // Global default
  *   schemaSync: {
  *     enabled: true,
  *     strategy: 'alter',
@@ -41,6 +46,7 @@ import {
  * }, {
  *   users: {
  *     table: 'users_table',
+ *     mutability: 'immutable', // Override for audit trail
  *     transform: (data) => ({ ...data, ip: data.ip || 'unknown' })
  *   },
  *   orders: 'orders_table'
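
In practice the three modes differ only in the extra columns written alongside each row. A rough sketch of what replicating an update of { id: 'u1', plan: 'pro' } produces in each mode (values are illustrative; the field logic mirrors the _addTrackingFields helper added further down in this diff):

// 'append-only': the update is appended as a new row carrying operation metadata
const appendOnlyRow = {
  id: 'u1',
  plan: 'pro',
  _operation_type: 'update',
  _operation_timestamp: '2025-01-01T12:00:00.000Z' // new Date().toISOString()
};

// 'immutable': same columns plus a soft-delete flag and a per-id version counter
const immutableRow = {
  ...appendOnlyRow,
  _is_deleted: false, // true only when the operation is 'delete'
  _version: 2
};

// 'mutable': no extra columns; a parameterized UPDATE ... WHERE id = @id runs instead
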
@@ -58,6 +64,10 @@ class BigqueryReplicator extends BaseReplicator {
     this.location = config.location || 'US';
     this.logTable = config.logTable;
 
+    // Mutability configuration
+    this.mutability = config.mutability || 'append-only';
+    this._validateMutability(this.mutability);
+
     // Schema sync configuration
     this.schemaSync = {
       enabled: config.schemaSync?.enabled || false,
@@ -69,6 +79,16 @@ class BigqueryReplicator extends BaseReplicator {
 
     // Parse resources configuration
     this.resources = this.parseResourcesConfig(resources);
+
+    // Version tracking for immutable mode
+    this.versionCounters = new Map();
+  }
+
+  _validateMutability(mutability) {
+    const validModes = ['append-only', 'mutable', 'immutable'];
+    if (!validModes.includes(mutability)) {
+      throw new Error(`Invalid mutability mode: ${mutability}. Must be one of: ${validModes.join(', ')}`);
+    }
   }
 
   parseResourcesConfig(resources) {
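
The new mode is validated twice: once for the global default in the constructor and once for every per-table override in parseResourcesConfig, so a typo fails fast. A quick sketch (constructor arguments follow the JSDoc example above):

// Throws during construction, before any data is replicated.
try {
  new BigqueryReplicator(
    { projectId: 'my-gcp-project', datasetId: 'analytics', mutability: 'upsert' }, // not a valid mode
    { users: 'users_table' }
  );
} catch (err) {
  console.error(err.message);
  // "Invalid mutability mode: upsert. Must be one of: append-only, mutable, immutable"
}
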
@@ -80,26 +100,33 @@ class BigqueryReplicator extends BaseReplicator {
         parsed[resourceName] = [{
           table: config,
           actions: ['insert'],
-          transform: null
+          transform: null,
+          mutability: this.mutability
         }];
       } else if (Array.isArray(config)) {
         // Array form: multiple table mappings
         parsed[resourceName] = config.map(item => {
           if (typeof item === 'string') {
-            return { table: item, actions: ['insert'], transform: null };
+            return { table: item, actions: ['insert'], transform: null, mutability: this.mutability };
           }
+          const itemMutability = item.mutability || this.mutability;
+          this._validateMutability(itemMutability);
           return {
             table: item.table,
             actions: item.actions || ['insert'],
-            transform: item.transform || null
+            transform: item.transform || null,
+            mutability: itemMutability
           };
         });
       } else if (typeof config === 'object') {
         // Single object form
+        const configMutability = config.mutability || this.mutability;
+        this._validateMutability(configMutability);
         parsed[resourceName] = [{
           table: config.table,
           actions: config.actions || ['insert'],
-          transform: config.transform || null
+          transform: config.transform || null,
+          mutability: configMutability
         }];
       }
     }
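
Putting the three accepted resource shapes together, a configuration might look like the sketch below (table names other than those in the JSDoc example are placeholders; note that actions still decides which operations reach a table, independently of mutability):

const resources = {
  // String form: insert-only, inherits the global mutability
  orders: 'orders_table',

  // Array form: several target tables per resource, each may override mutability
  events: [
    'events_raw',
    { table: 'events_audit', actions: ['insert', 'update', 'delete'], mutability: 'immutable' }
  ],

  // Object form: single target with explicit overrides
  users: {
    table: 'users_table',
    actions: ['insert', 'update', 'delete'],
    mutability: 'mutable',
    transform: (data) => ({ ...data, ip: data.ip || 'unknown' })
  }
};

new BigqueryReplicator({ projectId: 'my-gcp-project', datasetId: 'analytics', mutability: 'append-only' }, resources);
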
@@ -198,9 +225,10 @@ class BigqueryReplicator extends BaseReplicator {
 
     for (const tableConfig of tableConfigs) {
       const tableName = tableConfig.table;
+      const mutability = tableConfig.mutability;
 
       const [okSync, errSync] = await tryFn(async () => {
-        await this.syncTableSchema(tableName, attributes);
+        await this.syncTableSchema(tableName, attributes, mutability);
       });
 
       if (!okSync) {
@@ -224,7 +252,7 @@ class BigqueryReplicator extends BaseReplicator {
   /**
    * Sync a single table schema in BigQuery
    */
-  async syncTableSchema(tableName, attributes) {
+  async syncTableSchema(tableName, attributes, mutability = 'append-only') {
     const dataset = this.bigqueryClient.dataset(this.datasetId);
     const table = dataset.table(tableName);
 
@@ -240,11 +268,11 @@ class BigqueryReplicator extends BaseReplicator {
         throw new Error(`Table ${tableName} does not exist (validate-only mode)`);
       }
 
-      // Create table with schema
-      const schema = generateBigQuerySchema(attributes);
+      // Create table with schema (including tracking fields based on mutability)
+      const schema = generateBigQuerySchema(attributes, mutability);
 
       if (this.config.verbose) {
-        console.log(`[BigQueryReplicator] Creating table ${tableName} with schema:`, schema);
+        console.log(`[BigQueryReplicator] Creating table ${tableName} with schema (mutability: ${mutability}):`, schema);
       }
 
       await dataset.createTable(tableName, { schema });
@@ -252,7 +280,8 @@ class BigqueryReplicator extends BaseReplicator {
       this.emit('table_created', {
         replicator: this.name,
         tableName,
-        attributes: Object.keys(attributes)
+        attributes: Object.keys(attributes),
+        mutability
       });
 
       return;
@@ -265,13 +294,14 @@ class BigqueryReplicator extends BaseReplicator {
       }
 
       await table.delete();
-      const schema = generateBigQuerySchema(attributes);
+      const schema = generateBigQuerySchema(attributes, mutability);
       await dataset.createTable(tableName, { schema });
 
       this.emit('table_recreated', {
         replicator: this.name,
         tableName,
-        attributes: Object.keys(attributes)
+        attributes: Object.keys(attributes),
+        mutability
       });
 
       return;
@@ -279,7 +309,7 @@ class BigqueryReplicator extends BaseReplicator {
 
     if (this.schemaSync.strategy === 'alter' && this.schemaSync.autoCreateColumns) {
       const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
-      const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
+      const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
 
       if (newFields.length > 0) {
         if (this.config.verbose) {
@@ -306,7 +336,7 @@ class BigqueryReplicator extends BaseReplicator {
 
     if (this.schemaSync.strategy === 'validate-only') {
       const existingSchema = await getBigQueryTableSchema(this.bigqueryClient, this.datasetId, tableName);
-      const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema);
+      const newFields = generateBigQuerySchemaUpdate(attributes, existingSchema, mutability);
 
       if (newFields.length > 0) {
         throw new Error(`Table ${tableName} schema mismatch. Missing columns: ${newFields.length}`);
@@ -333,7 +363,8 @@ class BigqueryReplicator extends BaseReplicator {
       .filter(tableConfig => tableConfig.actions.includes(operation))
       .map(tableConfig => ({
         table: tableConfig.table,
-        transform: tableConfig.transform
+        transform: tableConfig.transform,
+        mutability: tableConfig.mutability
       }));
   }
 
@@ -362,6 +393,39 @@ class BigqueryReplicator extends BaseReplicator {
     return cleanData;
   }
 
+  /**
+   * Add tracking fields for append-only and immutable modes
+   * @private
+   */
+  _addTrackingFields(data, operation, mutability, id) {
+    const tracked = { ...data };
+
+    // Add operation tracking for append-only and immutable modes
+    if (mutability === 'append-only' || mutability === 'immutable') {
+      tracked._operation_type = operation;
+      tracked._operation_timestamp = new Date().toISOString();
+    }
+
+    // Add additional fields for immutable mode
+    if (mutability === 'immutable') {
+      tracked._is_deleted = operation === 'delete';
+      tracked._version = this._getNextVersion(id);
+    }
+
+    return tracked;
+  }
+
+  /**
+   * Get next version number for immutable mode
+   * @private
+   */
+  _getNextVersion(id) {
+    const current = this.versionCounters.get(id) || 0;
+    const next = current + 1;
+    this.versionCounters.set(id, next);
+    return next;
+  }
+
   async replicate(resourceName, operation, data, id, beforeData = null) {
 
     if (!this.enabled || !this.shouldReplicateResource(resourceName)) {
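
One detail worth noting about the helpers added above: _version comes from an in-memory Map keyed by record id, so numbering is per replicator instance. Illustrative only (_getNextVersion is a private helper):

replicator._getNextVersion('user-1'); // 1
replicator._getNextVersion('user-1'); // 2
replicator._getNextVersion('user-2'); // 1
// Counters restart from 1 whenever a new instance is created.
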
@@ -387,10 +451,23 @@
     for (const tableConfig of tableConfigs) {
       const [okTable, errTable] = await tryFn(async () => {
         const table = dataset.table(tableConfig.table);
+        const mutability = tableConfig.mutability;
         let job;
 
-        if (operation === 'insert') {
-          const transformedData = this.applyTransform(data, tableConfig.transform);
+        // For append-only and immutable modes, convert update/delete to insert
+        const shouldConvertToInsert =
+          (mutability === 'append-only' || mutability === 'immutable') &&
+          (operation === 'update' || operation === 'delete');
+
+        if (operation === 'insert' || shouldConvertToInsert) {
+          // Apply transform first
+          let transformedData = this.applyTransform(data, tableConfig.transform);
+
+          // Add tracking fields if needed
+          if (shouldConvertToInsert) {
+            transformedData = this._addTrackingFields(transformedData, operation, mutability, id);
+          }
+
          try {
            job = await table.insert([transformedData]);
          } catch (error) {
@@ -403,7 +480,8 @@ class BigqueryReplicator extends BaseReplicator {
            }
            throw error;
          }
-        } else if (operation === 'update') {
+        } else if (operation === 'update' && mutability === 'mutable') {
+          // Traditional UPDATE for mutable mode
          const transformedData = this.applyTransform(data, tableConfig.transform);
          const keys = Object.keys(transformedData).filter(k => k !== 'id');
          const setClause = keys.map(k => `${k} = @${k}`).join(', ');
@@ -455,7 +533,8 @@ class BigqueryReplicator extends BaseReplicator {
          }
 
          if (!job) throw lastError;
-        } else if (operation === 'delete') {
+        } else if (operation === 'delete' && mutability === 'mutable') {
+          // Traditional DELETE for mutable mode
          const query = `DELETE FROM \`${this.projectId}.${this.datasetId}.${tableConfig.table}\` WHERE id = @id`;
          try {
            const [deleteJob] = await this.bigqueryClient.createQueryJob({
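
In 'mutable' mode the replicator issues parameterized DML instead of appending rows. For data { id: 'u1', plan: 'pro', seats: 5 } the statements look roughly like the sketch below; the DELETE matches the hunk above, while the UPDATE text is inferred from the setClause construction and is not shown verbatim in this diff:

const update = `
  UPDATE \`my-gcp-project.analytics.users_table\`
  SET plan = @plan, seats = @seats
  WHERE id = @id
`;
const del = `DELETE FROM \`my-gcp-project.analytics.users_table\` WHERE id = @id`;
// Both run through bigqueryClient.createQueryJob with named parameters, with retries
// because BigQuery rejects DML against rows still sitting in the streaming buffer,
// which is the limitation the append-only mode sidesteps entirely.
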
@@ -614,7 +693,8 @@ class BigqueryReplicator extends BaseReplicator {
       datasetId: this.datasetId,
       resources: this.resources,
       logTable: this.logTable,
-      schemaSync: this.schemaSync
+      schemaSync: this.schemaSync,
+      mutability: this.mutability
     };
   }
 }
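
Once rows are flowing in 'append-only' or 'immutable' mode, the table accumulates one row per operation, so downstream readers reconstruct current state by deduplicating on id. A query sketch (not part of the package; note that plain inserts leave the tracking columns NULL, since _addTrackingFields only runs for converted updates/deletes):

const currentStateQuery = `
  SELECT * EXCEPT (_rn)
  FROM (
    SELECT *,
           ROW_NUMBER() OVER (
             PARTITION BY id
             ORDER BY _operation_timestamp DESC  -- NULL timestamps (original inserts) sort last
           ) AS _rn
    FROM \`my-gcp-project.analytics.users_table\`
  )
  WHERE _rn = 1
    AND IFNULL(_operation_type, 'insert') != 'delete'  -- or NOT IFNULL(_is_deleted, FALSE) in immutable mode
`;
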
@@ -413,7 +413,7 @@ export function generateMySQLAlterTable(tableName, attributes, existingSchema) {
 /**
  * Generate BigQuery table schema from S3DB resource schema
  */
-export function generateBigQuerySchema(attributes) {
+export function generateBigQuerySchema(attributes, mutability = 'append-only') {
   const fields = [];
 
   // Always add id field
@@ -446,6 +446,18 @@ export function generateBigQuerySchema(attributes) {
     fields.push({ name: 'updated_at', type: 'TIMESTAMP', mode: 'NULLABLE' });
   }
 
+  // Add tracking fields for append-only and immutable modes
+  if (mutability === 'append-only' || mutability === 'immutable') {
+    fields.push({ name: '_operation_type', type: 'STRING', mode: 'NULLABLE' });
+    fields.push({ name: '_operation_timestamp', type: 'TIMESTAMP', mode: 'NULLABLE' });
+  }
+
+  // Add additional fields for immutable mode
+  if (mutability === 'immutable') {
+    fields.push({ name: '_is_deleted', type: 'BOOL', mode: 'NULLABLE' });
+    fields.push({ name: '_version', type: 'INT64', mode: 'NULLABLE' });
+  }
+
   return fields;
 }
 
@@ -478,7 +490,7 @@ export async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId)
 /**
  * Generate BigQuery schema update (add missing fields)
  */
-export function generateBigQuerySchemaUpdate(attributes, existingSchema) {
+export function generateBigQuerySchemaUpdate(attributes, existingSchema, mutability = 'append-only') {
   const newFields = [];
 
   for (const [fieldName, fieldConfig] of Object.entries(attributes)) {
@@ -496,6 +508,26 @@ export function generateBigQuerySchemaUpdate(attributes, existingSchema) {
     });
   }
 
+  // Add tracking fields for append-only and immutable modes if they don't exist
+  if (mutability === 'append-only' || mutability === 'immutable') {
+    if (!existingSchema['_operation_type']) {
+      newFields.push({ name: '_operation_type', type: 'STRING', mode: 'NULLABLE' });
+    }
+    if (!existingSchema['_operation_timestamp']) {
+      newFields.push({ name: '_operation_timestamp', type: 'TIMESTAMP', mode: 'NULLABLE' });
+    }
+  }
+
+  // Add additional fields for immutable mode if they don't exist
+  if (mutability === 'immutable') {
+    if (!existingSchema['_is_deleted']) {
+      newFields.push({ name: '_is_deleted', type: 'BOOL', mode: 'NULLABLE' });
+    }
+    if (!existingSchema['_version']) {
+      newFields.push({ name: '_version', type: 'INT64', mode: 'NULLABLE' });
+    }
+  }
+
   return newFields;
 }
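
A consequence of the existence checks above: a table originally created in 'append-only' mode can later be switched to 'immutable' under the 'alter' strategy, and only the two extra columns are added. Sketch (the value shape of existingSchema is illustrative; the function only checks that the keys are present):

const existingSchema = {
  id: { type: 'STRING' },
  _operation_type: { type: 'STRING' },
  _operation_timestamp: { type: 'TIMESTAMP' }
};

const newFields = generateBigQuerySchemaUpdate({}, existingSchema, 'immutable');
// -> [ { name: '_is_deleted', type: 'BOOL',  mode: 'NULLABLE' },
//      { name: '_version',    type: 'INT64', mode: 'NULLABLE' } ]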
 
@@ -4,7 +4,7 @@
  * Reads Terraform/OpenTofu state files from S3 buckets
  */
 import { TfStateDriver } from './base-driver.js';
-import { Client } from '../../client.class.js';
+import { S3Client } from '../../clients/s3-client.class.js';
 import tryFn from '../../concerns/try-fn.js';
 
 export class S3TfStateDriver extends TfStateDriver {
@@ -71,8 +71,8 @@ export class S3TfStateDriver extends TfStateDriver {
   async initialize() {
     const { bucket, credentials, region } = this.connectionConfig;
 
-    // Create S3 client using s3db's Client class
-    this.client = new Client({
+    // Create S3 client using s3db's S3Client class
+    this.client = new S3Client({
       bucketName: bucket,
       credentials,
       region