s3db.js 11.3.2 → 12.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +102 -8
- package/dist/s3db.cjs.js +36664 -15480
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.d.ts +57 -0
- package/dist/s3db.es.js +36661 -15531
- package/dist/s3db.es.js.map +1 -1
- package/mcp/entrypoint.js +58 -0
- package/mcp/tools/documentation.js +434 -0
- package/mcp/tools/index.js +4 -0
- package/package.json +27 -6
- package/src/behaviors/user-managed.js +13 -6
- package/src/client.class.js +41 -46
- package/src/concerns/base62.js +85 -0
- package/src/concerns/dictionary-encoding.js +294 -0
- package/src/concerns/geo-encoding.js +256 -0
- package/src/concerns/high-performance-inserter.js +34 -30
- package/src/concerns/ip.js +325 -0
- package/src/concerns/metadata-encoding.js +345 -66
- package/src/concerns/money.js +193 -0
- package/src/concerns/partition-queue.js +7 -4
- package/src/concerns/plugin-storage.js +39 -19
- package/src/database.class.js +76 -74
- package/src/errors.js +0 -4
- package/src/plugins/api/auth/api-key-auth.js +88 -0
- package/src/plugins/api/auth/basic-auth.js +154 -0
- package/src/plugins/api/auth/index.js +112 -0
- package/src/plugins/api/auth/jwt-auth.js +169 -0
- package/src/plugins/api/index.js +539 -0
- package/src/plugins/api/middlewares/index.js +15 -0
- package/src/plugins/api/middlewares/validator.js +185 -0
- package/src/plugins/api/routes/auth-routes.js +241 -0
- package/src/plugins/api/routes/resource-routes.js +304 -0
- package/src/plugins/api/server.js +350 -0
- package/src/plugins/api/utils/error-handler.js +147 -0
- package/src/plugins/api/utils/openapi-generator.js +1240 -0
- package/src/plugins/api/utils/response-formatter.js +218 -0
- package/src/plugins/backup/streaming-exporter.js +132 -0
- package/src/plugins/backup.plugin.js +103 -50
- package/src/plugins/cache/s3-cache.class.js +95 -47
- package/src/plugins/cache.plugin.js +107 -9
- package/src/plugins/concerns/plugin-dependencies.js +313 -0
- package/src/plugins/concerns/prometheus-formatter.js +255 -0
- package/src/plugins/consumers/rabbitmq-consumer.js +4 -0
- package/src/plugins/consumers/sqs-consumer.js +4 -0
- package/src/plugins/costs.plugin.js +255 -39
- package/src/plugins/eventual-consistency/helpers.js +15 -1
- package/src/plugins/geo.plugin.js +873 -0
- package/src/plugins/importer/index.js +1020 -0
- package/src/plugins/index.js +11 -0
- package/src/plugins/metrics.plugin.js +163 -4
- package/src/plugins/queue-consumer.plugin.js +6 -27
- package/src/plugins/relation.errors.js +139 -0
- package/src/plugins/relation.plugin.js +1242 -0
- package/src/plugins/replicators/bigquery-replicator.class.js +180 -8
- package/src/plugins/replicators/dynamodb-replicator.class.js +383 -0
- package/src/plugins/replicators/index.js +28 -3
- package/src/plugins/replicators/mongodb-replicator.class.js +391 -0
- package/src/plugins/replicators/mysql-replicator.class.js +558 -0
- package/src/plugins/replicators/planetscale-replicator.class.js +409 -0
- package/src/plugins/replicators/postgres-replicator.class.js +182 -7
- package/src/plugins/replicators/s3db-replicator.class.js +1 -12
- package/src/plugins/replicators/schema-sync.helper.js +601 -0
- package/src/plugins/replicators/sqs-replicator.class.js +11 -9
- package/src/plugins/replicators/turso-replicator.class.js +416 -0
- package/src/plugins/replicators/webhook-replicator.class.js +612 -0
- package/src/plugins/state-machine.plugin.js +122 -68
- package/src/plugins/tfstate/README.md +745 -0
- package/src/plugins/tfstate/base-driver.js +80 -0
- package/src/plugins/tfstate/errors.js +112 -0
- package/src/plugins/tfstate/filesystem-driver.js +129 -0
- package/src/plugins/tfstate/index.js +2660 -0
- package/src/plugins/tfstate/s3-driver.js +192 -0
- package/src/plugins/ttl.plugin.js +536 -0
- package/src/resource.class.js +14 -10
- package/src/s3db.d.ts +57 -0
- package/src/schema.class.js +366 -32
- package/SECURITY.md +0 -76
- package/src/partition-drivers/base-partition-driver.js +0 -106
- package/src/partition-drivers/index.js +0 -66
- package/src/partition-drivers/memory-partition-driver.js +0 -289
- package/src/partition-drivers/sqs-partition-driver.js +0 -337
- package/src/partition-drivers/sync-partition-driver.js +0 -38
|
@@ -0,0 +1,601 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Schema Sync Helper - Convert S3DB resource schemas to SQL DDL
|
|
3
|
+
*
|
|
4
|
+
* This module provides utilities to automatically create and sync database tables
|
|
5
|
+
* based on S3DB resource schemas.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import tryFn from "#src/concerns/try-fn.js";
|
|
9
|
+
|
|
10
|
+
/**
 * Parse an S3DB field type notation such as 'string|required|maxlength:50'.
 *
 * The first pipe-separated segment is the base type; the remaining segments
 * are modifiers ('required', 'maxlength:N', 'min:N', 'max:N', 'length:N').
 * Unrecognized modifiers are silently ignored.
 *
 * @param {string} typeNotation - Pipe-separated type specification.
 * @returns {{type: string, required: boolean, maxLength: number|null, options: object}}
 *   Parsed descriptor; non-string input yields the default string descriptor.
 */
export function parseFieldType(typeNotation) {
  if (typeof typeNotation !== 'string') {
    return { type: 'string', required: false, maxLength: null, options: {} };
  }

  const parts = typeNotation.split('|');
  const baseType = parts[0];
  const options = {};
  let required = false;
  let maxLength = null;

  for (const part of parts.slice(1)) {
    if (part === 'required') {
      required = true;
    } else if (part.startsWith('maxlength:')) {
      // Always pass a radix: parseInt without one may misinterpret prefixed input.
      maxLength = Number.parseInt(part.split(':')[1], 10);
    } else if (part.startsWith('min:')) {
      options.min = Number.parseFloat(part.split(':')[1]);
    } else if (part.startsWith('max:')) {
      options.max = Number.parseFloat(part.split(':')[1]);
    } else if (part.startsWith('length:')) {
      options.length = Number.parseInt(part.split(':')[1], 10);
    }
  }

  return { type: baseType, required, maxLength, options };
}
|
|
40
|
+
|
|
41
|
+
/**
 * Map an S3DB field type notation to a PostgreSQL column type.
 *
 * @param {string} fieldType - S3DB type notation (e.g. 'string|maxlength:50').
 * @param {object} [fieldOptions] - Reserved for future use; currently unused.
 * @returns {string} PostgreSQL column type; unknown types fall back to TEXT.
 */
export function s3dbTypeToPostgres(fieldType, fieldOptions = {}) {
  const { type, maxLength, options } = parseFieldType(fieldType);

  if (type === 'string') {
    return maxLength ? `VARCHAR(${maxLength})` : 'TEXT';
  }

  if (type === 'number') {
    // Only use INTEGER when the declared bounds prove the value fits in int4.
    const fitsInteger = options.min !== undefined && options.min >= 0 &&
      options.max !== undefined && options.max <= 2147483647;
    return fitsInteger ? 'INTEGER' : 'DOUBLE PRECISION';
  }

  const typeMap = {
    boolean: 'BOOLEAN',
    object: 'JSONB',
    json: 'JSONB',
    array: 'JSONB',
    embedding: 'JSONB', // vector embeddings stored as JSONB (pgvector not assumed)
    ip4: 'INET',
    ip6: 'INET',
    secret: 'TEXT',
    uuid: 'UUID',
    date: 'TIMESTAMP WITH TIME ZONE',
    datetime: 'TIMESTAMP WITH TIME ZONE',
  };

  // Object.hasOwn avoids accidental hits on inherited keys like 'constructor'.
  return Object.hasOwn(typeMap, type) ? typeMap[type] : 'TEXT';
}
|
|
90
|
+
|
|
91
|
+
/**
 * Map an S3DB field type notation to a BigQuery column type.
 *
 * @param {string} fieldType - S3DB type notation.
 * @param {object} [fieldOptions] - Reserved for future use; currently unused.
 * @returns {string} BigQuery column type; unknown types fall back to STRING.
 */
export function s3dbTypeToBigQuery(fieldType, fieldOptions = {}) {
  const { type, options } = parseFieldType(fieldType);

  if (type === 'number') {
    // INT64 only when declared bounds prove the value is a small non-negative integer.
    const fitsInteger = options.min !== undefined && options.min >= 0 &&
      options.max !== undefined && options.max <= 2147483647;
    return fitsInteger ? 'INT64' : 'FLOAT64';
  }

  const typeMap = {
    string: 'STRING',
    boolean: 'BOOL',
    object: 'JSON',
    json: 'JSON',
    array: 'JSON',       // BigQuery has ARRAY types, but JSON keeps the mapping flexible
    embedding: 'JSON',   // could be ARRAY<FLOAT64>; JSON chosen for flexibility
    ip4: 'STRING',
    ip6: 'STRING',
    secret: 'STRING',
    uuid: 'STRING',
    date: 'DATE',
    datetime: 'TIMESTAMP',
  };

  return Object.hasOwn(typeMap, type) ? typeMap[type] : 'STRING';
}
|
|
143
|
+
|
|
144
|
+
/**
 * Map an S3DB field type notation to a MySQL column type.
 *
 * @param {string} fieldType - S3DB type notation.
 * @param {object} [fieldOptions] - Reserved for future use; currently unused.
 * @returns {string} MySQL column type; unknown types fall back to TEXT.
 */
export function s3dbTypeToMySQL(fieldType, fieldOptions = {}) {
  const { type, maxLength, options } = parseFieldType(fieldType);

  if (type === 'string') {
    // VARCHAR only for short bounded strings; longer/unbounded strings use TEXT.
    return maxLength && maxLength <= 255 ? `VARCHAR(${maxLength})` : 'TEXT';
  }

  if (type === 'number') {
    const fitsInteger = options.min !== undefined && options.min >= 0 &&
      options.max !== undefined && options.max <= 2147483647;
    return fitsInteger ? 'INT' : 'DOUBLE';
  }

  const typeMap = {
    boolean: 'TINYINT(1)',
    object: 'JSON',
    json: 'JSON',
    array: 'JSON',
    embedding: 'JSON',
    ip4: 'VARCHAR(15)',  // dotted-quad IPv4 maximum textual length
    ip6: 'VARCHAR(45)',  // longest textual IPv6 form (incl. IPv4-mapped)
    secret: 'TEXT',
    uuid: 'CHAR(36)',    // canonical hyphenated UUID
    date: 'DATETIME',
    datetime: 'DATETIME',
  };

  return Object.hasOwn(typeMap, type) ? typeMap[type] : 'TEXT';
}
|
|
192
|
+
|
|
193
|
+
/**
 * Build a PostgreSQL CREATE TABLE statement for an S3DB resource schema.
 *
 * `id` is always emitted as the VARCHAR(255) primary key, and created_at /
 * updated_at timestamp columns are appended unless the resource already
 * declares createdAt / updatedAt attributes.
 *
 * @param {string} tableName - Target table name (interpolated verbatim — caller must trust it).
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @returns {string} CREATE TABLE IF NOT EXISTS statement.
 */
export function generatePostgresCreateTable(tableName, attributes) {
  const columnDefs = ['id VARCHAR(255) PRIMARY KEY'];

  for (const [name, config] of Object.entries(attributes)) {
    if (name === 'id') continue; // primary key already emitted

    const notation = typeof config === 'string' ? config : config.type;
    const { required } = parseFieldType(notation);
    columnDefs.push(`"${name}" ${s3dbTypeToPostgres(notation)} ${required ? 'NOT NULL' : 'NULL'}`);
  }

  // Append audit timestamps only when the schema does not define its own.
  if (!attributes.createdAt) {
    columnDefs.push('created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()');
  }
  if (!attributes.updatedAt) {
    columnDefs.push('updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()');
  }

  return `CREATE TABLE IF NOT EXISTS ${tableName} (\n ${columnDefs.join(',\n ')}\n)`;
}
|
|
224
|
+
|
|
225
|
+
/**
 * Build a MySQL CREATE TABLE statement for an S3DB resource schema.
 *
 * `id` is always emitted as the VARCHAR(255) primary key; created_at /
 * updated_at columns are appended unless the resource declares its own
 * createdAt / updatedAt attributes. The table uses InnoDB with utf8mb4.
 *
 * @param {string} tableName - Target table name (interpolated verbatim — caller must trust it).
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @returns {string} CREATE TABLE IF NOT EXISTS statement.
 */
export function generateMySQLCreateTable(tableName, attributes) {
  const columnDefs = ['id VARCHAR(255) PRIMARY KEY'];

  for (const [name, config] of Object.entries(attributes)) {
    if (name === 'id') continue; // primary key already emitted

    const notation = typeof config === 'string' ? config : config.type;
    const { required } = parseFieldType(notation);
    columnDefs.push(`\`${name}\` ${s3dbTypeToMySQL(notation)} ${required ? 'NOT NULL' : 'NULL'}`);
  }

  // Append audit timestamps only when the schema does not define its own.
  if (!attributes.createdAt) {
    columnDefs.push('created_at DATETIME DEFAULT CURRENT_TIMESTAMP');
  }
  if (!attributes.updatedAt) {
    columnDefs.push('updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP');
  }

  return `CREATE TABLE IF NOT EXISTS ${tableName} (\n ${columnDefs.join(',\n ')}\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci`;
}
|
|
256
|
+
|
|
257
|
+
/**
 * Read the current column layout of a PostgreSQL table.
 *
 * @param {object} client - pg-style client whose `query(sql, params)` resolves to `{ rows }`.
 * @param {string} tableName - Table to inspect.
 * @returns {Promise<object|null>} Map of column name -> { type, nullable, maxLength },
 *   or null when the query fails.
 */
export async function getPostgresTableSchema(client, tableName) {
  const [ok, , result] = await tryFn(() =>
    client.query(`
      SELECT column_name, data_type, is_nullable, character_maximum_length
      FROM information_schema.columns
      WHERE table_name = $1
      ORDER BY ordinal_position
    `, [tableName])
  );

  if (!ok) return null;

  // Re-shape the row list into a name-keyed lookup object.
  return Object.fromEntries(
    result.rows.map((row) => [row.column_name, {
      type: row.data_type,
      nullable: row.is_nullable === 'YES',
      maxLength: row.character_maximum_length,
    }])
  );
}
|
|
283
|
+
|
|
284
|
+
/**
 * Read the current column layout of a MySQL table.
 *
 * @param {object} connection - mysql2/promise-style connection whose `query`
 *   resolves to `[rows, fields]`.
 * @param {string} tableName - Table to inspect.
 * @returns {Promise<object|null>} Map of column name -> { type, nullable, maxLength },
 *   or null when the query fails (e.g. connection/table unavailable).
 */
export async function getMySQLTableSchema(connection, tableName) {
  // Destructure the tryFn triple as a whole first. The previous
  // `const [ok, err, [rows]] = await tryFn(...)` form threw a TypeError when
  // the query failed (the third element is undefined and cannot be
  // array-destructured), making the `if (!ok) return null` guard unreachable.
  const [ok, , result] = await tryFn(async () => {
    return await connection.query(`
      SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE, CHARACTER_MAXIMUM_LENGTH
      FROM INFORMATION_SCHEMA.COLUMNS
      WHERE TABLE_NAME = ?
      ORDER BY ORDINAL_POSITION
    `, [tableName]);
  });

  if (!ok) return null;

  // mysql2 returns [rows, fields]; only the rows are needed here.
  const [rows] = result;

  const schema = {};
  for (const row of rows) {
    schema[row.COLUMN_NAME] = {
      type: row.DATA_TYPE,
      nullable: row.IS_NULLABLE === 'YES',
      maxLength: row.CHARACTER_MAXIMUM_LENGTH
    };
  }

  return schema;
}
|
|
310
|
+
|
|
311
|
+
/**
 * Diff an expected column schema against the actual one read from a database.
 *
 * @param {object} expectedSchema - Column name -> descriptor (with a `type` field).
 * @param {object} actualSchema - Column name -> descriptor (with a `type` field).
 * @returns {{missingColumns: string[], extraColumns: string[],
 *   typeMismatches: Array<{column: string, expected: string, actual: string}>,
 *   hasChanges: boolean}} Differences between the two schemas.
 */
export function compareSchemas(expectedSchema, actualSchema) {
  const expectedNames = Object.keys(expectedSchema);
  const actualNames = Object.keys(actualSchema);

  // Columns the database lacks / has beyond the expectation.
  const missingColumns = expectedNames.filter((name) => !actualSchema[name]);
  const extraColumns = actualNames.filter((name) => !expectedSchema[name]);

  // Shallow type comparison on columns present on both sides.
  const typeMismatches = expectedNames
    .filter((name) => actualSchema[name] && actualSchema[name].type !== expectedSchema[name].type)
    .map((name) => ({
      column: name,
      expected: expectedSchema[name].type,
      actual: actualSchema[name].type,
    }));

  return {
    missingColumns,
    extraColumns,
    typeMismatches,
    hasChanges: missingColumns.length > 0 || extraColumns.length > 0 || typeMismatches.length > 0,
  };
}
|
|
351
|
+
|
|
352
|
+
/**
 * Build ALTER TABLE ... ADD COLUMN statements for attributes that are missing
 * from an existing PostgreSQL table. Existing columns and `id` are skipped.
 *
 * @param {string} tableName - Target table name (interpolated verbatim).
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @param {object} existingSchema - Column name -> descriptor, as read from the database.
 * @returns {string[]} One ALTER TABLE statement per missing column.
 */
export function generatePostgresAlterTable(tableName, attributes, existingSchema) {
  return Object.entries(attributes)
    .filter(([name]) => name !== 'id' && !existingSchema[name])
    .map(([name, config]) => {
      const notation = typeof config === 'string' ? config : config.type;
      const { required } = parseFieldType(notation);
      const nullability = required ? 'NOT NULL' : 'NULL';
      return `ALTER TABLE ${tableName} ADD COLUMN IF NOT EXISTS "${name}" ${s3dbTypeToPostgres(notation)} ${nullability}`;
    });
}
|
|
372
|
+
|
|
373
|
+
/**
 * Build ALTER TABLE ... ADD COLUMN statements for attributes that are missing
 * from an existing MySQL table. Existing columns and `id` are skipped.
 * Note: no IF NOT EXISTS — callers must check existingSchema beforehand.
 *
 * @param {string} tableName - Target table name (interpolated verbatim).
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @param {object} existingSchema - Column name -> descriptor, as read from the database.
 * @returns {string[]} One ALTER TABLE statement per missing column.
 */
export function generateMySQLAlterTable(tableName, attributes, existingSchema) {
  return Object.entries(attributes)
    .filter(([name]) => name !== 'id' && !existingSchema[name])
    .map(([name, config]) => {
      const notation = typeof config === 'string' ? config : config.type;
      const { required } = parseFieldType(notation);
      const nullability = required ? 'NOT NULL' : 'NULL';
      return `ALTER TABLE ${tableName} ADD COLUMN \`${name}\` ${s3dbTypeToMySQL(notation)} ${nullability}`;
    });
}
|
|
393
|
+
|
|
394
|
+
/**
 * Build a BigQuery field-schema array from an S3DB resource schema.
 *
 * `id` is always the first field (REQUIRED STRING); created_at / updated_at
 * TIMESTAMP fields are appended unless the resource declares its own
 * createdAt / updatedAt attributes.
 *
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @returns {Array<{name: string, type: string, mode: string}>} BigQuery schema fields.
 */
export function generateBigQuerySchema(attributes) {
  const fields = [{ name: 'id', type: 'STRING', mode: 'REQUIRED' }];

  for (const [name, config] of Object.entries(attributes)) {
    if (name === 'id') continue; // id already emitted

    const notation = typeof config === 'string' ? config : config.type;
    const { required } = parseFieldType(notation);
    fields.push({
      name,
      type: s3dbTypeToBigQuery(notation),
      mode: required ? 'REQUIRED' : 'NULLABLE',
    });
  }

  // Append audit timestamps only when the schema does not define its own.
  if (!attributes.createdAt) {
    fields.push({ name: 'created_at', type: 'TIMESTAMP', mode: 'NULLABLE' });
  }
  if (!attributes.updatedAt) {
    fields.push({ name: 'updated_at', type: 'TIMESTAMP', mode: 'NULLABLE' });
  }

  return fields;
}
|
|
432
|
+
|
|
433
|
+
/**
 * Read the current field schema of a BigQuery table.
 *
 * @param {object} bigqueryClient - @google-cloud/bigquery client instance.
 * @param {string} datasetId - Dataset containing the table.
 * @param {string} tableId - Table to inspect.
 * @returns {Promise<object|null>} Map of field name -> { type, mode },
 *   or null when the metadata fetch fails (e.g. table does not exist).
 */
export async function getBigQueryTableSchema(bigqueryClient, datasetId, tableId) {
  const [ok, , metadata] = await tryFn(async () => {
    const tableRef = bigqueryClient.dataset(datasetId).table(tableId);
    const [meta] = await tableRef.getMetadata();
    return meta;
  });

  if (!ok) return null;

  const schema = {};
  // metadata.schema may be absent for tables created without an explicit schema.
  const fields = (metadata.schema && metadata.schema.fields) || [];
  for (const field of fields) {
    schema[field.name] = { type: field.type, mode: field.mode };
  }

  return schema;
}
|
|
458
|
+
|
|
459
|
+
/**
 * Build the list of BigQuery fields that must be added to bring an existing
 * table up to date with the resource schema. Existing fields and `id` are skipped.
 *
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @param {object} existingSchema - Field name -> descriptor, as read from BigQuery.
 * @returns {Array<{name: string, type: string, mode: string}>} Fields to append.
 */
export function generateBigQuerySchemaUpdate(attributes, existingSchema) {
  return Object.entries(attributes)
    .filter(([name]) => name !== 'id' && !existingSchema[name])
    .map(([name, config]) => {
      const notation = typeof config === 'string' ? config : config.type;
      const { required } = parseFieldType(notation);
      return {
        name,
        type: s3dbTypeToBigQuery(notation),
        mode: required ? 'REQUIRED' : 'NULLABLE',
      };
    });
}
|
|
482
|
+
|
|
483
|
+
/**
 * Map an S3DB field type notation to a SQLite column type (used by Turso).
 *
 * SQLite's type system is minimal: numbers become INTEGER/REAL, booleans are
 * stored as 0/1 INTEGER, and everything else (strings, JSON, dates as ISO
 * strings, IPs, UUIDs, embeddings) is stored as TEXT.
 *
 * @param {string} fieldType - S3DB type notation.
 * @param {object} [fieldOptions] - Reserved for future use; currently unused.
 * @returns {string} SQLite column type.
 */
export function s3dbTypeToSQLite(fieldType, fieldOptions = {}) {
  const { type, options } = parseFieldType(fieldType);

  if (type === 'number') {
    // INTEGER only when the declared bounds prove the value fits in int32.
    const fitsInteger = options.min !== undefined && options.min >= 0 &&
      options.max !== undefined && options.max <= 2147483647;
    return fitsInteger ? 'INTEGER' : 'REAL';
  }

  if (type === 'boolean') {
    return 'INTEGER'; // stored as 0 or 1
  }

  // All remaining types (string, object/json/array, embedding, ip4/ip6,
  // secret, uuid, date/datetime, unknown) serialize to TEXT.
  return 'TEXT';
}
|
|
529
|
+
|
|
530
|
+
/**
 * Build a SQLite CREATE TABLE statement for an S3DB resource schema.
 *
 * `id` is always emitted as the TEXT primary key; created_at / updated_at
 * TEXT columns defaulting to datetime('now') are appended unless the resource
 * declares its own createdAt / updatedAt attributes.
 *
 * @param {string} tableName - Target table name (interpolated verbatim).
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @returns {string} CREATE TABLE IF NOT EXISTS statement.
 */
export function generateSQLiteCreateTable(tableName, attributes) {
  const columnDefs = ['id TEXT PRIMARY KEY'];

  for (const [name, config] of Object.entries(attributes)) {
    if (name === 'id') continue; // primary key already emitted

    const notation = typeof config === 'string' ? config : config.type;
    const { required } = parseFieldType(notation);
    columnDefs.push(`${name} ${s3dbTypeToSQLite(notation)} ${required ? 'NOT NULL' : 'NULL'}`);
  }

  // Append audit timestamps only when the schema does not define its own.
  if (!attributes.createdAt) {
    columnDefs.push("created_at TEXT DEFAULT (datetime('now'))");
  }
  if (!attributes.updatedAt) {
    columnDefs.push("updated_at TEXT DEFAULT (datetime('now'))");
  }

  return `CREATE TABLE IF NOT EXISTS ${tableName} (\n ${columnDefs.join(',\n ')}\n)`;
}
|
|
561
|
+
|
|
562
|
+
/**
 * Build ALTER TABLE ... ADD COLUMN statements for attributes that are missing
 * from an existing SQLite table. Existing columns and `id` are skipped.
 *
 * @param {string} tableName - Target table name (interpolated verbatim).
 * @param {object} attributes - S3DB attribute map (name -> type notation or config object).
 * @param {object} existingSchema - Column name -> descriptor, as read from the database.
 * @returns {string[]} One ALTER TABLE statement per missing column.
 */
export function generateSQLiteAlterTable(tableName, attributes, existingSchema) {
  return Object.entries(attributes)
    .filter(([name]) => name !== 'id' && !existingSchema[name])
    .map(([name, config]) => {
      const notation = typeof config === 'string' ? config : config.type;
      const { required } = parseFieldType(notation);
      const nullability = required ? 'NOT NULL' : 'NULL';
      return `ALTER TABLE ${tableName} ADD COLUMN ${name} ${s3dbTypeToSQLite(notation)} ${nullability}`;
    });
}
|
|
582
|
+
|
|
583
|
+
// Aggregate default export so consumers can import the whole helper module as
// one object; every function here is also available as a named export.
// Key order intentionally mirrors the declaration order above.
export default {
  parseFieldType,
  s3dbTypeToPostgres,
  s3dbTypeToMySQL,
  s3dbTypeToBigQuery,
  s3dbTypeToSQLite,
  generatePostgresCreateTable,
  generateMySQLCreateTable,
  generateBigQuerySchema,
  generateSQLiteCreateTable,
  getPostgresTableSchema,
  getMySQLTableSchema,
  getBigQueryTableSchema,
  compareSchemas,
  generatePostgresAlterTable,
  generateMySQLAlterTable,
  generateBigQuerySchemaUpdate,
  generateSQLiteAlterTable
};
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import tryFn from "#src/concerns/try-fn.js";
|
|
2
|
+
import requirePluginDependency from "#src/plugins/concerns/plugin-dependencies.js";
|
|
2
3
|
import BaseReplicator from './base-replicator.class.js';
|
|
3
4
|
|
|
4
5
|
/**
|
|
@@ -32,8 +33,7 @@ class SqsReplicator extends BaseReplicator {
|
|
|
32
33
|
this.client = client;
|
|
33
34
|
this.queueUrl = config.queueUrl;
|
|
34
35
|
this.queues = config.queues || {};
|
|
35
|
-
|
|
36
|
-
this.defaultQueue = config.defaultQueue || config.defaultQueueUrl || config.queueUrlDefault || null;
|
|
36
|
+
this.defaultQueue = config.defaultQueue || null;
|
|
37
37
|
this.region = config.region || 'us-east-1';
|
|
38
38
|
this.sqsClient = client || null;
|
|
39
39
|
this.messageGroupId = config.messageGroupId;
|
|
@@ -94,19 +94,17 @@ class SqsReplicator extends BaseReplicator {
|
|
|
94
94
|
  /**
   * Prepare a record for delivery to SQS for the given resource.
   *
   * Strips s3db-internal fields via `this._cleanInternalFields`, then applies
   * the per-resource `transform` function when one is configured in
   * `this.resources[resource]`. Falls back to the cleaned data when no entry
   * exists or the transform returns a falsy value.
   *
   * NOTE(review): reconstructed from a diff hunk of SqsReplicator — the
   * legacy `entry.transformer` fallback was removed in 12.0.0; confirm
   * against the full class before relying on this.
   */
  _applyTransformer(resource, data) {
    // First, clean internal fields that shouldn't go to SQS
    let cleanData = this._cleanInternalFields(data);

    const entry = this.resources[resource];
    let result = cleanData;

    if (!entry) return cleanData;

    // Apply transform function if configured
    if (typeof entry.transform === 'function') {
      result = entry.transform(cleanData);
    }

    return result || cleanData;
  }
|
|
112
110
|
|
|
@@ -163,6 +161,10 @@ class SqsReplicator extends BaseReplicator {
|
|
|
163
161
|
|
|
164
162
|
async initialize(database, client) {
|
|
165
163
|
await super.initialize(database);
|
|
164
|
+
|
|
165
|
+
// Validate plugin dependencies are installed
|
|
166
|
+
await requirePluginDependency('sqs-replicator');
|
|
167
|
+
|
|
166
168
|
if (!this.sqsClient) {
|
|
167
169
|
const [ok, err, sdk] = await tryFn(() => import('@aws-sdk/client-sqs'));
|
|
168
170
|
if (!ok) {
|