s3db.js 12.2.4 → 12.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +117 -0
- package/dist/s3db.cjs.js +1596 -167
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +1499 -73
- package/dist/s3db.es.js.map +1 -1
- package/package.json +2 -2
- package/src/behaviors/body-only.js +15 -5
- package/src/behaviors/body-overflow.js +9 -0
- package/src/behaviors/user-managed.js +8 -1
- package/src/clients/index.js +14 -0
- package/src/clients/memory-client.class.js +883 -0
- package/src/clients/memory-client.md +917 -0
- package/src/clients/memory-storage.class.js +504 -0
- package/src/{client.class.js → clients/s3-client.class.js} +11 -10
- package/src/concerns/typescript-generator.js +12 -2
- package/src/database.class.js +2 -2
- package/src/index.js +2 -1
- package/src/plugins/api/utils/openapi-generator.js +21 -2
- package/src/plugins/replicators/bigquery-replicator.class.js +109 -21
- package/src/plugins/replicators/mysql-replicator.class.js +9 -1
- package/src/plugins/replicators/planetscale-replicator.class.js +9 -1
- package/src/plugins/replicators/postgres-replicator.class.js +9 -1
- package/src/plugins/replicators/schema-sync.helper.js +53 -2
- package/src/plugins/replicators/turso-replicator.class.js +9 -1
- package/src/plugins/tfstate/s3-driver.js +3 -3
- package/src/plugins/vector.plugin.js +3 -3
- package/src/resource.class.js +203 -4
- package/src/schema.class.js +223 -33
|
@@ -0,0 +1,883 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MemoryClient - In-Memory S3 Client Implementation
|
|
3
|
+
*
|
|
4
|
+
* Drop-in replacement for the standard S3 Client that stores everything in memory.
|
|
5
|
+
* Implements the complete Client interface including all AWS SDK commands.
|
|
6
|
+
*
|
|
7
|
+
* Usage:
|
|
8
|
+
* import { Database } from 's3db.js';
|
|
9
|
+
* import { MemoryClient } from 's3db.js/plugins/emulator';
|
|
10
|
+
*
|
|
11
|
+
* const db = new Database({ client: new MemoryClient() });
|
|
12
|
+
* await db.connect();
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
import path from 'path';
|
|
16
|
+
import EventEmitter from 'events';
|
|
17
|
+
import { chunk } from 'lodash-es';
|
|
18
|
+
import { PromisePool } from '@supercharge/promise-pool';
|
|
19
|
+
|
|
20
|
+
import tryFn from '../concerns/try-fn.js';
|
|
21
|
+
import { idGenerator } from '../concerns/id.js';
|
|
22
|
+
import { metadataEncode, metadataDecode } from '../concerns/metadata-encoding.js';
|
|
23
|
+
import { mapAwsError } from '../errors.js';
|
|
24
|
+
import { MemoryStorage } from './memory-storage.class.js';
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* MemoryClient - simulates S3Client entirely in memory
|
|
28
|
+
*/
|
|
29
|
+
export class MemoryClient extends EventEmitter {
  /**
   * @param {Object} [config] - Client configuration.
   * @param {string} [config.id] - Client identifier (auto-generated when omitted).
   * @param {boolean} [config.verbose=false] - Log lifecycle messages to the console.
   * @param {number} [config.parallelism=10] - Concurrency for batched operations.
   * @param {string} [config.bucket='s3db'] - Simulated bucket name.
   * @param {string} [config.keyPrefix=''] - Prefix prepended to every key.
   * @param {string} [config.region='us-east-1'] - Simulated AWS region.
   * @param {boolean} [config.enforceLimits=false] - Enforce S3-like metadata/object size limits.
   * @param {number} [config.metadataLimit=2048] - Max metadata bytes (when limits enforced).
   * @param {number} [config.maxObjectSize] - Max object size in bytes (default 5 GiB).
   * @param {string} [config.persistPath] - Optional path for on-disk persistence.
   * @param {boolean} [config.autoPersist=false] - Persist automatically on writes.
   */
  constructor(config = {}) {
    super();

    // Client configuration
    this.id = config.id || idGenerator(77);
    this.verbose = config.verbose || false;
    this.parallelism = config.parallelism || 10;

    // Storage configuration
    this.bucket = config.bucket || 's3db';
    this.keyPrefix = config.keyPrefix || '';
    this.region = config.region || 'us-east-1';

    // Create internal storage engine
    this.storage = new MemoryStorage({
      bucket: this.bucket,
      enforceLimits: config.enforceLimits || false,
      metadataLimit: config.metadataLimit || 2048,
      maxObjectSize: config.maxObjectSize || 5 * 1024 * 1024 * 1024,
      persistPath: config.persistPath,
      autoPersist: config.autoPersist || false,
      verbose: this.verbose
    });

    // Mock config object (for compatibility with Client interface)
    this.config = {
      bucket: this.bucket,
      keyPrefix: this.keyPrefix,
      region: this.region,
      endpoint: 'memory://localhost',
      forcePathStyle: true
    };

    if (this.verbose) {
      console.log(`[MemoryClient] Initialized (id: ${this.id}, bucket: ${this.bucket})`);
    }
  }

  /**
   * Simulate sendCommand from AWS SDK.
   * Used by Database/Resource to send AWS SDK commands.
   * Routes on the command constructor name; unknown commands throw.
   * Errors are normalized to AWS SDK shape via mapAwsError before rethrow.
   *
   * @param {Object} command - AWS SDK v3 command instance (uses `constructor.name` and `.input`).
   * @returns {Promise<Object>} Command response in AWS SDK shape.
   */
  async sendCommand(command) {
    const commandName = command.constructor.name;
    const input = command.input || {};

    this.emit('command.request', commandName, input);

    let response;

    try {
      // Route to appropriate handler based on command type
      switch (commandName) {
        case 'PutObjectCommand':
          response = await this._handlePutObject(input);
          break;
        case 'GetObjectCommand':
          response = await this._handleGetObject(input);
          break;
        case 'HeadObjectCommand':
          response = await this._handleHeadObject(input);
          break;
        case 'CopyObjectCommand':
          response = await this._handleCopyObject(input);
          break;
        case 'DeleteObjectCommand':
          response = await this._handleDeleteObject(input);
          break;
        case 'DeleteObjectsCommand':
          response = await this._handleDeleteObjects(input);
          break;
        case 'ListObjectsV2Command':
          response = await this._handleListObjects(input);
          break;
        default:
          throw new Error(`Unsupported command: ${commandName}`);
      }

      this.emit('command.response', commandName, response, input);
      return response;

    } catch (error) {
      // Map errors to AWS SDK format
      const mappedError = mapAwsError(error, {
        bucket: this.bucket,
        key: input.Key,
        commandName,
        commandInput: input
      });
      throw mappedError;
    }
  }

  /**
   * PutObjectCommand handler. Passes raw (already AWS-shaped) input to storage.
   * Note: does NOT apply keyPrefix or metadata encoding — raw command path.
   */
  async _handlePutObject(input) {
    const key = input.Key;
    const metadata = input.Metadata || {};
    const contentType = input.ContentType;
    const body = input.Body;
    const contentEncoding = input.ContentEncoding;
    const contentLength = input.ContentLength;
    const ifMatch = input.IfMatch;

    return await this.storage.put(key, {
      body,
      metadata,
      contentType,
      contentEncoding,
      contentLength,
      ifMatch
    });
  }

  /**
   * GetObjectCommand handler (raw key, no prefix/decoding).
   */
  async _handleGetObject(input) {
    const key = input.Key;
    return await this.storage.get(key);
  }

  /**
   * HeadObjectCommand handler (raw key, no prefix/decoding).
   */
  async _handleHeadObject(input) {
    const key = input.Key;
    return await this.storage.head(key);
  }

  /**
   * CopyObjectCommand handler.
   * CopySource arrives as "bucket/key"; the bucket segment is dropped.
   */
  async _handleCopyObject(input) {
    // Parse source: "bucket/key" format
    const copySource = input.CopySource;
    const parts = copySource.split('/');
    const sourceKey = parts.slice(1).join('/'); // Remove bucket part

    const destinationKey = input.Key;
    const metadata = input.Metadata;
    const metadataDirective = input.MetadataDirective;
    const contentType = input.ContentType;

    return await this.storage.copy(sourceKey, destinationKey, {
      metadata,
      metadataDirective,
      contentType
    });
  }

  /**
   * DeleteObjectCommand handler (raw key, no prefix).
   */
  async _handleDeleteObject(input) {
    const key = input.Key;
    return await this.storage.delete(key);
  }

  /**
   * DeleteObjectsCommand handler. Extracts keys from Delete.Objects.
   */
  async _handleDeleteObjects(input) {
    const objects = input.Delete?.Objects || [];
    const keys = objects.map(obj => obj.Key);
    return await this.storage.deleteMultiple(keys);
  }

  /**
   * ListObjectsV2Command handler.
   * Joins the client keyPrefix with the request Prefix when both exist.
   */
  async _handleListObjects(input) {
    const fullPrefix = this.keyPrefix && input.Prefix
      ? path.join(this.keyPrefix, input.Prefix)
      : (this.keyPrefix || input.Prefix || '');

    return await this.storage.list({
      prefix: fullPrefix,
      delimiter: input.Delimiter,
      maxKeys: input.MaxKeys,
      continuationToken: input.ContinuationToken
    });
  }

  /**
   * Put an object (Client interface method).
   * Applies keyPrefix and s3db metadata encoding (keys sanitized to [a-zA-Z0-9-_]).
   *
   * @param {Object} params
   * @param {string} params.key - Object key (without keyPrefix).
   * @param {Object} [params.metadata] - Metadata values; encoded via metadataEncode.
   * @param {string} [params.contentType]
   * @param {Buffer|string|Uint8Array} [params.body]
   * @param {string} [params.contentEncoding]
   * @param {number} [params.contentLength]
   * @param {string} [params.ifMatch] - Conditional write ETag.
   * @returns {Promise<Object>} Storage put response.
   */
  async putObject({ key, metadata, contentType, body, contentEncoding, contentLength, ifMatch }) {
    const fullKey = this.keyPrefix ? path.join(this.keyPrefix, key) : key;

    // Encode metadata using s3db encoding
    const stringMetadata = {};
    if (metadata) {
      for (const [k, v] of Object.entries(metadata)) {
        const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, '_');
        const { encoded } = metadataEncode(v);
        stringMetadata[validKey] = encoded;
      }
    }

    const response = await this.storage.put(fullKey, {
      body,
      metadata: stringMetadata,
      contentType,
      contentEncoding,
      contentLength,
      ifMatch
    });

    this.emit('putObject', null, { key, metadata, contentType, body, contentEncoding, contentLength });

    return response;
  }

  /**
   * Get an object (Client interface method).
   * Applies keyPrefix and decodes metadata via metadataDecode.
   *
   * @param {string} key - Object key (without keyPrefix).
   * @returns {Promise<Object>} Response with decoded Metadata.
   */
  async getObject(key) {
    const fullKey = this.keyPrefix ? path.join(this.keyPrefix, key) : key;
    const response = await this.storage.get(fullKey);

    // Decode metadata
    const decodedMetadata = {};
    if (response.Metadata) {
      for (const [k, v] of Object.entries(response.Metadata)) {
        decodedMetadata[k] = metadataDecode(v);
      }
    }

    this.emit('getObject', null, { key });

    return {
      ...response,
      Metadata: decodedMetadata
    };
  }

  /**
   * Head object (get metadata only).
   * Applies keyPrefix and decodes metadata via metadataDecode.
   *
   * @param {string} key - Object key (without keyPrefix).
   * @returns {Promise<Object>} Response with decoded Metadata (no Body).
   */
  async headObject(key) {
    const fullKey = this.keyPrefix ? path.join(this.keyPrefix, key) : key;
    const response = await this.storage.head(fullKey);

    // Decode metadata
    const decodedMetadata = {};
    if (response.Metadata) {
      for (const [k, v] of Object.entries(response.Metadata)) {
        decodedMetadata[k] = metadataDecode(v);
      }
    }

    this.emit('headObject', null, { key });

    return {
      ...response,
      Metadata: decodedMetadata
    };
  }

  /**
   * Copy an object.
   *
   * @param {Object} params
   * @param {string} params.from - Source key (without keyPrefix).
   * @param {string} params.to - Destination key (without keyPrefix).
   * @param {Object} [params.metadata] - Replacement metadata (encoded before storing).
   * @param {string} [params.metadataDirective] - 'COPY' or 'REPLACE'.
   * @param {string} [params.contentType]
   * @returns {Promise<Object>} Storage copy response.
   */
  async copyObject({ from, to, metadata, metadataDirective, contentType }) {
    const fullFrom = this.keyPrefix ? path.join(this.keyPrefix, from) : from;
    const fullTo = this.keyPrefix ? path.join(this.keyPrefix, to) : to;

    // Encode new metadata if provided
    const encodedMetadata = {};
    if (metadata) {
      for (const [k, v] of Object.entries(metadata)) {
        const validKey = String(k).replace(/[^a-zA-Z0-9\-_]/g, '_');
        const { encoded } = metadataEncode(v);
        encodedMetadata[validKey] = encoded;
      }
    }

    const response = await this.storage.copy(fullFrom, fullTo, {
      metadata: encodedMetadata,
      metadataDirective,
      contentType
    });

    this.emit('copyObject', null, { from, to, metadata, metadataDirective });

    return response;
  }

  /**
   * Check if object exists.
   * @param {string} key - Object key (without keyPrefix).
   * @returns {Promise<boolean>}
   */
  async exists(key) {
    const fullKey = this.keyPrefix ? path.join(this.keyPrefix, key) : key;
    return this.storage.exists(fullKey);
  }

  /**
   * Delete an object.
   * @param {string} key - Object key (without keyPrefix).
   * @returns {Promise<Object>} Storage delete response.
   */
  async deleteObject(key) {
    const fullKey = this.keyPrefix ? path.join(this.keyPrefix, key) : key;
    const response = await this.storage.delete(fullKey);

    this.emit('deleteObject', null, { key });

    return response;
  }

  /**
   * Delete multiple objects (batch).
   * Splits keys into chunks and deletes them with bounded concurrency.
   *
   * @param {Array<string>} keys - Keys (without keyPrefix).
   * @returns {Promise<{Deleted: Array, Errors: Array}>} Merged batch results.
   */
  async deleteObjects(keys) {
    // Add keyPrefix to all keys
    const fullKeys = keys.map(key =>
      this.keyPrefix ? path.join(this.keyPrefix, key) : key
    );

    // Split into batches for parallel processing
    const batches = chunk(fullKeys, this.parallelism);
    const allResults = { Deleted: [], Errors: [] };

    const { results } = await PromisePool
      .withConcurrency(this.parallelism)
      .for(batches)
      .process(async (batch) => {
        return await this.storage.deleteMultiple(batch);
      });

    // Merge results
    for (const result of results) {
      allResults.Deleted.push(...result.Deleted);
      allResults.Errors.push(...result.Errors);
    }

    this.emit('deleteObjects', null, { keys, count: allResults.Deleted.length });

    return allResults;
  }

  /**
   * List objects with pagination support.
   *
   * @param {Object} params
   * @param {string} [params.prefix=''] - Prefix (without keyPrefix).
   * @param {string|null} [params.delimiter=null]
   * @param {number} [params.maxKeys=1000]
   * @param {string|null} [params.continuationToken=null]
   * @returns {Promise<Object>} ListObjectsV2-shaped response.
   */
  async listObjects({ prefix = '', delimiter = null, maxKeys = 1000, continuationToken = null }) {
    const fullPrefix = this.keyPrefix ? path.join(this.keyPrefix, prefix) : prefix;

    const response = await this.storage.list({
      prefix: fullPrefix,
      delimiter,
      maxKeys,
      continuationToken
    });

    this.emit('listObjects', null, { prefix, count: response.Contents.length });

    return response;
  }

  /**
   * Get a page of keys with offset/limit pagination.
   * Returned keys have the keyPrefix stripped.
   *
   * @param {Object} [params]
   * @param {string} [params.prefix='']
   * @param {number} [params.offset=0]
   * @param {number} [params.amount=100]
   * @returns {Promise<Array<string>>}
   */
  async getKeysPage(params = {}) {
    const { prefix = '', offset = 0, amount = 100 } = params;
    let keys = [];
    let truncated = true;
    let continuationToken;

    // If offset > 0, need to skip ahead
    if (offset > 0) {
      // For simplicity, fetch all up to offset + amount and slice
      const fullPrefix = this.keyPrefix ? path.join(this.keyPrefix, prefix) : prefix;
      const response = await this.storage.list({
        prefix: fullPrefix,
        maxKeys: offset + amount
      });
      keys = response.Contents.map(x => x.Key).slice(offset, offset + amount);
    } else {
      // Regular fetch with amount as maxKeys
      while (truncated) {
        const options = {
          prefix,
          continuationToken,
          maxKeys: amount - keys.length
        };
        const res = await this.listObjects(options);
        if (res.Contents) {
          keys = keys.concat(res.Contents.map(x => x.Key));
        }
        truncated = res.IsTruncated || false;
        continuationToken = res.NextContinuationToken;
        if (keys.length >= amount) {
          keys = keys.slice(0, amount);
          break;
        }
      }
    }

    // Strip keyPrefix from results
    if (this.keyPrefix) {
      keys = keys
        .map(x => x.replace(this.keyPrefix, ''))
        .map(x => (x.startsWith('/') ? x.replace('/', '') : x));
    }

    this.emit('getKeysPage', keys, params);
    return keys;
  }

  /**
   * Get all keys with a given prefix.
   * Returned keys have the keyPrefix stripped.
   * NOTE: capped at 100000 keys per underlying list call.
   *
   * @param {Object} params
   * @param {string} [params.prefix='']
   * @returns {Promise<Array<string>>}
   */
  async getAllKeys({ prefix = '' }) {
    const fullPrefix = this.keyPrefix ? path.join(this.keyPrefix, prefix) : prefix;
    const response = await this.storage.list({
      prefix: fullPrefix,
      maxKeys: 100000 // Large number to get all
    });

    let keys = response.Contents.map(x => x.Key);

    // Strip keyPrefix from results
    if (this.keyPrefix) {
      keys = keys
        .map(x => x.replace(this.keyPrefix, ''))
        .map(x => (x.startsWith('/') ? x.replace('/', '') : x));
    }

    this.emit('getAllKeys', keys, { prefix });
    return keys;
  }

  /**
   * Count total objects under a prefix.
   * @param {Object} [params]
   * @param {string} [params.prefix='']
   * @returns {Promise<number>}
   */
  async count({ prefix = '' } = {}) {
    const keys = await this.getAllKeys({ prefix });
    const count = keys.length;
    this.emit('count', count, { prefix });
    return count;
  }

  /**
   * Delete all objects under a prefix.
   * @param {Object} [params]
   * @param {string} [params.prefix='']
   * @returns {Promise<number>} Number of deleted objects.
   */
  async deleteAll({ prefix = '' } = {}) {
    const keys = await this.getAllKeys({ prefix });
    let totalDeleted = 0;

    if (keys.length > 0) {
      const result = await this.deleteObjects(keys);
      totalDeleted = result.Deleted.length;

      this.emit('deleteAll', {
        prefix,
        batch: totalDeleted,
        total: totalDeleted
      });
    }

    this.emit('deleteAllComplete', {
      prefix,
      totalDeleted
    });

    return totalDeleted;
  }

  /**
   * Get continuation token after skipping offset items.
   * Returns the key at the offset position (keyPrefix-stripped) as the token,
   * or null when offset is 0 or beyond the available keys.
   *
   * @param {Object} [params]
   * @param {string} [params.prefix='']
   * @param {number} [params.offset=1000]
   * @returns {Promise<string|null>}
   */
  async getContinuationTokenAfterOffset({ prefix = '', offset = 1000 } = {}) {
    if (offset === 0) return null;

    const keys = await this.getAllKeys({ prefix });

    // If offset is beyond available keys, return null
    if (offset >= keys.length) {
      this.emit('getContinuationTokenAfterOffset', null, { prefix, offset });
      return null;
    }

    // Return the key at offset position as continuation token
    const token = keys[offset];
    this.emit('getContinuationTokenAfterOffset', token, { prefix, offset });
    return token;
  }

  /**
   * Move an object from one key to another (copy then delete source).
   * @param {Object} params
   * @param {string} params.from - Source key.
   * @param {string} params.to - Destination key.
   */
  async moveObject({ from, to }) {
    await this.copyObject({ from, to, metadataDirective: 'COPY' });
    await this.deleteObject(from);
  }

  /**
   * Move all objects from one prefix to another.
   * Continues past individual failures; throws an aggregate Error (with
   * `.context` detail) when any move fails.
   *
   * @param {Object} params
   * @param {string} params.prefixFrom
   * @param {string} params.prefixTo
   * @returns {Promise<Array<string>>} Destination keys of successful moves.
   * @throws {Error} When one or more objects could not be moved.
   */
  async moveAllObjects({ prefixFrom, prefixTo }) {
    const keys = await this.getAllKeys({ prefix: prefixFrom });
    const results = [];
    const errors = [];

    for (const key of keys) {
      try {
        const to = key.replace(prefixFrom, prefixTo);
        await this.moveObject({ from: key, to });
        results.push(to);
      } catch (error) {
        errors.push({
          message: error.message,
          raw: error,
          key
        });
      }
    }

    this.emit('moveAllObjects', { results, errors }, { prefixFrom, prefixTo });

    if (errors.length > 0) {
      const error = new Error('Some objects could not be moved');
      error.context = {
        bucket: this.bucket,
        operation: 'moveAllObjects',
        prefixFrom,
        prefixTo,
        totalKeys: keys.length,
        failedCount: errors.length,
        successCount: results.length,
        errors
      };
      throw error;
    }

    return results;
  }

  /**
   * Create a snapshot of current storage state.
   */
  snapshot() {
    return this.storage.snapshot();
  }

  /**
   * Restore from a snapshot.
   * @param {Object} snapshot - Value previously returned by snapshot().
   */
  restore(snapshot) {
    return this.storage.restore(snapshot);
  }

  /**
   * Save current state to disk (persistence).
   * @param {string} filePath - Destination file path.
   */
  async saveToDisk(filePath) {
    return await this.storage.saveToDisk(filePath);
  }

  /**
   * Load state from disk.
   * @param {string} filePath - Source file path.
   */
  async loadFromDisk(filePath) {
    return await this.storage.loadFromDisk(filePath);
  }

  /**
   * Export to BackupPlugin-compatible format (s3db.json + JSONL files)
   * Compatible with BackupPlugin for easy migration
   *
   * @param {string} outputDir - Output directory path
   * @param {Object} options - Export options
   * @param {Array<string>} options.resources - Resource names to export (default: all)
   * @param {boolean} options.compress - Use gzip compression (default: true)
   * @param {Object} options.database - Database instance for schema metadata
   * @returns {Promise<Object>} Export manifest with file paths and stats
   */
  async exportBackup(outputDir, options = {}) {
    const { mkdir, writeFile } = await import('fs/promises');
    const zlib = await import('zlib');
    const { promisify } = await import('util');
    const gzip = promisify(zlib.gzip);

    await mkdir(outputDir, { recursive: true });

    const compress = options.compress !== false;
    const database = options.database;
    const resourceFilter = options.resources;

    // Get all keys grouped by resource
    const allKeys = await this.getAllKeys({});
    const resourceMap = new Map();

    // Group keys by resource name (keys look like "resource=<name>/id=<id>")
    for (const key of allKeys) {
      const match = key.match(/^resource=([^/]+)\//);
      if (match) {
        const resourceName = match[1];
        if (!resourceFilter || resourceFilter.includes(resourceName)) {
          if (!resourceMap.has(resourceName)) {
            resourceMap.set(resourceName, []);
          }
          resourceMap.get(resourceName).push(key);
        }
      }
    }

    const exportedFiles = {};
    const resourceStats = {};

    // Export each resource to JSONL format
    for (const [resourceName, keys] of resourceMap.entries()) {
      const records = [];

      for (const key of keys) {
        const obj = await this.getObject(key);

        // Extract id from key (e.g., resource=products/id=pr1 -> pr1)
        const idMatch = key.match(/\/id=([^/]+)/);
        const recordId = idMatch ? idMatch[1] : null;

        // Reconstruct record from metadata and body
        const record = { ...obj.Metadata };

        // Include id in record if extracted from key
        if (recordId && !record.id) {
          record.id = recordId;
        }

        // If body exists, parse it
        if (obj.Body) {
          // NOTE: loop variable named `part` to avoid shadowing the
          // lodash `chunk` import used elsewhere in this class.
          const parts = [];
          for await (const part of obj.Body) {
            parts.push(part);
          }
          const bodyBuffer = Buffer.concat(parts);

          // Try to parse as JSON if it looks like JSON
          const bodyStr = bodyBuffer.toString('utf-8');
          if (bodyStr.startsWith('{') || bodyStr.startsWith('[')) {
            try {
              const bodyData = JSON.parse(bodyStr);
              Object.assign(record, bodyData);
            } catch {
              // If not JSON, store as _body field
              record._body = bodyStr;
            }
          } else if (bodyStr) {
            record._body = bodyStr;
          }
        }

        records.push(record);
      }

      // Convert to JSONL (newline-delimited JSON)
      const jsonl = records.map(r => JSON.stringify(r)).join('\n');
      const filename = compress ? `${resourceName}.jsonl.gz` : `${resourceName}.jsonl`;
      // FIX: original wrote `${outputDir}/$(unknown)` — the computed filename
      // was never interpolated, so every resource overwrote the same bogus path.
      const filePath = `${outputDir}/${filename}`;

      // Write file (compressed or not); compress ONCE and reuse the buffer
      // for the size stat (original gzipped the payload a second time).
      let fileSize;
      if (compress) {
        const compressed = await gzip(jsonl);
        await writeFile(filePath, compressed);
        fileSize = compressed.length;
      } else {
        await writeFile(filePath, jsonl, 'utf-8');
        fileSize = Buffer.byteLength(jsonl);
      }

      exportedFiles[resourceName] = filePath;
      resourceStats[resourceName] = {
        recordCount: records.length,
        fileSize
      };
    }

    // Create s3db.json metadata file
    const s3dbMetadata = {
      version: '1.0',
      timestamp: new Date().toISOString(),
      bucket: this.bucket,
      keyPrefix: this.keyPrefix || '',
      compressed: compress,
      resources: {},
      totalRecords: 0,
      totalSize: 0
    };

    // Add database schemas if available
    if (database && database.resources) {
      for (const [resourceName, resource] of Object.entries(database.resources)) {
        if (resourceMap.has(resourceName)) {
          s3dbMetadata.resources[resourceName] = {
            schema: resource.schema ? {
              attributes: resource.schema.attributes,
              partitions: resource.schema.partitions,
              behavior: resource.schema.behavior,
              timestamps: resource.schema.timestamps
            } : null,
            stats: resourceStats[resourceName]
          };
        }
      }
    } else {
      // No database instance, just add stats
      for (const [resourceName, stats] of Object.entries(resourceStats)) {
        s3dbMetadata.resources[resourceName] = { stats };
      }
    }

    // Calculate totals
    for (const stats of Object.values(resourceStats)) {
      s3dbMetadata.totalRecords += stats.recordCount;
      s3dbMetadata.totalSize += stats.fileSize;
    }

    // Write s3db.json
    const s3dbPath = `${outputDir}/s3db.json`;
    await writeFile(s3dbPath, JSON.stringify(s3dbMetadata, null, 2), 'utf-8');

    return {
      manifest: s3dbPath,
      files: exportedFiles,
      stats: s3dbMetadata,
      resourceCount: resourceMap.size,
      totalRecords: s3dbMetadata.totalRecords,
      totalSize: s3dbMetadata.totalSize
    };
  }

  /**
   * Import from BackupPlugin-compatible format
   * Loads data from s3db.json + JSONL files created by BackupPlugin or exportBackup()
   *
   * @param {string} backupDir - Backup directory path containing s3db.json
   * @param {Object} options - Import options
   * @param {Array<string>} options.resources - Resource names to import (default: all)
   * @param {boolean} options.clear - Clear existing data first (default: false)
   * @param {Object} options.database - Database instance to recreate schemas
   * @returns {Promise<Object>} Import stats
   */
  async importBackup(backupDir, options = {}) {
    const { readFile, readdir } = await import('fs/promises');
    const zlib = await import('zlib');
    const { promisify } = await import('util');
    const gunzip = promisify(zlib.gunzip);

    // Clear existing data if requested
    if (options.clear) {
      this.clear();
    }

    // Read s3db.json metadata
    const s3dbPath = `${backupDir}/s3db.json`;
    const s3dbContent = await readFile(s3dbPath, 'utf-8');
    const metadata = JSON.parse(s3dbContent);

    const database = options.database;
    const resourceFilter = options.resources;
    const importStats = {
      resourcesImported: 0,
      recordsImported: 0,
      errors: []
    };

    // Recreate resources if database instance provided
    if (database && metadata.resources) {
      for (const [resourceName, resourceMeta] of Object.entries(metadata.resources)) {
        if (resourceFilter && !resourceFilter.includes(resourceName)) continue;

        if (resourceMeta.schema) {
          try {
            await database.createResource({
              name: resourceName,
              ...resourceMeta.schema
            });
          } catch (error) {
            // Resource might already exist, that's ok
          }
        }
      }
    }

    // Read all files in backup directory
    const files = await readdir(backupDir);

    // Process each JSONL file
    for (const file of files) {
      if (!file.endsWith('.jsonl') && !file.endsWith('.jsonl.gz')) continue;

      const resourceName = file.replace(/\.jsonl(\.gz)?$/, '');
      if (resourceFilter && !resourceFilter.includes(resourceName)) continue;

      const filePath = `${backupDir}/${file}`;
      let content = await readFile(filePath);

      // Decompress if .gz
      if (file.endsWith('.gz')) {
        content = await gunzip(content);
      }

      // Parse JSONL (one JSON per line)
      const jsonl = content.toString('utf-8');
      const lines = jsonl.split('\n').filter(line => line.trim());

      for (const line of lines) {
        try {
          const record = JSON.parse(line);

          // Extract id or use generated one
          const id = record.id || record._id || `imported_${Date.now()}_${Math.random()}`;

          // Separate _body from other fields
          const { _body, id: _, _id: __, ...metadata } = record;

          // Store in MemoryClient
          // If _body exists, it's non-JSON body data
          // Otherwise, metadata contains all the data
          await this.putObject({
            key: `resource=${resourceName}/id=${id}`,
            metadata,
            body: _body ? Buffer.from(_body) : undefined
          });

          importStats.recordsImported++;
        } catch (error) {
          importStats.errors.push({
            resource: resourceName,
            error: error.message,
            line
          });
        }
      }

      importStats.resourcesImported++;
    }

    return importStats;
  }

  /**
   * Get storage statistics
   */
  getStats() {
    return this.storage.getStats();
  }

  /**
   * Clear all objects
   */
  clear() {
    this.storage.clear();
  }
}
|
|
882
|
+
|
|
883
|
+
export default MemoryClient;
|