s3db.js 11.3.2 → 12.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +102 -8
- package/dist/s3db.cjs.js +36945 -15510
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.d.ts +66 -1
- package/dist/s3db.es.js +36914 -15534
- package/dist/s3db.es.js.map +1 -1
- package/mcp/entrypoint.js +58 -0
- package/mcp/tools/documentation.js +434 -0
- package/mcp/tools/index.js +4 -0
- package/package.json +35 -15
- package/src/behaviors/user-managed.js +13 -6
- package/src/client.class.js +79 -49
- package/src/concerns/base62.js +85 -0
- package/src/concerns/dictionary-encoding.js +294 -0
- package/src/concerns/geo-encoding.js +256 -0
- package/src/concerns/high-performance-inserter.js +34 -30
- package/src/concerns/ip.js +325 -0
- package/src/concerns/metadata-encoding.js +345 -66
- package/src/concerns/money.js +193 -0
- package/src/concerns/partition-queue.js +7 -4
- package/src/concerns/plugin-storage.js +97 -47
- package/src/database.class.js +76 -74
- package/src/errors.js +0 -4
- package/src/plugins/api/auth/api-key-auth.js +88 -0
- package/src/plugins/api/auth/basic-auth.js +154 -0
- package/src/plugins/api/auth/index.js +112 -0
- package/src/plugins/api/auth/jwt-auth.js +169 -0
- package/src/plugins/api/index.js +544 -0
- package/src/plugins/api/middlewares/index.js +15 -0
- package/src/plugins/api/middlewares/validator.js +185 -0
- package/src/plugins/api/routes/auth-routes.js +241 -0
- package/src/plugins/api/routes/resource-routes.js +304 -0
- package/src/plugins/api/server.js +354 -0
- package/src/plugins/api/utils/error-handler.js +147 -0
- package/src/plugins/api/utils/openapi-generator.js +1240 -0
- package/src/plugins/api/utils/response-formatter.js +218 -0
- package/src/plugins/backup/streaming-exporter.js +132 -0
- package/src/plugins/backup.plugin.js +103 -50
- package/src/plugins/cache/s3-cache.class.js +95 -47
- package/src/plugins/cache.plugin.js +107 -9
- package/src/plugins/concerns/plugin-dependencies.js +313 -0
- package/src/plugins/concerns/prometheus-formatter.js +255 -0
- package/src/plugins/consumers/rabbitmq-consumer.js +4 -0
- package/src/plugins/consumers/sqs-consumer.js +4 -0
- package/src/plugins/costs.plugin.js +255 -39
- package/src/plugins/eventual-consistency/helpers.js +15 -1
- package/src/plugins/geo.plugin.js +873 -0
- package/src/plugins/importer/index.js +1020 -0
- package/src/plugins/index.js +11 -0
- package/src/plugins/metrics.plugin.js +163 -4
- package/src/plugins/queue-consumer.plugin.js +6 -27
- package/src/plugins/relation.errors.js +139 -0
- package/src/plugins/relation.plugin.js +1242 -0
- package/src/plugins/replicator.plugin.js +2 -1
- package/src/plugins/replicators/bigquery-replicator.class.js +180 -8
- package/src/plugins/replicators/dynamodb-replicator.class.js +383 -0
- package/src/plugins/replicators/index.js +28 -3
- package/src/plugins/replicators/mongodb-replicator.class.js +391 -0
- package/src/plugins/replicators/mysql-replicator.class.js +558 -0
- package/src/plugins/replicators/planetscale-replicator.class.js +409 -0
- package/src/plugins/replicators/postgres-replicator.class.js +182 -7
- package/src/plugins/replicators/s3db-replicator.class.js +1 -12
- package/src/plugins/replicators/schema-sync.helper.js +601 -0
- package/src/plugins/replicators/sqs-replicator.class.js +11 -9
- package/src/plugins/replicators/turso-replicator.class.js +416 -0
- package/src/plugins/replicators/webhook-replicator.class.js +612 -0
- package/src/plugins/state-machine.plugin.js +122 -68
- package/src/plugins/tfstate/README.md +745 -0
- package/src/plugins/tfstate/base-driver.js +80 -0
- package/src/plugins/tfstate/errors.js +112 -0
- package/src/plugins/tfstate/filesystem-driver.js +129 -0
- package/src/plugins/tfstate/index.js +2660 -0
- package/src/plugins/tfstate/s3-driver.js +192 -0
- package/src/plugins/ttl.plugin.js +536 -0
- package/src/resource.class.js +315 -36
- package/src/s3db.d.ts +66 -1
- package/src/schema.class.js +366 -32
- package/SECURITY.md +0 -76
- package/src/partition-drivers/base-partition-driver.js +0 -106
- package/src/partition-drivers/index.js +0 -66
- package/src/partition-drivers/memory-partition-driver.js +0 -289
- package/src/partition-drivers/sqs-partition-driver.js +0 -337
- package/src/partition-drivers/sync-partition-driver.js +0 -38
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Response Formatter - Standard JSON API responses
|
|
3
|
+
*
|
|
4
|
+
* Provides consistent response formatting across all API endpoints
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
/**
 * Format a successful API response.
 * @param {Object} data - Response payload
 * @param {Object} [options] - Response options
 * @param {number} [options.status=200] - HTTP status code
 * @param {Object} [options.meta] - Extra metadata merged into `meta`
 * @returns {Object} Envelope: { success, data, meta, _status }
 */
export function success(data, options = {}) {
  const { status = 200, meta: extraMeta = {} } = options;
  // Timestamp first so callers may override it via options.meta.
  const meta = { timestamp: new Date().toISOString(), ...extraMeta };
  return { success: true, data, meta, _status: status };
}
|
|
28
|
+
|
|
29
|
+
/**
 * Format an error API response.
 * @param {string|Error} err - Error message or Error object
 * @param {Object} [options] - Error options
 * @param {number} [options.status=500] - HTTP status code
 * @param {string} [options.code='INTERNAL_ERROR'] - Machine-readable error code
 * @param {Object} [options.details] - Additional error details
 * @returns {Object} Envelope: { success: false, error, meta, _status }
 */
export function error(err, options = {}) {
  // Renamed parameter from `error` to `err`: the old name shadowed the
  // exported function itself, which is confusing and trips no-shadow lints.
  const { status = 500, code = 'INTERNAL_ERROR', details = {} } = options;

  const errorMessage = err instanceof Error ? err.message : err;
  // Only expose stack traces outside production to avoid leaking internals.
  const errorStack = err instanceof Error && process.env.NODE_ENV !== 'production'
    ? err.stack
    : undefined;

  return {
    success: false,
    error: {
      message: errorMessage,
      code,
      details,
      stack: errorStack
    },
    meta: {
      timestamp: new Date().toISOString()
    },
    _status: status
  };
}
|
|
60
|
+
|
|
61
|
+
/**
 * Format a list response with pagination.
 * @param {Array} items - List items
 * @param {Object} [pagination] - Pagination info
 * @param {number} [pagination.total] - Total count (defaults to items.length)
 * @param {number} [pagination.page] - Current page (defaults to 1)
 * @param {number} [pagination.pageSize] - Items per page (defaults to items.length)
 * @param {number} [pagination.pageCount] - Total pages (defaults to 1)
 * @returns {Object} Envelope: { success, data, pagination, meta, _status }
 */
export function list(items, pagination = {}) {
  const { total, page, pageSize, pageCount } = pagination;

  return {
    success: true,
    data: items,
    pagination: {
      // ?? instead of ||: an explicitly supplied 0 (e.g. total: 0 for an
      // empty filtered result set) must be honored, not replaced by fallback.
      total: total ?? items.length,
      page: page ?? 1,
      pageSize: pageSize ?? items.length,
      pageCount: pageCount ?? 1
    },
    meta: {
      timestamp: new Date().toISOString()
    },
    _status: 200
  };
}
|
|
89
|
+
|
|
90
|
+
/**
 * Format a "resource created" (201) response.
 * @param {Object} data - Created resource data
 * @param {string} location - Resource location URL
 * @returns {Object} Envelope: { success, data, meta, _status: 201 }
 */
export function created(data, location) {
  const meta = { timestamp: new Date().toISOString(), location };
  return { success: true, data, meta, _status: 201 };
}
|
|
107
|
+
|
|
108
|
+
/**
 * Format a "no content" (204) response.
 * @returns {Object} Envelope with null data and _status: 204
 */
export function noContent() {
  const timestamp = new Date().toISOString();
  return { success: true, data: null, meta: { timestamp }, _status: 204 };
}
|
|
122
|
+
|
|
123
|
+
/**
 * Format a validation failure (400) response.
 * @param {Array} errors - Validation errors
 * @returns {Object} Error envelope with code VALIDATION_ERROR
 */
export function validationError(errors) {
  const options = {
    status: 400,
    code: 'VALIDATION_ERROR',
    details: { errors }
  };
  return error('Validation failed', options);
}
|
|
135
|
+
|
|
136
|
+
/**
 * Format a "not found" (404) response for a resource/id pair.
 * @param {string} resource - Resource name
 * @param {string} id - Resource ID
 * @returns {Object} Error envelope with code NOT_FOUND
 */
export function notFound(resource, id) {
  const message = `${resource} with id '${id}' not found`;
  return error(message, {
    status: 404,
    code: 'NOT_FOUND',
    details: { resource, id }
  });
}
|
|
149
|
+
|
|
150
|
+
/**
 * Format an unauthorized (401) response.
 * @param {string} [message='Unauthorized'] - Unauthorized message
 * @returns {Object} Error envelope with code UNAUTHORIZED
 */
export function unauthorized(message = 'Unauthorized') {
  const options = { status: 401, code: 'UNAUTHORIZED' };
  return error(message, options);
}
|
|
161
|
+
|
|
162
|
+
/**
 * Format a forbidden (403) response.
 * @param {string} [message='Forbidden'] - Forbidden message
 * @returns {Object} Error envelope with code FORBIDDEN
 */
export function forbidden(message = 'Forbidden') {
  const options = { status: 403, code: 'FORBIDDEN' };
  return error(message, options);
}
|
|
173
|
+
|
|
174
|
+
/**
 * Format a rate-limit-exceeded (429) response.
 * @param {number} retryAfter - Seconds the client should wait before retrying
 * @returns {Object} Error envelope with code RATE_LIMIT_EXCEEDED
 */
export function rateLimitExceeded(retryAfter) {
  const options = {
    status: 429,
    code: 'RATE_LIMIT_EXCEEDED',
    details: { retryAfter }
  };
  return error('Rate limit exceeded', options);
}
|
|
186
|
+
|
|
187
|
+
/**
 * Format a payload-too-large (413) response.
 * @param {number} size - Received payload size in bytes
 * @param {number} limit - Maximum allowed size in bytes
 * @returns {Object} Error envelope with code PAYLOAD_TOO_LARGE
 */
export function payloadTooLarge(size, limit) {
  // Human-readable MB figures alongside the raw byte counts.
  const toMB = (bytes) => (bytes / 1024 / 1024).toFixed(2);
  return error('Request payload too large', {
    status: 413,
    code: 'PAYLOAD_TOO_LARGE',
    details: {
      receivedSize: size,
      maxSize: limit,
      receivedMB: toMB(size),
      maxMB: toMB(limit)
    }
  });
}
|
|
205
|
+
|
|
206
|
+
// Aggregate default export so consumers can `import formatter from '...'`
// in addition to using the individual named exports above.
export default {
  success,
  error,
  list,
  created,
  noContent,
  validationError,
  notFound,
  unauthorized,
  forbidden,
  rateLimitExceeded,
  payloadTooLarge
};
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
import { createWriteStream } from 'fs';
|
|
2
|
+
import { pipeline } from 'stream/promises';
|
|
3
|
+
import { Readable } from 'stream';
|
|
4
|
+
import zlib from 'node:zlib';
|
|
5
|
+
|
|
6
|
+
/**
 * Streaming Exporter - export of S3DB resources to JSONL.
 *
 * Writes records to a file one JSON line at a time, optionally gzip-compressed,
 * with backpressure handling and periodic progress callbacks.
 *
 * NOTE(review): records are currently fetched with `resource.list()`, which
 * returns the whole result set at once — so memory usage is bounded by the
 * listing, not truly constant. Confirm whether `list()` streams internally,
 * or switch to a paginated/streaming read when the resource API supports it.
 */
export class StreamingExporter {
  /**
   * @param {Object} [options]
   * @param {string} [options.encoding='utf8'] - Encoding used for written lines
   * @param {boolean} [options.compress=true] - Gzip the output (default on)
   * @param {number} [options.batchSize=100] - Intended read batch size
   *   (NOTE(review): currently unused — see class note about list())
   * @param {Function} [options.onProgress] - Invoked every 1000 records with
   *   { resourceName, recordCount, bytesWritten }
   */
  constructor(options = {}) {
    this.encoding = options.encoding || 'utf8';
    this.compress = options.compress !== false;
    this.batchSize = options.batchSize || 100;
    this.onProgress = options.onProgress || null;
  }

  /**
   * Export a single resource to a JSONL file.
   *
   * @param {Resource} resource - S3DB resource (must expose `list()` and `name`)
   * @param {string} outputPath - Output file path
   * @param {string} [type='full'] - Export type ('full' or 'incremental')
   * @param {Date} [sinceTimestamp=null] - Lower bound for incremental exports
   * @returns {Promise<{recordCount: number, bytesWritten: number}>}
   *   `bytesWritten` is the PRE-compression byte count of the JSONL payload,
   *   not the size of the gzipped file on disk.
   */
  async exportResource(resource, outputPath, type = 'full', sinceTimestamp = null) {
    let recordCount = 0;
    let bytesWritten = 0;

    const writeStream = createWriteStream(outputPath);

    // When compression is enabled, records are written into the gzip stream,
    // which pipes into the file stream.
    let outputStream = writeStream;
    if (this.compress) {
      const gzipStream = zlib.createGzip();
      gzipStream.pipe(writeStream);
      outputStream = gzipStream;
    }

    try {
      // Incremental exports only include records updated after the cutoff.
      let records;
      if (type === 'incremental' && sinceTimestamp) {
        records = await resource.list({
          filter: { updatedAt: { '>': sinceTimestamp.toISOString() } }
        });
      } else {
        records = await resource.list();
      }

      // One JSON document per line (JSONL).
      for (const record of records) {
        const line = JSON.stringify(record) + '\n';
        const canWrite = outputStream.write(line, this.encoding);

        recordCount++;
        bytesWritten += Buffer.byteLength(line, this.encoding);

        if (this.onProgress && recordCount % 1000 === 0) {
          this.onProgress({
            resourceName: resource.name,
            recordCount,
            bytesWritten
          });
        }

        // Respect backpressure: pause until the stream drains.
        if (!canWrite) {
          await new Promise(resolve => outputStream.once('drain', resolve));
        }
      }

      outputStream.end();

      // Wait for the FILE stream to finish — with compression enabled this
      // fires only after gzip has flushed everything through the pipe.
      await new Promise((resolve, reject) => {
        writeStream.on('finish', resolve);
        writeStream.on('error', reject);
      });

      return { recordCount, bytesWritten };

    } catch (error) {
      // Cleanup on error: destroy BOTH streams. Destroying only the gzip
      // stream would leave the underlying file descriptor open (fd leak).
      outputStream.destroy();
      if (outputStream !== writeStream) {
        writeStream.destroy();
      }
      throw error;
    }
  }

  /**
   * Export multiple resources, one file per resource.
   *
   * @param {Object} resources - Map of resource name -> resource
   * @param {string} outputDir - Output directory
   * @param {string} [type='full'] - Export type
   * @param {Date} [sinceTimestamp=null] - Lower bound for incremental exports
   * @returns {Promise<Map<string, {recordCount, bytesWritten, filePath, compressed}>>}
   */
  async exportResources(resources, outputDir, type = 'full', sinceTimestamp = null) {
    const results = new Map();

    for (const [resourceName, resource] of Object.entries(resources)) {
      const ext = this.compress ? '.jsonl.gz' : '.jsonl';
      const outputPath = `${outputDir}/${resourceName}${ext}`;

      const stats = await this.exportResource(resource, outputPath, type, sinceTimestamp);

      results.set(resourceName, {
        ...stats,
        filePath: outputPath,
        compressed: this.compress
      });
    }

    return results;
  }
}
|
|
131
|
+
|
|
132
|
+
export default StreamingExporter;
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import Plugin from "./plugin.class.js";
|
|
2
2
|
import tryFn from "../concerns/try-fn.js";
|
|
3
3
|
import { createBackupDriver, validateBackupConfig } from "./backup/index.js";
|
|
4
|
+
import { StreamingExporter } from "./backup/streaming-exporter.js";
|
|
4
5
|
import { createWriteStream, createReadStream } from 'fs';
|
|
5
6
|
import zlib from 'node:zlib';
|
|
6
7
|
import { pipeline } from 'stream/promises';
|
|
@@ -370,7 +371,45 @@ export class BackupPlugin extends Plugin {
|
|
|
370
371
|
|
|
371
372
|
async _exportResources(resourceNames, tempDir, type) {
|
|
372
373
|
const exportedFiles = [];
|
|
373
|
-
|
|
374
|
+
const resourceStats = new Map();
|
|
375
|
+
|
|
376
|
+
// Create StreamingExporter
|
|
377
|
+
const exporter = new StreamingExporter({
|
|
378
|
+
compress: true, // Always use gzip for backups
|
|
379
|
+
onProgress: this.config.verbose ? (stats) => {
|
|
380
|
+
if (stats.recordCount % 10000 === 0) {
|
|
381
|
+
console.log(`[BackupPlugin] Exported ${stats.recordCount} records from '${stats.resourceName}'`);
|
|
382
|
+
}
|
|
383
|
+
} : null
|
|
384
|
+
});
|
|
385
|
+
|
|
386
|
+
// Determine timestamp for incremental backups
|
|
387
|
+
let sinceTimestamp = null;
|
|
388
|
+
if (type === 'incremental') {
|
|
389
|
+
const [lastBackupOk, , lastBackups] = await tryFn(() =>
|
|
390
|
+
this.database.resource(this.config.backupMetadataResource).list({
|
|
391
|
+
filter: {
|
|
392
|
+
status: 'completed',
|
|
393
|
+
type: { $in: ['full', 'incremental'] }
|
|
394
|
+
},
|
|
395
|
+
sort: { timestamp: -1 },
|
|
396
|
+
limit: 1
|
|
397
|
+
})
|
|
398
|
+
);
|
|
399
|
+
|
|
400
|
+
if (lastBackupOk && lastBackups && lastBackups.length > 0) {
|
|
401
|
+
sinceTimestamp = new Date(lastBackups[0].timestamp);
|
|
402
|
+
} else {
|
|
403
|
+
// No previous backup found, use last 24 hours as fallback
|
|
404
|
+
sinceTimestamp = new Date(Date.now() - 24 * 60 * 60 * 1000);
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
if (this.config.verbose) {
|
|
408
|
+
console.log(`[BackupPlugin] Incremental backup since ${sinceTimestamp.toISOString()}`);
|
|
409
|
+
}
|
|
410
|
+
}
|
|
411
|
+
|
|
412
|
+
// Export each resource using streaming
|
|
374
413
|
for (const resourceName of resourceNames) {
|
|
375
414
|
const resource = this.database.resources[resourceName];
|
|
376
415
|
if (!resource) {
|
|
@@ -379,63 +418,77 @@ export class BackupPlugin extends Plugin {
|
|
|
379
418
|
}
|
|
380
419
|
continue;
|
|
381
420
|
}
|
|
382
|
-
|
|
383
|
-
const exportPath = path.join(tempDir, `${resourceName}.json`);
|
|
384
|
-
|
|
385
|
-
// Export resource data
|
|
386
|
-
let records;
|
|
387
|
-
if (type === 'incremental') {
|
|
388
|
-
// For incremental, only export records changed since last successful backup
|
|
389
|
-
const [lastBackupOk, , lastBackups] = await tryFn(() =>
|
|
390
|
-
this.database.resource(this.config.backupMetadataResource).list({
|
|
391
|
-
filter: {
|
|
392
|
-
status: 'completed',
|
|
393
|
-
type: { $in: ['full', 'incremental'] }
|
|
394
|
-
},
|
|
395
|
-
sort: { timestamp: -1 },
|
|
396
|
-
limit: 1
|
|
397
|
-
})
|
|
398
|
-
);
|
|
399
|
-
|
|
400
|
-
let sinceTimestamp;
|
|
401
|
-
if (lastBackupOk && lastBackups && lastBackups.length > 0) {
|
|
402
|
-
sinceTimestamp = new Date(lastBackups[0].timestamp);
|
|
403
|
-
} else {
|
|
404
|
-
// No previous backup found, use last 24 hours as fallback
|
|
405
|
-
sinceTimestamp = new Date(Date.now() - 24 * 60 * 60 * 1000);
|
|
406
|
-
}
|
|
407
421
|
|
|
408
|
-
|
|
409
|
-
console.log(`[BackupPlugin] Incremental backup for '${resourceName}' since ${sinceTimestamp.toISOString()}`);
|
|
410
|
-
}
|
|
422
|
+
const exportPath = path.join(tempDir, `${resourceName}.jsonl.gz`);
|
|
411
423
|
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
424
|
+
try {
|
|
425
|
+
// Export with streaming (constant memory usage!)
|
|
426
|
+
const stats = await exporter.exportResource(resource, exportPath, type, sinceTimestamp);
|
|
427
|
+
|
|
428
|
+
exportedFiles.push(exportPath);
|
|
429
|
+
resourceStats.set(resourceName, {
|
|
430
|
+
...stats,
|
|
431
|
+
definition: resource.config
|
|
415
432
|
});
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
await writeFile(exportPath, JSON.stringify(exportData, null, 2));
|
|
429
|
-
exportedFiles.push(exportPath);
|
|
430
|
-
|
|
431
|
-
if (this.config.verbose) {
|
|
432
|
-
console.log(`[BackupPlugin] Exported ${records.length} records from '${resourceName}'`);
|
|
433
|
+
|
|
434
|
+
if (this.config.verbose) {
|
|
435
|
+
console.log(
|
|
436
|
+
`[BackupPlugin] Exported ${stats.recordCount} records from '${resourceName}' ` +
|
|
437
|
+
`(${(stats.bytesWritten / 1024 / 1024).toFixed(2)} MB compressed)`
|
|
438
|
+
);
|
|
439
|
+
}
|
|
440
|
+
} catch (error) {
|
|
441
|
+
if (this.config.verbose) {
|
|
442
|
+
console.error(`[BackupPlugin] Error exporting '${resourceName}': ${error.message}`);
|
|
443
|
+
}
|
|
444
|
+
throw error;
|
|
433
445
|
}
|
|
434
446
|
}
|
|
435
|
-
|
|
447
|
+
|
|
448
|
+
// Generate s3db.json metadata file
|
|
449
|
+
await this._generateMetadataFile(tempDir, resourceStats, type);
|
|
450
|
+
exportedFiles.push(path.join(tempDir, 's3db.json'));
|
|
451
|
+
|
|
436
452
|
return exportedFiles;
|
|
437
453
|
}
|
|
438
454
|
|
|
455
|
+
  /**
   * Generate the s3db.json metadata file describing this backup.
   *
   * Writes a manifest with the backup type, export timestamp, database
   * bucket/region, and per-resource schema + export stats, so the backup can
   * be inspected/restored without loading the data files.
   *
   * @param {string} tempDir - Directory the backup files were exported into
   * @param {Map<string, Object>} resourceStats - Per-resource stats from the
   *   exporter, each carrying recordCount, bytesWritten, and `definition`
   *   (the resource config: attributes, partitions, timestamps)
   * @param {string} type - Backup type ('full' or 'incremental')
   */
  async _generateMetadataFile(tempDir, resourceStats, type) {
    const metadata = {
      version: '1.0',
      backupType: type,
      exportedAt: new Date().toISOString(),
      database: {
        bucket: this.database.bucket,
        region: this.database.region
      },
      resources: {}
    };

    for (const [resourceName, stats] of resourceStats.entries()) {
      metadata.resources[resourceName] = {
        name: resourceName,
        attributes: stats.definition.attributes || {},
        partitions: stats.definition.partitions || {},
        timestamps: stats.definition.timestamps || false,
        recordCount: stats.recordCount,
        // Export file name is assumed to follow the `<name>.jsonl.gz` pattern
        // used by the export step; gzip/jsonl are hard-coded to match.
        exportFile: `${resourceName}.jsonl.gz`,
        compression: 'gzip',
        format: 'jsonl',
        // NOTE(review): StreamingExporter reports bytesWritten BEFORE
        // compression — confirm consumers don't treat this as on-disk size.
        bytesWritten: stats.bytesWritten
      };
    }

    const metadataPath = path.join(tempDir, 's3db.json');
    await writeFile(metadataPath, JSON.stringify(metadata, null, 2));

    if (this.config.verbose) {
      console.log(`[BackupPlugin] Generated s3db.json metadata`);
    }
  }
|
|
491
|
+
|
|
439
492
|
async _createArchive(files, targetPath, compressionType) {
|
|
440
493
|
// Create a JSON-based archive with file metadata and contents
|
|
441
494
|
const archive = {
|