datasette-ts 0.0.5 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +89 -8
- package/dist/cli.js.map +2 -2
- package/package.json +1 -1
- package/scripts/cloudflare-deploy-helpers.mjs +57 -5
package/scripts/cloudflare-deploy-helpers.mjs
CHANGED
@@ -14,11 +14,16 @@ export async function dumpSqliteForD1(options) {
     options.outputName ?? path.basename(options.dbFile, path.extname(options.dbFile));
   const outputPath = path.join(options.outputDir, `${baseName}.sql`);
   const rawPath = path.join(options.outputDir, `${baseName}.raw.sql`);
+  const log = typeof options.log === "function" ? options.log : null;
+  const progressIntervalMs =
+    typeof options.progressIntervalMs === "number" && Number.isFinite(options.progressIntervalMs)
+      ? options.progressIntervalMs
+      : 5000;
 
   await mkdir(options.outputDir, { recursive: true });
-  await dumpSqliteToFile(options.dbFile, rawPath);
+  await dumpSqliteToFile(options.dbFile, rawPath, { log, progressIntervalMs });
   try {
-    await normalizeDumpFile(rawPath, outputPath);
+    await normalizeDumpFile(rawPath, outputPath, { log });
   } finally {
     await unlink(rawPath).catch(() => undefined);
   }
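Both new options are optional, so existing callers of dumpSqliteForD1 are unaffected. A minimal sketch of a caller that opts into progress logging, assuming the helper is imported straight from the package's scripts directory (the import specifier and file paths below are illustrative, not taken from this diff):

import { dumpSqliteForD1 } from "datasette-ts/scripts/cloudflare-deploy-helpers.mjs"; // assumed specifier

await dumpSqliteForD1({
  dbFile: "./data/content.db",               // hypothetical source SQLite database
  outputDir: "./dist/d1",                    // where <name>.sql is written
  outputName: "content",                     // optional; defaults to the dbFile basename
  log: (message) => console.error(message),  // receives the status and "Dump size" lines
  progressIntervalMs: 10_000,                // poll the dump size every 10 s (default 5000)
});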
@@ -163,9 +168,12 @@ function escapeIdentifier(name) {
   return `"${escaped}"`;
 }
 
-async function normalizeDumpFile(inputPath, outputPath) {
+async function normalizeDumpFile(inputPath, outputPath, { log } = {}) {
   const tablesInOrder = [];
   const viewsInOrder = [];
+  if (log) {
+    log(`Normalizing D1 import SQL: ${outputPath}`);
+  }
   await forEachLine(inputPath, (line) => {
     const tableMatch = line.match(/^CREATE TABLE\s+("?[^"]+"?)/i);
     if (tableMatch) {
@@ -196,9 +204,16 @@ async function normalizeDumpFile(inputPath, outputPath) {
   });
   outputStream.end();
   await once(outputStream, "finish");
+  if (log) {
+    const { size } = await stat(outputPath);
+    log(`D1 import SQL ready (${formatBytes(size)})`);
+  }
 }
 
-async function dumpSqliteToFile(dbFile, outputPath) {
+async function dumpSqliteToFile(dbFile, outputPath, { log, progressIntervalMs } = {}) {
+  if (log) {
+    log(`Dumping SQLite database via sqlite3 .dump`);
+  }
   const child = spawn("sqlite3", [dbFile, ".dump"], {
     stdio: ["ignore", "pipe", "pipe"],
   });
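Note that the dump still shells out to the sqlite3 CLI (spawn("sqlite3", [dbFile, ".dump"], ...)), so the binary must be on PATH whether or not a log callback is supplied. A hypothetical pre-flight check a caller could run before invoking the helper (not part of the package):

import { spawnSync } from "node:child_process";

// Assumed guard, not from this diff: fail early if the sqlite3 CLI is not installed.
const probe = spawnSync("sqlite3", ["--version"], { stdio: "ignore" });
if (probe.error || probe.status !== 0) {
  throw new Error("sqlite3 CLI not found on PATH; install it before running the D1 dump");
}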
@@ -221,13 +236,24 @@ async function dumpSqliteToFile(dbFile, outputPath) {
     outputStream.destroy(error);
   });
 
-  await pipeline(stdout, outputStream);
+  const stopProgress = log
+    ? startProgressLogger(outputPath, log, progressIntervalMs)
+    : () => undefined;
+  try {
+    await pipeline(stdout, outputStream);
+  } finally {
+    stopProgress();
+  }
   const [code, signal] = await once(child, "close");
   if (code !== 0) {
     const suffix = signal ? ` (signal ${signal})` : "";
     const message = stderr.trim() || `sqlite3 exited with code ${code ?? "unknown"}${suffix}`;
     throw new Error(message);
   }
+  if (log) {
+    const { size } = await stat(outputPath);
+    log(`SQLite dump completed (${formatBytes(size)})`);
+  }
 }
 
 async function forEachLine(filePath, handler) {
@@ -249,6 +275,32 @@ async function writeLine(stream, line) {
   }
 }
 
+function startProgressLogger(filePath, log, intervalMs) {
+  const interval = setInterval(() => {
+    void stat(filePath)
+      .then((info) => {
+        log(`Dump size: ${formatBytes(info.size)}`);
+      })
+      .catch(() => undefined);
+  }, intervalMs);
+  return () => clearInterval(interval);
+}
+
+function formatBytes(bytes) {
+  if (!Number.isFinite(bytes) || bytes < 0) {
+    return "0 B";
+  }
+  const units = ["B", "KB", "MB", "GB", "TB"];
+  let value = bytes;
+  let index = 0;
+  while (value >= 1024 && index < units.length - 1) {
+    value /= 1024;
+    index += 1;
+  }
+  const rounded = index === 0 ? value.toFixed(0) : value.toFixed(1);
+  return `${rounded} ${units[index]}`;
+}
+
 async function hashFile(filePath) {
   return new Promise((resolve, reject) => {
     const hash = createHash("sha256");