datasette-ts 0.0.6 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +51 -6
- package/dist/cli.js.map +2 -2
- package/package.json +1 -1
- package/scripts/cloudflare-deploy-helpers.mjs +57 -5
package/dist/cli.js
CHANGED
|
@@ -23687,10 +23687,12 @@ async function dumpSqliteForD1(options) {
|
|
|
23687
23687
|
const baseName = options.outputName ?? path.basename(options.dbFile, path.extname(options.dbFile));
|
|
23688
23688
|
const outputPath = path.join(options.outputDir, `${baseName}.sql`);
|
|
23689
23689
|
const rawPath = path.join(options.outputDir, `${baseName}.raw.sql`);
|
|
23690
|
+
const log = typeof options.log === "function" ? options.log : null;
|
|
23691
|
+
const progressIntervalMs = typeof options.progressIntervalMs === "number" && Number.isFinite(options.progressIntervalMs) ? options.progressIntervalMs : 5e3;
|
|
23690
23692
|
await mkdir(options.outputDir, { recursive: true });
|
|
23691
|
-
await dumpSqliteToFile(options.dbFile, rawPath);
|
|
23693
|
+
await dumpSqliteToFile(options.dbFile, rawPath, { log, progressIntervalMs });
|
|
23692
23694
|
try {
|
|
23693
|
-
await normalizeDumpFile(rawPath, outputPath);
|
|
23695
|
+
await normalizeDumpFile(rawPath, outputPath, { log });
|
|
23694
23696
|
} finally {
|
|
23695
23697
|
await unlink(rawPath).catch(() => void 0);
|
|
23696
23698
|
}
|
|
@@ -23816,9 +23818,12 @@ function escapeIdentifier(name) {
|
|
|
23816
23818
|
const escaped = String(name).replace(/"/g, '""');
|
|
23817
23819
|
return `"${escaped}"`;
|
|
23818
23820
|
}
|
|
23819
|
-
async function normalizeDumpFile(inputPath, outputPath) {
|
|
23821
|
+
async function normalizeDumpFile(inputPath, outputPath, { log } = {}) {
|
|
23820
23822
|
const tablesInOrder = [];
|
|
23821
23823
|
const viewsInOrder = [];
|
|
23824
|
+
if (log) {
|
|
23825
|
+
log(`Normalizing D1 import SQL: ${outputPath}`);
|
|
23826
|
+
}
|
|
23822
23827
|
await forEachLine(inputPath, (line) => {
|
|
23823
23828
|
const tableMatch = line.match(/^CREATE TABLE\s+("?[^"]+"?)/i);
|
|
23824
23829
|
if (tableMatch) {
|
|
@@ -23848,8 +23853,15 @@ async function normalizeDumpFile(inputPath, outputPath) {
|
|
|
23848
23853
|
});
|
|
23849
23854
|
outputStream.end();
|
|
23850
23855
|
await once(outputStream, "finish");
|
|
23856
|
+
if (log) {
|
|
23857
|
+
const { size } = await stat(outputPath);
|
|
23858
|
+
log(`D1 import SQL ready (${formatBytes(size)})`);
|
|
23859
|
+
}
|
|
23851
23860
|
}
|
|
23852
|
-
async function dumpSqliteToFile(dbFile, outputPath) {
|
|
23861
|
+
async function dumpSqliteToFile(dbFile, outputPath, { log, progressIntervalMs } = {}) {
|
|
23862
|
+
if (log) {
|
|
23863
|
+
log(`Dumping SQLite database via sqlite3 .dump`);
|
|
23864
|
+
}
|
|
23853
23865
|
const child = spawn("sqlite3", [dbFile, ".dump"], {
|
|
23854
23866
|
stdio: ["ignore", "pipe", "pipe"]
|
|
23855
23867
|
});
|
|
@@ -23870,13 +23882,22 @@ async function dumpSqliteToFile(dbFile, outputPath) {
|
|
|
23870
23882
|
stdout.destroy(error);
|
|
23871
23883
|
outputStream.destroy(error);
|
|
23872
23884
|
});
|
|
23873
|
-
|
|
23885
|
+
const stopProgress = log ? startProgressLogger(outputPath, log, progressIntervalMs) : () => void 0;
|
|
23886
|
+
try {
|
|
23887
|
+
await pipeline(stdout, outputStream);
|
|
23888
|
+
} finally {
|
|
23889
|
+
stopProgress();
|
|
23890
|
+
}
|
|
23874
23891
|
const [code, signal] = await once(child, "close");
|
|
23875
23892
|
if (code !== 0) {
|
|
23876
23893
|
const suffix = signal ? ` (signal ${signal})` : "";
|
|
23877
23894
|
const message = stderr.trim() || `sqlite3 exited with code ${code ?? "unknown"}${suffix}`;
|
|
23878
23895
|
throw new Error(message);
|
|
23879
23896
|
}
|
|
23897
|
+
if (log) {
|
|
23898
|
+
const { size } = await stat(outputPath);
|
|
23899
|
+
log(`SQLite dump completed (${formatBytes(size)})`);
|
|
23900
|
+
}
|
|
23880
23901
|
}
|
|
23881
23902
|
async function forEachLine(filePath, handler) {
|
|
23882
23903
|
const stream = createReadStream(filePath, { encoding: "utf8" });
|
|
@@ -23896,6 +23917,28 @@ async function writeLine(stream, line) {
|
|
|
23896
23917
|
await once(stream, "drain");
|
|
23897
23918
|
}
|
|
23898
23919
|
}
|
|
23920
|
+
/**
 * Periodically logs the current size of the file being written so that long
 * `sqlite3 .dump` runs show visible progress.
 *
 * @param {string} filePath - Path whose size is polled (the dump output file).
 * @param {(msg: string) => void} log - Logger invoked with each progress line.
 * @param {number} [intervalMs] - Poll interval in milliseconds; falls back to
 *   5000 when missing or non-finite (same cadence dumpSqliteForD1 defaults to).
 * @returns {() => void} Stop function that cancels the timer.
 */
function startProgressLogger(filePath, log, intervalMs) {
  // Guard: setInterval(fn, undefined) fires roughly every millisecond, which
  // would spam the log and hammer stat(); normalize to a sane default first.
  const delayMs = typeof intervalMs === "number" && Number.isFinite(intervalMs) && intervalMs > 0 ? intervalMs : 5e3;
  const interval = setInterval(() => {
    // stat() can race with file creation/unlink; best-effort — swallow errors
    // and simply skip this tick rather than crashing the dump.
    void stat(filePath).then((info) => {
      log(`Dump size: ${formatBytes(info.size)}`);
    }).catch(() => void 0);
  }, delayMs);
  return () => clearInterval(interval);
}
|
|
23928
|
+
/**
 * Renders a byte count as a human-readable size string, e.g. `1536` → "1.5 KB".
 * Non-finite or negative inputs render as "0 B". Bytes show no decimals;
 * every larger unit shows exactly one.
 *
 * @param {number} bytes - Raw byte count.
 * @returns {string} Formatted size with unit suffix (B through TB).
 */
function formatBytes(bytes) {
  if (!Number.isFinite(bytes) || bytes < 0) {
    return "0 B";
  }
  const UNITS = ["B", "KB", "MB", "GB", "TB"];
  let remaining = bytes;
  let magnitude = 0;
  // Scale down by 1024 per step, capping at the largest known unit (TB).
  for (; remaining >= 1024 && magnitude < UNITS.length - 1; magnitude += 1) {
    remaining /= 1024;
  }
  const precision = magnitude === 0 ? 0 : 1;
  return `${remaining.toFixed(precision)} ${UNITS[magnitude]}`;
}
|
|
23899
23942
|
async function hashFile(filePath) {
|
|
23900
23943
|
return new Promise((resolve2, reject) => {
|
|
23901
23944
|
const hash = createHash("sha256");
|
|
@@ -23996,7 +24039,9 @@ async function runCloudflareDeploy(args) {
|
|
|
23996
24039
|
importFile = await dumpSqliteForD1({
|
|
23997
24040
|
dbFile: options.dbFile,
|
|
23998
24041
|
outputDir: options.importsDir,
|
|
23999
|
-
outputName: options.d1Name
|
|
24042
|
+
outputName: options.d1Name,
|
|
24043
|
+
log: logStep,
|
|
24044
|
+
progressIntervalMs: 5e3
|
|
24000
24045
|
});
|
|
24001
24046
|
logStep(`D1 import file: ${importFile}`);
|
|
24002
24047
|
} catch (error) {
|