datasette-ts 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +76 -36
- package/dist/cli.js.map +3 -3
- package/package.json +1 -1
- package/scripts/cloudflare-deploy-helpers.mjs +77 -30
package/dist/cli.js
CHANGED
@@ -23673,22 +23673,27 @@ import alchemy from "alchemy";
 import { Assets, D1Database, Worker } from "alchemy/cloudflare";
 
 // scripts/cloudflare-deploy-helpers.mjs
-import {
+import { spawn } from "node:child_process";
 import { createHash } from "node:crypto";
-import { createReadStream } from "node:fs";
-import { mkdir, stat,
+import { createReadStream, createWriteStream } from "node:fs";
+import { mkdir, stat, unlink } from "node:fs/promises";
+import { once } from "node:events";
 import path from "node:path";
+import { createInterface } from "node:readline";
+import { pipeline } from "node:stream/promises";
 import { pathToFileURL } from "node:url";
 import { createClient } from "@libsql/client";
 async function dumpSqliteForD1(options) {
   const baseName = options.outputName ?? path.basename(options.dbFile, path.extname(options.dbFile));
   const outputPath = path.join(options.outputDir, `${baseName}.sql`);
+  const rawPath = path.join(options.outputDir, `${baseName}.raw.sql`);
   await mkdir(options.outputDir, { recursive: true });
-
-
-
-
-
+  await dumpSqliteToFile(options.dbFile, rawPath);
+  try {
+    await normalizeDumpFile(rawPath, outputPath);
+  } finally {
+    await unlink(rawPath).catch(() => void 0);
+  }
   return outputPath;
 }
 async function loadSchemaFromFile(dbFile) {
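In 0.0.3 the D1 dump was normalized from an in-memory string (see the removed rawDump/output code in the next hunk); 0.0.5 instead streams the sqlite3 .dump output into a temporary `${baseName}.raw.sql` file, normalizes it line by line into the final `.sql`, and removes the temporary file in a `finally` block. A hypothetical call site, for illustration only: the option names come from the code above, but the paths and output name below are invented, and the helper itself lives inside the bundled deploy helpers.

// Illustrative sketch, not package code.
const sqlPath = await dumpSqliteForD1({
  dbFile: "./data/content.db",   // SQLite database to export (example path)
  outputDir: "./build/d1",       // where the .sql file is written (example path)
  outputName: "content"          // optional; defaults to the db file's base name
});
// sqlPath -> build/d1/content.sql; the intermediate content.raw.sql is deleted
// even if normalization throws, via the finally + unlink(...).catch(...) above.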
@@ -23811,48 +23816,85 @@ function escapeIdentifier(name) {
   const escaped = String(name).replace(/"/g, '""');
   return `"${escaped}"`;
 }
-function
-  const lines = rawDump.split("\n");
-  const output = [];
+async function normalizeDumpFile(inputPath, outputPath) {
   const tablesInOrder = [];
   const viewsInOrder = [];
-
+  await forEachLine(inputPath, (line) => {
     const tableMatch = line.match(/^CREATE TABLE\s+("?[^"]+"?)/i);
     if (tableMatch) {
       tablesInOrder.push(tableMatch[1].replace(/\s*\($/, ""));
-
+      return;
     }
     const viewMatch = line.match(/^CREATE VIEW\s+("?[^"]+"?)/i);
     if (viewMatch) {
       viewsInOrder.push(viewMatch[1].replace(/\s*\($/, ""));
     }
-  }
+  });
+  const outputStream = createWriteStream(outputPath, { encoding: "utf8" });
   for (const viewName of viewsInOrder.reverse()) {
-
+    await writeLine(outputStream, `DROP VIEW IF EXISTS ${viewName};`);
   }
   for (const tableName of tablesInOrder.reverse()) {
-
+    await writeLine(outputStream, `DROP TABLE IF EXISTS ${tableName};`);
   }
-
+  await forEachLine(inputPath, async (line) => {
     if (line === "BEGIN TRANSACTION;" || line === "COMMIT;") {
-
+      return;
     }
     if (line.startsWith("PRAGMA foreign_keys=")) {
-
-    }
-    const tableMatch = line.match(/^CREATE TABLE\s+("?[^"]+"?)/i);
-    if (tableMatch) {
-      output.push(line);
-      continue;
+      return;
     }
-
-
-
-
+    await writeLine(outputStream, line);
+  });
+  outputStream.end();
+  await once(outputStream, "finish");
+}
+async function dumpSqliteToFile(dbFile, outputPath) {
+  const child = spawn("sqlite3", [dbFile, ".dump"], {
+    stdio: ["ignore", "pipe", "pipe"]
+  });
+  let stderr = "";
+  if (child.stderr) {
+    child.stderr.setEncoding("utf8");
+    child.stderr.on("data", (chunk) => {
+      stderr += chunk;
+    });
+  }
+  const stdout = child.stdout;
+  if (!stdout) {
+    const [error] = await once(child, "error");
+    throw error ?? new Error("sqlite3 stdout is unavailable.");
+  }
+  const outputStream = createWriteStream(outputPath, { encoding: "utf8" });
+  child.once("error", (error) => {
+    stdout.destroy(error);
+    outputStream.destroy(error);
+  });
+  await pipeline(stdout, outputStream);
+  const [code, signal] = await once(child, "close");
+  if (code !== 0) {
+    const suffix = signal ? ` (signal ${signal})` : "";
+    const message = stderr.trim() || `sqlite3 exited with code ${code ?? "unknown"}${suffix}`;
+    throw new Error(message);
+  }
+}
+async function forEachLine(filePath, handler) {
+  const stream = createReadStream(filePath, { encoding: "utf8" });
+  const rl = createInterface({ input: stream, crlfDelay: Infinity });
+  try {
+    for await (const line of rl) {
+      await handler(line);
     }
-
+  } finally {
+    rl.close();
+    stream.close();
+  }
+}
+async function writeLine(stream, line) {
+  if (!stream.write(`${line}
+`)) {
+    await once(stream, "drain");
   }
-  return output.join("\n");
 }
 async function hashFile(filePath) {
   return new Promise((resolve2, reject) => {
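The new helpers split the work into three small pieces: dumpSqliteToFile spawns `sqlite3 <db> .dump` and pipes its stdout straight to disk, surfacing stderr if the process exits non-zero; forEachLine replays a file line by line via node:readline; and writeLine respects stream backpressure by waiting for "drain" whenever write() returns false. Below is a minimal standalone sketch of that backpressure idiom, not code from the package; the file name and the generated rows are invented, and it assumes it runs as an ESM module (top-level await).

// Sketch of the write-with-backpressure pattern used by writeLine above.
import { createWriteStream } from "node:fs";
import { once } from "node:events";

const out = createWriteStream("./example-output.sql", { encoding: "utf8" });
for (let i = 0; i < 100_000; i++) {
  // write() returns false once the internal buffer is full; wait for "drain"
  // before continuing instead of letting the buffer grow without bound.
  if (!out.write(`INSERT INTO demo VALUES (${i});\n`)) {
    await once(out, "drain");
  }
}
out.end();
await once(out, "finish"); // ensure everything is flushed before exiting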
@@ -24167,7 +24209,7 @@ function isSqliteCliMissing(error) {
 
 // src/cli/serve.ts
 init_registry();
-import { stat as stat5, writeFile
+import { stat as stat5, writeFile } from "node:fs/promises";
 import { join as join2 } from "node:path";
 
 // src/core/inspect.ts
@@ -24340,7 +24382,7 @@ async function runInspectCommand(args) {
   const data = await inspectDatabases(databasePaths);
   const json = JSON.stringify(data, null, 2);
   if (inspectFile) {
-    await
+    await writeFile(inspectFile, json, "utf8");
     return;
   }
   process.stdout.write(`${json}
@@ -24465,12 +24507,10 @@ function parseNamedArg2(arg, nextValue) {
 }
 
 // src/cli.ts
-
-  await main();
-} catch (error) {
+void main().catch((error) => {
   console.error(error);
   process.exitCode = 1;
-}
+});
 async function main() {
   const args = process.argv.slice(2);
   const [command, subcommand] = args;
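The entry point no longer wraps `await main()` in a top-level try/catch; it now starts `main()` and attaches a .catch handler, which sidesteps top-level await in the bundled output while still reporting failures. Setting `process.exitCode` instead of calling `process.exit()` lets Node flush pending I/O before exiting non-zero. The same pattern in isolation, as a generic sketch (the command handling below is a placeholder, not the package's behavior):

// Generic CLI entry-point sketch using the same pattern.
void main().catch((error) => {
  console.error(error);
  // Prefer process.exitCode over process.exit(1): the process still exits
  // with a failure status, but pending stdout/stderr writes are not cut off.
  process.exitCode = 1;
});

async function main() {
  const [command] = process.argv.slice(2);
  if (!command) throw new Error("no command given"); // placeholder behavior
}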