datasette-ts 0.0.8 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +101 -9
- package/dist/cli.js.map +3 -3
- package/package.json +1 -1
package/dist/cli.js
CHANGED
@@ -23666,8 +23666,8 @@ var init_node4 = __esm({
 });

 // src/cli/deploy-cloudflare.ts
-import { stat as stat2 } from "node:fs/promises";
-import { dirname, extname, join, resolve } from "node:path";
+import { mkdir as mkdir2, stat as stat2, writeFile } from "node:fs/promises";
+import { dirname, extname, join, relative, resolve } from "node:path";
 import { fileURLToPath } from "node:url";
 import alchemy from "alchemy";
 import { Assets, D1Database, Worker } from "alchemy/cloudflare";
@@ -24017,6 +24017,8 @@ function normalizeLibsqlRows(result) {

 // src/cli/deploy-cloudflare.ts
 var DEFAULT_IMPORTS_DIR = ".datasette-ts/imports";
+var MAX_TEXT_BINDING_BYTES = 5 * 1024;
+var WORKER_OVERRIDE_DIR = "worker";
 async function runCloudflareDeploy(args) {
   const startedAt = Date.now();
   logStep("Starting Cloudflare deploy");
@@ -24057,8 +24059,41 @@ async function runCloudflareDeploy(args) {
     logStep("Precomputing inspect data");
     inspectData = await loadInspectDataFromFile(options.dbFile, options.dbName);
   }
+  const schemaJson = JSON.stringify({ [options.dbName]: schema });
+  const schemaSize = byteLength(schemaJson);
+  let schemaBinding = null;
+  let embedSchema = false;
+  if (schemaSize <= MAX_TEXT_BINDING_BYTES) {
+    schemaBinding = schemaJson;
+  } else {
+    embedSchema = true;
+    logStep(`Schema JSON is ${formatBytes2(schemaSize)}; embedding in worker bundle.`);
+  }
+  let inspectJson = null;
+  let inspectBinding = null;
+  let embedInspect = false;
+  if (inspectData) {
+    inspectJson = JSON.stringify(inspectData);
+    const inspectSize = byteLength(inspectJson);
+    if (inspectSize <= MAX_TEXT_BINDING_BYTES) {
+      inspectBinding = inspectJson;
+    } else {
+      embedInspect = true;
+      logStep(`Inspect data is ${formatBytes2(inspectSize)}; embedding in worker bundle.`);
+    }
+  }
   logStep("Initializing Alchemy app");
   const app = await alchemy(options.workerName);
+  let finalWorkerEntrypoint = workerEntrypoint;
+  if (embedSchema || embedInspect) {
+    finalWorkerEntrypoint = await createEmbeddedWorkerEntrypoint({
+      baseEntrypoint: workerEntrypoint,
+      outputDir: options.importsDir,
+      schemaJson: embedSchema ? schemaJson : null,
+      inspectJson: embedInspect ? inspectJson : null,
+      log: logStep
+    });
+  }
   logStep("Creating D1 database");
   const db = await D1Database(`d1-${options.d1Name}`, {
     name: options.d1Name,
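In plain terms, the block added above serializes the schema (and, when present, the precomputed inspect data), measures each payload, and only keeps it as a plain-text Worker binding when it is at or under MAX_TEXT_BINDING_BYTES; anything larger is flagged for embedding and routed through createEmbeddedWorkerEntrypoint. The 5 * 1024 cap appears to track Cloudflare's documented ~5 KB limit on plain-text binding values. A minimal sketch of that decision, using a hypothetical planDelivery helper that is not part of the package:

```js
// Illustrative sketch of the size check above, not the package's exact code.
const MAX_TEXT_BINDING_BYTES = 5 * 1024;

function byteLength(value) {
  // Same fallback idea as the bundled helper: Buffer when available, else TextEncoder.
  if (typeof Buffer !== "undefined") {
    return Buffer.byteLength(value, "utf8");
  }
  return new TextEncoder().encode(value).length;
}

// Hypothetical helper mirroring the schemaBinding/embedSchema decision.
function planDelivery(json) {
  if (byteLength(json) <= MAX_TEXT_BINDING_BYTES) {
    return { binding: json, embed: false };
  }
  return { binding: null, embed: true };
}

console.log(planDelivery("x".repeat(4 * 1024)).embed); // false: 4 KB still fits in a binding
console.log(planDelivery("x".repeat(6 * 1024)).embed); // true: 6 KB gets embedded in the bundle
```

For the log lines, formatBytes2 (defined later in this diff) renders the size, so a 6 * 1024-byte schema would be reported as "6.0 KB".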
@@ -24073,15 +24108,17 @@ async function runCloudflareDeploy(args) {
   const bindings = {
     DATASETTE_DB: db,
     ASSETS: staticAssets,
-    DATASETTE_DB_NAME: options.dbName
-    DATASETTE_SCHEMA: JSON.stringify({ [options.dbName]: schema })
+    DATASETTE_DB_NAME: options.dbName
   };
-  if (
-    bindings.
+  if (schemaBinding) {
+    bindings.DATASETTE_SCHEMA = schemaBinding;
+  }
+  if (inspectBinding) {
+    bindings.DATASETTE_INSPECT_DATA = inspectBinding;
   }
   logStep("Deploying worker");
   const worker = await Worker(`worker-${options.workerName}`, {
-    entrypoint:
+    entrypoint: finalWorkerEntrypoint,
     compatibilityFlags: ["nodejs_compat"],
     bundle: {
       external: ["bun:sqlite"]
@@ -24281,10 +24318,65 @@ function formatDuration(ms) {
   const seconds = (ms / 1e3).toFixed(1);
   return `${seconds}s`;
 }
+function byteLength(value) {
+  if (typeof Buffer !== "undefined") {
+    return Buffer.byteLength(value, "utf8");
+  }
+  return new TextEncoder().encode(value).length;
+}
+function formatBytes2(bytes) {
+  if (!Number.isFinite(bytes) || bytes < 0) {
+    return "0 B";
+  }
+  const units = ["B", "KB", "MB", "GB"];
+  let value = bytes;
+  let index = 0;
+  while (value >= 1024 && index < units.length - 1) {
+    value /= 1024;
+    index += 1;
+  }
+  const rounded = index === 0 ? value.toFixed(0) : value.toFixed(1);
+  return `${rounded} ${units[index]}`;
+}
+async function createEmbeddedWorkerEntrypoint(options) {
+  const workerDir = join(options.outputDir, WORKER_OVERRIDE_DIR);
+  await mkdir2(workerDir, { recursive: true });
+  const entrypointPath = join(workerDir, "embedded-worker.js");
+  const relativeEntrypoint = toPosixPath(relative(workerDir, options.baseEntrypoint));
+  const entrypointImport = relativeEntrypoint.startsWith(".") ? relativeEntrypoint : `./${relativeEntrypoint}`;
+  const schemaLiteral = options.schemaJson ? JSON.stringify(options.schemaJson) : "null";
+  const inspectLiteral = options.inspectJson ? JSON.stringify(options.inspectJson) : "null";
+  const contents = [
+    `import worker from ${JSON.stringify(entrypointImport)};`,
+    `const EMBEDDED_SCHEMA = ${schemaLiteral};`,
+    `const EMBEDDED_INSPECT = ${inspectLiteral};`,
+    "",
+    "export default {",
+    "  fetch(request, env, ctx) {",
+    "    let nextEnv = env;",
+    "    if (EMBEDDED_SCHEMA || EMBEDDED_INSPECT) {",
+    "      nextEnv = {",
+    "        ...env,",
+    "        ...(EMBEDDED_SCHEMA ? { DATASETTE_SCHEMA: EMBEDDED_SCHEMA } : {}),",
+    "        ...(EMBEDDED_INSPECT ? { DATASETTE_INSPECT_DATA: EMBEDDED_INSPECT } : {}),",
+    "      };",
+    "    }",
+    "    return worker.fetch(request, nextEnv, ctx);",
+    "  },",
+    "};",
+    ""
+  ].join("\n");
+  await writeFile(entrypointPath, contents, "utf8");
+  options.log(`Embedded worker entrypoint: ${entrypointPath}`);
+  return entrypointPath;
+}
+function toPosixPath(value) {
+  return value.replace(/\\/g, "/");
+}

 // src/cli/serve.ts
 init_registry();
-import { stat as stat5, writeFile } from "node:fs/promises";
+import { stat as stat5, writeFile as writeFile2 } from "node:fs/promises";
 import { join as join2 } from "node:path";

 // src/core/inspect.ts
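For orientation, the file that createEmbeddedWorkerEntrypoint writes is a small ES-module wrapper around the real worker entrypoint. A hypothetical example of its output, assuming the schema had to be embedded while the inspect data still fit in a binding; the import path and schema value are invented for illustration, and whitespace is approximate:

```js
// .datasette-ts/imports/worker/embedded-worker.js (hypothetical generated file)
import worker from "../../../dist/worker.js";
const EMBEDDED_SCHEMA = "{\"mydb\":{\"tables\":[\"plants\"]}}";
const EMBEDDED_INSPECT = null;

export default {
  fetch(request, env, ctx) {
    let nextEnv = env;
    if (EMBEDDED_SCHEMA || EMBEDDED_INSPECT) {
      nextEnv = {
        ...env,
        ...(EMBEDDED_SCHEMA ? { DATASETTE_SCHEMA: EMBEDDED_SCHEMA } : {}),
        ...(EMBEDDED_INSPECT ? { DATASETTE_INSPECT_DATA: EMBEDDED_INSPECT } : {}),
      };
    }
    return worker.fetch(request, nextEnv, ctx);
  },
};
```

At request time the wrapper overlays the embedded JSON strings onto env before delegating to worker.fetch, so the worker can still find DATASETTE_SCHEMA and DATASETTE_INSPECT_DATA on its environment even when they were too large to ship as plain-text bindings.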
@@ -24457,7 +24549,7 @@ async function runInspectCommand(args) {
   const data = await inspectDatabases(databasePaths);
   const json = JSON.stringify(data, null, 2);
   if (inspectFile) {
-    await
+    await writeFile2(inspectFile, json, "utf8");
     return;
   }
   process.stdout.write(`${json}
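Finally, the writeFile as writeFile2 rename in the serve.ts imports (and the matching call above) looks like a knock-on effect of deploy-cloudflare.ts now importing writeFile itself: when two bundled modules bind the same name, esbuild keeps them apart by suffixing one of them. A small standalone illustration, with invented file names and contents:

```js
// Illustrative only: two source modules both import writeFile, so the bundler
// renames the second binding, which is presumably why serve.ts's copy is now writeFile2.
import { writeFile } from "node:fs/promises";                 // as used by deploy-cloudflare.ts
import { writeFile as writeFile2 } from "node:fs/promises";   // serve.ts's renamed copy

// Both names refer to the same fs.promises.writeFile.
await writeFile("a.json", "{}", "utf8");
await writeFile2("b.json", "{}", "utf8");
```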
|