datasette-ts 0.0.9 → 0.0.11

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -23666,8 +23666,8 @@ var init_node4 = __esm({
  });

  // src/cli/deploy-cloudflare.ts
- import { stat as stat2 } from "node:fs/promises";
- import { dirname, extname, join, resolve } from "node:path";
+ import { mkdir as mkdir2, stat as stat2, writeFile } from "node:fs/promises";
+ import { dirname, extname, join, relative, resolve } from "node:path";
  import { fileURLToPath } from "node:url";
  import alchemy from "alchemy";
  import { Assets, D1Database, Worker } from "alchemy/cloudflare";
@@ -24018,6 +24018,7 @@ function normalizeLibsqlRows(result) {
  // src/cli/deploy-cloudflare.ts
  var DEFAULT_IMPORTS_DIR = ".datasette-ts/imports";
  var MAX_TEXT_BINDING_BYTES = 5 * 1024;
+ var WORKER_OVERRIDE_DIR = "worker";
  async function runCloudflareDeploy(args) {
  const startedAt = Date.now();
  logStep("Starting Cloudflare deploy");
@@ -24061,27 +24062,38 @@ async function runCloudflareDeploy(args) {
  const schemaJson = JSON.stringify({ [options.dbName]: schema });
  const schemaSize = byteLength(schemaJson);
  let schemaBinding = null;
+ let embedSchema = false;
  if (schemaSize <= MAX_TEXT_BINDING_BYTES) {
  schemaBinding = schemaJson;
  } else {
- logStep(
- `Schema JSON is ${formatBytes2(schemaSize)}; skipping text binding and using runtime introspection.`
- );
+ embedSchema = true;
+ logStep(`Schema JSON is ${formatBytes2(schemaSize)}; embedding in worker bundle.`);
  }
+ let inspectJson = null;
  let inspectBinding = null;
+ let embedInspect = false;
  if (inspectData) {
- const inspectJson = JSON.stringify(inspectData);
+ inspectJson = JSON.stringify(inspectData);
  const inspectSize = byteLength(inspectJson);
  if (inspectSize <= MAX_TEXT_BINDING_BYTES) {
  inspectBinding = inspectJson;
  } else {
- logStep(
- `Inspect data is ${formatBytes2(inspectSize)}; skipping text binding.`
- );
+ embedInspect = true;
+ logStep(`Inspect data is ${formatBytes2(inspectSize)}; embedding in worker bundle.`);
  }
  }
  logStep("Initializing Alchemy app");
  const app = await alchemy(options.workerName);
+ let finalWorkerEntrypoint = workerEntrypoint;
+ if (embedSchema || embedInspect) {
+ finalWorkerEntrypoint = await createEmbeddedWorkerEntrypoint({
+ baseEntrypoint: workerEntrypoint,
+ outputDir: options.importsDir,
+ schemaJson: embedSchema ? schemaJson : null,
+ inspectJson: embedInspect ? inspectJson : null,
+ log: logStep
+ });
+ }
  logStep("Creating D1 database");
  const db = await D1Database(`d1-${options.d1Name}`, {
  name: options.d1Name,
@@ -24106,7 +24118,8 @@ async function runCloudflareDeploy(args) {
  }
  logStep("Deploying worker");
  const worker = await Worker(`worker-${options.workerName}`, {
- entrypoint: workerEntrypoint,
+ name: options.workerName,
+ entrypoint: finalWorkerEntrypoint,
  compatibilityFlags: ["nodejs_compat"],
  bundle: {
  external: ["bun:sqlite"]
@@ -24326,10 +24339,45 @@ function formatBytes2(bytes) {
  const rounded = index === 0 ? value.toFixed(0) : value.toFixed(1);
  return `${rounded} ${units[index]}`;
  }
+ async function createEmbeddedWorkerEntrypoint(options) {
+ const workerDir = join(options.outputDir, WORKER_OVERRIDE_DIR);
+ await mkdir2(workerDir, { recursive: true });
+ const entrypointPath = join(workerDir, "embedded-worker.js");
+ const relativeEntrypoint = toPosixPath(relative(workerDir, options.baseEntrypoint));
+ const entrypointImport = relativeEntrypoint.startsWith(".") ? relativeEntrypoint : `./${relativeEntrypoint}`;
+ const schemaLiteral = options.schemaJson ? JSON.stringify(options.schemaJson) : "null";
+ const inspectLiteral = options.inspectJson ? JSON.stringify(options.inspectJson) : "null";
+ const contents = [
+ `import worker from ${JSON.stringify(entrypointImport)};`,
+ `const EMBEDDED_SCHEMA = ${schemaLiteral};`,
+ `const EMBEDDED_INSPECT = ${inspectLiteral};`,
+ "",
+ "export default {",
+ " fetch(request, env, ctx) {",
+ " let nextEnv = env;",
+ " if (EMBEDDED_SCHEMA || EMBEDDED_INSPECT) {",
+ " nextEnv = {",
+ " ...env,",
+ " ...(EMBEDDED_SCHEMA ? { DATASETTE_SCHEMA: EMBEDDED_SCHEMA } : {}),",
+ " ...(EMBEDDED_INSPECT ? { DATASETTE_INSPECT_DATA: EMBEDDED_INSPECT } : {}),",
+ " };",
+ " }",
+ " return worker.fetch(request, nextEnv, ctx);",
+ " },",
+ "};",
+ ""
+ ].join("\n");
+ await writeFile(entrypointPath, contents, "utf8");
+ options.log(`Embedded worker entrypoint: ${entrypointPath}`);
+ return entrypointPath;
+ }
+ function toPosixPath(value) {
+ return value.replace(/\\/g, "/");
+ }

  // src/cli/serve.ts
  init_registry();
- import { stat as stat5, writeFile } from "node:fs/promises";
+ import { stat as stat5, writeFile as writeFile2 } from "node:fs/promises";
  import { join as join2 } from "node:path";

  // src/core/inspect.ts
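For reference, the new createEmbeddedWorkerEntrypoint helper in the hunk above writes a small wrapper module into the imports directory and points the Worker at it. The sketch below shows roughly what that generated embedded-worker.js would contain; the import path and the embedded JSON values are illustrative placeholders (the real values are computed at deploy time), and the body is reindented for readability.

// Illustrative sketch of the generated embedded-worker.js; path and JSON values are placeholders.
import worker from "../../worker/index.js"; // hypothetical relative path back to the base worker entrypoint
const EMBEDDED_SCHEMA = "{\"mydb\":{}}"; // JSON string when the schema is too large for a text binding, otherwise null
const EMBEDDED_INSPECT = null; // JSON string when inspect data is embedded, otherwise null

export default {
  fetch(request, env, ctx) {
    let nextEnv = env;
    if (EMBEDDED_SCHEMA || EMBEDDED_INSPECT) {
      // Overlay the embedded payloads onto the environment before delegating to the base worker.
      nextEnv = {
        ...env,
        ...(EMBEDDED_SCHEMA ? { DATASETTE_SCHEMA: EMBEDDED_SCHEMA } : {}),
        ...(EMBEDDED_INSPECT ? { DATASETTE_INSPECT_DATA: EMBEDDED_INSPECT } : {}),
      };
    }
    return worker.fetch(request, nextEnv, ctx);
  },
};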
@@ -24502,7 +24550,7 @@ async function runInspectCommand(args) {
  const data = await inspectDatabases(databasePaths);
  const json = JSON.stringify(data, null, 2);
  if (inspectFile) {
- await writeFile(inspectFile, json, "utf8");
+ await writeFile2(inspectFile, json, "utf8");
  return;
  }
  process.stdout.write(`${json}