create-nodejs-fn 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.mjs ADDED
@@ -0,0 +1,455 @@
+ #!/usr/bin/env node
+ import {
+   ensureDir,
+   writeFileIfChanged
+ } from "./chunk-XP4XDEWF.mjs";
+
+ // src/cli.ts
+ import fs from "fs";
+ import path from "path";
+ import { fileURLToPath } from "url";
+ import readline from "readline/promises";
+ import { cli, define } from "gunshi";
+ var moduleDir = typeof __dirname === "string" ? __dirname : path.dirname(fileURLToPath(import.meta.url));
+ var pkgJsonPath = path.resolve(moduleDir, "../package.json");
+ var pkg = fs.existsSync(pkgJsonPath) ? JSON.parse(fs.readFileSync(pkgJsonPath, "utf8")) : { version: "0.0.0" };
+ var VERSION = pkg.version ?? "0.0.0";
+ var DEFAULTS = {
+   className: "NodejsFnContainer",
+   binding: "NODEJS_FN",
+   image: "./.create-nodejs-fn/Dockerfile",
+   maxInstances: 10
+ };
+ var initCommand = define({
+   name: "init",
+   description: "Configure create-nodejs-fn for an existing Workers project (requires wrangler.jsonc)",
+   args: {
+     yes: {
+       type: "boolean",
+       short: "y",
+       description: "Non-interactive. Assume defaults and skip confirmations (safe defaults)."
+     },
+     force: {
+       type: "boolean",
+       short: "f",
+       description: "Overwrite existing files without asking (also rewrites wrangler.jsonc even if it has comments)."
+     },
+     "dry-run": {
+       type: "boolean",
+       description: "Show what would change, but do not write files."
+     },
+     "class-name": {
+       type: "string",
+       description: `Container class name (default: ${DEFAULTS.className})`
+     },
+     binding: {
+       type: "string",
+       description: `Durable Object binding name (default: ${DEFAULTS.binding})`
+     },
+     image: {
+       type: "string",
+       description: `Container Dockerfile path for wrangler containers.image (default: ${DEFAULTS.image})`
+     },
+     "max-instances": {
+       type: "number",
+       description: `Max container instances (default: ${DEFAULTS.maxInstances})`
+     },
+     "compatibility-date": {
+       type: "string",
+       description: "Compatibility date. If omitted, keeps existing; if missing, uses today (UTC)."
+     },
+     entry: {
+       type: "string",
+       description: "Entry file to append export to. If omitted, uses wrangler.jsonc main; otherwise tries common files; else skips."
+     }
+   },
+   run: async (ctx) => {
+     const opts = {
+       yes: Boolean(ctx.values.yes),
+       force: Boolean(ctx.values.force),
+       dryRun: Boolean(ctx.values["dry-run"]),
+       className: String(ctx.values["class-name"] ?? DEFAULTS.className),
+       binding: String(ctx.values.binding ?? DEFAULTS.binding),
+       image: String(ctx.values.image ?? DEFAULTS.image),
+       maxInstances: Number(ctx.values["max-instances"] ?? DEFAULTS.maxInstances),
+       compatibilityDate: ctx.values["compatibility-date"] ? String(ctx.values["compatibility-date"]) : void 0,
+       entry: ctx.values.entry ? String(ctx.values.entry) : void 0
+     };
+     await runInit(opts);
+   }
+ });
+ async function runInit(opts) {
+   const cwd = process.cwd();
+   const prompter = createPrompter(opts.yes);
+   const wranglerPath = path.join(cwd, "wrangler.jsonc");
+   if (!fs.existsSync(wranglerPath)) {
+     logError(`wrangler.jsonc not found: ${wranglerPath}`);
+     logInfo("This CLI is designed for existing Workers projects with wrangler.jsonc.");
+     process.exitCode = 1;
+     return;
+   }
+   const wrangler = readWranglerJsonc(wranglerPath);
+   if (!wrangler.data || typeof wrangler.data !== "object") {
+     logError("Failed to parse wrangler.jsonc (it might be invalid JSONC).");
+     process.exitCode = 1;
+     return;
+   }
+   const plan = {
+     created: [],
+     updated: [],
+     skipped: [],
+     notes: []
+   };
+   pushResult(plan, await ensureDockerfileFromImagePath(cwd, opts, prompter));
+   pushResult(plan, updateGitignore(cwd, opts));
+   pushResult(plan, ensureGeneratedDir(cwd, opts));
+   pushResult(
+     plan,
+     await writeWranglerJsoncForExistingProject(wranglerPath, wrangler, opts, prompter)
+   );
+   const entry = resolveEntryFile(cwd, wrangler.data, opts);
+   if (!entry) {
+     plan.skipped.push("(entry) export injection (could not determine entry file)");
+     plan.notes.push(
+       `Could not determine an entry file, so export injection was skipped. If needed, manually export ${opts.className}.`
+     );
+   } else {
+     pushResult(plan, ensureEntryExportsDo(cwd, entry, opts));
+   }
+   prompter.close();
+   printSummary(plan, opts);
+   printReminders(cwd);
+ }
+ async function ensureDockerfileFromImagePath(cwd, opts, prompter) {
+   const dockerfileRel = normalizePathLike(opts.image);
+   const dockerfileAbs = path.resolve(cwd, dockerfileRel);
+   const dockerDir = path.dirname(dockerfileAbs);
+   ensureDir(dockerDir);
+   const relDisplay = path.relative(cwd, dockerfileAbs).replace(/\\/g, "/");
+   const beforeExists = fs.existsSync(dockerfileAbs);
+   const content = [
+     "# create-nodejs-fn container image",
+     "# Generated by `create-nodejs-fn init`. The build step will refresh this file.",
+     "FROM node:20-slim",
+     "WORKDIR /app",
+     "RUN corepack enable",
+     "",
+     "# Dependencies are injected via the generated package.json during build.",
+     "COPY package.json ./",
+     "RUN pnpm install --prod --no-frozen-lockfile",
+     "",
+     "# The server bundle is generated at build time.",
+     "COPY ./server.mjs ./server.mjs",
+     "ENV NODE_ENV=production",
+     "EXPOSE 8080",
+     'CMD ["node", "./server.mjs"]',
+     ""
+   ].join("\n");
+   if (beforeExists && !opts.force) {
+     const existing = fs.readFileSync(dockerfileAbs, "utf8");
+     if (existing === content) return { status: "skipped", file: relDisplay };
+     const overwrite = await prompter.confirm(`${relDisplay} already exists. Overwrite it?`, false);
+     if (!overwrite) return { status: "skipped", file: relDisplay };
+   }
+   if (!opts.dryRun) writeFileIfChanged(dockerfileAbs, content);
+   return { status: beforeExists ? "updated" : "created", file: relDisplay };
+ }
+ function updateGitignore(cwd, opts) {
+   const target = path.join(cwd, ".gitignore");
+   const beforeExists = fs.existsSync(target);
+   const existing = beforeExists ? fs.readFileSync(target, "utf8").split(/\r?\n/) : [];
+   const block = [
+     "# create-nodejs-fn",
+     ".create-nodejs-fn/*",
+     "!.create-nodejs-fn/Dockerfile",
+     "src/__generated__"
+   ];
+   const lines = [...existing];
+   const present = new Set(lines);
+   const missing = block.filter((line) => !present.has(line));
+   const changed = missing.length > 0;
+   if (!changed) return { status: "skipped", file: ".gitignore" };
+   if (lines.length && lines[lines.length - 1] !== "") lines.push("");
+   for (const line of block) if (!present.has(line)) lines.push(line);
+   const cleaned = trimBlankDuplicates(lines).join("\n");
+   const next = cleaned.endsWith("\n") ? cleaned : `${cleaned}
+ `;
+   if (!opts.dryRun) fs.writeFileSync(target, next);
+   return { status: beforeExists ? "updated" : "created", file: ".gitignore" };
+ }
+ function ensureGeneratedDir(cwd, opts) {
+   const dir = path.join(cwd, "src", "__generated__");
+   const rel = "src/__generated__/";
+   if (fs.existsSync(dir)) return { status: "skipped", file: rel };
+   if (!opts.dryRun) ensureDir(dir);
+   return { status: "created", file: rel };
+ }
+ async function writeWranglerJsoncForExistingProject(wranglerPath, read, opts, prompter) {
+   const before = read.data;
+   const merged = mergeWranglerConfigExisting(before, {
+     className: opts.className,
+     binding: opts.binding,
+     image: opts.image,
+     maxInstances: opts.maxInstances
+   });
+   const nextBody = JSON.stringify(merged, null, 2);
+   const next = `${nextBody}
+ `;
+   const beforeBody = JSON.stringify(before, null, 2);
+   if (beforeBody === nextBody) return { status: "skipped", file: "wrangler.jsonc" };
+   if (read.hasJsoncComments && !opts.force && !opts.yes) {
+     const ok = await prompter.confirm(
+       "wrangler.jsonc contains comments. This CLI cannot preserve JSONC comments and will rewrite it as JSON. Continue?",
+       false
+     );
+     if (!ok) return { status: "skipped", file: "wrangler.jsonc" };
+   }
+   if (!opts.dryRun) writeFileIfChanged(wranglerPath, next);
+   return { status: "updated", file: "wrangler.jsonc" };
+ }
+ function mergeWranglerConfigExisting(base, params) {
+   const out = { ...base };
+   const containers = Array.isArray(out.containers) ? [...out.containers] : [];
+   const idx = containers.findIndex((c) => c?.class_name === params.className);
+   const entry = {
+     class_name: params.className,
+     image: normalizePathLike(params.image),
+     max_instances: params.maxInstances
+   };
+   if (idx >= 0) containers[idx] = { ...containers[idx], ...entry };
+   else containers.push(entry);
+   out.containers = containers;
+   const durable = typeof out.durable_objects === "object" && out.durable_objects !== null ? { ...out.durable_objects } : {};
+   const bindings = Array.isArray(durable.bindings) ? [...durable.bindings] : [];
+   const bidx = bindings.findIndex((b) => b?.name === params.binding);
+   const be = { name: params.binding, class_name: params.className };
+   if (bidx >= 0) bindings[bidx] = { ...bindings[bidx], ...be };
+   else bindings.push(be);
+   durable.bindings = bindings;
+   out.durable_objects = durable;
+   const migrations = Array.isArray(out.migrations) ? [...out.migrations] : [];
+   const already = migrations.some(
+     (m) => Array.isArray(m?.new_sqlite_classes) && m.new_sqlite_classes.includes(params.className)
+   ) || migrations.some(
+     (m) => Array.isArray(m?.new_classes) && m.new_classes.includes(params.className)
+   );
+   if (!already) {
+     const tag = nextMigrationTag(migrations);
+     migrations.push({ tag, new_sqlite_classes: [params.className] });
+   }
+   out.migrations = migrations;
+   return out;
+ }
+ function ensureEntryExportsDo(cwd, entryRelInput, opts) {
+   const entryRel = normalizeEntryRel(entryRelInput);
+   const entryAbs = path.join(cwd, entryRel);
+   ensureDir(path.dirname(entryAbs));
+   const doAbs = path.join(cwd, "src", "__generated__", "create-nodejs-fn.do.ts");
+   const doRel = path.relative(path.dirname(entryAbs), doAbs).replace(/\\/g, "/").replace(/\.ts$/, "");
+   const exportLine = `export { ${opts.className} } from "${doRel.startsWith(".") ? doRel : `./${doRel}`}";`;
+   const display = entryRel.replace(/\\/g, "/");
+   const beforeExists = fs.existsSync(entryAbs);
+   if (beforeExists) {
+     const content = fs.readFileSync(entryAbs, "utf8");
+     const already = content.includes(exportLine) || content.match(
+       new RegExp(
+         `export\\s+\\{\\s*${escapeRegExp(opts.className)}\\s*\\}.*create-nodejs-fn\\.do`
+       )
+     );
+     if (already) return { status: "skipped", file: display };
+     const next = content.endsWith("\n") ? `${content}${exportLine}
+ ` : `${content}
+ ${exportLine}
+ `;
+     if (!opts.dryRun) writeFileIfChanged(entryAbs, next);
+     return { status: "updated", file: display };
+   }
+   if (!opts.dryRun) writeFileIfChanged(entryAbs, `${exportLine}
+ `);
+   return { status: "created", file: display };
+ }
+ function readWranglerJsonc(filePath) {
+   const raw = fs.readFileSync(filePath, "utf8");
+   const hasJsoncComments = /\/\*[\s\S]*?\*\//.test(raw) || /(^|[^:])\/\/.*$/m.test(raw);
+   const withoutBlock = raw.replace(/\/\*[\s\S]*?\*\//g, "");
+   const withoutLine = withoutBlock.replace(/(^|[^:])\/\/.*$/gm, "$1");
+   try {
+     const data = JSON.parse(withoutLine);
+     return { raw, data, hasJsoncComments };
+   } catch {
+     return { raw, data: null, hasJsoncComments };
+   }
+ }
+ function resolveEntryFile(cwd, wrangler, opts) {
+   if (opts.entry) return opts.entry;
+   const main2 = typeof wrangler?.main === "string" ? wrangler.main.trim() : "";
+   if (main2) return main2;
+   const candidates = [
+     "src/index.ts",
+     "src/index.tsx",
+     "src/worker.ts",
+     "src/worker.tsx",
+     "src/main.ts",
+     "src/main.tsx",
+     "index.ts"
+   ];
+   for (const c of candidates) {
+     if (fs.existsSync(path.join(cwd, c))) return c;
+   }
+   return null;
+ }
+ function normalizeEntryRel(input) {
+   const s = input.startsWith("./") ? input.slice(2) : input;
+   return s.replace(/\\/g, "/");
+ }
+ function normalizePathLike(input) {
+   const s = input.trim();
+   if (!s) return input;
+   const withDot = s.startsWith("./") || s.startsWith("/") ? s : `./${s}`;
+   return withDot.replace(/\\/g, "/");
+ }
+ function nextMigrationTag(migrations) {
+   const tags = /* @__PURE__ */ new Set();
+   for (const m2 of migrations) if (m2?.tag) tags.add(String(m2.tag));
+   const lastTag = migrations.length ? String(migrations[migrations.length - 1]?.tag ?? "") : "";
+   const m = lastTag.match(/^v(\d+)$/);
+   if (m) {
+     let n = Number(m[1]) + 1;
+     while (tags.has(`v${n}`)) n++;
+     return `v${n}`;
+   }
+   let i = 1;
+   while (tags.has(`cnf${i}`)) i++;
+   return migrations.length ? `cnf${i}` : "v1";
+ }
+ function createPrompter(skip) {
+   if (skip || !process.stdin.isTTY) {
+     return {
+       confirm: async (_message, defaultValue = false) => defaultValue,
+       close: () => {
+       }
+     };
+   }
+   const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
+   return {
+     async confirm(message, defaultValue = false) {
+       const suffix = defaultValue ? " [Y/n] " : " [y/N] ";
+       const answer = (await rl.question(`${message}${suffix}`)).trim().toLowerCase();
+       if (!answer) return defaultValue;
+       return answer.startsWith("y");
+     },
+     close() {
+       rl.close();
+     }
+   };
+ }
+ function pushResult(plan, r) {
+   if (r.status === "created") plan.created.push(r.file);
+   else if (r.status === "updated") plan.updated.push(r.file);
+   else plan.skipped.push(r.file);
+ }
+ function printSummary(plan, opts) {
+   const header = opts.dryRun ? "\u{1F9EA} Dry run: previewing changes" : "\u2705 create-nodejs-fn: setup complete";
+   console.log(`
+ ${header}`);
+   const printBlock = (title, items) => {
+     if (!items.length) return;
+     console.log(`
+ ${title}`);
+     for (const it of items) console.log(`  - ${it}`);
+   };
+   printBlock("Created", plan.created);
+   printBlock("Updated", plan.updated);
+   printBlock("Skipped", plan.skipped);
+   if (plan.notes.length) {
+     console.log("\nNotes");
+     for (const n of plan.notes) console.log(`  - ${n}`);
+   }
+   console.log("");
+ }
+ function printReminders(cwd) {
+   const pkgPath = path.join(cwd, "package.json");
+   const pkg2 = fs.existsSync(pkgPath) ? safeJsonRead(pkgPath) : null;
+   const viteConfig = findFirstExisting(cwd, [
+     "vite.config.ts",
+     "vite.config.js",
+     "vite.config.mjs",
+     "vite.config.cjs"
+   ]);
+   if (viteConfig) {
+     const raw = fs.readFileSync(viteConfig, "utf8");
+     const hasPlugin = raw.includes("createNodejsFnPlugin");
+     if (!hasPlugin) {
+       console.log(
+         "\u{1F4CC} Reminder: If you use Vite, add the plugin to your Vite config (not detected)."
+       );
+       console.log('  import { createNodejsFnPlugin } from "create-nodejs-fn";');
+       console.log("  export default defineConfig({ plugins: [createNodejsFnPlugin()] });\n");
+     }
+   } else {
+     console.log("\u{1F4CC} Reminder: If you use Vite, add the plugin to your Vite config.");
+     console.log('  import { createNodejsFnPlugin } from "create-nodejs-fn";');
+     console.log("  export default defineConfig({ plugins: [createNodejsFnPlugin()] });\n");
+   }
+   const missing = [];
+   const deps = { ...pkg2?.dependencies ?? {}, ...pkg2?.devDependencies ?? {} };
+   if (!deps["@cloudflare/containers"]) missing.push("@cloudflare/containers");
+   if (!deps["capnweb"]) missing.push("capnweb@0.2.0");
+   if (missing.length) {
+     const pm = detectPackageManager(cwd, pkg2);
+     console.log("\u{1F4CC} Reminder: Install required dependencies for Workers containers.");
+     console.log(`  ${pm} add ${missing.join(" ")}
+ `);
+   }
+ }
+ function safeJsonRead(p) {
+   try {
+     return JSON.parse(fs.readFileSync(p, "utf8"));
+   } catch {
+     return null;
+   }
+ }
+ function findFirstExisting(cwd, files) {
+   for (const f of files) {
+     const abs = path.join(cwd, f);
+     if (fs.existsSync(abs)) return abs;
+   }
+   return null;
+ }
+ function detectPackageManager(cwd, pkg2) {
+   const pmField = typeof pkg2?.packageManager === "string" ? pkg2.packageManager : "";
+   if (pmField.startsWith("pnpm")) return "pnpm";
+   if (pmField.startsWith("yarn")) return "yarn";
+   if (pmField.startsWith("bun")) return "bun";
+   if (fs.existsSync(path.join(cwd, "pnpm-lock.yaml"))) return "pnpm";
+   if (fs.existsSync(path.join(cwd, "yarn.lock"))) return "yarn";
+   if (fs.existsSync(path.join(cwd, "bun.lockb"))) return "bun";
+   return "npm";
+ }
+ function trimBlankDuplicates(lines) {
+   const out = [];
+   for (const line of lines) {
+     if (line === "" && out.length > 0 && out[out.length - 1] === "") continue;
+     out.push(line);
+   }
+   return out;
+ }
+ function escapeRegExp(s) {
+   return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ }
+ function logInfo(msg) {
+   console.log(`\u2139\uFE0F ${msg}`);
+ }
+ function logError(msg) {
+   console.error(`\u2716 ${msg}`);
+ }
+ async function main() {
+   await cli(process.argv.slice(2), initCommand, {
+     name: "create-nodejs-fn",
+     version: VERSION
+   });
+ }
+ main().catch((err) => {
+   console.error(err);
+   process.exitCode = 1;
+ });
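
For orientation, the sketch below illustrates what init produces under the default options (class name NodejsFnContainer, binding NODEJS_FN, an empty migrations list, and an entry file at src/index.ts). The values are derived from DEFAULTS, mergeWranglerConfigExisting, nextMigrationTag, and ensureEntryExportsDo above; the variable name wranglerAdditions is illustrative only, and the actual import path and migration tag depend on the project.

  // Line appended to src/index.ts by ensureEntryExportsDo (sketch):
  export { NodejsFnContainer } from "./__generated__/create-nodejs-fn.do";

  // Fields merged into wrangler.jsonc by mergeWranglerConfigExisting (sketch, default values):
  const wranglerAdditions = {
    containers: [
      { class_name: "NodejsFnContainer", image: "./.create-nodejs-fn/Dockerfile", max_instances: 10 }
    ],
    durable_objects: { bindings: [{ name: "NODEJS_FN", class_name: "NodejsFnContainer" }] },
    migrations: [{ tag: "v1", new_sqlite_classes: ["NodejsFnContainer"] }]
  };
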
@@ -0,0 +1,36 @@
+ import { Plugin } from 'vite';
+
+ type DockerOptions = {
+   baseImage?: string;
+   systemPackages?: string[];
+   preInstallCommands?: string[];
+   postInstallCommands?: string[];
+   env?: Record<string, string>;
+   extraLines?: string[];
+ };
+ type Opts = {
+   files?: string[];
+   generatedDir?: string;
+   binding?: string;
+   className?: string;
+   containerPort?: number;
+   external?: string[];
+   docker?: DockerOptions;
+   /**
+    * Worker env vars to forward into the container via Container.envVars.
+    * Accepts an array of names (same name) or a map of containerName -> workerEnvKey.
+    */
+   workerEnvVars?: string[] | Record<string, string>;
+   /**
+    * Automatically rebuild containers in local dev when *.container.ts files change. default: true
+    */
+   autoRebuildContainers?: boolean;
+   /**
+    * Debounce duration in milliseconds for container rebuilds during dev. default: 200
+    */
+   rebuildDebounceMs?: number;
+ };
+
+ declare function createNodejsFnPlugin(opts?: Opts): Plugin;
+
+ export { createNodejsFnPlugin };
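
For completeness, a minimal usage sketch of the plugin typed above, following the reminder printed by the CLI; all Opts fields are optional, so the plugin is shown here without arguments.

  // vite.config.ts (sketch, mirroring the CLI reminder)
  import { defineConfig } from "vite";
  import { createNodejsFnPlugin } from "create-nodejs-fn";

  export default defineConfig({
    // Opts such as binding, className, containerPort, or docker can be passed here; see the declaration above.
    plugins: [createNodejsFnPlugin()]
  });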