@bensandee/tooling 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin.mjs ADDED
@@ -0,0 +1,2371 @@
1
+ #!/usr/bin/env node
2
+ import { defineCommand, runMain } from "citty";
3
+ import * as p from "@clack/prompts";
4
+ import path from "node:path";
5
+ import { existsSync, mkdirSync, readFileSync, readdirSync, writeFileSync } from "node:fs";
6
+ import { parse } from "jsonc-parser";
7
+ import { z } from "zod";
8
+ import { execSync } from "node:child_process";
9
+
10
//#region src/types.ts
/** Default CI platform when not explicitly chosen. */
const DEFAULT_CI = "forgejo";
/**
 * Tools whose leftover config files repo:init scans for and whose removal
 * the generated migration prompt walks through. Each entry must have a
 * matching key in LEGACY_PATTERNS.
 */
const LEGACY_TOOLS = [
	"eslint",
	"prettier",
	"jest",
	"webpack",
	"rollup"
];
20
+
21
+ //#endregion
22
//#region src/utils/json.ts
/** Reusable zod shape for string→string maps (scripts, deps, engines, …). */
const StringRecord = z.record(z.string(), z.string());
/**
 * Lenient package.json schema: validates only the fields this tool reads;
 * `.loose()` lets unknown keys pass through unmodified.
 */
const PackageJsonSchema = z.object({
	name: z.string().optional(),
	version: z.string().optional(),
	private: z.boolean().optional(),
	type: z.string().optional(),
	scripts: StringRecord.optional(),
	dependencies: StringRecord.optional(),
	devDependencies: StringRecord.optional(),
	bin: z.union([z.string(), StringRecord]).optional(),
	exports: z.unknown().optional(),
	main: z.string().optional(),
	types: z.string().optional(),
	typings: z.string().optional(),
	engines: StringRecord.optional()
}).loose();
/** Subset of tsconfig.json fields the generators inspect or rewrite. */
const TsconfigSchema = z.object({
	extends: z.union([z.string(), z.array(z.string())]).optional(),
	include: z.array(z.string()).optional(),
	exclude: z.array(z.string()).optional(),
	files: z.array(z.string()).optional(),
	references: z.array(z.object({ path: z.string() })).optional(),
	compilerOptions: z.record(z.string(), z.unknown()).optional()
}).loose();
/** Minimal renovate.json shape. */
const RenovateSchema = z.object({
	$schema: z.string().optional(),
	extends: z.array(z.string()).optional()
}).loose();
/** Per-workspace entry inside knip.json's `workspaces` map. */
const KnipWorkspaceConfig = z.object({
	entry: z.array(z.string()).optional(),
	project: z.array(z.string()).optional(),
	ignore: z.array(z.string()).optional()
});
/** Root knip.json shape, including optional per-workspace overrides. */
const KnipSchema = z.object({
	$schema: z.string().optional(),
	entry: z.array(z.string()).optional(),
	project: z.array(z.string()).optional(),
	ignore: z.array(z.string()).optional(),
	workspaces: z.record(z.string(), KnipWorkspaceConfig).optional()
}).loose();
63
/** Parse a JSONC string as a tsconfig.json. Returns a typed object with `{}` fallback on failure. */
function parseTsconfig(raw) {
	const outcome = TsconfigSchema.safeParse(parse(raw));
	if (!outcome.success) return {};
	return outcome.data;
}
68
/** Parse a JSON string as a renovate.json. Returns a typed object with `{}` fallback on failure. */
function parseRenovateJson(raw) {
	const outcome = RenovateSchema.safeParse(parse(raw));
	if (!outcome.success) return {};
	return outcome.data;
}
73
/** Parse a JSONC string as a knip.json. Returns a typed object with `{}` fallback on failure. */
function parseKnipJson(raw) {
	const outcome = KnipSchema.safeParse(parse(raw));
	if (!outcome.success) return {};
	return outcome.data;
}
78
/** Parse a JSON string as a package.json. Returns `undefined` on failure. */
function parsePackageJson(raw) {
	let data;
	try {
		data = JSON.parse(raw);
	} catch {
		// Malformed JSON: caller treats undefined as "no usable manifest".
		return void 0;
	}
	const outcome = PackageJsonSchema.safeParse(data);
	if (!outcome.success) return void 0;
	return outcome.data;
}
87
+
88
+ //#endregion
89
//#region src/utils/detect.ts
/**
 * Filename patterns that identify each legacy tool's config files in a
 * directory listing. Keys mirror the entries of LEGACY_TOOLS; used by
 * detectLegacyConfigs to match entries from readdirSync.
 */
const LEGACY_PATTERNS = {
	eslint: [/^\.eslintrc/, /^eslint\.config\./],
	prettier: [/^\.prettierrc/, /^prettier\.config\./],
	jest: [/^jest\.config\./, /^jest\.setup\./],
	webpack: [/^webpack\.config\./],
	rollup: [/^rollup\.config\./]
};
97
/** Detect existing project state in the target directory. */
function detectProject(targetDir) {
	// Helpers over existsSync so each flag below reads declaratively.
	const has = (rel) => existsSync(path.join(targetDir, rel));
	const hasAny = (...rels) => rels.some(has);
	return {
		hasPackageJson: has("package.json"),
		hasTsconfig: has("tsconfig.json"),
		hasOxlintConfig: has("oxlint.config.ts"),
		hasLegacyOxlintJson: has(".oxlintrc.json"),
		hasGitignore: has(".gitignore"),
		hasVitestConfig: hasAny("vitest.config.ts", "vitest.config.mts"),
		hasTsdownConfig: hasAny("tsdown.config.ts", "tsdown.config.mts"),
		hasPnpmWorkspace: has("pnpm-workspace.yaml"),
		hasKnipConfig: hasAny("knip.json", "knip.jsonc", "knip.ts", "knip.mts", "knip.config.ts", "knip.config.mts"),
		hasRenovateConfig: hasAny("renovate.json", "renovate.json5", ".renovaterc", ".renovaterc.json", ".github/renovate.json", ".github/renovate.json5"),
		hasReleaseItConfig: hasAny(".release-it.json", ".release-it.yaml", ".release-it.toml"),
		hasCommitAndTagVersionConfig: hasAny(".versionrc", ".versionrc.json", ".versionrc.js"),
		hasChangesetsConfig: has(".changeset/config.json"),
		legacyConfigs: detectLegacyConfigs(targetDir)
	};
}
117
/** Scan for legacy/conflicting tooling config files. */
function detectLegacyConfigs(targetDir) {
	let entries;
	try {
		entries = readdirSync(targetDir);
	} catch (_error) {
		// Missing/unreadable directory: nothing to report.
		return [];
	}
	const found = [];
	for (const tool of LEGACY_TOOLS) {
		const patterns = LEGACY_PATTERNS[tool];
		const files = [];
		for (const entry of entries) {
			if (patterns.some((pattern) => pattern.test(entry))) files.push(entry);
		}
		if (files.length > 0) found.push({ tool, files });
	}
	return found;
}
136
/** Read and parse existing package.json, or return undefined. */
function readPackageJson(targetDir) {
	const pkgPath = path.join(targetDir, "package.json");
	if (!existsSync(pkgPath)) return void 0;
	let raw;
	try {
		raw = readFileSync(pkgPath, "utf-8");
	} catch (_error) {
		// Race or permission failure between existsSync and read.
		return void 0;
	}
	return parsePackageJson(raw);
}
146
/** Detect whether the project is a monorepo (pnpm-workspace.yaml present). */
function detectMonorepo(targetDir) {
	const workspaceManifest = path.join(targetDir, "pnpm-workspace.yaml");
	return existsSync(workspaceManifest);
}
150
/** Detect project type from package.json signals. */
function detectProjectType(targetDir) {
	const pkg = readPackageJson(targetDir);
	if (!pkg) return "default";
	// React deps (either react or react-dom) take precedence.
	const deps = { ...pkg.dependencies, ...pkg.devDependencies };
	if ("react" in deps || "react-dom" in deps) return "react";
	// A bin entry or a script invoking a Node runtime marks a Node project.
	const scripts = pkg.scripts ?? {};
	const runsOnNode = Object.values(scripts).some((cmd) => typeof cmd === "string" && /\b(node|tsx|ts-node)\b/.test(cmd));
	if (Boolean(pkg.bin) || runsOnNode) return "node";
	// Publishing entry points suggest a library.
	if (pkg.exports || pkg.main || pkg.types || pkg.typings) return "library";
	return "default";
}
164
/** List packages in a monorepo's packages/ directory. */
function getMonorepoPackages(targetDir) {
	const packagesDir = path.join(targetDir, "packages");
	if (!existsSync(packagesDir)) return [];
	const packages = [];
	try {
		for (const entry of readdirSync(packagesDir, { withFileTypes: true })) {
			if (!entry.isDirectory()) continue;
			const pkgDir = path.join(packagesDir, entry.name);
			const manifest = readPackageJson(pkgDir);
			// Directories without a readable manifest are not packages.
			if (!manifest) continue;
			packages.push({
				dir: pkgDir,
				name: manifest.name ?? entry.name
			});
		}
	} catch (_error) {
		// Best effort: return whatever was collected before the failure.
	}
	return packages;
}
183
+
184
+ //#endregion
185
//#region src/prompts/init-prompts.ts
/** True when a clack prompt result represents user cancellation (e.g. Ctrl-C). */
const isCancelled = (value) => p.isCancel(value);
189
/**
 * Interactive wizard for `repo:init`.
 *
 * Asks, in order: project structure, lint-rules inclusion, formatter,
 * vitest setup, CI platform, Renovate (GitHub CI only), release strategy,
 * and project type (single repo) or per-package type detection (monorepo).
 * Defaults are seeded from what detectProject/detectMonorepo found on disk.
 * Every cancelled prompt (Ctrl-C) prints "Cancelled." and exits with code 0.
 * Returns the assembled ProjectConfig consumed by the generators.
 */
async function runInitPrompts(targetDir) {
	p.intro("@bensandee/tooling repo:init");
	const existingPkg = readPackageJson(targetDir);
	const detected = detectProject(targetDir);
	const isExisting = detected.hasPackageJson;
	// Fall back to the directory name when no package.json name exists.
	const name = existingPkg?.name ?? path.basename(targetDir);
	const detectedMonorepo = detectMonorepo(targetDir);
	const structure = await p.select({
		message: "Project structure",
		initialValue: detectedMonorepo ? "monorepo" : "single",
		options: [{
			value: "single",
			label: "Single repo"
		}, {
			value: "monorepo",
			label: "Monorepo (pnpm workspaces)"
		}]
	});
	if (isCancelled(structure)) {
		p.cancel("Cancelled.");
		process.exit(0);
	}
	const useLintRules = await p.confirm({
		message: "Include @bensandee/lint-rules?",
		initialValue: true
	});
	if (isCancelled(useLintRules)) {
		p.cancel("Cancelled.");
		process.exit(0);
	}
	// Default to Prettier when the repo already has a Prettier config.
	const hasExistingPrettier = detected.legacyConfigs.some((l) => l.tool === "prettier");
	const formatter = await p.select({
		message: "Formatter",
		initialValue: hasExistingPrettier ? "prettier" : "oxfmt",
		options: [{
			value: "oxfmt",
			label: "oxfmt",
			hint: "fast, Rust-based"
		}, {
			value: "prettier",
			label: "Prettier"
		}]
	});
	if (isCancelled(formatter)) {
		p.cancel("Cancelled.");
		process.exit(0);
	}
	// Starter test only suggested for brand-new projects.
	const setupVitest = await p.confirm({
		message: "Set up vitest with a starter test?",
		initialValue: !isExisting
	});
	if (isCancelled(setupVitest)) {
		p.cancel("Cancelled.");
		process.exit(0);
	}
	const ci = await p.select({
		message: "CI workflow",
		options: [
			{
				value: "forgejo",
				label: "Forgejo Actions"
			},
			{
				value: "github",
				label: "GitHub Actions"
			},
			{
				value: "none",
				label: "None"
			}
		]
	});
	if (isCancelled(ci)) {
		p.cancel("Cancelled.");
		process.exit(0);
	}
	// Renovate is only an opt-out question on GitHub; elsewhere it stays on.
	let setupRenovate = true;
	if (ci === "github") {
		const renovateAnswer = await p.confirm({
			message: "Set up Renovate for automated dependency updates?",
			initialValue: true
		});
		if (isCancelled(renovateAnswer)) {
			p.cancel("Cancelled.");
			process.exit(0);
		}
		setupRenovate = renovateAnswer;
	}
	const releaseStrategy = await p.select({
		message: "Release management",
		initialValue: "none",
		options: [
			{
				value: "none",
				label: "None"
			},
			{
				value: "release-it",
				label: "release-it",
				hint: "interactive, conventional commits"
			},
			{
				value: "changesets",
				label: "Changesets",
				hint: "PR-based versioning"
			},
			{
				value: "commit-and-tag-version",
				label: "commit-and-tag-version",
				hint: "conventional commits, automatic CHANGELOG"
			}
		]
	});
	if (isCancelled(releaseStrategy)) {
		p.cancel("Cancelled.");
		process.exit(0);
	}
	let projectType = "default";
	let detectPackageTypes = false;
	if (structure === "monorepo") {
		// Monorepo: show per-package type detection and ask to apply it.
		const packages = getMonorepoPackages(targetDir);
		if (packages.length > 0) {
			const detections = packages.map((pkg) => {
				const type = detectProjectType(pkg.dir);
				return ` ${pkg.name} → ${type}`;
			});
			p.note(detections.join("\n"), "Detected package types");
			const applyDetected = await p.confirm({
				message: "Apply detected tsconfig bases to packages?",
				initialValue: true
			});
			if (isCancelled(applyDetected)) {
				p.cancel("Cancelled.");
				process.exit(0);
			}
			detectPackageTypes = applyDetected;
		}
	} else {
		// Single repo: ask for the tsconfig base to extend.
		const projectTypeAnswer = await p.select({
			message: "Project type",
			initialValue: "default",
			options: [
				{
					value: "default",
					label: "Default",
					hint: "strictest base, no runtime assumptions"
				},
				{
					value: "node",
					label: "Node.js",
					hint: "adds types: [\"node\"]"
				},
				{
					value: "react",
					label: "React",
					hint: "browser app with JSX + DOM types"
				},
				{
					value: "library",
					label: "Library",
					hint: "publishable package (ES2022 target)"
				}
			]
		});
		if (isCancelled(projectTypeAnswer)) {
			p.cancel("Cancelled.");
			process.exit(0);
		}
		projectType = projectTypeAnswer;
	}
	p.outro("Configuration complete!");
	return {
		name,
		isNew: !isExisting,
		structure,
		useLintRules,
		formatter,
		setupVitest,
		ci,
		setupRenovate,
		releaseStrategy,
		projectType,
		detectPackageTypes,
		targetDir
	};
}
375
/** Build a ProjectConfig from CLI flags for non-interactive mode. */
function buildDefaultConfig(targetDir, flags) {
	const existingPkg = readPackageJson(targetDir);
	const detected = detectProject(targetDir);
	// Keep whichever release tool is already configured; otherwise none.
	let releaseStrategy = "none";
	if (detected.hasReleaseItConfig) releaseStrategy = "release-it";
	else if (detected.hasCommitAndTagVersionConfig) releaseStrategy = "commit-and-tag-version";
	else if (detected.hasChangesetsConfig) releaseStrategy = "changesets";
	const keepsPrettier = detected.legacyConfigs.some((legacy) => legacy.tool === "prettier");
	return {
		name: existingPkg?.name ?? path.basename(targetDir),
		isNew: !detected.hasPackageJson,
		structure: detected.hasPnpmWorkspace ? "monorepo" : "single",
		useLintRules: flags.lintRules ?? true,
		formatter: keepsPrettier ? "prettier" : "oxfmt",
		setupVitest: !detected.hasVitestConfig,
		ci: flags.noCi ? "none" : DEFAULT_CI,
		setupRenovate: true,
		releaseStrategy,
		projectType: "default",
		detectPackageTypes: true,
		targetDir
	};
}
394
+
395
+ //#endregion
396
//#region src/utils/fs.ts
/** Check whether a file exists at the given path. */
function fileExists(targetDir, relativePath) {
	const fullPath = path.join(targetDir, relativePath);
	return existsSync(fullPath);
}
401
/** Read a file from the target directory. Returns undefined if not found. */
function readFile(targetDir, relativePath) {
	const fullPath = path.join(targetDir, relativePath);
	return existsSync(fullPath) ? readFileSync(fullPath, "utf-8") : void 0;
}
407
/** Write a file to the target directory, creating parent directories as needed. */
function writeFile(targetDir, relativePath, content) {
	const destination = path.join(targetDir, relativePath);
	const parentDir = path.dirname(destination);
	mkdirSync(parentDir, { recursive: true });
	writeFileSync(destination, content);
}
413
/**
 * Create a GeneratorContext from a ProjectConfig and a conflict resolution handler.
 */
function createContext(config, confirmOverwrite) {
	const { targetDir } = config;
	const pkgRaw = readFile(targetDir, "package.json");
	const packageJson = pkgRaw ? parsePackageJson(pkgRaw) : void 0;
	return {
		config,
		targetDir,
		packageJson,
		exists: (rel) => fileExists(targetDir, rel),
		read: (rel) => readFile(targetDir, rel),
		write: (rel, content) => writeFile(targetDir, rel, content),
		confirmOverwrite
	};
}
428
+
429
+ //#endregion
430
//#region src/generators/package-json.ts
/** npm scripts written into a single-repo package.json (only keys the user lacks are added). */
const STANDARD_SCRIPTS_SINGLE = {
	build: "tsdown",
	dev: "tsdown --watch",
	typecheck: "tsc --noEmit",
	test: "vitest run",
	lint: "oxlint",
	knip: "knip",
	check: "pnpm typecheck && pnpm build && pnpm lint && pnpm knip",
	prepare: "husky"
};
/** npm scripts for a monorepo root: fan out to workspaces via `pnpm -r`. */
const STANDARD_SCRIPTS_MONOREPO = {
	build: "pnpm -r build",
	test: "pnpm -r test",
	typecheck: "pnpm -r --parallel run typecheck",
	lint: "oxlint",
	knip: "knip",
	check: "pnpm typecheck && pnpm build && pnpm lint && pnpm knip",
	prepare: "husky"
};
/** DevDeps that belong in every project (single repo) or per-package (monorepo). */
const PER_PACKAGE_DEV_DEPS = {
	"@types/node": "25.3.2",
	tsdown: "0.20.3",
	typescript: "5.9.3",
	vitest: "4.0.18"
};
/** DevDeps that belong at the root regardless of structure. */
const ROOT_DEV_DEPS = {
	husky: "9.1.7",
	knip: "5.85.0",
	"lint-staged": "16.3.1",
	oxlint: "1.50.0"
};
464
/**
 * Check if a package name is available as a workspace dependency.
 * Looks for a matching package in the packages/ directory.
 */
function isWorkspacePackage(ctx, packageName) {
	// Only monorepos can satisfy deps via workspace protocol.
	if (ctx.config.structure !== "monorepo") return false;
	const packagesDir = path.join(ctx.targetDir, "packages");
	if (!existsSync(packagesDir)) return false;
	let entries;
	try {
		entries = readdirSync(packagesDir, { withFileTypes: true });
	} catch (_error) {
		return false;
	}
	for (const entry of entries) {
		if (!entry.isDirectory()) continue;
		const pkgJsonPath = path.join(packagesDir, entry.name, "package.json");
		if (!existsSync(pkgJsonPath)) continue;
		try {
			const manifest = parsePackageJson(readFileSync(pkgJsonPath, "utf-8"));
			if (manifest?.name === packageName) return true;
		} catch (_error) {
			// Unreadable manifest: treat as non-matching and keep scanning.
		}
	}
	return false;
}
484
/** Deps that should not be blindly bumped to latest (version-sensitive). */
const UPDATE_EXCLUDE = new Set(["@types/node"]);
/** Add release-strategy-specific devDeps to a deps record (mutates `deps`). */
function addReleaseDeps(deps, config) {
	const strategy = config.releaseStrategy;
	if (strategy === "release-it") {
		deps["release-it"] = "18.1.2";
		// Monorepos also need the bumper plugin to sync package versions.
		if (config.structure === "monorepo") deps["@release-it/bumper"] = "7.0.2";
		if (config.ci === "forgejo") deps["@bensandee/release-it-forgejo"] = "0.1.1";
	} else if (strategy === "commit-and-tag-version") {
		deps["commit-and-tag-version"] = "12.5.0";
	} else if (strategy === "changesets") {
		deps["@changesets/cli"] = "2.29.4";
	}
}
502
/** Returns the list of pinned devDependency names that the tool would add for a given config. */
function getAddedDevDepNames(config) {
	const deps = { ...ROOT_DEV_DEPS };
	// Single repos carry the per-package deps at the root too.
	if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
	deps["@bensandee/config"] = "latest";
	deps["@bensandee/tooling"] = "latest";
	switch (config.formatter) {
		case "oxfmt":
			deps["oxfmt"] = "0.35.0";
			break;
		case "prettier":
			deps["prettier"] = "3.8.1";
			break;
	}
	addReleaseDeps(deps, config);
	// Drop version-sensitive names that must not be bulk-updated.
	const names = [];
	for (const name of Object.keys(deps)) {
		if (!UPDATE_EXCLUDE.has(name)) names.push(name);
	}
	return names;
}
513
/**
 * Create or update package.json for the target project.
 *
 * New projects get a full manifest (standard scripts, pinned devDeps,
 * engines, packageManager). Existing manifests are merged additively:
 * only missing scripts/devDeps are added and existing values are never
 * overwritten, except `type` which is forced to "module".
 * Returns a GeneratorResult describing what happened.
 */
async function generatePackageJson(ctx) {
	const filePath = "package.json";
	const existing = ctx.read(filePath);
	const isMonorepo = ctx.config.structure === "monorepo";
	const scripts = isMonorepo ? STANDARD_SCRIPTS_MONOREPO : STANDARD_SCRIPTS_SINGLE;
	const formatScript = ctx.config.formatter === "oxfmt" ? "oxfmt ." : "prettier --write .";
	const allScripts = {
		...scripts,
		format: formatScript
	};
	if (ctx.config.releaseStrategy === "changesets") allScripts["changeset"] = "changeset";
	if (ctx.config.releaseStrategy !== "none") allScripts["trigger-release"] = "pnpm exec tooling release:trigger";
	// Assemble devDeps; workspace-local packages use the workspace protocol.
	const devDeps = { ...ROOT_DEV_DEPS };
	if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
	devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "latest";
	devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "latest";
	if (ctx.config.useLintRules) devDeps["@bensandee/lint-rules"] = isWorkspacePackage(ctx, "@bensandee/lint-rules") ? "workspace:*" : "latest";
	if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
	if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
	addReleaseDeps(devDeps, ctx.config);
	if (existing) {
		// Merge path: never clobber user-defined scripts or dep versions.
		const pkg = parsePackageJson(existing);
		if (!pkg) return {
			filePath,
			action: "skipped",
			description: "Could not parse existing package.json"
		};
		const changes = [];
		if (pkg.type !== "module") {
			pkg.type = "module";
			changes.push("set type: \"module\"");
		}
		const existingScripts = pkg.scripts ?? {};
		for (const [key, value] of Object.entries(allScripts)) if (!(key in existingScripts)) {
			existingScripts[key] = value;
			changes.push(`added script: ${key}`);
		}
		pkg.scripts = existingScripts;
		const existingDevDeps = pkg.devDependencies ?? {};
		for (const [key, value] of Object.entries(devDeps)) if (!(key in existingDevDeps)) {
			existingDevDeps[key] = value;
			changes.push(`added devDependency: ${key}`);
		}
		pkg.devDependencies = existingDevDeps;
		if (!pkg["engines"]) {
			pkg["engines"] = { node: ">=24.13.0" };
			changes.push("set engines.node >= 24.13.0");
		}
		// Only touch the file when something actually changed.
		if (changes.length === 0) return {
			filePath,
			action: "skipped",
			description: "Already up to spec"
		};
		ctx.write(filePath, JSON.stringify(pkg, null, 2) + "\n");
		return {
			filePath,
			action: "updated",
			description: changes.join(", ")
		};
	}
	// Fresh project: write a complete manifest from scratch.
	const pkg = {
		name: ctx.config.name,
		version: "0.1.0",
		private: true,
		type: "module",
		scripts: allScripts,
		devDependencies: devDeps,
		engines: { node: ">=24.13.0" },
		packageManager: "pnpm@10.29.3"
	};
	ctx.write(filePath, JSON.stringify(pkg, null, 2) + "\n");
	return {
		filePath,
		action: "created",
		description: "Generated package.json"
	};
}
590
+
591
+ //#endregion
592
//#region src/generators/migrate-prompt.ts
/**
 * Generate a context-aware AI migration prompt based on what the CLI did.
 * This prompt can be pasted into Claude Code (or similar) to finish the migration.
 *
 * @param results GeneratorResult[] from the generators that ran.
 * @param config The resolved ProjectConfig.
 * @param detected Output of detectProject for the target directory.
 * @returns The full markdown document as a single string.
 */
function generateMigratePrompt(results, config, detected) {
	const sections = [];
	sections.push("# Migration Prompt");
	sections.push("");
	sections.push("The following prompt was generated by `@bensandee/tooling repo:init`. Paste it into Claude Code or another AI assistant to finish migrating this repository.");
	sections.push("");
	sections.push("> **Tip:** Before starting, run `/init` in Claude Code to generate a `CLAUDE.md` that gives the AI a complete picture of your repository's structure, conventions, and build commands.");
	sections.push("");
	sections.push("## What was changed");
	sections.push("");
	// Bucket the generator results by action for the summary lists.
	const created = results.filter((r) => r.action === "created");
	const updated = results.filter((r) => r.action === "updated");
	const skipped = results.filter((r) => r.action === "skipped");
	if (created.length > 0) {
		sections.push("**Created:**");
		for (const r of created) sections.push(`- \`${r.filePath}\` — ${r.description}`);
		sections.push("");
	}
	if (updated.length > 0) {
		sections.push("**Updated:**");
		for (const r of updated) sections.push(`- \`${r.filePath}\` — ${r.description}`);
		sections.push("");
	}
	if (skipped.length > 0) {
		sections.push("**Skipped (review these):**");
		for (const r of skipped) sections.push(`- \`${r.filePath}\` — ${r.description}`);
		sections.push("");
	}
	sections.push("## Migration tasks");
	sections.push("");
	// Prettier is kept (not "legacy") when the user chose it as formatter.
	const legacyToRemove = detected.legacyConfigs.filter((legacy) => !(legacy.tool === "prettier" && config.formatter === "prettier"));
	if (legacyToRemove.length > 0) {
		sections.push("### Remove legacy tooling");
		sections.push("");
		for (const legacy of legacyToRemove) {
			// Map each legacy tool to its replacement in the new toolchain.
			const replacement = {
				eslint: "oxlint",
				prettier: "oxfmt",
				jest: "vitest",
				webpack: "tsdown",
				rollup: "tsdown"
			}[legacy.tool];
			sections.push(`- Remove ${legacy.tool} config files (${legacy.files.map((f) => `\`${f}\``).join(", ")}). This project now uses **${replacement}**.`);
			sections.push(` - Uninstall ${legacy.tool}-related packages from devDependencies`);
			if (legacy.tool === "eslint") sections.push(" - Migrate any custom ESLint rules that don't have oxlint equivalents");
			if (legacy.tool === "jest") sections.push(" - Migrate any jest-specific test utilities (jest.mock, jest.fn) to vitest equivalents (vi.mock, vi.fn)");
		}
		sections.push("");
	}
	const oxlintWasSkipped = results.find((r) => r.filePath === "oxlint.config.ts")?.action === "skipped";
	if (detected.hasLegacyOxlintJson) {
		// Old JSON-style oxlint config present: ask the AI to port it over.
		sections.push("### Migrate .oxlintrc.json to oxlint.config.ts");
		sections.push("");
		sections.push("A new `oxlint.config.ts` has been generated using `defineConfig` from the `oxlint` package. The existing `.oxlintrc.json` needs to be migrated:");
		sections.push("");
		sections.push("1. Read `.oxlintrc.json` and compare its `rules` against the rules provided by `@bensandee/config/oxlint/recommended` (check `node_modules/@bensandee/config`). Most standard rules are already included in the recommended config.");
		sections.push("2. If there are any custom rules, overrides, settings, or `jsPlugins` not covered by the recommended config, add them to `oxlint.config.ts` alongside the `extends`.");
		sections.push("3. Delete `.oxlintrc.json`.");
		sections.push("4. Run `pnpm lint` to verify the new config works correctly.");
		sections.push("");
	} else if (oxlintWasSkipped && detected.hasOxlintConfig) {
		// Config already existed and was kept: only ask for verification.
		sections.push("### Verify oxlint.config.ts includes recommended rules");
		sections.push("");
		sections.push("The existing `oxlint.config.ts` was kept as-is. Verify that it extends the recommended config from `@bensandee/config/oxlint`:");
		sections.push("");
		sections.push("1. Open `oxlint.config.ts` and check that it imports and extends `@bensandee/config/oxlint/recommended`.");
		sections.push("2. The expected pattern is:");
		sections.push(" ```ts");
		sections.push(" import recommended from \"@bensandee/config/oxlint/recommended\";");
		sections.push(" import { defineConfig } from \"oxlint\";");
		sections.push("");
		sections.push(" export default defineConfig({ extends: [recommended] });");
		sections.push(" ```");
		sections.push("3. If it uses a different pattern, update it to extend the recommended config while preserving any project-specific customizations.");
		sections.push("4. Run `pnpm lint` to verify the config works correctly.");
		sections.push("");
	}
	if (config.structure === "monorepo" && !detected.hasPnpmWorkspace) {
		// User chose monorepo but the repo is currently single-package.
		sections.push("### Migrate to monorepo structure");
		sections.push("");
		sections.push("This project was converted from a single repo to a monorepo. Complete the migration:");
		sections.push("");
		sections.push("1. Move existing source into `packages/<name>/` (using the existing package name)");
		sections.push("2. Split the root `package.json` into a root workspace manifest + package-level `package.json`");
		sections.push("3. Move the existing `tsconfig.json` into the package and update the root tsconfig with project references");
		sections.push("4. Create a package-level `tsdown.config.ts` in the new package");
		sections.push("5. Update any import paths or build scripts affected by the move");
		sections.push("");
	}
	// Filter out noise entries ("ci" pseudo-result and monorepo no-ops).
	const skippedConfigs = skipped.filter((r) => r.filePath !== "ci" && r.description !== "Not a monorepo");
	if (skippedConfigs.length > 0) {
		sections.push("### Review skipped files");
		sections.push("");
		sections.push("The following files were left unchanged. Review them for compatibility:");
		sections.push("");
		for (const r of skippedConfigs) sections.push(`- \`${r.filePath}\` — ${r.description}`);
		sections.push("");
	}
	if (results.some((r) => r.filePath === "test/example.test.ts" && r.action === "created")) {
		sections.push("### Generate tests");
		sections.push("");
		sections.push("A starter test was created at `test/example.test.ts`. Now:");
		sections.push("");
		sections.push("1. Review the existing source code in `src/`");
		sections.push("2. Create additional test files following the starter test's patterns (import style, describe/it structure)");
		sections.push("3. Focus on edge cases and core business logic");
		sections.push("4. Aim for meaningful coverage of exported functions and key code paths");
		sections.push("");
	}
	sections.push("## Ground rules");
	sections.push("");
	sections.push("When resolving errors from the checklist below, prefer fixing the root cause over suppressing the issue. For example:");
	sections.push("");
	sections.push("- **Lint errors**: fix the code rather than adding disable comments or rule exceptions");
	sections.push("- **Test failures**: update the test or fix the underlying bug rather than skipping or deleting the test");
	sections.push("- **Knip findings**: remove genuinely unused code/exports/dependencies rather than adding ignores to `knip.json`");
	sections.push("- **Type errors**: add proper types rather than using `any` or `@ts-expect-error`");
	sections.push("");
	sections.push("Only suppress an issue if there is a clear, documented reason why the fix is not feasible (e.g. a third-party type mismatch). Leave a comment explaining why.");
	sections.push("");
	sections.push("## Verification checklist");
	sections.push("");
	sections.push("Run each of these commands and fix any errors before moving on:");
	sections.push("");
	sections.push("1. `pnpm install`");
	const updateCmd = `pnpm update --latest ${getAddedDevDepNames(config).join(" ")}`;
	sections.push(`2. \`${updateCmd}\` — bump added dependencies to their latest versions`);
	sections.push("3. `pnpm typecheck` — fix any type errors");
	sections.push("4. `pnpm build` — fix any build errors");
	sections.push("5. `pnpm test` — fix any test failures");
	sections.push("6. `pnpm lint` — fix the code to satisfy lint rules");
	sections.push("7. `pnpm knip` — remove unused exports, dependencies, and dead code");
	sections.push("8. `pnpm format` — fix any formatting issues");
	sections.push("");
	return sections.join("\n");
}
733
+
734
+ //#endregion
735
//#region src/generators/tsconfig.ts
/**
 * Create or update tsconfig.json.
 *
 * Monorepos delegate to the root + (optionally) per-package generators.
 * Single repos get an `extends` on the @bensandee/config base matching the
 * detected/chosen project type; existing configs are merged additively
 * (extends only set when absent, include entries appended when missing).
 * Returns an array of GeneratorResult.
 */
async function generateTsconfig(ctx) {
	const filePath = "tsconfig.json";
	const existing = ctx.read(filePath);
	if (ctx.config.structure === "monorepo") return [generateMonorepoRootTsconfig(ctx, existing), ...ctx.config.detectPackageTypes ? generateMonorepoPackageTsconfigs(ctx) : []];
	const extendsValue = `@bensandee/config/tsconfig/${ctx.config.projectType}`;
	const config = {
		extends: extendsValue,
		include: ["src"],
		exclude: ["node_modules", "dist"]
	};
	if (existing) {
		// Merge path: never replace an existing extends value.
		const parsed = parseTsconfig(existing);
		const changes = [];
		if (!parsed.extends) {
			parsed.extends = extendsValue;
			changes.push(`added extends: ${extendsValue}`);
		}
		const existingInclude = parsed.include ?? [];
		for (const entry of config.include) if (!existingInclude.includes(entry)) {
			existingInclude.push(entry);
			changes.push(`added "${entry}" to include`);
		}
		parsed.include = existingInclude;
		if (changes.length === 0) return [{
			filePath,
			action: "skipped",
			description: "Already up to spec"
		}];
		ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
		return [{
			filePath,
			action: "updated",
			description: changes.join(", ")
		}];
	}
	ctx.write(filePath, JSON.stringify(config, null, 2) + "\n");
	return [{
		filePath,
		action: "created",
		description: `Generated tsconfig.json with @bensandee/config/tsconfig/${ctx.config.projectType}`
	}];
}
778
/**
 * Generate or update the monorepo root tsconfig.json.
 *
 * The root config is a TypeScript "solution" file: empty `files` plus a
 * `references` array pointing at the workspace packages. An existing config
 * that already has `references` is left untouched.
 */
function generateMonorepoRootTsconfig(ctx, existing) {
  const filePath = "tsconfig.json";
  if (!existing) {
    const solution = { files: [], references: [] };
    ctx.write(filePath, JSON.stringify(solution, null, 2) + "\n");
    return {
      filePath,
      action: "created",
      description: "Generated monorepo root tsconfig.json with project references"
    };
  }
  const parsed = parseTsconfig(existing);
  if (parsed.references) {
    return {
      filePath,
      action: "skipped",
      description: "Already has project references"
    };
  }
  // Convert an existing non-solution config into solution style in place.
  parsed.files = [];
  parsed.references = [];
  ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
  return {
    filePath,
    action: "updated",
    description: "Added project references structure for monorepo"
  };
}
808
/**
 * Generate or update a tsconfig.json for every workspace package.
 *
 * Each package gets `extends` pointed at the shared preset for its detected
 * project type; existing configs are patched in place (extends corrected,
 * "src" ensured in `include`). Returns one result record per package.
 */
function generateMonorepoPackageTsconfigs(ctx) {
  const results = [];
  for (const pkg of getMonorepoPackages(ctx.targetDir)) {
    const relDir = path.relative(ctx.targetDir, pkg.dir);
    const filePath = path.join(relDir, "tsconfig.json");
    const projectType = detectProjectType(pkg.dir);
    const extendsValue = `@bensandee/config/tsconfig/${projectType}`;
    const existing = ctx.read(filePath);
    if (!existing) {
      const fresh = {
        extends: extendsValue,
        include: ["src"],
        exclude: ["node_modules", "dist"]
      };
      ctx.write(filePath, JSON.stringify(fresh, null, 2) + "\n");
      results.push({
        filePath,
        action: "created",
        description: `Generated tsconfig.json with @bensandee/config/tsconfig/${projectType} (detected ${projectType})`
      });
      continue;
    }
    const parsed = parseTsconfig(existing);
    const changes = [];
    if (parsed.extends !== extendsValue) {
      // Unlike the root config, package configs are forced onto the preset.
      const previous = parsed.extends;
      parsed.extends = extendsValue;
      changes.push(previous ? `changed extends: ${String(previous)} → ${extendsValue}` : `added extends: ${extendsValue}`);
    }
    const include = parsed.include ?? [];
    if (!include.includes("src")) {
      include.push("src");
      changes.push(`added "src" to include`);
    }
    parsed.include = include;
    if (changes.length === 0) {
      results.push({
        filePath,
        action: "skipped",
        description: `Already up to spec (${projectType})`
      });
      continue;
    }
    ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
    results.push({
      filePath,
      action: "updated",
      description: changes.join(", ")
    });
  }
  return results;
}
861
+
862
+ //#endregion
863
+ //#region src/generators/vitest.ts
864
// Default vitest config written for new projects.
const VITEST_CONFIG = `import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    include: ["test/**/*.test.ts"],
  },
});
`;
// Minimal passing test so `pnpm test` succeeds immediately after setup.
const STARTER_TEST = `import { describe, it, expect } from "vitest";

describe("example", () => {
  it("should pass a basic assertion", () => {
    expect(1 + 1).toBe(2);
  });
});
`;
/**
 * Set up vitest: write vitest.config.ts (prompting before overwriting an
 * existing one) and, when no test/ directory exists yet, a starter test.
 * Skipped entirely when not requested or in a monorepo root (each package
 * owns its own vitest config). Returns an array of result records.
 */
async function generateVitest(ctx) {
  const configPath = "vitest.config.ts";
  if (!ctx.config.setupVitest) {
    return [{
      filePath: configPath,
      action: "skipped",
      description: "Vitest setup not requested"
    }];
  }
  if (ctx.config.structure === "monorepo") {
    return [{
      filePath: configPath,
      action: "skipped",
      description: "Monorepo: vitest config belongs in individual packages"
    }];
  }
  const results = [];
  if (!ctx.exists(configPath)) {
    ctx.write(configPath, VITEST_CONFIG);
    results.push({
      filePath: configPath,
      action: "created",
      description: "Generated vitest.config.ts"
    });
  } else if (await ctx.confirmOverwrite(configPath) === "skip") {
    results.push({
      filePath: configPath,
      action: "skipped",
      description: "Existing config preserved"
    });
  } else {
    ctx.write(configPath, VITEST_CONFIG);
    results.push({
      filePath: configPath,
      action: "updated",
      description: "Replaced vitest config"
    });
  }
  // Only seed an example test when there is no test directory at all.
  if (!ctx.exists("test")) {
    const testPath = "test/example.test.ts";
    ctx.write(testPath, STARTER_TEST);
    results.push({
      filePath: testPath,
      action: "created",
      description: "Generated starter test file"
    });
  }
  return results;
}
925
+
926
+ //#endregion
927
+ //#region src/generators/oxlint.ts
928
// Oxlint config used when the custom @bensandee/lint-rules plugin is wanted.
const CONFIG_WITH_LINT_RULES = `import recommended from "@bensandee/config/oxlint/recommended";
import { defineConfig } from "oxlint";

export default defineConfig({
  extends: [recommended],
});
`;
// Oxlint config using only the shared preset rules (no custom plugin).
const CONFIG_PRESET_ONLY = `import { presetRules } from "@bensandee/config/oxlint";
import { defineConfig } from "oxlint";

export default defineConfig({
  rules: presetRules,
});
`;
/**
 * Write oxlint.config.ts, choosing the variant based on config.useLintRules.
 * An identical existing file is left alone; a different one triggers an
 * overwrite prompt. Returns a single result record.
 */
async function generateOxlint(ctx) {
  const filePath = "oxlint.config.ts";
  const desired = ctx.config.useLintRules ? CONFIG_WITH_LINT_RULES : CONFIG_PRESET_ONLY;
  const current = ctx.read(filePath);
  if (current) {
    if (current === desired) {
      return {
        filePath,
        action: "skipped",
        description: "Already configured"
      };
    }
    const choice = await ctx.confirmOverwrite(filePath);
    if (choice === "skip") {
      return {
        filePath,
        action: "skipped",
        description: "User chose to keep existing"
      };
    }
  }
  ctx.write(filePath, desired);
  return {
    filePath,
    action: current ? "updated" : "created",
    description: "Generated oxlint.config.ts"
  };
}
965
+
966
+ //#endregion
967
+ //#region src/generators/formatter.ts
968
// oxfmt needs only an empty marker config; defaults do the rest.
const OXFMT_CONFIG = `{}\n`;
// House prettier settings for projects that opt out of oxfmt.
const PRETTIER_CONFIG = `{
  "semi": true,
  "singleQuote": false,
  "trailingComma": "all",
  "tabWidth": 2,
  "printWidth": 120
}
`;
/**
 * Dispatch to the configured formatter's generator (oxfmt or prettier).
 * Returns a single result record.
 */
async function generateFormatter(ctx) {
  return ctx.config.formatter === "oxfmt" ? generateOxfmt(ctx) : generatePrettier(ctx);
}
/**
 * Write .oxfmtrc.json, prompting before replacing an existing file.
 */
async function generateOxfmt(ctx) {
  const filePath = ".oxfmtrc.json";
  const existed = ctx.exists(filePath);
  if (existed && await ctx.confirmOverwrite(filePath) === "skip") {
    return {
      filePath,
      action: "skipped",
      description: "Existing oxfmt config preserved"
    };
  }
  ctx.write(filePath, OXFMT_CONFIG);
  return {
    filePath,
    action: existed ? "updated" : "created",
    description: "Generated .oxfmtrc.json"
  };
}
/**
 * Write .prettierrc only when absent; an existing config always wins.
 */
async function generatePrettier(ctx) {
  const filePath = ".prettierrc";
  if (ctx.exists(filePath)) {
    return {
      filePath,
      action: "skipped",
      description: "Existing prettier config preserved"
    };
  }
  ctx.write(filePath, PRETTIER_CONFIG);
  return {
    filePath,
    action: "created",
    description: "Generated .prettierrc"
  };
}
1012
+
1013
+ //#endregion
1014
+ //#region src/generators/tsdown.ts
1015
// Default tsdown build config for single-package projects.
const TSDOWN_CONFIG = `import { defineConfig } from "tsdown";

export default defineConfig({
  entry: ["src/index.ts"],
});
`;
/**
 * Write tsdown.config.ts for single-package projects, prompting before
 * replacing an existing file. Skipped in monorepos, where each package owns
 * its own build config. Returns a single result record.
 *
 * Fix: previously this always reported action "created", even when an
 * existing file was overwritten after the prompt. It now reports "updated"
 * in that case, matching generateOxfmt/generateOxlint in this file.
 */
async function generateTsdown(ctx) {
  const filePath = "tsdown.config.ts";
  if (ctx.config.structure === "monorepo") return {
    filePath,
    action: "skipped",
    description: "Monorepo: tsdown config belongs in individual packages"
  };
  const existed = ctx.exists(filePath);
  if (existed) {
    if (await ctx.confirmOverwrite(filePath) === "skip") return {
      filePath,
      action: "skipped",
      description: "Existing tsdown config preserved"
    };
  }
  ctx.write(filePath, TSDOWN_CONFIG);
  return {
    filePath,
    action: existed ? "updated" : "created",
    description: "Generated tsdown.config.ts"
  };
}
1042
+
1043
+ //#endregion
1044
+ //#region src/generators/gitignore.ts
1045
// Entries every repo managed by this tool should ignore. `!.env.example`
// re-includes the template env file excluded by `.env.*`.
const STANDARD_ENTRIES = [
  "node_modules/",
  "dist/",
  "*.tsbuildinfo",
  ".env",
  ".env.*",
  "!.env.example",
  ".tooling-migrate.md"
];
/**
 * Create .gitignore, or append any standard entries missing from an existing
 * one under an "Added by" banner. Returns a single result record.
 */
async function generateGitignore(ctx) {
  const filePath = ".gitignore";
  const current = ctx.read(filePath);
  if (!current) {
    ctx.write(filePath, STANDARD_ENTRIES.join("\n") + "\n");
    return {
      filePath,
      action: "created",
      description: "Generated .gitignore"
    };
  }
  // Compare trimmed non-empty lines so whitespace variants still match.
  const present = new Set();
  for (const rawLine of current.split("\n")) {
    const trimmed = rawLine.trim();
    if (trimmed.length > 0) present.add(trimmed);
  }
  const missing = STANDARD_ENTRIES.filter((entry) => !present.has(entry));
  if (missing.length === 0) {
    return {
      filePath,
      action: "skipped",
      description: "Already has all standard entries"
    };
  }
  const appended = current.trimEnd() + "\n\n# Added by @bensandee/tooling\n" + missing.join("\n") + "\n";
  ctx.write(filePath, appended);
  return {
    filePath,
    action: "updated",
    description: `Appended ${String(missing.length)} missing entries`
  };
}
1080
+
1081
+ //#endregion
1082
+ //#region src/generators/ci.ts
1083
/**
 * True when the project's package.json declares an engines.node range
 * (used to decide between node-version-file and a pinned node-version in CI).
 */
function hasEnginesNode$1(ctx) {
  const nodeRange = ctx.packageJson?.["engines"]?.["node"];
  return typeof nodeRange === "string";
}
1086
/**
 * Build the CI check workflow YAML (shared by GitHub and Forgejo Actions —
 * both consume the same schema).
 *
 * @param isMonorepo - switches typecheck/build/test to recursive `pnpm -r` runs
 * @param nodeVersionYaml - a single `node-version*:` line spliced under
 *   setup-node's `with:` block (see generateCi for how it is chosen)
 * @returns the complete workflow file content
 */
// NOTE(review): leading whitespace inside this template is YAML-significant;
// reconstructed here from a whitespace-mangled source — verify against the
// published artifact.
function ciWorkflow(isMonorepo, nodeVersionYaml) {
  return `name: CI
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: pnpm/action-setup@v4
      - uses: actions/setup-node@v4
        with:
          ${nodeVersionYaml}
          cache: pnpm
      - run: pnpm install --frozen-lockfile
      - run: ${isMonorepo ? "pnpm -r --parallel run typecheck" : "pnpm typecheck"}
      - run: pnpm lint
      - run: ${isMonorepo ? "pnpm -r build" : "pnpm build"}
      - run: ${isMonorepo ? "pnpm -r test" : "pnpm test"}
      - run: pnpm format --check
      - run: pnpm knip
`;
}
1113
/**
 * Write the CI check workflow for the configured platform (GitHub or
 * Forgejo), prompting before replacing an existing file. Uses the
 * package.json engines.node field for the node version when present,
 * otherwise pins node 24. Returns a single result record.
 *
 * Fix: previously this always reported action "created", even when an
 * existing workflow was overwritten after the prompt; it now reports
 * "updated" in that case, consistent with the other generators.
 */
async function generateCi(ctx) {
  if (ctx.config.ci === "none") return {
    filePath: "ci",
    action: "skipped",
    description: "CI workflow not requested"
  };
  const isMonorepo = ctx.config.structure === "monorepo";
  const isGitHub = ctx.config.ci === "github";
  const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
  const filePath = isGitHub ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
  const content = ciWorkflow(isMonorepo, nodeVersionYaml);
  const existed = ctx.exists(filePath);
  if (existed) {
    if (await ctx.confirmOverwrite(filePath) === "skip") return {
      filePath,
      action: "skipped",
      description: "Existing CI workflow preserved"
    };
  }
  ctx.write(filePath, content);
  return {
    filePath,
    action: existed ? "updated" : "created",
    description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions CI workflow`
  };
}
1138
+
1139
+ //#endregion
1140
+ //#region src/generators/knip.ts
1141
// knip config for a single-package repo.
const KNIP_CONFIG_SINGLE = {
  $schema: "https://unpkg.com/knip@latest/schema.json",
  entry: ["src/index.ts"],
  project: ["src/**/*.ts"],
  ignore: ["dist/**"]
};
// knip config for a pnpm monorepo: root workspace is empty, packages/* get
// the standard entry/project globs.
const KNIP_CONFIG_MONOREPO = {
  $schema: "https://unpkg.com/knip@latest/schema.json",
  workspaces: {
    ".": {
      entry: [],
      project: []
    },
    "packages/*": {
      entry: ["src/index.ts", "src/bin.ts"],
      project: ["src/**/*.ts"],
      ignore: ["dist/**"]
    }
  }
};
/**
 * Create knip.json, or patch an existing one with whichever standard keys
 * it is missing (workspaces for monorepos; entry/project otherwise).
 * Returns a single result record.
 */
async function generateKnip(ctx) {
  const filePath = "knip.json";
  const isMonorepo = ctx.config.structure === "monorepo";
  const existing = ctx.read(filePath);
  if (!existing) {
    const fresh = isMonorepo ? KNIP_CONFIG_MONOREPO : KNIP_CONFIG_SINGLE;
    ctx.write(filePath, JSON.stringify(fresh, null, 2) + "\n");
    return {
      filePath,
      action: "created",
      description: "Generated knip.json for dead code analysis"
    };
  }
  const parsed = parseKnipJson(existing);
  const changes = [];
  if (isMonorepo) {
    if (!parsed.workspaces) {
      parsed.workspaces = KNIP_CONFIG_MONOREPO.workspaces;
      changes.push("added monorepo workspaces config");
    }
  } else {
    if (!parsed.entry) {
      parsed.entry = KNIP_CONFIG_SINGLE.entry;
      changes.push("added entry patterns");
    }
    if (!parsed.project) {
      parsed.project = KNIP_CONFIG_SINGLE.project;
      changes.push("added project patterns");
    }
  }
  if (changes.length === 0) {
    return {
      filePath,
      action: "skipped",
      description: "Already configured"
    };
  }
  ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
  return {
    filePath,
    action: "updated",
    description: changes.join(", ")
  };
}
1202
+
1203
+ //#endregion
1204
+ //#region src/generators/renovate.ts
1205
// Renovate preset shared across all repos managed by this tool.
const SHARED_PRESET = "@bensandee/config";
/**
 * Create renovate.json extending the shared preset, or prepend the preset to
 * an existing config's `extends` list when missing. No-op unless
 * config.setupRenovate is set. Returns a single result record.
 */
async function generateRenovate(ctx) {
  const filePath = "renovate.json";
  if (!ctx.config.setupRenovate) {
    return {
      filePath,
      action: "skipped",
      description: "Renovate not requested"
    };
  }
  const existing = ctx.read(filePath);
  if (!existing) {
    const fresh = {
      $schema: "https://docs.renovatebot.com/renovate-schema.json",
      extends: [SHARED_PRESET]
    };
    ctx.write(filePath, JSON.stringify(fresh, null, 2) + "\n");
    return {
      filePath,
      action: "created",
      description: "Generated renovate.json extending shared config"
    };
  }
  const parsed = parseRenovateJson(existing);
  const extendsList = parsed.extends ?? [];
  if (extendsList.includes(SHARED_PRESET)) {
    return {
      filePath,
      action: "skipped",
      description: "Already extends shared config"
    };
  }
  // Prepend so later, more specific presets still override the shared one.
  extendsList.unshift(SHARED_PRESET);
  parsed.extends = extendsList;
  ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
  return {
    filePath,
    action: "updated",
    description: `Added extends: ${SHARED_PRESET}`
  };
}
1244
+
1245
+ //#endregion
1246
+ //#region src/generators/pnpm-workspace.ts
1247
// Standard pnpm workspace layout: all packages under packages/.
const WORKSPACE_YAML = `packages:
  - "packages/*"
`;
/**
 * Write pnpm-workspace.yaml for monorepos. No-op for single-package repos
 * and never touches an existing workspace file. Returns a result record.
 */
async function generatePnpmWorkspace(ctx) {
  const filePath = "pnpm-workspace.yaml";
  const skip = (description) => ({
    filePath,
    action: "skipped",
    description
  });
  if (ctx.config.structure !== "monorepo") return skip("Not a monorepo");
  if (ctx.exists(filePath)) return skip("pnpm-workspace.yaml already exists");
  ctx.write(filePath, WORKSPACE_YAML);
  return {
    filePath,
    action: "created",
    description: "Generated pnpm-workspace.yaml"
  };
}
1269
+
1270
+ //#endregion
1271
+ //#region src/generators/claude-settings.ts
1272
// Zod schema for .claude/settings.json: permission allow/deny lists plus
// free-form instruction strings. All fields default to empty collections so
// a minimal or partial settings file still parses.
const ClaudeSettingsSchema = z.object({
  permissions: z.object({
    allow: z.array(z.string()).default([]),
    deny: z.array(z.string()).default([])
  }).default({
    allow: [],
    deny: []
  }),
  instructions: z.array(z.string()).default([])
});
/**
 * Parse raw JSON text into validated Claude settings.
 * Returns undefined (never throws) on invalid JSON or schema mismatch, so
 * callers can treat an unreadable file as "skip".
 */
function parseClaudeSettings(raw) {
  try {
    const result = ClaudeSettingsSchema.safeParse(JSON.parse(raw));
    return result.success ? result.data : void 0;
  } catch {
    return;
  }
}
1290
/**
 * Build the default .claude/settings.json object: an allow-list of safe,
 * read-only or project-local commands, a deny-list of destructive ones, and
 * standing instructions for the assistant. Monorepos additionally allow
 * pnpm's --filter/-r forms.
 */
function buildSettings(ctx) {
  const pm = "pnpm";
  // Permission patterns follow Claude Code's Bash(<prefix> *) glob syntax.
  const allow = [
    // Dependency management
    `Bash(${pm} install)`,
    `Bash(${pm} install *)`,
    `Bash(${pm} add *)`,
    `Bash(${pm} update *)`,
    `Bash(${pm} view *)`,
    `Bash(${pm} list)`,
    `Bash(${pm} list *)`,
    // Read-only registry queries (npm is allowed here even though npx is
    // denied below — these only inspect package metadata)
    "Bash(npm view *)",
    "Bash(npm info *)",
    "Bash(npm show *)",
    // Build / dev / test scripts
    `Bash(${pm} build)`,
    `Bash(${pm} -r build)`,
    `Bash(${pm} dev)`,
    `Bash(${pm} test)`,
    `Bash(${pm} test *)`,
    `Bash(${pm} -r test)`,
    `Bash(${pm} vitest)`,
    `Bash(${pm} vitest *)`,
    `Bash(${pm} exec vitest)`,
    `Bash(${pm} exec vitest *)`,
    `Bash(${pm} exec oxfmt)`,
    `Bash(${pm} exec oxfmt *)`,
    // Type-checking and linting
    `Bash(${pm} typecheck)`,
    `Bash(${pm} -r --parallel run typecheck)`,
    `Bash(${pm} tsc *)`,
    `Bash(${pm} exec tsc *)`,
    `Bash(${pm} lint)`,
    `Bash(${pm} lint *)`,
    `Bash(${pm} format)`,
    `Bash(${pm} format *)`,
    `Bash(${pm} knip)`,
    `Bash(${pm} knip *)`,
    `Bash(${pm} check)`,
    `Bash(${pm} exec oxlint *)`,
    `Bash(${pm} exec knip *)`,
    // Read-only git inspection (no push/reset — see deny list)
    "Bash(git status *)",
    "Bash(git log *)",
    "Bash(git diff *)",
    "Bash(git branch *)",
    "Bash(git show *)",
    "Bash(git rev-parse *)",
    // Harmless filesystem / shell inspection
    "Bash(ls *)",
    "Bash(cat *)",
    "Bash(head *)",
    "Bash(tail *)",
    "Bash(wc *)",
    "Bash(find *)",
    "Bash(which *)",
    "Bash(node -e *)",
    "Bash(node -p *)"
  ];
  if (ctx.config.structure === "monorepo") allow.push(`Bash(${pm} --filter *)`, `Bash(${pm} -r *)`);
  return {
    permissions: {
      allow,
      deny: [
        // Destructive or policy-violating commands are always denied.
        "Bash(npx *)",
        "Bash(git push *)",
        "Bash(git push)",
        "Bash(git reset --hard *)",
        "Bash(rm -rf *)"
      ]
    },
    instructions: [
      "Use pnpm, not npm/yarn/npx. Run binaries with `pnpm exec`.",
      "No typecasts (as/any). Use zod schemas, type guards, or narrowing instead.",
      "Fix lint violations instead of suppressing them. Only add disable comments when suppression is genuinely the best option."
    ]
  };
}
1363
/**
 * Create .claude/settings.json, or merge any missing allow/deny rules and
 * instructions into an existing one (existing entries are preserved and new
 * ones appended). An unparseable existing file is left untouched.
 * Returns a single result record.
 */
async function generateClaudeSettings(ctx) {
  const filePath = ".claude/settings.json";
  const generated = buildSettings(ctx);
  const existing = ctx.read(filePath);
  if (!existing) {
    ctx.write(filePath, JSON.stringify(generated, null, 2) + "\n");
    return {
      filePath,
      action: "created",
      description: "Generated .claude/settings.json with safe operation permissions"
    };
  }
  const parsed = parseClaudeSettings(existing);
  if (!parsed) {
    return {
      filePath,
      action: "skipped",
      description: "Could not parse existing settings"
    };
  }
  const notIn = (list) => (item) => !list.includes(item);
  const missingAllow = generated.permissions.allow.filter(notIn(parsed.permissions.allow));
  const missingDeny = generated.permissions.deny.filter(notIn(parsed.permissions.deny));
  const missingInstructions = generated.instructions.filter(notIn(parsed.instructions));
  const added = missingAllow.length + missingDeny.length + missingInstructions.length;
  if (added === 0) {
    return {
      filePath,
      action: "skipped",
      description: "Already has all rules and instructions"
    };
  }
  parsed.permissions.allow = [...parsed.permissions.allow, ...missingAllow];
  parsed.permissions.deny = [...parsed.permissions.deny, ...missingDeny];
  parsed.instructions = [...parsed.instructions, ...missingInstructions];
  ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
  return {
    filePath,
    action: "updated",
    description: `Added ${String(added)} rules/instructions`
  };
}
1400
+
1401
+ //#endregion
1402
+ //#region src/generators/release-it.ts
1403
+ function buildConfig$2(ci, isMonorepo) {
1404
+ const config = {
1405
+ $schema: "https://unpkg.com/release-it/schema/release-it.json",
1406
+ git: {
1407
+ commitMessage: "chore: release v${version}",
1408
+ tagName: "v${version}"
1409
+ },
1410
+ npm: { publish: true }
1411
+ };
1412
+ if (ci === "github") config["github"] = { release: true };
1413
+ const plugins = {};
1414
+ if (ci === "forgejo") plugins["@bensandee/release-it-forgejo"] = { release: true };
1415
+ if (isMonorepo) {
1416
+ config["npm"] = {
1417
+ publish: true,
1418
+ allowSameVersion: true
1419
+ };
1420
+ plugins["@release-it/bumper"] = { out: "packages/*/package.json" };
1421
+ }
1422
+ if (Object.keys(plugins).length > 0) config["plugins"] = plugins;
1423
+ return config;
1424
+ }
1425
+ async function generateReleaseIt(ctx) {
1426
+ const filePath = ".release-it.json";
1427
+ if (ctx.config.releaseStrategy !== "release-it") return {
1428
+ filePath,
1429
+ action: "skipped",
1430
+ description: "release-it not requested"
1431
+ };
1432
+ const content = JSON.stringify(buildConfig$2(ctx.config.ci, ctx.config.structure === "monorepo"), null, 2) + "\n";
1433
+ const existing = ctx.read(filePath);
1434
+ if (existing) {
1435
+ if (existing === content) return {
1436
+ filePath,
1437
+ action: "skipped",
1438
+ description: "Already configured"
1439
+ };
1440
+ if (await ctx.confirmOverwrite(filePath) === "skip") return {
1441
+ filePath,
1442
+ action: "skipped",
1443
+ description: "Existing release-it config preserved"
1444
+ };
1445
+ }
1446
+ ctx.write(filePath, content);
1447
+ return {
1448
+ filePath,
1449
+ action: existing ? "updated" : "created",
1450
+ description: "Generated .release-it.json"
1451
+ };
1452
+ }
1453
+
1454
+ //#endregion
1455
+ //#region src/generators/changesets.ts
1456
/**
 * Build the default .changeset/config.json object (public access, main as
 * base branch, patch bumps for internal dependency updates).
 */
function buildConfig$1() {
  return {
    $schema: "https://unpkg.com/@changesets/config@3.1.1/schema.json",
    changelog: "@changesets/cli/changelog",
    commit: false,
    fixed: [],
    linked: [],
    access: "public",
    baseBranch: "main",
    updateInternalDependencies: "patch",
    ignore: []
  };
}
/**
 * Write .changeset/config.json when the changesets strategy is selected,
 * prompting before replacing a differing existing file.
 * Returns a single result record.
 */
async function generateChangesets(ctx) {
  const filePath = ".changeset/config.json";
  if (ctx.config.releaseStrategy !== "changesets") {
    return {
      filePath,
      action: "skipped",
      description: "Changesets not requested"
    };
  }
  const desired = JSON.stringify(buildConfig$1(), null, 2) + "\n";
  const current = ctx.read(filePath);
  if (current) {
    if (current === desired) {
      return {
        filePath,
        action: "skipped",
        description: "Already configured"
      };
    }
    if (await ctx.confirmOverwrite(filePath) === "skip") {
      return {
        filePath,
        action: "skipped",
        description: "Existing changesets config preserved"
      };
    }
  }
  ctx.write(filePath, desired);
  return {
    filePath,
    action: current ? "updated" : "created",
    description: "Generated .changeset/config.json"
  };
}
1497
+
1498
+ //#endregion
1499
+ //#region src/generators/release-ci.ts
1500
/**
 * True when the project's package.json declares an engines.node range
 * (mirrors hasEnginesNode$1; used to pick the node version source for the
 * release workflow).
 */
function hasEnginesNode(ctx) {
  const nodeRange = ctx.packageJson?.["engines"]?.["node"];
  return typeof nodeRange === "string";
}
1503
/**
 * Shared setup steps for every release workflow: full-history checkout
 * (release tooling needs tags), pnpm + node setup with npm registry auth,
 * frozen install, and a build. Returned pre-indented to sit under a
 * `steps:` key; no trailing newline so callers can append further steps.
 */
// NOTE(review): leading whitespace inside this template is YAML-significant;
// reconstructed from a whitespace-mangled source — verify against the
// published artifact.
function commonSteps(nodeVersionYaml) {
  return `      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: pnpm/action-setup@v4
      - uses: actions/setup-node@v4
        with:
          ${nodeVersionYaml}
          cache: pnpm
          registry-url: "https://registry.npmjs.org"
      - run: pnpm install --frozen-lockfile
      - run: pnpm build`;
}
1516
/**
 * Build the manually-dispatched release workflow for the release-it
 * strategy. GitHub runs get `contents: write` permissions and the built-in
 * GITHUB_TOKEN; Forgejo runs use a FORGEJO_TOKEN secret. npm publishing
 * authenticates via the NPM_TOKEN secret in both cases.
 * (`\${{ ... }}` escapes keep the Actions expressions literal in the output.)
 */
// NOTE(review): YAML indentation reconstructed from a whitespace-mangled
// source — verify against the published artifact.
function releaseItWorkflow(ci, nodeVersionYaml) {
  const isGitHub = ci === "github";
  const permissions = isGitHub ? `
permissions:
  contents: write
` : "";
  const tokenEnv = isGitHub ? `GITHUB_TOKEN: \${{ github.token }}` : `FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}`;
  return `name: Release
on:
  workflow_dispatch:
${permissions}
jobs:
  release:
    runs-on: ubuntu-latest
    steps:
${commonSteps(nodeVersionYaml)}
      - run: pnpm release-it --ci
        env:
          ${tokenEnv}
          NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
`;
}
1538
/**
 * Build the manually-dispatched release workflow for the
 * commit-and-tag-version strategy: bump + changelog commit, push with tags,
 * npm publish, then a platform release — `gh release create` on GitHub, or
 * this package's own `tooling release:create-forgejo-release` on Forgejo.
 * A bot identity is configured for git so the version commit has an author.
 */
// NOTE(review): YAML indentation reconstructed from a whitespace-mangled
// source — verify against the published artifact.
function commitAndTagVersionWorkflow(ci, nodeVersionYaml) {
  const isGitHub = ci === "github";
  const permissions = isGitHub ? `
permissions:
  contents: write
` : "";
  const gitConfigStep = `
      - name: Configure git
        run: |
          git config user.name "${isGitHub ? "github-actions[bot]" : "forgejo-actions[bot]"}"
          git config user.email "${isGitHub ? "github-actions[bot]@users.noreply.github.com" : "forgejo-actions[bot]@noreply.localhost"}"`;
  const releaseStep = isGitHub ? `
      - name: Release
        env:
          GITHUB_TOKEN: \${{ github.token }}
          NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
        run: |
          pnpm exec commit-and-tag-version
          git push --follow-tags
          TAG=$(git describe --tags --abbrev=0)
          pnpm publish --no-git-checks
          gh release create "$TAG" --generate-notes` : `
      - name: Release
        env:
          FORGEJO_SERVER_URL: \${{ github.server_url }}
          FORGEJO_REPOSITORY: \${{ github.repository }}
          FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}
          NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
        run: |
          pnpm exec commit-and-tag-version
          git push --follow-tags
          TAG=$(git describe --tags --abbrev=0)
          pnpm publish --no-git-checks
          pnpm exec tooling release:create-forgejo-release --tag "$TAG"`;
  return `name: Release
on:
  workflow_dispatch:
${permissions}
jobs:
  release:
    runs-on: ubuntu-latest
    steps:
${commonSteps(nodeVersionYaml)}${gitConfigStep}${releaseStep}
`;
}
1583
/**
 * Build the push-to-main release workflow for the changesets strategy.
 * GitHub uses the official changesets/action (opens version PRs, publishes
 * on merge); Forgejo has no equivalent action, so it falls back to this
 * package's own `tooling release:changesets` command with a bot git identity.
 */
// NOTE(review): YAML indentation reconstructed from a whitespace-mangled
// source — verify against the published artifact.
function changesetsWorkflow(ci, nodeVersionYaml) {
  if (ci === "github") return `name: Release
on:
  push:
    branches:
      - main

permissions:
  contents: write
  pull-requests: write

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
${commonSteps(nodeVersionYaml)}
      - uses: changesets/action@v1
        with:
          publish: pnpm changeset publish
          version: pnpm changeset version
        env:
          GITHUB_TOKEN: \${{ github.token }}
          NPM_TOKEN: \${{ secrets.NPM_TOKEN }}
`;
  return `name: Release
on:
  push:
    branches:
      - main

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
${commonSteps(nodeVersionYaml)}
      - name: Configure git
        run: |
          git config user.name "forgejo-actions[bot]"
          git config user.email "forgejo-actions[bot]@noreply.localhost"
      - name: Release
        env:
          FORGEJO_SERVER_URL: \${{ github.server_url }}
          FORGEJO_REPOSITORY: \${{ github.repository }}
          FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}
          NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
        run: pnpm exec tooling release:changesets
`;
}
1631
/**
 * Select the release workflow builder for the chosen strategy.
 * Returns the workflow YAML string, or null for unknown/none strategies.
 */
function buildWorkflow(strategy, ci, nodeVersionYaml) {
  if (strategy === "release-it") return releaseItWorkflow(ci, nodeVersionYaml);
  if (strategy === "commit-and-tag-version") return commitAndTagVersionWorkflow(ci, nodeVersionYaml);
  if (strategy === "changesets") return changesetsWorkflow(ci, nodeVersionYaml);
  return null;
}
1639
/**
 * Write the release workflow for the configured strategy and CI platform,
 * prompting before replacing an existing file. Skipped when either the
 * release strategy or CI platform is "none", or when no workflow exists for
 * the strategy. Returns a single result record.
 *
 * Fix: previously this always reported action "created", even when an
 * existing workflow was overwritten after the prompt; it now reports
 * "updated" in that case, consistent with the other generators.
 */
async function generateReleaseCi(ctx) {
  const filePath = "release-ci";
  if (ctx.config.releaseStrategy === "none" || ctx.config.ci === "none") return {
    filePath,
    action: "skipped",
    description: "Release CI workflow not applicable"
  };
  const isGitHub = ctx.config.ci === "github";
  const workflowPath = isGitHub ? ".github/workflows/release.yml" : ".forgejo/workflows/release.yml";
  const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
  const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml);
  if (!content) return {
    filePath,
    action: "skipped",
    description: "Release CI workflow not applicable"
  };
  const existed = ctx.exists(workflowPath);
  if (existed) {
    if (await ctx.confirmOverwrite(workflowPath) === "skip") return {
      filePath: workflowPath,
      action: "skipped",
      description: "Existing release workflow preserved"
    };
  }
  ctx.write(workflowPath, content);
  return {
    filePath: workflowPath,
    action: existed ? "updated" : "created",
    description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions release workflow`
  };
}
1669
+
1670
+ //#endregion
1671
+ //#region src/generators/lint-staged.ts
1672
/**
 * Build the lint-staged config source: run the chosen formatter over every
 * staged file.
 */
function buildConfig(formatter) {
  const command = formatter === "prettier" ? "prettier --write" : "oxfmt";
  return `export default {\n "*": "${command}",\n};\n`;
}
// Husky pre-commit hook body: delegate entirely to lint-staged.
const HUSKY_PRE_COMMIT = "pnpm exec lint-staged\n";
/**
 * Write lint-staged.config.mjs (prompting before replacing a differing
 * existing config) and ensure the .husky/pre-commit hook invokes
 * lint-staged. The hook is repaired even when the config is skipped.
 * Returns a single result record.
 */
async function generateLintStaged(ctx) {
  const filePath = "lint-staged.config.mjs";
  const huskyPath = ".husky/pre-commit";
  const desired = buildConfig(ctx.config.formatter);
  const current = ctx.read(filePath);
  if (ctx.read(huskyPath) !== HUSKY_PRE_COMMIT) ctx.write(huskyPath, HUSKY_PRE_COMMIT);
  if (current) {
    if (current === desired) {
      return {
        filePath,
        action: "skipped",
        description: "Already configured"
      };
    }
    if (await ctx.confirmOverwrite(filePath) === "skip") {
      return {
        filePath,
        action: "skipped",
        description: "Existing lint-staged config preserved"
      };
    }
  }
  ctx.write(filePath, desired);
  return {
    filePath,
    action: current ? "updated" : "created",
    description: "Generated lint-staged config and husky pre-commit hook"
  };
}
1701
+
1702
+ //#endregion
1703
+ //#region src/commands/repo-init.ts
1704
// citty command definition for `repo:init`: the interactive setup wizard.
// With --yes it builds a default config non-interactively (optionally forcing
// lint-rules on or CI off); otherwise it runs the prompt flow. --no-prompt
// suppresses generation of the .tooling-migrate.md migration prompt.
const initCommand = defineCommand({
  meta: {
    name: "repo:init",
    description: "Interactive setup wizard"
  },
  args: {
    dir: {
      type: "positional",
      description: "Target directory (default: current directory)",
      required: false
    },
    yes: {
      type: "boolean",
      alias: "y",
      description: "Accept all defaults (non-interactive)"
    },
    "lint-rules": {
      type: "boolean",
      description: "Include @bensandee/lint-rules (default: true)"
    },
    "no-ci": {
      type: "boolean",
      description: "Skip CI workflow generation"
    },
    "no-prompt": {
      type: "boolean",
      description: "Skip migration prompt generation"
    }
  },
  async run({ args }) {
    const targetDir = path.resolve(args.dir ?? ".");
    // Flags are only forwarded when explicitly true; `void 0` lets
    // buildDefaultConfig apply its own defaults otherwise.
    await runInit(args.yes ? buildDefaultConfig(targetDir, {
      lintRules: args["lint-rules"] === true ? true : void 0,
      noCi: args["no-ci"] === true ? true : void 0
    }) : await runInitPrompts(targetDir), args["no-prompt"] === true ? { noPrompt: true } : {});
  }
});
1741
/**
 * Run the full init flow: generate every managed config file into
 * `config.targetDir`, print a summary, optionally write a migration prompt
 * (`.tooling-migrate.md`), and print next steps.
 *
 * @param config - init config (must include `targetDir`), built by
 *   buildDefaultConfig() or runInitPrompts().
 * @param options - `noPrompt` skips migration-prompt generation;
 *   `confirmOverwrite` replaces the interactive overwrite/skip prompt
 *   (repo:update passes a handler that always skips).
 * @returns array of per-file generator results ({ filePath, action, ... }).
 */
async function runInit(config, options = {}) {
	const detected = detectProject(config.targetDir);
	// Default conflict handler asks interactively; cancelling counts as "skip".
	const ctx = createContext(config, options.confirmOverwrite ?? (async (relativePath) => {
		const result = await p.select({
			message: `${relativePath} already exists. What do you want to do?`,
			options: [{
				value: "overwrite",
				label: "Overwrite"
			}, {
				value: "skip",
				label: "Skip"
			}]
		});
		if (p.isCancel(result)) return "skip";
		return result;
	}));
	const results = [];
	const s = p.spinner();
	s.start("Generating configuration files...");
	// Run each generator in sequence, collecting per-file results.
	results.push(await generatePackageJson(ctx));
	results.push(await generatePnpmWorkspace(ctx));
	results.push(...await generateTsconfig(ctx));
	results.push(await generateTsdown(ctx));
	results.push(await generateOxlint(ctx));
	results.push(await generateFormatter(ctx));
	results.push(await generateLintStaged(ctx));
	results.push(await generateGitignore(ctx));
	results.push(await generateKnip(ctx));
	results.push(await generateRenovate(ctx));
	results.push(await generateCi(ctx));
	results.push(await generateClaudeSettings(ctx));
	results.push(await generateReleaseIt(ctx));
	results.push(await generateChangesets(ctx));
	results.push(await generateReleaseCi(ctx));
	const vitestResults = await generateVitest(ctx);
	results.push(...vitestResults);
	s.stop("Done!");
	// Bucket outcomes for the summary note; only non-empty buckets are shown.
	const created = results.filter((r) => r.action === "created");
	const updated = results.filter((r) => r.action === "updated");
	const skipped = results.filter((r) => r.action === "skipped");
	const summaryLines = [];
	if (created.length > 0) summaryLines.push(`Created: ${created.map((r) => r.filePath).join(", ")}`);
	if (updated.length > 0) summaryLines.push(`Updated: ${updated.map((r) => r.filePath).join(", ")}`);
	if (skipped.length > 0) summaryLines.push(`Skipped: ${skipped.map((r) => r.filePath).join(", ")}`);
	p.note(summaryLines.join("\n"), "Summary");
	if (!options.noPrompt) {
		// Write an LLM-oriented migration prompt describing what changed.
		const prompt = generateMigratePrompt(results, config, detected);
		const promptPath = ".tooling-migrate.md";
		ctx.write(promptPath, prompt);
		p.log.info(`Migration prompt written to ${promptPath}`);
		p.log.info("Paste its contents into Claude Code to finish the migration.");
	}
	const updateCmd = `pnpm update --latest ${getAddedDevDepNames(config).join(" ")}`;
	p.note([
		"1. Run: pnpm install",
		`2. Run: ${updateCmd}`,
		"3. Run: pnpm typecheck",
		"4. Run: pnpm build",
		"5. Run: pnpm test",
		...options.noPrompt ? [] : ["6. Paste .tooling-migrate.md into Claude Code for cleanup"]
	].join("\n"), "Next steps");
	return results;
}
1804
+
1805
+ //#endregion
1806
+ //#region src/commands/repo-update.ts
1807
/** `repo:update` — re-run generation, adding only missing files. */
const updateCommand = defineCommand({
	meta: {
		name: "repo:update",
		description: "Add missing config (never overwrites)"
	},
	args: {
		dir: {
			type: "positional",
			description: "Target directory (default: current directory)",
			required: false
		}
	},
	async run({ args }) {
		const dir = args.dir ?? ".";
		await runUpdate(path.resolve(dir));
	}
});
1821
/** Non-interactive init variant: default config, no prompt file, conflicts always skipped. */
async function runUpdate(targetDir) {
	const config = buildDefaultConfig(targetDir, {});
	const alwaysSkip = async () => "skip";
	return runInit(config, {
		noPrompt: true,
		confirmOverwrite: alwaysSkip
	});
}
1827
+
1828
+ //#endregion
1829
+ //#region src/utils/errors.ts
1830
/** An error caused by an external transient condition (network, API rate limit, etc). */
var TransientError = class TransientError extends Error {
	constructor(...args) {
		super(...args);
		this.name = "TransientError";
	}
};
1834
/** An error indicating a fatal misconfiguration or invariant violation. */
var FatalError = class FatalError extends Error {
	constructor(...args) {
		super(...args);
		this.name = "FatalError";
	}
};
1838
/** An error for conditions that should be unreachable. */
var UnexpectedError = class UnexpectedError extends Error {
	constructor(...args) {
		super(...args);
		this.name = "UnexpectedError";
	}
};
1842
+
1843
+ //#endregion
1844
+ //#region src/utils/exec.ts
1845
/** Type guard for `execSync` errors that carry stdout/stderr/status. */
function isExecSyncError(err) {
	if (!(err instanceof Error)) return false;
	const hasStdout = "stdout" in err && typeof err.stdout === "string";
	const hasStderr = "stderr" in err && typeof err.stderr === "string";
	const hasStatus = "status" in err && typeof err.status === "number";
	return hasStdout && hasStderr && hasStatus;
}
1849
+
1850
+ //#endregion
1851
+ //#region src/release/executor.ts
1852
/**
 * Create an executor that runs real commands, fetches, and reads the filesystem.
 * All methods are failure-tolerant: they report errors via return values
 * (exit codes, null, empty arrays) instead of throwing.
 */
function createRealExecutor() {
	return {
		/**
		 * Run a shell command synchronously with piped stdio.
		 * Returns { stdout, stderr, exitCode } instead of throwing; a non-zero
		 * exit is reported through `exitCode` using the fields execSync attaches
		 * to its thrown error.
		 */
		exec(command, options) {
			try {
				return {
					stdout: execSync(command, {
						cwd: options?.cwd,
						env: {
							...process.env,
							...options?.env
						},
						encoding: "utf-8",
						stdio: [
							"pipe",
							"pipe",
							"pipe"
						]
					}),
					stderr: "",
					exitCode: 0
				};
			} catch (err) {
				if (isExecSyncError(err)) return {
					stdout: err.stdout,
					stderr: err.stderr,
					exitCode: err.status
				};
				// Fix: don't swallow non-exec failures (e.g. spawn errors) silently —
				// surface the message through stderr so callers can log it.
				return {
					stdout: "",
					stderr: err instanceof Error ? err.message : String(err),
					exitCode: 1
				};
			}
		},
		fetch: globalThis.fetch,
		/** List pending changeset markdown files (excluding README.md); [] when the dir is missing. */
		listChangesetFiles(cwd) {
			const dir = path.join(cwd, ".changeset");
			try {
				return readdirSync(dir).filter((f) => f.endsWith(".md") && f !== "README.md");
			} catch {
				return [];
			}
		},
		/**
		 * Scan `packages/\u002A/package.json` for publishable (non-private, named,
		 * versioned) workspace packages. Unreadable or malformed entries are
		 * skipped; a missing packages/ dir yields [].
		 */
		listWorkspacePackages(cwd) {
			const packagesDir = path.join(cwd, "packages");
			const packages = [];
			try {
				for (const entry of readdirSync(packagesDir)) {
					const pkgPath = path.join(packagesDir, entry, "package.json");
					try {
						const pkg = parsePackageJson(readFileSync(pkgPath, "utf-8"));
						if (pkg?.name && pkg.version && !pkg.private) packages.push({
							name: pkg.name,
							version: pkg.version,
							dir: entry
						});
					} catch (_error) {}
				}
			} catch (_error) {}
			return packages;
		},
		/** Read a UTF-8 text file; null when it cannot be read. */
		readFile(filePath) {
			try {
				return readFileSync(filePath, "utf-8");
			} catch {
				return null;
			}
		}
	};
}
1923
/** Check whether there are pending changeset files. */
function hasChangesets(executor, cwd) {
	const pending = executor.listChangesetFiles(cwd);
	return pending.length !== 0;
}
1927
/** Parse "New tag:" lines from changeset publish output. */
function parseNewTags(output) {
	return output.split("\n").flatMap((line) => {
		const match = /New tag:\s+(\S+)/.exec(line);
		return match?.[1] ? [match[1]] : [];
	});
}
1936
/** Map workspace packages to their expected tag strings (name@version). */
function computeExpectedTags(packages) {
	const tags = [];
	for (const pkg of packages) tags.push(`${pkg.name}@${pkg.version}`);
	return tags;
}
1940
/** Parse `git ls-remote --tags` output into tag names, filtering out `^{}` dereference entries. */
function parseRemoteTags(output) {
	const isDeref = (name) => name.endsWith("^{}");
	return output.split("\n").flatMap((line) => {
		const name = /refs\/tags\/(.+)/.exec(line)?.[1];
		return name && !isDeref(name) ? [name] : [];
	});
}
1949
/**
 * Reconcile expected tags with what already exists on the remote.
 * Returns `(expected - remote) ∪ stdoutTags`, deduplicated.
 */
function reconcileTags(expectedTags, remoteTags, stdoutTags) {
	const alreadyOnRemote = new Set(remoteTags);
	const missing = expectedTags.filter((tag) => !alreadyOnRemote.has(tag));
	return [...new Set([...missing, ...stdoutTags])];
}
1960
+
1961
+ //#endregion
1962
+ //#region src/release/forgejo.ts
1963
// Minimal shape of the Forgejo "list pulls" response — only the PR number is consumed.
const PullRequestSchema = z.array(z.object({ number: z.number() }));
1964
/**
 * Find an open PR with the given head branch. Returns the PR number or null.
 *
 * @throws {TransientError} when the list request fails.
 * @throws {UnexpectedError} when the response does not match the expected schema.
 */
async function findOpenPr(executor, conn, head) {
	// Fix: encode the head branch — branch names contain "/" (e.g.
	// "changeset-release/main") and must not be interpreted as URL structure
	// inside the query string.
	const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls?state=open&head=${encodeURIComponent(head)}`;
	const res = await executor.fetch(url, { headers: { Authorization: `token ${conn.token}` } });
	if (!res.ok) throw new TransientError(`Failed to list PRs: ${res.status} ${res.statusText}`);
	const parsed = PullRequestSchema.safeParse(await res.json());
	if (!parsed.success) throw new UnexpectedError(`Unexpected PR list response: ${parsed.error.message}`);
	return parsed.data[0]?.number ?? null;
}
1973
/** Create a new pull request. */
async function createPr(executor, conn, options) {
	const endpoint = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls`;
	// `body` is optional — include it only when provided and non-empty.
	const payload = {
		title: options.title,
		head: options.head,
		base: options.base
	};
	if (options.body) payload["body"] = options.body;
	const response = await executor.fetch(endpoint, {
		method: "POST",
		headers: {
			Authorization: `token ${conn.token}`,
			"Content-Type": "application/json"
		},
		body: JSON.stringify(payload)
	});
	if (response.ok) return;
	throw new TransientError(`Failed to create PR: ${response.status} ${response.statusText}`);
}
1992
/** Update an existing pull request's title and body. */
async function updatePr(executor, conn, prNumber, options) {
	const endpoint = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls/${String(prNumber)}`;
	const response = await executor.fetch(endpoint, {
		method: "PATCH",
		headers: {
			Authorization: `token ${conn.token}`,
			"Content-Type": "application/json"
		},
		body: JSON.stringify({ title: options.title, body: options.body })
	});
	if (!response.ok) throw new TransientError(`Failed to update PR #${String(prNumber)}: ${response.status} ${response.statusText}`);
}
2008
/** Check whether a Forgejo release already exists for a given tag. */
async function findRelease(executor, conn, tag) {
	// Tags like "@scope/pkg@1.0.0" need encoding to survive as a path segment.
	const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/releases/tags/${encodeURIComponent(tag)}`;
	const res = await executor.fetch(url, { headers: { Authorization: `token ${conn.token}` } });
	switch (res.status) {
		case 200: return true;
		case 404: return false;
		default: throw new TransientError(`Failed to check release for ${tag}: ${res.status} ${res.statusText}`);
	}
}
2017
/** Create a Forgejo release for a given tag. */
async function createRelease(executor, conn, tag) {
	const endpoint = `${conn.serverUrl}/api/v1/repos/${conn.repository}/releases`;
	// Release name mirrors the tag; the body is a fixed one-liner.
	const payload = JSON.stringify({
		tag_name: tag,
		name: tag,
		body: `Published ${tag}`
	});
	const res = await executor.fetch(endpoint, {
		method: "POST",
		headers: {
			Authorization: `token ${conn.token}`,
			"Content-Type": "application/json"
		},
		body: payload
	});
	if (!res.ok) throw new TransientError(`Failed to create release for ${tag}: ${res.status} ${res.statusText}`);
}
2034
+
2035
+ //#endregion
2036
+ //#region src/release/version.ts
2037
// Head branch used for the generated version PR (force-pushed by runVersionMode).
const BRANCH = "changeset-release/main";
2038
/** Extract the latest changelog entry (content between first and second `## ` heading). */
function extractLatestEntry(changelog) {
	const lines = changelog.split("\n");
	const isHeading = (line) => line.startsWith("## ");
	const start = lines.findIndex(isHeading);
	if (start === -1) return null;
	// Entry runs until the next `## ` heading, or to the end of the file.
	const nextOffset = lines.slice(start + 1).findIndex(isHeading);
	const end = nextOffset === -1 ? lines.length : start + 1 + nextOffset;
	return lines.slice(start, end).join("\n").trim();
}
2051
/** Read the root package.json name and version. */
function readRootPackage(executor, cwd) {
	const raw = executor.readFile(path.join(cwd, "package.json"));
	if (!raw) return null;
	const pkg = parsePackageJson(raw);
	// Only publishable roots count: must have a name and version and not be private.
	if (!pkg?.name || !pkg.version || pkg.private) return null;
	return {
		name: pkg.name,
		version: pkg.version
	};
}
2063
/**
 * Determine which packages changed and collect their changelog entries.
 *
 * @param executor - executor providing listWorkspacePackages/readFile.
 * @param cwd - repository root.
 * @param packagesBefore - workspace packages snapshotted before `changeset version`.
 * @returns { title, body } for the version PR.
 */
function buildPrContent(executor, cwd, packagesBefore) {
	const packagesAfter = executor.listWorkspacePackages(cwd);
	// No workspace packages before or after: treat the repo as a single-package
	// root and read root package.json + CHANGELOG.md instead.
	if (!(packagesBefore.length > 0 || packagesAfter.length > 0)) {
		const rootPkg = readRootPackage(executor, cwd);
		if (rootPkg) {
			const changelog = executor.readFile(path.join(cwd, "CHANGELOG.md"));
			const entry = changelog ? extractLatestEntry(changelog) : null;
			return {
				title: `chore: release ${rootPkg.name}@${rootPkg.version}`,
				body: entry ?? ""
			};
		}
		// No publishable root package either — fall back to a generic title.
		return {
			title: "chore: version packages",
			body: ""
		};
	}
	// Workspace mode: a package counts as changed when its version differs
	// from the pre-version snapshot (new packages are changed by definition).
	const beforeMap = new Map(packagesBefore.map((pkg) => [pkg.name, pkg.version]));
	const changed = packagesAfter.filter((pkg) => beforeMap.get(pkg.name) !== pkg.version);
	if (changed.length === 0) return {
		title: "chore: version packages",
		body: ""
	};
	const title = `chore: release ${changed.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ")}`;
	const entries = [];
	for (const pkg of changed) {
		const changelogPath = path.join(cwd, "packages", pkg.dir, "CHANGELOG.md");
		const changelog = executor.readFile(changelogPath);
		const entry = changelog ? extractLatestEntry(changelog) : null;
		if (entry) {
			// Relabel the entry heading with name@version so entries from
			// multiple packages stay distinguishable in one PR body.
			const labeled = entry.replace(/^## .+/, `## ${pkg.name}@${pkg.version}`);
			entries.push(labeled);
		}
	}
	return {
		title,
		body: entries.join("\n\n")
	};
}
2103
/**
 * Mode 1: version packages and create/update a PR.
 *
 * Runs `changeset version`, commits the result, force-pushes it to the
 * well-known release branch, and creates or updates the corresponding PR.
 *
 * @returns { mode: "version", pr: "none" | "created" | "updated" }.
 */
async function runVersionMode(executor, config) {
	p.log.info("Changesets detected — versioning packages");
	// Snapshot versions before `changeset version` so changed packages can be detected.
	const packagesBefore = executor.listWorkspacePackages(config.cwd);
	executor.exec("pnpm changeset version", { cwd: config.cwd });
	executor.exec("pnpm install --no-frozen-lockfile", { cwd: config.cwd });
	const { title, body } = buildPrContent(executor, config.cwd, packagesBefore);
	executor.exec("git add -A", { cwd: config.cwd });
	// A non-zero commit exit is treated as "nothing staged". NOTE(review):
	// other commit failures are indistinguishable here — confirm acceptable.
	if (executor.exec("git commit -m \"chore: version packages\"", { cwd: config.cwd }).exitCode !== 0) {
		p.log.info("Nothing to commit after versioning");
		return {
			mode: "version",
			pr: "none"
		};
	}
	if (config.dryRun) {
		p.log.info("[dry-run] Would push and create/update PR");
		return {
			mode: "version",
			pr: "none"
		};
	}
	// Force-push: the release branch is fully owned by this tool.
	executor.exec(`git push origin "HEAD:refs/heads/${BRANCH}" --force`, { cwd: config.cwd });
	const conn = {
		serverUrl: config.serverUrl,
		repository: config.repository,
		token: config.token
	};
	// Reuse an open PR for the branch when one exists; otherwise open a new one.
	const existingPr = await findOpenPr(executor, conn, BRANCH);
	if (existingPr === null) {
		await createPr(executor, conn, {
			title,
			head: BRANCH,
			base: "main",
			body
		});
		p.log.info("Created version PR");
		return {
			mode: "version",
			pr: "created"
		};
	}
	await updatePr(executor, conn, existingPr, {
		title,
		body
	});
	p.log.info(`Updated version PR #${String(existingPr)}`);
	return {
		mode: "version",
		pr: "updated"
	};
}
2155
+
2156
+ //#endregion
2157
+ //#region src/release/publish.ts
2158
// Total tries = 1 initial attempt + RETRY_ATTEMPTS retries.
const RETRY_ATTEMPTS = 3;
const RETRY_BASE_DELAY_MS = 1e3;
/**
 * Run `fn`, retrying on rejection with exponential backoff (1s, 2s, 4s).
 * Rethrows the last error once all attempts are exhausted.
 */
async function retryAsync(fn) {
	let lastError;
	for (let attempt = 0; attempt <= RETRY_ATTEMPTS; attempt++) {
		try {
			return await fn();
		} catch (error) {
			lastError = error;
			const retriesRemain = attempt < RETRY_ATTEMPTS;
			if (retriesRemain) {
				const delayMs = RETRY_BASE_DELAY_MS * 2 ** attempt;
				await new Promise((resolve) => setTimeout(resolve, delayMs));
			}
		}
	}
	throw lastError;
}
2173
/**
 * Mode 2: publish to npm, push tags, and create Forgejo releases.
 *
 * Reconciles three tag sources — publish output, expected workspace versions,
 * and tags already on the remote — then pushes missing tags and creates any
 * missing releases (self-healing a previously interrupted run).
 *
 * @returns { mode: "none" } when nothing was published, otherwise
 *   { mode: "publish", tags }.
 * @throws {FatalError} when `pnpm changeset publish` fails.
 * @throws {TransientError} when one or more releases could not be created.
 */
async function runPublishMode(executor, config) {
	p.log.info("No changesets — publishing packages");
	const publishResult = executor.exec("pnpm changeset publish", { cwd: config.cwd });
	if (publishResult.exitCode !== 0) throw new FatalError(`pnpm changeset publish failed (exit code ${String(publishResult.exitCode)}):\n${publishResult.stderr}`);
	// "New tag:" lines are scanned on both streams.
	const stdoutTags = parseNewTags(publishResult.stdout + "\n" + publishResult.stderr);
	const expectedTags = computeExpectedTags(executor.listWorkspacePackages(config.cwd));
	const remoteTags = parseRemoteTags(executor.exec("git ls-remote --tags origin", { cwd: config.cwd }).stdout);
	const remoteSet = new Set(remoteTags);
	// Tags to push = (expected - already on remote) ∪ tags reported by publish.
	const tagsToPush = reconcileTags(expectedTags, remoteTags, stdoutTags);
	if (config.dryRun) {
		if (tagsToPush.length === 0) {
			p.log.info("No packages were published");
			return { mode: "none" };
		}
		p.log.info(`Tags to process: ${tagsToPush.join(", ")}`);
		p.log.info("[dry-run] Would push tags and create releases");
		return {
			mode: "publish",
			tags: tagsToPush
		};
	}
	const conn = {
		serverUrl: config.serverUrl,
		repository: config.repository,
		token: config.token
	};
	// Self-heal: tags already on the remote may still be missing their Forgejo
	// release (e.g. a prior run died between tag push and release creation).
	const remoteExpectedTags = expectedTags.filter((t) => remoteSet.has(t) && !tagsToPush.includes(t));
	const tagsWithMissingReleases = [];
	for (const tag of remoteExpectedTags) if (!await findRelease(executor, conn, tag)) tagsWithMissingReleases.push(tag);
	const allTags = [...tagsToPush, ...tagsWithMissingReleases];
	if (allTags.length === 0) {
		p.log.info("No packages were published");
		return { mode: "none" };
	}
	p.log.info(`Tags to process: ${allTags.join(", ")}`);
	// Best-effort per tag: keep going on failure and report all failures at the end.
	const errors = [];
	for (const tag of allTags) try {
		if (!remoteSet.has(tag)) {
			// Create the local tag only if absent, then push just that ref.
			if (executor.exec(`git tag -l ${JSON.stringify(tag)}`, { cwd: config.cwd }).stdout.trim() === "") executor.exec(`git tag ${JSON.stringify(tag)}`, { cwd: config.cwd });
			executor.exec(`git push origin refs/tags/${tag}`, { cwd: config.cwd });
		}
		if (await findRelease(executor, conn, tag)) p.log.warn(`Release for ${tag} already exists — skipping`);
		else {
			// Retry release creation; a release created by an earlier/concurrent
			// attempt counts as success.
			await retryAsync(async () => {
				try {
					await createRelease(executor, conn, tag);
				} catch (error) {
					if (await findRelease(executor, conn, tag)) return;
					throw error;
				}
			});
			p.log.info(`Created release for ${tag}`);
		}
	} catch (error) {
		errors.push({
			tag,
			error
		});
		p.log.warn(`Failed to process ${tag}: ${error instanceof Error ? error.message : String(error)}`);
	}
	if (errors.length > 0) throw new TransientError(`Failed to create releases for: ${errors.map((e) => e.tag).join(", ")}`);
	return {
		mode: "publish",
		tags: allTags
	};
}
2240
+
2241
+ //#endregion
2242
+ //#region src/commands/release-changesets.ts
2243
/** `release:changesets` — version or publish depending on pending changesets. */
const releaseForgejoCommand = defineCommand({
	meta: {
		name: "release:changesets",
		description: "Changesets version/publish for Forgejo CI"
	},
	args: {
		"dry-run": {
			type: "boolean",
			description: "Skip push, API calls, and publishing side effects"
		}
	},
	async run({ args }) {
		const config = buildReleaseConfig({ dryRun: args["dry-run"] === true });
		const result = await runRelease(config, createRealExecutor());
		// "none" means nothing was published — still a successful run.
		if (result.mode === "none") process.exitCode = 0;
	}
});
2256
/** Build release config from environment variables and CLI flags. */
function buildReleaseConfig(flags) {
	// Each Forgejo connection value is mandatory; missing ones are fatal.
	const required = (name) => {
		const value = process.env[name];
		if (!value) throw new FatalError(`${name} environment variable is required`);
		return value;
	};
	return {
		serverUrl: required("FORGEJO_SERVER_URL"),
		repository: required("FORGEJO_REPOSITORY"),
		token: required("FORGEJO_TOKEN"),
		cwd: process.cwd(),
		dryRun: flags.dryRun ?? false
	};
}
2272
/** Core release logic — testable with a mock executor. */
async function runRelease(config, executor) {
	const pending = hasChangesets(executor, config.cwd);
	return pending ? runVersionMode(executor, config) : runPublishMode(executor, config);
}
2277
+
2278
+ //#endregion
2279
+ //#region src/commands/release-trigger.ts
2280
/** `release:trigger` — kick off the release workflow on Forgejo or GitHub. */
const releaseTriggerCommand = defineCommand({
	meta: {
		name: "release:trigger",
		description: "Trigger the release CI workflow"
	},
	args: {
		ref: {
			type: "string",
			description: "Git ref to trigger on (default: main)",
			required: false
		}
	},
	async run({ args }) {
		const ref = args.ref ?? "main";
		// FORGEJO_SERVER_URL selects the platform; absent means GitHub via `gh`.
		const forgejoUrl = process.env["FORGEJO_SERVER_URL"];
		if (forgejoUrl) {
			await triggerForgejo(forgejoUrl, ref);
		} else {
			triggerGitHub(ref);
		}
	}
});
2297
/** Dispatch the release workflow on Forgejo via the actions API. */
async function triggerForgejo(serverUrl, ref) {
	const repository = process.env["FORGEJO_REPOSITORY"];
	const token = process.env["FORGEJO_TOKEN"];
	if (!repository) throw new FatalError("FORGEJO_REPOSITORY environment variable is required");
	if (!token) throw new FatalError("FORGEJO_TOKEN environment variable is required");
	const dispatchUrl = `${serverUrl}/api/v1/repos/${repository}/actions/workflows/release.yml/dispatches`;
	const response = await fetch(dispatchUrl, {
		method: "POST",
		headers: {
			Authorization: `token ${token}`,
			"Content-Type": "application/json"
		},
		body: JSON.stringify({ ref })
	});
	if (!response.ok) throw new FatalError(`Failed to trigger Forgejo workflow: ${response.status} ${response.statusText}`);
	p.log.info(`Triggered release workflow on Forgejo (ref: ${ref})`);
}
2314
/**
 * Trigger the GitHub release workflow via the `gh` CLI.
 *
 * Fix: the original ignored the command's exit status and always logged
 * success; a non-zero exit now raises a FatalError carrying stderr. The ref
 * is also quoted (same JSON.stringify quoting used for git tags elsewhere in
 * this file) so unusual ref names cannot break the command line.
 *
 * @throws {FatalError} when `gh workflow run` exits non-zero.
 */
function triggerGitHub(ref) {
	const result = createRealExecutor().exec(`gh workflow run release.yml --ref ${JSON.stringify(ref)}`, { cwd: process.cwd() });
	if (result.exitCode !== 0) throw new FatalError(`Failed to trigger GitHub workflow (exit code ${String(result.exitCode)}):\n${result.stderr}`);
	p.log.info(`Triggered release workflow on GitHub (ref: ${ref})`);
}
2318
+
2319
+ //#endregion
2320
+ //#region src/commands/release-create-forgejo-release.ts
2321
/** `release:create-forgejo-release` — create a release for an existing tag (idempotent). */
const createForgejoReleaseCommand = defineCommand({
	meta: {
		name: "release:create-forgejo-release",
		description: "Create a Forgejo release for a given tag"
	},
	args: {
		tag: {
			type: "string",
			description: "Git tag to create a release for",
			required: true
		}
	},
	async run({ args }) {
		const serverUrl = process.env["FORGEJO_SERVER_URL"];
		const repository = process.env["FORGEJO_REPOSITORY"];
		const token = process.env["FORGEJO_TOKEN"];
		if (!serverUrl) throw new FatalError("FORGEJO_SERVER_URL environment variable is required");
		if (!repository) throw new FatalError("FORGEJO_REPOSITORY environment variable is required");
		if (!token) throw new FatalError("FORGEJO_TOKEN environment variable is required");
		const conn = { serverUrl, repository, token };
		const executor = createRealExecutor();
		// Idempotent: an existing release is reported, not recreated.
		const alreadyExists = await findRelease(executor, conn, args.tag);
		if (alreadyExists) {
			p.log.info(`Release for ${args.tag} already exists — skipping`);
			return;
		}
		await createRelease(executor, conn, args.tag);
		p.log.info(`Created Forgejo release for ${args.tag}`);
	}
});
2352
+
2353
+ //#endregion
2354
+ //#region src/bin.ts
2355
// CLI entry point: registers all subcommands and dispatches on argv.
runMain(defineCommand({
	meta: {
		name: "tooling",
		// NOTE(review): meta version says 0.1.0 while the published package is
		// 0.2.0 — confirm whether this should be injected from package.json at
		// build time.
		version: "0.1.0",
		description: "Bootstrap and maintain standardized TypeScript project tooling"
	},
	subCommands: {
		"repo:init": initCommand,
		"repo:update": updateCommand,
		"release:changesets": releaseForgejoCommand,
		"release:trigger": releaseTriggerCommand,
		"release:create-forgejo-release": createForgejoReleaseCommand
	}
}));
2369
+
2370
+ //#endregion
2371
+ export { };