@bensandee/tooling 0.18.0 → 0.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin.mjs
CHANGED
|
@@ -1,15 +1,16 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
import { l as createRealExecutor$1, t as
|
|
2
|
+
import { l as createRealExecutor$1, t as runDockerCheck, u as isExecSyncError } from "./check-VAgrEX2D.mjs";
|
|
3
3
|
import { defineCommand, runMain } from "citty";
|
|
4
4
|
import * as p from "@clack/prompts";
|
|
5
5
|
import path from "node:path";
|
|
6
|
-
import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
|
|
6
|
+
import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, unlinkSync, writeFileSync } from "node:fs";
|
|
7
7
|
import JSON5 from "json5";
|
|
8
8
|
import { parse } from "jsonc-parser";
|
|
9
9
|
import { z } from "zod";
|
|
10
|
-
import { isMap, isSeq, parse as parse$1, parseDocument } from "yaml";
|
|
10
|
+
import { isMap, isScalar, isSeq, parse as parse$1, parseDocument, stringify } from "yaml";
|
|
11
11
|
import { execSync } from "node:child_process";
|
|
12
12
|
import { FatalError, TransientError, UnexpectedError } from "@bensandee/common";
|
|
13
|
+
import { tmpdir } from "node:os";
|
|
13
14
|
//#region src/types.ts
|
|
14
15
|
const LEGACY_TOOLS = [
|
|
15
16
|
"eslint",
|
|
@@ -41,7 +42,7 @@ const TsconfigSchema = z.object({
|
|
|
41
42
|
include: z.array(z.string()).optional(),
|
|
42
43
|
exclude: z.array(z.string()).optional(),
|
|
43
44
|
files: z.array(z.string()).optional(),
|
|
44
|
-
references: z.array(z.object({ path: z.string() }).
|
|
45
|
+
references: z.array(z.object({ path: z.string() }).loose()).optional(),
|
|
45
46
|
compilerOptions: z.record(z.string(), z.unknown()).optional()
|
|
46
47
|
}).loose();
|
|
47
48
|
const RenovateSchema = z.object({
|
|
@@ -205,6 +206,32 @@ function computeDefaults(targetDir) {
|
|
|
205
206
|
detectPackageTypes: true
|
|
206
207
|
};
|
|
207
208
|
}
|
|
209
|
+
/**
|
|
210
|
+
* List packages that would be published to npm (non-private, have a name).
|
|
211
|
+
* For monorepos, scans packages/ subdirectories. For single repos, checks root package.json.
|
|
212
|
+
* An optional pre-parsed root package.json can be passed to avoid re-reading from disk.
|
|
213
|
+
*/
|
|
214
|
+
function getPublishablePackages(targetDir, structure, rootPackageJson) {
|
|
215
|
+
if (structure === "monorepo") {
|
|
216
|
+
const packages = getMonorepoPackages(targetDir);
|
|
217
|
+
const results = [];
|
|
218
|
+
for (const pkg of packages) {
|
|
219
|
+
const pkgJson = readPackageJson(pkg.dir);
|
|
220
|
+
if (!pkgJson || pkgJson.private || !pkgJson.name) continue;
|
|
221
|
+
results.push({
|
|
222
|
+
name: pkgJson.name,
|
|
223
|
+
dir: pkg.dir
|
|
224
|
+
});
|
|
225
|
+
}
|
|
226
|
+
return results;
|
|
227
|
+
}
|
|
228
|
+
const pkg = rootPackageJson ?? readPackageJson(targetDir);
|
|
229
|
+
if (!pkg || pkg.private || !pkg.name) return [];
|
|
230
|
+
return [{
|
|
231
|
+
name: pkg.name,
|
|
232
|
+
dir: targetDir
|
|
233
|
+
}];
|
|
234
|
+
}
|
|
208
235
|
/** List packages in a monorepo's packages/ directory. */
|
|
209
236
|
function getMonorepoPackages(targetDir) {
|
|
210
237
|
const packagesDir = path.join(targetDir, "packages");
|
|
@@ -456,7 +483,7 @@ const DeclarativeHealthCheckSchema = z.object({
|
|
|
456
483
|
url: z.string(),
|
|
457
484
|
status: z.number().int().optional()
|
|
458
485
|
});
|
|
459
|
-
const
|
|
486
|
+
const DockerCheckConfigSchema = z.object({
|
|
460
487
|
composeFiles: z.array(z.string()).optional(),
|
|
461
488
|
envFile: z.string().optional(),
|
|
462
489
|
services: z.array(z.string()).optional(),
|
|
@@ -495,7 +522,7 @@ const ToolingConfigSchema = z.object({
|
|
|
495
522
|
dockerfile: z.string(),
|
|
496
523
|
context: z.string().default(".")
|
|
497
524
|
})).optional(),
|
|
498
|
-
|
|
525
|
+
dockerCheck: z.union([z.literal(false), DockerCheckConfigSchema]).optional()
|
|
499
526
|
});
|
|
500
527
|
/** Load saved tooling config from the target directory. Returns undefined if missing or invalid. */
|
|
501
528
|
function loadToolingConfig(targetDir) {
|
|
@@ -568,6 +595,334 @@ function mergeWithSavedConfig(detected, saved) {
|
|
|
568
595
|
};
|
|
569
596
|
}
|
|
570
597
|
//#endregion
|
|
598
|
+
//#region src/utils/yaml-merge.ts
|
|
599
|
+
const IGNORE_PATTERN = "@bensandee/tooling:ignore";
|
|
600
|
+
const FORGEJO_SCHEMA_COMMENT = "# yaml-language-server: $schema=../../.vscode/forgejo-workflow.schema.json\n";
|
|
601
|
+
/** Returns a yaml-language-server schema comment for Forgejo workflows, empty string otherwise. */
|
|
602
|
+
function workflowSchemaComment(ci) {
|
|
603
|
+
return ci === "forgejo" ? FORGEJO_SCHEMA_COMMENT : "";
|
|
604
|
+
}
|
|
605
|
+
/** Prepend the Forgejo schema comment if it's not already present. No-op for GitHub. */
|
|
606
|
+
function ensureSchemaComment(content, ci) {
|
|
607
|
+
if (ci !== "forgejo") return content;
|
|
608
|
+
if (content.includes("yaml-language-server")) return content;
|
|
609
|
+
return FORGEJO_SCHEMA_COMMENT + content;
|
|
610
|
+
}
|
|
611
|
+
/** Check if a YAML file has an opt-out comment in the first 10 lines. */
|
|
612
|
+
function isToolingIgnored(content) {
|
|
613
|
+
return content.split("\n", 10).some((line) => line.includes(IGNORE_PATTERN));
|
|
614
|
+
}
|
|
615
|
+
/**
|
|
616
|
+
* Ensure required commands exist under `pre-commit.commands` in a lefthook config.
|
|
617
|
+
* Only adds missing commands — never modifies existing ones.
|
|
618
|
+
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
619
|
+
*/
|
|
620
|
+
function mergeLefthookCommands(existing, requiredCommands) {
|
|
621
|
+
if (isToolingIgnored(existing)) return {
|
|
622
|
+
content: existing,
|
|
623
|
+
changed: false
|
|
624
|
+
};
|
|
625
|
+
try {
|
|
626
|
+
const doc = parseDocument(existing);
|
|
627
|
+
let changed = false;
|
|
628
|
+
if (!doc.hasIn(["pre-commit", "commands"])) {
|
|
629
|
+
doc.setIn(["pre-commit", "commands"], requiredCommands);
|
|
630
|
+
return {
|
|
631
|
+
content: doc.toString(),
|
|
632
|
+
changed: true
|
|
633
|
+
};
|
|
634
|
+
}
|
|
635
|
+
const commands = doc.getIn(["pre-commit", "commands"]);
|
|
636
|
+
if (!isMap(commands)) return {
|
|
637
|
+
content: existing,
|
|
638
|
+
changed: false
|
|
639
|
+
};
|
|
640
|
+
for (const [name, config] of Object.entries(requiredCommands)) if (!commands.has(name)) {
|
|
641
|
+
commands.set(name, config);
|
|
642
|
+
changed = true;
|
|
643
|
+
}
|
|
644
|
+
return {
|
|
645
|
+
content: changed ? doc.toString() : existing,
|
|
646
|
+
changed
|
|
647
|
+
};
|
|
648
|
+
} catch {
|
|
649
|
+
return {
|
|
650
|
+
content: existing,
|
|
651
|
+
changed: false
|
|
652
|
+
};
|
|
653
|
+
}
|
|
654
|
+
}
|
|
655
|
+
/**
|
|
656
|
+
* Ensure required steps exist in a workflow job's steps array.
|
|
657
|
+
* Only adds missing steps at the end — never modifies existing ones.
|
|
658
|
+
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
659
|
+
*/
|
|
660
|
+
function mergeWorkflowSteps(existing, jobName, requiredSteps) {
|
|
661
|
+
if (isToolingIgnored(existing)) return {
|
|
662
|
+
content: existing,
|
|
663
|
+
changed: false
|
|
664
|
+
};
|
|
665
|
+
try {
|
|
666
|
+
const doc = parseDocument(existing);
|
|
667
|
+
const steps = doc.getIn([
|
|
668
|
+
"jobs",
|
|
669
|
+
jobName,
|
|
670
|
+
"steps"
|
|
671
|
+
]);
|
|
672
|
+
if (!isSeq(steps)) return {
|
|
673
|
+
content: existing,
|
|
674
|
+
changed: false
|
|
675
|
+
};
|
|
676
|
+
let changed = false;
|
|
677
|
+
for (const { match, step } of requiredSteps) if (!steps.items.some((item) => {
|
|
678
|
+
if (!isMap(item)) return false;
|
|
679
|
+
if (match.run) {
|
|
680
|
+
const run = item.get("run");
|
|
681
|
+
return typeof run === "string" && run.includes(match.run);
|
|
682
|
+
}
|
|
683
|
+
if (match.uses) {
|
|
684
|
+
const uses = item.get("uses");
|
|
685
|
+
return typeof uses === "string" && uses.startsWith(match.uses);
|
|
686
|
+
}
|
|
687
|
+
return false;
|
|
688
|
+
})) {
|
|
689
|
+
steps.add(doc.createNode(step));
|
|
690
|
+
changed = true;
|
|
691
|
+
}
|
|
692
|
+
return {
|
|
693
|
+
content: changed ? doc.toString() : existing,
|
|
694
|
+
changed
|
|
695
|
+
};
|
|
696
|
+
} catch {
|
|
697
|
+
return {
|
|
698
|
+
content: existing,
|
|
699
|
+
changed: false
|
|
700
|
+
};
|
|
701
|
+
}
|
|
702
|
+
}
|
|
703
|
+
/**
|
|
704
|
+
* Add a job to an existing workflow YAML if it doesn't already exist.
|
|
705
|
+
* Returns unchanged content if the job already exists, the file has an opt-out comment,
|
|
706
|
+
* or the document can't be parsed.
|
|
707
|
+
*/
|
|
708
|
+
/**
|
|
709
|
+
* Ensure a `concurrency` block exists at the workflow top level.
|
|
710
|
+
* Adds it if missing — never modifies an existing one.
|
|
711
|
+
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
712
|
+
*/
|
|
713
|
+
function ensureWorkflowConcurrency(existing, concurrency) {
|
|
714
|
+
if (isToolingIgnored(existing)) return {
|
|
715
|
+
content: existing,
|
|
716
|
+
changed: false
|
|
717
|
+
};
|
|
718
|
+
try {
|
|
719
|
+
const doc = parseDocument(existing);
|
|
720
|
+
if (doc.has("concurrency")) return {
|
|
721
|
+
content: existing,
|
|
722
|
+
changed: false
|
|
723
|
+
};
|
|
724
|
+
doc.set("concurrency", concurrency);
|
|
725
|
+
const contents = doc.contents;
|
|
726
|
+
if (isMap(contents)) {
|
|
727
|
+
const items = contents.items;
|
|
728
|
+
const nameIdx = items.findIndex((p) => isScalar(p.key) && p.key.value === "name");
|
|
729
|
+
const concPair = items.pop();
|
|
730
|
+
if (concPair) items.splice(nameIdx + 1, 0, concPair);
|
|
731
|
+
}
|
|
732
|
+
return {
|
|
733
|
+
content: doc.toString(),
|
|
734
|
+
changed: true
|
|
735
|
+
};
|
|
736
|
+
} catch {
|
|
737
|
+
return {
|
|
738
|
+
content: existing,
|
|
739
|
+
changed: false
|
|
740
|
+
};
|
|
741
|
+
}
|
|
742
|
+
}
|
|
743
|
+
function addWorkflowJob(existing, jobName, jobConfig) {
|
|
744
|
+
if (isToolingIgnored(existing)) return {
|
|
745
|
+
content: existing,
|
|
746
|
+
changed: false
|
|
747
|
+
};
|
|
748
|
+
try {
|
|
749
|
+
const doc = parseDocument(existing);
|
|
750
|
+
const jobs = doc.getIn(["jobs"]);
|
|
751
|
+
if (!isMap(jobs)) return {
|
|
752
|
+
content: existing,
|
|
753
|
+
changed: false
|
|
754
|
+
};
|
|
755
|
+
if (jobs.has(jobName)) return {
|
|
756
|
+
content: existing,
|
|
757
|
+
changed: false
|
|
758
|
+
};
|
|
759
|
+
jobs.set(jobName, doc.createNode(jobConfig));
|
|
760
|
+
return {
|
|
761
|
+
content: doc.toString(),
|
|
762
|
+
changed: true
|
|
763
|
+
};
|
|
764
|
+
} catch {
|
|
765
|
+
return {
|
|
766
|
+
content: existing,
|
|
767
|
+
changed: false
|
|
768
|
+
};
|
|
769
|
+
}
|
|
770
|
+
}
|
|
771
|
+
//#endregion
|
|
772
|
+
//#region src/generators/deploy-ci.ts
|
|
773
|
+
/** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
|
|
774
|
+
function actionsExpr$2(expr) {
|
|
775
|
+
return `\${{ ${expr} }}`;
|
|
776
|
+
}
|
|
777
|
+
function hasEnginesNode$2(ctx) {
|
|
778
|
+
return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
|
|
779
|
+
}
|
|
780
|
+
function deployWorkflow(ci, nodeVersionYaml) {
|
|
781
|
+
return `${workflowSchemaComment(ci)}name: Deploy
|
|
782
|
+
on:
|
|
783
|
+
push:
|
|
784
|
+
tags:
|
|
785
|
+
- "v[0-9]+.[0-9]+.[0-9]+"
|
|
786
|
+
|
|
787
|
+
jobs:
|
|
788
|
+
deploy:
|
|
789
|
+
runs-on: ubuntu-latest
|
|
790
|
+
steps:
|
|
791
|
+
- uses: actions/checkout@v4
|
|
792
|
+
- uses: pnpm/action-setup@v4
|
|
793
|
+
- uses: actions/setup-node@v4
|
|
794
|
+
with:
|
|
795
|
+
${nodeVersionYaml}
|
|
796
|
+
- run: pnpm install --frozen-lockfile
|
|
797
|
+
- name: Publish Docker images
|
|
798
|
+
env:
|
|
799
|
+
DOCKER_REGISTRY_HOST: ${actionsExpr$2("vars.DOCKER_REGISTRY_HOST")}
|
|
800
|
+
DOCKER_REGISTRY_NAMESPACE: ${actionsExpr$2("vars.DOCKER_REGISTRY_NAMESPACE")}
|
|
801
|
+
DOCKER_USERNAME: ${actionsExpr$2("secrets.DOCKER_USERNAME")}
|
|
802
|
+
DOCKER_PASSWORD: ${actionsExpr$2("secrets.DOCKER_PASSWORD")}
|
|
803
|
+
run: pnpm exec tooling docker:publish
|
|
804
|
+
`;
|
|
805
|
+
}
|
|
806
|
+
function requiredDeploySteps() {
|
|
807
|
+
return [
|
|
808
|
+
{
|
|
809
|
+
match: { uses: "actions/checkout" },
|
|
810
|
+
step: { uses: "actions/checkout@v4" }
|
|
811
|
+
},
|
|
812
|
+
{
|
|
813
|
+
match: { uses: "pnpm/action-setup" },
|
|
814
|
+
step: { uses: "pnpm/action-setup@v4" }
|
|
815
|
+
},
|
|
816
|
+
{
|
|
817
|
+
match: { uses: "actions/setup-node" },
|
|
818
|
+
step: { uses: "actions/setup-node@v4" }
|
|
819
|
+
},
|
|
820
|
+
{
|
|
821
|
+
match: { run: "pnpm install" },
|
|
822
|
+
step: { run: "pnpm install --frozen-lockfile" }
|
|
823
|
+
},
|
|
824
|
+
{
|
|
825
|
+
match: { run: "docker:publish" },
|
|
826
|
+
step: { run: "pnpm exec tooling docker:publish" }
|
|
827
|
+
}
|
|
828
|
+
];
|
|
829
|
+
}
|
|
830
|
+
/** Convention paths to check for Dockerfiles. */
|
|
831
|
+
const CONVENTION_DOCKERFILE_PATHS$1 = ["Dockerfile", "docker/Dockerfile"];
|
|
832
|
+
const DockerMapSchema = z.object({ docker: z.record(z.string(), z.unknown()).optional() });
|
|
833
|
+
/** Get names of packages that have Docker builds (by convention or .tooling.json config). */
|
|
834
|
+
function getDockerPackageNames(ctx) {
|
|
835
|
+
const names = [];
|
|
836
|
+
const configRaw = ctx.read(".tooling.json");
|
|
837
|
+
if (configRaw) {
|
|
838
|
+
const result = DockerMapSchema.safeParse(JSON.parse(configRaw));
|
|
839
|
+
if (result.success && result.data.docker) names.push(...Object.keys(result.data.docker));
|
|
840
|
+
}
|
|
841
|
+
if (ctx.config.structure === "monorepo") {
|
|
842
|
+
const packages = getMonorepoPackages(ctx.targetDir);
|
|
843
|
+
for (const pkg of packages) {
|
|
844
|
+
const dirName = pkg.name.split("/").pop() ?? pkg.name;
|
|
845
|
+
if (names.includes(dirName)) continue;
|
|
846
|
+
for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(`packages/${dirName}/${rel}`)) {
|
|
847
|
+
names.push(dirName);
|
|
848
|
+
break;
|
|
849
|
+
}
|
|
850
|
+
}
|
|
851
|
+
} else for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(rel)) {
|
|
852
|
+
if (!names.includes(ctx.config.name)) names.push(ctx.config.name);
|
|
853
|
+
break;
|
|
854
|
+
}
|
|
855
|
+
return names;
|
|
856
|
+
}
|
|
857
|
+
/** Check whether any Docker packages exist by convention or .tooling.json config. */
|
|
858
|
+
function hasDockerPackages(ctx) {
|
|
859
|
+
return getDockerPackageNames(ctx).length > 0;
|
|
860
|
+
}
|
|
861
|
+
async function generateDeployCi(ctx) {
|
|
862
|
+
const filePath = "deploy-ci";
|
|
863
|
+
if (!hasDockerPackages(ctx) || ctx.config.ci === "none") return {
|
|
864
|
+
filePath,
|
|
865
|
+
action: "skipped",
|
|
866
|
+
description: "Deploy CI workflow not applicable"
|
|
867
|
+
};
|
|
868
|
+
const isGitHub = ctx.config.ci === "github";
|
|
869
|
+
const workflowPath = isGitHub ? ".github/workflows/publish.yml" : ".forgejo/workflows/publish.yml";
|
|
870
|
+
const nodeVersionYaml = hasEnginesNode$2(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
|
|
871
|
+
const content = deployWorkflow(ctx.config.ci, nodeVersionYaml);
|
|
872
|
+
if (ctx.exists(workflowPath)) {
|
|
873
|
+
const existing = ctx.read(workflowPath);
|
|
874
|
+
if (existing) {
|
|
875
|
+
if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
|
|
876
|
+
filePath: workflowPath,
|
|
877
|
+
action: "skipped",
|
|
878
|
+
description: "Deploy workflow already up to date"
|
|
879
|
+
};
|
|
880
|
+
const merged = mergeWorkflowSteps(existing, "deploy", requiredDeploySteps());
|
|
881
|
+
const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
|
|
882
|
+
if (withComment === content) {
|
|
883
|
+
ctx.write(workflowPath, content);
|
|
884
|
+
return {
|
|
885
|
+
filePath: workflowPath,
|
|
886
|
+
action: "updated",
|
|
887
|
+
description: "Added missing steps to deploy workflow"
|
|
888
|
+
};
|
|
889
|
+
}
|
|
890
|
+
if (await ctx.confirmOverwrite(workflowPath) === "skip") {
|
|
891
|
+
if (merged.changed || withComment !== merged.content) {
|
|
892
|
+
ctx.write(workflowPath, withComment);
|
|
893
|
+
return {
|
|
894
|
+
filePath: workflowPath,
|
|
895
|
+
action: "updated",
|
|
896
|
+
description: "Added missing steps to deploy workflow"
|
|
897
|
+
};
|
|
898
|
+
}
|
|
899
|
+
return {
|
|
900
|
+
filePath: workflowPath,
|
|
901
|
+
action: "skipped",
|
|
902
|
+
description: "Existing deploy workflow preserved"
|
|
903
|
+
};
|
|
904
|
+
}
|
|
905
|
+
ctx.write(workflowPath, content);
|
|
906
|
+
return {
|
|
907
|
+
filePath: workflowPath,
|
|
908
|
+
action: "updated",
|
|
909
|
+
description: "Replaced deploy workflow with updated template"
|
|
910
|
+
};
|
|
911
|
+
}
|
|
912
|
+
return {
|
|
913
|
+
filePath: workflowPath,
|
|
914
|
+
action: "skipped",
|
|
915
|
+
description: "Deploy workflow already up to date"
|
|
916
|
+
};
|
|
917
|
+
}
|
|
918
|
+
ctx.write(workflowPath, content);
|
|
919
|
+
return {
|
|
920
|
+
filePath: workflowPath,
|
|
921
|
+
action: "created",
|
|
922
|
+
description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions deploy workflow`
|
|
923
|
+
};
|
|
924
|
+
}
|
|
925
|
+
//#endregion
|
|
571
926
|
//#region src/generators/package-json.ts
|
|
572
927
|
const STANDARD_SCRIPTS_SINGLE = {
|
|
573
928
|
build: "tsdown",
|
|
@@ -597,7 +952,9 @@ const MANAGED_SCRIPTS = {
|
|
|
597
952
|
check: "checks:run",
|
|
598
953
|
"ci:check": "pnpm check",
|
|
599
954
|
"tooling:check": "repo:sync --check",
|
|
600
|
-
"tooling:sync": "repo:sync"
|
|
955
|
+
"tooling:sync": "repo:sync",
|
|
956
|
+
"docker:build": "docker:build",
|
|
957
|
+
"docker:check": "docker:check"
|
|
601
958
|
};
|
|
602
959
|
/** Deprecated scripts to remove during migration. */
|
|
603
960
|
const DEPRECATED_SCRIPTS = ["tooling:init", "tooling:update"];
|
|
@@ -643,9 +1000,7 @@ function addReleaseDeps(deps, config) {
|
|
|
643
1000
|
deps["release-it"] = "18.1.2";
|
|
644
1001
|
if (config.structure === "monorepo") deps["@release-it/bumper"] = "7.0.2";
|
|
645
1002
|
break;
|
|
646
|
-
case "simple":
|
|
647
|
-
deps["commit-and-tag-version"] = "12.5.0";
|
|
648
|
-
break;
|
|
1003
|
+
case "simple": break;
|
|
649
1004
|
case "changesets":
|
|
650
1005
|
deps["@changesets/cli"] = "2.29.4";
|
|
651
1006
|
break;
|
|
@@ -656,7 +1011,7 @@ function getAddedDevDepNames(config) {
|
|
|
656
1011
|
const deps = { ...ROOT_DEV_DEPS };
|
|
657
1012
|
if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
|
|
658
1013
|
deps["@bensandee/config"] = "0.8.2";
|
|
659
|
-
deps["@bensandee/tooling"] = "0.
|
|
1014
|
+
deps["@bensandee/tooling"] = "0.21.0";
|
|
660
1015
|
if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
|
|
661
1016
|
if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
|
|
662
1017
|
addReleaseDeps(deps, config);
|
|
@@ -674,10 +1029,14 @@ async function generatePackageJson(ctx) {
|
|
|
674
1029
|
};
|
|
675
1030
|
if (ctx.config.releaseStrategy === "changesets") allScripts["changeset"] = "changeset";
|
|
676
1031
|
if (ctx.config.releaseStrategy !== "none" && ctx.config.releaseStrategy !== "changesets") allScripts["trigger-release"] = "pnpm exec tooling release:trigger";
|
|
1032
|
+
if (hasDockerPackages(ctx)) {
|
|
1033
|
+
allScripts["docker:build"] = "pnpm exec tooling docker:build";
|
|
1034
|
+
allScripts["docker:check"] = "pnpm exec tooling docker:check";
|
|
1035
|
+
}
|
|
677
1036
|
const devDeps = { ...ROOT_DEV_DEPS };
|
|
678
1037
|
if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
|
|
679
1038
|
devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.8.2";
|
|
680
|
-
devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.
|
|
1039
|
+
devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.21.0";
|
|
681
1040
|
if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.2";
|
|
682
1041
|
if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
|
|
683
1042
|
if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
|
|
@@ -1139,164 +1498,33 @@ async function generateGitignore(ctx) {
|
|
|
1139
1498
|
ctx.write(filePath, updated);
|
|
1140
1499
|
if (missingRequired.length === 0) return {
|
|
1141
1500
|
filePath,
|
|
1142
|
-
action: "skipped",
|
|
1143
|
-
description: "Only optional entries missing"
|
|
1144
|
-
};
|
|
1145
|
-
return {
|
|
1146
|
-
filePath,
|
|
1147
|
-
action: "updated",
|
|
1148
|
-
description: `Appended ${String(missing.length)} missing entries`
|
|
1149
|
-
};
|
|
1150
|
-
}
|
|
1151
|
-
ctx.write(filePath, ALL_ENTRIES.join("\n") + "\n");
|
|
1152
|
-
return {
|
|
1153
|
-
filePath,
|
|
1154
|
-
action: "created",
|
|
1155
|
-
description: "Generated .gitignore"
|
|
1156
|
-
};
|
|
1157
|
-
}
|
|
1158
|
-
//#endregion
|
|
1159
|
-
//#region src/utils/yaml-merge.ts
|
|
1160
|
-
const IGNORE_PATTERN = "@bensandee/tooling:ignore";
|
|
1161
|
-
const FORGEJO_SCHEMA_COMMENT = "# yaml-language-server: $schema=../../.vscode/forgejo-workflow.schema.json\n";
|
|
1162
|
-
/** Returns a yaml-language-server schema comment for Forgejo workflows, empty string otherwise. */
|
|
1163
|
-
function workflowSchemaComment(ci) {
|
|
1164
|
-
return ci === "forgejo" ? FORGEJO_SCHEMA_COMMENT : "";
|
|
1165
|
-
}
|
|
1166
|
-
/** Prepend the Forgejo schema comment if it's not already present. No-op for GitHub. */
|
|
1167
|
-
function ensureSchemaComment(content, ci) {
|
|
1168
|
-
if (ci !== "forgejo") return content;
|
|
1169
|
-
if (content.includes("yaml-language-server")) return content;
|
|
1170
|
-
return FORGEJO_SCHEMA_COMMENT + content;
|
|
1171
|
-
}
|
|
1172
|
-
/** Check if a YAML file has an opt-out comment in the first 10 lines. */
|
|
1173
|
-
function isToolingIgnored(content) {
|
|
1174
|
-
return content.split("\n", 10).some((line) => line.includes(IGNORE_PATTERN));
|
|
1175
|
-
}
|
|
1176
|
-
/**
|
|
1177
|
-
* Ensure required commands exist under `pre-commit.commands` in a lefthook config.
|
|
1178
|
-
* Only adds missing commands — never modifies existing ones.
|
|
1179
|
-
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
1180
|
-
*/
|
|
1181
|
-
function mergeLefthookCommands(existing, requiredCommands) {
|
|
1182
|
-
if (isToolingIgnored(existing)) return {
|
|
1183
|
-
content: existing,
|
|
1184
|
-
changed: false
|
|
1185
|
-
};
|
|
1186
|
-
try {
|
|
1187
|
-
const doc = parseDocument(existing);
|
|
1188
|
-
let changed = false;
|
|
1189
|
-
if (!doc.hasIn(["pre-commit", "commands"])) {
|
|
1190
|
-
doc.setIn(["pre-commit", "commands"], requiredCommands);
|
|
1191
|
-
return {
|
|
1192
|
-
content: doc.toString(),
|
|
1193
|
-
changed: true
|
|
1194
|
-
};
|
|
1195
|
-
}
|
|
1196
|
-
const commands = doc.getIn(["pre-commit", "commands"]);
|
|
1197
|
-
if (!isMap(commands)) return {
|
|
1198
|
-
content: existing,
|
|
1199
|
-
changed: false
|
|
1200
|
-
};
|
|
1201
|
-
for (const [name, config] of Object.entries(requiredCommands)) if (!commands.has(name)) {
|
|
1202
|
-
commands.set(name, config);
|
|
1203
|
-
changed = true;
|
|
1204
|
-
}
|
|
1205
|
-
return {
|
|
1206
|
-
content: changed ? doc.toString() : existing,
|
|
1207
|
-
changed
|
|
1208
|
-
};
|
|
1209
|
-
} catch {
|
|
1210
|
-
return {
|
|
1211
|
-
content: existing,
|
|
1212
|
-
changed: false
|
|
1213
|
-
};
|
|
1214
|
-
}
|
|
1215
|
-
}
|
|
1216
|
-
/**
|
|
1217
|
-
* Ensure required steps exist in a workflow job's steps array.
|
|
1218
|
-
* Only adds missing steps at the end — never modifies existing ones.
|
|
1219
|
-
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
1220
|
-
*/
|
|
1221
|
-
function mergeWorkflowSteps(existing, jobName, requiredSteps) {
|
|
1222
|
-
if (isToolingIgnored(existing)) return {
|
|
1223
|
-
content: existing,
|
|
1224
|
-
changed: false
|
|
1225
|
-
};
|
|
1226
|
-
try {
|
|
1227
|
-
const doc = parseDocument(existing);
|
|
1228
|
-
const steps = doc.getIn([
|
|
1229
|
-
"jobs",
|
|
1230
|
-
jobName,
|
|
1231
|
-
"steps"
|
|
1232
|
-
]);
|
|
1233
|
-
if (!isSeq(steps)) return {
|
|
1234
|
-
content: existing,
|
|
1235
|
-
changed: false
|
|
1236
|
-
};
|
|
1237
|
-
let changed = false;
|
|
1238
|
-
for (const { match, step } of requiredSteps) if (!steps.items.some((item) => {
|
|
1239
|
-
if (!isMap(item)) return false;
|
|
1240
|
-
if (match.run) {
|
|
1241
|
-
const run = item.get("run");
|
|
1242
|
-
return typeof run === "string" && run.includes(match.run);
|
|
1243
|
-
}
|
|
1244
|
-
if (match.uses) {
|
|
1245
|
-
const uses = item.get("uses");
|
|
1246
|
-
return typeof uses === "string" && uses.startsWith(match.uses);
|
|
1247
|
-
}
|
|
1248
|
-
return false;
|
|
1249
|
-
})) {
|
|
1250
|
-
steps.add(doc.createNode(step));
|
|
1251
|
-
changed = true;
|
|
1252
|
-
}
|
|
1253
|
-
return {
|
|
1254
|
-
content: changed ? doc.toString() : existing,
|
|
1255
|
-
changed
|
|
1256
|
-
};
|
|
1257
|
-
} catch {
|
|
1258
|
-
return {
|
|
1259
|
-
content: existing,
|
|
1260
|
-
changed: false
|
|
1261
|
-
};
|
|
1262
|
-
}
|
|
1263
|
-
}
|
|
1264
|
-
/**
|
|
1265
|
-
* Add a job to an existing workflow YAML if it doesn't already exist.
|
|
1266
|
-
* Returns unchanged content if the job already exists, the file has an opt-out comment,
|
|
1267
|
-
* or the document can't be parsed.
|
|
1268
|
-
*/
|
|
1269
|
-
function addWorkflowJob(existing, jobName, jobConfig) {
|
|
1270
|
-
if (isToolingIgnored(existing)) return {
|
|
1271
|
-
content: existing,
|
|
1272
|
-
changed: false
|
|
1273
|
-
};
|
|
1274
|
-
try {
|
|
1275
|
-
const doc = parseDocument(existing);
|
|
1276
|
-
const jobs = doc.getIn(["jobs"]);
|
|
1277
|
-
if (!isMap(jobs)) return {
|
|
1278
|
-
content: existing,
|
|
1279
|
-
changed: false
|
|
1280
|
-
};
|
|
1281
|
-
if (jobs.has(jobName)) return {
|
|
1282
|
-
content: existing,
|
|
1283
|
-
changed: false
|
|
1284
|
-
};
|
|
1285
|
-
jobs.set(jobName, doc.createNode(jobConfig));
|
|
1286
|
-
return {
|
|
1287
|
-
content: doc.toString(),
|
|
1288
|
-
changed: true
|
|
1501
|
+
action: "skipped",
|
|
1502
|
+
description: "Only optional entries missing"
|
|
1289
1503
|
};
|
|
1290
|
-
} catch {
|
|
1291
1504
|
return {
|
|
1292
|
-
|
|
1293
|
-
|
|
1505
|
+
filePath,
|
|
1506
|
+
action: "updated",
|
|
1507
|
+
description: `Appended ${String(missing.length)} missing entries`
|
|
1294
1508
|
};
|
|
1295
1509
|
}
|
|
1510
|
+
ctx.write(filePath, ALL_ENTRIES.join("\n") + "\n");
|
|
1511
|
+
return {
|
|
1512
|
+
filePath,
|
|
1513
|
+
action: "created",
|
|
1514
|
+
description: "Generated .gitignore"
|
|
1515
|
+
};
|
|
1296
1516
|
}
|
|
1297
1517
|
//#endregion
|
|
1298
1518
|
//#region src/generators/ci.ts
|
|
1299
|
-
|
|
1519
|
+
/** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
|
|
1520
|
+
function actionsExpr$1(expr) {
|
|
1521
|
+
return `\${{ ${expr} }}`;
|
|
1522
|
+
}
|
|
1523
|
+
const CI_CONCURRENCY = {
|
|
1524
|
+
group: `ci-${actionsExpr$1("github.ref")}`,
|
|
1525
|
+
"cancel-in-progress": true
|
|
1526
|
+
};
|
|
1527
|
+
function hasEnginesNode$1(ctx) {
|
|
1300
1528
|
const raw = ctx.read("package.json");
|
|
1301
1529
|
if (!raw) return false;
|
|
1302
1530
|
return typeof parsePackageJson(raw)?.engines?.["node"] === "string";
|
|
@@ -1309,6 +1537,10 @@ ${emailNotifications}on:
|
|
|
1309
1537
|
branches: [main]
|
|
1310
1538
|
pull_request:
|
|
1311
1539
|
|
|
1540
|
+
concurrency:
|
|
1541
|
+
group: ci-${actionsExpr$1("github.ref")}
|
|
1542
|
+
cancel-in-progress: true
|
|
1543
|
+
|
|
1312
1544
|
jobs:
|
|
1313
1545
|
check:
|
|
1314
1546
|
runs-on: ubuntu-latest
|
|
@@ -1364,15 +1596,16 @@ async function generateCi(ctx) {
|
|
|
1364
1596
|
description: "CI workflow not requested"
|
|
1365
1597
|
};
|
|
1366
1598
|
const isGitHub = ctx.config.ci === "github";
|
|
1367
|
-
const nodeVersionYaml = hasEnginesNode$
|
|
1599
|
+
const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
|
|
1368
1600
|
const filePath = isGitHub ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
|
|
1369
1601
|
const content = ciWorkflow(nodeVersionYaml, !isGitHub);
|
|
1370
1602
|
if (ctx.exists(filePath)) {
|
|
1371
1603
|
const existing = ctx.read(filePath);
|
|
1372
1604
|
if (existing) {
|
|
1373
1605
|
const merged = mergeWorkflowSteps(existing, "check", requiredCheckSteps(nodeVersionYaml));
|
|
1374
|
-
const
|
|
1375
|
-
|
|
1606
|
+
const withConcurrency = ensureWorkflowConcurrency(merged.content, CI_CONCURRENCY);
|
|
1607
|
+
const withComment = ensureSchemaComment(withConcurrency.content, isGitHub ? "github" : "forgejo");
|
|
1608
|
+
if (merged.changed || withConcurrency.changed || withComment !== withConcurrency.content) {
|
|
1376
1609
|
ctx.write(filePath, withComment);
|
|
1377
1610
|
return {
|
|
1378
1611
|
filePath,
|
|
@@ -1841,13 +2074,13 @@ async function generateChangesets(ctx) {
|
|
|
1841
2074
|
//#endregion
|
|
1842
2075
|
//#region src/generators/release-ci.ts
|
|
1843
2076
|
/** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
|
|
1844
|
-
function actionsExpr
|
|
2077
|
+
function actionsExpr(expr) {
|
|
1845
2078
|
return `\${{ ${expr} }}`;
|
|
1846
2079
|
}
|
|
1847
|
-
function hasEnginesNode
|
|
2080
|
+
function hasEnginesNode(ctx) {
|
|
1848
2081
|
return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
|
|
1849
2082
|
}
|
|
1850
|
-
function commonSteps(nodeVersionYaml) {
|
|
2083
|
+
function commonSteps(nodeVersionYaml, publishesNpm) {
|
|
1851
2084
|
return ` - uses: actions/checkout@v4
|
|
1852
2085
|
with:
|
|
1853
2086
|
fetch-depth: 0
|
|
@@ -1855,18 +2088,18 @@ function commonSteps(nodeVersionYaml) {
|
|
|
1855
2088
|
- uses: actions/setup-node@v4
|
|
1856
2089
|
with:
|
|
1857
2090
|
${nodeVersionYaml}
|
|
1858
|
-
cache: pnpm
|
|
1859
|
-
registry-url: "https://registry.npmjs.org"
|
|
2091
|
+
cache: pnpm${publishesNpm ? `\n registry-url: "https://registry.npmjs.org"` : ""}
|
|
1860
2092
|
- run: pnpm install --frozen-lockfile
|
|
1861
2093
|
- run: pnpm build`;
|
|
1862
2094
|
}
|
|
1863
|
-
function releaseItWorkflow(ci, nodeVersionYaml) {
|
|
2095
|
+
function releaseItWorkflow(ci, nodeVersionYaml, publishesNpm) {
|
|
1864
2096
|
const isGitHub = ci === "github";
|
|
1865
2097
|
const permissions = isGitHub ? `
|
|
1866
2098
|
permissions:
|
|
1867
2099
|
contents: write
|
|
1868
2100
|
` : "";
|
|
1869
2101
|
const tokenEnv = isGitHub ? `GITHUB_TOKEN: \${{ github.token }}` : `FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}`;
|
|
2102
|
+
const npmEnv = publishesNpm ? `\n NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}` : "";
|
|
1870
2103
|
return `${workflowSchemaComment(ci)}name: Release
|
|
1871
2104
|
on:
|
|
1872
2105
|
workflow_dispatch:
|
|
@@ -1875,14 +2108,13 @@ jobs:
|
|
|
1875
2108
|
release:
|
|
1876
2109
|
runs-on: ubuntu-latest
|
|
1877
2110
|
steps:
|
|
1878
|
-
${commonSteps(nodeVersionYaml)}
|
|
2111
|
+
${commonSteps(nodeVersionYaml, publishesNpm)}
|
|
1879
2112
|
- run: pnpm release-it --ci
|
|
1880
2113
|
env:
|
|
1881
|
-
${tokenEnv}
|
|
1882
|
-
NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
|
|
2114
|
+
${tokenEnv}${npmEnv}
|
|
1883
2115
|
`;
|
|
1884
2116
|
}
|
|
1885
|
-
function commitAndTagVersionWorkflow(ci, nodeVersionYaml) {
|
|
2117
|
+
function commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm) {
|
|
1886
2118
|
const isGitHub = ci === "github";
|
|
1887
2119
|
const permissions = isGitHub ? `
|
|
1888
2120
|
permissions:
|
|
@@ -1912,52 +2144,69 @@ jobs:
|
|
|
1912
2144
|
release:
|
|
1913
2145
|
runs-on: ubuntu-latest
|
|
1914
2146
|
steps:
|
|
1915
|
-
${commonSteps(nodeVersionYaml)}${gitConfigStep}${releaseStep}
|
|
2147
|
+
${commonSteps(nodeVersionYaml, publishesNpm)}${gitConfigStep}${releaseStep}
|
|
1916
2148
|
`;
|
|
1917
2149
|
}
|
|
1918
|
-
function changesetsReleaseJobConfig(ci, nodeVersionYaml) {
|
|
2150
|
+
function changesetsReleaseJobConfig(ci, nodeVersionYaml, publishesNpm) {
|
|
1919
2151
|
const isGitHub = ci === "github";
|
|
1920
2152
|
const nodeWith = {
|
|
1921
2153
|
...nodeVersionYaml.startsWith("node-version-file") ? { "node-version-file": "package.json" } : { "node-version": "24" },
|
|
1922
2154
|
cache: "pnpm",
|
|
1923
|
-
"registry-url": "https://registry.npmjs.org"
|
|
2155
|
+
...publishesNpm && { "registry-url": "https://registry.npmjs.org" }
|
|
1924
2156
|
};
|
|
1925
|
-
if (isGitHub)
|
|
1926
|
-
|
|
1927
|
-
|
|
1928
|
-
|
|
1929
|
-
|
|
1930
|
-
|
|
1931
|
-
|
|
1932
|
-
|
|
1933
|
-
|
|
1934
|
-
|
|
1935
|
-
|
|
1936
|
-
with: { "fetch-depth": 0 }
|
|
2157
|
+
if (isGitHub) {
|
|
2158
|
+
const changesetsEnv = {
|
|
2159
|
+
GITHUB_TOKEN: actionsExpr("github.token"),
|
|
2160
|
+
...publishesNpm && { NPM_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
|
|
2161
|
+
};
|
|
2162
|
+
return {
|
|
2163
|
+
needs: "check",
|
|
2164
|
+
if: "github.ref == 'refs/heads/main'",
|
|
2165
|
+
concurrency: {
|
|
2166
|
+
group: "release",
|
|
2167
|
+
"cancel-in-progress": false
|
|
1937
2168
|
},
|
|
1938
|
-
|
|
1939
|
-
{
|
|
1940
|
-
|
|
1941
|
-
|
|
2169
|
+
"runs-on": "ubuntu-latest",
|
|
2170
|
+
permissions: {
|
|
2171
|
+
contents: "write",
|
|
2172
|
+
"pull-requests": "write"
|
|
1942
2173
|
},
|
|
1943
|
-
|
|
1944
|
-
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
2174
|
+
steps: [
|
|
2175
|
+
{
|
|
2176
|
+
uses: "actions/checkout@v4",
|
|
2177
|
+
with: { "fetch-depth": 0 }
|
|
2178
|
+
},
|
|
2179
|
+
{ uses: "pnpm/action-setup@v4" },
|
|
2180
|
+
{
|
|
2181
|
+
uses: "actions/setup-node@v4",
|
|
2182
|
+
with: nodeWith
|
|
1950
2183
|
},
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
|
|
2184
|
+
{ run: "pnpm install --frozen-lockfile" },
|
|
2185
|
+
{ run: "pnpm build" },
|
|
2186
|
+
{
|
|
2187
|
+
uses: "changesets/action@v1",
|
|
2188
|
+
with: {
|
|
2189
|
+
publish: "pnpm changeset publish",
|
|
2190
|
+
version: "pnpm changeset version"
|
|
2191
|
+
},
|
|
2192
|
+
env: changesetsEnv
|
|
1954
2193
|
}
|
|
1955
|
-
|
|
1956
|
-
|
|
2194
|
+
]
|
|
2195
|
+
};
|
|
2196
|
+
}
|
|
2197
|
+
const releaseEnv = {
|
|
2198
|
+
FORGEJO_SERVER_URL: actionsExpr("github.server_url"),
|
|
2199
|
+
FORGEJO_REPOSITORY: actionsExpr("github.repository"),
|
|
2200
|
+
FORGEJO_TOKEN: actionsExpr("secrets.FORGEJO_TOKEN"),
|
|
2201
|
+
...publishesNpm && { NODE_AUTH_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
|
|
1957
2202
|
};
|
|
1958
2203
|
return {
|
|
1959
2204
|
needs: "check",
|
|
1960
2205
|
if: "github.ref == 'refs/heads/main'",
|
|
2206
|
+
concurrency: {
|
|
2207
|
+
group: "release",
|
|
2208
|
+
"cancel-in-progress": false
|
|
2209
|
+
},
|
|
1961
2210
|
"runs-on": "ubuntu-latest",
|
|
1962
2211
|
steps: [
|
|
1963
2212
|
{
|
|
@@ -1977,18 +2226,13 @@ function changesetsReleaseJobConfig(ci, nodeVersionYaml) {
|
|
|
1977
2226
|
},
|
|
1978
2227
|
{
|
|
1979
2228
|
name: "Release",
|
|
1980
|
-
env:
|
|
1981
|
-
FORGEJO_SERVER_URL: actionsExpr$1("github.server_url"),
|
|
1982
|
-
FORGEJO_REPOSITORY: actionsExpr$1("github.repository"),
|
|
1983
|
-
FORGEJO_TOKEN: actionsExpr$1("secrets.FORGEJO_TOKEN"),
|
|
1984
|
-
NODE_AUTH_TOKEN: actionsExpr$1("secrets.NPM_TOKEN")
|
|
1985
|
-
},
|
|
2229
|
+
env: releaseEnv,
|
|
1986
2230
|
run: "pnpm exec tooling release:changesets"
|
|
1987
2231
|
}
|
|
1988
2232
|
]
|
|
1989
2233
|
};
|
|
1990
2234
|
}
|
|
1991
|
-
function requiredReleaseSteps(strategy, nodeVersionYaml) {
|
|
2235
|
+
function requiredReleaseSteps(strategy, nodeVersionYaml, publishesNpm) {
|
|
1992
2236
|
const isNodeVersionFile = nodeVersionYaml.startsWith("node-version-file");
|
|
1993
2237
|
const steps = [
|
|
1994
2238
|
{
|
|
@@ -2009,7 +2253,7 @@ function requiredReleaseSteps(strategy, nodeVersionYaml) {
|
|
|
2009
2253
|
with: {
|
|
2010
2254
|
...isNodeVersionFile ? { "node-version-file": "package.json" } : { "node-version": "24" },
|
|
2011
2255
|
cache: "pnpm",
|
|
2012
|
-
"registry-url": "https://registry.npmjs.org"
|
|
2256
|
+
...publishesNpm && { "registry-url": "https://registry.npmjs.org" }
|
|
2013
2257
|
}
|
|
2014
2258
|
}
|
|
2015
2259
|
},
|
|
@@ -2044,23 +2288,23 @@ function requiredReleaseSteps(strategy, nodeVersionYaml) {
|
|
|
2044
2288
|
}
|
|
2045
2289
|
return steps;
|
|
2046
2290
|
}
|
|
2047
|
-
function buildWorkflow(strategy, ci, nodeVersionYaml) {
|
|
2291
|
+
function buildWorkflow(strategy, ci, nodeVersionYaml, publishesNpm) {
|
|
2048
2292
|
switch (strategy) {
|
|
2049
|
-
case "release-it": return releaseItWorkflow(ci, nodeVersionYaml);
|
|
2050
|
-
case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml);
|
|
2293
|
+
case "release-it": return releaseItWorkflow(ci, nodeVersionYaml, publishesNpm);
|
|
2294
|
+
case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm);
|
|
2051
2295
|
default: return null;
|
|
2052
2296
|
}
|
|
2053
2297
|
}
|
|
2054
|
-
function generateChangesetsReleaseCi(ctx) {
|
|
2298
|
+
function generateChangesetsReleaseCi(ctx, publishesNpm) {
|
|
2055
2299
|
const checkPath = ctx.config.ci === "github" ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
|
|
2056
|
-
const nodeVersionYaml = hasEnginesNode
|
|
2300
|
+
const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
|
|
2057
2301
|
const existing = ctx.read(checkPath);
|
|
2058
2302
|
if (!existing) return {
|
|
2059
2303
|
filePath: checkPath,
|
|
2060
2304
|
action: "skipped",
|
|
2061
2305
|
description: "CI workflow not found — run check generator first"
|
|
2062
2306
|
};
|
|
2063
|
-
const addResult = addWorkflowJob(existing, "release", changesetsReleaseJobConfig(ctx.config.ci, nodeVersionYaml));
|
|
2307
|
+
const addResult = addWorkflowJob(existing, "release", changesetsReleaseJobConfig(ctx.config.ci, nodeVersionYaml, publishesNpm));
|
|
2064
2308
|
if (addResult.changed) {
|
|
2065
2309
|
const withComment = ensureSchemaComment(addResult.content, ctx.config.ci);
|
|
2066
2310
|
ctx.write(checkPath, withComment);
|
|
@@ -2070,7 +2314,7 @@ function generateChangesetsReleaseCi(ctx) {
|
|
|
2070
2314
|
description: "Added release job to CI workflow"
|
|
2071
2315
|
};
|
|
2072
2316
|
}
|
|
2073
|
-
const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps("changesets", nodeVersionYaml));
|
|
2317
|
+
const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps("changesets", nodeVersionYaml, publishesNpm));
|
|
2074
2318
|
if (!merged.changed) return {
|
|
2075
2319
|
filePath: checkPath,
|
|
2076
2320
|
action: "skipped",
|
|
@@ -2091,11 +2335,12 @@ async function generateReleaseCi(ctx) {
|
|
|
2091
2335
|
action: "skipped",
|
|
2092
2336
|
description: "Release CI workflow not applicable"
|
|
2093
2337
|
};
|
|
2094
|
-
|
|
2338
|
+
const publishesNpm = getPublishablePackages(ctx.targetDir, ctx.config.structure, ctx.packageJson).length > 0;
|
|
2339
|
+
if (ctx.config.releaseStrategy === "changesets") return generateChangesetsReleaseCi(ctx, publishesNpm);
|
|
2095
2340
|
const isGitHub = ctx.config.ci === "github";
|
|
2096
2341
|
const workflowPath = isGitHub ? ".github/workflows/release.yml" : ".forgejo/workflows/release.yml";
|
|
2097
|
-
const nodeVersionYaml = hasEnginesNode
|
|
2098
|
-
const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml);
|
|
2342
|
+
const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
|
|
2343
|
+
const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml, publishesNpm);
|
|
2099
2344
|
if (!content) return {
|
|
2100
2345
|
filePath,
|
|
2101
2346
|
action: "skipped",
|
|
@@ -2109,7 +2354,7 @@ async function generateReleaseCi(ctx) {
|
|
|
2109
2354
|
action: "skipped",
|
|
2110
2355
|
description: "Release workflow already up to date"
|
|
2111
2356
|
};
|
|
2112
|
-
const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps(ctx.config.releaseStrategy, nodeVersionYaml));
|
|
2357
|
+
const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps(ctx.config.releaseStrategy, nodeVersionYaml, publishesNpm));
|
|
2113
2358
|
const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
|
|
2114
2359
|
if (withComment === content) {
|
|
2115
2360
|
ctx.write(workflowPath, content);
|
|
@@ -2312,7 +2557,7 @@ const SCHEMA_NPM_PATH = "@bensandee/config/schemas/forgejo-workflow.schema.json"
|
|
|
2312
2557
|
const SCHEMA_LOCAL_PATH = ".vscode/forgejo-workflow.schema.json";
|
|
2313
2558
|
const SETTINGS_PATH = ".vscode/settings.json";
|
|
2314
2559
|
const SCHEMA_GLOB = ".forgejo/workflows/*.{yml,yaml}";
|
|
2315
|
-
const VscodeSettingsSchema = z.object({ "yaml.schemas": z.record(z.string(), z.unknown()).default({}) }).
|
|
2560
|
+
const VscodeSettingsSchema = z.object({ "yaml.schemas": z.record(z.string(), z.unknown()).default({}) }).loose();
|
|
2316
2561
|
function readSchemaFromNodeModules(targetDir) {
|
|
2317
2562
|
const candidate = path.join(targetDir, "node_modules", SCHEMA_NPM_PATH);
|
|
2318
2563
|
if (!existsSync(candidate)) return void 0;
|
|
@@ -2410,148 +2655,6 @@ async function generateVscodeSettings(ctx) {
|
|
|
2410
2655
|
return results;
|
|
2411
2656
|
}
|
|
2412
2657
|
//#endregion
|
|
2413
|
-
//#region src/generators/deploy-ci.ts
|
|
2414
|
-
/** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
|
|
2415
|
-
function actionsExpr(expr) {
|
|
2416
|
-
return `\${{ ${expr} }}`;
|
|
2417
|
-
}
|
|
2418
|
-
function hasEnginesNode(ctx) {
|
|
2419
|
-
return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
|
|
2420
|
-
}
|
|
2421
|
-
function deployWorkflow(ci, nodeVersionYaml) {
|
|
2422
|
-
return `${workflowSchemaComment(ci)}name: Deploy
|
|
2423
|
-
on:
|
|
2424
|
-
push:
|
|
2425
|
-
tags:
|
|
2426
|
-
- "v[0-9]+.[0-9]+.[0-9]+"
|
|
2427
|
-
|
|
2428
|
-
jobs:
|
|
2429
|
-
deploy:
|
|
2430
|
-
runs-on: ubuntu-latest
|
|
2431
|
-
steps:
|
|
2432
|
-
- uses: actions/checkout@v4
|
|
2433
|
-
- uses: pnpm/action-setup@v4
|
|
2434
|
-
- uses: actions/setup-node@v4
|
|
2435
|
-
with:
|
|
2436
|
-
${nodeVersionYaml}
|
|
2437
|
-
- run: pnpm install --frozen-lockfile
|
|
2438
|
-
- name: Publish Docker images
|
|
2439
|
-
env:
|
|
2440
|
-
DOCKER_REGISTRY_HOST: ${actionsExpr("vars.DOCKER_REGISTRY_HOST")}
|
|
2441
|
-
DOCKER_REGISTRY_NAMESPACE: ${actionsExpr("vars.DOCKER_REGISTRY_NAMESPACE")}
|
|
2442
|
-
DOCKER_USERNAME: ${actionsExpr("secrets.DOCKER_USERNAME")}
|
|
2443
|
-
DOCKER_PASSWORD: ${actionsExpr("secrets.DOCKER_PASSWORD")}
|
|
2444
|
-
run: pnpm exec tooling docker:publish
|
|
2445
|
-
`;
|
|
2446
|
-
}
|
|
2447
|
-
function requiredDeploySteps() {
|
|
2448
|
-
return [
|
|
2449
|
-
{
|
|
2450
|
-
match: { uses: "actions/checkout" },
|
|
2451
|
-
step: { uses: "actions/checkout@v4" }
|
|
2452
|
-
},
|
|
2453
|
-
{
|
|
2454
|
-
match: { uses: "pnpm/action-setup" },
|
|
2455
|
-
step: { uses: "pnpm/action-setup@v4" }
|
|
2456
|
-
},
|
|
2457
|
-
{
|
|
2458
|
-
match: { uses: "actions/setup-node" },
|
|
2459
|
-
step: { uses: "actions/setup-node@v4" }
|
|
2460
|
-
},
|
|
2461
|
-
{
|
|
2462
|
-
match: { run: "pnpm install" },
|
|
2463
|
-
step: { run: "pnpm install --frozen-lockfile" }
|
|
2464
|
-
},
|
|
2465
|
-
{
|
|
2466
|
-
match: { run: "docker:publish" },
|
|
2467
|
-
step: { run: "pnpm exec tooling docker:publish" }
|
|
2468
|
-
}
|
|
2469
|
-
];
|
|
2470
|
-
}
|
|
2471
|
-
/** Convention paths to check for Dockerfiles. */
|
|
2472
|
-
const CONVENTION_DOCKERFILE_PATHS$1 = ["Dockerfile", "docker/Dockerfile"];
|
|
2473
|
-
const DockerMapSchema = z.object({ docker: z.record(z.string(), z.unknown()).optional() });
|
|
2474
|
-
/** Check whether any Docker packages exist by convention or .tooling.json config. */
|
|
2475
|
-
function hasDockerPackages(ctx) {
|
|
2476
|
-
const configRaw = ctx.read(".tooling.json");
|
|
2477
|
-
if (configRaw) {
|
|
2478
|
-
const result = DockerMapSchema.safeParse(JSON.parse(configRaw));
|
|
2479
|
-
if (result.success && result.data.docker && Object.keys(result.data.docker).length > 0) return true;
|
|
2480
|
-
}
|
|
2481
|
-
if (ctx.config.structure === "monorepo") {
|
|
2482
|
-
const packages = getMonorepoPackages(ctx.targetDir);
|
|
2483
|
-
for (const pkg of packages) {
|
|
2484
|
-
const dirName = pkg.name.split("/").pop() ?? pkg.name;
|
|
2485
|
-
for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(`packages/${dirName}/${rel}`)) return true;
|
|
2486
|
-
}
|
|
2487
|
-
} else for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(rel)) return true;
|
|
2488
|
-
return false;
|
|
2489
|
-
}
|
|
2490
|
-
async function generateDeployCi(ctx) {
|
|
2491
|
-
const filePath = "deploy-ci";
|
|
2492
|
-
if (!hasDockerPackages(ctx) || ctx.config.ci === "none") return {
|
|
2493
|
-
filePath,
|
|
2494
|
-
action: "skipped",
|
|
2495
|
-
description: "Deploy CI workflow not applicable"
|
|
2496
|
-
};
|
|
2497
|
-
const isGitHub = ctx.config.ci === "github";
|
|
2498
|
-
const workflowPath = isGitHub ? ".github/workflows/publish.yml" : ".forgejo/workflows/publish.yml";
|
|
2499
|
-
const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
|
|
2500
|
-
const content = deployWorkflow(ctx.config.ci, nodeVersionYaml);
|
|
2501
|
-
if (ctx.exists(workflowPath)) {
|
|
2502
|
-
const existing = ctx.read(workflowPath);
|
|
2503
|
-
if (existing) {
|
|
2504
|
-
if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
|
|
2505
|
-
filePath: workflowPath,
|
|
2506
|
-
action: "skipped",
|
|
2507
|
-
description: "Deploy workflow already up to date"
|
|
2508
|
-
};
|
|
2509
|
-
const merged = mergeWorkflowSteps(existing, "deploy", requiredDeploySteps());
|
|
2510
|
-
const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
|
|
2511
|
-
if (withComment === content) {
|
|
2512
|
-
ctx.write(workflowPath, content);
|
|
2513
|
-
return {
|
|
2514
|
-
filePath: workflowPath,
|
|
2515
|
-
action: "updated",
|
|
2516
|
-
description: "Added missing steps to deploy workflow"
|
|
2517
|
-
};
|
|
2518
|
-
}
|
|
2519
|
-
if (await ctx.confirmOverwrite(workflowPath) === "skip") {
|
|
2520
|
-
if (merged.changed || withComment !== merged.content) {
|
|
2521
|
-
ctx.write(workflowPath, withComment);
|
|
2522
|
-
return {
|
|
2523
|
-
filePath: workflowPath,
|
|
2524
|
-
action: "updated",
|
|
2525
|
-
description: "Added missing steps to deploy workflow"
|
|
2526
|
-
};
|
|
2527
|
-
}
|
|
2528
|
-
return {
|
|
2529
|
-
filePath: workflowPath,
|
|
2530
|
-
action: "skipped",
|
|
2531
|
-
description: "Existing deploy workflow preserved"
|
|
2532
|
-
};
|
|
2533
|
-
}
|
|
2534
|
-
ctx.write(workflowPath, content);
|
|
2535
|
-
return {
|
|
2536
|
-
filePath: workflowPath,
|
|
2537
|
-
action: "updated",
|
|
2538
|
-
description: "Replaced deploy workflow with updated template"
|
|
2539
|
-
};
|
|
2540
|
-
}
|
|
2541
|
-
return {
|
|
2542
|
-
filePath: workflowPath,
|
|
2543
|
-
action: "skipped",
|
|
2544
|
-
description: "Deploy workflow already up to date"
|
|
2545
|
-
};
|
|
2546
|
-
}
|
|
2547
|
-
ctx.write(workflowPath, content);
|
|
2548
|
-
return {
|
|
2549
|
-
filePath: workflowPath,
|
|
2550
|
-
action: "created",
|
|
2551
|
-
description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions deploy workflow`
|
|
2552
|
-
};
|
|
2553
|
-
}
|
|
2554
|
-
//#endregion
|
|
2555
2658
|
//#region src/generators/pipeline.ts
|
|
2556
2659
|
/** Run all generators sequentially and return their results. */
|
|
2557
2660
|
async function runGenerators(ctx) {
|
|
@@ -2748,6 +2851,16 @@ function generateMigratePrompt(results, config, detected) {
|
|
|
2748
2851
|
}
|
|
2749
2852
|
//#endregion
|
|
2750
2853
|
//#region src/commands/repo-init.ts
|
|
2854
|
+
/** Log what was detected so the user understands generator decisions. */
|
|
2855
|
+
function logDetectionSummary(ctx) {
|
|
2856
|
+
const dockerNames = getDockerPackageNames(ctx);
|
|
2857
|
+
if (dockerNames.length > 0) p.log.info(`Detected Docker packages: ${dockerNames.join(", ")}`);
|
|
2858
|
+
if (ctx.config.releaseStrategy !== "none") {
|
|
2859
|
+
const publishable = getPublishablePackages(ctx.targetDir, ctx.config.structure, ctx.packageJson);
|
|
2860
|
+
if (publishable.length > 0) p.log.info(`Will publish npm packages: ${publishable.map((pkg) => pkg.name).join(", ")}`);
|
|
2861
|
+
else p.log.info("No publishable npm packages — npm registry setup will be skipped");
|
|
2862
|
+
}
|
|
2863
|
+
}
|
|
2751
2864
|
async function runInit(config, options = {}) {
|
|
2752
2865
|
const detected = detectProject(config.targetDir);
|
|
2753
2866
|
const s = p.spinner();
|
|
@@ -2767,6 +2880,7 @@ async function runInit(config, options = {}) {
|
|
|
2767
2880
|
if (p.isCancel(result)) return "skip";
|
|
2768
2881
|
return result;
|
|
2769
2882
|
}));
|
|
2883
|
+
logDetectionSummary(ctx);
|
|
2770
2884
|
s.start("Generating configuration files...");
|
|
2771
2885
|
const results = await runGenerators(ctx);
|
|
2772
2886
|
const alreadyArchived = new Set(results.filter((r) => r.action === "archived").map((r) => r.filePath));
|
|
@@ -2777,8 +2891,7 @@ async function runInit(config, options = {}) {
|
|
|
2777
2891
|
});
|
|
2778
2892
|
const created = results.filter((r) => r.action === "created");
|
|
2779
2893
|
const updated = results.filter((r) => r.action === "updated");
|
|
2780
|
-
|
|
2781
|
-
if (!(created.length > 0 || updated.length > 0 || archived.length > 0) && options.noPrompt) {
|
|
2894
|
+
if (!(created.length > 0 || updated.length > 0 || archivedFiles.length > 0) && options.noPrompt) {
|
|
2782
2895
|
s.stop("Repository is up to date.");
|
|
2783
2896
|
return results;
|
|
2784
2897
|
}
|
|
@@ -2792,7 +2905,6 @@ async function runInit(config, options = {}) {
|
|
|
2792
2905
|
const summaryLines = [];
|
|
2793
2906
|
if (created.length > 0) summaryLines.push(`Created: ${created.map((r) => r.filePath).join(", ")}`);
|
|
2794
2907
|
if (updated.length > 0) summaryLines.push(`Updated: ${updated.map((r) => r.filePath).join(", ")}`);
|
|
2795
|
-
if (archived.length > 0) summaryLines.push(`Archived: ${archived.map((r) => r.filePath).join(", ")}`);
|
|
2796
2908
|
p.note(summaryLines.join("\n"), "Summary");
|
|
2797
2909
|
if (!options.noPrompt) {
|
|
2798
2910
|
const prompt = generateMigratePrompt(results, config, detected);
|
|
@@ -2887,6 +2999,7 @@ async function runCheck(targetDir) {
|
|
|
2887
2999
|
const saved = loadToolingConfig(targetDir);
|
|
2888
3000
|
const detected = buildDefaultConfig(targetDir, {});
|
|
2889
3001
|
const { ctx, pendingWrites } = createDryRunContext(saved ? mergeWithSavedConfig(detected, saved) : detected);
|
|
3002
|
+
logDetectionSummary(ctx);
|
|
2890
3003
|
const actionable = (await runGenerators(ctx)).filter((r) => {
|
|
2891
3004
|
if (r.action !== "created" && r.action !== "updated") return false;
|
|
2892
3005
|
const newContent = pendingWrites.get(r.filePath);
|
|
@@ -3803,7 +3916,7 @@ const CHECKS = [
|
|
|
3803
3916
|
},
|
|
3804
3917
|
{ name: "knip" },
|
|
3805
3918
|
{ name: "tooling:check" },
|
|
3806
|
-
{ name: "
|
|
3919
|
+
{ name: "docker:check" }
|
|
3807
3920
|
];
|
|
3808
3921
|
function defaultGetScripts(targetDir) {
|
|
3809
3922
|
try {
|
|
@@ -3870,7 +3983,7 @@ function runRunChecks(targetDir, options = {}) {
|
|
|
3870
3983
|
const runChecksCommand = defineCommand({
|
|
3871
3984
|
meta: {
|
|
3872
3985
|
name: "checks:run",
|
|
3873
|
-
description: "Run all standard checks (build, typecheck, lint, test, format, knip, tooling:check,
|
|
3986
|
+
description: "Run all standard checks (build, typecheck, lint, test, format, knip, tooling:check, docker:check)"
|
|
3874
3987
|
},
|
|
3875
3988
|
args: {
|
|
3876
3989
|
dir: {
|
|
@@ -3880,7 +3993,7 @@ const runChecksCommand = defineCommand({
|
|
|
3880
3993
|
},
|
|
3881
3994
|
skip: {
|
|
3882
3995
|
type: "string",
|
|
3883
|
-
description: "Comma-separated list of checks to skip (build, typecheck, lint, test, format, knip, tooling:check,
|
|
3996
|
+
description: "Comma-separated list of checks to skip (build, typecheck, lint, test, format, knip, tooling:check, docker:check)",
|
|
3884
3997
|
required: false
|
|
3885
3998
|
},
|
|
3886
3999
|
add: {
|
|
@@ -4073,7 +4186,7 @@ function generateTags(version) {
|
|
|
4073
4186
|
function imageRef(namespace, imageName, tag) {
|
|
4074
4187
|
return `${namespace}/${imageName}:${tag}`;
|
|
4075
4188
|
}
|
|
4076
|
-
function log(message) {
|
|
4189
|
+
function log$1(message) {
|
|
4077
4190
|
console.log(message);
|
|
4078
4191
|
}
|
|
4079
4192
|
function debug(verbose, message) {
|
|
@@ -4114,22 +4227,22 @@ function runDockerBuild(executor, config) {
|
|
|
4114
4227
|
const repoName = readRepoName(executor, config.cwd);
|
|
4115
4228
|
if (config.packageDir) {
|
|
4116
4229
|
const pkg = readSinglePackageDocker(executor, config.cwd, config.packageDir, repoName);
|
|
4117
|
-
log(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
|
|
4230
|
+
log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
|
|
4118
4231
|
buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
|
|
4119
|
-
log(`Built ${pkg.imageName}:latest`);
|
|
4232
|
+
log$1(`Built ${pkg.imageName}:latest`);
|
|
4120
4233
|
return { packages: [pkg] };
|
|
4121
4234
|
}
|
|
4122
4235
|
const packages = detectDockerPackages(executor, config.cwd, repoName);
|
|
4123
4236
|
if (packages.length === 0) {
|
|
4124
|
-
log("No packages with docker config found");
|
|
4237
|
+
log$1("No packages with docker config found");
|
|
4125
4238
|
return { packages: [] };
|
|
4126
4239
|
}
|
|
4127
|
-
log(`Found ${packages.length} Docker package(s): ${packages.map((p) => p.dir).join(", ")}`);
|
|
4240
|
+
log$1(`Found ${packages.length} Docker package(s): ${packages.map((p) => p.dir).join(", ")}`);
|
|
4128
4241
|
for (const pkg of packages) {
|
|
4129
|
-
log(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
|
|
4242
|
+
log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
|
|
4130
4243
|
buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
|
|
4131
4244
|
}
|
|
4132
|
-
log(`Built ${packages.length} image(s)`);
|
|
4245
|
+
log$1(`Built ${packages.length} image(s)`);
|
|
4133
4246
|
return { packages };
|
|
4134
4247
|
}
|
|
4135
4248
|
/**
|
|
@@ -4153,35 +4266,35 @@ function runDockerPublish(executor, config) {
|
|
|
4153
4266
|
};
|
|
4154
4267
|
for (const pkg of packages) if (!pkg.version) throw new FatalError(`Package ${pkg.dir} has docker config but no version in package.json`);
|
|
4155
4268
|
if (!config.dryRun) {
|
|
4156
|
-
log(`Logging in to ${config.registryHost}...`);
|
|
4269
|
+
log$1(`Logging in to ${config.registryHost}...`);
|
|
4157
4270
|
const loginResult = executor.exec(`echo "${config.password}" | docker login ${config.registryHost} -u ${config.username} --password-stdin`);
|
|
4158
4271
|
if (loginResult.exitCode !== 0) throw new FatalError(`Docker login failed: ${loginResult.stderr}`);
|
|
4159
|
-
} else log("[dry-run] Skipping docker login");
|
|
4272
|
+
} else log$1("[dry-run] Skipping docker login");
|
|
4160
4273
|
const allTags = [];
|
|
4161
4274
|
try {
|
|
4162
4275
|
for (const pkg of packages) {
|
|
4163
4276
|
const tags = generateTags(pkg.version ?? "");
|
|
4164
|
-
log(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
|
|
4277
|
+
log$1(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
|
|
4165
4278
|
for (const tag of tags) {
|
|
4166
4279
|
const ref = imageRef(config.registryNamespace, pkg.imageName, tag);
|
|
4167
4280
|
allTags.push(ref);
|
|
4168
|
-
log(`Tagging ${pkg.imageName} → ${ref}`);
|
|
4281
|
+
log$1(`Tagging ${pkg.imageName} → ${ref}`);
|
|
4169
4282
|
const tagResult = executor.exec(`docker tag ${pkg.imageName} ${ref}`);
|
|
4170
4283
|
if (tagResult.exitCode !== 0) throw new FatalError(`docker tag failed: ${tagResult.stderr}`);
|
|
4171
4284
|
if (!config.dryRun) {
|
|
4172
|
-
log(`Pushing ${ref}...`);
|
|
4285
|
+
log$1(`Pushing ${ref}...`);
|
|
4173
4286
|
const pushResult = executor.exec(`docker push ${ref}`);
|
|
4174
4287
|
if (pushResult.exitCode !== 0) throw new FatalError(`docker push failed: ${pushResult.stderr}`);
|
|
4175
|
-
} else log(`[dry-run] Skipping push for ${ref}`);
|
|
4288
|
+
} else log$1(`[dry-run] Skipping push for ${ref}`);
|
|
4176
4289
|
}
|
|
4177
4290
|
}
|
|
4178
4291
|
} finally {
|
|
4179
4292
|
if (!config.dryRun) {
|
|
4180
|
-
log(`Logging out from ${config.registryHost}...`);
|
|
4293
|
+
log$1(`Logging out from ${config.registryHost}...`);
|
|
4181
4294
|
executor.exec(`docker logout ${config.registryHost}`);
|
|
4182
4295
|
}
|
|
4183
4296
|
}
|
|
4184
|
-
log(`Published ${allTags.length} image tag(s)`);
|
|
4297
|
+
log$1(`Published ${allTags.length} image tag(s)`);
|
|
4185
4298
|
return {
|
|
4186
4299
|
packages,
|
|
4187
4300
|
tags: allTags
|
|
@@ -4257,7 +4370,7 @@ const dockerBuildCommand = defineCommand({
|
|
|
4257
4370
|
}
|
|
4258
4371
|
});
|
|
4259
4372
|
//#endregion
|
|
4260
|
-
//#region src/docker-
|
|
4373
|
+
//#region src/docker-check/detect.ts
|
|
4261
4374
|
/** Compose file names to scan, in priority order. */
|
|
4262
4375
|
const COMPOSE_FILE_CANDIDATES = [
|
|
4263
4376
|
"docker-compose.yaml",
|
|
@@ -4269,15 +4382,30 @@ const COMPOSE_FILE_CANDIDATES = [
|
|
|
4269
4382
|
const ComposePortSchema = z.union([z.string(), z.object({
|
|
4270
4383
|
published: z.union([z.string(), z.number()]),
|
|
4271
4384
|
target: z.union([z.string(), z.number()]).optional()
|
|
4272
|
-
}).
|
|
4385
|
+
}).loose()]);
|
|
4273
4386
|
const ComposeServiceSchema = z.object({
|
|
4387
|
+
image: z.string().optional(),
|
|
4274
4388
|
ports: z.array(ComposePortSchema).optional(),
|
|
4275
4389
|
healthcheck: z.unknown().optional()
|
|
4276
|
-
}).
|
|
4277
|
-
const ComposeFileSchema = z.object({ services: z.record(z.string(), ComposeServiceSchema).optional() }).
|
|
4278
|
-
/**
|
|
4390
|
+
}).loose();
|
|
4391
|
+
const ComposeFileSchema = z.object({ services: z.record(z.string(), ComposeServiceSchema).optional() }).loose();
|
|
4392
|
+
/** Directories to scan for compose files, in priority order. */
|
|
4393
|
+
const COMPOSE_DIR_CANDIDATES = [".", "docker"];
|
|
4394
|
+
/** Detect which compose files exist at conventional paths.
|
|
4395
|
+
* Returns the resolved directory and the matching file names. */
|
|
4279
4396
|
function detectComposeFiles(cwd) {
|
|
4280
|
-
|
|
4397
|
+
for (const dir of COMPOSE_DIR_CANDIDATES) {
|
|
4398
|
+
const absDir = path.resolve(cwd, dir);
|
|
4399
|
+
const files = COMPOSE_FILE_CANDIDATES.filter((name) => existsSync(path.join(absDir, name)));
|
|
4400
|
+
if (files.length > 0) return {
|
|
4401
|
+
dir: absDir,
|
|
4402
|
+
files
|
|
4403
|
+
};
|
|
4404
|
+
}
|
|
4405
|
+
return {
|
|
4406
|
+
dir: cwd,
|
|
4407
|
+
files: []
|
|
4408
|
+
};
|
|
4281
4409
|
}
|
|
4282
4410
|
/** Parse a single port mapping string and extract the host port. */
|
|
4283
4411
|
function parsePortString(port) {
|
|
@@ -4331,6 +4459,7 @@ function parseComposeServices(cwd, composeFiles) {
|
|
|
4331
4459
|
}
|
|
4332
4460
|
serviceMap.set(name, {
|
|
4333
4461
|
name,
|
|
4462
|
+
image: existing?.image ?? service.image,
|
|
4334
4463
|
hostPort,
|
|
4335
4464
|
hasHealthcheck: existing?.hasHealthcheck ?? service.healthcheck !== void 0
|
|
4336
4465
|
});
|
|
@@ -4338,6 +4467,15 @@ function parseComposeServices(cwd, composeFiles) {
|
|
|
4338
4467
|
}
|
|
4339
4468
|
return [...serviceMap.values()];
|
|
4340
4469
|
}
|
|
4470
|
+
/** Extract deduplicated bare image names (without tags) from parsed services. */
|
|
4471
|
+
function extractComposeImageNames(services) {
|
|
4472
|
+
const names = /* @__PURE__ */ new Set();
|
|
4473
|
+
for (const service of services) if (service.image) {
|
|
4474
|
+
const bare = service.image.split(":")[0];
|
|
4475
|
+
if (bare) names.add(bare);
|
|
4476
|
+
}
|
|
4477
|
+
return [...names];
|
|
4478
|
+
}
|
|
4341
4479
|
/** Generate health checks from parsed services: services with exposed ports get HTTP checks, unless they define a compose-level healthcheck. */
|
|
4342
4480
|
function deriveHealthChecks(services) {
|
|
4343
4481
|
return services.filter((s) => s.hostPort !== void 0 && !s.hasHealthcheck).map((s) => ({
|
|
@@ -4345,20 +4483,81 @@ function deriveHealthChecks(services) {
|
|
|
4345
4483
|
url: `http://localhost:${String(s.hostPort)}/`
|
|
4346
4484
|
}));
|
|
4347
4485
|
}
|
|
4486
|
+
/** Check overlay file name patterns, matched against the base compose file name. */
|
|
4487
|
+
const CHECK_OVERLAY_PATTERNS = [(base) => base.replace(/\.(yaml|yml)$/, `.check.$1`)];
|
|
4488
|
+
/** Detect a user-provided check overlay file alongside the base compose file. */
|
|
4489
|
+
function detectCheckOverlay(dir, baseFile) {
|
|
4490
|
+
for (const pattern of CHECK_OVERLAY_PATTERNS) {
|
|
4491
|
+
const candidate = pattern(baseFile);
|
|
4492
|
+
if (existsSync(path.join(dir, candidate))) return candidate;
|
|
4493
|
+
}
|
|
4494
|
+
}
|
|
4495
|
+
/** Detect a check env file in the compose directory. */
|
|
4496
|
+
function detectCheckEnvFile(dir) {
|
|
4497
|
+
if (existsSync(path.join(dir, "docker-compose.check.env"))) return "docker-compose.check.env";
|
|
4498
|
+
}
|
|
4499
|
+
/** Fast healthcheck intervals for the generated check overlay. */
|
|
4500
|
+
const CHECK_HEALTHCHECK = {
|
|
4501
|
+
interval: "5s",
|
|
4502
|
+
timeout: "5s",
|
|
4503
|
+
retries: 12,
|
|
4504
|
+
start_period: "10s",
|
|
4505
|
+
start_interval: "5s"
|
|
4506
|
+
};
|
|
4507
|
+
/**
|
|
4508
|
+
* Generate a check overlay YAML string from parsed services.
|
|
4509
|
+
* Sets `restart: "no"` on all services, and overrides healthcheck intervals
|
|
4510
|
+
* for services that define a healthcheck in the base compose file.
|
|
4511
|
+
*/
|
|
4512
|
+
function generateCheckOverlay(services) {
|
|
4513
|
+
const serviceOverrides = {};
|
|
4514
|
+
for (const service of services) {
|
|
4515
|
+
const override = { restart: "no" };
|
|
4516
|
+
if (service.hasHealthcheck) override["healthcheck"] = { ...CHECK_HEALTHCHECK };
|
|
4517
|
+
serviceOverrides[service.name] = override;
|
|
4518
|
+
}
|
|
4519
|
+
return stringify({ services: serviceOverrides });
|
|
4520
|
+
}
|
|
4348
4521
|
/** Auto-detect compose config from conventional file locations. */
|
|
4349
|
-
function
|
|
4350
|
-
const
|
|
4351
|
-
if (
|
|
4352
|
-
const
|
|
4522
|
+
function computeCheckDefaults(cwd) {
|
|
4523
|
+
const { dir, files } = detectComposeFiles(cwd);
|
|
4524
|
+
if (files.length === 0) return {};
|
|
4525
|
+
const baseFile = files[0];
|
|
4526
|
+
const checkOverlay = baseFile ? detectCheckOverlay(dir, baseFile) : void 0;
|
|
4527
|
+
const envFile = detectCheckEnvFile(dir);
|
|
4528
|
+
const services = parseComposeServices(dir, files);
|
|
4353
4529
|
const healthChecks = deriveHealthChecks(services);
|
|
4354
4530
|
return {
|
|
4355
|
-
|
|
4531
|
+
composeCwd: dir !== cwd ? dir : void 0,
|
|
4532
|
+
composeFiles: files,
|
|
4533
|
+
checkOverlay,
|
|
4534
|
+
envFile,
|
|
4356
4535
|
services: services.map((s) => s.name),
|
|
4357
4536
|
healthChecks: healthChecks.length > 0 ? healthChecks : void 0
|
|
4358
4537
|
};
|
|
4359
4538
|
}
|
|
4539
|
+
/**
 * Create a DockerFileReader backed by the real filesystem.
 *
 * `listPackageDirs` returns the directory entries of `<cwd>/packages`
 * (empty array when the directory is missing or unreadable);
 * `readFile` returns a file's UTF-8 contents, or null on any read error.
 */
function createFileReader() {
  const listPackageDirs = (cwd) => {
    try {
      const entries = readdirSync(path.join(cwd, "packages"), { withFileTypes: true });
      return entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name);
    } catch {
      // Missing packages/ dir (or permission error) is treated as "no packages".
      return [];
    }
  };
  const readFile = (filePath) => {
    try {
      return readFileSync(filePath, "utf-8");
    } catch {
      return null;
    }
  };
  return { listPackageDirs, readFile };
}
|
|
4360
4559
|
//#endregion
|
|
4361
|
-
//#region src/commands/docker-
|
|
4560
|
+
//#region src/commands/docker-check.ts
|
|
4362
4561
|
/** Convert declarative health checks to functional ones. */
|
|
4363
4562
|
function toHttpHealthChecks(checks) {
|
|
4364
4563
|
return checks.map((check) => ({
|
|
@@ -4367,10 +4566,23 @@ function toHttpHealthChecks(checks) {
|
|
|
4367
4566
|
validate: async (res) => check.status ? res.status === check.status : res.ok
|
|
4368
4567
|
}));
|
|
4369
4568
|
}
|
|
4370
|
-
|
|
4569
|
+
/**
 * Write the generated check overlay to a temp file. Returns the absolute path.
 *
 * The filename is keyed on the current pid, so a single process reuses the
 * same path; the caller is responsible for unlinking it when done.
 */
function writeTempOverlay(content) {
  const overlayPath = path.join(
    tmpdir(),
    `tooling-check-overlay-${process.pid}.yaml`
  );
  writeFileSync(overlayPath, content, "utf-8");
  return overlayPath;
}
|
|
4576
|
+
/** Print an informational message to stdout (thin wrapper over console.log). */
function log(message) {
  console.log(message);
}
/** Print a warning message to stderr (thin wrapper over console.warn). */
function warn(message) {
  console.warn(message);
}
|
|
4582
|
+
const dockerCheckCommand = defineCommand({
|
|
4371
4583
|
meta: {
|
|
4372
|
-
name: "docker:
|
|
4373
|
-
description: "
|
|
4584
|
+
name: "docker:check",
|
|
4585
|
+
description: "Check Docker Compose stack health by auto-detecting services from compose files"
|
|
4374
4586
|
},
|
|
4375
4587
|
args: {
|
|
4376
4588
|
timeout: {
|
|
@@ -4384,24 +4596,59 @@ const dockerVerifyCommand = defineCommand({
|
|
|
4384
4596
|
},
|
|
4385
4597
|
async run({ args }) {
|
|
4386
4598
|
const cwd = process.cwd();
|
|
4387
|
-
|
|
4388
|
-
|
|
4599
|
+
if (loadToolingConfig(cwd)?.dockerCheck === false) {
|
|
4600
|
+
log("Docker check is disabled in .tooling.json");
|
|
4601
|
+
return;
|
|
4602
|
+
}
|
|
4603
|
+
const defaults = computeCheckDefaults(cwd);
|
|
4604
|
+
if (!defaults.composeFiles || defaults.composeFiles.length === 0) throw new FatalError("No compose files found. Expected docker-compose.yaml or compose.yaml in ./ or docker/.");
|
|
4605
|
+
if (!defaults.checkOverlay) {
|
|
4606
|
+
const composeCwd = defaults.composeCwd ?? cwd;
|
|
4607
|
+
const expectedOverlay = (defaults.composeFiles[0] ?? "docker-compose.yaml").replace(/\.(yaml|yml)$/, ".check.$1");
|
|
4608
|
+
warn(`Compose files found but no check overlay. Create ${path.relative(cwd, path.join(composeCwd, expectedOverlay))} to enable docker:check.`);
|
|
4609
|
+
warn("To suppress this warning, set \"dockerCheck\": false in .tooling.json.");
|
|
4610
|
+
return;
|
|
4611
|
+
}
|
|
4389
4612
|
if (!defaults.services || defaults.services.length === 0) throw new FatalError("No services found in compose files.");
|
|
4390
|
-
const
|
|
4391
|
-
|
|
4392
|
-
|
|
4393
|
-
|
|
4394
|
-
|
|
4395
|
-
|
|
4396
|
-
|
|
4397
|
-
|
|
4398
|
-
|
|
4399
|
-
|
|
4400
|
-
|
|
4401
|
-
|
|
4402
|
-
|
|
4403
|
-
const
|
|
4404
|
-
|
|
4613
|
+
const composeCwd = defaults.composeCwd ?? cwd;
|
|
4614
|
+
const services = parseComposeServices(composeCwd, defaults.composeFiles);
|
|
4615
|
+
const fileReader = createFileReader();
|
|
4616
|
+
const rootPkgRaw = fileReader.readFile(path.join(cwd, "package.json"));
|
|
4617
|
+
if (rootPkgRaw) {
|
|
4618
|
+
const rootPkg = parsePackageJson(rootPkgRaw);
|
|
4619
|
+
if (rootPkg?.name) {
|
|
4620
|
+
const dockerPackages = detectDockerPackages(fileReader, cwd, rootPkg.name);
|
|
4621
|
+
const composeImages = extractComposeImageNames(services);
|
|
4622
|
+
for (const pkg of dockerPackages) if (!composeImages.some((img) => img === pkg.imageName || img.endsWith(`/${pkg.imageName}`))) warn(`Docker package "${pkg.dir}" (image: ${pkg.imageName}) is not referenced in any compose service.`);
|
|
4623
|
+
}
|
|
4624
|
+
}
|
|
4625
|
+
const tempOverlayPath = writeTempOverlay(generateCheckOverlay(services));
|
|
4626
|
+
const composeFiles = [
|
|
4627
|
+
...defaults.composeFiles,
|
|
4628
|
+
tempOverlayPath,
|
|
4629
|
+
defaults.checkOverlay
|
|
4630
|
+
];
|
|
4631
|
+
try {
|
|
4632
|
+
const config = {
|
|
4633
|
+
compose: {
|
|
4634
|
+
cwd: composeCwd,
|
|
4635
|
+
composeFiles,
|
|
4636
|
+
envFile: defaults.envFile,
|
|
4637
|
+
services: defaults.services
|
|
4638
|
+
},
|
|
4639
|
+
buildCommand: defaults.buildCommand,
|
|
4640
|
+
buildCwd: defaults.buildCwd,
|
|
4641
|
+
healthChecks: defaults.healthChecks ? toHttpHealthChecks(defaults.healthChecks) : [],
|
|
4642
|
+
timeoutMs: args.timeout ? Number.parseInt(args.timeout, 10) : defaults.timeoutMs,
|
|
4643
|
+
pollIntervalMs: args["poll-interval"] ? Number.parseInt(args["poll-interval"], 10) : defaults.pollIntervalMs
|
|
4644
|
+
};
|
|
4645
|
+
const result = await runDockerCheck(createRealExecutor$1(), config);
|
|
4646
|
+
if (!result.success) throw new FatalError(`Check failed (${result.reason}): ${result.message}`);
|
|
4647
|
+
} finally {
|
|
4648
|
+
try {
|
|
4649
|
+
unlinkSync(tempOverlayPath);
|
|
4650
|
+
} catch (_error) {}
|
|
4651
|
+
}
|
|
4405
4652
|
}
|
|
4406
4653
|
});
|
|
4407
4654
|
//#endregion
|
|
@@ -4409,7 +4656,7 @@ const dockerVerifyCommand = defineCommand({
|
|
|
4409
4656
|
const main = defineCommand({
|
|
4410
4657
|
meta: {
|
|
4411
4658
|
name: "tooling",
|
|
4412
|
-
version: "0.
|
|
4659
|
+
version: "0.21.0",
|
|
4413
4660
|
description: "Bootstrap and maintain standardized TypeScript project tooling"
|
|
4414
4661
|
},
|
|
4415
4662
|
subCommands: {
|
|
@@ -4422,10 +4669,10 @@ const main = defineCommand({
|
|
|
4422
4669
|
"release:simple": releaseSimpleCommand,
|
|
4423
4670
|
"docker:publish": publishDockerCommand,
|
|
4424
4671
|
"docker:build": dockerBuildCommand,
|
|
4425
|
-
"docker:
|
|
4672
|
+
"docker:check": dockerCheckCommand
|
|
4426
4673
|
}
|
|
4427
4674
|
});
|
|
4428
|
-
console.log(`@bensandee/tooling v0.
|
|
4675
|
+
// Print the CLI banner with the package version, then hand argv dispatch to citty.
console.log(`@bensandee/tooling v0.21.0`);
runMain(main);
//#endregion
export {};
|