@bensandee/tooling 0.28.0 → 0.28.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin.mjs +2114 -2087
- package/dist/index.d.mts +2 -3
- package/package.json +1 -1
- package/tooling.schema.json +0 -4
package/dist/bin.mjs
CHANGED
|
@@ -280,14 +280,18 @@ function getMonorepoPackages(targetDir) {
|
|
|
280
280
|
function isCancelled(value) {
|
|
281
281
|
return clack.isCancel(value);
|
|
282
282
|
}
|
|
283
|
+
function detectProjectInfo(targetDir) {
|
|
284
|
+
const existingPkg = readPackageJson(targetDir);
|
|
285
|
+
return {
|
|
286
|
+
detected: detectProject(targetDir),
|
|
287
|
+
defaults: computeDefaults(targetDir),
|
|
288
|
+
name: existingPkg?.name ?? path.basename(targetDir)
|
|
289
|
+
};
|
|
290
|
+
}
|
|
283
291
|
async function runInitPrompts(targetDir, saved) {
|
|
284
292
|
clack.intro("@bensandee/tooling repo:sync");
|
|
285
|
-
const
|
|
286
|
-
const detected = detectProject(targetDir);
|
|
287
|
-
const defaults = computeDefaults(targetDir);
|
|
288
|
-
const isExisting = detected.hasPackageJson;
|
|
293
|
+
const { detected, defaults, name } = detectProjectInfo(targetDir);
|
|
289
294
|
const isFirstInit = !saved;
|
|
290
|
-
const name = existingPkg?.name ?? path.basename(targetDir);
|
|
291
295
|
const structure = saved?.structure ?? defaults.structure;
|
|
292
296
|
const useEslintPlugin = saved?.useEslintPlugin ?? defaults.useEslintPlugin;
|
|
293
297
|
let formatter = saved?.formatter ?? defaults.formatter;
|
|
@@ -406,7 +410,6 @@ async function runInitPrompts(targetDir, saved) {
|
|
|
406
410
|
clack.outro("Configuration complete!");
|
|
407
411
|
return {
|
|
408
412
|
name,
|
|
409
|
-
isNew: !isExisting,
|
|
410
413
|
structure,
|
|
411
414
|
useEslintPlugin,
|
|
412
415
|
formatter,
|
|
@@ -423,12 +426,9 @@ async function runInitPrompts(targetDir, saved) {
|
|
|
423
426
|
}
|
|
424
427
|
/** Build a ProjectConfig from CLI flags for non-interactive mode. */
|
|
425
428
|
function buildDefaultConfig(targetDir, flags) {
|
|
426
|
-
const
|
|
427
|
-
const detected = detectProject(targetDir);
|
|
428
|
-
const defaults = computeDefaults(targetDir);
|
|
429
|
+
const { defaults, name } = detectProjectInfo(targetDir);
|
|
429
430
|
return {
|
|
430
|
-
name
|
|
431
|
-
isNew: !detected.hasPackageJson,
|
|
431
|
+
name,
|
|
432
432
|
...defaults,
|
|
433
433
|
...flags.eslintPlugin !== void 0 && { useEslintPlugin: flags.eslintPlugin },
|
|
434
434
|
...flags.noCi && { ci: "none" },
|
|
@@ -574,7 +574,6 @@ const ToolingConfigSchema = z.strictObject({
|
|
|
574
574
|
"library"
|
|
575
575
|
]).optional().meta({ description: "Project type (determines tsconfig base)" }),
|
|
576
576
|
detectPackageTypes: z.boolean().optional().meta({ description: "Auto-detect project types for monorepo packages" }),
|
|
577
|
-
setupDocker: z.boolean().optional().meta({ description: "Generate Docker build/check scripts" }),
|
|
578
577
|
docker: z.record(z.string(), z.object({
|
|
579
578
|
dockerfile: z.string().meta({ description: "Path to Dockerfile relative to package" }),
|
|
580
579
|
context: z.string().default(".").meta({ description: "Docker build context relative to package" })
|
|
@@ -637,7 +636,6 @@ function saveToolingConfig(ctx, config) {
|
|
|
637
636
|
function mergeWithSavedConfig(detected, saved) {
|
|
638
637
|
return {
|
|
639
638
|
name: detected.name,
|
|
640
|
-
isNew: detected.isNew,
|
|
641
639
|
targetDir: detected.targetDir,
|
|
642
640
|
structure: saved.structure ?? detected.structure,
|
|
643
641
|
useEslintPlugin: saved.useEslintPlugin ?? detected.useEslintPlugin,
|
|
@@ -653,1624 +651,1281 @@ function mergeWithSavedConfig(detected, saved) {
|
|
|
653
651
|
};
|
|
654
652
|
}
|
|
655
653
|
//#endregion
|
|
656
|
-
//#region src/
|
|
657
|
-
const
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
654
|
+
//#region src/release/docker.ts
|
|
655
|
+
const ToolingDockerMapSchema = z.record(z.string(), z.object({
|
|
656
|
+
dockerfile: z.string(),
|
|
657
|
+
context: z.string().default(".")
|
|
658
|
+
}));
|
|
659
|
+
const ToolingConfigDockerSchema = z.object({ docker: ToolingDockerMapSchema.optional() });
|
|
660
|
+
const PackageInfoSchema = z.object({
|
|
661
|
+
name: z.string().optional(),
|
|
662
|
+
version: z.string().optional()
|
|
663
|
+
});
|
|
664
|
+
/** Read the docker map from .tooling.json. Returns empty record if missing or invalid. */
|
|
665
|
+
function loadDockerMap(executor, cwd) {
|
|
666
|
+
const configPath = path.join(cwd, ".tooling.json");
|
|
667
|
+
const raw = executor.readFile(configPath);
|
|
668
|
+
if (!raw) return {};
|
|
669
|
+
try {
|
|
670
|
+
const result = ToolingConfigDockerSchema.safeParse(JSON.parse(raw));
|
|
671
|
+
if (!result.success || !result.data.docker) return {};
|
|
672
|
+
return result.data.docker;
|
|
673
|
+
} catch (_error) {
|
|
674
|
+
return {};
|
|
675
|
+
}
|
|
676
676
|
}
|
|
677
|
-
/**
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
if (isToolingIgnored(existing)) return {
|
|
684
|
-
content: existing,
|
|
685
|
-
changed: false
|
|
677
|
+
/** Read name and version from a package's package.json. */
|
|
678
|
+
function readPackageInfo(executor, packageJsonPath) {
|
|
679
|
+
const raw = executor.readFile(packageJsonPath);
|
|
680
|
+
if (!raw) return {
|
|
681
|
+
name: void 0,
|
|
682
|
+
version: void 0
|
|
686
683
|
};
|
|
687
684
|
try {
|
|
688
|
-
const
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
return {
|
|
693
|
-
content: doc.toString(),
|
|
694
|
-
changed: true
|
|
695
|
-
};
|
|
696
|
-
}
|
|
697
|
-
const commands = doc.getIn(["pre-commit", "commands"]);
|
|
698
|
-
if (!isMap(commands)) return {
|
|
699
|
-
content: existing,
|
|
700
|
-
changed: false
|
|
685
|
+
const result = PackageInfoSchema.safeParse(JSON.parse(raw));
|
|
686
|
+
if (!result.success) return {
|
|
687
|
+
name: void 0,
|
|
688
|
+
version: void 0
|
|
701
689
|
};
|
|
702
|
-
for (const [name, config] of Object.entries(requiredCommands)) if (!commands.has(name)) {
|
|
703
|
-
commands.set(name, config);
|
|
704
|
-
changed = true;
|
|
705
|
-
}
|
|
706
690
|
return {
|
|
707
|
-
|
|
708
|
-
|
|
691
|
+
name: result.data.name,
|
|
692
|
+
version: result.data.version
|
|
709
693
|
};
|
|
710
|
-
} catch {
|
|
694
|
+
} catch (_error) {
|
|
711
695
|
return {
|
|
712
|
-
|
|
713
|
-
|
|
696
|
+
name: void 0,
|
|
697
|
+
version: void 0
|
|
714
698
|
};
|
|
715
699
|
}
|
|
716
700
|
}
|
|
701
|
+
/** Strip npm scope from a package name: "@scope/foo" → "foo", "foo" → "foo". */
|
|
702
|
+
function stripScope(name) {
|
|
703
|
+
const slashIndex = name.indexOf("/");
|
|
704
|
+
return name.startsWith("@") && slashIndex !== -1 ? name.slice(slashIndex + 1) : name;
|
|
705
|
+
}
|
|
706
|
+
/** Convention paths to check for Dockerfiles in a package directory. */
|
|
707
|
+
const CONVENTION_DOCKERFILE_PATHS = ["Dockerfile", "docker/Dockerfile"];
|
|
717
708
|
/**
|
|
718
|
-
*
|
|
719
|
-
*
|
|
720
|
-
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
709
|
+
* Find a Dockerfile at convention paths for a monorepo package.
|
|
710
|
+
* Checks packages/{dir}/Dockerfile and packages/{dir}/docker/Dockerfile.
|
|
721
711
|
*/
|
|
722
|
-
function
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
const doc = parseDocument(existing);
|
|
729
|
-
const steps = doc.getIn([
|
|
730
|
-
"jobs",
|
|
731
|
-
jobName,
|
|
732
|
-
"steps"
|
|
733
|
-
]);
|
|
734
|
-
if (!isSeq(steps)) return {
|
|
735
|
-
content: existing,
|
|
736
|
-
changed: false
|
|
737
|
-
};
|
|
738
|
-
let changed = false;
|
|
739
|
-
for (const { match, step } of requiredSteps) if (!steps.items.some((item) => {
|
|
740
|
-
if (!isMap(item)) return false;
|
|
741
|
-
if (match.run) {
|
|
742
|
-
const run = item.get("run");
|
|
743
|
-
return typeof run === "string" && run.includes(match.run);
|
|
744
|
-
}
|
|
745
|
-
if (match.uses) {
|
|
746
|
-
const uses = item.get("uses");
|
|
747
|
-
return typeof uses === "string" && uses.startsWith(match.uses);
|
|
748
|
-
}
|
|
749
|
-
return false;
|
|
750
|
-
})) {
|
|
751
|
-
steps.add(doc.createNode(step));
|
|
752
|
-
changed = true;
|
|
753
|
-
}
|
|
754
|
-
return {
|
|
755
|
-
content: changed ? doc.toString() : existing,
|
|
756
|
-
changed
|
|
757
|
-
};
|
|
758
|
-
} catch {
|
|
759
|
-
return {
|
|
760
|
-
content: existing,
|
|
761
|
-
changed: false
|
|
712
|
+
function findConventionDockerfile(executor, cwd, dir) {
|
|
713
|
+
for (const rel of CONVENTION_DOCKERFILE_PATHS) {
|
|
714
|
+
const dockerfilePath = `packages/${dir}/${rel}`;
|
|
715
|
+
if (executor.readFile(path.join(cwd, dockerfilePath)) !== null) return {
|
|
716
|
+
dockerfile: dockerfilePath,
|
|
717
|
+
context: "."
|
|
762
718
|
};
|
|
763
719
|
}
|
|
764
720
|
}
|
|
765
721
|
/**
|
|
766
|
-
*
|
|
767
|
-
*
|
|
768
|
-
* or the document can't be parsed.
|
|
722
|
+
* Find a Dockerfile at convention paths for a single-package repo.
|
|
723
|
+
* Checks Dockerfile and docker/Dockerfile at the project root.
|
|
769
724
|
*/
|
|
725
|
+
function findRootDockerfile(executor, cwd) {
|
|
726
|
+
for (const rel of CONVENTION_DOCKERFILE_PATHS) if (executor.readFile(path.join(cwd, rel)) !== null) return {
|
|
727
|
+
dockerfile: rel,
|
|
728
|
+
context: "."
|
|
729
|
+
};
|
|
730
|
+
}
|
|
770
731
|
/**
|
|
771
|
-
*
|
|
772
|
-
*
|
|
773
|
-
*
|
|
732
|
+
* Discover Docker packages by convention and merge with .tooling.json overrides.
|
|
733
|
+
*
|
|
734
|
+
* Convention: any package with a Dockerfile or docker/Dockerfile is a Docker package.
|
|
735
|
+
* For monorepos, scans packages/{name}/. For single-package repos, scans the root.
|
|
736
|
+
* The docker map in .tooling.json overrides convention-discovered config and can add
|
|
737
|
+
* packages at non-standard locations.
|
|
738
|
+
*
|
|
739
|
+
* Image names are derived from {root-name}-{package-name} using each package's package.json name.
|
|
740
|
+
* Versions are read from each package's own package.json.
|
|
774
741
|
*/
|
|
775
|
-
function
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
const
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
742
|
+
function detectDockerPackages(executor, cwd, repoName) {
|
|
743
|
+
const overrides = loadDockerMap(executor, cwd);
|
|
744
|
+
const packageDirs = executor.listPackageDirs(cwd);
|
|
745
|
+
const packages = [];
|
|
746
|
+
const seen = /* @__PURE__ */ new Set();
|
|
747
|
+
if (packageDirs.length > 0) {
|
|
748
|
+
for (const dir of packageDirs) {
|
|
749
|
+
const convention = findConventionDockerfile(executor, cwd, dir);
|
|
750
|
+
const docker = overrides[dir] ?? convention;
|
|
751
|
+
if (docker) {
|
|
752
|
+
const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
|
|
753
|
+
packages.push({
|
|
754
|
+
dir,
|
|
755
|
+
imageName: `${repoName}-${stripScope(name ?? dir)}`,
|
|
756
|
+
version,
|
|
757
|
+
docker
|
|
758
|
+
});
|
|
759
|
+
seen.add(dir);
|
|
760
|
+
}
|
|
761
|
+
}
|
|
762
|
+
for (const [dir, docker] of Object.entries(overrides)) if (!seen.has(dir)) {
|
|
763
|
+
const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
|
|
764
|
+
packages.push({
|
|
765
|
+
dir,
|
|
766
|
+
imageName: `${repoName}-${stripScope(name ?? dir)}`,
|
|
767
|
+
version,
|
|
768
|
+
docker
|
|
769
|
+
});
|
|
770
|
+
}
|
|
771
|
+
} else {
|
|
772
|
+
const convention = findRootDockerfile(executor, cwd);
|
|
773
|
+
const docker = overrides["."] ?? convention;
|
|
774
|
+
if (docker) {
|
|
775
|
+
const { name, version } = readPackageInfo(executor, path.join(cwd, "package.json"));
|
|
776
|
+
packages.push({
|
|
777
|
+
dir: ".",
|
|
778
|
+
imageName: stripScope(name ?? repoName),
|
|
779
|
+
version,
|
|
780
|
+
docker
|
|
781
|
+
});
|
|
793
782
|
}
|
|
794
|
-
return {
|
|
795
|
-
content: doc.toString(),
|
|
796
|
-
changed: true
|
|
797
|
-
};
|
|
798
|
-
} catch {
|
|
799
|
-
return {
|
|
800
|
-
content: existing,
|
|
801
|
-
changed: false
|
|
802
|
-
};
|
|
803
783
|
}
|
|
784
|
+
return packages;
|
|
804
785
|
}
|
|
805
|
-
|
|
806
|
-
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
}
|
|
814
|
-
|
|
815
|
-
return
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
jobs:
|
|
822
|
-
publish:
|
|
823
|
-
runs-on: ubuntu-latest
|
|
824
|
-
steps:
|
|
825
|
-
- uses: actions/checkout@v4
|
|
826
|
-
- uses: pnpm/action-setup@v4
|
|
827
|
-
- uses: actions/setup-node@v4
|
|
828
|
-
with:
|
|
829
|
-
${nodeVersionYaml}
|
|
830
|
-
- run: pnpm install --frozen-lockfile
|
|
831
|
-
- name: Publish Docker images
|
|
832
|
-
env:
|
|
833
|
-
DOCKER_REGISTRY_HOST: ${actionsExpr$2("vars.DOCKER_REGISTRY_HOST")}
|
|
834
|
-
DOCKER_REGISTRY_NAMESPACE: ${actionsExpr$2("vars.DOCKER_REGISTRY_NAMESPACE")}
|
|
835
|
-
DOCKER_USERNAME: ${actionsExpr$2("secrets.DOCKER_USERNAME")}
|
|
836
|
-
DOCKER_PASSWORD: ${actionsExpr$2("secrets.DOCKER_PASSWORD")}
|
|
837
|
-
run: pnpm exec bst docker:publish
|
|
838
|
-
`;
|
|
786
|
+
/**
|
|
787
|
+
* Read docker config for a single package, checking convention paths first,
|
|
788
|
+
* then .tooling.json overrides. Used by the per-package image:build script.
|
|
789
|
+
*/
|
|
790
|
+
function readSinglePackageDocker(executor, cwd, packageDir, repoName) {
|
|
791
|
+
const dir = path.basename(path.resolve(cwd, packageDir));
|
|
792
|
+
const convention = findConventionDockerfile(executor, cwd, dir);
|
|
793
|
+
const docker = loadDockerMap(executor, cwd)[dir] ?? convention;
|
|
794
|
+
if (!docker) throw new FatalError(`No Dockerfile found for package "${dir}" (checked convention paths and .tooling.json)`);
|
|
795
|
+
const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
|
|
796
|
+
return {
|
|
797
|
+
dir,
|
|
798
|
+
imageName: `${repoName}-${stripScope(name ?? dir)}`,
|
|
799
|
+
version,
|
|
800
|
+
docker
|
|
801
|
+
};
|
|
839
802
|
}
|
|
840
|
-
|
|
803
|
+
/** Parse semver version string into major, minor, patch components. */
|
|
804
|
+
function parseSemver(version) {
|
|
805
|
+
const clean = version.replace(/^v/, "");
|
|
806
|
+
const match = /^(\d+)\.(\d+)\.(\d+)/.exec(clean);
|
|
807
|
+
if (!match?.[1] || !match[2] || !match[3]) throw new FatalError(`Invalid semver version: ${version}`);
|
|
808
|
+
return {
|
|
809
|
+
major: Number(match[1]),
|
|
810
|
+
minor: Number(match[2]),
|
|
811
|
+
patch: Number(match[3])
|
|
812
|
+
};
|
|
813
|
+
}
|
|
814
|
+
/** Generate semver tag variants: latest, vX.Y.Z, vX.Y, vX */
|
|
815
|
+
function generateTags(version) {
|
|
816
|
+
const { major, minor, patch } = parseSemver(version);
|
|
841
817
|
return [
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
}
|
|
846
|
-
{
|
|
847
|
-
match: { uses: "pnpm/action-setup" },
|
|
848
|
-
step: { uses: "pnpm/action-setup@v4" }
|
|
849
|
-
},
|
|
850
|
-
{
|
|
851
|
-
match: { uses: "actions/setup-node" },
|
|
852
|
-
step: { uses: "actions/setup-node@v4" }
|
|
853
|
-
},
|
|
854
|
-
{
|
|
855
|
-
match: { run: "pnpm install" },
|
|
856
|
-
step: { run: "pnpm install --frozen-lockfile" }
|
|
857
|
-
},
|
|
858
|
-
{
|
|
859
|
-
match: { run: "docker:publish" },
|
|
860
|
-
step: { run: "pnpm exec bst docker:publish" }
|
|
861
|
-
}
|
|
818
|
+
"latest",
|
|
819
|
+
`v${major}.${minor}.${patch}`,
|
|
820
|
+
`v${major}.${minor}`,
|
|
821
|
+
`v${major}`
|
|
862
822
|
];
|
|
863
823
|
}
|
|
864
|
-
/**
|
|
865
|
-
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
function
|
|
869
|
-
|
|
870
|
-
|
|
871
|
-
|
|
872
|
-
|
|
873
|
-
|
|
824
|
+
/** Build the full image reference: namespace/imageName:tag */
|
|
825
|
+
function imageRef(namespace, imageName, tag) {
|
|
826
|
+
return `${namespace}/${imageName}:${tag}`;
|
|
827
|
+
}
|
|
828
|
+
function log$1(message) {
|
|
829
|
+
console.log(message);
|
|
830
|
+
}
|
|
831
|
+
/** Read the repo name from root package.json. */
|
|
832
|
+
function readRepoName(executor, cwd) {
|
|
833
|
+
const rootPkgRaw = executor.readFile(path.join(cwd, "package.json"));
|
|
834
|
+
if (!rootPkgRaw) throw new FatalError("No package.json found in project root");
|
|
835
|
+
const repoName = parsePackageJson(rootPkgRaw)?.name;
|
|
836
|
+
if (!repoName) throw new FatalError("Root package.json must have a name field");
|
|
837
|
+
return repoName;
|
|
838
|
+
}
|
|
839
|
+
/** Build a single docker image from its config. Paths are resolved relative to cwd. */
|
|
840
|
+
function buildImage(executor, pkg, cwd, extraArgs) {
|
|
841
|
+
const dockerfilePath = path.resolve(cwd, pkg.docker.dockerfile);
|
|
842
|
+
const contextPath = path.resolve(cwd, pkg.docker.context);
|
|
843
|
+
const command = [
|
|
844
|
+
"docker build",
|
|
845
|
+
`-f ${dockerfilePath}`,
|
|
846
|
+
`-t ${pkg.imageName}:latest`,
|
|
847
|
+
...extraArgs,
|
|
848
|
+
contextPath
|
|
849
|
+
].join(" ");
|
|
850
|
+
executor.execInherit(command);
|
|
851
|
+
}
|
|
852
|
+
/**
|
|
853
|
+
* Detect packages with docker config in .tooling.json and build each one.
|
|
854
|
+
* Runs `docker build -f <dockerfile> -t <image-name>:latest <context>` for each package.
|
|
855
|
+
* Dockerfile and context paths are resolved relative to the project root.
|
|
856
|
+
*
|
|
857
|
+
* When `packageDir` is set, builds only that single package (for use as an image:build script).
|
|
858
|
+
*/
|
|
859
|
+
function runDockerBuild(executor, config) {
|
|
860
|
+
const repoName = readRepoName(executor, config.cwd);
|
|
861
|
+
if (config.packageDir) {
|
|
862
|
+
const pkg = readSinglePackageDocker(executor, config.cwd, config.packageDir, repoName);
|
|
863
|
+
log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
|
|
864
|
+
buildImage(executor, pkg, config.cwd, config.extraArgs);
|
|
865
|
+
log$1(`Built ${pkg.imageName}:latest`);
|
|
866
|
+
return { packages: [pkg] };
|
|
874
867
|
}
|
|
875
|
-
|
|
876
|
-
|
|
877
|
-
|
|
878
|
-
|
|
879
|
-
if (names.includes(dirName)) continue;
|
|
880
|
-
for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(`packages/${dirName}/${rel}`)) {
|
|
881
|
-
names.push(dirName);
|
|
882
|
-
break;
|
|
883
|
-
}
|
|
884
|
-
}
|
|
885
|
-
} else for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(rel)) {
|
|
886
|
-
if (!names.includes(ctx.config.name)) names.push(ctx.config.name);
|
|
887
|
-
break;
|
|
868
|
+
const packages = detectDockerPackages(executor, config.cwd, repoName);
|
|
869
|
+
if (packages.length === 0) {
|
|
870
|
+
log$1("No packages with docker config found");
|
|
871
|
+
return { packages: [] };
|
|
888
872
|
}
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
|
|
873
|
+
log$1(`Found ${packages.length} Docker package(s): ${packages.map((p) => p.dir).join(", ")}`);
|
|
874
|
+
for (const pkg of packages) {
|
|
875
|
+
log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
|
|
876
|
+
buildImage(executor, pkg, config.cwd, config.extraArgs);
|
|
877
|
+
}
|
|
878
|
+
log$1(`Built ${packages.length} image(s)`);
|
|
879
|
+
return { packages };
|
|
894
880
|
}
|
|
895
|
-
|
|
896
|
-
|
|
897
|
-
|
|
898
|
-
|
|
899
|
-
|
|
900
|
-
|
|
881
|
+
/**
|
|
882
|
+
* Run the full Docker publish pipeline:
|
|
883
|
+
* 1. Build all images via runDockerBuild
|
|
884
|
+
* 2. Login to registry
|
|
885
|
+
* 3. Tag each image with semver variants from its own package.json version
|
|
886
|
+
* 4. Push all tags
|
|
887
|
+
* 5. Logout from registry
|
|
888
|
+
*/
|
|
889
|
+
function runDockerPublish(executor, config) {
|
|
890
|
+
const { packages } = runDockerBuild(executor, {
|
|
891
|
+
cwd: config.cwd,
|
|
892
|
+
packageDir: void 0,
|
|
893
|
+
extraArgs: []
|
|
894
|
+
});
|
|
895
|
+
if (packages.length === 0) return {
|
|
896
|
+
packages: [],
|
|
897
|
+
tags: []
|
|
901
898
|
};
|
|
902
|
-
const
|
|
903
|
-
|
|
904
|
-
|
|
905
|
-
|
|
906
|
-
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
|
|
912
|
-
|
|
913
|
-
|
|
914
|
-
|
|
915
|
-
|
|
916
|
-
|
|
917
|
-
|
|
918
|
-
}
|
|
919
|
-
|
|
920
|
-
|
|
921
|
-
|
|
922
|
-
|
|
923
|
-
};
|
|
924
|
-
}
|
|
925
|
-
const merged = mergeWorkflowSteps(existing, "publish", requiredDeploySteps());
|
|
926
|
-
const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
|
|
927
|
-
if (!merged.changed) {
|
|
928
|
-
if (withComment !== raw) {
|
|
929
|
-
ctx.write(workflowPath, withComment);
|
|
930
|
-
return {
|
|
931
|
-
filePath: workflowPath,
|
|
932
|
-
action: "updated",
|
|
933
|
-
description: existing !== raw ? "Migrated tooling binary name in publish workflow" : "Added schema comment to publish workflow"
|
|
934
|
-
};
|
|
935
|
-
}
|
|
936
|
-
return {
|
|
937
|
-
filePath: workflowPath,
|
|
938
|
-
action: "skipped",
|
|
939
|
-
description: "Existing publish workflow preserved"
|
|
940
|
-
};
|
|
899
|
+
for (const pkg of packages) if (!pkg.version) throw new FatalError(`Package ${pkg.dir} has docker config but no version in package.json`);
|
|
900
|
+
if (!config.dryRun) {
|
|
901
|
+
log$1(`Logging in to ${config.registryHost}...`);
|
|
902
|
+
const loginResult = executor.exec(`echo "${config.password}" | docker login ${config.registryHost} -u ${config.username} --password-stdin`);
|
|
903
|
+
if (loginResult.exitCode !== 0) throw new FatalError(`Docker login failed: ${loginResult.stderr}`);
|
|
904
|
+
} else log$1("[dry-run] Skipping docker login");
|
|
905
|
+
const allTags = [];
|
|
906
|
+
try {
|
|
907
|
+
for (const pkg of packages) {
|
|
908
|
+
const tags = generateTags(pkg.version ?? "");
|
|
909
|
+
log$1(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
|
|
910
|
+
for (const tag of tags) {
|
|
911
|
+
const ref = imageRef(config.registryNamespace, pkg.imageName, tag);
|
|
912
|
+
allTags.push(ref);
|
|
913
|
+
log$1(`Tagging ${pkg.imageName} → ${ref}`);
|
|
914
|
+
const tagResult = executor.exec(`docker tag ${pkg.imageName} ${ref}`);
|
|
915
|
+
if (tagResult.exitCode !== 0) throw new FatalError(`docker tag failed: ${tagResult.stderr}`);
|
|
916
|
+
if (!config.dryRun) {
|
|
917
|
+
log$1(`Pushing ${ref}...`);
|
|
918
|
+
const pushResult = executor.exec(`docker push ${ref}`);
|
|
919
|
+
if (pushResult.exitCode !== 0) throw new FatalError(`docker push failed: ${pushResult.stderr}`);
|
|
920
|
+
} else log$1(`[dry-run] Skipping push for ${ref}`);
|
|
941
921
|
}
|
|
942
|
-
ctx.write(workflowPath, withComment);
|
|
943
|
-
return {
|
|
944
|
-
filePath: workflowPath,
|
|
945
|
-
action: "updated",
|
|
946
|
-
description: "Added missing steps to publish workflow"
|
|
947
|
-
};
|
|
948
922
|
}
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
|
|
953
|
-
}
|
|
923
|
+
} finally {
|
|
924
|
+
if (!config.dryRun) {
|
|
925
|
+
log$1(`Logging out from ${config.registryHost}...`);
|
|
926
|
+
executor.exec(`docker logout ${config.registryHost}`);
|
|
927
|
+
}
|
|
954
928
|
}
|
|
955
|
-
|
|
929
|
+
log$1(`Published ${allTags.length} image tag(s)`);
|
|
956
930
|
return {
|
|
957
|
-
|
|
958
|
-
|
|
959
|
-
description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions publish workflow`
|
|
931
|
+
packages,
|
|
932
|
+
tags: allTags
|
|
960
933
|
};
|
|
961
934
|
}
|
|
962
935
|
//#endregion
|
|
963
|
-
//#region src/
|
|
964
|
-
const
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
lint: "oxlint",
|
|
970
|
-
knip: "knip",
|
|
971
|
-
check: "bst checks:run",
|
|
972
|
-
"ci:check": "pnpm check --skip 'docker:*'",
|
|
973
|
-
"tooling:check": "bst repo:sync --check",
|
|
974
|
-
"tooling:sync": "bst repo:sync"
|
|
975
|
-
};
|
|
976
|
-
const STANDARD_SCRIPTS_MONOREPO = {
|
|
977
|
-
build: "pnpm -r build",
|
|
978
|
-
test: "pnpm -r test",
|
|
979
|
-
typecheck: "pnpm -r --parallel run typecheck",
|
|
980
|
-
lint: "oxlint",
|
|
981
|
-
knip: "knip",
|
|
982
|
-
check: "bst checks:run",
|
|
983
|
-
"ci:check": "pnpm check --skip 'docker:*'",
|
|
984
|
-
"tooling:check": "bst repo:sync --check",
|
|
985
|
-
"tooling:sync": "bst repo:sync"
|
|
986
|
-
};
|
|
987
|
-
/** Scripts that tooling owns — map from script name to keyword that must appear in the value. */
|
|
988
|
-
const MANAGED_SCRIPTS = {
|
|
989
|
-
check: "bst checks:run",
|
|
990
|
-
"ci:check": "pnpm check",
|
|
991
|
-
"tooling:check": "bst repo:sync --check",
|
|
992
|
-
"tooling:sync": "bst repo:sync",
|
|
993
|
-
"trigger-release": "bst release:trigger",
|
|
994
|
-
"docker:build": "bst docker:build",
|
|
995
|
-
"docker:check": "bst docker:check"
|
|
996
|
-
};
|
|
997
|
-
/** Check if an existing script value satisfies a managed script requirement.
|
|
998
|
-
* Accepts both `bst <cmd>` and `bin.mjs <cmd>` (used in the tooling repo itself). */
|
|
999
|
-
function matchesManagedScript(scriptValue, expectedFragment) {
|
|
1000
|
-
if (scriptValue.includes(expectedFragment)) return true;
|
|
1001
|
-
const binMjsFragment = expectedFragment.replace(/^bst /, "bin.mjs ");
|
|
1002
|
-
return scriptValue.includes(binMjsFragment);
|
|
936
|
+
//#region src/utils/yaml-merge.ts
|
|
937
|
+
const IGNORE_PATTERN = "@bensandee/tooling:ignore";
|
|
938
|
+
const FORGEJO_SCHEMA_COMMENT = "# yaml-language-server: $schema=../../.vscode/forgejo-workflow.schema.json\n";
|
|
939
|
+
/** Returns a yaml-language-server schema comment for Forgejo workflows, empty string otherwise. */
|
|
940
|
+
function workflowSchemaComment(ci) {
|
|
941
|
+
return ci === "forgejo" ? FORGEJO_SCHEMA_COMMENT : "";
|
|
1003
942
|
}
|
|
1004
|
-
/**
|
|
1005
|
-
|
|
1006
|
-
|
|
1007
|
-
|
|
1008
|
-
|
|
1009
|
-
tsdown: "0.20.3",
|
|
1010
|
-
typescript: "5.9.3",
|
|
1011
|
-
vitest: "4.0.18"
|
|
1012
|
-
};
|
|
1013
|
-
/** DevDeps that belong at the root regardless of structure. */
|
|
1014
|
-
const ROOT_DEV_DEPS = {
|
|
1015
|
-
knip: "5.85.0",
|
|
1016
|
-
lefthook: "2.1.2",
|
|
1017
|
-
oxlint: "1.50.0"
|
|
1018
|
-
};
|
|
1019
|
-
/**
|
|
1020
|
-
* Check if a package name is available as a workspace dependency.
|
|
1021
|
-
* Looks for a matching package in the packages/ directory.
|
|
1022
|
-
*/
|
|
1023
|
-
function isWorkspacePackage(ctx, packageName) {
|
|
1024
|
-
if (ctx.config.structure !== "monorepo") return false;
|
|
1025
|
-
const packagesDir = path.join(ctx.targetDir, "packages");
|
|
1026
|
-
if (!existsSync(packagesDir)) return false;
|
|
1027
|
-
try {
|
|
1028
|
-
for (const entry of readdirSync(packagesDir, { withFileTypes: true })) {
|
|
1029
|
-
if (!entry.isDirectory()) continue;
|
|
1030
|
-
const pkgJsonPath = path.join(packagesDir, entry.name, "package.json");
|
|
1031
|
-
if (!existsSync(pkgJsonPath)) continue;
|
|
1032
|
-
try {
|
|
1033
|
-
if (parsePackageJson(readFileSync(pkgJsonPath, "utf-8"))?.name === packageName) return true;
|
|
1034
|
-
} catch (_error) {}
|
|
1035
|
-
}
|
|
1036
|
-
} catch (_error) {}
|
|
1037
|
-
return false;
|
|
943
|
+
/** Prepend the Forgejo schema comment if it's not already present. No-op for GitHub. */
|
|
944
|
+
function ensureSchemaComment(content, ci) {
|
|
945
|
+
if (ci !== "forgejo") return content;
|
|
946
|
+
if (content.includes("yaml-language-server")) return content;
|
|
947
|
+
return FORGEJO_SCHEMA_COMMENT + content;
|
|
1038
948
|
}
|
|
1039
|
-
/**
|
|
1040
|
-
|
|
1041
|
-
|
|
1042
|
-
function addReleaseDeps(deps, config) {
|
|
1043
|
-
switch (config.releaseStrategy) {
|
|
1044
|
-
case "release-it":
|
|
1045
|
-
deps["release-it"] = "18.1.2";
|
|
1046
|
-
if (config.structure === "monorepo") deps["@release-it/bumper"] = "7.0.2";
|
|
1047
|
-
break;
|
|
1048
|
-
case "simple": break;
|
|
1049
|
-
case "changesets":
|
|
1050
|
-
deps["@changesets/cli"] = "2.29.4";
|
|
1051
|
-
break;
|
|
1052
|
-
}
|
|
949
|
+
/** Migrate content from old tooling binary name to new. */
|
|
950
|
+
function migrateToolingBinary(content) {
|
|
951
|
+
return content.replaceAll("pnpm exec tooling ", "pnpm exec bst ");
|
|
1053
952
|
}
|
|
1054
|
-
/**
|
|
1055
|
-
function
|
|
1056
|
-
|
|
1057
|
-
if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
|
|
1058
|
-
deps["@bensandee/config"] = "0.9.1";
|
|
1059
|
-
deps["@bensandee/tooling"] = "0.28.0";
|
|
1060
|
-
if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
|
|
1061
|
-
if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
|
|
1062
|
-
addReleaseDeps(deps, config);
|
|
1063
|
-
return Object.keys(deps).filter((name) => !UPDATE_EXCLUDE.has(name));
|
|
953
|
+
/** Check if a YAML file has an opt-out comment in the first 10 lines. */
|
|
954
|
+
function isToolingIgnored(content) {
|
|
955
|
+
return content.split("\n", 10).some((line) => line.includes(IGNORE_PATTERN));
|
|
1064
956
|
}
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
|
|
1072
|
-
|
|
1073
|
-
|
|
957
|
+
/**
|
|
958
|
+
* Ensure required commands exist under `pre-commit.commands` in a lefthook config.
|
|
959
|
+
* Only adds missing commands — never modifies existing ones.
|
|
960
|
+
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
961
|
+
*/
|
|
962
|
+
function mergeLefthookCommands(existing, requiredCommands) {
|
|
963
|
+
if (isToolingIgnored(existing)) return {
|
|
964
|
+
content: existing,
|
|
965
|
+
changed: false
|
|
1074
966
|
};
|
|
1075
|
-
|
|
1076
|
-
|
|
1077
|
-
|
|
1078
|
-
|
|
1079
|
-
|
|
1080
|
-
|
|
1081
|
-
|
|
1082
|
-
|
|
1083
|
-
|
|
1084
|
-
devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.28.0";
|
|
1085
|
-
if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.2";
|
|
1086
|
-
if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
|
|
1087
|
-
if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
|
|
1088
|
-
addReleaseDeps(devDeps, ctx.config);
|
|
1089
|
-
if (existing) {
|
|
1090
|
-
const pkg = parsePackageJson(existing);
|
|
1091
|
-
if (!pkg) return {
|
|
1092
|
-
filePath,
|
|
1093
|
-
action: "skipped",
|
|
1094
|
-
description: "Could not parse existing package.json"
|
|
1095
|
-
};
|
|
1096
|
-
const changes = [];
|
|
1097
|
-
if (pkg.type !== "module") {
|
|
1098
|
-
pkg.type = "module";
|
|
1099
|
-
changes.push("set type: \"module\"");
|
|
1100
|
-
}
|
|
1101
|
-
const existingScripts = pkg.scripts ?? {};
|
|
1102
|
-
for (const [key, value] of Object.entries(existingScripts)) if (typeof value === "string" && value.includes("pnpm exec tooling ")) {
|
|
1103
|
-
existingScripts[key] = migrateToolingBinary(value);
|
|
1104
|
-
changes.push(`migrated script: ${key}`);
|
|
1105
|
-
}
|
|
1106
|
-
for (const [key, value] of Object.entries(allScripts)) if (!(key in existingScripts)) {
|
|
1107
|
-
existingScripts[key] = value;
|
|
1108
|
-
changes.push(`added script: ${key}`);
|
|
1109
|
-
} else if (key in MANAGED_SCRIPTS && !matchesManagedScript(existingScripts[key] ?? "", MANAGED_SCRIPTS[key] ?? "")) {
|
|
1110
|
-
existingScripts[key] = value;
|
|
1111
|
-
changes.push(`updated script: ${key}`);
|
|
1112
|
-
}
|
|
1113
|
-
for (const key of DEPRECATED_SCRIPTS) if (key in existingScripts) {
|
|
1114
|
-
delete existingScripts[key];
|
|
1115
|
-
changes.push(`removed deprecated script: ${key}`);
|
|
1116
|
-
}
|
|
1117
|
-
pkg.scripts = existingScripts;
|
|
1118
|
-
const existingDevDeps = pkg.devDependencies ?? {};
|
|
1119
|
-
for (const [key, value] of Object.entries(devDeps)) if (!(key in existingDevDeps)) {
|
|
1120
|
-
existingDevDeps[key] = value;
|
|
1121
|
-
changes.push(`added devDependency: ${key}`);
|
|
1122
|
-
} else if (key.startsWith("@bensandee/") && existingDevDeps[key] !== value && existingDevDeps[key] !== "workspace:*") {
|
|
1123
|
-
existingDevDeps[key] = value;
|
|
1124
|
-
changes.push(`updated devDependency: ${key} to ${value}`);
|
|
967
|
+
try {
|
|
968
|
+
const doc = parseDocument(existing);
|
|
969
|
+
let changed = false;
|
|
970
|
+
if (!doc.hasIn(["pre-commit", "commands"])) {
|
|
971
|
+
doc.setIn(["pre-commit", "commands"], requiredCommands);
|
|
972
|
+
return {
|
|
973
|
+
content: doc.toString(),
|
|
974
|
+
changed: true
|
|
975
|
+
};
|
|
1125
976
|
}
|
|
1126
|
-
|
|
1127
|
-
if (!
|
|
1128
|
-
|
|
1129
|
-
|
|
977
|
+
const commands = doc.getIn(["pre-commit", "commands"]);
|
|
978
|
+
if (!isMap(commands)) return {
|
|
979
|
+
content: existing,
|
|
980
|
+
changed: false
|
|
981
|
+
};
|
|
982
|
+
for (const [name, config] of Object.entries(requiredCommands)) if (!commands.has(name)) {
|
|
983
|
+
commands.set(name, config);
|
|
984
|
+
changed = true;
|
|
1130
985
|
}
|
|
1131
|
-
|
|
1132
|
-
|
|
1133
|
-
|
|
1134
|
-
description: "Already up to spec"
|
|
986
|
+
return {
|
|
987
|
+
content: changed ? doc.toString() : existing,
|
|
988
|
+
changed
|
|
1135
989
|
};
|
|
1136
|
-
|
|
990
|
+
} catch {
|
|
1137
991
|
return {
|
|
1138
|
-
|
|
1139
|
-
|
|
1140
|
-
description: changes.join(", ")
|
|
992
|
+
content: existing,
|
|
993
|
+
changed: false
|
|
1141
994
|
};
|
|
1142
995
|
}
|
|
1143
|
-
const pkg = {
|
|
1144
|
-
name: ctx.config.name,
|
|
1145
|
-
version: "0.1.0",
|
|
1146
|
-
private: true,
|
|
1147
|
-
type: "module",
|
|
1148
|
-
scripts: allScripts,
|
|
1149
|
-
devDependencies: devDeps,
|
|
1150
|
-
engines: { node: ">=24.13.0" },
|
|
1151
|
-
packageManager: "pnpm@10.29.3"
|
|
1152
|
-
};
|
|
1153
|
-
ctx.write(filePath, JSON.stringify(pkg, null, 2) + "\n");
|
|
1154
|
-
return {
|
|
1155
|
-
filePath,
|
|
1156
|
-
action: "created",
|
|
1157
|
-
description: "Generated package.json"
|
|
1158
|
-
};
|
|
1159
996
|
}
|
|
1160
|
-
|
|
1161
|
-
|
|
1162
|
-
|
|
1163
|
-
|
|
1164
|
-
|
|
1165
|
-
|
|
1166
|
-
|
|
1167
|
-
|
|
1168
|
-
|
|
1169
|
-
|
|
1170
|
-
|
|
1171
|
-
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
1187
|
-
|
|
1188
|
-
|
|
1189
|
-
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
|
|
1193
|
-
|
|
997
|
+
/**
|
|
998
|
+
* Ensure required steps exist in a workflow job's steps array.
|
|
999
|
+
* Only adds missing steps at the end — never modifies existing ones.
|
|
1000
|
+
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
1001
|
+
*/
|
|
1002
|
+
function mergeWorkflowSteps(existing, jobName, requiredSteps) {
|
|
1003
|
+
if (isToolingIgnored(existing)) return {
|
|
1004
|
+
content: existing,
|
|
1005
|
+
changed: false
|
|
1006
|
+
};
|
|
1007
|
+
try {
|
|
1008
|
+
const doc = parseDocument(existing);
|
|
1009
|
+
const steps = doc.getIn([
|
|
1010
|
+
"jobs",
|
|
1011
|
+
jobName,
|
|
1012
|
+
"steps"
|
|
1013
|
+
]);
|
|
1014
|
+
if (!isSeq(steps)) return {
|
|
1015
|
+
content: existing,
|
|
1016
|
+
changed: false
|
|
1017
|
+
};
|
|
1018
|
+
let changed = false;
|
|
1019
|
+
for (const { match, step } of requiredSteps) if (!steps.items.some((item) => {
|
|
1020
|
+
if (!isMap(item)) return false;
|
|
1021
|
+
if (match.run) {
|
|
1022
|
+
const run = item.get("run");
|
|
1023
|
+
return typeof run === "string" && run.includes(match.run);
|
|
1024
|
+
}
|
|
1025
|
+
if (match.uses) {
|
|
1026
|
+
const uses = item.get("uses");
|
|
1027
|
+
return typeof uses === "string" && uses.startsWith(match.uses);
|
|
1028
|
+
}
|
|
1029
|
+
return false;
|
|
1030
|
+
})) {
|
|
1031
|
+
steps.add(doc.createNode(step));
|
|
1032
|
+
changed = true;
|
|
1194
1033
|
}
|
|
1195
|
-
return
|
|
1034
|
+
return {
|
|
1035
|
+
content: changed ? doc.toString() : existing,
|
|
1036
|
+
changed
|
|
1037
|
+
};
|
|
1038
|
+
} catch {
|
|
1039
|
+
return {
|
|
1040
|
+
content: existing,
|
|
1041
|
+
changed: false
|
|
1042
|
+
};
|
|
1196
1043
|
}
|
|
1197
|
-
return [mergeSingleTsconfig(ctx, filePath, extendsValue)];
|
|
1198
|
-
}
|
|
1199
|
-
function isSolutionStyle(parsed) {
|
|
1200
|
-
return Array.isArray(parsed.references) && parsed.references.length > 0 && Array.isArray(parsed.files) && parsed.files.length === 0;
|
|
1201
|
-
}
|
|
1202
|
-
function resolveReferencePath(refPath) {
|
|
1203
|
-
const resolved = refPath.endsWith(".json") ? refPath : path.join(refPath, "tsconfig.json");
|
|
1204
|
-
return path.normalize(resolved);
|
|
1205
1044
|
}
|
|
1206
|
-
|
|
1207
|
-
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
|
|
1211
|
-
|
|
1045
|
+
/**
|
|
1046
|
+
* Add a job to an existing workflow YAML if it doesn't already exist.
|
|
1047
|
+
* Returns unchanged content if the job already exists, the file has an opt-out comment,
|
|
1048
|
+
* or the document can't be parsed.
|
|
1049
|
+
*/
|
|
1050
|
+
/**
|
|
1051
|
+
* Ensure a `concurrency` block exists at the workflow top level.
|
|
1052
|
+
* Adds it if missing — never modifies an existing one.
|
|
1053
|
+
* Returns unchanged content if the file has an opt-out comment or can't be parsed.
|
|
1054
|
+
*/
|
|
1055
|
+
/**
|
|
1056
|
+
* Ensure `on.push` has `tags-ignore: ["**"]` so tag pushes don't trigger CI.
|
|
1057
|
+
* Only adds the filter when `on.push` exists and `tags-ignore` is absent.
|
|
1058
|
+
*/
|
|
1059
|
+
function ensureWorkflowTagsIgnore(existing) {
|
|
1060
|
+
if (isToolingIgnored(existing)) return {
|
|
1061
|
+
content: existing,
|
|
1062
|
+
changed: false
|
|
1212
1063
|
};
|
|
1213
|
-
|
|
1214
|
-
|
|
1215
|
-
|
|
1216
|
-
|
|
1064
|
+
try {
|
|
1065
|
+
const doc = parseDocument(existing);
|
|
1066
|
+
const on = doc.get("on");
|
|
1067
|
+
if (!isMap(on)) return {
|
|
1068
|
+
content: existing,
|
|
1069
|
+
changed: false
|
|
1070
|
+
};
|
|
1071
|
+
const push = on.get("push");
|
|
1072
|
+
if (!isMap(push)) return {
|
|
1073
|
+
content: existing,
|
|
1074
|
+
changed: false
|
|
1075
|
+
};
|
|
1076
|
+
if (push.has("tags-ignore")) return {
|
|
1077
|
+
content: existing,
|
|
1078
|
+
changed: false
|
|
1079
|
+
};
|
|
1080
|
+
push.set("tags-ignore", ["**"]);
|
|
1081
|
+
return {
|
|
1082
|
+
content: doc.toString(),
|
|
1083
|
+
changed: true
|
|
1084
|
+
};
|
|
1085
|
+
} catch {
|
|
1086
|
+
return {
|
|
1087
|
+
content: existing,
|
|
1088
|
+
changed: false
|
|
1089
|
+
};
|
|
1090
|
+
}
|
|
1091
|
+
}
|
|
1092
|
+
function ensureWorkflowConcurrency(existing, concurrency) {
|
|
1093
|
+
if (isToolingIgnored(existing)) return {
|
|
1094
|
+
content: existing,
|
|
1095
|
+
changed: false
|
|
1217
1096
|
};
|
|
1218
|
-
|
|
1219
|
-
|
|
1220
|
-
|
|
1221
|
-
|
|
1222
|
-
|
|
1097
|
+
try {
|
|
1098
|
+
const doc = parseDocument(existing);
|
|
1099
|
+
if (doc.has("concurrency")) return {
|
|
1100
|
+
content: existing,
|
|
1101
|
+
changed: false
|
|
1102
|
+
};
|
|
1103
|
+
doc.set("concurrency", concurrency);
|
|
1104
|
+
const contents = doc.contents;
|
|
1105
|
+
if (isMap(contents)) {
|
|
1106
|
+
const items = contents.items;
|
|
1107
|
+
const nameIdx = items.findIndex((p) => isScalar(p.key) && p.key.value === "name");
|
|
1108
|
+
const concPair = items.pop();
|
|
1109
|
+
if (concPair) items.splice(nameIdx + 1, 0, concPair);
|
|
1110
|
+
}
|
|
1111
|
+
return {
|
|
1112
|
+
content: doc.toString(),
|
|
1113
|
+
changed: true
|
|
1114
|
+
};
|
|
1115
|
+
} catch {
|
|
1116
|
+
return {
|
|
1117
|
+
content: existing,
|
|
1118
|
+
changed: false
|
|
1119
|
+
};
|
|
1223
1120
|
}
|
|
1224
|
-
|
|
1225
|
-
|
|
1226
|
-
|
|
1227
|
-
|
|
1228
|
-
|
|
1229
|
-
|
|
1121
|
+
}
|
|
1122
|
+
//#endregion
|
|
1123
|
+
//#region src/generators/ci-utils.ts
|
|
1124
|
+
/** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
|
|
1125
|
+
function actionsExpr(expr) {
|
|
1126
|
+
return `\${{ ${expr} }}`;
|
|
1127
|
+
}
|
|
1128
|
+
function hasEnginesNode(ctx) {
|
|
1129
|
+
const raw = ctx.read("package.json");
|
|
1130
|
+
if (!raw) return false;
|
|
1131
|
+
return typeof parsePackageJson(raw)?.engines?.["node"] === "string";
|
|
1132
|
+
}
|
|
1133
|
+
function computeNodeVersionYaml(ctx) {
|
|
1134
|
+
return hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
|
|
1135
|
+
}
|
|
1136
|
+
//#endregion
|
|
1137
|
+
//#region src/generators/publish-ci.ts
|
|
1138
|
+
function deployWorkflow(ci, nodeVersionYaml) {
|
|
1139
|
+
return `${workflowSchemaComment(ci)}name: Publish
|
|
1140
|
+
on:
|
|
1141
|
+
push:
|
|
1142
|
+
tags:
|
|
1143
|
+
- "v[0-9]+.[0-9]+.[0-9]+"
|
|
1144
|
+
|
|
1145
|
+
jobs:
|
|
1146
|
+
publish:
|
|
1147
|
+
runs-on: ubuntu-latest
|
|
1148
|
+
steps:
|
|
1149
|
+
- uses: actions/checkout@v4
|
|
1150
|
+
- uses: pnpm/action-setup@v4
|
|
1151
|
+
- uses: actions/setup-node@v4
|
|
1152
|
+
with:
|
|
1153
|
+
${nodeVersionYaml}
|
|
1154
|
+
- run: pnpm install --frozen-lockfile
|
|
1155
|
+
- name: Publish Docker images
|
|
1156
|
+
env:
|
|
1157
|
+
DOCKER_REGISTRY_HOST: ${actionsExpr("vars.DOCKER_REGISTRY_HOST")}
|
|
1158
|
+
DOCKER_REGISTRY_NAMESPACE: ${actionsExpr("vars.DOCKER_REGISTRY_NAMESPACE")}
|
|
1159
|
+
DOCKER_USERNAME: ${actionsExpr("secrets.DOCKER_USERNAME")}
|
|
1160
|
+
DOCKER_PASSWORD: ${actionsExpr("secrets.DOCKER_PASSWORD")}
|
|
1161
|
+
run: pnpm exec bst docker:publish
|
|
1162
|
+
`;
|
|
1163
|
+
}
|
|
1164
|
+
function requiredDeploySteps() {
|
|
1165
|
+
return [
|
|
1166
|
+
{
|
|
1167
|
+
match: { uses: "actions/checkout" },
|
|
1168
|
+
step: { uses: "actions/checkout@v4" }
|
|
1169
|
+
},
|
|
1170
|
+
{
|
|
1171
|
+
match: { uses: "pnpm/action-setup" },
|
|
1172
|
+
step: { uses: "pnpm/action-setup@v4" }
|
|
1173
|
+
},
|
|
1174
|
+
{
|
|
1175
|
+
match: { uses: "actions/setup-node" },
|
|
1176
|
+
step: { uses: "actions/setup-node@v4" }
|
|
1177
|
+
},
|
|
1178
|
+
{
|
|
1179
|
+
match: { run: "pnpm install" },
|
|
1180
|
+
step: { run: "pnpm install --frozen-lockfile" }
|
|
1181
|
+
},
|
|
1182
|
+
{
|
|
1183
|
+
match: { run: "docker:publish" },
|
|
1184
|
+
step: { run: "pnpm exec bst docker:publish" }
|
|
1230
1185
|
}
|
|
1186
|
+
];
|
|
1187
|
+
}
|
|
1188
|
+
const DockerMapSchema = z.object({ docker: z.record(z.string(), z.unknown()).optional() });
|
|
1189
|
+
/** Get names of packages that have Docker builds (by convention or .tooling.json config). */
|
|
1190
|
+
function getDockerPackageNames(ctx) {
|
|
1191
|
+
const names = [];
|
|
1192
|
+
const configRaw = ctx.read(".tooling.json");
|
|
1193
|
+
if (configRaw) {
|
|
1194
|
+
const result = DockerMapSchema.safeParse(JSON.parse(configRaw));
|
|
1195
|
+
if (result.success && result.data.docker) names.push(...Object.keys(result.data.docker));
|
|
1231
1196
|
}
|
|
1232
|
-
if (
|
|
1233
|
-
|
|
1234
|
-
|
|
1235
|
-
|
|
1236
|
-
|
|
1237
|
-
|
|
1238
|
-
|
|
1239
|
-
|
|
1240
|
-
|
|
1241
|
-
|
|
1242
|
-
}
|
|
1197
|
+
if (ctx.config.structure === "monorepo") {
|
|
1198
|
+
const packages = getMonorepoPackages(ctx.targetDir);
|
|
1199
|
+
for (const pkg of packages) {
|
|
1200
|
+
const dirName = pkg.name.split("/").pop() ?? pkg.name;
|
|
1201
|
+
if (names.includes(dirName)) continue;
|
|
1202
|
+
for (const rel of CONVENTION_DOCKERFILE_PATHS) if (ctx.exists(`packages/${dirName}/${rel}`)) {
|
|
1203
|
+
names.push(dirName);
|
|
1204
|
+
break;
|
|
1205
|
+
}
|
|
1206
|
+
}
|
|
1207
|
+
} else for (const rel of CONVENTION_DOCKERFILE_PATHS) if (ctx.exists(rel)) {
|
|
1208
|
+
if (!names.includes(ctx.config.name)) names.push(ctx.config.name);
|
|
1209
|
+
break;
|
|
1210
|
+
}
|
|
1211
|
+
return names;
|
|
1243
1212
|
}
|
|
1244
|
-
|
|
1245
|
-
|
|
1246
|
-
|
|
1213
|
+
/** Check whether any Docker packages exist by convention or .tooling.json config. */
|
|
1214
|
+
function hasDockerPackages(ctx) {
|
|
1215
|
+
return getDockerPackageNames(ctx).length > 0;
|
|
1216
|
+
}
|
|
1217
|
+
async function generateDeployCi(ctx) {
|
|
1218
|
+
const filePath = "deploy-ci";
|
|
1219
|
+
if (!ctx.config.publishDocker || ctx.config.ci === "none") return {
|
|
1247
1220
|
filePath,
|
|
1248
1221
|
action: "skipped",
|
|
1249
|
-
description: "
|
|
1222
|
+
description: "Deploy CI workflow not applicable"
|
|
1250
1223
|
};
|
|
1251
|
-
|
|
1252
|
-
|
|
1253
|
-
|
|
1254
|
-
|
|
1255
|
-
|
|
1256
|
-
|
|
1257
|
-
|
|
1258
|
-
|
|
1259
|
-
|
|
1260
|
-
|
|
1261
|
-
|
|
1262
|
-
|
|
1263
|
-
|
|
1264
|
-
|
|
1265
|
-
|
|
1266
|
-
|
|
1267
|
-
|
|
1268
|
-
|
|
1269
|
-
filePath,
|
|
1224
|
+
const isGitHub = ctx.config.ci === "github";
|
|
1225
|
+
const workflowPath = isGitHub ? ".github/workflows/publish.yml" : ".forgejo/workflows/publish.yml";
|
|
1226
|
+
const nodeVersionYaml = computeNodeVersionYaml(ctx);
|
|
1227
|
+
const content = deployWorkflow(ctx.config.ci, nodeVersionYaml);
|
|
1228
|
+
if (ctx.exists(workflowPath)) {
|
|
1229
|
+
const raw = ctx.read(workflowPath);
|
|
1230
|
+
if (raw) {
|
|
1231
|
+
const existing = migrateToolingBinary(raw);
|
|
1232
|
+
if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) {
|
|
1233
|
+
if (existing !== raw) {
|
|
1234
|
+
ctx.write(workflowPath, ensureSchemaComment(existing, ctx.config.ci));
|
|
1235
|
+
return {
|
|
1236
|
+
filePath: workflowPath,
|
|
1237
|
+
action: "updated",
|
|
1238
|
+
description: "Migrated tooling binary name in publish workflow"
|
|
1239
|
+
};
|
|
1240
|
+
}
|
|
1241
|
+
return {
|
|
1242
|
+
filePath: workflowPath,
|
|
1270
1243
|
action: "skipped",
|
|
1271
|
-
description: "
|
|
1272
|
-
}
|
|
1273
|
-
continue;
|
|
1244
|
+
description: "Publish workflow already up to date"
|
|
1245
|
+
};
|
|
1274
1246
|
}
|
|
1275
|
-
const
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1279
|
-
|
|
1280
|
-
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
|
|
1284
|
-
|
|
1247
|
+
const merged = mergeWorkflowSteps(existing, "publish", requiredDeploySteps());
|
|
1248
|
+
const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
|
|
1249
|
+
if (!merged.changed) {
|
|
1250
|
+
if (withComment !== raw) {
|
|
1251
|
+
ctx.write(workflowPath, withComment);
|
|
1252
|
+
return {
|
|
1253
|
+
filePath: workflowPath,
|
|
1254
|
+
action: "updated",
|
|
1255
|
+
description: existing !== raw ? "Migrated tooling binary name in publish workflow" : "Added schema comment to publish workflow"
|
|
1256
|
+
};
|
|
1285
1257
|
}
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
const changes = [];
|
|
1289
|
-
if (parsed.extends !== extendsValue) {
|
|
1290
|
-
const prev = parsed.extends;
|
|
1291
|
-
parsed.extends = extendsValue;
|
|
1292
|
-
changes.push(prev ? `changed extends: ${String(prev)} → ${extendsValue}` : `added extends: ${extendsValue}`);
|
|
1293
|
-
}
|
|
1294
|
-
if (!parsed.include && ctx.exists(path.join(relDir, "src"))) {
|
|
1295
|
-
parsed.include = ["src"];
|
|
1296
|
-
changes.push("added include: [\"src\"]");
|
|
1297
|
-
}
|
|
1298
|
-
if (changes.length === 0) {
|
|
1299
|
-
results.push({
|
|
1300
|
-
filePath,
|
|
1258
|
+
return {
|
|
1259
|
+
filePath: workflowPath,
|
|
1301
1260
|
action: "skipped",
|
|
1302
|
-
description:
|
|
1303
|
-
}
|
|
1304
|
-
continue;
|
|
1261
|
+
description: "Existing publish workflow preserved"
|
|
1262
|
+
};
|
|
1305
1263
|
}
|
|
1306
|
-
ctx.write(
|
|
1307
|
-
|
|
1308
|
-
filePath,
|
|
1264
|
+
ctx.write(workflowPath, withComment);
|
|
1265
|
+
return {
|
|
1266
|
+
filePath: workflowPath,
|
|
1309
1267
|
action: "updated",
|
|
1310
|
-
description:
|
|
1311
|
-
});
|
|
1312
|
-
} else {
|
|
1313
|
-
const config = {
|
|
1314
|
-
extends: extendsValue,
|
|
1315
|
-
...ctx.exists(path.join(relDir, "src")) ? { include: ["src"] } : {}
|
|
1268
|
+
description: "Added missing steps to publish workflow"
|
|
1316
1269
|
};
|
|
1317
|
-
ctx.write(filePath, JSON.stringify(config, null, 2) + "\n");
|
|
1318
|
-
results.push({
|
|
1319
|
-
filePath,
|
|
1320
|
-
action: "created",
|
|
1321
|
-
description: `Generated tsconfig.json with @bensandee/config/tsconfig/${projectType} (detected ${projectType})`
|
|
1322
|
-
});
|
|
1323
1270
|
}
|
|
1324
|
-
|
|
1325
|
-
|
|
1326
|
-
}
|
|
1327
|
-
//#endregion
|
|
1328
|
-
//#region src/generators/vitest.ts
|
|
1329
|
-
const VITEST_CONFIG = `import { defineConfig } from "vitest/config";
|
|
1330
|
-
|
|
1331
|
-
export default defineConfig({
|
|
1332
|
-
test: {
|
|
1333
|
-
include: ["test/**/*.test.ts"],
|
|
1334
|
-
},
|
|
1335
|
-
});
|
|
1336
|
-
`;
|
|
1337
|
-
const STARTER_TEST = `import { describe, it, expect } from "vitest";
|
|
1338
|
-
|
|
1339
|
-
describe("example", () => {
|
|
1340
|
-
it("should pass a basic assertion", () => {
|
|
1341
|
-
expect(1 + 1).toBe(2);
|
|
1342
|
-
});
|
|
1343
|
-
});
|
|
1344
|
-
`;
|
|
1345
|
-
async function generateVitest(ctx) {
|
|
1346
|
-
const results = [];
|
|
1347
|
-
if (!ctx.config.setupVitest) return [{
|
|
1348
|
-
filePath: "vitest.config.ts",
|
|
1349
|
-
action: "skipped",
|
|
1350
|
-
description: "Vitest setup not requested"
|
|
1351
|
-
}];
|
|
1352
|
-
if (ctx.config.structure === "monorepo") return [{
|
|
1353
|
-
filePath: "vitest.config.ts",
|
|
1354
|
-
action: "skipped",
|
|
1355
|
-
description: "Monorepo: vitest config belongs in individual packages"
|
|
1356
|
-
}];
|
|
1357
|
-
const configPath = "vitest.config.ts";
|
|
1358
|
-
if (ctx.exists(configPath)) if (ctx.read(configPath) === VITEST_CONFIG) results.push({
|
|
1359
|
-
filePath: configPath,
|
|
1360
|
-
action: "skipped",
|
|
1361
|
-
description: "Config already up to date"
|
|
1362
|
-
});
|
|
1363
|
-
else if (await ctx.confirmOverwrite(configPath) === "skip") results.push({
|
|
1364
|
-
filePath: configPath,
|
|
1365
|
-
action: "skipped",
|
|
1366
|
-
description: "Existing config preserved"
|
|
1367
|
-
});
|
|
1368
|
-
else {
|
|
1369
|
-
ctx.write(configPath, VITEST_CONFIG);
|
|
1370
|
-
results.push({
|
|
1371
|
-
filePath: configPath,
|
|
1372
|
-
action: "updated",
|
|
1373
|
-
description: "Replaced vitest config"
|
|
1374
|
-
});
|
|
1375
|
-
}
|
|
1376
|
-
else {
|
|
1377
|
-
ctx.write(configPath, VITEST_CONFIG);
|
|
1378
|
-
results.push({
|
|
1379
|
-
filePath: configPath,
|
|
1380
|
-
action: "created",
|
|
1381
|
-
description: "Generated vitest.config.ts"
|
|
1382
|
-
});
|
|
1383
|
-
}
|
|
1384
|
-
const testPath = "test/example.test.ts";
|
|
1385
|
-
if (!ctx.exists("test")) {
|
|
1386
|
-
ctx.write(testPath, STARTER_TEST);
|
|
1387
|
-
results.push({
|
|
1388
|
-
filePath: testPath,
|
|
1389
|
-
action: "created",
|
|
1390
|
-
description: "Generated starter test file"
|
|
1391
|
-
});
|
|
1392
|
-
}
|
|
1393
|
-
return results;
|
|
1394
|
-
}
|
|
1395
|
-
//#endregion
|
|
1396
|
-
//#region src/generators/oxlint.ts
|
|
1397
|
-
const CONFIG_WITH_LINT_RULES = `import recommended from "@bensandee/config/oxlint/recommended";
|
|
1398
|
-
import { defineConfig } from "oxlint";
|
|
1399
|
-
|
|
1400
|
-
export default defineConfig({
|
|
1401
|
-
extends: [recommended],
|
|
1402
|
-
});
|
|
1403
|
-
`;
|
|
1404
|
-
const CONFIG_PRESET_ONLY = `import { presetRules } from "@bensandee/config/oxlint";
|
|
1405
|
-
import { defineConfig } from "oxlint";
|
|
1406
|
-
|
|
1407
|
-
export default defineConfig({
|
|
1408
|
-
rules: presetRules,
|
|
1409
|
-
});
|
|
1410
|
-
`;
|
|
1411
|
-
async function generateOxlint(ctx) {
|
|
1412
|
-
const filePath = "oxlint.config.ts";
|
|
1413
|
-
const content = ctx.config.useEslintPlugin ? CONFIG_WITH_LINT_RULES : CONFIG_PRESET_ONLY;
|
|
1414
|
-
const existing = ctx.read(filePath);
|
|
1415
|
-
if (existing) {
|
|
1416
|
-
if (existing === content || existing.includes("@bensandee/config/oxlint")) return {
|
|
1417
|
-
filePath,
|
|
1418
|
-
action: "skipped",
|
|
1419
|
-
description: "Already configured"
|
|
1420
|
-
};
|
|
1421
|
-
if (await ctx.confirmOverwrite(filePath) === "skip") return {
|
|
1422
|
-
filePath,
|
|
1271
|
+
return {
|
|
1272
|
+
filePath: workflowPath,
|
|
1423
1273
|
action: "skipped",
|
|
1424
|
-
description: "
|
|
1274
|
+
description: "Publish workflow already up to date"
|
|
1425
1275
|
};
|
|
1426
1276
|
}
|
|
1427
|
-
ctx.write(
|
|
1428
|
-
return {
|
|
1429
|
-
filePath,
|
|
1430
|
-
action: existing ? "updated" : "created",
|
|
1431
|
-
description: "Generated oxlint.config.ts"
|
|
1432
|
-
};
|
|
1433
|
-
}
|
|
1434
|
-
//#endregion
|
|
1435
|
-
//#region src/generators/formatter.ts
|
|
1436
|
-
const OXFMT_CONFIG = `{}\n`;
|
|
1437
|
-
const PRETTIER_CONFIG = `{
|
|
1438
|
-
"semi": true,
|
|
1439
|
-
"singleQuote": false,
|
|
1440
|
-
"trailingComma": "all",
|
|
1441
|
-
"tabWidth": 2,
|
|
1442
|
-
"printWidth": 120
|
|
1443
|
-
}
|
|
1444
|
-
`;
|
|
1445
|
-
async function generateFormatter(ctx) {
|
|
1446
|
-
if (ctx.config.formatter === "oxfmt") return generateOxfmt(ctx);
|
|
1447
|
-
return generatePrettier(ctx);
|
|
1448
|
-
}
|
|
1449
|
-
async function generateOxfmt(ctx) {
|
|
1450
|
-
const filePath = ".oxfmtrc.json";
|
|
1451
|
-
if (ctx.exists(filePath)) return {
|
|
1452
|
-
filePath,
|
|
1453
|
-
action: "skipped",
|
|
1454
|
-
description: "Existing oxfmt config preserved"
|
|
1455
|
-
};
|
|
1456
|
-
ctx.write(filePath, OXFMT_CONFIG);
|
|
1457
|
-
return {
|
|
1458
|
-
filePath,
|
|
1459
|
-
action: "created",
|
|
1460
|
-
description: "Generated .oxfmtrc.json"
|
|
1461
|
-
};
|
|
1462
|
-
}
|
|
1463
|
-
async function generatePrettier(ctx) {
|
|
1464
|
-
const filePath = ".prettierrc";
|
|
1465
|
-
if (ctx.exists(filePath)) return {
|
|
1466
|
-
filePath,
|
|
1467
|
-
action: "skipped",
|
|
1468
|
-
description: "Existing prettier config preserved"
|
|
1469
|
-
};
|
|
1470
|
-
ctx.write(filePath, PRETTIER_CONFIG);
|
|
1277
|
+
ctx.write(workflowPath, content);
|
|
1471
1278
|
return {
|
|
1472
|
-
filePath,
|
|
1279
|
+
filePath: workflowPath,
|
|
1473
1280
|
action: "created",
|
|
1474
|
-
description:
|
|
1281
|
+
description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions publish workflow`
|
|
1475
1282
|
};
|
|
1476
1283
|
}
|
|
1477
1284
|
//#endregion
|
|
1478
|
-
//#region src/generators/
|
|
1479
|
-
const
|
|
1480
|
-
|
|
1481
|
-
|
|
1482
|
-
|
|
1483
|
-
|
|
1484
|
-
|
|
1485
|
-
|
|
1486
|
-
|
|
1487
|
-
|
|
1488
|
-
|
|
1489
|
-
|
|
1490
|
-
|
|
1491
|
-
|
|
1492
|
-
|
|
1493
|
-
|
|
1494
|
-
|
|
1495
|
-
|
|
1496
|
-
|
|
1497
|
-
|
|
1498
|
-
|
|
1499
|
-
|
|
1500
|
-
|
|
1501
|
-
|
|
1502
|
-
|
|
1503
|
-
|
|
1285
|
+
//#region src/generators/package-json.ts
|
|
1286
|
+
const STANDARD_SCRIPTS_SINGLE = {
|
|
1287
|
+
build: "tsdown",
|
|
1288
|
+
dev: "tsdown --watch",
|
|
1289
|
+
typecheck: "tsc --noEmit",
|
|
1290
|
+
test: "vitest run",
|
|
1291
|
+
lint: "oxlint",
|
|
1292
|
+
knip: "knip",
|
|
1293
|
+
check: "bst checks:run",
|
|
1294
|
+
"ci:check": "pnpm check --skip 'docker:*'",
|
|
1295
|
+
"tooling:check": "bst repo:sync --check",
|
|
1296
|
+
"tooling:sync": "bst repo:sync"
|
|
1297
|
+
};
|
|
1298
|
+
const STANDARD_SCRIPTS_MONOREPO = {
|
|
1299
|
+
build: "pnpm -r build",
|
|
1300
|
+
test: "pnpm -r test",
|
|
1301
|
+
typecheck: "pnpm -r --parallel run typecheck",
|
|
1302
|
+
lint: "oxlint",
|
|
1303
|
+
knip: "knip",
|
|
1304
|
+
check: "bst checks:run",
|
|
1305
|
+
"ci:check": "pnpm check --skip 'docker:*'",
|
|
1306
|
+
"tooling:check": "bst repo:sync --check",
|
|
1307
|
+
"tooling:sync": "bst repo:sync"
|
|
1308
|
+
};
|
|
1309
|
+
/** Scripts that tooling owns — map from script name to keyword that must appear in the value. */
|
|
1310
|
+
const MANAGED_SCRIPTS = {
|
|
1311
|
+
check: "bst checks:run",
|
|
1312
|
+
"ci:check": "pnpm check",
|
|
1313
|
+
"tooling:check": "bst repo:sync --check",
|
|
1314
|
+
"tooling:sync": "bst repo:sync",
|
|
1315
|
+
"trigger-release": "bst release:trigger",
|
|
1316
|
+
"docker:build": "bst docker:build",
|
|
1317
|
+
"docker:check": "bst docker:check"
|
|
1318
|
+
};
|
|
1319
|
+
/** Check if an existing script value satisfies a managed script requirement.
|
|
1320
|
+
* Accepts both `bst <cmd>` and `bin.mjs <cmd>` (used in the tooling repo itself). */
|
|
1321
|
+
function matchesManagedScript(scriptValue, expectedFragment) {
|
|
1322
|
+
if (scriptValue.includes(expectedFragment)) return true;
|
|
1323
|
+
const binMjsFragment = expectedFragment.replace(/^bst /, "bin.mjs ");
|
|
1324
|
+
return scriptValue.includes(binMjsFragment);
|
|
1325
|
+
}
|
|
1326
|
+
/** Deprecated scripts to remove during migration. */
|
|
1327
|
+
const DEPRECATED_SCRIPTS = ["tooling:init", "tooling:update"];
|
|
1328
|
+
/** DevDeps that belong in every project (single repo) or per-package (monorepo). */
|
|
1329
|
+
const PER_PACKAGE_DEV_DEPS = {
|
|
1330
|
+
"@types/node": "25.3.2",
|
|
1331
|
+
tsdown: "0.20.3",
|
|
1332
|
+
typescript: "5.9.3",
|
|
1333
|
+
vitest: "4.0.18"
|
|
1334
|
+
};
|
|
1335
|
+
/** DevDeps that belong at the root regardless of structure. */
|
|
1336
|
+
const ROOT_DEV_DEPS = {
|
|
1337
|
+
knip: "5.85.0",
|
|
1338
|
+
lefthook: "2.1.2",
|
|
1339
|
+
oxlint: "1.50.0"
|
|
1340
|
+
};
|
|
1341
|
+
/**
|
|
1342
|
+
* Check if a package name is available as a workspace dependency.
|
|
1343
|
+
* Looks for a matching package in the packages/ directory.
|
|
1344
|
+
*/
|
|
1345
|
+
function isWorkspacePackage(ctx, packageName) {
|
|
1346
|
+
if (ctx.config.structure !== "monorepo") return false;
|
|
1347
|
+
const packagesDir = path.join(ctx.targetDir, "packages");
|
|
1348
|
+
if (!existsSync(packagesDir)) return false;
|
|
1349
|
+
try {
|
|
1350
|
+
for (const entry of readdirSync(packagesDir, { withFileTypes: true })) {
|
|
1351
|
+
if (!entry.isDirectory()) continue;
|
|
1352
|
+
const pkgJsonPath = path.join(packagesDir, entry.name, "package.json");
|
|
1353
|
+
if (!existsSync(pkgJsonPath)) continue;
|
|
1354
|
+
try {
|
|
1355
|
+
if (parsePackageJson(readFileSync(pkgJsonPath, "utf-8"))?.name === packageName) return true;
|
|
1356
|
+
} catch (_error) {}
|
|
1357
|
+
}
|
|
1358
|
+
} catch (_error) {}
|
|
1359
|
+
return false;
|
|
1360
|
+
}
|
|
1361
|
+
/** Deps that should not be blindly bumped to latest (version-sensitive). */
|
|
1362
|
+
const UPDATE_EXCLUDE = new Set(["@types/node"]);
|
|
1363
|
+
/** Add release-strategy-specific devDeps to a deps record. */
|
|
1364
|
+
function addReleaseDeps(deps, config) {
|
|
1365
|
+
switch (config.releaseStrategy) {
|
|
1366
|
+
case "release-it":
|
|
1367
|
+
deps["release-it"] = "18.1.2";
|
|
1368
|
+
if (config.structure === "monorepo") deps["@release-it/bumper"] = "7.0.2";
|
|
1369
|
+
break;
|
|
1370
|
+
case "simple": break;
|
|
1371
|
+
case "changesets":
|
|
1372
|
+
deps["@changesets/cli"] = "2.29.4";
|
|
1373
|
+
break;
|
|
1504
1374
|
}
|
|
1505
|
-
ctx.write(filePath, TSDOWN_CONFIG);
|
|
1506
|
-
return {
|
|
1507
|
-
filePath,
|
|
1508
|
-
action: existing ? "updated" : "created",
|
|
1509
|
-
description: "Generated tsdown.config.ts"
|
|
1510
|
-
};
|
|
1511
1375
|
}
|
|
1512
|
-
|
|
1513
|
-
|
|
1514
|
-
|
|
1515
|
-
|
|
1516
|
-
"
|
|
1517
|
-
"
|
|
1518
|
-
"
|
|
1519
|
-
"
|
|
1520
|
-
|
|
1521
|
-
|
|
1522
|
-
"!.env.example"
|
|
1523
|
-
];
|
|
1524
|
-
/** Tooling-specific entries added during init/update but not required for repo:sync --check. */
|
|
1525
|
-
const OPTIONAL_ENTRIES = [".tooling-migrate.md", ".tooling-archived/"];
|
|
1526
|
-
const ALL_ENTRIES = [...REQUIRED_ENTRIES, ...OPTIONAL_ENTRIES];
|
|
1527
|
-
/** Normalize a gitignore entry for comparison: strip leading `/` and trailing `/`. */
|
|
1528
|
-
function normalizeEntry(entry) {
|
|
1529
|
-
let s = entry.trim();
|
|
1530
|
-
if (s.startsWith("/")) s = s.slice(1);
|
|
1531
|
-
if (s.endsWith("/")) s = s.slice(0, -1);
|
|
1532
|
-
return s;
|
|
1376
|
+
/** Returns the list of pinned devDependency names that the tool would add for a given config. */
|
|
1377
|
+
function getAddedDevDepNames(config) {
|
|
1378
|
+
const deps = { ...ROOT_DEV_DEPS };
|
|
1379
|
+
if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
|
|
1380
|
+
deps["@bensandee/config"] = "0.9.1";
|
|
1381
|
+
deps["@bensandee/tooling"] = "0.28.1";
|
|
1382
|
+
if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
|
|
1383
|
+
if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
|
|
1384
|
+
addReleaseDeps(deps, config);
|
|
1385
|
+
return Object.keys(deps).filter((name) => !UPDATE_EXCLUDE.has(name));
|
|
1533
1386
|
}
|
|
1534
|
-
async function
|
|
1535
|
-
const filePath = ".
|
|
1387
|
+
async function generatePackageJson(ctx) {
|
|
1388
|
+
const filePath = "package.json";
|
|
1536
1389
|
const existing = ctx.read(filePath);
|
|
1390
|
+
const isMonorepo = ctx.config.structure === "monorepo";
|
|
1391
|
+
const scripts = isMonorepo ? STANDARD_SCRIPTS_MONOREPO : STANDARD_SCRIPTS_SINGLE;
|
|
1392
|
+
const formatScript = ctx.config.formatter === "oxfmt" ? "oxfmt ." : "prettier --write .";
|
|
1393
|
+
const allScripts = {
|
|
1394
|
+
...scripts,
|
|
1395
|
+
format: formatScript
|
|
1396
|
+
};
|
|
1397
|
+
if (ctx.config.releaseStrategy === "changesets") allScripts["changeset"] = "changeset";
|
|
1398
|
+
if (ctx.config.releaseStrategy !== "none" && ctx.config.releaseStrategy !== "changesets") allScripts["trigger-release"] = "bst release:trigger";
|
|
1399
|
+
if (hasDockerPackages(ctx)) {
|
|
1400
|
+
allScripts["docker:build"] = "bst docker:build";
|
|
1401
|
+
allScripts["docker:check"] = "bst docker:check";
|
|
1402
|
+
}
|
|
1403
|
+
const devDeps = { ...ROOT_DEV_DEPS };
|
|
1404
|
+
if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
|
|
1405
|
+
devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.9.1";
|
|
1406
|
+
devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.28.1";
|
|
1407
|
+
if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.2";
|
|
1408
|
+
if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
|
|
1409
|
+
if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
|
|
1410
|
+
addReleaseDeps(devDeps, ctx.config);
|
|
1537
1411
|
if (existing) {
|
|
1538
|
-
const
|
|
1539
|
-
|
|
1540
|
-
if (missing.length === 0) return {
|
|
1412
|
+
const pkg = parsePackageJson(existing);
|
|
1413
|
+
if (!pkg) return {
|
|
1541
1414
|
filePath,
|
|
1542
1415
|
action: "skipped",
|
|
1543
|
-
description: "
|
|
1416
|
+
description: "Could not parse existing package.json"
|
|
1544
1417
|
};
|
|
1545
|
-
const
|
|
1546
|
-
|
|
1547
|
-
|
|
1548
|
-
|
|
1418
|
+
const changes = [];
|
|
1419
|
+
if (pkg.type !== "module") {
|
|
1420
|
+
pkg.type = "module";
|
|
1421
|
+
changes.push("set type: \"module\"");
|
|
1422
|
+
}
|
|
1423
|
+
const existingScripts = pkg.scripts ?? {};
|
|
1424
|
+
for (const [key, value] of Object.entries(existingScripts)) if (typeof value === "string" && value.includes("pnpm exec tooling ")) {
|
|
1425
|
+
existingScripts[key] = migrateToolingBinary(value);
|
|
1426
|
+
changes.push(`migrated script: ${key}`);
|
|
1427
|
+
}
|
|
1428
|
+
for (const [key, value] of Object.entries(allScripts)) if (!(key in existingScripts)) {
|
|
1429
|
+
existingScripts[key] = value;
|
|
1430
|
+
changes.push(`added script: ${key}`);
|
|
1431
|
+
} else if (key in MANAGED_SCRIPTS && !matchesManagedScript(existingScripts[key] ?? "", MANAGED_SCRIPTS[key] ?? "")) {
|
|
1432
|
+
existingScripts[key] = value;
|
|
1433
|
+
changes.push(`updated script: ${key}`);
|
|
1434
|
+
}
|
|
1435
|
+
for (const key of DEPRECATED_SCRIPTS) if (key in existingScripts) {
|
|
1436
|
+
delete existingScripts[key];
|
|
1437
|
+
changes.push(`removed deprecated script: ${key}`);
|
|
1438
|
+
}
|
|
1439
|
+
pkg.scripts = existingScripts;
|
|
1440
|
+
const existingDevDeps = pkg.devDependencies ?? {};
|
|
1441
|
+
for (const [key, value] of Object.entries(devDeps)) if (!(key in existingDevDeps)) {
|
|
1442
|
+
existingDevDeps[key] = value;
|
|
1443
|
+
changes.push(`added devDependency: ${key}`);
|
|
1444
|
+
} else if (key.startsWith("@bensandee/") && existingDevDeps[key] !== value && existingDevDeps[key] !== "workspace:*") {
|
|
1445
|
+
existingDevDeps[key] = value;
|
|
1446
|
+
changes.push(`updated devDependency: ${key} to ${value}`);
|
|
1447
|
+
}
|
|
1448
|
+
pkg.devDependencies = existingDevDeps;
|
|
1449
|
+
if (!pkg["engines"]) {
|
|
1450
|
+
pkg["engines"] = { node: ">=24.13.0" };
|
|
1451
|
+
changes.push("set engines.node >= 24.13.0");
|
|
1452
|
+
}
|
|
1453
|
+
if (changes.length === 0) return {
|
|
1549
1454
|
filePath,
|
|
1550
1455
|
action: "skipped",
|
|
1551
|
-
description: "
|
|
1456
|
+
description: "Already up to spec"
|
|
1552
1457
|
};
|
|
1458
|
+
ctx.write(filePath, JSON.stringify(pkg, null, 2) + "\n");
|
|
1553
1459
|
return {
|
|
1554
1460
|
filePath,
|
|
1555
1461
|
action: "updated",
|
|
1556
|
-
description:
|
|
1462
|
+
description: changes.join(", ")
|
|
1557
1463
|
};
|
|
1558
1464
|
}
|
|
1559
|
-
|
|
1465
|
+
const pkg = {
|
|
1466
|
+
name: ctx.config.name,
|
|
1467
|
+
version: "0.1.0",
|
|
1468
|
+
private: true,
|
|
1469
|
+
type: "module",
|
|
1470
|
+
scripts: allScripts,
|
|
1471
|
+
devDependencies: devDeps,
|
|
1472
|
+
engines: { node: ">=24.13.0" },
|
|
1473
|
+
packageManager: "pnpm@10.29.3"
|
|
1474
|
+
};
|
|
1475
|
+
ctx.write(filePath, JSON.stringify(pkg, null, 2) + "\n");
|
|
1560
1476
|
return {
|
|
1561
1477
|
filePath,
|
|
1562
1478
|
action: "created",
|
|
1563
|
-
description: "Generated .
|
|
1479
|
+
description: "Generated package.json"
|
|
1564
1480
|
};
|
|
1565
1481
|
}
|
|
1566
1482
|
//#endregion
|
|
1567
|
-
//#region src/generators/
|
|
1568
|
-
|
|
1569
|
-
|
|
1570
|
-
|
|
1571
|
-
|
|
1572
|
-
const
|
|
1573
|
-
|
|
1574
|
-
|
|
1575
|
-
|
|
1576
|
-
|
|
1577
|
-
|
|
1578
|
-
|
|
1579
|
-
|
|
1580
|
-
|
|
1581
|
-
|
|
1582
|
-
|
|
1583
|
-
|
|
1584
|
-
|
|
1585
|
-
|
|
1586
|
-
|
|
1587
|
-
|
|
1588
|
-
|
|
1589
|
-
|
|
1590
|
-
|
|
1591
|
-
|
|
1592
|
-
|
|
1593
|
-
|
|
1594
|
-
|
|
1595
|
-
|
|
1596
|
-
|
|
1597
|
-
|
|
1598
|
-
|
|
1599
|
-
|
|
1600
|
-
- uses: actions/setup-node@v4
|
|
1601
|
-
with:
|
|
1602
|
-
${nodeVersionYaml}
|
|
1603
|
-
cache: pnpm
|
|
1604
|
-
- run: pnpm install --frozen-lockfile
|
|
1605
|
-
- name: Run all checks
|
|
1606
|
-
run: pnpm ci:check
|
|
1607
|
-
`;
|
|
1608
|
-
}
|
|
1609
|
-
function requiredCheckSteps(nodeVersionYaml) {
|
|
1610
|
-
return [
|
|
1611
|
-
{
|
|
1612
|
-
match: { uses: "actions/checkout" },
|
|
1613
|
-
step: { uses: "actions/checkout@v4" }
|
|
1614
|
-
},
|
|
1615
|
-
{
|
|
1616
|
-
match: { uses: "pnpm/action-setup" },
|
|
1617
|
-
step: { uses: "pnpm/action-setup@v4" }
|
|
1618
|
-
},
|
|
1619
|
-
{
|
|
1620
|
-
match: { uses: "actions/setup-node" },
|
|
1621
|
-
step: {
|
|
1622
|
-
uses: "actions/setup-node@v4",
|
|
1623
|
-
with: {
|
|
1624
|
-
...nodeVersionYaml.startsWith("node-version-file") ? { "node-version-file": "package.json" } : { "node-version": "24" },
|
|
1625
|
-
cache: "pnpm"
|
|
1626
|
-
}
|
|
1627
|
-
}
|
|
1628
|
-
},
|
|
1629
|
-
{
|
|
1630
|
-
match: { run: "pnpm install" },
|
|
1631
|
-
step: { run: "pnpm install --frozen-lockfile" }
|
|
1632
|
-
},
|
|
1633
|
-
{
|
|
1634
|
-
match: { run: "check" },
|
|
1635
|
-
step: {
|
|
1636
|
-
name: "Run all checks",
|
|
1637
|
-
run: "pnpm ci:check"
|
|
1638
|
-
}
|
|
1483
|
+
//#region src/generators/tsconfig.ts
|
|
1484
|
+
async function generateTsconfig(ctx) {
|
|
1485
|
+
const filePath = "tsconfig.json";
|
|
1486
|
+
const existing = ctx.read(filePath);
|
|
1487
|
+
if (ctx.config.structure === "monorepo") return [generateMonorepoRootTsconfig(ctx), ...ctx.config.detectPackageTypes ? generateMonorepoPackageTsconfigs(ctx) : []];
|
|
1488
|
+
const extendsValue = `@bensandee/config/tsconfig/${ctx.config.projectType}`;
|
|
1489
|
+
if (!existing) {
|
|
1490
|
+
const config = {
|
|
1491
|
+
extends: extendsValue,
|
|
1492
|
+
...ctx.exists("src") ? { include: ["src"] } : {}
|
|
1493
|
+
};
|
|
1494
|
+
ctx.write(filePath, JSON.stringify(config, null, 2) + "\n");
|
|
1495
|
+
return [{
|
|
1496
|
+
filePath,
|
|
1497
|
+
action: "created",
|
|
1498
|
+
description: `Generated tsconfig.json with ${extendsValue}`
|
|
1499
|
+
}];
|
|
1500
|
+
}
|
|
1501
|
+
if (existing.includes("// @bensandee/tooling:ignore")) return [{
|
|
1502
|
+
filePath,
|
|
1503
|
+
action: "skipped",
|
|
1504
|
+
description: "Ignored via tooling:ignore comment"
|
|
1505
|
+
}];
|
|
1506
|
+
const parsed = parseTsconfig(existing);
|
|
1507
|
+
if (isSolutionStyle(parsed)) {
|
|
1508
|
+
const results = [{
|
|
1509
|
+
filePath,
|
|
1510
|
+
action: "skipped",
|
|
1511
|
+
description: "Solution-style tsconfig — traversing references"
|
|
1512
|
+
}];
|
|
1513
|
+
for (const ref of parsed.references ?? []) {
|
|
1514
|
+
const refPath = resolveReferencePath(ref.path);
|
|
1515
|
+
results.push(mergeSingleTsconfig(ctx, refPath, extendsValue));
|
|
1639
1516
|
}
|
|
1640
|
-
|
|
1517
|
+
return results;
|
|
1518
|
+
}
|
|
1519
|
+
return [mergeSingleTsconfig(ctx, filePath, extendsValue)];
|
|
1641
1520
|
}
|
|
1642
|
-
|
|
1643
|
-
|
|
1644
|
-
return `${ci === "github" ? ".github/workflows" : ".forgejo/workflows"}/${releaseStrategy === "changesets" ? "ci.yml" : "check.yml"}`;
|
|
1521
|
+
function isSolutionStyle(parsed) {
|
|
1522
|
+
return Array.isArray(parsed.references) && parsed.references.length > 0 && Array.isArray(parsed.files) && parsed.files.length === 0;
|
|
1645
1523
|
}
|
|
1646
|
-
|
|
1647
|
-
|
|
1648
|
-
|
|
1524
|
+
function resolveReferencePath(refPath) {
|
|
1525
|
+
const resolved = refPath.endsWith(".json") ? refPath : path.join(refPath, "tsconfig.json");
|
|
1526
|
+
return path.normalize(resolved);
|
|
1527
|
+
}
|
|
1528
|
+
function mergeSingleTsconfig(ctx, filePath, extendsValue) {
|
|
1529
|
+
const existing = ctx.read(filePath);
|
|
1530
|
+
if (!existing) return {
|
|
1531
|
+
filePath,
|
|
1649
1532
|
action: "skipped",
|
|
1650
|
-
description: "
|
|
1533
|
+
description: "File not found"
|
|
1651
1534
|
};
|
|
1652
|
-
|
|
1653
|
-
|
|
1654
|
-
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
|
|
1659
|
-
|
|
1660
|
-
|
|
1661
|
-
|
|
1662
|
-
|
|
1663
|
-
|
|
1664
|
-
|
|
1665
|
-
|
|
1666
|
-
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
if (result.changed || withComment !== result.content) {
|
|
1670
|
-
ctx.write(filePath, withComment);
|
|
1671
|
-
return {
|
|
1672
|
-
filePath,
|
|
1673
|
-
action: "updated",
|
|
1674
|
-
description: "Added missing steps to CI workflow"
|
|
1675
|
-
};
|
|
1676
|
-
}
|
|
1535
|
+
if (existing.includes("// @bensandee/tooling:ignore")) return {
|
|
1536
|
+
filePath,
|
|
1537
|
+
action: "skipped",
|
|
1538
|
+
description: "Ignored via tooling:ignore comment"
|
|
1539
|
+
};
|
|
1540
|
+
const parsed = parseTsconfig(existing);
|
|
1541
|
+
const changes = [];
|
|
1542
|
+
if (!parsed.extends) {
|
|
1543
|
+
parsed.extends = extendsValue;
|
|
1544
|
+
changes.push(`added extends: ${extendsValue}`);
|
|
1545
|
+
}
|
|
1546
|
+
if (!parsed.include) {
|
|
1547
|
+
const tsconfigDir = path.dirname(filePath);
|
|
1548
|
+
const srcDir = tsconfigDir === "." ? "src" : path.join(tsconfigDir, "src");
|
|
1549
|
+
if (ctx.exists(srcDir)) {
|
|
1550
|
+
parsed.include = ["src"];
|
|
1551
|
+
changes.push("added include: [\"src\"]");
|
|
1677
1552
|
}
|
|
1678
|
-
return {
|
|
1679
|
-
filePath,
|
|
1680
|
-
action: "skipped",
|
|
1681
|
-
description: "CI workflow already up to date"
|
|
1682
|
-
};
|
|
1683
1553
|
}
|
|
1684
|
-
|
|
1554
|
+
if (changes.length === 0) return {
|
|
1555
|
+
filePath,
|
|
1556
|
+
action: "skipped",
|
|
1557
|
+
description: "Already up to spec"
|
|
1558
|
+
};
|
|
1559
|
+
ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
|
|
1685
1560
|
return {
|
|
1686
1561
|
filePath,
|
|
1687
|
-
action: "
|
|
1688
|
-
description:
|
|
1562
|
+
action: "updated",
|
|
1563
|
+
description: changes.join(", ")
|
|
1689
1564
|
};
|
|
1690
1565
|
}
|
|
1691
|
-
|
|
1692
|
-
|
|
1693
|
-
|
|
1694
|
-
|
|
1695
|
-
export default {
|
|
1696
|
-
entry: ["src/index.ts"],
|
|
1697
|
-
project: ["src/**/*.ts"],
|
|
1698
|
-
ignore: ["dist/**"],
|
|
1699
|
-
} satisfies KnipConfig;
|
|
1700
|
-
`;
|
|
1701
|
-
const KNIP_CONFIG_MONOREPO = `import type { KnipConfig } from "knip";
|
|
1702
|
-
|
|
1703
|
-
export default {
|
|
1704
|
-
workspaces: {
|
|
1705
|
-
".": {
|
|
1706
|
-
entry: [],
|
|
1707
|
-
project: [],
|
|
1708
|
-
},
|
|
1709
|
-
"packages/*": {
|
|
1710
|
-
entry: ["src/index.ts", "src/bin.ts"],
|
|
1711
|
-
project: ["src/**/*.ts"],
|
|
1712
|
-
ignore: ["dist/**"],
|
|
1713
|
-
},
|
|
1714
|
-
},
|
|
1715
|
-
} satisfies KnipConfig;
|
|
1716
|
-
`;
|
|
1717
|
-
/** All known knip config file locations, in priority order. */
|
|
1718
|
-
const KNIP_CONFIG_PATHS = [
|
|
1719
|
-
"knip.config.ts",
|
|
1720
|
-
"knip.config.mts",
|
|
1721
|
-
"knip.json",
|
|
1722
|
-
"knip.jsonc",
|
|
1723
|
-
"knip.ts",
|
|
1724
|
-
"knip.mts"
|
|
1725
|
-
];
|
|
1726
|
-
async function generateKnip(ctx) {
|
|
1727
|
-
const filePath = "knip.config.ts";
|
|
1728
|
-
const isMonorepo = ctx.config.structure === "monorepo";
|
|
1729
|
-
const existingPath = KNIP_CONFIG_PATHS.find((p) => ctx.exists(p));
|
|
1730
|
-
if (existingPath) return {
|
|
1731
|
-
filePath: existingPath,
|
|
1566
|
+
function generateMonorepoRootTsconfig(ctx) {
|
|
1567
|
+
const filePath = "tsconfig.json";
|
|
1568
|
+
if (!ctx.read(filePath)) return {
|
|
1569
|
+
filePath,
|
|
1732
1570
|
action: "skipped",
|
|
1733
|
-
description:
|
|
1571
|
+
description: "No tsconfig.json found"
|
|
1734
1572
|
};
|
|
1735
|
-
const config = isMonorepo ? KNIP_CONFIG_MONOREPO : KNIP_CONFIG_SINGLE;
|
|
1736
|
-
ctx.write(filePath, config);
|
|
1737
1573
|
return {
|
|
1738
1574
|
filePath,
|
|
1739
|
-
action: "created",
|
|
1740
|
-
description: "Generated knip.config.ts for dead code analysis"
|
|
1741
|
-
};
|
|
1742
|
-
}
|
|
1743
|
-
//#endregion
|
|
1744
|
-
//#region src/generators/renovate.ts
|
|
1745
|
-
const SHARED_PRESET = "local>bensandee/tooling";
|
|
1746
|
-
/** Deprecated npm-based preset to migrate away from. */
|
|
1747
|
-
const LEGACY_PRESET = "@bensandee/config";
|
|
1748
|
-
/** All known renovate config file locations, in priority order. */
|
|
1749
|
-
const RENOVATE_CONFIG_PATHS = [
|
|
1750
|
-
"renovate.json",
|
|
1751
|
-
"renovate.json5",
|
|
1752
|
-
".renovaterc",
|
|
1753
|
-
".renovaterc.json",
|
|
1754
|
-
".github/renovate.json",
|
|
1755
|
-
".github/renovate.json5"
|
|
1756
|
-
];
|
|
1757
|
-
async function generateRenovate(ctx) {
|
|
1758
|
-
const defaultPath = "renovate.json";
|
|
1759
|
-
if (!ctx.config.setupRenovate) return {
|
|
1760
|
-
filePath: defaultPath,
|
|
1761
1575
|
action: "skipped",
|
|
1762
|
-
description: "
|
|
1576
|
+
description: "Root tsconfig left as-is"
|
|
1763
1577
|
};
|
|
1764
|
-
|
|
1765
|
-
|
|
1766
|
-
|
|
1578
|
+
}
|
|
1579
|
+
function generateMonorepoPackageTsconfigs(ctx) {
|
|
1580
|
+
const packages = getMonorepoPackages(ctx.targetDir);
|
|
1581
|
+
const results = [];
|
|
1582
|
+
for (const pkg of packages) {
|
|
1583
|
+
const relDir = path.relative(ctx.targetDir, pkg.dir);
|
|
1584
|
+
const filePath = path.join(relDir, "tsconfig.json");
|
|
1585
|
+
const existing = ctx.read(filePath);
|
|
1586
|
+
const projectType = detectProjectType(pkg.dir);
|
|
1587
|
+
const extendsValue = `@bensandee/config/tsconfig/${projectType}`;
|
|
1767
1588
|
if (existing) {
|
|
1768
|
-
|
|
1769
|
-
|
|
1770
|
-
|
|
1771
|
-
|
|
1772
|
-
|
|
1773
|
-
|
|
1774
|
-
|
|
1775
|
-
return {
|
|
1776
|
-
filePath: existingPath,
|
|
1777
|
-
action: "updated",
|
|
1778
|
-
description: `Migrated extends: ${LEGACY_PRESET} → ${SHARED_PRESET}`
|
|
1779
|
-
};
|
|
1589
|
+
if (existing.includes("// @bensandee/tooling:ignore")) {
|
|
1590
|
+
results.push({
|
|
1591
|
+
filePath,
|
|
1592
|
+
action: "skipped",
|
|
1593
|
+
description: "Ignored via tooling:ignore comment"
|
|
1594
|
+
});
|
|
1595
|
+
continue;
|
|
1780
1596
|
}
|
|
1781
|
-
|
|
1782
|
-
|
|
1783
|
-
|
|
1784
|
-
|
|
1785
|
-
|
|
1786
|
-
|
|
1787
|
-
|
|
1788
|
-
|
|
1789
|
-
|
|
1597
|
+
const parsed = parseTsconfig(existing);
|
|
1598
|
+
if (isSolutionStyle(parsed)) {
|
|
1599
|
+
results.push({
|
|
1600
|
+
filePath,
|
|
1601
|
+
action: "skipped",
|
|
1602
|
+
description: "Solution-style tsconfig — traversing references"
|
|
1603
|
+
});
|
|
1604
|
+
for (const ref of parsed.references ?? []) {
|
|
1605
|
+
const refPath = path.join(relDir, resolveReferencePath(ref.path));
|
|
1606
|
+
results.push(mergeSingleTsconfig(ctx, refPath, extendsValue));
|
|
1607
|
+
}
|
|
1608
|
+
continue;
|
|
1790
1609
|
}
|
|
1791
|
-
|
|
1792
|
-
|
|
1793
|
-
|
|
1794
|
-
|
|
1610
|
+
const changes = [];
|
|
1611
|
+
if (parsed.extends !== extendsValue) {
|
|
1612
|
+
const prev = parsed.extends;
|
|
1613
|
+
parsed.extends = extendsValue;
|
|
1614
|
+
changes.push(prev ? `changed extends: ${String(prev)} → ${extendsValue}` : `added extends: ${extendsValue}`);
|
|
1615
|
+
}
|
|
1616
|
+
if (!parsed.include && ctx.exists(path.join(relDir, "src"))) {
|
|
1617
|
+
parsed.include = ["src"];
|
|
1618
|
+
changes.push("added include: [\"src\"]");
|
|
1619
|
+
}
|
|
1620
|
+
if (changes.length === 0) {
|
|
1621
|
+
results.push({
|
|
1622
|
+
filePath,
|
|
1623
|
+
action: "skipped",
|
|
1624
|
+
description: `Already up to spec (${projectType})`
|
|
1625
|
+
});
|
|
1626
|
+
continue;
|
|
1627
|
+
}
|
|
1628
|
+
ctx.write(filePath, JSON.stringify(parsed, null, 2) + "\n");
|
|
1629
|
+
results.push({
|
|
1630
|
+
filePath,
|
|
1631
|
+
action: "updated",
|
|
1632
|
+
description: changes.join(", ")
|
|
1633
|
+
});
|
|
1634
|
+
} else {
|
|
1635
|
+
const config = {
|
|
1636
|
+
extends: extendsValue,
|
|
1637
|
+
...ctx.exists(path.join(relDir, "src")) ? { include: ["src"] } : {}
|
|
1795
1638
|
};
|
|
1639
|
+
ctx.write(filePath, JSON.stringify(config, null, 2) + "\n");
|
|
1640
|
+
results.push({
|
|
1641
|
+
filePath,
|
|
1642
|
+
action: "created",
|
|
1643
|
+
description: `Generated tsconfig.json with @bensandee/config/tsconfig/${projectType} (detected ${projectType})`
|
|
1644
|
+
});
|
|
1796
1645
|
}
|
|
1797
1646
|
}
|
|
1798
|
-
|
|
1799
|
-
|
|
1800
|
-
|
|
1801
|
-
|
|
1802
|
-
|
|
1647
|
+
return results;
|
|
1648
|
+
}
|
|
1649
|
+
//#endregion
|
|
1650
|
+
//#region src/generators/vitest.ts
|
|
1651
|
+
const VITEST_CONFIG = `import { defineConfig } from "vitest/config";
|
|
1652
|
+
|
|
1653
|
+
export default defineConfig({
|
|
1654
|
+
test: {
|
|
1655
|
+
include: ["test/**/*.test.ts"],
|
|
1656
|
+
},
|
|
1657
|
+
});
|
|
1658
|
+
`;
|
|
1659
|
+
const STARTER_TEST = `import { describe, it, expect } from "vitest";
|
|
1660
|
+
|
|
1661
|
+
describe("example", () => {
|
|
1662
|
+
it("should pass a basic assertion", () => {
|
|
1663
|
+
expect(1 + 1).toBe(2);
|
|
1664
|
+
});
|
|
1665
|
+
});
|
|
1666
|
+
`;
|
|
1667
|
+
async function generateVitest(ctx) {
|
|
1668
|
+
const results = [];
|
|
1669
|
+
if (!ctx.config.setupVitest) return [{
|
|
1670
|
+
filePath: "vitest.config.ts",
|
|
1671
|
+
action: "skipped",
|
|
1672
|
+
description: "Vitest setup not requested"
|
|
1673
|
+
}];
|
|
1674
|
+
if (ctx.config.structure === "monorepo") return [{
|
|
1675
|
+
filePath: "vitest.config.ts",
|
|
1676
|
+
action: "skipped",
|
|
1677
|
+
description: "Monorepo: vitest config belongs in individual packages"
|
|
1678
|
+
}];
|
|
1679
|
+
const configPath = "vitest.config.ts";
|
|
1680
|
+
if (ctx.exists(configPath)) if (ctx.read(configPath) === VITEST_CONFIG) results.push({
|
|
1681
|
+
filePath: configPath,
|
|
1682
|
+
action: "skipped",
|
|
1683
|
+
description: "Config already up to date"
|
|
1684
|
+
});
|
|
1685
|
+
else if (await ctx.confirmOverwrite(configPath) === "skip") results.push({
|
|
1686
|
+
filePath: configPath,
|
|
1687
|
+
action: "skipped",
|
|
1688
|
+
description: "Existing config preserved"
|
|
1689
|
+
});
|
|
1690
|
+
else {
|
|
1691
|
+
ctx.write(configPath, VITEST_CONFIG);
|
|
1692
|
+
results.push({
|
|
1693
|
+
filePath: configPath,
|
|
1694
|
+
action: "updated",
|
|
1695
|
+
description: "Replaced vitest config"
|
|
1696
|
+
});
|
|
1697
|
+
}
|
|
1698
|
+
else {
|
|
1699
|
+
ctx.write(configPath, VITEST_CONFIG);
|
|
1700
|
+
results.push({
|
|
1701
|
+
filePath: configPath,
|
|
1702
|
+
action: "created",
|
|
1703
|
+
description: "Generated vitest.config.ts"
|
|
1704
|
+
});
|
|
1705
|
+
}
|
|
1706
|
+
const testPath = "test/example.test.ts";
|
|
1707
|
+
if (!ctx.exists("test")) {
|
|
1708
|
+
ctx.write(testPath, STARTER_TEST);
|
|
1709
|
+
results.push({
|
|
1710
|
+
filePath: testPath,
|
|
1711
|
+
action: "created",
|
|
1712
|
+
description: "Generated starter test file"
|
|
1713
|
+
});
|
|
1714
|
+
}
|
|
1715
|
+
return results;
|
|
1716
|
+
}
|
|
1717
|
+
//#endregion
|
|
1718
|
+
//#region src/generators/oxlint.ts
|
|
1719
|
+
const CONFIG_WITH_LINT_RULES = `import recommended from "@bensandee/config/oxlint/recommended";
|
|
1720
|
+
import { defineConfig } from "oxlint";
|
|
1721
|
+
|
|
1722
|
+
export default defineConfig({
|
|
1723
|
+
extends: [recommended],
|
|
1724
|
+
});
|
|
1725
|
+
`;
|
|
1726
|
+
const CONFIG_PRESET_ONLY = `import { presetRules } from "@bensandee/config/oxlint";
|
|
1727
|
+
import { defineConfig } from "oxlint";
|
|
1728
|
+
|
|
1729
|
+
export default defineConfig({
|
|
1730
|
+
rules: presetRules,
|
|
1731
|
+
});
|
|
1732
|
+
`;
|
|
1733
|
+
async function generateOxlint(ctx) {
|
|
1734
|
+
const filePath = "oxlint.config.ts";
|
|
1735
|
+
const content = ctx.config.useEslintPlugin ? CONFIG_WITH_LINT_RULES : CONFIG_PRESET_ONLY;
|
|
1736
|
+
const existing = ctx.read(filePath);
|
|
1737
|
+
if (existing) {
|
|
1738
|
+
if (existing === content || existing.includes("@bensandee/config/oxlint")) return {
|
|
1739
|
+
filePath,
|
|
1740
|
+
action: "skipped",
|
|
1741
|
+
description: "Already configured"
|
|
1742
|
+
};
|
|
1743
|
+
if (await ctx.confirmOverwrite(filePath) === "skip") return {
|
|
1744
|
+
filePath,
|
|
1745
|
+
action: "skipped",
|
|
1746
|
+
description: "User chose to keep existing"
|
|
1747
|
+
};
|
|
1748
|
+
}
|
|
1749
|
+
ctx.write(filePath, content);
|
|
1803
1750
|
return {
|
|
1804
|
-
filePath
|
|
1805
|
-
action: "created",
|
|
1806
|
-
description: "Generated
|
|
1751
|
+
filePath,
|
|
1752
|
+
action: existing ? "updated" : "created",
|
|
1753
|
+
description: "Generated oxlint.config.ts"
|
|
1807
1754
|
};
|
|
1808
1755
|
}
|
|
1809
1756
|
//#endregion
|
|
1810
|
-
//#region src/generators/
|
|
1811
|
-
const
|
|
1812
|
-
|
|
1757
|
+
//#region src/generators/formatter.ts
|
|
1758
|
+
const OXFMT_CONFIG = `{}\n`;
|
|
1759
|
+
const PRETTIER_CONFIG = `{
|
|
1760
|
+
"semi": true,
|
|
1761
|
+
"singleQuote": false,
|
|
1762
|
+
"trailingComma": "all",
|
|
1763
|
+
"tabWidth": 2,
|
|
1764
|
+
"printWidth": 120
|
|
1765
|
+
}
|
|
1813
1766
|
`;
|
|
1814
|
-
async function
|
|
1815
|
-
|
|
1816
|
-
|
|
1767
|
+
async function generateFormatter(ctx) {
|
|
1768
|
+
if (ctx.config.formatter === "oxfmt") return generateOxfmt(ctx);
|
|
1769
|
+
return generatePrettier(ctx);
|
|
1770
|
+
}
|
|
1771
|
+
async function generateOxfmt(ctx) {
|
|
1772
|
+
const filePath = ".oxfmtrc.json";
|
|
1773
|
+
if (ctx.exists(filePath)) return {
|
|
1817
1774
|
filePath,
|
|
1818
1775
|
action: "skipped",
|
|
1819
|
-
description: "
|
|
1776
|
+
description: "Existing oxfmt config preserved"
|
|
1777
|
+
};
|
|
1778
|
+
ctx.write(filePath, OXFMT_CONFIG);
|
|
1779
|
+
return {
|
|
1780
|
+
filePath,
|
|
1781
|
+
action: "created",
|
|
1782
|
+
description: "Generated .oxfmtrc.json"
|
|
1820
1783
|
};
|
|
1784
|
+
}
|
|
1785
|
+
async function generatePrettier(ctx) {
|
|
1786
|
+
const filePath = ".prettierrc";
|
|
1821
1787
|
if (ctx.exists(filePath)) return {
|
|
1822
1788
|
filePath,
|
|
1823
1789
|
action: "skipped",
|
|
1824
|
-
description: "
|
|
1790
|
+
description: "Existing prettier config preserved"
|
|
1825
1791
|
};
|
|
1826
|
-
ctx.write(filePath,
|
|
1792
|
+
ctx.write(filePath, PRETTIER_CONFIG);
|
|
1827
1793
|
return {
|
|
1828
1794
|
filePath,
|
|
1829
1795
|
action: "created",
|
|
1830
|
-
description: "Generated
|
|
1796
|
+
description: "Generated .prettierrc"
|
|
1831
1797
|
};
|
|
1832
1798
|
}
|
|
1833
1799
|
//#endregion
|
|
1834
|
-
//#region src/generators/
|
|
1835
|
-
const
|
|
1836
|
-
|
|
1837
|
-
|
|
1838
|
-
|
|
1839
|
-
}).default({
|
|
1840
|
-
allow: [],
|
|
1841
|
-
deny: []
|
|
1842
|
-
}),
|
|
1843
|
-
instructions: z.array(z.string()).default([]),
|
|
1844
|
-
enabledPlugins: z.record(z.string(), z.boolean()).default({}),
|
|
1845
|
-
extraKnownMarketplaces: z.record(z.string(), z.record(z.string(), z.unknown())).default({})
|
|
1800
|
+
//#region src/generators/tsdown.ts
|
|
1801
|
+
const TSDOWN_CONFIG = `import { defineConfig } from "tsdown";
|
|
1802
|
+
|
|
1803
|
+
export default defineConfig({
|
|
1804
|
+
entry: ["src/index.ts"],
|
|
1846
1805
|
});
|
|
1847
|
-
|
|
1848
|
-
|
|
1849
|
-
|
|
1850
|
-
|
|
1851
|
-
|
|
1852
|
-
|
|
1853
|
-
|
|
1854
|
-
|
|
1855
|
-
|
|
1856
|
-
|
|
1857
|
-
|
|
1858
|
-
|
|
1859
|
-
|
|
1860
|
-
|
|
1861
|
-
|
|
1862
|
-
|
|
1863
|
-
|
|
1864
|
-
|
|
1865
|
-
|
|
1866
|
-
|
|
1867
|
-
`Bash(${pm} update *)`,
|
|
1868
|
-
`Bash(${pm} view *)`,
|
|
1869
|
-
`Bash(${pm} why *)`,
|
|
1870
|
-
`Bash(${pm} list)`,
|
|
1871
|
-
`Bash(${pm} list *)`,
|
|
1872
|
-
`Bash(${pm} ls)`,
|
|
1873
|
-
`Bash(${pm} ls *)`,
|
|
1874
|
-
"Bash(npm view *)",
|
|
1875
|
-
"Bash(npm info *)",
|
|
1876
|
-
"Bash(npm show *)",
|
|
1877
|
-
`Bash(${pm} build)`,
|
|
1878
|
-
`Bash(${pm} -r build)`,
|
|
1879
|
-
`Bash(${pm} dev)`,
|
|
1880
|
-
`Bash(${pm} test)`,
|
|
1881
|
-
`Bash(${pm} test *)`,
|
|
1882
|
-
`Bash(${pm} -r test)`,
|
|
1883
|
-
`Bash(${pm} vitest)`,
|
|
1884
|
-
`Bash(${pm} vitest *)`,
|
|
1885
|
-
`Bash(${pm} exec vitest)`,
|
|
1886
|
-
`Bash(${pm} exec vitest *)`,
|
|
1887
|
-
`Bash(${pm} exec oxfmt)`,
|
|
1888
|
-
`Bash(${pm} exec oxfmt *)`,
|
|
1889
|
-
`Bash(${pm} typecheck)`,
|
|
1890
|
-
`Bash(${pm} -r --parallel run typecheck)`,
|
|
1891
|
-
`Bash(${pm} tsc *)`,
|
|
1892
|
-
`Bash(${pm} exec tsc *)`,
|
|
1893
|
-
`Bash(${pm} lint)`,
|
|
1894
|
-
`Bash(${pm} lint *)`,
|
|
1895
|
-
`Bash(${pm} format)`,
|
|
1896
|
-
`Bash(${pm} format *)`,
|
|
1897
|
-
`Bash(${pm} knip)`,
|
|
1898
|
-
`Bash(${pm} knip *)`,
|
|
1899
|
-
`Bash(${pm} check)`,
|
|
1900
|
-
`Bash(${pm} exec oxlint *)`,
|
|
1901
|
-
`Bash(${pm} exec knip *)`,
|
|
1902
|
-
"Bash(git status *)",
|
|
1903
|
-
"Bash(git log *)",
|
|
1904
|
-
"Bash(git diff *)",
|
|
1905
|
-
"Bash(git branch *)",
|
|
1906
|
-
"Bash(git show *)",
|
|
1907
|
-
"Bash(git rev-parse *)",
|
|
1908
|
-
"Bash(ls *)",
|
|
1909
|
-
"Bash(cat *)",
|
|
1910
|
-
"Bash(head *)",
|
|
1911
|
-
"Bash(tail *)",
|
|
1912
|
-
"Bash(wc *)",
|
|
1913
|
-
"Bash(test *)",
|
|
1914
|
-
"Bash([ *)",
|
|
1915
|
-
"Bash(grep *)",
|
|
1916
|
-
"Bash(which *)",
|
|
1917
|
-
"Bash(node -e *)",
|
|
1918
|
-
"Bash(node -p *)"
|
|
1919
|
-
];
|
|
1920
|
-
if (ctx.config.structure === "monorepo") allow.push(`Bash(${pm} --filter *)`, `Bash(${pm} -r *)`);
|
|
1921
|
-
const enabledPlugins = { "code-simplifier@claude-plugins-official": true };
|
|
1922
|
-
const extraKnownMarketplaces = {};
|
|
1923
|
-
if (ctx.config.structure !== "monorepo") {
|
|
1924
|
-
if (ctx.packageJson ? packageHasWebUIDeps(ctx.packageJson) : false) enabledPlugins["frontend-design@claude-plugins-official"] = true;
|
|
1806
|
+
`;
|
|
1807
|
+
async function generateTsdown(ctx) {
|
|
1808
|
+
const filePath = "tsdown.config.ts";
|
|
1809
|
+
if (ctx.config.structure === "monorepo") return {
|
|
1810
|
+
filePath,
|
|
1811
|
+
action: "skipped",
|
|
1812
|
+
description: "Monorepo: tsdown config belongs in individual packages"
|
|
1813
|
+
};
|
|
1814
|
+
const existing = ctx.read(filePath);
|
|
1815
|
+
if (existing) {
|
|
1816
|
+
if (existing === TSDOWN_CONFIG) return {
|
|
1817
|
+
filePath,
|
|
1818
|
+
action: "skipped",
|
|
1819
|
+
description: "Already configured"
|
|
1820
|
+
};
|
|
1821
|
+
if (await ctx.confirmOverwrite(filePath) === "skip") return {
|
|
1822
|
+
filePath,
|
|
1823
|
+
action: "skipped",
|
|
1824
|
+
description: "Existing tsdown config preserved"
|
|
1825
|
+
};
|
|
1925
1826
|
}
|
|
1827
|
+
ctx.write(filePath, TSDOWN_CONFIG);
|
|
1926
1828
|
return {
|
|
1927
|
-
|
|
1928
|
-
|
|
1929
|
-
|
|
1930
|
-
"Bash(npx *)",
|
|
1931
|
-
"Bash(git push *)",
|
|
1932
|
-
"Bash(git push)",
|
|
1933
|
-
"Bash(git add *)",
|
|
1934
|
-
"Bash(git add)",
|
|
1935
|
-
"Bash(git commit *)",
|
|
1936
|
-
"Bash(git commit)",
|
|
1937
|
-
"Bash(git reset *)",
|
|
1938
|
-
"Bash(git merge *)",
|
|
1939
|
-
"Bash(git rebase *)",
|
|
1940
|
-
"Bash(git cherry-pick *)",
|
|
1941
|
-
"Bash(git checkout *)",
|
|
1942
|
-
"Bash(git switch *)",
|
|
1943
|
-
"Bash(git stash *)",
|
|
1944
|
-
"Bash(git tag *)",
|
|
1945
|
-
"Bash(git revert *)",
|
|
1946
|
-
"Bash(git clean *)",
|
|
1947
|
-
"Bash(git rm *)",
|
|
1948
|
-
"Bash(git mv *)",
|
|
1949
|
-
"Bash(rm -rf *)",
|
|
1950
|
-
"Bash(cat *.env)",
|
|
1951
|
-
"Bash(cat *.env.*)",
|
|
1952
|
-
"Bash(cat .env)",
|
|
1953
|
-
"Bash(cat .env.*)",
|
|
1954
|
-
"Bash(head *.env)",
|
|
1955
|
-
"Bash(head *.env.*)",
|
|
1956
|
-
"Bash(head .env)",
|
|
1957
|
-
"Bash(head .env.*)",
|
|
1958
|
-
"Bash(tail *.env)",
|
|
1959
|
-
"Bash(tail *.env.*)",
|
|
1960
|
-
"Bash(tail .env)",
|
|
1961
|
-
"Bash(tail .env.*)",
|
|
1962
|
-
"Bash(less *.env)",
|
|
1963
|
-
"Bash(less *.env.*)",
|
|
1964
|
-
"Bash(less .env)",
|
|
1965
|
-
"Bash(less .env.*)",
|
|
1966
|
-
"Bash(more *.env)",
|
|
1967
|
-
"Bash(more *.env.*)",
|
|
1968
|
-
"Bash(more .env)",
|
|
1969
|
-
"Bash(more .env.*)",
|
|
1970
|
-
"Bash(grep * .env)",
|
|
1971
|
-
"Bash(grep * .env.*)",
|
|
1972
|
-
"Read(.env)",
|
|
1973
|
-
"Read(.env.*)",
|
|
1974
|
-
"Read(*.env)",
|
|
1975
|
-
"Read(*.env.*)"
|
|
1976
|
-
]
|
|
1977
|
-
},
|
|
1978
|
-
instructions: [
|
|
1979
|
-
"Use pnpm, not npm/yarn/npx. Run binaries with `pnpm exec`.",
|
|
1980
|
-
"No typecasts (as/any). Use zod schemas, type guards, or narrowing instead.",
|
|
1981
|
-
"Fix lint violations instead of suppressing them. Only add disable comments when suppression is genuinely the best option. For no-empty-function: add a `{ /* no-op */ }` comment body instead of a disable comment.",
|
|
1982
|
-
"Prefer extensionless imports; if an extension is required, use .ts over .js."
|
|
1983
|
-
],
|
|
1984
|
-
enabledPlugins,
|
|
1985
|
-
extraKnownMarketplaces
|
|
1829
|
+
filePath,
|
|
1830
|
+
action: existing ? "updated" : "created",
|
|
1831
|
+
description: "Generated tsdown.config.ts"
|
|
1986
1832
|
};
|
|
1987
1833
|
}
|
|
1988
|
-
|
|
1989
|
-
|
|
1990
|
-
|
|
1991
|
-
|
|
1992
|
-
|
|
1993
|
-
|
|
1994
|
-
|
|
1834
|
+
//#endregion
|
|
1835
|
+
//#region src/generators/gitignore.ts
|
|
1836
|
+
/** Entries that every project should have — repo:sync --check flags these as missing. */
|
|
1837
|
+
const REQUIRED_ENTRIES = [
|
|
1838
|
+
"node_modules/",
|
|
1839
|
+
".pnpm-store/",
|
|
1840
|
+
"dist/",
|
|
1841
|
+
"*.tsbuildinfo",
|
|
1842
|
+
".env",
|
|
1843
|
+
".env.*",
|
|
1844
|
+
"!.env.example"
|
|
1845
|
+
];
|
|
1846
|
+
/** Tooling-specific entries added during init/update but not required for repo:sync --check. */
|
|
1847
|
+
const OPTIONAL_ENTRIES = [".tooling-migrate.md", ".tooling-archived/"];
|
|
1848
|
+
const ALL_ENTRIES = [...REQUIRED_ENTRIES, ...OPTIONAL_ENTRIES];
|
|
1849
|
+
/** Normalize a gitignore entry for comparison: strip leading `/` and trailing `/`. */
|
|
1850
|
+
function normalizeEntry(entry) {
|
|
1851
|
+
let s = entry.trim();
|
|
1852
|
+
if (s.startsWith("/")) s = s.slice(1);
|
|
1853
|
+
if (s.endsWith("/")) s = s.slice(0, -1);
|
|
1854
|
+
return s;
|
|
1995
1855
|
}
|
|
1996
|
-
function
|
|
1856
|
+
async function generateGitignore(ctx) {
|
|
1857
|
+
const filePath = ".gitignore";
|
|
1997
1858
|
const existing = ctx.read(filePath);
|
|
1998
1859
|
if (existing) {
|
|
1999
|
-
const
|
|
2000
|
-
|
|
1860
|
+
const existingNormalized = new Set(existing.split("\n").map(normalizeEntry).filter((line) => line.length > 0));
|
|
1861
|
+
const missing = ALL_ENTRIES.filter((entry) => !existingNormalized.has(normalizeEntry(entry)));
|
|
1862
|
+
if (missing.length === 0) return {
|
|
2001
1863
|
filePath,
|
|
2002
1864
|
action: "skipped",
|
|
2003
|
-
description: "
|
|
1865
|
+
description: "Already has all standard entries"
|
|
2004
1866
|
};
|
|
2005
|
-
const
|
|
2006
|
-
const
|
|
2007
|
-
|
|
2008
|
-
|
|
2009
|
-
let instructionChanges = 0;
|
|
2010
|
-
for (const inst of generated.instructions) {
|
|
2011
|
-
if (mergedInstructions.includes(inst)) continue;
|
|
2012
|
-
const prefixIdx = mergedInstructions.findIndex((e) => inst.startsWith(e) || e.startsWith(inst));
|
|
2013
|
-
if (prefixIdx !== -1) mergedInstructions[prefixIdx] = inst;
|
|
2014
|
-
else mergedInstructions.push(inst);
|
|
2015
|
-
instructionChanges++;
|
|
2016
|
-
}
|
|
2017
|
-
const missingPlugins = Object.entries(generated.enabledPlugins).filter(([key]) => !(key in settings.enabledPlugins));
|
|
2018
|
-
const missingMarketplaces = Object.entries(generated.extraKnownMarketplaces).filter(([key]) => !(key in settings.extraKnownMarketplaces));
|
|
2019
|
-
const changed = missingAllow.length + missingDeny.length + instructionChanges + missingPlugins.length + missingMarketplaces.length;
|
|
2020
|
-
if (changed === 0) return {
|
|
1867
|
+
const missingRequired = REQUIRED_ENTRIES.filter((entry) => !existingNormalized.has(normalizeEntry(entry)));
|
|
1868
|
+
const updated = existing.trimEnd() + "\n\n# Added by @bensandee/tooling\n" + missing.join("\n") + "\n";
|
|
1869
|
+
ctx.write(filePath, updated);
|
|
1870
|
+
if (missingRequired.length === 0) return {
|
|
2021
1871
|
filePath,
|
|
2022
1872
|
action: "skipped",
|
|
2023
|
-
description: "
|
|
2024
|
-
};
|
|
2025
|
-
rawJson["permissions"] = {
|
|
2026
|
-
allow: [...settings.permissions.allow, ...missingAllow],
|
|
2027
|
-
deny: [...settings.permissions.deny, ...missingDeny]
|
|
1873
|
+
description: "Only optional entries missing"
|
|
2028
1874
|
};
|
|
2029
|
-
rawJson["instructions"] = mergedInstructions;
|
|
2030
|
-
const updatedPlugins = { ...settings.enabledPlugins };
|
|
2031
|
-
for (const [key, value] of missingPlugins) updatedPlugins[key] = value;
|
|
2032
|
-
const updatedMarketplaces = { ...settings.extraKnownMarketplaces };
|
|
2033
|
-
for (const [key, value] of missingMarketplaces) updatedMarketplaces[key] = value;
|
|
2034
|
-
if (Object.keys(updatedPlugins).length > 0) rawJson["enabledPlugins"] = updatedPlugins;
|
|
2035
|
-
else delete rawJson["enabledPlugins"];
|
|
2036
|
-
if (Object.keys(updatedMarketplaces).length > 0) rawJson["extraKnownMarketplaces"] = updatedMarketplaces;
|
|
2037
|
-
else delete rawJson["extraKnownMarketplaces"];
|
|
2038
|
-
ctx.write(filePath, JSON.stringify(rawJson, null, 2) + "\n");
|
|
2039
1875
|
return {
|
|
2040
1876
|
filePath,
|
|
2041
1877
|
action: "updated",
|
|
2042
|
-
description: `
|
|
1878
|
+
description: `Appended ${String(missing.length)} missing entries`
|
|
2043
1879
|
};
|
|
2044
1880
|
}
|
|
2045
|
-
ctx.write(filePath,
|
|
1881
|
+
ctx.write(filePath, ALL_ENTRIES.join("\n") + "\n");
|
|
2046
1882
|
return {
|
|
2047
1883
|
filePath,
|
|
2048
1884
|
action: "created",
|
|
2049
|
-
description:
|
|
1885
|
+
description: "Generated .gitignore"
|
|
2050
1886
|
};
|
|
2051
1887
|
}
|
|
2052
|
-
async function generateClaudeSettings(ctx) {
|
|
2053
|
-
const results = [];
|
|
2054
|
-
results.push(writeOrMergeSettings(ctx, ".claude/settings.json", buildSettings(ctx)));
|
|
2055
|
-
if (ctx.config.structure === "monorepo") for (const pkg of getMonorepoPackages(ctx.targetDir)) {
|
|
2056
|
-
if (!hasWebUIDeps(pkg.dir)) continue;
|
|
2057
|
-
const pkgRelDir = path.relative(ctx.targetDir, pkg.dir);
|
|
2058
|
-
const pkgSettingsPath = path.join(pkgRelDir, ".claude/settings.json");
|
|
2059
|
-
results.push(writeOrMergeSettings(ctx, pkgSettingsPath, {
|
|
2060
|
-
permissions: {
|
|
2061
|
-
allow: [],
|
|
2062
|
-
deny: []
|
|
2063
|
-
},
|
|
2064
|
-
instructions: [],
|
|
2065
|
-
enabledPlugins: { "frontend-design@claude-plugins-official": true },
|
|
2066
|
-
extraKnownMarketplaces: {}
|
|
2067
|
-
}));
|
|
2068
|
-
}
|
|
2069
|
-
return results;
|
|
2070
|
-
}
|
|
2071
1888
|
//#endregion
|
|
2072
|
-
//#region src/generators/
|
|
2073
|
-
|
|
2074
|
-
|
|
2075
|
-
|
|
2076
|
-
|
|
2077
|
-
|
|
2078
|
-
|
|
2079
|
-
|
|
2080
|
-
|
|
2081
|
-
|
|
2082
|
-
|
|
2083
|
-
const plugins = {};
|
|
2084
|
-
if (isMonorepo) {
|
|
2085
|
-
config["npm"] = {
|
|
2086
|
-
publish: true,
|
|
2087
|
-
allowSameVersion: true
|
|
2088
|
-
};
|
|
2089
|
-
plugins["@release-it/bumper"] = { out: "packages/*/package.json" };
|
|
2090
|
-
}
|
|
2091
|
-
if (Object.keys(plugins).length > 0) config["plugins"] = plugins;
|
|
2092
|
-
return config;
|
|
2093
|
-
}
|
|
2094
|
-
async function generateReleaseIt(ctx) {
|
|
2095
|
-
const filePath = ".release-it.json";
|
|
2096
|
-
if (ctx.config.releaseStrategy !== "release-it") return {
|
|
2097
|
-
filePath,
|
|
2098
|
-
action: "skipped",
|
|
2099
|
-
description: "release-it not requested"
|
|
2100
|
-
};
|
|
2101
|
-
const content = JSON.stringify(buildConfig$2(ctx.config.ci, ctx.config.structure === "monorepo"), null, 2) + "\n";
|
|
2102
|
-
const existing = ctx.read(filePath);
|
|
2103
|
-
if (existing) {
|
|
2104
|
-
if (contentEqual(filePath, existing, content)) return {
|
|
2105
|
-
filePath,
|
|
2106
|
-
action: "skipped",
|
|
2107
|
-
description: "Already configured"
|
|
2108
|
-
};
|
|
2109
|
-
if (await ctx.confirmOverwrite(filePath) === "skip") return {
|
|
2110
|
-
filePath,
|
|
2111
|
-
action: "skipped",
|
|
2112
|
-
description: "Existing release-it config preserved"
|
|
2113
|
-
};
|
|
2114
|
-
}
|
|
2115
|
-
ctx.write(filePath, content);
|
|
2116
|
-
return {
|
|
2117
|
-
filePath,
|
|
2118
|
-
action: existing ? "updated" : "created",
|
|
2119
|
-
description: "Generated .release-it.json"
|
|
2120
|
-
};
|
|
2121
|
-
}
|
|
2122
|
-
//#endregion
|
|
2123
|
-
//#region src/generators/changesets.ts
|
|
2124
|
-
function buildConfig$1() {
|
|
2125
|
-
return {
|
|
2126
|
-
$schema: "https://unpkg.com/@changesets/config@3.1.1/schema.json",
|
|
2127
|
-
changelog: "@changesets/cli/changelog",
|
|
2128
|
-
commit: false,
|
|
2129
|
-
fixed: [],
|
|
2130
|
-
linked: [],
|
|
2131
|
-
access: "public",
|
|
2132
|
-
baseBranch: "main",
|
|
2133
|
-
updateInternalDependencies: "patch",
|
|
2134
|
-
ignore: []
|
|
2135
|
-
};
|
|
2136
|
-
}
|
|
2137
|
-
async function generateChangesets(ctx) {
|
|
2138
|
-
const filePath = ".changeset/config.json";
|
|
2139
|
-
if (ctx.config.releaseStrategy !== "changesets") return {
|
|
2140
|
-
filePath,
|
|
2141
|
-
action: "skipped",
|
|
2142
|
-
description: "Changesets not requested"
|
|
2143
|
-
};
|
|
2144
|
-
const content = JSON.stringify(buildConfig$1(), null, 2) + "\n";
|
|
2145
|
-
const existing = ctx.read(filePath);
|
|
2146
|
-
if (existing) return {
|
|
2147
|
-
filePath,
|
|
2148
|
-
action: "skipped",
|
|
2149
|
-
description: "Existing changesets config preserved"
|
|
2150
|
-
};
|
|
2151
|
-
ctx.write(filePath, content);
|
|
2152
|
-
return {
|
|
2153
|
-
filePath,
|
|
2154
|
-
action: existing ? "updated" : "created",
|
|
2155
|
-
description: "Generated .changeset/config.json"
|
|
2156
|
-
};
|
|
2157
|
-
}
|
|
2158
|
-
//#endregion
|
|
2159
|
-
//#region src/generators/release-ci.ts
|
|
2160
|
-
/** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
|
|
2161
|
-
function actionsExpr(expr) {
|
|
2162
|
-
return `\${{ ${expr} }}`;
|
|
2163
|
-
}
|
|
2164
|
-
function hasEnginesNode(ctx) {
|
|
2165
|
-
return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
|
|
2166
|
-
}
|
|
2167
|
-
function commonSteps(nodeVersionYaml, publishesNpm, { build = true } = {}) {
|
|
2168
|
-
return ` - uses: actions/checkout@v4
|
|
2169
|
-
with:
|
|
2170
|
-
fetch-depth: 0
|
|
2171
|
-
- uses: pnpm/action-setup@v4
|
|
2172
|
-
- uses: actions/setup-node@v4
|
|
2173
|
-
with:
|
|
2174
|
-
${nodeVersionYaml}
|
|
2175
|
-
cache: pnpm${publishesNpm ? `\n registry-url: "https://registry.npmjs.org"` : ""}
|
|
2176
|
-
- run: pnpm install --frozen-lockfile${build ? `\n - run: pnpm build` : ""}`;
|
|
2177
|
-
}
|
|
2178
|
-
function releaseItWorkflow(ci, nodeVersionYaml, publishesNpm) {
|
|
2179
|
-
const isGitHub = ci === "github";
|
|
2180
|
-
const permissions = isGitHub ? `
|
|
2181
|
-
permissions:
|
|
2182
|
-
contents: write
|
|
2183
|
-
` : "";
|
|
2184
|
-
const tokenEnv = isGitHub ? `GITHUB_TOKEN: \${{ github.token }}` : `FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}`;
|
|
2185
|
-
const npmEnv = publishesNpm ? `\n NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}` : "";
|
|
2186
|
-
return `${workflowSchemaComment(ci)}name: Release
|
|
2187
|
-
on:
|
|
2188
|
-
workflow_dispatch:
|
|
2189
|
-
${permissions}
|
|
2190
|
-
jobs:
|
|
2191
|
-
release:
|
|
2192
|
-
runs-on: ubuntu-latest
|
|
2193
|
-
steps:
|
|
2194
|
-
${commonSteps(nodeVersionYaml, publishesNpm)}
|
|
2195
|
-
- run: pnpm release-it --ci
|
|
2196
|
-
env:
|
|
2197
|
-
${tokenEnv}${npmEnv}
|
|
2198
|
-
`;
|
|
2199
|
-
}
|
|
2200
|
-
function commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm) {
|
|
2201
|
-
const isGitHub = ci === "github";
|
|
2202
|
-
const permissions = isGitHub ? `
|
|
2203
|
-
permissions:
|
|
2204
|
-
contents: write
|
|
1889
|
+
//#region src/generators/ci.ts
|
|
1890
|
+
const CI_CONCURRENCY = {
|
|
1891
|
+
group: `ci-${actionsExpr("github.ref")}`,
|
|
1892
|
+
"cancel-in-progress": actionsExpr("github.ref != 'refs/heads/main'")
|
|
1893
|
+
};
|
|
1894
|
+
function ciWorkflow(nodeVersionYaml, isForgejo, isChangesets) {
|
|
1895
|
+
const emailNotifications = isForgejo ? "\nenable-email-notifications: true\n" : "";
|
|
1896
|
+
const concurrencyBlock = isChangesets ? `
|
|
1897
|
+
concurrency:
|
|
1898
|
+
group: ci-${actionsExpr("github.ref")}
|
|
1899
|
+
cancel-in-progress: ${actionsExpr("github.ref != 'refs/heads/main'")}
|
|
2205
1900
|
` : "";
|
|
2206
|
-
|
|
2207
|
-
|
|
2208
|
-
|
|
2209
|
-
|
|
2210
|
-
|
|
2211
|
-
|
|
2212
|
-
|
|
2213
|
-
|
|
2214
|
-
GITHUB_TOKEN: \${{ github.token }}
|
|
2215
|
-
run: pnpm exec bst release:simple` : `
|
|
2216
|
-
- name: Release
|
|
2217
|
-
env:
|
|
2218
|
-
FORGEJO_SERVER_URL: \${{ github.server_url }}
|
|
2219
|
-
FORGEJO_REPOSITORY: \${{ github.repository }}
|
|
2220
|
-
FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}
|
|
2221
|
-
run: pnpm exec bst release:simple`;
|
|
2222
|
-
return `${workflowSchemaComment(ci)}name: Release
|
|
2223
|
-
on:
|
|
2224
|
-
workflow_dispatch:
|
|
2225
|
-
${permissions}
|
|
1901
|
+
return `${workflowSchemaComment(isForgejo ? "forgejo" : "github")}name: CI
|
|
1902
|
+
${emailNotifications}on:
|
|
1903
|
+
push:
|
|
1904
|
+
branches: [main]
|
|
1905
|
+
tags-ignore:
|
|
1906
|
+
- "**"
|
|
1907
|
+
pull_request:
|
|
1908
|
+
${concurrencyBlock}
|
|
2226
1909
|
jobs:
|
|
2227
|
-
|
|
1910
|
+
check:
|
|
2228
1911
|
runs-on: ubuntu-latest
|
|
2229
1912
|
steps:
|
|
2230
|
-
|
|
1913
|
+
- uses: actions/checkout@v4
|
|
1914
|
+
- uses: pnpm/action-setup@v4
|
|
1915
|
+
- uses: actions/setup-node@v4
|
|
1916
|
+
with:
|
|
1917
|
+
${nodeVersionYaml}
|
|
1918
|
+
cache: pnpm
|
|
1919
|
+
- run: pnpm install --frozen-lockfile
|
|
1920
|
+
- name: Run all checks
|
|
1921
|
+
run: pnpm ci:check
|
|
2231
1922
|
`;
|
|
2232
1923
|
}
|
|
2233
|
-
|
|
2234
|
-
|
|
2235
|
-
if (ci === "github") return {
|
|
2236
|
-
match: { uses: "changesets/action" },
|
|
2237
|
-
step: {
|
|
2238
|
-
uses: "changesets/action@v1",
|
|
2239
|
-
if: "github.ref == 'refs/heads/main'",
|
|
2240
|
-
with: {
|
|
2241
|
-
publish: "pnpm changeset publish",
|
|
2242
|
-
version: "pnpm changeset version"
|
|
2243
|
-
},
|
|
2244
|
-
env: {
|
|
2245
|
-
GITHUB_TOKEN: actionsExpr("github.token"),
|
|
2246
|
-
...publishesNpm && { NPM_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
|
|
2247
|
-
}
|
|
2248
|
-
}
|
|
2249
|
-
};
|
|
2250
|
-
return {
|
|
2251
|
-
match: { run: "release:changesets" },
|
|
2252
|
-
step: {
|
|
2253
|
-
name: "Release",
|
|
2254
|
-
if: "github.ref == 'refs/heads/main'",
|
|
2255
|
-
env: {
|
|
2256
|
-
FORGEJO_SERVER_URL: actionsExpr("github.server_url"),
|
|
2257
|
-
FORGEJO_REPOSITORY: actionsExpr("github.repository"),
|
|
2258
|
-
FORGEJO_TOKEN: actionsExpr("secrets.FORGEJO_TOKEN"),
|
|
2259
|
-
...publishesNpm && { NODE_AUTH_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
|
|
2260
|
-
},
|
|
2261
|
-
run: "pnpm exec bst release:changesets"
|
|
2262
|
-
}
|
|
2263
|
-
};
|
|
2264
|
-
}
|
|
2265
|
-
function requiredReleaseSteps(strategy, nodeVersionYaml, publishesNpm) {
|
|
2266
|
-
const isNodeVersionFile = nodeVersionYaml.startsWith("node-version-file");
|
|
2267
|
-
const steps = [
|
|
1924
|
+
function requiredCheckSteps(nodeVersionYaml) {
|
|
1925
|
+
return [
|
|
2268
1926
|
{
|
|
2269
1927
|
match: { uses: "actions/checkout" },
|
|
2270
|
-
step: {
|
|
2271
|
-
uses: "actions/checkout@v4",
|
|
2272
|
-
with: { "fetch-depth": 0 }
|
|
2273
|
-
}
|
|
1928
|
+
step: { uses: "actions/checkout@v4" }
|
|
2274
1929
|
},
|
|
2275
1930
|
{
|
|
2276
1931
|
match: { uses: "pnpm/action-setup" },
|
|
@@ -2281,9 +1936,8 @@ function requiredReleaseSteps(strategy, nodeVersionYaml, publishesNpm) {
|
|
|
2281
1936
|
step: {
|
|
2282
1937
|
uses: "actions/setup-node@v4",
|
|
2283
1938
|
with: {
|
|
2284
|
-
...
|
|
2285
|
-
cache: "pnpm"
|
|
2286
|
-
...publishesNpm && { "registry-url": "https://registry.npmjs.org" }
|
|
1939
|
+
...nodeVersionYaml.startsWith("node-version-file") ? { "node-version-file": "package.json" } : { "node-version": "24" },
|
|
1940
|
+
cache: "pnpm"
|
|
2287
1941
|
}
|
|
2288
1942
|
}
|
|
2289
1943
|
},
|
|
@@ -2291,710 +1945,1083 @@ function requiredReleaseSteps(strategy, nodeVersionYaml, publishesNpm) {
|
|
|
2291
1945
|
match: { run: "pnpm install" },
|
|
2292
1946
|
step: { run: "pnpm install --frozen-lockfile" }
|
|
2293
1947
|
},
|
|
2294
|
-
|
|
2295
|
-
match: { run: "
|
|
2296
|
-
step: {
|
|
2297
|
-
|
|
1948
|
+
{
|
|
1949
|
+
match: { run: "check" },
|
|
1950
|
+
step: {
|
|
1951
|
+
name: "Run all checks",
|
|
1952
|
+
run: "pnpm ci:check"
|
|
1953
|
+
}
|
|
1954
|
+
}
|
|
2298
1955
|
];
|
|
2299
|
-
switch (strategy) {
|
|
2300
|
-
case "release-it":
|
|
2301
|
-
steps.push({
|
|
2302
|
-
match: { run: "release-it" },
|
|
2303
|
-
step: { run: "pnpm release-it --ci" }
|
|
2304
|
-
});
|
|
2305
|
-
break;
|
|
2306
|
-
case "simple":
|
|
2307
|
-
steps.push({
|
|
2308
|
-
match: { run: "release:simple" },
|
|
2309
|
-
step: { run: "pnpm exec bst release:simple" }
|
|
2310
|
-
});
|
|
2311
|
-
break;
|
|
2312
|
-
case "changesets":
|
|
2313
|
-
steps.push({
|
|
2314
|
-
match: { run: "changeset" },
|
|
2315
|
-
step: { run: "pnpm exec bst release:changesets" }
|
|
2316
|
-
});
|
|
2317
|
-
break;
|
|
2318
|
-
}
|
|
2319
|
-
return steps;
|
|
2320
1956
|
}
|
|
2321
|
-
|
|
2322
|
-
|
|
2323
|
-
|
|
2324
|
-
case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm);
|
|
2325
|
-
default: return null;
|
|
2326
|
-
}
|
|
1957
|
+
/** Resolve the CI workflow file path for the given forge and release strategy. */
function ciWorkflowPath(ci, releaseStrategy) {
  const dir = ci === "github" ? ".github/workflows" : ".forgejo/workflows";
  const file = releaseStrategy === "changesets" ? "ci.yml" : "check.yml";
  return `${dir}/${file}`;
}
|
|
2328
|
-
function
|
|
2329
|
-
|
|
2330
|
-
|
|
2331
|
-
if (!raw) return {
|
|
2332
|
-
filePath: ciPath,
|
|
1961
|
+
/**
 * Generate or update the CI workflow for the configured forge.
 * Creates a fresh workflow when none exists; otherwise merges required steps,
 * tags-ignore, (for changesets) a concurrency block, and the schema comment
 * into the existing file, writing only when something actually changed.
 */
async function generateCi(ctx) {
  if (ctx.config.ci === "none") {
    return {
      filePath: "ci",
      action: "skipped",
      description: "CI workflow not requested"
    };
  }
  const isGitHub = ctx.config.ci === "github";
  const isChangesets = ctx.config.releaseStrategy === "changesets";
  const nodeVersionYaml = computeNodeVersionYaml(ctx);
  const filePath = ciWorkflowPath(ctx.config.ci, ctx.config.releaseStrategy);
  const content = ciWorkflow(nodeVersionYaml, !isGitHub, isChangesets);
  if (!ctx.exists(filePath)) {
    ctx.write(filePath, content);
    return {
      filePath,
      action: "created",
      description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions CI workflow`
    };
  }
  const existing = ctx.read(filePath);
  if (existing) {
    const merged = mergeWorkflowSteps(existing, "check", requiredCheckSteps(nodeVersionYaml));
    const tagged = ensureWorkflowTagsIgnore(merged.content);
    let current = tagged.content;
    let changed = merged.changed || tagged.changed;
    if (isChangesets) {
      const withConcurrency = ensureWorkflowConcurrency(current, CI_CONCURRENCY);
      current = withConcurrency.content;
      changed = changed || withConcurrency.changed;
    }
    const withComment = ensureSchemaComment(current, isGitHub ? "github" : "forgejo");
    if (changed || withComment !== current) {
      ctx.write(filePath, withComment);
      return {
        filePath,
        action: "updated",
        description: "Added missing steps to CI workflow"
      };
    }
  }
  return {
    filePath,
    action: "skipped",
    description: "CI workflow already up to date"
  };
}
|
|
2362
|
-
|
|
2363
|
-
|
|
2364
|
-
|
|
2365
|
-
|
|
2011
|
+
//#endregion
|
|
2012
|
+
//#region src/generators/knip.ts
|
|
2013
|
+
const KNIP_CONFIG_SINGLE = `import type { KnipConfig } from "knip";
|
|
2014
|
+
|
|
2015
|
+
export default {
|
|
2016
|
+
entry: ["src/index.ts"],
|
|
2017
|
+
project: ["src/**/*.ts"],
|
|
2018
|
+
ignore: ["dist/**"],
|
|
2019
|
+
} satisfies KnipConfig;
|
|
2020
|
+
`;
|
|
2021
|
+
const KNIP_CONFIG_MONOREPO = `import type { KnipConfig } from "knip";
|
|
2022
|
+
|
|
2023
|
+
export default {
|
|
2024
|
+
workspaces: {
|
|
2025
|
+
".": {
|
|
2026
|
+
entry: [],
|
|
2027
|
+
project: [],
|
|
2028
|
+
},
|
|
2029
|
+
"packages/*": {
|
|
2030
|
+
entry: ["src/index.ts", "src/bin.ts"],
|
|
2031
|
+
project: ["src/**/*.ts"],
|
|
2032
|
+
ignore: ["dist/**"],
|
|
2033
|
+
},
|
|
2034
|
+
},
|
|
2035
|
+
} satisfies KnipConfig;
|
|
2036
|
+
`;
|
|
2037
|
+
/** All known knip config file locations, in priority order. */
|
|
2038
|
+
const KNIP_CONFIG_PATHS = [
|
|
2039
|
+
"knip.config.ts",
|
|
2040
|
+
"knip.config.mts",
|
|
2041
|
+
"knip.json",
|
|
2042
|
+
"knip.jsonc",
|
|
2043
|
+
"knip.ts",
|
|
2044
|
+
"knip.mts"
|
|
2045
|
+
];
|
|
2046
|
+
/** Ensure a knip config exists; never overwrites a config found at any known path. */
async function generateKnip(ctx) {
  const filePath = "knip.config.ts";
  const isMonorepo = ctx.config.structure === "monorepo";
  const existingPath = KNIP_CONFIG_PATHS.find((p) => ctx.exists(p));
  if (existingPath) {
    const description = existingPath === filePath
      ? "Already configured"
      : `Existing config found at ${existingPath}`;
    return {
      filePath: existingPath,
      action: "skipped",
      description
    };
  }
  ctx.write(filePath, isMonorepo ? KNIP_CONFIG_MONOREPO : KNIP_CONFIG_SINGLE);
  return {
    filePath,
    action: "created",
    description: "Generated knip.config.ts for dead code analysis"
  };
}
|
|
2063
|
+
//#endregion
|
|
2064
|
+
//#region src/generators/renovate.ts
|
|
2065
|
+
const SHARED_PRESET = "local>bensandee/tooling";
/** Deprecated npm-based preset to migrate away from. */
const LEGACY_PRESET = "@bensandee/config";
/** All known renovate config file locations, in priority order. */
const RENOVATE_CONFIG_PATHS = [
  "renovate.json",
  "renovate.json5",
  ".renovaterc",
  ".renovaterc.json",
  ".github/renovate.json",
  ".github/renovate.json5"
];
/**
 * Create renovate.json extending the shared preset, or update an existing
 * config in place: migrate the legacy preset if present, otherwise prepend
 * the shared preset when missing.
 */
async function generateRenovate(ctx) {
  const defaultPath = "renovate.json";
  if (!ctx.config.setupRenovate) {
    return {
      filePath: defaultPath,
      action: "skipped",
      description: "Renovate not requested"
    };
  }
  const existingPath = RENOVATE_CONFIG_PATHS.find((p) => ctx.exists(p));
  if (existingPath) {
    const existing = ctx.read(existingPath);
    if (existing) {
      const parsed = parseRenovateJson(existing);
      const presets = parsed.extends ?? [];
      // Persist the (mutated) presets list back into the parsed document.
      const writeBack = () => {
        parsed.extends = presets;
        ctx.write(existingPath, JSON.stringify(parsed, null, 2) + "\n");
      };
      const legacyIndex = presets.indexOf(LEGACY_PRESET);
      if (legacyIndex !== -1) {
        presets[legacyIndex] = SHARED_PRESET;
        writeBack();
        return {
          filePath: existingPath,
          action: "updated",
          description: `Migrated extends: ${LEGACY_PRESET} → ${SHARED_PRESET}`
        };
      }
      if (!presets.includes(SHARED_PRESET)) {
        presets.unshift(SHARED_PRESET);
        writeBack();
        return {
          filePath: existingPath,
          action: "updated",
          description: `Added extends: ${SHARED_PRESET}`
        };
      }
      return {
        filePath: existingPath,
        action: "skipped",
        description: "Already extends shared config"
      };
    }
  }
  ctx.write(defaultPath, JSON.stringify({
    $schema: "https://docs.renovatebot.com/renovate-schema.json",
    extends: [SHARED_PRESET]
  }, null, 2) + "\n");
  return {
    filePath: defaultPath,
    action: "created",
    description: "Generated renovate.json extending shared config"
  };
}
|
|
2436
2129
|
//#endregion
|
|
2437
|
-
//#region src/generators/
|
|
2438
|
-
|
|
2130
|
+
//#region src/generators/pnpm-workspace.ts
|
|
2131
|
+
const WORKSPACE_YAML = `packages:
|
|
2132
|
+
- "packages/*"
|
|
2133
|
+
`;
|
|
2134
|
+
/** Create pnpm-workspace.yaml for monorepos; never overwrites an existing file. */
async function generatePnpmWorkspace(ctx) {
  const filePath = "pnpm-workspace.yaml";
  if (ctx.config.structure !== "monorepo") {
    return {
      filePath,
      action: "skipped",
      description: "Not a monorepo"
    };
  }
  if (ctx.exists(filePath)) {
    return {
      filePath,
      action: "skipped",
      description: "pnpm-workspace.yaml already exists"
    };
  }
  ctx.write(filePath, WORKSPACE_YAML);
  return {
    filePath,
    action: "created",
    description: "Generated pnpm-workspace.yaml"
  };
}
|
|
2447
|
-
|
|
2448
|
-
|
|
2449
|
-
|
|
2450
|
-
|
|
2451
|
-
|
|
2452
|
-
|
|
2453
|
-
|
|
2454
|
-
|
|
2455
|
-
|
|
2456
|
-
|
|
2457
|
-
|
|
2458
|
-
}
|
|
2459
|
-
|
|
2460
|
-
|
|
2461
|
-
|
|
2462
|
-
|
|
2463
|
-
|
|
2464
|
-
|
|
2465
|
-
|
|
2466
|
-
|
|
2467
|
-
|
|
2468
|
-
|
|
2469
|
-
|
|
2470
|
-
|
|
2471
|
-
|
|
2472
|
-
|
|
2473
|
-
|
|
2474
|
-
|
|
2475
|
-
|
|
2476
|
-
"
|
|
2477
|
-
|
|
2478
|
-
|
|
2479
|
-
|
|
2480
|
-
|
|
2481
|
-
|
|
2482
|
-
|
|
2483
|
-
|
|
2484
|
-
|
|
2485
|
-
|
|
2486
|
-
|
|
2487
|
-
|
|
2488
|
-
|
|
2489
|
-
|
|
2490
|
-
|
|
2491
|
-
|
|
2492
|
-
|
|
2493
|
-
|
|
2494
|
-
|
|
2495
|
-
|
|
2496
|
-
|
|
2497
|
-
|
|
2153
|
+
//#endregion
|
|
2154
|
+
//#region src/generators/claude-settings.ts
|
|
2155
|
+
// Shape of .claude/settings.json; every key is optional with a safe default.
const ClaudeSettingsSchema = z.object({
  permissions: z.object({
    allow: z.array(z.string()).default([]),
    deny: z.array(z.string()).default([])
  }).default({
    allow: [],
    deny: []
  }),
  instructions: z.array(z.string()).default([]),
  enabledPlugins: z.record(z.string(), z.boolean()).default({}),
  extraKnownMarketplaces: z.record(z.string(), z.record(z.string(), z.unknown())).default({})
});
/**
 * Parse a .claude/settings.json payload.
 * Returns both the schema-validated settings and the raw JSON object (so
 * unknown keys survive a round trip), or undefined when the payload is not
 * valid JSON or does not match the expected shape.
 */
function parseClaudeSettings(raw) {
  let json;
  try {
    json = JSON.parse(raw);
  } catch {
    return void 0;
  }
  const rawResult = z.record(z.string(), z.unknown()).safeParse(json);
  const settingsResult = ClaudeSettingsSchema.safeParse(json);
  if (!rawResult.success || !settingsResult.success) return void 0;
  return {
    settings: settingsResult.data,
    rawJson: rawResult.data
  };
}
|
|
2181
|
+
function buildSettings(ctx) {
|
|
2182
|
+
const pm = "pnpm";
|
|
2183
|
+
const allow = [
|
|
2184
|
+
`Bash(${pm} install)`,
|
|
2185
|
+
`Bash(${pm} install *)`,
|
|
2186
|
+
`Bash(${pm} add *)`,
|
|
2187
|
+
`Bash(${pm} update *)`,
|
|
2188
|
+
`Bash(${pm} view *)`,
|
|
2189
|
+
`Bash(${pm} why *)`,
|
|
2190
|
+
`Bash(${pm} list)`,
|
|
2191
|
+
`Bash(${pm} list *)`,
|
|
2192
|
+
`Bash(${pm} ls)`,
|
|
2193
|
+
`Bash(${pm} ls *)`,
|
|
2194
|
+
"Bash(npm view *)",
|
|
2195
|
+
"Bash(npm info *)",
|
|
2196
|
+
"Bash(npm show *)",
|
|
2197
|
+
`Bash(${pm} build)`,
|
|
2198
|
+
`Bash(${pm} -r build)`,
|
|
2199
|
+
`Bash(${pm} dev)`,
|
|
2200
|
+
`Bash(${pm} test)`,
|
|
2201
|
+
`Bash(${pm} test *)`,
|
|
2202
|
+
`Bash(${pm} -r test)`,
|
|
2203
|
+
`Bash(${pm} vitest)`,
|
|
2204
|
+
`Bash(${pm} vitest *)`,
|
|
2205
|
+
`Bash(${pm} exec vitest)`,
|
|
2206
|
+
`Bash(${pm} exec vitest *)`,
|
|
2207
|
+
`Bash(${pm} exec oxfmt)`,
|
|
2208
|
+
`Bash(${pm} exec oxfmt *)`,
|
|
2209
|
+
`Bash(${pm} typecheck)`,
|
|
2210
|
+
`Bash(${pm} -r --parallel run typecheck)`,
|
|
2211
|
+
`Bash(${pm} tsc *)`,
|
|
2212
|
+
`Bash(${pm} exec tsc *)`,
|
|
2213
|
+
`Bash(${pm} lint)`,
|
|
2214
|
+
`Bash(${pm} lint *)`,
|
|
2215
|
+
`Bash(${pm} format)`,
|
|
2216
|
+
`Bash(${pm} format *)`,
|
|
2217
|
+
`Bash(${pm} knip)`,
|
|
2218
|
+
`Bash(${pm} knip *)`,
|
|
2219
|
+
`Bash(${pm} check)`,
|
|
2220
|
+
`Bash(${pm} exec oxlint *)`,
|
|
2221
|
+
`Bash(${pm} exec knip *)`,
|
|
2222
|
+
"Bash(git status *)",
|
|
2223
|
+
"Bash(git log *)",
|
|
2224
|
+
"Bash(git diff *)",
|
|
2225
|
+
"Bash(git branch *)",
|
|
2226
|
+
"Bash(git show *)",
|
|
2227
|
+
"Bash(git rev-parse *)",
|
|
2228
|
+
"Bash(ls *)",
|
|
2229
|
+
"Bash(cat *)",
|
|
2230
|
+
"Bash(head *)",
|
|
2231
|
+
"Bash(tail *)",
|
|
2232
|
+
"Bash(wc *)",
|
|
2233
|
+
"Bash(test *)",
|
|
2234
|
+
"Bash([ *)",
|
|
2235
|
+
"Bash(grep *)",
|
|
2236
|
+
"Bash(which *)",
|
|
2237
|
+
"Bash(node -e *)",
|
|
2238
|
+
"Bash(node -p *)"
|
|
2239
|
+
];
|
|
2240
|
+
if (ctx.config.structure === "monorepo") allow.push(`Bash(${pm} --filter *)`, `Bash(${pm} -r *)`);
|
|
2241
|
+
const enabledPlugins = { "code-simplifier@claude-plugins-official": true };
|
|
2242
|
+
const extraKnownMarketplaces = {};
|
|
2243
|
+
if (ctx.config.structure !== "monorepo") {
|
|
2244
|
+
if (ctx.packageJson ? packageHasWebUIDeps(ctx.packageJson) : false) enabledPlugins["frontend-design@claude-plugins-official"] = true;
|
|
2245
|
+
}
|
|
2246
|
+
return {
|
|
2247
|
+
permissions: {
|
|
2248
|
+
allow,
|
|
2249
|
+
deny: [
|
|
2250
|
+
"Bash(npx *)",
|
|
2251
|
+
"Bash(git push *)",
|
|
2252
|
+
"Bash(git push)",
|
|
2253
|
+
"Bash(git add *)",
|
|
2254
|
+
"Bash(git add)",
|
|
2255
|
+
"Bash(git commit *)",
|
|
2256
|
+
"Bash(git commit)",
|
|
2257
|
+
"Bash(git reset *)",
|
|
2258
|
+
"Bash(git merge *)",
|
|
2259
|
+
"Bash(git rebase *)",
|
|
2260
|
+
"Bash(git cherry-pick *)",
|
|
2261
|
+
"Bash(git checkout *)",
|
|
2262
|
+
"Bash(git switch *)",
|
|
2263
|
+
"Bash(git stash *)",
|
|
2264
|
+
"Bash(git tag *)",
|
|
2265
|
+
"Bash(git revert *)",
|
|
2266
|
+
"Bash(git clean *)",
|
|
2267
|
+
"Bash(git rm *)",
|
|
2268
|
+
"Bash(git mv *)",
|
|
2269
|
+
"Bash(rm -rf *)",
|
|
2270
|
+
"Bash(cat *.env)",
|
|
2271
|
+
"Bash(cat *.env.*)",
|
|
2272
|
+
"Bash(cat .env)",
|
|
2273
|
+
"Bash(cat .env.*)",
|
|
2274
|
+
"Bash(head *.env)",
|
|
2275
|
+
"Bash(head *.env.*)",
|
|
2276
|
+
"Bash(head .env)",
|
|
2277
|
+
"Bash(head .env.*)",
|
|
2278
|
+
"Bash(tail *.env)",
|
|
2279
|
+
"Bash(tail *.env.*)",
|
|
2280
|
+
"Bash(tail .env)",
|
|
2281
|
+
"Bash(tail .env.*)",
|
|
2282
|
+
"Bash(less *.env)",
|
|
2283
|
+
"Bash(less *.env.*)",
|
|
2284
|
+
"Bash(less .env)",
|
|
2285
|
+
"Bash(less .env.*)",
|
|
2286
|
+
"Bash(more *.env)",
|
|
2287
|
+
"Bash(more *.env.*)",
|
|
2288
|
+
"Bash(more .env)",
|
|
2289
|
+
"Bash(more .env.*)",
|
|
2290
|
+
"Bash(grep * .env)",
|
|
2291
|
+
"Bash(grep * .env.*)",
|
|
2292
|
+
"Read(.env)",
|
|
2293
|
+
"Read(.env.*)",
|
|
2294
|
+
"Read(*.env)",
|
|
2295
|
+
"Read(*.env.*)"
|
|
2296
|
+
]
|
|
2297
|
+
},
|
|
2298
|
+
instructions: [
|
|
2299
|
+
"Use pnpm, not npm/yarn/npx. Run binaries with `pnpm exec`.",
|
|
2300
|
+
"No typecasts (as/any). Use zod schemas, type guards, or narrowing instead.",
|
|
2301
|
+
"Fix lint violations instead of suppressing them. Only add disable comments when suppression is genuinely the best option. For no-empty-function: add a `{ /* no-op */ }` comment body instead of a disable comment.",
|
|
2302
|
+
"Prefer extensionless imports; if an extension is required, use .ts over .js."
|
|
2303
|
+
],
|
|
2304
|
+
enabledPlugins,
|
|
2305
|
+
extraKnownMarketplaces
|
|
2306
|
+
};
|
|
2307
|
+
}
|
|
2308
|
+
/** Serialize settings, dropping enabledPlugins/extraKnownMarketplaces when empty to keep the file clean. */
function serializeSettings$1(settings) {
  const { enabledPlugins, extraKnownMarketplaces, ...out } = settings;
  const hasEntries = (record) => Object.keys(record).length > 0;
  if (hasEntries(enabledPlugins)) out["enabledPlugins"] = enabledPlugins;
  if (hasEntries(extraKnownMarketplaces)) out["extraKnownMarketplaces"] = extraKnownMarketplaces;
  return JSON.stringify(out, null, 2) + "\n";
}
|
|
2316
|
+
/**
 * Create settings at filePath, or merge missing generated entries into an
 * existing file. Merging goes through the raw JSON object so unknown keys
 * written by other tools are preserved; nothing is written when the existing
 * file already contains every generated rule and instruction.
 */
function writeOrMergeSettings(ctx, filePath, generated) {
  const existing = ctx.read(filePath);
  if (!existing) {
    ctx.write(filePath, serializeSettings$1(generated));
    return {
      filePath,
      action: "created",
      description: `Generated ${filePath}`
    };
  }
  const parsed = parseClaudeSettings(existing);
  if (!parsed) {
    return {
      filePath,
      action: "skipped",
      description: "Could not parse existing settings"
    };
  }
  const { settings, rawJson } = parsed;
  const newAllow = generated.permissions.allow.filter((rule) => !settings.permissions.allow.includes(rule));
  const newDeny = generated.permissions.deny.filter((rule) => !settings.permissions.deny.includes(rule));
  const instructions = [...settings.instructions];
  let instructionChanges = 0;
  for (const inst of generated.instructions) {
    if (instructions.includes(inst)) continue;
    // A prefix relationship in either direction means the instruction was
    // reworded upstream: replace it in place instead of appending a duplicate.
    const related = instructions.findIndex((e) => inst.startsWith(e) || e.startsWith(inst));
    if (related === -1) instructions.push(inst);
    else instructions[related] = inst;
    instructionChanges++;
  }
  const newPlugins = Object.entries(generated.enabledPlugins).filter(([key]) => !(key in settings.enabledPlugins));
  const newMarketplaces = Object.entries(generated.extraKnownMarketplaces).filter(([key]) => !(key in settings.extraKnownMarketplaces));
  const changed = newAllow.length + newDeny.length + instructionChanges + newPlugins.length + newMarketplaces.length;
  if (changed === 0) {
    return {
      filePath,
      action: "skipped",
      description: "Already has all rules and instructions"
    };
  }
  rawJson["permissions"] = {
    allow: [...settings.permissions.allow, ...newAllow],
    deny: [...settings.permissions.deny, ...newDeny]
  };
  rawJson["instructions"] = instructions;
  const plugins = { ...settings.enabledPlugins, ...Object.fromEntries(newPlugins) };
  const marketplaces = { ...settings.extraKnownMarketplaces, ...Object.fromEntries(newMarketplaces) };
  if (Object.keys(plugins).length > 0) rawJson["enabledPlugins"] = plugins;
  else delete rawJson["enabledPlugins"];
  if (Object.keys(marketplaces).length > 0) rawJson["extraKnownMarketplaces"] = marketplaces;
  else delete rawJson["extraKnownMarketplaces"];
  ctx.write(filePath, JSON.stringify(rawJson, null, 2) + "\n");
  return {
    filePath,
    action: "updated",
    description: `Updated ${String(changed)} rules/instructions`
  };
}
|
|
2372
|
+
async function generateClaudeSettings(ctx) {
|
|
2373
|
+
const results = [];
|
|
2374
|
+
results.push(writeOrMergeSettings(ctx, ".claude/settings.json", buildSettings(ctx)));
|
|
2375
|
+
if (ctx.config.structure === "monorepo") for (const pkg of getMonorepoPackages(ctx.targetDir)) {
|
|
2376
|
+
if (!hasWebUIDeps(pkg.dir)) continue;
|
|
2377
|
+
const pkgRelDir = path.relative(ctx.targetDir, pkg.dir);
|
|
2378
|
+
const pkgSettingsPath = path.join(pkgRelDir, ".claude/settings.json");
|
|
2379
|
+
results.push(writeOrMergeSettings(ctx, pkgSettingsPath, {
|
|
2380
|
+
permissions: {
|
|
2381
|
+
allow: [],
|
|
2382
|
+
deny: []
|
|
2383
|
+
},
|
|
2384
|
+
instructions: [],
|
|
2385
|
+
enabledPlugins: { "frontend-design@claude-plugins-official": true },
|
|
2386
|
+
extraKnownMarketplaces: {}
|
|
2387
|
+
}));
|
|
2388
|
+
}
|
|
2389
|
+
return results;
|
|
2390
|
+
}
|
|
2391
|
+
//#endregion
|
|
2392
|
+
//#region src/generators/release-it.ts
|
|
2393
|
+
function buildConfig$2(ci, isMonorepo) {
|
|
2394
|
+
const config = {
|
|
2395
|
+
$schema: "https://unpkg.com/release-it/schema/release-it.json",
|
|
2396
|
+
git: {
|
|
2397
|
+
commitMessage: "chore: release v${version}",
|
|
2398
|
+
tagName: "v${version}"
|
|
2399
|
+
},
|
|
2400
|
+
npm: { publish: true }
|
|
2401
|
+
};
|
|
2402
|
+
if (ci === "github") config["github"] = { release: true };
|
|
2403
|
+
const plugins = {};
|
|
2404
|
+
if (isMonorepo) {
|
|
2405
|
+
config["npm"] = {
|
|
2406
|
+
publish: true,
|
|
2407
|
+
allowSameVersion: true
|
|
2408
|
+
};
|
|
2409
|
+
plugins["@release-it/bumper"] = { out: "packages/*/package.json" };
|
|
2499
2410
|
}
|
|
2500
|
-
|
|
2411
|
+
if (Object.keys(plugins).length > 0) config["plugins"] = plugins;
|
|
2412
|
+
return config;
|
|
2501
2413
|
}
|
|
2502
|
-
|
|
2503
|
-
|
|
2504
|
-
|
|
2505
|
-
|
|
2506
|
-
|
|
2507
|
-
|
|
2508
|
-
|
|
2509
|
-
|
|
2510
|
-
|
|
2511
|
-
|
|
2512
|
-
|
|
2513
|
-
|
|
2514
|
-
|
|
2515
|
-
|
|
2516
|
-
}
|
|
2414
|
+
async function generateReleaseIt(ctx) {
|
|
2415
|
+
const filePath = ".release-it.json";
|
|
2416
|
+
if (ctx.config.releaseStrategy !== "release-it") return {
|
|
2417
|
+
filePath,
|
|
2418
|
+
action: "skipped",
|
|
2419
|
+
description: "release-it not requested"
|
|
2420
|
+
};
|
|
2421
|
+
const content = JSON.stringify(buildConfig$2(ctx.config.ci, ctx.config.structure === "monorepo"), null, 2) + "\n";
|
|
2422
|
+
const existing = ctx.read(filePath);
|
|
2423
|
+
if (existing) {
|
|
2424
|
+
if (contentEqual(filePath, existing, content)) return {
|
|
2425
|
+
filePath,
|
|
2426
|
+
action: "skipped",
|
|
2427
|
+
description: "Already configured"
|
|
2428
|
+
};
|
|
2429
|
+
if (await ctx.confirmOverwrite(filePath) === "skip") return {
|
|
2430
|
+
filePath,
|
|
2431
|
+
action: "skipped",
|
|
2432
|
+
description: "Existing release-it config preserved"
|
|
2433
|
+
};
|
|
2517
2434
|
}
|
|
2518
|
-
|
|
2435
|
+
ctx.write(filePath, content);
|
|
2436
|
+
return {
|
|
2437
|
+
filePath,
|
|
2438
|
+
action: existing ? "updated" : "created",
|
|
2439
|
+
description: "Generated .release-it.json"
|
|
2440
|
+
};
|
|
2519
2441
|
}
|
|
2520
|
-
|
|
2521
|
-
|
|
2522
|
-
|
|
2523
|
-
|
|
2524
|
-
|
|
2525
|
-
|
|
2526
|
-
|
|
2527
|
-
|
|
2528
|
-
|
|
2529
|
-
|
|
2530
|
-
|
|
2442
|
+
//#endregion
|
|
2443
|
+
//#region src/generators/changesets.ts
|
|
2444
|
+
function buildConfig$1() {
|
|
2445
|
+
return {
|
|
2446
|
+
$schema: "https://unpkg.com/@changesets/config@3.1.1/schema.json",
|
|
2447
|
+
changelog: "@changesets/cli/changelog",
|
|
2448
|
+
commit: false,
|
|
2449
|
+
fixed: [],
|
|
2450
|
+
linked: [],
|
|
2451
|
+
access: "public",
|
|
2452
|
+
baseBranch: "main",
|
|
2453
|
+
updateInternalDependencies: "patch",
|
|
2454
|
+
ignore: []
|
|
2455
|
+
};
|
|
2456
|
+
}
|
|
2457
|
+
async function generateChangesets(ctx) {
|
|
2458
|
+
const filePath = ".changeset/config.json";
|
|
2459
|
+
if (ctx.config.releaseStrategy !== "changesets") return {
|
|
2460
|
+
filePath,
|
|
2461
|
+
action: "skipped",
|
|
2462
|
+
description: "Changesets not requested"
|
|
2463
|
+
};
|
|
2464
|
+
const content = JSON.stringify(buildConfig$1(), null, 2) + "\n";
|
|
2465
|
+
const existing = ctx.read(filePath);
|
|
2466
|
+
if (existing) return {
|
|
2467
|
+
filePath,
|
|
2468
|
+
action: "skipped",
|
|
2469
|
+
description: "Existing changesets config preserved"
|
|
2470
|
+
};
|
|
2471
|
+
ctx.write(filePath, content);
|
|
2472
|
+
return {
|
|
2473
|
+
filePath,
|
|
2474
|
+
action: existing ? "updated" : "created",
|
|
2475
|
+
description: "Generated .changeset/config.json"
|
|
2476
|
+
};
|
|
2477
|
+
}
|
|
2478
|
+
//#endregion
|
|
2479
|
+
//#region src/generators/release-ci.ts
|
|
2480
|
+
function commonSteps(nodeVersionYaml, publishesNpm) {
|
|
2481
|
+
return ` - uses: actions/checkout@v4
|
|
2482
|
+
with:
|
|
2483
|
+
fetch-depth: 0
|
|
2484
|
+
- uses: pnpm/action-setup@v4
|
|
2485
|
+
- uses: actions/setup-node@v4
|
|
2486
|
+
with:
|
|
2487
|
+
${nodeVersionYaml}
|
|
2488
|
+
cache: pnpm${publishesNpm ? `\n registry-url: "https://registry.npmjs.org"` : ""}
|
|
2489
|
+
- run: pnpm install --frozen-lockfile`;
|
|
2490
|
+
}
|
|
2491
|
+
function releaseItWorkflow(ci, nodeVersionYaml, publishesNpm) {
|
|
2492
|
+
const isGitHub = ci === "github";
|
|
2493
|
+
const permissions = isGitHub ? `
|
|
2494
|
+
permissions:
|
|
2495
|
+
contents: write
|
|
2496
|
+
` : "";
|
|
2497
|
+
const tokenEnv = isGitHub ? `GITHUB_TOKEN: \${{ github.token }}` : `FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}`;
|
|
2498
|
+
const npmEnv = publishesNpm ? `\n NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}` : "";
|
|
2499
|
+
return `${workflowSchemaComment(ci)}name: Release
|
|
2500
|
+
on:
|
|
2501
|
+
workflow_dispatch:
|
|
2502
|
+
${permissions}
|
|
2503
|
+
jobs:
|
|
2504
|
+
release:
|
|
2505
|
+
runs-on: ubuntu-latest
|
|
2506
|
+
steps:
|
|
2507
|
+
${commonSteps(nodeVersionYaml, publishesNpm)}
|
|
2508
|
+
- run: pnpm release-it --ci
|
|
2509
|
+
env:
|
|
2510
|
+
${tokenEnv}${npmEnv}
|
|
2511
|
+
`;
|
|
2512
|
+
}
|
|
2513
|
+
function commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm) {
|
|
2514
|
+
const isGitHub = ci === "github";
|
|
2515
|
+
const permissions = isGitHub ? `
|
|
2516
|
+
permissions:
|
|
2517
|
+
contents: write
|
|
2518
|
+
` : "";
|
|
2519
|
+
const gitConfigStep = `
|
|
2520
|
+
- name: Configure git
|
|
2521
|
+
run: |
|
|
2522
|
+
git config user.name "${isGitHub ? "github-actions[bot]" : "forgejo-actions[bot]"}"
|
|
2523
|
+
git config user.email "${isGitHub ? "github-actions[bot]@users.noreply.github.com" : "forgejo-actions[bot]@noreply.localhost"}"`;
|
|
2524
|
+
const releaseStep = isGitHub ? `
|
|
2525
|
+
- name: Release
|
|
2526
|
+
env:
|
|
2527
|
+
GITHUB_TOKEN: \${{ github.token }}
|
|
2528
|
+
run: pnpm exec bst release:simple` : `
|
|
2529
|
+
- name: Release
|
|
2530
|
+
env:
|
|
2531
|
+
FORGEJO_SERVER_URL: \${{ github.server_url }}
|
|
2532
|
+
FORGEJO_REPOSITORY: \${{ github.repository }}
|
|
2533
|
+
FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}
|
|
2534
|
+
run: pnpm exec bst release:simple`;
|
|
2535
|
+
return `${workflowSchemaComment(ci)}name: Release
|
|
2536
|
+
on:
|
|
2537
|
+
workflow_dispatch:
|
|
2538
|
+
${permissions}
|
|
2539
|
+
jobs:
|
|
2540
|
+
release:
|
|
2541
|
+
runs-on: ubuntu-latest
|
|
2542
|
+
steps:
|
|
2543
|
+
${commonSteps(nodeVersionYaml, publishesNpm)}${gitConfigStep}${releaseStep}
|
|
2544
|
+
`;
|
|
2545
|
+
}
|
|
2546
|
+
/** Build the required release step for the check job (changesets). */
|
|
2547
|
+
function changesetsReleaseStep(ci, publishesNpm) {
|
|
2548
|
+
if (ci === "github") return {
|
|
2549
|
+
match: { uses: "changesets/action" },
|
|
2550
|
+
step: {
|
|
2551
|
+
uses: "changesets/action@v1",
|
|
2552
|
+
if: "github.ref == 'refs/heads/main'",
|
|
2553
|
+
with: {
|
|
2554
|
+
publish: "pnpm changeset publish",
|
|
2555
|
+
version: "pnpm changeset version"
|
|
2556
|
+
},
|
|
2557
|
+
env: {
|
|
2558
|
+
GITHUB_TOKEN: actionsExpr("github.token"),
|
|
2559
|
+
...publishesNpm && { NPM_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
|
|
2560
|
+
}
|
|
2531
2561
|
}
|
|
2532
|
-
|
|
2533
|
-
|
|
2534
|
-
|
|
2562
|
+
};
|
|
2563
|
+
return {
|
|
2564
|
+
match: { run: "release:changesets" },
|
|
2565
|
+
step: {
|
|
2566
|
+
name: "Release",
|
|
2567
|
+
if: "github.ref == 'refs/heads/main'",
|
|
2568
|
+
env: {
|
|
2569
|
+
FORGEJO_SERVER_URL: actionsExpr("github.server_url"),
|
|
2570
|
+
FORGEJO_REPOSITORY: actionsExpr("github.repository"),
|
|
2571
|
+
FORGEJO_TOKEN: actionsExpr("secrets.FORGEJO_TOKEN"),
|
|
2572
|
+
...publishesNpm && { NODE_AUTH_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
|
|
2573
|
+
},
|
|
2574
|
+
run: "pnpm exec bst release:changesets"
|
|
2535
2575
|
}
|
|
2536
|
-
}
|
|
2537
|
-
if (pkg.scripts?.["prepare"] && /\bhusky\b/.test(pkg.scripts["prepare"])) {
|
|
2538
|
-
delete pkg.scripts["prepare"];
|
|
2539
|
-
changes.push("removed husky prepare script");
|
|
2540
|
-
}
|
|
2541
|
-
if (changes.length === 0) return;
|
|
2542
|
-
ctx.write("package.json", JSON.stringify(pkg, null, 2) + "\n");
|
|
2543
|
-
results.push({
|
|
2544
|
-
filePath: "package.json",
|
|
2545
|
-
action: "updated",
|
|
2546
|
-
description: changes.join(", ")
|
|
2547
|
-
});
|
|
2576
|
+
};
|
|
2548
2577
|
}
|
|
2549
|
-
|
|
2550
|
-
const
|
|
2551
|
-
const
|
|
2552
|
-
|
|
2553
|
-
|
|
2554
|
-
|
|
2555
|
-
|
|
2556
|
-
|
|
2557
|
-
|
|
2558
|
-
|
|
2559
|
-
|
|
2560
|
-
|
|
2561
|
-
|
|
2562
|
-
|
|
2563
|
-
|
|
2564
|
-
|
|
2565
|
-
|
|
2566
|
-
|
|
2567
|
-
|
|
2568
|
-
|
|
2569
|
-
|
|
2570
|
-
|
|
2571
|
-
|
|
2578
|
+
function requiredReleaseSteps(strategy, nodeVersionYaml, publishesNpm) {
|
|
2579
|
+
const isNodeVersionFile = nodeVersionYaml.startsWith("node-version-file");
|
|
2580
|
+
const steps = [
|
|
2581
|
+
{
|
|
2582
|
+
match: { uses: "actions/checkout" },
|
|
2583
|
+
step: {
|
|
2584
|
+
uses: "actions/checkout@v4",
|
|
2585
|
+
with: { "fetch-depth": 0 }
|
|
2586
|
+
}
|
|
2587
|
+
},
|
|
2588
|
+
{
|
|
2589
|
+
match: { uses: "pnpm/action-setup" },
|
|
2590
|
+
step: { uses: "pnpm/action-setup@v4" }
|
|
2591
|
+
},
|
|
2592
|
+
{
|
|
2593
|
+
match: { uses: "actions/setup-node" },
|
|
2594
|
+
step: {
|
|
2595
|
+
uses: "actions/setup-node@v4",
|
|
2596
|
+
with: {
|
|
2597
|
+
...isNodeVersionFile ? { "node-version-file": "package.json" } : { "node-version": "24" },
|
|
2598
|
+
cache: "pnpm",
|
|
2599
|
+
...publishesNpm && { "registry-url": "https://registry.npmjs.org" }
|
|
2600
|
+
}
|
|
2601
|
+
}
|
|
2602
|
+
},
|
|
2603
|
+
{
|
|
2604
|
+
match: { run: "pnpm install" },
|
|
2605
|
+
step: { run: "pnpm install --frozen-lockfile" }
|
|
2606
|
+
}
|
|
2607
|
+
];
|
|
2608
|
+
switch (strategy) {
|
|
2609
|
+
case "release-it":
|
|
2610
|
+
steps.push({
|
|
2611
|
+
match: { run: "release-it" },
|
|
2612
|
+
step: { run: "pnpm release-it --ci" }
|
|
2572
2613
|
});
|
|
2573
|
-
|
|
2574
|
-
|
|
2575
|
-
|
|
2576
|
-
|
|
2577
|
-
|
|
2578
|
-
|
|
2614
|
+
break;
|
|
2615
|
+
case "simple":
|
|
2616
|
+
steps.push({
|
|
2617
|
+
match: { run: "release:simple" },
|
|
2618
|
+
step: { run: "pnpm exec bst release:simple" }
|
|
2619
|
+
});
|
|
2620
|
+
break;
|
|
2621
|
+
case "changesets":
|
|
2622
|
+
steps.push({
|
|
2623
|
+
match: { run: "changeset" },
|
|
2624
|
+
step: { run: "pnpm exec bst release:changesets" }
|
|
2625
|
+
});
|
|
2626
|
+
break;
|
|
2579
2627
|
}
|
|
2580
|
-
|
|
2581
|
-
results.push({
|
|
2582
|
-
filePath,
|
|
2583
|
-
action: "created",
|
|
2584
|
-
description: "Generated lefthook pre-commit config"
|
|
2585
|
-
});
|
|
2586
|
-
return results;
|
|
2587
|
-
}
|
|
2588
|
-
//#endregion
|
|
2589
|
-
//#region src/generators/vscode-settings.ts
|
|
2590
|
-
const SCHEMA_NPM_PATH = "@bensandee/config/schemas/forgejo-workflow.schema.json";
|
|
2591
|
-
const SCHEMA_LOCAL_PATH = ".vscode/forgejo-workflow.schema.json";
|
|
2592
|
-
const SETTINGS_PATH = ".vscode/settings.json";
|
|
2593
|
-
const SCHEMA_GLOB = ".forgejo/workflows/*.{yml,yaml}";
|
|
2594
|
-
const VscodeSettingsSchema = z.looseObject({ "yaml.schemas": z.record(z.string(), z.unknown()).default({}) });
|
|
2595
|
-
function readSchemaFromNodeModules(targetDir) {
|
|
2596
|
-
const candidate = path.join(targetDir, "node_modules", SCHEMA_NPM_PATH);
|
|
2597
|
-
if (!existsSync(candidate)) return void 0;
|
|
2598
|
-
return readFileSync(candidate, "utf-8");
|
|
2628
|
+
return steps;
|
|
2599
2629
|
}
|
|
2600
|
-
function
|
|
2601
|
-
|
|
2630
|
+
function buildWorkflow(strategy, ci, nodeVersionYaml, publishesNpm) {
|
|
2631
|
+
switch (strategy) {
|
|
2632
|
+
case "release-it": return releaseItWorkflow(ci, nodeVersionYaml, publishesNpm);
|
|
2633
|
+
case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm);
|
|
2634
|
+
default: return null;
|
|
2635
|
+
}
|
|
2602
2636
|
}
|
|
2603
|
-
|
|
2604
|
-
|
|
2605
|
-
|
|
2606
|
-
|
|
2607
|
-
|
|
2608
|
-
|
|
2609
|
-
|
|
2610
|
-
changed: false
|
|
2611
|
-
};
|
|
2612
|
-
yamlSchemas[SCHEMA_LOCAL_PATH] = SCHEMA_GLOB;
|
|
2613
|
-
return {
|
|
2614
|
-
merged: {
|
|
2615
|
-
...settings,
|
|
2616
|
-
"yaml.schemas": yamlSchemas
|
|
2617
|
-
},
|
|
2618
|
-
changed: true
|
|
2637
|
+
function generateChangesetsReleaseCi(ctx, publishesNpm) {
|
|
2638
|
+
const ciPath = ciWorkflowPath(ctx.config.ci, ctx.config.releaseStrategy);
|
|
2639
|
+
const raw = ctx.read(ciPath);
|
|
2640
|
+
if (!raw) return {
|
|
2641
|
+
filePath: ciPath,
|
|
2642
|
+
action: "skipped",
|
|
2643
|
+
description: "CI workflow not found — run check generator first"
|
|
2619
2644
|
};
|
|
2620
|
-
|
|
2621
|
-
|
|
2622
|
-
if (
|
|
2623
|
-
|
|
2624
|
-
|
|
2625
|
-
|
|
2626
|
-
|
|
2627
|
-
|
|
2628
|
-
|
|
2629
|
-
|
|
2630
|
-
|
|
2631
|
-
|
|
2632
|
-
action: "skipped",
|
|
2633
|
-
description: "Could not parse existing settings"
|
|
2634
|
-
};
|
|
2635
|
-
const { merged, changed } = mergeYamlSchemas(parsed.data);
|
|
2636
|
-
if (!changed) return {
|
|
2637
|
-
filePath: SETTINGS_PATH,
|
|
2638
|
-
action: "skipped",
|
|
2639
|
-
description: "Already has Forgejo schema mapping"
|
|
2640
|
-
};
|
|
2641
|
-
ctx.write(SETTINGS_PATH, serializeSettings(merged));
|
|
2645
|
+
const existing = migrateToolingBinary(raw);
|
|
2646
|
+
const merged = mergeWorkflowSteps(existing, "check", [changesetsReleaseStep(ctx.config.ci, publishesNpm)]);
|
|
2647
|
+
if (!merged.changed) {
|
|
2648
|
+
if (existing !== raw) {
|
|
2649
|
+
const withComment = ensureSchemaComment(existing, ctx.config.ci);
|
|
2650
|
+
ctx.write(ciPath, withComment);
|
|
2651
|
+
return {
|
|
2652
|
+
filePath: ciPath,
|
|
2653
|
+
action: "updated",
|
|
2654
|
+
description: "Migrated tooling binary name in CI workflow"
|
|
2655
|
+
};
|
|
2656
|
+
}
|
|
2642
2657
|
return {
|
|
2643
|
-
filePath:
|
|
2644
|
-
action: "
|
|
2645
|
-
description: "
|
|
2658
|
+
filePath: ciPath,
|
|
2659
|
+
action: "skipped",
|
|
2660
|
+
description: "Release step in CI workflow already up to date"
|
|
2646
2661
|
};
|
|
2647
2662
|
}
|
|
2648
|
-
|
|
2663
|
+
const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
|
|
2664
|
+
ctx.write(ciPath, withComment);
|
|
2649
2665
|
return {
|
|
2650
|
-
filePath:
|
|
2651
|
-
action: "
|
|
2652
|
-
description: "
|
|
2666
|
+
filePath: ciPath,
|
|
2667
|
+
action: "updated",
|
|
2668
|
+
description: "Added release step to CI workflow"
|
|
2653
2669
|
};
|
|
2654
2670
|
}
|
|
2655
|
-
async function
|
|
2656
|
-
const
|
|
2657
|
-
if (ctx.config.ci
|
|
2658
|
-
|
|
2659
|
-
filePath: SETTINGS_PATH,
|
|
2660
|
-
action: "skipped",
|
|
2661
|
-
description: "Not a Forgejo project"
|
|
2662
|
-
});
|
|
2663
|
-
return results;
|
|
2664
|
-
}
|
|
2665
|
-
const schemaContent = readSchemaFromNodeModules(ctx.targetDir);
|
|
2666
|
-
if (!schemaContent) {
|
|
2667
|
-
results.push({
|
|
2668
|
-
filePath: SCHEMA_LOCAL_PATH,
|
|
2669
|
-
action: "skipped",
|
|
2670
|
-
description: "Could not find @bensandee/config schema in node_modules"
|
|
2671
|
-
});
|
|
2672
|
-
return results;
|
|
2673
|
-
}
|
|
2674
|
-
const existingSchema = ctx.read(SCHEMA_LOCAL_PATH);
|
|
2675
|
-
if (existingSchema !== void 0 && contentEqual(SCHEMA_LOCAL_PATH, existingSchema, schemaContent)) results.push({
|
|
2676
|
-
filePath: SCHEMA_LOCAL_PATH,
|
|
2671
|
+
async function generateReleaseCi(ctx) {
|
|
2672
|
+
const filePath = "release-ci";
|
|
2673
|
+
if (ctx.config.releaseStrategy === "none" || ctx.config.ci === "none") return {
|
|
2674
|
+
filePath,
|
|
2677
2675
|
action: "skipped",
|
|
2678
|
-
description: "
|
|
2679
|
-
});
|
|
2680
|
-
else {
|
|
2681
|
-
ctx.write(SCHEMA_LOCAL_PATH, schemaContent);
|
|
2682
|
-
results.push({
|
|
2683
|
-
filePath: SCHEMA_LOCAL_PATH,
|
|
2684
|
-
action: existingSchema ? "updated" : "created",
|
|
2685
|
-
description: "Copied Forgejo workflow schema from @bensandee/config"
|
|
2686
|
-
});
|
|
2687
|
-
}
|
|
2688
|
-
results.push(writeSchemaToSettings(ctx));
|
|
2689
|
-
return results;
|
|
2690
|
-
}
|
|
2691
|
-
//#endregion
|
|
2692
|
-
//#region src/generators/pipeline.ts
|
|
2693
|
-
/** Run all generators sequentially and return their results. */
|
|
2694
|
-
async function runGenerators(ctx) {
|
|
2695
|
-
const results = [];
|
|
2696
|
-
results.push(await generatePackageJson(ctx));
|
|
2697
|
-
results.push(await generatePnpmWorkspace(ctx));
|
|
2698
|
-
results.push(...await generateTsconfig(ctx));
|
|
2699
|
-
results.push(await generateTsdown(ctx));
|
|
2700
|
-
results.push(await generateOxlint(ctx));
|
|
2701
|
-
results.push(await generateFormatter(ctx));
|
|
2702
|
-
results.push(...await generateLefthook(ctx));
|
|
2703
|
-
results.push(await generateGitignore(ctx));
|
|
2704
|
-
results.push(await generateKnip(ctx));
|
|
2705
|
-
results.push(await generateRenovate(ctx));
|
|
2706
|
-
results.push(await generateCi(ctx));
|
|
2707
|
-
results.push(...await generateClaudeSettings(ctx));
|
|
2708
|
-
results.push(await generateReleaseIt(ctx));
|
|
2709
|
-
results.push(await generateChangesets(ctx));
|
|
2710
|
-
results.push(await generateReleaseCi(ctx));
|
|
2711
|
-
results.push(await generateDeployCi(ctx));
|
|
2712
|
-
results.push(...await generateVitest(ctx));
|
|
2713
|
-
results.push(...await generateVscodeSettings(ctx));
|
|
2714
|
-
results.push(saveToolingConfig(ctx, ctx.config));
|
|
2715
|
-
return results;
|
|
2716
|
-
}
|
|
2717
|
-
//#endregion
|
|
2718
|
-
//#region src/release/docker.ts
|
|
2719
|
-
const ToolingDockerMapSchema = z.record(z.string(), z.object({
|
|
2720
|
-
dockerfile: z.string(),
|
|
2721
|
-
context: z.string().default(".")
|
|
2722
|
-
}));
|
|
2723
|
-
const ToolingConfigDockerSchema = z.object({ docker: ToolingDockerMapSchema.optional() });
|
|
2724
|
-
const PackageInfoSchema = z.object({
|
|
2725
|
-
name: z.string().optional(),
|
|
2726
|
-
version: z.string().optional()
|
|
2727
|
-
});
|
|
2728
|
-
/** Read the docker map from .tooling.json. Returns empty record if missing or invalid. */
|
|
2729
|
-
function loadDockerMap(executor, cwd) {
|
|
2730
|
-
const configPath = path.join(cwd, ".tooling.json");
|
|
2731
|
-
const raw = executor.readFile(configPath);
|
|
2732
|
-
if (!raw) return {};
|
|
2733
|
-
try {
|
|
2734
|
-
const result = ToolingConfigDockerSchema.safeParse(JSON.parse(raw));
|
|
2735
|
-
if (!result.success || !result.data.docker) return {};
|
|
2736
|
-
return result.data.docker;
|
|
2737
|
-
} catch (_error) {
|
|
2738
|
-
return {};
|
|
2739
|
-
}
|
|
2740
|
-
}
|
|
2741
|
-
/** Read name and version from a package's package.json. */
|
|
2742
|
-
function readPackageInfo(executor, packageJsonPath) {
|
|
2743
|
-
const raw = executor.readFile(packageJsonPath);
|
|
2744
|
-
if (!raw) return {
|
|
2745
|
-
name: void 0,
|
|
2746
|
-
version: void 0
|
|
2676
|
+
description: "Release CI workflow not applicable"
|
|
2747
2677
|
};
|
|
2748
|
-
|
|
2749
|
-
|
|
2750
|
-
|
|
2751
|
-
|
|
2752
|
-
|
|
2753
|
-
|
|
2754
|
-
|
|
2755
|
-
|
|
2756
|
-
|
|
2757
|
-
|
|
2758
|
-
}
|
|
2678
|
+
const publishesNpm = ctx.config.publishNpm === true;
|
|
2679
|
+
if (ctx.config.releaseStrategy === "changesets") return generateChangesetsReleaseCi(ctx, publishesNpm);
|
|
2680
|
+
const isGitHub = ctx.config.ci === "github";
|
|
2681
|
+
const workflowPath = isGitHub ? ".github/workflows/release.yml" : ".forgejo/workflows/release.yml";
|
|
2682
|
+
const nodeVersionYaml = computeNodeVersionYaml(ctx);
|
|
2683
|
+
const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml, publishesNpm);
|
|
2684
|
+
if (!content) return {
|
|
2685
|
+
filePath,
|
|
2686
|
+
action: "skipped",
|
|
2687
|
+
description: "Release CI workflow not applicable"
|
|
2688
|
+
};
|
|
2689
|
+
if (ctx.exists(workflowPath)) {
|
|
2690
|
+
const raw = ctx.read(workflowPath);
|
|
2691
|
+
if (raw) {
|
|
2692
|
+
const existing = migrateToolingBinary(raw);
|
|
2693
|
+
if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) {
|
|
2694
|
+
if (existing !== raw) {
|
|
2695
|
+
ctx.write(workflowPath, ensureSchemaComment(existing, ctx.config.ci));
|
|
2696
|
+
return {
|
|
2697
|
+
filePath: workflowPath,
|
|
2698
|
+
action: "updated",
|
|
2699
|
+
description: "Migrated tooling binary name in release workflow"
|
|
2700
|
+
};
|
|
2701
|
+
}
|
|
2702
|
+
return {
|
|
2703
|
+
filePath: workflowPath,
|
|
2704
|
+
action: "skipped",
|
|
2705
|
+
description: "Release workflow already up to date"
|
|
2706
|
+
};
|
|
2707
|
+
}
|
|
2708
|
+
const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps(ctx.config.releaseStrategy, nodeVersionYaml, publishesNpm));
|
|
2709
|
+
const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
|
|
2710
|
+
if (!merged.changed) {
|
|
2711
|
+
if (withComment !== raw) {
|
|
2712
|
+
ctx.write(workflowPath, withComment);
|
|
2713
|
+
return {
|
|
2714
|
+
filePath: workflowPath,
|
|
2715
|
+
action: "updated",
|
|
2716
|
+
description: existing !== raw ? "Migrated tooling binary name in release workflow" : "Added schema comment to release workflow"
|
|
2717
|
+
};
|
|
2718
|
+
}
|
|
2719
|
+
return {
|
|
2720
|
+
filePath: workflowPath,
|
|
2721
|
+
action: "skipped",
|
|
2722
|
+
description: "Existing release workflow preserved"
|
|
2723
|
+
};
|
|
2724
|
+
}
|
|
2725
|
+
ctx.write(workflowPath, withComment);
|
|
2726
|
+
return {
|
|
2727
|
+
filePath: workflowPath,
|
|
2728
|
+
action: "updated",
|
|
2729
|
+
description: "Added missing steps to release workflow"
|
|
2730
|
+
};
|
|
2731
|
+
}
|
|
2759
2732
|
return {
|
|
2760
|
-
|
|
2761
|
-
|
|
2733
|
+
filePath: workflowPath,
|
|
2734
|
+
action: "skipped",
|
|
2735
|
+
description: "Release workflow already up to date"
|
|
2762
2736
|
};
|
|
2763
2737
|
}
|
|
2738
|
+
ctx.write(workflowPath, content);
|
|
2739
|
+
return {
|
|
2740
|
+
filePath: workflowPath,
|
|
2741
|
+
action: "created",
|
|
2742
|
+
description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions release workflow`
|
|
2743
|
+
};
|
|
2764
2744
|
}
|
|
2765
|
-
|
|
2766
|
-
|
|
2767
|
-
|
|
2768
|
-
return
|
|
2745
|
+
//#endregion
|
|
2746
|
+
//#region src/generators/lefthook.ts
|
|
2747
|
+
function requiredCommands(formatter) {
|
|
2748
|
+
return {
|
|
2749
|
+
lint: { run: "pnpm exec oxlint {staged_files}" },
|
|
2750
|
+
format: {
|
|
2751
|
+
run: formatter === "prettier" ? "pnpm exec prettier --write {staged_files}" : "pnpm exec oxfmt --no-error-on-unmatched-pattern {staged_files}",
|
|
2752
|
+
stage_fixed: true
|
|
2753
|
+
}
|
|
2754
|
+
};
|
|
2769
2755
|
}
|
|
2770
|
-
|
|
2771
|
-
|
|
2772
|
-
|
|
2773
|
-
|
|
2774
|
-
|
|
2775
|
-
|
|
2776
|
-
|
|
2777
|
-
|
|
2778
|
-
|
|
2779
|
-
|
|
2780
|
-
|
|
2781
|
-
|
|
2782
|
-
|
|
2756
|
+
function buildConfig(formatter) {
|
|
2757
|
+
return [
|
|
2758
|
+
"pre-commit:",
|
|
2759
|
+
" commands:",
|
|
2760
|
+
" lint:",
|
|
2761
|
+
" run: pnpm exec oxlint {staged_files}",
|
|
2762
|
+
" format:",
|
|
2763
|
+
` run: ${formatter === "prettier" ? "pnpm exec prettier --write {staged_files}" : "pnpm exec oxfmt --no-error-on-unmatched-pattern {staged_files}"}`,
|
|
2764
|
+
" stage_fixed: true",
|
|
2765
|
+
""
|
|
2766
|
+
].join("\n");
|
|
2767
|
+
}
|
|
2768
|
+
const ARCHIVE_DIR = ".tooling-archived";
|
|
2769
|
+
/** Common client-side git hooks that husky may have configured. */
|
|
2770
|
+
const HUSKY_HOOK_NAMES = [
|
|
2771
|
+
"pre-commit",
|
|
2772
|
+
"commit-msg",
|
|
2773
|
+
"pre-push",
|
|
2774
|
+
"post-merge",
|
|
2775
|
+
"post-checkout",
|
|
2776
|
+
"prepare-commit-msg"
|
|
2777
|
+
];
|
|
2778
|
+
/** All known lint-staged config file locations to archive. */
|
|
2779
|
+
const LINT_STAGED_CONFIG_PATHS = [
|
|
2780
|
+
"lint-staged.config.mjs",
|
|
2781
|
+
"lint-staged.config.js",
|
|
2782
|
+
"lint-staged.config.cjs",
|
|
2783
|
+
".lintstagedrc",
|
|
2784
|
+
".lintstagedrc.json",
|
|
2785
|
+
".lintstagedrc.yaml",
|
|
2786
|
+
".lintstagedrc.yml",
|
|
2787
|
+
".lintstagedrc.mjs",
|
|
2788
|
+
".lintstagedrc.cjs"
|
|
2789
|
+
];
|
|
2790
|
+
/** All known lefthook config file locations, in priority order. */
|
|
2791
|
+
const LEFTHOOK_CONFIG_PATHS = ["lefthook.yml", ".lefthook.yml"];
|
|
2792
|
+
/** Archive all husky hook files found in .husky/ */
|
|
2793
|
+
function archiveHuskyHooks(ctx, results) {
|
|
2794
|
+
let found = false;
|
|
2795
|
+
for (const hook of HUSKY_HOOK_NAMES) {
|
|
2796
|
+
const huskyPath = `.husky/${hook}`;
|
|
2797
|
+
const existing = ctx.read(huskyPath);
|
|
2798
|
+
if (existing !== void 0) {
|
|
2799
|
+
ctx.write(`${ARCHIVE_DIR}/${huskyPath}`, existing);
|
|
2800
|
+
ctx.remove(huskyPath);
|
|
2801
|
+
results.push({
|
|
2802
|
+
filePath: huskyPath,
|
|
2803
|
+
action: "archived",
|
|
2804
|
+
description: `Moved to ${ARCHIVE_DIR}/${huskyPath}`
|
|
2805
|
+
});
|
|
2806
|
+
found = true;
|
|
2807
|
+
}
|
|
2783
2808
|
}
|
|
2809
|
+
return found;
|
|
2784
2810
|
}
|
|
2785
|
-
/**
|
|
2786
|
-
|
|
2787
|
-
|
|
2788
|
-
|
|
2789
|
-
|
|
2790
|
-
|
|
2791
|
-
|
|
2792
|
-
|
|
2793
|
-
|
|
2811
|
+
/** Archive all lint-staged config files. */
|
|
2812
|
+
function archiveLintStagedConfigs(ctx, results) {
|
|
2813
|
+
let found = false;
|
|
2814
|
+
for (const lsPath of LINT_STAGED_CONFIG_PATHS) {
|
|
2815
|
+
const existing = ctx.read(lsPath);
|
|
2816
|
+
if (existing !== void 0) {
|
|
2817
|
+
ctx.write(`${ARCHIVE_DIR}/${lsPath}`, existing);
|
|
2818
|
+
ctx.remove(lsPath);
|
|
2819
|
+
results.push({
|
|
2820
|
+
filePath: lsPath,
|
|
2821
|
+
action: "archived",
|
|
2822
|
+
description: `Moved to ${ARCHIVE_DIR}/${lsPath}`
|
|
2823
|
+
});
|
|
2824
|
+
found = true;
|
|
2825
|
+
}
|
|
2826
|
+
}
|
|
2827
|
+
return found;
|
|
2794
2828
|
}
|
|
2795
|
-
/**
|
|
2796
|
-
|
|
2797
|
-
|
|
2798
|
-
|
|
2799
|
-
|
|
2800
|
-
|
|
2801
|
-
|
|
2802
|
-
|
|
2803
|
-
|
|
2804
|
-
|
|
2805
|
-
|
|
2806
|
-
function detectDockerPackages(executor, cwd, repoName) {
|
|
2807
|
-
const overrides = loadDockerMap(executor, cwd);
|
|
2808
|
-
const packageDirs = executor.listPackageDirs(cwd);
|
|
2809
|
-
const packages = [];
|
|
2810
|
-
const seen = /* @__PURE__ */ new Set();
|
|
2811
|
-
if (packageDirs.length > 0) {
|
|
2812
|
-
for (const dir of packageDirs) {
|
|
2813
|
-
const convention = findConventionDockerfile(executor, cwd, dir);
|
|
2814
|
-
const docker = overrides[dir] ?? convention;
|
|
2815
|
-
if (docker) {
|
|
2816
|
-
const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
|
|
2817
|
-
packages.push({
|
|
2818
|
-
dir,
|
|
2819
|
-
imageName: `${repoName}-${stripScope(name ?? dir)}`,
|
|
2820
|
-
version,
|
|
2821
|
-
docker
|
|
2822
|
-
});
|
|
2823
|
-
seen.add(dir);
|
|
2824
|
-
}
|
|
2829
|
+
/** Remove husky/lint-staged from package.json devDependencies and fix prepare script. */
|
|
2830
|
+
function cleanPackageJson(ctx, results) {
|
|
2831
|
+
const raw = ctx.read("package.json");
|
|
2832
|
+
if (!raw) return;
|
|
2833
|
+
const pkg = parsePackageJson(raw);
|
|
2834
|
+
if (!pkg) return;
|
|
2835
|
+
const changes = [];
|
|
2836
|
+
if (pkg.devDependencies) {
|
|
2837
|
+
if ("husky" in pkg.devDependencies) {
|
|
2838
|
+
delete pkg.devDependencies["husky"];
|
|
2839
|
+
changes.push("removed devDependency: husky");
|
|
2825
2840
|
}
|
|
2826
|
-
|
|
2827
|
-
|
|
2828
|
-
|
|
2829
|
-
dir,
|
|
2830
|
-
imageName: `${repoName}-${stripScope(name ?? dir)}`,
|
|
2831
|
-
version,
|
|
2832
|
-
docker
|
|
2833
|
-
});
|
|
2841
|
+
if ("lint-staged" in pkg.devDependencies) {
|
|
2842
|
+
delete pkg.devDependencies["lint-staged"];
|
|
2843
|
+
changes.push("removed devDependency: lint-staged");
|
|
2834
2844
|
}
|
|
2835
|
-
}
|
|
2836
|
-
|
|
2837
|
-
|
|
2838
|
-
|
|
2839
|
-
|
|
2840
|
-
|
|
2841
|
-
|
|
2842
|
-
|
|
2843
|
-
|
|
2844
|
-
|
|
2845
|
+
}
|
|
2846
|
+
if (pkg.scripts?.["prepare"] && /\bhusky\b/.test(pkg.scripts["prepare"])) {
|
|
2847
|
+
delete pkg.scripts["prepare"];
|
|
2848
|
+
changes.push("removed husky prepare script");
|
|
2849
|
+
}
|
|
2850
|
+
if (changes.length === 0) return;
|
|
2851
|
+
ctx.write("package.json", JSON.stringify(pkg, null, 2) + "\n");
|
|
2852
|
+
results.push({
|
|
2853
|
+
filePath: "package.json",
|
|
2854
|
+
action: "updated",
|
|
2855
|
+
description: changes.join(", ")
|
|
2856
|
+
});
|
|
2857
|
+
}
|
|
2858
|
+
async function generateLefthook(ctx) {
|
|
2859
|
+
const filePath = "lefthook.yml";
|
|
2860
|
+
const content = buildConfig(ctx.config.formatter);
|
|
2861
|
+
const results = [];
|
|
2862
|
+
archiveHuskyHooks(ctx, results);
|
|
2863
|
+
archiveLintStagedConfigs(ctx, results);
|
|
2864
|
+
cleanPackageJson(ctx, results);
|
|
2865
|
+
const existingPath = LEFTHOOK_CONFIG_PATHS.find((p) => ctx.exists(p));
|
|
2866
|
+
if (existingPath) {
|
|
2867
|
+
const existing = ctx.read(existingPath);
|
|
2868
|
+
if (existing) {
|
|
2869
|
+
const merged = mergeLefthookCommands(existing, requiredCommands(ctx.config.formatter));
|
|
2870
|
+
if (merged.changed) {
|
|
2871
|
+
ctx.write(existingPath, merged.content);
|
|
2872
|
+
results.push({
|
|
2873
|
+
filePath: existingPath,
|
|
2874
|
+
action: "updated",
|
|
2875
|
+
description: "Added missing pre-commit commands"
|
|
2876
|
+
});
|
|
2877
|
+
} else results.push({
|
|
2878
|
+
filePath: existingPath,
|
|
2879
|
+
action: "skipped",
|
|
2880
|
+
description: "Lefthook config already up to date"
|
|
2845
2881
|
});
|
|
2846
|
-
}
|
|
2882
|
+
} else results.push({
|
|
2883
|
+
filePath: existingPath,
|
|
2884
|
+
action: "skipped",
|
|
2885
|
+
description: "Could not read existing lefthook config"
|
|
2886
|
+
});
|
|
2887
|
+
return results;
|
|
2847
2888
|
}
|
|
2848
|
-
|
|
2889
|
+
ctx.write(filePath, content);
|
|
2890
|
+
results.push({
|
|
2891
|
+
filePath,
|
|
2892
|
+
action: "created",
|
|
2893
|
+
description: "Generated lefthook pre-commit config"
|
|
2894
|
+
});
|
|
2895
|
+
return results;
|
|
2849
2896
|
}
|
|
2850
|
-
|
|
2851
|
-
|
|
2852
|
-
|
|
2853
|
-
|
|
2854
|
-
|
|
2855
|
-
|
|
2856
|
-
|
|
2857
|
-
|
|
2858
|
-
|
|
2859
|
-
|
|
2897
|
+
//#endregion
|
|
2898
|
+
//#region src/generators/vscode-settings.ts
|
|
2899
|
+
const SCHEMA_NPM_PATH = "@bensandee/config/schemas/forgejo-workflow.schema.json";
|
|
2900
|
+
const SCHEMA_LOCAL_PATH = ".vscode/forgejo-workflow.schema.json";
|
|
2901
|
+
const SETTINGS_PATH = ".vscode/settings.json";
|
|
2902
|
+
const SCHEMA_GLOB = ".forgejo/workflows/*.{yml,yaml}";
|
|
2903
|
+
const VscodeSettingsSchema = z.looseObject({ "yaml.schemas": z.record(z.string(), z.unknown()).default({}) });
|
|
2904
|
+
function readSchemaFromNodeModules(targetDir) {
|
|
2905
|
+
const candidate = path.join(targetDir, "node_modules", SCHEMA_NPM_PATH);
|
|
2906
|
+
if (!existsSync(candidate)) return void 0;
|
|
2907
|
+
return readFileSync(candidate, "utf-8");
|
|
2908
|
+
}
|
|
2909
|
+
function serializeSettings(settings) {
|
|
2910
|
+
return JSON.stringify(settings, null, 2) + "\n";
|
|
2911
|
+
}
|
|
2912
|
+
const YamlSchemasSchema = z.record(z.string(), z.unknown());
|
|
2913
|
+
/** Merge yaml.schemas into a settings object. Returns the result and whether anything changed. */
|
|
2914
|
+
function mergeYamlSchemas(settings) {
|
|
2915
|
+
const parsed = YamlSchemasSchema.safeParse(settings["yaml.schemas"]);
|
|
2916
|
+
const yamlSchemas = parsed.success ? { ...parsed.data } : {};
|
|
2917
|
+
if (SCHEMA_LOCAL_PATH in yamlSchemas) return {
|
|
2918
|
+
merged: settings,
|
|
2919
|
+
changed: false
|
|
2920
|
+
};
|
|
2921
|
+
yamlSchemas[SCHEMA_LOCAL_PATH] = SCHEMA_GLOB;
|
|
2860
2922
|
return {
|
|
2861
|
-
|
|
2862
|
-
|
|
2863
|
-
|
|
2864
|
-
|
|
2923
|
+
merged: {
|
|
2924
|
+
...settings,
|
|
2925
|
+
"yaml.schemas": yamlSchemas
|
|
2926
|
+
},
|
|
2927
|
+
changed: true
|
|
2865
2928
|
};
|
|
2866
2929
|
}
|
|
2867
|
-
|
|
2868
|
-
|
|
2869
|
-
|
|
2870
|
-
|
|
2871
|
-
|
|
2930
|
+
function writeSchemaToSettings(ctx) {
|
|
2931
|
+
if (ctx.exists(SETTINGS_PATH)) {
|
|
2932
|
+
const raw = ctx.read(SETTINGS_PATH);
|
|
2933
|
+
if (!raw) return {
|
|
2934
|
+
filePath: SETTINGS_PATH,
|
|
2935
|
+
action: "skipped",
|
|
2936
|
+
description: "Could not read existing settings"
|
|
2937
|
+
};
|
|
2938
|
+
const parsed = VscodeSettingsSchema.safeParse(parse(raw));
|
|
2939
|
+
if (!parsed.success) return {
|
|
2940
|
+
filePath: SETTINGS_PATH,
|
|
2941
|
+
action: "skipped",
|
|
2942
|
+
description: "Could not parse existing settings"
|
|
2943
|
+
};
|
|
2944
|
+
const { merged, changed } = mergeYamlSchemas(parsed.data);
|
|
2945
|
+
if (!changed) return {
|
|
2946
|
+
filePath: SETTINGS_PATH,
|
|
2947
|
+
action: "skipped",
|
|
2948
|
+
description: "Already has Forgejo schema mapping"
|
|
2949
|
+
};
|
|
2950
|
+
ctx.write(SETTINGS_PATH, serializeSettings(merged));
|
|
2951
|
+
return {
|
|
2952
|
+
filePath: SETTINGS_PATH,
|
|
2953
|
+
action: "updated",
|
|
2954
|
+
description: "Added Forgejo workflow schema mapping"
|
|
2955
|
+
};
|
|
2956
|
+
}
|
|
2957
|
+
ctx.write(SETTINGS_PATH, serializeSettings({ "yaml.schemas": { [SCHEMA_LOCAL_PATH]: SCHEMA_GLOB } }));
|
|
2872
2958
|
return {
|
|
2873
|
-
|
|
2874
|
-
|
|
2875
|
-
|
|
2959
|
+
filePath: SETTINGS_PATH,
|
|
2960
|
+
action: "created",
|
|
2961
|
+
description: "Generated .vscode/settings.json with Forgejo workflow schema"
|
|
2876
2962
|
};
|
|
2877
2963
|
}
|
|
2878
|
-
|
|
2879
|
-
|
|
2880
|
-
|
|
2881
|
-
|
|
2882
|
-
|
|
2883
|
-
|
|
2884
|
-
|
|
2885
|
-
|
|
2886
|
-
|
|
2887
|
-
}
|
|
2888
|
-
/** Build the full image reference: namespace/imageName:tag */
|
|
2889
|
-
function imageRef(namespace, imageName, tag) {
|
|
2890
|
-
return `${namespace}/${imageName}:${tag}`;
|
|
2891
|
-
}
|
|
2892
|
-
function log$1(message) {
|
|
2893
|
-
console.log(message);
|
|
2894
|
-
}
|
|
2895
|
-
/** Read the repo name from root package.json. */
|
|
2896
|
-
function readRepoName(executor, cwd) {
|
|
2897
|
-
const rootPkgRaw = executor.readFile(path.join(cwd, "package.json"));
|
|
2898
|
-
if (!rootPkgRaw) throw new FatalError("No package.json found in project root");
|
|
2899
|
-
const repoName = parsePackageJson(rootPkgRaw)?.name;
|
|
2900
|
-
if (!repoName) throw new FatalError("Root package.json must have a name field");
|
|
2901
|
-
return repoName;
|
|
2902
|
-
}
|
|
2903
|
-
/** Build a single docker image from its config. Paths are resolved relative to cwd. */
|
|
2904
|
-
function buildImage(executor, pkg, cwd, extraArgs) {
|
|
2905
|
-
const dockerfilePath = path.resolve(cwd, pkg.docker.dockerfile);
|
|
2906
|
-
const contextPath = path.resolve(cwd, pkg.docker.context);
|
|
2907
|
-
const command = [
|
|
2908
|
-
"docker build",
|
|
2909
|
-
`-f ${dockerfilePath}`,
|
|
2910
|
-
`-t ${pkg.imageName}:latest`,
|
|
2911
|
-
...extraArgs,
|
|
2912
|
-
contextPath
|
|
2913
|
-
].join(" ");
|
|
2914
|
-
executor.execInherit(command);
|
|
2915
|
-
}
|
|
2916
|
-
/**
|
|
2917
|
-
* Detect packages with docker config in .tooling.json and build each one.
|
|
2918
|
-
* Runs `docker build -f <dockerfile> -t <image-name>:latest <context>` for each package.
|
|
2919
|
-
* Dockerfile and context paths are resolved relative to the project root.
|
|
2920
|
-
*
|
|
2921
|
-
* When `packageDir` is set, builds only that single package (for use as an image:build script).
|
|
2922
|
-
*/
|
|
2923
|
-
function runDockerBuild(executor, config) {
|
|
2924
|
-
const repoName = readRepoName(executor, config.cwd);
|
|
2925
|
-
if (config.packageDir) {
|
|
2926
|
-
const pkg = readSinglePackageDocker(executor, config.cwd, config.packageDir, repoName);
|
|
2927
|
-
log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
|
|
2928
|
-
buildImage(executor, pkg, config.cwd, config.extraArgs);
|
|
2929
|
-
log$1(`Built ${pkg.imageName}:latest`);
|
|
2930
|
-
return { packages: [pkg] };
|
|
2931
|
-
}
|
|
2932
|
-
const packages = detectDockerPackages(executor, config.cwd, repoName);
|
|
2933
|
-
if (packages.length === 0) {
|
|
2934
|
-
log$1("No packages with docker config found");
|
|
2935
|
-
return { packages: [] };
|
|
2964
|
+
async function generateVscodeSettings(ctx) {
|
|
2965
|
+
const results = [];
|
|
2966
|
+
if (ctx.config.ci !== "forgejo") {
|
|
2967
|
+
results.push({
|
|
2968
|
+
filePath: SETTINGS_PATH,
|
|
2969
|
+
action: "skipped",
|
|
2970
|
+
description: "Not a Forgejo project"
|
|
2971
|
+
});
|
|
2972
|
+
return results;
|
|
2936
2973
|
}
|
|
2937
|
-
|
|
2938
|
-
|
|
2939
|
-
|
|
2940
|
-
|
|
2974
|
+
const schemaContent = readSchemaFromNodeModules(ctx.targetDir);
|
|
2975
|
+
if (!schemaContent) {
|
|
2976
|
+
results.push({
|
|
2977
|
+
filePath: SCHEMA_LOCAL_PATH,
|
|
2978
|
+
action: "skipped",
|
|
2979
|
+
description: "Could not find @bensandee/config schema in node_modules"
|
|
2980
|
+
});
|
|
2981
|
+
return results;
|
|
2941
2982
|
}
|
|
2942
|
-
|
|
2943
|
-
|
|
2944
|
-
|
|
2945
|
-
|
|
2946
|
-
|
|
2947
|
-
* 1. Build all images via runDockerBuild
|
|
2948
|
-
* 2. Login to registry
|
|
2949
|
-
* 3. Tag each image with semver variants from its own package.json version
|
|
2950
|
-
* 4. Push all tags
|
|
2951
|
-
* 5. Logout from registry
|
|
2952
|
-
*/
|
|
2953
|
-
function runDockerPublish(executor, config) {
|
|
2954
|
-
const { packages } = runDockerBuild(executor, {
|
|
2955
|
-
cwd: config.cwd,
|
|
2956
|
-
packageDir: void 0,
|
|
2957
|
-
extraArgs: []
|
|
2983
|
+
const existingSchema = ctx.read(SCHEMA_LOCAL_PATH);
|
|
2984
|
+
if (existingSchema !== void 0 && contentEqual(SCHEMA_LOCAL_PATH, existingSchema, schemaContent)) results.push({
|
|
2985
|
+
filePath: SCHEMA_LOCAL_PATH,
|
|
2986
|
+
action: "skipped",
|
|
2987
|
+
description: "Schema already up to date"
|
|
2958
2988
|
});
|
|
2959
|
-
|
|
2960
|
-
|
|
2961
|
-
|
|
2962
|
-
|
|
2963
|
-
|
|
2964
|
-
|
|
2965
|
-
|
|
2966
|
-
const loginResult = executor.exec(`echo "${config.password}" | docker login ${config.registryHost} -u ${config.username} --password-stdin`);
|
|
2967
|
-
if (loginResult.exitCode !== 0) throw new FatalError(`Docker login failed: ${loginResult.stderr}`);
|
|
2968
|
-
} else log$1("[dry-run] Skipping docker login");
|
|
2969
|
-
const allTags = [];
|
|
2970
|
-
try {
|
|
2971
|
-
for (const pkg of packages) {
|
|
2972
|
-
const tags = generateTags(pkg.version ?? "");
|
|
2973
|
-
log$1(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
|
|
2974
|
-
for (const tag of tags) {
|
|
2975
|
-
const ref = imageRef(config.registryNamespace, pkg.imageName, tag);
|
|
2976
|
-
allTags.push(ref);
|
|
2977
|
-
log$1(`Tagging ${pkg.imageName} → ${ref}`);
|
|
2978
|
-
const tagResult = executor.exec(`docker tag ${pkg.imageName} ${ref}`);
|
|
2979
|
-
if (tagResult.exitCode !== 0) throw new FatalError(`docker tag failed: ${tagResult.stderr}`);
|
|
2980
|
-
if (!config.dryRun) {
|
|
2981
|
-
log$1(`Pushing ${ref}...`);
|
|
2982
|
-
const pushResult = executor.exec(`docker push ${ref}`);
|
|
2983
|
-
if (pushResult.exitCode !== 0) throw new FatalError(`docker push failed: ${pushResult.stderr}`);
|
|
2984
|
-
} else log$1(`[dry-run] Skipping push for ${ref}`);
|
|
2985
|
-
}
|
|
2986
|
-
}
|
|
2987
|
-
} finally {
|
|
2988
|
-
if (!config.dryRun) {
|
|
2989
|
-
log$1(`Logging out from ${config.registryHost}...`);
|
|
2990
|
-
executor.exec(`docker logout ${config.registryHost}`);
|
|
2991
|
-
}
|
|
2989
|
+
else {
|
|
2990
|
+
ctx.write(SCHEMA_LOCAL_PATH, schemaContent);
|
|
2991
|
+
results.push({
|
|
2992
|
+
filePath: SCHEMA_LOCAL_PATH,
|
|
2993
|
+
action: existingSchema ? "updated" : "created",
|
|
2994
|
+
description: "Copied Forgejo workflow schema from @bensandee/config"
|
|
2995
|
+
});
|
|
2992
2996
|
}
|
|
2993
|
-
|
|
2994
|
-
return
|
|
2995
|
-
|
|
2996
|
-
|
|
2997
|
-
|
|
2997
|
+
results.push(writeSchemaToSettings(ctx));
|
|
2998
|
+
return results;
|
|
2999
|
+
}
|
|
3000
|
+
//#endregion
|
|
3001
|
+
//#region src/generators/pipeline.ts
|
|
3002
|
+
/** Run all generators sequentially and return their results. */
|
|
3003
|
+
async function runGenerators(ctx) {
|
|
3004
|
+
const results = [];
|
|
3005
|
+
results.push(await generatePackageJson(ctx));
|
|
3006
|
+
results.push(await generatePnpmWorkspace(ctx));
|
|
3007
|
+
results.push(...await generateTsconfig(ctx));
|
|
3008
|
+
results.push(await generateTsdown(ctx));
|
|
3009
|
+
results.push(await generateOxlint(ctx));
|
|
3010
|
+
results.push(await generateFormatter(ctx));
|
|
3011
|
+
results.push(...await generateLefthook(ctx));
|
|
3012
|
+
results.push(await generateGitignore(ctx));
|
|
3013
|
+
results.push(await generateKnip(ctx));
|
|
3014
|
+
results.push(await generateRenovate(ctx));
|
|
3015
|
+
results.push(await generateCi(ctx));
|
|
3016
|
+
results.push(...await generateClaudeSettings(ctx));
|
|
3017
|
+
results.push(await generateReleaseIt(ctx));
|
|
3018
|
+
results.push(await generateChangesets(ctx));
|
|
3019
|
+
results.push(await generateReleaseCi(ctx));
|
|
3020
|
+
results.push(await generateDeployCi(ctx));
|
|
3021
|
+
results.push(...await generateVitest(ctx));
|
|
3022
|
+
results.push(...await generateVscodeSettings(ctx));
|
|
3023
|
+
results.push(saveToolingConfig(ctx, ctx.config));
|
|
3024
|
+
return results;
|
|
2998
3025
|
}
|
|
2999
3026
|
//#endregion
|
|
3000
3027
|
//#region src/generators/migrate-prompt.ts
|
|
@@ -3007,7 +3034,7 @@ function generateMigratePrompt(results, config, detected) {
|
|
|
3007
3034
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString();
|
|
3008
3035
|
sections.push("# Migration Prompt");
|
|
3009
3036
|
sections.push("");
|
|
3010
|
-
sections.push(`_Generated by \`@bensandee/tooling@0.28.
|
|
3037
|
+
sections.push(`_Generated by \`@bensandee/tooling@0.28.1 repo:sync\` on ${timestamp}_`);
|
|
3011
3038
|
sections.push("");
|
|
3012
3039
|
sections.push("The following prompt was generated by `@bensandee/tooling repo:sync`. Paste it into Claude Code or another AI assistant to finish migrating this repository.");
|
|
3013
3040
|
sections.push("");
|
|
@@ -4820,7 +4847,7 @@ const dockerCheckCommand = defineCommand({
|
|
|
4820
4847
|
const main = defineCommand({
|
|
4821
4848
|
meta: {
|
|
4822
4849
|
name: "bst",
|
|
4823
|
-
version: "0.28.
|
|
4850
|
+
version: "0.28.1",
|
|
4824
4851
|
description: "Bootstrap and maintain standardized TypeScript project tooling"
|
|
4825
4852
|
},
|
|
4826
4853
|
subCommands: {
|
|
@@ -4836,7 +4863,7 @@ const main = defineCommand({
|
|
|
4836
4863
|
"docker:check": dockerCheckCommand
|
|
4837
4864
|
}
|
|
4838
4865
|
});
|
|
4839
|
-
console.log(`@bensandee/tooling v0.28.
|
|
4866
|
+
console.log(`@bensandee/tooling v0.28.1`);
|
|
4840
4867
|
async function run() {
|
|
4841
4868
|
await runMain(main);
|
|
4842
4869
|
process.exit(process.exitCode ?? 0);
|