@opndev/rzilla 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,285 @@
+ // SPDX-FileCopyrightText: 2026 Wesley Schwengle <wesleys@opperschaap.net>
+ //
+ // SPDX-License-Identifier: MIT
+
+ import fs from "node:fs/promises";
+ import path from "node:path";
+ import { execFile, spawn } from "node:child_process";
+ import { promisify } from "node:util";
+ import readline from "node:readline/promises";
+ import { stdin as input, stdout as output } from "node:process";
+
+ import { asArray } from "@opndev/util";
+
+ import { bumpSemver } from "../util.mjs";
+ import { createBuildDir } from "../builddir.mjs";
+ import { populateBuildDir } from "../populate.mjs";
+ import { readDistToml } from "../config.mjs";
+ import { readVersion, writeVersion } from "../version.mjs";
+ import { runPkg } from "./pkg.mjs";
+
+ import {
+   readChanges,
+   writeChanges,
+   nextHasEntries,
+   finalizeNextToVersion,
+   ensureNextInserted,
+ } from "../changes.mjs";
+
+ const execFileAsync = promisify(execFile);
+
+ async function sh(cmd, args, { inherit = false, cwd } = {}) {
+   if (inherit) {
+     // execFile ignores a `stdio` option, so use spawn to actually inherit stdio.
+     await new Promise((resolve, reject) => {
+       const p = spawn(cmd, args, { stdio: "inherit", cwd });
+       p.on("error", reject);
+       p.on("close", (code) =>
+         code === 0 ? resolve() : reject(new Error(`${cmd} exited with code ${code}`))
+       );
+     });
+     return { stdout: "" };
+   }
+   const { stdout } = await execFileAsync(cmd, args, { encoding: "utf8", cwd });
+   return { stdout: stdout.trimEnd() };
+ }
+
+ async function requireHumanYes(prompt) {
+   if (!process.stdout.isTTY || !process.stdin.isTTY) {
+     throw new Error("Refusing to publish: no TTY available (human confirmation required).");
+   }
+
+   const rl = readline.createInterface({ input, output });
+   try {
+     const ans = (await rl.question(`${prompt} Type 'y' to continue: `)).trim().toLowerCase();
+     if (ans !== "y" && ans !== "yes") {
+       throw new Error("Aborted: publish not confirmed.");
+     }
+   } finally {
+     rl.close();
+   }
+ }
+
+ async function gitDirtyPaths() {
+   const { stdout } = await sh("git", ["status", "--porcelain"]);
+   if (!stdout.trim()) return [];
+   return stdout
+     .split("\n")
+     .map((l) => l.slice(3).trim())
+     .filter(Boolean);
+ }
+
+ async function requireBranch(branch) {
+   const { stdout } = await sh("git", ["rev-parse", "--abbrev-ref", "HEAD"]);
+   const cur = stdout.trim();
+   if (cur !== branch) throw new Error(`Not on '${branch}' (currently on '${cur}').`);
+ }
+
+ async function readDefaultRemoteBranch(remote, { airplane }) {
+   // .git/refs/remotes/<remote>/HEAD contains: "ref: refs/remotes/<remote>/<branch>"
+   const headRefPath = `.git/refs/remotes/${remote}/HEAD`;
+
+   // Try local read first
+   try {
+     const txt = await fs.readFile(headRefPath, "utf8");
+     const m = txt.match(/refs\/remotes\/[^/]+\/(.+)\s*$/);
+     if (m) return m[1].trim();
+   } catch {
+     // ignore
+   }
+
+   if (airplane) {
+     throw new Error(`Cannot determine default branch for remote '${remote}' in airplane mode.`);
+   }
+
+   // Ask git to update the remote HEAD
+   await sh("git", ["remote", "set-head", remote, "--auto"], { inherit: true });
+
+   // Try again
+   try {
+     const txt = await fs.readFile(headRefPath, "utf8");
+     const m = txt.match(/refs\/remotes\/[^/]+\/(.+)\s*$/);
+     if (m) return m[1].trim();
+   } catch {
+     // ignore
+   }
+
+   // Last resort fetch (network)
+   await sh("git", ["fetch", remote], { inherit: true });
+
+   const txt2 = await fs.readFile(headRefPath, "utf8");
+   const m2 = txt2.match(/refs\/remotes\/[^/]+\/(.+)\s*$/);
+   if (m2) return m2[1].trim();
+
+   throw new Error(`Unable to determine default branch for remote '${remote}'.`);
+ }
+
+ async function writeBuildLicense({ buildDir, licenseCfg }) {
+   // dist.toml:
+   //   [license]
+   //   spdx = "MIT"
+   //   file = "LICENSES/MIT.txt"
+   if (!licenseCfg?.file) return;
+
+   const srcPath = licenseCfg.file;
+   const text = await fs.readFile(srcPath, "utf8");
+   await fs.writeFile(path.join(buildDir, "LICENSE"), text, "utf8");
+ }
+
+ export async function runRelease(opts = {}) {
+   const { cfg } = await readDistToml("dist.toml");
+
+   const rel = cfg.release ?? {};
+   const pre = rel.preflight ?? {};
+   const after = rel.after ?? {};
+
+   const airplane = !!pre.airplane;
+
+   const changesPath = rel.changes ?? rel.changesFile ?? "Changes";
+   const version = await readVersion(cfg);
+   const tagPrefix = rel.tagPrefix ?? "v";
+   const tag = `${tagPrefix}${version}`;
+
+   // --------- release flow ---------
+
+   await preflightChecks();
+   await testPackage();
+   await assertDirty();
+   await assertBranch();
+
+   const build = await createBuild();
+   await copyFilesAfterBuild(build);
+   await generateArtifacts(build);
+
+   await commitAndTagRelease();
+
+   await publishNpm(build);
+
+   await postRelease();
+
+   console.log(
+     `Released ${version} (${tag})${airplane ? " [airplane]" : ""}`
+   );
+   if (airplane) console.log("Airplane mode: no network actions were performed.");
+   console.log("Reminder: git push --follow-tags (if desired).");
+
+   // --------- steps ---------
+
+   async function preflightChecks() {
+     const changesBefore = await readChanges(changesPath);
+     if (!nextHasEntries(changesBefore)) {
+       throw new Error("{{ NEXT }} has no entries. Aborting release.");
+     }
+
+     // tag must not already exist
+     let tagExists = true;
+     try {
+       await sh("git", ["rev-parse", "-q", "--verify", `refs/tags/${tag}`]);
+     } catch {
+       // rev-parse failed => tag doesn't exist
+       tagExists = false;
+     }
+     if (tagExists) throw new Error(`Tag '${tag}' already exists.`);
+   }
+
+   async function testPackage() {
+     await sh("npm", ["test"], { inherit: true });
+   }
+
+   async function assertDirty() {
+     const allowedDirty = asArray(pre.dirty ?? []);
+     const dirty = await gitDirtyPaths();
+     const disallowed = dirty.filter((p) => !allowedDirty.includes(p));
+     if (disallowed.length) {
+       throw new Error(`Git working tree is dirty (disallowed): ${disallowed.join(", ")}`);
+     }
+   }
+
+   async function assertBranch() {
+     let branch = rel.branch ?? null;
+     if (!branch) {
+       const remote = cfg.repository?.remote ?? "origin";
+       branch = await readDefaultRemoteBranch(remote, { airplane });
+     }
+     await requireBranch(branch);
+   }
+
+   async function createBuild() {
+     // .build/<random>/ + .build/current
+     const { buildDir, currentPath } = await createBuildDir({ buildRoot: ".build" });
+     return { buildDir, currentPath };
+   }
+
+   async function copyFilesAfterBuild({ buildDir }) {
+     // ONLY publishable files go into build dir.
+     const g = cfg.gather ?? {};
+     const binDirs = asArray(g.bin ?? []);
+     const files = asArray(g.files ?? "lib/**/*.{mjs,cjs,js}");
+     const include = asArray(g.include ?? []);
+
+     await populateBuildDir({ buildDir, files, include, binDirs });
+   }
+
+   async function generateArtifacts({ buildDir }) {
+     // package.json into build dir; emitFiles=false because build dir is already pruned
+     await runPkg({ outDir: buildDir, emitFiles: false });
+
+     // LICENSE into build dir from [license].file
+     await writeBuildLicense({ buildDir, licenseCfg: cfg.license });
+   }
+
+   async function commitAndTagRelease() {
+     // stamp NEXT -> version + UTC(Z) timestamp (repo)
+     const changesBefore = await readChanges(changesPath);
+     const releasedChanges = finalizeNextToVersion(changesBefore, version);
+     await writeChanges(changesPath, releasedChanges);
+
+     await sh("git", ["add", changesPath], { inherit: true });
+     await sh("git", ["commit", "-m", `Release ${version}`], { inherit: true });
+
+     await sh("git", ["tag", tag], { inherit: true });
+   }
+
+   async function publishNpm({ currentPath }) {
+     if (airplane) return;
+
+     const name = String(cfg.name ?? "");
+     const isScoped = name.startsWith("@") && name.includes("/");
+
+     // default: public for scoped, otherwise no flag needed
+     const access = rel.access ?? (isScoped ? "public" : null);
+
+     const args = ["publish"];
+     if (access) args.push("--access", access);
+
+     if (!opts.yes) {
+       await requireHumanYes(
+         `Publish ${cfg.name}@${version}${access ? ` (${access})` : ""} to npm?`
+       );
+     }
+
+     // publish from .build/current
+     await sh("npm", args, { inherit: true, cwd: currentPath });
+   }
+
+   async function postRelease() {
+     // restore NEXT in Changes (repo)
+     const afterRelease = await readChanges(changesPath);
+     await writeChanges(changesPath, ensureNextInserted(afterRelease));
+
+     // bump dist.toml and regen repo-root package.json (optional)
+     const bumpType = rel.bump ?? "patch";
+     const doBump = after.bump ?? true;
+     const commitFiles = asArray(after.commit ?? []);
+     if (doBump) {
+       const nextVersion = bumpSemver(version, bumpType);
+       await writeVersion(cfg, nextVersion);
+       await runPkg({ msg: false }); // repo convenience
+       commitFiles.push(cfg.version.from);
+     }
+
+     // commit explicitly listed files (plus the bumped version file)
+     if (commitFiles.length) {
+       await sh("git", ["add", ...commitFiles], { inherit: true });
+       await sh("git", ["commit", "-m", "Prepare for next release"], { inherit: true });
+     }
+   }
+ }
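
For orientation, a minimal sketch of the dist.toml sections runRelease() consults, expressed as the parsed cfg object readDistToml() would hand back. Every name and value below is illustrative, not taken from an actual project, and the version table shape is an assumption.

// Illustrative cfg shape only; the keys mirror what runRelease() dereferences above.
const exampleCfg = {
  name: "@example/pkg",                                // hypothetical package name
  version: { from: "dist.toml" },                      // shape assumed; read via readVersion(cfg)
  repository: { remote: "origin" },                    // consulted when [release].branch is unset
  license: { spdx: "MIT", file: "LICENSES/MIT.txt" },  // copied into the build dir as LICENSE
  gather: { main: "lib/index.mjs", bin: ["bin"], files: "lib/**/*.{mjs,cjs,js}", include: [] },
  release: {
    changes: "Changes",
    tagPrefix: "v",
    branch: "main",
    bump: "patch",
    access: "public",
    preflight: { airplane: false, dirty: ["Changes"] },
    after: { bump: true, commit: ["package.json"] },
  },
};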
package/lib/config.mjs ADDED
@@ -0,0 +1,12 @@
+ // SPDX-FileCopyrightText: 2026 Wesley Schwengle <wesleys@opperschaap.net>
+ //
+ // SPDX-License-Identifier: MIT
+
+ import fs from "node:fs/promises";
+ import TOML from "@iarna/toml";
+
+ export async function readDistToml(path = "dist.toml") {
+   const text = await fs.readFile(path, "utf8");
+   const cfg = TOML.parse(text);
+   return { cfg, text };
+ }
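
A short usage sketch; the file name and key are only examples.

// readDistToml() returns both the parsed TOML table and the raw text.
const { cfg, text } = await readDistToml("dist.toml");
console.log(cfg.name); // whatever `name = "..."` is set to in dist.toml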
@@ -0,0 +1,140 @@
+ // SPDX-FileCopyrightText: 2026 Wesley Schwengle <wesleys@opperschaap.net>
+ //
+ // SPDX-License-Identifier: MIT
+
+ import fs from "node:fs/promises";
+ import fg from "fast-glob";
+ import { asArray } from "@opndev/util";
+
+ const BUILTINS = new Set([
+   "assert","buffer","child_process","cluster","console","constants","crypto","dgram",
+   "dns","domain","events","fs","http","http2","https","inspector","module","net",
+   "os","path","perf_hooks","process","punycode","querystring","readline","repl",
+   "stream","string_decoder","sys","timers","tls","trace_events","tty","url","util",
+   "v8","vm","wasi","worker_threads","zlib",
+ ]);
+
+ function isBuiltin(spec) {
+   if (spec.startsWith("node:")) return true;
+   return BUILTINS.has(spec);
+ }
+ function isRelative(spec) {
+   return spec.startsWith("./") || spec.startsWith("../") || spec.startsWith("/");
+ }
+ function pkgNameFromSpecifier(spec) {
+   if (spec.startsWith("@")) {
+     const parts = spec.split("/");
+     return parts.length >= 2 ? `${parts[0]}/${parts[1]}` : spec;
+   }
+   return spec.split("/")[0];
+ }
+ function extractSpecifiers(code) {
+   const out = new Set();
+   for (const m of code.matchAll(/\bimport\s+(?:[^'"]*?\s+from\s+)?["']([^"']+)["']/g)) out.add(m[1]);
+   for (const m of code.matchAll(/\bimport\s*\(\s*["']([^"']+)["']\s*\)/g)) out.add(m[1]);
+   for (const m of code.matchAll(/\brequire\s*\(\s*["']([^"']+)["']\s*\)/g)) out.add(m[1]);
+   return out;
+ }
+
+ function isGlobish(s) {
+   return /[*?[{\]}!()]/.test(s);
+ }
+
+ function toScanGlobs(items, exts = "{mjs,cjs,js,ts,tsx,jsx}") {
+   const list = Array.isArray(items) ? items : (items == null ? [] : [items]);
+   return list
+     .map(String)
+     .map(s => s.trim())
+     .filter(Boolean)
+     .map(s => {
+       if (isGlobish(s)) return s;
+       if (/\.(mjs|cjs|js|ts|tsx|jsx)$/i.test(s)) return s;
+       return `${s}/**/*.${exts}`;
+     });
+ }
+
+ async function scanDirs(dirsOrGlobs, ignoreGlobs) {
+   const patterns = toScanGlobs(dirsOrGlobs);
+   if (!patterns.length) return [];
+   return fg(patterns, {
+     dot: true,
+     onlyFiles: true,
+     unique: true,
+     ignore: ignoreGlobs,
+   });
+ }
+
+ async function inferFromFiles(files, ignoreSet) {
+   const found = new Set();
+   for (const f of files) {
+     const code = await fs.readFile(f, "utf8");
+     for (const spec of extractSpecifiers(code)) {
+       if (!spec) continue;
+       if (ignoreSet.has(spec)) continue;
+       if (isRelative(spec)) continue;
+       if (isBuiltin(spec)) continue;
+
+       const name = pkgNameFromSpecifier(spec);
+       if (!ignoreSet.has(name)) found.add(name);
+     }
+   }
+   return found;
+ }
+
+ export async function applyAutoPrereqs(cfg, pkg) {
+   const ap = cfg.autoprereqs ?? {};
+   if (ap.enabled === false) return;
+
+   const ignoreSet = new Set(asArray(ap.ignore ?? []).map(String));
+
+   // defaults
+   const defaultTestDirs = ["t", "test", "tests", "__tests__"];
+
+   // runtime dirs default: derive from gather if not provided
+   let runtimeDirs = asArray(ap.runtime ?? []);
+   if (!runtimeDirs.length) {
+     const g = cfg.gather ?? {};
+     const binDirs = asArray(g.bin ?? []);
+     runtimeDirs = []
+       .concat(asArray(g.files ?? []))
+       .concat(binDirs.map((d) => `${d}/**/*.{mjs,cjs,js}`));
+   }
+
+   const testDirs = asArray(ap.tests ?? defaultTestDirs);
+
+   const ignoreGlobs = [
+     "**/node_modules/**",
+     "**/.git/**",
+     "**/.build/**",
+     "**/.DS_Store",
+   ];
+
+   const runtimeFiles = await scanDirs(runtimeDirs, ignoreGlobs);
+   const testFiles = await scanDirs(testDirs, ignoreGlobs);
+
+   const runtimeDeps = await inferFromFiles(runtimeFiles, ignoreSet);
+   const testDeps = await inferFromFiles(testFiles, ignoreSet);
+
+   pkg.dependencies ??= {};
+   pkg.devDependencies ??= {};
+
+   // runtime wins: put these in dependencies
+   for (const name of runtimeDeps) {
+     if (pkg.dependencies[name] || pkg.devDependencies[name] || pkg.peerDependencies?.[name]) continue;
+     pkg.dependencies[name] = "latest";
+   }
+
+   // test-only deps go in devDependencies (unless already anywhere)
+   for (const name of testDeps) {
+     if (runtimeDeps.has(name)) continue; // already covered by dependencies
+     if (pkg.dependencies[name] || pkg.devDependencies[name] || pkg.peerDependencies?.[name]) continue;
+     pkg.devDependencies[name] = "latest";
+   }
+
+   if (!Object.keys(pkg.dependencies).length) delete pkg.dependencies;
+   if (!Object.keys(pkg.devDependencies).length) delete pkg.devDependencies;
+ }
+
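
A sketch of how applyAutoPrereqs() fills in missing dependency entries, under the assumption that a file under lib/ imports "fast-glob" and a file under t/ imports "tap"; all package names here are purely illustrative.

const cfg = { autoprereqs: { ignore: ["@example/internal"] }, gather: { files: "lib/**/*.mjs", bin: [] } };
const pkg = { dependencies: { "left-pad": "^1.0.0" } };
await applyAutoPrereqs(cfg, pkg);
// Existing entries are kept; newly inferred ones are pinned to "latest":
// pkg.dependencies    => { "left-pad": "^1.0.0", "fast-glob": "latest" }
// pkg.devDependencies => { "tap": "latest" }   (test-only import)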
@@ -0,0 +1,36 @@
+ // SPDX-FileCopyrightText: 2026 Wesley Schwengle <wesleys@opperschaap.net>
+ //
+ // SPDX-License-Identifier: MIT
+
+ import { prefixDotSlash } from "../util.mjs";
+
+ export function buildExports(cfg) {
+   const exp = cfg.exports ?? null;
+   const deny = cfg.exports?.deny ?? null;
+
+   const out = {};
+
+   // main always defines "." (unless overridden by __DOT__)
+   const main = cfg.gather?.main;
+   if (main) out["."] = prefixDotSlash(main);
+
+   if (exp && typeof exp === "object") {
+     for (const [k, v] of Object.entries(exp)) {
+       if (k === "deny") continue;
+       if (k === "__DOT__") out["."] = prefixDotSlash(v);
+       else out[`./${k}`] = prefixDotSlash(v);
+     }
+   }
+
+   if (deny && typeof deny === "object") {
+     if (Object.prototype.hasOwnProperty.call(deny, "__DOT__")) {
+       throw new Error("exports.deny may not contain __DOT__");
+     }
+     for (const k of Object.keys(deny)) {
+       delete out[`./${k}`];
+     }
+   }
+
+   return Object.keys(out).length ? out : null;
+ }
+
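
A sketch of buildExports() input and output, assuming prefixDotSlash() simply prepends "./"; the cfg below is illustrative.

const cfg = {
  gather: { main: "lib/index.mjs" },
  exports: { utils: "lib/utils.mjs", deny: { utils: true } },
};
buildExports(cfg);
// "." comes from gather.main, "./utils" is added from [exports] and then removed by deny:
// => { ".": "./lib/index.mjs" }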
@@ -0,0 +1,36 @@
+ // SPDX-FileCopyrightText: 2026 Wesley Schwengle <wesleys@opperschaap.net>
+ //
+ // SPDX-License-Identifier: MIT
+
+ import fg from "fast-glob";
+ import { asArray, uniq } from "@opndev/util";
+
+ export function gatherConfig(cfg) {
+   const g = cfg.gather ?? {};
+
+   const binGlob = g.bin ?? "bin/*.{mjs,cjs,js}";
+   const files = asArray(g.files ?? "lib/**/*.{mjs,cjs,js}");
+   const include = asArray(g.include ?? []);
+
+   return { main: g.main ?? null, binGlob, files, include };
+ }
+
+ export async function discoverBins(binGlob) {
+   const files = await fg(binGlob, { onlyFiles: true, dot: true });
+   const bin = {};
+   for (const f of files.sort()) {
+     const base = f.split("/").pop();
+     const name = base.replace(/\.(mjs|cjs|js)$/i, "");
+     bin[name] = `./${f}`;
+   }
+   return { bin, binFiles: files };
+ }
+
+ export function buildFilesList({ files, include, binGlob }) {
+   // package.json "files" supports globs; keep it clean and deterministic
+   return uniq([
+     ...asArray(files),
+     binGlob,
+     ...asArray(include),
+   ]);
+ }
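
A sketch of the gather helpers, assuming a repository that ships a single bin/rzilla.mjs script (the file name is a guess used only for illustration) and that uniq() preserves insertion order.

// discoverBins("bin/*.{mjs,cjs,js}") would then resolve to:
// { bin: { rzilla: "./bin/rzilla.mjs" }, binFiles: ["bin/rzilla.mjs"] }
buildFilesList({ files: ["lib/**/*.mjs"], include: ["Changes"], binGlob: "bin/*.{mjs,cjs,js}" });
// => ["lib/**/*.mjs", "bin/*.{mjs,cjs,js}", "Changes"]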
@@ -0,0 +1,45 @@
+ // SPDX-FileCopyrightText: 2026 Wesley Schwengle <wesleys@opperschaap.net>
+ //
+ // SPDX-License-Identifier: MIT
+
+ import { normalizePrereqValue } from "../util.mjs";
+
+ export function applyPrereqs(cfg, pkg) {
+   const prereqs = cfg.prereqs ?? {};
+   const dev = prereqs.dev ?? null;
+   const peer = prereqs.peer ?? null;
+
+   pkg.engines ??= {};
+
+   // Top-level prereqs table maps to dependencies, except node/npm → engines
+   for (const [name, val] of Object.entries(prereqs)) {
+     if (name === "dev" || name === "peer") continue;
+
+     const v = normalizePrereqValue(val);
+
+     if (name === "node") { pkg.engines.node = v; continue; }
+     if (name === "npm") { pkg.engines.npm = v; continue; }
+
+     pkg.dependencies ??= {};
+     pkg.dependencies[name] = v;
+   }
+
+   if (!Object.keys(pkg.engines).length) delete pkg.engines;
+
+   if (dev && typeof dev === "object") {
+     pkg.devDependencies ??= {};
+     for (const [name, val] of Object.entries(dev)) {
+       pkg.devDependencies[name] = normalizePrereqValue(val);
+     }
+   }
+   if (peer && typeof peer === "object") {
+     pkg.peerDependencies ??= {};
+     for (const [name, val] of Object.entries(peer)) {
+       pkg.peerDependencies[name] = normalizePrereqValue(val);
+     }
+     if (!Object.keys(pkg.peerDependencies).length) delete pkg.peerDependencies;
+   }
+ }
+
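
A sketch of the [prereqs] mapping applied by applyPrereqs(), assuming normalizePrereqValue() passes plain version strings through unchanged; the entries are illustrative.

const cfg = { prereqs: { node: ">=18", "fast-glob": "^3.0.0", dev: { tap: "^21.0.0" } } };
const pkg = {};
applyPrereqs(cfg, pkg);
// node/npm land in engines, everything else in dependencies, [prereqs.dev] in devDependencies:
// => { engines: { node: ">=18" },
//      dependencies: { "fast-glob": "^3.0.0" },
//      devDependencies: { tap: "^21.0.0" } }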
@@ -0,0 +1,141 @@
+ // SPDX-FileCopyrightText: 2026 Wesley Schwengle <wesleys@opperschaap.net>
+ //
+ // SPDX-License-Identifier: MIT
+
+ import { execFile } from "node:child_process";
+ import { promisify } from "node:util";
+ import { requireString } from "../util.mjs";
+
+ const execFileAsync = promisify(execFile);
+
+ async function sh(cmd, args) {
+   const { stdout } = await execFileAsync(cmd, args, { encoding: "utf8" });
+   return stdout.trim();
+ }
+
+ async function listRemotes() {
+   const raw = await sh("git", ["remote"]);
+   return raw.split("\n").map(s => s.trim()).filter(Boolean);
+ }
+
+ export async function pickRemote(preferred) {
+   const remotes = await listRemotes();
+   if (preferred && remotes.includes(preferred)) return preferred;
+   if (remotes.includes("origin")) return "origin";
+   if (remotes.includes("upstream")) return "upstream";
+   return remotes[0] ?? null;
+ }
+
+ export async function getRemoteUrl(remote) {
+   if (!remote) return null;
+   return sh("git", ["remote", "get-url", remote]);
+ }
+
+ export function parseRemoteUrl(remoteUrl) {
+   if (!remoteUrl) return null;
+
+   // scp-like: git@host:org/repo.git
+   const scp = remoteUrl.match(/^([^@]+)@([^:]+):(.+)$/);
+   if (scp) {
+     return {
+       host: scp[2],
+       path: scp[3].replace(/^\/*/, "").replace(/\.git$/, ""),
+     };
+   }
+
+   try {
+     const normalized = remoteUrl.startsWith("git+") ? remoteUrl.slice(4) : remoteUrl;
+     const u = new URL(normalized);
+     return {
+       host: u.host,
+       path: u.pathname.replace(/^\/*/, "").replace(/\.git$/, ""),
+     };
+   } catch {
+     return null;
+   }
+ }
+
+ export function inferProviderFromHost(host) {
+   const h = String(host ?? "").toLowerCase();
+   if (h === "github.com") return "github";
+   if (h === "gitlab.com") return "gitlab";
+   if (h === "codeberg.org") return "codeberg";
+   if (h === "bitbucket.org") return "bitbucket";
+   return null;
+ }
+
+ const PROVIDERS = {
+   github:    { issuesSuffix: "/issues" },
+   codeberg:  { issuesSuffix: "/issues" },
+   gitlab:    { issuesSuffix: "/-/issues" },
+   bitbucket: { issuesSuffix: "/issues" },
+ };
+
+ export function deriveWebAndBugs({ host, path, provider }) {
+   if (!host || !path) return { web: null, bugs: null };
+   const web = `https://${host}/${path}`;
+   const prov = provider ? PROVIDERS[provider] : null;
+   const bugs = prov ? `${web}${prov.issuesSuffix}` : null;
+   return { web, bugs };
+ }
+
+ function toHttpsUrl(url) {
+   if (!url) return null;
+
+   // git@host:user/repo.git
+   const scp = url.match(/^git@([^:]+):(.+)$/);
+   if (scp) {
+     const [, host, path] = scp;
+     return `https://${host}/${path}`;
+   }
+
+   // ssh://git@host/user/repo.git
+   const ssh = url.match(/^ssh:\/\/git@([^/]+)\/(.+)$/);
+   if (ssh) {
+     const [, host, path] = ssh;
+     return `https://${host}/${path}`;
+   }
+
+   // already http/https
+   if (url.startsWith("http://") || url.startsWith("https://")) {
+     return url;
+   }
+
+   return url; // fallback
+ }
+
+ export async function deriveRepoAndBugs({ repository, bugtracker }) {
+   // repository table presence enables the feature
+   const repo = repository ?? {};
+   const bt = bugtracker ?? {};
+
+   // Strictness: [repository].remote must be a single string (validation may be tightened later)
+   const preferredRemote = requireString(repo.remote, "[repository].remote");
+
+   const remote = await pickRemote(preferredRemote);
+   const gitUrl = repo.url ?? (await getRemoteUrl(remote));
+   const remoteUrl = toHttpsUrl(gitUrl);
+   const parsed = parseRemoteUrl(remoteUrl);
+
+   const host = parsed?.host ?? null;
+   const path = parsed?.path ?? null;
+
+   const provider = repo.provider ?? inferProviderFromHost(host);
+   const { web: derivedWeb, bugs: derivedBugs } = deriveWebAndBugs({ host, path, provider });
+
+   const web = repo.web ?? derivedWeb;
+   const bugsUrl = bt.web ?? derivedBugs;
+
+   return {
+     remote,
+     remoteUrl,
+     gitUrl,
+     provider,
+     host,
+     path,
+     web,
+     bugsUrl,
+   };
+ }
+
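
For illustration, how the URL helpers behave for a GitHub-style remote; the org/repo path is hypothetical.

parseRemoteUrl("git@github.com:example/project.git");
// => { host: "github.com", path: "example/project" }

deriveWebAndBugs({ host: "github.com", path: "example/project", provider: "github" });
// => { web: "https://github.com/example/project",
//      bugs: "https://github.com/example/project/issues" }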