@hellpig/anarchy-legal 1.11.1 → 1.12.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,125 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
function NoticeUtilsService() {
  // Splits a "name@version" id on the LAST "@", so scoped package names
  // ("@scope/pkg@1.2.3") keep their leading "@" in the name part.
  // Returns undefined when either half would be empty.
  // Shared by parseHeaderLine and collectAllHeadingIds so both apply
  // identical splitting rules (previously duplicated inline).
  function splitPackageId(full) {
    const at = full.lastIndexOf("@");
    if (at <= 0 || at === full.length - 1) return undefined;
    const name = full.slice(0, at).trim();
    const version = full.slice(at + 1).trim();
    if (!name || !version) return undefined;
    return { name, version };
  }
  // Splits a rendered third-party-licenses markdown document into
  // per-package chunks (separated by "---" horizontal rules), keeping only
  // chunks that contain an "## ..." heading.
  function splitEntriesFromMarkdown(md) {
    const parts = md.split(/\r?\n---\r?\n/g);
    return parts.filter((chunk) => /^##\s+.+/m.test(chunk));
  }
  // Extracts { name, version } from the first "## name@version" heading of a
  // chunk; undefined when the heading is absent or malformed.
  function parseHeaderLine(chunk) {
    const m = /^##\s+(.+?)\s*$/m.exec(chunk);
    if (!m) return undefined;
    return splitPackageId(m[1].trim());
  }
  // Parses one "---"-delimited chunk into a structured entry, or undefined
  // when the chunk has no valid heading.
  function parseOneEntry(chunk) {
    const header = parseHeaderLine(chunk);
    if (!header) return undefined;
    const { name, version } = header;
    const id = `${name}@${version}`;
    // Looks up a "**Label:** value" metadata line. `label` values are
    // internal string literals; they are interpolated into the regex
    // without escaping, so callers must not pass user input here.
    const field = (label) => {
      const re = new RegExp(`^\\*\\*${label}:\\*\\*\\s*(.+)\\s*$`, "mi");
      const m = re.exec(chunk);
      return m ? m[1].trim() : undefined;
    };
    const licensesStr = field("License") ?? "UNKNOWN";
    const licenses = licensesStr.split(",").map((s) => s.trim()).filter(Boolean);
    const repository = field("Repository");
    const url = field("URL");
    // Strip a trailing "<email>" from the publisher line.
    const publisher = field("Publisher")?.replace(/\s+<[^>]+>\s*$/, "").trim();
    const path2 = field("Path");
    // License text = everything after the first blank line that follows the
    // "## " heading, unless it is the "_No license text file found;..."
    // placeholder emitted by the renderer.
    let licenseText = undefined;
    {
      const lines = chunk.split(/\r?\n/);
      const firstBlankAfterHeaderIdx = (() => {
        let seenHeader = false;
        return lines.findIndex((ln) => {
          if (ln.startsWith("## ")) {
            seenHeader = true;
            return false;
          }
          return seenHeader && ln.trim() === "";
        });
      })();
      const startIdx = firstBlankAfterHeaderIdx >= 0 ? firstBlankAfterHeaderIdx + 1 : lines.length;
      const tail = lines.slice(startIdx).join("\n").trim();
      if (tail && !/^_No license text file found;/m.test(tail)) licenseText = tail;
    }
    // Prefer an explicit copyright line from the license text; fall back to
    // the publisher name when no such line exists.
    const inferredCopyright = (() => {
      if (licenseText) {
        const ln = licenseText.split(/\r?\n/).find((l) => /^\s*(?:copyright|\(c\)|©)\s+/i.test(l));
        if (ln) return ln.trim();
      }
      return publisher?.trim();
    })();
    return {
      id,
      name,
      version,
      licenses,
      repository: repository ?? undefined,
      url: url ?? undefined,
      publisher: publisher ?? undefined,
      path: path2 ?? undefined,
      licenseText,
      inferredCopyright
    };
  }
  // Parses the whole markdown document into entries sorted by name, then by
  // version. Returns a fresh array; input is never mutated.
  function parseThirdPartyMarkdown(md) {
    const chunks = splitEntriesFromMarkdown(md);
    const entries = chunks.flatMap((ch) => {
      const e = parseOneEntry(ch);
      return e ? [e] : [];
    });
    // Copy-then-sort instead of toSorted (ES2023): identical result, works
    // on ES2020 lib targets, and matches the style used elsewhere here.
    return [...entries].sort((a, b) => a.name === b.name ? a.version.localeCompare(b.version) : a.name.localeCompare(b.name));
  }
  // Collects every "name@version" id found in "## ..." headings across the
  // whole document, using the same splitting rules as parseHeaderLine.
  function collectAllHeadingIds(md) {
    const re = /^##\s+(.+?)\s*$/gm;
    const ids = new Set();
    for (const m of md.matchAll(re)) {
      const parsed = splitPackageId(String(m[1]).trim());
      if (parsed) ids.add(`${parsed.name}@${parsed.version}`);
    }
    return ids;
  }
  // Finds a NOTICE / NOTICE.txt / NOTICE.md file (case-insensitive) directly
  // inside `dir`; undefined when absent or the directory is unreadable.
  async function findUpstreamNoticeFile(dir) {
    try {
      const list = await fs.readdir(dir);
      const candidate = list.find((f) => /^(notice|notice\.txt|notice\.md)$/i.test(f));
      return candidate ? path.join(dir, candidate) : undefined;
    } catch {
      return undefined;
    }
  }
  // Loads the upstream NOTICE text, prefixing a truncation banner when the
  // file exceeds maxBytes. Returns undefined when no NOTICE exists or any
  // fs call fails (best-effort by design).
  async function loadUpstreamNotice(dir, maxBytes) {
    const p = await findUpstreamNoticeFile(dir);
    if (!p) return undefined;
    try {
      const stat = await fs.stat(p);
      const text = await fs.readFile(p, "utf8");
      if (stat.size > maxBytes) {
        // NOTE(review): the whole file is read before slicing, and slice()
        // counts UTF-16 code units while stat.size is bytes — the cut point
        // is approximate. Kept as-is to preserve output byte-for-byte.
        return `Upstream NOTICE is too large (${stat.size} bytes); truncated.\n\n` + text.slice(0, maxBytes);
      }
      return text;
    } catch {
      return undefined;
    }
  }
  return {
    collectAllHeadingIds,
    loadUpstreamNotice,
    parseThirdPartyMarkdown
  };
}
122
+ export {
123
+ NoticeUtilsService
124
+ };
125
+ //# sourceMappingURL=NoticeUtilsService.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"NoticeUtilsService.js","sources":["../../src/Services/NoticeUtilsService.ts"],"sourcesContent":["import fs from 'node:fs/promises';\nimport path from 'node:path';\n\nimport type { TNoticeUtilsService, TTemplateParsedEntry } from '@Anarchy/Legal/Models';\n\nexport function NoticeUtilsService(): TNoticeUtilsService {\n function splitEntriesFromMarkdown(md: string): ReadonlyArray<string> {\n const parts: string[] = md.split(/\\r?\\n---\\r?\\n/g);\n return parts.filter((chunk) => /^##\\s+.+/m.test(chunk));\n }\n\n function parseHeaderLine(chunk: string): { name: string; version: string } | undefined {\n const m: RegExpExecArray | null = /^##\\s+(.+?)\\s*$/m.exec(chunk);\n if (!m) return undefined;\n const full: string = m[1].trim(); // e.g. \"@babel/core@7.27.1\"\n const at: number = full.lastIndexOf('@');\n if (at <= 0 || at === full.length - 1) return undefined;\n const name: string = full.slice(0, at).trim();\n const version: string = full.slice(at + 1).trim();\n if (!name || !version) return undefined;\n return { name, version };\n }\n\n function parseOneEntry(chunk: string): TTemplateParsedEntry | undefined {\n const header: { name: string; version: string } | undefined = parseHeaderLine(chunk);\n if (!header) return undefined;\n const { name, version } = header;\n const id = `${name}@${version}`;\n\n const field = (label: string): string | undefined => {\n const re = new RegExp(`^\\\\*\\\\*${label}:\\\\*\\\\*\\\\s*(.+)\\\\s*$`, 'mi');\n const m: RegExpExecArray | null = re.exec(chunk);\n return m ? m[1].trim() : undefined;\n };\n\n const licensesStr: string = field('License') ?? 
'UNKNOWN';\n const licenses: string[] = licensesStr\n .split(',')\n .map((s: string): string => s.trim())\n .filter(Boolean);\n\n const repository: string | undefined = field('Repository');\n const url: string | undefined = field('URL');\n const publisher = field('Publisher')\n ?.replace(/\\s+<[^>]+>\\s*$/, '')\n .trim();\n const path: string | undefined = field('Path');\n\n // License text: tail after the first blank line following the header+KV area\n let licenseText: string | undefined = undefined;\n {\n const lines: string[] = chunk.split(/\\r?\\n/);\n const firstBlankAfterHeaderIdx: number = ((): number => {\n let seenHeader: boolean = false;\n return lines.findIndex((ln: string): boolean => {\n if (ln.startsWith('## ')) {\n seenHeader = true;\n return false;\n }\n return seenHeader && ln.trim() === '';\n });\n })();\n const startIdx: number = firstBlankAfterHeaderIdx >= 0 ? firstBlankAfterHeaderIdx + 1 : lines.length;\n const tail: string = lines.slice(startIdx).join('\\n').trim();\n if (tail && !/^_No license text file found;/m.test(tail)) licenseText = tail;\n }\n\n const inferredCopyright: string | undefined = ((): string | undefined => {\n if (licenseText) {\n const ln: string | undefined = licenseText.split(/\\r?\\n/).find((l: string): boolean => /^\\s*(?:copyright|\\(c\\)|©)\\s+/i.test(l));\n if (ln) return ln.trim();\n }\n return publisher?.trim();\n })();\n\n return {\n id,\n name,\n version,\n licenses,\n repository: repository ?? undefined,\n url: url ?? undefined,\n publisher: publisher ?? undefined,\n path: path ?? undefined,\n licenseText,\n inferredCopyright\n };\n }\n\n function parseThirdPartyMarkdown(md: string): ReadonlyArray<TTemplateParsedEntry> {\n const chunks: ReadonlyArray<string> = splitEntriesFromMarkdown(md);\n const entries: TTemplateParsedEntry[] = chunks.flatMap((ch) => {\n const e: TTemplateParsedEntry | undefined = parseOneEntry(ch);\n return e ? [e] : [];\n });\n return entries.toSorted((a, b) => (a.name === b.name ? 
a.version.localeCompare(b.version) : a.name.localeCompare(b.name)));\n }\n\n function collectAllHeadingIds(md: string): ReadonlySet<string> {\n const re = /^##\\s+(.+?)\\s*$/gm;\n return [...md.matchAll(re)].reduce<Set<string>>((ids: Set<string>, m: RegExpExecArray) => {\n const full: string = String(m[1]).trim();\n const at: number = full.lastIndexOf('@');\n if (at > 0 && at < full.length - 1) {\n ids.add(`${full.slice(0, at).trim()}@${full.slice(at + 1).trim()}`);\n }\n return ids;\n }, new Set<string>());\n }\n\n async function findUpstreamNoticeFile(dir: string): Promise<string | undefined> {\n try {\n const list: string[] = await fs.readdir(dir);\n const candidate = list.find((f) => /^(notice|notice\\.txt|notice\\.md)$/i.test(f));\n return candidate ? path.join(dir, candidate) : undefined;\n } catch {\n return undefined;\n }\n }\n\n async function loadUpstreamNotice(dir: string, maxBytes: number): Promise<string | undefined> {\n const p = await findUpstreamNoticeFile(dir);\n if (!p) return undefined;\n try {\n const stat = await fs.stat(p);\n const text: string = await fs.readFile(p, 'utf8');\n if (stat.size > maxBytes) {\n return `Upstream NOTICE is too large (${stat.size} bytes); truncated.\\n\\n` + text.slice(0, maxBytes);\n }\n return text;\n } catch {\n return undefined;\n }\n }\n\n return {\n collectAllHeadingIds,\n loadUpstreamNotice,\n parseThirdPartyMarkdown\n 
};\n}\n"],"names":["path"],"mappings":";;AAKO,SAAS,qBAA0C;AACxD,WAAS,yBAAyB,IAAmC;AACnE,UAAM,QAAkB,GAAG,MAAM,gBAAgB;AACjD,WAAO,MAAM,OAAO,CAAC,UAAU,YAAY,KAAK,KAAK,CAAC;AAAA,EACxD;AAEA,WAAS,gBAAgB,OAA8D;AACrF,UAAM,IAA4B,mBAAmB,KAAK,KAAK;AAC/D,QAAI,CAAC,EAAG,QAAO;AACf,UAAM,OAAe,EAAE,CAAC,EAAE,KAAA;AAC1B,UAAM,KAAa,KAAK,YAAY,GAAG;AACvC,QAAI,MAAM,KAAK,OAAO,KAAK,SAAS,EAAG,QAAO;AAC9C,UAAM,OAAe,KAAK,MAAM,GAAG,EAAE,EAAE,KAAA;AACvC,UAAM,UAAkB,KAAK,MAAM,KAAK,CAAC,EAAE,KAAA;AAC3C,QAAI,CAAC,QAAQ,CAAC,QAAS,QAAO;AAC9B,WAAO,EAAE,MAAM,QAAA;AAAA,EACjB;AAEA,WAAS,cAAc,OAAiD;AACtE,UAAM,SAAwD,gBAAgB,KAAK;AACnF,QAAI,CAAC,OAAQ,QAAO;AACpB,UAAM,EAAE,MAAM,QAAA,IAAY;AAC1B,UAAM,KAAK,GAAG,IAAI,IAAI,OAAO;AAE7B,UAAM,QAAQ,CAAC,UAAsC;AACnD,YAAM,KAAK,IAAI,OAAO,UAAU,KAAK,wBAAwB,IAAI;AACjE,YAAM,IAA4B,GAAG,KAAK,KAAK;AAC/C,aAAO,IAAI,EAAE,CAAC,EAAE,SAAS;AAAA,IAC3B;AAEA,UAAM,cAAsB,MAAM,SAAS,KAAK;AAChD,UAAM,WAAqB,YACxB,MAAM,GAAG,EACT,IAAI,CAAC,MAAsB,EAAE,KAAA,CAAM,EACnC,OAAO,OAAO;AAEjB,UAAM,aAAiC,MAAM,YAAY;AACzD,UAAM,MAA0B,MAAM,KAAK;AAC3C,UAAM,YAAY,MAAM,WAAW,GAC/B,QAAQ,kBAAkB,EAAE,EAC7B,KAAA;AACH,UAAMA,QAA2B,MAAM,MAAM;AAG7C,QAAI,cAAkC;AACtC;AACE,YAAM,QAAkB,MAAM,MAAM,OAAO;AAC3C,YAAM,4BAAoC,MAAc;AACtD,YAAI,aAAsB;AAC1B,eAAO,MAAM,UAAU,CAAC,OAAwB;AAC9C,cAAI,GAAG,WAAW,KAAK,GAAG;AACxB,yBAAa;AACb,mBAAO;AAAA,UACT;AACA,iBAAO,cAAc,GAAG,KAAA,MAAW;AAAA,QACrC,CAAC;AAAA,MACH,GAAA;AACA,YAAM,WAAmB,4BAA4B,IAAI,2BAA2B,IAAI,MAAM;AAC9F,YAAM,OAAe,MAAM,MAAM,QAAQ,EAAE,KAAK,IAAI,EAAE,KAAA;AACtD,UAAI,QAAQ,CAAC,iCAAiC,KAAK,IAAI,EAAG,eAAc;AAAA,IAC1E;AAEA,UAAM,qBAAyC,MAA0B;AACvE,UAAI,aAAa;AACf,cAAM,KAAyB,YAAY,MAAM,OAAO,EAAE,KAAK,CAAC,MAAuB,gCAAgC,KAAK,CAAC,CAAC;AAC9H,YAAI,GAAI,QAAO,GAAG,KAAA;AAAA,MACpB;AACA,aAAO,WAAW,KAAA;AAAA,IACpB,GAAA;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY,cAAc;AAAA,MAC1B,KAAK,OAAO;AAAA,MACZ,WAAW,aAAa;AAAA,MACxB,MAAMA,SAAQ;AAAA,MACd;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAEA,WAAS,wBAAwB,IAAiD;AAChF,UAAM,SAAgC,yBAAyB,EAAE;AACjE,UAAM,UAAkC,OAAO,QAAQ,CAAC,OAAO;AAC7D,YAAM,IAAsC,cAAc,EAAE;AAC5D,aAAO,IAAI
,CAAC,CAAC,IAAI,CAAA;AAAA,IACnB,CAAC;AACD,WAAO,QAAQ,SAAS,CAAC,GAAG,MAAO,EAAE,SAAS,EAAE,OAAO,EAAE,QAAQ,cAAc,EAAE,OAAO,IAAI,EAAE,KAAK,cAAc,EAAE,IAAI,CAAE;AAAA,EAC3H;AAEA,WAAS,qBAAqB,IAAiC;AAC7D,UAAM,KAAK;AACX,WAAO,CAAC,GAAG,GAAG,SAAS,EAAE,CAAC,EAAE,OAAoB,CAAC,KAAkB,MAAuB;AACxF,YAAM,OAAe,OAAO,EAAE,CAAC,CAAC,EAAE,KAAA;AAClC,YAAM,KAAa,KAAK,YAAY,GAAG;AACvC,UAAI,KAAK,KAAK,KAAK,KAAK,SAAS,GAAG;AAClC,YAAI,IAAI,GAAG,KAAK,MAAM,GAAG,EAAE,EAAE,KAAA,CAAM,IAAI,KAAK,MAAM,KAAK,CAAC,EAAE,KAAA,CAAM,EAAE;AAAA,MACpE;AACA,aAAO;AAAA,IACT,GAAG,oBAAI,KAAa;AAAA,EACtB;AAEA,iBAAe,uBAAuB,KAA0C;AAC9E,QAAI;AACF,YAAM,OAAiB,MAAM,GAAG,QAAQ,GAAG;AAC3C,YAAM,YAAY,KAAK,KAAK,CAAC,MAAM,qCAAqC,KAAK,CAAC,CAAC;AAC/E,aAAO,YAAY,KAAK,KAAK,KAAK,SAAS,IAAI;AAAA,IACjD,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,iBAAe,mBAAmB,KAAa,UAA+C;AAC5F,UAAM,IAAI,MAAM,uBAAuB,GAAG;AAC1C,QAAI,CAAC,EAAG,QAAO;AACf,QAAI;AACF,YAAM,OAAO,MAAM,GAAG,KAAK,CAAC;AAC5B,YAAM,OAAe,MAAM,GAAG,SAAS,GAAG,MAAM;AAChD,UAAI,KAAK,OAAO,UAAU;AACxB,eAAO,iCAAiC,KAAK,IAAI;AAAA;AAAA,IAA4B,KAAK,MAAM,GAAG,QAAQ;AAAA,MACrG;AACA,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;"}
@@ -0,0 +1,450 @@
1
+ import { spawn } from "node:child_process";
2
+ import fs from "node:fs/promises";
3
+ import { createRequire } from "node:module";
4
+ import path from "node:path";
5
+ import { globby } from "globby";
6
function RepoUtilsService() {
  // Mutable debug flag, toggled via setDebugMode() and read via isDebug().
  let isDebug = false;
  const setDebugMode = (debug) => void (isDebug = debug);
  // Reads and parses a JSON file; throws on missing file or invalid JSON.
  const readJson = async (p) => JSON.parse(await fs.readFile(p, "utf8"));
  // True when the path exists and is accessible.
  const isExist = async (p) => {
    try {
      await fs.access(p);
      return true;
    } catch {
      return false;
    }
  };
  // Prints to stdout only when the passed flag is truthy.
  function debugLog(isDebug2, ...args) {
    if (isDebug2) console.log("[debug]", ...args);
  }
  // True when package.json declares a non-empty "workspaces" field, in
  // either the array form or the { packages: [...] } object form.
  function hasWorkspacesField(pkg) {
    const ws = pkg?.workspaces;
    if (!ws) return false;
    if (Array.isArray(ws)) return ws.length > 0;
    if (typeof ws === "object" && Array.isArray(ws.packages)) return ws.packages.length > 0;
    return false;
  }
  // Loads all workspace packages declared by rootDir/package.json into a
  // Map keyed by package name. Directories without a named package.json are
  // skipped. NOTE(review): near-duplicate of loadRoot() below, but without
  // expandDirectories:false — presumably intentional; confirm before merging.
  async function loadWorkspaces(rootDir) {
    const rootPkg = await readJson(path.join(rootDir, "package.json"));
    const patterns = Array.isArray(rootPkg.workspaces) ? rootPkg.workspaces : rootPkg.workspaces?.packages ?? [];
    if (!patterns.length) throw new Error(`No workspaces patterns in ${path.join(rootDir, "package.json")}`);
    const dirs = await globby(patterns, {
      cwd: rootDir,
      absolute: true,
      onlyDirectories: true,
      gitignore: true,
      ignore: ["**/node_modules/**", "**/dist/**", "**/dist-*/**", "**/.*/**"]
    });
    const entries = (await Promise.all(
      dirs.map(async (dir) => {
        const pkgPath = path.join(dir, "package.json");
        if (!await isExist(pkgPath)) return undefined;
        const pkg = await readJson(pkgPath);
        const name = typeof pkg.name === "string" ? pkg.name : undefined;
        return name ? [name, { name, dir, pkgPath, pkg }] : undefined;
      })
    )).filter(Boolean);
    return new Map(entries);
  }
  // Walks up from startDir (max 50 levels) looking for a package.json with a
  // non-empty "workspaces" field; throws when none is found.
  async function findMonorepoRoot(startDir) {
    const start = path.resolve(startDir);
    debugLog(isDebug, "findMonorepoRoot: start at", start);
    const searchUp = async (dir, depth) => {
      if (depth > 50) return undefined;
      const pkgPath = path.join(dir, "package.json");
      debugLog(isDebug, "check", pkgPath);
      if (await isExist(pkgPath)) {
        try {
          const pkg = await readJson(pkgPath);
          if (hasWorkspacesField(pkg)) {
            debugLog(isDebug, "found workspaces at", pkgPath);
            return dir;
          }
        } catch (e) {
          // Unparseable package.json is non-fatal here; keep walking up.
          debugLog(isDebug, " ! failed to parse", pkgPath, "-", e.message);
        }
      }
      const parent = path.dirname(dir);
      if (parent === dir) return undefined; // filesystem root reached
      return searchUp(parent, depth + 1);
    };
    const found = await searchUp(start, 0);
    if (!found) throw new Error(`Monorepo root not found starting from "${startDir}". Provide --root explicitly pointing to a package.json with "workspaces".`);
    return found;
  }
  // Loads the monorepo root package.json and all workspace packages it
  // declares; throws when the root or its workspaces are missing/empty.
  async function loadRoot(rootDir) {
    const rootPkgPath = path.join(rootDir, "package.json");
    if (!await isExist(rootPkgPath)) throw new Error(`Root package.json not found at: ${rootPkgPath}`);
    const rootPkg = await readJson(rootPkgPath);
    const wsField = rootPkg.workspaces;
    if (!wsField) throw new Error(`"workspaces" not found in root package.json at ${rootPkgPath}`);
    const patterns = Array.isArray(wsField) ? wsField : wsField.packages ?? [];
    if (patterns.length === 0) throw new Error(`"workspaces" has no packages in ${rootPkgPath}`);
    debugLog(isDebug, "workspaces patterns:", patterns);
    const dirs = await globby(patterns, {
      cwd: rootDir,
      onlyDirectories: true,
      absolute: true,
      expandDirectories: false,
      gitignore: true,
      ignore: ["**/node_modules/**", "**/dist/**", "**/dist-*/**", "**/.*/**"]
    });
    debugLog(isDebug, "workspace dirs found:", dirs.length);
    const entries = (await Promise.all(
      dirs.map(async (dir) => {
        const pkgPath = path.join(dir, "package.json");
        if (!await isExist(pkgPath)) return undefined;
        const pkg = await readJson(pkgPath);
        if (!pkg.name) return undefined;
        return [pkg.name, { name: pkg.name, dir, pkgPath, pkg }];
      })
    )).filter(Boolean);
    debugLog(isDebug, "workspace packages loaded:", entries.length);
    if (entries.length === 0) throw new Error(`No workspace package.json files found by patterns: ${patterns.join(", ")}`);
    return {
      rootDir,
      rootPkgPath,
      rootPkg,
      workspaces: new Map(entries)
    };
  }
  // Builds workspace-name -> Set<workspace-name> edges from each package's
  // prod "dependencies" (devDependencies are ignored).
  function buildWsGraph(ws) {
    const names = new Set(ws.keys());
    const graph = Array.from(ws.entries()).reduce((acc, [name, info]) => {
      const deps = info.pkg.dependencies ?? {};
      const edges = new Set(Object.keys(deps).filter((dependencyName) => names.has(dependencyName)));
      acc.set(name, edges);
      return acc;
    }, new Map());
    if (isDebug) {
      console.log("[debug] workspace graph:");
      graph.forEach((v, k) => console.log(" ", k, "->", [...v].join(", ") || "∅"));
    }
    return graph;
  }
  // Throws when the prod-dependency graph reachable from `start` contains a
  // cycle; the error message lists the full cycle path. Coloring DFS:
  // `temp` = on the current recursion stack, `perm` = fully explored.
  function assertNoCycles(graph, start) {
    const temp = new Set();
    const perm = new Set();
    const pathStack = [];
    const dfs = (u) => {
      if (perm.has(u)) return;
      if (temp.has(u)) {
        const idx = pathStack.lastIndexOf(u);
        const cyclePath = [...pathStack.slice(idx), u].join(" -> ");
        throw new Error(`Cycle detected between workspaces (prod deps): ${cyclePath}`);
      }
      temp.add(u);
      // BUG FIX: the node must actually be pushed onto the path stack.
      // Previously the stack was only copied (pathStack = [...pathStack]),
      // so it stayed empty and reported cycle paths were always a single
      // node instead of the full "a -> b -> a" chain.
      pathStack.push(u);
      (graph.get(u) ?? new Set()).forEach(dfs);
      pathStack.pop();
      temp.delete(u);
      perm.add(u);
    };
    dfs(start);
  }
  // Returns the set of workspaces reachable from `start` (inclusive).
  function collectWorkspaceClosure(graph, start) {
    const visited = new Set();
    const visit = (u) => {
      if (visited.has(u)) return;
      visited.add(u);
      (graph.get(u) ?? new Set()).forEach(visit);
    };
    visit(start);
    return visited;
  }
  // Runs `npm ls -w <workspace> --json --omit=dev --all --long` in rootDir
  // and normalizes the output into a plain dependency tree.
  // NOTE(review): spawns the bare "npm" binary — on Windows this may need
  // "npm.cmd" or shell:true; confirm against supported platforms.
  async function npmLsJson(rootDir, workspace) {
    return new Promise((resolve, reject) => {
      const args = ["ls", "-w", workspace, "--json", "--omit=dev", "--all", "--long"];
      debugLog(isDebug, "spawn:", "npm", args.join(" "), "cwd:", rootDir);
      const child = spawn("npm", args, { cwd: rootDir, stdio: ["ignore", "pipe", "pipe"] });
      let out = "";
      let err = "";
      child.stdout.on("data", (d) => out += String(d));
      child.stderr.on("data", (d) => err += String(d));
      // ROBUSTNESS FIX: without an "error" handler a missing npm binary
      // raised an unhandled 'error' event and this promise never settled.
      child.on("error", (e) => reject(new Error(`Failed to spawn npm: ${e.message}`)));
      child.on("close", (code) => {
        // npm ls exits non-zero on peer-dep problems while still printing
        // usable JSON, so only fail when there is no output at all.
        if (code !== 0 && !out) return reject(new Error(`npm ls failed (code ${code}): ${err || "unknown error"}`));
        try {
          const json = JSON.parse(out);
          // Best absolute install path available on a node: path, then
          // realpath, then location (only when already absolute).
          const normPath = (o) => typeof o?.path === "string" ? o.path : typeof o?.realpath === "string" ? o.realpath : typeof o?.location === "string" ? (path.isAbsolute(o.location) ? o.location : undefined) : undefined;
          const toNode = (name, o) => ({
            name,
            version: typeof o?.version === "string" ? o.version : "0.0.0",
            path: normPath(o),
            license: o?.license,
            repository: o?.repository,
            dependencies: o?.dependencies ? Object.fromEntries(Object.entries(o.dependencies).map(([k, v]) => [k, toNode(k, v)])) : undefined
          });
          const rootNode = {
            name: json?.name ?? workspace,
            version: json?.version ?? "0.0.0",
            path: normPath(json),
            license: json?.license,
            repository: json?.repository,
            dependencies: json?.dependencies ? Object.fromEntries(Object.entries(json.dependencies).map(([k, v]) => [k, toNode(k, v)])) : undefined
          };
          debugLog(isDebug, "npm ls parsed root:", rootNode.name, rootNode.version);
          return resolve(rootNode);
        } catch (e) {
          return reject(new Error(`Failed to parse npm ls JSON: ${e.message}\nRaw: ${out.slice(0, 2000)}`));
        }
      });
    });
  }
  // Collects the external (non-workspace) prod dependency names declared by
  // any workspace in `closure`; these seed the third-party walk below.
  function collectExternalSeedNames(closure, wsMap, wsNames) {
    const seeds = new Set();
    [...closure].forEach((wsName) => {
      const info = wsMap.get(wsName);
      if (!info) return;
      const deps = info.pkg.dependencies ?? {};
      Object.keys(deps).forEach((dependencyName) => {
        if (!wsNames.has(dependencyName)) seeds.add(dependencyName);
      });
    });
    return seeds;
  }
  // Walks the npm ls tree and collects third-party packages reachable
  // through a seed dependency, keyed by "name@version". Workspace packages
  // and placeholder "0.0.0" versions are excluded; a later sighting may fill
  // in a missing installPath but never overwrites a known one.
  function collectThirdPartyMap(root, wsNames, seedNames) {
    const acc = new Map();
    if (!root || !root.dependencies) return acc;
    if (seedNames.size === 0) return acc;
    const visit = (node, inside) => {
      const isWs = wsNames.has(node.name);
      const nowInside = inside || seedNames.has(node.name);
      if (nowInside && !isWs && node.version && node.version !== "0.0.0") {
        const id = `${node.name}@${node.version}`;
        const prev = acc.get(id);
        const installPath = node.path;
        if (!prev) acc.set(id, { id, name: node.name, version: node.version, installPath });
        else if (!prev.installPath && installPath) acc.set(id, { ...prev, installPath });
      }
      if (node.dependencies) Object.values(node.dependencies).forEach((child) => visit(child, nowInside));
    };
    Object.values(root.dependencies).forEach((child) => visit(child, false));
    debugLog(isDebug, "third-party collected (seed-filtered):", acc.size);
    return acc;
  }
  // Resolves a package's install directory via Node's resolution algorithm,
  // anchored at fromDir; undefined when unresolvable (e.g. no "exports"
  // entry for package.json).
  function resolvePackageDir(pkgName, fromDir) {
    try {
      const req = createRequire(path.join(fromDir, "package.json"));
      const p = req.resolve(`${pkgName}/package.json`);
      return path.dirname(p);
    } catch {
      return undefined;
    }
  }
  // Fills in installPath for collected entries that npm ls did not locate,
  // trying resolution from the workspace dir first, then the root dir.
  // Mutates `collected` in place by design.
  function fillMissingInstallPaths(collected, wsDir, rootDir) {
    let filled = 0;
    Array.from(collected.entries()).forEach(([id, item]) => {
      if (!item.installPath) {
        const p = resolvePackageDir(item.name, wsDir) ?? resolvePackageDir(item.name, rootDir);
        if (p) {
          collected.set(id, { ...item, installPath: p });
          filled++;
        }
      }
    });
    debugLog(isDebug, "install paths filled via resolver:", filled);
  }
  // Finds a LICENSE/LICENCE/COPYING/UNLICENSE/NOTICE file (any extension,
  // case-insensitive) directly inside `dir`.
  async function findLicenseFile(dir) {
    try {
      const list = await fs.readdir(dir);
      const c = list.find((f) => {
        const base = f.toLowerCase();
        return /^(license|licence|copying|unlicense|notice)(\..+)?$/.test(base);
      });
      return c ? path.join(dir, c) : undefined;
    } catch {
      return undefined;
    }
  }
  // Extracts the file path from an SPDX "SEE LICENSE IN <file>" expression,
  // accepting either the string or { type } object form of the field.
  function parseSeeLicenseIn(licenseField) {
    if (!licenseField) return undefined;
    const s = typeof licenseField === "string" ? licenseField : typeof licenseField?.type === "string" ? licenseField.type : undefined;
    if (!s) return undefined;
    const m = /see\s+license\s+in\s+(.+)$/i.exec(s);
    return m?.[1]?.trim();
  }
  // Reads license text for a package: first an explicit "SEE LICENSE IN"
  // target, then a conventional license file. All fs failures are swallowed
  // (best-effort by design) and yield undefined.
  async function tryReadLicenseText(pkgDir, licenseField) {
    const see = parseSeeLicenseIn(licenseField);
    if (see) {
      const p = path.join(pkgDir, see);
      if (await isExist(p)) {
        try {
          return await fs.readFile(p, "utf8");
        } catch {
        }
      }
    }
    const license = await findLicenseFile(pkgDir);
    if (license) {
      try {
        return await fs.readFile(license, "utf8");
      } catch {
      }
    }
    return undefined;
  }
  // Narrows an unknown value to string or undefined.
  const safeString = (v) => typeof v === "string" ? v : undefined;
  // Normalizes the many historical shapes of package.json "license(s)"
  // (string, { type }, array of either) into a string or string[] value,
  // defaulting to "UNKNOWN".
  function normalizeLicenseValue(licenseField) {
    if (!licenseField) return "UNKNOWN";
    if (typeof licenseField === "string") return licenseField;
    if (Array.isArray(licenseField)) {
      const arr = licenseField.map((x) => typeof x === "string" ? x : typeof x?.type === "string" ? x.type : "UNKNOWN");
      return arr.length > 0 ? arr : "UNKNOWN";
    }
    if (typeof licenseField === "object") {
      const t = licenseField?.type;
      if (typeof t === "string") return t;
    }
    return "UNKNOWN";
  }
  // Reads license/repository/author/homepage metadata from a package's
  // package.json; returns {} on any failure.
  async function readPackageMeta(pkgDir) {
    try {
      const pkg = await readJson(path.join(pkgDir, "package.json"));
      const repo = typeof pkg.repository === "string" ? pkg.repository : typeof pkg.repository?.url === "string" ? pkg.repository.url : undefined;
      return {
        licenseField: pkg.license ?? pkg.licenses,
        repository: repo,
        // author may be an object ({ name, email }) or a plain string.
        publisher: safeString(pkg.author?.name) ?? safeString(pkg.author) ?? undefined,
        email: safeString(pkg.author?.email) ?? undefined,
        url: safeString(pkg.homepage) ?? undefined
      };
    } catch {
      return {};
    }
  }
  // Builds render-ready license entries for collected third-party packages,
  // sorted by name then version. Entries without an installPath keep
  // "UNKNOWN" license and no text.
  async function buildLicenseEntries(collected) {
    const list = await Promise.all(
      Array.from(collected.values()).map(async ({ id, name, version, installPath }) => {
        let licenseText;
        let licenseType = "UNKNOWN";
        let repository;
        let publisher;
        let email;
        let url;
        if (installPath) {
          const meta = await readPackageMeta(installPath);
          licenseType = normalizeLicenseValue(meta.licenseField);
          repository = meta.repository;
          publisher = meta.publisher;
          email = meta.email;
          url = meta.url;
          licenseText = await tryReadLicenseText(installPath, meta.licenseField);
        }
        return {
          id,
          name,
          version,
          licenses: licenseType,
          licenseText,
          repository,
          publisher,
          email,
          url,
          path: installPath
        };
      })
    );
    return [...list].sort((a, b) => a.name === b.name ? a.version.localeCompare(b.version) : a.name.localeCompare(b.name));
  }
  // Builds license entries for the workspace packages themselves (optionally
  // excluding one, typically the target workspace), sorted like above.
  async function buildWorkspaceLicenseEntries(names, wsMap, excludeName) {
    const filtered = [...names].filter((name) => !(excludeName && name === excludeName));
    const entries = await Promise.all(
      filtered.map((name) => wsMap.get(name)).filter((info) => Boolean(info)).map(async (info) => {
        const version = info.pkg.version ?? "0.0.0";
        const meta = await readPackageMeta(info.dir);
        const licenseType = normalizeLicenseValue(meta.licenseField);
        const licenseText = await tryReadLicenseText(info.dir, meta.licenseField);
        return {
          id: `${info.name}@${version}`,
          name: info.name,
          version,
          licenses: licenseType,
          licenseText,
          repository: meta.repository,
          publisher: meta.publisher,
          email: meta.email,
          url: meta.url,
          path: info.dir
        };
      })
    );
    return [...entries].sort((a, b) => a.name === b.name ? a.version.localeCompare(b.version) : a.name.localeCompare(b.name));
  }
  // Renders entries to the THIRD-PARTY markdown document that the
  // NoticeUtilsService parser reads back ("---"-separated "## name@version"
  // chunks with "**Label:** value" lines).
  // NOTE(review): "transition dependencies" below likely means "transitive";
  // kept verbatim so emitted documents stay byte-identical.
  function renderMarkdown(workspaceLabel, items, emptyNote) {
    const header = [`# Third-Party Licenses`, `## Application: ${workspaceLabel}`, `Production dependencies (including transition dependencies): ${items.length}`, ``];
    const note = items.length === 0 && emptyNote ? [`**Note:** ${emptyNote}`, ``] : [];
    const body = items.flatMap((it) => {
      const licenseStr = Array.isArray(it.licenses) ? it.licenses.join(", ") : String(it.licenses ?? "UNKNOWN");
      return [
        `---`,
        ``,
        `## ${it.name}@${it.version}`,
        `**License:** ${licenseStr}\n`,
        ...it.repository ? [`**Repository:** ${it.repository}\n`] : [],
        ...it.url ? [`**URL:** ${it.url}\n`] : [],
        ...it.publisher ? [`**Publisher:** ${it.publisher}${it.email ? ` <${it.email}>` : ""}\n`] : [],
        ``,
        ...it.licenseText ? [it.licenseText.trim(), ``] : [`_No license text file found; relying on package metadata._`, ``]
      ];
    });
    return [...header, ...note, ...body].join("\n");
  }
  // Resolves a --workspace argument to a workspace entry: by exact name, by
  // directory path, or — for ".", "root", ":root", or the root path — a
  // synthetic entry for the monorepo root itself.
  async function resolveWorkspaceFromArg(arg, workspaces, rootDir) {
    const byName = workspaces.get(arg);
    if (byName) return byName;
    const asPath = path.isAbsolute(arg) ? arg : path.join(rootDir, arg);
    const norm = path.resolve(asPath);
    const found = [...workspaces.values()].find((w) => path.resolve(w.dir) === norm);
    if (found) return found;
    const wantRoot = norm === path.resolve(rootDir) || arg === ":root" || arg === "root" || arg === ".";
    if (wantRoot) {
      const pkgPath = path.join(rootDir, "package.json");
      const pkg = await readJson(pkgPath);
      const name = typeof pkg.name === "string" ? pkg.name : "monorepo-root";
      return { name, dir: rootDir, pkgPath, pkg };
    }
    throw new Error(`Workspace "${arg}" not found by name or path. Tip: use "--workspace ." or "--workspace :root" to target the monorepo root.`);
  }
  return {
    assertNoCycles,
    buildLicenseEntries,
    buildWorkspaceLicenseEntries,
    buildWsGraph,
    collectExternalSeedNames,
    collectThirdPartyMap,
    collectWorkspaceClosure,
    debugLog,
    fillMissingInstallPaths,
    findMonorepoRoot,
    isDebug: () => isDebug,
    isExist,
    loadRoot,
    loadWorkspaces,
    npmLsJson,
    readJson,
    renderMarkdown,
    resolveWorkspaceFromArg,
    setDebugMode
  };
}
447
+ export {
448
+ RepoUtilsService
449
+ };
450
+ //# sourceMappingURL=RepoUtilsService.js.map