@hellpig/anarchy-legal 1.11.1 → 1.12.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,1114 +1,10 @@
- import path from "node:path";
- import { pathToFileURL, fileURLToPath } from "node:url";
- import yargs from "yargs";
- import { hideBin } from "yargs/helpers";
- import fs from "node:fs/promises";
- import { UTCDate } from "@date-fns/utc";
- import { parseISO, isValid } from "date-fns";
- import { format } from "date-fns/format";
- import { globby } from "globby";
- import { spawn } from "node:child_process";
- import { createRequire } from "node:module";
- const LegalDocumentType = {
- DISCLAIMER: "DISCLAIMER",
- EULA: "EULA",
- EU_DECLARATION_OF_CONFORMITY: "EU_DECLARATION_OF_CONFORMITY",
- INSTRUCTIONS: "INSTRUCTIONS",
- PRIVACY: "PRIVACY",
- SECURITY: "SECURITY",
- SUPPORT: "SUPPORT",
- TECHNICAL_DOCUMENTATION: "TECHNICAL_DOCUMENTATION",
- VULN_HANDLING: "VULN_HANDLING"
- };
- function LegalFilesUtilsService(repoUtilsService) {
- const { debugLog, isExist, isDebug } = repoUtilsService;
- async function readConfig(wsDir) {
- const candidates = [path.join(wsDir, "anarchy-legal.config.js"), path.join(wsDir, "anarchy-legal.config.mjs"), path.join(wsDir, "anarchy-legal.config.cjs")];
- const found = await candidates.reduce(async (prevPromise, p) => {
- const prev = await prevPromise;
- if (prev) return prev;
- return await isExist(p) ? p : void 0;
- }, Promise.resolve(void 0));
- if (!found) {
- debugLog(isDebug(), "config: <none> (no JS config found)");
- return {};
- }
- try {
- const mod = await import(pathToFileURL(found).href);
- const exported = mod && "default" in mod ? mod.default : mod;
- if (!exported || typeof exported !== "object" || Array.isArray(exported)) throw new Error("Invalid config export. Expected an object like { GENERIC: {...}, EULA: {...}, ... }.");
- const obj = exported;
- const processConfigSection = (k) => {
- const v = obj[k];
- if (v === void 0) return void 0;
- if (!v || typeof v !== "object" || Array.isArray(v)) {
- console.warn(`[warn] anarchy-legal.config: section "${k}" must be an object; got ${typeof v}. Skipped.`);
- return void 0;
- }
- const { template, messages, relativeOutput, outputName } = v;
- return [
- k,
- {
- ...template ? { template } : {},
- ...messages ? { messages } : {},
- ...relativeOutput ? { relativeOutput } : {},
- ...outputName ? { outputName } : {}
- }
- ];
- };
- const processedEntries = Object.keys(obj).map(processConfigSection).filter((entry) => entry !== void 0);
- const config = Object.fromEntries(processedEntries);
- debugLog(isDebug(), "config file:", found, "keys:", Object.keys(config));
- return config;
- } catch (e) {
- const msg = e instanceof Error ? e.message : String(e);
- throw new Error(`Failed to load config ${found}: ${msg}`);
- }
- }
- async function findTemplateFile(templatesDir, docType, { templateExtension, defaultTemplateBaseName }, desiredBase) {
- if (desiredBase) {
- const exact = path.join(templatesDir, `${desiredBase}${templateExtension}`);
- if (await isExist(exact)) return exact;
- }
- const def = path.join(templatesDir, `${defaultTemplateBaseName(docType)}${templateExtension}`);
- if (await isExist(def)) return def;
- const pattern = path.join(templatesDir, `${docType}_*${templateExtension}`);
- const found = await globby([pattern], { absolute: true });
- const [first] = found.toSorted();
- return first;
- }
- const PLACEHOLDER_RE = /{{\s*([A-Z0-9_]+)\s*}}/g;
- const formatWithDateFns = (dateStr, format$1) => {
- const d = dateStr.toLowerCase() === "now" ? new UTCDate() : (() => {
- const iso = parseISO(dateStr);
- return isValid(iso) ? iso : new Date(dateStr);
- })();
- if (!isValid(d)) return "";
- try {
- return format(d, format$1);
- } catch {
- return "";
- }
- };
- function materializeMessage(v) {
- if (v === null || v === void 0) return "";
- if (typeof v === "string") return v;
- if (typeof v === "number") return Number.isFinite(v) ? String(v) : "";
- if (typeof v === "boolean") return v ? "true" : "false";
- if (typeof v === "object" && "date" in v && "format" in v) {
- const { date, format: format2 } = v;
- if (typeof date === "string" && typeof format2 === "string") return formatWithDateFns(date, format2);
- }
- return "";
- }
- function collectPlaceholders(tpl) {
- return new Set([...tpl.matchAll(PLACEHOLDER_RE)].map((m) => m[1]));
- }
- function packagePlaceholder(key, pkg) {
- const k = key.toUpperCase();
- const str = (v) => typeof v === "string" ? v : void 0;
- const arrStr = (v) => Array.isArray(v) ? v.map((x) => typeof x === "string" ? x : JSON.stringify(x)).join(", ") : void 0;
- function authorToString(a) {
- if (!a) return void 0;
- if (typeof a === "string") return a;
- if (typeof a === "object") {
- const n = str(a.name) ?? "";
- const e = str(a.email);
- const w = str(a.url);
- return [n, e ? `<${e}>` : "", w ? `(${w})` : ""].filter(Boolean).join(" ").trim();
- }
- return void 0;
- }
- switch (k) {
- case "NAME":
- return str(pkg.name);
- case "VERSION":
- return str(pkg.version);
- case "DESCRIPTION":
- return str(pkg.description);
- case "HOMEPAGE":
- return str(pkg.homepage);
- case "LICENSE":
- return str(pkg.license);
- case "REPOSITORY": {
- const repo = pkg.repository;
- if (typeof repo === "string") return repo;
- const url = str(repo?.url);
- return url ?? void 0;
- }
- case "BUGS_URL": {
- const bugs = pkg.bugs;
- if (typeof bugs === "string") return bugs;
- const url = str(bugs?.url);
- return url ?? void 0;
- }
- case "AUTHOR":
- return authorToString(pkg.author);
- case "AUTHORS":
- return arrStr(pkg.authors);
- case "KEYWORDS":
- return arrStr(pkg.keywords);
- default: {
- const direct = pkg[key.toLowerCase()];
- if (typeof direct === "string") return direct;
- return void 0;
- }
- }
- }
- function buildPlaceholderValues(_docType, tplText, pkg, generic, specific) {
- const names = collectPlaceholders(tplText);
- const pkgValues = Array.from(names).reduce((acc, name) => {
- if (!name.startsWith("PACKAGE_")) return acc;
- const suffix = name.slice("PACKAGE_".length);
- const v = packagePlaceholder(suffix, pkg);
- return v !== void 0 ? { ...acc, [name]: v } : acc;
- }, {});
- const genericValues = generic ? Object.fromEntries(Object.entries(generic).map(([k, v]) => [k, materializeMessage(v)])) : {};
- const specificValues = specific ? Object.fromEntries(Object.entries(specific).map(([k, v]) => [k, materializeMessage(v)])) : {};
- return { ...pkgValues, ...genericValues, ...specificValues };
- }
- function buildContext(docType, tplText, pkg, generic, specific) {
- const values = buildPlaceholderValues(docType, tplText, pkg, generic, specific);
- const raw = {
- ...Object.fromEntries(Object.entries(values)),
- ...generic || {},
- ...specific || {}
- };
- return { values, raw };
- }
- function renderVariables(tpl, values, onMissing) {
- const VAR_RE = /{{\s*([A-Z0-9_]+)\s*}}/g;
- return tpl.replace(VAR_RE, (_m, g1) => {
- const v = values[g1];
- if (v === void 0) {
- return "";
- }
- return v;
- });
- }
- function renderSections(input, truthyMap) {
- const SECTION_RE = /{{\s*([#^])\s*([A-Z0-9_]+)\s*}}([\s\S]*?){{\s*\/\s*\2\s*}}/g;
- const processUntilConverged = (current) => {
- const next = current.replace(SECTION_RE, (_m, sigil, name, body) => {
- const condition = Boolean(truthyMap[name]);
- const pass = sigil === "#" ? condition : !condition;
- return pass ? renderSections(body, truthyMap) : "";
- });
- return next === current ? current : processUntilConverged(next);
- };
- return processUntilConverged(input);
- }
- async function generateForType(input, key, options) {
- const genericConfig = input.config["GENERIC"];
- const specificConfig = input.config[key];
- if (!specificConfig?.template || specificConfig.template.trim() === "") throw new Error(`[${key}] missing "template" in anarchy-legal.config.js`);
- const desiredBase = specificConfig.template;
- const tplPath = await findTemplateFile(input.templatesDir, key, options, desiredBase);
- if (!tplPath) throw new Error(`[${key}] template "${desiredBase}" not found under templates dir: ${input.templatesDir}`);
- const tplText = await fs.readFile(tplPath, "utf8");
- const { values, raw } = buildContext(key, tplText, input.ws.pkg, genericConfig?.messages, specificConfig?.messages);
- const afterSections = renderSections(tplText, raw);
- const namesAfter = collectPlaceholders(afterSections);
- const missing = Array.from(namesAfter).filter((name) => values[name] === void 0);
- if (missing.length) console.warn(`[warn] ${key}: ${missing.length} placeholders had no value: ${missing.slice(0, 10).join(", ")}${missing.length > 10 ? "…" : ""}`);
- const rendered = renderVariables(afterSections, values);
- const relOut = specificConfig.relativeOutput?.trim();
- if (relOut && path.isAbsolute(relOut)) console.warn(`[warn] ${key}: relativeOutput is absolute ("${relOut}"); it will be used as-is.`);
- const targetDir = relOut ? path.resolve(input.outDir, relOut) : input.outDir;
- const baseName = (specificConfig.outputName?.trim() || key).replace(/\s+$/, "");
- const outName = `${baseName}.md`;
- const outPath = path.join(targetDir, outName);
- await fs.mkdir(path.dirname(outPath), { recursive: true });
- await fs.writeFile(outPath, rendered, "utf8");
- console.log(`${baseName}.md written -> ${outPath}`);
- }
- async function generateAll(renderInput, options) {
- for (const k of renderInput.keys) {
- await generateForType(renderInput, k, options);
- }
- }
- function getConfiguredDocTypes(config) {
- const set = /* @__PURE__ */ new Set();
- Object.values(LegalDocumentType).forEach((docType) => {
- if (config[docType]) set.add(docType);
- });
- return set;
- }
- function assertTemplatesPresent(config, keys) {
- const missing = Array.from(keys).filter((k) => {
- const tpl = config[k]?.template;
- return typeof tpl !== "string" || tpl.trim() === "";
- });
- if (missing.length) {
- throw new Error(`anarchy-legal.config.js: "template" is required for sections: ${missing.join(", ")}`);
- }
- }
- function getConfiguredDocKeys(config) {
- return new Set(Object.keys(config || {}).filter((k) => k !== "GENERIC"));
- }
- return {
- assertTemplatesPresent,
- generateAll,
- getConfiguredDocKeys,
- getConfiguredDocTypes,
- readConfig
- };
- }
- function RepoUtilsService() {
- let isDebug = false;
- const setDebugMode = (debug) => void (isDebug = debug);
- const readJson = async (p) => JSON.parse(await fs.readFile(p, "utf8"));
- const isExist = async (p) => {
- try {
- await fs.access(p);
- return true;
- } catch {
- return false;
- }
- };
- function debugLog(isDebug2, ...args) {
- if (isDebug2) console.log("[debug]", ...args);
- }
- function hasWorkspacesField(pkg) {
- const ws = pkg?.workspaces;
- if (!ws) return false;
- if (Array.isArray(ws)) return ws.length > 0;
- if (typeof ws === "object" && Array.isArray(ws.packages)) return ws.packages.length > 0;
- return false;
- }
- async function loadWorkspaces(rootDir) {
- const rootPkg = await readJson(path.join(rootDir, "package.json"));
- const patterns = Array.isArray(rootPkg.workspaces) ? rootPkg.workspaces : rootPkg.workspaces?.packages ?? [];
- if (!patterns.length) throw new Error(`No workspaces patterns in ${path.join(rootDir, "package.json")}`);
- const dirs = await globby(patterns, {
- cwd: rootDir,
- absolute: true,
- onlyDirectories: true,
- gitignore: true,
- ignore: ["**/node_modules/**", "**/dist/**", "**/dist-*/**", "**/.*/**"]
- });
- const entries = (await Promise.all(
- dirs.map(async (dir) => {
- const pkgPath = path.join(dir, "package.json");
- if (!await isExist(pkgPath)) return void 0;
- const pkg = await readJson(pkgPath);
- const name = typeof pkg.name === "string" ? pkg.name : void 0;
- return name ? [name, { name, dir, pkgPath, pkg }] : void 0;
- })
- )).filter(Boolean);
- return new Map(entries);
- }
- async function findMonorepoRoot(startDir) {
- const start = path.resolve(startDir);
- debugLog(isDebug, "findMonorepoRoot: start at", start);
- const searchUp = async (dir, depth) => {
- if (depth > 50) return void 0;
- const pkgPath = path.join(dir, "package.json");
- debugLog(isDebug, "check", pkgPath);
- if (await isExist(pkgPath)) {
- try {
- const pkg = await readJson(pkgPath);
- if (hasWorkspacesField(pkg)) {
- debugLog(isDebug, "found workspaces at", pkgPath);
- return dir;
- }
- } catch (e) {
- debugLog(isDebug, " ! failed to parse", pkgPath, "-", e.message);
- }
- }
- const parent = path.dirname(dir);
- if (parent === dir) return void 0;
- return searchUp(parent, depth + 1);
- };
- const found = await searchUp(start, 0);
- if (!found) throw new Error(`Monorepo root not found starting from "${startDir}". Provide --root explicitly pointing to a package.json with "workspaces".`);
- return found;
- }
- async function loadRoot(rootDir) {
- const rootPkgPath = path.join(rootDir, "package.json");
- if (!await isExist(rootPkgPath)) throw new Error(`Root package.json not found at: ${rootPkgPath}`);
- const rootPkg = await readJson(rootPkgPath);
- const wsField = rootPkg.workspaces;
- if (!wsField) throw new Error(`"workspaces" not found in root package.json at ${rootPkgPath}`);
- const patterns = Array.isArray(wsField) ? wsField : wsField.packages ?? [];
- if (patterns.length === 0) throw new Error(`"workspaces" has no packages in ${rootPkgPath}`);
- debugLog(isDebug, "workspaces patterns:", patterns);
- const dirs = await globby(patterns, {
- cwd: rootDir,
- onlyDirectories: true,
- absolute: true,
- expandDirectories: false,
- gitignore: true,
- ignore: ["**/node_modules/**", "**/dist/**", "**/dist-*/**", "**/.*/**"]
- });
- debugLog(isDebug, "workspace dirs found:", dirs.length);
- const entries = (await Promise.all(
- dirs.map(async (dir) => {
- const pkgPath = path.join(dir, "package.json");
- if (!await isExist(pkgPath)) return void 0;
- const pkg = await readJson(pkgPath);
- if (!pkg.name) return void 0;
- return [
- pkg.name,
- {
- name: pkg.name,
- dir,
- pkgPath,
- pkg
- }
- ];
- })
- )).filter(Boolean);
- debugLog(isDebug, "workspace packages loaded:", entries.length);
- if (entries.length === 0) throw new Error(`No workspace package.json files found by patterns: ${patterns.join(", ")}`);
- return {
- rootDir,
- rootPkgPath,
- rootPkg,
- workspaces: new Map(entries)
- };
- }
- function buildWsGraph(ws) {
- const names = new Set(ws.keys());
- const graph = Array.from(ws.entries()).reduce((acc, [name, info]) => {
- const deps = info.pkg.dependencies ?? {};
- const edges = new Set(Object.keys(deps).filter((dependencyName) => names.has(dependencyName)));
- acc.set(name, edges);
- return acc;
- }, /* @__PURE__ */ new Map());
- if (isDebug) {
- console.log("[debug] workspace graph:");
- graph.forEach((v, k) => console.log(" ", k, "->", [...v].join(", ") || "∅"));
- }
- return graph;
- }
- function assertNoCycles(graph, start) {
- const temp = /* @__PURE__ */ new Set();
- const perm = /* @__PURE__ */ new Set();
- let pathStack = [];
- const dfs = (u) => {
- if (perm.has(u)) return;
- if (temp.has(u)) {
- const idx = pathStack.lastIndexOf(u);
- const cyclePath = [...pathStack.slice(idx), u].join(" -> ");
- throw new Error(`Cycle detected between workspaces (prod deps): ${cyclePath}`);
- }
- temp.add(u);
- pathStack = [...pathStack];
- (graph.get(u) ?? /* @__PURE__ */ new Set()).forEach(dfs);
- pathStack = pathStack.slice(0, pathStack.length - 1);
- temp.delete(u);
- perm.add(u);
- };
- dfs(start);
- }
- function collectWorkspaceClosure(graph, start) {
- const visited = /* @__PURE__ */ new Set();
- const visit = (u) => {
- if (visited.has(u)) return;
- visited.add(u);
- (graph.get(u) ?? /* @__PURE__ */ new Set()).forEach(visit);
- };
- visit(start);
- return visited;
- }
- async function npmLsJson(rootDir, workspace) {
- return new Promise((resolve, reject) => {
- const args = ["ls", "-w", workspace, "--json", "--omit=dev", "--all", "--long"];
- debugLog(isDebug, "spawn:", "npm", args.join(" "), "cwd:", rootDir);
- const child = spawn("npm", args, { cwd: rootDir, stdio: ["ignore", "pipe", "pipe"] });
- let out = "";
- let err = "";
- child.stdout.on("data", (d) => out += String(d));
- child.stderr.on("data", (d) => err += String(d));
- child.on("close", (code) => {
- if (code !== 0 && !out) return reject(new Error(`npm ls failed (code ${code}): ${err || "unknown error"}`));
- try {
- const json = JSON.parse(out);
- const normPath = (o) => (
- // eslint-disable-next-line spellcheck/spell-checker
- typeof o?.path === "string" ? o.path : typeof o?.realpath === "string" ? o.realpath : typeof o?.location === "string" ? path.isAbsolute(o.location) ? o.location : void 0 : void 0
- );
- const toNode = (name, o) => ({
- name,
- version: typeof o?.version === "string" ? o.version : "0.0.0",
- path: normPath(o),
- license: o?.license,
- repository: o?.repository,
- dependencies: o?.dependencies ? Object.fromEntries(Object.entries(o.dependencies).map(([k, v]) => [k, toNode(k, v)])) : void 0
- });
- const rootNode = {
- name: json?.name ?? workspace,
- version: json?.version ?? "0.0.0",
- path: normPath(json),
- license: json?.license,
- repository: json?.repository,
- dependencies: json?.dependencies ? Object.fromEntries(Object.entries(json.dependencies).map(([k, v]) => [k, toNode(k, v)])) : void 0
- };
- debugLog(isDebug, "npm ls parsed root:", rootNode.name, rootNode.version);
- return resolve(rootNode);
- } catch (e) {
- return reject(new Error(`Failed to parse npm ls JSON: ${e.message}
- Raw: ${out.slice(0, 2e3)}`));
- }
- });
- });
- }
- function collectExternalSeedNames(closure, wsMap, wsNames) {
- const seeds = /* @__PURE__ */ new Set();
- [...closure].forEach((wsName) => {
- const info = wsMap.get(wsName);
- if (!info) return;
- const deps = info.pkg.dependencies ?? {};
- Object.keys(deps).forEach((dependencyName) => {
- if (!wsNames.has(dependencyName)) seeds.add(dependencyName);
- });
- });
- return seeds;
- }
- function collectThirdPartyMap(root, wsNames, seedNames) {
- const acc = /* @__PURE__ */ new Map();
- if (!root || !root.dependencies) return acc;
- if (seedNames.size === 0) return acc;
- const visit = (node, inside) => {
- const isWs = wsNames.has(node.name);
- const nowInside = inside || seedNames.has(node.name);
- if (nowInside && !isWs && node.version && node.version !== "0.0.0") {
- const id = `${node.name}@${node.version}`;
- const prev = acc.get(id);
- const installPath = node.path;
- if (!prev) acc.set(id, { id, name: node.name, version: node.version, installPath });
- else if (!prev.installPath && installPath) acc.set(id, { ...prev, installPath });
- }
- if (node.dependencies) Object.values(node.dependencies).forEach((child) => visit(child, nowInside));
- };
- Object.values(root.dependencies).forEach((child) => visit(child, false));
- debugLog(isDebug, "third-party collected (seed-filtered):", acc.size);
- return acc;
- }
- function resolvePackageDir(pkgName, fromDir) {
- try {
- const req = createRequire(path.join(fromDir, "package.json"));
- const p = req.resolve(`${pkgName}/package.json`);
- return path.dirname(p);
- } catch {
- return void 0;
- }
- }
- function fillMissingInstallPaths(collected, wsDir, rootDir) {
- let filled = 0;
- Array.from(collected.entries()).forEach(([id, item]) => {
- if (!item.installPath) {
- const p = resolvePackageDir(item.name, wsDir) ?? resolvePackageDir(item.name, rootDir);
- if (p) {
- collected.set(id, { ...item, installPath: p });
- filled++;
- }
- }
- });
- debugLog(isDebug, "install paths filled via resolver:", filled);
- }
- async function findLicenseFile(dir) {
- try {
- const list = await fs.readdir(dir);
- const c = list.find((f) => {
- const base = f.toLowerCase();
- return /^(license|licence|copying|unlicense|notice)(\..+)?$/.test(base);
- });
- return c ? path.join(dir, c) : void 0;
- } catch {
- return void 0;
- }
- }
- function parseSeeLicenseIn(licenseField) {
- if (!licenseField) return void 0;
- const s = typeof licenseField === "string" ? licenseField : typeof licenseField?.type === "string" ? licenseField.type : void 0;
- if (!s) return void 0;
- const m = /see\s+license\s+in\s+(.+)$/i.exec(s);
- return m?.[1]?.trim();
- }
- async function tryReadLicenseText(pkgDir, licenseField) {
- const see = parseSeeLicenseIn(licenseField);
- if (see) {
- const p = path.join(pkgDir, see);
- if (await isExist(p)) {
- try {
- return await fs.readFile(p, "utf8");
- } catch {
- }
- }
- }
- const license = await findLicenseFile(pkgDir);
- if (license) {
- try {
- return await fs.readFile(license, "utf8");
- } catch {
- }
- }
- return void 0;
- }
- const safeString = (v) => typeof v === "string" ? v : void 0;
- function normalizeLicenseValue(licenseField) {
- if (!licenseField) return "UNKNOWN";
- if (typeof licenseField === "string") return licenseField;
- if (Array.isArray(licenseField)) {
- const arr = licenseField.map((x) => typeof x === "string" ? x : typeof x?.type === "string" ? x.type : "UNKNOWN");
- return arr.length > 0 ? arr : "UNKNOWN";
- }
- if (typeof licenseField === "object") {
- const t = licenseField?.type;
- if (typeof t === "string") return t;
- }
- return "UNKNOWN";
- }
- async function readPackageMeta(pkgDir) {
- try {
- const pkg = await readJson(path.join(pkgDir, "package.json"));
- const repo = typeof pkg.repository === "string" ? pkg.repository : typeof pkg.repository?.url === "string" ? pkg.repository.url : void 0;
- return {
- licenseField: pkg.license ?? pkg.licenses,
- repository: repo,
- publisher: safeString(pkg.author?.name) ?? safeString(pkg.author) ?? void 0,
- email: safeString(pkg.author?.email) ?? void 0,
- url: safeString(pkg.homepage) ?? void 0
- };
- } catch {
- return {};
- }
- }
- async function buildLicenseEntries(collected) {
- const list = await Promise.all(
- Array.from(collected.values()).map(async ({ id, name, version, installPath }) => {
- let licenseText;
- let licenseType = "UNKNOWN";
- let repository;
- let publisher;
- let email;
- let url;
- if (installPath) {
- const meta = await readPackageMeta(installPath);
- licenseType = normalizeLicenseValue(meta.licenseField);
- repository = meta.repository;
- publisher = meta.publisher;
- email = meta.email;
- url = meta.url;
- licenseText = await tryReadLicenseText(installPath, meta.licenseField);
- }
- return {
- id,
- name,
- version,
- licenses: licenseType,
- licenseText,
- repository,
- publisher,
- email,
- url,
- path: installPath
- };
- })
- );
- return [...list].sort((a, b) => a.name === b.name ? a.version.localeCompare(b.version) : a.name.localeCompare(b.name));
- }
- async function buildWorkspaceLicenseEntries(names, wsMap, excludeName) {
- const filtered = [...names].filter((name) => !(excludeName && name === excludeName));
- const entries = await Promise.all(
- filtered.map((name) => wsMap.get(name)).filter((info) => Boolean(info)).map(async (info) => {
- const version = info.pkg.version ?? "0.0.0";
- const meta = await readPackageMeta(info.dir);
- const licenseType = normalizeLicenseValue(meta.licenseField);
- const licenseText = await tryReadLicenseText(info.dir, meta.licenseField);
- return {
- id: `${info.name}@${version}`,
- name: info.name,
- version,
- licenses: licenseType,
- licenseText,
- repository: meta.repository,
- publisher: meta.publisher,
- email: meta.email,
- url: meta.url,
- path: info.dir
- };
- })
- );
- return [...entries].sort((a, b) => a.name === b.name ? a.version.localeCompare(b.version) : a.name.localeCompare(b.name));
- }
- function renderMarkdown(workspaceLabel, items, emptyNote) {
- const header = [`# Third-Party Licenses`, `## Application: ${workspaceLabel}`, `Production dependencies (including transition dependencies): ${items.length}`, ``];
- const note = items.length === 0 && emptyNote ? [`**Note:** ${emptyNote}`, ``] : [];
- const body = items.flatMap((it) => {
- const licenseStr = Array.isArray(it.licenses) ? it.licenses.join(", ") : String(it.licenses ?? "UNKNOWN");
- return [
- `---`,
- ``,
- `## ${it.name}@${it.version}`,
- `**License:** ${licenseStr}
- `,
- ...it.repository ? [`**Repository:** ${it.repository}
- `] : [],
- ...it.url ? [`**URL:** ${it.url}
- `] : [],
- ...it.publisher ? [`**Publisher:** ${it.publisher}${it.email ? ` <${it.email}>` : ""}
- `] : [],
- ``,
- ...it.licenseText ? [it.licenseText.trim(), ``] : [`_No license text file found; relying on package metadata._`, ``]
- ];
- });
- return [...header, ...note, ...body].join("\n");
- }
- async function resolveWorkspaceFromArg(arg, workspaces, rootDir) {
- const byName = workspaces.get(arg);
- if (byName) return byName;
- const asPath = path.isAbsolute(arg) ? arg : path.join(rootDir, arg);
- const norm = path.resolve(asPath);
- const found = [...workspaces.values()].find((w) => path.resolve(w.dir) === norm);
- if (found) return found;
- const wantRoot = norm === path.resolve(rootDir) || arg === ":root" || arg === "root" || arg === ".";
- if (wantRoot) {
- const pkgPath = path.join(rootDir, "package.json");
- const pkg = await readJson(pkgPath);
- const name = typeof pkg.name === "string" ? pkg.name : "monorepo-root";
- return { name, dir: rootDir, pkgPath, pkg };
- }
- throw new Error(`Workspace "${arg}" not found by name or path. Tip: use "--workspace ." or "--workspace :root" to target the monorepo root.`);
- }
- return {
- assertNoCycles,
- buildLicenseEntries,
- buildWorkspaceLicenseEntries,
- buildWsGraph,
- collectExternalSeedNames,
- collectThirdPartyMap,
- collectWorkspaceClosure,
- debugLog,
- fillMissingInstallPaths,
- findMonorepoRoot,
- isDebug: () => isDebug,
- isExist,
- loadRoot,
- loadWorkspaces,
- npmLsJson,
- readJson,
- renderMarkdown,
- resolveWorkspaceFromArg,
- setDebugMode
- };
- }
- function LegalFilesService() {
- let isDebug = false;
- const repoUtilsService = RepoUtilsService();
- const legalFilesUtilsService = LegalFilesUtilsService(repoUtilsService);
- const { debugLog, findMonorepoRoot, resolveWorkspaceFromArg, loadWorkspaces } = repoUtilsService;
- const { assertTemplatesPresent, getConfiguredDocKeys, generateAll, readConfig } = legalFilesUtilsService;
- const options = {
- templateExtension: ".md",
- defaultTemplateBaseName: (docType) => `${docType}_TEMPLATE`
- };
- async function generate() {
- const argv = await yargs(hideBin(process.argv)).usage("$0 --workspace <name|path> --out <dir> [--templates <dir>] [--types DISCLAIMER,EULA,...] [--debug]").option("workspace", { type: "string", demandOption: true, describe: "Target workspace (name or path relative to monorepo root)" }).option("out", { type: "string", demandOption: true, describe: "Output directory for generated files (relative to current working dir allowed)" }).option("templates", { type: "string", describe: "Templates directory. Default: packages/anarchy-legal/templates" }).option("types", { type: "string", describe: `Comma-separated list of doc types. Default: ${Object.values(LegalDocumentType).join(",")}` }).option("debug", { type: "boolean", default: false }).help().parseAsync();
- isDebug = Boolean(argv.debug);
- repoUtilsService.setDebugMode(isDebug);
- const scriptDir = path.dirname(fileURLToPath(import.meta.url));
- const startCandidates = [process.env.INIT_CWD, process.cwd(), scriptDir].filter(Boolean);
- const rootDir = await startCandidates.reduce(async (accP, c) => {
- const acc = await accP;
- if (acc) return acc;
- try {
- return await findMonorepoRoot(c);
- } catch (e) {
- debugLog(isDebug, "no root from", c, ":", e.message);
- return void 0;
- }
- }, Promise.resolve(void 0));
- if (!rootDir) throw new Error(`Failed to find monorepo root from: ${startCandidates.join(", ")}`);
- const workspaces = await loadWorkspaces(rootDir);
- const ws = await resolveWorkspaceFromArg(String(argv.workspace), workspaces, rootDir);
- debugLog(isDebug, "target workspace:", ws.name, ws.dir);
- const templatesDir = argv.templates ? path.isAbsolute(argv.templates) ? argv.templates : path.resolve(process.cwd(), argv.templates) : path.resolve(scriptDir, "../../src/Templates");
- debugLog(isDebug, "templates dir:", templatesDir);
- const outDir = path.isAbsolute(argv.out) ? argv.out : path.resolve(process.cwd(), String(argv.out));
- debugLog(isDebug, "out dir:", outDir);
- const config = await readConfig(ws.dir);
- const configKeys = Array.from(getConfiguredDocKeys(config));
- if (!configKeys.length) {
- console.log("Nothing to generate: no doc types configured (or filtered out by --types).");
- return;
- }
- const cliKeys = (() => {
- if (!argv.types) return new Set(configKeys);
- const parts = String(argv.types).split(",").map((s) => s.trim()).filter(Boolean);
- const ok = new Set(configKeys);
- return new Set(parts.filter((p) => ok.has(p)));
- })();
- if (!cliKeys.size) {
- console.log("Nothing to generate: all requested sections were filtered out or missing in config.");
- return;
- }
- assertTemplatesPresent(config, cliKeys);
- await generateAll({ ws, outDir, templatesDir, keys: cliKeys, config }, options);
- }
- return { generate };
- }
- function NoticeUtilsService() {
- function splitEntriesFromMarkdown(md) {
- const parts = md.split(/\r?\n---\r?\n/g);
- return parts.filter((chunk) => /^##\s+.+/m.test(chunk));
- }
- function parseHeaderLine(chunk) {
- const m = /^##\s+(.+?)\s*$/m.exec(chunk);
- if (!m) return void 0;
- const full = m[1].trim();
- const at = full.lastIndexOf("@");
- if (at <= 0 || at === full.length - 1) return void 0;
- const name = full.slice(0, at).trim();
- const version = full.slice(at + 1).trim();
- if (!name || !version) return void 0;
- return { name, version };
- }
- function parseOneEntry(chunk) {
- const header = parseHeaderLine(chunk);
- if (!header) return void 0;
- const { name, version } = header;
- const id = `${name}@${version}`;
- const field = (label) => {
- const re = new RegExp(`^\\*\\*${label}:\\*\\*\\s*(.+)\\s*$`, "mi");
- const m = re.exec(chunk);
- return m ? m[1].trim() : void 0;
- };
- const licensesStr = field("License") ?? "UNKNOWN";
- const licenses = licensesStr.split(",").map((s) => s.trim()).filter(Boolean);
- const repository = field("Repository");
- const url = field("URL");
- const publisher = field("Publisher")?.replace(/\s+<[^>]+>\s*$/, "").trim();
- const path2 = field("Path");
- let licenseText = void 0;
- {
- const lines = chunk.split(/\r?\n/);
- const firstBlankAfterHeaderIdx = (() => {
- let seenHeader = false;
- return lines.findIndex((ln) => {
- if (ln.startsWith("## ")) {
- seenHeader = true;
- return false;
- }
- return seenHeader && ln.trim() === "";
- });
- })();
- const startIdx = firstBlankAfterHeaderIdx >= 0 ? firstBlankAfterHeaderIdx + 1 : lines.length;
- const tail = lines.slice(startIdx).join("\n").trim();
- if (tail && !/^_No license text file found;/m.test(tail)) licenseText = tail;
- }
- const inferredCopyright = (() => {
- if (licenseText) {
- const ln = licenseText.split(/\r?\n/).find((l) => /^\s*(?:copyright|\(c\)|©)\s+/i.test(l));
- if (ln) return ln.trim();
- }
- return publisher?.trim();
- })();
- return {
- id,
- name,
- version,
- licenses,
- repository: repository ?? void 0,
- url: url ?? void 0,
- publisher: publisher ?? void 0,
- path: path2 ?? void 0,
- licenseText,
- inferredCopyright
- };
- }
- function parseThirdPartyMarkdown(md) {
- const chunks = splitEntriesFromMarkdown(md);
- const entries = chunks.flatMap((ch) => {
- const e = parseOneEntry(ch);
- return e ? [e] : [];
- });
- return entries.toSorted((a, b) => a.name === b.name ? a.version.localeCompare(b.version) : a.name.localeCompare(b.name));
- }
- function collectAllHeadingIds(md) {
- const re = /^##\s+(.+?)\s*$/gm;
- return [...md.matchAll(re)].reduce((ids, m) => {
- const full = String(m[1]).trim();
- const at = full.lastIndexOf("@");
- if (at > 0 && at < full.length - 1) {
- ids.add(`${full.slice(0, at).trim()}@${full.slice(at + 1).trim()}`);
- }
- return ids;
- }, /* @__PURE__ */ new Set());
- }
- async function findUpstreamNoticeFile(dir) {
- try {
- const list = await fs.readdir(dir);
- const candidate = list.find((f) => /^(notice|notice\.txt|notice\.md)$/i.test(f));
- return candidate ? path.join(dir, candidate) : void 0;
- } catch {
- return void 0;
- }
- }
- async function loadUpstreamNotice(dir, maxBytes) {
- const p = await findUpstreamNoticeFile(dir);
- if (!p) return void 0;
- try {
- const stat = await fs.stat(p);
- const text = await fs.readFile(p, "utf8");
- if (stat.size > maxBytes) {
- return `Upstream NOTICE is too large (${stat.size} bytes); truncated.
-
- ` + text.slice(0, maxBytes);
- }
- return text;
- } catch {
- return void 0;
- }
- }
- return {
- collectAllHeadingIds,
- loadUpstreamNotice,
- parseThirdPartyMarkdown
- };
- }
- function NoticeService() {
- let isDebug = false;
- const repoUtilsService = RepoUtilsService();
- const noticeUtilsService = NoticeUtilsService();
- const { debugLog, findMonorepoRoot, isExist, loadWorkspaces, resolveWorkspaceFromArg } = repoUtilsService;
- const { collectAllHeadingIds, parseThirdPartyMarkdown, loadUpstreamNotice } = noticeUtilsService;
- function renderNotice(wsName, entries, includeUpstream, sourceName) {
- const header = `# Third-Party Notices
-
- ## Application: ${wsName}
-
- `;
- const subHeader = `This product includes third-party components. Their **licenses and attributions** are listed below.
- For the **full license texts**, see \`${sourceName}\`.
-
- Components listed: ${entries.length}
-
- ## 1) Mandatory Attributions (verbatim)
-
- The following notices are reproduced as provided by the respective licensors (e.g., **Apache-2.0 NOTICE**, **CC-BY credits**, **font attributions**):
- `;
- const noRecordsNote = "**Note:** No third-party components included.";
- const blocks = entries.map((v) => {
- const licenses = v.licenses.length ? v.licenses.join(", ") + "\n" : "UNKNOWN";
- const repository = v.repository ? `**Repository:** ${v.repository}
-
- ` : "";
- const url = v.url ? `**URL:** ${v.url}
-
- ` : "";
- const inferredCopyright = v.inferredCopyright ? `**Attribution:** ${v.inferredCopyright}
- ` : "";
- const base = `
- ## ${v.name}@${v.version}
-
- **License(s):** ${licenses}
- ${repository}${url}${inferredCopyright}
- ---
- `;
- const upstream = includeUpstream && v.upstreamNotice ? [`**Upstream NOTICE:**`, ...v.upstreamNotice.split(/\r?\n/).map((ln) => `> ${ln}`), ``] : [];
- return [base, ...upstream];
- });
- const footer = `
- ## 2) General OSS Acknowledgment
-
- This product incorporates open-source software. **If any term of this file or the EULA conflicts with an OSS license for a specific component, the OSS license controls for that component.**
- `;
- return entries.length !== 0 ? [header, subHeader, ...blocks.flat(), footer].join("") : [header, noRecordsNote].join("");
- }
- async function generate() {
- const argv = await yargs(hideBin(process.argv)).scriptName("anarchy-legal:notice").usage("$0 --workspace <name|path> [--source <path>] [--source-name <file>] [--out <NOTICE.md>] [--include-upstream-notices] [--max-upstream-notice-kb <N>] [--audit] [--strict] [--debug]").option("workspace", { type: "string", demandOption: true, describe: "Target workspace (name or path relative to monorepo root)" }).option("source", { type: "string", describe: "Path to the input attribution file (default is <workspace>/<source-name>)" }).option("source-name", { type: "string", default: "THIRD_PARTY_LICENSES.md", describe: "File name inside the workspace to read from when --source is not provided" }).option("out", { type: "string", describe: "Path to output NOTICE.md. Default: <workspace>/NOTICE.md" }).option("include-upstream-notices", { type: "boolean", default: false, describe: "Also read upstream NOTICE files from dependency install paths (if present in source)" }).option("max-upstream-notice-kb", { type: "number", default: 128, describe: "Max size per upstream NOTICE to read (kilobytes)" }).option("audit", { type: "boolean", default: false, describe: "Print a diff between headings in source and parsed entries" }).option("strict", { type: "boolean", default: false, describe: "With --audit, exit with code 2 if mismatches found" }).option("debug", { type: "boolean", default: false }).help().parseAsync();
- isDebug = Boolean(argv.debug);
- repoUtilsService.setDebugMode(isDebug);
- const scriptDir = path.dirname(fileURLToPath(import.meta.url));
- const startCandidates = [process.env.INIT_CWD, process.cwd(), scriptDir].filter(Boolean);
- const rootDir = await startCandidates.reduce(async (accP, c) => {
- const acc = await accP;
- if (acc) return acc;
- try {
- return await findMonorepoRoot(c);
- } catch (e) {
- debugLog(isDebug, "no root from", c, ":", e.message);
- return void 0;
- }
- }, Promise.resolve(void 0));
- if (!rootDir) throw new Error(`Failed to find monorepo root from: ${startCandidates.join(", ")}`);
- const workspaces = await loadWorkspaces(rootDir);
- const ws = await resolveWorkspaceFromArg(String(argv.workspace), workspaces, rootDir);
- debugLog(isDebug, "target workspace:", ws.name, ws.dir);
- const sourceName = String(argv["source-name"] || "THIRD_PARTY_LICENSES.md");
- const defaultSource = path.join(ws.dir, sourceName);
- const srcPath = argv.source ? path.isAbsolute(argv.source) ? argv.source : path.resolve(process.cwd(), argv.source) : defaultSource;
- const outPath = argv.out ? path.isAbsolute(argv.out) ? argv.out : path.resolve(process.cwd(), argv.out) : path.join(ws.dir, "NOTICE.md");
- debugLog(isDebug, "source:", srcPath);
- debugLog(isDebug, "out:", outPath);
- if (!await isExist(srcPath)) {
- console.error(`Source file not found: ${srcPath}`);
- process.exit(1);
- }
- const src = await fs.readFile(srcPath, "utf8");
- const declaredIds = collectAllHeadingIds(src);
- const entries = parseThirdPartyMarkdown(src);
- debugLog(isDebug, "parsed entries:", entries.length);
- const finalEntries = await (async () => {
- if (!argv["include-upstream-notices"]) return entries;
- const maxBytes = Math.max(1, Math.floor(Number(argv["max-upstream-notice-kb"]) || 128)) * 1024;
- const withUpstream = await Promise.all(
- entries.map(async (e) => {
- if (!e.path) return e;
- const u = await loadUpstreamNotice(e.path, maxBytes);
- return u ? { ...e, upstreamNotice: u } : e;
- })
- );
- const filledCount = withUpstream.filter((e) => Boolean(e.upstreamNotice)).length;
- debugLog(isDebug, "upstream notices loaded:", filledCount);
- return withUpstream;
- })();
- if (argv.audit) {
- const parsedIds = new Set(finalEntries.map((e) => e.id));
- const missing = Array.from(declaredIds).filter((id) => !parsedIds.has(id)).toSorted();
- console.log(`NOTICE audit:
- headings in source: ${declaredIds.size}
- parsed entries: ${parsedIds.size}
- missing in NOTICE: ${missing.length}`);
- if (missing.length) {
- console.log(missing.map((x) => ` - ${x}`).join("\n"));
- if (argv.strict) {
- console.error("Audit failed: some entries were not parsed into NOTICE.");
- process.exit(2);
- }
- } else {
- console.log("Audit OK: all entries accounted for.");
- }
- }
- const md = renderNotice(ws.name, entries, Boolean(argv["include-upstream-notices"]), sourceName);
- await fs.mkdir(path.dirname(outPath), { recursive: true });
- await fs.writeFile(outPath, md, "utf8");
- console.log(`NOTICE.md written -> ${outPath}`);
- }
- return { generate };
- }
- function ThirdPartyLicensesService() {
- let isDebug = false;
- const repoUtilsService = RepoUtilsService();
- const {
- assertNoCycles,
- buildLicenseEntries,
- buildWorkspaceLicenseEntries,
- buildWsGraph,
- collectExternalSeedNames,
- collectThirdPartyMap,
- collectWorkspaceClosure,
- debugLog,
- fillMissingInstallPaths,
- findMonorepoRoot,
- loadRoot,
- npmLsJson,
- renderMarkdown,
- resolveWorkspaceFromArg
- } = repoUtilsService;
- function getStartCandidates(argv) {
- const scriptDir = path.dirname(fileURLToPath(import.meta.url));
- return [argv.root, process.env.INIT_CWD, process.cwd(), scriptDir].filter(Boolean);
- }
- function getMonorepoRoot(startCandidates) {
- return startCandidates.reduce(async (prev, c) => {
- const acc = await prev;
- if (acc) return acc;
- try {
- const found = await findMonorepoRoot(c);
- debugLog(isDebug, "monorepo root picked:", found, "(from", c + ")");
- return found;
- } catch (e) {
- debugLog(isDebug, "no root from", c, ":", e.message);
- return void 0;
- }
- }, Promise.resolve(void 0));
- }
- async function getWorkspaceEntries(argv, wsName, closure, root) {
- let wsEntries = [];
- if (argv["include-workspaces"] !== false) {
- wsEntries = await buildWorkspaceLicenseEntries(
- closure,
- root.workspaces,
- argv["include-workspace-self"] ? void 0 : wsName
- // exclude self
- );
- }
- return wsEntries;
- }
- function getEmptyNote(sorted, seedNames) {
- if (sorted.length !== 0) return void 0;
- const noSeeds = seedNames.size === 0;
- return noSeeds ? "This workspace declares no production dependencies and has no reachable internal workspaces. Therefore, there are no third-party licenses to list." : "There are no third-party production dependencies reachable from this workspace. Therefore, there are no third-party licenses to list.";
- }
- async function writeResultFile(outPath, wsName, sorted, emptyNote) {
- const resultFile = renderMarkdown(wsName, sorted, emptyNote);
- await fs.mkdir(path.dirname(outPath), { recursive: true });
- await fs.writeFile(outPath, resultFile, "utf8");
- console.log(`The result file written to: ${outPath}`);
- }
- async function generate() {
- const argv = await yargs(hideBin(process.argv)).usage("$0 --workspace <name|path> --out <file> [--root <dir>] [--debug] [--no-include-workspaces]").option("root", {
- type: "string",
- describe: "Starting directory to search for monorepo root. If omitted, uses INIT_CWD, then process.cwd(), then script dir."
- }).option("workspace", {
- type: "string",
- demandOption: true,
- describe: "Target workspace (name from package.json or folder path relative to monorepo root)"
- }).option("out", {
- type: "string",
- demandOption: true,
- describe: "Output path for the result file (relative to current working dir)"
- }).option("debug", {
- type: "boolean",
- default: false,
- describe: "Print verbose diagnostic information"
- }).option("include-workspaces", {
- type: "boolean",
- default: true,
- describe: "Also include licenses of reachable internal workspaces (excluding self by default)"
- }).option("include-workspace-self", {
- type: "boolean",
- default: false,
- describe: "Also include license of the target workspace itself"
- }).help().parseAsync();
- isDebug = Boolean(argv.debug);
- repoUtilsService.setDebugMode(isDebug);
- const startCandidates = getStartCandidates(argv);
- debugLog(isDebug, "start candidates:", startCandidates);
- const monorepoRoot = await getMonorepoRoot(startCandidates);
- if (!monorepoRoot) throw new Error(`Failed to locate monorepo root from candidates: ${startCandidates.join(", ")}`);
- const root = await loadRoot(monorepoRoot);
- debugLog(isDebug, "rootDir:", root.rootDir);
- const { name, dir } = await resolveWorkspaceFromArg(argv.workspace, root.workspaces, root.rootDir);
- debugLog(isDebug, "target workspace:", name, "dir:", dir);
- const graph = buildWsGraph(root.workspaces);
- assertNoCycles(graph, name);
- const closure = collectWorkspaceClosure(graph, name);
- const wsNamesSet = new Set(root.workspaces.keys());
- const seedNames = collectExternalSeedNames(closure, root.workspaces, wsNamesSet);
- debugLog(isDebug, "workspace closure size:", closure.size, "seed external deps:", seedNames.size, "sample:", [...seedNames].slice(0, 10));
- const tree = await npmLsJson(root.rootDir, name);
- const thirdPartyMap = new Map(collectThirdPartyMap(tree, wsNamesSet, seedNames));
- fillMissingInstallPaths(thirdPartyMap, name, root.rootDir);
- if (thirdPartyMap.size === 0) debugLog(isDebug, `[info] No third-party prod deps reachable from seeds.`);
- else if (isDebug) console.log("[debug] examples (third-party):", [...thirdPartyMap.values()].slice(0, 5));
- const wsEntries = await getWorkspaceEntries(argv, name, closure, root);
- debugLog(isDebug, "workspace license entries (after self-filter):", wsEntries.length);
- const thirdEntries = await buildLicenseEntries(thirdPartyMap);
- debugLog(isDebug, "third-party license entries:", thirdEntries.length);
- const merged = [...wsEntries, ...thirdEntries];
- const sorted = [...merged].sort((a, b) => a.name === b.name ? a.version.localeCompare(b.version) : a.name.localeCompare(b.name));
- const outPath = path.isAbsolute(argv.out) ? argv.out : path.join(process.cwd(), argv.out);
- const emptyNote = getEmptyNote(sorted, seedNames);
- debugLog(isDebug, "write output to:", outPath, "total entries:", sorted.length);
- await writeResultFile(outPath, name, sorted, emptyNote);
- }
- return { generate };
- }
+ import { LegalDocumentType } from "./Constants/LegalDocumentType.js";
+ import { LegalFilesService } from "./Services/LegalFilesService.js";
+ import { LegalFilesUtilsService } from "./Services/LegalFilesUtilsService.js";
+ import { NoticeService } from "./Services/NoticeService.js";
+ import { NoticeUtilsService } from "./Services/NoticeUtilsService.js";
+ import { RepoUtilsService } from "./Services/RepoUtilsService.js";
+ import { ThirdPartyLicensesService } from "./Services/ThirdPartyLicensesService.js";
  export {
  LegalDocumentType,
  LegalFilesService,