codealmanac 0.2.5 → 0.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +25 -20
- package/dist/{agents-RVTQYE6A.js → agents-V2ZOIACP.js} +6 -5
- package/dist/{chunk-P5WGG4FJ.js → chunk-5BWUMAOX.js} +2 -2
- package/dist/chunk-5BWUMAOX.js.map +1 -0
- package/dist/{chunk-KQUVMF27.js → chunk-BFIG2CXM.js} +2 -516
- package/dist/chunk-BFIG2CXM.js.map +1 -0
- package/dist/{chunk-DL5BXZCX.js → chunk-BQY5L3DL.js} +3 -53
- package/dist/chunk-BQY5L3DL.js.map +1 -0
- package/dist/{chunk-F53U6JQG.js → chunk-CQJVM34R.js} +2 -2
- package/dist/chunk-FUBE6KCO.js +124 -0
- package/dist/chunk-FUBE6KCO.js.map +1 -0
- package/dist/chunk-IZBXXAVL.js +524 -0
- package/dist/chunk-IZBXXAVL.js.map +1 -0
- package/dist/{chunk-7JUX4ADQ.js → chunk-IZT6RBHS.js} +1 -1
- package/dist/{chunk-SMIK2YLU.js → chunk-JLQZELHQ.js} +82 -88
- package/dist/chunk-JLQZELHQ.js.map +1 -0
- package/dist/{chunk-TT6ZP4GS.js → chunk-KZXWPG4P.js} +2 -2
- package/dist/{chunk-6BJUYZ43.js → chunk-QIA22IAM.js} +8 -16
- package/dist/chunk-QIA22IAM.js.map +1 -0
- package/dist/{chunk-BGUID5BS.js → chunk-RALBM6HZ.js} +20 -139
- package/dist/chunk-RALBM6HZ.js.map +1 -0
- package/dist/{chunk-TILAKDN6.js → chunk-U5DLLWIC.js} +3 -3
- package/dist/chunk-WL4UE7Q6.js +1386 -0
- package/dist/chunk-WL4UE7Q6.js.map +1 -0
- package/dist/{chunk-GFUB57IT.js → chunk-ZUQN5Y3K.js} +48 -124
- package/dist/chunk-ZUQN5Y3K.js.map +1 -0
- package/dist/{chunk-MRRX4UQB.js → chunk-ZZLLOAI6.js} +3 -3
- package/dist/{cli-CL4ID7EO.js → cli-XWPNARA6.js} +35 -18
- package/dist/cli-XWPNARA6.js.map +1 -0
- package/dist/codealmanac.js +1 -1
- package/dist/{config-ML2RCR7J.js → config-KH3JUMG6.js} +4 -4
- package/dist/doctor-ENJT665Z.js +18 -0
- package/dist/paths-O5CZADP2.js +14 -0
- package/dist/process-KFSLENL3.js +61 -0
- package/dist/{register-commands-FBJ6XQ3L.js → register-commands-LULZUSPO.js} +993 -1015
- package/dist/register-commands-LULZUSPO.js.map +1 -0
- package/dist/uninstall-BD4MMQ7M.js +16 -0
- package/dist/uninstall-BD4MMQ7M.js.map +1 -0
- package/dist/update-XSKPDFMJ.js +11 -0
- package/dist/update-XSKPDFMJ.js.map +1 -0
- package/dist/{wiki-IGNRNLUZ.js → wiki-O4RWMAE6.js} +8 -6
- package/dist/wiki-O4RWMAE6.js.map +1 -0
- package/guides/mini.md +11 -9
- package/guides/reference.md +96 -39
- package/hooks/almanac-capture.sh +7 -8
- package/package.json +1 -1
- package/prompts/agents/.gitkeep +1 -0
- package/prompts/base/notability.md +139 -0
- package/prompts/base/purpose.md +85 -0
- package/prompts/base/syntax.md +114 -0
- package/prompts/operations/absorb.md +43 -0
- package/prompts/operations/build.md +49 -0
- package/prompts/operations/garden.md +51 -0
- package/dist/chunk-6BJUYZ43.js.map +0 -1
- package/dist/chunk-BGUID5BS.js.map +0 -1
- package/dist/chunk-DL5BXZCX.js.map +0 -1
- package/dist/chunk-GFUB57IT.js.map +0 -1
- package/dist/chunk-KQUVMF27.js.map +0 -1
- package/dist/chunk-P5WGG4FJ.js.map +0 -1
- package/dist/chunk-SMIK2YLU.js.map +0 -1
- package/dist/cli-CL4ID7EO.js.map +0 -1
- package/dist/doctor-DOLJRGS4.js +0 -17
- package/dist/register-commands-FBJ6XQ3L.js.map +0 -1
- package/dist/uninstall-DX6LFKMX.js +0 -15
- package/dist/update-P2IPG7RO.js +0 -11
- package/dist/wiki-IGNRNLUZ.js.map +0 -1
- package/prompts/bootstrap.md +0 -176
- package/prompts/reviewer.md +0 -129
- package/prompts/writer.md +0 -134
- /package/dist/{agents-RVTQYE6A.js.map → agents-V2ZOIACP.js.map} +0 -0
- /package/dist/{chunk-F53U6JQG.js.map → chunk-CQJVM34R.js.map} +0 -0
- /package/dist/{chunk-7JUX4ADQ.js.map → chunk-IZT6RBHS.js.map} +0 -0
- /package/dist/{chunk-TT6ZP4GS.js.map → chunk-KZXWPG4P.js.map} +0 -0
- /package/dist/{chunk-TILAKDN6.js.map → chunk-U5DLLWIC.js.map} +0 -0
- /package/dist/{chunk-MRRX4UQB.js.map → chunk-ZZLLOAI6.js.map} +0 -0
- /package/dist/{config-ML2RCR7J.js.map → config-KH3JUMG6.js.map} +0 -0
- /package/dist/{doctor-DOLJRGS4.js.map → doctor-ENJT665Z.js.map} +0 -0
- /package/dist/{uninstall-DX6LFKMX.js.map → paths-O5CZADP2.js.map} +0 -0
- /package/dist/{update-P2IPG7RO.js.map → process-KFSLENL3.js.map} +0 -0
|
@@ -0,0 +1,524 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
ensureFreshIndex,
|
|
4
|
+
openIndex,
|
|
5
|
+
toKebabCase
|
|
6
|
+
} from "./chunk-BFIG2CXM.js";
|
|
7
|
+
import {
|
|
8
|
+
BLUE,
|
|
9
|
+
BOLD,
|
|
10
|
+
DIM,
|
|
11
|
+
GREEN,
|
|
12
|
+
RED,
|
|
13
|
+
RST
|
|
14
|
+
} from "./chunk-FM3VRDK7.js";
|
|
15
|
+
import {
|
|
16
|
+
findNearestAlmanacDir,
|
|
17
|
+
getGlobalAlmanacDir,
|
|
18
|
+
getRegistryPath
|
|
19
|
+
} from "./chunk-IZT6RBHS.js";
|
|
20
|
+
|
|
21
|
+
// src/registry/index.ts
|
|
22
|
+
import { mkdir, readFile, rename, writeFile } from "fs/promises";
|
|
23
|
+
import { dirname } from "path";
|
|
24
|
+
// Load the wiki registry from disk.
//
// A missing file is the first-run state and yields an empty list; a
// malformed file is surfaced as an error rather than being clobbered.
async function readRegistry() {
  const path = getRegistryPath();
  let raw;
  try {
    raw = await readFile(path, "utf8");
  } catch (err) {
    if (isNodeError(err) && err.code === "ENOENT") return [];
    throw err;
  }
  const trimmed = raw.trim();
  if (trimmed.length === 0) return [];
  let parsed;
  try {
    parsed = JSON.parse(trimmed);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    throw new Error(`registry at ${path} is not valid JSON: ${message}`);
  }
  if (!Array.isArray(parsed)) {
    throw new Error(`registry at ${path} must be a JSON array`);
  }
  // Validate and normalize every entry before handing it to callers.
  return parsed.map((item, idx) => {
    if (typeof item !== "object" || item === null) {
      throw new Error(`registry entry ${idx} is not an object`);
    }
    const entry = item;
    const name = typeof entry.name === "string" ? entry.name : "";
    const entryPath = typeof entry.path === "string" ? entry.path : "";
    if (name.length === 0) {
      throw new Error(`registry entry ${idx} is missing a non-empty "name"`);
    }
    if (entryPath.length === 0) {
      throw new Error(`registry entry ${idx} is missing a non-empty "path"`);
    }
    return {
      name,
      description: typeof entry.description === "string" ? entry.description : "",
      path: entryPath,
      registered_at: typeof entry.registered_at === "string" ? entry.registered_at : ""
    };
  });
}
|
|
70
|
+
// Persist the registry atomically: write a sibling `.tmp` file, then
// rename it over the real path so readers never see a partial write.
async function writeRegistry(entries) {
  const path = getRegistryPath();
  await mkdir(dirname(path), { recursive: true });
  const tmpPath = `${path}.tmp`;
  const body = `${JSON.stringify(entries, null, 2)}\n`;
  await writeFile(tmpPath, body, "utf8");
  await rename(tmpPath, path);
}
|
|
79
|
+
// Compare two filesystem paths, case-insensitively on the platforms
// whose default filesystems are case-insensitive (macOS, Windows).
function pathsEqual(a, b) {
  const caseInsensitive =
    process.platform === "darwin" || process.platform === "win32";
  return caseInsensitive ? a.toLowerCase() === b.toLowerCase() : a === b;
}
|
|
85
|
+
// Register a wiki. Any prior entry with the same name OR the same path
// is replaced, so re-registering is idempotent. Returns the updated
// registry list.
async function addEntry(entry) {
  const current = await readRegistry();
  const kept = current.filter(
    (e) => e.name !== entry.name && !pathsEqual(e.path, entry.path)
  );
  kept.push(entry);
  await writeRegistry(kept);
  return kept;
}
|
|
94
|
+
// Remove the registry entry with the given name. Returns the removed
// entry, or null when nothing matched (in which case the file is not
// rewritten).
async function dropEntry(name) {
  const entries = await readRegistry();
  const pos = entries.findIndex((e) => e.name === name);
  if (pos === -1) return null;
  const [removed] = entries.splice(pos, 1);
  await writeRegistry(entries);
  return removed ?? null;
}
|
|
104
|
+
// Look up a registry entry by name and/or path; the first entry
// matching either criterion wins.
async function findEntry(params) {
  const entries = await readRegistry();
  const match = entries.find(
    (entry) =>
      (params.name !== void 0 && entry.name === params.name) ||
      (params.path !== void 0 && pathsEqual(entry.path, params.path))
  );
  return match ?? null;
}
|
|
114
|
+
// Create the global almanac directory if it does not exist yet.
async function ensureGlobalDir() {
  const dir = getGlobalAlmanacDir();
  await mkdir(dir, { recursive: true });
}
|
|
117
|
+
// Narrow an unknown thrown value to a Node-style Error that carries a
// `code` property (e.g. "ENOENT").
function isNodeError(err) {
  if (!(err instanceof Error)) return false;
  return "code" in err;
}
|
|
120
|
+
|
|
121
|
+
// src/commands/health.ts
|
|
122
|
+
import { existsSync as existsSync2 } from "fs";
|
|
123
|
+
import { readFile as readFile2 } from "fs/promises";
|
|
124
|
+
import { basename, join as join2 } from "path";
|
|
125
|
+
import fg from "fast-glob";
|
|
126
|
+
|
|
127
|
+
// src/indexer/duration.ts
|
|
128
|
+
// Parse a shorthand duration ("2w", "30d", "12h", "45m") into seconds.
// Throws on anything that is not <digits><m|h|d|w>.
function parseDuration(input) {
  const m = input.trim().match(/^(\d+)([mhdw])$/);
  if (m === null) {
    throw new Error(
      `invalid duration "${input}" (expected Nw, Nd, Nh, or Nm \u2014 e.g. 2w, 30d)`
    );
  }
  const n = Number.parseInt(m[1] ?? "0", 10);
  const unit = m[2];
  // Seconds per unit; the regex guarantees the key exists, but keep the
  // defensive guard from the original for unexpected units.
  const secondsPerUnit = {
    m: 60,
    h: 60 * 60,
    d: 60 * 60 * 24,
    w: 60 * 60 * 24 * 7
  }[unit];
  if (secondsPerUnit === void 0) {
    throw new Error(`invalid duration unit "${unit ?? ""}"`);
  }
  return n * secondsPerUnit;
}
|
|
151
|
+
|
|
152
|
+
// src/indexer/resolve-wiki.ts
|
|
153
|
+
import { existsSync } from "fs";
|
|
154
|
+
import { join } from "path";
|
|
155
|
+
// Resolve the wiki root directory to operate on.
//
// With `params.wiki`, the name is looked up in the registry and its
// `.almanac/` directory must still be reachable. Otherwise we walk
// upward from `params.cwd` to the nearest directory with `.almanac/`.
async function resolveWikiRoot(params) {
  if (params.wiki === void 0) {
    const nearest = findNearestAlmanacDir(params.cwd);
    if (nearest === null) {
      throw new Error(
        "no .almanac/ found in this directory or any parent; run `almanac init` first"
      );
    }
    return nearest;
  }
  const entry = await findEntry({ name: params.wiki });
  if (entry === null) {
    throw new Error(`no registered wiki named "${params.wiki}"`);
  }
  if (!existsSync(join(entry.path, ".almanac"))) {
    throw new Error(
      `wiki "${params.wiki}" path is unreachable (${entry.path})`
    );
  }
  return entry.path;
}
|
|
176
|
+
|
|
177
|
+
// src/topics/dag.ts
|
|
178
|
+
// Topic graphs are user-authored and may contain cycles or pathological
// depth; cap every traversal so a bad graph cannot hang the CLI.
var DAG_DEPTH_CAP = 32;

// Collect every transitive ancestor slug of `slug`, using only the
// parent edges declared in a single topics file. Breadth-first with a
// visited set, bounded by DAG_DEPTH_CAP levels.
function ancestorsInFile(file, slug) {
  const parentsOf = new Map(file.topics.map((t) => [t.slug, t.parents]));
  const ancestors = new Set();
  let frontier = parentsOf.get(slug) ?? [];
  for (let depth = 0; frontier.length > 0 && depth < DAG_DEPTH_CAP; depth += 1) {
    const next = [];
    for (const node of frontier) {
      if (ancestors.has(node)) continue;
      ancestors.add(node);
      const parents = parentsOf.get(node);
      if (parents !== void 0) next.push(...parents);
    }
    frontier = next;
  }
  return ancestors;
}

// All transitive descendant slugs of `slug`, per the indexed
// topic_parents table, with the depth cap enforced inside SQL.
function descendantsInDb(db, slug) {
  const rows = db.prepare(
    `WITH RECURSIVE desc(slug, depth) AS (
SELECT child_slug, 1 FROM topic_parents WHERE parent_slug = ?
UNION
SELECT tp.child_slug, d.depth + 1
FROM topic_parents tp
JOIN desc d ON tp.parent_slug = d.slug
WHERE d.depth < ?
)
SELECT DISTINCT slug FROM desc ORDER BY slug`
  ).all(slug, DAG_DEPTH_CAP);
  return rows.map((r) => r.slug);
}

// The slug itself plus all of its descendants.
function subtreeInDb(db, slug) {
  return [slug, ...descendantsInDb(db, slug)];
}
|
|
217
|
+
|
|
218
|
+
// src/commands/health.ts
|
|
219
|
+
// Pages untouched for this long count as "stale" by default (90 days).
var DEFAULT_STALE_SECONDS = 90 * 24 * 60 * 60;

// Run the health command: refresh the index, gather every report
// section (optionally scoped by topic/stdin), and render either JSON
// or the human-readable report.
async function runHealth(options) {
  const repoRoot = await resolveWikiRoot({ cwd: options.cwd, wiki: options.wiki });
  await ensureFreshIndex({ repoRoot });
  const almanacDir = join2(repoRoot, ".almanac");
  const pagesDir = join2(almanacDir, "pages");
  const db = openIndex(join2(almanacDir, "index.db"));
  try {
    const staleSeconds =
      options.stale !== void 0 ? parseDuration(options.stale) : DEFAULT_STALE_SECONDS;
    const scope = resolveScope(db, options);
    const report = {
      orphans: findOrphans(db, scope),
      stale: findStale(db, scope, staleSeconds),
      dead_refs: await findDeadRefs(db, scope, repoRoot),
      broken_links: findBrokenLinks(db, scope),
      broken_xwiki: await findBrokenXwiki(db, scope),
      empty_topics: findEmptyTopics(db, scope),
      empty_pages: await findEmptyPages(db, scope, pagesDir),
      slug_collisions: await findSlugCollisions(pagesDir)
    };
    const stdout =
      options.json === true
        ? `${JSON.stringify(report, null, 2)}\n`
        : formatReport(report);
    return { stdout, stderr: "", exitCode: 0 };
  } finally {
    // Always release the SQLite handle, even when a check throws.
    db.close();
  }
}
|
|
256
|
+
// Build the page/topic scope filters for a health run.
//
// `--topic X` restricts the scope to the topic subtree (and every page
// tagged with a topic inside it); slugs supplied on stdin further
// intersect the page set. A null set means "unscoped".
function resolveScope(db, options) {
  let pages = null;
  let topics = null;
  if (options.topic !== void 0) {
    const rootSlug = toKebabCase(options.topic);
    if (rootSlug.length > 0) {
      const subtree = subtreeInDb(db, rootSlug);
      topics = new Set(subtree);
      const placeholders = subtree.map(() => "?").join(", ");
      const rows = db.prepare(
        `SELECT DISTINCT page_slug FROM page_topics
WHERE topic_slug IN (${placeholders})`
      ).all(...subtree);
      pages = new Set(rows.map((r) => r.page_slug));
    }
  }
  if (options.stdin === true && options.stdinInput !== void 0) {
    const stdinPages = new Set(
      options.stdinInput
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((s) => s.length > 0)
    );
    // Intersect with any topic-derived page scope.
    if (pages === null) {
      pages = stdinPages;
    } else {
      const prior = pages;
      pages = new Set([...stdinPages].filter((s) => prior.has(s)));
    }
  }
  return { pages, topics };
}
|
|
287
|
+
// True when `slug` is admitted by the page scope; a null scope admits
// every page.
function inPageScope(scope, slug) {
  return scope.pages === null || scope.pages.has(slug);
}
|
|
291
|
+
// Active pages that are tagged with no topic at all.
function findOrphans(db, scope) {
  const rows = db.prepare(
    `SELECT p.slug FROM pages p
WHERE p.archived_at IS NULL
AND NOT EXISTS (
SELECT 1 FROM page_topics pt WHERE pt.page_slug = p.slug
)
ORDER BY p.slug`
  ).all();
  return rows.filter((row) => inPageScope(scope, row.slug));
}
|
|
302
|
+
// Active pages last updated more than `staleSeconds` ago, oldest first,
// annotated with their age in whole days.
function findStale(db, scope, staleSeconds) {
  const nowSeconds = Math.floor(Date.now() / 1e3);
  const rows = db.prepare(
    `SELECT slug, updated_at FROM pages
WHERE archived_at IS NULL AND updated_at < ?
ORDER BY updated_at ASC`
  ).all(nowSeconds - staleSeconds);
  const secondsPerDay = 60 * 60 * 24;
  return rows
    .filter((row) => inPageScope(scope, row.slug))
    .map((row) => ({
      slug: row.slug,
      days_since_update: Math.floor((nowSeconds - row.updated_at) / secondsPerDay)
    }));
}
|
|
315
|
+
// File references on active pages whose target no longer exists on
// disk (relative to the repo root).
async function findDeadRefs(db, scope, repoRoot) {
  const rows = db.prepare(
    `SELECT p.slug, r.path, r.original_path, r.is_dir
FROM file_refs r
JOIN pages p ON p.slug = r.page_slug
WHERE p.archived_at IS NULL
ORDER BY p.slug, r.path`
  ).all();
  const dead = [];
  for (const row of rows) {
    if (!inPageScope(scope, row.slug)) continue;
    // existsSync covers both files and directories, so `is_dir` needs
    // no special-casing here.
    const abs = join2(repoRoot, row.original_path);
    if (!existsSync2(abs)) {
      dead.push({ slug: row.slug, path: row.original_path });
    }
  }
  return dead;
}
|
|
333
|
+
// Wikilinks on active pages whose target page does not exist.
function findBrokenLinks(db, scope) {
  const rows = db.prepare(
    `SELECT w.source_slug, w.target_slug
FROM wikilinks w
JOIN pages src ON src.slug = w.source_slug
LEFT JOIN pages tgt ON tgt.slug = w.target_slug
WHERE tgt.slug IS NULL AND src.archived_at IS NULL
ORDER BY w.source_slug, w.target_slug`
  ).all();
  return rows.filter((row) => inPageScope(scope, row.source_slug));
}
|
|
344
|
+
// Cross-wiki links on active pages whose target wiki is unregistered
// or whose registered path no longer contains `.almanac/`.
async function findBrokenXwiki(db, scope) {
  const rows = db.prepare(
    // Same archived-source filter as `findBrokenLinks`. Retired pages
    // shouldn't spam the report with links to wikis that may have
    // been intentionally retired too.
    `SELECT x.source_slug, x.target_wiki, x.target_slug
FROM cross_wiki_links x
JOIN pages src ON src.slug = x.source_slug
WHERE src.archived_at IS NULL
ORDER BY x.source_slug, x.target_wiki, x.target_slug`
  ).all();
  const broken = [];
  // Each distinct target wiki is resolved against the registry at most
  // once per run.
  const reachableCache = new Map();
  for (const row of rows) {
    if (!inPageScope(scope, row.source_slug)) continue;
    let reachable = reachableCache.get(row.target_wiki);
    if (reachable === void 0) {
      const entry = await findEntry({ name: row.target_wiki });
      reachable = entry !== null && existsSync2(join2(entry.path, ".almanac"));
      reachableCache.set(row.target_wiki, reachable);
    }
    if (!reachable) {
      broken.push({
        source_slug: row.source_slug,
        target_wiki: row.target_wiki,
        target_slug: row.target_slug
      });
    }
  }
  return broken;
}
|
|
375
|
+
// Topics with no page tagged under them.
function findEmptyTopics(db, scope) {
  const rows = db.prepare(
    `SELECT t.slug FROM topics t
WHERE NOT EXISTS (
SELECT 1 FROM page_topics pt WHERE pt.topic_slug = t.slug
)
ORDER BY t.slug`
  ).all();
  if (scope.topics === null) return rows;
  return rows.filter((row) => scope.topics.has(row.slug));
}
|
|
386
|
+
// Active pages whose body (after any frontmatter) contains nothing but
// blank lines and headings.
async function findEmptyPages(db, scope, pagesDir) {
  const rows = db.prepare(
    `SELECT slug, file_path FROM pages
WHERE archived_at IS NULL
ORDER BY slug`
  ).all();
  const empty = [];
  for (const row of rows) {
    if (!inPageScope(scope, row.slug)) continue;
    let raw;
    try {
      raw = await readFile2(row.file_path, "utf8");
    } catch {
      // An unreadable page file is not this check's concern; skip it.
      continue;
    }
    // Strip a leading `--- ... ---` frontmatter block when present.
    const m = raw.match(/^---\r?\n[\s\S]*?\r?\n---\r?\n?([\s\S]*)$/);
    const body = m !== null ? m[1] ?? "" : raw;
    void pagesDir;
    const hasSubstance = body.split(/\r?\n/).some((line) => {
      const t = line.trim();
      if (t.length === 0) return false;
      return !t.startsWith("#");
    });
    if (!hasSubstance) empty.push({ slug: row.slug });
  }
  return empty;
}
|
|
416
|
+
// Distinct page files under pagesDir whose basenames normalize to the
// same slug, reported with sorted paths and sorted slugs.
async function findSlugCollisions(pagesDir) {
  if (!existsSync2(pagesDir)) return [];
  const files = await fg("**/*.md", {
    cwd: pagesDir,
    absolute: false,
    onlyFiles: true,
    caseSensitiveMatch: true
  });
  const bySlug = new Map();
  for (const rel of files) {
    const slug = toKebabCase(basename(rel, ".md"));
    if (slug.length === 0) continue;
    const bucket = bySlug.get(slug);
    if (bucket === void 0) {
      bySlug.set(slug, [rel]);
    } else {
      bucket.push(rel);
    }
  }
  const collisions = [];
  for (const [slug, paths] of bySlug) {
    if (paths.length > 1) {
      collisions.push({ slug, paths: paths.sort() });
    }
  }
  collisions.sort((a, b) => a.slug.localeCompare(b.slug));
  return collisions;
}
|
|
441
|
+
// Render the whole health report as colored terminal text: one section
// per check, in a fixed order, separated by blank lines.
function formatReport(r) {
  const sections = [
    section(
      "orphans",
      r.orphans.length,
      r.orphans.map((o) => ` ${BLUE}${o.slug}${RST}`)
    ),
    section(
      "stale",
      r.stale.length,
      r.stale.map((s) => ` ${BLUE}${s.slug}${RST} ${DIM}(${s.days_since_update} days)${RST}`)
    ),
    section(
      "dead-refs",
      r.dead_refs.length,
      r.dead_refs.map((d) => ` ${BLUE}${d.slug}${RST} references ${d.path} ${DIM}(missing)${RST}`)
    ),
    section(
      "broken-links",
      r.broken_links.length,
      r.broken_links.map(
        (b) => ` ${BLUE}${b.source_slug}${RST} \u2192 ${b.target_slug} ${DIM}(target does not exist)${RST}`
      )
    ),
    section(
      "broken-xwiki",
      r.broken_xwiki.length,
      r.broken_xwiki.map(
        (b) => ` ${BLUE}${b.source_slug}${RST} \u2192 ${b.target_wiki}:${b.target_slug} ${DIM}(wiki unregistered or unreachable)${RST}`
      )
    ),
    section(
      "empty-topics",
      r.empty_topics.length,
      r.empty_topics.map((e) => ` ${BLUE}${e.slug}${RST}`)
    ),
    section(
      "empty-pages",
      r.empty_pages.length,
      r.empty_pages.map((e) => ` ${BLUE}${e.slug}${RST}`)
    ),
    section(
      "slug-collisions",
      r.slug_collisions.length,
      r.slug_collisions.map((c) => ` ${BLUE}${c.slug}${RST}: ${c.paths.join(", ")}`)
    )
  ];
  return `${sections.join("\n\n")}\n`;
}
|
|
506
|
+
// Render one report section: a green "(0): (ok)" header when clean,
// otherwise a red count followed by the detail lines.
function section(label, count, lines) {
  if (count === 0) {
    return `${BOLD}${label}${RST} ${GREEN}(0): (ok)${RST}`;
  }
  return `${BOLD}${label}${RST} ${RED}(${count})${RST}:\n${lines.join("\n")}`;
}
|
|
511
|
+
|
|
512
|
+
export {
|
|
513
|
+
readRegistry,
|
|
514
|
+
addEntry,
|
|
515
|
+
dropEntry,
|
|
516
|
+
findEntry,
|
|
517
|
+
ensureGlobalDir,
|
|
518
|
+
resolveWikiRoot,
|
|
519
|
+
ancestorsInFile,
|
|
520
|
+
descendantsInDb,
|
|
521
|
+
parseDuration,
|
|
522
|
+
runHealth
|
|
523
|
+
};
|
|
524
|
+
//# sourceMappingURL=chunk-IZBXXAVL.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/registry/index.ts","../src/commands/health.ts","../src/indexer/duration.ts","../src/indexer/resolve-wiki.ts","../src/topics/dag.ts"],"sourcesContent":["import { mkdir, readFile, rename, writeFile } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\n\nimport { getGlobalAlmanacDir, getRegistryPath } from \"../paths.js\";\nimport { toKebabCase } from \"../slug.js\";\n\n// Re-export so existing import sites (`from \"../registry/index.js\"`) keep\n// working without a mechanical fan-out. The canonical home is `../slug.js`.\nexport { toKebabCase };\n\n/**\n * One entry in `~/.almanac/registry.json`.\n *\n * `name` is the canonical kebab-case slug the user types. `path` is the\n * absolute repo root (the directory that contains `.almanac/`). We store\n * absolute paths so cross-wiki resolution works regardless of the caller's\n * cwd.\n */\nexport interface RegistryEntry {\n name: string;\n description: string;\n path: string;\n registered_at: string;\n}\n\n/**\n * Read the registry file into memory.\n *\n * A missing file is not an error — it's the first-run state, which we\n * treat as an empty registry. A malformed file IS an error; we surface it\n * rather than silently clobbering the user's data.\n */\nexport async function readRegistry(): Promise<RegistryEntry[]> {\n const path = getRegistryPath();\n let raw: string;\n try {\n raw = await readFile(path, \"utf8\");\n } catch (err: unknown) {\n if (isNodeError(err) && err.code === \"ENOENT\") {\n return [];\n }\n throw err;\n }\n\n const trimmed = raw.trim();\n if (trimmed.length === 0) {\n return [];\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(trimmed);\n } catch (err: unknown) {\n const message = err instanceof Error ? err.message : String(err);\n throw new Error(`registry at ${path} is not valid JSON: ${message}`);\n }\n\n if (!Array.isArray(parsed)) {\n throw new Error(`registry at ${path} must be a JSON array`);\n }\n\n // Validate every entry. 
We do NOT silently coerce missing `name` or\n // `path` — an entry with `name: \"\"` would be unremovable via `--drop`\n // and an empty `path` would match any `findEntry({ path: \"\" })` call.\n // If someone hand-edited the registry into a bad state, surfacing the\n // error is strictly better than limping along with corrupt data.\n return parsed.map((item, idx) => {\n if (typeof item !== \"object\" || item === null) {\n throw new Error(`registry entry ${idx} is not an object`);\n }\n const e = item as Record<string, unknown>;\n const name = typeof e.name === \"string\" ? e.name : \"\";\n const path = typeof e.path === \"string\" ? e.path : \"\";\n if (name.length === 0) {\n throw new Error(`registry entry ${idx} is missing a non-empty \"name\"`);\n }\n if (path.length === 0) {\n throw new Error(`registry entry ${idx} is missing a non-empty \"path\"`);\n }\n return {\n name,\n description: typeof e.description === \"string\" ? e.description : \"\",\n path,\n registered_at:\n typeof e.registered_at === \"string\" ? e.registered_at : \"\",\n };\n });\n}\n\n/**\n * Persist the registry to disk. Creates `~/.almanac/` if it doesn't exist.\n *\n * We write with a trailing newline and 2-space indentation so the file is\n * diff-friendly if someone ever commits or inspects it manually.\n *\n * The write is atomic: we write to `registry.json.tmp` and then rename,\n * which is an atomic operation on every mainstream filesystem. 
This\n * matters because two concurrent `almanac init` (or autoregister) calls\n * from different shells would otherwise race on a partial write and\n * corrupt the file — a single `rename` means one wins cleanly and the\n * other's contents are simply dropped.\n */\nexport async function writeRegistry(entries: RegistryEntry[]): Promise<void> {\n const path = getRegistryPath();\n await mkdir(dirname(path), { recursive: true });\n const body = `${JSON.stringify(entries, null, 2)}\\n`;\n const tmpPath = `${path}.tmp`;\n await writeFile(tmpPath, body, \"utf8\");\n await rename(tmpPath, path);\n}\n\n/**\n * macOS (HFS+/APFS default) and Windows (NTFS default) are case-insensitive\n * but case-preserving. `/Users/x/Project` and `/Users/x/project` are the\n * same directory. We must treat them as the same registry entry, or a\n * single `almanac init` from a differently-cased cwd would duplicate the\n * row. Linux is case-sensitive — do not normalize there.\n *\n * Callers still store the original casing; only comparisons are lowercased.\n */\nfunction pathsEqual(a: string, b: string): boolean {\n if (process.platform === \"darwin\" || process.platform === \"win32\") {\n return a.toLowerCase() === b.toLowerCase();\n }\n return a === b;\n}\n\n/**\n * Add (or replace) an entry in the registry.\n *\n * Uniqueness is enforced on BOTH `name` and `path`: a repo can only appear\n * once, and a name can only refer to one repo. If either matches, we\n * replace the existing entry rather than creating a duplicate. This is\n * what makes auto-registration idempotent.\n */\nexport async function addEntry(entry: RegistryEntry): Promise<RegistryEntry[]> {\n const existing = await readRegistry();\n const filtered = existing.filter(\n (e) => e.name !== entry.name && !pathsEqual(e.path, entry.path),\n );\n filtered.push(entry);\n await writeRegistry(filtered);\n return filtered;\n}\n\n/**\n * Remove an entry by name. Returns the removed entry (or `null` if none\n * matched). 
Only `almanac list --drop <name>` calls this — we never drop\n * automatically, even for unreachable paths.\n */\nexport async function dropEntry(name: string): Promise<RegistryEntry | null> {\n const existing = await readRegistry();\n const idx = existing.findIndex((e) => e.name === name);\n if (idx === -1) {\n return null;\n }\n const [removed] = existing.splice(idx, 1);\n await writeRegistry(existing);\n return removed ?? null;\n}\n\n/**\n * Find an entry by either name or absolute path. Used by auto-registration\n * to decide whether the current repo is already known.\n *\n * Path comparison is case-insensitive on macOS/Windows (see `pathsEqual`).\n */\nexport async function findEntry(params: {\n name?: string;\n path?: string;\n}): Promise<RegistryEntry | null> {\n const entries = await readRegistry();\n for (const entry of entries) {\n if (params.name !== undefined && entry.name === params.name) return entry;\n if (params.path !== undefined && pathsEqual(entry.path, params.path)) {\n return entry;\n }\n }\n return null;\n}\n\n/**\n * Ensure the global `.almanac/` directory exists. 
Safe to call repeatedly;\n * `mkdir recursive` is a no-op when the directory already exists.\n */\nexport async function ensureGlobalDir(): Promise<void> {\n await mkdir(getGlobalAlmanacDir(), { recursive: true });\n}\n\nfunction isNodeError(err: unknown): err is NodeJS.ErrnoException {\n return err instanceof Error && \"code\" in err;\n}\n","import { existsSync } from \"node:fs\";\nimport { readFile } from \"node:fs/promises\";\nimport { basename, join } from \"node:path\";\n\nimport fg from \"fast-glob\";\nimport type Database from \"better-sqlite3\";\n\nimport { BLUE, BOLD, DIM, GREEN, RED, RST } from \"../ansi.js\";\nimport { parseDuration } from \"../indexer/duration.js\";\nimport { ensureFreshIndex } from \"../indexer/index.js\";\nimport { resolveWikiRoot } from \"../indexer/resolve-wiki.js\";\nimport { openIndex } from \"../indexer/schema.js\";\nimport { findEntry } from \"../registry/index.js\";\nimport { toKebabCase } from \"../slug.js\";\nimport { subtreeInDb } from \"../topics/dag.js\";\n\n/**\n * `almanac health` — flag problems in the wiki.\n *\n * Eight independent categories, each checked against the current index\n * and filesystem. 
Categories never throw each other off; one failing\n * is not a reason to skip the others.\n *\n * Scoping:\n * - `--topic <slug>` narrows every page-scoped category to pages\n * tagged with that topic OR any descendant topic (DAG traversal).\n * Topic-level categories (`empty_topics`) are narrowed to the\n * subtree itself.\n * - `--stdin` reads page slugs from stdin and limits page-scoped\n * categories to that set.\n *\n * Output:\n * - default: human-readable, grouped by category with counts.\n * - `--json`: one big object, shape = `HealthReport`.\n */\n\nexport interface HealthReport {\n orphans: { slug: string }[];\n stale: { slug: string; days_since_update: number }[];\n dead_refs: { slug: string; path: string }[];\n broken_links: { source_slug: string; target_slug: string }[];\n broken_xwiki: { source_slug: string; target_wiki: string; target_slug: string }[];\n empty_topics: { slug: string }[];\n empty_pages: { slug: string }[];\n slug_collisions: { slug: string; paths: string[] }[];\n}\n\nexport interface HealthOptions {\n cwd: string;\n wiki?: string;\n topic?: string;\n stale?: string;\n stdin?: boolean;\n stdinInput?: string;\n json?: boolean;\n}\n\nexport interface HealthCommandOutput {\n stdout: string;\n stderr: string;\n exitCode: number;\n}\n\n/**\n * Default `--stale` window. 90 days matches the spec. Users can tune\n * with `--stale <duration>` using the shared parser.\n */\nconst DEFAULT_STALE_SECONDS = 90 * 24 * 60 * 60;\n\nexport async function runHealth(\n options: HealthOptions,\n): Promise<HealthCommandOutput> {\n const repoRoot = await resolveWikiRoot({ cwd: options.cwd, wiki: options.wiki });\n await ensureFreshIndex({ repoRoot });\n\n const almanacDir = join(repoRoot, \".almanac\");\n const pagesDir = join(almanacDir, \"pages\");\n const db = openIndex(join(almanacDir, \"index.db\"));\n\n try {\n const staleSeconds = options.stale !== undefined\n ? 
parseDuration(options.stale)\n : DEFAULT_STALE_SECONDS;\n\n const scope = resolveScope(db, options);\n\n const report: HealthReport = {\n orphans: findOrphans(db, scope),\n stale: findStale(db, scope, staleSeconds),\n dead_refs: await findDeadRefs(db, scope, repoRoot),\n broken_links: findBrokenLinks(db, scope),\n broken_xwiki: await findBrokenXwiki(db, scope),\n empty_topics: findEmptyTopics(db, scope),\n empty_pages: await findEmptyPages(db, scope, pagesDir),\n slug_collisions: await findSlugCollisions(pagesDir),\n };\n\n if (options.json === true) {\n return {\n stdout: `${JSON.stringify(report, null, 2)}\\n`,\n stderr: \"\",\n exitCode: 0,\n };\n }\n\n return {\n stdout: formatReport(report),\n stderr: \"\",\n exitCode: 0,\n };\n } finally {\n db.close();\n }\n}\n\ninterface HealthScope {\n /** When non-null, restrict page-scoped checks to these slugs. */\n pages: Set<string> | null;\n /** When non-null, restrict topic-scoped checks to these slugs. */\n topics: Set<string> | null;\n}\n\n/**\n * Compute the active page/topic scope from `--topic` and `--stdin`\n * flags. 
Both null = no restriction (report everything).\n */\nfunction resolveScope(db: Database.Database, options: HealthOptions): HealthScope {\n let pages: Set<string> | null = null;\n let topics: Set<string> | null = null;\n\n if (options.topic !== undefined) {\n const rootSlug = toKebabCase(options.topic);\n if (rootSlug.length > 0) {\n const subtree = subtreeInDb(db, rootSlug);\n topics = new Set(subtree);\n const placeholders = subtree.map(() => \"?\").join(\", \");\n const rows = db\n .prepare<unknown[], { page_slug: string }>(\n `SELECT DISTINCT page_slug FROM page_topics\n WHERE topic_slug IN (${placeholders})`,\n )\n .all(...subtree);\n pages = new Set(rows.map((r) => r.page_slug));\n }\n }\n\n if (options.stdin === true && options.stdinInput !== undefined) {\n const stdinPages = new Set<string>();\n for (const line of options.stdinInput.split(/\\r?\\n/)) {\n const s = line.trim();\n if (s.length > 0) stdinPages.add(s);\n }\n // Intersect with any existing topic-scoped set.\n if (pages === null) pages = stdinPages;\n else {\n const out = new Set<string>();\n for (const s of stdinPages) if (pages.has(s)) out.add(s);\n pages = out;\n }\n }\n\n return { pages, topics };\n}\n\nfunction inPageScope(scope: HealthScope, slug: string): boolean {\n if (scope.pages === null) return true;\n return scope.pages.has(slug);\n}\n\n// ─────────────────────────────────────────────────────────────────────\n// individual checks\n// ─────────────────────────────────────────────────────────────────────\n\n/**\n * Pages with zero `topics:`. 
Archived pages are exempt — the spec\n * excludes them from search by default and they're inherently\n * \"retired\", not \"abandoned\".\n */\nfunction findOrphans(\n db: Database.Database,\n scope: HealthScope,\n): { slug: string }[] {\n const rows = db\n .prepare<[], { slug: string }>(\n `SELECT p.slug FROM pages p\n WHERE p.archived_at IS NULL\n AND NOT EXISTS (\n SELECT 1 FROM page_topics pt WHERE pt.page_slug = p.slug\n )\n ORDER BY p.slug`,\n )\n .all();\n return rows.filter((r) => inPageScope(scope, r.slug));\n}\n\n/**\n * Active pages whose `updated_at` is older than `staleSeconds`. We\n * report `days_since_update` rather than a raw timestamp because the\n * spec's example output (\"old-architecture (124 days)\") shows that.\n */\nfunction findStale(\n db: Database.Database,\n scope: HealthScope,\n staleSeconds: number,\n): { slug: string; days_since_update: number }[] {\n const now = Math.floor(Date.now() / 1000);\n const threshold = now - staleSeconds;\n const rows = db\n .prepare<[number], { slug: string; updated_at: number }>(\n `SELECT slug, updated_at FROM pages\n WHERE archived_at IS NULL AND updated_at < ?\n ORDER BY updated_at ASC`,\n )\n .all(threshold);\n return rows\n .filter((r) => inPageScope(scope, r.slug))\n .map((r) => ({\n slug: r.slug,\n days_since_update: Math.floor((now - r.updated_at) / (60 * 60 * 24)),\n }));\n}\n\n/**\n * `file_refs` whose target paths no longer exist on disk. We `stat`\n * each referenced path, relative to the repo root, and report misses.\n *\n * Only checks active pages — archived pages are allowed to reference\n * files that have since been deleted (that's often why they were\n * archived in the first place).\n *\n * We stat the `original_path` (author's casing) rather than the\n * lowercased `path` — on case-sensitive filesystems like Linux, stat\n * of a lowercased alias of `src/Dockerfile` returns ENOENT even\n * though the file exists. 
macOS and Windows are case-insensitive so\n * either form resolves there; using the original consistently means\n * the code behaves identically on every host.\n */\nasync function findDeadRefs(\n db: Database.Database,\n scope: HealthScope,\n repoRoot: string,\n): Promise<{ slug: string; path: string }[]> {\n const rows = db\n .prepare<\n [],\n { slug: string; path: string; original_path: string; is_dir: number }\n >(\n `SELECT p.slug, r.path, r.original_path, r.is_dir\n FROM file_refs r\n JOIN pages p ON p.slug = r.page_slug\n WHERE p.archived_at IS NULL\n ORDER BY p.slug, r.path`,\n )\n .all();\n const out: { slug: string; path: string }[] = [];\n for (const r of rows) {\n if (!inPageScope(scope, r.slug)) continue;\n const abs = join(repoRoot, r.original_path);\n if (!existsSync(abs)) {\n // Surface the author's casing in the report — matches what's in\n // the user's frontmatter/wikilink, which is what they'll search\n // for when fixing the miss.\n out.push({ slug: r.slug, path: r.original_path });\n }\n }\n return out;\n}\n\n/**\n * Wikilinks whose target slug has no row in `pages`. Every other\n * page-scoped check filters archived source pages out; this one and\n * `findBrokenXwiki` follow the same rule so the report doesn't flag\n * broken links from pages that have been retired.\n */\nfunction findBrokenLinks(\n db: Database.Database,\n scope: HealthScope,\n): { source_slug: string; target_slug: string }[] {\n const rows = db\n .prepare<[], { source_slug: string; target_slug: string }>(\n `SELECT w.source_slug, w.target_slug\n FROM wikilinks w\n JOIN pages src ON src.slug = w.source_slug\n LEFT JOIN pages tgt ON tgt.slug = w.target_slug\n WHERE tgt.slug IS NULL AND src.archived_at IS NULL\n ORDER BY w.source_slug, w.target_slug`,\n )\n .all();\n return rows.filter((r) => inPageScope(scope, r.source_slug));\n}\n\n/**\n * Cross-wiki links whose target wiki isn't registered OR whose path\n * is unreachable. 
Per the plan we stop at \"wiki unregistered or path\n * missing\" — walking into the other wiki's `index.db` to check the\n * slug exists is explicitly out of scope for slice 3 (documented in\n * the plan). A follow-up slice can deepen this.\n */\nasync function findBrokenXwiki(\n db: Database.Database,\n scope: HealthScope,\n): Promise<{ source_slug: string; target_wiki: string; target_slug: string }[]> {\n const rows = db\n .prepare<\n [],\n { source_slug: string; target_wiki: string; target_slug: string }\n >(\n // Same archived-source filter as `findBrokenLinks`. Retired pages\n // shouldn't spam the report with links to wikis that may have\n // been intentionally retired too.\n `SELECT x.source_slug, x.target_wiki, x.target_slug\n FROM cross_wiki_links x\n JOIN pages src ON src.slug = x.source_slug\n WHERE src.archived_at IS NULL\n ORDER BY x.source_slug, x.target_wiki, x.target_slug`,\n )\n .all();\n const out: { source_slug: string; target_wiki: string; target_slug: string }[] = [];\n // Cache the registry lookup so we only resolve each wiki once.\n const reachableCache = new Map<string, boolean>();\n for (const r of rows) {\n if (!inPageScope(scope, r.source_slug)) continue;\n let ok = reachableCache.get(r.target_wiki);\n if (ok === undefined) {\n const entry = await findEntry({ name: r.target_wiki });\n ok = entry !== null && existsSync(join(entry.path, \".almanac\"));\n reachableCache.set(r.target_wiki, ok);\n }\n if (!ok) {\n out.push({\n source_slug: r.source_slug,\n target_wiki: r.target_wiki,\n target_slug: r.target_slug,\n });\n }\n }\n return out;\n}\n\n/** Topics with zero pages. 
*/\nfunction findEmptyTopics(\n db: Database.Database,\n scope: HealthScope,\n): { slug: string }[] {\n const rows = db\n .prepare<[], { slug: string }>(\n `SELECT t.slug FROM topics t\n WHERE NOT EXISTS (\n SELECT 1 FROM page_topics pt WHERE pt.topic_slug = t.slug\n )\n ORDER BY t.slug`,\n )\n .all();\n if (scope.topics === null) return rows;\n return rows.filter((r) => scope.topics!.has(r.slug));\n}\n\n/**\n * Pages whose body is effectively empty — only frontmatter, maybe a\n * heading, no prose. \"Empty\" = after dropping frontmatter and heading\n * lines, the remaining non-blank non-whitespace content is < 40\n * characters. This matches the test from the plan: \"a page with only\n * frontmatter + heading is empty; with a paragraph it's not.\"\n *\n * Archived pages are exempt — deliberately minimal archive stubs\n * shouldn't be flagged.\n */\nasync function findEmptyPages(\n db: Database.Database,\n scope: HealthScope,\n pagesDir: string,\n): Promise<{ slug: string }[]> {\n const rows = db\n .prepare<[], { slug: string; file_path: string }>(\n `SELECT slug, file_path FROM pages\n WHERE archived_at IS NULL\n ORDER BY slug`,\n )\n .all();\n const out: { slug: string }[] = [];\n for (const r of rows) {\n if (!inPageScope(scope, r.slug)) continue;\n let raw: string;\n try {\n raw = await readFile(r.file_path, \"utf8\");\n } catch {\n continue;\n }\n // Strip frontmatter if present.\n const m = raw.match(/^---\\r?\\n[\\s\\S]*?\\r?\\n---\\r?\\n?([\\s\\S]*)$/);\n const body = m !== null ? (m[1] ?? \"\") : raw;\n // \"Empty\" = after dropping frontmatter, heading lines, and blank\n // lines, nothing non-trivial remains. 
A single-line wikilink or\n // one-sentence paragraph counts as content; a page with only a\n // heading (or a heading + whitespace) does not.\n //\n // `pagesDir` is accepted for parity with future content-resolution\n // checks (e.g., resolving includes); referenced so lint doesn't\n // complain about an unused parameter.\n void pagesDir;\n const hasSubstance = body\n .split(/\\r?\\n/)\n .some((l) => {\n const t = l.trim();\n if (t.length === 0) return false;\n if (t.startsWith(\"#\")) return false;\n return true;\n });\n if (!hasSubstance) {\n out.push({ slug: r.slug });\n }\n }\n return out;\n}\n\n/**\n * Walk `.almanac/pages/` and group filenames by their kebab-cased\n * slug. Any slug with >1 filename is a collision. We rescan rather\n * than reading a persisted table — indexing surfaces collisions only\n * as warnings, so a dedicated rescan gives us a definitive answer\n * without adding a new table.\n */\nasync function findSlugCollisions(\n pagesDir: string,\n): Promise<{ slug: string; paths: string[] }[]> {\n if (!existsSync(pagesDir)) return [];\n const files = await fg(\"**/*.md\", {\n cwd: pagesDir,\n absolute: false,\n onlyFiles: true,\n caseSensitiveMatch: true,\n });\n const bySlug = new Map<string, string[]>();\n for (const rel of files) {\n const slug = toKebabCase(basename(rel, \".md\"));\n if (slug.length === 0) continue;\n const list = bySlug.get(slug) ?? 
[];\n list.push(rel);\n bySlug.set(slug, list);\n }\n const out: { slug: string; paths: string[] }[] = [];\n for (const [slug, paths] of bySlug.entries()) {\n if (paths.length > 1) {\n out.push({ slug, paths: paths.sort() });\n }\n }\n out.sort((a, b) => a.slug.localeCompare(b.slug));\n return out;\n}\n\n// ─────────────────────────────────────────────────────────────────────\n// pretty-print\n// ─────────────────────────────────────────────────────────────────────\n\nfunction formatReport(r: HealthReport): string {\n const sections: string[] = [];\n sections.push(\n section(\n \"orphans\",\n r.orphans.length,\n r.orphans.map((o) => ` ${BLUE}${o.slug}${RST}`),\n ),\n );\n sections.push(\n section(\n \"stale\",\n r.stale.length,\n r.stale.map((s) => ` ${BLUE}${s.slug}${RST} ${DIM}(${s.days_since_update} days)${RST}`),\n ),\n );\n sections.push(\n section(\n \"dead-refs\",\n r.dead_refs.length,\n r.dead_refs.map((d) => ` ${BLUE}${d.slug}${RST} references ${d.path} ${DIM}(missing)${RST}`),\n ),\n );\n sections.push(\n section(\n \"broken-links\",\n r.broken_links.length,\n r.broken_links.map(\n (b) => ` ${BLUE}${b.source_slug}${RST} → ${b.target_slug} ${DIM}(target does not exist)${RST}`,\n ),\n ),\n );\n sections.push(\n section(\n \"broken-xwiki\",\n r.broken_xwiki.length,\n r.broken_xwiki.map(\n (b) =>\n ` ${BLUE}${b.source_slug}${RST} → ${b.target_wiki}:${b.target_slug} ${DIM}(wiki unregistered or unreachable)${RST}`,\n ),\n ),\n );\n sections.push(\n section(\n \"empty-topics\",\n r.empty_topics.length,\n r.empty_topics.map((e) => ` ${BLUE}${e.slug}${RST}`),\n ),\n );\n sections.push(\n section(\n \"empty-pages\",\n r.empty_pages.length,\n r.empty_pages.map((e) => ` ${BLUE}${e.slug}${RST}`),\n ),\n );\n sections.push(\n section(\n \"slug-collisions\",\n r.slug_collisions.length,\n r.slug_collisions.map((c) => ` ${BLUE}${c.slug}${RST}: ${c.paths.join(\", \")}`),\n ),\n );\n return `${sections.join(\"\\n\\n\")}\\n`;\n}\n\nfunction section(label: string, count: 
number, lines: string[]): string {\n if (count === 0) return `${BOLD}${label}${RST} ${GREEN}(0): (ok)${RST}`;\n return `${BOLD}${label}${RST} ${RED}(${count})${RST}:\\n${lines.join(\"\\n\")}`;\n}\n","/**\n * Parse a compact duration string of the form `<N><unit>` into seconds.\n *\n * Accepted units (from the spec, `--since` / `--stale`):\n * - `m` — minutes\n * - `h` — hours\n * - `d` — days\n * - `w` — weeks\n *\n * Examples: `2w` → 1209600, `30d` → 2592000, `12h` → 43200.\n *\n * Anything else throws — the CLI surfaces the error with the usual\n * `almanac: <message>` prefix, which is clearer than silently treating\n * `2weeks` or `30 days` as zero.\n */\nexport function parseDuration(input: string): number {\n const trimmed = input.trim();\n const m = trimmed.match(/^(\\d+)([mhdw])$/);\n if (m === null) {\n throw new Error(\n `invalid duration \"${input}\" (expected Nw, Nd, Nh, or Nm — e.g. 2w, 30d)`,\n );\n }\n const n = Number.parseInt(m[1] ?? \"0\", 10);\n const unit = m[2];\n switch (unit) {\n case \"m\":\n return n * 60;\n case \"h\":\n return n * 60 * 60;\n case \"d\":\n return n * 60 * 60 * 24;\n case \"w\":\n return n * 60 * 60 * 24 * 7;\n default:\n // Unreachable — regex pins the unit — but satisfies exhaustiveness.\n throw new Error(`invalid duration unit \"${unit ?? \"\"}\"`);\n }\n}\n","import { existsSync } from \"node:fs\";\nimport { join } from \"node:path\";\n\nimport { findNearestAlmanacDir } from \"../paths.js\";\nimport { findEntry } from \"../registry/index.js\";\n\n/**\n * Figure out which repo root a query command should run against.\n *\n * Two modes, in order of precedence:\n * 1. `--wiki <name>` — look it up in the global registry. Fails\n * explicitly if the name isn't registered or its path has gone\n * missing (unmounted drive, deleted repo). No silent fallback, which\n * would hide the real problem from the user.\n * 2. default — walk up from `cwd` like git does. 
Fails if we're not\n * inside a `.almanac/` repo.\n *\n * Returns the absolute path to the repo root (the directory containing\n * `.almanac/`).\n *\n * NOTE (spec contract, not yet implemented): when `--all` lands in a\n * future slice, it must silently skip wikis whose paths have gone\n * unreachable — the asymmetry with `--wiki <name>` is deliberate.\n * Explicit lookup is loud about failures (user named a specific wiki);\n * bulk `--all` is quiet (user asked \"whatever's available\"). Don't\n * unify the error behavior when adding `--all`.\n */\nexport async function resolveWikiRoot(params: {\n cwd: string;\n wiki?: string;\n}): Promise<string> {\n if (params.wiki !== undefined) {\n const entry = await findEntry({ name: params.wiki });\n if (entry === null) {\n throw new Error(`no registered wiki named \"${params.wiki}\"`);\n }\n if (!existsSync(join(entry.path, \".almanac\"))) {\n throw new Error(\n `wiki \"${params.wiki}\" path is unreachable (${entry.path})`,\n );\n }\n return entry.path;\n }\n\n const nearest = findNearestAlmanacDir(params.cwd);\n if (nearest === null) {\n throw new Error(\n \"no .almanac/ found in this directory or any parent; run `almanac init` first\",\n );\n }\n return nearest;\n}\n","import type Database from \"better-sqlite3\";\n\nimport type { TopicsFile } from \"./yaml.js\";\n\n/**\n * Depth cap for all recursive traversals of the topics DAG. Belt and\n * suspenders alongside the `CHECK (child_slug != parent_slug)` on the\n * `topic_parents` table — even if a cycle somehow slipped into the data\n * (hand-edited `topics.yaml`, past bug), the CTE can't runaway.\n *\n * 32 is chosen as \"deeper than any real human-authored taxonomy will\n * ever go\". A 32-level topic hierarchy is absurd; anything hitting this\n * cap is almost certainly a cycle.\n */\nexport const DAG_DEPTH_CAP = 32;\n\n/**\n * Given a `topics.yaml` in memory, compute the set of ancestors of a\n * given slug (not including the slug itself). 
Used by `topics link`\n * to check whether a proposed edge would create a cycle.\n *\n * Running off the in-memory file lets `link` validate BEFORE touching\n * either the DB or the YAML, so a refusal doesn't leave half the state\n * mutated. Depth-capped with the same constant as the SQLite CTE.\n */\nexport function ancestorsInFile(\n file: TopicsFile,\n slug: string,\n): Set<string> {\n // Build a child → parents map once.\n const parentsOf = new Map<string, string[]>();\n for (const t of file.topics) {\n parentsOf.set(t.slug, t.parents);\n }\n const ancestors = new Set<string>();\n // BFS, depth-capped. We stop descending when we've hit the cap or\n // revisit an already-seen node (self-loop defense).\n let frontier: string[] = parentsOf.get(slug) ?? [];\n let depth = 0;\n while (frontier.length > 0 && depth < DAG_DEPTH_CAP) {\n const next: string[] = [];\n for (const node of frontier) {\n if (ancestors.has(node)) continue;\n ancestors.add(node);\n const ps = parentsOf.get(node);\n if (ps !== undefined) next.push(...ps);\n }\n frontier = next;\n depth += 1;\n }\n return ancestors;\n}\n\n/**\n * Return all descendants of a given topic slug via the SQLite\n * `topic_parents` table. Depth-capped at `DAG_DEPTH_CAP`.\n *\n * Used by `topics show --descendants` to expand a topic's page list\n * through its subtopics. 
The query is a canonical recursive CTE; we\n * `UNION` (not `UNION ALL`) so cycles in the data don't spin forever.\n */\nexport function descendantsInDb(\n db: Database.Database,\n slug: string,\n): string[] {\n const rows = db\n .prepare<[string, number], { slug: string }>(\n `WITH RECURSIVE desc(slug, depth) AS (\n SELECT child_slug, 1 FROM topic_parents WHERE parent_slug = ?\n UNION\n SELECT tp.child_slug, d.depth + 1\n FROM topic_parents tp\n JOIN desc d ON tp.parent_slug = d.slug\n WHERE d.depth < ?\n )\n SELECT DISTINCT slug FROM desc ORDER BY slug`,\n )\n .all(slug, DAG_DEPTH_CAP)\n .map((r) => r.slug);\n return rows;\n}\n\n/**\n * Return the subtree rooted at `slug` (the slug itself + all\n * descendants). Convenience wrapper used by `health --topic` to scope\n * reports through the DAG.\n */\nexport function subtreeInDb(db: Database.Database, slug: string): string[] {\n return [slug, ...descendantsInDb(db, slug)];\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,OAAO,UAAU,QAAQ,iBAAiB;AACnD,SAAS,eAAe;AA+BxB,eAAsB,eAAyC;AAC7D,QAAM,OAAO,gBAAgB;AAC7B,MAAI;AACJ,MAAI;AACF,UAAM,MAAM,SAAS,MAAM,MAAM;AAAA,EACnC,SAAS,KAAc;AACrB,QAAI,YAAY,GAAG,KAAK,IAAI,SAAS,UAAU;AAC7C,aAAO,CAAC;AAAA,IACV;AACA,UAAM;AAAA,EACR;AAEA,QAAM,UAAU,IAAI,KAAK;AACzB,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,CAAC;AAAA,EACV;AAEA,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,OAAO;AAAA,EAC7B,SAAS,KAAc;AACrB,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,UAAM,IAAI,MAAM,eAAe,IAAI,uBAAuB,OAAO,EAAE;AAAA,EACrE;AAEA,MAAI,CAAC,MAAM,QAAQ,MAAM,GAAG;AAC1B,UAAM,IAAI,MAAM,eAAe,IAAI,uBAAuB;AAAA,EAC5D;AAOA,SAAO,OAAO,IAAI,CAAC,MAAM,QAAQ;AAC/B,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,YAAM,IAAI,MAAM,kBAAkB,GAAG,mBAAmB;AAAA,IAC1D;AACA,UAAM,IAAI;AACV,UAAM,OAAO,OAAO,EAAE,SAAS,WAAW,EAAE,OAAO;AACnD,UAAMA,QAAO,OAAO,EAAE,SAAS,WAAW,EAAE,OAAO;AACnD,QAAI,KAAK,WAAW,GAAG;AACrB,YAAM,IAAI,MAAM,kBAAkB,GAAG,gCAAgC;AAAA,IACvE;AACA,QAAIA,MAAK,WAAW,GAAG;AACrB,YAAM,IAAI,MAAM,kBAAkB,GAAG,gCAAgC;AAAA,IACvE;AACA,WAAO;AAAA,MACL;AAAA,MACA,aAAa,OAAO,EAAE,gBAAgB,WAAW,EA
AE,cAAc;AAAA,MACjE,MAAAA;AAAA,MACA,eACE,OAAO,EAAE,kBAAkB,WAAW,EAAE,gBAAgB;AAAA,IAC5D;AAAA,EACF,CAAC;AACH;AAeA,eAAsB,cAAc,SAAyC;AAC3E,QAAM,OAAO,gBAAgB;AAC7B,QAAM,MAAM,QAAQ,IAAI,GAAG,EAAE,WAAW,KAAK,CAAC;AAC9C,QAAM,OAAO,GAAG,KAAK,UAAU,SAAS,MAAM,CAAC,CAAC;AAAA;AAChD,QAAM,UAAU,GAAG,IAAI;AACvB,QAAM,UAAU,SAAS,MAAM,MAAM;AACrC,QAAM,OAAO,SAAS,IAAI;AAC5B;AAWA,SAAS,WAAW,GAAW,GAAoB;AACjD,MAAI,QAAQ,aAAa,YAAY,QAAQ,aAAa,SAAS;AACjE,WAAO,EAAE,YAAY,MAAM,EAAE,YAAY;AAAA,EAC3C;AACA,SAAO,MAAM;AACf;AAUA,eAAsB,SAAS,OAAgD;AAC7E,QAAM,WAAW,MAAM,aAAa;AACpC,QAAM,WAAW,SAAS;AAAA,IACxB,CAAC,MAAM,EAAE,SAAS,MAAM,QAAQ,CAAC,WAAW,EAAE,MAAM,MAAM,IAAI;AAAA,EAChE;AACA,WAAS,KAAK,KAAK;AACnB,QAAM,cAAc,QAAQ;AAC5B,SAAO;AACT;AAOA,eAAsB,UAAU,MAA6C;AAC3E,QAAM,WAAW,MAAM,aAAa;AACpC,QAAM,MAAM,SAAS,UAAU,CAAC,MAAM,EAAE,SAAS,IAAI;AACrD,MAAI,QAAQ,IAAI;AACd,WAAO;AAAA,EACT;AACA,QAAM,CAAC,OAAO,IAAI,SAAS,OAAO,KAAK,CAAC;AACxC,QAAM,cAAc,QAAQ;AAC5B,SAAO,WAAW;AACpB;AAQA,eAAsB,UAAU,QAGE;AAChC,QAAM,UAAU,MAAM,aAAa;AACnC,aAAW,SAAS,SAAS;AAC3B,QAAI,OAAO,SAAS,UAAa,MAAM,SAAS,OAAO,KAAM,QAAO;AACpE,QAAI,OAAO,SAAS,UAAa,WAAW,MAAM,MAAM,OAAO,IAAI,GAAG;AACpE,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAMA,eAAsB,kBAAiC;AACrD,QAAM,MAAM,oBAAoB,GAAG,EAAE,WAAW,KAAK,CAAC;AACxD;AAEA,SAAS,YAAY,KAA4C;AAC/D,SAAO,eAAe,SAAS,UAAU;AAC3C;;;AC/LA,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,YAAAC,iBAAgB;AACzB,SAAS,UAAU,QAAAC,aAAY;AAE/B,OAAO,QAAQ;;;ACWR,SAAS,cAAc,OAAuB;AACnD,QAAM,UAAU,MAAM,KAAK;AAC3B,QAAM,IAAI,QAAQ,MAAM,iBAAiB;AACzC,MAAI,MAAM,MAAM;AACd,UAAM,IAAI;AAAA,MACR,qBAAqB,KAAK;AAAA,IAC5B;AAAA,EACF;AACA,QAAM,IAAI,OAAO,SAAS,EAAE,CAAC,KAAK,KAAK,EAAE;AACzC,QAAM,OAAO,EAAE,CAAC;AAChB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,IAAI;AAAA,IACb,KAAK;AACH,aAAO,IAAI,KAAK;AAAA,IAClB,KAAK;AACH,aAAO,IAAI,KAAK,KAAK;AAAA,IACvB,KAAK;AACH,aAAO,IAAI,KAAK,KAAK,KAAK;AAAA,IAC5B;AAEE,YAAM,IAAI,MAAM,0BAA0B,QAAQ,EAAE,GAAG;AAAA,EAC3D;AACF;;;ACtCA,SAAS,kBAAkB;AAC3B,SAAS,YAAY;AA0BrB,eAAsB,gBAAgB,QAGlB;AAClB,MAAI,OAAO,SAAS,QAAW;AAC7B,UAAM,QAAQ,MAAM,UAAU,EAAE,MAAM,OAAO,KAAK,CAAC;AACnD,QAAI,UAAU,MAAM;AAClB,YAAM,
IAAI,MAAM,6BAA6B,OAAO,IAAI,GAAG;AAAA,IAC7D;AACA,QAAI,CAAC,WAAW,KAAK,MAAM,MAAM,UAAU,CAAC,GAAG;AAC7C,YAAM,IAAI;AAAA,QACR,SAAS,OAAO,IAAI,0BAA0B,MAAM,IAAI;AAAA,MAC1D;AAAA,IACF;AACA,WAAO,MAAM;AAAA,EACf;AAEA,QAAM,UAAU,sBAAsB,OAAO,GAAG;AAChD,MAAI,YAAY,MAAM;AACpB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;;;ACrCO,IAAM,gBAAgB;AAWtB,SAAS,gBACd,MACA,MACa;AAEb,QAAM,YAAY,oBAAI,IAAsB;AAC5C,aAAW,KAAK,KAAK,QAAQ;AAC3B,cAAU,IAAI,EAAE,MAAM,EAAE,OAAO;AAAA,EACjC;AACA,QAAM,YAAY,oBAAI,IAAY;AAGlC,MAAI,WAAqB,UAAU,IAAI,IAAI,KAAK,CAAC;AACjD,MAAI,QAAQ;AACZ,SAAO,SAAS,SAAS,KAAK,QAAQ,eAAe;AACnD,UAAM,OAAiB,CAAC;AACxB,eAAW,QAAQ,UAAU;AAC3B,UAAI,UAAU,IAAI,IAAI,EAAG;AACzB,gBAAU,IAAI,IAAI;AAClB,YAAM,KAAK,UAAU,IAAI,IAAI;AAC7B,UAAI,OAAO,OAAW,MAAK,KAAK,GAAG,EAAE;AAAA,IACvC;AACA,eAAW;AACX,aAAS;AAAA,EACX;AACA,SAAO;AACT;AAUO,SAAS,gBACd,IACA,MACU;AACV,QAAM,OAAO,GACV;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASF,EACC,IAAI,MAAM,aAAa,EACvB,IAAI,CAAC,MAAM,EAAE,IAAI;AACpB,SAAO;AACT;AAOO,SAAS,YAAY,IAAuB,MAAwB;AACzE,SAAO,CAAC,MAAM,GAAG,gBAAgB,IAAI,IAAI,CAAC;AAC5C;;;AHtBA,IAAM,wBAAwB,KAAK,KAAK,KAAK;AAE7C,eAAsB,UACpB,SAC8B;AAC9B,QAAM,WAAW,MAAM,gBAAgB,EAAE,KAAK,QAAQ,KAAK,MAAM,QAAQ,KAAK,CAAC;AAC/E,QAAM,iBAAiB,EAAE,SAAS,CAAC;AAEnC,QAAM,aAAaC,MAAK,UAAU,UAAU;AAC5C,QAAM,WAAWA,MAAK,YAAY,OAAO;AACzC,QAAM,KAAK,UAAUA,MAAK,YAAY,UAAU,CAAC;AAEjD,MAAI;AACF,UAAM,eAAe,QAAQ,UAAU,SACnC,cAAc,QAAQ,KAAK,IAC3B;AAEJ,UAAM,QAAQ,aAAa,IAAI,OAAO;AAEtC,UAAM,SAAuB;AAAA,MAC3B,SAAS,YAAY,IAAI,KAAK;AAAA,MAC9B,OAAO,UAAU,IAAI,OAAO,YAAY;AAAA,MACxC,WAAW,MAAM,aAAa,IAAI,OAAO,QAAQ;AAAA,MACjD,cAAc,gBAAgB,IAAI,KAAK;AAAA,MACvC,cAAc,MAAM,gBAAgB,IAAI,KAAK;AAAA,MAC7C,cAAc,gBAAgB,IAAI,KAAK;AAAA,MACvC,aAAa,MAAM,eAAe,IAAI,OAAO,QAAQ;AAAA,MACrD,iBAAiB,MAAM,mBAAmB,QAAQ;AAAA,IACpD;AAEA,QAAI,QAAQ,SAAS,MAAM;AACzB,aAAO;AAAA,QACL,QAAQ,GAAG,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA;AAAA,QAC1C,QAAQ;AAAA,QACR,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,WAAO;AAAA,MACL,QAAQ,aAAa,MAAM;AAAA,MAC3B,QAAQ;AAAA,MACR,UAAU;AAAA,IACZ;AAAA,EACF,UAAE;AACA,OAAG,MAAM;AAAA,EACX;AACF;AAaA,SA
AS,aAAa,IAAuB,SAAqC;AAChF,MAAI,QAA4B;AAChC,MAAI,SAA6B;AAEjC,MAAI,QAAQ,UAAU,QAAW;AAC/B,UAAM,WAAW,YAAY,QAAQ,KAAK;AAC1C,QAAI,SAAS,SAAS,GAAG;AACvB,YAAM,UAAU,YAAY,IAAI,QAAQ;AACxC,eAAS,IAAI,IAAI,OAAO;AACxB,YAAM,eAAe,QAAQ,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACrD,YAAM,OAAO,GACV;AAAA,QACC;AAAA,kCACwB,YAAY;AAAA,MACtC,EACC,IAAI,GAAG,OAAO;AACjB,cAAQ,IAAI,IAAI,KAAK,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,QAAQ,UAAU,QAAQ,QAAQ,eAAe,QAAW;AAC9D,UAAM,aAAa,oBAAI,IAAY;AACnC,eAAW,QAAQ,QAAQ,WAAW,MAAM,OAAO,GAAG;AACpD,YAAM,IAAI,KAAK,KAAK;AACpB,UAAI,EAAE,SAAS,EAAG,YAAW,IAAI,CAAC;AAAA,IACpC;AAEA,QAAI,UAAU,KAAM,SAAQ;AAAA,SACvB;AACH,YAAM,MAAM,oBAAI,IAAY;AAC5B,iBAAW,KAAK,WAAY,KAAI,MAAM,IAAI,CAAC,EAAG,KAAI,IAAI,CAAC;AACvD,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,OAAO;AACzB;AAEA,SAAS,YAAY,OAAoB,MAAuB;AAC9D,MAAI,MAAM,UAAU,KAAM,QAAO;AACjC,SAAO,MAAM,MAAM,IAAI,IAAI;AAC7B;AAWA,SAAS,YACP,IACA,OACoB;AACpB,QAAM,OAAO,GACV;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMF,EACC,IAAI;AACP,SAAO,KAAK,OAAO,CAAC,MAAM,YAAY,OAAO,EAAE,IAAI,CAAC;AACtD;AAOA,SAAS,UACP,IACA,OACA,cAC+C;AAC/C,QAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AACxC,QAAM,YAAY,MAAM;AACxB,QAAM,OAAO,GACV;AAAA,IACC;AAAA;AAAA;AAAA,EAGF,EACC,IAAI,SAAS;AAChB,SAAO,KACJ,OAAO,CAAC,MAAM,YAAY,OAAO,EAAE,IAAI,CAAC,EACxC,IAAI,CAAC,OAAO;AAAA,IACX,MAAM,EAAE;AAAA,IACR,mBAAmB,KAAK,OAAO,MAAM,EAAE,eAAe,KAAK,KAAK,GAAG;AAAA,EACrE,EAAE;AACN;AAiBA,eAAe,aACb,IACA,OACA,UAC2C;AAC3C,QAAM,OAAO,GACV;AAAA,IAIC;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,IAAI;AACP,QAAM,MAAwC,CAAC;AAC/C,aAAW,KAAK,MAAM;AACpB,QAAI,CAAC,YAAY,OAAO,EAAE,IAAI,EAAG;AACjC,UAAM,MAAMA,MAAK,UAAU,EAAE,aAAa;AAC1C,QAAI,CAACC,YAAW,GAAG,GAAG;AAIpB,UAAI,KAAK,EAAE,MAAM,EAAE,MAAM,MAAM,EAAE,cAAc,CAAC;AAAA,IAClD;AAAA,EACF;AACA,SAAO;AACT;AAQA,SAAS,gBACP,IACA,OACgD;AAChD,QAAM,OAAO,GACV;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMF,EACC,IAAI;AACP,SAAO,KAAK,OAAO,CAAC,MAAM,YAAY,OAAO,EAAE,WAAW,CAAC;AAC7D;AASA,eAAe,gBACb,IACA,OAC8E;AAC9E,QAAM,OAAO,GACV;AAAA;AAAA;AAAA;AAAA,IAOC;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,IAAI;AACP,Q
AAM,MAA2E,CAAC;AAElF,QAAM,iBAAiB,oBAAI,IAAqB;AAChD,aAAW,KAAK,MAAM;AACpB,QAAI,CAAC,YAAY,OAAO,EAAE,WAAW,EAAG;AACxC,QAAI,KAAK,eAAe,IAAI,EAAE,WAAW;AACzC,QAAI,OAAO,QAAW;AACpB,YAAM,QAAQ,MAAM,UAAU,EAAE,MAAM,EAAE,YAAY,CAAC;AACrD,WAAK,UAAU,QAAQA,YAAWD,MAAK,MAAM,MAAM,UAAU,CAAC;AAC9D,qBAAe,IAAI,EAAE,aAAa,EAAE;AAAA,IACtC;AACA,QAAI,CAAC,IAAI;AACP,UAAI,KAAK;AAAA,QACP,aAAa,EAAE;AAAA,QACf,aAAa,EAAE;AAAA,QACf,aAAa,EAAE;AAAA,MACjB,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO;AACT;AAGA,SAAS,gBACP,IACA,OACoB;AACpB,QAAM,OAAO,GACV;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,IAAI;AACP,MAAI,MAAM,WAAW,KAAM,QAAO;AAClC,SAAO,KAAK,OAAO,CAAC,MAAM,MAAM,OAAQ,IAAI,EAAE,IAAI,CAAC;AACrD;AAYA,eAAe,eACb,IACA,OACA,UAC6B;AAC7B,QAAM,OAAO,GACV;AAAA,IACC;AAAA;AAAA;AAAA,EAGF,EACC,IAAI;AACP,QAAM,MAA0B,CAAC;AACjC,aAAW,KAAK,MAAM;AACpB,QAAI,CAAC,YAAY,OAAO,EAAE,IAAI,EAAG;AACjC,QAAI;AACJ,QAAI;AACF,YAAM,MAAME,UAAS,EAAE,WAAW,MAAM;AAAA,IAC1C,QAAQ;AACN;AAAA,IACF;AAEA,UAAM,IAAI,IAAI,MAAM,2CAA2C;AAC/D,UAAM,OAAO,MAAM,OAAQ,EAAE,CAAC,KAAK,KAAM;AASzC,SAAK;AACL,UAAM,eAAe,KAClB,MAAM,OAAO,EACb,KAAK,CAAC,MAAM;AACX,YAAM,IAAI,EAAE,KAAK;AACjB,UAAI,EAAE,WAAW,EAAG,QAAO;AAC3B,UAAI,EAAE,WAAW,GAAG,EAAG,QAAO;AAC9B,aAAO;AAAA,IACT,CAAC;AACH,QAAI,CAAC,cAAc;AACjB,UAAI,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC;AAAA,IAC3B;AAAA,EACF;AACA,SAAO;AACT;AASA,eAAe,mBACb,UAC8C;AAC9C,MAAI,CAACD,YAAW,QAAQ,EAAG,QAAO,CAAC;AACnC,QAAM,QAAQ,MAAM,GAAG,WAAW;AAAA,IAChC,KAAK;AAAA,IACL,UAAU;AAAA,IACV,WAAW;AAAA,IACX,oBAAoB;AAAA,EACtB,CAAC;AACD,QAAM,SAAS,oBAAI,IAAsB;AACzC,aAAW,OAAO,OAAO;AACvB,UAAM,OAAO,YAAY,SAAS,KAAK,KAAK,CAAC;AAC7C,QAAI,KAAK,WAAW,EAAG;AACvB,UAAM,OAAO,OAAO,IAAI,IAAI,KAAK,CAAC;AAClC,SAAK,KAAK,GAAG;AACb,WAAO,IAAI,MAAM,IAAI;AAAA,EACvB;AACA,QAAM,MAA2C,CAAC;AAClD,aAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,GAAG;AAC5C,QAAI,MAAM,SAAS,GAAG;AACpB,UAAI,KAAK,EAAE,MAAM,OAAO,MAAM,KAAK,EAAE,CAAC;AAAA,IACxC;AAAA,EACF;AACA,MAAI,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC;AAC/C,SAAO;AACT;AAMA,SAAS,aAAa,GAAyB;AAC7C,QAAM,WAAqB,CAAC;AAC5B,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,QAAQ;AAAA,MAC
V,EAAE,QAAQ,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,EAAE,IAAI,GAAG,GAAG,EAAE;AAAA,IACjD;AAAA,EACF;AACA,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,MAAM;AAAA,MACR,EAAE,MAAM,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,EAAE,IAAI,GAAG,GAAG,QAAQ,GAAG,IAAI,EAAE,iBAAiB,SAAS,GAAG,EAAE;AAAA,IAC7F;AAAA,EACF;AACA,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,UAAU;AAAA,MACZ,EAAE,UAAU,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,EAAE,IAAI,GAAG,GAAG,gBAAgB,EAAE,IAAI,IAAI,GAAG,YAAY,GAAG,EAAE;AAAA,IAC/F;AAAA,EACF;AACA,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,aAAa;AAAA,MACf,EAAE,aAAa;AAAA,QACb,CAAC,MAAM,KAAK,IAAI,GAAG,EAAE,WAAW,GAAG,GAAG,WAAM,EAAE,WAAW,IAAI,GAAG,0BAA0B,GAAG;AAAA,MAC/F;AAAA,IACF;AAAA,EACF;AACA,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,aAAa;AAAA,MACf,EAAE,aAAa;AAAA,QACb,CAAC,MACC,KAAK,IAAI,GAAG,EAAE,WAAW,GAAG,GAAG,WAAM,EAAE,WAAW,IAAI,EAAE,WAAW,IAAI,GAAG,qCAAqC,GAAG;AAAA,MACtH;AAAA,IACF;AAAA,EACF;AACA,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,aAAa;AAAA,MACf,EAAE,aAAa,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,EAAE,IAAI,GAAG,GAAG,EAAE;AAAA,IACtD;AAAA,EACF;AACA,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,YAAY;AAAA,MACd,EAAE,YAAY,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,EAAE,IAAI,GAAG,GAAG,EAAE;AAAA,IACrD;AAAA,EACF;AACA,WAAS;AAAA,IACP;AAAA,MACE;AAAA,MACA,EAAE,gBAAgB;AAAA,MAClB,EAAE,gBAAgB,IAAI,CAAC,MAAM,KAAK,IAAI,GAAG,EAAE,IAAI,GAAG,GAAG,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC,EAAE;AAAA,IAChF;AAAA,EACF;AACA,SAAO,GAAG,SAAS,KAAK,MAAM,CAAC;AAAA;AACjC;AAEA,SAAS,QAAQ,OAAe,OAAe,OAAyB;AACtE,MAAI,UAAU,EAAG,QAAO,GAAG,IAAI,GAAG,KAAK,GAAG,GAAG,IAAI,KAAK,YAAY,GAAG;AACrE,SAAO,GAAG,IAAI,GAAG,KAAK,GAAG,GAAG,IAAI,GAAG,IAAI,KAAK,IAAI,GAAG;AAAA,EAAM,MAAM,KAAK,IAAI,CAAC;AAC3E;","names":["path","existsSync","readFile","join","join","existsSync","readFile"]}
|