akm-cli 0.4.1 → 0.5.0-rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/asset-registry.js +7 -0
- package/dist/asset-spec.js +35 -0
- package/dist/cli.js +1120 -31
- package/dist/completions.js +2 -2
- package/dist/config-cli.js +41 -0
- package/dist/config.js +62 -0
- package/dist/file-context.js +2 -1
- package/dist/github.js +20 -1
- package/dist/indexer.js +55 -5
- package/dist/init.js +11 -0
- package/dist/install-audit.js +53 -8
- package/dist/installed-kits.js +2 -0
- package/dist/llm.js +64 -23
- package/dist/matchers.js +56 -4
- package/dist/metadata.js +68 -4
- package/dist/paths.js +3 -0
- package/dist/registry-install.js +36 -7
- package/dist/registry-resolve.js +25 -0
- package/dist/renderers.js +182 -2
- package/dist/search-fields.js +4 -0
- package/dist/search-source.js +12 -8
- package/dist/setup.js +158 -33
- package/dist/stash-add.js +84 -11
- package/dist/stash-providers/git.js +182 -44
- package/dist/stash-show.js +56 -1
- package/dist/stash-source-manage.js +14 -4
- package/dist/templates/wiki-templates.js +100 -0
- package/dist/vault.js +290 -0
- package/dist/wiki.js +886 -0
- package/dist/workflow-authoring.js +131 -0
- package/dist/workflow-cli.js +44 -0
- package/dist/workflow-db.js +55 -0
- package/dist/workflow-markdown.js +251 -0
- package/dist/workflow-runs.js +364 -0
- package/package.json +2 -1
- package/LICENSE +0 -374
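The headline change in this RC is the multi-wiki module added at `dist/wiki.js`, diffed in full below. As a quick orientation, here is a minimal sketch of how its exports might be driven programmatically — the signatures follow the added file, but the import specifier and stash path are assumptions, and the snippet is an illustration, not part of the published package:

```js
// Illustration only — not shipped in akm-cli. Signatures follow dist/wiki.js below;
// the import specifier and stash location are hypothetical.
import { createWiki, stashRaw, lintWiki, regenerateWikiIndex } from "akm-cli/dist/wiki.js";

const stashDir = "/home/me/.akm/stash"; // hypothetical stash location

// Scaffold <stashDir>/wikis/platform/ with schema.md, index.md, log.md and raw/.gitkeep.
createWiki(stashDir, "platform");

// File a source under raw/ — returns { slug, path, wrote, ref }; never overwrites.
const raw = stashRaw({
    stashDir,
    wikiName: "platform",
    content: "# Incident 2024-11-02\nPostgres failover notes...",
});
console.log(raw.ref); // e.g. "wiki:platform/raw/incident-2024-11-02"

// Deterministic maintenance: rebuild index.md, then run the structural lint.
regenerateWikiIndex(stashDir, "platform");
const report = lintWiki(stashDir, "platform");
console.log(report.findings.map((f) => f.kind)); // e.g. ["uncited-raw"] until a page cites the raw source
```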
package/dist/wiki.js
ADDED
@@ -0,0 +1,886 @@
+/**
+ * Multi-wiki support for akm (issue #119).
+ *
+ * A wiki lives at `<stashDir>/wikis/<name>/` and contains:
+ * - `schema.md` — the rulebook the agent reads first
+ * - `index.md` — catalog, regenerable (rebuilt by `akm index`)
+ * - `log.md` — append-only, agent-maintained
+ * - `raw/<slug>.md` — immutable ingested sources
+ * - `<page>.md` — wiki pages (optionally nested)
+ *
+ * Principle: "akm surfaces. The agent writes." akm owns lifecycle, raw-slug
+ * generation, structural lint, and `index.md` regeneration. The agent uses
+ * its native file tools for every other page operation.
+ */
+import fs from "node:fs";
+import path from "node:path";
+import { parse as yamlParse } from "yaml";
+import { isWithin } from "./common";
+import { NotFoundError, UsageError } from "./errors";
+import { parseFrontmatter, parseFrontmatterBlock } from "./frontmatter";
+import { akmSearch } from "./stash-search";
+import { buildIndexMd, buildLogMd, buildSchemaMd } from "./templates/wiki-templates";
+// ── Constants ───────────────────────────────────────────────────────────────
+export const WIKIS_SUBDIR = "wikis";
+export const SCHEMA_MD = "schema.md";
+export const INDEX_MD = "index.md";
+export const LOG_MD = "log.md";
+export const RAW_SUBDIR = "raw";
+/** Files at a wiki root that are not pages. */
+const WIKI_SPECIAL_FILES = new Set([SCHEMA_MD, INDEX_MD, LOG_MD]);
+const WIKI_NAME_RE = /^[a-z0-9][a-z0-9-]*$/;
+// ── Validation + resolution ─────────────────────────────────────────────────
+export function validateWikiName(name) {
+    if (!name)
+        throw new UsageError("Wiki name cannot be empty.");
+    if (!WIKI_NAME_RE.test(name)) {
+        throw new UsageError(`Invalid wiki name "${name}". Use lowercase letters, digits, and hyphens (must start with a lowercase letter or digit).`);
+    }
+}
+export function resolveWikisRoot(stashDir) {
+    return path.join(stashDir, WIKIS_SUBDIR);
+}
+/**
+ * Resolve `<stashDir>/wikis/<name>/` with an isWithin guard so a malicious
+ * or mistyped name can never escape the wikis root.
+ */
+export function resolveWikiDir(stashDir, name) {
+    validateWikiName(name);
+    const wikisRoot = resolveWikisRoot(stashDir);
+    const dir = path.join(wikisRoot, name);
+    if (!isWithin(dir, wikisRoot)) {
+        throw new UsageError(`Invalid wiki path for name "${name}".`);
+    }
+    return dir;
+}
+/** Parse a wiki name out of a `wiki:<name>/<...>` ref, or return undefined. */
+export function extractWikiNameFromRef(ref) {
+    const match = ref.match(/^wiki:([a-z0-9][a-z0-9-]*)(?:\/|$)/);
+    return match?.[1];
+}
+/**
+ * Walk a wiki directory and bucket files into pages vs raws.
+ *
+ * "Pages" are any `.md` files under the wiki root EXCEPT `schema.md`,
+ * `index.md`, `log.md`, or anything under `raw/`. This matches the set the
+ * agent edits, and the set `akm wiki pages` exposes.
+ *
+ * Returns two mtime signals:
+ * - `lastModifiedMs` — newest across all .md files. Used for the `show` /
+ *   `list` "last activity" display, which should reflect any edit.
+ * - `pagesLastModifiedMs` — newest page mtime only. Used by `lintWiki` to
+ *   decide `stale-index`: the index tracks pages, so stashing a raw or
+ *   editing log.md must NOT flag the index stale.
+ */
+function scanWikiFiles(wikiDir) {
+    const pages = [];
+    const raws = [];
+    let lastModifiedMs;
+    let pagesLastModifiedMs;
+    const stack = [{ abs: wikiDir, relDirSegs: [] }];
+    while (stack.length > 0) {
+        const current = stack.pop();
+        if (!current)
+            continue;
+        let entries;
+        try {
+            entries = fs.readdirSync(current.abs, { withFileTypes: true });
+        }
+        catch {
+            continue;
+        }
+        for (const entry of entries) {
+            if (entry.name.startsWith("."))
+                continue;
+            const abs = path.join(current.abs, entry.name);
+            if (entry.isSymbolicLink())
+                continue;
+            if (entry.isDirectory()) {
+                stack.push({ abs, relDirSegs: [...current.relDirSegs, entry.name] });
+                continue;
+            }
+            if (!entry.isFile() || !entry.name.endsWith(".md"))
+                continue;
+            let mtimeMs;
+            try {
+                mtimeMs = fs.statSync(abs).mtimeMs;
+            }
+            catch {
+                /* best-effort */
+            }
+            if (mtimeMs !== undefined) {
+                lastModifiedMs = lastModifiedMs === undefined ? mtimeMs : Math.max(lastModifiedMs, mtimeMs);
+            }
+            const atRoot = current.relDirSegs.length === 0;
+            const firstDir = current.relDirSegs[0];
+            if (firstDir === RAW_SUBDIR) {
+                raws.push(abs);
+            }
+            else if (!(atRoot && WIKI_SPECIAL_FILES.has(entry.name))) {
+                // schema.md / index.md / log.md at the wiki root are not pages
+                pages.push(abs);
+                if (mtimeMs !== undefined) {
+                    pagesLastModifiedMs = pagesLastModifiedMs === undefined ? mtimeMs : Math.max(pagesLastModifiedMs, mtimeMs);
+                }
+            }
+        }
+    }
+    return { pages, raws, lastModifiedMs, pagesLastModifiedMs };
+}
+function readSchemaDescription(wikiDir) {
+    const schemaPath = path.join(wikiDir, SCHEMA_MD);
+    let raw;
+    try {
+        raw = fs.readFileSync(schemaPath, "utf8");
+    }
+    catch {
+        return undefined;
+    }
+    try {
+        const parsed = parseFrontmatter(raw);
+        const desc = parsed.data.description;
+        return typeof desc === "string" && desc.trim().length > 0 ? desc.trim() : undefined;
+    }
+    catch {
+        return undefined;
+    }
+}
+function toIsoDate(ms) {
+    return new Date(ms).toISOString();
+}
+// ── List ────────────────────────────────────────────────────────────────────
+/**
+ * Return summaries for every wiki directly under `<stashDir>/wikis/`.
+ *
+ * A "wiki" is any directory whose name matches {@link WIKI_NAME_RE}. Anything
+ * else (dot-directories, lone files, directories with odd names) is skipped
+ * silently so `ls` noise doesn't crash listing.
+ */
+export function listWikis(stashDir) {
+    const wikisRoot = resolveWikisRoot(stashDir);
+    if (!fs.existsSync(wikisRoot))
+        return [];
+    let entries;
+    try {
+        entries = fs.readdirSync(wikisRoot, { withFileTypes: true });
+    }
+    catch {
+        return [];
+    }
+    const summaries = [];
+    for (const entry of entries) {
+        if (!entry.isDirectory())
+            continue;
+        if (!WIKI_NAME_RE.test(entry.name))
+            continue;
+        const dir = path.join(wikisRoot, entry.name);
+        const buckets = scanWikiFiles(dir);
+        const summary = {
+            name: entry.name,
+            path: dir,
+            pages: buckets.pages.length,
+            raws: buckets.raws.length,
+        };
+        const description = readSchemaDescription(dir);
+        if (description)
+            summary.description = description;
+        if (buckets.lastModifiedMs !== undefined)
+            summary.lastModified = toIsoDate(buckets.lastModifiedMs);
+        summaries.push(summary);
+    }
+    summaries.sort((a, b) => a.name.localeCompare(b.name));
+    return summaries;
+}
+// ── Show ────────────────────────────────────────────────────────────────────
+/**
+ * Extract the top N `##` log entries from `log.md`.
+ *
+ * The log convention (defined by `schema.md` and enforced by nothing) is
+ * newest-first: the most recent entry sits at the top of the file, so the
+ * first `limit` `##` blocks encountered in file order are the most recent.
+ * Agents that append to the bottom instead will have their entries appear
+ * at the end of this list.
+ *
+ * `log.md` is agent-maintained and can be free-form, so `parseFrontmatter`
+ * is called defensively: if the frontmatter is malformed we fall back to
+ * treating the whole file as body.
+ */
+function readRecentLog(wikiDir, limit = 3) {
+    const logPath = path.join(wikiDir, LOG_MD);
+    let raw;
+    try {
+        raw = fs.readFileSync(logPath, "utf8");
+    }
+    catch {
+        return [];
+    }
+    let body;
+    try {
+        body = parseFrontmatter(raw).content ?? raw;
+    }
+    catch {
+        body = raw;
+    }
+    const sections = [];
+    let current;
+    for (const line of body.split(/\r?\n/)) {
+        if (/^##\s+/.test(line)) {
+            if (current && current.length > 0)
+                sections.push(current.join("\n").trim());
+            current = [line];
+        }
+        else if (current) {
+            current.push(line);
+        }
+    }
+    if (current && current.length > 0)
+        sections.push(current.join("\n").trim());
+    // Newest-first convention: the top `limit` `##` blocks are the most recent.
+    return sections.slice(0, limit);
+}
+export function showWiki(stashDir, name) {
+    const wikiDir = resolveWikiDir(stashDir, name);
+    if (!fs.existsSync(wikiDir)) {
+        throw new NotFoundError(`Wiki not found: ${name}. Run \`akm wiki create ${name}\` to create it.`);
+    }
+    const buckets = scanWikiFiles(wikiDir);
+    const result = {
+        name,
+        ref: `wiki:${name}`,
+        path: wikiDir,
+        pages: buckets.pages.length,
+        raws: buckets.raws.length,
+        recentLog: readRecentLog(wikiDir),
+    };
+    const description = readSchemaDescription(wikiDir);
+    if (description)
+        result.description = description;
+    if (buckets.lastModifiedMs !== undefined)
+        result.lastModified = toIsoDate(buckets.lastModifiedMs);
+    return result;
+}
+// ── Create ──────────────────────────────────────────────────────────────────
+export function createWiki(stashDir, name) {
+    const wikiDir = resolveWikiDir(stashDir, name);
+    fs.mkdirSync(wikiDir, { recursive: true });
+    const files = [
+        { relPath: SCHEMA_MD, content: buildSchemaMd(name) },
+        { relPath: INDEX_MD, content: buildIndexMd(name) },
+        { relPath: LOG_MD, content: buildLogMd(name) },
+    ];
+    const created = [];
+    const skipped = [];
+    for (const { relPath, content } of files) {
+        const absPath = path.join(wikiDir, relPath);
+        if (fs.existsSync(absPath)) {
+            skipped.push(absPath);
+            continue;
+        }
+        fs.writeFileSync(absPath, content, "utf8");
+        created.push(absPath);
+    }
+    // Ensure raw/ exists with a .gitkeep so empty wikis survive clean clones.
+    // Handle the dir-exists-but-no-.gitkeep case too (partial scaffolds,
+    // user-created directories) so the invariant always holds after `create`.
+    const rawDir = path.join(wikiDir, RAW_SUBDIR);
+    fs.mkdirSync(rawDir, { recursive: true });
+    const gitkeepPath = path.join(rawDir, ".gitkeep");
+    if (fs.existsSync(gitkeepPath)) {
+        skipped.push(gitkeepPath);
+    }
+    else {
+        fs.writeFileSync(gitkeepPath, "", "utf8");
+        created.push(gitkeepPath);
+    }
+    return { name, ref: `wiki:${name}`, path: wikiDir, created, skipped };
+}
+/**
+ * Remove a wiki.
+ *
+ * Deletes pages + `schema.md` + `index.md` + `log.md` by default. The `raw/`
+ * directory is preserved because raw sources are often hand-curated and
+ * outlive the wiki pages built from them. Pass `withSources: true` to
+ * delete everything including `raw/`.
+ *
+ * The guard: `wikiDir` must resolve under `<stashDir>/wikis/`. If the wiki
+ * directory doesn't exist, throws `NotFoundError` — callers can decide to
+ * ignore that (e.g. idempotent cleanup) by catching.
+ */
+export function removeWiki(stashDir, name, options = {}) {
+    const wikiDir = resolveWikiDir(stashDir, name);
+    if (!fs.existsSync(wikiDir)) {
+        throw new NotFoundError(`Wiki not found: ${name}.`);
+    }
+    const wikisRoot = resolveWikisRoot(stashDir);
+    if (!isWithin(wikiDir, wikisRoot)) {
+        throw new UsageError(`Refusing to remove a path outside the wikis root: ${wikiDir}`);
+    }
+    const removed = [];
+    const rawDir = path.join(wikiDir, RAW_SUBDIR);
+    const preserveRaw = !options.withSources && fs.existsSync(rawDir);
+    let entries;
+    try {
+        entries = fs.readdirSync(wikiDir, { withFileTypes: true });
+    }
+    catch {
+        entries = [];
+    }
+    for (const entry of entries) {
+        const abs = path.join(wikiDir, entry.name);
+        if (preserveRaw && entry.name === RAW_SUBDIR)
+            continue;
+        try {
+            fs.rmSync(abs, { recursive: true, force: true });
+            removed.push(abs);
+        }
+        catch {
+            /* best-effort — entry may have been removed concurrently */
+        }
+    }
+    if (!preserveRaw) {
+        // Remove the now-empty wiki directory itself.
+        try {
+            fs.rmdirSync(wikiDir);
+        }
+        catch {
+            /* dir may be non-empty (e.g. uncollected dotfiles); leave it */
+        }
+    }
+    const result = {
+        name,
+        path: wikiDir,
+        removed,
+        preservedRaw: preserveRaw,
+    };
+    if (preserveRaw)
+        result.rawPath = rawDir;
+    return result;
+}
+function pageNameFromPath(wikiDir, absPath) {
+    const rel = path.relative(wikiDir, absPath).split(path.sep).join("/");
+    return rel.endsWith(".md") ? rel.slice(0, -3) : rel;
+}
+/**
+ * Parse the raw frontmatter block with a real YAML parser so list-valued
+ * keys (`xrefs:`, `sources:`) round-trip correctly. The project's hand-rolled
+ * `parseFrontmatter` deliberately drops YAML lists; for lint + index work we
+ * need them, and the `yaml` package is already a runtime dependency.
+ */
+function parsePageFrontmatterYaml(raw) {
+    const block = parseFrontmatterBlock(raw);
+    if (!block)
+        return {};
+    try {
+        const value = yamlParse(block.frontmatter);
+        if (value && typeof value === "object" && !Array.isArray(value)) {
+            return value;
+        }
+    }
+    catch {
+        /* malformed YAML — fall through to the lightweight parser */
+    }
+    try {
+        return parseFrontmatter(raw).data;
+    }
+    catch {
+        return {};
+    }
+}
+function readPageFrontmatter(absPath) {
+    let raw;
+    try {
+        raw = fs.readFileSync(absPath, "utf8");
+    }
+    catch {
+        return {};
+    }
+    const data = parsePageFrontmatterYaml(raw);
+    const out = {};
+    if (typeof data.description === "string" && data.description.trim().length > 0) {
+        out.description = data.description.trim();
+    }
+    if (typeof data.pageKind === "string" && data.pageKind.trim().length > 0) {
+        out.pageKind = data.pageKind.trim();
+    }
+    if (Array.isArray(data.xrefs)) {
+        const xrefs = data.xrefs.filter((x) => typeof x === "string" && x.trim().length > 0);
+        if (xrefs.length > 0)
+            out.xrefs = xrefs;
+    }
+    if (Array.isArray(data.sources)) {
+        const sources = data.sources.filter((s) => typeof s === "string" && s.trim().length > 0);
+        if (sources.length > 0)
+            out.sources = sources;
+    }
+    return out;
+}
+/**
+ * List the pages in a wiki, excluding `schema.md`, `index.md`, `log.md`, and
+ * anything under `raw/`. Each entry carries its ref (`wiki:<name>/<page>`),
+ * path, and frontmatter-derived fields for orientation.
+ */
+export function listPages(stashDir, name) {
+    const wikiDir = resolveWikiDir(stashDir, name);
+    if (!fs.existsSync(wikiDir)) {
+        throw new NotFoundError(`Wiki not found: ${name}.`);
+    }
+    const { pages } = scanWikiFiles(wikiDir);
+    const result = [];
+    for (const abs of pages) {
+        const pageName = pageNameFromPath(wikiDir, abs);
+        const ref = `wiki:${name}/${pageName}`;
+        const fm = readPageFrontmatter(abs);
+        const entry = { ref, name: pageName, path: abs, ...fm };
+        result.push(entry);
+    }
+    result.sort((a, b) => a.name.localeCompare(b.name));
+    return result;
+}
+/**
+ * Thin wrapper over the stash-wide search that narrows to one wiki.
+ *
+ * Uses `akmSearch({ type: "wiki" })` to reuse the full FTS5+boost pipeline,
+ * then drops hits that aren't inside `wikis/<name>/`. No parallel scorer.
+ *
+ * When the index is absent (e.g. fresh stash), `akmSearch` falls back to its
+ * substring walker; hits still come through path-filtered here.
+ */
+export async function searchInWiki(input) {
+    validateWikiName(input.wikiName);
+    const wikiDir = resolveWikiDir(input.stashDir, input.wikiName);
+    const response = await akmSearch({
+        query: input.query,
+        type: "wiki",
+        limit: input.limit,
+        source: "stash",
+    });
+    const rawDir = path.join(wikiDir, RAW_SUBDIR);
+    const filtered = [];
+    for (const hit of response.hits) {
+        // hits can be StashSearchHit or RegistrySearchResultHit (union); filter
+        // by path inclusion. Registry hits have no path and are dropped.
+        if (hit.type === "registry")
+            continue;
+        const stashHit = hit;
+        if (!stashHit.path)
+            continue;
+        if (!isWithin(stashHit.path, wikiDir))
+            continue;
+        // Exclude infrastructure files: schema.md, index.md, log.md at wiki root
+        const basename = path.basename(stashHit.path);
+        if (WIKI_SPECIAL_FILES.has(basename) && path.dirname(stashHit.path) === wikiDir)
+            continue;
+        // Exclude anything under raw/
+        if (isWithin(stashHit.path, rawDir))
+            continue;
+        filtered.push(stashHit);
+    }
+    return { ...response, hits: filtered, registryHits: undefined };
+}
+// ── Slug + raw stash ───────────────────────────────────────────────────────
+const SLUG_MAX_LENGTH = 64;
+/**
+ * Turn an arbitrary string into a filesystem-safe wiki slug.
+ *
+ * - lowercased
+ * - leading markdown noise (`#`, `>`, `-`, whitespace) stripped
+ * - non-alphanumerics collapsed to `-`
+ * - leading/trailing `-` trimmed
+ * - capped at {@link SLUG_MAX_LENGTH}
+ *
+ * Falls back to `note-<base36-ms>` for empty inputs so raw files are never
+ * written to a blank name.
+ */
+export function slugifyForWiki(value) {
+    const slug = value
+        .toLowerCase()
+        .replace(/^[#>\-\s]+/, "")
+        .replace(/[^a-z0-9]+/g, "-")
+        .replace(/^-+|-+$/g, "")
+        .slice(0, SLUG_MAX_LENGTH);
+    return slug || `note-${Date.now().toString(36)}`;
+}
+/**
+ * Derive a slug hint from the first non-empty line of source content.
+ *
+ * Used when the caller didn't pass a preferredName. Skips frontmatter.
+ * Caps words at 8 so the slug stays manageable.
+ */
+function deriveQueryFromSource(content) {
+    const lines = content.split(/\r?\n/);
+    let inFrontmatter = false;
+    let closed = false;
+    for (let i = 0; i < lines.length; i++) {
+        const trimmed = lines[i].trim();
+        if (i === 0 && trimmed === "---") {
+            inFrontmatter = true;
+            continue;
+        }
+        if (inFrontmatter && !closed) {
+            if (trimmed === "---")
+                closed = true;
+            continue;
+        }
+        if (!trimmed)
+            continue;
+        return trimmed
+            .replace(/^#+\s*/, "")
+            .split(/\s+/)
+            .slice(0, 8)
+            .join(" ");
+    }
+    return "";
+}
+function pickUniqueRawSlug(rawDir, baseSlug) {
+    let candidate = baseSlug;
+    let n = 0;
+    while (fs.existsSync(path.join(rawDir, `${candidate}.md`))) {
+        n += 1;
+        candidate = `${baseSlug}-${n}`;
+    }
+    return candidate;
+}
+function withRawFrontmatter(content, slug) {
+    // If the source already starts with a YAML frontmatter block, keep it — we
+    // don't want to shadow user metadata. The raw location itself is enough to
+    // tag the wikiRole for the indexer.
+    if (content.startsWith("---"))
+        return content;
+    const date = new Date().toISOString().slice(0, 10);
+    return `---\nwikiRole: raw\ningestedAt: ${date}\nslug: ${slug}\n---\n\n${content}`;
+}
+function ensureTrailingNewline(value) {
+    return value.endsWith("\n") ? value : `${value}\n`;
+}
+/**
+ * Copy raw content into `wikis/<name>/raw/<slug>.md`.
+ *
+ * Invariants this owns (which an agent could get wrong):
+ * 1. Raw files never overwrite — collisions get `-1`, `-2`, … suffixes.
+ * 2. Path is guaranteed to stay within the wiki's raw/ directory.
+ * 3. If the content has no frontmatter, a `wikiRole: raw` block is added.
+ *
+ * Does not update the log, does not write any wiki pages. That's the agent's
+ * job (see `akm wiki ingest <name>` for the workflow).
+ */
+export function stashRaw(input) {
+    const wikiDir = resolveWikiDir(input.stashDir, input.wikiName);
+    if (!fs.existsSync(wikiDir)) {
+        throw new NotFoundError(`Wiki not found: ${input.wikiName}. Run \`akm wiki create ${input.wikiName}\` first.`);
+    }
+    const rawDir = path.join(wikiDir, RAW_SUBDIR);
+    fs.mkdirSync(rawDir, { recursive: true });
+    const baseSlug = slugifyForWiki(input.preferredName ?? deriveQueryFromSource(input.content) ?? "source");
+    if (input.explicitSlug === true && fs.existsSync(path.join(rawDir, `${baseSlug}.md`))) {
+        throw new UsageError(`Raw slug "${baseSlug}" already exists in wiki:${input.wikiName}. Pass a different --as or omit --as to auto-increment.`);
+    }
+    const slug = pickUniqueRawSlug(rawDir, baseSlug);
+    const absPath = path.join(rawDir, `${slug}.md`);
+    if (!isWithin(absPath, rawDir)) {
+        throw new UsageError(`Invalid raw path for slug "${slug}".`);
+    }
+    fs.writeFileSync(absPath, ensureTrailingNewline(withRawFrontmatter(input.content, slug)), "utf8");
+    return {
+        slug,
+        path: absPath,
+        wrote: true,
+        ref: `wiki:${input.wikiName}/raw/${slug}`,
+    };
+}
+/**
+ * Deterministic structural lint for a single wiki. No reasoning, no LLM.
+ *
+ * Checks:
+ * - `orphan`: page has no incoming AND no outgoing xrefs
+ * - `broken-xref`: page xref points at a nonexistent wiki page
+ * - `missing-description`: page frontmatter `description` is empty/missing
+ * - `uncited-raw`: `raw/<slug>.md` not listed in any page's `sources:`
+ * - `stale-index`: `index.md` mtime is older than the newest page mtime
+ */
+export function lintWiki(stashDir, name) {
+    const wikiDir = resolveWikiDir(stashDir, name);
+    if (!fs.existsSync(wikiDir)) {
+        throw new NotFoundError(`Wiki not found: ${name}.`);
+    }
+    const pages = listPages(stashDir, name);
+    const { raws, pagesLastModifiedMs } = scanWikiFiles(wikiDir);
+    const pageRefs = new Set(pages.map((p) => p.ref));
+    const incomingXrefs = new Map();
+    for (const page of pages) {
+        for (const xref of page.xrefs ?? []) {
+            incomingXrefs.set(xref, (incomingXrefs.get(xref) ?? 0) + 1);
+        }
+    }
+    const findings = [];
+    // orphans + missing-description + broken-xref
+    for (const page of pages) {
+        const outCount = page.xrefs?.length ?? 0;
+        const inCount = incomingXrefs.get(page.ref) ?? 0;
+        if (outCount === 0 && inCount === 0) {
+            findings.push({
+                kind: "orphan",
+                refs: [page.ref],
+                message: `Page ${page.ref} has no incoming or outgoing xrefs.`,
+            });
+        }
+        if (!page.description) {
+            findings.push({
+                kind: "missing-description",
+                refs: [page.ref],
+                message: `Page ${page.ref} is missing a frontmatter \`description\`.`,
+            });
+        }
+        for (const xref of page.xrefs ?? []) {
+            // Only validate wiki:<this-wiki>/... refs. External refs (other wikis,
+            // knowledge:, skill:, etc.) are left alone — a cross-wiki link is a
+            // feature, not a defect.
+            const target = extractWikiNameFromRef(xref);
+            if (target !== name)
+                continue;
+            if (!pageRefs.has(xref)) {
+                findings.push({
+                    kind: "broken-xref",
+                    refs: [page.ref, xref],
+                    message: `Page ${page.ref} has xref to nonexistent page ${xref}.`,
+                });
+            }
+        }
+    }
+    // uncited-raw
+    const citedRawSlugs = new Set();
+    for (const page of pages) {
+        for (const src of page.sources ?? []) {
+            // accept "raw/<slug>.md" or "raw/<slug>"
+            const match = src.match(/^raw\/([^/\s]+?)(?:\.md)?$/);
+            if (match)
+                citedRawSlugs.add(match[1]);
+        }
+    }
+    for (const rawPath of raws) {
+        const base = path.basename(rawPath, ".md");
+        if (!citedRawSlugs.has(base)) {
+            findings.push({
+                kind: "uncited-raw",
+                refs: [`wiki:${name}/raw/${base}`],
+                message: `Raw source raw/${base}.md is not cited by any page's sources: frontmatter.`,
+            });
+        }
+    }
+    // broken-source: each page's sources: entries must resolve to an existing raw file.
+    for (const page of pages) {
+        for (const src of page.sources ?? []) {
+            const match = src.match(/^raw\/([^/\s]+?)(?:\.md)?$/);
+            if (!match)
+                continue; // non-raw source entries are out of scope
+            const slug = match[1];
+            const rawFilePath = path.join(wikiDir, RAW_SUBDIR, `${slug}.md`);
+            if (!fs.existsSync(rawFilePath)) {
+                findings.push({
+                    kind: "broken-source",
+                    refs: [page.ref],
+                    message: `Page "${page.ref}" references missing raw source "raw/${slug}".`,
+                });
+            }
+        }
+    }
+    // stale-index: compare index.md's mtime to the newest PAGE mtime only.
+    // Stashing a raw source or appending to log.md must NOT flag the index as
+    // stale — the index catalogs pages, not raws or meta files.
+    const indexPath = path.join(wikiDir, INDEX_MD);
+    try {
+        const indexMtimeMs = fs.statSync(indexPath).mtimeMs;
+        if (pagesLastModifiedMs !== undefined && pagesLastModifiedMs > indexMtimeMs + 1) {
+            // +1 ms fudge factor: when index is regenerated in the same tick as a
+            // page, the two stats can tie exactly; don't flag equality.
+            findings.push({
+                kind: "stale-index",
+                refs: [`wiki:${name}/index`],
+                message: `index.md is older than the newest page. Run \`akm index\` to regenerate.`,
+            });
+        }
+    }
+    catch {
+        // No index.md — report it as a stale/missing index so `akm wiki lint`
+        // still gives actionable output without erroring out.
+        findings.push({
+            kind: "stale-index",
+            refs: [`wiki:${name}/index`],
+            message: "index.md is missing. Run `akm index` to regenerate.",
+        });
+    }
+    return {
+        wiki: name,
+        pagesScanned: pages.length,
+        rawsScanned: raws.length,
+        findings,
+    };
+}
+// ── Index regeneration ─────────────────────────────────────────────────────
+/**
+ * Rebuild a wiki's `index.md` from its pages' frontmatter.
+ *
+ * Pages are grouped by `pageKind` (falling back to `uncategorised`) and
+ * listed alphabetically inside each group. If the wiki directory doesn't
+ * exist or has no pages, a fresh empty template is written.
+ *
+ * The function is best-effort: it catches all filesystem errors and returns
+ * a boolean so callers (the indexer) can keep going even if one wiki is
+ * broken. Never throws.
+ */
+export function regenerateWikiIndex(stashDir, name) {
+    try {
+        const wikiDir = resolveWikiDir(stashDir, name);
+        if (!fs.existsSync(wikiDir))
+            return false;
+        const pages = listPages(stashDir, name);
+        if (pages.length === 0) {
+            fs.writeFileSync(path.join(wikiDir, INDEX_MD), buildIndexMd(name), "utf8");
+            return true;
+        }
+        const byKind = new Map();
+        for (const page of pages) {
+            const kind = page.pageKind ?? "uncategorised";
+            const group = byKind.get(kind);
+            if (group)
+                group.push(page);
+            else
+                byKind.set(kind, [page]);
+        }
+        const kindOrder = Array.from(byKind.keys()).sort((a, b) => {
+            if (a === "uncategorised")
+                return 1;
+            if (b === "uncategorised")
+                return -1;
+            return a.localeCompare(b);
+        });
+        const lines = [
+            "---",
+            `description: Catalog of pages in the ${name} wiki. Regenerated by \`akm index\`.`,
+            "wikiRole: index",
+            "---",
+            "",
+            `# ${name} — index`,
+            "",
+            "_This file is regenerated on every `akm index` run. Manual edits are preserved until the next regeneration, then replaced._",
+            "",
+        ];
+        for (const kind of kindOrder) {
+            const group = (byKind.get(kind) ?? []).slice().sort((a, b) => a.name.localeCompare(b.name));
+            const heading = kind.charAt(0).toUpperCase() + kind.slice(1);
+            lines.push(`## ${heading}`);
+            lines.push("");
+            for (const page of group) {
+                const desc = page.description ? ` — ${page.description}` : "";
+                lines.push(`- \`${page.ref}\`${desc}`);
+            }
+            lines.push("");
+        }
+        fs.writeFileSync(path.join(wikiDir, INDEX_MD), lines.join("\n"), "utf8");
+        return true;
+    }
+    catch {
+        return false;
+    }
+}
+/**
+ * Regenerate `index.md` for every wiki found under `<stashDir>/wikis/`.
+ *
+ * Called from `akmIndex()` as a side effect after the FTS rebuild. Never
+ * throws; returns the list of wiki names that were regenerated.
+ */
+export function regenerateAllWikiIndexes(stashDir) {
+    const wikisRoot = resolveWikisRoot(stashDir);
+    if (!fs.existsSync(wikisRoot))
+        return [];
+    let entries;
+    try {
+        entries = fs.readdirSync(wikisRoot, { withFileTypes: true });
+    }
+    catch {
+        return [];
+    }
+    const regenerated = [];
+    for (const entry of entries) {
+        if (!entry.isDirectory())
+            continue;
+        if (!WIKI_NAME_RE.test(entry.name))
+            continue;
+        if (regenerateWikiIndex(stashDir, entry.name))
+            regenerated.push(entry.name);
+    }
+    return regenerated;
+}
+/**
+ * Build a markdown workflow string for ingesting a source into the named
+ * wiki. Does NOT perform the ingest — it prints the recipe the agent
+ * follows using the other eight verbs plus its native file tools.
+ *
+ * The workflow is parameterised with the wiki's resolved absolute path and
+ * schema location so the agent can jump straight in without any additional
+ * lookup. Because the output references the CLI by name (`akm wiki stash`
+ * etc.), the printer never drifts from the actual command surface — changing
+ * a verb here and in the printer stays colocated.
+ */
+export function buildIngestWorkflow(stashDir, name) {
+    const wikiDir = resolveWikiDir(stashDir, name);
+    if (!fs.existsSync(wikiDir)) {
+        throw new NotFoundError(`Wiki not found: ${name}. Run \`akm wiki create ${name}\` first.`);
+    }
+    const schemaPath = path.join(wikiDir, SCHEMA_MD);
+    const workflow = `# Ingest workflow for wiki:${name}
+
+Wiki location: ${wikiDir}
+Schema: ${schemaPath}
+
+Follow these steps. akm commands handle the invariants; use your native
+Read/Write/Edit tools for page edits.
+
+1. **Read the schema.** Open \`${schemaPath}\`. It defines the voice, page
+   kinds, contradiction policy, and any wiki-specific conventions. Do not
+   skip this step even on familiar wikis — the schema may have changed.
+
+2. **File the source under \`raw/\`.**
+   \`\`\`sh
+   akm wiki stash ${name} <path-or-url-to-source>
+   # or: cat <source> | akm wiki stash ${name} -
+   \`\`\`
+   Returns \`{ slug, path, ref }\`. The raw copy is immutable — never edit it.
+
+3. **Find related existing pages.**
+   \`\`\`sh
+   akm wiki search ${name} "<key terms from the source>"
+   \`\`\`
+   Read the top hits with \`akm show wiki:${name}/<page>\`. Use
+   \`akm show wiki:${name}/<page> toc\` for large pages.
+
+4. **Decide for each candidate.** For each related page:
+   - **Append**: add a section or paragraph under the relevant heading.
+     Include the raw source in the page's \`sources:\` frontmatter list.
+   - **Contradict**: note the tension explicitly; don't silently overwrite.
+     Follow the schema's contradiction policy.
+   - **Skip**: source doesn't add to this page — move on.
+
+5. **Create new pages for concepts/entities the source introduces.** Each
+   new page must have frontmatter with \`description\`, \`pageKind\`,
+   \`xrefs\`, and \`sources\`. Cross-reference with related pages both
+   directions.
+
+6. **Update xrefs both ways.** If page A now xrefs page B, page B must xref
+   page A. \`akm wiki lint ${name}\` will flag violations.
+
+7. **Append to \`log.md\`.** One entry per ingest: date, source slug, one-line
+   summary, refs to created/edited pages. Newest at the top.
+
+8. **Regenerate the index + verify.**
+   \`\`\`sh
+   akm index
+   akm wiki lint ${name}
+   \`\`\`
+   Resolve any lint findings before calling the ingest done.
+
+That's it. \`akm\` never calls an LLM — reasoning is your job; it just owns
+the invariants (raw immutability, unique slugs, ref validation, index
+regeneration, structural lint).
+`;
+    return { wiki: name, path: wikiDir, schemaPath, workflow };
+}
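For reference, the two pure helpers exported above are fully deterministic, so their behaviour is easy to pin down. A small sketch — expected values are worked out from the regexes in the diff, not taken from the package's tests, and the import specifier is an assumption:

```js
// Expected behaviour of the pure helpers in dist/wiki.js, reasoned from the
// source above; the import specifier is hypothetical.
import { slugifyForWiki, extractWikiNameFromRef } from "akm-cli/dist/wiki.js";

slugifyForWiki("# My Notes: LLM Eval!");                // "my-notes-llm-eval"
slugifyForWiki("   ");                                  // "note-<base36 timestamp>" fallback
extractWikiNameFromRef("wiki:platform/services/auth");  // "platform"
extractWikiNameFromRef("knowledge:pg-failover");        // undefined (not a wiki ref)
```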