hypomnema 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +11 -0
- package/LICENSE +21 -0
- package/README.ko.md +160 -0
- package/README.md +160 -0
- package/commands/.gitkeep +0 -0
- package/commands/crystallize.md +116 -0
- package/commands/doctor.md +66 -0
- package/commands/feedback.md +67 -0
- package/commands/graph.md +54 -0
- package/commands/ingest.md +85 -0
- package/commands/init.md +101 -0
- package/commands/lint.md +55 -0
- package/commands/query.md +55 -0
- package/commands/resume.md +48 -0
- package/commands/stats.md +39 -0
- package/commands/uninstall.md +52 -0
- package/commands/upgrade.md +63 -0
- package/commands/verify.md +60 -0
- package/docs/.gitkeep +0 -0
- package/docs/ARCHITECTURE.md +183 -0
- package/docs/CONTRIBUTING.md +115 -0
- package/docs/TEST-CASES.md +580 -0
- package/hooks/.gitkeep +0 -0
- package/hooks/hooks.json +109 -0
- package/hooks/hypo-auto-commit.mjs +36 -0
- package/hooks/hypo-auto-stage.mjs +30 -0
- package/hooks/hypo-compact-guard.mjs +71 -0
- package/hooks/hypo-cwd-change.mjs +91 -0
- package/hooks/hypo-file-watch.mjs +47 -0
- package/hooks/hypo-first-prompt.mjs +59 -0
- package/hooks/hypo-hot-rebuild.mjs +95 -0
- package/hooks/hypo-lookup.mjs +178 -0
- package/hooks/hypo-personal-check.mjs +195 -0
- package/hooks/hypo-session-start.mjs +141 -0
- package/hooks/hypo-shared.mjs +213 -0
- package/package.json +37 -0
- package/scripts/.gitkeep +0 -0
- package/scripts/bump-version.mjs +53 -0
- package/scripts/crystallize.mjs +153 -0
- package/scripts/doctor.mjs +361 -0
- package/scripts/feedback.mjs +130 -0
- package/scripts/graph.mjs +183 -0
- package/scripts/ingest.mjs +130 -0
- package/scripts/init.mjs +515 -0
- package/scripts/lib/frontmatter.mjs +11 -0
- package/scripts/lib/hypo-ignore.mjs +54 -0
- package/scripts/lib/hypo-root.mjs +53 -0
- package/scripts/lint.mjs +210 -0
- package/scripts/query.mjs +124 -0
- package/scripts/resume.mjs +115 -0
- package/scripts/stats.mjs +132 -0
- package/scripts/uninstall.mjs +188 -0
- package/scripts/upgrade.mjs +538 -0
- package/scripts/verify.mjs +172 -0
- package/skills/.gitkeep +0 -0
- package/skills/crystallize/SKILL.md +85 -0
- package/skills/graph/SKILL.md +54 -0
- package/skills/ingest/SKILL.md +83 -0
- package/skills/lint/SKILL.md +55 -0
- package/skills/query/SKILL.md +58 -0
- package/skills/verify/SKILL.md +92 -0
- package/templates/.gitkeep +0 -0
- package/templates/.hypoignore +18 -0
- package/templates/Home.md +34 -0
- package/templates/Overview.md +50 -0
- package/templates/SCHEMA.md +106 -0
- package/templates/hot.md +22 -0
- package/templates/hypo-automation.md +69 -0
- package/templates/hypo-config.md +41 -0
- package/templates/hypo-guide.md +146 -0
- package/templates/hypo-help.md +53 -0
- package/templates/index.md +44 -0
- package/templates/log.md +25 -0
- package/templates/pages/_index.md +61 -0
- package/templates/projects/_template/hot.md +28 -0
- package/templates/projects/_template/index.md +39 -0
- package/templates/projects/_template/prd.md +29 -0
- package/templates/projects/_template/session-state.md +9 -0
- package/templates/session-state.md +12 -0
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Hypomnema verify script
|
|
4
|
+
*
|
|
5
|
+
* Checks verify_by and verify_by_date fields across wiki pages.
|
|
6
|
+
* More detailed than the doctor check: shows the actual verify_by questions
|
|
7
|
+
* and groups results by status.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* node scripts/verify.mjs [options]
|
|
11
|
+
*
|
|
12
|
+
* Options:
|
|
13
|
+
* --hypo-dir=<path> Hypomnema root (default: resolved via HYPO_DIR / hypo-config.md / ~/hypomnema)
|
|
14
|
+
* --file=<path> Check a single file only
|
|
15
|
+
* --json Output as JSON
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
|
19
|
+
import { join, relative, extname } from 'path';
|
|
20
|
+
import { resolveHypoRoot, expandHome } from './lib/hypo-root.mjs';
|
|
21
|
+
import { loadHypoIgnore, isIgnored } from './lib/hypo-ignore.mjs';
|
|
22
|
+
|
|
23
|
+
// ── arg parsing ──────────────────────────────────────────────────────────────
|
|
24
|
+
|
|
25
|
+
/**
 * Parse CLI arguments for the verify script.
 *
 * Accepted flags:
 *   --hypo-dir=<path>   Hypomnema root (also accepted as --wiki-dir=<path>,
 *                       the flag name used by skills/verify/SKILL.md)
 *   --file=<path>       restrict the check to a single file
 *   --json              emit JSON instead of human-readable output
 *
 * When no root flag is given, falls back to the standard resolution chain
 * (HYPO_DIR / hypo-config.md scan / ~/hypomnema) via resolveHypoRoot().
 *
 * @param {string[]} argv - full process.argv (first two entries skipped)
 * @returns {{hypoDir: string, file: string|null, json: boolean}}
 */
function parseArgs(argv) {
  const args = { hypoDir: null, file: null, json: false };
  for (const arg of argv.slice(2)) {
    if (arg.startsWith('--hypo-dir=')) args.hypoDir = expandHome(arg.slice('--hypo-dir='.length));
    // Alias: the /hypo:verify skill documentation tells callers to pass
    // --wiki-dir, so accept it here too instead of silently ignoring it.
    else if (arg.startsWith('--wiki-dir=')) args.hypoDir = expandHome(arg.slice('--wiki-dir='.length));
    else if (arg.startsWith('--file=')) args.file = expandHome(arg.slice('--file='.length));
    else if (arg === '--json') args.json = true;
  }
  if (!args.hypoDir) args.hypoDir = resolveHypoRoot();
  return args;
}
|
|
35
|
+
|
|
36
|
+
// ── helpers ──────────────────────────────────────────────────────────────────
|
|
37
|
+
|
|
38
|
+
/**
 * Recursively gather Markdown files under `dir`.
 *
 * Dotfiles and dot-directories are always skipped; when `hypoDir` is
 * non-empty, paths matching the .hypoignore patterns are skipped as well.
 * A missing `dir` simply yields no results.
 *
 * @param {string} dir - directory to scan
 * @param {string[]} acc - accumulator array (also the return value)
 * @param {string} hypoDir - wiki root used for ignore matching ('' disables it)
 * @param {string[]} ignorePatterns - patterns loaded via loadHypoIgnore
 * @returns {string[]} paths of every non-ignored .md file found
 */
function collectMdFiles(dir, acc = [], hypoDir = '', ignorePatterns = []) {
  if (!existsSync(dir)) return acc;

  const visible = readdirSync(dir).filter((name) => !name.startsWith('.'));
  for (const name of visible) {
    const fullPath = join(dir, name);
    if (hypoDir && isIgnored(fullPath, hypoDir, ignorePatterns)) continue;

    const info = statSync(fullPath);
    if (info.isDirectory()) {
      collectMdFiles(fullPath, acc, hypoDir, ignorePatterns);
    } else if (extname(name) === '.md') {
      acc.push(fullPath);
    }
  }
  return acc;
}
|
|
50
|
+
|
|
51
|
+
/**
 * Parse a leading "--- ... ---" frontmatter block into a flat string map.
 *
 * Each `key: value` line is split on its first colon; the value is trimmed,
 * any trailing inline `#` comment is stripped, and one layer of surrounding
 * quotes is removed. Lines without a colon are ignored. Nested YAML is not
 * supported. CRLF line endings are tolerated.
 *
 * @param {string} content - full file text
 * @returns {Object<string,string>|null} field map, or null if the file has
 *   no frontmatter block
 */
function parseFrontmatter(content) {
  const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
  if (!match) return null;

  const fields = {};
  for (const rawLine of match[1].split('\n')) {
    const sep = rawLine.indexOf(':');
    if (sep < 0) continue;
    const key = rawLine.slice(0, sep).trim();
    const value = rawLine
      .slice(sep + 1)
      .trim()
      .replace(/\s*#.*$/, '')       // drop trailing inline comment
      .replace(/^["']|["']$/g, ''); // strip surrounding quotes
    fields[key] = value;
  }
  return fields;
}
|
|
62
|
+
|
|
63
|
+
// Page types that participate in verification tracking; everything else is skipped.
const VERIFIED_TYPES = new Set(['adr', 'page', 'learning', 'concept', 'playbook', 'tool-eval']);

// ── main ─────────────────────────────────────────────────────────────────────

const args = parseArgs(process.argv);
const today = new Date().toISOString().slice(0, 10); // YYYY-MM-DD (UTC)

// Either check one explicit file, or scan every .md under pages/ and projects/.
let files;
if (args.file) {
  files = [args.file];
} else {
  const ignorePatterns = loadHypoIgnore(args.hypoDir);
  files = ['pages', 'projects']
    .map((sub) => join(args.hypoDir, sub))
    .flatMap((dir) => collectMdFiles(dir, [], args.hypoDir, ignorePatterns));
}

// Result buckets: overdue (deadline passed), upcoming (due within 14 days),
// missing (no verify_by at all), ok (everything else that is tracked).
const overdue = [];
const upcoming = [];
const missing = [];
const ok = [];

for (const file of files) {
  let content;
  try {
    content = readFileSync(file, 'utf-8');
  } catch {
    continue; // unreadable file — skip silently
  }
  const fm = parseFrontmatter(content);
  if (!fm) continue;

  const type = fm.type || '';
  if (!VERIFIED_TYPES.has(type)) continue;

  const rel = args.hypoDir ? relative(args.hypoDir, file) : file;
  const entry = {
    file: rel,
    title: fm.title || rel,
    type,
    verify_by: fm.verify_by || null,
    verify_by_date: fm.verify_by_date || null,
  };

  if (!fm.verify_by) {
    missing.push(entry);
    continue;
  }

  const hasValidDate = fm.verify_by_date && /^\d{4}-\d{2}-\d{2}$/.test(fm.verify_by_date);
  if (!hasValidDate) {
    // verify_by set but no parseable deadline: tracked, never becomes due.
    ok.push(entry);
    continue;
  }

  const daysUntil = Math.ceil((new Date(fm.verify_by_date) - new Date(today)) / 86400000);
  if (fm.verify_by_date < today) {
    overdue.push({ ...entry, daysOverdue: -daysUntil });
  } else if (daysUntil <= 14) {
    upcoming.push({ ...entry, daysUntil });
  } else {
    ok.push(entry);
  }
}

if (args.json) {
  console.log(JSON.stringify({ overdue, upcoming, missing, ok }, null, 2));
  process.exit(overdue.length > 0 ? 1 : 0);
}

const total = overdue.length + upcoming.length + missing.length + ok.length;
console.log(`Scanned ${total} tracked page(s)\n`);

if (overdue.length > 0) {
  console.log(`✗ Overdue (${overdue.length}):`);
  for (const p of overdue) {
    console.log(` ${p.file} [${p.daysOverdue}d overdue]`);
    console.log(` verify_by: ${p.verify_by}`);
  }
  console.log('');
}

if (upcoming.length > 0) {
  console.log(`⚠ Due soon (${upcoming.length}):`);
  for (const p of upcoming) {
    console.log(` ${p.file} [in ${p.daysUntil}d, ${p.verify_by_date}]`);
    console.log(` verify_by: ${p.verify_by}`);
  }
  console.log('');
}

if (missing.length > 0) {
  console.log(`⚠ Missing verify_by (${missing.length}):`);
  for (const p of missing) console.log(` ${p.file}`);
  console.log('');
}

if (ok.length > 0 && overdue.length === 0 && upcoming.length === 0) {
  console.log(`✓ All ${ok.length} page(s) verified and up to date`);
}

// One-line rollup of every non-empty bucket.
const summaryParts = [];
if (overdue.length) summaryParts.push(`${overdue.length} overdue`);
if (upcoming.length) summaryParts.push(`${upcoming.length} due soon`);
if (missing.length) summaryParts.push(`${missing.length} missing verify_by`);
if (ok.length) summaryParts.push(`${ok.length} ok`);

console.log(`Result: ${summaryParts.join(', ') || 'nothing to verify'}`);

// Emit signal lines for overdue/upcoming pages so /hypo:verify skill can drive review
const needsReview = [...overdue, ...upcoming].filter((p) => p.verify_by);
if (needsReview.length > 0) {
  console.log('');
  for (const p of needsReview) {
    console.log(`[HYPO VERIFY QUESTION: ${p.verify_by} (file: ${p.file})]`);
  }
}

if (overdue.length > 0) process.exit(1);
package/skills/.gitkeep
ADDED
|
File without changes
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Surface synthesis candidates and consolidate scattered wiki knowledge into stable pages
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
You are running `/hypo:crystallize`. Find pages that are ready to be consolidated into stable, cross-linked knowledge — then guide the synthesis.
|
|
6
|
+
|
|
7
|
+
## What this does
|
|
8
|
+
|
|
9
|
+
- Finds tag groups with ≥ N pages sharing the same tag (synthesis candidates)
|
|
10
|
+
- Lists orphan pages (no outbound `[[wikilinks]]`)
|
|
11
|
+
- Lists draft / stub pages that could be fleshed out
|
|
12
|
+
- After the script runs, you help the user pick what to crystallize and do it
|
|
13
|
+
|
|
14
|
+
---
|
|
15
|
+
|
|
16
|
+
## Step 1 — Locate package root
|
|
17
|
+
|
|
18
|
+
Locate the Hypomnema package root — the directory two levels above this file: `skills/<name>/SKILL.md` → package root.
|
|
19
|
+
|
|
20
|
+
If the user specified a wiki directory, pass it as `--wiki-dir="<path>"`. Otherwise omit the flag and the script resolves the wiki root automatically via `HYPO_DIR` → `hypo-config.md` scan → `~/hypomnema`.
|
|
21
|
+
|
|
22
|
+
---
|
|
23
|
+
|
|
24
|
+
## Step 2 — Run crystallize scan
|
|
25
|
+
|
|
26
|
+
```bash
|
|
27
|
+
node <package-root>/scripts/crystallize.mjs \
|
|
28
|
+
[--wiki-dir="<path>"] \
|
|
29
|
+
[--min-group=<n>] \
|
|
30
|
+
[--json]
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
Options:
|
|
34
|
+
- `--min-group=<n>` — minimum pages per tag group to report (default: 2)
|
|
35
|
+
- `--json` — output results as JSON
|
|
36
|
+
|
|
37
|
+
Show the output verbatim.
|
|
38
|
+
|
|
39
|
+
---
|
|
40
|
+
|
|
41
|
+
## Step 3 — Session-close checklist (if triggered at session end)
|
|
42
|
+
|
|
43
|
+
If `/hypo:crystallize` was invoked as a session-close action, run through this checklist before synthesizing. Proceed automatically without confirmation only if the user has said "auto"; otherwise confirm each item with the user first.
|
|
44
|
+
|
|
45
|
+
1. **session-state.md** — update `projects/<name>/session-state.md` with the next tasks list (what to tackle first next time).
|
|
46
|
+
2. **hot.md (project)** — update `projects/<name>/hot.md` with a session snapshot: what changed and decisions made. Keep under 500 words. Do not put next-step tasks here; those belong in session-state.md.
|
|
47
|
+
3. **hot.md (root)** — update `<wiki-root>/hot.md` active-projects pointer table: set the `Last Session` date for this project to today.
|
|
48
|
+
4. **session-log** — append a session entry to `projects/<name>/session-log/YYYY-MM.md` (create the file if it does not exist for this month).
|
|
49
|
+
5. **open-questions** — only if `pages/open-questions.md` exists and questions were raised or resolved this session: move resolved ones out; add newly raised ones. Skip if unchanged.
|
|
50
|
+
6. **log.md** — append a `session` entry to `<wiki-root>/log.md`.
|
|
51
|
+
|
|
52
|
+
After completing the checklist, report each item with ✓ and ask: "Session closed. Would you like to also run knowledge synthesis now, or stop here?"
|
|
53
|
+
|
|
54
|
+
---
|
|
55
|
+
|
|
56
|
+
## Step 4 — Pick a synthesis target
|
|
57
|
+
|
|
58
|
+
Present the top candidates from the script output:
|
|
59
|
+
- Tag clusters with the most pages
|
|
60
|
+
- Long-standing orphans
|
|
61
|
+
- Pages marked `status: draft`
|
|
62
|
+
|
|
63
|
+
Ask: "Which of these would you like to crystallize now? (or 'all' / 'skip')"
|
|
64
|
+
|
|
65
|
+
---
|
|
66
|
+
|
|
67
|
+
## Step 5 — Synthesize
|
|
68
|
+
|
|
69
|
+
For the chosen target:
|
|
70
|
+
|
|
71
|
+
1. Read all pages in the cluster.
|
|
72
|
+
2. Write a new synthesis page at `pages/syntheses/<slug>.md` with:
|
|
73
|
+
|
|
74
|
+
```yaml
|
|
75
|
+
---
|
|
76
|
+
title: <synthesis title>
|
|
77
|
+
type: synthesis
|
|
78
|
+
tags: [<shared tags>]
|
|
79
|
+
updated: <today YYYY-MM-DD>
|
|
80
|
+
evidence_strength: inferred
|
|
81
|
+
---
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
3. Cross-link all source pages back to the synthesis with `[[wikilink]]`.
|
|
85
|
+
4. Add the synthesis to `index.md` under `## Pages — Syntheses`.
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Generate a wikilink dependency graph from wiki pages
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
You are running `/hypo:graph`. Build a wikilink dependency graph from all pages under `pages/` and `projects/` and output it in the requested format.
|
|
6
|
+
|
|
7
|
+
## What this produces
|
|
8
|
+
|
|
9
|
+
- **json** (default) — adjacency list with in/out-degree counts per node, sorted by total edges
|
|
10
|
+
- **mermaid** — `graph TD` Mermaid diagram (paste into any Mermaid renderer)
|
|
11
|
+
- **dot** — Graphviz DOT format (pipe to `dot -Tsvg` for an SVG)
|
|
12
|
+
|
|
13
|
+
---
|
|
14
|
+
|
|
15
|
+
## Step 1 — Locate package root
|
|
16
|
+
|
|
17
|
+
Locate the Hypomnema package root — the directory two levels above this file: `skills/<name>/SKILL.md` → package root.
|
|
18
|
+
|
|
19
|
+
If the user specified a wiki directory, pass it as `--wiki-dir="<path>"`. Otherwise omit the flag and the script resolves the wiki root automatically via `HYPO_DIR` → `hypo-config.md` scan → `~/hypomnema`.
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## Step 2 — Ask for output format (optional)
|
|
24
|
+
|
|
25
|
+
If the user did not specify a format, ask:
|
|
26
|
+
|
|
27
|
+
> "Output format? (json / mermaid / dot) [json]"
|
|
28
|
+
|
|
29
|
+
Default: `json`
|
|
30
|
+
|
|
31
|
+
Optionally ask:
|
|
32
|
+
|
|
33
|
+
> "Minimum edges to include a node? (0 = all) [0]"
|
|
34
|
+
|
|
35
|
+
---
|
|
36
|
+
|
|
37
|
+
## Step 3 — Run graph
|
|
38
|
+
|
|
39
|
+
```bash
|
|
40
|
+
node <package-root>/scripts/graph.mjs \
|
|
41
|
+
[--wiki-dir="<path>"] \
|
|
42
|
+
[--format=json|mermaid|dot] \
|
|
43
|
+
[--min-edges=<n>]
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
---
|
|
47
|
+
|
|
48
|
+
## Step 4 — Present results
|
|
49
|
+
|
|
50
|
+
- **json**: summarise the top 10 most-connected nodes (by `in + out`), then offer to show the full JSON.
|
|
51
|
+
- **mermaid**: wrap the output in a fenced code block tagged `mermaid` so it renders inline.
|
|
52
|
+
- **dot**: wrap the output in a fenced code block tagged `dot` and suggest the user pipe it to `dot -Tsvg -o graph.svg`.
|
|
53
|
+
|
|
54
|
+
If the graph has 0 edges, note that no `[[wikilinks]]` were found between pages.
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Add a source document to the wiki and synthesize a source-summary page
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
You are running `/hypo:ingest`. Add a new source document to `sources/` and create (or update) its corresponding `source-summary` page under `pages/`.
|
|
6
|
+
|
|
7
|
+
## What this does
|
|
8
|
+
|
|
9
|
+
- Checks which files in `sources/` are missing a `source-summary` page
|
|
10
|
+
- Reports pages that reference a source file that does not exist in `sources/`
|
|
11
|
+
- After the script runs, guides you to synthesize a summary for any un-ingested source
|
|
12
|
+
|
|
13
|
+
---
|
|
14
|
+
|
|
15
|
+
## Step 1 — Locate package root
|
|
16
|
+
|
|
17
|
+
Locate the Hypomnema package root — the directory two levels above this file: `skills/<name>/SKILL.md` → package root.
|
|
18
|
+
|
|
19
|
+
If the user specified a wiki directory, pass it as `--wiki-dir="<path>"`. Otherwise omit the flag and the script resolves the wiki root automatically via `HYPO_DIR` → `hypo-config.md` scan → `~/hypomnema`.
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## Step 2 — Run ingest status check
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
node <package-root>/scripts/ingest.mjs [--wiki-dir="<path>"] [--json]
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
Options:
|
|
30
|
+
- `--json` — output results as JSON (useful for tooling)
|
|
31
|
+
|
|
32
|
+
Show the output verbatim.
|
|
33
|
+
|
|
34
|
+
---
|
|
35
|
+
|
|
36
|
+
## Step 3 — Handle the source file
|
|
37
|
+
|
|
38
|
+
**If the user provided a file or URL to ingest:**
|
|
39
|
+
|
|
40
|
+
1. Copy or download the source into `<wiki-root>/sources/<slug>.<ext>` (e.g., `sources/2026-05-07-article-title.md`).
|
|
41
|
+
2. Confirm the file is now present.
|
|
42
|
+
|
|
43
|
+
**If no file was provided:**
|
|
44
|
+
|
|
45
|
+
List un-ingested sources from the script output and ask which one to process now.
|
|
46
|
+
|
|
47
|
+
---
|
|
48
|
+
|
|
49
|
+
## Step 4 — Synthesize a source-summary page
|
|
50
|
+
|
|
51
|
+
For the chosen source, read its content and create `pages/<slug>.md` with the following frontmatter:
|
|
52
|
+
|
|
53
|
+
```yaml
|
|
54
|
+
---
|
|
55
|
+
title: <descriptive title>
|
|
56
|
+
type: source-summary
|
|
57
|
+
source: <filename>
|
|
58
|
+
tags: [<relevant tags>]
|
|
59
|
+
updated: <today YYYY-MM-DD>
|
|
60
|
+
evidence_strength: direct # or inferred
|
|
61
|
+
---
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
Then write a concise summary:
|
|
65
|
+
- Key ideas (bullet list)
|
|
66
|
+
- Why this source matters to the wiki
|
|
67
|
+
- Any open questions or follow-up items
|
|
68
|
+
|
|
69
|
+
Cross-reference existing pages with `[[wikilink]]` syntax where relevant.
|
|
70
|
+
|
|
71
|
+
---
|
|
72
|
+
|
|
73
|
+
## Step 5 — Update log.md
|
|
74
|
+
|
|
75
|
+
Append an ingest entry to `<wiki-root>/log.md`:
|
|
76
|
+
|
|
77
|
+
```
|
|
78
|
+
## <YYYY-MM-DD> ingest — <slug>
|
|
79
|
+
|
|
80
|
+
- source: sources/<filename>
|
|
81
|
+
- summary: pages/<slug>.md
|
|
82
|
+
- tags: <tags>
|
|
83
|
+
```
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Lint wiki pages for frontmatter and broken wikilinks
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
You are running `/hypo:lint`. Validate all wiki pages for frontmatter correctness and broken `[[wikilink]]` references.
|
|
6
|
+
|
|
7
|
+
## What this checks
|
|
8
|
+
|
|
9
|
+
- Every `.md` file under `pages/` and `projects/` should have `---` frontmatter (missing frontmatter is a warning, not an error)
|
|
10
|
+
- Required fields: `title`, `type`
|
|
11
|
+
- `type` must be one of the recognised values (concept, source-summary, learning, adr, …)
|
|
12
|
+
- `updated` field should be present
|
|
13
|
+
- All `[[wikilinks]]` must resolve to an existing page slug
|
|
14
|
+
|
|
15
|
+
---
|
|
16
|
+
|
|
17
|
+
## Step 1 — Locate package root
|
|
18
|
+
|
|
19
|
+
Locate the Hypomnema package root — the directory two levels above this file: `skills/<name>/SKILL.md` → package root.
|
|
20
|
+
|
|
21
|
+
If the user specified a wiki directory, pass it as `--wiki-dir="<path>"`. Otherwise omit the flag and the script resolves the wiki root automatically via `HYPO_DIR` → `hypo-config.md` scan → `~/hypomnema`.
|
|
22
|
+
|
|
23
|
+
---
|
|
24
|
+
|
|
25
|
+
## Step 2 — Run lint
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
node <package-root>/scripts/lint.mjs [--wiki-dir="<path>"] [--json] [--fix]
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
Options:
|
|
32
|
+
- `--json` — output results as JSON (useful for tooling)
|
|
33
|
+
- `--fix` — auto-add missing `updated` field (safe repairs only; no other fields are modified)
|
|
34
|
+
|
|
35
|
+
Show the output verbatim.
|
|
36
|
+
|
|
37
|
+
---
|
|
38
|
+
|
|
39
|
+
## Step 3 — Interpret results
|
|
40
|
+
|
|
41
|
+
- `✓ No lint issues found` — wiki is clean
|
|
42
|
+
- `✗ <file>: <message>` — error (missing required field or malformed frontmatter); must be fixed
|
|
43
|
+
- `⚠ <file>: <message>` — warning (unknown type, missing `updated`, broken link); worth fixing
|
|
44
|
+
|
|
45
|
+
A non-zero exit code means at least one **error** was found (warnings alone do not produce a non-zero exit code).
|
|
46
|
+
|
|
47
|
+
---
|
|
48
|
+
|
|
49
|
+
## Step 4 — Offer to fix
|
|
50
|
+
|
|
51
|
+
For **broken wikilinks**: list the affected files and ask if the user wants help correcting the links now.
|
|
52
|
+
|
|
53
|
+
For **missing `updated`**: suggest running with `--fix` to auto-add `updated: <today>` to each affected page's frontmatter. Note: `--fix` only repairs files that already have a valid, closed frontmatter block — files with no frontmatter or malformed frontmatter are skipped.
|
|
54
|
+
|
|
55
|
+
For **missing required fields** (`title`, `type`): open the affected files and help the user fill them in.
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Search wiki pages by keyword and retrieve relevant knowledge
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
You are running `/hypo:query`. Full-text search across all wiki pages and projects, then synthesize an answer from the matching pages.
|
|
6
|
+
|
|
7
|
+
## What this does
|
|
8
|
+
|
|
9
|
+
- Searches `pages/` and `projects/` for the given query terms
|
|
10
|
+
- Returns matching files with a context excerpt and frontmatter summary
|
|
11
|
+
- You then read the top results and synthesize an answer
|
|
12
|
+
|
|
13
|
+
---
|
|
14
|
+
|
|
15
|
+
## Step 1 — Locate package root
|
|
16
|
+
|
|
17
|
+
Locate the Hypomnema package root — the directory two levels above this file: `skills/<name>/SKILL.md` → package root.
|
|
18
|
+
|
|
19
|
+
If the user specified a wiki directory, pass it as `--wiki-dir="<path>"`. Otherwise omit the flag and the script resolves the wiki root automatically via `HYPO_DIR` → `hypo-config.md` scan → `~/hypomnema`.
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## Step 2 — Extract the query
|
|
24
|
+
|
|
25
|
+
Use the search terms from the user's message. If no query was provided, ask:
|
|
26
|
+
|
|
27
|
+
> "What would you like to search for in your wiki?"
|
|
28
|
+
|
|
29
|
+
---
|
|
30
|
+
|
|
31
|
+
## Step 3 — Run query
|
|
32
|
+
|
|
33
|
+
```bash
|
|
34
|
+
node <package-root>/scripts/query.mjs \
|
|
35
|
+
--q="<search terms>" \
|
|
36
|
+
[--wiki-dir="<path>"] \
|
|
37
|
+
[--limit=<n>] \
|
|
38
|
+
[--json]
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
Options:
|
|
42
|
+
- `--q=<query>` — search query (required)
|
|
43
|
+
- `--limit=<n>` — max results (default: 10)
|
|
44
|
+
- `--json` — output results as JSON
|
|
45
|
+
|
|
46
|
+
Show the output verbatim.
|
|
47
|
+
|
|
48
|
+
---
|
|
49
|
+
|
|
50
|
+
## Step 4 — Synthesize an answer
|
|
51
|
+
|
|
52
|
+
Read the top matching pages (up to 5) and produce a synthesized response:
|
|
53
|
+
|
|
54
|
+
1. **Direct answer** — if the query has a clear answer from the wiki, state it first.
|
|
55
|
+
2. **Supporting pages** — list the relevant pages with one-line descriptions and `[[wikilink]]` references.
|
|
56
|
+
3. **Gaps** — if the wiki lacks coverage on the topic, note what is missing and suggest an ingest target.
|
|
57
|
+
|
|
58
|
+
If zero results are returned, say so and offer to broaden the search or suggest using `/hypo:ingest` to add relevant sources.
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Check wiki pages for stale or unverified knowledge and prompt review
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
You are running `/hypo:verify`. Check all wiki pages for `verify_by` and `verify_by_date` fields, surface overdue pages, and guide a review pass.
|
|
6
|
+
|
|
7
|
+
## What this does
|
|
8
|
+
|
|
9
|
+
- Scans all pages for `verify_by` (a question to re-check) and `verify_by_date` (deadline)
|
|
10
|
+
- Groups results: **overdue**, **due soon** (within 14 days), and **ok**
|
|
11
|
+
- After the script runs, you help the user review and update each overdue page
|
|
12
|
+
|
|
13
|
+
---
|
|
14
|
+
|
|
15
|
+
## Step 1 — Locate package root
|
|
16
|
+
|
|
17
|
+
Locate the Hypomnema package root — the directory two levels above this file: `skills/<name>/SKILL.md` → package root.
|
|
18
|
+
|
|
19
|
+
If the user specified a wiki directory, pass it as `--wiki-dir="<path>"`. Otherwise omit the flag and the script resolves the wiki root automatically via `HYPO_DIR` → `hypo-config.md` scan → `~/hypomnema`.
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## Step 2 — Run verify
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
node <package-root>/scripts/verify.mjs \
|
|
27
|
+
[--wiki-dir="<path>"] \
|
|
28
|
+
[--file="<path>"] \
|
|
29
|
+
[--json]
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
Options:
|
|
33
|
+
- `--file=<path>` — check a single file only
|
|
34
|
+
- `--json` — output results as JSON
|
|
35
|
+
|
|
36
|
+
Show the output verbatim.
|
|
37
|
+
|
|
38
|
+
---
|
|
39
|
+
|
|
40
|
+
## Step 3 — Interpret results
|
|
41
|
+
|
|
42
|
+
- **overdue** — `verify_by_date` is in the past; the page needs immediate review
|
|
43
|
+
- **due soon** — `verify_by_date` is within 14 days; worth reviewing this session
|
|
44
|
+
- **ok** — deadline is far enough out, or `verify_by` is set without a `verify_by_date` (pages with no `verify_by` at all are reported separately as **missing**)
|
|
45
|
+
|
|
46
|
+
A page with `verify_by` but no `verify_by_date` is treated as **ok** (no deadline set — add `verify_by_date` to schedule a review).
|
|
47
|
+
|
|
48
|
+
---
|
|
49
|
+
|
|
50
|
+
## Step 4 — Review overdue pages
|
|
51
|
+
|
|
52
|
+
For each overdue page, in priority order:
|
|
53
|
+
|
|
54
|
+
1. Read the page content.
|
|
55
|
+
2. Show the `verify_by` question to the user.
|
|
56
|
+
3. Ask: "Is this still accurate? (yes / no / partially)"
|
|
57
|
+
4. Based on the answer:
|
|
58
|
+
- **yes** — update `last_reviewed: <today>` and push `verify_by_date` forward by 90 days.
|
|
59
|
+
- **no / partially** — help the user edit the page to correct the outdated content, then update `last_reviewed` and `verify_by_date`.
|
|
60
|
+
5. Offer to move to the next overdue page.
|
|
61
|
+
|
|
62
|
+
---
|
|
63
|
+
|
|
64
|
+
## Step 5 — Update frontmatter
|
|
65
|
+
|
|
66
|
+
After each review, apply the updated `last_reviewed` and `verify_by_date` fields to the page frontmatter. Do not change any other fields unless the user approves.
|
|
67
|
+
|
|
68
|
+
---
|
|
69
|
+
|
|
70
|
+
## Step 6 — Append stale items to open-questions.md
|
|
71
|
+
|
|
72
|
+
For each page reviewed as **no** or **partially** (content found stale or incorrect):
|
|
73
|
+
|
|
74
|
+
1. Open `<wiki-root>/pages/open-questions.md`. If absent, create it with this frontmatter:
|
|
75
|
+
|
|
76
|
+
```
|
|
77
|
+
---
|
|
78
|
+
title: Open Questions
|
|
79
|
+
type: open-questions
|
|
80
|
+
updated: <today YYYY-MM-DD>
|
|
81
|
+
---
|
|
82
|
+
|
|
83
|
+
# Open Questions
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
2. Append an entry:
|
|
87
|
+
|
|
88
|
+
```
|
|
89
|
+
- [ ] Re-verify [[<page-slug>|<page-title>]]: <verify_by question> (surfaced: <today YYYY-MM-DD>)
|
|
90
|
+
```
|
|
91
|
+
|
|
92
|
+
3. Save the file. Do not remove or reorder existing entries.
|
|
File without changes
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# .hypoignore — patterns excluded from Hypomnema hooks
|
|
2
|
+
# One glob per line. Lines starting with # are comments.
|
|
3
|
+
# Matched files are not read by hooks or included in index lookups.
|
|
4
|
+
# Syntax follows .gitignore glob rules.
|
|
5
|
+
|
|
6
|
+
# Large binaries
|
|
7
|
+
*.pdf
|
|
8
|
+
*.zip
|
|
9
|
+
*.tar.gz
|
|
10
|
+
|
|
11
|
+
# Credentials and secrets
|
|
12
|
+
*.pem
|
|
13
|
+
*.key
|
|
14
|
+
*.env
|
|
15
|
+
|
|
16
|
+
# Draft / scratch (uncomment to hide from hooks)
|
|
17
|
+
# drafts/
|
|
18
|
+
# scratch/
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Home
|
|
3
|
+
type: reference
|
|
4
|
+
updated: YYYY-MM-DD
|
|
5
|
+
tags: [wiki, home]
|
|
6
|
+
---
|
|
7
|
+
|
|
8
|
+
# Wiki
|
|
9
|
+
|
|
10
|
+
> Personal knowledge base powered by [Hypomnema](https://github.com/sk-lim19f/Hypomnema).
|
|
11
|
+
|
|
12
|
+
---
|
|
13
|
+
|
|
14
|
+
## Quick Start
|
|
15
|
+
|
|
16
|
+
- [[index]] — full page catalog
|
|
17
|
+
- [[hot]] — active projects and last session context
|
|
18
|
+
- [[log]] — chronological activity log
|
|
19
|
+
- [[SCHEMA]] — type system reference
|
|
20
|
+
- [[hypo-guide]] — operations guide
|
|
21
|
+
|
|
22
|
+
---
|
|
23
|
+
|
|
24
|
+
## Active Projects
|
|
25
|
+
|
|
26
|
+
<!-- Links to your active project indexes -->
|
|
27
|
+
<!-- [[projects/my-project/index|my-project]] -->
|
|
28
|
+
|
|
29
|
+
---
|
|
30
|
+
|
|
31
|
+
## Recent Pages
|
|
32
|
+
|
|
33
|
+
<!-- Update after each significant ingest or writing session -->
|
|
34
|
+
<!-- [[learnings/topic]] — date -->
|