hypomnema 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +11 -0
- package/LICENSE +21 -0
- package/README.ko.md +160 -0
- package/README.md +160 -0
- package/commands/.gitkeep +0 -0
- package/commands/crystallize.md +116 -0
- package/commands/doctor.md +66 -0
- package/commands/feedback.md +67 -0
- package/commands/graph.md +54 -0
- package/commands/ingest.md +85 -0
- package/commands/init.md +101 -0
- package/commands/lint.md +55 -0
- package/commands/query.md +55 -0
- package/commands/resume.md +48 -0
- package/commands/stats.md +39 -0
- package/commands/uninstall.md +52 -0
- package/commands/upgrade.md +63 -0
- package/commands/verify.md +60 -0
- package/docs/.gitkeep +0 -0
- package/docs/ARCHITECTURE.md +183 -0
- package/docs/CONTRIBUTING.md +115 -0
- package/docs/TEST-CASES.md +580 -0
- package/hooks/.gitkeep +0 -0
- package/hooks/hooks.json +109 -0
- package/hooks/hypo-auto-commit.mjs +36 -0
- package/hooks/hypo-auto-stage.mjs +30 -0
- package/hooks/hypo-compact-guard.mjs +71 -0
- package/hooks/hypo-cwd-change.mjs +91 -0
- package/hooks/hypo-file-watch.mjs +47 -0
- package/hooks/hypo-first-prompt.mjs +59 -0
- package/hooks/hypo-hot-rebuild.mjs +95 -0
- package/hooks/hypo-lookup.mjs +178 -0
- package/hooks/hypo-personal-check.mjs +195 -0
- package/hooks/hypo-session-start.mjs +141 -0
- package/hooks/hypo-shared.mjs +213 -0
- package/package.json +37 -0
- package/scripts/.gitkeep +0 -0
- package/scripts/bump-version.mjs +53 -0
- package/scripts/crystallize.mjs +153 -0
- package/scripts/doctor.mjs +361 -0
- package/scripts/feedback.mjs +130 -0
- package/scripts/graph.mjs +183 -0
- package/scripts/ingest.mjs +130 -0
- package/scripts/init.mjs +515 -0
- package/scripts/lib/frontmatter.mjs +11 -0
- package/scripts/lib/hypo-ignore.mjs +54 -0
- package/scripts/lib/hypo-root.mjs +53 -0
- package/scripts/lint.mjs +210 -0
- package/scripts/query.mjs +124 -0
- package/scripts/resume.mjs +115 -0
- package/scripts/stats.mjs +132 -0
- package/scripts/uninstall.mjs +188 -0
- package/scripts/upgrade.mjs +538 -0
- package/scripts/verify.mjs +172 -0
- package/skills/.gitkeep +0 -0
- package/skills/crystallize/SKILL.md +85 -0
- package/skills/graph/SKILL.md +54 -0
- package/skills/ingest/SKILL.md +83 -0
- package/skills/lint/SKILL.md +55 -0
- package/skills/query/SKILL.md +58 -0
- package/skills/verify/SKILL.md +92 -0
- package/templates/.gitkeep +0 -0
- package/templates/.hypoignore +18 -0
- package/templates/Home.md +34 -0
- package/templates/Overview.md +50 -0
- package/templates/SCHEMA.md +106 -0
- package/templates/hot.md +22 -0
- package/templates/hypo-automation.md +69 -0
- package/templates/hypo-config.md +41 -0
- package/templates/hypo-guide.md +146 -0
- package/templates/hypo-help.md +53 -0
- package/templates/index.md +44 -0
- package/templates/log.md +25 -0
- package/templates/pages/_index.md +61 -0
- package/templates/projects/_template/hot.md +28 -0
- package/templates/projects/_template/index.md +39 -0
- package/templates/projects/_template/prd.md +29 -0
- package/templates/projects/_template/session-state.md +9 -0
- package/templates/session-state.md +12 -0
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Hypomnema graph script
|
|
4
|
+
*
|
|
5
|
+
* Generates a wikilink dependency graph from wiki pages.
|
|
6
|
+
* Outputs adjacency list (default) or Mermaid diagram.
|
|
7
|
+
*
|
|
8
|
+
* Usage:
|
|
9
|
+
* node scripts/graph.mjs [options]
|
|
10
|
+
*
|
|
11
|
+
* Options:
|
|
12
|
+
* --hypo-dir=<path> Hypomnema root (default: resolved via HYPO_DIR / hypo-config.md / ~/hypomnema)
|
|
13
|
+
* --format=<fmt> Output format: json | mermaid | dot (default: json)
|
|
14
|
+
* --min-edges=<n> Only include nodes with at least N edges (default: 0)
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
|
18
|
+
import { join, relative, extname, basename } from 'path';
|
|
19
|
+
import { resolveHypoRoot, expandHome } from './lib/hypo-root.mjs';
|
|
20
|
+
import { loadHypoIgnore, isIgnored } from './lib/hypo-ignore.mjs';
|
|
21
|
+
|
|
22
|
+
// ── arg parsing ───────────────────────────────────────────────────────────────
|
|
23
|
+
|
|
24
|
+
/**
 * Parse CLI arguments into an options object.
 * Recognized flags: --hypo-dir=<path>, --format=<fmt>, --min-edges=<n>.
 * Falls back to resolveHypoRoot() when no --hypo-dir is supplied.
 */
function parseArgs(argv) {
  const HYPO_DIR = '--hypo-dir=';
  const FORMAT = '--format=';
  const MIN_EDGES = '--min-edges=';
  const args = { hypoDir: null, format: 'json', minEdges: 0 };
  for (const token of argv.slice(2)) {
    if (token.startsWith(HYPO_DIR)) {
      args.hypoDir = expandHome(token.slice(HYPO_DIR.length));
    } else if (token.startsWith(FORMAT)) {
      args.format = token.slice(FORMAT.length);
    } else if (token.startsWith(MIN_EDGES)) {
      // Non-numeric values fall back to 0 rather than propagating NaN.
      args.minEdges = parseInt(token.slice(MIN_EDGES.length), 10) || 0;
    }
  }
  if (!args.hypoDir) args.hypoDir = resolveHypoRoot();
  return args;
}
|
|
34
|
+
|
|
35
|
+
// ── page collector ────────────────────────────────────────────────────────────
|
|
36
|
+
|
|
37
|
+
/**
 * Recursively collect markdown wiki pages under `dir`.
 *
 * Skips dot-entries (files AND directories, e.g. `.git`) and anything
 * matched by the .hypoignore patterns. Entries that cannot be stat'ed
 * (e.g. broken symlinks) are skipped instead of aborting the scan.
 *
 * @param {string} dir             directory to scan
 * @param {string} root            hypomnema root; slugs are relative to it
 * @param {Array}  pages           accumulator, also the return value
 * @param {string[]} ignorePatterns patterns from loadHypoIgnore()
 * @returns {Array<{path: string, slug: string, bare: string}>}
 */
function collectPages(dir, root, pages = [], ignorePatterns = []) {
  if (!existsSync(dir)) return pages;
  for (const entry of readdirSync(dir)) {
    // Skip hidden entries up front. Previously only hidden .md FILES were
    // excluded, so dot-directories like .git were recursed into — this now
    // matches collectMdFiles() in ingest.mjs, which skips all dot-entries.
    if (entry.startsWith('.')) continue;
    const full = join(dir, entry);
    if (isIgnored(full, root, ignorePatterns)) continue;
    let st;
    try { st = statSync(full); } catch { continue; } // broken symlink etc.
    if (st.isDirectory()) {
      collectPages(full, root, pages, ignorePatterns);
    } else if (extname(entry) === '.md') {
      // Slug = root-relative path without extension, normalized to '/'.
      const slug = relative(root, full).replace(/\.md$/, '').replace(/\\/g, '/');
      pages.push({ path: full, slug, bare: basename(full, '.md') });
    }
  }
  return pages;
}
|
|
52
|
+
|
|
53
|
+
// ── slug resolver ─────────────────────────────────────────────────────────────
|
|
54
|
+
|
|
55
|
+
/**
 * Build a lookup table from wikilink text to canonical slug.
 * Both the full slug ("pages/foo") and the bare name ("foo") resolve;
 * when bare names collide, the first page scanned wins.
 */
function buildSlugIndex(pages) {
  return pages.reduce((index, { slug, bare }) => {
    index.set(slug, slug);
    if (!index.has(bare)) index.set(bare, slug);
    return index;
  }, new Map());
}
|
|
63
|
+
|
|
64
|
+
// ── wikilink extractor ────────────────────────────────────────────────────────
|
|
65
|
+
|
|
66
|
+
/**
 * Pull wikilink targets out of page content.
 * Handles [[Target]], [[Target|alias]] and [[Target#section]];
 * returns the trimmed target text for each occurrence, in order.
 */
function extractWikilinks(content) {
  const matches = content.matchAll(/\[\[([^\]|#]+?)(?:[|#][^\]]*?)?\]\]/g);
  return Array.from(matches, (m) => m[1].trim());
}
|
|
73
|
+
|
|
74
|
+
// ── graph builder ─────────────────────────────────────────────────────────────
|
|
75
|
+
|
|
76
|
+
/**
 * Build the directed wikilink graph over `pages`.
 *
 * Every page gets an in/out degree entry (zero-initialized), so isolated
 * pages still appear as nodes. Unreadable files are skipped silently;
 * links that do not resolve via `slugIndex`, and self-links, add no edge.
 * Duplicate links between the same pair each count as a separate edge.
 *
 * @returns {{edges: Array<{from: string, to: string}>, inDegree: Map, outDegree: Map}}
 */
function buildGraph(pages, slugIndex) {
  const inDegree = new Map(pages.map((p) => [p.slug, 0]));
  const outDegree = new Map(pages.map((p) => [p.slug, 0]));
  const edges = [];

  for (const page of pages) {
    let text;
    try {
      text = readFileSync(page.path, 'utf-8');
    } catch {
      continue; // unreadable page: keep its zero-degree node, add no edges
    }
    for (const link of extractWikilinks(text)) {
      const target = slugIndex.get(link);
      if (!target || target === page.slug) continue; // unresolved or self-link
      edges.push({ from: page.slug, to: target });
      outDegree.set(page.slug, (outDegree.get(page.slug) || 0) + 1);
      inDegree.set(target, (inDegree.get(target) || 0) + 1);
    }
  }

  return { edges, inDegree, outDegree };
}
|
|
101
|
+
|
|
102
|
+
// ── label escapers ────────────────────────────────────────────────────────────
|
|
103
|
+
|
|
104
|
+
// Escape double quotes for Mermaid node labels (Mermaid's #quot; entity).
function escapeMermaid(s) { return s.replaceAll('"', '#quot;'); }
|
|
105
|
+
// Escape backslashes, then quotes, for DOT double-quoted string literals.
function escapeDot(s) { return s.split('\\').join('\\\\').split('"').join('\\"'); }
|
|
106
|
+
|
|
107
|
+
// ── formatters ────────────────────────────────────────────────────────────────
|
|
108
|
+
|
|
109
|
+
/**
 * Render the graph as pretty-printed JSON: { nodes, edges }.
 *
 * Nodes carry slug plus in/out degree, are filtered to those with at
 * least `minEdges` total edges, and are sorted by total degree
 * (descending, stable). Only edges whose endpoints both survive the
 * filter are emitted.
 */
function formatJson(pages, graph, minEdges) {
  const nodes = pages
    .map((p) => ({
      slug: p.slug,
      in: graph.inDegree.get(p.slug) || 0,
      out: graph.outDegree.get(p.slug) || 0,
    }))
    // Degrees are non-negative, so this also passes everything when
    // minEdges is 0 — the former `minEdges === 0 ||` guard was redundant.
    .filter((n) => n.in + n.out >= minEdges)
    .sort((a, b) => (b.in + b.out) - (a.in + a.out));

  // Set membership is O(1); the previous nodes.find() per endpoint made
  // edge filtering O(nodes * edges). Mirrors the activeNodes pattern
  // used by formatMermaid/formatDot.
  const kept = new Set(nodes.map((n) => n.slug));
  const edges = graph.edges.filter((e) => kept.has(e.from) && kept.has(e.to));

  return JSON.stringify({ nodes, edges }, null, 2);
}
|
|
127
|
+
|
|
128
|
+
/**
 * Render the graph as a Mermaid "graph TD" diagram.
 * Only edges whose endpoints both meet the minEdges threshold appear;
 * node ids are sanitized to [a-zA-Z0-9_] while labels keep the slug.
 */
function formatMermaid(pages, graph, minEdges) {
  const degreeOf = (slug) =>
    (graph.inDegree.get(slug) || 0) + (graph.outDegree.get(slug) || 0);
  const activeNodes = new Set(
    pages.filter((p) => degreeOf(p.slug) >= minEdges).map((p) => p.slug)
  );

  const sanitize = (slug) => slug.replace(/[^a-zA-Z0-9_]/g, '_');
  const lines = ['graph TD'];
  for (const { from, to } of graph.edges) {
    if (!activeNodes.has(from) || !activeNodes.has(to)) continue;
    lines.push(
      `  ${sanitize(from)}["${escapeMermaid(from)}"] --> ${sanitize(to)}["${escapeMermaid(to)}"]`
    );
  }
  return lines.join('\n');
}
|
|
148
|
+
|
|
149
|
+
/**
 * Render the graph in Graphviz DOT syntax (left-to-right layout).
 * Only edges whose endpoints both meet the minEdges threshold appear.
 */
function formatDot(pages, graph, minEdges) {
  const degreeOf = (slug) =>
    (graph.inDegree.get(slug) || 0) + (graph.outDegree.get(slug) || 0);
  const activeNodes = new Set(
    pages.filter((p) => degreeOf(p.slug) >= minEdges).map((p) => p.slug)
  );

  const lines = ['digraph wiki {', '  rankdir=LR;'];
  for (const { from, to } of graph.edges) {
    if (!activeNodes.has(from) || !activeNodes.has(to)) continue;
    lines.push(`  "${escapeDot(from)}" -> "${escapeDot(to)}";`);
  }
  lines.push('}');
  return lines.join('\n');
}
|
|
168
|
+
|
|
169
|
+
// ── main ──────────────────────────────────────────────────────────────────────
|
|
170
|
+
|
|
171
|
+
// Resolve options, scan the wiki, build the link graph, and print it in
// the requested format.
const args = parseArgs(process.argv);

const ignorePatterns = loadHypoIgnore(args.hypoDir);
const scanDirs = ['pages', 'projects'].map((d) => join(args.hypoDir, d));
const pages = scanDirs.flatMap((d) => collectPages(d, args.hypoDir, [], ignorePatterns));
const slugIndex = buildSlugIndex(pages);
const graph = buildGraph(pages, slugIndex);

// Any unrecognized --format value falls back to JSON, the documented default.
const render =
  args.format === 'mermaid' ? formatMermaid :
  args.format === 'dot' ? formatDot :
  formatJson;
console.log(render(pages, graph, args.minEdges));
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Hypomnema ingest helper script
|
|
4
|
+
*
|
|
5
|
+
* Lists files in sources/ that have no corresponding source-summary page,
|
|
6
|
+
* and reports pages that reference missing source files.
|
|
7
|
+
* Used by /hypo:ingest to surface what needs ingestion before Claude synthesizes.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* node scripts/ingest.mjs [options]
|
|
11
|
+
*
|
|
12
|
+
* Options:
|
|
13
|
+
* --hypo-dir=<path> Hypomnema root (default: resolved via HYPO_DIR / hypo-config.md / ~/hypomnema)
|
|
14
|
+
* --json Output as JSON
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
|
18
|
+
import { join, extname, basename } from 'path';
|
|
19
|
+
import { resolveHypoRoot, expandHome } from './lib/hypo-root.mjs';
|
|
20
|
+
import { loadHypoIgnore, isIgnored } from './lib/hypo-ignore.mjs';
|
|
21
|
+
|
|
22
|
+
// ── arg parsing ──────────────────────────────────────────────────────────────
|
|
23
|
+
|
|
24
|
+
/**
 * Parse CLI arguments for the ingest helper.
 * Recognized: --hypo-dir=<path> and --json; the hypomnema root falls
 * back to resolveHypoRoot() when not supplied explicitly.
 */
function parseArgs(argv) {
  const HYPO_DIR = '--hypo-dir=';
  const args = { hypoDir: null, json: false };
  for (const token of argv.slice(2)) {
    if (token.startsWith(HYPO_DIR)) {
      args.hypoDir = expandHome(token.slice(HYPO_DIR.length));
    } else if (token === '--json') {
      args.json = true;
    }
  }
  if (!args.hypoDir) args.hypoDir = resolveHypoRoot();
  return args;
}
|
|
33
|
+
|
|
34
|
+
// ── helpers ──────────────────────────────────────────────────────────────────
|
|
35
|
+
|
|
36
|
+
/**
 * Recursively gather .md files under `dir`, skipping dot-entries and
 * (when `hypoDir` is given) anything matched by the .hypoignore patterns.
 * Returns the accumulator array of absolute file paths.
 */
function collectMdFiles(dir, acc = [], hypoDir = '', ignorePatterns = []) {
  if (!existsSync(dir)) return acc;
  const visible = readdirSync(dir).filter((e) => !e.startsWith('.'));
  for (const entry of visible) {
    const full = join(dir, entry);
    if (hypoDir && isIgnored(full, hypoDir, ignorePatterns)) continue;
    if (statSync(full).isDirectory()) {
      collectMdFiles(full, acc, hypoDir, ignorePatterns);
    } else if (extname(entry) === '.md') {
      acc.push(full);
    }
  }
  return acc;
}
|
|
48
|
+
|
|
49
|
+
/**
 * Minimal frontmatter parser: extracts flat `key: value` pairs from a
 * leading `--- ... ---` block. Values lose one layer of surrounding
 * quotes; lines without a colon are ignored. Returns {} when no
 * frontmatter block is present.
 */
function parseFrontmatter(content) {
  const block = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
  if (!block) return {};
  const fm = {};
  for (const rawLine of block[1].split('\n')) {
    const sep = rawLine.indexOf(':');
    if (sep < 0) continue;
    const key = rawLine.slice(0, sep).trim();
    const value = rawLine.slice(sep + 1).trim().replace(/^["']|["']$/g, '');
    fm[key] = value;
  }
  return fm;
}
|
|
60
|
+
|
|
61
|
+
// ── main ─────────────────────────────────────────────────────────────────────
|
|
62
|
+
|
|
63
|
+
const args = parseArgs(process.argv);

// Enumerate raw source files directly under sources/ — top-level regular
// files only (subdirectories, dot-files, and ignored paths are excluded).
const ignorePatterns = loadHypoIgnore(args.hypoDir);
const sourcesDir = join(args.hypoDir, 'sources');
const allSources = existsSync(sourcesDir)
  ? readdirSync(sourcesDir).filter(e => !e.startsWith('.') && !statSync(join(sourcesDir, e)).isDirectory() && !isIgnored(join(sourcesDir, e), args.hypoDir, ignorePatterns))
  : [];

// collect all source: references in wiki pages
// Frontmatter `source:` values prefixed with "session:" denote session
// transcripts rather than files on disk, so they are not counted here.
const pageFiles = collectMdFiles(join(args.hypoDir, 'pages'), [], args.hypoDir, ignorePatterns);
const referencedSources = new Set();

for (const f of pageFiles) {
  let content;
  // Unreadable pages are skipped silently — this is a best-effort report.
  try { content = readFileSync(f, 'utf-8'); } catch { continue; }
  const fm = parseFrontmatter(content);
  if (fm.source && !fm.source.startsWith('session:')) {
    referencedSources.add(fm.source);
  }
}

// sources with no summary page
// A source counts as referenced if a page names either its exact filename
// or its extension-less stem.
const orphaned = allSources.filter(s => {
  const slug = basename(s, extname(s));
  return !referencedSources.has(s) && !referencedSources.has(slug);
});

// pages referencing sources that don't exist on disk
// A reference resolves if some listed source matches it exactly or by
// stem, or if the literal path exists under sources/ (covers entries the
// directory filter above excluded, e.g. ignored files).
const missingSource = [];
for (const f of pageFiles) {
  let content;
  try { content = readFileSync(f, 'utf-8'); } catch { continue; }
  const fm = parseFrontmatter(content);
  if (!fm.source || fm.source.startsWith('session:')) continue;
  const sourceFile = join(sourcesDir, fm.source);
  const sourceFileWithExt = allSources.find(s => s === fm.source || basename(s, extname(s)) === fm.source);
  if (!sourceFileWithExt && !existsSync(sourceFile)) {
    missingSource.push({ page: f, source: fm.source });
  }
}

// Emit the report: machine-readable JSON with --json, otherwise a
// human-readable summary with follow-up guidance.
if (args.json) {
  console.log(JSON.stringify({
    totalSources: allSources.length,
    orphaned,
    missingSource,
  }, null, 2));
} else {
  console.log(`Sources: ${allSources.length} total`);

  if (orphaned.length === 0) {
    console.log('✓ All sources have a corresponding source-summary page');
  } else {
    console.log(`\n⊘ ${orphaned.length} source(s) not yet ingested:`);
    for (const s of orphaned) console.log(`  sources/${s}`);
  }

  if (missingSource.length > 0) {
    console.log(`\n⚠ ${missingSource.length} page(s) reference a missing source file:`);
    for (const { page, source } of missingSource) {
      console.log(`  ${page} → source: ${source}`);
    }
  }

  if (orphaned.length > 0) {
    console.log('\nRun /hypo:ingest to synthesize the listed sources into wiki pages.');
  }
}
|