lat.md 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -0
- package/dist/src/cli/index.d.ts +2 -0
- package/dist/src/cli/index.js +19 -0
- package/dist/src/cli/locate.d.ts +1 -0
- package/dist/src/cli/locate.js +25 -0
- package/dist/src/cli/refs.d.ts +1 -0
- package/dist/src/cli/refs.js +140 -0
- package/dist/src/cli.d.ts +2 -0
- package/dist/src/cli.js +23 -0
- package/dist/src/extensions/wiki-link/from-markdown.d.ts +5 -0
- package/dist/src/extensions/wiki-link/from-markdown.js +34 -0
- package/dist/src/extensions/wiki-link/index.d.ts +4 -0
- package/dist/src/extensions/wiki-link/index.js +3 -0
- package/dist/src/extensions/wiki-link/syntax.d.ts +14 -0
- package/dist/src/extensions/wiki-link/syntax.js +121 -0
- package/dist/src/extensions/wiki-link/to-markdown.d.ts +5 -0
- package/dist/src/extensions/wiki-link/to-markdown.js +28 -0
- package/dist/src/extensions/wiki-link/types.d.ts +34 -0
- package/dist/src/extensions/wiki-link/types.js +1 -0
- package/dist/src/format.d.ts +2 -0
- package/dist/src/format.js +16 -0
- package/dist/src/lattice.d.ts +22 -0
- package/dist/src/lattice.js +177 -0
- package/dist/src/parser.d.ts +3 -0
- package/dist/src/parser.js +16 -0
- package/package.json +36 -0
package/README.md
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# Lattice
|
|
2
|
+
|
|
3
|
+
Anchor source code to high-level concepts defined in markdown.
|
|
4
|
+
See [lattice.md](lattice.md) for the full spec.
|
|
5
|
+
|
|
6
|
+
## Development
|
|
7
|
+
|
|
8
|
+
Requires Node.js 22+.
|
|
9
|
+
|
|
10
|
+
```bash
|
|
11
|
+
pnpm install
|
|
12
|
+
pnpm test # run tests once
|
|
13
|
+
pnpm test:watch # run tests in watch mode
|
|
14
|
+
pnpm typecheck # tsc --noEmit
|
|
15
|
+
pnpm format # prettier
|
|
16
|
+
```
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
#!/usr/bin/env node
// CLI entry point: dispatches `lat <command>` to the matching handler.
import { locate } from './locate.js';
import { refs } from './refs.js';

const [command, ...commandArgs] = process.argv.slice(2);

// Registry of known subcommands.
const commands = { locate, refs };

const handler = commands[command];
if (handler === undefined) {
    console.error(`Usage: lat <command>

Commands:
  locate <query>                          Find sections by id
  refs <query> [--scope=md|code|md+code]  Find references to a section`);
    process.exit(1);
}
await handler(commandArgs);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function locate(args: string[]): Promise<void>;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { findLatticeDir, loadAllSections, findSections } from '../lattice.js';
|
|
2
|
+
import { formatSectionPreview } from '../format.js';
|
|
3
|
+
/**
 * `lat locate <query>` — print a preview for every section whose id
 * matches the query. Exits non-zero on usage error, missing .lattice
 * directory, or no matches.
 */
export async function locate(args) {
    // Exactly one positional argument (the section id) is required.
    if (args.length < 1) {
        console.error('Usage: lat locate <query>');
        process.exit(1);
    }
    const [query] = args;

    // Resolve the nearest .lattice directory above cwd.
    const latticeDir = findLatticeDir();
    if (!latticeDir) {
        console.error('No .lattice directory found');
        process.exit(1);
    }

    const sections = await loadAllSections(latticeDir);
    const matches = findSections(sections, query);
    if (matches.length === 0) {
        console.error(`No sections matching "${query}"`);
        process.exit(1);
    }

    // Print each match, separated from the previous one by a blank line.
    let first = true;
    for (const match of matches) {
        if (!first)
            console.log('');
        console.log(formatSectionPreview(match, latticeDir));
        first = false;
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function refs(args: string[]): Promise<void>;
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
import { readdir, readFile } from 'node:fs/promises';
import { join, relative } from 'node:path';
import { findLatticeDir, listLatticeFiles, parseSections, extractRefs, } from '../lattice.js';
import { formatSectionPreview } from '../format.js';
|
|
5
|
+
/**
 * Parse `lat refs` arguments: one positional query plus an optional
 * `--scope=` flag. Exits on an unknown scope or a missing query.
 */
function parseArgs(args) {
    // Default reference scope: markdown-only.
    let scope = 'md';
    const positional = [];
    const VALID_SCOPES = ['md', 'code', 'md+code'];
    for (const arg of args) {
        if (!arg.startsWith('--scope=')) {
            positional.push(arg);
            continue;
        }
        const val = arg.slice('--scope='.length);
        if (!VALID_SCOPES.includes(val)) {
            console.error(`Unknown scope: ${val}. Use md, code, or md+code.`);
            process.exit(1);
        }
        scope = val;
    }
    if (positional.length < 1) {
        console.error('Usage: lat refs <query> [--scope=md|code|md+code]');
        process.exit(1);
    }
    return { query: positional[0], scope };
}
|
|
29
|
+
// Directory names skipped entirely when walking the project tree for
// code references (dependency/build output and tool-private folders).
const IGNORE_DIRS = new Set([
    'node_modules',
    'dist',
    '.git',
    '.lattice',
    '.claude',
]);
|
|
36
|
+
/**
 * Recursively collect every file path under `dir`, skipping directories
 * named in IGNORE_DIRS. Entries that are neither plain files nor
 * directories (e.g. symlinks) are ignored.
 *
 * Fix: `readdir` is now imported once at module scope instead of the
 * previous `await import('node:fs/promises')` executed on every
 * recursive call (the module already imports from that specifier).
 */
async function walkFiles(dir) {
    const entries = await readdir(dir, { withFileTypes: true });
    const files = [];
    for (const entry of entries) {
        if (IGNORE_DIRS.has(entry.name))
            continue;
        const full = join(dir, entry.name);
        if (entry.isDirectory()) {
            files.push(...(await walkFiles(full)));
        }
        else if (entry.isFile()) {
            files.push(full);
        }
    }
    return files;
}
|
|
53
|
+
// Matches `@lat: [[section-id]]` annotations embedded in source files.
const LAT_REF_RE = /@lat:\s*\[\[([^\]]+)\]\]/g;
/**
 * Scan every file under `projectRoot` for @lat annotations whose
 * bracketed target equals `query` (case-insensitive). Returns
 * preformatted `path:line content` result strings.
 */
async function searchCode(projectRoot, query) {
    const needle = query.toLowerCase();
    const results = [];
    for (const file of await walkFiles(projectRoot)) {
        const text = await readFile(file, 'utf-8');
        // matchAll handles the global regex's cursor internally, so no
        // manual lastIndex resets are needed per line.
        text.split('\n').forEach((line, idx) => {
            for (const m of line.matchAll(LAT_REF_RE)) {
                if (m[1].toLowerCase() !== needle)
                    continue;
                const relPath = relative(process.cwd(), file);
                results.push(` ${relPath}:${idx + 1} ${line.trim()}`);
            }
        });
    }
    return results;
}
|
|
75
|
+
/**
 * `lat refs <query>` — print everything that references the section `query`.
 *
 * Scopes:
 *   md      – lattice markdown sections containing a wiki link to the query
 *   code    – `@lat: [[...]]` annotations found in project source files
 *   md+code – both of the above
 *
 * Exits non-zero when no .lattice directory exists or nothing matches.
 */
export async function refs(args) {
    const { query, scope } = parseArgs(args);
    const latticeDir = findLatticeDir();
    if (!latticeDir) {
        console.error('No .lattice directory found');
        process.exit(1);
    }
    // Lowercased query for case-insensitive comparisons throughout.
    const q = query.toLowerCase();
    // Tracks whether anything was printed: drives blank-line separators
    // and the final "no references" failure path.
    let hasOutput = false;
    if (scope === 'md' || scope === 'md+code') {
        const files = await listLatticeFiles(latticeDir);
        // Load each lattice file exactly once, keeping sections, file
        // paths, and raw contents aligned by index for the pass below.
        const allSections = await (async () => {
            const s = [];
            const contents = [];
            for (const file of files) {
                const content = await readFile(file, 'utf-8');
                contents.push(content);
                s.push(...parseSections(file, content));
            }
            return { sections: s, files, contents };
        })();
        // Collect refs from all files
        // Ids (lowercased) of sections that contain a wiki link targeting q.
        const matchingFromSections = new Set();
        for (let i = 0; i < allSections.files.length; i++) {
            const fileRefs = extractRefs(allSections.files[i], allSections.contents[i]);
            for (const ref of fileRefs) {
                if (ref.target.toLowerCase() === q) {
                    matchingFromSections.add(ref.fromSection.toLowerCase());
                }
            }
        }
        if (matchingFromSections.size > 0) {
            // Resolve the collected ids back to Section objects so full
            // previews (id, location, body snippet) can be printed.
            const flatAll = flattenAll(allSections.sections);
            const referrers = flatAll.filter((s) => matchingFromSections.has(s.id.toLowerCase()));
            for (const section of referrers) {
                if (hasOutput)
                    console.log('');
                console.log(formatSectionPreview(section, latticeDir));
                hasOutput = true;
            }
        }
    }
    if (scope === 'code' || scope === 'md+code') {
        // Project root is the parent of .lattice
        const projectRoot = join(latticeDir, '..');
        const codeResults = await searchCode(projectRoot, query);
        for (const result of codeResults) {
            if (hasOutput)
                console.log('');
            console.log(result);
            hasOutput = true;
        }
    }
    if (!hasOutput) {
        console.error(`No references to "${query}" found`);
        process.exit(1);
    }
}
|
|
133
|
+
// Depth-first, pre-order flatten of a Section tree into a single list.
function flattenAll(sections) {
    return sections.flatMap((section) => [section, ...flattenAll(section.children)]);
}
|
package/dist/src/cli.js
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
#!/usr/bin/env node
// Legacy single-command CLI supporting only `lat locate <query>`.
// NOTE(review): package.json's "bin" points at ./dist/src/cli/index.js,
// so this file appears superseded by the cli/ directory — confirm before
// relying on it or removing it.
import { findLatticeDir, loadAllSections, findSections } from './lattice.js';
const args = process.argv.slice(2);
const command = args[0];
if (command !== 'locate' || args.length < 2) {
    console.error('Usage: lat locate <query>');
    process.exit(1);
}
const query = args[1];
// Walk upward from cwd to the nearest .lattice directory.
const latticeDir = findLatticeDir();
if (!latticeDir) {
    console.error('No .lattice directory found');
    process.exit(1);
}
const sections = await loadAllSections(latticeDir);
const matches = findSections(sections, query);
if (matches.length === 0) {
    console.error(`No sections matching "${query}"`);
    process.exit(1);
}
// Unlike cli/locate.js, this entry point prints only the matching ids.
for (const m of matches) {
    console.log(m.id);
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
 * mdast-util extension to turn wiki-link micromark tokens into mdast nodes.
 */
// On `wikiLink` enter: open a fresh node. The target and alias fields are
// filled in by the exit handlers below while the node is on the stack.
function enterWikiLink(token) {
    this.enter({ type: 'wikiLink', value: '', data: { alias: null } }, token);
}
// Record the serialized target text on the node currently being built.
function exitWikiLinkTarget(token) {
    const current = this.stack[this.stack.length - 1];
    current.value = this.sliceSerialize(token);
}
// Record the optional alias (text after `|`) on the node being built.
function exitWikiLinkAlias(token) {
    const current = this.stack[this.stack.length - 1];
    current.data.alias = this.sliceSerialize(token);
}
// On `wikiLink` exit: close the node opened in enterWikiLink.
function exitWikiLink(token) {
    this.exit(token);
}
export function wikiLinkFromMarkdown() {
    return {
        enter: { wikiLink: enterWikiLink },
        exit: {
            wikiLinkTarget: exitWikiLinkTarget,
            wikiLinkAlias: exitWikiLinkAlias,
            wikiLink: exitWikiLink,
        },
    };
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Micromark syntax extension for wiki links: [[target]] and [[target|alias]].
|
|
3
|
+
*
|
|
4
|
+
* Produces the following token types:
|
|
5
|
+
* - wikiLink (the entire construct)
|
|
6
|
+
* - wikiLinkMarker ([[ and ]])
|
|
7
|
+
* - wikiLinkData (everything between markers)
|
|
8
|
+
* - wikiLinkTarget (the target portion)
|
|
9
|
+
* - wikiLinkAliasMarker (the | divider)
|
|
10
|
+
* - wikiLinkAlias (the alias portion)
|
|
11
|
+
*/
|
|
12
|
+
import type { Extension } from 'micromark-util-types';
|
|
13
|
+
import './types.js';
|
|
14
|
+
export declare function wikiLinkSyntax(): Extension;
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Micromark syntax extension for wiki links: [[target]] and [[target|alias]].
|
|
3
|
+
*
|
|
4
|
+
* Produces the following token types:
|
|
5
|
+
* - wikiLink (the entire construct)
|
|
6
|
+
* - wikiLinkMarker ([[ and ]])
|
|
7
|
+
* - wikiLinkData (everything between markers)
|
|
8
|
+
* - wikiLinkTarget (the target portion)
|
|
9
|
+
* - wikiLinkAliasMarker (the | divider)
|
|
10
|
+
* - wikiLinkAlias (the alias portion)
|
|
11
|
+
*/
|
|
12
|
+
import './types.js';
|
|
13
|
+
// Literal delimiters of the wiki-link construct.
const ALIAS_DIVIDER = '|';
const OPEN = '[[';
const CLOSE = ']]';
// micromark state-machine tokenizer for `[[target]]` / `[[target|alias]]`.
// micromark feeds one character code per call; null is EOF and negative
// codes are virtual characters (NOTE(review): codes below -2 are treated
// here as line endings and -2/-1 as tab expansion — confirm against the
// micromark character-code documentation).
function tokenize(effects, ok, nok) {
    let openCursor = 0; // characters of OPEN matched so far
    let closeCursor = 0; // characters of CLOSE matched so far
    let aliasCursor = 0; // characters of ALIAS_DIVIDER matched so far
    let hasData = false; // target holds at least one non-whitespace char
    let hasAlias = false; // alias holds at least one non-whitespace char
    return start;
    // Entry state: verify the first `[` before opening any tokens.
    function start(code) {
        if (code !== OPEN.charCodeAt(openCursor))
            return nok(code);
        effects.enter('wikiLink');
        effects.enter('wikiLinkMarker');
        return consumeOpen(code);
    }
    // Consume the `[[` opener one character at a time.
    function consumeOpen(code) {
        if (openCursor === OPEN.length) {
            effects.exit('wikiLinkMarker');
            return consumeDataStart(code);
        }
        if (code !== OPEN.charCodeAt(openCursor))
            return nok(code);
        effects.consume(code);
        openCursor++;
        return consumeOpen;
    }
    // First character after `[[`: reject EOF / line endings, then open
    // the data and target tokens.
    function consumeDataStart(code) {
        if (code === null || code < -2)
            return nok(code);
        effects.enter('wikiLinkData');
        effects.enter('wikiLinkTarget');
        return consumeTarget(code);
    }
    // Consume target characters until `|` (alias follows) or `]]` (done).
    // A target made only of whitespace is rejected via hasData.
    function consumeTarget(code) {
        if (code === ALIAS_DIVIDER.charCodeAt(aliasCursor)) {
            if (!hasData)
                return nok(code);
            effects.exit('wikiLinkTarget');
            effects.enter('wikiLinkAliasMarker');
            return consumeAliasMarker(code);
        }
        if (code === CLOSE.charCodeAt(closeCursor)) {
            if (!hasData)
                return nok(code);
            effects.exit('wikiLinkTarget');
            effects.exit('wikiLinkData');
            effects.enter('wikiLinkMarker');
            return consumeClose(code);
        }
        // No line endings or EOF inside wiki links
        if (code === null || code < -2)
            return nok(code);
        // -2/-1 (virtual tab) and 32 (space) do not count as content.
        if (code !== -2 && code !== -1 && code !== 32) {
            hasData = true;
        }
        effects.consume(code);
        return consumeTarget;
    }
    // Consume the `|` divider (one char; loop form mirrors the markers).
    function consumeAliasMarker(code) {
        if (aliasCursor === ALIAS_DIVIDER.length) {
            effects.exit('wikiLinkAliasMarker');
            effects.enter('wikiLinkAlias');
            return consumeAlias(code);
        }
        if (code !== ALIAS_DIVIDER.charCodeAt(aliasCursor))
            return nok(code);
        effects.consume(code);
        aliasCursor++;
        return consumeAliasMarker;
    }
    // Consume alias characters until the closing `]]`; a whitespace-only
    // alias is rejected via hasAlias.
    function consumeAlias(code) {
        if (code === CLOSE.charCodeAt(closeCursor)) {
            if (!hasAlias)
                return nok(code);
            effects.exit('wikiLinkAlias');
            effects.exit('wikiLinkData');
            effects.enter('wikiLinkMarker');
            return consumeClose(code);
        }
        if (code === null || code < -2)
            return nok(code);
        if (code !== -2 && code !== -1 && code !== 32) {
            hasAlias = true;
        }
        effects.consume(code);
        return consumeAlias;
    }
    // Consume the `]]` closer, then finish the whole construct.
    function consumeClose(code) {
        if (closeCursor === CLOSE.length) {
            effects.exit('wikiLinkMarker');
            effects.exit('wikiLink');
            return ok(code);
        }
        if (code !== CLOSE.charCodeAt(closeCursor))
            return nok(code);
        effects.consume(code);
        closeCursor++;
        return consumeClose;
    }
}
|
|
115
|
+
// micromark extension entry point: register the wiki-link tokenizer to
// fire on `[` (character code 91) inside text content.
export function wikiLinkSyntax() {
    const leftSquareBracket = 91;
    return {
        text: { [leftSquareBracket]: { tokenize } },
    };
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
/**
 * mdast-util extension to serialize wiki-link nodes back to markdown.
 */
// Serialize one wikiLink node as `[[target]]` or `[[target|alias]]`,
// escaping target/alias text via state.safe so brackets stay balanced.
function handler(node, _parent, state, _info) {
    const exit = state.enter('wikiLink');
    const target = state.safe(node.value, { before: '[', after: ']' });
    const alias = node.data.alias
        ? state.safe(node.data.alias, { before: '[', after: ']' })
        : null;
    const value = alias === null ? `[[${target}]]` : `[[${target}|${alias}]]`;
    exit();
    return value;
}
export function wikiLinkToMarkdown() {
    return {
        // Escape these characters in ordinary text so it cannot be
        // mistaken for wiki-link syntax on a round trip.
        unsafe: [
            { character: '[', inConstruct: ['phrasing', 'label', 'reference'] },
            { character: ']', inConstruct: ['label', 'reference'] },
        ],
        handlers: {
            wikiLink: handler,
        },
    };
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import type { Literal } from 'mdast';
|
|
2
|
+
/**
|
|
3
|
+
* mdast node for a wiki-style link like [[target]] or [[target|alias]].
|
|
4
|
+
*/
|
|
5
|
+
export interface WikiLink extends Literal {
|
|
6
|
+
type: 'wikiLink';
|
|
7
|
+
value: string;
|
|
8
|
+
data: {
|
|
9
|
+
alias: string | null;
|
|
10
|
+
};
|
|
11
|
+
}
|
|
12
|
+
declare module 'mdast' {
|
|
13
|
+
interface RootContentMap {
|
|
14
|
+
wikiLink: WikiLink;
|
|
15
|
+
}
|
|
16
|
+
interface PhrasingContentMap {
|
|
17
|
+
wikiLink: WikiLink;
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
declare module 'micromark-util-types' {
|
|
21
|
+
interface TokenTypeMap {
|
|
22
|
+
wikiLink: 'wikiLink';
|
|
23
|
+
wikiLinkMarker: 'wikiLinkMarker';
|
|
24
|
+
wikiLinkData: 'wikiLinkData';
|
|
25
|
+
wikiLinkTarget: 'wikiLinkTarget';
|
|
26
|
+
wikiLinkAliasMarker: 'wikiLinkAliasMarker';
|
|
27
|
+
wikiLinkAlias: 'wikiLinkAlias';
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
declare module 'mdast-util-to-markdown' {
|
|
31
|
+
interface ConstructNameMap {
|
|
32
|
+
wikiLink: 'wikiLink';
|
|
33
|
+
}
|
|
34
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { join, relative } from 'node:path';
|
|
2
|
+
/**
 * Render a short human-readable preview of a section: its id, its file
 * location relative to cwd, and (when present) up to 200 characters of
 * its body.
 */
export function formatSectionPreview(section, latticeDir) {
    // Build the path with join() so the platform separator is used (the
    // previous `latticeDir + '/' + ...` concatenation broke on Windows).
    const relPath = relative(process.cwd(), join(latticeDir, section.file + '.md'));
    const lines = [
        ` ${section.id}`,
        ` ${relPath}:${section.startLine}-${section.endLine}`,
    ];
    if (section.body) {
        // Truncate long bodies so previews stay scannable.
        const truncated = section.body.length > 200
            ? section.body.slice(0, 200) + '...'
            : section.body;
        lines.push('');
        lines.push(` ${truncated}`);
    }
    return lines.join('\n');
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
export type Section = {
|
|
2
|
+
id: string;
|
|
3
|
+
heading: string;
|
|
4
|
+
depth: number;
|
|
5
|
+
file: string;
|
|
6
|
+
children: Section[];
|
|
7
|
+
startLine: number;
|
|
8
|
+
endLine: number;
|
|
9
|
+
body: string;
|
|
10
|
+
};
|
|
11
|
+
export type Ref = {
|
|
12
|
+
target: string;
|
|
13
|
+
fromSection: string;
|
|
14
|
+
file: string;
|
|
15
|
+
line: number;
|
|
16
|
+
};
|
|
17
|
+
export declare function findLatticeDir(from?: string): string | null;
|
|
18
|
+
export declare function listLatticeFiles(latticeDir: string): Promise<string[]>;
|
|
19
|
+
export declare function parseSections(filePath: string, content: string): Section[];
|
|
20
|
+
export declare function loadAllSections(latticeDir: string): Promise<Section[]>;
|
|
21
|
+
export declare function findSections(sections: Section[], query: string): Section[];
|
|
22
|
+
export declare function extractRefs(filePath: string, content: string): Ref[];
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
import { readdir, readFile } from 'node:fs/promises';
|
|
2
|
+
import { dirname, join, basename, resolve } from 'node:path';
|
|
3
|
+
import { existsSync, statSync } from 'node:fs';
|
|
4
|
+
import { parse } from './parser.js';
|
|
5
|
+
import { visit } from 'unist-util-visit';
|
|
6
|
+
/**
 * Walk upward from `from` (default: cwd) looking for a `.lattice`
 * directory. Returns its absolute path, or null when the filesystem
 * root is reached without finding one.
 */
export function findLatticeDir(from) {
    for (let dir = resolve(from ?? process.cwd());;) {
        const candidate = join(dir, '.lattice');
        if (existsSync(candidate) && statSync(candidate).isDirectory()) {
            return candidate;
        }
        const parent = dirname(dir);
        // dirname() of the root is the root itself — we are done.
        if (parent === dir)
            return null;
        dir = parent;
    }
}
|
|
19
|
+
/**
 * List the absolute paths of all markdown files directly inside
 * `latticeDir`, sorted by filename for deterministic ordering.
 */
export async function listLatticeFiles(latticeDir) {
    const names = await readdir(latticeDir);
    const markdownNames = names.filter((name) => name.endsWith('.md')).sort();
    return markdownNames.map((name) => join(latticeDir, name));
}
|
|
26
|
+
// Concatenate the direct `text` children of a heading node.
// NOTE(review): nested inline nodes (emphasis, inline code) are skipped,
// not flattened — confirm heading ids should ignore styled text.
function headingText(node) {
    const parts = [];
    for (const child of node.children) {
        if (child.type === 'text')
            parts.push(child.value);
    }
    return parts.join('');
}
|
|
32
|
+
// Concatenate the direct `text` children of a paragraph node; other
// inline nodes (wiki links, emphasis, ...) contribute nothing.
function paragraphText(node) {
    return node.children.reduce(
        (acc, child) => (child.type === 'text' ? acc + child.value : acc),
        '',
    );
}
|
|
38
|
+
// 1-based number of the last content line of `content`; a single
// trailing newline does not create an extra (empty) line.
function lastLine(content) {
    const lines = content.split('\n');
    const endsWithNewline = lines[lines.length - 1] === '';
    return endsWithNewline ? lines.length - 1 : lines.length;
}
|
|
43
|
+
/**
 * Parse a lattice markdown file into a tree of Sections.
 *
 * A root heading's id is its text; a nested heading's id is
 * `<parent id>#<heading text>`. Each section records its 1-based line
 * span and the first paragraph following its heading as `body`.
 */
export function parseSections(filePath, content) {
    const tree = parse(content);
    // Sections are tagged with the file's basename (without .md).
    const file = basename(filePath, '.md');
    const roots = [];
    const stack = [];
    const flat = [];
    visit(tree, 'heading', (node) => {
        const heading = headingText(node);
        const depth = node.depth;
        const startLine = node.position.start.line;
        // Pop stack until we find a parent with smaller depth
        while (stack.length > 0 && stack[stack.length - 1].depth >= depth) {
            stack.pop();
        }
        const parent = stack.length > 0 ? stack[stack.length - 1] : null;
        const id = parent ? `${parent.id}#${heading}` : heading;
        const section = {
            id,
            heading,
            depth,
            file,
            children: [],
            startLine,
            endLine: 0, // filled in by the endLine pass below
            body: '', // filled in by the body-extraction pass below
        };
        if (parent) {
            parent.children.push(section);
        }
        else {
            roots.push(section);
        }
        stack.push(section);
        flat.push(section);
    });
    // Compute endLine: line before next heading or last line of file
    const fileLastLine = lastLine(content);
    for (let i = 0; i < flat.length; i++) {
        if (i + 1 < flat.length) {
            flat[i].endLine = flat[i + 1].startLine - 1;
        }
        else {
            flat[i].endLine = fileLastLine;
        }
    }
    // Extract body: first paragraph after each heading
    // NOTE(review): this pass walks only the tree's TOP-LEVEL children,
    // while `flat` came from a full visit(); a heading nested inside e.g.
    // a blockquote would desynchronize headingIdx from `flat` — confirm
    // lattice files never nest headings.
    const children = tree.children;
    let headingIdx = 0;
    for (let i = 0; i < children.length; i++) {
        const node = children[i];
        if (node.type === 'heading') {
            // Find the first paragraph after this heading, before the next heading
            for (let j = i + 1; j < children.length; j++) {
                if (children[j].type === 'heading')
                    break;
                if (children[j].type === 'paragraph') {
                    flat[headingIdx].body = paragraphText(children[j]);
                    break;
                }
            }
            headingIdx++;
        }
    }
    return roots;
}
|
|
108
|
+
/**
 * Read and parse every markdown file in `latticeDir`, returning the
 * concatenated root sections of all files (in sorted filename order).
 */
export async function loadAllSections(latticeDir) {
    const files = await listLatticeFiles(latticeDir);
    const all = [];
    for (const file of files) {
        const content = await readFile(file, 'utf-8');
        const sections = parseSections(file, content);
        all.push(...sections);
    }
    return all;
}
|
|
117
|
+
// Pre-order flatten of a Section tree into a single list.
function flattenSections(sections) {
    return sections.flatMap((section) => [section, ...flattenSections(section.children)]);
}
|
|
125
|
+
/**
 * Case-insensitive exact-id lookup across a Section tree.
 */
export function findSections(sections, query) {
    const wanted = query.toLowerCase();
    return flattenSections(sections).filter((section) => section.id.toLowerCase() === wanted);
}
|
|
130
|
+
/**
 * Extract every wiki-link reference from a markdown file, tagging each
 * ref with the id of the section whose heading most recently precedes
 * it in the document.
 */
export function extractRefs(filePath, content) {
    const tree = parse(content);
    const file = basename(filePath, '.md');
    const refs = [];
    // Build a flat list of sections to determine enclosing section for each wiki link
    const flat = [];
    visit(tree, 'heading', (node) => {
        flat.push({
            id: '', // filled below
            startLine: node.position.start.line,
        });
    });
    // Re-derive ids using the same logic as parseSections
    const stack = [];
    let idx = 0;
    visit(tree, 'heading', (node) => {
        const heading = headingText(node);
        const depth = node.depth;
        // Pop until the top of the stack is a shallower (ancestor) heading.
        while (stack.length > 0 && stack[stack.length - 1].depth >= depth) {
            stack.pop();
        }
        const parent = stack.length > 0 ? stack[stack.length - 1] : null;
        const id = parent ? `${parent.id}#${heading}` : heading;
        flat[idx].id = id;
        stack.push({ id, depth });
        idx++;
    });
    visit(tree, 'wikiLink', (node) => {
        const line = node.position.start.line;
        // Find enclosing section: last heading with startLine <= link line
        // (`flat` is in document order, so the last qualifying entry wins;
        // links before the first heading get fromSection = '').
        let fromSection = '';
        for (const s of flat) {
            if (s.startLine <= line) {
                fromSection = s.id;
            }
            else {
                break;
            }
        }
        refs.push({
            target: node.value,
            fromSection,
            file,
            line,
        });
    });
    return refs;
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { unified } from 'unified';
|
|
2
|
+
import remarkParse from 'remark-parse';
|
|
3
|
+
import remarkStringify from 'remark-stringify';
|
|
4
|
+
import { wikiLinkSyntax, wikiLinkFromMarkdown, wikiLinkToMarkdown, } from './extensions/wiki-link/index.js';
|
|
5
|
+
// Shared unified processor with the wiki-link extension wired into all
// three stages: micromark (syntax), from-markdown (mdast construction),
// and to-markdown (serialization). Reused by parse() and toMarkdown().
const processor = unified()
    .use(remarkParse)
    .use(remarkStringify)
    .data('micromarkExtensions', [wikiLinkSyntax()])
    .data('fromMarkdownExtensions', [wikiLinkFromMarkdown()])
    .data('toMarkdownExtensions', [wikiLinkToMarkdown()]);
// Parse markdown text (including [[wiki links]]) into an mdast tree.
export function parse(markdown) {
    return processor.parse(markdown);
}
// Serialize an mdast tree back to markdown text.
export function toMarkdown(tree) {
    return processor.stringify(tree);
}
|
package/package.json
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "lat.md",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Anchor source code to high-level concepts defined in markdown",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"lat": "./dist/src/cli/index.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"dist/src"
|
|
11
|
+
],
|
|
12
|
+
"scripts": {
|
|
13
|
+
"build": "tsc",
|
|
14
|
+
"test": "vitest run",
|
|
15
|
+
"test:watch": "vitest",
|
|
16
|
+
"typecheck": "tsc --noEmit",
|
|
17
|
+
"format": "prettier --write 'src/**/*.ts'",
|
|
18
|
+
"format:check": "prettier --check 'src/**/*.ts'"
|
|
19
|
+
},
|
|
20
|
+
"devDependencies": {
|
|
21
|
+
"@types/mdast": "^4.0.4",
|
|
22
|
+
"@types/node": "^25.3.0",
|
|
23
|
+
"mdast-util-from-markdown": "^2.0.3",
|
|
24
|
+
"micromark-util-types": "^2.0.2",
|
|
25
|
+
"prettier": "^3.8.1",
|
|
26
|
+
"typescript": "^5.7.0",
|
|
27
|
+
"vitest": "^3.0.0"
|
|
28
|
+
},
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"mdast-util-to-markdown": "^2.1.0",
|
|
31
|
+
"remark-parse": "^11.0.0",
|
|
32
|
+
"remark-stringify": "^11.0.0",
|
|
33
|
+
"unified": "^11.0.0",
|
|
34
|
+
"unist-util-visit": "^5.0.0"
|
|
35
|
+
}
|
|
36
|
+
}
|