@titan-design/brain 0.5.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +68 -44
- package/dist/{brain-service-2QO6JM3Z.js → brain-service-TYFNTBT6.js} +2 -2
- package/dist/{chunk-ZVXSW52A.js → chunk-4GDSQB2E.js} +1 -1
- package/dist/chunk-HSQ2ZVJJ.js +258 -0
- package/dist/{chunk-QL2GPXP6.js → chunk-IESQY2UZ.js} +45 -1
- package/dist/{chunk-4SD4JRLS.js → chunk-LLAHWRO4.js} +2 -2
- package/dist/cli.js +3531 -1403
- package/dist/{command-resolution-FJHE2YBQ.js → command-resolution-EJ6LTC2Z.js} +1 -1
- package/dist/deterministic-SOUYKJS4.js +8 -0
- package/dist/{search-HNUALOXQ.js → search-NPTRJV4W.js} +2 -2
- package/dist/templates/brainstorm-design.md +30 -0
- package/dist/templates/brainstorm-explore.md +30 -0
- package/dist/templates/brainstorm-interview.md +26 -0
- package/dist/templates/brainstorm-propose.md +28 -0
- package/dist/templates/brainstorm-write-doc.md +30 -0
- package/dist/templates/implementation-compact.md +46 -0
- package/dist/templates/implementation.md +92 -0
- package/dist/templates/ops.md +18 -0
- package/dist/templates/planning-critic.md +123 -0
- package/dist/templates/planning-decompose.md +221 -0
- package/dist/templates/planning-design.md +162 -0
- package/dist/templates/planning-interview.md +74 -0
- package/dist/templates/planning-research.md +114 -0
- package/dist/templates/planning-spectests.md +84 -0
- package/dist/templates/review-agent.md +155 -0
- package/dist/templates/review-fixup.md +79 -0
- package/dist/templates/validation-gates.md +84 -0
- package/dist/templates/writing-plans.md +48 -0
- package/package.json +11 -3
- package/scripts/diagnostic/run.sh +1 -1
- package/skill/SKILL.md +53 -25
package/README.md
CHANGED
|
@@ -46,52 +46,53 @@ See [PM Quick Start](docs/pm-module/quickstart.md) for the full 5-minute guide.
|
|
|
46
46
|
|
|
47
47
|
### Core
|
|
48
48
|
|
|
49
|
-
| Command
|
|
50
|
-
|
|
51
|
-
| `brain init`
|
|
52
|
-
| `brain index`
|
|
53
|
-
| `brain search "query"`
|
|
54
|
-
| `brain add <file>`
|
|
55
|
-
| `brain quick "text"`
|
|
56
|
-
| `brain inbox`
|
|
57
|
-
| `brain
|
|
58
|
-
| `brain
|
|
59
|
-
| `brain
|
|
60
|
-
| `brain
|
|
61
|
-
| `brain
|
|
62
|
-
| `brain
|
|
63
|
-
| `brain
|
|
64
|
-
| `brain
|
|
65
|
-
| `brain
|
|
66
|
-
| `brain
|
|
67
|
-
| `brain
|
|
68
|
-
| `brain
|
|
69
|
-
| `brain
|
|
70
|
-
| `brain
|
|
71
|
-
| `brain
|
|
72
|
-
| `brain
|
|
49
|
+
| Command | Description |
|
|
50
|
+
| ----------------------- | --------------------------------------------------------------- |
|
|
51
|
+
| `brain init` | Initialize workspace and database |
|
|
52
|
+
| `brain index` | Index all markdown notes |
|
|
53
|
+
| `brain search "query"` | Hybrid BM25 + vector search |
|
|
54
|
+
| `brain add <file>` | Add a note from file or stdin |
|
|
55
|
+
| `brain quick "text"` | Zero-friction capture to inbox |
|
|
56
|
+
| `brain inbox` | View/manage inbox items |
|
|
57
|
+
| `brain import <paths>` | Smart import with three-tier extraction (`--dry-run`, `--tier`) |
|
|
58
|
+
| `brain ingest` | Bulk-import files to inbox |
|
|
59
|
+
| `brain feed` | Manage RSS feed subscriptions |
|
|
60
|
+
| `brain extract` | Extract memories from notes (Ollama) |
|
|
61
|
+
| `brain memories` | List, history, and stats for memories |
|
|
62
|
+
| `brain context <id>` | Show context for a note (relations + memories) |
|
|
63
|
+
| `brain lineage <id>` | Show memory version lineage |
|
|
64
|
+
| `brain profile` | Generate agent context profile |
|
|
65
|
+
| `brain tidy` | LLM-powered note cleanup suggestions |
|
|
66
|
+
| `brain doctor` | System health checks (`--fix` for auto-repair) |
|
|
67
|
+
| `brain install-hooks` | Set up launchd/systemd scheduled processing |
|
|
68
|
+
| `brain status` | Database stats |
|
|
69
|
+
| `brain stale` | Notes needing review |
|
|
70
|
+
| `brain graph <id>` | Show note relations |
|
|
71
|
+
| `brain template <type>` | Output frontmatter template |
|
|
72
|
+
| `brain archive` | Archive expired notes |
|
|
73
|
+
| `brain config` | View/set configuration |
|
|
73
74
|
|
|
74
75
|
### Project Management (`brain pm`)
|
|
75
76
|
|
|
76
|
-
| Command
|
|
77
|
-
|
|
78
|
-
| `brain pm init <name> --prefix <P>` | Initialize a new project
|
|
79
|
-
| `brain pm use <prefix>`
|
|
80
|
-
| `brain pm list`
|
|
81
|
-
| `brain pm status [prefix]`
|
|
82
|
-
| `brain pm workstream add <name>`
|
|
83
|
-
| `brain pm task add <name>`
|
|
84
|
-
| `brain pm task list`
|
|
85
|
-
| `brain pm task done <id>`
|
|
86
|
-
| `brain pm next`
|
|
87
|
-
| `brain pm waves`
|
|
88
|
-
| `brain pm dispatch <id>`
|
|
89
|
-
| `brain pm complete <id>`
|
|
90
|
-
| `brain pm briefing`
|
|
91
|
-
| `brain pm audit summary`
|
|
92
|
-
| `brain pm check [--deep]`
|
|
93
|
-
| `brain pm setup`
|
|
94
|
-
| `brain pm install-hooks`
|
|
77
|
+
| Command | Description |
|
|
78
|
+
| ----------------------------------- | --------------------------------------------------------- |
|
|
79
|
+
| `brain pm init <name> --prefix <P>` | Initialize a new project |
|
|
80
|
+
| `brain pm use <prefix>` | Set active project context |
|
|
81
|
+
| `brain pm list` | List all projects |
|
|
82
|
+
| `brain pm status [prefix]` | Show project status |
|
|
83
|
+
| `brain pm workstream add <name>` | Add a workstream |
|
|
84
|
+
| `brain pm task add <name>` | Create a task |
|
|
85
|
+
| `brain pm task list` | List tasks (filterable by status, workstream) |
|
|
86
|
+
| `brain pm task done <id>` | Mark task done |
|
|
87
|
+
| `brain pm next` | Show eligible tasks (all deps satisfied) |
|
|
88
|
+
| `brain pm waves` | Topological wave grouping of remaining tasks |
|
|
89
|
+
| `brain pm dispatch <id>` | Assemble context bundle for a task |
|
|
90
|
+
| `brain pm complete <id>` | Mark done, run impact analysis |
|
|
91
|
+
| `brain pm briefing` | Session briefing with project state overview |
|
|
92
|
+
| `brain pm audit summary` | Activity log, cost tracking |
|
|
93
|
+
| `brain pm check [--deep]` | Consistency check (structural + semantic analysis) |
|
|
94
|
+
| `brain pm setup` | Configure PM module (paths, hooks) |
|
|
95
|
+
| `brain pm install-hooks` | Install PM hooks and skills (orchestrator + sanity-check) |
|
|
95
96
|
|
|
96
97
|
## Architecture
|
|
97
98
|
|
|
@@ -107,6 +108,7 @@ src/
|
|
|
107
108
|
context.ts — Module context factory
|
|
108
109
|
loader.ts — Module discovery and loading
|
|
109
110
|
validation.ts — Frontmatter schema validation
|
|
111
|
+
knowledge/ — Knowledge module (core note types)
|
|
110
112
|
pm/ — Project management module
|
|
111
113
|
commands/ — 15 command groups (incl. check)
|
|
112
114
|
data/ — CRUD operations and queries
|
|
@@ -146,10 +148,32 @@ Brain indexes markdown files with YAML frontmatter into a SQLite database. It co
|
|
|
146
148
|
- `slow` — permanent knowledge (decisions, patterns, research) with review intervals
|
|
147
149
|
- `fast` — ephemeral (meetings, session logs) with expiry dates
|
|
148
150
|
|
|
151
|
+
### Knowledge Graph
|
|
152
|
+
|
|
153
|
+
Link related notes and traverse connections:
|
|
154
|
+
|
|
155
|
+
1. Add `related` field to YAML frontmatter:
|
|
156
|
+
```yaml
|
|
157
|
+
related:
|
|
158
|
+
- database-migration-patterns
|
|
159
|
+
- service-architecture-overview
|
|
160
|
+
```
|
|
161
|
+
2. Re-index after adding relations: `brain index`
|
|
162
|
+
3. Traverse the graph:
|
|
163
|
+
```bash
|
|
164
|
+
brain graph <note-id> # Show direct relations
|
|
165
|
+
brain graph <note-id> --depth 2 # Show 2-hop connections
|
|
166
|
+
brain graph <note-id> --json # Machine-readable output
|
|
167
|
+
```
|
|
168
|
+
4. Use `--expand` in search to include graph-connected notes:
|
|
169
|
+
```bash
|
|
170
|
+
brain search "query" --expand
|
|
171
|
+
```
|
|
172
|
+
|
|
149
173
|
## Testing
|
|
150
174
|
|
|
151
175
|
```bash
|
|
152
|
-
npm test #
|
|
176
|
+
npm test # ~2,350 tests (Vitest)
|
|
153
177
|
npm run typecheck # TypeScript checking
|
|
154
178
|
npm run lint # ESLint
|
|
155
179
|
npm run build # Production build (tsup)
|
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
// src/services/extraction-tiers/deterministic.ts
|
|
2
|
+
import { extname } from "path";
|
|
3
|
+
|
|
4
|
+
// src/services/format-adapters/csv-adapter.ts
|
|
5
|
+
/**
 * Parse raw CSV text into { headers, rows } of trimmed string cells.
 *
 * Handles RFC 4180 quoting: commas and doubled quotes ("") inside quoted
 * cells, and — fixed here — newlines inside quoted cells, which the previous
 * version broke by splitting on "\n" before inspecting quote state.
 * Blank logical rows are skipped; the first logical row becomes the headers.
 *
 * @param {string} content - Raw CSV text (any of \r\n, \r, \n line endings).
 * @returns {{ headers: string[], rows: string[][] }}
 */
function parseCsv(content) {
  const physicalLines = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n").split("\n");
  const parsedRows = [];
  let pending = null; // accumulates physical lines while a quoted cell is open
  for (const line of physicalLines) {
    const candidate = pending === null ? line : pending + "\n" + line;
    // An odd number of quote characters means a quoted cell is still open,
    // so this logical row continues on the next physical line. (Escaped ""
    // pairs contribute an even count, so parity tracks the quote state.)
    if ((candidate.match(/"/g) ?? []).length % 2 === 1) {
      pending = candidate;
      continue;
    }
    pending = null;
    if (!candidate.trim()) continue;
    parsedRows.push(parseRow(candidate));
  }
  // Unterminated quote at EOF: best-effort parse of what we accumulated.
  if (pending !== null && pending.trim()) parsedRows.push(parseRow(pending));
  if (parsedRows.length === 0) return { headers: [], rows: [] };
  const headers = parsedRows[0].map((h) => h.trim());
  const rows = parsedRows.slice(1).map((r) => r.map((c) => c.trim()));
  return { headers, rows };
}

/**
 * Split one logical CSV row into raw (untrimmed) cells.
 * Supports quoted cells, escaped quotes (""), and commas inside quotes.
 * @param {string} line - One logical CSV row (may contain embedded newlines).
 * @returns {string[]} Raw cell values.
 */
function parseRow(line) {
  const cells = [];
  let current = "";
  let inQuotes = false;
  for (let i = 0; i < line.length; i++) {
    const ch = line[i];
    if (inQuotes) {
      if (ch === '"' && line[i + 1] === '"') {
        current += '"'; // escaped quote inside a quoted cell
        i++;
      } else if (ch === '"') {
        inQuotes = false;
      } else {
        current += ch;
      }
    } else if (ch === '"') {
      inQuotes = true;
    } else if (ch === ",") {
      cells.push(current);
      current = "";
    } else {
      current += ch;
    }
  }
  cells.push(current);
  return cells;
}
|
|
44
|
+
/**
 * Render parsed CSV data as a GitHub-flavored markdown table.
 * Pipe characters inside cells are escaped so they do not break columns.
 * @param {{ headers: string[], rows: string[][] }} parsed - Output of parseCsv.
 * @returns {string} Markdown table: header line, separator line, one line per row.
 */
function csvToMarkdownTable(parsed) {
  const toCell = (text) => text.replace(/\|/g, "\\|");
  const toLine = (cells) => `| ${cells.join(" | ")} |`;
  const lines = [
    toLine(parsed.headers.map(toCell)),
    toLine(parsed.headers.map(() => "---")),
    ...parsed.rows.map((row) => toLine(row.map(toCell))),
  ];
  return lines.join("\n");
}
|
|
51
|
+
// Column names characteristic of a Linear issues export.
var LINEAR_COLUMNS = /* @__PURE__ */ new Set([
  "id",
  "title",
  "status",
  "priority",
  "assignee",
  "labels",
  "team",
  "cycle",
  "estimate"
]);
// Column names characteristic of a Notion database export.
var NOTION_COLUMNS = /* @__PURE__ */ new Set([
  "name",
  "tags",
  "status",
  "created time",
  "last edited time",
  "url"
]);
/**
 * Guess which tool produced a CSV from its header row.
 * Linear is checked before Notion; either requires at least three matching
 * (case-insensitive, trimmed) column names, otherwise "generic".
 * @param {string[]} headers - Raw header cells.
 * @returns {"linear" | "notion-db" | "generic"}
 */
function detectCsvFlavor(headers) {
  const normalized = headers.map((h) => h.toLowerCase().trim());
  const hitCount = (known) =>
    normalized.reduce((total, name) => (known.has(name) ? total + 1 : total), 0);
  if (hitCount(LINEAR_COLUMNS) >= 3) return "linear";
  if (hitCount(NOTION_COLUMNS) >= 3) return "notion-db";
  return "generic";
}
|
|
78
|
+
|
|
79
|
+
// src/services/extraction-tiers/deterministic.ts
|
|
80
|
+
import matter from "gray-matter";
|
|
81
|
+
/**
 * Tier-1 deterministic extraction: classify a document without any LLM.
 * Strategies are tried in order — CSV column match (for .csv files),
 * frontmatter `type` match (for markdown), markdown-table column match,
 * then embedding similarity against archetype texts — and the first
 * strategy that yields a result wins.
 * @param {string} content - Raw file content.
 * @param {string} filePath - Path used for extension sniffing and titles.
 * @param {object} registry - Module registry (column/type/archetype lookups).
 * @param {object} embedder - Embedding provider with an async embed() method.
 * @returns {Promise<{items: Array, remainder: string | null}>} Extracted
 *   items, or no items with the full content as remainder when nothing matched.
 */
async function extractDeterministic(content, filePath, registry, embedder) {
  const extension = extname(filePath).toLowerCase();
  const strategies = [];
  if (extension === ".csv") {
    strategies.push(() => tryCsvMatch(content, registry));
  }
  if (extension === ".md" || extension === ".markdown") {
    strategies.push(() => tryFrontmatterMatch(content, registry));
  }
  strategies.push(() => tryTableMatch(content, registry));
  strategies.push(() => tryEmbeddingMatch(content, filePath, registry, embedder));
  for (const attempt of strategies) {
    const outcome = await attempt();
    if (outcome) return outcome;
  }
  return { items: [], remainder: content };
}
|
|
97
|
+
/**
 * Match a parsed CSV against registered note-type column aliases.
 * Each data row becomes one extraction item: aliased columns are stored
 * under their schema field name, unmapped columns under their lowercased
 * header name, and `name`/`title` become the item title (removed from
 * fields). The rendered content lists every field except `description`.
 * @param {string} content - Raw CSV text.
 * @param {object} registry - Registry exposing matchColumnHeaders().
 * @returns {{items: Array, remainder: null} | null} null when the CSV is
 *   empty or no note type matches its columns.
 */
function tryCsvMatch(content, registry) {
  const { headers, rows } = parseCsv(content);
  if (headers.length === 0 || rows.length === 0) return null;
  const match = registry.matchColumnHeaders(headers);
  if (!match) return null;
  const items = rows.map((row, rowIdx) => {
    const fields = {};
    const cellCount = Math.min(headers.length, row.length);
    for (let col = 0; col < cellCount; col++) {
      const key = headers[col].toLowerCase().trim();
      const schemaField = match.columnMapping[key];
      fields[schemaField || key] = row[col];
    }
    const title = fields.name ?? fields.title ?? `Row ${rowIdx + 1}`;
    delete fields.name;
    delete fields.title;
    return {
      noteType: match.noteType,
      title,
      content: Object.entries(fields)
        .filter(([key]) => key !== "description")
        .map(([key, value]) => `**${key}:** ${value}`)
        .join("\n"),
      fields,
      // +2: 1-based line numbering plus the header row.
      sourceRegion: { startLine: rowIdx + 2, endLine: rowIdx + 2 }
    };
  });
  return items.length > 0 ? { items, remainder: null } : null;
}
|
|
128
|
+
/**
 * Match a markdown document by its frontmatter `type` field.
 * Succeeds only when the frontmatter declares a string `type` that is
 * registered in the registry; the whole body becomes one item's content.
 * @param {string} content - Raw markdown with optional YAML frontmatter.
 * @param {object} registry - Registry exposing getNoteType().
 * @returns {{items: Array, remainder: null} | null} null on parse failure,
 *   missing/non-string type, or unknown note type.
 */
function tryFrontmatterMatch(content, registry) {
  try {
    const parsed = matter(content);
    const meta = parsed.data;
    if (typeof meta.type !== "string" || !meta.type) return null;
    if (!registry.getNoteType(meta.type)) return null;
    const item = {
      noteType: meta.type,
      title: meta.title ?? "Untitled",
      content: parsed.content.trim(),
      fields: extractFieldsFromFrontmatter(meta)
    };
    return { items: [item], remainder: null };
  } catch {
    // Malformed frontmatter: defer to the next extraction strategy.
    return null;
  }
}
|
|
150
|
+
/**
 * Convert frontmatter data into a flat string-valued field map.
 * Reserved structural keys are dropped, Date values become YYYY-MM-DD,
 * scalars are stringified, and any other value types (arrays, nested
 * objects, null) are omitted.
 * @param {object} data - Parsed YAML frontmatter.
 * @returns {Object<string, string>} Field name to string value.
 */
function extractFieldsFromFrontmatter(data) {
  const reserved = /* @__PURE__ */ new Set(["id", "title", "type", "tier", "module"]);
  const out = {};
  Object.entries(data).forEach(([key, value]) => {
    if (reserved.has(key)) return;
    if (value instanceof Date) {
      out[key] = value.toISOString().slice(0, 10); // date portion only
    } else if (["string", "number", "boolean"].includes(typeof value)) {
      out[key] = String(value);
    }
  });
  return out;
}
|
|
163
|
+
/**
 * Match a markdown table in free-form content against registered
 * note-type column aliases; each table row becomes one extraction item.
 *
 * Cells are extracted positionally. Fixed here: the previous version split
 * rows with `.filter(Boolean)`, which dropped empty cells after trimming
 * and shifted every later value one column left, silently assigning values
 * to the wrong fields. Rows are now split by stripping the leading and
 * trailing pipe delimiters so empty cells survive as "".
 * @param {string} content - Raw markdown text.
 * @param {object} registry - Registry exposing matchColumnHeaders().
 * @returns {{items: Array, remainder: null} | null} null when no table or
 *   no matching note type is found.
 */
function tryTableMatch(content, registry) {
  // Split "| a |  | c |" into positional cells, preserving empty ones.
  const splitRow = (line) => line.split("|").slice(1, -1).map((c) => c.trim());
  const lines = content.split("\n");
  const headerLine = lines.find((l) => /^\|.+\|/.test(l));
  if (!headerLine) return null;
  const headerIdx = lines.indexOf(headerLine);
  const sepIdx = headerIdx + 1;
  // A real table needs a |---|---| separator directly under the header.
  if (sepIdx >= lines.length || !/^\|[\s\-:|]+\|/.test(lines[sepIdx])) return null;
  const headers = splitRow(headerLine);
  const match = registry.matchColumnHeaders(headers);
  if (!match) return null;
  const items = [];
  for (let i = sepIdx + 1; i < lines.length; i++) {
    const line = lines[i];
    if (!/^\|.+\|/.test(line)) continue;
    const cells = splitRow(line);
    const fields = {};
    for (let j = 0; j < headers.length && j < cells.length; j++) {
      const headerLower = headers[j].toLowerCase().trim();
      const schemaField = match.columnMapping[headerLower];
      if (schemaField) {
        fields[schemaField] = cells[j];
      } else {
        fields[headerLower] = cells[j];
      }
    }
    const title = fields.name ?? fields.title ?? `Row ${i - sepIdx}`;
    delete fields.name;
    delete fields.title;
    items.push({
      noteType: match.noteType,
      title,
      // Render every field except the long-form description.
      content: Object.entries(fields).filter(([k]) => k !== "description").map(([k, v]) => `**${k}:** ${v}`).join("\n"),
      fields,
      // 1-based line number of this table row in the source document.
      sourceRegion: { startLine: i + 1, endLine: i + 1 }
    });
  }
  return items.length > 0 ? { items, remainder: null } : null;
}
|
|
201
|
+
/**
 * Classify a document by embedding similarity against archetype texts.
 * Embeds the document body together with every archetype in one embed()
 * call and picks the closest type, requiring cosine similarity strictly
 * above 0.85; bodies shorter than 50 or longer than 10,000 characters
 * are skipped.
 * @param {string} content - Raw file content (frontmatter is stripped when parseable).
 * @param {string} filePath - Used for the item title (basename sans extension).
 * @param {object} registry - Registry exposing getArchetypeTexts().
 * @param {object} embedder - Embedding provider with async embed(texts).
 * @returns {Promise<{items: Array, remainder: null} | null>}
 */
async function tryEmbeddingMatch(content, filePath, registry, embedder) {
  const archetypeTexts = registry.getArchetypeTexts();
  if (archetypeTexts.size === 0) return null;
  let body;
  try {
    body = matter(content).content.trim();
  } catch {
    body = content.trim(); // unparseable frontmatter: classify the raw text
  }
  if (body.length < 50 || body.length > 1e4) return null;
  const entries = [...archetypeTexts.entries()];
  const vectors = await embedder.embed([body, ...entries.map(([, text]) => text)]);
  const docVec = vectors[0];
  let bestType = "";
  let bestScore = 0.85; // minimum similarity threshold to accept a match
  entries.forEach(([typeName], idx) => {
    const score = cosineSimilarity(docVec, vectors[idx + 1]);
    if (score > bestScore) {
      bestScore = score;
      bestType = typeName;
    }
  });
  if (!bestType) return null;
  const title = filePath.split("/").pop()?.replace(/\.[^.]+$/, "") ?? "Untitled";
  return {
    items: [
      {
        noteType: bestType,
        title,
        content: body,
        fields: {}
      }
    ],
    remainder: null
  };
}
|
|
239
|
+
/**
 * Cosine similarity between two equal-length numeric vectors.
 * Returns 0 when either vector has zero magnitude (avoids division by zero).
 * @param {number[]} a - First vector.
 * @param {number[]} b - Second vector (same length as a).
 * @returns {number} Similarity in [-1, 1].
 */
function cosineSimilarity(a, b) {
  let dotProduct = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  a.forEach((value, idx) => {
    dotProduct += value * b[idx];
    sumSqA += value * value;
    sumSqB += b[idx] * b[idx];
  });
  const magnitude = Math.sqrt(sumSqA) * Math.sqrt(sumSqB);
  return magnitude === 0 ? 0 : dotProduct / magnitude;
}
|
|
251
|
+
|
|
252
|
+
export {
|
|
253
|
+
parseCsv,
|
|
254
|
+
csvToMarkdownTable,
|
|
255
|
+
detectCsvFlavor,
|
|
256
|
+
extractDeterministic,
|
|
257
|
+
cosineSimilarity
|
|
258
|
+
};
|
|
@@ -3,7 +3,7 @@ import {
|
|
|
3
3
|
} from "./chunk-BDNH2E2O.js";
|
|
4
4
|
import {
|
|
5
5
|
addFrontmatterField
|
|
6
|
-
} from "./chunk-
|
|
6
|
+
} from "./chunk-LLAHWRO4.js";
|
|
7
7
|
|
|
8
8
|
// src/services/config.ts
|
|
9
9
|
import { readFileSync, writeFileSync, mkdirSync, existsSync, copyFileSync } from "fs";
|
|
@@ -1664,6 +1664,50 @@ var ModuleRegistry = class {
|
|
|
1664
1664
|
getContentHandlers() {
|
|
1665
1665
|
return [...this.contentHandlers];
|
|
1666
1666
|
}
|
|
1667
|
+
// --- Import Hints ---
|
|
1668
|
+
/** Returns all note types that have importHints configured */
|
|
1669
|
+
getImportableNoteTypes() {
|
|
1670
|
+
return this.getAllNoteTypes().filter(({ noteType }) => noteType.importHints);
|
|
1671
|
+
}
|
|
1672
|
+
/** Match CSV/table column headers against registered tableColumnAliases.
|
|
1673
|
+
* Returns the best-matching note type if 2+ columns match, else null. */
|
|
1674
|
+
matchColumnHeaders(headers) {
|
|
1675
|
+
const lowerHeaders = headers.map((h) => h.toLowerCase().trim());
|
|
1676
|
+
let bestMatch = null;
|
|
1677
|
+
for (const { module, noteType } of this.getImportableNoteTypes()) {
|
|
1678
|
+
const aliases = noteType.importHints?.tableColumnAliases;
|
|
1679
|
+
if (!aliases) continue;
|
|
1680
|
+
const mapping = {};
|
|
1681
|
+
let hits = 0;
|
|
1682
|
+
for (const [schemaField, columnNames] of Object.entries(aliases)) {
|
|
1683
|
+
const matched = lowerHeaders.find(
|
|
1684
|
+
(h) => columnNames.map((c) => c.toLowerCase()).includes(h)
|
|
1685
|
+
);
|
|
1686
|
+
if (matched) {
|
|
1687
|
+
mapping[matched] = schemaField;
|
|
1688
|
+
hits++;
|
|
1689
|
+
}
|
|
1690
|
+
}
|
|
1691
|
+
if (hits >= 2 && (!bestMatch || hits > bestMatch.hits)) {
|
|
1692
|
+
bestMatch = { module, noteType: noteType.name, columnMapping: mapping, hits };
|
|
1693
|
+
}
|
|
1694
|
+
}
|
|
1695
|
+
return bestMatch ? {
|
|
1696
|
+
module: bestMatch.module,
|
|
1697
|
+
noteType: bestMatch.noteType,
|
|
1698
|
+
columnMapping: bestMatch.columnMapping
|
|
1699
|
+
} : null;
|
|
1700
|
+
}
|
|
1701
|
+
/** Returns archetype texts for embedding-based classification */
|
|
1702
|
+
getArchetypeTexts() {
|
|
1703
|
+
const result = /* @__PURE__ */ new Map();
|
|
1704
|
+
for (const { noteType } of this.getImportableNoteTypes()) {
|
|
1705
|
+
if (noteType.importHints?.archetypeText) {
|
|
1706
|
+
result.set(noteType.name, noteType.importHints.archetypeText);
|
|
1707
|
+
}
|
|
1708
|
+
}
|
|
1709
|
+
return result;
|
|
1710
|
+
}
|
|
1667
1711
|
};
|
|
1668
1712
|
|
|
1669
1713
|
// src/modules/context.ts
|
|
@@ -18,7 +18,8 @@ var VALID_CORE_NOTE_TYPES = [
|
|
|
18
18
|
"research",
|
|
19
19
|
"meeting",
|
|
20
20
|
"session-log",
|
|
21
|
-
"guide"
|
|
21
|
+
"guide",
|
|
22
|
+
"workflow"
|
|
22
23
|
];
|
|
23
24
|
var VALID_NOTE_TIERS = ["slow", "fast"];
|
|
24
25
|
var VALID_NOTE_CONFIDENCES = ["high", "medium", "low", "speculative"];
|
|
@@ -826,7 +827,6 @@ export {
|
|
|
826
827
|
slugify,
|
|
827
828
|
parseIntervalDays,
|
|
828
829
|
parseMarkdown,
|
|
829
|
-
splitIntoSections,
|
|
830
830
|
computeAutoLinks,
|
|
831
831
|
traverseGraph,
|
|
832
832
|
expandResults,
|