@context-vault/core 2.17.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. package/dist/capture.d.ts +21 -0
  2. package/dist/capture.d.ts.map +1 -0
  3. package/dist/capture.js +269 -0
  4. package/dist/capture.js.map +1 -0
  5. package/dist/categories.d.ts +6 -0
  6. package/dist/categories.d.ts.map +1 -0
  7. package/dist/categories.js +50 -0
  8. package/dist/categories.js.map +1 -0
  9. package/dist/config.d.ts +4 -0
  10. package/dist/config.d.ts.map +1 -0
  11. package/dist/config.js +190 -0
  12. package/dist/config.js.map +1 -0
  13. package/dist/constants.d.ts +33 -0
  14. package/dist/constants.d.ts.map +1 -0
  15. package/dist/constants.js +23 -0
  16. package/dist/constants.js.map +1 -0
  17. package/dist/db.d.ts +13 -0
  18. package/dist/db.d.ts.map +1 -0
  19. package/dist/db.js +191 -0
  20. package/dist/db.js.map +1 -0
  21. package/dist/embed.d.ts +5 -0
  22. package/dist/embed.d.ts.map +1 -0
  23. package/dist/embed.js +78 -0
  24. package/dist/embed.js.map +1 -0
  25. package/dist/files.d.ts +13 -0
  26. package/dist/files.d.ts.map +1 -0
  27. package/dist/files.js +66 -0
  28. package/dist/files.js.map +1 -0
  29. package/dist/formatters.d.ts +8 -0
  30. package/dist/formatters.d.ts.map +1 -0
  31. package/dist/formatters.js +18 -0
  32. package/dist/formatters.js.map +1 -0
  33. package/dist/frontmatter.d.ts +12 -0
  34. package/dist/frontmatter.d.ts.map +1 -0
  35. package/dist/frontmatter.js +101 -0
  36. package/dist/frontmatter.js.map +1 -0
  37. package/dist/index.d.ts +10 -0
  38. package/dist/index.d.ts.map +1 -0
  39. package/dist/index.js +297 -0
  40. package/dist/index.js.map +1 -0
  41. package/dist/ingest-url.d.ts +20 -0
  42. package/dist/ingest-url.d.ts.map +1 -0
  43. package/dist/ingest-url.js +113 -0
  44. package/dist/ingest-url.js.map +1 -0
  45. package/dist/main.d.ts +14 -0
  46. package/dist/main.d.ts.map +1 -0
  47. package/dist/main.js +25 -0
  48. package/dist/main.js.map +1 -0
  49. package/dist/search.d.ts +18 -0
  50. package/dist/search.d.ts.map +1 -0
  51. package/dist/search.js +238 -0
  52. package/dist/search.js.map +1 -0
  53. package/dist/types.d.ts +176 -0
  54. package/dist/types.d.ts.map +1 -0
  55. package/dist/types.js +2 -0
  56. package/dist/types.js.map +1 -0
  57. package/package.json +66 -17
  58. package/src/capture.ts +308 -0
  59. package/src/categories.ts +54 -0
  60. package/src/{core/config.js → config.ts} +34 -33
  61. package/src/{constants.js → constants.ts} +6 -3
  62. package/src/db.ts +229 -0
  63. package/src/{index/embed.js → embed.ts} +10 -35
  64. package/src/files.ts +80 -0
  65. package/src/{capture/formatters.js → formatters.ts} +13 -11
  66. package/src/{core/frontmatter.js → frontmatter.ts} +27 -33
  67. package/src/index.ts +351 -0
  68. package/src/ingest-url.ts +99 -0
  69. package/src/main.ts +111 -0
  70. package/src/search.ts +285 -0
  71. package/src/types.ts +166 -0
  72. package/src/capture/file-ops.js +0 -97
  73. package/src/capture/import-pipeline.js +0 -46
  74. package/src/capture/importers.js +0 -387
  75. package/src/capture/index.js +0 -236
  76. package/src/capture/ingest-url.js +0 -252
  77. package/src/consolidation/index.js +0 -112
  78. package/src/core/categories.js +0 -72
  79. package/src/core/error-log.js +0 -54
  80. package/src/core/files.js +0 -108
  81. package/src/core/status.js +0 -350
  82. package/src/core/telemetry.js +0 -90
  83. package/src/index/db.js +0 -416
  84. package/src/index/index.js +0 -522
  85. package/src/index.js +0 -66
  86. package/src/retrieve/index.js +0 -500
  87. package/src/server/helpers.js +0 -44
  88. package/src/server/tools/clear-context.js +0 -47
  89. package/src/server/tools/context-status.js +0 -182
  90. package/src/server/tools/create-snapshot.js +0 -231
  91. package/src/server/tools/delete-context.js +0 -60
  92. package/src/server/tools/get-context.js +0 -678
  93. package/src/server/tools/ingest-project.js +0 -244
  94. package/src/server/tools/ingest-url.js +0 -88
  95. package/src/server/tools/list-buckets.js +0 -116
  96. package/src/server/tools/list-context.js +0 -163
  97. package/src/server/tools/save-context.js +0 -609
  98. package/src/server/tools/session-start.js +0 -285
  99. package/src/server/tools/submit-feedback.js +0 -55
  100. package/src/server/tools.js +0 -174
  101. package/src/sync/sync.js +0 -235
@@ -1,387 +0,0 @@
1
- /**
2
- * importers.js — Format detection + parsers for bulk import
3
- *
4
- * Detects and parses markdown, CSV/TSV, JSON, and plain text files into
5
- * the EntryData shape that captureAndIndex() accepts.
6
- *
7
- * No external dependencies — CSV parsed with split + quote handling,
8
- * markdown uses existing parseFrontmatter().
9
- */
10
-
11
- import { readdirSync, readFileSync, statSync } from "node:fs";
12
- import { join, extname, basename } from "node:path";
13
- import {
14
- parseFrontmatter,
15
- parseEntryFromMarkdown,
16
- } from "../core/frontmatter.js";
17
- import { dirToKind } from "../core/files.js";
18
-
19
- /**
20
- * Detect the format of a file by extension and content heuristics.
21
- * @param {string} filePath
22
- * @param {string} [content]
23
- * @returns {"markdown"|"csv"|"tsv"|"json"|"text"}
24
- */
25
- export function detectFormat(filePath, content) {
26
- const ext = extname(filePath).toLowerCase();
27
-
28
- if (ext === ".md" || ext === ".markdown") return "markdown";
29
- if (ext === ".csv") return "csv";
30
- if (ext === ".tsv") return "tsv";
31
- if (ext === ".json" || ext === ".jsonl") return "json";
32
-
33
- // Content-based heuristics if extension is ambiguous
34
- if (content) {
35
- const trimmed = content.trimStart();
36
- if (trimmed.startsWith("---\n")) return "markdown";
37
- if (trimmed.startsWith("[") || trimmed.startsWith("{")) return "json";
38
- }
39
-
40
- return "text";
41
- }
42
-
43
- /**
44
- * Parse a CSV line respecting quoted fields.
45
- * @param {string} line
46
- * @param {string} delimiter
47
- * @returns {string[]}
48
- */
49
- function parseCsvLine(line, delimiter) {
50
- const fields = [];
51
- let current = "";
52
- let inQuotes = false;
53
-
54
- for (let i = 0; i < line.length; i++) {
55
- const ch = line[i];
56
- if (inQuotes) {
57
- if (ch === '"') {
58
- if (i + 1 < line.length && line[i + 1] === '"') {
59
- current += '"';
60
- i++;
61
- } else {
62
- inQuotes = false;
63
- }
64
- } else {
65
- current += ch;
66
- }
67
- } else if (ch === '"') {
68
- inQuotes = true;
69
- } else if (ch === delimiter) {
70
- fields.push(current.trim());
71
- current = "";
72
- } else {
73
- current += ch;
74
- }
75
- }
76
- fields.push(current.trim());
77
- return fields;
78
- }
79
-
80
- const KNOWN_COLUMNS = new Set([
81
- "kind",
82
- "title",
83
- "body",
84
- "tags",
85
- "source",
86
- "identity_key",
87
- "expires_at",
88
- ]);
89
-
90
- /**
91
- * Parse a markdown file into EntryData.
92
- * Reuses parseFrontmatter + parseEntryFromMarkdown from core.
93
- *
94
- * @param {string} content
95
- * @param {{ kind?: string, source?: string }} [opts]
96
- * @returns {import("./import-pipeline.js").EntryData[]}
97
- */
98
- export function parseMarkdown(content, opts = {}) {
99
- const { meta: fmMeta, body: rawBody } = parseFrontmatter(content);
100
-
101
- // Derive kind from frontmatter or option
102
- const kind = fmMeta.kind || opts.kind || "insight";
103
- const parsed = parseEntryFromMarkdown(kind, rawBody, fmMeta);
104
-
105
- return [
106
- {
107
- kind,
108
- title: parsed.title || fmMeta.title || null,
109
- body: parsed.body || rawBody,
110
- tags: Array.isArray(fmMeta.tags) ? fmMeta.tags : undefined,
111
- meta: parsed.meta || undefined,
112
- source: fmMeta.source || opts.source || "import",
113
- identity_key: fmMeta.identity_key || undefined,
114
- expires_at: fmMeta.expires_at || undefined,
115
- },
116
- ];
117
- }
118
-
119
- /**
120
- * Parse a CSV or TSV file into EntryData[].
121
- * Header row required. Recognized columns map directly; unknown → meta.
122
- * Tags column is comma-separated within field.
123
- *
124
- * @param {string} content
125
- * @param {string} delimiter - "," for CSV, "\t" for TSV
126
- * @param {{ kind?: string, source?: string }} [opts]
127
- * @returns {import("./import-pipeline.js").EntryData[]}
128
- */
129
- export function parseCsv(content, delimiter, opts = {}) {
130
- const lines = content.split(/\r?\n/).filter((l) => l.trim());
131
- if (lines.length < 2) return [];
132
-
133
- const headers = parseCsvLine(lines[0], delimiter).map((h) =>
134
- h.toLowerCase().trim(),
135
- );
136
- const entries = [];
137
-
138
- for (let i = 1; i < lines.length; i++) {
139
- const values = parseCsvLine(lines[i], delimiter);
140
- if (values.every((v) => !v)) continue; // skip empty rows
141
-
142
- const entry = {
143
- kind: opts.kind || "insight",
144
- body: "",
145
- source: opts.source || "csv-import",
146
- };
147
- const meta = {};
148
-
149
- for (let j = 0; j < headers.length; j++) {
150
- const col = headers[j];
151
- const val = values[j] || "";
152
-
153
- if (col === "kind" && val) {
154
- entry.kind = val;
155
- } else if (col === "title" && val) {
156
- entry.title = val;
157
- } else if (col === "body" && val) {
158
- entry.body = val;
159
- } else if (col === "tags" && val) {
160
- entry.tags = val
161
- .split(",")
162
- .map((t) => t.trim())
163
- .filter(Boolean);
164
- } else if (col === "source" && val) {
165
- entry.source = val;
166
- } else if (col === "identity_key" && val) {
167
- entry.identity_key = val;
168
- } else if (col === "expires_at" && val) {
169
- entry.expires_at = val;
170
- } else if (val && !KNOWN_COLUMNS.has(col)) {
171
- meta[col] = val;
172
- }
173
- }
174
-
175
- if (!entry.body) continue; // skip rows with no body
176
- if (Object.keys(meta).length) entry.meta = meta;
177
- entries.push(entry);
178
- }
179
-
180
- return entries;
181
- }
182
-
183
- /**
184
- * Parse a JSON file into EntryData[].
185
- * Supports: array-of-entries, {entries:[...]}, or ChatGPT export format.
186
- *
187
- * @param {string} content
188
- * @param {{ kind?: string, source?: string }} [opts]
189
- * @returns {import("./import-pipeline.js").EntryData[]}
190
- */
191
- export function parseJson(content, opts = {}) {
192
- let data;
193
- try {
194
- data = JSON.parse(content);
195
- } catch {
196
- return [];
197
- }
198
-
199
- // Detect format
200
- let rawEntries;
201
-
202
- if (Array.isArray(data)) {
203
- // Array-of-entries OR ChatGPT export format
204
- if (
205
- data.length > 0 &&
206
- data[0].mapping &&
207
- data[0].create_time !== undefined
208
- ) {
209
- return parseChatGptExport(data, opts);
210
- }
211
- rawEntries = data;
212
- } else if (data && Array.isArray(data.entries)) {
213
- rawEntries = data.entries;
214
- } else {
215
- // Single entry object
216
- rawEntries = [data];
217
- }
218
-
219
- return rawEntries
220
- .filter((e) => e && typeof e === "object" && e.body)
221
- .map((e) => ({
222
- kind: e.kind || opts.kind || "insight",
223
- title: e.title || null,
224
- body: e.body,
225
- tags: Array.isArray(e.tags) ? e.tags : undefined,
226
- meta: e.meta && typeof e.meta === "object" ? e.meta : undefined,
227
- source: e.source || opts.source || "json-import",
228
- identity_key: e.identity_key || undefined,
229
- expires_at: e.expires_at || undefined,
230
- }));
231
- }
232
-
233
- /**
234
- * Parse ChatGPT export format (array of conversations with mapping + create_time).
235
- */
236
- function parseChatGptExport(conversations, opts = {}) {
237
- const entries = [];
238
-
239
- for (const conv of conversations) {
240
- if (!conv.title || !conv.mapping) continue;
241
-
242
- // Extract all assistant messages from the mapping
243
- const messages = Object.values(conv.mapping)
244
- .filter(
245
- (m) =>
246
- m.message?.author?.role === "assistant" &&
247
- m.message.content?.parts?.length,
248
- )
249
- .map((m) => m.message.content.parts.join("\n"))
250
- .filter(Boolean);
251
-
252
- if (!messages.length) continue;
253
-
254
- const body = messages.join("\n\n---\n\n");
255
- const created = conv.create_time
256
- ? new Date(conv.create_time * 1000).toISOString()
257
- : undefined;
258
-
259
- entries.push({
260
- kind: opts.kind || "conversation",
261
- title: conv.title,
262
- body,
263
- tags: ["chatgpt-import"],
264
- meta: { conversation_id: conv.id, created_at_original: created },
265
- source: opts.source || "chatgpt-export",
266
- });
267
- }
268
-
269
- return entries;
270
- }
271
-
272
- /**
273
- * Parse a plain text file into a single EntryData.
274
- *
275
- * @param {string} content
276
- * @param {string} filePath
277
- * @param {{ kind?: string, source?: string }} [opts]
278
- * @returns {import("./import-pipeline.js").EntryData[]}
279
- */
280
- export function parseText(content, filePath, opts = {}) {
281
- const trimmed = content.trim();
282
- if (!trimmed) return [];
283
-
284
- const name = basename(filePath, extname(filePath));
285
- const title = name
286
- .replace(/[-_]/g, " ")
287
- .replace(/\b\w/g, (c) => c.toUpperCase());
288
-
289
- return [
290
- {
291
- kind: opts.kind || "insight",
292
- title,
293
- body: trimmed,
294
- source: opts.source || "text-import",
295
- },
296
- ];
297
- }
298
-
299
- /**
300
- * Parse a single file (auto-detect format).
301
- *
302
- * @param {string} filePath
303
- * @param {string} content
304
- * @param {{ kind?: string, source?: string }} [opts]
305
- * @returns {import("./import-pipeline.js").EntryData[]}
306
- */
307
- export function parseFile(filePath, content, opts = {}) {
308
- const format = detectFormat(filePath, content);
309
-
310
- switch (format) {
311
- case "markdown":
312
- return parseMarkdown(content, opts);
313
- case "csv":
314
- return parseCsv(content, ",", opts);
315
- case "tsv":
316
- return parseCsv(content, "\t", opts);
317
- case "json":
318
- return parseJson(content, opts);
319
- case "text":
320
- return parseText(content, filePath, opts);
321
- default:
322
- return [];
323
- }
324
- }
325
-
326
- /**
327
- * Recursively parse a directory of files.
328
- * Walks subdirectories, filters by extension, infers kind from directory name.
329
- *
330
- * @param {string} dirPath
331
- * @param {{ kind?: string, source?: string, extensions?: string[] }} [opts]
332
- * @returns {import("./import-pipeline.js").EntryData[]}
333
- */
334
- export function parseDirectory(dirPath, opts = {}) {
335
- const extensions = opts.extensions || [
336
- ".md",
337
- ".markdown",
338
- ".csv",
339
- ".tsv",
340
- ".json",
341
- ".txt",
342
- ];
343
- const entries = [];
344
-
345
- function walk(dir, inferredKind) {
346
- let items;
347
- try {
348
- items = readdirSync(dir, { withFileTypes: true });
349
- } catch {
350
- return;
351
- }
352
-
353
- for (const item of items) {
354
- if (item.name.startsWith(".") || item.name.startsWith("_")) continue;
355
-
356
- const fullPath = join(dir, item.name);
357
-
358
- if (item.isDirectory()) {
359
- // Try to infer kind from directory name
360
- const kind =
361
- dirToKind(item.name) !== item.name
362
- ? dirToKind(item.name)
363
- : inferredKind;
364
- walk(fullPath, kind);
365
- } else if (item.isFile()) {
366
- const ext = extname(item.name).toLowerCase();
367
- if (!extensions.includes(ext)) continue;
368
-
369
- try {
370
- const content = readFileSync(fullPath, "utf-8");
371
- const fileOpts = { ...opts };
372
- if (inferredKind && !fileOpts.kind) fileOpts.kind = inferredKind;
373
- const parsed = parseFile(fullPath, content, fileOpts);
374
- entries.push(...parsed);
375
- } catch {
376
- // Skip unreadable files
377
- }
378
- }
379
- }
380
- }
381
-
382
- // Infer kind from the top-level directory name
383
- const topKind = opts.kind || undefined;
384
- walk(dirPath, topKind);
385
-
386
- return entries;
387
- }
@@ -1,236 +0,0 @@
1
- /**
2
- * Capture Layer — Public API
3
- *
4
- * Writes knowledge entries to vault as .md files and indexes them.
5
- * captureAndIndex() is the write-through entry point (capture + index + rollback on failure).
6
- */
7
-
8
- import { existsSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
9
- import { resolve } from "node:path";
10
- import { ulid, slugify, kindToPath } from "../core/files.js";
11
- import { categoryFor } from "../core/categories.js";
12
- import { parseFrontmatter, formatFrontmatter } from "../core/frontmatter.js";
13
- import { formatBody } from "./formatters.js";
14
- import { writeEntryFile } from "./file-ops.js";
15
- import { indexEntry } from "../index/index.js";
16
-
17
- export function writeEntry(
18
- ctx,
19
- {
20
- kind,
21
- title,
22
- body,
23
- meta,
24
- tags,
25
- source,
26
- folder,
27
- identity_key,
28
- expires_at,
29
- supersedes,
30
- source_files,
31
- tier,
32
- userId,
33
- },
34
- ) {
35
- if (!kind || typeof kind !== "string") {
36
- throw new Error("writeEntry: kind is required (non-empty string)");
37
- }
38
- if (!body || typeof body !== "string" || !body.trim()) {
39
- throw new Error("writeEntry: body is required (non-empty string)");
40
- }
41
- if (tags != null && !Array.isArray(tags)) {
42
- throw new Error("writeEntry: tags must be an array if provided");
43
- }
44
- if (meta != null && typeof meta !== "object") {
45
- throw new Error("writeEntry: meta must be an object if provided");
46
- }
47
-
48
- const category = categoryFor(kind);
49
-
50
- // Entity upsert: check for existing file at deterministic path
51
- let id;
52
- let createdAt;
53
- let updatedAt;
54
- if (category === "entity" && identity_key) {
55
- const identitySlug = slugify(identity_key);
56
- const dir = resolve(ctx.config.vaultDir, kindToPath(kind));
57
- const existingPath = resolve(dir, `${identitySlug}.md`);
58
-
59
- if (existsSync(existingPath)) {
60
- // Preserve original ID and created timestamp from existing file
61
- const raw = readFileSync(existingPath, "utf-8");
62
- const { meta: fmMeta } = parseFrontmatter(raw);
63
- id = fmMeta.id || ulid();
64
- createdAt = fmMeta.created || new Date().toISOString();
65
- updatedAt = new Date().toISOString();
66
- } else {
67
- id = ulid();
68
- createdAt = new Date().toISOString();
69
- updatedAt = createdAt;
70
- }
71
- } else {
72
- id = ulid();
73
- createdAt = new Date().toISOString();
74
- updatedAt = createdAt;
75
- }
76
-
77
- const filePath = writeEntryFile(ctx.config.vaultDir, kind, {
78
- id,
79
- title,
80
- body,
81
- meta,
82
- tags,
83
- source,
84
- createdAt,
85
- updatedAt,
86
- folder,
87
- category,
88
- identity_key,
89
- expires_at,
90
- supersedes,
91
- });
92
-
93
- return {
94
- id,
95
- filePath,
96
- kind,
97
- category,
98
- title,
99
- body,
100
- meta,
101
- tags,
102
- source,
103
- createdAt,
104
- updatedAt,
105
- identity_key,
106
- expires_at,
107
- supersedes,
108
- source_files: source_files || null,
109
- tier: tier || null,
110
- userId: userId || null,
111
- };
112
- }
113
-
114
- /**
115
- * Update an existing entry's file on disk (merge provided fields with existing).
116
- * Does NOT re-index — caller must call indexEntry after.
117
- *
118
- * @param {{ config, stmts }} ctx
119
- * @param {object} existing — Row from vault table (from getEntryById)
120
- * @param {{ title?, body?, tags?, meta?, source?, expires_at? }} updates
121
- * @returns {object} Entry object suitable for indexEntry
122
- */
123
- export function updateEntryFile(ctx, existing, updates) {
124
- const raw = readFileSync(existing.file_path, "utf-8");
125
- const { meta: fmMeta } = parseFrontmatter(raw);
126
-
127
- const existingMeta = existing.meta ? JSON.parse(existing.meta) : {};
128
- const existingTags = existing.tags ? JSON.parse(existing.tags) : [];
129
-
130
- const title = updates.title !== undefined ? updates.title : existing.title;
131
- const body = updates.body !== undefined ? updates.body : existing.body;
132
- const tags = updates.tags !== undefined ? updates.tags : existingTags;
133
- const source =
134
- updates.source !== undefined ? updates.source : existing.source;
135
- const expires_at =
136
- updates.expires_at !== undefined ? updates.expires_at : existing.expires_at;
137
- const supersedes =
138
- updates.supersedes !== undefined
139
- ? updates.supersedes
140
- : fmMeta.supersedes || null;
141
- const source_files =
142
- updates.source_files !== undefined
143
- ? updates.source_files
144
- : existing.source_files
145
- ? JSON.parse(existing.source_files)
146
- : null;
147
-
148
- let mergedMeta;
149
- if (updates.meta !== undefined) {
150
- mergedMeta = { ...existingMeta, ...(updates.meta || {}) };
151
- } else {
152
- mergedMeta = { ...existingMeta };
153
- }
154
-
155
- // Build frontmatter
156
- const now = new Date().toISOString();
157
- const fmFields = { id: existing.id };
158
- for (const [k, v] of Object.entries(mergedMeta)) {
159
- if (k === "folder") continue;
160
- if (v !== null && v !== undefined) fmFields[k] = v;
161
- }
162
- if (existing.identity_key) fmFields.identity_key = existing.identity_key;
163
- if (expires_at) fmFields.expires_at = expires_at;
164
- if (supersedes?.length) fmFields.supersedes = supersedes;
165
- fmFields.tags = tags;
166
- fmFields.source = source || "claude-code";
167
- fmFields.created = fmMeta.created || existing.created_at;
168
- if (now !== fmFields.created) fmFields.updated = now;
169
-
170
- const mdBody = formatBody(existing.kind, { title, body, meta: mergedMeta });
171
- const md = formatFrontmatter(fmFields) + mdBody;
172
-
173
- writeFileSync(existing.file_path, md);
174
-
175
- const finalMeta = Object.keys(mergedMeta).length ? mergedMeta : undefined;
176
-
177
- return {
178
- id: existing.id,
179
- filePath: existing.file_path,
180
- kind: existing.kind,
181
- category: existing.category,
182
- title,
183
- body,
184
- meta: finalMeta,
185
- tags,
186
- source,
187
- createdAt: fmMeta.created || existing.created_at,
188
- updatedAt: now,
189
- identity_key: existing.identity_key,
190
- expires_at,
191
- supersedes,
192
- source_files: source_files || null,
193
- userId: existing.user_id || null,
194
- };
195
- }
196
-
197
- export async function captureAndIndex(ctx, data) {
198
- // For entity upserts, preserve previous file content for safe rollback
199
- let previousContent = null;
200
- if (categoryFor(data.kind) === "entity" && data.identity_key) {
201
- const identitySlug = slugify(data.identity_key);
202
- const dir = resolve(ctx.config.vaultDir, kindToPath(data.kind));
203
- const existingPath = resolve(dir, `${identitySlug}.md`);
204
- if (existsSync(existingPath)) {
205
- previousContent = readFileSync(existingPath, "utf-8");
206
- }
207
- }
208
-
209
- const entry = writeEntry(ctx, data);
210
- try {
211
- await indexEntry(ctx, entry);
212
- // Apply supersedes: mark referenced entries as superseded by this entry
213
- if (entry.supersedes?.length && ctx.stmts.updateSupersededBy) {
214
- for (const supersededId of entry.supersedes) {
215
- if (typeof supersededId === "string" && supersededId.trim()) {
216
- ctx.stmts.updateSupersededBy.run(entry.id, supersededId.trim());
217
- }
218
- }
219
- }
220
- return entry;
221
- } catch (err) {
222
- // Rollback: restore previous content for entity upserts, delete for new entries
223
- if (previousContent) {
224
- try {
225
- writeFileSync(entry.filePath, previousContent);
226
- } catch {}
227
- } else {
228
- try {
229
- unlinkSync(entry.filePath);
230
- } catch {}
231
- }
232
- throw new Error(
233
- `Capture succeeded but indexing failed — file rolled back. ${err.message}`,
234
- );
235
- }
236
- }