clitrigger 0.1.12 → 0.1.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +6 -3
  2. package/README_KR.md +6 -3
  3. package/dist/client/assets/index-BDEcscfu.css +32 -0
  4. package/dist/client/assets/index-uvmPjh-j.js +654 -0
  5. package/dist/client/index.html +2 -2
  6. package/dist/server/db/queries.d.ts +39 -3
  7. package/dist/server/db/queries.d.ts.map +1 -1
  8. package/dist/server/db/queries.js +118 -3
  9. package/dist/server/db/queries.js.map +1 -1
  10. package/dist/server/db/schema.d.ts.map +1 -1
  11. package/dist/server/db/schema.js +65 -0
  12. package/dist/server/db/schema.js.map +1 -1
  13. package/dist/server/index.d.ts.map +1 -1
  14. package/dist/server/index.js +2 -0
  15. package/dist/server/index.js.map +1 -1
  16. package/dist/server/routes/favorites.d.ts +3 -0
  17. package/dist/server/routes/favorites.d.ts.map +1 -0
  18. package/dist/server/routes/favorites.js +201 -0
  19. package/dist/server/routes/favorites.js.map +1 -0
  20. package/dist/server/routes/memory.d.ts.map +1 -1
  21. package/dist/server/routes/memory.js +397 -4
  22. package/dist/server/routes/memory.js.map +1 -1
  23. package/dist/server/routes/projects.js +2 -2
  24. package/dist/server/routes/projects.js.map +1 -1
  25. package/dist/server/routes/sessions.d.ts.map +1 -1
  26. package/dist/server/routes/sessions.js +24 -2
  27. package/dist/server/routes/sessions.js.map +1 -1
  28. package/dist/server/services/claude-manager.d.ts +18 -1
  29. package/dist/server/services/claude-manager.d.ts.map +1 -1
  30. package/dist/server/services/claude-manager.js +99 -5
  31. package/dist/server/services/claude-manager.js.map +1 -1
  32. package/dist/server/services/discussion-orchestrator.d.ts.map +1 -1
  33. package/dist/server/services/discussion-orchestrator.js +20 -0
  34. package/dist/server/services/discussion-orchestrator.js.map +1 -1
  35. package/dist/server/services/memory-ingest.d.ts +36 -0
  36. package/dist/server/services/memory-ingest.d.ts.map +1 -0
  37. package/dist/server/services/memory-ingest.js +706 -0
  38. package/dist/server/services/memory-ingest.js.map +1 -0
  39. package/dist/server/services/memory-injector.d.ts.map +1 -1
  40. package/dist/server/services/memory-injector.js +32 -2
  41. package/dist/server/services/memory-injector.js.map +1 -1
  42. package/dist/server/services/memory-wikilinks.d.ts +34 -0
  43. package/dist/server/services/memory-wikilinks.d.ts.map +1 -0
  44. package/dist/server/services/memory-wikilinks.js +86 -0
  45. package/dist/server/services/memory-wikilinks.js.map +1 -0
  46. package/dist/server/services/orchestrator.d.ts.map +1 -1
  47. package/dist/server/services/orchestrator.js +8 -0
  48. package/dist/server/services/orchestrator.js.map +1 -1
  49. package/dist/server/services/session-manager.d.ts +22 -1
  50. package/dist/server/services/session-manager.d.ts.map +1 -1
  51. package/dist/server/services/session-manager.js +82 -5
  52. package/dist/server/services/session-manager.js.map +1 -1
  53. package/dist/server/websocket/broadcaster.d.ts +11 -0
  54. package/dist/server/websocket/broadcaster.d.ts.map +1 -1
  55. package/dist/server/websocket/broadcaster.js +67 -0
  56. package/dist/server/websocket/broadcaster.js.map +1 -1
  57. package/dist/server/websocket/events.d.ts +26 -0
  58. package/dist/server/websocket/events.d.ts.map +1 -1
  59. package/dist/server/websocket/index.d.ts.map +1 -1
  60. package/dist/server/websocket/index.js +45 -2
  61. package/dist/server/websocket/index.js.map +1 -1
  62. package/package.json +1 -1
  63. package/dist/client/assets/index-CcsvxPmx.css +0 -1
  64. package/dist/client/assets/index-qSpSlrcM.js +0 -606
@@ -0,0 +1,706 @@
1
+ import { spawn } from 'child_process';
2
+ import fs from 'fs';
3
+ import path from 'path';
4
+ import * as queries from '../db/queries.js';
5
+ import { broadcaster } from '../websocket/broadcaster.js';
6
+ import { debugLogger } from './debug-logger.js';
7
// Directory created inside each project to hold raw ingest snapshots.
const RAW_DIR_NAME = '.clitrigger';
// Subdirectory of RAW_DIR_NAME under which snapshots are grouped by source type.
const RAW_SUBDIR = 'raw';
// Source kinds that are allowed to produce raw snapshots.
const VALID_SOURCE_TYPES = new Set(['todo', 'discussion', 'manual']);
10
/**
 * Make sure `entry` is listed in the project's .gitignore, creating or
 * appending to the file as needed. Any filesystem error is swallowed —
 * keeping snapshots out of git is best-effort.
 */
function ensureGitignore(projectPath, entry) {
    const ignoreFile = path.join(projectPath, '.gitignore');
    try {
        let existing = '';
        if (fs.existsSync(ignoreFile)) {
            existing = fs.readFileSync(ignoreFile, 'utf-8');
        }
        const alreadyListed = existing.split(/\r?\n/).some((line) => line.trim() === entry);
        if (alreadyListed)
            return;
        // Add a separating newline only when the file is non-empty and unterminated.
        const separator = existing.length > 0 && !existing.endsWith('\n') ? '\n' : '';
        fs.appendFileSync(ignoreFile, `${separator}${entry}\n`);
    }
    catch {
        // Non-fatal: the snapshot is still written even if .gitignore can't be updated.
    }
}
24
/**
 * Turn arbitrary text into a filesystem-safe slug: strips characters that are
 * invalid in filenames, collapses whitespace and dash runs into single dashes,
 * trims leading/trailing dashes and dots, and truncates to `maxLen` characters.
 * Falls back to 'untitled' for empty input or an empty result.
 */
function slugify(input, maxLen = 40) {
    if (!input)
        return 'untitled';
    let slug = input.replace(/[\\/:*?"<>|]/g, '');
    slug = slug.replace(/\s+/g, '-');
    slug = slug.replace(/-+/g, '-');
    slug = slug.replace(/^[-.]+|[-.]+$/g, '');
    slug = slug.slice(0, maxLen);
    return slug || 'untitled';
}
35
/**
 * Format a Date as a compact local timestamp: YYYYMMDD-HHMMSS.
 */
function timestampStr(d = new Date()) {
    const two = (n) => String(n).padStart(2, '0');
    const datePart = `${d.getFullYear()}${two(d.getMonth() + 1)}${two(d.getDate())}`;
    const timePart = `${two(d.getHours())}${two(d.getMinutes())}${two(d.getSeconds())}`;
    return `${datePart}-${timePart}`;
}
39
/**
 * Write the raw source text to <projectPath>/.clitrigger/raw/<sourceType>/<file>.md
 * and return the project-relative path. Returns null on failure (non-fatal).
 *
 * @param project    Project row; only `path` is read here.
 * @param sourceType One of VALID_SOURCE_TYPES ('todo' | 'discussion' | 'manual').
 * @param sourceId   Optional source row id; its first 8 chars go into the filename.
 * @param fullText   Raw text persisted verbatim.
 * @param titleHint  Optional human-readable hint used for the filename slug.
 * @returns project-relative path with forward slashes, or null on any failure.
 */
function writeRawSnapshot(project, sourceType, sourceId, fullText, titleHint) {
    // Reject unknown source types and projects without a filesystem path.
    if (!VALID_SOURCE_TYPES.has(sourceType))
        return null;
    if (!project.path)
        return null;
    try {
        const baseDir = path.join(project.path, RAW_DIR_NAME, RAW_SUBDIR, sourceType);
        fs.mkdirSync(baseDir, { recursive: true });
        // Keep snapshots out of version control (best-effort).
        ensureGitignore(project.path, `${RAW_DIR_NAME}/`);
        const ts = timestampStr();
        const idPart = sourceId ? `-${sourceId.slice(0, 8)}` : '';
        const slug = slugify(titleHint || sourceType);
        const filename = `${ts}${idPart}-${slug}.md`;
        const filePath = path.join(baseDir, filename);
        // Defensive: ensure final resolved path is still inside baseDir
        // (guards against traversal sequences surviving in sourceId/titleHint).
        const resolvedFinal = path.resolve(filePath);
        const resolvedBase = path.resolve(baseDir);
        if (!resolvedFinal.startsWith(resolvedBase + path.sep) && resolvedFinal !== resolvedBase) {
            return null;
        }
        fs.writeFileSync(filePath, fullText, 'utf-8');
        // Normalize separators so the stored relative path is platform-independent.
        const rel = path.relative(project.path, filePath).split(path.sep).join('/');
        return rel;
    }
    catch (err) {
        console.warn('[memory-ingest] writeRawSnapshot failed:', err);
        return null;
    }
}
72
// Seed content for the per-project "Wiki Schema" node. Used verbatim when a
// project has no schema node yet; otherwise the stored node's body wins
// (see getOrCreateSchemaNode).
const DEFAULT_WIKI_SCHEMA = `# Wiki Schema

## Entity Types
- **Feature** — product capabilities and implemented behaviors
- **Decision** — architectural/design choices with rationale
- **Bug** — known issues, root causes, and workarounds
- **Pattern** — reusable code/design patterns
- **Concept** — domain knowledge and terminology

## Conventions
- Titles: short noun phrases (≤60 chars)
- Body: 2-5 sentences, factual, no filler
- **Use [[Title]] wikilinks liberally inside body text** — every time the body mentions another node by name, wrap it as [[Title]]. This is the primary way connections are made.
- Tags: first tag should be the entity type
- Prefer updating existing nodes over creating new duplicates
- Connections are the value of a wiki — a node with no inbound or outbound links is nearly useless. Always relate new entries to existing ones.`;
// Internal tag marking the schema node; nodes carrying it are hidden from
// node summaries and lint.
const WIKI_SCHEMA_TAG = '__wiki_schema__';
89
/**
 * Remove a surrounding ``` / ```json code fence from model output, if present.
 * Returns the trimmed inner text, or the trimmed input when no fence wraps it.
 */
function stripCodeFences(text) {
    const body = text.trim();
    const fence = body.match(/^```(?:json)?\s*([\s\S]*?)\s*```$/);
    if (fence)
        return fence[1].trim();
    return body;
}
94
/**
 * Parse a model response into an ingest operation ({create, update, edges}).
 * Tolerates code fences and surrounding prose by falling back to the first
 * {...} span in the text. Missing or non-array sections become empty arrays.
 *
 * @returns { op, parseFailed } — parseFailed is true when no usable JSON
 *          object could be decoded from the response.
 */
function safeParseIngestOp(raw) {
    const emptyOp = { create: [], update: [], edges: [] };
    const cleaned = stripCodeFences(raw);
    const tryParse = (text) => {
        try {
            return { ok: true, value: JSON.parse(text) };
        }
        catch {
            return { ok: false };
        }
    };
    let result = tryParse(cleaned);
    if (!result.ok) {
        // Salvage: grab the outermost-looking {...} span and retry.
        const braceSpan = cleaned.match(/\{[\s\S]*\}/);
        if (!braceSpan)
            return { op: emptyOp, parseFailed: true };
        result = tryParse(braceSpan[0]);
        if (!result.ok)
            return { op: emptyOp, parseFailed: true };
    }
    const parsed = result.value;
    if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
        return { op: emptyOp, parseFailed: true };
    }
    return {
        op: {
            create: Array.isArray(parsed.create) ? parsed.create : [],
            update: Array.isArray(parsed.update) ? parsed.update : [],
            edges: Array.isArray(parsed.edges) ? parsed.edges : [],
        },
        parseFailed: false,
    };
}
121
/**
 * Parse a model response into a list of lint issues. Tolerates code fences and
 * prose around the JSON array; entries with unknown issue types or without a
 * non-empty message are dropped, and at most 10 issues are returned.
 */
function safeParseLintIssues(raw) {
    const cleaned = stripCodeFences(raw);
    let parsed;
    try {
        parsed = JSON.parse(cleaned);
    }
    catch {
        // Salvage: the first [...] span in an otherwise chatty response.
        const arraySpan = cleaned.match(/\[[\s\S]*\]/);
        if (!arraySpan)
            return [];
        try {
            parsed = JSON.parse(arraySpan[0]);
        }
        catch {
            return [];
        }
    }
    if (!Array.isArray(parsed))
        return [];
    const knownTypes = ['contradiction', 'orphan', 'duplicate', 'stale'];
    const issues = [];
    for (const entry of parsed) {
        if (!entry || typeof entry !== 'object')
            continue;
        if (!knownTypes.includes(String(entry.type)))
            continue;
        const message = typeof entry.message === 'string' ? entry.message.trim() : '';
        if (!message)
            continue;
        issues.push({
            type: entry.type,
            node_titles: Array.isArray(entry.node_titles) ? entry.node_titles.map(String) : [],
            message,
        });
        if (issues.length === 10)
            break;
    }
    return issues;
}
152
/**
 * Map a CLI tool name to the command and argument list used for a headless
 * (non-interactive) run. Unrecognized tools fall back to claude.
 */
function buildInvocation(cliTool) {
    if (cliTool === 'gemini') {
        return { command: 'gemini', args: ['--yolo', '--prompt='] };
    }
    if (cliTool === 'codex') {
        return { command: 'codex', args: ['exec'] };
    }
    return { command: 'claude', args: ['--print'] };
}
160
/**
 * Open a debug session for a memory-ingest or lint run when the project has debug_logging enabled.
 * Reuses the existing `.debug-logs/` directory and rotation policy.
 * The synthetic todoId encodes intent (`mem-{kind}-{sourceType}-{sourceId|ts}`) so logs sort/filter
 * cleanly alongside real todo logs.
 */
function startDebugSession(project, cliTool, sourceType, sourceId, kind) {
    if (!project.debug_logging || !project.path)
        return undefined;
    const invocation = buildInvocation(cliTool);
    // Prefer a stable id fragment; fall back to a base36 timestamp.
    const suffix = sourceId ? sourceId.slice(0, 8) : Date.now().toString(36);
    const syntheticTodoId = `mem-${kind}-${sourceType ?? 'manual'}-${suffix}`;
    try {
        return debugLogger.startSession({
            todoId: syntheticTodoId,
            projectPath: project.path,
            cliTool,
            command: invocation.command,
            args: invocation.args,
            workDir: process.env.HOME || process.env.USERPROFILE || '.',
        });
    }
    catch (err) {
        // A broken debug logger must never block the actual run.
        console.warn('[memory-ingest] failed to open debug session:', err);
        return undefined;
    }
}
188
/**
 * Run the selected CLI tool headlessly: spawn it, write `prompt` to stdin,
 * and resolve with the captured stdout when the process exits with code 0.
 * Rejects on spawn error, non-zero exit, or timeout (the process is killed).
 *
 * A single `settled` flag guarantees exactly one resolve/reject and exactly
 * one debugSession.finalize() across the competing timer / 'error' / 'close' /
 * stdin-write failure paths.
 *
 * @param cliTool      'claude' | 'gemini' | 'codex' (see buildInvocation).
 * @param prompt       Prompt text piped to the child's stdin (newline appended).
 * @param timeoutMs    Deadline in ms before the run is killed and rejected.
 * @param debugSession Optional debug-logger session; receives teed stdout/stderr,
 *                     the stdin prompt, and finalize(exitCode) on settle.
 * @returns {Promise<string>} the child's full stdout.
 */
function runHeadless(cliTool, prompt, timeoutMs = 180_000, debugSession) {
    return new Promise((resolve, reject) => {
        const { command, args } = buildInvocation(cliTool);
        // On Windows, route through cmd.exe so .cmd shims resolve.
        const isWin = process.platform === 'win32';
        const spawnCmd = isWin ? 'cmd.exe' : command;
        const spawnArgs = isWin ? ['/c', command, ...args] : args;
        let stdout = '';
        let stderr = '';
        let settled = false;
        // NOTE(review): cwd is the home directory rather than the project —
        // presumably to keep the CLI away from the project working tree; confirm.
        const proc = spawn(spawnCmd, spawnArgs, {
            stdio: ['pipe', 'pipe', 'pipe'],
            env: { ...process.env },
            cwd: process.env.HOME || process.env.USERPROFILE || '.',
        });
        if (debugSession) {
            // tee returns a passthrough we don't need; the side-effect is appending to the debug log file.
            debugSession.teeStdout(proc.stdout);
            debugSession.teeStderr(proc.stderr);
        }
        // Timeout path: kill the child, finalize logging, reject once.
        const timer = setTimeout(() => {
            if (settled)
                return;
            settled = true;
            try {
                proc.kill();
            }
            catch { /* ignore */ }
            try {
                debugSession?.finalize(-1);
            }
            catch { /* ignore */ }
            reject(new Error('Memory ingest timed out'));
        }, timeoutMs);
        proc.stdout.on('data', (c) => { stdout += c.toString('utf8'); });
        proc.stderr.on('data', (c) => { stderr += c.toString('utf8'); });
        // Spawn failure (e.g. command not found).
        proc.on('error', (err) => {
            if (settled)
                return;
            settled = true;
            clearTimeout(timer);
            try {
                debugSession?.finalize(-1);
            }
            catch { /* ignore */ }
            reject(err);
        });
        // Normal completion: code 0 → stdout; otherwise reject with a stderr head.
        proc.on('close', (code) => {
            if (settled)
                return;
            settled = true;
            clearTimeout(timer);
            try {
                debugSession?.finalize(code ?? 0);
            }
            catch { /* ignore */ }
            if (code === 0)
                resolve(stdout);
            else
                reject(new Error(`CLI exited with code ${code}: ${stderr.trim().slice(0, 300)}`));
        });
        try {
            // Deliver the prompt and close stdin so the CLI starts processing.
            proc.stdin.write(prompt + '\n');
            proc.stdin.end();
            try {
                debugSession?.writeStdin(prompt);
            }
            catch { /* ignore */ }
        }
        catch (err) {
            // stdin writes can throw if the process died immediately.
            if (settled)
                return;
            settled = true;
            clearTimeout(timer);
            try {
                debugSession?.finalize(-1);
            }
            catch { /* ignore */ }
            try {
                proc.kill();
            }
            catch { /* ignore */ }
            reject(err instanceof Error ? err : new Error(String(err)));
        }
    });
}
273
/**
 * Normalize a stored cli_tool value to a supported tool name,
 * defaulting to 'claude' for anything unrecognized.
 */
function resolveCliTool(value) {
    const supported = ['claude', 'gemini', 'codex'];
    return supported.includes(value) ? value : 'claude';
}
278
/**
 * Return the wiki schema text for a project. Looks for an existing node
 * tagged WIKI_SCHEMA_TAG; if none exists, creates a pinned "Wiki Schema"
 * node seeded with DEFAULT_WIKI_SCHEMA and returns the default text.
 */
function getOrCreateSchemaNode(projectId) {
    const nodes = queries.getMemoryNodesByProjectId(projectId);
    const isSchemaNode = (node) => {
        try {
            const tags = JSON.parse(node.tags ?? '[]');
            return Array.isArray(tags) && tags.includes(WIKI_SCHEMA_TAG);
        }
        catch {
            // Unparsable tags can never mark the schema node.
            return false;
        }
    };
    const schemaNode = nodes.find(isSchemaNode);
    if (schemaNode)
        return schemaNode.body || DEFAULT_WIKI_SCHEMA;
    queries.createMemoryNode(projectId, 'Wiki Schema', DEFAULT_WIKI_SCHEMA, JSON.stringify([WIKI_SCHEMA_TAG]), 1);
    return DEFAULT_WIKI_SCHEMA;
}
294
/**
 * Render a compact one-line-per-node summary of the wiki for prompt context.
 * The internal schema node (WIKI_SCHEMA_TAG) is excluded; pinned nodes get a
 * marker plus a 300-char body preview. Returns '(no existing pages)' when
 * nothing is visible.
 */
function buildNodeSummary(nodes) {
    const notSchema = (node) => {
        try {
            const tags = JSON.parse(node.tags ?? '[]');
            return !Array.isArray(tags) || !tags.includes(WIKI_SCHEMA_TAG);
        }
        catch {
            // Unparsable tags: keep the node visible.
            return true;
        }
    };
    const visible = nodes.filter(notSchema);
    if (visible.length === 0)
        return '(no existing pages)';
    const lines = visible.map((node) => {
        try {
            const tags = JSON.parse(node.tags ?? '[]');
            const tagStr = tags.filter((t) => t !== WIKI_SCHEMA_TAG).join(', ');
            const pinned = node.pinned ? ' [pinned]' : '';
            const bodyPreview = node.pinned ? `\n ${(node.body || '').slice(0, 300)}` : '';
            return `- id="${node.id}" title="${node.title}"${tagStr ? ` tags=[${tagStr}]` : ''}${pinned}${bodyPreview}`;
        }
        catch {
            // Unparsable or non-array tags: fall back to the bare id/title line.
            return `- id="${node.id}" title="${node.title}"`;
        }
    });
    return lines.join('\n');
}
319
// Prompt template for one ingest pass. Placeholders {CHUNK_PREAMBLE}, {SCHEMA},
// {NODES}, {CHUNK_NOTE}, {SOURCE} are substituted in ingestSource(); a
// locale-specific language rule is additionally spliced in after "Rules:\n".
const INGEST_PROMPT_HEADER = `You are maintaining a project knowledge wiki using the LLM Wiki pattern.{CHUNK_PREAMBLE}

## Wiki Schema
{SCHEMA}

## Existing Wiki Pages
{NODES}

## New Source Material{CHUNK_NOTE}
{SOURCE}

---

Analyze the source material and output ONLY a JSON object (no prose, no code fences):
{
"create": [{"title": "string", "body": "string", "tags": ["string"]}],
"update": [{"id": "string", "body": "string", "tags": ["string"]}],
"edges": [{"from_title": "string", "to_title": "string", "relation_type": "related", "label": "string"}]
}

Rules:
- 0-10 total create+update operations. Quality over quantity. Skip if nothing new.
- Match existing nodes by title (case-insensitive) before creating a new one.
- body: 2-5 sentences, factual, no filler. Markdown allowed.
- **Inside body, wrap every reference to another node as [[Exact Title]].** When you mention a node that exists in "Existing Wiki Pages" or that you are creating in this batch, write [[Title]] instead of plain text. Aim for 1-3 wikilinks per body when relevant nodes exist. Bodies that mention concepts but don't link them are low quality.
- tags[0] must be an entity type from the schema (Feature/Decision/Bug/Pattern/Concept).
- edges.relation_type: one of related|precedes|example_of|counter_example|refines
- related: generic association
- precedes: A comes before B in time/sequence/dependency
- example_of: A is a concrete instance of pattern/concept B
- counter_example: A contradicts or is rejected in favor of B
- refines: A is a more specific or improved version of B
- **Edges are the value of a wiki — generate them aggressively.** For each new or updated node, link it to at least one existing or newly-created node when topically related. If multiple relations apply (A is both an example_of and precedes B), pick the most specific one. from_title/to_title must match existing or newly created nodes exactly. Do not invent titles.
- A node with zero inbound and outbound connections (no edges, no wikilinks pointing to or from it) is a code smell — fix it before output.
- If nothing worth extracting, return {"create": [], "update": [], "edges": []}`;
354
// Prompt template for a wiki lint pass. {NODES} is substituted in lintWiki()
// with (at most ~12k chars of) rendered node text.
const LINT_PROMPT_HEADER = `You are auditing a project knowledge wiki for quality issues.

## Wiki Pages
{NODES}

---

Output ONLY a JSON array (no prose, no code fences):
[{"type": "contradiction|orphan|duplicate|stale", "node_titles": ["title1", "title2"], "message": "short description"}]

Issue types:
- contradiction: two nodes make conflicting claims
- orphan: node has no connections and seems isolated/useless
- duplicate: two nodes cover the same topic and should be merged
- stale: node references something that seems outdated or removed

Rules:
- Maximum 10 issues. Only flag real problems.
- Return [] if the wiki looks healthy.`;
373
// Target size (in chars) of each chunk sent to the CLI.
const CHUNK_CHARS = 7000;
// Sources at or below this length are sent as a single chunk (hysteresis so
// text only slightly above CHUNK_CHARS is not split).
const CHUNK_THRESHOLD = 8000;
// Hard cap on chunks per ingest; content beyond this is dropped.
const MAX_CHUNKS = 4;
// Edge relation types accepted from the model; anything else degrades to 'related'.
const VALID_RELATIONS = new Set(['related', 'precedes', 'example_of', 'counter_example', 'refines']);
/**
 * Split a long source into chunks at paragraph boundaries. Hard-splits any single paragraph
 * that exceeds maxChars. Caps total chunks at maxChunks (later content is dropped).
 * Returns a single-element array when text fits without splitting.
 */
function chunkSourceText(text, maxChars, maxChunks) {
    if (text.length <= CHUNK_THRESHOLD)
        return [text];
    const chunks = [];
    let pending = '';
    for (const rawPara of text.split(/\n\s*\n/)) {
        const para = rawPara.trim();
        if (!para)
            continue;
        if (chunks.length >= maxChunks)
            break;
        // Paragraph fits into the chunk currently under construction.
        if (pending.length + para.length + 2 <= maxChars) {
            pending = pending ? `${pending}\n\n${para}` : para;
            continue;
        }
        // Flush the partial chunk before placing this paragraph.
        if (pending) {
            chunks.push(pending);
            pending = '';
            if (chunks.length >= maxChunks)
                break;
        }
        if (para.length > maxChars) {
            // Oversized paragraph: hard-split into maxChars slices.
            for (let offset = 0; offset < para.length && chunks.length < maxChunks; offset += maxChars) {
                chunks.push(para.slice(offset, offset + maxChars));
            }
        }
        else {
            pending = para;
        }
    }
    if (pending && chunks.length < maxChunks)
        chunks.push(pending);
    return chunks.slice(0, maxChunks);
}
417
/**
 * Apply a parsed ingest operation to the database, mutating `ctx` counters.
 *
 * Caps per call: 10 creates, 10 updates, 20 edges. Creates dedupe against
 * `ctx.titleToId` (case-insensitive titles, 120-char max); updates must
 * reference an id present in `existingNodes`; edges must connect titles
 * already in the map (existing or created this call), and self-edges drop.
 *
 * @param ctx           Mutable ingest context (projectId, titleToId, counters, …).
 * @param op            {create, update, edges} arrays from safeParseIngestOp.
 * @param existingNodes Node rows fetched before this chunk ran.
 */
function applyIngestOp(ctx, op, existingNodes) {
    // Record how much the model proposed before any filtering, for diagnostics.
    ctx.skipped.proposedCreate += op.create.length;
    ctx.skipped.proposedUpdate += op.update.length;
    ctx.skipped.proposedEdges += op.edges.length;
    for (const c of op.create.slice(0, 10)) {
        if (!c.title?.trim()) {
            ctx.skipped.emptyTitle++;
            continue;
        }
        const title = String(c.title).trim().slice(0, 120);
        // Case-insensitive dedupe against existing and just-created nodes.
        if (ctx.titleToId.has(title.toLowerCase())) {
            ctx.skipped.duplicateTitle++;
            continue;
        }
        try {
            const tags = Array.isArray(c.tags) ? JSON.stringify(c.tags.map(String).filter(Boolean)) : null;
            const node = queries.createMemoryNode(ctx.projectId, title, typeof c.body === 'string' ? c.body : '', tags, 0, ctx.sourceType, ctx.sourceId, ctx.rawPath);
            // Make the new node visible to later edges/dedup in this batch.
            ctx.titleToId.set(title.toLowerCase(), node.id);
            ctx.createdIds.push(node.id);
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            // NOTE(review): non-UNIQUE failures are also counted under
            // uniqueConflict (with a warning) — there is no separate counter.
            if (msg.includes('UNIQUE'))
                ctx.skipped.uniqueConflict++;
            else {
                ctx.skipped.uniqueConflict++;
                console.warn('[memory-ingest] createMemoryNode failed:', msg);
            }
        }
    }
    for (const u of op.update.slice(0, 10)) {
        if (!u.id) {
            ctx.skipped.invalidUpdateId++;
            continue;
        }
        // Updates may only target nodes that existed before this chunk ran.
        const existing = existingNodes.find(n => n.id === u.id);
        if (!existing) {
            ctx.skipped.invalidUpdateId++;
            continue;
        }
        // Only body and tags are updatable; anything else from the model is ignored.
        const upd = {};
        if (typeof u.body === 'string')
            upd.body = u.body;
        if (Array.isArray(u.tags))
            upd.tags = JSON.stringify(u.tags.map(String).filter(Boolean));
        if (Object.keys(upd).length > 0) {
            queries.updateMemoryNode(u.id, upd);
            ctx.updatedIds.add(u.id);
        }
    }
    for (const e of op.edges.slice(0, 20)) {
        const fromId = ctx.titleToId.get(String(e.from_title || '').toLowerCase());
        const toId = ctx.titleToId.get(String(e.to_title || '').toLowerCase());
        if (!fromId || !toId) {
            ctx.skipped.invalidEdgeRef++;
            continue;
        }
        if (fromId === toId) {
            ctx.skipped.selfEdge++;
            continue;
        }
        // Unknown relation types degrade to the generic 'related'.
        const rt = VALID_RELATIONS.has(e.relation_type ?? '') ? e.relation_type : 'related';
        try {
            queries.createMemoryEdge(ctx.projectId, fromId, toId, rt, e.label ?? null);
            ctx.edgesAdded++;
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            // Same counting caveat as node creation: all failures land in
            // edgeUniqueConflict, non-UNIQUE ones with a warning.
            if (msg.includes('UNIQUE'))
                ctx.skipped.edgeUniqueConflict++;
            else {
                ctx.skipped.edgeUniqueConflict++;
                console.warn('[memory-ingest] createMemoryEdge failed:', msg);
            }
        }
    }
}
494
/**
 * Ingest a source document into the project's memory wiki.
 *
 * Pipeline: snapshot the raw text to disk (best-effort) → load or seed the
 * wiki schema node → split the source into chunks → for each chunk, build a
 * prompt from the current wiki state and run the project's CLI tool headlessly
 * → parse and apply the returned create/update/edge operations.
 *
 * @param projectId  Project whose wiki is updated.
 * @param sourceText Full text to ingest.
 * @param sourceType 'todo' | 'discussion' | 'manual' (others skip the snapshot).
 * @param sourceId   Optional id of the originating row (todo/discussion).
 * @param titleHint  Optional title used for the snapshot filename.
 * @param locale     'en' forces English wiki output; anything else requests Korean.
 * @returns {created, updated, edgesAdded, nodeIds, skipped, rawResponseSnippet?}
 * @throws Error when the project does not exist; CLI failures propagate.
 */
export async function ingestSource(projectId, sourceText, sourceType, sourceId, titleHint, locale) {
    const project = queries.getProjectById(projectId);
    if (!project)
        throw new Error('Project not found');
    const cliTool = resolveCliTool(project.cli_tool);
    // Step 1: persist raw snapshot (immutable). Failure is non-fatal.
    let rawPath = null;
    if (sourceType && VALID_SOURCE_TYPES.has(sourceType)) {
        // Filename hint: explicit titleHint, else first non-blank line, else the type.
        const hint = (titleHint && titleHint.trim()) || sourceText.split('\n').find(l => l.trim())?.trim().slice(0, 60) || sourceType;
        rawPath = writeRawSnapshot(project, sourceType, sourceId, sourceText, hint);
    }
    const schema = getOrCreateSchemaNode(projectId);
    const langRule = locale === 'en'
        ? '- Write all titles, body text, tags, and edge labels in English.'
        : '- Write all titles, body text, tags, and edge labels in Korean (한국어).';
    const chunks = chunkSourceText(sourceText, CHUNK_CHARS, MAX_CHUNKS);
    const total = chunks.length;
    // Mutable context threaded through applyIngestOp across all chunks.
    const ctx = {
        projectId,
        sourceType,
        sourceId,
        rawPath,
        titleToId: new Map(),
        createdIds: [],
        updatedIds: new Set(),
        edgesAdded: 0,
        skipped: {
            parseFailed: false,
            proposedCreate: 0, proposedUpdate: 0, proposedEdges: 0,
            duplicateTitle: 0, uniqueConflict: 0, emptyTitle: 0,
            invalidUpdateId: 0, invalidEdgeRef: 0, selfEdge: 0, edgeUniqueConflict: 0,
        },
        lastRaw: '',
    };
    for (let i = 0; i < total; i++) {
        // Re-fetch nodes between chunks so dedup sees nodes added by earlier chunks.
        const nodes = queries.getMemoryNodesByProjectId(projectId);
        ctx.titleToId = new Map(nodes.map(n => [n.title.toLowerCase(), n.id]));
        const nodeSummary = buildNodeSummary(nodes);
        const chunkPreamble = total > 1
            ? `\n\nThis source has been split into ${total} parts due to length. You are processing part ${i + 1}. Earlier parts may have added new pages — see "Existing Wiki Pages" for the current state. Avoid creating duplicates of pages already added in earlier parts.`
            : '';
        const chunkNote = total > 1 ? ` (part ${i + 1} of ${total})` : '';
        // The locale rule is spliced in right after the "Rules:" heading.
        const prompt = INGEST_PROMPT_HEADER
            .replace('Rules:\n', `Rules:\n${langRule}\n`)
            .replace('{CHUNK_PREAMBLE}', chunkPreamble)
            .replace('{CHUNK_NOTE}', chunkNote)
            .replace('{SCHEMA}', schema)
            .replace('{NODES}', nodeSummary)
            .replace('{SOURCE}', chunks[i]);
        const debugSession = startDebugSession(project, cliTool, sourceType, sourceId, total > 1 ? `ingest-${i + 1}of${total}` : 'ingest');
        const raw = await runHeadless(cliTool, prompt, 180_000, debugSession);
        ctx.lastRaw = raw;
        const { op, parseFailed } = safeParseIngestOp(raw);
        if (parseFailed)
            ctx.skipped.parseFailed = true;
        applyIngestOp(ctx, op, nodes);
    }
    const created = ctx.createdIds.length;
    const updated = ctx.updatedIds.size;
    const edgesAdded = ctx.edgesAdded;
    // Diagnostic logging when the whole run produced nothing (or parsing failed).
    if (ctx.skipped.parseFailed || (created === 0 && updated === 0 && edgesAdded === 0)) {
        console.warn(`[memory-ingest] no-op result project=${projectId} cli=${cliTool} chunks=${total} ` +
            `parseFailed=${ctx.skipped.parseFailed} ` +
            `proposed(c/u/e)=${ctx.skipped.proposedCreate}/${ctx.skipped.proposedUpdate}/${ctx.skipped.proposedEdges} ` +
            `skip=dup:${ctx.skipped.duplicateTitle}/uniq:${ctx.skipped.uniqueConflict}/badId:${ctx.skipped.invalidUpdateId}/` +
            `badEdge:${ctx.skipped.invalidEdgeRef}/empty:${ctx.skipped.emptyTitle}`);
        if (ctx.skipped.parseFailed) {
            console.warn('[memory-ingest] last raw response head:', ctx.lastRaw.slice(0, 500));
        }
    }
    return {
        created,
        updated,
        edgesAdded,
        nodeIds: ctx.createdIds,
        skipped: ctx.skipped,
        rawResponseSnippet: ctx.skipped.parseFailed ? ctx.lastRaw.slice(0, 500) : undefined,
    };
}
574
/**
 * Audit the project's wiki for quality issues (contradictions, orphans,
 * duplicates, stale nodes) by sending every visible node — the schema node
 * excluded — to the project's CLI tool and parsing the returned issue list.
 *
 * @param projectId Project whose wiki is audited.
 * @returns Parsed lint issues; [] when the wiki is empty or healthy.
 * @throws Error when the project does not exist; CLI failures propagate.
 */
export async function lintWiki(projectId) {
    const project = queries.getProjectById(projectId);
    if (!project)
        throw new Error('Project not found');
    const cliTool = resolveCliTool(project.cli_tool);
    const nodes = queries.getMemoryNodesByProjectId(projectId);
    // Hide the internal schema node; nodes with unparsable tags stay visible.
    const visible = nodes.filter(n => {
        try {
            const tags = JSON.parse(n.tags ?? '[]');
            return !Array.isArray(tags) || !tags.includes(WIKI_SCHEMA_TAG);
        }
        catch {
            return true;
        }
    });
    if (visible.length === 0)
        return [];
    const edges = queries.getMemoryEdgesByProjectId(projectId);
    // Node ids participating in at least one edge (either direction).
    const edgeSet = new Set(edges.flatMap(e => [e.from_node_id, e.to_node_id]));
    const nodeText = visible.map(n => {
        const body = (n.body || '').slice(0, 400);
        // Mark unconnected nodes so the model can flag orphans.
        const hasEdge = edgeSet.has(n.id) ? '' : ' [no-edges]';
        return `### ${n.title}${hasEdge}\n${body}`;
    }).join('\n\n');
    // Cap the prompt at ~12k chars of node text.
    const prompt = LINT_PROMPT_HEADER.replace('{NODES}', nodeText.slice(0, 12000));
    const debugSession = startDebugSession(project, cliTool, null, null, 'lint');
    const raw = await runHeadless(cliTool, prompt, 180_000, debugSession);
    return safeParseLintIssues(raw);
}
603
/**
 * Run an auto-ingest and broadcast the result over WebSocket so the client can show a toast.
 * Errors are swallowed (auto-ingest is best-effort) but reported as a failure event.
 */
export function runAutoIngestAndBroadcast(projectId, sourceType, sourceId, sourceTitle, sourceText) {
    const announce = (payload) => {
        broadcaster.broadcast({
            type: 'memory:ingest-finished',
            projectId,
            sourceType,
            sourceId,
            sourceTitle,
            ...payload,
        });
    };
    // Deliberately fire-and-forget: callers must not be blocked by ingestion.
    ingestSource(projectId, sourceText, sourceType, sourceId, sourceTitle)
        .then((res) => {
            announce({
                created: res.created,
                updated: res.updated,
                edgesAdded: res.edgesAdded,
                skipped: res.skipped,
            });
        })
        .catch((err) => {
            console.error(`[memory-ingest] auto-ingest failed (${sourceType}):`, err);
            announce({
                created: 0,
                updated: 0,
                edgesAdded: 0,
                skipped: {
                    parseFailed: false,
                    proposedCreate: 0, proposedUpdate: 0, proposedEdges: 0,
                    duplicateTitle: 0, uniqueConflict: 0, emptyTitle: 0,
                    invalidUpdateId: 0, invalidEdgeRef: 0, selfEdge: 0, edgeUniqueConflict: 0,
                },
                error: err instanceof Error ? err.message : String(err),
            });
        });
}
641
/**
 * Build a markdown source document from a todo's assistant task logs:
 * task title, optional description, then assistant messages grouped by round.
 * Returns null when the todo is missing or produced no assistant output.
 */
export function buildSourceTextFromTodo(todoId) {
    const todo = queries.getTodoById(todoId);
    if (!todo)
        return null;
    const logs = queries.getTaskLogsByTodoId(todoId);
    if (logs.length === 0)
        return null;
    const assistantLogs = logs
        .filter((l) => l.log_type === 'assistant' && l.message.trim())
        .sort((a, b) => (a.round_number ?? 1) - (b.round_number ?? 1));
    if (assistantLogs.length === 0)
        return null;
    const out = [`# Task: ${todo.title}`];
    if (todo.description) {
        out.push('', '## Description', todo.description.trim());
    }
    out.push('');
    // Bucket messages by round (missing round numbers count as round 1).
    const byRound = new Map();
    for (const log of assistantLogs) {
        const round = log.round_number ?? 1;
        const bucket = byRound.get(round);
        if (bucket)
            bucket.push(log.message.trim());
        else
            byRound.set(round, [log.message.trim()]);
    }
    const rounds = [...byRound.entries()].sort(([a], [b]) => a - b);
    for (const [round, messages] of rounds) {
        out.push(`## Round ${round}`, messages.join('\n\n'), '');
    }
    return out.join('\n');
}
676
/**
 * Build a markdown source document from a discussion's completed messages:
 * title, optional description, then each agent message under its round heading.
 * Returns null when the discussion is missing or has no completed content.
 */
export function buildSourceTextFromDiscussion(discussionId) {
    const discussion = queries.getDiscussionById(discussionId);
    if (!discussion)
        return null;
    const messages = queries.getDiscussionMessages(discussionId);
    if (messages.length === 0)
        return null;
    const completed = messages.filter((m) => m.status === 'completed' && m.content && m.content.trim());
    if (completed.length === 0)
        return null;
    const out = [`# Discussion: ${discussion.title}`];
    if (discussion.description) {
        out.push('', '## Description', discussion.description.trim());
    }
    out.push('');
    // Emit a round heading whenever the round changes (messages arrive in order).
    let currentRound = -1;
    for (const message of completed) {
        if (message.round_number !== currentRound) {
            out.push(`## Round ${message.round_number}`);
            currentRound = message.round_number;
        }
        out.push(`### ${message.agent_name} (${message.role})`, (message.content ?? '').trim(), '');
    }
    return out.join('\n');
}
706
+ //# sourceMappingURL=memory-ingest.js.map