compound-agent 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1576 @@
1
+ import { createRequire } from 'module';
2
+ import { mkdir, appendFile, readFile } from 'fs/promises';
3
+ import { join, dirname, relative } from 'path';
4
+ import { createHash } from 'crypto';
5
+ import { z } from 'zod';
6
+ import { existsSync, mkdirSync, unlinkSync, statSync, readFileSync, readdirSync } from 'fs';
7
+ import { getLlama, resolveModelFile } from 'node-llama-cpp';
8
+ import { homedir } from 'os';
9
+ import 'chalk';
10
+ import { execSync } from 'child_process';
11
+
12
// src/index.ts
// Zod schemas shared by the memory subsystem. These validate every record
// read from or written to the JSONL lesson log.

// Where a memory item originated from.
var SourceSchema = z.enum([
  "user_correction",
  "self_correction",
  "test_failure",
  "manual"
]);
// Tool/intent pair describing the situation in which an item was captured.
var ContextSchema = z.object({
  tool: z.string(),
  intent: z.string()
});
// A before/after code (or behavior) pair attached to some item types.
var PatternSchema = z.object({
  bad: z.string(),
  good: z.string()
});
// Provenance pointer back to the code a lesson was learned from.
var CitationSchema = z.object({
  file: z.string().min(1),
  // Source file path (required, non-empty)
  line: z.number().int().positive().optional(),
  // Line number (optional, must be positive)
  commit: z.string().optional()
  // Git commit hash (optional)
});
var SeveritySchema = z.enum(["high", "medium", "low"]);
// Lifecycle stage for age-based compaction of old items.
var CompactionLevelSchema = z.union([
  z.literal(0),
  // Active
  z.literal(1),
  // Flagged (>90 days)
  z.literal(2)
  // Archived
]);
// Legacy lesson "type" values from before the discriminated-union format.
var LessonTypeSchema = z.enum(["quick", "full"]);
var MemoryItemTypeSchema = z.enum(["lesson", "solution", "pattern", "preference"]);
// Fields common to every memory item type; spread into each concrete schema.
var baseFields = {
  // Core identity (required)
  id: z.string(),
  trigger: z.string(),
  insight: z.string(),
  // Metadata (required)
  tags: z.array(z.string()),
  source: SourceSchema,
  context: ContextSchema,
  created: z.string(),
  // ISO8601
  confirmed: z.boolean(),
  // Relationships (required, can be empty arrays)
  supersedes: z.array(z.string()),
  related: z.array(z.string()),
  // Extended fields (optional)
  evidence: z.string().optional(),
  severity: SeveritySchema.optional(),
  // Lifecycle fields (optional)
  deleted: z.boolean().optional(),
  deletedAt: z.string().optional(),
  retrievalCount: z.number().optional(),
  // Provenance tracking (optional)
  citation: CitationSchema.optional(),
  // Age-based validity fields (optional)
  compactionLevel: CompactionLevelSchema.optional(),
  compactedAt: z.string().optional(),
  lastRetrieved: z.string().optional(),
  // Invalidation fields (optional)
  invalidatedAt: z.string().optional(),
  invalidationReason: z.string().optional()
};
78
// Concrete item schemas. Each shares baseFields plus a literal `type`
// discriminant; only "pattern" items require the bad/good pattern pair.
var LessonItemSchema = z.object({
  ...baseFields,
  type: z.literal("lesson"),
  pattern: PatternSchema.optional()
});
var SolutionItemSchema = z.object({
  ...baseFields,
  type: z.literal("solution"),
  pattern: PatternSchema.optional()
});
var PatternItemSchema = z.object({
  ...baseFields,
  type: z.literal("pattern"),
  pattern: PatternSchema
});
var PreferenceItemSchema = z.object({
  ...baseFields,
  type: z.literal("preference"),
  pattern: PatternSchema.optional()
});
// The current on-disk format: one of the four typed items above.
var MemoryItemSchema = z.discriminatedUnion("type", [
  LessonItemSchema,
  SolutionItemSchema,
  PatternItemSchema,
  PreferenceItemSchema
]);
// Legacy rows used type "quick"/"full" before the discriminated union
// existed; toMemoryItem() upgrades them to type "lesson" on read.
var LegacyLessonSchema = z.object({
  ...baseFields,
  type: LessonTypeSchema,
  pattern: PatternSchema.optional()
});
var LessonSchema = LessonItemSchema;
// Tombstone rows mark deletions in the append-only JSONL log.
var LegacyTombstoneSchema = z.object({
  id: z.string(),
  deleted: z.literal(true),
  deletedAt: z.string()
  // ISO8601
});
// Anything that may legally appear on a line of the JSONL log.
var LessonRecordSchema = z.union([
  MemoryItemSchema,
  LegacyLessonSchema,
  LegacyTombstoneSchema
]);
var MemoryItemRecordSchema = LessonRecordSchema;
122
// One-letter ID prefix per memory item type.
const TYPE_PREFIXES = {
  lesson: "L",
  solution: "S",
  pattern: "P",
  preference: "R"
};

/**
 * Derive a stable, content-addressed id for a memory item: the type's
 * one-letter prefix followed by the first 8 hex chars of sha256(insight).
 * @param {string} insight - The item's insight text (hash input).
 * @param {string} [type] - Item type; defaults to "lesson".
 * @returns {string} e.g. "L1a2b3c4d"
 */
function generateId(insight, type) {
  const digest = createHash("sha256").update(insight).digest("hex").slice(0, 8);
  return TYPE_PREFIXES[type ?? "lesson"] + digest;
}
133
+
134
// src/memory/storage/jsonl.ts
// Relative path of the append-only JSONL memory log inside a repo.
const LESSONS_PATH = ".claude/lessons/index.jsonl";

/**
 * Append one memory item as a single JSON line to the repo's lesson log,
 * creating the parent directory if needed.
 */
async function appendMemoryItem(repoRoot, item) {
  const target = join(repoRoot, LESSONS_PATH);
  await mkdir(dirname(target), { recursive: true });
  await appendFile(target, `${JSON.stringify(item)}\n`, "utf-8");
}
142
/** Back-compat alias: a lesson is appended exactly like any memory item. */
async function appendLesson(repoRoot, lesson) {
  return await appendMemoryItem(repoRoot, lesson);
}
145
/**
 * Parse and validate one line of the JSONL log.
 * Returns the validated record, or null when the line is bad and strict
 * mode is off (the failure is reported via onParseError instead).
 * @throws {Error} in strict mode, on any JSON or schema failure.
 */
function parseJsonLine(line, lineNumber, strict, onParseError) {
  const fail = (message, cause) => {
    const parseError = { line: lineNumber, message, cause };
    if (strict) {
      throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);
    }
    onParseError?.(parseError);
    return null;
  };
  let parsed;
  try {
    parsed = JSON.parse(line);
  } catch (err) {
    return fail(`Invalid JSON: ${err.message}`, err);
  }
  const result = MemoryItemRecordSchema.safeParse(parsed);
  if (!result.success) {
    return fail(`Schema validation failed: ${result.error.message}`, result.error);
  }
  return result.data;
}
176
/**
 * Normalize a raw JSONL record into a current-format memory item.
 * Tombstones yield null; legacy "quick"/"full" lessons are upgraded to
 * type "lesson"; everything else passes through unchanged.
 */
function toMemoryItem(record) {
  if (record.deleted === true) return null;
  const isLegacyLesson = record.type === "quick" || record.type === "full";
  return isLegacyLesson ? { ...record, type: "lesson" } : record;
}
185
/**
 * Read the JSONL log and replay it into the current set of memory items.
 * Later lines win: a tombstone removes a previously-seen id, and a
 * re-appended item replaces the earlier version. A missing file yields an
 * empty result.
 * @returns {{items: Array, skippedCount: number}} skippedCount counts
 *   lines dropped by parse/validation failures (non-strict mode only).
 */
async function readMemoryItems(repoRoot, options = {}) {
  const { strict = false, onParseError } = options;
  const filePath = join(repoRoot, LESSONS_PATH);
  let content;
  try {
    content = await readFile(filePath, "utf-8");
  } catch (err) {
    if (err.code === "ENOENT") return { items: [], skippedCount: 0 };
    throw err;
  }
  const byId = new Map();
  let skippedCount = 0;
  const lines = content.split("\n");
  for (const [index, rawLine] of lines.entries()) {
    const line = rawLine.trim();
    if (!line) continue;
    const record = parseJsonLine(line, index + 1, strict, onParseError);
    if (!record) {
      skippedCount++;
      continue;
    }
    if (record.deleted === true) {
      byId.delete(record.id);
      continue;
    }
    const item = toMemoryItem(record);
    if (item) byId.set(record.id, item);
  }
  return { items: [...byId.values()], skippedCount };
}
219
/** Back-compat wrapper around readMemoryItems: keep only "lesson" items. */
async function readLessons(repoRoot, options = {}) {
  const { items, skippedCount } = await readMemoryItems(repoRoot, options);
  const lessons = items.filter((item) => item.type === "lesson");
  return { lessons, skippedCount };
}
224
// Lazy, validated loader for the native better-sqlite3 dependency.
var require2 = createRequire(import.meta.url);
// Latched only on a successful load; a failed attempt is retried on the
// next call (e.g. after the user rebuilds the native module).
var checked = false;
var DatabaseConstructor = null;
// Load better-sqlite3 once and smoke-test it by opening and closing an
// in-memory database, which proves the native binding actually works.
// Throws a setup-instructions error (with `cause`) when it does not.
function ensureSqliteAvailable() {
  if (checked) return;
  try {
    const module = require2("better-sqlite3");
    const Constructor = module.default || module;
    const testDb = new Constructor(":memory:");
    testDb.close();
    DatabaseConstructor = Constructor;
    checked = true;
  } catch (cause) {
    throw new Error(
      `better-sqlite3 failed to load.
If using pnpm, add to your project's package.json:
"pnpm": { "onlyBuiltDependencies": ["better-sqlite3"] }
Then run: pnpm install && pnpm rebuild better-sqlite3
For npm/yarn, run: npm rebuild better-sqlite3`,
      { cause }
    );
  }
}
// Return the verified constructor, loading/validating it on first use.
function getDatabaseConstructor() {
  ensureSqliteAvailable();
  return DatabaseConstructor;
}
251
+
252
// src/memory/storage/sqlite/schema.ts
// Bump when the table layout changes; openDb() deletes and recreates any
// database whose user_version does not match (the file is a derived cache).
var SCHEMA_VERSION = 3;
// lessons: one denormalized row per memory item (arrays/objects stored as
// JSON or comma-joined text). lessons_fts: external-content FTS5 index over
// the searchable columns, kept in sync by the three triggers below.
// metadata: small key/value store (e.g. last_sync_mtime).
var SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS lessons (
  id TEXT PRIMARY KEY,
  type TEXT NOT NULL,
  trigger TEXT NOT NULL,
  insight TEXT NOT NULL,
  evidence TEXT,
  severity TEXT,
  tags TEXT NOT NULL DEFAULT '',
  source TEXT NOT NULL,
  context TEXT NOT NULL DEFAULT '{}',
  supersedes TEXT NOT NULL DEFAULT '[]',
  related TEXT NOT NULL DEFAULT '[]',
  created TEXT NOT NULL,
  confirmed INTEGER NOT NULL DEFAULT 0,
  deleted INTEGER NOT NULL DEFAULT 0,
  retrieval_count INTEGER NOT NULL DEFAULT 0,
  last_retrieved TEXT,
  embedding BLOB,
  content_hash TEXT,
  invalidated_at TEXT,
  invalidation_reason TEXT,
  citation_file TEXT,
  citation_line INTEGER,
  citation_commit TEXT,
  compaction_level INTEGER DEFAULT 0,
  compacted_at TEXT,
  pattern_bad TEXT,
  pattern_good TEXT
);

CREATE VIRTUAL TABLE IF NOT EXISTS lessons_fts USING fts5(
  id, trigger, insight, tags, pattern_bad, pattern_good,
  content='lessons', content_rowid='rowid'
);

CREATE TRIGGER IF NOT EXISTS lessons_ai AFTER INSERT ON lessons BEGIN
  INSERT INTO lessons_fts(rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
  VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags, new.pattern_bad, new.pattern_good);
END;

CREATE TRIGGER IF NOT EXISTS lessons_ad AFTER DELETE ON lessons BEGIN
  INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
  VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);
END;

CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE ON lessons BEGIN
  INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
  VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);
  INSERT INTO lessons_fts(rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
  VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags, new.pattern_bad, new.pattern_good);
END;

CREATE INDEX IF NOT EXISTS idx_lessons_created ON lessons(created);
CREATE INDEX IF NOT EXISTS idx_lessons_confirmed ON lessons(confirmed);
CREATE INDEX IF NOT EXISTS idx_lessons_severity ON lessons(severity);
CREATE INDEX IF NOT EXISTS idx_lessons_type ON lessons(type);

CREATE TABLE IF NOT EXISTS metadata (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL
);
`;
// Apply the schema and stamp the database with the current version.
function createSchema(database) {
  database.exec(SCHEMA_SQL);
  database.pragma(`user_version = ${SCHEMA_VERSION}`);
}
321
+
322
// src/memory/storage/sqlite/connection.ts
// Location of the derived SQLite cache, relative to the repo root.
var DB_PATH = ".claude/.cache/lessons.sqlite";
// Open connections, keyed by resolved file path (or a per-repo :memory: key).
var dbMap = /* @__PURE__ */ new Map();
// True when the on-disk database was created by the current schema version.
function hasExpectedVersion(database) {
  const row = database.pragma("user_version", { simple: true });
  return row === SCHEMA_VERSION;
}
// Open (or reuse) the cache database for a repo. An on-disk database with a
// stale user_version is deleted and recreated from scratch — safe because
// the SQLite file is only a derived cache of the JSONL log.
// NOTE(review): the WAL side files (-wal/-shm) are not removed alongside a
// stale main file — presumably harmless after the fresh open, but confirm.
function openDb(repoRoot, options = {}) {
  const { inMemory = false } = options;
  const key = inMemory ? `:memory:${repoRoot}` : join(repoRoot, DB_PATH);
  const cached = dbMap.get(key);
  if (cached) {
    return cached;
  }
  const Database = getDatabaseConstructor();
  let database;
  if (inMemory) {
    database = new Database(":memory:");
  } else {
    const dir = dirname(key);
    mkdirSync(dir, { recursive: true });
    database = new Database(key);
    if (!hasExpectedVersion(database)) {
      // Stale schema: drop the file and start over.
      database.close();
      unlinkSync(key);
      database = new Database(key);
    }
    database.pragma("journal_mode = WAL");
  }
  createSchema(database);
  dbMap.set(key, database);
  return database;
}
// Close and forget every cached connection (shutdown/test helper).
function closeDb() {
  for (const database of dbMap.values()) {
    database.close();
  }
  dbMap.clear();
}
361
/**
 * Stable fingerprint of a lesson's searchable text ("trigger insight"),
 * used to decide whether a cached embedding is still valid.
 */
function contentHash(trigger, insight) {
  const hasher = createHash("sha256");
  hasher.update(`${trigger} ${insight}`);
  return hasher.digest("hex");
}
364
/**
 * Fetch a lesson's cached embedding from SQLite, or null when no embedding
 * is stored or the stored content hash no longer matches expectedHash.
 * @returns {number[] | null}
 */
function getCachedEmbedding(repoRoot, lessonId, expectedHash) {
  const database = openDb(repoRoot);
  const row = database
    .prepare("SELECT embedding, content_hash FROM lessons WHERE id = ?")
    .get(lessonId);
  if (!row?.embedding || !row.content_hash) return null;
  if (expectedHash && row.content_hash !== expectedHash) return null;
  // The BLOB is raw float32 data; reinterpret the buffer without copying.
  const vector = new Float32Array(
    row.embedding.buffer,
    row.embedding.byteOffset,
    row.embedding.byteLength / 4
  );
  return [...vector];
}
380
/** Persist a lesson's embedding (as a float32 BLOB) plus its content hash. */
function setCachedEmbedding(repoRoot, lessonId, embedding, hash) {
  const database = openDb(repoRoot);
  const vector = embedding instanceof Float32Array ? embedding : Float32Array.from(embedding);
  const blob = Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength);
  database
    .prepare("UPDATE lessons SET embedding = ?, content_hash = ? WHERE id = ?")
    .run(blob, hash, lessonId);
}
386
/**
 * Snapshot every stored embedding keyed by lesson id, so a rebuild can
 * carry valid embeddings across its DELETE/INSERT cycle.
 * @returns {Map<string, {embedding: Buffer, contentHash: string}>}
 */
function collectCachedEmbeddings(database) {
  const cache = new Map();
  const rows = database
    .prepare("SELECT id, embedding, content_hash FROM lessons WHERE embedding IS NOT NULL")
    .all();
  for (const { id, embedding, content_hash } of rows) {
    if (embedding && content_hash) {
      cache.set(id, { embedding, contentHash: content_hash });
    }
  }
  return cache;
}
396
// Named-parameter insert used by rebuildIndex; columns mirror SCHEMA_SQL.
const INSERT_LESSON_SQL = `
INSERT INTO lessons (id, type, trigger, insight, evidence, severity, tags, source, context, supersedes, related, created, confirmed, deleted, retrieval_count, last_retrieved, embedding, content_hash, invalidated_at, invalidation_reason, citation_file, citation_line, citation_commit, compaction_level, compacted_at, pattern_bad, pattern_good)
VALUES (@id, @type, @trigger, @insight, @evidence, @severity, @tags, @source, @context, @supersedes, @related, @created, @confirmed, @deleted, @retrieval_count, @last_retrieved, @embedding, @content_hash, @invalidated_at, @invalidation_reason, @citation_file, @citation_line, @citation_commit, @compaction_level, @compacted_at, @pattern_bad, @pattern_good)
`;

/** mtime (ms) of the repo's JSONL log, or null when the file is absent. */
function getJsonlMtime(repoRoot) {
  try {
    return statSync(join(repoRoot, LESSONS_PATH)).mtimeMs;
  } catch {
    return null;
  }
}

/** JSONL mtime the SQLite cache was last synced against, or null if never. */
function getLastSyncMtime(database) {
  const row = database.prepare("SELECT value FROM metadata WHERE key = ?").get("last_sync_mtime");
  return row ? parseFloat(row.value) : null;
}

/** Record the JSONL mtime that the cache now reflects. */
function setLastSyncMtime(database, mtime) {
  database
    .prepare("INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)")
    .run("last_sync_mtime", String(mtime));
}
416
// Rebuild the SQLite cache from the JSONL source of truth.
// Embeddings computed previously are carried over when a row's content
// hash still matches, so a rebuild does not force re-embedding everything.
async function rebuildIndex(repoRoot) {
  const database = openDb(repoRoot);
  const { items } = await readMemoryItems(repoRoot);
  // Snapshot embeddings BEFORE wiping the table.
  const cachedEmbeddings = collectCachedEmbeddings(database);
  database.exec("DELETE FROM lessons");
  if (items.length === 0) {
    // Nothing to insert; still record the sync point so syncIfNeeded does
    // not keep rebuilding for an unchanged (empty) log.
    const mtime2 = getJsonlMtime(repoRoot);
    if (mtime2 !== null) {
      setLastSyncMtime(database, mtime2);
    }
    return;
  }
  const insert = database.prepare(INSERT_LESSON_SQL);
  // One transaction: all rows are inserted atomically.
  const insertMany = database.transaction((memoryItems) => {
    for (const item of memoryItems) {
      const newHash = contentHash(item.trigger, item.insight);
      const cached = cachedEmbeddings.get(item.id);
      // Reuse the old embedding only if the searchable text is unchanged.
      const hasValidCache = cached && cached.contentHash === newHash;
      insert.run({
        id: item.id,
        type: item.type,
        trigger: item.trigger,
        insight: item.insight,
        evidence: item.evidence ?? null,
        severity: item.severity ?? null,
        // Arrays/objects are flattened for SQLite storage (see rowToMemoryItem).
        tags: item.tags.join(","),
        source: item.source,
        context: JSON.stringify(item.context),
        supersedes: JSON.stringify(item.supersedes),
        related: JSON.stringify(item.related),
        created: item.created,
        confirmed: item.confirmed ? 1 : 0,
        deleted: item.deleted ? 1 : 0,
        retrieval_count: item.retrievalCount ?? 0,
        last_retrieved: item.lastRetrieved ?? null,
        embedding: hasValidCache ? cached.embedding : null,
        content_hash: hasValidCache ? cached.contentHash : null,
        invalidated_at: item.invalidatedAt ?? null,
        invalidation_reason: item.invalidationReason ?? null,
        citation_file: item.citation?.file ?? null,
        citation_line: item.citation?.line ?? null,
        citation_commit: item.citation?.commit ?? null,
        compaction_level: item.compactionLevel ?? 0,
        compacted_at: item.compactedAt ?? null,
        pattern_bad: item.pattern?.bad ?? null,
        pattern_good: item.pattern?.good ?? null
      });
    }
  });
  insertMany(items);
  // Stamp the sync point last, after a successful rebuild.
  const mtime = getJsonlMtime(repoRoot);
  if (mtime !== null) {
    setLastSyncMtime(database, mtime);
  }
}
471
/**
 * Rebuild the SQLite cache when the JSONL log is newer than the last sync
 * (or when forced). A missing JSONL file is a no-op unless forced.
 * @returns {Promise<boolean>} true when a rebuild actually happened.
 */
async function syncIfNeeded(repoRoot, options = {}) {
  const { force = false } = options;
  const jsonlMtime = getJsonlMtime(repoRoot);
  if (jsonlMtime === null && !force) return false;
  const database = openDb(repoRoot);
  const lastSync = getLastSyncMtime(database);
  const stale = lastSync === null || (jsonlMtime !== null && jsonlMtime > lastSync);
  if (!force && !stale) return false;
  await rebuildIndex(repoRoot);
  return true;
}
486
+
487
// src/memory/storage/sqlite/search.ts
// Convert a SQLite row back into a validated MemoryItem. Optional fields
// are attached only when the column holds a meaningful value, so the
// reconstructed item matches what was originally stored.
// Returns null when the result fails MemoryItemSchema validation.
function rowToMemoryItem(row) {
  const item = {
    id: row.id,
    type: row.type,
    trigger: row.trigger,
    insight: row.insight,
    // tags were stored comma-joined; an empty string means no tags.
    tags: row.tags ? row.tags.split(",").filter(Boolean) : [],
    source: row.source,
    context: JSON.parse(row.context),
    supersedes: JSON.parse(row.supersedes),
    related: JSON.parse(row.related),
    created: row.created,
    confirmed: row.confirmed === 1
  };
  if (row.evidence !== null) item.evidence = row.evidence;
  if (row.severity !== null) item.severity = row.severity;
  if (row.deleted === 1) item.deleted = true;
  if (row.retrieval_count > 0) item.retrievalCount = row.retrieval_count;
  if (row.invalidated_at !== null) item.invalidatedAt = row.invalidated_at;
  if (row.invalidation_reason !== null) item.invalidationReason = row.invalidation_reason;
  if (row.citation_file !== null) {
    item.citation = {
      file: row.citation_file,
      ...row.citation_line !== null && { line: row.citation_line },
      ...row.citation_commit !== null && { commit: row.citation_commit }
    };
  }
  // Level 0 (active) is the implicit default and is not set on the item.
  if (row.compaction_level !== null && row.compaction_level !== 0) {
    item.compactionLevel = row.compaction_level;
  }
  if (row.compacted_at !== null) item.compactedAt = row.compacted_at;
  if (row.last_retrieved !== null) item.lastRetrieved = row.last_retrieved;
  // A pattern requires both halves; a row with only one side yields none.
  if (row.pattern_bad !== null && row.pattern_good !== null) {
    item.pattern = { bad: row.pattern_bad, good: row.pattern_good };
  }
  const result = MemoryItemSchema.safeParse(item);
  if (!result.success) return null;
  return result.data;
}
527
// Query words FTS5 treats as operators; they are dropped outright.
const FTS_OPERATORS = new Set(["AND", "OR", "NOT", "NEAR"]);

/**
 * Reduce a free-form query to a safe FTS5 MATCH string: strip the special
 * characters " * ^ + -, split on whitespace, and drop operator keywords.
 */
function sanitizeFtsQuery(query) {
  return query
    .replace(/["*^+-]/g, "")
    .split(/\s+/)
    .filter((token) => token.length > 0 && !FTS_OPERATORS.has(token))
    .join(" ");
}
533
/**
 * Bump retrieval_count and stamp last_retrieved (now, ISO8601) for each
 * given lesson id, inside a single transaction.
 */
function incrementRetrievalCount(repoRoot, lessonIds) {
  if (lessonIds.length === 0) return;
  const database = openDb(repoRoot);
  const timestamp = new Date().toISOString();
  const statement = database.prepare(`
    UPDATE lessons
    SET retrieval_count = retrieval_count + 1,
        last_retrieved = ?
    WHERE id = ?
  `);
  const applyAll = database.transaction((ids) => {
    for (const id of ids) statement.run(timestamp, id);
  });
  applyAll(lessonIds);
}
550
/**
 * Full-text keyword search over the lessons FTS5 index.
 * Invalidated items are excluded; typeFilter optionally restricts results
 * to one item type. Returns [] for empty tables, unsanitizable queries, or
 * FTS errors (logged, not thrown).
 *
 * Fix: the two branches previously duplicated the entire SQL statement;
 * the query is now assembled once with a conditional type clause,
 * preserving the original SQL semantics in both cases.
 */
async function searchKeyword(repoRoot, query, limit, typeFilter) {
  const database = openDb(repoRoot);
  const countResult = database.prepare("SELECT COUNT(*) as cnt FROM lessons").get();
  if (countResult.cnt === 0) return [];
  const sanitized = sanitizeFtsQuery(query);
  if (sanitized === "") return [];
  try {
    const conditions = ["lessons_fts MATCH ?", "l.invalidated_at IS NULL"];
    const params = [sanitized];
    if (typeFilter) {
      conditions.push("l.type = ?");
      params.push(typeFilter);
    }
    const sql = `
      SELECT l.*
      FROM lessons l
      JOIN lessons_fts fts ON l.rowid = fts.rowid
      WHERE ${conditions.join("\n        AND ")}
      LIMIT ?
    `;
    const rows = database.prepare(sql).all(...params, limit);
    return rows.map(rowToMemoryItem).filter((x) => x !== null);
  } catch (err) {
    // FTS5 can reject unusual queries; degrade to "no results" with a log.
    const message = err instanceof Error ? err.message : "Unknown FTS5 error";
    console.error(`[compound-agent] search error: ${message}`);
    return [];
  }
}
588
+
589
// src/utils.ts
// Milliseconds in one day.
const MS_PER_DAY = 24 * 60 * 60 * 1e3;

/** Whole days elapsed since the lesson's `created` timestamp (floored). */
function getLessonAgeDays(lesson) {
  const elapsedMs = Date.now() - new Date(lesson.created).getTime();
  return Math.floor(elapsedMs / MS_PER_DAY);
}
596
// Embedding model configuration (EmbeddingGemma 300M, Q4_0 GGUF) and the
// default node-llama-cpp model cache directory.
var MODEL_URI = "hf:ggml-org/embeddinggemma-300M-qat-q4_0-GGUF/embeddinggemma-300M-qat-Q4_0.gguf";
var MODEL_FILENAME = "hf_ggml-org_embeddinggemma-300M-qat-Q4_0.gguf";
var DEFAULT_MODEL_DIR = join(homedir(), ".node-llama-cpp", "models");
// Memoized probe result; the usability check is expensive (loads the model).
var cachedUsability = null;
// True when the model file exists on disk (does not prove it loads).
function isModelAvailable() {
  return existsSync(join(DEFAULT_MODEL_DIR, MODEL_FILENAME));
}
// Probe whether the embedding model can actually be loaded and used.
// Both success and failure are cached for the lifetime of the process.
async function isModelUsable() {
  if (cachedUsability !== null) {
    return cachedUsability;
  }
  if (!isModelAvailable()) {
    cachedUsability = {
      usable: false,
      reason: "Embedding model file not found",
      action: "Run: npx ca download-model"
    };
    return cachedUsability;
  }
  let llama = null;
  let model = null;
  let context = null;
  try {
    const modelPath = join(DEFAULT_MODEL_DIR, MODEL_FILENAME);
    llama = await getLlama();
    model = await llama.loadModel({ modelPath });
    context = await model.createEmbeddingContext();
    cachedUsability = { usable: true };
    return cachedUsability;
  } catch (err) {
    const message = err instanceof Error ? err.message : "Unknown error";
    cachedUsability = {
      usable: false,
      reason: `Embedding model runtime initialization failed: ${message}`,
      action: "Check system compatibility or reinstall: npx ca download-model"
    };
    return cachedUsability;
  } finally {
    // Only the embedding context is released here.
    // NOTE(review): `model` and `llama` are loaded for this probe but never
    // disposed — presumably a deliberate warm-up or an accepted one-time
    // cost; confirm this is intentional.
    if (context) {
      try {
        context.dispose();
      } catch {
      }
    }
  }
}
642
/**
 * Resolve (downloading if necessary) the embedding model file and return
 * its local path; `cli` toggles node-llama-cpp's CLI progress output.
 */
async function resolveModel({ cli = true } = {}) {
  return resolveModelFile(MODEL_URI, { cli });
}
646
+
647
// src/memory/embeddings/nomic.ts
// Lazily-initialized llama.cpp embedding pipeline (module-level singletons).
let embeddingContext = null;
let pendingInit = null;
let llamaInstance = null;
let modelInstance = null;

// One-shot initializer: resolve the model file, load it, build a context.
// On failure the pending promise is cleared so the next call can retry.
async function initializeEmbedding() {
  try {
    const modelPath = await resolveModel({ cli: true });
    llamaInstance = await getLlama();
    modelInstance = await llamaInstance.loadModel({ modelPath });
    embeddingContext = await modelInstance.createEmbeddingContext();
    return embeddingContext;
  } catch (err) {
    pendingInit = null;
    throw err;
  }
}

/**
 * Get the shared embedding context, initializing it on first use.
 * Concurrent callers share a single in-flight initialization.
 */
async function getEmbedding() {
  if (embeddingContext) return embeddingContext;
  if (!pendingInit) {
    pendingInit = initializeEmbedding();
  }
  return pendingInit;
}
669
/**
 * Dispose the embedding pipeline and reset all singletons so the next
 * getEmbedding() starts fresh. Model/llama disposal is fire-and-forget
 * (their dispose() promises are intentionally swallowed).
 */
function unloadEmbedding() {
  embeddingContext?.dispose();
  embeddingContext = null;
  modelInstance?.dispose().catch(() => {
  });
  modelInstance = null;
  llamaInstance?.dispose().catch(() => {
  });
  llamaInstance = null;
  pendingInit = null;
}
686
/** Embed one text and return its vector as a plain number array. */
async function embedText(text) {
  const context = await getEmbedding();
  const { vector } = await context.getEmbeddingFor(text);
  return Array.from(vector);
}
691
/**
 * Embed several texts sequentially (the shared context is reused for each
 * input in turn) and return one vector per input, in order.
 */
async function embedTexts(texts) {
  if (texts.length === 0) return [];
  const context = await getEmbedding();
  const vectors = [];
  for (const text of texts) {
    const { vector } = await context.getEmbeddingFor(text);
    vectors.push(Array.from(vector));
  }
  return vectors;
}
701
+
702
// src/compound/clustering.ts
// Minimum cosine similarity for two items to join the same cluster.
const DEFAULT_THRESHOLD = 0.75;

/**
 * Build the symmetric n×n cosine-similarity matrix for the given vectors.
 * The diagonal is fixed at 1; each off-diagonal pair is computed once and
 * mirrored.
 */
function buildSimilarityMatrix(embeddings) {
  const n = embeddings.length;
  const matrix = embeddings.map(() => new Array(n).fill(0));
  for (let row = 0; row < n; row++) {
    matrix[row][row] = 1;
    for (let col = row + 1; col < n; col++) {
      const similarity = cosineSimilarity(embeddings[row], embeddings[col]);
      matrix[row][col] = similarity;
      matrix[col][row] = similarity;
    }
  }
  return matrix;
}
717
/**
 * Single-link clustering via union–find: any pair of items whose cosine
 * similarity meets the threshold is merged into one cluster. Every item
 * lands in exactly one cluster (singletons included); `noise` is always
 * empty and kept only for interface compatibility.
 */
function clusterBySimilarity(items, embeddings, threshold = DEFAULT_THRESHOLD) {
  const n = items.length;
  if (n === 0) return { clusters: [], noise: [] };
  const matrix = buildSimilarityMatrix(embeddings);
  const parent = items.map((_, index) => index);
  // Find with path halving keeps the trees shallow.
  const find = (x) => {
    while (parent[x] !== x) {
      parent[x] = parent[parent[x]];
      x = parent[x];
    }
    return x;
  };
  const union = (a, b) => {
    const rootA = find(a);
    const rootB = find(b);
    if (rootA !== rootB) parent[rootA] = rootB;
  };
  for (let i = 0; i < n; i++) {
    for (let j = i + 1; j < n; j++) {
      if (matrix[i][j] >= threshold) union(i, j);
    }
  }
  // Group items by their root representative, preserving input order.
  const groups = new Map();
  for (let i = 0; i < n; i++) {
    const root = find(i);
    if (!groups.has(root)) groups.set(root, []);
    groups.get(root).push(items[i]);
  }
  return { clusters: [...groups.values()], noise: [] };
}
754
// Compound/cross-cutting (CCT) patterns synthesized from lesson clusters,
// stored as their own JSONL file alongside the lesson log.
var CCT_PATTERNS_PATH = ".claude/lessons/cct-patterns.jsonl";
var CctPatternSchema = z.object({
  // Content-addressed id, e.g. "CCT-1a2b3c4d" (see generateCctId).
  id: z.string().regex(/^CCT-[a-f0-9]{8}$/),
  name: z.string().min(1),
  description: z.string().min(1),
  // Number of source items in the originating cluster.
  frequency: z.number().int().positive(),
  testable: z.boolean(),
  testApproach: z.string().optional(),
  sourceIds: z.array(z.string()).min(1),
  created: z.string()
  // ISO8601
});
766
/** Content-addressed CCT id: "CCT-" + first 8 hex chars of sha256(input). */
function generateCctId(input) {
  const digest = createHash("sha256").update(input).digest("hex");
  return `CCT-${digest.slice(0, 8)}`;
}
770
+
771
// src/compound/io.ts
/**
 * Read all valid CCT patterns from the JSONL file.
 * A missing file yields an empty list. Lines that are not valid JSON or
 * that fail schema validation are skipped, matching the tolerant behavior
 * of the lesson-log reader (previously a single malformed line threw and
 * aborted the entire read).
 */
async function readCctPatterns(repoRoot) {
  const filePath = join(repoRoot, CCT_PATTERNS_PATH);
  let content;
  try {
    content = await readFile(filePath, "utf-8");
  } catch (err) {
    if (err.code === "ENOENT") {
      return [];
    }
    throw err;
  }
  const patterns = [];
  const lines = content.split("\n");
  for (const line of lines) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    let parsed;
    try {
      parsed = JSON.parse(trimmed);
    } catch {
      // Skip malformed JSON lines instead of failing the whole read.
      continue;
    }
    const result = CctPatternSchema.safeParse(parsed);
    if (result.success) {
      patterns.push(result.data);
    }
  }
  return patterns;
}
796
/**
 * Append the given patterns to the CCT JSONL file, one JSON object per
 * line, creating the parent directory if needed.
 * NOTE(review): despite the "write" name this APPENDS — existing lines are
 * kept; callers are presumably expected to pass only new patterns. Confirm.
 */
async function writeCctPatterns(repoRoot, patterns) {
  const filePath = join(repoRoot, CCT_PATTERNS_PATH);
  await mkdir(dirname(filePath), { recursive: true });
  const payload = patterns.map((pattern) => `${JSON.stringify(pattern)}\n`).join("");
  await appendFile(filePath, payload, "utf-8");
}
802
+
803
// src/compound/synthesis.ts
/**
 * Collapse a cluster of related memory items into one CCT pattern.
 * The name comes from the cluster's three most frequent tags (falling back
 * to a 50-char prefix of the first insight); the pattern is flagged
 * testable when any member is high-severity or carries evidence, in which
 * case a boilerplate testApproach string is attached.
 */
function synthesizePattern(cluster, clusterId) {
  const tagCounts = new Map();
  for (const item of cluster) {
    for (const tag of item.tags) {
      tagCounts.set(tag, (tagCounts.get(tag) ?? 0) + 1);
    }
  }
  const rankedTags = [...tagCounts.entries()]
    .sort((a, b) => b[1] - a[1])
    .map(([tag]) => tag);
  const name = rankedTags.length > 0
    ? rankedTags.slice(0, 3).join(", ")
    : cluster[0].insight.slice(0, 50);
  const frequency = cluster.length;
  const testable =
    cluster.some((item) => "severity" in item && item.severity === "high") ||
    cluster.some((item) => "evidence" in item && item.evidence);
  return {
    id: generateCctId(clusterId),
    name,
    description: cluster.map((item) => item.insight).join("; "),
    frequency,
    testable,
    ...(testable && {
      testApproach: `Verify pattern: ${name}. Check ${frequency} related lesson(s).`
    }),
    sourceIds: cluster.map((item) => item.id),
    created: new Date().toISOString()
  };
}
836
+
837
// src/memory/search/vector.ts
/**
 * Cosine similarity of two equal-length numeric vectors, in [-1, 1].
 * Returns 0 when either vector has zero magnitude.
 * @throws {Error} when the vectors differ in length.
 */
function cosineSimilarity(a, b) {
  if (a.length !== b.length) {
    throw new Error("Vectors must have same length");
  }
  let dot = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    sumSqA += a[i] * a[i];
    sumSqB += b[i] * b[i];
  }
  const magnitude = Math.sqrt(sumSqA) * Math.sqrt(sumSqB);
  return magnitude === 0 ? 0 : dot / magnitude;
}
854
// Default number of results returned by searchVector.
const DEFAULT_LIMIT = 10;

/**
 * Adapt a CCT pattern to the MemoryItem shape so synthesized patterns can
 * be ranked alongside ordinary lessons in vector search results.
 */
function cctToMemoryItem(pattern) {
  const { id, name, description, created, sourceIds } = pattern;
  return {
    id,
    type: "lesson",
    trigger: name,
    insight: description,
    tags: [],
    source: "manual",
    context: { tool: "compound", intent: "synthesis" },
    created,
    confirmed: true,
    supersedes: [],
    related: sourceIds
  };
}
870
// Semantic search over memory items plus synthesized CCT patterns.
// Item embeddings are computed lazily: cached vectors (keyed by content
// hash) are reused, and newly computed ones are written back to the SQLite
// cache. CCT patterns are re-embedded on every call (no cache row exists
// for them). Returns up to `limit` {lesson, score} pairs, best first.
async function searchVector(repoRoot, query, options) {
  const limit = options?.limit ?? DEFAULT_LIMIT;
  const { items } = await readMemoryItems(repoRoot);
  let cctPatterns = [];
  try {
    cctPatterns = await readCctPatterns(repoRoot);
  } catch {
    // Best-effort: a broken patterns file must not break lesson search.
  }
  if (items.length === 0 && cctPatterns.length === 0) return [];
  const queryVector = await embedText(query);
  const scored = [];
  for (const item of items) {
    // Invalidated items are excluded from retrieval.
    if (item.invalidatedAt) continue;
    try {
      const itemText = `${item.trigger} ${item.insight}`;
      const hash = contentHash(item.trigger, item.insight);
      let itemVector = getCachedEmbedding(repoRoot, item.id, hash);
      if (!itemVector) {
        itemVector = await embedText(itemText);
        setCachedEmbedding(repoRoot, item.id, itemVector, hash);
      }
      const score = cosineSimilarity(queryVector, itemVector);
      scored.push({ lesson: item, score });
    } catch {
      // Skip items whose embedding fails; keep scoring the rest.
      continue;
    }
  }
  for (const pattern of cctPatterns) {
    try {
      const text = `${pattern.name} ${pattern.description}`;
      const vec = await embedText(text);
      const score = cosineSimilarity(queryVector, vec);
      // Patterns are surfaced through the same MemoryItem shape as lessons.
      scored.push({ lesson: cctToMemoryItem(pattern), score });
    } catch {
      continue;
    }
  }
  scored.sort((a, b) => b.score - a.score);
  return scored.slice(0, limit);
}
910
+
911
+ // src/memory/search/ranking.ts
912
// Multipliers applied on top of raw vector similarity (see calculateScore).
var RECENCY_THRESHOLD_DAYS = 30; // lessons at most this many days old get the recency boost
var HIGH_SEVERITY_BOOST = 1.5;
var MEDIUM_SEVERITY_BOOST = 1; // neutral multiplier (also used when severity is absent)
var LOW_SEVERITY_BOOST = 0.8; // low-severity lessons are slightly demoted
var RECENCY_BOOST = 1.2;
var CONFIRMATION_BOOST = 1.3;
var MAX_COMBINED_BOOST = 1.8; // cap so stacked boosts cannot dominate similarity
919
/**
 * Severity multiplier for ranking. Missing/unknown severity ranks the
 * same as "medium" (neutral).
 */
function severityBoost(item) {
  if (item.severity === "high") return HIGH_SEVERITY_BOOST;
  if (item.severity === "low") return LOW_SEVERITY_BOOST;
  return MEDIUM_SEVERITY_BOOST;
}
931
/**
 * Recency multiplier: items no older than RECENCY_THRESHOLD_DAYS are
 * boosted, everything else is neutral.
 */
function recencyBoost(item) {
  const isRecent = getLessonAgeDays(item) <= RECENCY_THRESHOLD_DAYS;
  return isRecent ? RECENCY_BOOST : 1;
}
935
/**
 * Confirmation multiplier: user-confirmed items are boosted, unconfirmed
 * ones are neutral.
 */
function confirmationBoost(item) {
  if (item.confirmed) {
    return CONFIRMATION_BOOST;
  }
  return 1;
}
938
/**
 * Final ranking score: vector similarity scaled by the product of the
 * severity/recency/confirmation boosts, capped at MAX_COMBINED_BOOST.
 */
function calculateScore(item, vectorSimilarity) {
  const combined = severityBoost(item) * recencyBoost(item) * confirmationBoost(item);
  const capped = combined > MAX_COMBINED_BOOST ? MAX_COMBINED_BOOST : combined;
  return vectorSimilarity * capped;
}
945
/**
 * Attach a boosted finalScore to each scored lesson and sort by it,
 * highest first. Input entries are not mutated.
 */
function rankLessons(lessons) {
  const withFinal = lessons.map((scored) => {
    const finalScore = calculateScore(scored.lesson, scored.score);
    return { ...scored, finalScore };
  });
  withFinal.sort((x, y) => (y.finalScore ?? 0) - (x.finalScore ?? 0));
  return withFinal;
}
var rankMemoryItems = rankLessons;
952
+
953
+ // src/memory/capture/quality.ts
954
// Jaccard similarity at or above this threshold marks an insight as a duplicate.
var DEFAULT_SIMILARITY_THRESHOLD = 0.8;
/**
 * Novelty gate: keyword-search existing lessons using the insight's first
 * few significant words, then compare word-overlap similarity against the
 * threshold. Returns { novel } plus reason/existingId for duplicates.
 */
async function isNovel(repoRoot, insight, options = {}) {
  const threshold = options.threshold ?? DEFAULT_SIMILARITY_THRESHOLD;
  await syncIfNeeded(repoRoot);
  const keywords = insight
    .toLowerCase()
    .replace(/[^a-z0-9\s]/g, "")
    .split(/\s+/)
    .filter((word) => word.length > 3)
    .slice(0, 3);
  if (keywords.length === 0) {
    return { novel: true };
  }
  const matches = await searchKeyword(repoRoot, keywords.join(" OR "), 10);
  if (matches.length === 0) {
    return { novel: true };
  }
  return checkSimilarity(insight, matches, threshold);
}
969
/**
 * Compare an insight against candidate lessons using Jaccard similarity
 * over lowercase whitespace-split word sets. The first lesson at or above
 * the threshold wins; otherwise the insight is considered novel.
 */
function checkSimilarity(insight, lessons, threshold) {
  const insightWords = new Set(insight.toLowerCase().split(/\s+/));
  for (const lesson of lessons) {
    const lessonWords = new Set(lesson.insight.toLowerCase().split(/\s+/));
    let shared = 0;
    for (const word of insightWords) {
      if (lessonWords.has(word)) shared += 1;
    }
    const unionSize = new Set([...insightWords, ...lessonWords]).size;
    const jaccard = unionSize > 0 ? shared / unionSize : 0;
    if (jaccard >= threshold) {
      return {
        novel: false,
        reason: `Found similar existing lesson: "${lesson.insight.slice(0, 50)}..."`,
        existingId: lesson.id
      };
    }
  }
  return { novel: true };
}
986
// Insights shorter than this many words are rejected as not actionable.
var MIN_WORD_COUNT = 4;
// Phrases that mark an insight as vague hand-waving rather than guidance.
var VAGUE_PATTERNS = [
  /\bwrite better\b/i,
  /\bbe careful\b/i,
  /\bremember to\b/i,
  /\bmake sure\b/i,
  /\btry to\b/i,
  /\bdouble check\b/i
];
// Bare "always/never <up to three words>" imperatives are too generic.
var GENERIC_IMPERATIVE_PATTERN = /^(always|never)\s+\w+(\s+\w+){0,2}$/i;
/**
 * Specificity gate: reject insights that are too short, match a vague
 * phrase, or are a bare generic imperative.
 */
function isSpecific(insight) {
  const wordCount = insight.trim().split(/\s+/).filter((w) => w.length > 0).length;
  if (wordCount < MIN_WORD_COUNT) {
    return { specific: false, reason: "Insight is too short to be actionable" };
  }
  const vague =
    VAGUE_PATTERNS.some((pattern) => pattern.test(insight)) ||
    GENERIC_IMPERATIVE_PATTERN.test(insight);
  if (vague) {
    return { specific: false, reason: "Insight matches a vague pattern" };
  }
  return { specific: true };
}
1011
// Phrasings that indicate a concrete, directive insight.
var ACTION_PATTERNS = [
  /\buse\s+.+\s+instead\s+of\b/i, // "use X instead of Y"
  /\bprefer\s+.+\s+(over|to)\b/i, // "prefer X over Y" / "prefer X to Y"
  /\balways\s+.+\s+when\b/i, // "always X when Y"
  /\bnever\s+.+\s+without\b/i, // "never X without Y"
  /\bavoid\s+(using\s+)?\w+/i, // "avoid X" / "avoid using X"
  /\bcheck\s+.+\s+before\b/i, // "check X before Y"
  /^(run|use|add|remove|install|update|configure|set|enable|disable)\s+/i // leading imperative
];
/**
 * Actionability gate: an insight is actionable when any directive
 * phrasing pattern matches.
 */
function isActionable(insight) {
  const hasAction = ACTION_PATTERNS.some((pattern) => pattern.test(insight));
  return hasAction
    ? { actionable: true }
    : { actionable: false, reason: "Insight lacks clear action guidance" };
}
1035
/**
 * Combined quality gate for proposing a lesson: it must be specific and
 * novel. (Actionability is preferred but deliberately not required here.)
 */
async function shouldPropose(repoRoot, insight) {
  const { specific, reason: specificityReason } = isSpecific(insight);
  if (!specific) {
    return { shouldPropose: false, reason: specificityReason };
  }
  const { novel, reason: noveltyReason } = await isNovel(repoRoot, insight);
  if (!novel) {
    return { shouldPropose: false, reason: noveltyReason };
  }
  return { shouldPropose: true };
}
1046
+
1047
+ // src/memory/capture/triggers.ts
1048
// Phrases that signal the user is correcting the assistant.
var USER_CORRECTION_PATTERNS = [
  /\bno\b[,.]?\s/i, // "no, ..." / "no ..."
  /\bwrong\b/i,
  /\bactually\b/i,
  /\bnot that\b/i,
  /\bi meant\b/i
];
/**
 * Scan conversation messages (skipping the first, which cannot be a
 * correction of anything) for correction phrasing. Returns the first
 * matching message with its context, or null.
 */
function detectUserCorrection(signals) {
  const { messages, context } = signals;
  if (messages.length < 2) {
    return null;
  }
  for (let idx = 1; idx < messages.length; idx++) {
    const candidate = messages[idx];
    if (!candidate) continue;
    const matched = USER_CORRECTION_PATTERNS.some((pattern) => pattern.test(candidate));
    if (matched) {
      return {
        trigger: `User correction during ${context.intent}`,
        correctionMessage: candidate,
        context
      };
    }
  }
  return null;
}
1080
/**
 * Look for a success -> failure -> success run of edits on the same file,
 * which indicates the assistant broke something and then fixed it.
 * Returns the file and a trigger string, or null when no such run exists.
 */
function detectSelfCorrection(history) {
  const { edits } = history;
  for (let start = 0; start + 2 < edits.length; start++) {
    const a = edits[start];
    const b = edits[start + 1];
    const c = edits[start + 2];
    if (!a || !b || !c) continue;
    const sameFile = a.file === b.file && b.file === c.file;
    if (sameFile && a.success && !b.success && c.success) {
      return {
        file: a.file,
        trigger: `Self-correction on ${a.file}`
      };
    }
  }
  return null;
}
1099
/**
 * Build a capture signal from a failed test run. The trigger embeds the
 * first output line that mentions error/fail/assert (or the first
 * non-empty line as a fallback), truncated to 100 chars.
 * Returns null for passing runs.
 */
function detectTestFailure(testResult) {
  if (testResult.passed) {
    return null;
  }
  const nonEmptyLines = testResult.output
    .split("\n")
    .filter((line) => line.trim().length > 0);
  const firstError =
    nonEmptyLines.find((line) => /error|fail|assert/i.test(line)) ?? nonEmptyLines[0] ?? "";
  return {
    testFile: testResult.testFile,
    errorOutput: testResult.output,
    trigger: `Test failure in ${testResult.testFile}: ${firstError.slice(0, 100)}`
  };
}
1111
// Zod schema for a user-correction signal: the conversation messages plus
// the tool/intent context they occurred in.
var CorrectionSignalSchema = z.object({
  messages: z.array(z.string()),
  context: ContextSchema
});
// One entry in an edit history: which file was edited and whether the edit
// succeeded.
var EditEntrySchema = z.object({
  file: z.string(),
  success: z.boolean(),
  timestamp: z.number()
});
// Ordered edit list scanned by detectSelfCorrection for success/failure runs.
var EditHistorySchema = z.object({
  edits: z.array(EditEntrySchema)
});
// Outcome of a test run, consumed by detectTestFailure.
var TestResultSchema = z.object({
  passed: z.boolean(),
  output: z.string(),
  testFile: z.string()
});
// NOTE(review): the result of this union is discarded — presumably the named
// binding was tree-shaken by the bundler and only the expression survived.
// Confirm against the TypeScript source before relying on it.
z.discriminatedUnion("type", [
  z.object({ type: z.literal("user"), data: CorrectionSignalSchema }),
  z.object({ type: z.literal("self"), data: EditHistorySchema }),
  z.object({ type: z.literal("test"), data: TestResultSchema })
]);
1133
+
1134
+ // src/memory/retrieval/session.ts
1135
// Default number of lessons surfaced at session start.
var DEFAULT_LIMIT2 = 5;
/**
 * True when the item carries an explicit severity field.
 */
function hasSeverity(item) {
  return item.severity !== void 0;
}
/**
 * Load the most recent confirmed, non-invalidated high-severity lessons
 * for session priming, newest first, capped at `limit`. Bumps the
 * retrieval counter for everything returned.
 */
async function loadSessionLessons(repoRoot, limit = DEFAULT_LIMIT2) {
  const { items } = await readMemoryItems(repoRoot);
  const candidates = items.filter(
    (item) => hasSeverity(item) && item.severity === "high" && item.confirmed && !item.invalidatedAt
  );
  candidates.sort(
    (a, b) => new Date(b.created).getTime() - new Date(a.created).getTime()
  );
  const selected = candidates.slice(0, limit);
  if (selected.length > 0) {
    // NOTE(review): intentionally not awaited here, matching the original
    // fire-and-forget call — confirm incrementRetrievalCount is synchronous.
    incrementRetrievalCount(repoRoot, selected.map((lesson) => lesson.id));
  }
  return selected;
}
var loadSessionMemory = loadSessionLessons;
1156
+
1157
+ // src/memory/retrieval/plan.ts
1158
// Default number of lessons surfaced for a plan review.
var DEFAULT_LIMIT3 = 5;
/**
 * Retrieve lessons relevant to a plan: over-fetch by 2x from vector
 * search, re-rank with boosts, keep the top `limit`, bump their retrieval
 * counters, and render the "Lessons Check" message.
 */
async function retrieveForPlan(repoRoot, planText, limit = DEFAULT_LIMIT3) {
  const candidates = await searchVector(repoRoot, planText, { limit: limit * 2 });
  const lessons = rankLessons(candidates).slice(0, limit);
  if (lessons.length > 0) {
    incrementRetrievalCount(repoRoot, lessons.map((entry) => entry.lesson.id));
  }
  return { lessons, message: formatLessonsCheck(lessons) };
}
1169
/**
 * Render scored lessons as a "Lessons Check" banner: a header rule
 * followed by a numbered insight list, or a no-results notice.
 */
function formatLessonsCheck(lessons) {
  const header = "Lessons Check\n" + "\u2500".repeat(40);
  if (lessons.length === 0) {
    return `${header}
No relevant lessons found for this plan.`;
  }
  const body = lessons
    .map((entry, index) => `${index + 1}. ${entry.lesson.insight}`)
    .join("\n");
  return `${header}
${body}`;
}
var formatMemoryCheck = formatLessonsCheck;
1184
+
1185
+ // src/cli-utils.ts
1186
/**
 * Repository root for all CLI operations: the COMPOUND_AGENT_ROOT env
 * override when set, otherwise the current working directory.
 */
function getRepoRoot() {
  const override = process.env["COMPOUND_AGENT_ROOT"];
  return override ?? process.cwd();
}
1189
+
1190
+ // src/commands/management-prime.ts
1191
// Markdown briefing emitted at the top of every `ca prime` output. It steers
// the agent toward the MCP tools (memory_search / memory_capture) over the
// CLI and restates the retrieval + capture protocols. The \` escapes are
// required because this is itself a template literal.
var TRUST_LANGUAGE_TEMPLATE = `# Compound Agent Active

> **Context Recovery**: Run \`ca prime\` after compaction, clear, or new session

## MCP Tools (ALWAYS USE THESE)

**You MUST use MCP tools, NOT CLI commands:**

| Tool | Purpose |
|------|---------|
| \`memory_search\` | Search lessons - call BEFORE architectural decisions |
| \`memory_capture\` | Capture lessons - call AFTER corrections or discoveries |

## Core Constraints

**Default**: Use MCP tools for lesson management
**Prohibited**: NEVER edit .claude/lessons/ files directly

**Default**: Propose lessons freely after corrections
**Prohibited**: NEVER propose without quality gate (novel + specific; prefer actionable)

## Retrieval Protocol

You MUST call \`memory_search\` BEFORE:
- Architectural decisions or complex planning
- Implementing patterns you've done before in this repo

**NEVER skip memory_search for complex decisions.** Past mistakes will repeat.

## Capture Protocol

Call \`memory_capture\` AFTER:
- User corrects you ("no", "wrong", "actually...")
- You self-correct after iteration failures
- Test fails then you fix it

**Quality gate** (must pass before capturing):
- Novel (not already stored)
- Specific (clear guidance)
- Actionable (preferred, not mandatory)

**Workflow**: Search BEFORE deciding, capture AFTER learning.

## CLI (fallback only)

When MCP is unavailable: \`ca search "query"\`, \`ca learn "insight"\`, \`ca list\`
`;
1238
/**
 * Map a machine source tag to a human-readable label for prime output.
 * Unknown tags pass through unchanged.
 */
function formatSource(source) {
  const labels = new Map([
    ["user_correction", "user correction"],
    ["self_correction", "self correction"],
    ["test_failure", "test failure"],
    ["manual", "manual"]
  ]);
  return labels.get(source) ?? source;
}
/**
 * Render one lesson as a markdown bullet for the prime context: bolded
 * insight, optional parenthesized tag list, and a provenance line with
 * the creation date (YYYY-MM-DD) and source label.
 */
function formatLessonForPrime(lesson) {
  const learnedOn = lesson.created.slice(0, 10);
  const tagSuffix = lesson.tags.length > 0 ? ` (${lesson.tags.join(", ")})` : "";
  const sourceLabel = formatSource(lesson.source);
  return `- **${lesson.insight}**${tagSuffix}
  Learned: ${learnedOn} via ${sourceLabel}`;
}
1259
/**
 * Build the full `ca prime` context: the trust-language briefing, plus a
 * "Mandatory Recall" section listing up to five high-severity session
 * lessons when any exist.
 */
async function getPrimeContext(repoRoot) {
  const root = repoRoot ?? getRepoRoot();
  const lessons = await loadSessionLessons(root, 5);
  if (lessons.length === 0) {
    return TRUST_LANGUAGE_TEMPLATE;
  }
  const formattedLessons = lessons.map(formatLessonForPrime).join("\n\n");
  return TRUST_LANGUAGE_TEMPLATE + `
---

# [CRITICAL] Mandatory Recall

Critical lessons from past corrections:

${formattedLessons}
`;
}
1277
+
1278
+ // src/audit/checks/lessons.ts
1279
/**
 * Audit check that surfaces every high-severity lesson as an info-level
 * finding (no file association). filesChecked reports the lessons store
 * path whenever any items exist.
 */
async function checkLessons(repoRoot) {
  const { items } = await readMemoryItems(repoRoot);
  const findings = items
    .filter((item) => item.severity === "high")
    .map((item) => ({
      file: "",
      issue: `High-severity lesson: ${item.insight}`,
      severity: "info",
      relatedLessonId: item.id,
      source: "lesson"
    }));
  const filesChecked = items.length > 0 ? [LESSONS_PATH] : [];
  return { findings, filesChecked };
}
1296
// Severity levels for rule findings ("2" suffix avoids clashing with the
// lesson SeveritySchema defined earlier in this bundle).
var SeveritySchema2 = z.enum(["error", "warning", "info"]);
// Check: a regex that must (mustMatch: true) or must not appear in files
// matching the glob.
var FilePatternCheckSchema = z.object({
  type: z.literal("file-pattern"),
  glob: z.string(),
  pattern: z.string(),
  mustMatch: z.boolean().optional()
});
// Check: files matching the glob must not exceed maxLines lines.
var FileSizeCheckSchema = z.object({
  type: z.literal("file-size"),
  glob: z.string(),
  maxLines: z.number().int().positive()
});
// Check: run a shell command and compare its exit code (default expected: 0).
var ScriptCheckSchema = z.object({
  type: z.literal("script"),
  command: z.string(),
  expectExitCode: z.number().int().optional()
});
// Closed union over the three check kinds, discriminated on "type".
var RuleCheckSchema = z.discriminatedUnion("type", [
  FilePatternCheckSchema,
  FileSizeCheckSchema,
  ScriptCheckSchema
]);
// One configured rule: identity, human description, severity, the check to
// run, and remediation text surfaced alongside each violation.
var RuleSchema = z.object({
  id: z.string().min(1),
  description: z.string(),
  severity: SeveritySchema2,
  check: RuleCheckSchema,
  remediation: z.string()
});
// Shape of the .claude/rules.json config file.
var RuleConfigSchema = z.object({
  rules: z.array(RuleSchema)
});
1328
/**
 * Convert a simple glob (supporting "**" + "/" for any directory depth and
 * "*" for any run of non-separator characters) into an anchored RegExp.
 *
 * Fix: previously only "." was escaped, so regex metacharacters such as
 * "+", "(", ")", "?", "[", "]" in a glob were interpreted as regex syntax
 * and produced wrong matches. All metacharacters except the glob
 * wildcards are now escaped first.
 */
function globToRegex(glob) {
  // Escape regex metacharacters, leaving "*" (wildcard) and "/" (separator) alone.
  const escaped = glob.replace(/[.+^${}()|[\]\\?]/g, "\\$&");
  const pattern = escaped
    .replace(/\*\*\//g, "(.+/)?") // "**/" => optional leading directories
    .replace(/\*/g, "[^/]*"); // "*" => anything within one path segment
  return new RegExp(`^${pattern}$`);
}
1332
/**
 * Recursively collect files under baseDir whose base-relative path matches
 * the glob, skipping dotfiles/dot-directories and node_modules. Returns
 * relative paths sorted lexicographically.
 */
function findFiles(baseDir, glob) {
  const matcher = globToRegex(glob);
  const matches = [];
  const walk = (dir) => {
    for (const entry of readdirSync(dir)) {
      if (entry.startsWith(".") || entry === "node_modules") continue;
      const fullPath = join(dir, entry);
      if (statSync(fullPath).isDirectory()) {
        walk(fullPath);
        continue;
      }
      const relPath = relative(baseDir, fullPath);
      if (matcher.test(relPath)) {
        matches.push(relPath);
      }
    }
  };
  walk(baseDir);
  matches.sort();
  return matches;
}
1354
+
1355
+ // src/rules/checks/file-pattern.ts
1356
/**
 * Evaluate a file-pattern rule. With mustMatch, a file violates when no
 * line matches the pattern; otherwise every matching line is a violation
 * (reported with its 1-based line number).
 */
function runFilePatternCheck(baseDir, check) {
  const regex = new RegExp(check.pattern);
  const violations = [];
  for (const file of findFiles(baseDir, check.glob)) {
    const lines = readFileSync(join(baseDir, file), "utf-8").split("\n");
    if (check.mustMatch) {
      if (!lines.some((line) => regex.test(line))) {
        violations.push({
          file,
          message: `Pattern ${check.pattern} missing from file`
        });
      }
      continue;
    }
    lines.forEach((line, index) => {
      if (regex.test(line)) {
        violations.push({
          file,
          line: index + 1,
          message: `Pattern ${check.pattern} matched`
        });
      }
    });
  }
  return violations;
}
1386
/**
 * Evaluate a file-size rule: flag files whose line count exceeds
 * check.maxLines. A trailing newline does not count as an extra line.
 */
function runFileSizeCheck(baseDir, check) {
  const violations = [];
  for (const file of findFiles(baseDir, check.glob)) {
    const content = readFileSync(join(baseDir, file), "utf-8");
    let lineCount = 0;
    if (content !== "") {
      const segments = content.split("\n");
      // Drop only a trailing empty segment produced by a final newline.
      lineCount = segments.filter(
        (segment, idx) => idx < segments.length - 1 || segment !== ""
      ).length;
    }
    if (lineCount > check.maxLines) {
      violations.push({
        file,
        message: `File has ${lineCount} lines, exceeds limit of ${check.maxLines}`
      });
    }
  }
  return violations;
}
1401
/**
 * Evaluate a script rule: run the command in baseDir and compare its exit
 * code to expectExitCode (default 0). A mismatch yields one violation,
 * including trimmed stderr when available.
 */
function runScriptCheck(check, baseDir) {
  const expectedCode = check.expectExitCode ?? 0;
  try {
    execSync(check.command, { stdio: ["pipe", "pipe", "pipe"], cwd: baseDir });
  } catch (err) {
    // execSync throws on non-zero exit; err.status carries the exit code.
    const exitCode = err.status ?? 1;
    if (exitCode === expectedCode) {
      return [];
    }
    const stderr = (err.stderr ?? Buffer.alloc(0)).toString("utf-8").trim();
    const detail = `Script exited with exit code ${exitCode} (expected ${expectedCode})`;
    return [{ message: stderr ? `${detail}: ${stderr}` : detail }];
  }
  if (expectedCode !== 0) {
    return [{ message: `Script exited with exit code 0, expected ${expectedCode}` }];
  }
  return [];
}
1419
+
1420
+ // src/rules/engine.ts
1421
/**
 * Load and validate .claude/rules.json under baseDir. A missing file
 * yields an empty rule set; malformed JSON or schema violations throw.
 */
function loadRuleConfig(baseDir) {
  const configPath = join(baseDir, ".claude", "rules.json");
  if (!existsSync(configPath)) {
    return { rules: [] };
  }
  const parsed = JSON.parse(readFileSync(configPath, "utf-8"));
  return RuleConfigSchema.parse(parsed);
}
1430
/**
 * Run every rule, converting a thrown check error into a single synthetic
 * violation so one broken rule cannot abort the run. Returns one
 * { rule, violations, passed } result per rule, in input order.
 */
function runRules(baseDir, rules) {
  const evaluate = (rule) => {
    try {
      const violations = runCheck(baseDir, rule);
      return { rule, violations, passed: violations.length === 0 };
    } catch (err) {
      const message = err instanceof Error ? err.message : "Rule check failed";
      return {
        rule,
        violations: [{ message: `Rule check error: ${message}` }],
        passed: false
      };
    }
  };
  return rules.map(evaluate);
}
1441
/**
 * Dispatch a rule's check to the matching runner by its discriminant.
 */
function runCheck(baseDir, rule) {
  const { check } = rule;
  if (check.type === "file-pattern") return runFilePatternCheck(baseDir, check);
  if (check.type === "file-size") return runFileSizeCheck(baseDir, check);
  if (check.type === "script") return runScriptCheck(check, baseDir);
  // Unreachable for schema-validated rules (RuleCheckSchema is a closed union).
}
1451
+
1452
+ // src/audit/checks/patterns.ts
1453
/**
 * Audit check that scans every .ts source file for the "bad" snippets of
 * lessons that carry a pattern, emitting one warning finding per
 * (pattern, file) hit with the "good" replacement as a suggested fix.
 *
 * Fix: file contents are now read once and cached instead of being
 * re-read for every pattern (previously O(patterns x files) disk reads).
 * Finding order (per pattern, then per file) is unchanged.
 */
async function checkPatterns(repoRoot) {
  const { items } = await readMemoryItems(repoRoot);
  const patterned = items.filter((item) => item.pattern?.bad);
  if (patterned.length === 0) {
    return { findings: [], filesChecked: [] };
  }
  const sourceFiles = findFiles(repoRoot, "**/*.ts");
  // Read each source file exactly once, up front.
  const contents = new Map();
  for (const relPath of sourceFiles) {
    contents.set(relPath, readFileSync(join(repoRoot, relPath), "utf-8"));
  }
  const findings = [];
  for (const item of patterned) {
    const bad = item.pattern.bad;
    for (const relPath of sourceFiles) {
      if (!contents.get(relPath).includes(bad)) continue;
      findings.push({
        file: relPath,
        issue: `Bad pattern found: "${bad}" (${item.insight})`,
        severity: "warning",
        relatedLessonId: item.id,
        suggestedFix: item.pattern.good ? `Use: ${item.pattern.good}` : void 0,
        source: "pattern"
      });
    }
  }
  return { findings, filesChecked: sourceFiles };
}
1479
+
1480
+ // src/audit/checks/rules.ts
1481
/**
 * Audit check backed by the configurable rules engine. A config that
 * fails to load becomes a single error finding; otherwise every rule
 * violation is mapped to a finding carrying the rule's severity and
 * remediation text.
 */
function checkRules(repoRoot) {
  let config;
  try {
    config = loadRuleConfig(repoRoot);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to load rules config";
    const finding = {
      file: ".claude/rules.json",
      issue: `Invalid rules configuration: ${message}`,
      severity: "error",
      source: "rule"
    };
    return { findings: [finding], filesChecked: [] };
  }
  if (config.rules.length === 0) {
    return { findings: [], filesChecked: [] };
  }
  const findings = [];
  const checkedFiles = new Set();
  for (const result of runRules(repoRoot, config.rules)) {
    for (const violation of result.violations) {
      if (violation.file) {
        checkedFiles.add(violation.file);
      }
      findings.push({
        file: violation.file ?? "",
        issue: violation.message,
        severity: result.rule.severity,
        suggestedFix: result.rule.remediation,
        source: "rule"
      });
    }
  }
  return { findings, filesChecked: [...checkedFiles] };
}
1519
+
1520
+ // src/audit/engine.ts
1521
/**
 * Run the enabled audit checks (rules, patterns, lessons — all on by
 * default), merge their findings, and summarize counts by severity plus
 * the number of distinct files examined.
 */
async function runAudit(repoRoot, options = {}) {
  const { includeRules = true, includePatterns = true, includeLessons = true } = options;
  const findings = [];
  const checkedFiles = new Set();
  const collect = ({ findings: found, filesChecked }) => {
    findings.push(...found);
    filesChecked.forEach((file) => checkedFiles.add(file));
  };
  if (includeRules) collect(checkRules(repoRoot));
  if (includePatterns) collect(await checkPatterns(repoRoot));
  if (includeLessons) collect(await checkLessons(repoRoot));
  let errors = 0;
  let warnings = 0;
  let infos = 0;
  for (const finding of findings) {
    if (finding.severity === "error") errors += 1;
    else if (finding.severity === "warning") warnings += 1;
    else if (finding.severity === "info") infos += 1;
  }
  return {
    findings,
    summary: { errors, warnings, infos, filesChecked: checkedFiles.size },
    timestamp: new Date().toISOString()
  };
}
1549
// One audit finding: where it occurred, what is wrong, how severe it is,
// and which check family (rule/pattern/lesson) produced it.
var AuditFindingSchema = z.object({
  file: z.string(),
  issue: z.string(),
  severity: z.enum(["error", "warning", "info"]),
  relatedLessonId: z.string().optional(),
  suggestedFix: z.string().optional(),
  source: z.enum(["rule", "pattern", "lesson"])
});
// Aggregate counts reported alongside the findings list.
var AuditSummarySchema = z.object({
  errors: z.number(),
  warnings: z.number(),
  infos: z.number(),
  filesChecked: z.number()
});
// Full audit report: findings, summary counts, and an ISO-8601 timestamp.
var AuditReportSchema = z.object({
  findings: z.array(AuditFindingSchema),
  summary: AuditSummarySchema,
  timestamp: z.string()
});
1568
+
1569
+ // src/index.ts
1570
// Resolve the package's own version at runtime from ../package.json
// (relative to dist/index.js). createRequire is needed because ES modules
// have no native require() for loading JSON.
var _require = createRequire(import.meta.url);
var _pkg = _require("../package.json");
var VERSION = _pkg.version;
1573
+
1574
+ export { AuditFindingSchema, AuditReportSchema, CCT_PATTERNS_PATH, CctPatternSchema, DB_PATH, LESSONS_PATH, LegacyLessonSchema, LegacyTombstoneSchema, LessonItemSchema, LessonRecordSchema, LessonSchema, LessonTypeSchema, MODEL_FILENAME, MODEL_URI, MemoryItemRecordSchema, MemoryItemSchema, MemoryItemTypeSchema, PatternItemSchema, PreferenceItemSchema, SolutionItemSchema, VERSION, appendLesson, appendMemoryItem, buildSimilarityMatrix, calculateScore, closeDb, clusterBySimilarity, confirmationBoost, cosineSimilarity, detectSelfCorrection, detectTestFailure, detectUserCorrection, embedText, embedTexts, formatLessonsCheck, formatMemoryCheck, generateId, getEmbedding, getPrimeContext, isActionable, isModelAvailable, isModelUsable, isNovel, isSpecific, loadSessionLessons, loadSessionMemory, rankLessons, rankMemoryItems, readCctPatterns, readLessons, readMemoryItems, rebuildIndex, recencyBoost, resolveModel, retrieveForPlan, runAudit, searchKeyword, searchVector, severityBoost, shouldPropose, synthesizePattern, unloadEmbedding, writeCctPatterns };
1575
//# sourceMappingURL=index.js.map