learning-agent 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js ADDED
@@ -0,0 +1,1045 @@
1
+ #!/usr/bin/env node
2
+ import chalk from 'chalk';
3
+ import { Command } from 'commander';
4
+ import { statSync, mkdirSync } from 'fs';
5
+ import { join, dirname } from 'path';
6
+ import * as fs from 'fs/promises';
7
+ import { mkdir, appendFile, readFile, writeFile, rename } from 'fs/promises';
8
+ import { createHash } from 'crypto';
9
+ import Database from 'better-sqlite3';
10
+ import { z } from 'zod';
11
+ import 'node-llama-cpp';
12
+ import { homedir } from 'os';
13
+
14
// Zod schemas describing the Lesson record stored in the JSONL log (src/schemas.ts).

// How a lesson was captured.
var SourceSchema = z.enum([
  "user_correction",
  "self_correction",
  "test_failure",
  "manual"
]);
// Where the lesson originated: which tool was in use and what the intent was.
var ContextSchema = z.object({
  tool: z.string(),
  intent: z.string()
});
// Optional bad/good code pair illustrating the lesson.
var PatternSchema = z.object({
  bad: z.string(),
  good: z.string()
});
var SeveritySchema = z.enum(["high", "medium", "low"]);
var LessonTypeSchema = z.enum(["quick", "full"]);
// Full lesson record; parseJsonLine validates every JSONL line against this.
var LessonSchema = z.object({
  // Core identity (required)
  id: z.string(),
  type: LessonTypeSchema,
  trigger: z.string(),
  insight: z.string(),
  // Metadata (required)
  tags: z.array(z.string()),
  source: SourceSchema,
  context: ContextSchema,
  created: z.string(),
  // ISO8601
  confirmed: z.boolean(),
  // Relationships (required, can be empty arrays)
  supersedes: z.array(z.string()),
  related: z.array(z.string()),
  // Extended fields (optional - typically present for 'full' type)
  evidence: z.string().optional(),
  severity: SeveritySchema.optional(),
  pattern: PatternSchema.optional(),
  // Lifecycle fields (optional)
  deleted: z.boolean().optional(),
  retrievalCount: z.number().optional()
});
// Tombstone schema: the bundler tree-shook its binding, leaving the bare
// expression behind. It is evaluated for no effect — harmless residue.
z.object({
  id: z.string(),
  deleted: z.literal(true),
  deletedAt: z.string()
  // ISO8601
});
60
/**
 * Derive a stable lesson id from the insight text.
 * The same insight always produces the same id: "L" followed by the first
 * eight hex characters of the SHA-256 digest.
 * @param {string} insight - Lesson insight text to hash.
 * @returns {string} Deterministic id of the form L<8 hex chars>.
 */
function generateId(insight) {
  const hasher = createHash("sha256");
  hasher.update(insight);
  const hex = hasher.digest("hex");
  const shortHash = hex.slice(0, 8);
  return `L${shortHash}`;
}
64
+
65
// src/storage/jsonl.ts
// Repo-relative location of the append-only lesson log.
var LESSONS_PATH = ".claude/lessons/index.jsonl";

/**
 * Append one lesson to the JSONL log as a single line, creating the
 * containing directory on first use.
 * @param {string} repoRoot - Repository root the log lives under.
 * @param {object} lesson - Lesson record to serialize.
 */
async function appendLesson(repoRoot, lesson) {
  const target = join(repoRoot, LESSONS_PATH);
  await mkdir(dirname(target), { recursive: true });
  await appendFile(target, `${JSON.stringify(lesson)}\n`, "utf-8");
}
73
/**
 * Parse a single JSONL line into a validated lesson.
 * Two failure modes share one reporting path: invalid JSON, and JSON that
 * fails LessonSchema validation. In non-strict mode failures are reported
 * via onParseError (if provided) and null is returned; in strict mode they
 * throw instead.
 * @param {string} line - Trimmed JSONL line.
 * @param {number} lineNumber - 1-based line number for error reporting.
 * @param {boolean} strict - Throw on failure instead of skipping.
 * @param {(err: object) => void} [onParseError] - Optional failure callback.
 * @returns {object | null} The validated lesson, or null when skipped.
 */
function parseJsonLine(line, lineNumber, strict, onParseError) {
  const fail = (message, cause) => {
    const parseError = { line: lineNumber, message, cause };
    if (strict) {
      throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);
    }
    onParseError?.(parseError);
    return null;
  };
  let parsed;
  try {
    parsed = JSON.parse(line);
  } catch (err) {
    return fail(`Invalid JSON: ${err.message}`, err);
  }
  const result = LessonSchema.safeParse(parsed);
  if (result.success) {
    return result.data;
  }
  return fail(`Schema validation failed: ${result.error.message}`, result.error);
}
104
/**
 * Replay the JSONL log into the current set of live lessons.
 * Later lines win: a lesson line replaces any earlier line with the same id,
 * and a tombstone line ({deleted: true}) removes it. A missing log file
 * yields an empty result rather than an error; any other read error is
 * rethrown.
 * @param {string} repoRoot
 * @param {{strict?: boolean, onParseError?: Function}} [options]
 * @returns {Promise<{lessons: object[], skippedCount: number}>} Live lessons
 *   plus the count of unparseable/invalid lines that were skipped.
 */
async function readLessons(repoRoot, options = {}) {
  const { strict = false, onParseError } = options;
  const filePath = join(repoRoot, LESSONS_PATH);
  let content;
  try {
    content = await readFile(filePath, "utf-8");
  } catch (err) {
    // No log yet is a normal state, not an error.
    if (err.code === "ENOENT") {
      return { lessons: [], skippedCount: 0 };
    }
    throw err;
  }
  const byId = new Map();
  let skippedCount = 0;
  let lineNumber = 0;
  for (const rawLine of content.split("\n")) {
    lineNumber += 1;
    const trimmed = rawLine.trim();
    if (trimmed === "") continue;
    const lesson = parseJsonLine(trimmed, lineNumber, strict, onParseError);
    if (lesson === null) {
      skippedCount += 1;
      continue;
    }
    if (lesson.deleted) {
      byId.delete(lesson.id);
    } else {
      byId.set(lesson.id, lesson);
    }
  }
  return { lessons: [...byId.values()], skippedCount };
}
135
+
136
// src/storage/sqlite.ts
// Repo-relative path of the derived SQLite cache. It is safe to delete:
// rebuildIndex regenerates it entirely from the JSONL log.
var DB_PATH = ".claude/.cache/lessons.sqlite";
// Full DDL, idempotent throughout (IF NOT EXISTS). The FTS5 table uses
// external content (content='lessons') and is kept in sync with the main
// table by the three AFTER INSERT/DELETE/UPDATE triggers below.
var SCHEMA_SQL = `
-- Main lessons table
CREATE TABLE IF NOT EXISTS lessons (
  id TEXT PRIMARY KEY,
  type TEXT NOT NULL,
  trigger TEXT NOT NULL,
  insight TEXT NOT NULL,
  evidence TEXT,
  severity TEXT,
  tags TEXT NOT NULL DEFAULT '',
  source TEXT NOT NULL,
  context TEXT NOT NULL DEFAULT '{}',
  supersedes TEXT NOT NULL DEFAULT '[]',
  related TEXT NOT NULL DEFAULT '[]',
  created TEXT NOT NULL,
  confirmed INTEGER NOT NULL DEFAULT 0,
  deleted INTEGER NOT NULL DEFAULT 0,
  retrieval_count INTEGER NOT NULL DEFAULT 0,
  last_retrieved TEXT,
  embedding BLOB,
  content_hash TEXT
);

-- FTS5 virtual table for full-text search
CREATE VIRTUAL TABLE IF NOT EXISTS lessons_fts USING fts5(
  id,
  trigger,
  insight,
  tags,
  content='lessons',
  content_rowid='rowid'
);

-- Trigger to sync FTS on INSERT
CREATE TRIGGER IF NOT EXISTS lessons_ai AFTER INSERT ON lessons BEGIN
  INSERT INTO lessons_fts(rowid, id, trigger, insight, tags)
  VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags);
END;

-- Trigger to sync FTS on DELETE
CREATE TRIGGER IF NOT EXISTS lessons_ad AFTER DELETE ON lessons BEGIN
  INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags)
  VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags);
END;

-- Trigger to sync FTS on UPDATE
CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE ON lessons BEGIN
  INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags)
  VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags);
  INSERT INTO lessons_fts(rowid, id, trigger, insight, tags)
  VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags);
END;

-- Index for common queries
CREATE INDEX IF NOT EXISTS idx_lessons_created ON lessons(created);
CREATE INDEX IF NOT EXISTS idx_lessons_confirmed ON lessons(confirmed);
CREATE INDEX IF NOT EXISTS idx_lessons_severity ON lessons(severity);

-- Metadata table for sync tracking
CREATE TABLE IF NOT EXISTS metadata (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL
);
`;
// Apply the DDL above to an open database handle (no-op when already applied).
function createSchema(database) {
  database.exec(SCHEMA_SQL);
}
205
// Lazily-opened singleton database handle (see openDb).
var db = null;

/**
 * Hash the searchable text of a lesson (trigger and insight, space-joined)
 * so cached embeddings can be invalidated when either field changes.
 * @param {string} trigger
 * @param {string} insight
 * @returns {string} 64-character lowercase SHA-256 hex digest.
 */
function contentHash(trigger, insight) {
  const hasher = createHash("sha256");
  hasher.update(`${trigger} ${insight}`);
  return hasher.digest("hex");
}
209
// Open (or return the already-open) SQLite cache for the repo, creating the
// cache directory and applying the schema on first open.
// NOTE(review): the singleton ignores repoRoot after the first call, so one
// process can only ever operate on a single repository's cache — confirm
// callers never mix roots within a process.
function openDb(repoRoot) {
  if (db) return db;
  const dbPath = join(repoRoot, DB_PATH);
  const dir = dirname(dbPath);
  mkdirSync(dir, { recursive: true });
  db = new Database(dbPath);
  // WAL lets readers proceed while a writer is active.
  db.pragma("journal_mode = WAL");
  createSchema(db);
  return db;
}
219
/**
 * Convert a SQLite row (flat columns, JSON-encoded strings, 0/1 integer
 * flags) back into a Lesson object matching the JSONL schema.
 * Optional fields (evidence, severity, deleted, retrievalCount) are only
 * attached when they carry a meaningful value, so round-tripped lessons
 * keep their original shape.
 * @param {object} row - Raw row from the `lessons` table.
 * @returns {object} Lesson record.
 */
function rowToLesson(row) {
  const lesson = {
    id: row.id,
    type: row.type,
    trigger: row.trigger,
    insight: row.insight,
    // The column default is '' — that must become an empty tag list,
    // not [''], hence the filter(Boolean).
    tags: row.tags ? row.tags.split(",").filter(Boolean) : [],
    source: row.source,
    context: JSON.parse(row.context),
    supersedes: JSON.parse(row.supersedes),
    related: JSON.parse(row.related),
    created: row.created,
    // Stored as a 0/1 integer.
    confirmed: row.confirmed === 1
  };
  if (row.evidence !== null) lesson.evidence = row.evidence;
  if (row.severity !== null) lesson.severity = row.severity;
  if (row.deleted === 1) lesson.deleted = true;
  if (row.retrieval_count > 0) lesson.retrievalCount = row.retrieval_count;
  return lesson;
}
247
// Snapshot id -> { embedding, contentHash } for every row that already has
// an embedding, so rebuildIndex can carry embeddings over instead of
// recomputing them.
function collectCachedEmbeddings(database) {
  const cache = /* @__PURE__ */ new Map();
  const rows = database.prepare("SELECT id, embedding, content_hash FROM lessons WHERE embedding IS NOT NULL").all();
  for (const row of rows) {
    // Both pieces are required: an embedding without its source hash
    // cannot be validated for reuse.
    if (row.embedding && row.content_hash) {
      cache.set(row.id, { embedding: row.embedding, contentHash: row.content_hash });
    }
  }
  return cache;
}
// Named-parameter (@field) insert used by rebuildIndex.
var INSERT_LESSON_SQL = `
INSERT INTO lessons (id, type, trigger, insight, evidence, severity, tags, source, context, supersedes, related, created, confirmed, deleted, retrieval_count, last_retrieved, embedding, content_hash)
VALUES (@id, @type, @trigger, @insight, @evidence, @severity, @tags, @source, @context, @supersedes, @related, @created, @confirmed, @deleted, @retrieval_count, @last_retrieved, @embedding, @content_hash)
`;
// mtime (in ms) of the JSONL log, or null when the file does not exist.
function getJsonlMtime(repoRoot) {
  const jsonlPath = join(repoRoot, LESSONS_PATH);
  try {
    const stat = statSync(jsonlPath);
    return stat.mtimeMs;
  } catch {
    return null;
  }
}
// JSONL mtime recorded at the end of the last successful rebuild, or null
// if no rebuild has been recorded yet.
function getLastSyncMtime(database) {
  const row = database.prepare("SELECT value FROM metadata WHERE key = ?").get("last_sync_mtime");
  return row ? parseFloat(row.value) : null;
}
// Persist the JSONL mtime after a rebuild (upsert into the metadata table).
function setLastSyncMtime(database, mtime) {
  database.prepare("INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)").run("last_sync_mtime", mtime.toString());
}
277
// Rebuild the SQLite index from the JSONL source of truth.
// Embeddings are preserved across the rebuild when the lesson's
// trigger+insight hash is unchanged; last_retrieved is reset.
async function rebuildIndex(repoRoot) {
  const database = openDb(repoRoot);
  const { lessons } = await readLessons(repoRoot);
  // Capture existing embeddings before wiping the table so they can be
  // carried over below.
  const cachedEmbeddings = collectCachedEmbeddings(database);
  database.exec("DELETE FROM lessons");
  if (lessons.length === 0) {
    // Still record the sync point so syncIfNeeded won't keep rebuilding.
    const mtime2 = getJsonlMtime(repoRoot);
    if (mtime2 !== null) {
      setLastSyncMtime(database, mtime2);
    }
    return;
  }
  const insert = database.prepare(INSERT_LESSON_SQL);
  // One transaction: all-or-nothing, and far faster than autocommit per row.
  const insertMany = database.transaction((items) => {
    for (const lesson of items) {
      const newHash = contentHash(lesson.trigger, lesson.insight);
      const cached = cachedEmbeddings.get(lesson.id);
      // Reuse the cached embedding only if the text it was computed from
      // is byte-identical (hash match).
      const hasValidCache = cached && cached.contentHash === newHash;
      insert.run({
        id: lesson.id,
        type: lesson.type,
        trigger: lesson.trigger,
        insight: lesson.insight,
        evidence: lesson.evidence ?? null,
        severity: lesson.severity ?? null,
        tags: lesson.tags.join(","),
        source: lesson.source,
        context: JSON.stringify(lesson.context),
        supersedes: JSON.stringify(lesson.supersedes),
        related: JSON.stringify(lesson.related),
        created: lesson.created,
        confirmed: lesson.confirmed ? 1 : 0,
        deleted: lesson.deleted ? 1 : 0,
        retrieval_count: lesson.retrievalCount ?? 0,
        last_retrieved: null,
        // Reset on rebuild since we're rebuilding from source
        embedding: hasValidCache ? cached.embedding : null,
        content_hash: hasValidCache ? cached.contentHash : null
      });
    }
  });
  insertMany(lessons);
  // Record the JSONL mtime so syncIfNeeded can skip unchanged files.
  const mtime = getJsonlMtime(repoRoot);
  if (mtime !== null) {
    setLastSyncMtime(database, mtime);
  }
}
324
/**
 * Rebuild the SQLite index when the JSONL log has changed since the last
 * recorded sync (or when forced).
 * @param {string} repoRoot
 * @param {{force?: boolean}} [options]
 * @returns {Promise<boolean>} true when a rebuild was performed.
 */
async function syncIfNeeded(repoRoot, options = {}) {
  const { force = false } = options;
  const jsonlMtime = getJsonlMtime(repoRoot);
  // No JSONL file and no force: nothing to index.
  if (jsonlMtime === null && !force) {
    return false;
  }
  const database = openDb(repoRoot);
  const lastSyncMtime = getLastSyncMtime(database);
  const neverSynced = lastSyncMtime === null;
  const jsonlNewer = jsonlMtime !== null && jsonlMtime > lastSyncMtime;
  if (!force && !neverSynced && !jsonlNewer) {
    return false;
  }
  await rebuildIndex(repoRoot);
  return true;
}
339
// Keyword search via FTS5 MATCH, joining back to the main table for full
// rows. Returns at most `limit` lessons and bumps retrieval counters for
// every hit. An empty lessons table short-circuits to [].
async function searchKeyword(repoRoot, query, limit) {
  const database = openDb(repoRoot);
  const countResult = database.prepare("SELECT COUNT(*) as cnt FROM lessons").get();
  if (countResult.cnt === 0) return [];
  const rows = database.prepare(
    `
    SELECT l.*
    FROM lessons l
    JOIN lessons_fts fts ON l.rowid = fts.rowid
    WHERE lessons_fts MATCH ?
    LIMIT ?
  `
  ).all(query, limit);
  // Retrieval stats feed the archive policy (never-retrieved lessons age out).
  if (rows.length > 0) {
    incrementRetrievalCount(repoRoot, rows.map((r) => r.id));
  }
  return rows.map(rowToLesson);
}
// Increment retrieval_count and stamp last_retrieved (one shared ISO
// timestamp) for each id, inside a single transaction.
function incrementRetrievalCount(repoRoot, lessonIds) {
  if (lessonIds.length === 0) return;
  const database = openDb(repoRoot);
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const update = database.prepare(`
    UPDATE lessons
    SET retrieval_count = retrieval_count + 1,
        last_retrieved = ?
    WHERE id = ?
  `);
  const updateMany = database.transaction((ids) => {
    for (const id of ids) {
      update.run(now, id);
    }
  });
  updateMany(lessonIds);
}
// Per-lesson retrieval statistics: id, how often retrieved, and when last.
function getRetrievalStats(repoRoot) {
  const database = openDb(repoRoot);
  const rows = database.prepare("SELECT id, retrieval_count, last_retrieved FROM lessons").all();
  return rows.map((row) => ({
    id: row.id,
    count: row.retrieval_count,
    lastRetrieved: row.last_retrieved
  }));
}
383
+
384
+ // src/capture/quality.ts
385
// Jaccard word-overlap at or above this ratio counts as "already known".
var DEFAULT_SIMILARITY_THRESHOLD = 0.8;

/**
 * Decide whether an insight is novel relative to already-stored lessons.
 * Finds candidates via keyword search (up to three distinctive words,
 * OR-joined), then compares word sets.
 *
 * Fix: the exact-duplicate check now runs BEFORE the similarity check.
 * Previously it was unreachable — identical text has Jaccard similarity 1.0,
 * which is >= the 0.8 threshold, so the generic "similar lesson" reason
 * always shadowed the more precise "Exact duplicate found".
 *
 * @param {string} repoRoot
 * @param {string} insight - Candidate insight text.
 * @param {{threshold?: number}} [options] - Override the similarity cutoff.
 * @returns {Promise<{novel: boolean, reason?: string, existingId?: string}>}
 */
async function isNovel(repoRoot, insight, options = {}) {
  const threshold = options.threshold ?? DEFAULT_SIMILARITY_THRESHOLD;
  await syncIfNeeded(repoRoot);
  // Up to three distinctive words (>3 chars, lowercased, punctuation
  // stripped) form the FTS query.
  const words = insight.toLowerCase().replace(/[^a-z0-9\s]/g, "").split(/\s+/).filter((w) => w.length > 3).slice(0, 3);
  if (words.length === 0) {
    return { novel: true };
  }
  const searchQuery = words.join(" OR ");
  const results = await searchKeyword(repoRoot, searchQuery, 10);
  if (results.length === 0) {
    return { novel: true };
  }
  const insightWords = new Set(insight.toLowerCase().split(/\s+/));
  for (const lesson of results) {
    // Exact duplicates first: report the precise reason instead of the
    // generic similarity message (see docstring).
    if (lesson.insight.toLowerCase() === insight.toLowerCase()) {
      return {
        novel: false,
        reason: `Exact duplicate found`,
        existingId: lesson.id
      };
    }
    // Jaccard similarity over whitespace-split word sets.
    const lessonWords = new Set(lesson.insight.toLowerCase().split(/\s+/));
    const intersection = [...insightWords].filter((w) => lessonWords.has(w)).length;
    const union = new Set([...insightWords, ...lessonWords]).size;
    const similarity = union > 0 ? intersection / union : 0;
    if (similarity >= threshold) {
      return {
        novel: false,
        reason: `Found similar existing lesson: "${lesson.insight.slice(0, 50)}..."`,
        existingId: lesson.id
      };
    }
  }
  return { novel: true };
}
421
// Insights shorter than this many words are rejected outright.
var MIN_WORD_COUNT = 4;
// Phrasings that signal hand-wavy, non-actionable advice.
var VAGUE_PATTERNS = [
  /\bwrite better\b/i,
  /\bbe careful\b/i,
  /\bremember to\b/i,
  /\bmake sure\b/i,
  /\btry to\b/i,
  /\bdouble check\b/i
];
// Bare "always/never <one to three words>" imperatives carry no context.
var GENERIC_IMPERATIVE_PATTERN = /^(always|never)\s+\w+(\s+\w+){0,2}$/i;

/**
 * Check that an insight is concrete enough to be worth storing.
 * @param {string} insight
 * @returns {{specific: boolean, reason?: string}}
 */
function isSpecific(insight) {
  const wordCount = insight.trim().split(/\s+/).filter((w) => w.length > 0).length;
  if (wordCount < MIN_WORD_COUNT) {
    return { specific: false, reason: "Insight is too short to be actionable" };
  }
  const vague = VAGUE_PATTERNS.some((pattern) => pattern.test(insight)) || GENERIC_IMPERATIVE_PATTERN.test(insight);
  if (vague) {
    return { specific: false, reason: "Insight matches a vague pattern" };
  }
  return { specific: true };
}
446
// Sentence shapes that prescribe a concrete action.
var ACTION_PATTERNS = [
  /\buse\s+.+\s+instead\s+of\b/i, // "use X instead of Y"
  /\bprefer\s+.+\s+(over|to)\b/i, // "prefer X over Y" or "prefer X to Y"
  /\balways\s+.+\s+when\b/i, // "always X when Y"
  /\bnever\s+.+\s+without\b/i, // "never X without Y"
  /\bavoid\s+(using\s+)?\w+/i, // "avoid X" or "avoid using X"
  /\bcheck\s+.+\s+before\b/i, // "check X before Y"
  /^(run|use|add|remove|install|update|configure|set|enable|disable)\s+/i // imperative verb at start
];

/**
 * Check whether an insight prescribes a concrete action (matches at least
 * one of the shapes above).
 * @param {string} insight
 * @returns {{actionable: boolean, reason?: string}}
 */
function isActionable(insight) {
  const matched = ACTION_PATTERNS.some((pattern) => pattern.test(insight));
  if (matched) {
    return { actionable: true };
  }
  return { actionable: false, reason: "Insight lacks clear action guidance" };
}
470
/**
 * Quality gate for captured insights: must be specific, actionable, and
 * novel — checked in that order, returning the first failing check's reason.
 * The novelty check runs last because it is the only one that touches disk.
 * @param {string} repoRoot
 * @param {string} insight
 * @returns {Promise<{shouldPropose: boolean, reason?: string}>}
 */
async function shouldPropose(repoRoot, insight) {
  const specificCheck = isSpecific(insight);
  if (!specificCheck.specific) {
    return { shouldPropose: false, reason: specificCheck.reason };
  }
  const actionCheck = isActionable(insight);
  if (!actionCheck.actionable) {
    return { shouldPropose: false, reason: actionCheck.reason };
  }
  const noveltyCheck = await isNovel(repoRoot, insight);
  if (!noveltyCheck.novel) {
    return { shouldPropose: false, reason: noveltyCheck.reason };
  }
  return { shouldPropose: true };
}
485
+
486
+ // src/capture/triggers.ts
487
// src/capture/triggers.ts
// Phrasings that usually mean the user is correcting the assistant.
var USER_CORRECTION_PATTERNS = [
  /\bno\b[,.]?\s/i, // "no, ..." or "no ..."
  /\bwrong\b/i, // "wrong"
  /\bactually\b/i, // "actually..."
  /\bnot that\b/i, // "not that"
  /\bi meant\b/i // "I meant"
];

/**
 * Scan a conversation for a user correction.
 * The first message is skipped (there is nothing to correct yet); the first
 * later message matching any correction phrasing wins.
 * @param {{messages: string[], context: object}} signals
 * @returns {{trigger: string, correctionMessage: string, context: object} | null}
 */
function detectUserCorrection(signals) {
  const { messages, context } = signals;
  if (messages.length < 2) {
    return null;
  }
  for (const message of messages.slice(1)) {
    if (!message) continue;
    const isCorrection = USER_CORRECTION_PATTERNS.some((pattern) => pattern.test(message));
    if (isCorrection) {
      return {
        trigger: `User correction during ${context.intent}`,
        correctionMessage: message,
        context
      };
    }
  }
  return null;
}
519
/**
 * Look for a success -> failure -> success edit sequence on the same file,
 * which suggests the agent made and then fixed its own mistake.
 * Examines every consecutive triple; the first match wins.
 * @param {{edits: Array<{file: string, success: boolean}>}} history
 * @returns {{file: string, trigger: string} | null}
 */
function detectSelfCorrection(history) {
  const { edits } = history;
  for (let start = 0; start + 2 < edits.length; start++) {
    const [first, second, third] = edits.slice(start, start + 3);
    if (!first || !second || !third) continue;
    const sameFile = first.file === second.file && second.file === third.file;
    const bounceBack = first.success && !second.success && third.success;
    if (sameFile && bounceBack) {
      return {
        file: first.file,
        trigger: `Self-correction on ${first.file}`
      };
    }
  }
  return null;
}
538
/**
 * Build a learning trigger from a failed test run.
 * The trigger summary is the first output line mentioning error/fail/assert,
 * falling back to the first non-empty line, truncated to 100 characters.
 * Passing runs yield null.
 * @param {{passed: boolean, output: string, testFile: string}} testResult
 * @returns {{testFile: string, errorOutput: string, trigger: string} | null}
 */
function detectTestFailure(testResult) {
  if (testResult.passed) {
    return null;
  }
  const nonEmpty = testResult.output.split("\n").filter((line) => line.trim().length > 0);
  const summary = nonEmpty.find((line) => /error|fail|assert/i.test(line)) ?? nonEmpty[0] ?? "";
  return {
    testFile: testResult.testFile,
    errorOutput: testResult.output,
    trigger: `Test failure in ${testResult.testFile}: ${summary.slice(0, 100)}`
  };
}
550
+
551
+ // src/capture/integration.ts
552
// Run the detector matching the input type, then gate the proposed insight
// through the quality checks. Returns null when nothing fired or the
// proposal failed quality gating.
async function detectAndPropose(repoRoot, input) {
  const detected = runDetector(input);
  if (!detected) {
    return null;
  }
  const { trigger, source, proposedInsight } = detected;
  const quality = await shouldPropose(repoRoot, proposedInsight);
  if (!quality.shouldPropose) {
    return null;
  }
  return { trigger, source, proposedInsight };
}
// Dispatch to the matching detector flow; input.type has already been
// validated by parseInputFile, so no default branch is needed.
function runDetector(input) {
  switch (input.type) {
    case "user":
      return detectUserCorrectionFlow(input.data);
    case "self":
      return detectSelfCorrectionFlow(input.data);
    case "test":
      return detectTestFailureFlow(input.data);
  }
}
// Adapt a user-correction detection into a proposal: the correction
// message itself becomes the proposed insight.
function detectUserCorrectionFlow(data) {
  const result = detectUserCorrection(data);
  if (!result) {
    return null;
  }
  return {
    trigger: result.trigger,
    source: "user_correction",
    proposedInsight: result.correctionMessage
  };
}
// Adapt a self-correction detection into a proposal with a templated insight.
function detectSelfCorrectionFlow(data) {
  const result = detectSelfCorrection(data);
  if (!result) {
    return null;
  }
  return {
    trigger: result.trigger,
    source: "self_correction",
    // Self-corrections need context to form useful insights
    proposedInsight: `Check ${result.file} for common errors before editing`
  };
}
// Adapt a test-failure detection into a proposal: the raw error output
// becomes the proposed insight.
function detectTestFailureFlow(data) {
  const result = detectTestFailure(data);
  if (!result) {
    return null;
  }
  return {
    trigger: result.trigger,
    source: "test_failure",
    proposedInsight: result.errorOutput
  };
}
// The detection input types accepted by the `detect` command.
var VALID_TYPES = /* @__PURE__ */ new Set(["user", "self", "test"]);
// Read and minimally validate a detection input file ({ type, data }).
// Throws on an unknown type; JSON.parse/read errors propagate to the caller.
async function parseInputFile(filePath) {
  const content = await fs.readFile(filePath, "utf-8");
  const data = JSON.parse(content);
  if (!VALID_TYPES.has(data.type)) {
    throw new Error(`Invalid detection type: ${data.type}. Must be one of: user, self, test`);
  }
  return data;
}
617
+
618
+ // src/cli-utils.ts
619
// src/cli-utils.ts
/**
 * Render a byte count as a human-readable size using base-1024 units.
 * @param {number} bytes
 * @returns {string} e.g. "0 B", "512 B", "1.5 KB", "3.0 MB"
 */
function formatBytes(bytes) {
  const STEP = 1024;
  if (bytes < STEP) {
    // Covers 0 as well ("0 B").
    return `${bytes} B`;
  }
  const kb = bytes / STEP;
  if (kb < STEP) {
    return `${kb.toFixed(1)} KB`;
  }
  return `${(kb / STEP).toFixed(1)} MB`;
}
627
/**
 * Parse a CLI numeric option into a positive integer.
 * Fix: the previous parseInt-based version silently accepted trailing
 * garbage ("10abc" -> 10) and truncated decimals ("2.5" -> 2), masking
 * user typos. Number() + Number.isInteger rejects both.
 * @param {string} value - Raw option string from the command line.
 * @param {string} name - Option name used in the error message.
 * @returns {number} The parsed positive integer.
 * @throws {Error} When value is not a positive whole number.
 */
function parseLimit(value, name) {
  const parsed = Number(value);
  if (!Number.isInteger(parsed) || parsed <= 0) {
    throw new Error(`Invalid ${name}: must be a positive integer`);
  }
  return parsed;
}
634
/**
 * Resolve the repository root the CLI operates on.
 * The LEARNING_AGENT_ROOT environment variable overrides the current
 * working directory (useful for tests and hooks).
 * @returns {string}
 */
function getRepoRoot() {
  const override = process.env["LEARNING_AGENT_ROOT"];
  return override ?? process.cwd();
}
// Bundler residue: a model-cache path computed and discarded because its
// binding was tree-shaken away. Side-effect free.
join(homedir(), ".node-llama-cpp", "models");
638
+
639
// src/index.ts
var VERSION = "0.1.0";
// Where archived (old, never-retrieved) lessons go, bucketed by month.
var ARCHIVE_DIR = ".claude/lessons/archive";
// Compaction becomes worthwhile once this many tombstone lines accumulate.
var TOMBSTONE_THRESHOLD = 100;
// Lessons older than this many days (and never retrieved) are archived.
var ARCHIVE_AGE_DAYS = 90;
var MS_PER_DAY = 1e3 * 60 * 60 * 24;
// Date#getMonth is 0-based; file names use 1-based, zero-padded months.
var MONTH_INDEX_OFFSET = 1;
var MONTH_PAD_LENGTH = 2;

/**
 * Archive file path for a date: <repoRoot>/<ARCHIVE_DIR>/YYYY-MM.jsonl.
 * @param {string} repoRoot
 * @param {Date} date - Creation date of the lesson being archived.
 * @returns {string}
 */
function getArchivePath(repoRoot, date) {
  const monthNumber = date.getMonth() + MONTH_INDEX_OFFSET;
  const bucket = `${date.getFullYear()}-${String(monthNumber).padStart(MONTH_PAD_LENGTH, "0")}`;
  return join(repoRoot, ARCHIVE_DIR, `${bucket}.jsonl`);
}
652
// Read every raw non-empty line of the JSONL log. Lines that fail to parse
// are kept with parsed: null so callers can count or report them.
// A missing file yields [].
async function parseRawJsonlLines(repoRoot) {
  const filePath = join(repoRoot, LESSONS_PATH);
  let content;
  try {
    content = await readFile(filePath, "utf-8");
  } catch {
    return [];
  }
  const results = [];
  for (const line of content.split("\n")) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    try {
      const parsed = JSON.parse(trimmed);
      results.push({ line: trimmed, parsed });
    } catch {
      // Keep the raw line; callers decide how to treat corruption.
      results.push({ line: trimmed, parsed: null });
    }
  }
  return results;
}
// Count tombstone lines ({ deleted: true }) still present in the raw log.
async function countTombstones(repoRoot) {
  const lines = await parseRawJsonlLines(repoRoot);
  let count = 0;
  for (const { parsed } of lines) {
    if (parsed && parsed["deleted"] === true) {
      count++;
    }
  }
  return count;
}
// Compaction is due once tombstones reach TOMBSTONE_THRESHOLD.
async function needsCompaction(repoRoot) {
  const count = await countTombstones(repoRoot);
  return count >= TOMBSTONE_THRESHOLD;
}
// Rewrite the log containing only live lessons — tombstones and superseded
// duplicate lines drop out because readLessons replays the log. The
// write-to-temp-then-rename makes the replacement atomic.
// Returns the number of tombstone lines that were removed.
async function rewriteWithoutTombstones(repoRoot) {
  const filePath = join(repoRoot, LESSONS_PATH);
  const tempPath = filePath + ".tmp";
  const { lessons } = await readLessons(repoRoot);
  // Count before rewriting: the rewrite destroys the evidence.
  const tombstoneCount = await countTombstones(repoRoot);
  await mkdir(dirname(filePath), { recursive: true });
  const lines = lessons.map((lesson) => JSON.stringify(lesson) + "\n");
  await writeFile(tempPath, lines.join(""), "utf-8");
  await rename(tempPath, filePath);
  return tombstoneCount;
}
698
/**
 * A lesson is archivable when it is older than ARCHIVE_AGE_DAYS and has
 * never been retrieved (retrievalCount absent or zero).
 * @param {object} lesson
 * @param {Date} now - Reference time for age computation.
 * @returns {boolean}
 */
function shouldArchive(lesson, now) {
  const ageMs = now.getTime() - new Date(lesson.created).getTime();
  if (ageMs / MS_PER_DAY <= ARCHIVE_AGE_DAYS) {
    return false;
  }
  const { retrievalCount } = lesson;
  return retrievalCount === void 0 || retrievalCount === 0;
}
704
// Move archivable lessons (see shouldArchive) out of the main log into
// per-month archive files, then atomically rewrite the main log with the
// keepers. Returns the number of lessons archived.
async function archiveOldLessons(repoRoot) {
  const { lessons } = await readLessons(repoRoot);
  const now = /* @__PURE__ */ new Date();
  const toArchive = [];
  const toKeep = [];
  for (const lesson of lessons) {
    if (shouldArchive(lesson, now)) {
      toArchive.push(lesson);
    } else {
      toKeep.push(lesson);
    }
  }
  if (toArchive.length === 0) {
    return 0;
  }
  // Group by target archive file (YYYY-MM of the lesson's creation date).
  const archiveGroups = /* @__PURE__ */ new Map();
  for (const lesson of toArchive) {
    const created = new Date(lesson.created);
    const archivePath = getArchivePath(repoRoot, created);
    const group = archiveGroups.get(archivePath) ?? [];
    group.push(lesson);
    archiveGroups.set(archivePath, group);
  }
  const archiveDir = join(repoRoot, ARCHIVE_DIR);
  await mkdir(archiveDir, { recursive: true });
  // Append (not overwrite): archive files accumulate across runs.
  for (const [archivePath, archiveLessons] of archiveGroups) {
    const lines2 = archiveLessons.map((l) => JSON.stringify(l) + "\n").join("");
    await appendFile(archivePath, lines2, "utf-8");
  }
  // Atomically replace the main log with only the kept lessons.
  const filePath = join(repoRoot, LESSONS_PATH);
  const tempPath = filePath + ".tmp";
  await mkdir(dirname(filePath), { recursive: true });
  const lines = toKeep.map((lesson) => JSON.stringify(lesson) + "\n");
  await writeFile(tempPath, lines.join(""), "utf-8");
  await rename(tempPath, filePath);
  return toArchive.length;
}
// Full maintenance pass: archive old lessons, then strip tombstones.
// NOTE(review): when archiving rewrote the file (archived > 0), all
// tombstones are already gone, so tombstonesRemoved reports the pre-archive
// count — apparently "everything removed this run". Otherwise it reports
// what rewriteWithoutTombstones actually removed. Verify this is intended.
async function compact(repoRoot) {
  const tombstonesBefore = await countTombstones(repoRoot);
  const archived = await archiveOldLessons(repoRoot);
  const tombstonesAfterArchive = await countTombstones(repoRoot);
  await rewriteWithoutTombstones(repoRoot);
  const tombstonesRemoved = archived > 0 ? tombstonesBefore : tombstonesAfterArchive;
  const { lessons } = await readLessons(repoRoot);
  return {
    archived,
    tombstonesRemoved,
    lessonsRemaining: lessons.length
  };
}
754
+
755
// src/cli.ts
// Tagged console output helpers; errors go to stderr, everything else to stdout.
var out = {
  success: (msg) => console.log(chalk.green("[ok]"), msg),
  error: (msg) => console.error(chalk.red("[error]"), msg),
  info: (msg) => console.log(chalk.blue("[info]"), msg),
  warn: (msg) => console.log(chalk.yellow("[warn]"), msg)
};

/**
 * Read the global --verbose/--quiet flags from any (sub)command, including
 * flags declared on the root program, defaulting both to false.
 * @param {object} cmd - Commander command providing optsWithGlobals().
 * @returns {{verbose: boolean, quiet: boolean}}
 */
function getGlobalOpts(cmd) {
  const merged = cmd.optsWithGlobals();
  return {
    verbose: merged.verbose ?? false,
    quiet: merged.quiet ?? false
  };
}
769
// Commander option defaults are strings; parseLimit converts them.
var DEFAULT_SEARCH_LIMIT = "10";
var DEFAULT_LIST_LIMIT = "20";
var program = new Command();
// Global flags, readable from any subcommand via getGlobalOpts.
program.option("-v, --verbose", "Show detailed output").option("-q, --quiet", "Suppress non-essential output");
program.name("learning-agent").description("Repository-scoped learning system for Claude Code").version(VERSION);
// `learn`: manually capture a quick lesson and append it to the JSONL log.
// Note: regular function (not arrow) so `this` is the Command instance.
program.command("learn <insight>").description("Capture a new lesson").option("-t, --trigger <text>", "What triggered this lesson").option("--tags <tags>", "Comma-separated tags", "").option("-y, --yes", "Skip confirmation").action(async function(insight, options) {
  const repoRoot = getRepoRoot();
  const { quiet } = getGlobalOpts(this);
  const lesson = {
    id: generateId(insight),
    type: "quick",
    trigger: options.trigger ?? "Manual capture",
    insight,
    tags: options.tags ? options.tags.split(",").map((t) => t.trim()) : [],
    source: "manual",
    context: {
      tool: "cli",
      intent: "manual learning"
    },
    created: (/* @__PURE__ */ new Date()).toISOString(),
    // --yes marks the lesson as confirmed at capture time.
    confirmed: options.yes ?? false,
    supersedes: [],
    related: []
  };
  await appendLesson(repoRoot, lesson);
  out.success(`Learned: ${insight}`);
  if (!quiet) {
    console.log(`ID: ${chalk.dim(lesson.id)}`);
  }
});
// `search`: FTS keyword search over the SQLite index (synced first).
program.command("search <query>").description("Search lessons by keyword").option("-n, --limit <number>", "Maximum results", DEFAULT_SEARCH_LIMIT).action(async function(query, options) {
  const repoRoot = getRepoRoot();
  const limit = parseLimit(options.limit, "limit");
  const { verbose, quiet } = getGlobalOpts(this);
  await syncIfNeeded(repoRoot);
  const results = await searchKeyword(repoRoot, query, limit);
  if (results.length === 0) {
    console.log('No lessons match your search. Try a different query or use "list" to see all lessons.');
    return;
  }
  if (!quiet) {
    out.info(`Found ${results.length} lesson(s):
`);
  }
  for (const lesson of results) {
    console.log(`[${chalk.cyan(lesson.id)}] ${lesson.insight}`);
    console.log(`  Trigger: ${lesson.trigger}`);
    if (verbose && lesson.context) {
      console.log(`  Context: ${lesson.context.tool} - ${lesson.context.intent}`);
      console.log(`  Created: ${lesson.created}`);
    }
    if (lesson.tags.length > 0) {
      console.log(`  Tags: ${lesson.tags.join(", ")}`);
    }
    console.log();
  }
});
// `list`: print lessons straight from the JSONL log (no index involved),
// reporting any corrupted lines that were skipped.
program.command("list").description("List all lessons").option("-n, --limit <number>", "Maximum results", DEFAULT_LIST_LIMIT).action(async function(options) {
  const repoRoot = getRepoRoot();
  const limit = parseLimit(options.limit, "limit");
  const { verbose, quiet } = getGlobalOpts(this);
  const { lessons, skippedCount } = await readLessons(repoRoot);
  if (lessons.length === 0) {
    console.log('No lessons found. Get started with: learn "Your first lesson"');
    if (skippedCount > 0) {
      out.warn(`${skippedCount} corrupted lesson(s) skipped.`);
    }
    return;
  }
  const toShow = lessons.slice(0, limit);
  if (!quiet) {
    out.info(`Showing ${toShow.length} of ${lessons.length} lesson(s):
`);
  }
  for (const lesson of toShow) {
    console.log(`[${chalk.cyan(lesson.id)}] ${lesson.insight}`);
    if (verbose) {
      console.log(`  Type: ${lesson.type} | Source: ${lesson.source}`);
      console.log(`  Created: ${lesson.created}`);
      if (lesson.context) {
        console.log(`  Context: ${lesson.context.tool} - ${lesson.context.intent}`);
      }
    } else {
      console.log(`  Type: ${lesson.type} | Source: ${lesson.source}`);
    }
    if (lesson.tags.length > 0) {
      console.log(`  Tags: ${lesson.tags.join(", ")}`);
    }
    console.log();
  }
  if (skippedCount > 0) {
    out.warn(`${skippedCount} corrupted lesson(s) skipped.`);
  }
});
// `rebuild`: force or conditionally rebuild the SQLite index from JSONL.
program.command("rebuild").description("Rebuild SQLite index from JSONL").option("-f, --force", "Force rebuild even if unchanged").action(async (options) => {
  const repoRoot = getRepoRoot();
  if (options.force) {
    console.log("Forcing index rebuild...");
    await rebuildIndex(repoRoot);
    console.log("Index rebuilt.");
  } else {
    const rebuilt = await syncIfNeeded(repoRoot);
    if (rebuilt) {
      console.log("Index rebuilt (JSONL changed).");
    } else {
      console.log("Index is up to date.");
    }
  }
});
// `detect`: run trigger detection on a JSON input file; optionally save the
// proposed lesson (--save) and/or emit machine-readable output (--json).
program.command("detect").description("Detect learning triggers from input").requiredOption("--input <file>", "Path to JSON input file").option("--save", "Automatically save proposed lesson").option("--json", "Output result as JSON").action(
  async (options) => {
    const repoRoot = getRepoRoot();
    const input = await parseInputFile(options.input);
    const result = await detectAndPropose(repoRoot, input);
    if (!result) {
      if (options.json) {
        console.log(JSON.stringify({ detected: false }));
      } else {
        console.log("No learning trigger detected.");
      }
      return;
    }
    if (options.json) {
      console.log(JSON.stringify({ detected: true, ...result }));
      return;
    }
    console.log("Learning trigger detected!");
    console.log(`  Trigger: ${result.trigger}`);
    console.log(`  Source: ${result.source}`);
    console.log(`  Proposed: ${result.proposedInsight}`);
    if (options.save) {
      // Auto-saved lessons start unconfirmed, unlike `learn --yes`.
      const lesson = {
        id: generateId(result.proposedInsight),
        type: "quick",
        trigger: result.trigger,
        insight: result.proposedInsight,
        tags: [],
        source: result.source,
        context: { tool: "detect", intent: "auto-capture" },
        created: (/* @__PURE__ */ new Date()).toISOString(),
        confirmed: false,
        supersedes: [],
        related: []
      };
      await appendLesson(repoRoot, lesson);
      console.log(`
Saved as lesson: ${lesson.id}`);
    }
  }
);
919
+ program.command("compact").description("Compact lessons: archive old lessons and remove tombstones").option("-f, --force", "Run compaction even if below threshold").option("--dry-run", "Show what would be done without making changes").action(async (options) => {
920
+ const repoRoot = getRepoRoot();
921
+ const tombstones = await countTombstones(repoRoot);
922
+ const needs = await needsCompaction(repoRoot);
923
+ if (options.dryRun) {
924
+ console.log("Dry run - no changes will be made.\n");
925
+ console.log(`Tombstones found: ${tombstones}`);
926
+ console.log(`Compaction needed: ${needs ? "yes" : "no"}`);
927
+ return;
928
+ }
929
+ if (!needs && !options.force) {
930
+ console.log(`Compaction not needed (${tombstones} tombstones, threshold is ${TOMBSTONE_THRESHOLD}).`);
931
+ console.log("Use --force to compact anyway.");
932
+ return;
933
+ }
934
+ console.log("Running compaction...");
935
+ const result = await compact(repoRoot);
936
+ console.log("\nCompaction complete:");
937
+ console.log(` Archived: ${result.archived} lesson(s)`);
938
+ console.log(` Tombstones removed: ${result.tombstonesRemoved}`);
939
+ console.log(` Lessons remaining: ${result.lessonsRemaining}`);
940
+ await rebuildIndex(repoRoot);
941
+ console.log(" Index rebuilt.");
942
+ });
943
+ program.command("export").description("Export lessons as JSON to stdout").option("--since <date>", "Only include lessons created after this date (ISO8601)").option("--tags <tags>", "Filter by tags (comma-separated, OR logic)").action(async (options) => {
944
+ const repoRoot = getRepoRoot();
945
+ const { lessons } = await readLessons(repoRoot);
946
+ let filtered = lessons;
947
+ if (options.since) {
948
+ const sinceDate = new Date(options.since);
949
+ if (Number.isNaN(sinceDate.getTime())) {
950
+ console.error(`Invalid date format: ${options.since}. Use ISO8601 format (e.g., 2024-01-15).`);
951
+ process.exit(1);
952
+ }
953
+ filtered = filtered.filter((lesson) => new Date(lesson.created) >= sinceDate);
954
+ }
955
+ if (options.tags) {
956
+ const filterTags = options.tags.split(",").map((t) => t.trim());
957
+ filtered = filtered.filter((lesson) => lesson.tags.some((tag) => filterTags.includes(tag)));
958
+ }
959
+ console.log(JSON.stringify(filtered, null, 2));
960
+ });
961
+ program.command("import <file>").description("Import lessons from a JSONL file").action(async (file) => {
962
+ const repoRoot = getRepoRoot();
963
+ let content;
964
+ try {
965
+ const { readFile: readFile4 } = await import('fs/promises');
966
+ content = await readFile4(file, "utf-8");
967
+ } catch (err) {
968
+ const code = err.code;
969
+ if (code === "ENOENT") {
970
+ console.error(`Error: File not found: ${file}`);
971
+ } else {
972
+ console.error(`Error reading file: ${err.message}`);
973
+ }
974
+ process.exit(1);
975
+ }
976
+ const { lessons: existingLessons } = await readLessons(repoRoot);
977
+ const existingIds = new Set(existingLessons.map((l) => l.id));
978
+ const lines = content.split("\n");
979
+ let imported = 0;
980
+ let skipped = 0;
981
+ let invalid = 0;
982
+ for (const line of lines) {
983
+ const trimmed = line.trim();
984
+ if (!trimmed) continue;
985
+ let parsed;
986
+ try {
987
+ parsed = JSON.parse(trimmed);
988
+ } catch {
989
+ invalid++;
990
+ continue;
991
+ }
992
+ const result = LessonSchema.safeParse(parsed);
993
+ if (!result.success) {
994
+ invalid++;
995
+ continue;
996
+ }
997
+ const lesson = result.data;
998
+ if (existingIds.has(lesson.id)) {
999
+ skipped++;
1000
+ continue;
1001
+ }
1002
+ await appendLesson(repoRoot, lesson);
1003
+ existingIds.add(lesson.id);
1004
+ imported++;
1005
+ }
1006
+ const lessonWord = imported === 1 ? "lesson" : "lessons";
1007
+ const parts = [];
1008
+ if (skipped > 0) parts.push(`${skipped} skipped`);
1009
+ if (invalid > 0) parts.push(`${invalid} invalid`);
1010
+ if (parts.length > 0) {
1011
+ console.log(`Imported ${imported} ${lessonWord} (${parts.join(", ")})`);
1012
+ } else {
1013
+ console.log(`Imported ${imported} ${lessonWord}`);
1014
+ }
1015
+ });
1016
+ program.command("stats").description("Show database health and statistics").action(async () => {
1017
+ const repoRoot = getRepoRoot();
1018
+ await syncIfNeeded(repoRoot);
1019
+ const { lessons } = await readLessons(repoRoot);
1020
+ const deletedCount = await countTombstones(repoRoot);
1021
+ const totalLessons = lessons.length;
1022
+ const retrievalStats = getRetrievalStats(repoRoot);
1023
+ const totalRetrievals = retrievalStats.reduce((sum, s) => sum + s.count, 0);
1024
+ const avgRetrievals = totalLessons > 0 ? (totalRetrievals / totalLessons).toFixed(1) : "0.0";
1025
+ const jsonlPath = join(repoRoot, LESSONS_PATH);
1026
+ const dbPath = join(repoRoot, DB_PATH);
1027
+ let dataSize = 0;
1028
+ let indexSize = 0;
1029
+ try {
1030
+ dataSize = statSync(jsonlPath).size;
1031
+ } catch {
1032
+ }
1033
+ try {
1034
+ indexSize = statSync(dbPath).size;
1035
+ } catch {
1036
+ }
1037
+ const totalSize = dataSize + indexSize;
1038
+ const deletedInfo = deletedCount > 0 ? ` (${deletedCount} deleted)` : "";
1039
+ console.log(`Lessons: ${totalLessons} total${deletedInfo}`);
1040
+ console.log(`Retrievals: ${totalRetrievals} total, ${avgRetrievals} avg per lesson`);
1041
+ console.log(`Storage: ${formatBytes(totalSize)} (index: ${formatBytes(indexSize)}, data: ${formatBytes(dataSize)})`);
1042
+ });
1043
+ program.parse();
1044
+ //# sourceMappingURL=cli.js.map
1045
+ //# sourceMappingURL=cli.js.map