compound-agent 1.8.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +30 -1
- package/README.md +30 -47
- package/bin/ca +32 -0
- package/package.json +19 -78
- package/scripts/postinstall.cjs +221 -0
- package/dist/cli.d.ts +0 -1
- package/dist/cli.js +0 -13655
- package/dist/cli.js.map +0 -1
- package/dist/index.d.ts +0 -3730
- package/dist/index.js +0 -3251
- package/dist/index.js.map +0 -1
- package/docs/research/AgenticAiCodebaseGuide.md +0 -1206
- package/docs/research/BuildingACCompilerAnthropic.md +0 -116
- package/docs/research/HarnessEngineeringOpenAi.md +0 -220
- package/docs/research/code-review/systematic-review-methodology.md +0 -409
- package/docs/research/index.md +0 -76
- package/docs/research/learning-systems/knowledge-compounding-for-agents.md +0 -695
- package/docs/research/property-testing/property-based-testing-and-invariants.md +0 -742
- package/docs/research/scenario-testing/advanced-and-emerging.md +0 -470
- package/docs/research/scenario-testing/core-foundations.md +0 -507
- package/docs/research/scenario-testing/domain-specific-and-human-factors.md +0 -474
- package/docs/research/security/auth-patterns.md +0 -138
- package/docs/research/security/data-exposure.md +0 -185
- package/docs/research/security/dependency-security.md +0 -91
- package/docs/research/security/injection-patterns.md +0 -249
- package/docs/research/security/overview.md +0 -81
- package/docs/research/security/secrets-checklist.md +0 -92
- package/docs/research/security/secure-coding-failure.md +0 -297
- package/docs/research/software_architecture/01-science-of-decomposition.md +0 -615
- package/docs/research/software_architecture/02-architecture-under-uncertainty.md +0 -649
- package/docs/research/software_architecture/03-emergent-behavior-in-composed-systems.md +0 -644
- package/docs/research/spec_design/decision_theory_specifications_and_multi_criteria_tradeoffs.md +0 -0
- package/docs/research/spec_design/design_by_contract.md +0 -251
- package/docs/research/spec_design/domain_driven_design_strategic_modeling.md +0 -183
- package/docs/research/spec_design/formal_specification_methods.md +0 -161
- package/docs/research/spec_design/logic_and_proof_theory_under_the_curry_howard_correspondence.md +0 -250
- package/docs/research/spec_design/natural_language_formal_semantics_abuguity_in_specifications.md +0 -259
- package/docs/research/spec_design/requirements_engineering.md +0 -234
- package/docs/research/spec_design/systems_engineering_specifications_emergent_behavior_interface_contracts.md +0 -149
- package/docs/research/spec_design/what_is_this_about.md +0 -305
- package/docs/research/tdd/test-driven-development-methodology.md +0 -547
- package/docs/research/test-optimization-strategies.md +0 -401
- package/scripts/postinstall.mjs +0 -102
package/dist/index.js
DELETED
|
@@ -1,3251 +0,0 @@
|
|
|
1
|
-
import { createHash } from 'crypto';
|
|
2
|
-
import { z } from 'zod';
|
|
3
|
-
import { mkdir, appendFile, readFile, readdir } from 'fs/promises';
|
|
4
|
-
import { join, dirname, extname, relative } from 'path';
|
|
5
|
-
import { createRequire } from 'module';
|
|
6
|
-
import { existsSync, mkdirSync, unlinkSync, writeFileSync, readFileSync, statSync, readdirSync } from 'fs';
|
|
7
|
-
import { homedir } from 'os';
|
|
8
|
-
import { resolveModelFile, getLlama, LlamaLogLevel } from 'node-llama-cpp';
|
|
9
|
-
import { spawn, execSync } from 'child_process';
|
|
10
|
-
import { fileURLToPath } from 'url';
|
|
11
|
-
import 'chalk';
|
|
12
|
-
|
|
13
|
-
// esbuild runtime helpers (bundler-generated module machinery).
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Wraps a lazy module initializer so it runs at most once. `fn` is an object
// whose single key (the source file name) maps to the init function; after the
// first call `fn` is cleared to 0 and the memoized `res` is returned forever.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const initializer = fn[__getOwnPropNames(fn)[0]];
    fn = 0;
    res = initializer();
  }
  return res;
};
// Defines live, enumerable getter re-exports on `target` for every key of `all`.
var __export = (target, all) => {
  for (const name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
|
|
22
|
-
/**
 * Derives a stable, content-addressed id for a memory item.
 * @param {string} insight - Text whose SHA-256 digest seeds the id.
 * @param {string} [type] - Memory item type key into TYPE_PREFIXES; defaults to "lesson".
 * @returns {string} One-letter type prefix followed by the first 16 hex digest chars.
 */
function generateId(insight, type) {
  const digest = createHash("sha256").update(insight).digest("hex");
  const kind = type ?? "lesson";
  return `${TYPE_PREFIXES[kind]}${digest.slice(0, 16)}`;
}
|
|
27
|
-
// Zod schemas for the memory subsystem; bound lazily when init_types() runs.
var SourceSchema, ContextSchema, PatternSchema, CitationSchema, SeveritySchema, CompactionLevelSchema, LessonTypeSchema, MemoryItemTypeSchema, baseFields, LessonItemSchema, SolutionItemSchema, PatternItemSchema, PreferenceItemSchema, MemoryItemSchema, LegacyLessonSchema, LessonSchema, LegacyTombstoneSchema, LessonRecordSchema, MemoryItemRecordSchema, TYPE_PREFIXES;
var init_types = __esm({
  "src/memory/types.ts"() {
    // How a memory item was captured.
    SourceSchema = z.enum(["user_correction", "self_correction", "test_failure", "manual"]);
    // Tool/intent pair describing where an item applies.
    ContextSchema = z.object({ tool: z.string(), intent: z.string() });
    // Bad-vs-good snippet pair.
    PatternSchema = z.object({ bad: z.string(), good: z.string() });
    CitationSchema = z.object({
      file: z.string().min(1),                        // source file path (required, non-empty)
      line: z.number().int().positive().optional(),   // line number (optional, must be positive)
      commit: z.string().optional()                   // git commit hash (optional)
    });
    SeveritySchema = z.enum(["high", "medium", "low"]);
    // 0 = active, 1 = flagged (>90 days), 2 = archived.
    CompactionLevelSchema = z.union([z.literal(0), z.literal(1), z.literal(2)]);
    LessonTypeSchema = z.enum(["quick", "full"]);
    MemoryItemTypeSchema = z.enum(["lesson", "solution", "pattern", "preference"]);
    baseFields = {
      // Core identity (required)
      id: z.string(),
      trigger: z.string(),
      insight: z.string(),
      // Metadata (required)
      tags: z.array(z.string()),
      source: SourceSchema,
      context: ContextSchema,
      created: z.string(), // ISO8601
      confirmed: z.boolean(),
      // Relationships (required, can be empty arrays)
      supersedes: z.array(z.string()),
      related: z.array(z.string()),
      // Extended fields (optional)
      evidence: z.string().optional(),
      severity: SeveritySchema.optional(),
      // Lifecycle fields (optional)
      deleted: z.boolean().optional(),
      deletedAt: z.string().optional(),
      retrievalCount: z.number().optional(),
      // Provenance tracking (optional)
      citation: CitationSchema.optional(),
      // Age-based validity fields (optional)
      compactionLevel: CompactionLevelSchema.optional(),
      compactedAt: z.string().optional(),
      lastRetrieved: z.string().optional(),
      // Invalidation fields (optional)
      invalidatedAt: z.string().optional(),
      invalidationReason: z.string().optional()
    };
    LessonItemSchema = z.object({ ...baseFields, type: z.literal("lesson"), pattern: PatternSchema.optional() });
    SolutionItemSchema = z.object({ ...baseFields, type: z.literal("solution"), pattern: PatternSchema.optional() });
    // Patterns require the bad/good pair; other item types may omit it.
    PatternItemSchema = z.object({ ...baseFields, type: z.literal("pattern"), pattern: PatternSchema });
    PreferenceItemSchema = z.object({ ...baseFields, type: z.literal("preference"), pattern: PatternSchema.optional() });
    MemoryItemSchema = z.discriminatedUnion("type", [
      LessonItemSchema,
      SolutionItemSchema,
      PatternItemSchema,
      PreferenceItemSchema
    ]);
    // Pre-migration lesson shape used "quick"/"full" as the type.
    LegacyLessonSchema = z.object({ ...baseFields, type: LessonTypeSchema, pattern: PatternSchema.optional() });
    LessonSchema = LessonItemSchema;
    // Deletion marker persisted in the JSONL log.
    LegacyTombstoneSchema = z.object({
      id: z.string(),
      deleted: z.literal(true),
      deletedAt: z.string() // ISO8601
    });
    LessonRecordSchema = z.union([MemoryItemSchema, LegacyLessonSchema, LegacyTombstoneSchema]);
    MemoryItemRecordSchema = LessonRecordSchema;
    // One-letter id prefixes per item type (see generateId).
    TYPE_PREFIXES = {
      lesson: "L",
      solution: "S",
      pattern: "P",
      preference: "R"
    };
  }
});
|
|
147
|
-
/**
 * Appends one memory item as a single JSONL line to the repo's lessons log,
 * creating the containing directory when needed.
 */
async function appendMemoryItem(repoRoot, item) {
  const target = join(repoRoot, LESSONS_PATH);
  await mkdir(dirname(target), { recursive: true });
  await appendFile(target, `${JSON.stringify(item)}\n`, "utf-8");
}
|
|
153
|
-
/** Back-compat alias: lessons are appended through the generic item writer. */
async function appendLesson(repoRoot, lesson) {
  return await appendMemoryItem(repoRoot, lesson);
}
|
|
156
|
-
/**
 * Parses and validates a single JSONL line into a memory-item record.
 * On malformed JSON or a schema-validation failure: throws when `strict`,
 * otherwise reports via onParseError (if given) and returns null.
 */
function parseJsonLine(line, lineNumber, strict, onParseError) {
  // Shared failure path: either throw (strict) or report-and-return-null.
  const fail = (message, cause) => {
    const parseError = { line: lineNumber, message, cause };
    if (strict) {
      throw new Error(`Parse error on line ${lineNumber}: ${parseError.message}`);
    }
    onParseError?.(parseError);
    return null;
  };
  let parsed;
  try {
    parsed = JSON.parse(line);
  } catch (err) {
    return fail(`Invalid JSON: ${err.message}`, err);
  }
  const result = MemoryItemRecordSchema.safeParse(parsed);
  if (!result.success) {
    return fail(`Schema validation failed: ${result.error.message}`, result.error);
  }
  return result.data;
}
|
|
187
|
-
/**
 * Normalizes a parsed record to the current MemoryItem shape.
 * Tombstones map to null; legacy "quick"/"full" lessons become type "lesson"
 * (without mutating the input); everything else passes through unchanged.
 */
function toMemoryItem(record) {
  if (record.deleted === true) {
    return null;
  }
  const isLegacyLesson = record.type === "quick" || record.type === "full";
  return isLegacyLesson ? { ...record, type: "lesson" } : record;
}
|
|
194
|
-
/**
 * Replays the JSONL lessons log into the current set of live memory items.
 * Later lines win for duplicate ids; tombstone lines remove earlier items and
 * collect their ids. Unparseable lines are counted in skippedCount (or throw
 * in strict mode, via parseJsonLine).
 * @returns {{items: object[], deletedIds: Set<string>, skippedCount: number}}
 */
async function readMemoryItems(repoRoot, options = {}) {
  const { strict = false, onParseError } = options;
  const filePath = join(repoRoot, LESSONS_PATH);
  let content;
  try {
    content = await readFile(filePath, "utf-8");
  } catch (err) {
    // A missing log simply means "no items yet".
    if (err.code === "ENOENT") {
      return { items: [], deletedIds: new Set(), skippedCount: 0 };
    }
    throw err;
  }
  const live = new Map();
  const deletedIds = new Set();
  let skippedCount = 0;
  let lineNumber = 0;
  for (const rawLine of content.split("\n")) {
    lineNumber += 1;
    const trimmed = rawLine.trim();
    if (!trimmed) continue;
    const record = parseJsonLine(trimmed, lineNumber, strict, onParseError);
    if (!record) {
      skippedCount += 1;
      continue;
    }
    if (record.deleted === true) {
      live.delete(record.id);
      deletedIds.add(record.id);
    } else {
      const item = toMemoryItem(record);
      if (item) live.set(record.id, item);
    }
  }
  return { items: [...live.values()], deletedIds, skippedCount };
}
|
|
230
|
-
/** Legacy read API: only items of type "lesson", plus the skip count. */
async function readLessons(repoRoot, options = {}) {
  const { items, skippedCount } = await readMemoryItems(repoRoot, options);
  return {
    lessons: items.filter(({ type }) => type === "lesson"),
    skippedCount,
  };
}
|
|
235
|
-
// JSONL storage constants; LESSONS_PATH is relative to the repo root.
var LESSONS_PATH;
var init_jsonl = __esm({
  "src/memory/storage/jsonl.ts"() {
    init_types();
    LESSONS_PATH = ".claude/lessons/index.jsonl";
  }
});
|
|
242
|
-
/**
 * Lazily loads better-sqlite3 and proves the native binding works by opening
 * and closing an in-memory database. Caches the constructor on success; on
 * failure `checked` stays false so a later call retries, and a setup-hint
 * error is thrown with the original failure as its cause.
 */
function ensureSqliteAvailable() {
  if (checked) return;
  try {
    const loaded = require2("better-sqlite3");
    // Handle both CJS and ESM-interop shapes of the module.
    const Ctor = loaded.default || loaded;
    // Smoke-test: resolving the module is not enough — constructing an
    // in-memory DB exercises the compiled native addon.
    const probe = new Ctor(":memory:");
    probe.close();
    DatabaseConstructor = Ctor;
    checked = true;
  } catch (cause) {
    throw new Error(
      'better-sqlite3 failed to load.\nFor pnpm projects:\n 1. Ensure package.json has: "pnpm": { "onlyBuiltDependencies": ["better-sqlite3"] }\n 2. Run: pnpm install && pnpm rebuild better-sqlite3\nFor npm/yarn projects:\n Run: npm rebuild better-sqlite3\nIf the error persists, check that build tools (python3, make, g++) are installed.',
      { cause }
    );
  }
}
|
|
258
|
-
/** Returns the verified better-sqlite3 constructor, loading it on first use. */
function getDatabaseConstructor() {
  ensureSqliteAvailable();
  return DatabaseConstructor;
}
|
|
262
|
-
// State for the lazy better-sqlite3 probe: a CJS require bridge, the
// "already verified" flag, and the cached constructor.
var require2, checked, DatabaseConstructor;
var init_availability = __esm({
  "src/memory/storage/sqlite/availability.ts"() {
    // createRequire lets this ESM bundle load the CJS native addon.
    require2 = createRequire(import.meta.url);
    checked = false;
    DatabaseConstructor = null;
  }
});
|
|
270
|
-
|
|
271
|
-
// src/memory/storage/sqlite/schema.ts
|
|
272
|
-
/**
 * Applies the idempotent DDL and stamps the schema-version pragma.
 * user_version is only written when it differs from SCHEMA_VERSION.
 */
function createSchema(database) {
  database.exec(SCHEMA_SQL);
  const storedVersion = database.pragma("user_version", { simple: true });
  if (storedVersion === SCHEMA_VERSION) return;
  database.pragma(`user_version = ${SCHEMA_VERSION}`);
}
|
|
279
|
-
// SQLite DDL for the lesson index. The lessons table mirrors the JSONL
// records; lessons_fts is an FTS5 external-content index kept in sync by
// the three triggers below. Bump SCHEMA_VERSION on any incompatible change
// (openDb recreates the file when versions differ).
var SCHEMA_VERSION, SCHEMA_SQL;
var init_schema = __esm({
  "src/memory/storage/sqlite/schema.ts"() {
    SCHEMA_VERSION = 5;
    SCHEMA_SQL = `
    CREATE TABLE IF NOT EXISTS lessons (
      id TEXT PRIMARY KEY,
      type TEXT NOT NULL,
      trigger TEXT NOT NULL,
      insight TEXT NOT NULL,
      evidence TEXT,
      severity TEXT,
      tags TEXT NOT NULL DEFAULT '',
      source TEXT NOT NULL,
      context TEXT NOT NULL DEFAULT '{}',
      supersedes TEXT NOT NULL DEFAULT '[]',
      related TEXT NOT NULL DEFAULT '[]',
      created TEXT NOT NULL,
      confirmed INTEGER NOT NULL DEFAULT 0,
      deleted INTEGER NOT NULL DEFAULT 0,
      retrieval_count INTEGER NOT NULL DEFAULT 0,
      last_retrieved TEXT,
      embedding BLOB,
      content_hash TEXT,
      embedding_insight BLOB,
      content_hash_insight TEXT,
      invalidated_at TEXT,
      invalidation_reason TEXT,
      citation_file TEXT,
      citation_line INTEGER,
      citation_commit TEXT,
      compaction_level INTEGER DEFAULT 0,
      compacted_at TEXT,
      pattern_bad TEXT,
      pattern_good TEXT
    );

    CREATE VIRTUAL TABLE IF NOT EXISTS lessons_fts USING fts5(
      id, trigger, insight, tags, pattern_bad, pattern_good,
      content='lessons', content_rowid='rowid'
    );

    CREATE TRIGGER IF NOT EXISTS lessons_ai AFTER INSERT ON lessons BEGIN
      INSERT INTO lessons_fts(rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
      VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags, new.pattern_bad, new.pattern_good);
    END;

    CREATE TRIGGER IF NOT EXISTS lessons_ad AFTER DELETE ON lessons BEGIN
      INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
      VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);
    END;

    CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE OF id, trigger, insight, tags, pattern_bad, pattern_good ON lessons BEGIN
      INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
      VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);
      INSERT INTO lessons_fts(rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
      VALUES (new.rowid, new.id, new.trigger, new.insight, new.tags, new.pattern_bad, new.pattern_good);
    END;

    CREATE INDEX IF NOT EXISTS idx_lessons_created ON lessons(created);
    CREATE INDEX IF NOT EXISTS idx_lessons_confirmed ON lessons(confirmed);
    CREATE INDEX IF NOT EXISTS idx_lessons_severity ON lessons(severity);
    CREATE INDEX IF NOT EXISTS idx_lessons_type ON lessons(type);

    CREATE TABLE IF NOT EXISTS metadata (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL
    );
  `;
  }
});
|
|
350
|
-
/** True when the DB's user_version pragma matches the current SCHEMA_VERSION. */
function hasExpectedVersion(database) {
  const storedVersion = database.pragma("user_version", { simple: true });
  return storedVersion === SCHEMA_VERSION;
}
|
|
354
|
-
/**
 * Opens (or returns a cached) better-sqlite3 handle for a repo.
 * In-memory DBs are cached per repo under a ":memory:<root>" key. File-backed
 * DBs are recreated from scratch when the stored schema version mismatches
 * (the JSONL log is the source of truth, so dropping the cache is safe) and
 * run in WAL journal mode. The schema is (idempotently) applied before the
 * handle is cached and returned.
 */
function openDb(repoRoot, options = {}) {
  const { inMemory = false } = options;
  const key = inMemory ? `:memory:${repoRoot}` : join(repoRoot, DB_PATH);
  const existing = dbMap.get(key);
  if (existing) {
    return existing;
  }
  const Database = getDatabaseConstructor();
  let database;
  if (inMemory) {
    database = new Database(":memory:");
  } else {
    mkdirSync(dirname(key), { recursive: true });
    database = new Database(key);
    if (!hasExpectedVersion(database)) {
      // Stale schema: discard the cache file and start fresh.
      database.close();
      try {
        unlinkSync(key);
      } catch {
        // best-effort — the file may already be gone
      }
      database = new Database(key);
    }
    database.pragma("journal_mode = WAL");
  }
  createSchema(database);
  dbMap.set(key, database);
  return database;
}
|
|
383
|
-
/**
 * Closes every cached database handle and empties the cache.
 * Previously a throwing close() aborted the loop, leaking the remaining
 * handles and leaving dbMap stale; now every handle gets a close attempt,
 * the cache is always cleared, and the first error (if any) is rethrown.
 */
function closeDb() {
  let firstError = null;
  for (const database of dbMap.values()) {
    try {
      database.close();
    } catch (err) {
      firstError ??= err;
    }
  }
  dbMap.clear();
  if (firstError !== null) throw firstError;
}
|
|
389
|
-
// Connection bookkeeping: the cache-DB path (relative to the repo root) and
// one shared handle per key (see openDb/closeDb).
var DB_PATH, dbMap;
var init_connection = __esm({
  "src/memory/storage/sqlite/connection.ts"() {
    init_availability();
    init_schema();
    DB_PATH = ".claude/.cache/lessons.sqlite";
    dbMap = /* @__PURE__ */ new Map();
  }
});
|
|
398
|
-
/**
 * SHA-256 hex digest of "<trigger> <insight>", used to detect stale
 * embedding-cache entries.
 * NOTE(review): the single-space separator is ambiguous ("a b"+"c" hashes the
 * same as "a"+"b c"); kept as-is so existing cached hashes stay valid.
 */
function contentHash(trigger, insight) {
  const hasher = createHash("sha256");
  hasher.update(`${trigger} ${insight}`);
  return hasher.digest("hex");
}
|
|
401
|
-
/**
 * Stores the trigger+insight embedding and its content hash on one lesson
 * row. Accepts either a Float32Array or a plain number array.
 */
function setCachedEmbedding(repoRoot, lessonId, embedding, hash) {
  const database = openDb(repoRoot);
  const vector = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);
  const blob = Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength);
  database
    .prepare("UPDATE lessons SET embedding = ?, content_hash = ? WHERE id = ?")
    .run(blob, hash, lessonId);
}
|
|
407
|
-
/**
 * Loads every cached trigger+insight embedding, keyed by lesson id.
 * Rows without a content hash are treated as invalid and skipped.
 * @returns {Map<string, {vector: Float32Array, hash: string}>}
 */
function getCachedEmbeddingsBulk(repoRoot) {
  const database = openDb(repoRoot);
  const rows = database.prepare("SELECT id, embedding, content_hash FROM lessons WHERE embedding IS NOT NULL").all();
  const byId = new Map();
  for (const row of rows) {
    if (!row.content_hash) continue;
    // Reinterpret the stored BLOB as float32 values without copying.
    const blob = row.embedding;
    const vector = new Float32Array(blob.buffer, blob.byteOffset, blob.byteLength / 4);
    byId.set(row.id, { vector, hash: row.content_hash });
  }
  return byId;
}
|
|
422
|
-
/**
 * Returns the cached insight-only embedding for a lesson, or null when the
 * row/blob/hash is missing, or when expectedHash is given and does not match
 * the stored hash.
 */
function getCachedInsightEmbedding(repoRoot, lessonId, expectedHash) {
  const database = openDb(repoRoot);
  const row = database.prepare("SELECT embedding_insight, content_hash_insight FROM lessons WHERE id = ?").get(lessonId);
  if (!row?.embedding_insight || !row.content_hash_insight) {
    return null;
  }
  if (expectedHash && row.content_hash_insight !== expectedHash) {
    return null;
  }
  const blob = row.embedding_insight;
  return new Float32Array(blob.buffer, blob.byteOffset, blob.byteLength / 4);
}
|
|
437
|
-
/**
 * Stores the insight-only embedding and its content hash on one lesson row.
 * Accepts either a Float32Array or a plain number array.
 */
function setCachedInsightEmbedding(repoRoot, lessonId, embedding, hash) {
  const database = openDb(repoRoot);
  const vector = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);
  const blob = Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength);
  database
    .prepare("UPDATE lessons SET embedding_insight = ?, content_hash_insight = ? WHERE id = ?")
    .run(blob, hash, lessonId);
}
|
|
443
|
-
/**
 * Snapshots all valid embedding blobs before the lessons table is wiped so
 * rebuildIndex can carry them over. A row with only an insight embedding is
 * kept with an empty contentHash placeholder, which can never match a real
 * hash — so only its insight embedding survives revalidation.
 */
function collectCachedEmbeddings(database) {
  const cache = new Map();
  const rows = database.prepare("SELECT id, embedding, content_hash, embedding_insight, content_hash_insight FROM lessons WHERE embedding IS NOT NULL OR embedding_insight IS NOT NULL").all();
  for (const row of rows) {
    if (row.embedding && row.content_hash) {
      cache.set(row.id, {
        embedding: row.embedding,
        contentHash: row.content_hash,
        embeddingInsight: row.embedding_insight,
        contentHashInsight: row.content_hash_insight
      });
      continue;
    }
    if (row.embedding_insight && row.content_hash_insight) {
      cache.set(row.id, {
        embedding: row.embedding_insight,
        contentHash: "", // placeholder, won't match any real hash
        embeddingInsight: row.embedding_insight,
        contentHashInsight: row.content_hash_insight
      });
    }
  }
  return cache;
}
|
|
466
|
-
// Embedding-cache module init: only needs the shared connection layer.
var init_cache = __esm({
  "src/memory/storage/sqlite/cache.ts"() {
    init_connection();
  }
});
|
|
471
|
-
/**
 * mtime (ms) of the repo's lessons JSONL file, or null when it is missing or
 * cannot be stat'ed.
 */
function getJsonlMtime(repoRoot) {
  try {
    return statSync(join(repoRoot, LESSONS_PATH)).mtimeMs;
  } catch {
    return null;
  }
}
|
|
480
|
-
/**
 * Reads the JSONL mtime recorded at the last sync, or null when no sync has
 * been recorded OR when the stored value is not numeric. Previously a corrupt
 * metadata value parsed to NaN, which made every mtime comparison in
 * syncIfNeeded false and silently disabled rebuilds forever.
 */
function getLastSyncMtime(database) {
  const row = database.prepare("SELECT value FROM metadata WHERE key = ?").get("last_sync_mtime");
  if (!row) return null;
  const mtime = Number.parseFloat(row.value);
  return Number.isNaN(mtime) ? null : mtime;
}
|
|
484
|
-
/** Records the JSONL mtime that the SQLite index was last rebuilt from. */
function setLastSyncMtime(database, mtime) {
  database
    .prepare("INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)")
    .run("last_sync_mtime", String(mtime));
}
|
|
487
|
-
/**
 * Rebuilds the SQLite lessons table from the JSONL source of truth.
 * Embeddings whose content hashes still match are carried over from the old
 * rows so unchanged items do not need re-embedding; everything is inserted in
 * one transaction, and the JSONL mtime is recorded as the last-sync marker.
 */
async function rebuildIndex(repoRoot) {
  const database = openDb(repoRoot);
  const { items } = await readMemoryItems(repoRoot);
  // Snapshot embeddings before wiping the table.
  const cachedEmbeddings = collectCachedEmbeddings(database);
  database.exec("DELETE FROM lessons");

  const recordSync = () => {
    const mtime = getJsonlMtime(repoRoot);
    if (mtime !== null) setLastSyncMtime(database, mtime);
  };

  if (items.length === 0) {
    recordSync();
    return;
  }

  // Maps a MemoryItem to the named-parameter row shape of INSERT_LESSON_SQL,
  // reattaching cached embeddings whose content hashes still match.
  const toRow = (item) => {
    const pairHash = contentHash(item.trigger, item.insight);
    const insightHash = contentHash(item.insight, "");
    const cached = cachedEmbeddings.get(item.id);
    const keepPair = cached && cached.contentHash === pairHash;
    const keepInsight = cached && cached.contentHashInsight === insightHash;
    return {
      id: item.id,
      type: item.type,
      trigger: item.trigger,
      insight: item.insight,
      evidence: item.evidence ?? null,
      severity: item.severity ?? null,
      tags: item.tags.join(","),
      source: item.source,
      context: JSON.stringify(item.context),
      supersedes: JSON.stringify(item.supersedes),
      related: JSON.stringify(item.related),
      created: item.created,
      confirmed: item.confirmed ? 1 : 0,
      deleted: item.deleted ? 1 : 0,
      retrieval_count: item.retrievalCount ?? 0,
      last_retrieved: item.lastRetrieved ?? null,
      embedding: keepPair ? cached.embedding : null,
      content_hash: keepPair ? cached.contentHash : null,
      embedding_insight: keepInsight ? cached.embeddingInsight : null,
      content_hash_insight: keepInsight ? cached.contentHashInsight : null,
      invalidated_at: item.invalidatedAt ?? null,
      invalidation_reason: item.invalidationReason ?? null,
      citation_file: item.citation?.file ?? null,
      citation_line: item.citation?.line ?? null,
      citation_commit: item.citation?.commit ?? null,
      compaction_level: item.compactionLevel ?? 0,
      compacted_at: item.compactedAt ?? null,
      pattern_bad: item.pattern?.bad ?? null,
      pattern_good: item.pattern?.good ?? null
    };
  };

  const insert = database.prepare(INSERT_LESSON_SQL);
  const insertMany = database.transaction((memoryItems) => {
    for (const memoryItem of memoryItems) {
      insert.run(toRow(memoryItem));
    }
  });
  insertMany(items);
  recordSync();
}
|
|
546
|
-
/**
 * Rebuilds the SQLite index when the JSONL file is newer than the last
 * recorded sync, when no sync was ever recorded, or when forced.
 * @returns {Promise<boolean>} true when a rebuild actually ran.
 */
async function syncIfNeeded(repoRoot, options = {}) {
  const { force = false } = options;
  const jsonlMtime = getJsonlMtime(repoRoot);
  // No JSONL file and no force: nothing to sync from.
  if (jsonlMtime === null && !force) return false;
  const database = openDb(repoRoot);
  const lastSyncMtime = getLastSyncMtime(database);
  const stale = lastSyncMtime === null || (jsonlMtime !== null && jsonlMtime > lastSyncMtime);
  if (!force && !stale) return false;
  await rebuildIndex(repoRoot);
  return true;
}
|
|
561
|
-
// Prepared-statement SQL for rebuildIndex; the @named parameters mirror the
// lessons column set one-for-one.
var INSERT_LESSON_SQL;
var init_sync = __esm({
  "src/memory/storage/sqlite/sync.ts"() {
    init_jsonl();
    init_connection();
    init_cache();
    INSERT_LESSON_SQL = `
    INSERT INTO lessons (id, type, trigger, insight, evidence, severity, tags, source, context, supersedes, related, created, confirmed, deleted, retrieval_count, last_retrieved, embedding, content_hash, embedding_insight, content_hash_insight, invalidated_at, invalidation_reason, citation_file, citation_line, citation_commit, compaction_level, compacted_at, pattern_bad, pattern_good)
    VALUES (@id, @type, @trigger, @insight, @evidence, @severity, @tags, @source, @context, @supersedes, @related, @created, @confirmed, @deleted, @retrieval_count, @last_retrieved, @embedding, @content_hash, @embedding_insight, @content_hash_insight, @invalidated_at, @invalidation_reason, @citation_file, @citation_line, @citation_commit, @compaction_level, @compacted_at, @pattern_bad, @pattern_good)
  `;
  }
});
|
|
573
|
-
|
|
574
|
-
// src/memory/search/hybrid.ts
|
|
575
|
-
/**
 * Maps an FTS5 bm25 rank onto [0, 1) via |rank| / (1 + |rank|).
 * Non-finite ranks (NaN, ±Infinity) normalize to 0.
 */
function normalizeBm25Rank(rank) {
  if (!Number.isFinite(rank)) return 0;
  const magnitude = Math.abs(rank);
  return magnitude / (1 + magnitude);
}
|
|
580
|
-
/**
 * Combines vector-similarity and keyword (FTS) result lists into one ranked
 * list using a normalized weighted sum of the two channel scores. Items that
 * appear in only one channel score 0 on the other. Optional minScore and
 * limit trim the merged output (in that order). Returns [] when both inputs
 * are empty or the combined weight is non-positive.
 */
function mergeHybridScores(vectorResults, keywordResults, getId, options) {
  if (vectorResults.length === 0 && keywordResults.length === 0) return [];
  const rawVectorWeight = options?.vectorWeight ?? DEFAULT_VECTOR_WEIGHT;
  const rawTextWeight = options?.textWeight ?? DEFAULT_TEXT_WEIGHT;
  const weightSum = rawVectorWeight + rawTextWeight;
  if (weightSum <= 0) return [];
  const vectorWeight = rawVectorWeight / weightSum;
  const textWeight = rawTextWeight / weightSum;

  // Pair up scores by item id across the two channels.
  const combined = new Map();
  for (const { item, score } of vectorResults) {
    combined.set(getId(item), { item, vecScore: score, txtScore: 0 });
  }
  for (const { item, score } of keywordResults) {
    const id = getId(item);
    const entry = combined.get(id);
    if (entry) {
      entry.txtScore = score;
    } else {
      combined.set(id, { item, vecScore: 0, txtScore: score });
    }
  }

  const ranked = [...combined.values()]
    .map(({ item, vecScore, txtScore }) => ({
      item,
      score: vectorWeight * vecScore + textWeight * txtScore,
    }))
    .sort((a, b) => b.score - a.score);

  const minScore = options?.minScore;
  const aboveFloor = minScore === undefined ? ranked : ranked.filter((r) => r.score >= minScore);
  const limit = options?.limit;
  return limit === undefined ? aboveFloor : aboveFloor.slice(0, limit);
}
|
|
614
|
-
/** Lesson-shaped wrapper over mergeHybridScores ({lesson} in/out, keyed by lesson.id). */
function mergeHybridResults(vectorResults, keywordResults, options) {
  const asItems = (results) => results.map(({ lesson, score }) => ({ item: lesson, score }));
  const merged = mergeHybridScores(asItems(vectorResults), asItems(keywordResults), (item) => item.id, options);
  return merged.map(({ item, score }) => ({ lesson: item, score }));
}
|
|
620
|
-
// Tunables for hybrid (vector + keyword) search.
var DEFAULT_VECTOR_WEIGHT, DEFAULT_TEXT_WEIGHT, CANDIDATE_MULTIPLIER, MIN_HYBRID_SCORE;
var init_hybrid = __esm({
  "src/memory/search/hybrid.ts"() {
    DEFAULT_VECTOR_WEIGHT = 0.7;
    DEFAULT_TEXT_WEIGHT = 0.3;
    CANDIDATE_MULTIPLIER = 4;
    MIN_HYBRID_SCORE = 0.35;
  }
});
|
|
629
|
-
|
|
630
|
-
// src/memory/storage/sqlite/search.ts
|
|
631
|
-
/** JSON.parse that yields `fallback` instead of throwing on malformed input. */
function safeJsonParse(value, fallback) {
  try {
    return JSON.parse(value);
  } catch {
    return fallback;
  }
}
|
|
638
|
-
/**
 * Converts a SQLite lessons row back into a validated MemoryItem.
 * Optional columns become optional fields only when non-NULL (so the
 * reconstructed object matches what was originally serialized); the result
 * is re-validated against MemoryItemSchema and null is returned when the row
 * no longer satisfies the current schema.
 */
function rowToMemoryItem(row) {
  const candidate = {
    id: row.id,
    type: row.type,
    trigger: row.trigger,
    insight: row.insight,
    tags: row.tags ? row.tags.split(",").filter(Boolean) : [],
    source: row.source,
    context: safeJsonParse(row.context, {}),
    supersedes: safeJsonParse(row.supersedes, []),
    related: safeJsonParse(row.related, []),
    created: row.created,
    confirmed: row.confirmed === 1,
    ...(row.evidence !== null && { evidence: row.evidence }),
    ...(row.severity !== null && { severity: row.severity }),
    ...(row.deleted === 1 && { deleted: true }),
    ...(row.retrieval_count > 0 && { retrievalCount: row.retrieval_count }),
    ...(row.invalidated_at !== null && { invalidatedAt: row.invalidated_at }),
    ...(row.invalidation_reason !== null && { invalidationReason: row.invalidation_reason }),
    ...(row.citation_file !== null && {
      citation: {
        file: row.citation_file,
        ...(row.citation_line !== null && { line: row.citation_line }),
        ...(row.citation_commit !== null && { commit: row.citation_commit }),
      },
    }),
    // Level 0 (active) is the implicit default and is not round-tripped.
    ...(row.compaction_level !== null && row.compaction_level !== 0 && { compactionLevel: row.compaction_level }),
    ...(row.compacted_at !== null && { compactedAt: row.compacted_at }),
    ...(row.last_retrieved !== null && { lastRetrieved: row.last_retrieved }),
    ...(row.pattern_bad !== null && row.pattern_good !== null && {
      pattern: { bad: row.pattern_bad, good: row.pattern_good },
    }),
  };
  const validated = MemoryItemSchema.safeParse(candidate);
  return validated.success ? validated.data : null;
}
|
|
677
|
-
/**
 * Load every non-invalidated lesson row from the repo's SQLite store,
 * dropping rows that fail MemoryItem validation.
 */
function readAllFromSqlite(repoRoot) {
  const db = openDb(repoRoot);
  const rows = db.prepare("SELECT * FROM lessons WHERE invalidated_at IS NULL").all();
  const items = [];
  for (const row of rows) {
    const item = rowToMemoryItem(row);
    if (item !== null) items.push(item);
  }
  return items;
}
|
|
682
|
-
/**
 * Strip FTS5 special characters and bare boolean operators from a user query
 * so it can be passed safely to a MATCH clause.
 */
function sanitizeFtsQuery(query) {
  const withoutSpecials = query.replace(/["*^+\-():{}]/g, "");
  const kept = [];
  for (const token of withoutSpecials.split(/\s+/)) {
    if (token.length === 0) continue;
    if (FTS_OPERATORS.has(token)) continue;
    kept.push(token);
  }
  return kept.join(" ");
}
|
|
687
|
-
/**
 * Bump retrieval_count and stamp last_retrieved for each retrieved lesson,
 * all inside one transaction. No-op for an empty id list.
 */
function incrementRetrievalCount(repoRoot, lessonIds) {
  if (lessonIds.length === 0) return;
  const db = openDb(repoRoot);
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
  const stmt = db.prepare(`
    UPDATE lessons
    SET retrieval_count = retrieval_count + 1,
        last_retrieved = ?
    WHERE id = ?
  `);
  const runAll = db.transaction((ids) => {
    ids.forEach((id) => stmt.run(timestamp, id));
  });
  runAll(lessonIds);
}
|
|
704
|
-
// Run a sanitized FTS5 MATCH query against the lessons table and return raw
// rows (optionally including the bm25 rank column). On FTS errors it logs
// and returns [] rather than throwing, so search degrades gracefully.
function executeFtsQuery(repoRoot, query, limit, options) {
  const database = openDb(repoRoot);
  const sanitized = sanitizeFtsQuery(query);
  if (sanitized === "") return [];
  // fts.rank is SQLite's bm25 ordering value; only selected when requested.
  const selectCols = options.includeRank ? "l.*, fts.rank" : "l.*";
  const orderClause = options.includeRank ? "ORDER BY fts.rank" : "";
  const typeClause = options.typeFilter ? "AND l.type = ?" : "";
  const sql = `
    SELECT ${selectCols}
    FROM lessons l
    JOIN lessons_fts fts ON l.rowid = fts.rowid
    WHERE lessons_fts MATCH ?
    AND l.invalidated_at IS NULL
    ${typeClause}
    ${orderClause}
    LIMIT ?
  `;
  // Parameter order must mirror the placeholders: MATCH term, optional type, LIMIT.
  const params = options.typeFilter ? [sanitized, options.typeFilter, limit] : [sanitized, limit];
  try {
    return database.prepare(sql).all(...params);
  } catch (err) {
    const message = err instanceof Error ? err.message : "Unknown FTS5 error";
    console.error(`[compound-agent] search error: ${message}`);
    return [];
  }
}
|
|
730
|
-
/**
 * Keyword (FTS5) search returning validated MemoryItems without scores.
 */
async function searchKeyword(repoRoot, query, limit, typeFilter) {
  const rows = executeFtsQuery(repoRoot, query, limit, { includeRank: false, typeFilter });
  const items = [];
  for (const row of rows) {
    const item = rowToMemoryItem(row);
    if (item !== null) items.push(item);
  }
  return items;
}
|
|
734
|
-
/**
 * Keyword (FTS5) search returning { lesson, score } pairs, where score is
 * the normalized bm25 rank of the matching row. Rows failing MemoryItem
 * validation are dropped.
 */
async function searchKeywordScored(repoRoot, query, limit, typeFilter) {
  const rows = executeFtsQuery(repoRoot, query, limit, { includeRank: true, typeFilter });
  return rows
    .map((row) => ({ rank: row.rank, lesson: rowToMemoryItem(row) }))
    .filter(({ lesson }) => lesson !== null)
    .map(({ rank, lesson }) => ({ lesson, score: normalizeBm25Rank(rank) }));
}
|
|
745
|
-
// FTS5 keyword operators stripped from user queries by sanitizeFtsQuery.
var FTS_OPERATORS;
var init_search = __esm({
  "src/memory/storage/sqlite/search.ts"() {
    init_types();
    init_hybrid();
    init_connection();
    FTS_OPERATORS = /* @__PURE__ */ new Set(["AND", "OR", "NOT", "NEAR"]);
  }
});
|
|
754
|
-
|
|
755
|
-
// src/memory/storage/sqlite/index.ts
|
|
756
|
-
// Barrel initializer for src/memory/storage/sqlite: running it pulls in the
// connection, cache, sync, availability and search sub-modules.
var init_sqlite = __esm({
  "src/memory/storage/sqlite/index.ts"() {
    init_connection();
    init_cache();
    init_sync();
    init_availability();
    init_search();
  }
});
// Initializer for src/memory/storage/compact.ts (depends on types + jsonl).
var init_compact = __esm({
  "src/memory/storage/compact.ts"() {
    init_types();
    init_jsonl();
  }
});

// src/memory/storage/index.ts
// Barrel initializer for the whole storage layer.
var init_storage = __esm({
  "src/memory/storage/index.ts"() {
    init_jsonl();
    init_sqlite();
    init_compact();
  }
});
|
|
780
|
-
|
|
781
|
-
// src/memory/embeddings/model.ts
|
|
782
|
-
// Lazy module record for src/memory/embeddings/model.ts; __export installs
// live getters so consumers always observe the current binding values.
var model_exports = {};
__export(model_exports, {
  MODEL_FILENAME: () => MODEL_FILENAME,
  MODEL_URI: () => MODEL_URI,
  clearUsabilityCache: () => clearUsabilityCache,
  isModelAvailable: () => isModelAvailable,
  isModelUsable: () => isModelUsable,
  resolveModel: () => resolveModel
});
|
|
791
|
-
/**
 * Check whether the embedding model file exists on disk (no load attempt).
 */
function isModelAvailable() {
  const modelFile = join(DEFAULT_MODEL_DIR, MODEL_FILENAME);
  return existsSync(modelFile);
}
|
|
794
|
-
// Probe whether the embedding model can actually be loaded and used, not just
// whether the file exists. The verdict is memoized in `cachedUsability`
// (clear with clearUsabilityCache() to re-probe). Every resource created by
// the probe is disposed in the finally block — this function only tests, it
// never keeps the model loaded.
async function isModelUsable() {
  if (cachedUsability !== null) {
    return cachedUsability;
  }
  if (!isModelAvailable()) {
    cachedUsability = {
      usable: false,
      reason: "Embedding model file not found",
      action: "Run: npx ca download-model"
    };
    return cachedUsability;
  }
  let llama = null;
  let model = null;
  let context = null;
  try {
    const modelPath = join(DEFAULT_MODEL_DIR, MODEL_FILENAME);
    llama = await getLlama({
      build: "never",
      // Never compile from source in a deployed tool
      progressLogs: false,
      // Suppress prebuilt binary fallback warnings
      logLevel: LlamaLogLevel.error
      // Only surface real errors from C++ backend
      // Set NODE_LLAMA_CPP_DEBUG=true to re-enable all output for troubleshooting
    });
    model = await llama.loadModel({ modelPath });
    context = await model.createEmbeddingContext();
    cachedUsability = { usable: true };
    return cachedUsability;
  } catch (err) {
    const message = err instanceof Error ? err.message : "Unknown error";
    cachedUsability = {
      usable: false,
      reason: `Embedding model runtime initialization failed: ${message}`,
      action: "Check system compatibility or reinstall: npx ca download-model"
    };
    return cachedUsability;
  } finally {
    // Dispose in reverse creation order; dispose failures are ignored because
    // the usability verdict has already been decided above.
    if (context) {
      try {
        await context.dispose();
      } catch {
      }
    }
    if (model) {
      try {
        await model.dispose();
      } catch {
      }
    }
    if (llama) {
      try {
        await llama.dispose();
      } catch {
      }
    }
  }
}
|
|
853
|
-
// Reset the memoized usability verdict so the next isModelUsable() re-probes.
function clearUsabilityCache() {
  cachedUsability = null;
}
|
|
856
|
-
/**
 * Resolve the embedding model file via resolveModelFile and return the result.
 * @param options.cli - whether to run in CLI mode (default true).
 */
async function resolveModel({ cli = true } = {}) {
  return resolveModelFile(MODEL_URI, { cli });
}
|
|
860
|
-
// Model identity/location constants plus the memoized usability verdict.
var MODEL_URI, MODEL_FILENAME, DEFAULT_MODEL_DIR, cachedUsability;
var init_model = __esm({
  "src/memory/embeddings/model.ts"() {
    // Hugging Face URI understood by resolveModelFile.
    MODEL_URI = "hf:ggml-org/embeddinggemma-300M-qat-q4_0-GGUF/embeddinggemma-300M-qat-Q4_0.gguf";
    // Local filename expected under DEFAULT_MODEL_DIR.
    MODEL_FILENAME = "hf_ggml-org_embeddinggemma-300M-qat-Q4_0.gguf";
    DEFAULT_MODEL_DIR = join(homedir(), ".node-llama-cpp", "models");
    cachedUsability = null;
  }
});
|
|
869
|
-
// Lazily initialize (once) and return the shared embedding context.
// Concurrent callers during initialization all await the same `pendingInit`
// promise; on failure `pendingInit` is cleared so a later call can retry.
async function getEmbedding() {
  if (embeddingContext) return embeddingContext;
  if (pendingInit) return pendingInit;
  pendingInit = (async () => {
    try {
      const modelPath = await resolveModel({ cli: true });
      llamaInstance = await getLlama({
        build: "never",
        // Never compile from source in a deployed tool
        progressLogs: false,
        // Suppress prebuilt binary fallback warnings
        logLevel: LlamaLogLevel.error
        // Only surface real errors from C++ backend
        // Set NODE_LLAMA_CPP_DEBUG=true to re-enable all output for troubleshooting
      });
      modelInstance = await llamaInstance.loadModel({ modelPath });
      embeddingContext = await modelInstance.createEmbeddingContext();
      return embeddingContext;
    } catch (err) {
      // Reset so the next getEmbedding() call can retry initialization.
      pendingInit = null;
      throw err;
    }
  })();
  return pendingInit;
}
|
|
894
|
-
// Release the llama context/model/runtime and reset the module singletons.
// Waits for any in-flight initialization first (ignoring its errors), then
// nulls the shared references BEFORE disposing so that a concurrent
// getEmbedding() starts a fresh init instead of handing out half-disposed
// objects. Dispose failures are absorbed via Promise.allSettled.
async function unloadEmbeddingResources() {
  const pending = pendingInit;
  if (pending) {
    try {
      await pending;
    } catch {
    }
  }
  const context = embeddingContext;
  const model = modelInstance;
  const llama = llamaInstance;
  embeddingContext = null;
  modelInstance = null;
  llamaInstance = null;
  pendingInit = null;
  const disposals = [];
  if (context) {
    disposals.push(context.dispose());
  }
  if (model) {
    disposals.push(model.dispose());
  }
  if (llama) {
    disposals.push(llama.dispose());
  }
  if (disposals.length > 0) {
    await Promise.allSettled(disposals);
  }
}
|
|
923
|
-
// Fire-and-forget variant of unloadEmbeddingResources for synchronous call sites.
function unloadEmbedding() {
  void unloadEmbeddingResources();
}
|
|
926
|
-
/**
 * Run `fn`, then always release the embedding model/context resources,
 * whether `fn` resolved or threw.
 */
async function withEmbedding(fn) {
  let result;
  try {
    result = await fn();
  } finally {
    await unloadEmbeddingResources();
  }
  return result;
}
|
|
933
|
-
/**
 * Embed a single string and return its vector as a Float32Array.
 */
async function embedText(text) {
  const embedder = await getEmbedding();
  const { vector } = await embedder.getEmbeddingFor(text);
  return new Float32Array(vector);
}
|
|
938
|
-
/**
 * Embed several strings sequentially through the shared context, preserving
 * input order. Returns [] for an empty list without touching the model.
 */
async function embedTexts(texts) {
  if (texts.length === 0) return [];
  const embedder = await getEmbedding();
  const vectors = [];
  for (let i = 0; i < texts.length; i++) {
    const { vector } = await embedder.getEmbeddingFor(texts[i]);
    vectors.push(new Float32Array(vector));
  }
  return vectors;
}
|
|
948
|
-
// Module-level singletons for the embedding runtime: the shared context, the
// in-flight init promise, and the llama/model handles needed for cleanup.
var embeddingContext, pendingInit, llamaInstance, modelInstance;
var init_nomic = __esm({
  "src/memory/embeddings/nomic.ts"() {
    init_model();
    embeddingContext = null;
    pendingInit = null;
    llamaInstance = null;
    modelInstance = null;
  }
});

// src/memory/embeddings/index.ts
// Barrel initializer for the embeddings layer.
var init_embeddings = __esm({
  "src/memory/embeddings/index.ts"() {
    init_nomic();
    init_model();
  }
});
|
|
966
|
-
|
|
967
|
-
// src/compound/clustering.ts
|
|
968
|
-
/**
 * Build the symmetric n×n cosine-similarity matrix for a list of embeddings,
 * with 1s on the diagonal (each vector vs. itself).
 */
function buildSimilarityMatrix(embeddings) {
  const count = embeddings.length;
  const matrix = embeddings.map(() => new Array(count).fill(0));
  for (let row = 0; row < count; row++) {
    matrix[row][row] = 1;
    for (let col = row + 1; col < count; col++) {
      const similarity = cosineSimilarity(embeddings[row], embeddings[col]);
      matrix[row][col] = similarity;
      matrix[col][row] = similarity;
    }
  }
  return matrix;
}
|
|
981
|
-
// Group items into clusters by pairwise embedding similarity using a
// union-find over the similarity matrix: any pair at or above `threshold`
// lands in the same component. Singleton components come back as `noise`;
// components with 2+ members become `clusters`.
function clusterBySimilarity(items, embeddings, threshold = DEFAULT_THRESHOLD) {
  const n = items.length;
  if (n === 0) return { clusters: [], noise: [] };
  const matrix = buildSimilarityMatrix(embeddings);
  // parent[i] is the union-find parent pointer; every node starts as a root.
  const parent = Array.from({ length: n }, (_, i) => i);
  // Find with path halving: each step points x at its grandparent.
  function find(x) {
    while (parent[x] !== x) {
      parent[x] = parent[parent[x]];
      x = parent[x];
    }
    return x;
  }
  function union(a, b) {
    const rootA = find(a);
    const rootB = find(b);
    if (rootA !== rootB) parent[rootA] = rootB;
  }
  for (let i = 0; i < n; i++) {
    for (let j = i + 1; j < n; j++) {
      if (matrix[i][j] >= threshold) {
        union(i, j);
      }
    }
  }
  // Bucket items by their component root.
  const groups = /* @__PURE__ */ new Map();
  for (let i = 0; i < n; i++) {
    const root = find(i);
    let group = groups.get(root);
    if (!group) {
      group = [];
      groups.set(root, group);
    }
    group.push(items[i]);
  }
  const clusters = [];
  const noise = [];
  for (const group of groups.values()) {
    if (group.length === 1) {
      noise.push(group[0]);
    } else {
      clusters.push(group);
    }
  }
  return { clusters, noise };
}
|
|
1026
|
-
// Minimum cosine similarity for two lessons to join the same cluster.
var DEFAULT_THRESHOLD;
var init_clustering = __esm({
  "src/compound/clustering.ts"() {
    init_search2();
    DEFAULT_THRESHOLD = 0.75;
  }
});
|
|
1033
|
-
/**
 * Derive a stable compound-pattern id: "CCT-" plus the first 8 hex characters
 * of the input's SHA-256 digest.
 */
function generateCctId(input) {
  const digest = createHash("sha256").update(input).digest("hex");
  return `CCT-${digest.slice(0, 8)}`;
}
|
|
1037
|
-
// Storage path and zod schema for synthesized CCT (compound) patterns.
var CCT_PATTERNS_PATH, CctPatternSchema;
var init_types2 = __esm({
  "src/compound/types.ts"() {
    CCT_PATTERNS_PATH = ".claude/lessons/cct-patterns.jsonl";
    CctPatternSchema = z.object({
      // Ids are produced by generateCctId: "CCT-" + 8 hex chars.
      id: z.string().regex(/^CCT-[a-f0-9]{8}$/),
      name: z.string().min(1),
      description: z.string().min(1),
      // Number of source lessons the pattern was synthesized from.
      frequency: z.number().int().positive(),
      testable: z.boolean(),
      testApproach: z.string().optional(),
      sourceIds: z.array(z.string()).min(1),
      created: z.string()
      // ISO8601
    });
  }
});
|
|
1054
|
-
/**
 * Read and validate CCT patterns from the repo's JSONL file.
 * Missing file yields []; malformed JSON lines and schema failures are
 * silently skipped; any other read error propagates.
 */
async function readCctPatterns(repoRoot) {
  const filePath = join(repoRoot, CCT_PATTERNS_PATH);
  let content;
  try {
    content = await readFile(filePath, "utf-8");
  } catch (err) {
    if (err.code === "ENOENT") return [];
    throw err;
  }
  const patterns = [];
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line === "") continue;
    let candidate;
    try {
      candidate = JSON.parse(line);
    } catch {
      continue;
    }
    const validated = CctPatternSchema.safeParse(candidate);
    if (validated.success) patterns.push(validated.data);
  }
  return patterns;
}
|
|
1083
|
-
// Append the given patterns (one JSON object per line) to the repo's CCT
// patterns JSONL file, creating parent directories as needed.
// NOTE(review): despite the "write" name this uses appendFile, so calling it
// with an already-persisted pattern list would duplicate lines — confirm
// callers only pass newly-synthesized patterns.
async function writeCctPatterns(repoRoot, patterns) {
  const filePath = join(repoRoot, CCT_PATTERNS_PATH);
  await mkdir(dirname(filePath), { recursive: true });
  const lines = patterns.map((p) => JSON.stringify(p) + "\n").join("");
  await appendFile(filePath, lines, "utf-8");
}
|
|
1089
|
-
// Initializer for src/compound/io.ts (needs the CCT types/schema).
var init_io = __esm({
  "src/compound/io.ts"() {
    init_types2();
  }
});
|
|
1094
|
-
|
|
1095
|
-
// src/compound/synthesis.ts
|
|
1096
|
-
/**
 * Synthesize a single CCT pattern from a cluster of related lessons.
 * Name: the three most frequent tags across the cluster (or a 50-char prefix
 * of the first insight when no lesson is tagged). Description: all insights
 * joined with "; ". A pattern is considered testable when any lesson in the
 * cluster is high-severity or carries evidence.
 */
function synthesizePattern(cluster, clusterId) {
  // Count tag occurrences across the whole cluster.
  const tagCounts = /* @__PURE__ */ new Map();
  for (const lesson of cluster) {
    for (const tag of lesson.tags) {
      tagCounts.set(tag, (tagCounts.get(tag) ?? 0) + 1);
    }
  }
  const rankedTags = [...tagCounts.entries()]
    .sort((left, right) => right[1] - left[1])
    .map(([tag]) => tag);
  const name = rankedTags.length > 0
    ? rankedTags.slice(0, 3).join(", ")
    : cluster[0].insight.slice(0, 50);
  const frequency = cluster.length;
  const testable = cluster.some(
    (lesson) => "severity" in lesson && lesson.severity === "high"
  ) || cluster.some(
    (lesson) => "evidence" in lesson && lesson.evidence
  );
  const testApproach = testable ? `Verify pattern: ${name}. Check ${frequency} related lesson(s).` : void 0;
  return {
    id: generateCctId(clusterId),
    name,
    description: cluster.map((lesson) => lesson.insight).join("; "),
    frequency,
    testable,
    ...testApproach !== void 0 && { testApproach },
    sourceIds: cluster.map((lesson) => lesson.id),
    created: (/* @__PURE__ */ new Date()).toISOString()
  };
}
|
|
1128
|
-
// Initializer for src/compound/synthesis.ts (needs the CCT types/schema).
var init_synthesis = __esm({
  "src/compound/synthesis.ts"() {
    init_types2();
  }
});

// src/compound/index.ts
// Barrel initializer for the compound (pattern synthesis) layer.
var init_compound = __esm({
  "src/compound/index.ts"() {
    init_clustering();
    init_io();
    init_synthesis();
    init_types2();
  }
});
|
|
1143
|
-
|
|
1144
|
-
// src/memory/search/vector.ts
|
|
1145
|
-
/**
 * Cosine similarity of two equal-length numeric vectors.
 * Returns 0 when either vector has zero magnitude; throws on length mismatch.
 */
function cosineSimilarity(a, b) {
  if (a.length !== b.length) {
    throw new Error("Vectors must have same length");
  }
  let dot = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  for (let i = 0; i < a.length; i++) {
    const x = a[i];
    const y = b[i];
    dot += x * y;
    sumSqA += x * x;
    sumSqB += y * y;
  }
  const denom = Math.sqrt(sumSqA) * Math.sqrt(sumSqB);
  return denom === 0 ? 0 : dot / denom;
}
|
|
1161
|
-
/**
 * Project a synthesized CCT pattern into the MemoryItem shape so it can be
 * scored and ranked alongside ordinary lessons in vector search.
 * The pattern's source lesson ids are carried in `related`.
 */
function cctToMemoryItem(pattern) {
  const item = {
    id: pattern.id,
    type: "lesson",
    trigger: pattern.name,
    insight: pattern.description,
    tags: [],
    source: "manual",
    context: { tool: "compound", intent: "synthesis" },
    created: pattern.created,
    confirmed: true,
    supersedes: [],
    related: pattern.sourceIds
  };
  return item;
}
|
|
1176
|
-
// Semantic search over stored lessons plus synthesized CCT patterns.
// Embeds the query once, scores every live lesson by cosine similarity
// (reusing the persistent per-lesson embedding cache when the content hash
// matches), folds in CCT patterns via an in-process cache keyed by
// "<id>:<contentHash>", and returns the top `limit` { lesson, score } pairs.
// Per-item embedding failures are skipped rather than failing the search.
async function searchVector(repoRoot, query, options) {
  const limit = options?.limit ?? DEFAULT_LIMIT;
  await syncIfNeeded(repoRoot);
  const items = readAllFromSqlite(repoRoot);
  let cctPatterns = [];
  try {
    cctPatterns = await readCctPatterns(repoRoot);
  } catch {
  }
  if (items.length === 0 && cctPatterns.length === 0) return [];
  const queryVector = await embedText(query);
  const cachedEmbeddings = getCachedEmbeddingsBulk(repoRoot);
  const scored = [];
  for (const item of items) {
    if (item.invalidatedAt) continue;
    try {
      const itemText = `${item.trigger} ${item.insight}`;
      const hash = contentHash(item.trigger, item.insight);
      const cached = cachedEmbeddings.get(item.id);
      let itemVector;
      if (cached && cached.hash === hash) {
        itemVector = cached.vector;
      } else {
        // Cache miss or stale content: re-embed and persist for next time.
        itemVector = await embedText(itemText);
        setCachedEmbedding(repoRoot, item.id, itemVector, hash);
      }
      const score = cosineSimilarity(queryVector, itemVector);
      scored.push({ lesson: item, score });
    } catch {
      continue;
    }
  }
  for (const pattern of cctPatterns) {
    try {
      const text = `${pattern.name} ${pattern.description}`;
      const hash = contentHash(pattern.name, pattern.description);
      const cacheKey = `${pattern.id}:${hash}`;
      let vec = cctEmbeddingCache.get(cacheKey);
      if (!vec) {
        vec = await embedText(text);
        cctEmbeddingCache.set(cacheKey, vec);
      }
      const score = cosineSimilarity(queryVector, vec);
      // Patterns are adapted to the MemoryItem shape so callers see one type.
      scored.push({ lesson: cctToMemoryItem(pattern), score });
    } catch {
      continue;
    }
  }
  scored.sort((a, b) => b.score - a.score);
  return scored.slice(0, limit);
}
|
|
1227
|
-
// Find lessons whose insight embedding is at least `threshold`-similar to
// `text`, sorted by descending similarity. Returns [] when the embedding
// model is not installed. `options.items` bypasses the SQLite read with a
// pre-loaded list; `options.excludeId` skips one lesson (e.g. the lesson
// being compared against itself). Uses the persistent insight-embedding
// cache, keyed by content hash. Per-item embedding failures are skipped.
async function findSimilarLessons(repoRoot, text, options) {
  const threshold = options?.threshold ?? DEFAULT_THRESHOLD2;
  const excludeId = options?.excludeId;
  if (!isModelAvailable()) return [];
  let items;
  if (options?.items) {
    items = options.items;
  } else {
    await syncIfNeeded(repoRoot);
    items = readAllFromSqlite(repoRoot);
  }
  if (items.length === 0) return [];
  const queryVector = await embedText(text);
  const scored = [];
  for (const item of items) {
    if (item.invalidatedAt) continue;
    if (excludeId && item.id === excludeId) continue;
    try {
      // Hash only the insight (second field empty): this cache is insight-keyed,
      // unlike searchVector's trigger+insight cache.
      const hash = contentHash(item.insight, "");
      let itemVector = getCachedInsightEmbedding(repoRoot, item.id, hash);
      if (!itemVector) {
        itemVector = await embedText(item.insight);
        setCachedInsightEmbedding(repoRoot, item.id, itemVector, hash);
      }
      const score = cosineSimilarity(queryVector, itemVector);
      if (score >= threshold) {
        scored.push({ item, score });
      }
    } catch {
      continue;
    }
  }
  scored.sort((a, b) => b.score - a.score);
  return scored;
}
|
|
1262
|
-
// In-process CCT embedding cache (keyed by "<id>:<contentHash>"), default
// result limit for searchVector, and default similarity threshold for
// findSimilarLessons.
var cctEmbeddingCache, DEFAULT_LIMIT, DEFAULT_THRESHOLD2;
var init_vector = __esm({
  "src/memory/search/vector.ts"() {
    init_compound();
    init_embeddings();
    init_model();
    init_storage();
    cctEmbeddingCache = /* @__PURE__ */ new Map();
    DEFAULT_LIMIT = 10;
    DEFAULT_THRESHOLD2 = 0.8;
  }
});
|
|
1274
|
-
|
|
1275
|
-
// src/utils.ts
|
|
1276
|
-
/**
 * Whole days elapsed since the lesson's `created` timestamp (floored).
 */
function getLessonAgeDays(lesson) {
  const createdMs = new Date(lesson.created).getTime();
  const elapsedMs = Date.now() - createdMs;
  return Math.floor(elapsedMs / MS_PER_DAY);
}
|
|
1281
|
-
// Milliseconds in one day, used for lesson-age computations.
var MS_PER_DAY;
var init_utils = __esm({
  "src/utils.ts"() {
    MS_PER_DAY = 24 * 60 * 60 * 1e3;
  }
});
|
|
1287
|
-
|
|
1288
|
-
// src/memory/search/ranking.ts
|
|
1289
|
-
/**
 * Score multiplier for a lesson's severity. A missing or unrecognized
 * severity gets the neutral medium boost.
 */
function severityBoost(item) {
  if (item.severity === "high") return HIGH_SEVERITY_BOOST;
  if (item.severity === "low") return LOW_SEVERITY_BOOST;
  return MEDIUM_SEVERITY_BOOST;
}
|
|
1301
|
-
/**
 * Boost lessons created within the recency window; older ones are neutral (1).
 */
function recencyBoost(item) {
  const isRecent = getLessonAgeDays(item) <= RECENCY_THRESHOLD_DAYS;
  return isRecent ? RECENCY_BOOST : 1;
}
|
|
1305
|
-
/**
 * Boost explicitly-confirmed lessons; unconfirmed ones are neutral (1).
 */
function confirmationBoost(item) {
  if (item.confirmed) {
    return CONFIRMATION_BOOST;
  }
  return 1;
}
|
|
1308
|
-
/**
 * Final ranking score: vector similarity times the combined severity,
 * recency and confirmation boosts, capped at MAX_COMBINED_BOOST.
 */
function calculateScore(item, vectorSimilarity) {
  const combined = severityBoost(item) * recencyBoost(item) * confirmationBoost(item);
  const boost = Math.min(combined, MAX_COMBINED_BOOST);
  return vectorSimilarity * boost;
}
|
|
1315
|
-
/**
 * Attach a boosted finalScore to each scored lesson and sort descending by it.
 */
function rankLessons(lessons) {
  const withFinal = lessons.map((scored) => {
    const finalScore = calculateScore(scored.lesson, scored.score);
    return { ...scored, finalScore };
  });
  withFinal.sort((a, b) => (b.finalScore ?? 0) - (a.finalScore ?? 0));
  return withFinal;
}
|
|
1321
|
-
// Ranking knobs: severity multipliers, the recency window/boost, the
// confirmation boost, and the cap applied to their combined product.
var RECENCY_THRESHOLD_DAYS, HIGH_SEVERITY_BOOST, MEDIUM_SEVERITY_BOOST, LOW_SEVERITY_BOOST, RECENCY_BOOST, CONFIRMATION_BOOST, MAX_COMBINED_BOOST;
var init_ranking = __esm({
  "src/memory/search/ranking.ts"() {
    init_utils();
    RECENCY_THRESHOLD_DAYS = 30;
    HIGH_SEVERITY_BOOST = 1.5;
    MEDIUM_SEVERITY_BOOST = 1;
    LOW_SEVERITY_BOOST = 0.8;
    RECENCY_BOOST = 1.2;
    CONFIRMATION_BOOST = 1.3;
    MAX_COMBINED_BOOST = 1.8;
  }
});
|
|
1334
|
-
|
|
1335
|
-
// src/memory/search/prewarm.ts
|
|
1336
|
-
// Initializer for src/memory/search/prewarm.ts.
var init_prewarm = __esm({
  "src/memory/search/prewarm.ts"() {
    init_model();
    init_embeddings();
    init_storage();
  }
});

// src/memory/search/index.ts
// Barrel initializer for the search layer.
var init_search2 = __esm({
  "src/memory/search/index.ts"() {
    init_vector();
    init_ranking();
    init_prewarm();
    init_hybrid();
  }
});
|
|
1353
|
-
|
|
1354
|
-
// src/memory/storage/sqlite-knowledge/schema.ts
|
|
1355
|
-
// Create the knowledge-chunk schema (all statements are IF NOT EXISTS, so
// this is idempotent) and record the schema version in the user_version
// pragma, which openKnowledgeDb checks on open.
function createKnowledgeSchema(database) {
  database.exec(SCHEMA_SQL2);
  database.pragma(`user_version = ${KNOWLEDGE_SCHEMA_VERSION}`);
}
|
|
1359
|
-
var KNOWLEDGE_SCHEMA_VERSION, SCHEMA_SQL2;
// Schema for the knowledge-chunk store: a `chunks` table with an
// external-content FTS5 index kept in sync by insert/delete/update triggers,
// plus a simple key/value `metadata` table. Bump KNOWLEDGE_SCHEMA_VERSION
// when the layout changes — openKnowledgeDb() deletes and rebuilds the DB on
// a version mismatch.
var init_schema2 = __esm({
  "src/memory/storage/sqlite-knowledge/schema.ts"() {
    KNOWLEDGE_SCHEMA_VERSION = 2;
    SCHEMA_SQL2 = `
    CREATE TABLE IF NOT EXISTS chunks (
      id TEXT PRIMARY KEY,
      file_path TEXT NOT NULL,
      start_line INTEGER NOT NULL,
      end_line INTEGER NOT NULL,
      content_hash TEXT NOT NULL,
      text TEXT NOT NULL,
      embedding BLOB,
      model TEXT,
      updated_at TEXT NOT NULL
    );

    CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(
      text,
      content='chunks', content_rowid='rowid'
    );

    CREATE TRIGGER IF NOT EXISTS chunks_ai AFTER INSERT ON chunks BEGIN
      INSERT INTO chunks_fts(rowid, text)
      VALUES (new.rowid, new.text);
    END;

    CREATE TRIGGER IF NOT EXISTS chunks_ad AFTER DELETE ON chunks BEGIN
      INSERT INTO chunks_fts(chunks_fts, rowid, text)
      VALUES ('delete', old.rowid, old.text);
    END;

    CREATE TRIGGER IF NOT EXISTS chunks_au AFTER UPDATE ON chunks BEGIN
      INSERT INTO chunks_fts(chunks_fts, rowid, text)
      VALUES ('delete', old.rowid, old.text);
      INSERT INTO chunks_fts(rowid, text)
      VALUES (new.rowid, new.text);
    END;

    CREATE INDEX IF NOT EXISTS idx_chunks_file_path ON chunks(file_path);

    CREATE TABLE IF NOT EXISTS metadata (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL
    );
    `;
  }
});
|
|
1407
|
-
// Open (or return a cached handle to) the per-repo knowledge SQLite DB.
// File-backed DBs are keyed by their absolute path; in-memory DBs by
// repoRoot. If an on-disk DB has a non-zero user_version different from the
// current schema version, the file is deleted and recreated from scratch —
// the knowledge index is a rebuildable cache, so this is safe.
function openKnowledgeDb(repoRoot, options = {}) {
  const { inMemory = false } = options;
  const key = inMemory ? `:memory:${repoRoot}` : join(repoRoot, KNOWLEDGE_DB_PATH);
  const cached = knowledgeDbMap.get(key);
  if (cached) {
    return cached;
  }
  const Database = getDatabaseConstructor();
  let database;
  if (inMemory) {
    database = new Database(":memory:");
  } else {
    const dir = dirname(key);
    mkdirSync(dir, { recursive: true });
    database = new Database(key);
    const version = database.pragma("user_version", { simple: true });
    // 0 means a freshly-created file; any other mismatch means a stale schema.
    if (version !== 0 && version !== KNOWLEDGE_SCHEMA_VERSION) {
      database.close();
      try {
        unlinkSync(key);
      } catch {
      }
      database = new Database(key);
    }
    database.pragma("journal_mode = WAL");
  }
  createKnowledgeSchema(database);
  knowledgeDbMap.set(key, database);
  return database;
}
|
|
1437
|
-
/**
 * Close every cached knowledge DB handle and empty the handle cache.
 */
function closeKnowledgeDb() {
  for (const handle of knowledgeDbMap.values()) {
    handle.close();
  }
  knowledgeDbMap.clear();
}
|
|
1443
|
-
// Repo-relative location of the knowledge DB, and the handle cache used by
// openKnowledgeDb/closeKnowledgeDb (one entry per DB key).
var KNOWLEDGE_DB_PATH, knowledgeDbMap;
var init_connection2 = __esm({
  "src/memory/storage/sqlite-knowledge/connection.ts"() {
    init_availability();
    init_schema2();
    KNOWLEDGE_DB_PATH = ".claude/.cache/knowledge.sqlite";
    knowledgeDbMap = /* @__PURE__ */ new Map();
  }
});
|
|
1452
|
-
/**
 * Deterministic 16-hex-char chunk id derived from the file path and line span.
 */
function generateChunkId(filePath, startLine, endLine) {
  const key = `${filePath}:${startLine}:${endLine}`;
  return createHash("sha256").update(key).digest("hex").slice(0, 16);
}
|
|
1455
|
-
/**
 * Full SHA-256 hex digest of a chunk's text, used for change detection.
 */
function chunkContentHash(text) {
  const hasher = createHash("sha256");
  hasher.update(text);
  return hasher.digest("hex");
}
|
|
1458
|
-
// File extensions the knowledge indexer accepts, and the subset flagged as
// code (presumably handled differently from prose — confirm in the indexer).
var SUPPORTED_EXTENSIONS, CODE_EXTENSIONS;
var init_types3 = __esm({
  "src/memory/knowledge/types.ts"() {
    SUPPORTED_EXTENSIONS = /* @__PURE__ */ new Set([
      ".md",
      ".txt",
      ".rst",
      ".ts",
      ".py",
      ".js",
      ".tsx",
      ".jsx"
    ]);
    CODE_EXTENSIONS = /* @__PURE__ */ new Set([
      ".ts",
      ".tsx",
      ".js",
      ".jsx",
      ".py"
    ]);
  }
});
|
|
1480
|
-
|
|
1481
|
-
// src/memory/storage/sqlite-knowledge/cache.ts
|
|
1482
|
-
// src/memory/storage/sqlite-knowledge/cache.ts
/**
 * Fetch a chunk's cached embedding as a Float32Array.
 * Returns null when the chunk is missing, has no embedding, or (when
 * `expectedHash` is given) its stored content hash no longer matches.
 */
function getCachedChunkEmbedding(repoRoot, chunkId, expectedHash) {
  const database = openKnowledgeDb(repoRoot);
  const row = database.prepare("SELECT embedding, content_hash FROM chunks WHERE id = ?").get(chunkId);
  if (!row || !row.embedding || !row.content_hash) {
    return null;
  }
  if (expectedHash && row.content_hash !== expectedHash) {
    return null;
  }
  const { buffer, byteOffset, byteLength } = row.embedding;
  if (byteOffset % Float32Array.BYTES_PER_ELEMENT === 0) {
    // Aligned BLOB: view the bytes in place without copying.
    return new Float32Array(buffer, byteOffset, byteLength / Float32Array.BYTES_PER_ELEMENT);
  }
  // Bug fix: Buffers sliced from Node's pooled allocations can start at an
  // offset that is not 4-byte aligned, and the Float32Array(buffer, offset)
  // constructor throws a RangeError for unaligned offsets. Copy the bytes
  // to a fresh, aligned buffer in that case.
  const aligned = new Uint8Array(byteLength);
  aligned.set(new Uint8Array(buffer, byteOffset, byteLength));
  return new Float32Array(aligned.buffer);
}
/** Store a chunk's embedding (as a BLOB) together with its content hash. */
function setCachedChunkEmbedding(repoRoot, chunkId, embedding, hash) {
  const database = openKnowledgeDb(repoRoot);
  const float32 = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);
  const buffer = Buffer.from(float32.buffer, float32.byteOffset, float32.byteLength);
  database.prepare("UPDATE chunks SET embedding = ?, content_hash = ? WHERE id = ?").run(buffer, hash, chunkId);
}
/**
 * Load every cached embedding into a Map keyed by chunk id.
 * Values keep the raw BLOB (`embedding`) plus its `contentHash`; decoding
 * to Float32Array is left to the caller.
 */
function collectCachedChunkEmbeddings(database) {
  const cache = new Map();
  const rows = database.prepare("SELECT id, embedding, content_hash FROM chunks WHERE embedding IS NOT NULL").all();
  for (const row of rows) {
    if (row.embedding && row.content_hash) {
      cache.set(row.id, { embedding: row.embedding, contentHash: row.content_hash });
    }
  }
  return cache;
}
|
|
1513
|
-
var init_cache2 = __esm({
|
|
1514
|
-
"src/memory/storage/sqlite-knowledge/cache.ts"() {
|
|
1515
|
-
init_connection2();
|
|
1516
|
-
init_types3();
|
|
1517
|
-
}
|
|
1518
|
-
});
|
|
1519
|
-
|
|
1520
|
-
// src/memory/storage/sqlite-knowledge/search.ts
|
|
1521
|
-
// src/memory/storage/sqlite-knowledge/search.ts
/** Map a raw chunks-table row to the chunk shape used by callers. */
function rowToChunk(row) {
  const result = {
    id: row.id,
    filePath: row.file_path,
    startLine: row.start_line,
    endLine: row.end_line,
    contentHash: row.content_hash,
    text: row.text,
    updatedAt: row.updated_at
  };
  // `model` is optional: only attach it when the column is non-NULL so the
  // property is entirely absent for unembedded chunks.
  if (row.model !== null) {
    result.model = row.model;
  }
  return result;
}
/**
 * BM25-ranked FTS5 keyword search over indexed chunks.
 * Returns [{ chunk, score }] best-first; an empty array for an
 * unsanitizable query or on any FTS5 error (which is logged to stderr).
 */
function searchChunksKeywordScored(repoRoot, query, limit) {
  const database = openKnowledgeDb(repoRoot);
  const sanitized = sanitizeFtsQuery(query);
  if (sanitized === "") return [];
  try {
    const stmt = database.prepare(
      `SELECT c.*, fts.rank
       FROM chunks c
       JOIN chunks_fts fts ON c.rowid = fts.rowid
       WHERE chunks_fts MATCH ?
       ORDER BY fts.rank
       LIMIT ?`
    );
    const scored = [];
    for (const row of stmt.all(sanitized, limit)) {
      scored.push({ chunk: rowToChunk(row), score: normalizeBm25Rank(row.rank) });
    }
    return scored;
  } catch (err) {
    const message = err instanceof Error ? err.message : "Unknown FTS5 error";
    console.error(`[compound-agent] knowledge scored search error: ${message}`);
    return [];
  }
}
|
|
1559
|
-
var init_search3 = __esm({
|
|
1560
|
-
"src/memory/storage/sqlite-knowledge/search.ts"() {
|
|
1561
|
-
init_connection2();
|
|
1562
|
-
init_search();
|
|
1563
|
-
init_hybrid();
|
|
1564
|
-
}
|
|
1565
|
-
});
|
|
1566
|
-
|
|
1567
|
-
// src/memory/storage/sqlite-knowledge/sync.ts
|
|
1568
|
-
// src/memory/storage/sqlite-knowledge/sync.ts
/**
 * Insert-or-replace a batch of chunks in one transaction. `embeddings` is an
 * optional Map<chunkId, Float32Array>; chunks without a vector store NULL.
 */
function upsertChunks(repoRoot, chunks, embeddings) {
  if (chunks.length === 0) return;
  const database = openKnowledgeDb(repoRoot);
  const insert = database.prepare(`
    INSERT OR REPLACE INTO chunks (id, file_path, start_line, end_line, content_hash, text, embedding, model, updated_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);
  const runAll = database.transaction((items) => {
    for (const item of items) {
      const vector = embeddings?.get(item.id);
      const blob = vector ? Buffer.from(vector.buffer, vector.byteOffset, vector.byteLength) : null;
      insert.run(
        item.id,
        item.filePath,
        item.startLine,
        item.endLine,
        item.contentHash,
        item.text,
        blob,
        item.model ?? null,
        item.updatedAt
      );
    }
  });
  runAll(chunks);
}
/** Delete all chunks for each of the given file paths in one transaction. */
function deleteChunksByFilePath(repoRoot, filePaths) {
  if (filePaths.length === 0) return;
  const database = openKnowledgeDb(repoRoot);
  const del = database.prepare("DELETE FROM chunks WHERE file_path = ?");
  database.transaction((paths) => {
    for (const path of paths) del.run(path);
  })(filePaths);
}
/** Distinct file paths that currently have indexed chunks. */
function getIndexedFilePaths(repoRoot) {
  const rows = openKnowledgeDb(repoRoot).prepare("SELECT DISTINCT file_path FROM chunks").all();
  return rows.map((r) => r.file_path);
}
/** Timestamp of the last index run as stored in metadata, or null. */
function getLastIndexTime(repoRoot) {
  const row = openKnowledgeDb(repoRoot).prepare("SELECT value FROM metadata WHERE key = 'last_index_time'").get();
  return row?.value ?? null;
}
/** Total number of indexed chunks. */
function getChunkCount(repoRoot) {
  return openKnowledgeDb(repoRoot).prepare("SELECT COUNT(*) as cnt FROM chunks").get().cnt;
}
/** Number of indexed chunks belonging to a single file. */
function getChunkCountByFilePath(repoRoot, filePath) {
  return openKnowledgeDb(repoRoot).prepare("SELECT COUNT(*) as cnt FROM chunks WHERE file_path = ?").get(filePath).cnt;
}
/** Record the timestamp of the latest index run in the metadata table. */
function setLastIndexTime(repoRoot, time) {
  openKnowledgeDb(repoRoot).prepare("INSERT OR REPLACE INTO metadata (key, value) VALUES ('last_index_time', ?)").run(time);
}
|
|
1629
|
-
var init_sync2 = __esm({
|
|
1630
|
-
"src/memory/storage/sqlite-knowledge/sync.ts"() {
|
|
1631
|
-
init_connection2();
|
|
1632
|
-
}
|
|
1633
|
-
});
|
|
1634
|
-
|
|
1635
|
-
// src/memory/storage/sqlite-knowledge/index.ts
|
|
1636
|
-
var sqlite_knowledge_exports = {};
|
|
1637
|
-
__export(sqlite_knowledge_exports, {
|
|
1638
|
-
KNOWLEDGE_DB_PATH: () => KNOWLEDGE_DB_PATH,
|
|
1639
|
-
KNOWLEDGE_SCHEMA_VERSION: () => KNOWLEDGE_SCHEMA_VERSION,
|
|
1640
|
-
chunkContentHash: () => chunkContentHash,
|
|
1641
|
-
closeKnowledgeDb: () => closeKnowledgeDb,
|
|
1642
|
-
collectCachedChunkEmbeddings: () => collectCachedChunkEmbeddings,
|
|
1643
|
-
deleteChunksByFilePath: () => deleteChunksByFilePath,
|
|
1644
|
-
getCachedChunkEmbedding: () => getCachedChunkEmbedding,
|
|
1645
|
-
getChunkCount: () => getChunkCount,
|
|
1646
|
-
getChunkCountByFilePath: () => getChunkCountByFilePath,
|
|
1647
|
-
getIndexedFilePaths: () => getIndexedFilePaths,
|
|
1648
|
-
getLastIndexTime: () => getLastIndexTime,
|
|
1649
|
-
openKnowledgeDb: () => openKnowledgeDb,
|
|
1650
|
-
searchChunksKeywordScored: () => searchChunksKeywordScored,
|
|
1651
|
-
setCachedChunkEmbedding: () => setCachedChunkEmbedding,
|
|
1652
|
-
setLastIndexTime: () => setLastIndexTime,
|
|
1653
|
-
upsertChunks: () => upsertChunks
|
|
1654
|
-
});
|
|
1655
|
-
var init_sqlite_knowledge = __esm({
|
|
1656
|
-
"src/memory/storage/sqlite-knowledge/index.ts"() {
|
|
1657
|
-
init_connection2();
|
|
1658
|
-
init_schema2();
|
|
1659
|
-
init_cache2();
|
|
1660
|
-
init_search3();
|
|
1661
|
-
init_sync2();
|
|
1662
|
-
}
|
|
1663
|
-
});
|
|
1664
|
-
/** True when the text contains a NUL byte — treated as binary, not chunked. */
function isBinary(content) {
  return content.includes("\0");
}
/**
 * Route a file's lines to the splitter matching its extension: markdown is
 * heading/paragraph aware, rst uses paragraphs, known code extensions split
 * on blank lines, and everything else falls back to paragraphs.
 */
function splitIntoSections(fileLines, ext) {
  if (ext === ".md") return splitMarkdown(fileLines);
  if (ext === ".rst") return splitParagraphs(fileLines);
  return CODE_EXTENSIONS.has(ext) ? splitCode(fileLines) : splitParagraphs(fileLines);
}
/**
 * Split markdown into sections. A new section starts at an h2+ heading
 * (## and deeper) or after a blank line that ends non-empty content;
 * fenced code blocks are never split.
 */
function splitMarkdown(fileLines) {
  const sections = [];
  let current = [];
  let insideFence = false;
  fileLines.forEach((text, index) => {
    const entry = { lineNumber: index + 1, text };
    if (text.trimStart().startsWith("```")) {
      insideFence = !insideFence; // toggle on every fence marker
      current.push(entry);
      return;
    }
    if (!insideFence && /^#{2,}\s/.test(text) && current.length > 0) {
      // Heading: close the running section; the heading starts the next one.
      sections.push(current);
      current = [entry];
      return;
    }
    if (!insideFence && text.trim() === "" && current.length > 0 && current.some((l) => l.text.trim() !== "")) {
      // Blank line ends a section that has real content; the blank line
      // itself stays attached to the section it terminates.
      current.push(entry);
      sections.push(current);
      current = [];
      return;
    }
    current.push(entry);
  });
  if (current.length > 0) sections.push(current);
  return sections;
}
/**
 * Split code into blank-line separated blocks. A blank line only opens a
 * new section when non-blank content still follows; trailing blanks stay
 * attached to the final section.
 */
function splitCode(fileLines) {
  const sections = [];
  let current = [];
  for (let i = 0; i < fileLines.length; i++) {
    const entry = { lineNumber: i + 1, text: fileLines[i] };
    if (fileLines[i].trim() === "" && current.length > 0) {
      const moreContentFollows = fileLines.slice(i + 1).some((l) => l.trim() !== "");
      if (moreContentFollows) {
        sections.push(current);
        current = [entry]; // blank line leads the next section
        continue;
      }
    }
    current.push(entry);
  }
  if (current.length > 0) sections.push(current);
  return sections;
}
/** Split plain text at blank lines; each blank line leads the next section. */
function splitParagraphs(fileLines) {
  const sections = [];
  let current = [];
  fileLines.forEach((text, index) => {
    const entry = { lineNumber: index + 1, text };
    if (text.trim() === "" && current.length > 0) {
      sections.push(current);
      current = [entry];
    } else {
      current.push(entry);
    }
  });
  if (current.length > 0) sections.push(current);
  return sections;
}
/** Re-join a section's lines into a single newline-delimited string. */
function sectionText(section) {
  return section.map((l) => l.text).join("\n");
}
|
|
1757
|
-
function chunkFile(filePath, content, options) {
|
|
1758
|
-
if (content.trim() === "") return [];
|
|
1759
|
-
if (isBinary(content)) return [];
|
|
1760
|
-
const targetSize = options?.targetSize ?? DEFAULT_TARGET_SIZE;
|
|
1761
|
-
const overlapSize = options?.overlapSize ?? DEFAULT_OVERLAP_SIZE;
|
|
1762
|
-
const fileLines = content.split("\n");
|
|
1763
|
-
const ext = extname(filePath).toLowerCase();
|
|
1764
|
-
const sections = splitIntoSections(fileLines, ext);
|
|
1765
|
-
const chunks = [];
|
|
1766
|
-
let accumulated = [];
|
|
1767
|
-
let accumulatedLength = 0;
|
|
1768
|
-
function emitChunk(lines, overlapLines2) {
|
|
1769
|
-
if (lines.length === 0) return [];
|
|
1770
|
-
const allLines = [...overlapLines2, ...lines];
|
|
1771
|
-
const text = allLines.map((l) => l.text).join("\n");
|
|
1772
|
-
const startLine = allLines[0].lineNumber;
|
|
1773
|
-
const endLine = allLines[allLines.length - 1].lineNumber;
|
|
1774
|
-
chunks.push({
|
|
1775
|
-
id: generateChunkId(filePath, startLine, endLine),
|
|
1776
|
-
filePath,
|
|
1777
|
-
startLine,
|
|
1778
|
-
endLine,
|
|
1779
|
-
text,
|
|
1780
|
-
contentHash: chunkContentHash(text)
|
|
1781
|
-
});
|
|
1782
|
-
if (overlapSize <= 0) return [];
|
|
1783
|
-
const overlapResult = [];
|
|
1784
|
-
let overlapLen = 0;
|
|
1785
|
-
for (let i = lines.length - 1; i >= 0; i--) {
|
|
1786
|
-
const lineLen = lines[i].text.length + 1;
|
|
1787
|
-
if (overlapLen + lineLen > overlapSize && overlapResult.length > 0) break;
|
|
1788
|
-
overlapResult.unshift(lines[i]);
|
|
1789
|
-
overlapLen += lineLen;
|
|
1790
|
-
}
|
|
1791
|
-
return overlapResult;
|
|
1792
|
-
}
|
|
1793
|
-
let overlapLines = [];
|
|
1794
|
-
for (const section of sections) {
|
|
1795
|
-
const sectionLen = sectionText(section).length;
|
|
1796
|
-
if (accumulatedLength > 0 && accumulatedLength + sectionLen > targetSize) {
|
|
1797
|
-
overlapLines = emitChunk(accumulated, overlapLines);
|
|
1798
|
-
accumulated = [];
|
|
1799
|
-
accumulatedLength = 0;
|
|
1800
|
-
}
|
|
1801
|
-
accumulated.push(...section);
|
|
1802
|
-
accumulatedLength += sectionLen;
|
|
1803
|
-
if (accumulatedLength > targetSize) {
|
|
1804
|
-
overlapLines = emitChunk(accumulated, overlapLines);
|
|
1805
|
-
accumulated = [];
|
|
1806
|
-
accumulatedLength = 0;
|
|
1807
|
-
}
|
|
1808
|
-
}
|
|
1809
|
-
if (accumulated.length > 0) {
|
|
1810
|
-
emitChunk(accumulated, overlapLines);
|
|
1811
|
-
}
|
|
1812
|
-
return chunks;
|
|
1813
|
-
}
|
|
1814
|
-
var DEFAULT_TARGET_SIZE, DEFAULT_OVERLAP_SIZE;
|
|
1815
|
-
var init_chunking = __esm({
|
|
1816
|
-
"src/memory/knowledge/chunking.ts"() {
|
|
1817
|
-
init_types3();
|
|
1818
|
-
DEFAULT_TARGET_SIZE = 1600;
|
|
1819
|
-
DEFAULT_OVERLAP_SIZE = 320;
|
|
1820
|
-
}
|
|
1821
|
-
});
|
|
1822
|
-
|
|
1823
|
-
// src/memory/knowledge/embed-chunks.ts
|
|
1824
|
-
var embed_chunks_exports = {};
|
|
1825
|
-
__export(embed_chunks_exports, {
|
|
1826
|
-
embedChunks: () => embedChunks,
|
|
1827
|
-
getUnembeddedChunkCount: () => getUnembeddedChunkCount
|
|
1828
|
-
});
|
|
1829
|
-
/** Number of chunk rows that still lack an embedding BLOB. */
function getUnembeddedChunkCount(repoRoot) {
  const db = openKnowledgeDb(repoRoot);
  return db.prepare("SELECT COUNT(*) as count FROM chunks WHERE embedding IS NULL").get().count;
}
/**
 * Embed chunk texts in batches of BATCH_SIZE and persist the vectors.
 * With `onlyMissing` (default true) only NULL-embedding rows are processed;
 * each batch is written in its own transaction.
 * Returns { chunksEmbedded, chunksSkipped, durationMs }.
 */
async function embedChunks(repoRoot, options) {
  const startedMs = Date.now();
  const onlyMissing = options?.onlyMissing ?? true;
  const db = openKnowledgeDb(repoRoot);
  const selectSql = onlyMissing ? "SELECT id, text, content_hash FROM chunks WHERE embedding IS NULL" : "SELECT id, text, content_hash FROM chunks";
  const rows = db.prepare(selectSql).all();
  const total = db.prepare("SELECT COUNT(*) as count FROM chunks").get().count;
  const updateStmt = db.prepare(
    "UPDATE chunks SET embedding = ?, content_hash = ? WHERE id = ?"
  );
  const writeBatch = db.transaction((batch) => {
    for (const item of batch) {
      const blob = Buffer.from(item.vector.buffer, item.vector.byteOffset, item.vector.byteLength);
      updateStmt.run(blob, item.content_hash, item.id);
    }
  });
  let chunksEmbedded = 0;
  for (let offset = 0; offset < rows.length; offset += BATCH_SIZE) {
    const batch = rows.slice(offset, offset + BATCH_SIZE);
    const texts = batch.map((r) => r.text);
    const vectors = await embedTexts(texts);
    if (vectors.length !== texts.length) {
      // The embedder must return exactly one vector per input text.
      throw new Error(`embedTexts returned ${vectors.length} vectors for ${texts.length} inputs`);
    }
    writeBatch(batch.map((row, j) => ({ ...row, vector: vectors[j] })));
    chunksEmbedded += batch.length;
  }
  return {
    chunksEmbedded,
    chunksSkipped: total - rows.length,
    durationMs: Date.now() - startedMs
  };
}
|
|
1869
|
-
var BATCH_SIZE;
|
|
1870
|
-
var init_embed_chunks = __esm({
|
|
1871
|
-
"src/memory/knowledge/embed-chunks.ts"() {
|
|
1872
|
-
init_nomic();
|
|
1873
|
-
init_connection2();
|
|
1874
|
-
BATCH_SIZE = 16;
|
|
1875
|
-
}
|
|
1876
|
-
});
|
|
1877
|
-
|
|
1878
|
-
// src/memory/knowledge/indexing.ts
|
|
1879
|
-
var indexing_exports = {};
|
|
1880
|
-
__export(indexing_exports, {
|
|
1881
|
-
indexDocs: () => indexDocs
|
|
1882
|
-
});
|
|
1883
|
-
/** SHA-256 hex digest of a file's full content, used for change detection. */
function fileHash(content) {
  const hasher = createHash("sha256");
  hasher.update(content);
  return hasher.digest("hex");
}
/** Metadata-table key under which a file's content hash is stored. */
function fileHashKey(relativePath) {
  return `file_hash:${relativePath}`;
}
|
|
1889
|
-
/** Read the stored content hash for a file, or null when never indexed. */
function getStoredFileHash(repoRoot, relativePath) {
  const db = openKnowledgeDb(repoRoot);
  const row = db.prepare("SELECT value FROM metadata WHERE key = ?").get(fileHashKey(relativePath));
  return row?.value ?? null;
}
/** Record (or overwrite) the content hash for a file. */
function setFileHash(repoRoot, relativePath, hash) {
  openKnowledgeDb(repoRoot)
    .prepare("INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)")
    .run(fileHashKey(relativePath), hash);
}
/** Drop the stored content hash for a file (used when the file is removed). */
function removeFileHash(repoRoot, relativePath) {
  openKnowledgeDb(repoRoot)
    .prepare("DELETE FROM metadata WHERE key = ?")
    .run(fileHashKey(relativePath));
}
|
|
1902
|
-
/**
 * Recursively list files under `baseDir` whose extension is in
 * SUPPORTED_EXTENSIONS, returned as paths relative to `repoRoot`.
 * A missing or unreadable baseDir yields an empty list.
 */
async function walkSupportedFiles(baseDir, repoRoot) {
  let entries;
  try {
    entries = await readdir(baseDir, { recursive: true, withFileTypes: true });
  } catch {
    return []; // nothing to index
  }
  const results = [];
  for (const entry of entries) {
    if (!entry.isFile()) continue;
    if (!SUPPORTED_EXTENSIONS.has(extname(entry.name).toLowerCase())) continue;
    // `parentPath` superseded the Dirent `path` property; fall back for
    // older Node versions.
    const fullPath = join(entry.parentPath ?? entry.path, entry.name);
    results.push(relative(repoRoot, fullPath));
  }
  return results;
}
|
|
1920
|
-
/**
 * Lazily load the model gate and the embedder, then embed whatever chunks
 * still lack vectors. Throws a single actionable error when the model is
 * not usable; otherwise returns the number of chunks embedded.
 */
async function tryEmbedChunks(repoRoot) {
  const { isModelUsable: isModelUsable2 } = await Promise.resolve().then(() => (init_model(), model_exports));
  const usability = await isModelUsable2();
  if (!usability.usable) {
    throw new Error(`Embedding failed: ${usability.reason}. ${usability.action}`);
  }
  const { embedChunks: embedChunks2 } = await Promise.resolve().then(() => (init_embed_chunks(), embed_chunks_exports));
  const { chunksEmbedded } = await embedChunks2(repoRoot);
  return chunksEmbedded;
}
|
|
1930
|
-
// src/memory/knowledge/indexing.ts
/**
 * Incrementally (re)index every supported file under `options.docsDir`
 * (default "docs"). Files whose content hash is unchanged are skipped
 * unless `options.force`. Chunks for files that disappeared are purged,
 * and `options.embed` additionally embeds any chunks still missing
 * vectors. Returns aggregate stats for the run.
 */
async function indexDocs(repoRoot, options = {}) {
  const startedMs = Date.now();
  const docsDir = options.docsDir ?? "docs";
  const force = options.force ?? false;
  const stats = {
    filesIndexed: 0,
    filesSkipped: 0,
    filesErrored: 0,
    chunksCreated: 0,
    chunksDeleted: 0,
    chunksEmbedded: 0,
    durationMs: 0
  };
  const discovered = await walkSupportedFiles(join(repoRoot, docsDir), repoRoot);
  for (const docPath of discovered) {
    let raw;
    try {
      raw = await readFile(join(repoRoot, docPath), "utf-8");
    } catch {
      stats.filesErrored++;
      continue;
    }
    const currentHash = fileHash(raw);
    // Skip unchanged files unless a full reindex was requested.
    if (!force && getStoredFileHash(repoRoot, docPath) === currentHash) {
      stats.filesSkipped++;
      continue;
    }
    const timestamp = new Date().toISOString();
    const records = chunkFile(docPath, raw).map((chunk) => ({
      id: chunk.id,
      filePath: chunk.filePath,
      startLine: chunk.startLine,
      endLine: chunk.endLine,
      contentHash: chunk.contentHash,
      text: chunk.text,
      updatedAt: timestamp
    }));
    // Replace the file's chunks and its stored hash atomically.
    const database = openKnowledgeDb(repoRoot);
    database.transaction(() => {
      deleteChunksByFilePath(repoRoot, [docPath]);
      if (records.length > 0) {
        upsertChunks(repoRoot, records);
      }
      setFileHash(repoRoot, docPath, currentHash);
    })();
    stats.filesIndexed++;
    stats.chunksCreated += records.length;
  }
  // Purge chunks belonging to files that no longer exist on disk.
  const livePaths = new Set(discovered);
  const orphaned = getIndexedFilePaths(repoRoot).filter((p) => !livePaths.has(p));
  if (orphaned.length > 0) {
    for (const path of orphaned) {
      stats.chunksDeleted += getChunkCountByFilePath(repoRoot, path);
    }
    deleteChunksByFilePath(repoRoot, orphaned);
    for (const path of orphaned) {
      removeFileHash(repoRoot, path);
    }
  }
  setLastIndexTime(repoRoot, new Date().toISOString());
  if (options.embed) {
    stats.chunksEmbedded = await tryEmbedChunks(repoRoot);
  }
  stats.durationMs = Date.now() - startedMs;
  return stats;
}
|
|
2001
|
-
var init_indexing = __esm({
|
|
2002
|
-
"src/memory/knowledge/indexing.ts"() {
|
|
2003
|
-
init_connection2();
|
|
2004
|
-
init_sync2();
|
|
2005
|
-
init_chunking();
|
|
2006
|
-
init_types3();
|
|
2007
|
-
}
|
|
2008
|
-
});
|
|
2009
|
-
/** Directory holding the embed lock (the repo's cache directory). */
function lockDir(repoRoot) {
  return join(repoRoot, ".claude", ".cache");
}
/** Absolute path of the embed lock file inside the cache directory. */
function lockPath(repoRoot) {
  return join(lockDir(repoRoot), "embed.lock");
}
|
|
2015
|
-
/**
 * Probe whether a process exists using the signal-0 trick.
 * Per Node's process.kill docs, signal 0 only tests existence; an EPERM
 * error means the process exists but is owned by another user. The
 * original returned false on EPERM, letting a second worker treat a live
 * holder's lock as stale — treat EPERM as "alive" instead.
 */
function isProcessAlive(pid) {
  try {
    process.kill(pid, 0);
    return true;
  } catch (err) {
    return err.code === "EPERM";
  }
}
/**
 * Parse a lock file into { pid, startedAt }.
 * Returns null when the file is missing, is not valid JSON, or is not
 * shaped like a lock record.
 */
function readLock(filePath) {
  try {
    const raw = readFileSync(filePath, "utf-8");
    const parsed = JSON.parse(raw);
    if (typeof parsed === "object" && parsed !== null && typeof parsed.pid === "number" && typeof parsed.startedAt === "string") {
      return parsed;
    }
    return null;
  } catch {
    return null;
  }
}
|
|
2035
|
-
/**
 * Try to take the cross-process embed lock via exclusive file creation.
 * A live holder with a lock younger than LOCK_MAX_AGE_MS wins; a dead or
 * stale holder's lock is removed and acquisition is retried exactly once.
 * Returns { acquired: true, release } or { acquired: false, holder }.
 */
function acquireEmbedLock(repoRoot) {
  const file = lockPath(repoRoot);
  const payload = JSON.stringify({ pid: process.pid, startedAt: new Date().toISOString() });
  mkdirSync(lockDir(repoRoot), { recursive: true });
  try {
    // "wx" fails with EEXIST when the lock file already exists.
    writeFileSync(file, payload, { flag: "wx" });
    return { acquired: true, release: () => releaseLock(file) };
  } catch (err) {
    if (err.code !== "EEXIST") throw err;
  }
  const existing = readLock(file);
  if (existing && isProcessAlive(existing.pid)) {
    const age = Date.now() - new Date(existing.startedAt).getTime();
    if (age < LOCK_MAX_AGE_MS) {
      return { acquired: false, holder: existing.pid };
    }
  }
  // Holder is dead or the lock is stale: clear it and race to re-acquire.
  try {
    unlinkSync(file);
  } catch {
  }
  try {
    writeFileSync(file, payload, { flag: "wx" });
    return { acquired: true, release: () => releaseLock(file) };
  } catch {
    const winner = readLock(file);
    return { acquired: false, holder: winner?.pid ?? -1 };
  }
}
|
|
2065
|
-
/**
 * True when a valid lock file exists and its recorded pid is still alive.
 * (Staleness by age is only enforced at acquisition time.)
 */
function isEmbedLocked(repoRoot) {
  const file = lockPath(repoRoot);
  if (!existsSync(file)) return false;
  const record = readLock(file);
  return record ? isProcessAlive(record.pid) : false;
}
/** Best-effort removal of the lock file; a missing file is ignored. */
function releaseLock(file) {
  try {
    unlinkSync(file);
  } catch {
  }
}
|
|
2078
|
-
var LOCK_MAX_AGE_MS;
|
|
2079
|
-
var init_embed_lock = __esm({
|
|
2080
|
-
"src/memory/knowledge/embed-lock.ts"() {
|
|
2081
|
-
LOCK_MAX_AGE_MS = 60 * 60 * 1e3;
|
|
2082
|
-
}
|
|
2083
|
-
});
|
|
2084
|
-
/** Absolute path of the embed-status JSON file for a repo. */
function statusPath(repoRoot) {
  return join(repoRoot, STATUS_FILE);
}
/** Persist the embed status, creating the cache directory if needed. */
function writeEmbedStatus(repoRoot, status) {
  const filePath = statusPath(repoRoot);
  mkdirSync(dirname(filePath), { recursive: true });
  writeFileSync(filePath, JSON.stringify(status, null, 2), "utf-8");
}
/**
 * Read the embed status. Returns null when the file is missing, is not
 * valid JSON, or its `state` is not one of VALID_STATES.
 */
function readEmbedStatus(repoRoot) {
  try {
    const parsed = JSON.parse(readFileSync(statusPath(repoRoot), "utf-8"));
    if (!parsed || typeof parsed !== "object" || !VALID_STATES.has(parsed.state)) {
      return null;
    }
    return parsed;
  } catch {
    return null;
  }
}
|
|
2104
|
-
var STATUS_FILE, VALID_STATES;
|
|
2105
|
-
var init_embed_status = __esm({
|
|
2106
|
-
"src/memory/knowledge/embed-status.ts"() {
|
|
2107
|
-
STATUS_FILE = ".claude/.cache/embed-status.json";
|
|
2108
|
-
VALID_STATES = /* @__PURE__ */ new Set(["idle", "running", "completed", "failed"]);
|
|
2109
|
-
}
|
|
2110
|
-
});
|
|
2111
|
-
/**
 * Locate the bundled CLI entry point by walking up (at most 10 levels)
 * from this module's directory looking for dist/cli.js. Falls back to
 * `npx ca` when none is found.
 */
function resolveCliInvocation() {
  let dir = dirname(fileURLToPath(import.meta.url));
  for (let depth = 0; depth < 10; depth++) {
    const candidate = join(dir, "dist", "cli.js");
    if (existsSync(candidate)) {
      return { command: process.execPath, args: [candidate] };
    }
    const parent = dirname(dir);
    if (parent === dir) break; // reached the filesystem root
    dir = parent;
  }
  return { command: "npx", args: ["ca"] };
}
|
|
2124
|
-
/**
 * Fire-and-forget a detached `embed-worker` child process for this repo.
 * Refuses (with a reason) when a worker is already running, the embedding
 * model is unavailable, or there is nothing left to embed.
 */
function spawnBackgroundEmbed(repoRoot) {
  if (isEmbedLocked(repoRoot)) {
    return { spawned: false, reason: "Embedding already in progress" };
  }
  if (!isModelAvailable()) {
    return { spawned: false, reason: "Model not available" };
  }
  if (getUnembeddedChunkCount(repoRoot) === 0) {
    return { spawned: false, reason: "All chunks already embedded" };
  }
  const cli = resolveCliInvocation();
  const worker = spawn(cli.command, [...cli.args, "embed-worker", repoRoot], {
    detached: true,
    stdio: "ignore"
  });
  worker.unref(); // let the parent exit without waiting for the worker
  return { spawned: true, pid: worker.pid };
}
|
|
2142
|
-
/**
 * Worker entry point: acquire the embed lock, embed all missing chunks,
 * and record progress in the embed-status file. DB handles are closed and
 * the lock is released even on failure.
 */
async function runBackgroundEmbed(repoRoot) {
  const lock = acquireEmbedLock(repoRoot);
  if (!lock.acquired) return; // another worker is already embedding
  const { openKnowledgeDb: openKnowledgeDb2 } = await Promise.resolve().then(() => (init_sqlite_knowledge(), sqlite_knowledge_exports));
  openKnowledgeDb2(repoRoot); // warm the connection before reporting "running"
  const startedMs = Date.now();
  writeEmbedStatus(repoRoot, { state: "running", startedAt: new Date().toISOString() });
  try {
    const result = await withEmbedding(async () => embedChunks(repoRoot, { onlyMissing: true }));
    writeEmbedStatus(repoRoot, {
      state: "completed",
      chunksEmbedded: result.chunksEmbedded,
      completedAt: new Date().toISOString(),
      durationMs: result.durationMs
    });
  } catch (err) {
    writeEmbedStatus(repoRoot, {
      state: "failed",
      error: err instanceof Error ? err.message : "Unknown error",
      durationMs: Date.now() - startedMs
    });
  } finally {
    closeKnowledgeDb();
    lock.release();
  }
}
|
|
2169
|
-
/**
 * Index the repo's docs/ directory (when it exists) and then kick off a
 * background embedding worker. Returns null when there is no docs/ dir,
 * otherwise the spawn result.
 */
async function indexAndSpawnEmbed(repoRoot) {
  const docsDir = join(repoRoot, "docs");
  if (!existsSync(docsDir)) return null;
  const { indexDocs: indexDocs2 } = await Promise.resolve().then(() => (init_indexing(), indexing_exports));
  await indexDocs2(repoRoot);
  return spawnBackgroundEmbed(repoRoot);
}
|
|
2176
|
-
var init_embed_background = __esm({
|
|
2177
|
-
"src/memory/knowledge/embed-background.ts"() {
|
|
2178
|
-
init_embeddings();
|
|
2179
|
-
init_sqlite_knowledge();
|
|
2180
|
-
init_embed_lock();
|
|
2181
|
-
init_embed_status();
|
|
2182
|
-
init_embed_chunks();
|
|
2183
|
-
}
|
|
2184
|
-
});
|
|
2185
|
-
var _require = createRequire(import.meta.url);
|
|
2186
|
-
var _pkg = _require("../package.json");
|
|
2187
|
-
var VERSION = _pkg.version;
|
|
2188
|
-
|
|
2189
|
-
// src/index.ts
|
|
2190
|
-
init_storage();
|
|
2191
|
-
init_embeddings();
|
|
2192
|
-
init_search2();
|
|
2193
|
-
|
|
2194
|
-
// src/memory/capture/quality.ts
|
|
2195
|
-
init_model();
|
|
2196
|
-
init_search2();
|
|
2197
|
-
init_storage();
|
|
2198
|
-
// Similarity score at or above which a new insight counts as a duplicate.
var DUPLICATE_THRESHOLD = 0.98;
/**
 * Check whether an insight is novel relative to stored lessons.
 * Fails open: when the embedding model is unavailable or any lookup step
 * throws, the insight is treated as novel so capture is never blocked.
 */
async function isNovel(repoRoot, insight, options = {}) {
  const threshold = options.threshold ?? DUPLICATE_THRESHOLD;
  if (!isModelAvailable()) {
    return { novel: true };
  }
  try {
    await syncIfNeeded(repoRoot);
    const matches = await findSimilarLessons(repoRoot, insight, { threshold });
    const best = matches[0];
    if (!best) {
      return { novel: true };
    }
    return {
      novel: false,
      reason: `Near-duplicate of existing lesson: "${best.item.insight.slice(0, 50)}..."`,
      existingId: best.item.id
    };
  } catch (err) {
    if (process.env["CA_DEBUG"]) {
      process.stderr.write(`[CA_DEBUG] isNovel catch: ${err instanceof Error ? err.message : String(err)}\n`);
    }
    return { novel: true };
  }
}
|
|
2224
|
-
// An insight needs at least this many words to be considered usable.
var MIN_WORD_COUNT = 4;
// Phrasings that signal hand-wavy, non-specific advice.
var VAGUE_PATTERNS = [
  /\bwrite better\b/i,
  /\bbe careful\b/i,
  /\bremember to\b/i,
  /\bmake sure\b/i,
  /\btry to\b/i,
  /\bdouble check\b/i
];
// Bare "always/never <up to three words>" imperatives with no qualifier.
var GENERIC_IMPERATIVE_PATTERN = /^(always|never)\s+\w+(\s+\w+){0,2}$/i;
/**
 * Decide whether an insight is specific enough to store.
 * Returns { specific: true } or { specific: false, reason }.
 */
function isSpecific(insight) {
  const words = insight.trim().split(/\s+/).filter((w) => w.length > 0);
  if (words.length < MIN_WORD_COUNT) {
    return { specific: false, reason: "Insight is too short to be actionable" };
  }
  const vague = VAGUE_PATTERNS.some((pattern) => pattern.test(insight)) || GENERIC_IMPERATIVE_PATTERN.test(insight);
  if (vague) {
    return { specific: false, reason: "Insight matches a vague pattern" };
  }
  return { specific: true };
}
|
|
2249
|
-
// Sentence shapes that indicate the insight tells the reader what to DO.
var ACTION_PATTERNS = [
  /\buse\s+.+\s+instead\s+of\b/i,   // "use X instead of Y"
  /\bprefer\s+.+\s+(over|to)\b/i,   // "prefer X over Y" / "prefer X to Y"
  /\balways\s+.+\s+when\b/i,        // "always X when Y"
  /\bnever\s+.+\s+without\b/i,      // "never X without Y"
  /\bavoid\s+(using\s+)?\w+/i,      // "avoid X" / "avoid using X"
  /\bcheck\s+.+\s+before\b/i,       // "check X before Y"
  /^(run|use|add|remove|install|update|configure|set|enable|disable)\s+/i // leading imperative
];
/**
 * Check whether an insight contains clear action guidance.
 */
function isActionable(insight) {
  const hasActionShape = ACTION_PATTERNS.some((pattern) => pattern.test(insight));
  if (hasActionShape) {
    return { actionable: true };
  }
  return { actionable: false, reason: "Insight lacks clear action guidance" };
}
|
|
2273
|
-
/**
 * Quality gate for proposing a lesson: specificity first (cheap, synchronous),
 * novelty second (embedding lookup). Returns the first failing reason.
 */
async function shouldPropose(repoRoot, insight) {
  const specificity = isSpecific(insight);
  if (!specificity.specific) {
    return { shouldPropose: false, reason: specificity.reason };
  }
  const novelty = await isNovel(repoRoot, insight);
  if (!novelty.novel) {
    return { shouldPropose: false, reason: novelty.reason };
  }
  return { shouldPropose: true };
}
|
|
2284
|
-
|
|
2285
|
-
// src/memory/capture/triggers.ts
|
|
2286
|
-
// Phrases that typically open a user correction.
var USER_CORRECTION_PATTERNS = [
  /\bno\b[,.]?\s/i,   // "no, ..." / "no ..."
  /\bwrong\b/i,
  /\bactually\b/i,
  /\bnot that\b/i,
  /\bi meant\b/i
];
/**
 * Scan the conversation for a user correction. The opening message is never
 * treated as a correction (there is nothing to correct yet). Returns a
 * trigger record for the first matching message, or null.
 */
function detectUserCorrection(signals) {
  const { messages, context } = signals;
  if (messages.length < 2) {
    return null;
  }
  const looksLikeCorrection = (text) => USER_CORRECTION_PATTERNS.some((pattern) => pattern.test(text));
  for (const message of messages.slice(1)) {
    if (!message) continue;
    if (looksLikeCorrection(message)) {
      return {
        trigger: `User correction during ${context.intent}`,
        correctionMessage: message,
        context
      };
    }
  }
  return null;
}
|
|
2318
|
-
/**
 * Detect a success → failure → success edit sequence on the same file,
 * indicating the agent recovered from its own mistake. Returns the file and
 * a trigger string for the first such window, or null.
 */
function detectSelfCorrection(history) {
  const { edits } = history;
  if (edits.length < 3) {
    return null;
  }
  for (let start = 0; start + 2 < edits.length; start++) {
    const [first, second, third] = edits.slice(start, start + 3);
    if (!first || !second || !third) continue;
    const sameFile = first.file === second.file && second.file === third.file;
    const recovered = first.success && !second.success && third.success;
    if (sameFile && recovered) {
      return {
        file: first.file,
        trigger: `Self-correction on ${first.file}`
      };
    }
  }
  return null;
}
|
|
2337
|
-
/**
 * Turn a failed test run into a capture trigger; passing runs yield null.
 * The trigger embeds the first output line mentioning error/fail/assert
 * (falling back to the first non-blank line), truncated to 100 characters.
 */
function detectTestFailure(testResult) {
  if (testResult.passed) {
    return null;
  }
  const nonEmptyLines = testResult.output.split("\n").filter((line) => line.trim().length > 0);
  const interesting = nonEmptyLines.find((line) => /error|fail|assert/i.test(line));
  const errorLine = interesting ?? nonEmptyLines[0] ?? "";
  return {
    testFile: testResult.testFile,
    errorOutput: testResult.output,
    trigger: `Test failure in ${testResult.testFile}: ${errorLine.slice(0, 100)}`
  };
}
|
|
2349
|
-
|
|
2350
|
-
// src/memory/capture/integration.ts
|
|
2351
|
-
init_types();
// Runtime validators for the three correction-signal payloads the capture
// subsystem handles (user correction, self correction, test failure).
var CorrectionSignalSchema = z.object({
  messages: z.array(z.string()),
  context: ContextSchema
});
var EditEntrySchema = z.object({
  file: z.string(),
  success: z.boolean(),
  timestamp: z.number()
});
var EditHistorySchema = z.object({
  edits: z.array(EditEntrySchema)
});
var TestResultSchema = z.object({
  passed: z.boolean(),
  output: z.string(),
  testFile: z.string()
});
// NOTE(review): this union's value is discarded — presumably it only backed a
// TypeScript type in the original source and survived bundling as a
// side-effect-free expression; confirm before removing.
z.discriminatedUnion("type", [
  z.object({ type: z.literal("user"), data: CorrectionSignalSchema }),
  z.object({ type: z.literal("self"), data: EditHistorySchema }),
  z.object({ type: z.literal("test"), data: TestResultSchema })
]);
|
|
2374
|
-
|
|
2375
|
-
// src/memory/retrieval/session.ts
|
|
2376
|
-
init_storage();
|
|
2377
|
-
// Default number of session lessons surfaced at prime time.
var DEFAULT_LIMIT2 = 5;
// Guard: true when the memory item carries a severity field at all.
function hasSeverity(item) {
  return item.severity !== void 0;
}
/**
 * Load the newest confirmed, non-invalidated high-severity lessons and bump
 * their retrieval counters.
 */
async function loadSessionLessons(repoRoot, limit = DEFAULT_LIMIT2) {
  const { items } = await readMemoryItems(repoRoot);
  const isCritical = (item) => hasSeverity(item) && item.severity === "high" && item.confirmed && !item.invalidatedAt;
  const highSeverityLessons = items.filter(isCritical);
  // Newest first.
  highSeverityLessons.sort(
    (a, b) => new Date(b.created).getTime() - new Date(a.created).getTime()
  );
  const topLessons = highSeverityLessons.slice(0, limit);
  if (topLessons.length > 0) {
    incrementRetrievalCount(repoRoot, topLessons.map((lesson) => lesson.id));
  }
  return topLessons;
}
|
|
2397
|
-
|
|
2398
|
-
// src/memory/retrieval/plan.ts
|
|
2399
|
-
init_search2();
|
|
2400
|
-
init_storage();
|
|
2401
|
-
// Default number of lessons attached to a plan.
var DEFAULT_LIMIT3 = 5;
/**
 * Hybrid (vector + keyword) lesson retrieval for a plan. Keyword search is
 * kicked off first so it runs while the vector search is awaited; when the
 * vector backend is unavailable the merge falls back to keyword-only weights.
 */
async function retrieveForPlan(repoRoot, planText, limit = DEFAULT_LIMIT3) {
  const candidateLimit = limit * CANDIDATE_MULTIPLIER;
  const keywordPromise = searchKeywordScored(repoRoot, planText, candidateLimit);
  let vectorResults = [];
  let vectorFailed = false;
  try {
    vectorResults = await searchVector(repoRoot, planText, { limit: candidateLimit });
  } catch {
    vectorFailed = true;
    console.error("[compound-agent] Vector search unavailable, falling back to keyword-only search");
  }
  const keywordResults = await keywordPromise;
  const merged = vectorFailed
    ? mergeHybridResults([], keywordResults, { vectorWeight: 0, textWeight: DEFAULT_TEXT_WEIGHT })
    : mergeHybridResults(vectorResults, keywordResults, { minScore: MIN_HYBRID_SCORE });
  const topLessons = rankLessons(merged).slice(0, limit);
  if (topLessons.length > 0) {
    incrementRetrievalCount(repoRoot, topLessons.map((item) => item.lesson.id));
  }
  return { lessons: topLessons, message: formatLessonsCheck(topLessons) };
}
|
|
2431
|
-
/**
 * Render ranked lessons as a "Lessons Check" banner for plan output.
 */
function formatLessonsCheck(lessons) {
  const header = "Lessons Check\n" + "\u2500".repeat(40);
  if (lessons.length === 0) {
    return `${header}
No relevant lessons found for this plan.`;
  }
  const numbered = lessons.map((entry, index) => `${index + 1}. ${entry.lesson.insight}`);
  return `${header}
${numbered.join("\n")}`;
}
|
|
2445
|
-
|
|
2446
|
-
// src/index.ts
|
|
2447
|
-
init_sqlite_knowledge();
|
|
2448
|
-
|
|
2449
|
-
// src/memory/knowledge/index.ts
|
|
2450
|
-
init_chunking();
|
|
2451
|
-
init_types3();
|
|
2452
|
-
init_indexing();
|
|
2453
|
-
|
|
2454
|
-
// src/memory/knowledge/search.ts
|
|
2455
|
-
init_connection2();
|
|
2456
|
-
init_search3();
|
|
2457
|
-
init_nomic();
|
|
2458
|
-
init_vector();
|
|
2459
|
-
init_hybrid();
|
|
2460
|
-
init_model();
|
|
2461
|
-
// Default number of knowledge chunks returned by a search.
var DEFAULT_KNOWLEDGE_LIMIT = 6;
/**
 * Brute-force vector search over the knowledge-chunk table: loads every
 * stored embedding, scores each against the embedded query with cosine
 * similarity, then fetches full rows for the top-k winners only.
 */
async function searchKnowledgeVector(repoRoot, query, options) {
  const limit = options?.limit ?? DEFAULT_KNOWLEDGE_LIMIT;
  const database = openKnowledgeDb(repoRoot);
  // Similarity is computed in-process over all rows (O(n) per query).
  const embRows = database.prepare("SELECT id, embedding FROM chunks WHERE embedding IS NOT NULL").all();
  if (embRows.length === 0) return [];
  const queryVector = await embedText(query);
  const scored = [];
  for (const row of embRows) {
    // Reinterpret the stored BLOB as float32 without copying; byteOffset is
    // honored because the driver may return a view into a shared buffer.
    // Assumes embeddings were written as native-endian float32 — TODO confirm.
    const embFloat = new Float32Array(
      row.embedding.buffer,
      row.embedding.byteOffset,
      row.embedding.byteLength / 4
    );
    scored.push({ id: row.id, score: cosineSimilarity(queryVector, embFloat) });
  }
  scored.sort((a, b) => b.score - a.score);
  const topK = scored.slice(0, limit);
  if (topK.length === 0) return [];
  // Parameterized IN query for just the winning ids.
  const placeholders = topK.map(() => "?").join(",");
  const sql = `SELECT id, file_path, start_line, end_line, content_hash, text, model, updated_at FROM chunks WHERE id IN (${placeholders})`;
  const dataRows = database.prepare(sql).all(...topK.map((r) => r.id));
  const dataMap = new Map(dataRows.map((r) => [r.id, r]));
  const results = [];
  // Iterate topK (not dataRows) so output keeps descending-score order.
  for (const { id, score } of topK) {
    const row = dataMap.get(id);
    if (!row) continue;
    const chunk = {
      id: row.id,
      filePath: row.file_path,
      startLine: row.start_line,
      endLine: row.end_line,
      contentHash: row.content_hash,
      text: row.text,
      updatedAt: row.updated_at
    };
    // `model` is nullable in the table; only set the property when present.
    if (row.model !== null) {
      chunk.model = row.model;
    }
    results.push({ item: chunk, score });
  }
  return results;
}
|
|
2504
|
-
/**
 * Knowledge search entry point: hybrid vector + keyword when the embedding
 * model is usable, keyword-only otherwise.
 */
async function searchKnowledge(repoRoot, query, options) {
  const limit = options?.limit ?? DEFAULT_KNOWLEDGE_LIMIT;
  const candidateLimit = limit * CANDIDATE_MULTIPLIER;
  const usability = await isModelUsable();
  if (!usability.usable) {
    // No model: plain keyword search, already capped at `limit`.
    const keywordOnly = searchChunksKeywordScored(repoRoot, query, limit);
    return keywordOnly.map((k) => ({ item: k.chunk, score: k.score }));
  }
  const [vectorResults, keywordRaw] = await Promise.all([
    searchKnowledgeVector(repoRoot, query, { limit: candidateLimit }),
    Promise.resolve(searchChunksKeywordScored(repoRoot, query, candidateLimit))
  ]);
  const keywordScored = keywordRaw.map((k) => ({ item: k.chunk, score: k.score }));
  if (vectorResults.length === 0) {
    // Nothing embedded yet — fall back to keyword candidates.
    return keywordScored.slice(0, limit);
  }
  return mergeHybridScores(
    vectorResults,
    keywordScored,
    (item) => item.id,
    { limit, minScore: MIN_HYBRID_SCORE }
  );
}
|
|
2531
|
-
|
|
2532
|
-
// src/memory/knowledge/index.ts
|
|
2533
|
-
init_embed_chunks();
|
|
2534
|
-
init_embed_lock();
|
|
2535
|
-
init_embed_status();
|
|
2536
|
-
init_embed_background();
|
|
2537
|
-
|
|
2538
|
-
// src/cli-utils.ts
|
|
2539
|
-
/**
 * Repo root for CLI commands: the COMPOUND_AGENT_ROOT override when set to a
 * non-empty value, otherwise the current working directory.
 */
function getRepoRoot() {
  const override = process.env["COMPOUND_AGENT_ROOT"];
  return override || process.cwd();
}
|
|
2542
|
-
|
|
2543
|
-
// src/setup/all.ts
|
|
2544
|
-
init_embeddings();
|
|
2545
|
-
init_storage();
|
|
2546
|
-
|
|
2547
|
-
// src/setup/display-utils.ts
|
|
2548
|
-
init_storage();
|
|
2549
|
-
// Location of the cook-it phase-state file, relative to the repo root.
var STATE_DIR = ".claude";
var STATE_FILE = ".ca-phase-state.json";
// State older than 72 hours is treated as stale and discarded on read.
var PHASE_STATE_MAX_AGE_MS = 72 * 60 * 60 * 1e3;
// The five cook-it phases (in order) and the gates a session can pass.
var PHASES = ["spec-dev", "plan", "work", "review", "compound"];
var GATES = ["post-plan", "gate-3", "gate-4", "final"];
/** Absolute path of the phase-state file for a repo. */
function getStatePath(repoRoot) {
  return join(repoRoot, STATE_DIR, STATE_FILE);
}
/** True when value names one of the known phases. */
function isPhaseName(value) {
  if (typeof value !== "string") return false;
  return PHASES.includes(value);
}
/** True when value names one of the known gates. */
function isGateName(value) {
  if (typeof value !== "string") return false;
  return GATES.includes(value);
}
|
|
2563
|
-
/**
 * True when value is a string that Date.parse can interpret.
 */
function isIsoDate(value) {
  if (typeof value !== "string") {
    return false;
  }
  const parsed = Date.parse(value);
  return !Number.isNaN(parsed);
}
|
|
2567
|
-
/**
 * True when value is an array whose elements are all strings.
 */
function isStringArray(value) {
  if (!Array.isArray(value)) {
    return false;
  }
  return value.every((item) => typeof item === "string");
}
|
|
2570
|
-
/**
 * In-place migration of the legacy `lfg_active` flag to `cookit_active`.
 * Only applies when the new field is absent and the old one is a boolean.
 */
function migrateLegacyFields(raw) {
  const needsMigration = raw.cookit_active === void 0 && typeof raw.lfg_active === "boolean";
  if (!needsMigration) {
    return;
  }
  raw.cookit_active = raw.lfg_active;
  delete raw.lfg_active;
}
|
|
2576
|
-
/**
 * Structural validation of a parsed phase-state object. Mutates the input by
 * migrating legacy fields first, then checks every required field.
 */
function validatePhaseState(raw) {
  if (typeof raw !== "object" || raw === null) return false;
  const state = raw;
  migrateLegacyFields(state);
  if (typeof state.cookit_active !== "boolean") return false;
  if (typeof state.epic_id !== "string") return false;
  if (!isPhaseName(state.current_phase)) return false;
  if (typeof state.phase_index !== "number") return false;
  if (state.phase_index < 1 || state.phase_index > 5) return false;
  if (!isStringArray(state.skills_read)) return false;
  if (!Array.isArray(state.gates_passed)) return false;
  if (!state.gates_passed.every((gate) => isGateName(gate))) return false;
  return isIsoDate(state.started_at);
}
|
|
2582
|
-
/**
 * Read, validate, and age-check the phase-state file. Returns null (and
 * removes the file when expired) on any problem — missing file, bad JSON,
 * failed validation, or state older than PHASE_STATE_MAX_AGE_MS.
 */
function getPhaseState(repoRoot) {
  try {
    const path = getStatePath(repoRoot);
    if (!existsSync(path)) return null;
    const parsed = JSON.parse(readFileSync(path, "utf-8"));
    if (!validatePhaseState(parsed)) return null;
    const ageMs = Date.now() - new Date(parsed.started_at).getTime();
    if (ageMs > PHASE_STATE_MAX_AGE_MS) {
      cleanPhaseState(repoRoot);
      return null;
    }
    return parsed;
  } catch {
    // Unreadable state is treated the same as no state.
    return null;
  }
}
|
|
2599
|
-
/**
 * Delete the phase-state file if it exists. Best-effort: failures are
 * swallowed so cleanup can never break the caller.
 */
function cleanPhaseState(repoRoot) {
  try {
    const path = getStatePath(repoRoot);
    if (!existsSync(path)) return;
    unlinkSync(path);
  } catch {
    // Removal is best-effort.
  }
}
|
|
2606
|
-
init_storage();
|
|
2607
|
-
init_embeddings();
|
|
2608
|
-
init_storage();
|
|
2609
|
-
|
|
2610
|
-
// src/commands/management-crud.ts
|
|
2611
|
-
init_storage();
|
|
2612
|
-
|
|
2613
|
-
// src/memory/index.ts
|
|
2614
|
-
init_types();
|
|
2615
|
-
init_storage();
|
|
2616
|
-
init_embeddings();
|
|
2617
|
-
init_search2();
|
|
2618
|
-
init_storage();
|
|
2619
|
-
|
|
2620
|
-
// src/commands/shared.ts
|
|
2621
|
-
init_utils();
|
|
2622
|
-
init_embeddings();
|
|
2623
|
-
init_storage();
|
|
2624
|
-
init_storage();
|
|
2625
|
-
|
|
2626
|
-
// src/commands/management-invalidation.ts
|
|
2627
|
-
init_storage();
|
|
2628
|
-
init_storage();
|
|
2629
|
-
init_storage();
|
|
2630
|
-
init_storage();
|
|
2631
|
-
// Update-check cache lives for a day; registry fetches time out after 3s.
var CACHE_TTL_MS = 24 * 60 * 60 * 1e3;
var FETCH_TIMEOUT_MS = 3e3;
var CACHE_FILENAME = "update-check.json";
/**
 * Fetch the "latest" dist-tag for a package from the npm registry.
 * Returns null on any failure (offline, timeout, non-OK response, bad payload).
 */
async function fetchLatestVersion(packageName = "compound-agent") {
  try {
    const res = await fetch(
      `https://registry.npmjs.org/-/package/${packageName}/dist-tags`,
      { signal: AbortSignal.timeout(FETCH_TIMEOUT_MS) }
    );
    if (!res.ok) return null;
    const data = await res.json();
    const latest = data["latest"];
    if (typeof latest !== "string") return null;
    return latest;
  } catch {
    return null;
  }
}
|
|
2648
|
-
/**
 * Compare the running VERSION against the latest published version, using a
 * day-long file cache under cacheDir. Returns null when the latest version
 * cannot be determined; never throws.
 */
async function checkForUpdate(cacheDir) {
  try {
    const cachePath = join(cacheDir, CACHE_FILENAME);
    const cached = readCache(cachePath);
    if (cached) {
      return {
        current: VERSION,
        latest: cached.latest,
        updateAvailable: semverGt(cached.latest, VERSION)
      };
    }
    const latest = await fetchLatestVersion();
    if (latest === null) return null;
    try {
      mkdirSync(cacheDir, { recursive: true });
      writeFileSync(cachePath, JSON.stringify({ latest }));
    } catch {
      // Cache write is best-effort; a failure must not block the result.
    }
    return {
      current: VERSION,
      latest,
      updateAvailable: semverGt(latest, VERSION)
    };
  } catch {
    return null;
  }
}
|
|
2676
|
-
/**
 * True when the latest version's major component is greater than the
 * current one's.
 */
function isMajorUpdate(current, latest) {
  const major = (version) => parseInt(version.split(".")[0], 10);
  return major(latest) > major(current);
}
|
|
2679
|
-
/**
 * Markdown banner announcing that a newer package version exists, flagging
 * major bumps as potentially breaking.
 */
function formatUpdateNotificationMarkdown(current, latest) {
  let urgency = "";
  if (isMajorUpdate(current, latest)) {
    urgency = " (MAJOR - may contain breaking changes)";
  }
  return `
---
# Update Available
compound-agent v${latest} is available (current: v${current})${urgency}.
Run: \`npm update -g compound-agent\` (global) or \`pnpm add -D compound-agent@latest\` (dev dependency)
`;
}
|
|
2688
|
-
/**
 * Loose semver comparison: true when a > b by major/minor/patch.
 * Pre-release suffixes are stripped, so "2.0.0-beta" compares equal to
 * "2.0.0"; non-numeric components count as 0.
 */
function semverGt(a, b) {
  const toTriple = (version) => {
    const core = version.split("-")[0];
    const nums = core.split(".").map((part) => {
      const n = parseInt(part, 10);
      return isNaN(n) ? 0 : n;
    });
    return [nums[0] ?? 0, nums[1] ?? 0, nums[2] ?? 0];
  };
  const left = toTriple(a);
  const right = toTriple(b);
  for (let i = 0; i < 3; i++) {
    if (left[i] !== right[i]) return left[i] > right[i];
  }
  return false;
}
|
|
2703
|
-
/**
 * Read the update-check cache file. Returns the parsed payload only when the
 * file exists, is younger than CACHE_TTL_MS (by mtime), and carries a
 * non-empty string `latest`; null otherwise.
 */
function readCache(cachePath) {
  try {
    const stat = statSync(cachePath);
    const expired = Date.now() - stat.mtimeMs > CACHE_TTL_MS;
    if (expired) return null;
    const data = JSON.parse(readFileSync(cachePath, "utf-8"));
    const valid = typeof data.latest === "string" && data.latest !== "";
    return valid ? data : null;
  } catch {
    return null;
  }
}
|
|
2715
|
-
|
|
2716
|
-
// src/commands/management-prime.ts
|
|
2717
|
-
var TRUST_LANGUAGE_TEMPLATE = `# Compound Agent Active
|
|
2718
|
-
|
|
2719
|
-
> **Context Recovery**: Run \`npx ca prime\` after compaction, clear, or new session
|
|
2720
|
-
|
|
2721
|
-
## CLI Commands (ALWAYS USE THESE)
|
|
2722
|
-
|
|
2723
|
-
**You MUST use CLI commands for lesson management:**
|
|
2724
|
-
|
|
2725
|
-
| Command | Purpose |
|
|
2726
|
-
|---------|---------|
|
|
2727
|
-
| \`npx ca search "query"\` | Search lessons - MUST call before architectural decisions; use anytime you need context |
|
|
2728
|
-
| \`npx ca knowledge "query"\` | Semantic search over project docs - MUST call before architectural decisions; use keyword phrases, not questions |
|
|
2729
|
-
| \`npx ca learn "insight"\` | Capture lessons - call AFTER corrections or discoveries |
|
|
2730
|
-
|
|
2731
|
-
## Core Constraints
|
|
2732
|
-
|
|
2733
|
-
**Default**: Use CLI commands for lesson management
|
|
2734
|
-
**Prohibited**: NEVER edit .claude/lessons/ files directly
|
|
2735
|
-
|
|
2736
|
-
**Default**: Propose lessons freely after corrections
|
|
2737
|
-
**Prohibited**: NEVER propose without quality gate (novel + specific; prefer actionable)
|
|
2738
|
-
|
|
2739
|
-
## Retrieval Protocol
|
|
2740
|
-
|
|
2741
|
-
You MUST call \`npx ca search\` and \`npx ca knowledge\` BEFORE:
|
|
2742
|
-
- Architectural decisions or complex planning
|
|
2743
|
-
- Implementing patterns you've done before in this repo
|
|
2744
|
-
|
|
2745
|
-
**NEVER skip search for complex decisions.** Past mistakes will repeat.
|
|
2746
|
-
|
|
2747
|
-
Beyond mandatory triggers, use these commands freely \u2014 they are lightweight queries, not heavyweight operations. Uncertain about a pattern? \`ca search\`. Need a detail from the docs? \`ca knowledge\`. The cost of an unnecessary search is near-zero; the cost of a missed one can be hours.
|
|
2748
|
-
|
|
2749
|
-
## Capture Protocol
|
|
2750
|
-
|
|
2751
|
-
Run \`npx ca learn\` AFTER:
|
|
2752
|
-
- User corrects you ("no", "wrong", "actually...")
|
|
2753
|
-
- You self-correct after iteration failures
|
|
2754
|
-
- Test fails then you fix it
|
|
2755
|
-
|
|
2756
|
-
**Quality gate** (must pass before capturing):
|
|
2757
|
-
- Novel (not already stored)
|
|
2758
|
-
- Specific (clear guidance)
|
|
2759
|
-
- Actionable (preferred, not mandatory)
|
|
2760
|
-
|
|
2761
|
-
**Workflow**: Search BEFORE deciding, capture AFTER learning.
|
|
2762
|
-
`;
|
|
2763
|
-
/**
 * Human-readable label for a lesson's capture source; unknown values pass
 * through unchanged.
 */
function formatSource(source) {
  const labels = new Map([
    ["user_correction", "user correction"],
    ["self_correction", "self correction"],
    ["test_failure", "test failure"],
    ["manual", "manual"]
  ]);
  return labels.get(source) ?? source;
}
|
|
2777
|
-
/**
 * Render one lesson as a markdown bullet with its capture date and source.
 */
function formatLessonForPrime(lesson) {
  const { insight, tags, created, source } = lesson;
  const tagSuffix = tags.length > 0 ? ` (${tags.join(", ")})` : "";
  return `- **${insight}**${tagSuffix}
Learned: ${created.slice(0, 10)} via ${formatSource(source)}`;
}
|
|
2784
|
-
/**
 * Markdown section describing an in-flight cook-it session, or null when no
 * active session state exists.
 */
function formatActiveCookitSection(repoRoot) {
  const state = getPhaseState(repoRoot);
  if (!state?.cookit_active) return null;
  const skillsRead = state.skills_read.length > 0 ? state.skills_read.join(", ") : "(none)";
  const gatesPassed = state.gates_passed.length > 0 ? state.gates_passed.join(", ") : "(none)";
  return `
---

# ACTIVE COOK-IT SESSION

Epic: ${state.epic_id}
Phase: ${state.current_phase} (${state.phase_index}/5)
Skills read: ${skillsRead}
Gates passed: ${gatesPassed}
Started: ${state.started_at}

Resume from phase ${state.current_phase}. Run: \`npx ca phase-check start ${state.current_phase}\`
Read the skill file first: \`.claude/skills/compound/${state.current_phase}/SKILL.md\`
`;
}
|
|
2804
|
-
/**
 * Build the full "prime" context injected into a fresh agent session: the
 * trust-language template, up to five high-severity lessons, any active
 * cook-it session section, and (when applicable) an update notification.
 */
async function getPrimeContext(repoRoot) {
  const root = repoRoot ?? getRepoRoot();
  try {
    await syncIfNeeded(root);
  } catch {
    // Sync is best-effort; stale lessons beat no prime output.
  }
  const lessons = await loadSessionLessons(root, 5);
  let output = TRUST_LANGUAGE_TEMPLATE;
  if (lessons.length > 0) {
    const formattedLessons = lessons.map(formatLessonForPrime).join("\n\n");
    output += `
---

# [CRITICAL] Mandatory Recall

Critical lessons from past corrections:

${formattedLessons}
`;
  }
  const cookitSection = formatActiveCookitSection(root);
  if (cookitSection !== null) {
    output += cookitSection;
  }
  // NOTE(review): the update check runs only when stdout is NOT a TTY —
  // presumably because prime output is consumed by an agent through a pipe —
  // and is suppressed by CI / NO_UPDATE_NOTIFIER; confirm intent before changing.
  if (!process.stdout.isTTY && !process.env["CI"] && !process.env["NO_UPDATE_NOTIFIER"]) {
    try {
      const updateResult = await checkForUpdate(join(root, ".claude", ".cache"));
      if (updateResult?.updateAvailable) {
        output += formatUpdateNotificationMarkdown(updateResult.current, updateResult.latest);
      }
    } catch {
      // Never let the update check break priming.
    }
  }
  return output;
}
|
|
2839
|
-
|
|
2840
|
-
// src/audit/checks/lessons.ts
|
|
2841
|
-
init_storage();
|
|
2842
|
-
/**
 * Audit check that surfaces every high-severity lesson as an informational
 * finding so reviews see them alongside code issues.
 */
async function checkLessons(repoRoot) {
  const { items } = await readMemoryItems(repoRoot);
  const findings = items
    .filter((item) => item.severity === "high")
    .map((item) => ({
      file: "",
      issue: `High-severity lesson: ${item.insight}`,
      severity: "info",
      relatedLessonId: item.id,
      source: "lesson"
    }));
  const filesChecked = items.length > 0 ? [LESSONS_PATH] : [];
  return { findings, filesChecked };
}
|
|
2859
|
-
|
|
2860
|
-
// src/audit/checks/patterns.ts
|
|
2861
|
-
init_storage();
|
|
2862
|
-
// Zod schemas describing .claude/rules.json: a list of rules, each carrying
// one of three check kinds (file-pattern, file-size, script).
var SeveritySchema2 = z.enum(["error", "warning", "info"]);
var FilePatternCheckSchema = z.object({
  type: z.literal("file-pattern"),
  glob: z.string(),
  pattern: z.string(),
  // When true the pattern MUST appear in each matched file; otherwise every
  // match is reported as a violation (see runFilePatternCheck).
  mustMatch: z.boolean().optional()
});
var FileSizeCheckSchema = z.object({
  type: z.literal("file-size"),
  glob: z.string(),
  maxLines: z.number().int().positive()
});
var ScriptCheckSchema = z.object({
  type: z.literal("script"),
  command: z.string(),
  // Defaults applied at run time: exit code 0, DEFAULT_SCRIPT_TIMEOUT ms.
  expectExitCode: z.number().int().optional(),
  timeout: z.number().int().positive().optional()
});
var RuleCheckSchema = z.discriminatedUnion("type", [
  FilePatternCheckSchema,
  FileSizeCheckSchema,
  ScriptCheckSchema
]);
var RuleSchema = z.object({
  id: z.string().min(1),
  description: z.string(),
  severity: SeveritySchema2,
  check: RuleCheckSchema,
  remediation: z.string()
});
var RuleConfigSchema = z.object({
  rules: z.array(RuleSchema)
});
|
|
2895
|
-
/**
 * Convert a simple glob ("**" = any directory depth, "*" = any run of
 * non-separator characters) to an anchored RegExp.
 *
 * Fix: the original only escaped ".", so any other regex metacharacter in
 * the glob ("+", "(", "[", "$", ...) leaked through and produced a wrong or
 * invalid regex. All metacharacters except "*" are now escaped; previously
 * valid globs compile to the identical pattern.
 */
function globToRegex(glob) {
  // Escape every regex metacharacter except "*", which carries glob meaning.
  const escaped = glob.replace(/[.+?^${}()|[\]\\]/g, "\\$&");
  // "**/" spans zero or more directories; remaining "*" stays within one.
  const pattern = escaped.replace(/\*\*\//g, "(.+/)?").replace(/\*/g, "[^/]*");
  return new RegExp(`^${pattern}$`);
}
|
|
2899
|
-
/**
 * Recursively collect files under baseDir whose base-relative path matches
 * the glob. Dot-entries and node_modules are pruned; results are sorted.
 */
function findFiles(baseDir, glob) {
  const regex = globToRegex(glob);
  const matches = [];
  const walk = (dir) => {
    for (const entry of readdirSync(dir)) {
      // Skip hidden entries and dependency trees entirely.
      if (entry.startsWith(".") || entry === "node_modules") continue;
      const fullPath = join(dir, entry);
      if (statSync(fullPath).isDirectory()) {
        walk(fullPath);
        continue;
      }
      const relPath = relative(baseDir, fullPath);
      if (regex.test(relPath)) {
        matches.push(relPath);
      }
    }
  };
  walk(baseDir);
  return matches.sort();
}
|
|
2921
|
-
|
|
2922
|
-
// src/rules/checks/file-pattern.ts
|
|
2923
|
-
/**
 * Run a file-pattern rule over every file matching check.glob.
 * mustMatch=true: report files where the pattern never appears.
 * mustMatch=false: report every line where the pattern appears.
 */
function runFilePatternCheck(baseDir, check) {
  const regex = new RegExp(check.pattern);
  const violations = [];
  for (const file of findFiles(baseDir, check.glob)) {
    const lines = readFileSync(join(baseDir, file), "utf-8").split("\n");
    if (check.mustMatch) {
      const found = lines.some((line) => regex.test(line));
      if (!found) {
        violations.push({
          file,
          message: `Pattern ${check.pattern} missing from file`
        });
      }
    } else {
      lines.forEach((line, index) => {
        if (regex.test(line)) {
          violations.push({
            file,
            line: index + 1,
            message: `Pattern ${check.pattern} matched`
          });
        }
      });
    }
  }
  return violations;
}
|
|
2953
|
-
/**
 * Run a file-size rule: report files matching check.glob whose line count
 * exceeds check.maxLines. A trailing newline does not count as an extra line.
 */
function runFileSizeCheck(baseDir, check) {
  const violations = [];
  for (const file of findFiles(baseDir, check.glob)) {
    const content = readFileSync(join(baseDir, file), "utf-8");
    let lineCount = 0;
    if (content !== "") {
      const parts = content.split("\n");
      // Drop only a single empty segment left by a trailing newline.
      lineCount = parts.filter((part, i) => i < parts.length - 1 || part !== "").length;
    }
    if (lineCount > check.maxLines) {
      violations.push({
        file,
        message: `File has ${lineCount} lines, exceeds limit of ${check.maxLines}`
      });
    }
  }
  return violations;
}
|
|
2968
|
-
// Default wall-clock budget for a script check (30s).
var DEFAULT_SCRIPT_TIMEOUT = 3e4;
/**
 * Run a script rule: execute the command and compare its exit code against
 * check.expectExitCode (default 0). A mismatched exit code produces one
 * violation, with trimmed stderr appended when available.
 */
function runScriptCheck(check, baseDir) {
  const expectedCode = check.expectExitCode ?? 0;
  const timeout = check.timeout ?? DEFAULT_SCRIPT_TIMEOUT;
  try {
    execSync(check.command, { stdio: ["pipe", "pipe", "pipe"], cwd: baseDir, timeout });
  } catch (err) {
    // execSync throws on non-zero exit; err.status carries the exit code.
    const exitCode = err.status ?? 1;
    if (exitCode === expectedCode) {
      return [];
    }
    const stderr = (err.stderr ?? Buffer.alloc(0)).toString("utf-8").trim();
    const suffix = stderr ? `: ${stderr}` : "";
    return [{ message: `Script exited with exit code ${exitCode} (expected ${expectedCode})${suffix}` }];
  }
  if (expectedCode !== 0) {
    return [{ message: `Script exited with exit code 0, expected ${expectedCode}` }];
  }
  return [];
}
|
|
2988
|
-
|
|
2989
|
-
// src/rules/engine.ts
|
|
2990
|
-
/**
 * Load and validate .claude/rules.json for a repo. A missing file yields an
 * empty rule set; malformed JSON or schema violations throw.
 */
function loadRuleConfig(baseDir) {
  const configPath = join(baseDir, ".claude", "rules.json");
  if (!existsSync(configPath)) {
    return { rules: [] };
  }
  const json = JSON.parse(readFileSync(configPath, "utf-8"));
  return RuleConfigSchema.parse(json);
}
|
|
2999
|
-
/**
 * Evaluates each rule against the repository, converting any error thrown by
 * a checker into a failed result so one broken rule never aborts the run.
 * Returns one { rule, violations, passed } record per input rule, in order.
 */
function runRules(baseDir, rules) {
  const results = [];
  for (const rule of rules) {
    try {
      const violations = runCheck(baseDir, rule);
      results.push({ rule, violations, passed: violations.length === 0 });
    } catch (err) {
      const reason = err instanceof Error ? err.message : "Rule check failed";
      results.push({
        rule,
        violations: [{ message: `Rule check error: ${reason}` }],
        passed: false
      });
    }
  }
  return results;
}
|
|
3010
|
-
/**
 * Dispatches a rule to the checker matching its `check.type`.
 * Returns that checker's violation list; an unrecognized type yields undefined.
 */
function runCheck(baseDir, rule) {
  const { check } = rule;
  if (check.type === "file-pattern") {
    return runFilePatternCheck(baseDir, check);
  }
  if (check.type === "file-size") {
    return runFileSizeCheck(baseDir, check);
  }
  if (check.type === "script") {
    // Script checks take (check, baseDir) — note the reversed argument order.
    return runScriptCheck(check, baseDir);
  }
}
|
|
3020
|
-
|
|
3021
|
-
// src/audit/checks/patterns.ts
|
|
3022
|
-
/**
 * Audit check that scans TypeScript sources for known-bad code patterns
 * recorded in memory items (item.pattern.bad), emitting a warning finding
 * for each file containing a bad pattern.
 *
 * Fix: the original re-read every source file once per pattern item
 * (O(items x files) disk reads); contents are now read once and reused.
 * Finding order (item-major, then file) is preserved.
 */
async function checkPatterns(repoRoot) {
  const { items } = await readMemoryItems(repoRoot);
  const patterned = items.filter((item) => item.pattern?.bad);
  if (patterned.length === 0) {
    // Nothing to scan for; report that no files were checked.
    return { findings: [], filesChecked: [] };
  }
  const sourceFiles = findFiles(repoRoot, "**/*.ts");
  // Read each file exactly once up front instead of once per pattern item.
  const contents = new Map();
  for (const relPath of sourceFiles) {
    contents.set(relPath, readFileSync(join(repoRoot, relPath), "utf-8"));
  }
  const findings = [];
  for (const item of patterned) {
    const bad = item.pattern.bad;
    for (const relPath of sourceFiles) {
      if (contents.get(relPath).includes(bad)) {
        findings.push({
          file: relPath,
          issue: `Bad pattern found: "${bad}" (${item.insight})`,
          severity: "warning",
          relatedLessonId: item.id,
          suggestedFix: item.pattern.good ? `Use: ${item.pattern.good}` : void 0,
          source: "pattern"
        });
      }
    }
  }
  return { findings, filesChecked: sourceFiles };
}
|
|
3048
|
-
|
|
3049
|
-
// src/audit/checks/rules.ts
|
|
3050
|
-
/**
 * Audit check that evaluates the repository's configured rules
 * (.claude/rules.json) and converts rule violations into audit findings.
 * A broken configuration yields a single error-severity finding rather
 * than throwing.
 */
function checkRules(repoRoot) {
  let config;
  try {
    config = loadRuleConfig(repoRoot);
  } catch (err) {
    const reason = err instanceof Error ? err.message : "Failed to load rules config";
    const configFinding = {
      file: ".claude/rules.json",
      issue: `Invalid rules configuration: ${reason}`,
      severity: "error",
      source: "rule"
    };
    return { findings: [configFinding], filesChecked: [] };
  }
  if (config.rules.length === 0) {
    return { findings: [], filesChecked: [] };
  }
  const findings = [];
  // Only files that actually appear in violations are counted as checked.
  const touchedFiles = new Set();
  for (const { rule, violations } of runRules(repoRoot, config.rules)) {
    for (const violation of violations) {
      if (violation.file) {
        touchedFiles.add(violation.file);
      }
      findings.push({
        file: violation.file ?? "",
        issue: violation.message,
        severity: rule.severity,
        suggestedFix: rule.remediation,
        source: "rule"
      });
    }
  }
  return { findings, filesChecked: [...touchedFiles] };
}
|
|
3088
|
-
|
|
3089
|
-
// src/audit/engine.ts
|
|
3090
|
-
/**
 * Runs the enabled audit checks (rules, memory patterns, lessons) and
 * aggregates their findings into a single timestamped report.
 * All three checks default to enabled; each can be toggled via `options`.
 */
async function runAudit(repoRoot, options = {}) {
  const { includeRules = true, includePatterns = true, includeLessons = true } = options;
  const findings = [];
  const checkedFiles = new Set();
  const collect = (result) => {
    findings.push(...result.findings);
    result.filesChecked.forEach((f) => checkedFiles.add(f));
  };
  if (includeRules) {
    collect(checkRules(repoRoot));
  }
  if (includePatterns) {
    collect(await checkPatterns(repoRoot));
  }
  if (includeLessons) {
    collect(await checkLessons(repoRoot));
  }
  // Tally findings by severity in one pass for the summary.
  const tally = { error: 0, warning: 0, info: 0 };
  for (const finding of findings) {
    tally[finding.severity] += 1;
  }
  return {
    findings,
    summary: {
      errors: tally.error,
      warnings: tally.warning,
      infos: tally.info,
      filesChecked: checkedFiles.size
    },
    timestamp: new Date().toISOString()
  };
}
|
|
3118
|
-
// Zod schema for a single audit finding, as emitted by the rule, pattern,
// and lesson checks above.
var AuditFindingSchema = z.object({
  file: z.string(),
  issue: z.string(),
  severity: z.enum(["error", "warning", "info"]),
  // Links the finding back to the memory/lesson item that triggered it.
  relatedLessonId: z.string().optional(),
  suggestedFix: z.string().optional(),
  // Which audit subsystem produced the finding.
  source: z.enum(["rule", "pattern", "lesson"])
});
// Per-severity counts plus the number of distinct files examined.
var AuditSummarySchema = z.object({
  errors: z.number(),
  warnings: z.number(),
  infos: z.number(),
  filesChecked: z.number()
});
// Top-level report shape returned by runAudit; timestamp is an ISO-8601 string.
var AuditReportSchema = z.object({
  findings: z.array(AuditFindingSchema),
  summary: AuditSummarySchema,
  timestamp: z.string()
});
|
|
3137
|
-
|
|
3138
|
-
// src/commands/knowledge.ts
// Bundler-generated lazy-module initializers (esbuild-style init_* calls).
// NOTE(review): several are invoked more than once below; presumably each is
// guarded so repeated calls are no-ops — TODO confirm against the module
// wrappers earlier in this bundle.
init_embeddings();
init_sqlite_knowledge();

// src/commands/knowledge-index.ts
init_embeddings();
init_sqlite_knowledge();

// src/commands/clean-lessons.ts
init_embeddings();
init_search2();
init_storage();
init_embeddings();
init_storage();
init_search2();

// src/index.ts
init_compound();
|
|
3156
|
-
// Linters the detector can recognize; "unknown" is the fallback value.
var LinterNameSchema = z.enum([
  "eslint",
  "ruff",
  "clippy",
  "golangci-lint",
  "ast-grep",
  "semgrep",
  "unknown"
]);
// Detection result: the linter name plus the config path (relative to the
// repo root) that identified it, or null when no config was found.
var LinterInfoSchema = z.object({
  linter: LinterNameSchema,
  configPath: z.string().nullable()
});
|
|
3169
|
-
// Ordered probe table used by detectLinter: the first linter whose config
// file exists wins. All paths are relative to the repository root.
var DETECTION_RULES = [
  {
    linter: "eslint",
    configs: [
      // Flat config (ESLint v9+)
      "eslint.config.js",
      "eslint.config.mjs",
      "eslint.config.cjs",
      "eslint.config.ts",
      "eslint.config.mts",
      "eslint.config.cts",
      // Legacy config
      ".eslintrc.js",
      ".eslintrc.cjs",
      ".eslintrc.json",
      ".eslintrc.yml",
      ".eslintrc.yaml"
    ]
  },
  {
    // Ruff also supports pyproject.toml; that case is handled separately
    // in detectLinter via pyprojectHasRuff.
    linter: "ruff",
    configs: ["ruff.toml", ".ruff.toml"]
  },
  {
    linter: "clippy",
    configs: ["clippy.toml", ".clippy.toml"]
  },
  {
    linter: "golangci-lint",
    configs: [".golangci.yml", ".golangci.yaml", ".golangci.toml", ".golangci.json"]
  },
  {
    linter: "ast-grep",
    configs: ["sgconfig.yml"]
  },
  {
    linter: "semgrep",
    configs: [".semgrep.yml", ".semgrep.yaml"]
  }
];
|
|
3209
|
-
/** Builds the sentinel result meaning no supported linter was detected. */
function unknown() {
  const noLinter = { linter: "unknown", configPath: null };
  return noLinter;
}
|
|
3212
|
-
/**
 * True when `filePath` names an existing regular file.
 * Any stat failure (missing path, permission error, etc.) reports false.
 */
function isFile(filePath) {
  let stats;
  try {
    stats = statSync(filePath);
  } catch {
    return false;
  }
  return stats.isFile();
}
|
|
3219
|
-
/**
 * Checks whether `<repoRoot>/pyproject.toml` declares a [tool.ruff] table
 * (the \b also matches subtables such as [tool.ruff.lint]).
 * A missing or unreadable file reports false.
 */
function pyprojectHasRuff(repoRoot) {
  let toml;
  try {
    toml = readFileSync(join(repoRoot, "pyproject.toml"), "utf-8");
  } catch {
    return false;
  }
  return /^\s*\[tool\.ruff\b/m.test(toml);
}
|
|
3228
|
-
/**
 * Detects which linter a repository uses by probing for well-known config
 * files in DETECTION_RULES order (first hit wins). Ruff additionally falls
 * back to a [tool.ruff] table in pyproject.toml, checked after ruff's own
 * config files but before later linters. Any unexpected error yields the
 * "unknown" result rather than propagating.
 */
function detectLinter(repoRoot) {
  try {
    for (const { linter, configs } of DETECTION_RULES) {
      const found = configs.find((name) => isFile(join(repoRoot, name)));
      if (found) {
        return { linter, configPath: found };
      }
      if (linter === "ruff" && pyprojectHasRuff(repoRoot)) {
        return { linter: "ruff", configPath: "pyproject.toml" };
      }
    }
    return unknown();
  } catch {
    return unknown();
  }
}
|
|
3245
|
-
|
|
3246
|
-
// src/index.ts
|
|
3247
|
-
init_types();
|
|
3248
|
-
|
|
3249
|
-
export { AuditFindingSchema, AuditReportSchema, CANDIDATE_MULTIPLIER, CCT_PATTERNS_PATH, CctPatternSchema, DB_PATH, DEFAULT_TEXT_WEIGHT, DEFAULT_VECTOR_WEIGHT, KNOWLEDGE_DB_PATH, KNOWLEDGE_SCHEMA_VERSION, LESSONS_PATH, LessonItemSchema, LessonSchema, LinterInfoSchema, LinterNameSchema, MODEL_FILENAME, MODEL_URI, MemoryItemRecordSchema, MemoryItemSchema, MemoryItemTypeSchema, PatternItemSchema, PreferenceItemSchema, SolutionItemSchema, VERSION, acquireEmbedLock, appendLesson, appendMemoryItem, buildSimilarityMatrix, calculateScore, chunkFile, closeDb, closeKnowledgeDb, clusterBySimilarity, collectCachedChunkEmbeddings, confirmationBoost, cosineSimilarity, detectLinter, detectSelfCorrection, detectTestFailure, detectUserCorrection, embedChunks, embedText, embedTexts, formatLessonsCheck, generateId, getCachedChunkEmbedding, getEmbedding, getPrimeContext, getUnembeddedChunkCount, indexAndSpawnEmbed, indexDocs, isActionable, isEmbedLocked, isModelAvailable, isModelUsable, isNovel, isSpecific, loadSessionLessons, mergeHybridResults, normalizeBm25Rank, openKnowledgeDb, rankLessons, readCctPatterns, readEmbedStatus, readLessons, readMemoryItems, rebuildIndex, recencyBoost, resolveModel, retrieveForPlan, runAudit, runBackgroundEmbed, searchChunksKeywordScored, searchKeyword, searchKnowledge, searchKnowledgeVector, searchVector, setCachedChunkEmbedding, severityBoost, shouldPropose, spawnBackgroundEmbed, synthesizePattern, unloadEmbedding, writeCctPatterns, writeEmbedStatus };
|
|
3250
|
-
//# sourceMappingURL=index.js.map
|