kontex-core 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +1750 -0
  2. package/package.json +63 -0
package/dist/index.js ADDED
@@ -0,0 +1,1750 @@
1
// @bun
// Bundler (Bun) module scaffolding: property-based re-exports plus a
// lazy one-shot module initializer.
var __defProp = Object.defineProperty;
var __returnValue = (value) => value;
function __exportSetter(key, next) {
  // `this` is bound to the export map; store a constant getter for `next`.
  this[key] = __returnValue.bind(null, next);
}
var __export = (target, all) => {
  for (var key in all) {
    __defProp(target, key, {
      get: all[key],
      set: __exportSetter.bind(all, key),
      enumerable: true,
      configurable: true
    });
  }
};
// Runs `fn` exactly once (zeroing it afterwards) and caches its return value.
var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
var __require = import.meta.require;
18
+
19
+ // src/secrets.ts
20
// Compiles user-supplied regex strings (config `secrets.extraPatterns`) into
// named secret detectors. Invalid patterns are skipped with a warning — the
// previous silent catch made a typo'd pattern invisibly stop scanning.
function buildExtraPatterns(extraPatterns) {
  const result = [];
  for (const pattern of extraPatterns) {
    try {
      result.push({ name: `custom:${pattern.slice(0, 30)}`, regex: new RegExp(pattern) });
    } catch (err) {
      const reason = err instanceof Error ? err.message : "invalid pattern";
      console.warn(`kontex: ignoring invalid secrets.extraPatterns entry ${JSON.stringify(pattern)} (${reason})`);
    }
  }
  return result;
}
29
// Returns { blocked: true, pattern } for the first built-in or user-configured
// secret pattern that matches `content`; { blocked: false } otherwise.
function scanForSecrets(content, extraPatterns = []) {
  const patterns = SECRET_PATTERNS.concat(buildExtraPatterns(extraPatterns));
  const hit = patterns.find(({ regex }) => regex.test(content));
  return hit ? { blocked: true, pattern: hit.name } : { blocked: false };
}
38
// Built-in secret detectors; populated lazily through the bundler's __esm hook.
var SECRET_PATTERNS;
var init_secrets = __esm(() => {
  const detectors = [
    ["generic-long-token", /['"]\w{32,}['"]/],
    ["api-key-assignment", /api[_-]?key\s*[:=]\s*['"]?\w+/i],
    ["secret-key-assignment", /secret[_-]?key\s*[:=]\s*['"]?\w+/i],
    ["password-assignment", /password\s*[:=]\s*['"]?[^\s'"]{8,}/i],
    ["postgres-connection", /postgres:\/\/[^@]+:[^@]+@/],
    ["mysql-connection", /mysql:\/\/[^@]+:[^@]+@/],
    ["mongodb-connection", /mongodb\+srv:\/\/[^@]+:[^@]+@/],
    ["openai-key", /sk-[a-zA-Z0-9]{40,}/],
    ["github-personal-token", /ghp_[a-zA-Z0-9]{36}/],
    ["aws-access-key", /AKIA[A-Z0-9]{16}/]
  ];
  SECRET_PATTERNS = detectors.map(([name, regex]) => ({ name, regex }));
});
53
+
54
+ // src/storage/db.ts
55
// Lazy namespace object for src/storage/db.ts re-exports (live bindings via
// the bundler's __export helper).
var exports_db = {};
__export(exports_db, {
getDatabase: () => getDatabase,
closeDatabase: () => closeDatabase,
EMBEDDING_DIM: () => EMBEDDING_DIM
});
61
+ import { Database } from "bun:sqlite";
62
+ import { join as join2 } from "path";
63
+ import { existsSync as existsSync2, mkdirSync } from "fs";
64
+ import * as sqliteVec from "sqlite-vec";
65
// Opens (or returns the cached) SQLite database at
// <workspaceRoot>/.kontex-index/index.db, creating the directory, enabling
// WAL, loading the vec extension (best-effort) and running migrations.
// NOTE(review): the singleton ignores `workspaceRoot` once set — a second
// workspace in the same process receives the first workspace's handle.
function getDatabase(workspaceRoot) {
  if (dbInstance) {
    return dbInstance;
  }
  const indexDir = join2(workspaceRoot, INDEX_DIR);
  if (!existsSync2(indexDir)) {
    mkdirSync(indexDir, { recursive: true });
  }
  const handle = new Database(join2(indexDir, DB_FILENAME));
  handle.exec("PRAGMA journal_mode=WAL");
  handle.exec("PRAGMA synchronous=NORMAL");
  tryLoadVecExtension(handle);
  runMigrations(handle);
  dbInstance = handle;
  return handle;
}
79
// Closes and clears the cached database handle, if one is open.
function closeDatabase() {
  if (!dbInstance) {
    return;
  }
  dbInstance.close();
  dbInstance = null;
}
85
// Applies pending schema migrations; applied versions are tracked in the
// schema_version table.
function runMigrations(db) {
  db.exec(`CREATE TABLE IF NOT EXISTS schema_version (version INTEGER PRIMARY KEY)`);
  const row = db.prepare("SELECT COALESCE(MAX(version), 0) as v FROM schema_version").get();
  if (row.v < 1) {
    migrateV1(db);
  }
}
91
// Schema v1: the core `memories` table, an optional sqlite-vec embedding
// index, and lookup indexes. L0/L1/L2 columns hold the tiered summaries.
function migrateV1(db) {
db.exec(`
CREATE TABLE IF NOT EXISTS memories (
uri TEXT PRIMARY KEY,
content TEXT NOT NULL,
type TEXT NOT NULL CHECK(type IN ('decision', 'pattern', 'gotcha', 'convention', 'resource')),
l0 TEXT NOT NULL DEFAULT '',
l1 TEXT NOT NULL DEFAULT '',
l2 TEXT NOT NULL DEFAULT '',
confidence REAL NOT NULL DEFAULT 0.0,
verified INTEGER NOT NULL DEFAULT 0,
stale INTEGER NOT NULL DEFAULT 0,
global INTEGER NOT NULL DEFAULT 0,
ref_count INTEGER NOT NULL DEFAULT 0,
author TEXT NOT NULL DEFAULT '',
affected_paths TEXT NOT NULL DEFAULT '[]',
tags TEXT NOT NULL DEFAULT '[]',
created_at TEXT NOT NULL DEFAULT (datetime('now')),
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
)
`);
// The virtual table requires the sqlite-vec extension; creation is
// best-effort so the core schema still works when the extension failed to
// load (see tryLoadVecExtension) — search then falls back to keywords.
try {
db.exec(`
CREATE VIRTUAL TABLE IF NOT EXISTS memory_embeddings USING vec0(
uri TEXT PRIMARY KEY,
embedding float[${EMBEDDING_DIM}]
)
`);
} catch {}
db.exec(`CREATE INDEX IF NOT EXISTS idx_memories_type ON memories(type)`);
db.exec(`CREATE INDEX IF NOT EXISTS idx_memories_verified ON memories(verified)`);
db.exec(`CREATE INDEX IF NOT EXISTS idx_memories_stale ON memories(stale)`);
// Record the migration as applied.
db.exec("INSERT INTO schema_version (version) VALUES (1)");
}
125
// Best-effort load of the sqlite-vec extension; on failure we warn once and
// semantic search degrades to keyword matching.
function tryLoadVecExtension(db) {
  try {
    db.loadExtension(sqliteVec.getLoadablePath());
  } catch (err) {
    const message = err instanceof Error ? err.message : "unknown error";
    console.warn(`kontex: sqlite-vec extension not available (${message}). Semantic search will use keyword matching.`);
  }
}
134
// Index layout: DB at <root>/.kontex-index/index.db; embeddings are 384-dim
// (matches all-MiniLM-L6-v2). `dbInstance` is the process-wide singleton.
var INDEX_DIR = ".kontex-index", DB_FILENAME = "index.db", EMBEDDING_DIM = 384, dbInstance = null;
// No lazy state to initialize for this module; kept for uniform init_* wiring.
var init_db = () => {};
136
+
137
+ // src/storage/embeddings.ts
138
+ import { join as join3 } from "path";
139
+ import { homedir } from "os";
140
// Lazily loads the local transformer pipeline (singleton); concurrent callers
// share the in-flight `modelLoading` promise.
async function initEmbeddingModel(modelName = DEFAULT_MODEL) {
  if (pipeline) {
    return;
  }
  modelLoading ??= loadPipeline(modelName);
  pipeline = await modelLoading;
}
// Embeds `text` into a normalized, mean-pooled vector (typed-array data).
async function embed(text) {
  if (!pipeline) {
    await initEmbeddingModel();
  }
  const output = await pipeline(text, { pooling: "mean", normalize: true });
  return output.data;
}
// Builds the @xenova/transformers feature-extraction pipeline with a
// user-level model cache directory.
async function loadPipeline(modelName) {
  const { pipeline: createPipeline, env } = await import("@xenova/transformers");
  env.cacheDir = CACHE_DIR;
  env.allowLocalModels = true;
  return createPipeline("feature-extraction", modelName);
}
var DEFAULT_MODEL = "Xenova/all-MiniLM-L6-v2", CACHE_DIR, pipeline = null, modelLoading = null;
var init_embeddings = __esm(() => {
  CACHE_DIR = join3(homedir(), ".cache", "kontex", "models");
});
164
+
165
+ // src/memory/write.ts
166
// Lazy namespace object for src/memory/write.ts re-exports (live bindings via
// the bundler's __export helper).
var exports_write = {};
__export(exports_write, {
writeMemory: () => writeMemory,
logDecision: () => logDecision,
invalidateMemory: () => invalidateMemory
});
172
+ import { writeFileSync as writeFileSync2, readFileSync as readFileSync2, existsSync as existsSync3, mkdirSync as mkdirSync2, appendFileSync } from "fs";
173
+ import { join as join4, dirname } from "path";
174
+ import matter from "gray-matter";
175
// Persists a new memory entry as markdown under .context/ and mirrors it into
// SQLite. Runs the full quality gate first: secret scan -> dedup/conflict
// check -> confidence threshold. Returns { success, uri?, verified?, error? }.
async function writeMemory(entry, workspaceRoot, config) {
const logPath = join4(workspaceRoot, ".kontex-log", "quality.log");
// Hard block: never persist content that matches a secret pattern.
// NOTE(review): the scan runs even when config.secrets.scan is false —
// confirm whether that flag is meant to gate this call.
const secretResult = scanForSecrets(entry.content, config.secrets.extraPatterns);
if (secretResult.blocked) {
logQualityEvent(logPath, "BLOCKED", `Secret detected: ${secretResult.pattern}`);
logSecurityEvent(workspaceRoot, secretResult.pattern);
return { success: false, error: `Write blocked: content contains a secret (pattern: ${secretResult.pattern})` };
}
const db = getDatabase(workspaceRoot);
// Embedding-similarity dedup: "duplicate" rejects outright; "conflict" asks
// the caller to invalidate the older entry first.
const dedupResult = await dedupCheck(entry.content, db, config);
if (dedupResult.status === "duplicate") {
logQualityEvent(logPath, "DEDUP", `Duplicate of ${dedupResult.existing_uri}`);
return { success: false, error: `Duplicate: too similar to ${dedupResult.existing_uri}` };
}
if (dedupResult.status === "conflict") {
logQualityEvent(logPath, "CONFLICT", `Conflicts with ${dedupResult.existing_uri}`);
return { success: false, error: dedupResult.message, conflict: dedupResult };
}
if (entry.confidence < config.quality.minConfidence) {
logQualityEvent(logPath, "DISCARDED", `Confidence ${entry.confidence} below ${config.quality.minConfidence}`);
return { success: false, error: `Discarded: confidence ${entry.confidence} below minimum ${config.quality.minConfidence}` };
}
// High-confidence entries skip the manual verification step.
const verified = entry.confidence >= config.quality.autoVerifyThreshold;
const uri = generateUri(entry.type, entry.content);
const now = new Date().toISOString();
const author = await getGitAuthor(workspaceRoot);
const frontmatter = {
uri,
type: entry.type,
created: now,
updated: now,
author,
confidence: entry.confidence,
verified,
stale: false,
global: false,
affected_paths: entry.affected_paths ?? [],
ref_count: 0,
tags: extractTags(entry.content)
};
// L0 = one-line summary (first line, capped at 200 chars); L1 = full
// content, optionally prefixed with the "why memorable" rationale.
const l0 = entry.content.split(`
`)[0]?.slice(0, 200) ?? entry.content.slice(0, 200);
const l1 = entry.why_memorable ? `## Why

${entry.why_memorable}

${entry.content}` : entry.content;
const fileContent = matter.stringify(`# L0
${l0}

# L1
${l1}

# L2
[Session context]
`, frontmatter);
const filePath = join4(workspaceRoot, ".context", `${uri}.md`);
const fileDir = dirname(filePath);
if (!existsSync3(fileDir))
mkdirSync2(fileDir, { recursive: true });
writeFileSync2(filePath, fileContent, "utf-8");
// Mirror into SQLite (embedding upsert inside is best-effort).
await indexMemoryEntry(uri, entry.content, entry.type, verified, entry.confidence, entry.affected_paths ?? [], db, l0, l1, "");
logQualityEvent(logPath, "WRITTEN", `${uri} (verified: ${verified})`);
return { success: true, uri, verified };
}
240
// Marks a memory entry stale in both its markdown frontmatter and the SQLite
// index, and records the reason in the quality log.
async function invalidateMemory(uri, reason, workspaceRoot) {
  const filePath = join4(workspaceRoot, ".context", `${uri}.md`);
  if (!existsSync3(filePath)) {
    return { success: false, error: `Entry not found: ${uri}` };
  }
  const parsed = matter(readFileSync2(filePath, "utf-8"));
  parsed.data.stale = true;
  parsed.data.updated = new Date().toISOString();
  writeFileSync2(filePath, matter.stringify(parsed.content, parsed.data), "utf-8");
  getDatabase(workspaceRoot)
    .prepare("UPDATE memories SET stale = 1, updated_at = datetime('now') WHERE uri = ?")
    .run(uri);
  logQualityEvent(join4(workspaceRoot, ".kontex-log", "quality.log"), "INVALIDATED", `${uri}: ${reason}`);
  return { success: true, uri };
}
254
// Writes an ADR-style decision record to .context/memory/decisions/NNN-slug.md
// with fixed confidence 0.95 and verified=true, then indexes it in SQLite.
async function logDecision(adr, workspaceRoot, _config) {
const decisionsDir = join4(workspaceRoot, ".context", "memory", "decisions");
if (!existsSync3(decisionsDir))
mkdirSync2(decisionsDir, { recursive: true });
const { readdirSync } = await import("fs");
// NOTE(review): numbering is count-based, so deleting an older ADR can make
// the next number collide with an existing file — confirm intended.
const existing = readdirSync(decisionsDir).filter((f) => f.endsWith(".md"));
const nextNum = String(existing.length + 1).padStart(3, "0");
const slug = slugify(adr.title);
const uri = `memory/decisions/${nextNum}-${slug}`;
const now = new Date().toISOString();
const author = await getGitAuthor(workspaceRoot);
const frontmatter = {
uri,
type: "decision",
created: now,
updated: now,
author,
confidence: 0.95,
verified: true,
stale: false,
global: false,
affected_paths: adr.affected_paths ?? [],
ref_count: 0,
tags: extractTags(adr.title + " " + adr.decision)
};
// Optional ADR sections are rendered only when supplied.
const alternativesSection = adr.alternatives?.length ? `

## Alternatives considered
${adr.alternatives.map((a) => `- ${a}`).join(`
`)}` : "";
const consequencesSection = adr.consequences ? `

## Consequences
${adr.consequences}` : "";
const l0 = `${adr.title}. Decision: ${now.slice(0, 10)}.`;
const body = `# L0
${l0}

# L1
## Context
${adr.context}

## Decision
${adr.decision}

## Rationale
${adr.rationale}${alternativesSection}${consequencesSection}
`;
const fileContent = matter.stringify(body, frontmatter);
const filePath = join4(workspaceRoot, ".context", `${uri}.md`);
const fileDir = dirname(filePath);
if (!existsSync3(fileDir))
mkdirSync2(fileDir, { recursive: true });
writeFileSync2(filePath, fileContent, "utf-8");
const db = getDatabase(workspaceRoot);
// Title + decision + context form the searchable content for the index.
const adrContent = `${adr.title} ${adr.decision} ${adr.context}`;
await indexMemoryEntry(uri, adrContent, "decision", true, 0.95, adr.affected_paths ?? [], db, l0, body, "");
return { success: true, uri, verified: true };
}
313
// Compares `content` against the 5 nearest stored embeddings. Similarity
// above deduplicateThreshold -> "duplicate"; above contradictionThreshold ->
// "conflict" (caller must invalidate the older entry first); otherwise
// "clear". Any failure (embedding model unavailable, vec extension missing,
// empty index) deliberately degrades to "clear" so writes are never blocked
// by infrastructure problems.
async function dedupCheck(content, db, config) {
try {
const embedding = await embed(content);
const embeddingBuffer = Buffer.from(embedding.buffer);
const similar = db.prepare(`
SELECT m.uri, m.content, m.verified, m.confidence, vec_distance_cosine(e.embedding, ?) AS distance
FROM memory_embeddings e JOIN memories m ON m.uri = e.uri ORDER BY distance ASC LIMIT 5
`).all(embeddingBuffer);
if (!similar.length)
return { status: "clear" };
const top = similar[0];
// Cosine distance -> similarity for normalized embeddings.
const similarity = 1 - top.distance;
if (similarity > config.quality.deduplicateThreshold)
return { status: "duplicate", existing_uri: top.uri };
if (similarity > config.quality.contradictionThreshold)
return { status: "conflict", existing_uri: top.uri, existing_content: top.content, message: `Similar memory exists at ${top.uri}. If this supersedes it, call kontex_invalidate first.` };
return { status: "clear" };
} catch {
return { status: "clear" };
}
}
334
// Upserts the entry row into `memories`, then best-effort upserts its
// embedding (silently skipped when the model or vec table is unavailable).
async function indexMemoryEntry(uri, content, type, verified, confidence, affectedPaths, db, l0 = "", l1 = "", l2 = "") {
  const upsert = db.prepare(`INSERT OR REPLACE INTO memories (uri, content, type, l0, l1, l2, verified, confidence, affected_paths, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'))`);
  upsert.run(uri, content, type, l0, l1, l2, verified ? 1 : 0, confidence, JSON.stringify(affectedPaths));
  try {
    const vector = await embed(content);
    db.prepare(`INSERT OR REPLACE INTO memory_embeddings (uri, embedding) VALUES (?, ?)`).run(uri, Buffer.from(vector.buffer));
  } catch {}
}
341
// Builds a unique URI of the form memory/<type>s/<slug>-<base36 timestamp>,
// sluggified from the first 60 chars of the entry's first line.
function generateUri(type, content) {
  const firstLine = content.split("\n")[0]?.slice(0, 60) ?? "entry";
  return `memory/${type}s/${slugify(firstLine)}-${Date.now().toString(36)}`;
}
346
// Lowercases, collapses runs of non-alphanumerics into single dashes, trims
// leading/trailing dashes, and caps the result at 50 characters.
function slugify(text) {
  const collapsed = text.toLowerCase().replace(/[^a-z0-9]+/g, "-");
  return collapsed.replace(/^-+|-+$/g, "").slice(0, 50);
}
349
// Picks the 5 most frequent "interesting" words (4..19 chars, alphanumeric)
// from `content` as tags, most frequent first.
function extractTags(content) {
  const counts = new Map();
  const normalized = content.toLowerCase().replace(/[^a-z0-9\s-]/g, " ");
  for (const word of normalized.split(/\s+/)) {
    if (word.length > 3 && word.length < 20) {
      counts.set(word, (counts.get(word) ?? 0) + 1);
    }
  }
  return [...counts.entries()].sort((a, b) => b[1] - a[1]).slice(0, 5).map(([word]) => word);
}
356
// Resolves the author from `git config user.email`; "unknown" when git is
// unavailable or the email is unset.
async function getGitAuthor(workspaceRoot) {
  try {
    const proc = Bun.spawn(["git", "config", "user.email"], { cwd: workspaceRoot, stdout: "pipe" });
    const email = (await new Response(proc.stdout).text()).trim();
    return email === "" ? "unknown" : email;
  } catch {
    return "unknown";
  }
}
364
// Appends a timestamped "[ISO] EVENT: message" line to the quality log,
// creating the log directory on first use.
function logQualityEvent(logPath, event, message) {
  const logDir = dirname(logPath);
  if (!existsSync3(logDir)) {
    mkdirSync2(logDir, { recursive: true });
  }
  const line = `[${new Date().toISOString()}] ${event}: ${message}\n`;
  appendFileSync(logPath, line, "utf-8");
}
371
// Records a blocked-secret event in .kontex-log/security.log — the pattern
// name only, never the matched content.
function logSecurityEvent(workspaceRoot, pattern) {
logQualityEvent(join4(workspaceRoot, ".kontex-log", "security.log"), "SECRET_BLOCKED", pattern);
}
374
// One-shot module initializer: ensures the write path's dependencies
// (secret patterns, embedding cache dir, db constants) are set up.
var init_write = __esm(() => {
init_secrets();
init_embeddings();
init_db();
});
379
+
380
+ // src/llm.ts
381
// Lazy namespace object for src/llm.ts re-exports.
var exports_llm = {};
__export(exports_llm, {
createLLMModel: () => createLLMModel
});
385
// Instantiates an OpenAI-compatible chat model for the configured provider.
// Returns null for "anthropic" (not wired up here), "none", and any
// unrecognized provider.
async function createLLMModel(config, token) {
  const { createOpenAI } = await import("@ai-sdk/openai");
  const { provider, model } = config.llm;
  if (provider === "github-models") {
    // GitHub Models speaks the OpenAI protocol behind an Azure endpoint.
    return createOpenAI({
      baseURL: "https://models.inference.ai.azure.com",
      apiKey: token ?? ""
    })(model);
  }
  if (provider === "openai") {
    return createOpenAI({
      apiKey: config.llm.apiKey ?? process.env.OPENAI_API_KEY ?? ""
    })(model);
  }
  if (provider === "ollama") {
    // Local Ollama exposes an OpenAI-compatible endpoint; key is a placeholder.
    return createOpenAI({
      baseURL: "http://localhost:11434/v1",
      apiKey: "ollama"
    })(model);
  }
  return null;
}
410
+
411
+ // src/config.ts
412
+ import { readFileSync, writeFileSync, existsSync } from "fs";
413
+ import { join } from "path";
414
// Baseline configuration; values from kontex.config.json are deep-merged over
// these defaults (see loadConfig).
var DEFAULT_CONFIG = {
compile: {
tokenBudget: 3000, // max tokens written into KONTEX.md
alwaysInclude: ["memory/project.md"],
excludePaths: ["memory/sessions/archive/"]
},
embedding: {
provider: "local",
model: "Xenova/all-MiniLM-L6-v2"
},
llm: {
provider: "github-models",
model: "gpt-4o-mini"
},
quality: {
minConfidence: 0.6, // below this, writes are discarded
autoVerifyThreshold: 0.85, // at/above this, entries skip manual verification
deduplicateThreshold: 0.92, // similarity that counts as a duplicate
contradictionThreshold: 0.75 // similarity that flags a conflict
},
hooks: {
postCommitExtract: true,
postMergeRecompile: true,
maxBackgroundRetries: 2
},
secrets: {
scan: true,
extraPatterns: []
},
decay: {
sessionArchiveDays: 7,
unverifiedExpireDays: 30,
maxSessionsDirKB: 500
}
};
var CONFIG_FILENAME = "kontex.config.json";
450
// Expands ${VAR} placeholders using process.env; unset variables become "".
function resolveEnvVars(value) {
  return value.replace(/\$\{(\w+)\}/g, (_, envKey) => process.env[envKey] ?? "");
}
455
// Recursively merges `overrides` onto `defaults` without mutating either.
// Plain nested objects merge key-by-key; arrays, scalars, and null replace
// wholesale; `undefined` override values are ignored.
// Fixes two defects in the original: (1) a null override of an object default
// passed the `typeof === "object"` check and crashed on Object.keys(null);
// (2) prototype-polluting keys ("__proto__"/"constructor"/"prototype") from
// the user-editable JSON config were assigned straight into the result.
function deepMerge(defaults, overrides) {
  const result = { ...defaults };
  for (const key of Object.keys(overrides)) {
    if (key === "__proto__" || key === "constructor" || key === "prototype") {
      continue; // never honor prototype-polluting keys from config JSON
    }
    const overrideVal = overrides[key];
    if (overrideVal === undefined) {
      continue;
    }
    const defaultVal = defaults[key];
    const bothPlainObjects =
      typeof overrideVal === "object" && overrideVal !== null && !Array.isArray(overrideVal) &&
      typeof defaultVal === "object" && defaultVal !== null && !Array.isArray(defaultVal);
    result[key] = bothPlainObjects ? deepMerge(defaultVal, overrideVal) : overrideVal;
  }
  return result;
}
468
// Loads kontex.config.json from the workspace, deep-merged over
// DEFAULT_CONFIG. A missing file yields defaults silently; a malformed file
// yields defaults WITH a warning — the previous silent catch masked config
// typos entirely.
function loadConfig(workspaceRoot) {
  const configPath = join(workspaceRoot, CONFIG_FILENAME);
  if (!existsSync(configPath)) {
    return { ...DEFAULT_CONFIG };
  }
  try {
    const parsed = JSON.parse(readFileSync(configPath, "utf-8"));
    const merged = deepMerge(DEFAULT_CONFIG, parsed);
    // Allow ${ENV_VAR} placeholders so the API key never has to be committed.
    if (merged.llm && merged.llm.apiKey) {
      merged.llm.apiKey = resolveEnvVars(merged.llm.apiKey);
    }
    return merged;
  } catch (err) {
    const reason = err instanceof Error ? err.message : "unknown error";
    console.warn(`kontex: failed to read ${CONFIG_FILENAME} (${reason}); using defaults.`);
    return { ...DEFAULT_CONFIG };
  }
}
485
// Serializes `config` to kontex.config.json (pretty-printed, trailing
// newline).
function writeConfig(workspaceRoot, config) {
  const configPath = join(workspaceRoot, CONFIG_FILENAME);
  writeFileSync(configPath, `${JSON.stringify({ ...config }, null, 2)}\n`, "utf-8");
}
491
+
492
+ // src/index.ts
493
+ init_secrets();
494
+
495
+ // src/auth.ts
496
+ import keytar from "keytar";
497
// OS-keychain slot for the GitHub OAuth token, and the OAuth app's public
// client id (client ids are not secrets).
var KEYCHAIN_SERVICE = "kontex";
var KEYCHAIN_ACCOUNT = "github-oauth";
var GITHUB_CLIENT_ID = "Ov23liMXcybhETe03nNJ";
500
// Runs the GitHub OAuth device flow: prints the verification URL and user
// code, opens the browser (best-effort), waits for authorization, stores the
// token in the OS keychain, and returns the authenticated username.
async function login() {
const { createOAuthDeviceAuth } = await import("@octokit/auth-oauth-device");
const auth = createOAuthDeviceAuth({
clientType: "oauth-app",
clientId: GITHUB_CLIENT_ID,
scopes: [],
onVerification: (verification) => {
console.log(`
Visit: ${verification.verification_uri}`);
console.log(`Code: ${verification.user_code}
`);
console.log(`Waiting for authorization...
`);
// Browser launch is a convenience; failure is ignored because the URL
// was already printed above.
openBrowser(verification.verification_uri).catch(() => {});
}
});
const { token } = await auth({ type: "oauth" });
await keytar.setPassword(KEYCHAIN_SERVICE, KEYCHAIN_ACCOUNT, token);
const username = await fetchGitHubUsername(token);
console.log(`\u2713 Authenticated as @${username}`);
console.log("\u2713 GitHub Models access confirmed");
return username;
}
523
// Removes the stored OAuth token from the keychain and reports whether one
// existed.
async function logout() {
  const removed = await keytar.deletePassword(KEYCHAIN_SERVICE, KEYCHAIN_ACCOUNT);
  if (removed) {
    console.log("\u2713 Token removed from keychain");
  } else {
    console.log("No token found in keychain");
  }
}
527
// Reads the stored OAuth token (null when absent).
async function getToken() {
  return keytar.getPassword(KEYCHAIN_SERVICE, KEYCHAIN_ACCOUNT);
}
// True when a token is present in the keychain (does not validate it).
async function isAuthenticated() {
  return (await getToken()) !== null;
}
534
// Looks up the authenticated user's login via the GitHub REST API.
// Throws on any non-2xx response.
async function fetchGitHubUsername(token) {
  const response = await fetch("https://api.github.com/user", {
    headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" }
  });
  if (!response.ok) {
    throw new Error(`GitHub API returned ${response.status}`);
  }
  const payload = await response.json();
  return payload.login;
}
543
// Opens `url` in the platform's default browser. On Windows, `start` treats
// its first QUOTED argument as the window title, so an empty title must come
// before the URL — the previous `start "${url}"` opened a console window
// titled with the URL instead of the browser.
async function openBrowser(url) {
  const { exec } = await import("child_process");
  const command =
    process.platform === "darwin" ? `open "${url}"` :
    process.platform === "win32" ? `start "" "${url}"` :
    `xdg-open "${url}"`;
  return new Promise((resolve, reject) => {
    exec(command, (error) => {
      if (error)
        reject(error);
      else
        resolve();
    });
  });
}
555
+
556
+ // src/index.ts
557
+ init_db();
558
+ init_embeddings();
559
+ init_write();
560
+
561
+ // src/memory/read.ts
562
+ init_embeddings();
563
+ init_db();
564
+ import { readFileSync as readFileSync3, readdirSync, existsSync as existsSync4 } from "fs";
565
+ import { join as join5, relative } from "path";
566
+ import matter2 from "gray-matter";
567
// Semantic search over stored memories; degrades to keyword matching when the
// embedding model or vec extension is unavailable (vectorSearch throws).
async function findMemories(query, limit = 5, workspaceRoot) {
  const db = getDatabase(workspaceRoot);
  try {
    return await vectorSearch(query, limit, db);
  } catch {
    // Fallback path: LIKE-based keyword search.
    return keywordSearch(query, limit, db);
  }
}
575
// Parses every .md memory file under .context/memory, recursively.
function loadAllEntries(workspaceRoot) {
  const memoryDir = join5(workspaceRoot, ".context", "memory");
  if (!existsSync4(memoryDir)) {
    return [];
  }
  const entries = [];
  collectEntries(memoryDir, workspaceRoot, entries);
  return entries;
}
583
// Loads a single memory entry by URI; null when the file does not exist
// (or fails to parse — parseMemoryFile also returns null on error).
function loadEntry(uri, workspaceRoot) {
  const filePath = join5(workspaceRoot, ".context", `${uri}.md`);
  if (!existsSync4(filePath)) {
    return null;
  }
  return parseMemoryFile(filePath, workspaceRoot);
}
587
// k-NN over stored embeddings (cosine distance), excluding stale entries.
// Throws when embeddings or the vec extension are unavailable; callers fall
// back to keywordSearch.
async function vectorSearch(query, limit, db) {
  const queryEmbedding = await embed(query);
  const rows = db.prepare(`
SELECT m.uri, m.content, m.type, m.verified, vec_distance_cosine(e.embedding, ?) AS distance
FROM memory_embeddings e JOIN memories m ON m.uri = e.uri WHERE m.stale = 0 ORDER BY distance ASC LIMIT ?
`).all(Buffer.from(queryEmbedding.buffer), limit);
  return rows.map((row) => ({
    uri: row.uri,
    type: row.type,
    content: row.content,
    similarity: 1 - row.distance,
    verified: row.verified === 1,
    tier: "l1"
  }));
}
602
// LIKE-based fallback search: every keyword must appear (AND), ranked by
// ref_count then confidence. The synthetic similarity score decreases by
// rank and is now clamped at 0 — previously a limit above 10 produced
// negative similarities.
function keywordSearch(query, limit, db) {
  const keywords = query.toLowerCase().split(/\s+/).filter(Boolean);
  if (keywords.length === 0) {
    return [];
  }
  const conditions = keywords.map(() => "LOWER(content) LIKE ?").join(" AND ");
  const params = keywords.map((k) => `%${k}%`);
  const results = db.prepare(`
SELECT uri, content, type, verified, confidence FROM memories
WHERE stale = 0 AND (${conditions}) ORDER BY ref_count DESC, confidence DESC LIMIT ?
`).all(...params, limit);
  return results.map((row, i) => ({
    uri: row.uri,
    type: row.type,
    content: row.content,
    similarity: Math.max(0, 1 - i * 0.1),
    verified: row.verified === 1,
    tier: "l1"
  }));
}
621
// Recursively accumulates parsed .md entries from `dir` into `entries`
// (unparseable files are skipped — parseMemoryFile returns null).
function collectEntries(dir, workspaceRoot, entries) {
  for (const item of readdirSync(dir, { withFileTypes: true })) {
    const fullPath = join5(dir, item.name);
    if (item.isDirectory()) {
      collectEntries(fullPath, workspaceRoot, entries);
      continue;
    }
    if (!item.name.endsWith(".md")) {
      continue;
    }
    const parsed = parseMemoryFile(fullPath, workspaceRoot);
    if (parsed) {
      entries.push(parsed);
    }
  }
}
633
// Parses a memory .md file (gray-matter frontmatter + tiered body) into an
// entry object, defaulting all missing metadata. Returns null on any read or
// parse error so directory scans keep going.
function parseMemoryFile(filePath, workspaceRoot) {
try {
const raw = readFileSync3(filePath, "utf-8");
const parsed = matter2(raw);
const data = parsed.data;
const { l0, l1, l2 } = splitTiers(parsed.content);
// URI falls back to the path relative to .context, minus the extension.
const relPath = relative(join5(workspaceRoot, ".context"), filePath).replace(/\.md$/, "");
return {
uri: data.uri ?? relPath,
type: data.type ?? "convention",
content: parsed.content,
created: data.created ?? new Date().toISOString(),
updated: data.updated ?? new Date().toISOString(),
author: data.author ?? "unknown",
confidence: data.confidence ?? 0.5,
verified: data.verified ?? false,
stale: data.stale ?? false,
global: data.global ?? false,
affected_paths: data.affected_paths ?? [],
ref_count: data.ref_count ?? 0,
tags: data.tags ?? [],
l0,
l1,
l2
};
} catch {
return null;
}
}
662
// Splits a memory body on its "# L0" / "# L1" / "# L2" headings into the
// three tiers. A body with no tier headings falls back to l0 = first line
// (capped at 200 chars) and l1 = the whole trimmed content.
function splitTiers(content) {
  const parts = content.split(/^# (L[012])\s*$/m);
  const tiers = { l0: "", l1: "", l2: "" };
  parts.forEach((part, i) => {
    const marker = part?.trim();
    const next = parts[i + 1];
    if (next && (marker === "L0" || marker === "L1" || marker === "L2")) {
      tiers[marker.toLowerCase()] = next.trim();
    }
  });
  if (!tiers.l0 && !tiers.l1 && !tiers.l2) {
    const trimmed = content.trim();
    tiers.l0 = trimmed.split("\n")[0]?.slice(0, 200) ?? "";
    tiers.l1 = trimmed;
  }
  return { l0: tiers.l0, l1: tiers.l1, l2: tiers.l2 };
}
681
+ // src/memory/compile.ts
682
+ import { writeFileSync as writeFileSync3 } from "fs";
683
+ import { join as join6 } from "path";
684
+ import { encoding_for_model } from "tiktoken";
685
// Lazily constructed tiktoken encoder. When tiktoken fails to initialize,
// returns (without caching) a stand-in whose encode() length approximates
// ~4 characters per token.
var cachedEncoder = null;
function getEncoder() {
  if (cachedEncoder) {
    return cachedEncoder;
  }
  try {
    cachedEncoder = encoding_for_model("gpt-4o-mini");
  } catch {
    return { encode: (text) => new Uint32Array(Math.ceil(text.length / 4)) };
  }
  return cachedEncoder;
}
696
// Compiles memory into .context/KONTEX.md within the configured token budget:
// system prompt + L0 index of all verified entries always; L1 detail for
// entries that are global, recently touched, or created in the last 7 days
// (highest ref_count first, until the budget is hit); then an L2 footer
// listing what was left out.
async function compile(workspaceRoot, config) {
const allEntries = loadAllEntries(workspaceRoot);
const recentFiles = await getRecentlyModifiedFiles(workspaceRoot, 7);
let tokenCount = 0;
const sections = [];
const systemPrompt = buildSystemPrompt();
sections.push(systemPrompt);
tokenCount += estimateTokens(systemPrompt);
const verifiedEntries = allEntries.filter((e) => e.verified && !e.stale);
const l0Section = buildL0Index(verifiedEntries);
sections.push(l0Section);
tokenCount += estimateTokens(l0Section);
const relevant = verifiedEntries.filter((e) => e.global || isRecentlyTouched(e, recentFiles) || isRecentlyCreated(e, 7)).sort((a, b) => b.ref_count - a.ref_count);
const includedUris = new Set;
for (const entry of relevant) {
const l1Content = formatL1Section(entry);
const tokens = estimateTokens(l1Content);
// Budget is checked before inclusion, so the output never exceeds it.
if (tokenCount + tokens > config.compile.tokenBudget)
break;
sections.push(l1Content);
tokenCount += tokens;
includedUris.add(entry.uri);
}
const l2Available = allEntries.filter((e) => !e.stale && !includedUris.has(e.uri));
if (l2Available.length > 0)
sections.push(buildL2Footer(l2Available));
const output = sections.join(`

---

`);
writeFileSync3(join6(workspaceRoot, ".context", "KONTEX.md"), output, "utf-8");
}
729
// Static preamble written at the top of KONTEX.md instructing the agent how
// and when to use the kontex_* tools. (Runtime string — do not reword.)
function buildSystemPrompt() {
return `## Project memory (kontex)

You have access to a persistent memory store for this codebase.

**On session start:** Read KONTEX.md in this workspace before answering
any codebase questions. It contains compiled project context.

**Write proactively, not constantly.** Call kontex_remember only for:
- Architectural decisions that were committed to (not just discussed)
- Non-obvious constraints or gotchas discovered in the code
- Patterns established that apply across the codebase
- Conventions confirmed by the developer

Never call kontex_remember for routine edits or things already in KONTEX.md.

**Correct stale memory immediately.** If KONTEX.md contains something
outdated, call kontex_invalidate on that entry, then kontex_remember with
the correction.

**Search before answering.** For architecture and convention questions,
call kontex_find before responding. Do not reconstruct from code what
memory already knows.`;
}
753
// One bullet per verified entry: URI plus its L0 summary (or, lacking one,
// the first 100 chars of the first content line).
function buildL0Index(entries) {
if (entries.length === 0)
return `## Memory index (L0)

No memory entries yet.`;
const lines = entries.map((e) => `- \`${e.uri}\` \u2014 ${e.l0 || e.content.split(`
`)[0]?.slice(0, 100)}`);
return `## Memory index (L0)

${lines.join(`
`)}`;
}
765
// Full-detail section for one entry: heading with URI/type, then the L1 body
// (falling back to the raw content).
function formatL1Section(entry) {
return `### ${entry.uri} (${entry.type})

${entry.l1 || entry.content}`;
}
770
// Footer listing up to 20 not-included entries, each with a suggested
// kontex_find search phrase (L0 summary or first 80 chars).
function buildL2Footer(entries) {
const lines = entries.slice(0, 20).map((e) => `- \`${e.uri}\` \u2014 search "${e.l0 || e.content.split(`
`)[0]?.slice(0, 80)}"`);
return `## Available detail

The following entries have full detail accessible via kontex_find:
${lines.join(`
`)}`;
}
779
// True when any of the entry's affected paths overlaps (prefix match in
// either direction) with a recently modified file.
function isRecentlyTouched(entry, recentFiles) {
  if (entry.affected_paths.length === 0 || recentFiles.length === 0) {
    return false;
  }
  const overlaps = (ap, rf) => rf.startsWith(ap) || ap.startsWith(rf);
  return entry.affected_paths.some((ap) => recentFiles.some((rf) => overlaps(ap, rf)));
}
784
// True when entry.created falls within the last `days` days.
function isRecentlyCreated(entry, days) {
  const threshold = new Date();
  threshold.setDate(threshold.getDate() - days);
  const createdAt = new Date(entry.created);
  return createdAt >= threshold;
}
789
// Unique file paths touched by commits within the last `days` days, taken
// from `git log --name-only`; empty when git is unavailable.
async function getRecentlyModifiedFiles(workspaceRoot, days) {
  try {
    const proc = Bun.spawn(["git", "log", "--since", `${days}.days.ago`, "--name-only", "--pretty=format:"], { cwd: workspaceRoot, stdout: "pipe" });
    const output = await new Response(proc.stdout).text();
    const files = output.split("\n").map((line) => line.trim()).filter(Boolean);
    return [...new Set(files)];
  } catch {
    return [];
  }
}
799
// Token count for `text` via the tiktoken encoder; falls back to the
// ~4-characters-per-token heuristic when encoding fails.
function estimateTokens(text) {
  try {
    const encoder = getEncoder();
    return encoder.encode(text).length;
  } catch {
    return Math.ceil(text.length / 4);
  }
}
806
+ // src/memory/decay.ts
807
+ import { readFileSync as readFileSync4, writeFileSync as writeFileSync4, readdirSync as readdirSync2, statSync, existsSync as existsSync5, mkdirSync as mkdirSync3 } from "fs";
808
+ import { join as join7, basename } from "path";
809
+ import matter3 from "gray-matter";
810
// Maintenance sweep over .context/memory: promote well-referenced entries to
// verified, expire unused unverified entries, archive old sessions, flag
// stale entries, and enforce the sessions-directory size cap. Returns the
// URIs affected by each pass.
async function runDecay(workspaceRoot, config) {
const result = { promoted: [], expired: [], archived: [], flaggedStale: [] };
const memoryDir = join7(workspaceRoot, ".context", "memory");
if (!existsSync5(memoryDir))
return result;
await promoteEntries(memoryDir, result);
await expireEntries(memoryDir, config.decay.unverifiedExpireDays, result);
await archiveSessions(memoryDir, config.decay.sessionArchiveDays, result);
await flagStaleEntries(memoryDir, workspaceRoot, result);
await enforceSessionsSizeCap(memoryDir, config.decay.maxSessionsDirKB, result);
return result;
}
822
// Promote unverified memory entries to verified once their ref_count has
// reached 3, stamping a fresh `updated` timestamp. Per-file failures
// (unreadable file, bad front matter) are silently skipped, best-effort.
async function promoteEntries(memoryDir, result) {
  const PROMOTE_THRESHOLD = 3;
  for (const filePath of collectMemoryFiles(memoryDir)) {
    try {
      const parsed = matter3(readFileSync4(filePath, "utf-8"));
      const refs = parsed.data.ref_count ?? 0;
      if (parsed.data.verified || refs < PROMOTE_THRESHOLD)
        continue;
      parsed.data.verified = true;
      parsed.data.updated = new Date().toISOString();
      writeFileSync4(filePath, matter3.stringify(parsed.content, parsed.data), "utf-8");
      result.promoted.push(parsed.data.uri ?? basename(filePath));
    } catch {}
  }
}
836
// Mark unverified, never-referenced entries as stale once they have gone
// `expireDays` days without an update (`created` is the fallback when the
// entry was never updated). Per-file failures are silently skipped.
async function expireEntries(memoryDir, expireDays, result) {
  const cutoff = new Date();
  cutoff.setDate(cutoff.getDate() - expireDays);
  for (const filePath of collectMemoryFiles(memoryDir)) {
    try {
      const parsed = matter3(readFileSync4(filePath, "utf-8"));
      const { data } = parsed;
      const lastTouched = new Date(data.updated ?? data.created);
      const isCandidate = !data.verified && (data.ref_count ?? 0) === 0;
      // !(lastTouched < cutoff) rather than >=: invalid dates (NaN) must
      // not count as expired, matching the original strict-< comparison.
      if (!isCandidate || !(lastTouched < cutoff))
        continue;
      data.stale = true;
      data.updated = new Date().toISOString();
      writeFileSync4(filePath, matter3.stringify(parsed.content, data), "utf-8");
      result.expired.push(data.uri ?? basename(filePath));
    } catch {}
  }
}
852
// Move session notes older than `archiveDays` into sessions/archive/: the
// full content is copied into the archive, and the original file is
// replaced with a small front-matter stub pointing at the archived copy so
// its path stays resolvable. Archived filenames are recorded in
// result.archived; per-file errors are ignored (best-effort).
async function archiveSessions(memoryDir, archiveDays, result) {
  const sessionsDir = join7(memoryDir, "sessions");
  const archiveDir = join7(sessionsDir, "archive");
  if (!existsSync5(sessionsDir))
    return;
  if (!existsSync5(archiveDir))
    mkdirSync3(archiveDir, { recursive: true });
  const cutoff = new Date;
  cutoff.setDate(cutoff.getDate() - archiveDays);
  for (const file of readdirSync2(sessionsDir, { withFileTypes: true })) {
    // Only top-level markdown session files; directories (incl. archive/)
    // and non-.md files are skipped.
    if (!file.isFile() || !file.name.endsWith(".md"))
      continue;
    const filePath = join7(sessionsDir, file.name);
    try {
      // Filesystem mtime (not front-matter dates) decides the file's age.
      if (statSync(filePath).mtime < cutoff) {
        writeFileSync4(join7(archiveDir, file.name), readFileSync4(filePath, "utf-8"), "utf-8");
        writeFileSync4(filePath, `---
archived: true
archived_at: ${new Date().toISOString()}
---
Archived to archive/${file.name}
`, "utf-8");
        result.archived.push(file.name);
      }
    } catch {}
  }
}
879
// Flag entries whose affected paths have ALL disappeared from the
// workspace. Entries that are already stale, or that track no paths at
// all, are left untouched. Per-file failures are silently skipped.
async function flagStaleEntries(memoryDir, workspaceRoot, result) {
  for (const filePath of collectMemoryFiles(memoryDir)) {
    try {
      const parsed = matter3(readFileSync4(filePath, "utf-8"));
      if (parsed.data.stale)
        continue;
      const tracked = parsed.data.affected_paths ?? [];
      const anyAlive = tracked.some((p) => existsSync5(join7(workspaceRoot, p)));
      if (tracked.length === 0 || anyAlive)
        continue;
      parsed.data.stale = true;
      parsed.data.updated = new Date().toISOString();
      writeFileSync4(filePath, matter3.stringify(parsed.content, parsed.data), "utf-8");
      result.flaggedStale.push(parsed.data.uri ?? basename(filePath));
    } catch {}
  }
}
896
// Shrink .context/memory/sessions down to `maxSizeKB` by archiving the
// oldest session files first: copy the full content into sessions/archive/
// and replace the original with a small stub. Size accounting counts the
// top-level files only (matching getDirSize). Archived names are appended
// to result.archived.
async function enforceSessionsSizeCap(memoryDir, maxSizeKB, result) {
  const sessionsDir = join7(memoryDir, "sessions");
  if (!existsSync5(sessionsDir))
    return;
  let totalKB = getDirSize(sessionsDir) / 1024;
  if (totalKB <= maxSizeKB)
    return;
  const archiveDir = join7(sessionsDir, "archive");
  if (!existsSync5(archiveDir))
    mkdirSync3(archiveDir, { recursive: true });
  // Stat each file exactly once, capturing both mtime (eviction order) and
  // size (accounting). The original re-statted inside the eviction loop
  // with no try/catch, so a file deleted mid-pass would throw and abort
  // the entire decay run — unlike every other step in this module.
  const files = readdirSync2(sessionsDir, { withFileTypes: true })
    .filter((f) => f.isFile() && f.name.endsWith(".md"))
    .map((f) => {
      const path = join7(sessionsDir, f.name);
      const stat = statSync(path);
      return { name: f.name, path, mtime: stat.mtime, size: stat.size };
    })
    .sort((a, b) => a.mtime.getTime() - b.mtime.getTime());
  for (const file of files) {
    if (totalKB <= maxSizeKB)
      break;
    try {
      totalKB -= file.size / 1024;
      writeFileSync4(join7(archiveDir, file.name), readFileSync4(file.path, "utf-8"), "utf-8");
      writeFileSync4(file.path, `---
archived: true
---
Compressed due to size cap.
`, "utf-8");
      result.archived.push(file.name);
    } catch {}
  }
}
920
// Recursively gather all .md files under `dir`, skipping any directory
// named "archive" at any depth.
function collectMemoryFiles(dir) {
  const found = [];
  for (const entry of readdirSync2(dir, { withFileTypes: true })) {
    const entryPath = join7(dir, entry.name);
    if (entry.isDirectory()) {
      if (entry.name !== "archive")
        found.push(...collectMemoryFiles(entryPath));
    } else if (entry.isFile() && entry.name.endsWith(".md")) {
      found.push(entryPath);
    }
  }
  return found;
}
931
// Total size in bytes of the regular files directly inside `dir`.
// Non-recursive: subdirectories (e.g. archive/) are not counted.
function getDirSize(dir) {
  return readdirSync2(dir, { withFileTypes: true })
    .filter((item) => item.isFile())
    .reduce((sum, item) => sum + statSync(join7(dir, item.name)).size, 0);
}
939
+ // src/mcp/server.ts
940
+ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
941
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
942
+ import { CallToolRequestSchema, ListToolsRequestSchema } from "@modelcontextprotocol/sdk/types.js";
943
+
944
+ // src/mcp/tools.ts
945
// MCP tool manifest advertised by the kontex server. Each description
// doubles as a usage policy telling the calling model when (and when not)
// to invoke the tool; the inputSchema objects are plain JSON Schema
// consumed by MCP clients for argument validation.
var TOOLS = [
  {
    // Persist a durable, categorized memory about the codebase.
    name: "kontex_remember",
    description: `Store a memory about this codebase.
Call when: architectural decision made, non-obvious constraint discovered,
pattern established, convention confirmed by developer.
Do NOT call for: routine edits, temporary context, things already in KONTEX.md.`,
    inputSchema: {
      type: "object",
      properties: {
        content: { type: "string", description: "The memory content to store" },
        type: { type: "string", enum: ["decision", "pattern", "gotcha", "convention", "resource"], description: "Category of memory" },
        why_memorable: { type: "string", description: "Why this outlives the current session" },
        confidence: { type: "number", minimum: 0, maximum: 1, description: "Confidence level (0.0 - 1.0)" },
        affected_paths: { type: "array", items: { type: "string" }, description: "File paths this memory is relevant to" }
      },
      required: ["content", "type", "why_memorable", "confidence"]
    }
  },
  {
    // Mark an existing memory as wrong/outdated; a correction should follow.
    name: "kontex_invalidate",
    description: `Mark an existing memory as outdated.
Call when existing memory is discovered to be wrong.
Always follow with kontex_remember to store the correction.`,
    inputSchema: {
      type: "object",
      properties: {
        uri: { type: "string", description: "URI of the memory entry to invalidate" },
        reason: { type: "string", description: "Why this memory is no longer valid" }
      },
      required: ["uri", "reason"]
    }
  },
  {
    // Record a structured Architecture Decision Record (ADR).
    name: "kontex_log_decision",
    description: `Log a structured ADR. Call when the developer commits to an
architectural approach after discussion \u2014 not for suggestions, only decisions.`,
    inputSchema: {
      type: "object",
      properties: {
        title: { type: "string", description: "Short title for the decision" },
        context: { type: "string", description: "What problem or situation prompted this decision" },
        decision: { type: "string", description: "What was decided" },
        rationale: { type: "string", description: "Why this approach was chosen" },
        alternatives: { type: "array", items: { type: "string" }, description: "Other approaches considered" },
        consequences: { type: "string", description: "Expected consequences" }
      },
      required: ["title", "context", "decision", "rationale"]
    }
  },
  {
    // Semantic search over the stored memories.
    name: "kontex_find",
    description: `Semantic search across project memory.
Call before answering architecture or convention questions.
Do not guess what memory contains \u2014 search it.`,
    inputSchema: {
      type: "object",
      properties: {
        query: { type: "string", description: "Natural language search query" },
        limit: { type: "number", description: "Maximum number of results (default: 5)" }
      },
      required: ["query"]
    }
  }
];
1010
+
1011
+ // src/mcp/handlers.ts
1012
+ init_write();
1013
+ import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, existsSync as existsSync6 } from "fs";
1014
+ import { join as join8 } from "path";
1015
+ import matter4 from "gray-matter";
1016
// MCP handler for kontex_remember: validate the arguments, persist the
// memory via writeMemory, recompile KONTEX.md, and report the new URI.
async function handleRemember(args, workspaceRoot) {
  const { content, type, why_memorable, confidence, affected_paths } = args;
  const hasRequired = content && type && why_memorable && confidence !== undefined;
  if (!hasRequired)
    return errorResponse("Missing required fields");
  const config = loadConfig(workspaceRoot);
  const memory = { content, type, why_memorable, confidence, affected_paths: affected_paths ?? [] };
  const result = await writeMemory(memory, workspaceRoot, config);
  if (!result.success)
    return errorResponse(result.error ?? "Write failed");
  await compile(workspaceRoot, config);
  return textResponse(`Written: ${result.uri} (verified: ${result.verified})`);
}
1027
// MCP handler for kontex_invalidate: mark a memory stale by URI, recompile
// KONTEX.md, and remind the caller to store a correction.
async function handleInvalidate(args, workspaceRoot) {
  const { uri, reason } = args;
  if (!uri || !reason)
    return errorResponse("Missing required fields: uri, reason");
  const outcome = await invalidateMemory(uri, reason, workspaceRoot);
  if (!outcome.success)
    return errorResponse(outcome.error ?? "Invalidation failed");
  await compile(workspaceRoot, loadConfig(workspaceRoot));
  return textResponse(`Invalidated: ${uri}. Use kontex_remember to store the correction.`);
}
1037
// MCP handler for kontex_log_decision: persist a structured ADR via
// logDecision and recompile KONTEX.md. title/context/decision/rationale
// are mandatory; alternatives and consequences are optional passthroughs.
async function handleLogDecision(args, workspaceRoot) {
  const adr = {
    title: args.title,
    context: args.context,
    decision: args.decision,
    rationale: args.rationale,
    alternatives: args.alternatives,
    consequences: args.consequences
  };
  if (!adr.title || !adr.context || !adr.decision || !adr.rationale)
    return errorResponse("Missing required fields");
  const config = loadConfig(workspaceRoot);
  const result = await logDecision(adr, workspaceRoot, config);
  if (!result.success)
    return errorResponse(result.error ?? "Decision logging failed");
  await compile(workspaceRoot, config);
  return textResponse(`Decision logged: ${result.uri}`);
}
1055
// MCP handler for kontex_find: semantic search over stored memories.
// Side effect: every returned hit gets its ref_count incremented in SQLite
// and mirrored back into its markdown front matter — this feeds the decay
// pass's promotion logic.
async function handleFind(args, workspaceRoot) {
  const query = args.query;
  const limit = args.limit ?? 5;
  if (!query)
    return errorResponse("Missing required field: query");
  const results = await findMemories(query, limit, workspaceRoot);
  if (results.length === 0)
    return textResponse("No matching memories found.");
  // Lazily load the DB module (bundler-style deferred init).
  const { getDatabase: getDatabase2 } = await Promise.resolve().then(() => (init_db(), exports_db));
  const db = getDatabase2(workspaceRoot);
  for (const r of results) {
    db.prepare("UPDATE memories SET ref_count = ref_count + 1 WHERE uri = ?").run(r.uri);
    syncRefCountToFile(r.uri, workspaceRoot, db);
  }
  // Numbered list: metadata header line, then a 300-char content excerpt,
  // entries separated by a blank line.
  const formatted = results.map((r, i) => `${i + 1}. **${r.uri}** (${r.type}, similarity: ${r.similarity.toFixed(2)}, verified: ${r.verified})
${r.content.slice(0, 300)}`).join(`

`);
  return textResponse(formatted);
}
1075
// Mirror a memory's ref_count from SQLite back into its markdown front
// matter so the file-based decay pass sees current reference counts.
// Best-effort: any failure (missing file, missing row, bad front matter)
// is swallowed.
function syncRefCountToFile(uri, workspaceRoot, db) {
  try {
    const filePath = join8(workspaceRoot, ".context", `${uri}.md`);
    if (!existsSync6(filePath))
      return;
    const row = db.prepare("SELECT ref_count FROM memories WHERE uri = ?").get(uri);
    if (!row)
      return;
    const parsed = matter4(readFileSync5(filePath, "utf-8"));
    parsed.data.ref_count = row.ref_count;
    writeFileSync5(filePath, matter4.stringify(parsed.content, parsed.data), "utf-8");
  } catch {}
}
1089
// Wrap plain text in the MCP tool-result content shape.
function textResponse(text) {
  const message = { type: "text", text };
  return { content: [message] };
}
1092
// Same shape as textResponse, but flagged as an error for the MCP client.
function errorResponse(text) {
  return { isError: true, content: [{ type: "text", text }] };
}
1095
+
1096
+ // src/mcp/server.ts
1097
// Build and connect the stdio MCP server exposing the four kontex tools.
// Tool calls are dispatched by name to their handlers; an unknown tool
// name yields an isError result rather than throwing.
async function createMCPServer(workspaceRoot) {
  const server = new Server({ name: "kontex", version: "1.0.0" }, { capabilities: { tools: {} } });
  server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: [...TOOLS] }));
  server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const { name, arguments: args } = request.params;
    const toolArgs = args ?? {};
    switch (name) {
      case "kontex_remember":
        return handleRemember(toolArgs, workspaceRoot);
      case "kontex_invalidate":
        return handleInvalidate(toolArgs, workspaceRoot);
      case "kontex_log_decision":
        return handleLogDecision(toolArgs, workspaceRoot);
      case "kontex_find":
        return handleFind(toolArgs, workspaceRoot);
      default:
        return { content: [{ type: "text", text: `Unknown tool: ${name}` }], isError: true };
    }
  });
  // The server communicates over this process's stdin/stdout.
  const transport = new StdioServerTransport;
  await server.connect(transport);
}
1119
+ // src/hooks/pre-commit.ts
1120
+ init_db();
1121
+ import { writeFileSync as writeFileSync6, existsSync as existsSync7 } from "fs";
1122
+ import { join as join9 } from "path";
1123
// Git pre-commit hook: record the staged file list and the memory entries
// they touch into .kontex-queue.json (consumed by the post-commit hook),
// then recompile KONTEX.md and re-stage it so it ships with the commit.
// `stagedFiles` is whitespace-separated `git diff --cached --name-only`
// output; NOTE(review): paths containing spaces will be split apart.
async function handlePreCommit(stagedFiles, workspaceRoot) {
  // /\s+/ — the original /[\n\s]+/ was redundant since \s matches \n.
  const files = stagedFiles.split(/\s+/).map((f) => f.trim()).filter(Boolean);
  if (files.length === 0)
    return;
  const affectedMemories = findAffectedMemories(files, workspaceRoot);
  const queue = { staged_files: files, affected_memories: affectedMemories, commit_sha_pending: true, timestamp: new Date().toISOString() };
  writeFileSync6(join9(workspaceRoot, ".kontex-queue.json"), JSON.stringify(queue, null, 2), "utf-8");
  const config = loadConfig(workspaceRoot);
  if (existsSync7(join9(workspaceRoot, ".context"))) {
    await compile(workspaceRoot, config);
    try {
      // Stage the regenerated compiled memory alongside the user's commit.
      Bun.spawnSync(["git", "add", ".context/KONTEX.md"], { cwd: workspaceRoot });
    } catch {}
  }
}
1138
// Return URIs of non-stale memories whose affected_paths overlap (prefix
// match, either direction) with the staged files. Any failure — e.g. no
// database yet — degrades to an empty list.
function findAffectedMemories(stagedFiles, workspaceRoot) {
  try {
    const db = getDatabase(workspaceRoot);
    const rows = db.prepare("SELECT uri, affected_paths FROM memories WHERE stale = 0").all();
    const overlaps = (memPath) => stagedFiles.some((staged) => staged.startsWith(memPath) || memPath.startsWith(staged));
    const hits = [];
    for (const row of rows) {
      const memPaths = JSON.parse(row.affected_paths || "[]");
      if (memPaths.some(overlaps))
        hits.push(row.uri);
    }
    return hits;
  } catch {
    return [];
  }
}
1153
+ // src/hooks/post-commit.ts
1154
+ import { writeFileSync as writeFileSync7, existsSync as existsSync8, unlinkSync, mkdirSync as mkdirSync4, appendFileSync as appendFileSync2 } from "fs";
1155
+ import { join as join10, dirname as dirname2 } from "path";
1156
+ import { createHash } from "crypto";
1157
+ init_write();
1158
// Git post-commit hook (run detached by the hook script): optionally runs
// LLM knowledge extraction over the commit diff, writes a session note,
// invalidates memories the model flags as stale, then recompiles and
// auto-commits .context/. Falls back to compile-only when extraction is
// disabled, the provider is "none", or no token is available. All errors
// are logged to .kontex-log/hooks.log; the pre-commit queue file is always
// removed in `finally`.
async function handlePostCommit(commitSha, authorEmail, workspaceRoot) {
  const config = loadConfig(workspaceRoot);
  const logPath = join10(workspaceRoot, ".kontex-log", "hooks.log");
  try {
    logHookEvent(logPath, "POST_COMMIT_START", commitSha);
    if (!config.hooks.postCommitExtract || config.llm.provider === "none") {
      await compileAndCommit(workspaceRoot, config);
      return;
    }
    let token = null;
    if (config.llm.provider === "github-models") {
      token = await getToken();
      if (!token) {
        // No token: still keep the compiled memory current, skip extraction.
        await compileAndCommit(workspaceRoot, config);
        return;
      }
    }
    const diff = await getCommitDiff(commitSha, workspaceRoot);
    // Empty diff (e.g. empty commit): nothing to extract or recompile.
    if (!diff.trim())
      return;
    // Give the model the L0 summaries of current verified memories so it
    // can avoid duplicates and flag stale ones.
    const entries = loadAllEntries(workspaceRoot);
    const existingL0 = entries.filter((e) => e.verified && !e.stale).map((e) => `- ${e.uri}: ${e.l0}`).join(`
`);
    const extraction = await extractKnowledge(diff, existingL0, config, token);
    if (extraction) {
      for (const m of extraction.new_memories) {
        await writeMemory({ content: m.content, type: m.type, why_memorable: m.why_memorable, confidence: m.confidence, affected_paths: m.affected_paths }, workspaceRoot, config);
      }
      await writeSessionFile(commitSha, authorEmail, extraction, workspaceRoot);
      if (extraction.stale_uris.length > 0) {
        const { invalidateMemory: invalidateMemory2 } = await Promise.resolve().then(() => (init_write(), exports_write));
        for (const uri of extraction.stale_uris)
          await invalidateMemory2(uri, `Flagged stale by commit ${commitSha.slice(0, 7)}`, workspaceRoot);
      }
    }
    await compileAndCommit(workspaceRoot, config);
    logHookEvent(logPath, "COMPLETE", commitSha);
  } catch (error) {
    logHookEvent(logPath, "ERROR", String(error));
  } finally {
    // Clean up the queue written by the pre-commit hook, success or not.
    const queuePath = join10(workspaceRoot, ".kontex-queue.json");
    if (existsSync8(queuePath))
      try {
        unlinkSync(queuePath);
      } catch {}
  }
}
1205
// Ask the configured LLM to mine a commit diff for durable learnings.
// Returns the parsed {new_memories, stale_uris} object, or null when no
// model is available, the response contains no JSON, or anything throws
// (extraction is strictly best-effort).
async function extractKnowledge(diff, existingL0, config, token) {
  try {
    const { createLLMModel: createLLMModel2 } = await Promise.resolve().then(() => exports_llm);
    const { generateText } = await import("ai");
    // Cap the diff at 24k chars — presumably to fit the model's context
    // window; confirm against the configured model limits.
    const truncatedDiff = diff.slice(0, 24000);
    const prompt = `You are analyzing a git commit to extract knowledge for a persistent project memory store.

Commit diff:
${truncatedDiff}

Existing memory context (L0):
${existingL0}

Extract learnings. Return JSON only:
{"new_memories":[{"content":"...","type":"decision|pattern|gotcha|convention","why_memorable":"...","confidence":0.0-1.0,"affected_paths":["..."]}],"stale_uris":["..."]}

If nothing worth persisting, return: {"new_memories":[],"stale_uris":[]}
Be conservative.`;
    const model = await createLLMModel2(config, token);
    if (!model)
      return null;
    const result = await generateText({ model, prompt, maxTokens: 1000, temperature: 0.1 });
    // Grab the first {...} span — models often wrap JSON in prose/fences.
    const jsonMatch = result.text.match(/\{[\s\S]*\}/);
    return jsonMatch ? JSON.parse(jsonMatch[0]) : null;
  } catch {
    return null;
  }
}
1233
// Append this commit's extracted memories to today's per-author session
// note (.context/memory/sessions/YYYY-MM-DD-<md5-8 of email>.md), creating
// the file with kontex front matter on first write of the day. The md5
// prefix anonymizes the filename while keeping it stable per author/day.
async function writeSessionFile(commitSha, authorEmail, extraction, workspaceRoot) {
  const sessionsDir = join10(workspaceRoot, ".context", "memory", "sessions");
  if (!existsSync8(sessionsDir))
    mkdirSync4(sessionsDir, { recursive: true });
  const date = new Date().toISOString().slice(0, 10);
  const authorHash = createHash("md5").update(authorEmail).digest("hex").slice(0, 8);
  const filePath = join10(sessionsDir, `${date}-${authorHash}.md`);
  // One "### Commit <sha7>" section per commit, bullets per memory.
  const commitBody = `
### Commit ${commitSha.slice(0, 7)}

${extraction.new_memories.map((m) => `- **${m.type}** (${m.confidence.toFixed(2)}): ${m.content}`).join(`
`) || "No new memories."}
`;
  if (existsSync8(filePath)) {
    appendFileSync2(filePath, commitBody, "utf-8");
  } else {
    const header = `---
author: ${authorEmail}
date: ${date}
uri: memory/sessions/${date}-${authorHash}
type: resource
confidence: 0.7
verified: false
stale: false
global: false
affected_paths: []
ref_count: 0
tags: ["session"]
---

## Session: ${date}
`;
    writeFileSync7(filePath, header + commitBody, "utf-8");
  }
}
1268
// Full patch plus stat summary of a single commit via `git show`.
// Returns "" when git (or the Bun runtime) is unavailable.
async function getCommitDiff(sha, workspaceRoot) {
  try {
    const child = Bun.spawn(["git", "show", sha, "--stat", "--patch"], { cwd: workspaceRoot, stdout: "pipe" });
    return await new Response(child.stdout).text();
  } catch {
    return "";
  }
}
1276
// Recompile KONTEX.md and, when anything under .context/ actually changed,
// commit it with --no-verify (so the kontex hooks don't re-fire) and a
// [skip ci] message. Git failures are swallowed (best-effort).
async function compileAndCommit(workspaceRoot, config) {
  if (!existsSync8(join10(workspaceRoot, ".context")))
    return;
  await compile(workspaceRoot, config);
  try {
    Bun.spawnSync(["git", "add", ".context/"], { cwd: workspaceRoot });
    // `git diff --cached --quiet` exits non-zero iff staged changes exist.
    const status = Bun.spawnSync(["git", "diff", "--cached", "--quiet", ".context/"], { cwd: workspaceRoot });
    if (status.exitCode !== 0)
      Bun.spawnSync(["git", "commit", "--no-verify", "-m", "chore(kontex): update memory [skip ci]"], { cwd: workspaceRoot });
  } catch {}
}
1287
// Append a timestamped "[ISO] EVENT: message" line to the hook log,
// creating the log directory on demand.
function logHookEvent(logPath, event, message) {
  const dir = dirname2(logPath);
  if (!existsSync8(dir))
    mkdirSync4(dir, { recursive: true });
  const line = `[${new Date().toISOString()}] ${event}: ${message}\n`;
  appendFileSync2(logPath, line, "utf-8");
}
1294
+ // src/hooks/post-merge.ts
1295
+ import { existsSync as existsSync9 } from "fs";
1296
+ import { join as join11 } from "path";
1297
+ init_db();
1298
+ init_embeddings();
1299
// Git post-merge hook: after pulling in teammates' memory files, recompile
// KONTEX.md and index any entries missing from the local SQLite store.
// A workspace without .context/ is a no-op.
async function handlePostMerge(workspaceRoot) {
  const contextDir = join11(workspaceRoot, ".context");
  if (!existsSync9(contextDir))
    return;
  await compile(workspaceRoot, loadConfig(workspaceRoot));
  await rebuildIndex(workspaceRoot);
}
1306
// Insert any on-disk memory entries missing from SQLite (e.g. files just
// pulled in by a merge) and embed each for semantic search. Entries whose
// URI is already in the DB are skipped; an embedding failure leaves the
// row in place without a vector. The whole rebuild is best-effort.
async function rebuildIndex(workspaceRoot) {
  try {
    const db = getDatabase(workspaceRoot);
    const entries = loadAllEntries(workspaceRoot);
    const existing = new Set(db.prepare("SELECT uri FROM memories").all().map((r) => r.uri));
    for (const entry of entries) {
      if (existing.has(entry.uri))
        continue;
      // Booleans are stored as 0/1; list fields as JSON strings.
      db.prepare(`INSERT OR REPLACE INTO memories (uri, content, type, l0, l1, l2, confidence, verified, stale, global, ref_count, author, affected_paths, tags, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'))`).run(entry.uri, entry.content, entry.type, entry.l0, entry.l1, entry.l2, entry.confidence, entry.verified ? 1 : 0, entry.stale ? 1 : 0, entry.global ? 1 : 0, entry.ref_count, entry.author, JSON.stringify(entry.affected_paths), JSON.stringify(entry.tags));
      try {
        const embedding = await embed(entry.content);
        db.prepare("INSERT OR REPLACE INTO memory_embeddings (uri, embedding) VALUES (?, ?)").run(entry.uri, Buffer.from(embedding.buffer));
      } catch {}
    }
  } catch {}
}
1322
+ // src/init.ts
1323
+ import { existsSync as existsSync11, readFileSync as readFileSync7, writeFileSync as writeFileSync8, mkdirSync as mkdirSync5, appendFileSync as appendFileSync3, chmodSync } from "fs";
1324
+ import { join as join13, resolve } from "path";
1325
+ import { homedir as homedir2 } from "os";
1326
+ init_embeddings();
1327
+
1328
+ // src/memory/extract.ts
1329
+ init_write();
1330
+ import { readFileSync as readFileSync6, existsSync as existsSync10 } from "fs";
1331
+ import { join as join12 } from "path";
1332
// `kontex init --ai`: one-shot LLM pass that mines the repository (tree +
// most frequently changed files) for undocumented architectural
// conventions and persists them as memories. Requires a configured LLM
// provider; for github-models a token from `kontex login` is mandatory.
// All failures degrade to console warnings — init never hard-fails here.
async function runInitAI(workspaceRoot) {
  const config = loadConfig(workspaceRoot);
  if (config.llm.provider === "none")
    return;
  let token = null;
  if (config.llm.provider === "github-models") {
    token = await getToken();
    if (!token) {
      console.log("\u26A0 Missing GitHub token. Run `kontex login` first to use --ai.");
      return;
    }
  }
  console.log(" Analyzing repository structure...");
  const tree = await generateProjectTree(workspaceRoot);
  console.log(" Identifying core files...");
  const hotFiles = await getHotFiles(workspaceRoot);
  // Concatenate the first 3KB of each hot file as prompt context.
  let filesContext = "";
  for (const file of hotFiles) {
    const fullPath = join12(workspaceRoot, file);
    if (existsSync10(fullPath)) {
      try {
        const content = readFileSync6(fullPath, "utf-8");
        filesContext += `
--- FILE: ${file} ---
${content.slice(0, 3000)}
`;
      } catch {}
    }
  }
  console.log(" Extracting team conventions and practices...");
  try {
    const { generateText } = await import("ai");
    const prompt = `You are an expert software architect. Analyze the provided directory structure and the contents of the most frequently modified files in this codebase.
Identify the prevailing, undocumented architectural conventions and patterns used by this team. Look for repeated structural choices, layering principles, and standard practices that a new developer would need to mimic to write consistent code in this repository.
Do NOT summarize what the app does. ONLY extract actionable engineering conventions.
Return JSON only:
{"new_memories":[{"content":"...","type":"pattern","why_memorable":"...","confidence":0.8,"affected_paths":["..."]}]}

Directory Tree:
${tree}

Hot Files Context:
${filesContext}`;
    const { createLLMModel: createLLMModel2 } = await Promise.resolve().then(() => exports_llm);
    const model = await createLLMModel2(config, token);
    if (!model)
      return;
    const result = await generateText({ model, prompt, maxTokens: 2000, temperature: 0.1 });
    // Grab the first {...} span — models often wrap JSON in prose/fences.
    const jsonMatch = result.text.match(/\{[\s\S]*\}/);
    if (jsonMatch) {
      const parsed = JSON.parse(jsonMatch[0]);
      let writtenCount = 0;
      for (const m of parsed.new_memories || []) {
        // Coerce unknown types to "convention"; confidence is capped at
        // 0.84 — presumably to keep AI-extracted entries below a
        // human-verification threshold; confirm against writeMemory.
        const type = ["decision", "pattern", "gotcha", "convention"].includes(m.type) ? m.type : "convention";
        const r = await writeMemory({
          content: m.content,
          type,
          why_memorable: m.why_memorable || "Extracted from codebase initialization",
          confidence: Math.min(m.confidence || 0.8, 0.84),
          affected_paths: m.affected_paths || []
        }, workspaceRoot, config);
        if (r.success)
          writtenCount++;
      }
      console.log(`\u2713 Extracted and saved ${writtenCount} architectural conventions to memory.`);
    } else {
      console.log("\u26A0 AI extraction returned no verifiable conventions.");
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    console.log(`\u26A0 AI extraction failed: ${message}`);
  }
}
1405
// First 100 sorted paths of the workspace tree, excluding VCS, build, and
// kontex directories. Returns "Tree unavailable" when `find` (or the Bun
// runtime) cannot be used.
async function generateProjectTree(workspaceRoot) {
  const command = ["find", "."];
  for (const pattern of ["*/.git/*", "*/node_modules/*", "*/dist/*", "*/.context/*"])
    command.push("-not", "-path", pattern);
  try {
    const child = Bun.spawn(command, { cwd: workspaceRoot, stdout: "pipe" });
    const text = await new Response(child.stdout).text();
    return text.split("\n").sort().slice(0, 100).join("\n");
  } catch {
    return "Tree unavailable";
  }
}
1416
// The ten most frequently committed files (by `git log` name counts),
// excluding manifests, lockfiles, and READMEs. Returns [] when bash/git
// (or the Bun runtime) is unavailable.
async function getHotFiles(workspaceRoot) {
  const pipeline = "git log --name-only --pretty=format: | grep -v '^$' | grep -v 'package.json' | grep -v 'bun.lockb' | grep -v 'README' | sort | uniq -c | sort -nr | head -10 | awk '{print $2}'";
  try {
    const child = Bun.spawn(["bash", "-c", pipeline], { cwd: workspaceRoot, stdout: "pipe" });
    const text = await new Response(child.stdout).text();
    return text.split("\n").map((line) => line.trim()).filter(Boolean);
  } catch {
    return [];
  }
}
1426
+
1427
+ // src/init.ts
1428
// `kontex init`: end-to-end project setup. Steps: GitHub auth (optional),
// stack detection, .context/ scaffold, kontex.config.json, git hooks,
// prepare script, embedding-model download, optional AI convention
// extraction (--ai), first KONTEX.md compile, AI-tool registration, agent
// instructions, and .gitignore updates.
// options: { force?: boolean, noHooks?: boolean, ai?: boolean }.
async function initProject(workspaceRoot, options = {}) {
  const absRoot = resolve(workspaceRoot);
  console.log(`kontex init \u2014 setting up project memory
`);
  if (!await isAuthenticated()) {
    try {
      await login();
    } catch {
      // Auth is optional at init time; the user can log in later.
      console.log("\u26A0 Login skipped \u2014 run `kontex login` later.\n");
    }
  } else {
    console.log(`\u2713 GitHub auth: authenticated
`);
  }
  const stack = scanProject(absRoot);
  console.log(`\u2713 Detected: ${stack}`);
  createContextScaffold(absRoot, stack, options.force);
  const configPath = join13(absRoot, "kontex.config.json");
  // --force overwrites an existing config with the defaults.
  if (!existsSync11(configPath) || options.force) {
    writeConfig(absRoot, DEFAULT_CONFIG);
    console.log("\u2713 Created kontex.config.json");
  } else
    console.log("\u2713 kontex.config.json exists (skipped)");
  if (!options.noHooks)
    installGitHooks(absRoot);
  addPrepareScript(absRoot);
  console.log(" Downloading embedding model (first time only)...");
  try {
    await initEmbeddingModel();
    console.log("\u2713 Embedding model ready");
  } catch {
    // Model download failure is non-fatal; it will be retried on demand.
    console.log("\u26A0 Embedding model deferred");
  }
  if (options.ai) {
    await runInitAI(absRoot);
  }
  await compile(absRoot, loadConfig(absRoot));
  console.log("\u2713 KONTEX.md compiled");
  registerInAITools(absRoot);
  injectAgentInstructions(absRoot);
  updateGitignore(absRoot);
  console.log(`
\u2713 kontex initialized. Memory is active.
`);
}
1473
// Heuristically identify the project's primary stack from manifest files:
// package.json dependency names first (framework-specific before generic),
// then Python and Go markers; "Unknown project" otherwise.
function scanProject(root) {
  const pkgPath = join13(root, "package.json");
  if (existsSync11(pkgPath)) {
    try {
      const pkg = JSON.parse(readFileSync7(pkgPath, "utf-8"));
      const deps = { ...pkg.dependencies, ...pkg.devDependencies };
      if (deps.next)
        return "Next.js";
      if (deps.react)
        return "React";
      if (deps.vue)
        return "Vue";
      if (deps.express)
        return "Express";
      return "Node.js/Bun";
    } catch {
      // An unreadable/invalid package.json still implies a JS runtime.
      return "Node.js/Bun";
    }
  }
  if (existsSync11(join13(root, "pyproject.toml")))
    return "Python";
  if (existsSync11(join13(root, "go.mod")))
    return "Go";
  return "Unknown project";
}
1497
// Create the .context/ directory tree (memory, decisions, sessions,
// archive, resources, skills), seed memory/project.md with the detected
// stack as a verified baseline entry, create the shadow.jsonl drop file,
// and — when a .github/ directory exists — add copilot-instructions.md
// telling Copilot to append memories to shadow.jsonl. Skipped entirely
// when .context/ already exists, unless `force` is set.
function createContextScaffold(root, stack, force) {
  if (existsSync11(join13(root, ".context")) && !force) {
    console.log("\u2713 .context/ exists (skipped)");
    return;
  }
  for (const dir of [".context/memory", ".context/memory/decisions", ".context/memory/sessions", ".context/memory/sessions/archive", ".context/resources", ".context/skills"])
    mkdirSync5(join13(root, dir), { recursive: true });
  const projectMd = join13(root, ".context", "memory", "project.md");
  // Never overwrite an existing baseline entry, even with --force.
  if (!existsSync11(projectMd)) {
    const memoryContent = `---
uri: memory/project
type: resource
created: ${new Date().toISOString()}
updated: ${new Date().toISOString()}
confidence: 1.0
verified: true
stale: false
global: true
affected_paths: []
ref_count: 0
tags: ["architecture", "baseline"]
---

# L0
Project memory initialized. Detected tech stack: ${stack}.

# L1
## Project overview

This memory store was initialized by \`kontex init\`.

### Baseline Architecture
The system heuristically detected the following core technology stack during initialization:
- **${stack}**

The AI should assume standard architectural conventions for this stack unless subsequent ADRs (Architecture Decision Records) override them.
`;
    writeFileSync8(projectMd, memoryContent, "utf-8");
  }
  // shadow.jsonl is the append-only drop file that AI agents write to.
  const shadowPath = join13(root, ".context", "shadow.jsonl");
  if (!existsSync11(shadowPath))
    writeFileSync8(shadowPath, "", "utf-8");
  const githubDir = join13(root, ".github");
  if (existsSync11(githubDir)) {
    const copilotMd = join13(githubDir, "copilot-instructions.md");
    if (!existsSync11(copilotMd)) {
      writeFileSync8(copilotMd, `## Project Memory (kontex)

If you make an architectural decision, establish a pattern, or note a gotcha, you MUST append a new JSON object to the file \`.context/shadow.jsonl\` formatted exactly like this:

\`\`\`json
{"__kontex_memory": {"type":"decision","content":"...","why_memorable":"...","confidence":0.95,"affected_paths":["some/path"]}}
\`\`\`

Do not ask for permission, just use your file system powers to append to \`.context/shadow.jsonl\`. Kontex will automatically ingest and manage it.
`, "utf-8");
      console.log("\u2713 Created .github/copilot-instructions.md");
    }
  }
  console.log("\u2713 Created .context/ scaffold and injected baseline architecture.");
}
1558
/**
 * Installs the three kontex git hooks (pre-commit, post-commit, post-merge)
 * into `.git/hooks`, overwriting any same-named hooks already there.
 *
 * Each hook prefers a globally installed `kontex` binary and falls back to
 * `bunx kontex`; post-commit and post-merge detach via `nohup … &` so they
 * never delay git. Every hook ends in `exit 0`, so a commit is never blocked.
 *
 * NOTE(review): HOOK_TEMPLATES elsewhere in this bundle probes `bunx` BEFORE
 * `kontex` — the opposite precedence of these inline scripts. Confirm which
 * order is intended and unify.
 *
 * @param {string} root - Absolute path to the repository root.
 * @returns {void} Logs a warning and returns early when `root` has no `.git`.
 */
function installGitHooks(root) {
  if (!existsSync11(join13(root, ".git"))) {
    console.log("\u26A0 Not a git repository \u2014 hooks skipped");
    return;
  }
  const hooksDir = join13(root, ".git", "hooks");
  mkdirSync5(hooksDir, { recursive: true });
  const hooks = {
    "pre-commit": `#!/bin/sh
STAGED=$(git diff --cached --name-only 2>/dev/null)
if [ -z "$STAGED" ]; then exit 0; fi
if command -v kontex >/dev/null 2>&1; then
kontex hook pre-commit --staged "$STAGED" 2>/dev/null || true
elif command -v bunx >/dev/null 2>&1; then
bunx kontex hook pre-commit --staged "$STAGED" 2>/dev/null || true
fi
exit 0`,
    "post-commit": `#!/bin/sh
SHA=$(git rev-parse HEAD 2>/dev/null)
AUTHOR=$(git log -1 --format='%ae' 2>/dev/null)
if [ -z "$SHA" ]; then exit 0; fi
if command -v kontex >/dev/null 2>&1; then
nohup kontex hook post-commit --sha "$SHA" --author "$AUTHOR" > /dev/null 2>&1 &
elif command -v bunx >/dev/null 2>&1; then
nohup bunx kontex hook post-commit --sha "$SHA" --author "$AUTHOR" > /dev/null 2>&1 &
fi
exit 0`,
    "post-merge": `#!/bin/sh
if command -v kontex >/dev/null 2>&1; then
nohup kontex hook post-merge > /dev/null 2>&1 &
elif command -v bunx >/dev/null 2>&1; then
nohup bunx kontex hook post-merge > /dev/null 2>&1 &
fi
exit 0`
  };
  for (const [name, content] of Object.entries(hooks)) {
    const hookPath = join13(hooksDir, name);
    // Guarantee a trailing newline so the shell sees a complete last line.
    writeFileSync8(hookPath, content + "\n", "utf-8");
    // rwxr-xr-x. The original used decimal 493, which is the same mode;
    // the octal literal makes the permission bits readable.
    chmodSync(hookPath, 0o755);
  }
  console.log("\u2713 Installed git hooks");
}
1601
/**
 * Ensures package.json has a "prepare" script that runs `kontex hooks install`.
 * An existing prepare script is preserved and chained with `&&`. No-ops when
 * package.json is missing, unreadable, or already mentions kontex.
 *
 * @param {string} root - Workspace root expected to contain package.json.
 * @returns {void}
 */
function addPrepareScript(root) {
  const manifestPath = join13(root, "package.json");
  if (!existsSync11(manifestPath))
    return;
  try {
    const manifest = JSON.parse(readFileSync7(manifestPath, "utf-8"));
    manifest.scripts = manifest.scripts || {};
    const current = manifest.scripts.prepare;
    if (current?.includes("kontex"))
      return;
    const base = current ?? "";
    manifest.scripts.prepare = base ? `${base} && kontex hooks install` : "kontex hooks install";
    writeFileSync8(manifestPath, JSON.stringify(manifest, null, 2) + "\n", "utf-8");
    console.log("\u2713 Added prepare script");
  } catch {
    // Best effort: a malformed or unwritable package.json is left untouched.
  }
}
1618
/**
 * Registers the kontex MCP server in the config files of known AI coding
 * tools (Claude Code, Cursor, Windsurf, Zed). Tools that already have a
 * "kontex" server entry are skipped; per-tool failures (invalid JSON,
 * permission errors) are ignored silently.
 *
 * NOTE(review): recent Zed versions configure MCP under "context_servers",
 * not "mcpServers" — confirm the Zed key.
 *
 * @param {string} root - Workspace root, exported to the server as KONTEX_WORKSPACE.
 * @returns {void}
 */
function registerInAITools(root) {
  const home = homedir2();
  const serverEntry = { command: "bunx", args: ["kontex", "mcp"], env: { KONTEX_WORKSPACE: root } };
  const tools = [
    { name: "Claude Code", configPath: join13(home, ".claude", "claude_desktop_config.json"), key: "mcpServers" },
    { name: "Cursor", configPath: join13(root, ".cursor", "mcp.json"), key: "mcpServers" },
    { name: "Windsurf", configPath: join13(home, ".codeium", "windsurf", "mcp_config.json"), key: "mcpServers" },
    { name: "Zed", configPath: join13(home, ".config", "zed", "settings.json"), key: "mcpServers" }
  ];
  for (const { name, configPath, key } of tools) {
    try {
      const config = existsSync11(configPath) ? JSON.parse(readFileSync7(configPath, "utf-8")) : {};
      const servers = config[key] ?? {};
      if (servers.kontex)
        continue;
      servers.kontex = serverEntry;
      config[key] = servers;
      // join(configPath, "..") resolves to the config file's parent directory.
      mkdirSync5(join13(configPath, ".."), { recursive: true });
      writeFileSync8(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8");
      console.log(`\u2713 Registered in ${name}`);
    } catch {
      // Config unreadable, unparsable, or unwritable — skip this tool.
    }
  }
}
1644
/**
 * Appends a pointer to `.context/KONTEX.md` into AI agent rule files
 * (AGENTS.md, CLAUDE.md, .cursorrules) that do not already mention it.
 * Only AGENTS.md is created when absent; the other targets are append-only.
 *
 * @param {string} root - Workspace root containing the rule files.
 * @returns {void}
 */
function injectAgentInstructions(root) {
  const banner = `
# Do not remove this block! It ensures the AI reads your project memory.
Always read \`.context/KONTEX.md\` at the start of every session. It contains the compiled project memory, conventions, and architectural decisions.
`;
  let touched = 0;
  for (const fileName of ["AGENTS.md", "CLAUDE.md", ".cursorrules"]) {
    const filePath = join13(root, fileName);
    if (!existsSync11(filePath)) {
      // Missing files: only AGENTS.md is created from scratch.
      if (fileName === "AGENTS.md") {
        writeFileSync8(filePath, banner.trimStart(), "utf-8");
        touched++;
      }
      continue;
    }
    const existing = readFileSync7(filePath, "utf-8");
    if (!existing.includes("KONTEX.md")) {
      appendFileSync3(filePath, banner, "utf-8");
      touched++;
    }
  }
  if (touched > 0) {
    console.log("\u2713 Injected KONTEX.md pointers into AI agent rules");
  }
}
1668
/**
 * Appends the local-only kontex artifacts to .gitignore when missing.
 * Presence is checked by substring, so entries anywhere in the file
 * (including inside comments) count as present. Creates .gitignore via
 * append when it does not exist yet.
 *
 * @param {string} root - Workspace root containing (or to contain) .gitignore.
 * @returns {void}
 */
function updateGitignore(root) {
  const ignorePath = join13(root, ".gitignore");
  let existing = "";
  if (existsSync11(ignorePath)) {
    existing = readFileSync7(ignorePath, "utf-8");
  }
  const wanted = [".kontex-index/", ".kontex-log/", ".kontex-queue.json"];
  const toAdd = wanted.filter((entry) => !existing.includes(entry));
  if (toAdd.length === 0)
    return;
  appendFileSync3(ignorePath, `\n# kontex (local)\n${toAdd.join("\n")}\n`, "utf-8");
  console.log("\u2713 Updated .gitignore");
}
1681
// src/hook-templates.ts
// Shell sources for the three kontex git hooks, keyed by hook name.
// Each script is kept as an array of lines and joined once at module load.
var PRE_COMMIT_LINES = [
  "#!/bin/sh",
  "# kontex pre-commit hook \u2014 reads staged files, writes queue, runs compile.",
  "# Never blocks the commit.",
  "STAGED=$(git diff --cached --name-only 2>/dev/null)",
  'if [ -z "$STAGED" ]; then exit 0; fi',
  "if command -v bunx >/dev/null 2>&1; then",
  ' bunx kontex hook pre-commit --staged "$STAGED" 2>/dev/null || true',
  "elif command -v kontex >/dev/null 2>&1; then",
  ' kontex hook pre-commit --staged "$STAGED" 2>/dev/null || true',
  "fi",
  "exit 0"
];
var POST_COMMIT_LINES = [
  "#!/bin/sh",
  "# kontex post-commit hook \u2014 spawns background LLM extraction.",
  "SHA=$(git rev-parse HEAD 2>/dev/null)",
  "AUTHOR=$(git log -1 --format='%ae' 2>/dev/null)",
  'if [ -z "$SHA" ]; then exit 0; fi',
  "if command -v bunx >/dev/null 2>&1; then",
  ' nohup bunx kontex hook post-commit --sha "$SHA" --author "$AUTHOR" > /dev/null 2>&1 &',
  "elif command -v kontex >/dev/null 2>&1; then",
  ' nohup kontex hook post-commit --sha "$SHA" --author "$AUTHOR" > /dev/null 2>&1 &',
  "fi",
  "exit 0"
];
var POST_MERGE_LINES = [
  "#!/bin/sh",
  "# kontex post-merge hook \u2014 recompiles KONTEX.md after pull/merge.",
  "if command -v bunx >/dev/null 2>&1; then",
  " nohup bunx kontex hook post-merge > /dev/null 2>&1 &",
  "elif command -v kontex >/dev/null 2>&1; then",
  " nohup kontex hook post-merge > /dev/null 2>&1 &",
  "fi",
  "exit 0"
];
var HOOK_TEMPLATES = {
  "pre-commit": PRE_COMMIT_LINES.join("\n"),
  "post-commit": POST_COMMIT_LINES.join("\n"),
  "post-merge": POST_MERGE_LINES.join("\n")
};
1723
// Public API surface of the kontex-core bundle: config, memory store, auth,
// git-hook handlers, embeddings, secret scanning, MCP server, and templates.
// All names are defined earlier in this bundled file.
export {
  writeMemory,
  writeConfig,
  scanForSecrets,
  runDecay,
  logout,
  login,
  logDecision,
  loadEntry,
  loadConfig,
  loadAllEntries,
  isAuthenticated,
  invalidateMemory,
  initProject,
  initEmbeddingModel,
  handlePreCommit,
  handlePostMerge,
  handlePostCommit,
  getToken,
  getDatabase,
  findMemories,
  embed,
  createMCPServer,
  compile,
  closeDatabase,
  HOOK_TEMPLATES,
  DEFAULT_CONFIG
};