@wastedcode/memex 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +291 -0
  3. package/dist/cli/client.d.ts +35 -0
  4. package/dist/cli/client.js +183 -0
  5. package/dist/cli/client.js.map +1 -0
  6. package/dist/cli/commands/chown.d.ts +2 -0
  7. package/dist/cli/commands/chown.js +22 -0
  8. package/dist/cli/commands/chown.js.map +1 -0
  9. package/dist/cli/commands/config.d.ts +2 -0
  10. package/dist/cli/commands/config.js +132 -0
  11. package/dist/cli/commands/config.js.map +1 -0
  12. package/dist/cli/commands/create.d.ts +2 -0
  13. package/dist/cli/commands/create.js +21 -0
  14. package/dist/cli/commands/create.js.map +1 -0
  15. package/dist/cli/commands/destroy.d.ts +2 -0
  16. package/dist/cli/commands/destroy.js +34 -0
  17. package/dist/cli/commands/destroy.js.map +1 -0
  18. package/dist/cli/commands/ingest.d.ts +2 -0
  19. package/dist/cli/commands/ingest.js +74 -0
  20. package/dist/cli/commands/ingest.js.map +1 -0
  21. package/dist/cli/commands/lint.d.ts +2 -0
  22. package/dist/cli/commands/lint.js +46 -0
  23. package/dist/cli/commands/lint.js.map +1 -0
  24. package/dist/cli/commands/list.d.ts +2 -0
  25. package/dist/cli/commands/list.js +28 -0
  26. package/dist/cli/commands/list.js.map +1 -0
  27. package/dist/cli/commands/login.d.ts +2 -0
  28. package/dist/cli/commands/login.js +51 -0
  29. package/dist/cli/commands/login.js.map +1 -0
  30. package/dist/cli/commands/logs.d.ts +2 -0
  31. package/dist/cli/commands/logs.js +26 -0
  32. package/dist/cli/commands/logs.js.map +1 -0
  33. package/dist/cli/commands/query.d.ts +2 -0
  34. package/dist/cli/commands/query.js +48 -0
  35. package/dist/cli/commands/query.js.map +1 -0
  36. package/dist/cli/commands/serve.d.ts +2 -0
  37. package/dist/cli/commands/serve.js +14 -0
  38. package/dist/cli/commands/serve.js.map +1 -0
  39. package/dist/cli/commands/status.d.ts +2 -0
  40. package/dist/cli/commands/status.js +66 -0
  41. package/dist/cli/commands/status.js.map +1 -0
  42. package/dist/daemon/auth.d.ts +31 -0
  43. package/dist/daemon/auth.js +84 -0
  44. package/dist/daemon/auth.js.map +1 -0
  45. package/dist/daemon/db.d.ts +36 -0
  46. package/dist/daemon/db.js +181 -0
  47. package/dist/daemon/db.js.map +1 -0
  48. package/dist/daemon/namespace.d.ts +34 -0
  49. package/dist/daemon/namespace.js +74 -0
  50. package/dist/daemon/namespace.js.map +1 -0
  51. package/dist/daemon/peercred.d.ts +15 -0
  52. package/dist/daemon/peercred.js +19 -0
  53. package/dist/daemon/peercred.js.map +1 -0
  54. package/dist/daemon/queue.d.ts +26 -0
  55. package/dist/daemon/queue.js +126 -0
  56. package/dist/daemon/queue.js.map +1 -0
  57. package/dist/daemon/routes.d.ts +38 -0
  58. package/dist/daemon/routes.js +258 -0
  59. package/dist/daemon/routes.js.map +1 -0
  60. package/dist/daemon/runner.d.ts +25 -0
  61. package/dist/daemon/runner.js +195 -0
  62. package/dist/daemon/runner.js.map +1 -0
  63. package/dist/daemon/scaffold.d.ts +38 -0
  64. package/dist/daemon/scaffold.js +141 -0
  65. package/dist/daemon/scaffold.js.map +1 -0
  66. package/dist/daemon/server.d.ts +11 -0
  67. package/dist/daemon/server.js +145 -0
  68. package/dist/daemon/server.js.map +1 -0
  69. package/dist/daemon.d.ts +1 -0
  70. package/dist/daemon.js +55 -0
  71. package/dist/daemon.js.map +1 -0
  72. package/dist/index.d.ts +2 -0
  73. package/dist/index.js +36 -0
  74. package/dist/index.js.map +1 -0
  75. package/dist/lib/constants.d.ts +17 -0
  76. package/dist/lib/constants.js +30 -0
  77. package/dist/lib/constants.js.map +1 -0
  78. package/dist/lib/errors.d.ts +32 -0
  79. package/dist/lib/errors.js +64 -0
  80. package/dist/lib/errors.js.map +1 -0
  81. package/dist/lib/prompts/ingest.d.ts +9 -0
  82. package/dist/lib/prompts/ingest.js +48 -0
  83. package/dist/lib/prompts/ingest.js.map +1 -0
  84. package/dist/lib/prompts/lint.d.ts +8 -0
  85. package/dist/lib/prompts/lint.js +62 -0
  86. package/dist/lib/prompts/lint.js.map +1 -0
  87. package/dist/lib/prompts/query.d.ts +8 -0
  88. package/dist/lib/prompts/query.js +37 -0
  89. package/dist/lib/prompts/query.js.map +1 -0
  90. package/dist/lib/prompts/wiki.d.ts +11 -0
  91. package/dist/lib/prompts/wiki.js +112 -0
  92. package/dist/lib/prompts/wiki.js.map +1 -0
  93. package/dist/lib/types.d.ts +76 -0
  94. package/dist/lib/types.js +3 -0
  95. package/dist/lib/types.js.map +1 -0
  96. package/dist/standalone/memex.mjs +2313 -0
  97. package/dist/standalone/memex.mjs.map +7 -0
  98. package/package.json +54 -0
@@ -0,0 +1,2313 @@
1
#!/usr/bin/env node
// esbuild bundler runtime helpers (auto-generated — do not edit by hand).
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Wraps a module initializer so it runs at most once. `fn` is an object whose
// single property is the init function; on first call it is invoked and then
// `fn` is cleared (assigned 0) so later calls only return the cached `res`.
// The `(0, fn[...])(...)` form strips the `this` binding before the call.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Copies every entry of `all` onto `target` as an enumerable live getter —
// esbuild's implementation of ESM named exports.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
11
+
12
+ // src/daemon/db.ts
13
+ import BetterSqlite3 from "better-sqlite3";
14
var Database;
var init_db = __esm({
  "src/daemon/db.ts"() {
    "use strict";
    /**
     * Synchronous SQLite persistence layer for the daemon (better-sqlite3).
     * Holds wiki metadata, the per-wiki job queue, and an audit trail.
     *
     * NOTE(review): the RETURNING clauses below need SQLite >= 3.35; assumes
     * the SQLite bundled with better-sqlite3 is recent enough — confirm.
     */
    Database = class {
      // Underlying better-sqlite3 connection handle.
      db;
      // Opens (or creates) the database file at dbPath.
      constructor(dbPath) {
        this.db = new BetterSqlite3(dbPath);
        // WAL lets readers proceed while the daemon writes.
        this.db.pragma("journal_mode = WAL");
        // Required for the ON DELETE CASCADE references declared below.
        this.db.pragma("foreign_keys = ON");
      }
      // ── Schema ───────────────────────────────────────────────────────────────
      // Idempotent schema + index creation; safe to run on every startup.
      initialize() {
        this.db.exec(`
          CREATE TABLE IF NOT EXISTS wikis (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            owner_uid INTEGER NOT NULL,
            default_model TEXT NOT NULL DEFAULT 'sonnet',
            created_at TEXT NOT NULL DEFAULT (datetime('now'))
          );

          CREATE TABLE IF NOT EXISTS queue_jobs (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            wiki_id TEXT NOT NULL REFERENCES wikis(id) ON DELETE CASCADE,
            type TEXT NOT NULL,
            payload TEXT NOT NULL,
            status TEXT NOT NULL DEFAULT 'pending',
            retry_count INTEGER NOT NULL DEFAULT 0,
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            started_at TEXT,
            completed_at TEXT,
            result TEXT
          );

          CREATE TABLE IF NOT EXISTS audit_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            wiki_id TEXT NOT NULL REFERENCES wikis(id) ON DELETE CASCADE,
            action TEXT NOT NULL,
            detail TEXT,
            created_at TEXT NOT NULL DEFAULT (datetime('now'))
          );

          CREATE INDEX IF NOT EXISTS idx_queue_wiki_status
            ON queue_jobs(wiki_id, status);
          CREATE INDEX IF NOT EXISTS idx_audit_wiki
            ON audit_log(wiki_id);
          CREATE INDEX IF NOT EXISTS idx_audit_created
            ON audit_log(created_at);
        `);
      }
      // Closes the underlying connection; the instance is unusable afterwards.
      close() {
        this.db.close();
      }
      // ── Wikis ───────────────────────────────────────────────────────────────
      // Inserts a new wiki row and returns it (throws on duplicate id).
      createWiki(id, name, ownerUid) {
        const stmt = this.db.prepare(`
          INSERT INTO wikis (id, name, owner_uid) VALUES (?, ?, ?)
          RETURNING *
        `);
        return stmt.get(id, name, ownerUid);
      }
      // Returns the wiki row, or undefined if the id is unknown.
      getWiki(id) {
        return this.db.prepare("SELECT * FROM wikis WHERE id = ?").get(id);
      }
      // Lists wikis, optionally restricted to a single owner uid.
      listWikis(ownerUid) {
        if (ownerUid !== void 0) {
          return this.db.prepare("SELECT * FROM wikis WHERE owner_uid = ? ORDER BY created_at").all(ownerUid);
        }
        return this.db.prepare("SELECT * FROM wikis ORDER BY created_at").all();
      }
      // Reassigns ownership; returns the updated row (undefined if no such wiki).
      chownWiki(id, newOwnerUid) {
        const stmt = this.db.prepare(
          "UPDATE wikis SET owner_uid = ? WHERE id = ? RETURNING *"
        );
        return stmt.get(newOwnerUid, id);
      }
      // Partial update of name and/or default_model; a no-op config returns
      // the current row unchanged.
      updateWiki(id, config) {
        const sets = [];
        const values = [];
        if (config.name !== void 0) {
          sets.push("name = ?");
          values.push(config.name);
        }
        if (config.default_model !== void 0) {
          sets.push("default_model = ?");
          values.push(config.default_model);
        }
        if (sets.length === 0) {
          return this.getWiki(id);
        }
        values.push(id);
        const stmt = this.db.prepare(
          `UPDATE wikis SET ${sets.join(", ")} WHERE id = ? RETURNING *`
        );
        return stmt.get(...values);
      }
      // Deletes the wiki; jobs and audit rows cascade via foreign keys.
      deleteWiki(id) {
        this.db.prepare("DELETE FROM wikis WHERE id = ?").run(id);
      }
      // ── Jobs ─────────────────────────────────────────────────────────────────
      // Enqueues a job (payload stored as JSON text) and returns the new row.
      createJob(wikiId, type, payload) {
        const stmt = this.db.prepare(`
          INSERT INTO queue_jobs (wiki_id, type, payload)
          VALUES (?, ?, ?)
          RETURNING *
        `);
        return stmt.get(wikiId, type, JSON.stringify(payload));
      }
      // Returns the job row, or undefined if the id is unknown.
      getJob(jobId) {
        return this.db.prepare("SELECT * FROM queue_jobs WHERE id = ?").get(jobId);
      }
      // Lists a wiki's jobs newest-first, with optional status filter and
      // limit. Filters are bound as parameters; no user text enters the SQL.
      listJobs(wikiId, opts) {
        let sql = "SELECT * FROM queue_jobs WHERE wiki_id = ?";
        const params = [wikiId];
        if (opts?.status) {
          sql += " AND status = ?";
          params.push(opts.status);
        }
        sql += " ORDER BY id DESC";
        if (opts?.limit) {
          sql += " LIMIT ?";
          params.push(opts.limit);
        }
        return this.db.prepare(sql).all(...params);
      }
      /**
       * Atomically claim the next pending job for a wiki.
       * Sets status to 'running' and records started_at.
       */
      claimNextJob(wikiId) {
        // Single UPDATE ... WHERE id = (subquery) — atomic under SQLite's
        // serialized writes, so two pollers cannot claim the same job.
        const stmt = this.db.prepare(`
          UPDATE queue_jobs
          SET status = 'running', started_at = datetime('now')
          WHERE id = (
            SELECT id FROM queue_jobs
            WHERE wiki_id = ? AND status = 'pending'
            ORDER BY id ASC
            LIMIT 1
          )
          RETURNING *
        `);
        return stmt.get(wikiId);
      }
      // Marks a job completed and stores its result as JSON text.
      completeJob(jobId, result) {
        this.db.prepare(`
          UPDATE queue_jobs
          SET status = 'completed', completed_at = datetime('now'), result = ?
          WHERE id = ?
        `).run(JSON.stringify(result), jobId);
      }
      // Marks a job failed, storing {success: false, error} as its result.
      failJob(jobId, error) {
        this.db.prepare(`
          UPDATE queue_jobs
          SET status = 'failed', completed_at = datetime('now'), result = ?
          WHERE id = ?
        `).run(JSON.stringify({ success: false, error }), jobId);
      }
      // Counts jobs that are not yet finished (pending or running).
      getPendingJobCount(wikiId) {
        const row = this.db.prepare(
          "SELECT COUNT(*) as count FROM queue_jobs WHERE wiki_id = ? AND status IN ('pending', 'running')"
        ).get(wikiId);
        return row.count;
      }
      /**
       * On daemon startup: reset jobs that were 'running' when the process died.
       * They get re-queued as 'pending'.
       * Returns the number of jobs that were reset.
       */
      resetStaleJobs() {
        const info = this.db.prepare(`
          UPDATE queue_jobs
          SET status = 'pending', retry_count = retry_count + 1
          WHERE status = 'running'
        `).run();
        return info.changes;
      }
      /** Get wiki IDs that have pending jobs (for startup drain). */
      wikisWithPendingJobs() {
        const rows = this.db.prepare(
          "SELECT DISTINCT wiki_id FROM queue_jobs WHERE status = 'pending'"
        ).all();
        return rows.map((r) => r.wiki_id);
      }
      // ── Audit ────────────────────────────────────────────────────────────────
      // Appends an audit entry; detail is optional and stored as NULL if absent.
      logAudit(wikiId, action, detail) {
        this.db.prepare(
          "INSERT INTO audit_log (wiki_id, action, detail) VALUES (?, ?, ?)"
        ).run(wikiId, action, detail ?? null);
      }
      // Returns the most recent audit entries for a wiki (default 50).
      getAuditLog(wikiId, limit = 50) {
        return this.db.prepare(
          "SELECT * FROM audit_log WHERE wiki_id = ? ORDER BY id DESC LIMIT ?"
        ).all(wikiId, limit);
      }
    };
  }
});
211
+
212
+ // src/lib/errors.ts
213
var MemexError, WikiNotFoundError, WikiExistsError, JobNotFoundError, CapabilityError, NoCredentialsError, ForbiddenError, ValidationError, DaemonNotRunningError;
var init_errors = __esm({
  "src/lib/errors.ts"() {
    "use strict";
    // Root of the daemon's error hierarchy. Every error carries a stable,
    // machine-readable `code` plus the HTTP status the API layer returns.
    MemexError = class extends Error {
      code;
      statusCode;
      constructor(message, code, statusCode = 500) {
        super(message);
        this.name = "MemexError";
        this.code = code;
        this.statusCode = statusCode;
      }
    };
    // 404 — the requested wiki id does not exist.
    WikiNotFoundError = class extends MemexError {
      constructor(wikiId) {
        super(`Wiki '${wikiId}' not found`, "WIKI_NOT_FOUND", 404);
      }
    };
    // 409 — attempted to create a wiki whose id is already taken.
    WikiExistsError = class extends MemexError {
      constructor(wikiId) {
        super(`Wiki '${wikiId}' already exists`, "WIKI_EXISTS", 409);
      }
    };
    // 404 — the requested queue job does not exist.
    JobNotFoundError = class extends MemexError {
      constructor(jobId) {
        super(`Job #${jobId} not found`, "JOB_NOT_FOUND", 404);
      }
    };
    // 500 — daemon lacks CAP_SYS_ADMIN, so mount namespaces cannot be created.
    CapabilityError = class extends MemexError {
      constructor() {
        const guidance = "CAP_SYS_ADMIN is required for mount namespace isolation.\nOptions:\n 1. Run the daemon as root: sudo memex serve\n 2. Use systemd with AmbientCapabilities=CAP_SYS_ADMIN\n 3. Grant capability: sudo setcap cap_sys_admin+ep $(which node)";
        super(guidance, "NO_CAP_SYS_ADMIN", 500);
      }
    };
    // 400 — no API key or OAuth credentials could be resolved for the wiki.
    NoCredentialsError = class extends MemexError {
      constructor(wikiId) {
        const hint = `No credentials configured for wiki '${wikiId}'.
Set credentials with:
memex login ${wikiId} (OAuth)
memex config ${wikiId} --set-key (API key)`;
        super(hint, "NO_CREDENTIALS", 400);
      }
    };
    // 403 — caller's uid is not allowed to touch this wiki.
    ForbiddenError = class extends MemexError {
      constructor(wikiId) {
        super(`Access denied to wiki '${wikiId}'`, "FORBIDDEN", 403);
      }
    };
    // 400 — request payload failed validation.
    ValidationError = class extends MemexError {
      constructor(message) {
        super(message, "VALIDATION_ERROR", 400);
      }
    };
    // 502 — CLI could not reach the daemon's unix socket.
    DaemonNotRunningError = class extends MemexError {
      constructor() {
        const hint = "Cannot connect to memex daemon.\nStart it with: memex serve";
        super(hint, "DAEMON_NOT_RUNNING", 502);
      }
    };
  }
});
284
+
285
+ // src/lib/constants.ts
286
var DATA_DIR, RUN_DIR, SOCKET_PATH, DB_PATH, WIKIS_DIR, NS_DIR, WORKSPACE_MOUNT, JOB_LIMITS, SIGTERM_GRACE_MS, AUTO_LINT_INTERVAL, JOB_POLL_INTERVAL_MS, DEFAULT_MODEL, BASE_ALLOWED_TOOLS, ALLOWED_TOOLS_WHITELIST, WIKI_ID_PATTERN;
var init_constants = __esm({
  "src/lib/constants.ts"() {
    "use strict";
    // One minute in milliseconds, used to spell out the job timeouts below.
    const MINUTE_MS = 60000;
    // Filesystem layout; each path honors a MEMEX_* environment override.
    DATA_DIR = process.env["MEMEX_DATA_DIR"] ?? "/var/lib/memex";
    RUN_DIR = process.env["MEMEX_RUN_DIR"] ?? "/run/memex";
    SOCKET_PATH = process.env["MEMEX_SOCKET_PATH"] ?? `${RUN_DIR}/memex.sock`;
    DB_PATH = `${DATA_DIR}/memex.db`;
    WIKIS_DIR = `${DATA_DIR}/wikis`;
    NS_DIR = `${RUN_DIR}/ns`;
    WORKSPACE_MOUNT = "/workspace";
    // Per-job-type wall-clock and agent-turn budgets.
    JOB_LIMITS = {
      ingest: { timeout_ms: 5 * MINUTE_MS, max_turns: 25 },
      query: { timeout_ms: 2 * MINUTE_MS, max_turns: 15 },
      lint: { timeout_ms: 10 * MINUTE_MS, max_turns: 30 }
    };
    // Grace period between SIGTERM and SIGKILL for job processes.
    SIGTERM_GRACE_MS = 5000;
    // Run an automatic lint after this many ingests.
    AUTO_LINT_INTERVAL = 10;
    JOB_POLL_INTERVAL_MS = 500;
    DEFAULT_MODEL = "sonnet";
    // Tools every wiki agent gets by default.
    BASE_ALLOWED_TOOLS = ["Read", "Write", "Edit", "Glob", "Grep"];
    // Full set of tools an operator may enable per wiki.
    ALLOWED_TOOLS_WHITELIST = new Set([
      "Read",
      "Write",
      "Edit",
      "Glob",
      "Grep",
      "NotebookEdit",
      "WebFetch",
      "WebSearch"
    ]);
    // Wiki ids: lowercase alphanumerics and hyphens, 3-64 chars total,
    // with no leading or trailing hyphen.
    WIKI_ID_PATTERN = /^[a-z0-9][a-z0-9-]{1,62}[a-z0-9]$/;
  }
});
320
+
321
+ // src/daemon/namespace.ts
322
+ import { execFileSync } from "node:child_process";
323
+ import { existsSync, mkdirSync } from "node:fs";
324
+ import { join } from "node:path";
325
// Quote a string for safe interpolation into a POSIX `sh -c` command line:
// wrap it in single quotes, splicing each embedded single quote out through
// the standard '\'' sequence (close quote, escaped quote, reopen quote).
function shellEscape(s) {
  const QUOTE = "'";
  const escaped = s.split(QUOTE).join("'\\''");
  return QUOTE + escaped + QUOTE;
}
328
var NamespaceManager;
var init_namespace = __esm({
  "src/daemon/namespace.ts"() {
    "use strict";
    init_errors();
    init_constants();
    /**
     * Runs job commands inside a private mount namespace so each job sees
     * only its own wiki bind-mounted at /workspace. Requires CAP_SYS_ADMIN
     * (checked via a probe `unshare` call).
     */
    NamespaceManager = class {
      constructor(wikisDir = WIKIS_DIR) {
        this.wikisDir = wikisDir;
      }
      // Host directory that contains one subdirectory per wiki.
      wikisDir;
      /**
       * Verify that we have CAP_SYS_ADMIN by attempting a trivial namespace operation.
       * Throws CapabilityError (with remediation steps) on failure.
       */
      checkCapabilities() {
        try {
          // `unshare -m -- true` succeeds only when mount namespaces can be created.
          execFileSync("unshare", ["-m", "--", "true"], { stdio: "pipe" });
        } catch {
          throw new CapabilityError();
        }
      }
      /**
       * Ensure the /workspace mount target exists on the host.
       */
      ensureDirectories() {
        mkdirSync(WORKSPACE_MOUNT, { recursive: true });
      }
      /**
       * Verify a wiki's directory exists.
       * Throws a plain Error naming the missing path otherwise.
       */
      validateWiki(wikiId) {
        const wikiDir = join(this.wikisDir, wikiId);
        if (!existsSync(wikiDir)) {
          throw new Error(`Wiki directory does not exist: ${wikiDir}`);
        }
      }
      /**
       * Build the command + args to run a command inside a fresh mount namespace
       * with the wiki's directory bind-mounted to /workspace.
       *
       * Returns [command, ...args] to pass to spawn().
       * The caller appends their actual command (e.g. claude -p ...) to innerArgs.
       */
      wrapCommand(wikiId, innerCommand) {
        const wikiDir = join(this.wikisDir, wikiId);
        // Every interpolated value goes through shellEscape, so arbitrary
        // wiki paths / arguments cannot break out of the sh -c script.
        const script = [
          `mount --bind ${shellEscape(wikiDir)} ${shellEscape(WORKSPACE_MOUNT)}`,
          // Remount to strip setuid/device semantics inside the sandbox.
          `mount -o remount,nosuid,nodev ${shellEscape(WORKSPACE_MOUNT)}`,
          `cd ${shellEscape(WORKSPACE_MOUNT)}`,
          // exec replaces the shell so signals reach the job process directly.
          `exec ${innerCommand.map(shellEscape).join(" ")}`
        ].join(" && ");
        return {
          command: "unshare",
          // --propagation private keeps the bind mount invisible to the host.
          args: ["-m", "--propagation", "private", "--", "sh", "-c", script]
        };
      }
    };
  }
});
387
+
388
+ // src/daemon/scaffold.ts
389
+ import { mkdirSync as mkdirSync2, writeFileSync, rmSync, readFileSync, existsSync as existsSync2 } from "node:fs";
390
+ import { join as join2, basename } from "node:path";
391
// Make a filename safe for storage on the host filesystem: every character
// outside [A-Za-z0-9._-] becomes "_". (\w is exactly [A-Za-z0-9_], so the
// class below matches the same set as spelling the letters and digits out.)
function sanitizeFilename(name) {
  return name.replace(/[^\w.-]/g, "_");
}
394
var DEFAULT_CLAUDE_MD, DEFAULT_SCHEMA_MD, DEFAULT_INDEX_MD, DEFAULT_LOG_MD, WikiScaffold;
var init_scaffold = __esm({
  "src/daemon/scaffold.ts"() {
    "use strict";
    init_constants();
    // Seed content written into every freshly-created wiki.
    DEFAULT_CLAUDE_MD = `# Wiki Agent \u2014 Conventions

This file is auto-discovered by Claude Code and extends the base system prompt.
Use it to define wiki-specific conventions, domain vocabulary, and wiki structure.

The base system prompt handles core wiki behavior (index, schema, connections, log).
This file is for YOUR customizations on top of that.

## Domain

_(Describe what this knowledge base is about)_

## Conventions

_(Add wiki-specific filing rules, vocabulary, categories, and preferences here)_

## Things to ignore

_(Topics, patterns, or noise that should be skipped during ingestion)_
`;
    DEFAULT_SCHEMA_MD = `# Schema

This file documents the conventions for this knowledge base.
It will be created and maintained by the wiki agent as content is ingested.

_(This is a new knowledge base. The schema will be populated on first ingest.)_
`;
    DEFAULT_INDEX_MD = `# Index

One-line summary of every wiki page, organized by category.
A reader should understand the shape of the knowledge base from this file alone.

_(No pages yet. The index will be populated as content is ingested.)_
`;
    DEFAULT_LOG_MD = `# Activity Log

Chronological record of knowledge base activity.

---
`;
    /**
     * Creates, destroys, and reads/writes the on-disk layout of a wiki:
     * <wikisDir>/<wikiId>/{.claude, .tools, wiki/raw, ...seed files}.
     */
    WikiScaffold = class {
      constructor(wikisDir = WIKIS_DIR) {
        this.wikisDir = wikisDir;
      }
      wikisDir;
      /**
       * Create the full directory structure and default files for a new wiki.
       */
      create(wikiId) {
        const base = this.wikiDir(wikiId);
        // mode 448 === 0o700: credentials dir readable by the daemon user only.
        mkdirSync2(join2(base, ".claude"), { recursive: true, mode: 448 });
        mkdirSync2(join2(base, ".tools"), { recursive: true });
        mkdirSync2(join2(base, "wiki", "raw"), { recursive: true });
        writeFileSync(join2(base, ".claude.md"), DEFAULT_CLAUDE_MD);
        writeFileSync(join2(base, "wiki", "_schema.md"), DEFAULT_SCHEMA_MD);
        writeFileSync(join2(base, "wiki", "_index.md"), DEFAULT_INDEX_MD);
        writeFileSync(join2(base, "wiki", "_log.md"), DEFAULT_LOG_MD);
      }
      /**
       * Remove a wiki's directory tree.
       * With keepData=true this is a no-op (metadata-only destroy).
       */
      destroy(wikiId, keepData = false) {
        if (keepData) return;
        const dir = this.wikiDir(wikiId);
        if (existsSync2(dir)) {
          rmSync(dir, { recursive: true, force: true });
        }
      }
      /**
       * Get the host filesystem path for a wiki's root directory.
       */
      wikiDir(wikiId) {
        return join2(this.wikisDir, wikiId);
      }
      /**
       * Write a file into the wiki's wiki/raw/ directory.
       * Prefixes with a timestamp to avoid collisions.
       * Returns the stored filename (relative to wiki/raw/).
       */
      writeRawFile(wikiId, filename, content) {
        const rawDir = join2(this.wikiDir(wikiId), "wiki", "raw");
        mkdirSync2(rawDir, { recursive: true });
        // Compact UTC timestamp (e.g. 20240101T120000) — strips -,: and the
        // fractional seconds + Z from the ISO string.
        const ts = (/* @__PURE__ */ new Date()).toISOString().replace(/[-:]/g, "").replace(/\.\d+Z$/, "");
        // basename() guards against path traversal in the caller-supplied name.
        const stored = `${ts}-${sanitizeFilename(basename(filename))}`;
        writeFileSync(join2(rawDir, stored), content);
        return stored;
      }
      /**
       * Read the wiki's .claude.md content.
       * Returns "" when the file is missing.
       */
      readClaudeMd(wikiId) {
        const p = join2(this.wikiDir(wikiId), ".claude.md");
        if (!existsSync2(p)) return "";
        return readFileSync(p, "utf-8");
      }
      /**
       * Write the allowed-tools.txt file for a wiki.
       * An empty list writes an empty file (meaning: base tools only).
       */
      writeAllowedTools(wikiId, tools) {
        const toolsDir = join2(this.wikiDir(wikiId), ".tools");
        mkdirSync2(toolsDir, { recursive: true });
        const content = tools.length > 0 ? tools.join("\n") + "\n" : "";
        writeFileSync(join2(toolsDir, "allowed-tools.txt"), content);
      }
      /**
       * Read the current allowed-tools.txt for a wiki.
       * Blank lines and #-comments are skipped; missing file yields [].
       */
      readAllowedTools(wikiId) {
        const p = join2(this.wikiDir(wikiId), ".tools", "allowed-tools.txt");
        if (!existsSync2(p)) return [];
        return readFileSync(p, "utf-8").split("\n").map((l) => l.trim()).filter((l) => l && !l.startsWith("#"));
      }
      /**
       * Check if a wiki directory exists on disk.
       */
      exists(wikiId) {
        return existsSync2(this.wikiDir(wikiId));
      }
    };
  }
});
520
+
521
+ // src/daemon/auth.ts
522
+ import { existsSync as existsSync3, readFileSync as readFileSync2, writeFileSync as writeFileSync2, mkdirSync as mkdirSync3 } from "node:fs";
523
+ import { join as join3 } from "node:path";
524
var AuthManager;
var init_auth = __esm({
  "src/daemon/auth.ts"() {
    "use strict";
    init_constants();
    init_errors();
    /**
     * Resolves and stores Anthropic credentials per wiki. Each wiki keeps
     * its own CLAUDE_CONFIG_DIR under <wiki>/.claude so jobs never share
     * authentication state.
     */
    AuthManager = class {
      constructor(wikisDir = WIKIS_DIR, globalApiKey) {
        this.wikisDir = wikisDir;
        // Optional daemon-wide fallback key (e.g. ANTHROPIC_API_KEY from env).
        this.globalApiKey = globalApiKey;
      }
      wikisDir;
      globalApiKey;
      /**
       * Resolve credentials for a wiki, returning environment variables
       * to set on the claude child process.
       *
       * Priority:
       *   1. Per-wiki API key file (.claude/api-key)
       *   2. Per-wiki OAuth credentials (.claude/.credentials.json exists)
       *   3. Global ANTHROPIC_API_KEY from daemon environment
       *
       * Throws NoCredentialsError when none of the three is available.
       */
      resolveCredentials(wikiId) {
        const claudeDir = this.configDir(wikiId);
        const apiKeyPath = join3(claudeDir, "api-key");
        if (existsSync3(apiKeyPath)) {
          const key = readFileSync2(apiKeyPath, "utf-8").trim();
          // An empty key file is ignored and resolution falls through.
          if (key) {
            return {
              ANTHROPIC_API_KEY: key,
              CLAUDE_CONFIG_DIR: claudeDir
            };
          }
        }
        const credsPath = join3(claudeDir, ".credentials.json");
        if (existsSync3(credsPath)) {
          // OAuth: the claude CLI reads .credentials.json from CLAUDE_CONFIG_DIR,
          // so only the dir needs to be exported.
          return {
            CLAUDE_CONFIG_DIR: claudeDir
          };
        }
        if (this.globalApiKey) {
          return {
            ANTHROPIC_API_KEY: this.globalApiKey,
            CLAUDE_CONFIG_DIR: claudeDir
          };
        }
        throw new NoCredentialsError(wikiId);
      }
      /**
       * Store an API key for a wiki.
       * Dir mode 448 === 0o700, file mode 384 === 0o600: secrets are
       * readable by the daemon user only.
       */
      setApiKey(wikiId, key) {
        const claudeDir = this.configDir(wikiId);
        mkdirSync3(claudeDir, { recursive: true, mode: 448 });
        const apiKeyPath = join3(claudeDir, "api-key");
        writeFileSync2(apiKeyPath, key.trim(), { mode: 384 });
      }
      /**
       * Get the CLAUDE_CONFIG_DIR path for a wiki.
       */
      configDir(wikiId) {
        return join3(this.wikisDir, wikiId, ".claude");
      }
      /**
       * Store OAuth credentials for a wiki by copying .credentials.json content.
       * Same 0o700 dir / 0o600 file permissions as setApiKey.
       */
      setCredentials(wikiId, credentialsJson) {
        const claudeDir = this.configDir(wikiId);
        mkdirSync3(claudeDir, { recursive: true, mode: 448 });
        const credsPath = join3(claudeDir, ".credentials.json");
        writeFileSync2(credsPath, credentialsJson, { mode: 384 });
      }
      /**
       * Check if a wiki has valid credentials (any method).
       * NOTE(review): only checks file existence — an empty api-key file
       * counts here but is rejected by resolveCredentials; confirm intended.
       */
      hasCredentials(wikiId) {
        const claudeDir = this.configDir(wikiId);
        const apiKeyPath = join3(claudeDir, "api-key");
        const credsPath = join3(claudeDir, ".credentials.json");
        return existsSync3(apiKeyPath) || existsSync3(credsPath) || !!this.globalApiKey;
      }
    };
  }
});
608
+
609
+ // src/lib/prompts/ingest.ts
610
// Render the ingest job prompt: a bullet list of the newly-uploaded raw
// files followed by the fixed integration instructions. `payload.files`
// holds filenames relative to wiki/raw/.
function buildIngestPrompt(payload) {
  const bullets = [];
  for (const f of payload.files) {
    bullets.push(`- raw/${f}`);
  }
  const fileList = bullets.join("\n");
  return `New source documents have been added to the knowledge base.

Source files to process:
${fileList}

Integrate these into the wiki:

1. Read each source file listed above.
2. Read _schema.md for current conventions (create it if it doesn't exist \u2014 this is a new knowledge base).
3. Read _index.md to see what's already filed.
4. Search existing wiki pages for related content (grep for names, topics, themes).
5. For each source document:
a. Extract key facts, concepts, entities, and relationships.
b. Determine which existing pages should be updated and which new pages should be created.
c. Create or update wiki pages with the extracted information.
d. Maintain bidirectional links \u2014 if you link A\u2192B, update B\u2192A too.
e. Update _index.md with current summaries for all affected pages.
6. Append a dated ingest entry to _log.md summarizing what was ingested and what pages were affected.

Rules:
- NEVER modify files in raw/ \u2014 they are immutable sources
- Prefer updating existing pages over creating duplicates
- If a source contradicts existing wiki content, UPDATE the existing page \u2014 resolve or flag the contradiction
- Keep pages focused \u2014 one topic per page
- Use descriptive kebab-case paths: themes/pricing-feedback.md, customers/acme-corp.md
- Every page must have a ## Related section with labeled, bidirectional links
- The "reason" for each change should articulate what it ADDS to the knowledge base \u2014 not mechanics

After completing all file operations, output ONLY valid JSON (no markdown fences, no explanation):
{
"summary": "one-line human summary of what you filed",
"operations": [
{"action": "create|update", "path": "relative/path.md", "reason": "what this adds to the knowledge base"}
]
}`;
}
648
// Module initializer stub for src/lib/prompts/ingest.ts — the module keeps
// no lazy state; buildIngestPrompt is defined eagerly above.
var init_ingest = __esm({
  "src/lib/prompts/ingest.ts"() {
    "use strict";
  }
});
653
+
654
+ // src/lib/prompts/query.ts
655
// Render the query job prompt. The base prompt carries the grounding rules;
// when `payload.history` is non-empty the prior conversation turns are
// appended and the agent is told to answer the final message.
function buildQueryPrompt(payload) {
  const base = `You are the voice of a living knowledge base. Answer the user's question by searching the wiki files in this directory.

Question: ${payload.question}

RULES:
1. ONLY reference information that exists in the wiki files. Never use training data for facts.
2. Always cite the specific file path when referencing information (e.g. \`themes/pricing.md\`).
3. If the wiki has no relevant information, say so clearly: "The knowledge base doesn't have information on that topic yet."
4. If you notice contradictions between pages, mention them.
5. If coverage is thin on a topic, note it: "Coverage on X is thin \u2014 only one source."
6. If your answer synthesizes multiple pages into new insight, mention it could be saved to the wiki as a new page.
7. Keep responses concise and specific. Cite evidence, don't summarize generically.

To answer:
- Read _index.md to understand what's in the knowledge base
- Search for relevant files using grep and glob
- Read the files that seem relevant
- Synthesize an answer grounded in what you found

Provide your answer as plain markdown. Be concise but thorough.`;
  const history = payload.history ?? [];
  if (history.length === 0) {
    return base;
  }
  // Each prior turn is separated by a blank line, matching the original
  // turn-by-turn append.
  const transcript = history.map((turn) => `${turn}\n\n`).join("");
  return `${base}\n\nConversation so far:\n${transcript}Answer the last message.`;
}
687
// Module initializer stub for src/lib/prompts/query.ts — no lazy state;
// buildQueryPrompt is defined eagerly above.
var init_query = __esm({
  "src/lib/prompts/query.ts"() {
    "use strict";
  }
});
692
+
693
+ // src/lib/prompts/lint.ts
694
// Render the lint (health-check) job prompt, stamped with today's UTC date.
// For an ISO-8601 string, slice(0, 10) yields the same "YYYY-MM-DD" as
// splitting on "T".
function buildLintPrompt() {
  const today = new Date().toISOString().slice(0, 10);
  return `Perform a thorough health check on this knowledge base. Today is ${today}.

Read all wiki files (glob **/*.md, excluding raw/) and check for:

1. **Contradictions** \u2014 pages making conflicting claims about the same entity or fact
2. **Stale claims** \u2014 date-stamped statements that may no longer be current
3. **Orphan pages** \u2014 pages with no inbound links from other pages' ## Related sections
4. **Missing pages** \u2014 concepts mentioned across multiple pages that deserve their own page
5. **Duplicate pages** \u2014 topics covered by two pages that should be merged
6. **Missing cross-references** \u2014 pages discussing entities with their own pages but not linking
7. **Index accuracy** \u2014 _index.md entries that don't match file content, or missing entries
8. **Schema drift** \u2014 actual patterns that don't match _schema.md conventions

For issues you're CONFIDENT about, fix them directly:
- Add missing cross-references and ## Related links (bidirectional)
- Correct _index.md entries
- Add ## Related sections to pages that lack them
- Fix schema drift in _schema.md

For issues requiring human judgment (contradictions, merges, stale facts), note them in your report but do NOT change the files.

Do NOT rewrite pages for style \u2014 only fix semantic issues.

After making fixes, append a lint entry to _log.md.

Output a markdown health check report:

# Knowledge Base Health Check \u2014 ${today}

## Contradictions
(numbered list with specific pages and quotes)

## Stale Claims
(numbered list with page, claim, and age)

## Orphan Pages
(list with page path and suggested connections)

## Missing Cross-References
(specific A\u2192B links that should exist)

## Auto-Fixes Applied
(list of changes you made directly)

## Flagged for Review
(issues needing human judgment)

## Statistics
- Total pages: N
- Pages with ## Related: N
- Orphan pages: N`;
}
748
// Module initializer stub for src/lib/prompts/lint.ts — no lazy state;
// buildLintPrompt is defined eagerly above.
var init_lint = __esm({
  "src/lib/prompts/lint.ts"() {
    "use strict";
  }
});
753
+
754
+ // src/lib/prompts/wiki.ts
755
/**
 * Returns the shared system prompt appended to every `claude` invocation
 * (see buildClaudeArgs in src/daemon/runner.ts). It frames the agent as a
 * wiki curator and spells out the sandbox rules, the reserved files
 * (_schema.md, _index.md, _log.md, raw/), and the required page format.
 * The text is a single template literal; do not edit casually — the agent's
 * behavior depends on this exact wording.
 *
 * @returns {string} the full system prompt
 */
function getWikiSystemPrompt() {
  return `You are the curator of a living knowledge base. Your job is not just to file information \u2014 it's to build and maintain a persistent, compounding artifact where every conversation makes the whole richer.

Think of yourself as the curator of a wiki, not a filing clerk. When you process a source, you're not just extracting facts \u2014 you're integrating new understanding into an evolving body of knowledge. The connections between documents are as valuable as the documents themselves.

On every call, think about:
1. What does this source add to what we already know?
2. Does it confirm, extend, or contradict existing understanding?
3. Which existing pages need to know about this? Which pages does this need to know about?
4. Are there patterns emerging that deserve their own page?
5. Should any conventions or filing rules be updated based on what we've learned?

IMPORTANT: You are operating in a sandboxed wiki directory. Only read and write files within this directory using relative paths. Do not use absolute paths. Do not attempt to access files outside this directory.

## Directory structure

- \`_schema.md\` \u2014 Filing conventions, categories, domain vocabulary, filing heuristics. YOUR institutional memory.
- \`_index.md\` \u2014 One-line summary of every wiki page, organized by category. The table of contents.
- \`_log.md\` \u2014 Chronological activity log. You maintain this.
- \`raw/\` \u2014 Immutable source documents. NEVER modify or delete these files.
- Everything else \u2014 Wiki pages organized by entity and topic.

## Your responsibilities

### 1. Schema (_schema.md)
You own the schema. Update it when you establish or refine conventions \u2014 new categories, naming patterns, domain vocabulary, filing heuristics, things to ignore.

On the FIRST call for a new knowledge base (no _schema.md exists), CREATE it with the conventions you establish. Suggested starting categories (adapt to what fits):
- customers/ \u2014 profiles and feedback per person or company
- themes/ \u2014 cross-cutting topics
- products/ \u2014 organized by product area
- research/ \u2014 deep dives and analyses
- reference/ \u2014 factual reference material

### 2. Index (_index.md)
ALWAYS keep _index.md current. Every wiki page gets a one-line summary: what it contains, how many connections, what matters most. Organize by category. A reader should understand the shape of the entire knowledge base by reading only the index.

Format:
\`\`\`
## category
- path/to/file.md \u2014 One-line semantic summary [N connections]
\`\`\`

### 3. Connections (## Related)
Every wiki page you create or update MUST have a \`## Related\` section at the bottom with labeled, bidirectional links:

\`\`\`
## Related
- **Topic:** [themes/pricing.md](themes/pricing.md) \u2014 related pricing analysis
- **Entity:** [customers/acme.md](customers/acme.md) \u2014 mentioned in their feedback
- **Contradicts:** [research/market-size.md](research/market-size.md) \u2014 conflicting data point
\`\`\`

Connection labels should be domain-appropriate: Topic, Entity, Source, Contradicts, See also, etc.

CRITICAL: If you add a link from A to B, you MUST also update B to link back to A. Both sides of every connection. Always.

### 4. Activity log (_log.md)
After completing any operation, append an entry to _log.md:

\`\`\`
## [YYYY-MM-DD HH:MM] type | source
Summary of what happened
- Detail 1
- Detail 2
\`\`\`

Types: ingest, lint, query. If _log.md doesn't exist, create it with a header.

### 5. Contradictions
When new information conflicts with an existing page, UPDATE the existing page \u2014 resolve the contradiction or flag it clearly. Never file contradictory claims in separate pages without acknowledging the conflict.

### 6. Look up before writing
ALWAYS read existing files before creating new ones. Search with grep and glob. Prefer updating existing pages over creating duplicates. The knowledge base should grow deeper, not just wider.

### 7. Source references
Each wiki page should reference its raw sources: \`*Source: [raw/filename.md](raw/filename.md)*\`

### 8. Ongoing tuning
As you process more sources, notice patterns:
- Are certain categories getting too broad? Split them.
- Are there clusters of related pages that need a synthesis page?
- Is the schema still serving the content well? Evolve it.
- Are naming conventions consistent? Fix drift.

Update _schema.md to reflect what you learn.

## Page format
- Organize by entity and topic, not by date
- kebab-case for all file paths
- Self-contained and readable with no other context
- Preserve key quotes verbatim with attribution and date
- Include source references`;
}
849
// Bundler-generated lazy initializer for src/lib/prompts/wiki.ts.
// No top-level state; registering it makes getWikiSystemPrompt usable
// after init_wiki() has been called.
var init_wiki = __esm({
  "src/lib/prompts/wiki.ts"() {
    "use strict";
  }
});
854
+
855
+ // src/daemon/runner.ts
856
+ import { spawn } from "node:child_process";
857
+ import { existsSync as existsSync4, readFileSync as readFileSync3 } from "node:fs";
858
+ import { join as join4 } from "node:path";
859
/**
 * Builds a minimal environment for spawned `claude` processes by copying
 * only a fixed allowlist of harmless variables from the given env.
 * Variables that are unset or empty are omitted (truthiness check, as in
 * the original), so secrets and everything else never leak into the child.
 *
 * @param {Record<string, string|undefined>} env - source environment (e.g. process.env)
 * @returns {Record<string, string>} new object containing only the allowed, non-empty vars
 */
function filterEnv(env) {
  const ALLOWED = ["PATH", "LANG", "LC_ALL", "TERM", "NODE_ENV"];
  return Object.fromEntries(
    ALLOWED.filter((name) => env[name]).map((name) => [name, env[name]])
  );
}
867
var ClaudeRunner;
var init_runner = __esm({
  "src/daemon/runner.ts"() {
    "use strict";
    init_constants();
    init_ingest();
    init_query();
    init_lint();
    init_wiki();
    /**
     * Spawns and supervises one `claude` CLI process per job, inside the
     * per-wiki namespace sandbox. Tracks active children so the daemon can
     * kill them on shutdown, enforces per-job-type timeouts, and normalizes
     * the CLI's JSON envelope output into a { success, output, exit_code,
     * duration_ms } result.
     */
    ClaudeRunner = class {
      constructor(namespace, auth, db, wikisDir) {
        this.namespace = namespace;
        this.auth = auth;
        this.db = db;
        this.wikisDir = wikisDir;
      }
      namespace;
      auth;
      db;
      wikisDir;
      // jobId -> ChildProcess; consulted by kill()/killAll() during shutdown.
      active = /* @__PURE__ */ new Map();
      /**
       * Run a single job to completion. Never rejects: failures are reported
       * through the resolved result's `success`/`output` fields.
       */
      async run(job) {
        const startTime = Date.now();
        const wikiId = job.wiki_id;
        const wiki = this.db.getWiki(wikiId);
        const model = wiki?.default_model ?? DEFAULT_MODEL;
        const limits = JOB_LIMITS[job.type];
        const prompt = this.buildPrompt(job);
        const credEnv = this.auth.resolveCredentials(wikiId);
        const tools = this.resolveTools(wikiId);
        const claudeArgs = this.buildClaudeArgs(prompt, model, limits.max_turns, tools, wikiId);
        const wrapped = this.namespace.wrapCommand(wikiId, claudeArgs);
        const env = {
          ...filterEnv(process.env),
          ...credEnv,
          HOME: WORKSPACE_MOUNT
        };
        if (credEnv["CLAUDE_CONFIG_DIR"]) {
          // Remap the config dir to its location inside the sandbox mount.
          env["CLAUDE_CONFIG_DIR"] = `${WORKSPACE_MOUNT}/.claude`;
        }
        return new Promise((resolve) => {
          const child = spawn(wrapped.command, wrapped.args, {
            env,
            stdio: ["pipe", "pipe", "pipe"]
          });
          child.stdin.end();
          this.active.set(job.id, child);
          const stdout = [];
          const stderr = [];
          child.stdout.on("data", (chunk) => stdout.push(chunk));
          child.stderr.on("data", (chunk) => stderr.push(chunk));
          let graceTimer = null;
          const timeout = setTimeout(() => {
            console.warn(`[runner] Job #${job.id} timed out after ${limits.timeout_ms}ms, sending SIGTERM`);
            child.kill("SIGTERM");
            graceTimer = setTimeout(() => {
              // BUGFIX: the previous `if (!child.killed)` guard never fired,
              // because Node sets `subprocess.killed` as soon as kill()
              // successfully SENDS a signal — it does not mean the process
              // exited. Check for actual exit instead: both exitCode and
              // signalCode stay null while the process is still running.
              if (child.exitCode === null && child.signalCode === null) {
                console.warn(`[runner] Job #${job.id} did not exit after SIGTERM grace period, sending SIGKILL`);
                child.kill("SIGKILL");
              }
            }, SIGTERM_GRACE_MS);
          }, limits.timeout_ms);
          child.on("close", (code) => {
            clearTimeout(timeout);
            // Also cancel a pending SIGKILL escalation once the child is gone.
            if (graceTimer !== null) clearTimeout(graceTimer);
            this.active.delete(job.id);
            const duration_ms = Date.now() - startTime;
            const rawOut = Buffer.concat(stdout).toString("utf-8");
            const rawErr = Buffer.concat(stderr).toString("utf-8");
            let output = rawOut;
            try {
              // --output-format json wraps the answer in an envelope whose
              // `result` field holds the agent's text; unwrap it if present.
              const envelope = JSON.parse(rawOut);
              const text = envelope.result ?? "";
              try {
                // If the text itself is JSON, re-serialize it compactly.
                output = JSON.stringify(JSON.parse(text));
              } catch {
                output = text.trim() || rawOut;
              }
            } catch {
              // Not a JSON envelope; fall through with the raw stdout.
            }
            if (rawErr) {
              console.error(`[runner] Job #${job.id} stderr: ${rawErr.slice(0, 500)}`);
            }
            const finalOutput = code === 0 ? output : [output, rawErr].filter(Boolean).join("\n").trim() || `claude exited with code ${code}`;
            resolve({
              success: code === 0,
              output: finalOutput,
              exit_code: code ?? 1,
              duration_ms
            });
          });
        });
      }
      /**
       * Kill an active job's process. Used during graceful shutdown.
       */
      kill(jobId) {
        const child = this.active.get(jobId);
        if (child) {
          child.kill("SIGTERM");
          setTimeout(() => {
            // Same fix as in run(): `child.killed` is true right after the
            // SIGTERM is sent, so it cannot gate the SIGKILL escalation.
            if (child.exitCode === null && child.signalCode === null) child.kill("SIGKILL");
          }, SIGTERM_GRACE_MS);
        }
      }
      /**
       * Kill all active processes. Used during daemon shutdown.
       */
      killAll() {
        for (const [jobId] of this.active) {
          this.kill(jobId);
        }
      }
      /** Number of currently running job processes. */
      get activeCount() {
        return this.active.size;
      }
      // ── Private ────────────────────────────────────────────────────────────
      /** Select the prompt builder for the job's type from its JSON payload. */
      buildPrompt(job) {
        const payload = JSON.parse(job.payload);
        switch (job.type) {
          case "ingest":
            return buildIngestPrompt(payload);
          case "query":
            return buildQueryPrompt(payload);
          case "lint":
            return buildLintPrompt();
          default:
            return `Unknown job type: ${job.type}`;
        }
      }
      /**
       * Base tool set plus any per-wiki extras from .tools/allowed-tools.txt,
       * filtered through the global whitelist (comments and blanks ignored).
       */
      resolveTools(wikiId) {
        const tools = [...BASE_ALLOWED_TOOLS];
        const allowedPath = join4(this.wikisDir, wikiId, ".tools", "allowed-tools.txt");
        if (existsSync4(allowedPath)) {
          const extra = readFileSync3(allowedPath, "utf-8").split("\n").map((l) => l.trim()).filter((l) => l && !l.startsWith("#") && ALLOWED_TOOLS_WHITELIST.has(l));
          tools.push(...extra);
        }
        return [...new Set(tools)];
      }
      /** Assemble the argv for the `claude` CLI invocation. */
      buildClaudeArgs(prompt, model, maxTurns, tools, wikiId) {
        const toolStr = tools.join(",");
        const args = [
          "claude",
          "-p",
          prompt,
          "--model",
          model,
          "--max-turns",
          String(maxTurns),
          "--output-format",
          "json",
          "--tools",
          toolStr,
          // restrict which tools EXIST (hard boundary)
          "--allowedTools",
          toolStr
          // pre-approve those tools (no interactive prompts)
        ];
        const systemPrompt = getWikiSystemPrompt();
        args.push("--append-system-prompt", systemPrompt);
        const mcpPath = join4(this.wikisDir, wikiId, ".tools", "mcp.json");
        if (existsSync4(mcpPath)) {
          // The config exists on the host path, but the child sees it at the
          // sandbox mount point.
          args.push("--mcp-config", `${WORKSPACE_MOUNT}/.tools/mcp.json`);
        }
        return args;
      }
    };
  }
});
1034
+
1035
+ // src/daemon/queue.ts
1036
/**
 * Promise-based delay helper.
 *
 * @param {number} ms - milliseconds to wait
 * @returns {Promise<void>} resolves after roughly `ms` milliseconds
 */
function sleep(ms) {
  return new Promise((wake) => {
    setTimeout(wake, ms);
  });
}
1039
var QueueManager;
var init_queue = __esm({
  "src/daemon/queue.ts"() {
    "use strict";
    init_constants();
    /**
     * Per-wiki FIFO job dispatcher. Different wikis drain concurrently, but
     * each wiki runs at most one job at a time (one drainWiki loop per wiki).
     * Also schedules a maintenance "lint" job after every N successful
     * ingests (N = autoLintInterval; 0 disables auto-lint).
     */
    QueueManager = class {
      constructor(db, runner, autoLintInterval = AUTO_LINT_INTERVAL) {
        this.db = db;
        this.runner = runner;
        this.autoLintInterval = autoLintInterval;
      }
      db;
      runner;
      autoLintInterval;
      // wikiId -> { active, pending, ingestCount } drain-loop state.
      wikis = /* @__PURE__ */ new Map();
      shuttingDown = false;
      /**
       * Start draining queues. Called on daemon startup.
       * Kicks off drain loops for all wikis with pending jobs.
       */
      start() {
        const wikisWithWork = this.db.wikisWithPendingJobs();
        for (const wikiId of wikisWithWork) {
          this.notify(wikiId);
        }
        console.log(`[queue] Started. ${wikisWithWork.length} wiki(s) have pending work.`);
      }
      /**
       * Signal that a wiki has new work to process.
       * If the wiki isn't already draining, starts a drain loop.
       */
      notify(wikiId) {
        if (this.shuttingDown) return;
        let state = this.wikis.get(wikiId);
        if (!state) {
          state = { active: false, pending: false, ingestCount: 0 };
          this.wikis.set(wikiId, state);
        }
        if (state.active) {
          // A loop is already running; flag it to re-check the queue when done.
          state.pending = true;
        } else {
          // Fire-and-forget: drainWiki manages its own errors and restart.
          this.drainWiki(wikiId, state);
        }
      }
      /**
       * Graceful shutdown. Waits for active jobs to finish.
       */
      async stop() {
        this.shuttingDown = true;
        console.log("[queue] Shutting down...");
        const maxWait = 3e4; // 30s budget for in-flight jobs to finish
        const start = Date.now();
        while (this.runner.activeCount > 0 && Date.now() - start < maxWait) {
          await sleep(500);
        }
        if (this.runner.activeCount > 0) {
          console.warn(`[queue] ${this.runner.activeCount} job(s) still active after ${maxWait}ms, killing`);
          this.runner.killAll();
        }
        console.log("[queue] Shutdown complete.");
      }
      // ── Private ────────────────────────────────────────────────────────────
      // Claim-and-run loop for one wiki; exactly one instance per wiki at a
      // time (guarded by state.active). Jobs that throw are marked failed and
      // audited; the loop then continues with the next job.
      async drainWiki(wikiId, state) {
        state.active = true;
        state.pending = false;
        try {
          while (!this.shuttingDown) {
            const job = this.db.claimNextJob(wikiId);
            if (!job) break;
            console.log(`[queue] Running job #${job.id} (${job.type}) for wiki '${wikiId}'`);
            try {
              const result = await this.runner.run(job);
              if (result.success) {
                this.db.completeJob(job.id, result);
                this.db.logAudit(wikiId, `job.${job.type}.completed`, `job #${job.id} (${result.duration_ms}ms)`);
                console.log(`[queue] Job #${job.id} completed in ${result.duration_ms}ms`);
              } else {
                this.db.failJob(job.id, result.output);
                this.db.logAudit(wikiId, `job.${job.type}.failed`, `job #${job.id}: ${result.output.slice(0, 200)}`);
                console.error(`[queue] Job #${job.id} failed (exit ${result.exit_code})`);
              }
              if (job.type === "ingest" && result.success) {
                // Count successful ingests toward the auto-lint threshold.
                state.ingestCount++;
                if (this.autoLintInterval > 0 && state.ingestCount >= this.autoLintInterval) {
                  state.ingestCount = 0;
                  this.scheduleAutoLint(wikiId);
                }
              }
            } catch (err) {
              const msg = err instanceof Error ? err.message : String(err);
              this.db.failJob(job.id, msg);
              this.db.logAudit(wikiId, `job.${job.type}.error`, `job #${job.id}: ${msg}`);
              console.error(`[queue] Job #${job.id} error: ${msg}`);
            }
          }
        } finally {
          state.active = false;
          // If notify() arrived while we were busy, go around again.
          if (state.pending && !this.shuttingDown) {
            this.drainWiki(wikiId, state);
          }
        }
      }
      // Enqueue a lint job unless one is already pending for this wiki
      // (prevents lint pile-up under heavy ingest load).
      scheduleAutoLint(wikiId) {
        const pendingLints = this.db.listJobs(wikiId, { status: "pending" }).filter((j) => j.type === "lint");
        if (pendingLints.length > 0) {
          console.log(`[queue] Skipping auto-lint for '${wikiId}' \u2014 lint already pending`);
          return;
        }
        console.log(`[queue] Scheduling auto-lint for '${wikiId}' after ${this.autoLintInterval} ingests`);
        this.db.createJob(wikiId, "lint", {});
      }
    };
  }
});
1153
+
1154
+ // src/daemon/routes.ts
1155
/**
 * Type guard for the three supported job kinds.
 *
 * @param {unknown} type - candidate job type
 * @returns {boolean} true iff type is "ingest", "query", or "lint"
 */
function isValidJobType(type) {
  return ["ingest", "query", "lint"].includes(type);
}
1158
/**
 * Validates that a parsed request body is a plain JSON object (not null,
 * not an array, not a primitive) and returns it unchanged.
 *
 * @param {unknown} body - parsed request body
 * @returns {object} the same body, narrowed to an object
 * @throws {ValidationError} when the body is missing or not a plain object
 */
function requireBody(body) {
  const isPlainObject = typeof body === "object" && body !== null && !Array.isArray(body);
  if (!isPlainObject) {
    throw new ValidationError("Request body must be a JSON object");
  }
  return body;
}
1164
var RouteHandler;
var init_routes = __esm({
  "src/daemon/routes.ts"() {
    "use strict";
    init_constants();
    init_errors();
    /**
     * Dispatches daemon API requests (method + regex path) to handler
     * methods. Every wiki-scoped handler first calls requireWiki(), which
     * enforces both existence and ownership by the caller's uid (obtained
     * from the Unix-socket peer credentials upstream).
     */
    RouteHandler = class {
      constructor(db, scaffold, namespace, queue, auth) {
        this.db = db;
        this.scaffold = scaffold;
        this.namespace = namespace;
        this.queue = queue;
        this.auth = auth;
        // Route table: first match wins; regex named groups become `params`.
        this.routes = [
          { method: "POST", pattern: /^\/wikis$/, handler: this.createWiki.bind(this) },
          { method: "GET", pattern: /^\/wikis$/, handler: this.listWikis.bind(this) },
          { method: "GET", pattern: /^\/wikis\/(?<id>[^/]+)$/, handler: this.getWiki.bind(this) },
          { method: "DELETE", pattern: /^\/wikis\/(?<id>[^/]+)$/, handler: this.destroyWiki.bind(this) },
          { method: "PUT", pattern: /^\/wikis\/(?<id>[^/]+)\/config$/, handler: this.updateConfig.bind(this) },
          { method: "POST", pattern: /^\/wikis\/(?<id>[^/]+)\/chown$/, handler: this.chownWiki.bind(this) },
          { method: "POST", pattern: /^\/wikis\/(?<id>[^/]+)\/api-key$/, handler: this.setApiKey.bind(this) },
          { method: "POST", pattern: /^\/wikis\/(?<id>[^/]+)\/credentials$/, handler: this.setCredentials.bind(this) },
          { method: "POST", pattern: /^\/wikis\/(?<id>[^/]+)\/jobs$/, handler: this.submitJob.bind(this) },
          { method: "GET", pattern: /^\/wikis\/(?<id>[^/]+)\/jobs\/(?<jobId>\d+)$/, handler: this.getJob.bind(this) },
          { method: "GET", pattern: /^\/wikis\/(?<id>[^/]+)\/jobs$/, handler: this.listJobs.bind(this) },
          { method: "GET", pattern: /^\/wikis\/(?<id>[^/]+)\/logs$/, handler: this.getAuditLog.bind(this) },
          { method: "POST", pattern: /^\/wikis\/(?<id>[^/]+)\/ingest-file$/, handler: this.receiveFile.bind(this) }
        ];
      }
      db;
      scaffold;
      namespace;
      queue;
      auth;
      routes;
      /** Match method+path against the route table; 404 when nothing matches. */
      async handle(method, path, body, ctx) {
        for (const route of this.routes) {
          if (route.method !== method) continue;
          const match = path.match(route.pattern);
          if (match) {
            return route.handler(match.groups ?? {}, body, ctx);
          }
        }
        return { status: 404, body: { ok: false, error: "Not found" } };
      }
      // ── Wiki CRUD ───────────────────────────────────────────────────────────
      /** Create a wiki: validate id, insert DB row, scaffold dirs, audit. */
      async createWiki(params, body, ctx) {
        const b = requireBody(body);
        const id = b.id;
        const name = b.name;
        if (!id || !WIKI_ID_PATTERN.test(id)) {
          throw new ValidationError(
            `Invalid wiki ID '${id}'. Must be 3-64 chars, lowercase alphanumeric and hyphens, cannot start or end with a hyphen.`
          );
        }
        if (this.db.getWiki(id)) {
          throw new WikiExistsError(id);
        }
        const wiki = this.db.createWiki(id, name ?? id, ctx.callerUid);
        this.scaffold.create(id);
        this.namespace.validateWiki(id);
        this.db.logAudit(id, "wiki.created");
        return { status: 201, body: { ok: true, data: wiki } };
      }
      /** List only the wikis owned by the caller. */
      async listWikis(params, body, ctx) {
        const wikis = this.db.listWikis(ctx.callerUid);
        return { status: 200, body: { ok: true, data: wikis } };
      }
      /** Fetch one wiki plus its pending-job count. */
      async getWiki(params, body, ctx) {
        const wiki = this.requireWiki(params["id"], ctx.callerUid);
        const pendingJobs = this.db.getPendingJobCount(wiki.id);
        return {
          status: 200,
          body: { ok: true, data: { ...wiki, pending_jobs: pendingJobs } }
        };
      }
      /** Delete a wiki; `keepData: true` preserves the on-disk files. */
      async destroyWiki(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const keepData = body && typeof body === "object" && "keepData" in body ? Boolean(body.keepData) : false;
        this.scaffold.destroy(wikiId, keepData);
        this.db.deleteWiki(wikiId);
        return { status: 200, body: { ok: true } };
      }
      /** Transfer ownership. Root (uid 0) may chown any wiki. */
      async chownWiki(params, body, ctx) {
        const wikiId = params["id"];
        const wiki = this.db.getWiki(wikiId);
        if (!wiki) throw new WikiNotFoundError(wikiId);
        // Root bypass: uid 0 may act on wikis it does not own.
        if (ctx.callerUid !== 0 && wiki.owner_uid !== ctx.callerUid) {
          throw new ForbiddenError(wikiId);
        }
        const b = requireBody(body);
        const newOwnerUid = b.uid;
        if (newOwnerUid === void 0 || typeof newOwnerUid !== "number" || !Number.isInteger(newOwnerUid) || newOwnerUid < 0) {
          throw new ValidationError("uid (non-negative integer) is required");
        }
        const updated = this.db.chownWiki(wikiId, newOwnerUid);
        this.db.logAudit(wikiId, "wiki.chown", `uid ${ctx.callerUid} \u2192 ${newOwnerUid}`);
        return { status: 200, body: { ok: true, data: updated } };
      }
      /**
       * Update wiki config. `allowed_tools` is validated against the global
       * whitelist and written to disk; all other keys go to the DB row.
       */
      async updateConfig(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const config = requireBody(body);
        if (config.allowed_tools !== void 0) {
          if (!Array.isArray(config.allowed_tools)) {
            throw new ValidationError("allowed_tools must be an array of tool names");
          }
          const invalid = config.allowed_tools.filter((t) => !ALLOWED_TOOLS_WHITELIST.has(t));
          if (invalid.length > 0) {
            // Error lists only the optional extras (base tools are always on).
            const allowed = [...ALLOWED_TOOLS_WHITELIST].filter((t) => !BASE_ALLOWED_TOOLS.includes(t));
            throw new ValidationError(
              `Invalid tools: ${invalid.join(", ")}. Allowed extras: ${allowed.join(", ")}`
            );
          }
          this.scaffold.writeAllowedTools(wikiId, config.allowed_tools);
          this.db.logAudit(wikiId, "wiki.allowed_tools_updated", JSON.stringify(config.allowed_tools));
        }
        // Strip allowed_tools (already handled above) before the DB update.
        const { allowed_tools: _, ...dbConfig } = config;
        if (Object.keys(dbConfig).length > 0) {
          const wiki2 = this.db.updateWiki(wikiId, dbConfig);
          this.db.logAudit(wikiId, "wiki.config_updated", JSON.stringify(dbConfig));
          return { status: 200, body: { ok: true, data: wiki2 } };
        }
        const wiki = this.db.getWiki(wikiId);
        return { status: 200, body: { ok: true, data: wiki } };
      }
      // ── Auth ─────────────────────────────────────────────────────────────────
      /** Store a per-wiki Anthropic API key. */
      async setApiKey(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const b = requireBody(body);
        const key = b.key;
        if (!key || typeof key !== "string") {
          throw new ValidationError("API key is required");
        }
        this.auth.setApiKey(wikiId, key);
        this.db.logAudit(wikiId, "wiki.api_key_set");
        return { status: 200, body: { ok: true } };
      }
      /** Store per-wiki OAuth-style credentials (opaque JSON string). */
      async setCredentials(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const b = requireBody(body);
        const credentials = b.credentials;
        if (!credentials || typeof credentials !== "string") {
          throw new ValidationError("credentials (JSON string) is required");
        }
        try {
          // Parse only to validate; the raw string is what gets stored.
          JSON.parse(credentials);
        } catch {
          throw new ValidationError("credentials must be valid JSON");
        }
        this.auth.setCredentials(wikiId, credentials);
        this.db.logAudit(wikiId, "wiki.credentials_set");
        return { status: 200, body: { ok: true } };
      }
      // ── Jobs ─────────────────────────────────────────────────────────────────
      /**
       * Enqueue a job and wake the queue. With ?wait=true the handler blocks
       * (polls) until the job finishes and returns 200; otherwise 202 with
       * the freshly created job row.
       */
      async submitJob(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const b = requireBody(body);
        const type = b.type;
        const payload = b.payload;
        if (!type || !isValidJobType(type)) {
          throw new ValidationError(`Invalid job type '${type}'. Must be one of: ingest, query, lint`);
        }
        const job = this.db.createJob(wikiId, type, payload ?? {});
        this.queue.notify(wikiId);
        this.db.logAudit(wikiId, `job.${type}.submitted`, `job #${job.id}`);
        if (ctx.wait) {
          const result = await this.waitForJob(job.id);
          return { status: 200, body: { ok: true, data: result } };
        }
        return { status: 202, body: { ok: true, data: job } };
      }
      /** Fetch one job, scoped to the wiki in the URL. */
      async getJob(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const jobId = Number(params["jobId"]);
        const job = this.db.getJob(jobId);
        // 404 if the job belongs to a different wiki (no cross-wiki reads).
        if (!job || job.wiki_id !== wikiId) throw new JobNotFoundError(jobId);
        return { status: 200, body: { ok: true, data: job } };
      }
      /** List the 50 most recent jobs for a wiki. */
      async listJobs(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const jobs = this.db.listJobs(wikiId, { limit: 50 });
        return { status: 200, body: { ok: true, data: jobs } };
      }
      // ── Audit ────────────────────────────────────────────────────────────────
      /** Return the wiki's audit log entries. */
      async getAuditLog(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const log = this.db.getAuditLog(wikiId);
        return { status: 200, body: { ok: true, data: log } };
      }
      // ── File upload ──────────────────────────────────────────────────────────
      /** Store a base64-encoded file into the wiki's raw/ source directory. */
      async receiveFile(params, body, ctx) {
        const wikiId = params["id"];
        this.requireWiki(wikiId, ctx.callerUid);
        const b = requireBody(body);
        const filename = b.filename;
        const content = b.content;
        if (!filename || typeof filename !== "string" || !content || typeof content !== "string") {
          throw new ValidationError("filename (string) and content (base64 string) are required");
        }
        const buffer = Buffer.from(content, "base64");
        const stored = this.scaffold.writeRawFile(wikiId, filename, buffer);
        return { status: 201, body: { ok: true, data: { filename: stored } } };
      }
      // ── Helpers ──────────────────────────────────────────────────────────────
      /**
       * Fetch a wiki and enforce strict ownership.
       * NOTE(review): unlike chownWiki, there is no uid-0 (root) bypass here,
       * so root cannot read/manage wikis it does not own — confirm whether
       * that asymmetry is intentional.
       */
      requireWiki(wikiId, callerUid) {
        const wiki = this.db.getWiki(wikiId);
        if (!wiki) throw new WikiNotFoundError(wikiId);
        if (wiki.owner_uid !== callerUid) throw new ForbiddenError(wikiId);
        return wiki;
      }
      /**
       * Poll the DB until the job reaches a terminal state or the deadline
       * (default 10 minutes) passes. Always resolves — on timeout it returns
       * the job in whatever state it is in; never rejects.
       */
      waitForJob(jobId, timeoutMs = 10 * 6e4) {
        return new Promise((resolve, reject) => {
          const deadline = Date.now() + timeoutMs;
          const check = () => {
            const job = this.db.getJob(jobId);
            if (!job || job.status === "completed" || job.status === "failed") {
              resolve(job);
            } else if (Date.now() > deadline) {
              resolve(job);
            } else {
              setTimeout(check, JOB_POLL_INTERVAL_MS);
            }
          };
          check();
        });
      }
    };
  }
});
1401
+
1402
+ // src/daemon/peercred.ts
1403
+ import { createRequire } from "node:module";
1404
/**
 * Reads the peer credentials (uid etc.) of a connected Unix-domain socket
 * via the native addon, using the socket's underlying file descriptor.
 *
 * @param {import('node:net').Socket} socket - connected Unix socket
 * @returns {{ uid: number }} credentials object from the native addon
 * @throws {Error} when the socket exposes no usable file descriptor
 */
function getPeerCred(socket) {
  // `_handle` is an internal Node API; it may be absent on closed sockets.
  const fd = socket._handle?.fd;
  if (typeof fd !== "number" || fd < 0) {
    throw new Error("Cannot get file descriptor from socket");
  }
  return addon.getPeerCred(fd);
}
1411
var require2, addon;
// Lazy initializer: loads the native peercred addon (SO_PEERCRED-style
// lookup) via createRequire, since ESM cannot `require` .node files directly.
var init_peercred = __esm({
  "src/daemon/peercred.ts"() {
    "use strict";
    require2 = createRequire(import.meta.url);
    // Path is relative to the bundled dist file; the addon must be built
    // (node-gyp) before the daemon can start.
    addon = require2("../../build/Release/peercred.node");
  }
});
1419
+
1420
+ // src/daemon/server.ts
1421
+ import { createServer } from "node:http";
1422
+ import { existsSync as existsSync5, unlinkSync, chmodSync } from "node:fs";
1423
/**
 * Serializes `body` as JSON and writes a complete HTTP response with the
 * appropriate Content-Type and an exact byte-count Content-Length.
 *
 * @param {import('node:http').ServerResponse} res - response to write to
 * @param {number} status - HTTP status code
 * @param {unknown} body - JSON-serializable payload
 */
function sendJson(res, status, body) {
  const payload = JSON.stringify(body);
  const headers = {
    "Content-Type": "application/json",
    // Byte length, not string length — payloads may contain multi-byte UTF-8.
    "Content-Length": Buffer.byteLength(payload)
  };
  res.writeHead(status, headers);
  res.end(payload);
}
1431
/**
 * Buffers an incoming request body and parses it as JSON.
 * Resolves `undefined` for an empty body; rejects (and destroys the
 * request) when the body exceeds MAX_BODY_BYTES, and rejects on invalid
 * JSON or a stream error.
 *
 * @param {import('node:http').IncomingMessage} req - incoming request
 * @returns {Promise<unknown>} parsed JSON value, or undefined for no body
 */
async function readBody(req) {
  return new Promise((resolve, reject) => {
    const parts = [];
    let received = 0;
    req.on("data", (chunk) => {
      received += chunk.length;
      if (received > MAX_BODY_BYTES) {
        // Stop reading immediately; the client is over the limit.
        req.destroy();
        reject(new Error(`Request body exceeds ${MAX_BODY_BYTES} bytes`));
        return;
      }
      parts.push(chunk);
    });
    req.on("end", () => {
      const text = Buffer.concat(parts).toString("utf-8");
      if (!text) {
        // No body at all (e.g. bare POST) — treat as "no payload".
        resolve(void 0);
        return;
      }
      try {
        resolve(JSON.parse(text));
      } catch {
        reject(new Error("Invalid JSON body"));
      }
    });
    req.on("error", reject);
  });
}
1459
var DaemonServer, MAX_BODY_BYTES;
var init_server = __esm({
  "src/daemon/server.ts"() {
    "use strict";
    init_peercred();
    /**
     * HTTP server bound to a Unix domain socket. Authenticates callers by
     * uid using peer credentials captured at connection time, then delegates
     * routing to the injected RouteHandler.
     */
    DaemonServer = class {
      constructor(socketPath, routes) {
        this.socketPath = socketPath;
        this.routes = routes;
        this.server = createServer((req, res) => {
          // Surface async handler failures as a 500, unless headers already went out.
          this.handleRequest(req, res).catch((err) => {
            console.error("[server] Unhandled error:", err);
            if (!res.headersSent) {
              sendJson(res, 500, { ok: false, error: "Internal server error" });
            }
          });
        });
        this.server.on("connection", (socket) => {
          // Capture the caller's uid once, when the socket connects; drop
          // connections whose credentials cannot be determined.
          try {
            const cred = getPeerCred(socket);
            this.socketUids.set(socket, cred.uid);
          } catch (err) {
            console.error("[server] Failed to get peer credentials:", err);
            socket.destroy();
          }
        });
      }
      socketPath;
      routes;
      server;
      // socket -> caller uid; WeakMap so entries vanish with the socket.
      socketUids = /* @__PURE__ */ new WeakMap();
      /** Bind the Unix socket (removing any stale file first) and listen. */
      async start() {
        if (existsSync5(this.socketPath)) {
          unlinkSync(this.socketPath);
        }
        return new Promise((resolve, reject) => {
          this.server.on("error", reject);
          this.server.listen(this.socketPath, () => {
            // 438 === 0o666: any local user may connect; authorization is
            // enforced per-request via peer-credential uid checks.
            chmodSync(this.socketPath, 438);
            console.log(`[server] Listening on ${this.socketPath}`);
            resolve();
          });
        });
      }
      /** Stop accepting connections and remove the socket file (best-effort). */
      async stop() {
        return new Promise((resolve) => {
          this.server.close(() => {
            if (existsSync5(this.socketPath)) {
              try {
                unlinkSync(this.socketPath);
              } catch {
              }
            }
            resolve();
          });
        });
      }
      /**
       * Parse one request (method, path, optional JSON body, ?wait flag),
       * resolve the caller's uid, and dispatch to the route handler.
       * Errors carrying a `statusCode` are returned as-is; anything else
       * becomes a generic 500.
       */
      async handleRequest(req, res) {
        const method = (req.method ?? "GET").toUpperCase();
        const url = req.url ?? "/";
        let body = void 0;
        if (method === "POST" || method === "PUT") {
          body = await readBody(req);
        }
        // Dummy base: only pathname and query matter on a Unix socket.
        const urlObj = new URL(url, "http://localhost");
        const wait = urlObj.searchParams.get("wait") === "true";
        const socket = req.socket;
        const callerUid = this.socketUids.get(socket);
        if (callerUid === void 0) {
          sendJson(res, 500, { ok: false, error: "Could not determine caller identity" });
          return;
        }
        try {
          const result = await this.routes.handle(method, urlObj.pathname, body, {
            wait,
            res,
            // pass response for streaming login output
            callerUid
          });
          // A handler may have streamed its own response; don't double-send.
          if (res.writableEnded) return;
          sendJson(res, result.status, result.body);
        } catch (err) {
          if (res.writableEnded) return;
          if (err && typeof err === "object" && "statusCode" in err) {
            const memexErr = err;
            sendJson(res, memexErr.statusCode, {
              ok: false,
              error: memexErr.message
            });
          } else {
            console.error("[server] Error handling request:", err);
            sendJson(res, 500, { ok: false, error: "Internal server error" });
          }
        }
      }
    };
    // 100 MiB request-body cap (uploads arrive base64-encoded).
    MAX_BODY_BYTES = 100 * 1024 * 1024;
  }
});
1558
+
1559
+ // src/daemon.ts
1560
// Bundler-generated module namespace for src/daemon.ts.
var daemon_exports = {};
__export(daemon_exports, {
  startDaemon: () => startDaemon
});
import { mkdirSync as mkdirSync4 } from "node:fs";
/**
 * Daemon entry point: prepares data directories, opens the SQLite DB,
 * recovers jobs stranded by a previous crash, wires namespace/scaffold/
 * auth/runner/queue/routes together, starts the Unix-socket server, and
 * installs SIGTERM/SIGINT handlers for graceful shutdown.
 */
async function startDaemon() {
  console.log("[memex] Starting daemon...");
  for (const dir of [DATA_DIR, WIKIS_DIR, RUN_DIR]) {
    mkdirSync4(dir, { recursive: true });
  }
  const db = new Database(DB_PATH);
  db.initialize();
  // Jobs left "running" by a crashed daemon are reset so they can be retried.
  const staleCount = db.resetStaleJobs();
  if (staleCount > 0) {
    console.log(`[memex] Reset ${staleCount} stale job(s) from previous run`);
  }
  const namespace = new NamespaceManager(WIKIS_DIR);
  namespace.checkCapabilities();
  namespace.ensureDirectories();
  const scaffold = new WikiScaffold(WIKIS_DIR);
  // A daemon-wide ANTHROPIC_API_KEY (if set) serves as the fallback credential.
  const auth = new AuthManager(WIKIS_DIR, process.env["ANTHROPIC_API_KEY"]);
  const runner = new ClaudeRunner(namespace, auth, db, WIKIS_DIR);
  const queue = new QueueManager(db, runner, AUTO_LINT_INTERVAL);
  const routes = new RouteHandler(db, scaffold, namespace, queue, auth);
  const server = new DaemonServer(SOCKET_PATH, routes);
  await server.start();
  queue.start();
  const wikiCount = db.listWikis().length;
  console.log(`[memex] Daemon ready (PID ${process.pid})`);
  console.log(`[memex] Socket: ${SOCKET_PATH}`);
  console.log(`[memex] Data: ${DATA_DIR}`);
  console.log(`[memex] Wikis: ${wikiCount}`);
  // Drain the queue before closing the server and DB, then exit cleanly.
  const shutdown = async (signal) => {
    console.log(`
[memex] Received ${signal}, shutting down...`);
    await queue.stop();
    await server.stop();
    db.close();
    console.log("[memex] Shutdown complete.");
    process.exit(0);
  };
  process.on("SIGTERM", () => shutdown("SIGTERM"));
  process.on("SIGINT", () => shutdown("SIGINT"));
}
1604
// One-shot initializer for src/daemon.ts: pulls in every subsystem the
// daemon depends on (db, namespace, scaffold, auth, runner, queue, routes,
// server, constants) the first time daemon_exports is imported.
var init_daemon = __esm({
  "src/daemon.ts"() {
    "use strict";
    init_db();
    init_namespace();
    init_scaffold();
    init_auth();
    init_runner();
    init_queue();
    init_routes();
    init_server();
    init_constants();
  }
});
1618
+
1619
+ // src/index.ts
1620
+ import { Command as Command13 } from "commander";
1621
+
1622
+ // src/cli/commands/serve.ts
1623
+ import { Command } from "commander";
1624
// `memex serve` — run the daemon in the foreground. The daemon module is
// loaded lazily so the other CLI commands don't pay its startup cost.
var serveCommand = new Command("serve").description("Start the memex daemon").action(async () => {
  try {
    const { startDaemon: startDaemon2 } = await Promise.resolve().then(() => (init_daemon(), daemon_exports));
    await startDaemon2();
  } catch (err) {
    const message = err instanceof Error ? err.message : err;
    console.error(message);
    process.exit(1);
  }
});
1633
+
1634
+ // src/cli/commands/create.ts
1635
+ import { Command as Command2 } from "commander";
1636
+
1637
+ // src/cli/client.ts
1638
// Bring shared constants (SOCKET_PATH, poll intervals) and error classes
// into scope before the client is defined.
init_constants();
init_errors();
1640
+ import { request as httpRequest } from "node:http";
1641
+ import { readFileSync as readFileSync4 } from "node:fs";
1642
+ import { basename as basename2 } from "node:path";
1643
/**
 * JSON-over-unix-socket client for the memex daemon.
 * All convenience methods resolve to the daemon's `{ ok, data?, error? }`
 * response envelope; connection failures reject with DaemonNotRunningError.
 */
var MemexClient = class {
  /** @param {string} socketPath - Unix socket the daemon listens on. */
  constructor(socketPath = SOCKET_PATH) {
    this.socketPath = socketPath;
  }
  socketPath;
  // ── Generic request ────────────────────────────────────────────────────
  /**
   * Send one JSON request and resolve with the parsed JSON response body.
   * A non-JSON body (e.g. a bare error string) is wrapped as { ok: false }.
   */
  async request(method, path, body) {
    return new Promise((resolve, reject) => {
      const opts = {
        socketPath: this.socketPath,
        method,
        path,
        headers: { "Content-Type": "application/json" }
      };
      const req = httpRequest(opts, (res) => {
        const chunks = [];
        res.on("data", (chunk) => chunks.push(chunk));
        res.on("end", () => {
          const raw = Buffer.concat(chunks).toString("utf-8");
          try {
            resolve(JSON.parse(raw));
          } catch {
            // Body wasn't JSON — surface it verbatim as the error text.
            resolve({ ok: false, error: raw });
          }
        });
      });
      req.on("error", (err) => {
        // Socket missing or nobody listening => daemon isn't running.
        if (err.code === "ECONNREFUSED" || err.code === "ENOENT") {
          reject(new DaemonNotRunningError());
        } else {
          reject(err);
        }
      });
      if (body !== void 0) {
        req.write(JSON.stringify(body));
      }
      req.end();
    });
  }
  /**
   * Make a request and stream the raw response body chunks (utf-8 strings).
   * Used for login flow where we stream CLI output.
   *
   * FIX: the previous hand-rolled async iterator attached `once("data")`
   * listeners only while a `next()` call was pending; the first data event
   * switches the response into flowing mode, so chunks emitted while no
   * listener was attached were silently dropped. Node Readable streams are
   * natively async-iterable with correct buffering/back-pressure, so we
   * resolve with the (utf-8 decoded) response stream itself.
   */
  async stream(method, path, body) {
    return new Promise((resolve, reject) => {
      const opts = {
        socketPath: this.socketPath,
        method,
        path,
        headers: { "Content-Type": "application/json" }
      };
      const req = httpRequest(opts, (res) => {
        res.setEncoding("utf-8");
        resolve(res);
      });
      req.on("error", (err) => {
        if (err.code === "ECONNREFUSED" || err.code === "ENOENT") {
          reject(new DaemonNotRunningError());
        } else {
          reject(err);
        }
      });
      if (body !== void 0) {
        req.write(JSON.stringify(body));
      }
      req.end();
    });
  }
  // ── Convenience methods ────────────────────────────────────────────────
  createWiki(id, name) {
    return this.request("POST", "/wikis", { id, name });
  }
  listWikis() {
    return this.request("GET", "/wikis");
  }
  getWiki(id) {
    return this.request("GET", `/wikis/${id}`);
  }
  destroyWiki(id, keepData = false) {
    return this.request("DELETE", `/wikis/${id}`, { keepData });
  }
  updateConfig(wikiId, config) {
    return this.request("PUT", `/wikis/${wikiId}/config`, config);
  }
  chownWiki(wikiId, uid) {
    return this.request("POST", `/wikis/${wikiId}/chown`, { uid });
  }
  setApiKey(wikiId, key) {
    return this.request("POST", `/wikis/${wikiId}/api-key`, { key });
  }
  setCredentials(wikiId, credentials) {
    return this.request("POST", `/wikis/${wikiId}/credentials`, { credentials });
  }
  submitJob(wikiId, type, payload, wait = false) {
    const path = `/wikis/${wikiId}/jobs` + (wait ? "?wait=true" : "");
    return this.request("POST", path, { type, payload });
  }
  getJob(wikiId, jobId) {
    return this.request("GET", `/wikis/${wikiId}/jobs/${jobId}`);
  }
  listJobs(wikiId) {
    return this.request("GET", `/wikis/${wikiId}/jobs`);
  }
  getAuditLog(wikiId, limit) {
    const path = `/wikis/${wikiId}/logs` + (limit ? `?limit=${limit}` : "");
    return this.request("GET", path);
  }
  /**
   * Upload a local file to the daemon for ingestion into a wiki's raw/ directory.
   * File content is base64-encoded into the JSON payload.
   */
  async uploadFile(wikiId, localPath) {
    const content = readFileSync4(localPath);
    const filename = basename2(localPath);
    return this.request("POST", `/wikis/${wikiId}/ingest-file`, {
      filename,
      content: content.toString("base64")
    });
  }
  /**
   * Poll a job until it reaches a terminal state (completed/failed).
   * @param {(job: object) => void} [onPoll] - invoked after each poll.
   */
  async waitForJob(wikiId, jobId, onPoll) {
    while (true) {
      const resp = await this.getJob(wikiId, jobId);
      if (!resp.ok || !resp.data) {
        throw new Error(resp.error ?? "Failed to get job status");
      }
      const job = resp.data;
      if (onPoll) onPoll(job);
      if (job.status === "completed" || job.status === "failed") {
        return job;
      }
      await sleep2(JOB_POLL_INTERVAL_MS);
    }
  }
};
1814
/** Promise-based delay: resolves after `ms` milliseconds. */
function sleep2(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
1817
+
1818
+ // src/cli/commands/create.ts
1819
// `memex create <wikiId>` — register a new wiki and print onboarding hints.
var createCommand = new Command2("create").description("Create a new wiki").argument("<wikiId>", "Wiki identifier (lowercase alphanumeric + hyphens, 3-64 chars)").option("--name <name>", "Display name for the wiki").action(async (wikiId, opts) => {
  const client = new MemexClient();
  const createResp = await client.createWiki(wikiId, opts.name);
  if (!createResp.ok) {
    console.error(`Error: ${createResp.error}`);
    process.exit(1);
  }
  const wiki = createResp.data;
  console.log(`Created wiki '${wiki.id}' (${wiki.name})`);
  console.log(`\nNext steps:`);
  console.log(`  memex login ${wiki.id} # authenticate with Claude`);
  console.log(`  memex config ${wiki.id} --edit # customize wiki agent`);
  console.log(`  memex ingest ${wiki.id} file.md # add content`);
});
1834
+
1835
+ // src/cli/commands/destroy.ts
1836
+ import { Command as Command3 } from "commander";
1837
+ import { createInterface } from "node:readline";
1838
// `memex destroy <wikiId>` — irreversibly remove a wiki (optionally keeping
// its files on disk). Prompts for confirmation unless -y was passed.
var destroyCommand = new Command3("destroy").description("Destroy a wiki and its data").argument("<wikiId>", "Wiki to destroy").option("--keep-data", "Keep wiki files on disk (only remove registration)").option("-y, --yes", "Skip confirmation prompt").action(async (wikiId, opts) => {
  if (!opts.yes) {
    const suffix = opts.keepData ? " (keeping data)" : " and ALL its data";
    const confirmed = await confirm(`This will destroy wiki '${wikiId}'${suffix}. Continue? [y/N] `);
    if (!confirmed) {
      console.log("Aborted.");
      process.exit(0);
    }
  }
  const client = new MemexClient();
  const destroyResp = await client.destroyWiki(wikiId, opts.keepData ?? false);
  if (!destroyResp.ok) {
    console.error(`Error: ${destroyResp.error}`);
    process.exit(1);
  }
  console.log(`Destroyed wiki '${wikiId}'${opts.keepData ? " (data preserved)" : ""}`);
});
1856
/**
 * Ask a yes/no question on stdin.
 * Resolves true only for "y" or "yes" (case-insensitive); anything else,
 * including an empty answer, resolves false.
 */
function confirm(question) {
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  return new Promise((resolve) => {
    rl.question(question, (answer) => {
      rl.close();
      const normalized = answer.toLowerCase();
      resolve(normalized === "y" || normalized === "yes");
    });
  });
}
1865
+
1866
+ // src/cli/commands/config.ts
1867
+ import { Command as Command4 } from "commander";
1868
+ import { createInterface as createInterface2 } from "node:readline";
1869
+ import { execFileSync as execFileSync2 } from "node:child_process";
1870
init_constants(); // WIKIS_DIR and the tool whitelists below come from this module
1871
+ import { join as join5 } from "node:path";
1872
// `memex config <wikiId>` — inspect or mutate a wiki's configuration.
// Exactly one mode runs per invocation, checked in priority order:
// --edit, --set-key, --list-tools, --allowed-tools, --model; with no flag,
// a read-only summary is printed.
var configCommand = new Command4("config").description("Configure a wiki").argument("<wikiId>", "Wiki to configure").option("--edit", "Open .claude.md in $EDITOR").option("--set-key", "Set the API key for this wiki").option("--model <model>", "Set the default model (e.g., sonnet, opus, haiku)").option("--allowed-tools <tools>", "Set allowed tools (comma-separated, e.g., WebSearch,WebFetch)").option("--list-tools", "Show available tools and current configuration").action(async (wikiId, opts) => {
  const client = new MemexClient();
  // Verify the wiki exists before doing anything else.
  const wikiResp = await client.getWiki(wikiId);
  if (!wikiResp.ok) {
    console.error(`Error: ${wikiResp.error}`);
    process.exit(1);
  }
  if (opts.edit) {
    const editorCmd = process.env["EDITOR"] || process.env["VISUAL"] || "vi";
    const configPath = join5(WIKIS_DIR, wikiId, ".claude.md");
    try {
      execFileSync2(editorCmd, [configPath], { stdio: "inherit" });
      console.log(`Updated .claude.md for '${wikiId}'`);
    } catch {
      console.error(`Failed to open editor. Set $EDITOR environment variable.`);
      process.exit(1);
    }
    return;
  }
  if (opts.setKey) {
    // Secret is read without echo when attached to a TTY.
    const key = await promptSecret("API key: ");
    const keyResp = await client.setApiKey(wikiId, key);
    if (!keyResp.ok) {
      console.error(`Error: ${keyResp.error}`);
      process.exit(1);
    }
    console.log(`API key set for '${wikiId}'`);
    return;
  }
  if (opts.listTools) {
    const extras = [...ALLOWED_TOOLS_WHITELIST].filter((tool) => !BASE_ALLOWED_TOOLS.includes(tool));
    console.log(`Base tools (always enabled): ${BASE_ALLOWED_TOOLS.join(", ")}`);
    console.log(`Available extras: ${extras.join(", ")}`);
    console.log(`\nTo enable extras: memex config ${wikiId} --allowed-tools ${extras.join(",")}`);
    return;
  }
  if (opts.allowedTools !== void 0) {
    // An empty value resets the wiki to the base tool set.
    let toolList = [];
    if (opts.allowedTools) {
      toolList = opts.allowedTools.split(",").map((entry) => entry.trim()).filter(Boolean);
    }
    const toolsResp = await client.updateConfig(wikiId, { allowed_tools: toolList });
    if (!toolsResp.ok) {
      console.error(`Error: ${toolsResp.error}`);
      process.exit(1);
    }
    if (toolList.length === 0) {
      console.log(`Allowed tools reset to base set for '${wikiId}'`);
    } else {
      console.log(`Allowed tools set for '${wikiId}': ${toolList.join(", ")}`);
    }
    return;
  }
  if (opts.model) {
    const modelResp = await client.updateConfig(wikiId, { default_model: opts.model });
    if (!modelResp.ok) {
      console.error(`Error: ${modelResp.error}`);
      process.exit(1);
    }
    console.log(`Model set to '${opts.model}' for '${wikiId}'`);
    return;
  }
  // No flags: print the wiki's current configuration.
  const wiki = wikiResp.data;
  console.log(`Wiki: ${wiki.id}`);
  console.log(`Name: ${wiki.name}`);
  console.log(`Model: ${wiki.default_model}`);
  console.log(`Created: ${wiki.created_at}`);
  console.log(`\nConfig file: ${join5(WIKIS_DIR, wikiId, ".claude.md")}`);
});
1940
/**
 * Read a secret from stdin.
 * On a TTY the terminal is switched to raw mode so keystrokes are not
 * echoed; on piped input it falls back to a plain readline question.
 */
function promptSecret(prompt) {
  const rl = createInterface2({ input: process.stdin, output: process.stdout });
  return new Promise((resolve) => {
    if (process.stdin.isTTY) {
      process.stdout.write(prompt);
      const stdin = process.stdin;
      stdin.setRawMode(true);
      stdin.resume();
      let input = "";
      const onData = (chunk) => {
        const char = chunk.toString("utf-8");
        if (char === "\n" || char === "\r") {
          // Enter: restore cooked mode and resolve with the buffered secret.
          stdin.setRawMode(false);
          stdin.removeListener("data", onData);
          process.stdout.write("\n");
          rl.close();
          resolve(input);
        } else if (char === "\x7F" || char === "\b") {
          // Backspace: drop the last buffered character (nothing is echoed).
          if (input.length > 0) {
            input = input.slice(0, -1);
          }
        } else if (char === "\u0003") {
          // FIX: Ctrl-C in raw mode arrives as byte 0x03 (ETX). The shipped
          // bundle compared against the empty string — the control character
          // was evidently lost in encoding — and a data chunk is never
          // empty, so the branch was dead and Ctrl-C was silently ignored.
          rl.close();
          process.exit(0);
        } else {
          input += char;
        }
      };
      stdin.on("data", onData);
    } else {
      // Non-interactive stdin: read one line normally.
      rl.question(prompt, (answer) => {
        rl.close();
        resolve(answer);
      });
    }
  });
}
1977
+
1978
+ // src/cli/commands/login.ts
1979
+ import { Command as Command5 } from "commander";
1980
+ import { readFileSync as readFileSync5, existsSync as existsSync6 } from "node:fs";
1981
+ import { join as join6 } from "node:path";
1982
+ import { homedir } from "node:os";
1983
// Default location where `claude auth login` writes OAuth credentials.
var DEFAULT_CREDS_PATH = join6(homedir(), ".claude", ".credentials.json");
// `memex login <wikiId>` — copy Claude credentials (or store an API key)
// into a wiki so its jobs can authenticate.
var loginCommand = new Command5("login").description("Copy Claude credentials into a wiki").argument("<wikiId>", "Wiki to authenticate").option("--credentials <path>", "Path to .credentials.json", DEFAULT_CREDS_PATH).option("--api-key <key>", "Use an API key instead of OAuth credentials").action(async (wikiId, opts) => {
  const client = new MemexClient();
  const wikiResp = await client.getWiki(wikiId);
  if (!wikiResp.ok) {
    console.error(`Error: ${wikiResp.error}`);
    process.exit(1);
  }
  // API-key mode takes precedence over credential-file mode.
  if (opts.apiKey) {
    const keyResp = await client.setApiKey(wikiId, opts.apiKey);
    if (!keyResp.ok) {
      console.error(`Error: ${keyResp.error}`);
      process.exit(1);
    }
    console.log(`API key stored for wiki '${wikiId}'.`);
    return;
  }
  const credsPath = opts.credentials;
  if (!existsSync6(credsPath)) {
    console.error(`Error: Credentials file not found: ${credsPath}`);
    console.error(`\nRun 'claude auth login' first, or pass --credentials <path>`);
    process.exit(1);
  }
  const credentials = readFileSync5(credsPath, "utf-8");
  // Sanity-check the file before shipping it to the daemon.
  try {
    JSON.parse(credentials);
  } catch {
    console.error(`Error: ${credsPath} is not valid JSON`);
    process.exit(1);
  }
  const credsResp = await client.setCredentials(wikiId, credentials);
  if (!credsResp.ok) {
    console.error(`Error: ${credsResp.error}`);
    process.exit(1);
  }
  console.log(`Credentials copied from ${credsPath} to wiki '${wikiId}'.`);
});
2021
+
2022
+ // src/cli/commands/ingest.ts
2023
+ import { Command as Command6 } from "commander";
2024
+ import { existsSync as existsSync7 } from "node:fs";
2025
// `memex ingest <wikiId> <files...>` — upload local files to the daemon and
// queue an ingest job; blocks until the job finishes unless --async.
var ingestCommand = new Command6("ingest").description("Ingest source files into a wiki").argument("<wikiId>", "Target wiki").argument("<files...>", "Files to ingest (pdf, md, html, txt, images, etc.)").option("--async", "Return job ID immediately instead of waiting").action(async (wikiId, files, opts) => {
  const client = new MemexClient();
  // Validate every path before uploading anything.
  for (const file of files) {
    if (existsSync7(file)) continue;
    console.error(`File not found: ${file}`);
    process.exit(1);
  }
  // Upload sequentially, collecting the daemon-assigned filenames.
  const uploadedNames = [];
  for (const file of files) {
    process.stdout.write(`Uploading ${file}...`);
    const uploadResp = await client.uploadFile(wikiId, file);
    if (!uploadResp.ok) {
      console.error(` failed: ${uploadResp.error}`);
      process.exit(1);
    }
    uploadedNames.push(uploadResp.data.filename);
    console.log(` done (${uploadResp.data.filename})`);
  }
  const jobResp = await client.submitJob(wikiId, "ingest", { files: uploadedNames });
  if (!jobResp.ok) {
    console.error(`Error: ${jobResp.error}`);
    process.exit(1);
  }
  const job = jobResp.data;
  console.log(`\nIngest job #${job.id} submitted`);
  if (opts.async) {
    console.log(`Check status: memex status ${wikiId} ${job.id}`);
    return;
  }
  // Poll until terminal state, printing a dot per poll as progress.
  process.stdout.write("Processing");
  const result = await client.waitForJob(wikiId, job.id, () => {
    process.stdout.write(".");
  });
  console.log();
  if (result.status === "completed") {
    const parsed = safeParseResult(result.result);
    console.log("\nIngest complete.");
    if (parsed?.output) {
      console.log(parsed.output);
    }
  } else {
    console.error("\nIngest failed.");
    const parsed = safeParseResult(result.result);
    if (parsed) {
      console.error(parsed.error ?? parsed.output ?? result.result);
    }
    process.exit(1);
  }
});
2076
/**
 * Best-effort parse of a job result string.
 * Returns null for empty/missing input, the parsed object for valid JSON,
 * or `{ output: raw }` when the payload is plain text.
 */
function safeParseResult(raw) {
  if (!raw) {
    return null;
  }
  try {
    return JSON.parse(raw);
  } catch {
    return { output: raw };
  }
}
2084
+
2085
+ // src/cli/commands/query.ts
2086
+ import { Command as Command7 } from "commander";
2087
// `memex query <wikiId> <question>` — submit a question as a job and print
// the answer; blocks until completion unless --async.
var queryCommand = new Command7("query").description("Ask a question against a wiki").argument("<wikiId>", "Target wiki").argument("<question>", "Question to ask").option("--async", "Return job ID immediately instead of waiting").action(async (wikiId, question, opts) => {
  const client = new MemexClient();
  const jobResp = await client.submitJob(wikiId, "query", { question });
  if (!jobResp.ok) {
    console.error(`Error: ${jobResp.error}`);
    process.exit(1);
  }
  const job = jobResp.data;
  if (opts.async) {
    console.log(`Query job #${job.id} submitted`);
    console.log(`Check status: memex status ${wikiId} ${job.id}`);
    return;
  }
  // Poll to completion, one dot per poll.
  process.stdout.write("Thinking");
  const result = await client.waitForJob(wikiId, job.id, () => {
    process.stdout.write(".");
  });
  console.log("\n");
  if (result.status === "completed") {
    const parsed = safeParseResult2(result.result);
    console.log(parsed?.output ?? result.result ?? "(no output)");
  } else {
    console.error("Query failed.");
    const parsed = safeParseResult2(result.result);
    console.error(parsed?.error ?? parsed?.output ?? result.result);
    process.exit(1);
  }
});
2115
/**
 * Best-effort parse of a job result string (query variant).
 * Returns null for empty input, parsed JSON when possible, otherwise wraps
 * the raw text as `{ output: raw }`.
 */
function safeParseResult2(raw) {
  if (!raw) {
    return null;
  }
  try {
    return JSON.parse(raw);
  } catch {
    return { output: raw };
  }
}
2123
+
2124
+ // src/cli/commands/lint.ts
2125
+ import { Command as Command8 } from "commander";
2126
// `memex lint <wikiId>` — queue a maintenance/health-check job and print the
// report; blocks until completion unless --async.
var lintCommand = new Command8("lint").description("Run a maintenance health check on a wiki").argument("<wikiId>", "Target wiki").option("--async", "Return job ID immediately instead of waiting").action(async (wikiId, opts) => {
  const client = new MemexClient();
  const jobResp = await client.submitJob(wikiId, "lint", {});
  if (!jobResp.ok) {
    console.error(`Error: ${jobResp.error}`);
    process.exit(1);
  }
  const job = jobResp.data;
  if (opts.async) {
    console.log(`Lint job #${job.id} submitted`);
    console.log(`Check status: memex status ${wikiId} ${job.id}`);
    return;
  }
  // Poll to completion, one dot per poll.
  process.stdout.write("Checking wiki health");
  const result = await client.waitForJob(wikiId, job.id, () => {
    process.stdout.write(".");
  });
  console.log("\n");
  if (result.status === "completed") {
    const parsed = safeParseResult3(result.result);
    console.log(parsed?.output ?? result.result ?? "(no output)");
  } else {
    console.error("Lint failed.");
    const parsed = safeParseResult3(result.result);
    console.error(parsed?.error ?? parsed?.output ?? result.result);
    process.exit(1);
  }
});
2154
/**
 * Best-effort parse of a job result string (lint variant).
 * Returns null for empty input, parsed JSON when possible, otherwise wraps
 * the raw text as `{ output: raw }`.
 */
function safeParseResult3(raw) {
  if (!raw) {
    return null;
  }
  try {
    return JSON.parse(raw);
  } catch {
    return { output: raw };
  }
}
2162
+
2163
+ // src/cli/commands/logs.ts
2164
+ import { Command as Command9 } from "commander";
2165
// `memex logs <wikiId>` — print the last N audit-log entries for a wiki.
var logsCommand = new Command9("logs").description("View audit log for a wiki").argument("<wikiId>", "Target wiki").option("--tail <n>", "Number of entries to show", "20").action(async (wikiId, opts) => {
  const client = new MemexClient();
  // Unparseable --tail values fall back to 20.
  const limit = parseInt(opts.tail, 10) || 20;
  const logResp = await client.getAuditLog(wikiId, limit);
  if (!logResp.ok) {
    console.error(`Error: ${logResp.error}`);
    process.exit(1);
  }
  const entries = logResp.data ?? [];
  if (entries.length === 0) {
    console.log("No audit log entries.");
    return;
  }
  // Reversed before printing — presumably the API returns newest-first.
  for (const entry of entries.reverse()) {
    const detail = entry.detail ? ` \u2014 ${entry.detail}` : "";
    console.log(`[${entry.created_at}] ${entry.action}${detail}`);
  }
});
2183
+
2184
+ // src/cli/commands/list.ts
2185
+ import { Command as Command10 } from "commander";
2186
// `memex list` — print all registered wikis as a fixed-width table.
var listCommand = new Command10("list").description("List all wikis").action(async () => {
  const client = new MemexClient();
  const listResp = await client.listWikis();
  if (!listResp.ok) {
    console.error(`Error: ${listResp.error}`);
    process.exit(1);
  }
  const wikis = listResp.data ?? [];
  if (wikis.length === 0) {
    console.log("No wikis. Create one with: memex create <wikiId>");
    return;
  }
  const headerLine = padRow("ID", "NAME", "MODEL", "CREATED");
  console.log(headerLine);
  console.log("-".repeat(headerLine.length));
  for (const wiki of wikis) {
    console.log(padRow(wiki.id, wiki.name, wiki.default_model, wiki.created_at));
  }
});
2205
/** Fixed-width table row: id (24 cols), name (24), model (10), created. */
function padRow(id, name, model, created) {
  const cells = [id.padEnd(24), name.padEnd(24), model.padEnd(10), created];
  return cells.join(" ");
}
2208
+
2209
+ // src/cli/commands/chown.ts
2210
+ import { Command as Command11 } from "commander";
2211
// `memex chown <wikiId> <uid>` — transfer wiki ownership to another UID.
var chownCommand = new Command11("chown").description("Transfer wiki ownership to another user (by UID)").argument("<wikiId>", "Wiki identifier").argument("<uid>", "New owner UID (numeric)").action(async (wikiId, uidStr) => {
  const uid = Number(uidStr);
  const isValidUid = Number.isInteger(uid) && uid >= 0;
  if (!isValidUid) {
    console.error(`Error: uid must be a non-negative integer, got '${uidStr}'`);
    process.exit(1);
  }
  const client = new MemexClient();
  const chownResp = await client.chownWiki(wikiId, uid);
  if (!chownResp.ok) {
    console.error(`Error: ${chownResp.error}`);
    process.exit(1);
  }
  const wiki = chownResp.data;
  console.log(`Transferred wiki '${wiki.id}' to uid ${wiki.owner_uid}`);
});
2226
+
2227
+ // src/cli/commands/status.ts
2228
+ import { Command as Command12 } from "commander";
2229
// `memex status <wikiId> [jobId]` — show one job in detail when a jobId is
// given, otherwise list the wiki's recent jobs as a table.
var statusCommand = new Command12("status").description("Check job status for a wiki").argument("<wikiId>", "Target wiki").argument("[jobId]", "Specific job ID (omit to list recent jobs)").action(async (wikiId, jobId) => {
  const client = new MemexClient();
  if (jobId) {
    // FIX: validate up front — previously a non-numeric jobId was passed
    // through parseInt unchecked, producing a request to
    // `/wikis/<id>/jobs/NaN`.
    const numericId = Number.parseInt(jobId, 10);
    if (Number.isNaN(numericId)) {
      console.error(`Error: jobId must be a number, got '${jobId}'`);
      process.exit(1);
    }
    const resp = await client.getJob(wikiId, numericId);
    if (!resp.ok) {
      console.error(`Error: ${resp.error}`);
      process.exit(1);
    }
    const job = resp.data;
    console.log(`Job #${job.id}`);
    console.log(`  Type: ${job.type}`);
    console.log(`  Status: ${job.status}`);
    console.log(`  Created: ${job.created_at}`);
    if (job.started_at) console.log(`  Started: ${job.started_at}`);
    if (job.completed_at) console.log(`  Completed: ${job.completed_at}`);
    if (job.result) {
      try {
        const parsed = JSON.parse(job.result);
        if (parsed.output) {
          console.log(`\nOutput:\n${parsed.output}`);
        }
        if (parsed.error) {
          console.log(`\nError:\n${parsed.error}`);
        }
        if (parsed.duration_ms) {
          console.log(`\nDuration: ${parsed.duration_ms}ms`);
        }
      } catch {
        // Result is not JSON — print it verbatim.
        console.log(`\nResult: ${job.result}`);
      }
    }
  } else {
    const resp = await client.listJobs(wikiId);
    if (!resp.ok) {
      console.error(`Error: ${resp.error}`);
      process.exit(1);
    }
    const jobs = resp.data ?? [];
    if (jobs.length === 0) {
      console.log(`No jobs for wiki '${wikiId}'.`);
      return;
    }
    const header = padRow2("ID", "TYPE", "STATUS", "CREATED");
    console.log(header);
    console.log("-".repeat(header.length));
    for (const job of jobs) {
      console.log(padRow2(
        String(job.id),
        job.type,
        job.status,
        job.created_at
      ));
    }
  }
});
2290
/** Fixed-width job table row: id (8 cols), type (10), status (12), created. */
function padRow2(id, type, status, created) {
  const cells = [id.padEnd(8), type.padEnd(10), status.padEnd(12), created];
  return cells.join(" ");
}
2293
+
2294
+ // src/index.ts
2295
// CLI entry point: register every subcommand on a single commander program
// and parse argv. Registration order determines help-text ordering.
var program = new Command13();
program.name("memex").description("Isolated, queued claude -p runtime for persistent knowledge bases").version("0.1.0");
for (const command of [
  serveCommand,
  createCommand,
  destroyCommand,
  configCommand,
  loginCommand,
  ingestCommand,
  queryCommand,
  lintCommand,
  logsCommand,
  listCommand,
  chownCommand,
  statusCommand
]) {
  program.addCommand(command);
}
program.parseAsync().catch((err) => {
  console.error(err.message ?? err);
  process.exit(1);
});
//# sourceMappingURL=memex.mjs.map