@askexenow/exe-os 0.9.11 → 0.9.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. package/dist/bin/backfill-conversations.js +22 -1
  2. package/dist/bin/backfill-responses.js +22 -1
  3. package/dist/bin/backfill-vectors.js +22 -1
  4. package/dist/bin/cleanup-stale-review-tasks.js +22 -1
  5. package/dist/bin/cli.js +22 -1
  6. package/dist/bin/exe-assign.js +22 -1
  7. package/dist/bin/exe-boot.js +22 -1
  8. package/dist/bin/exe-dispatch.js +22 -1
  9. package/dist/bin/exe-doctor.js +22 -1
  10. package/dist/bin/exe-export-behaviors.js +22 -1
  11. package/dist/bin/exe-forget.js +22 -1
  12. package/dist/bin/exe-gateway.js +22 -1
  13. package/dist/bin/exe-heartbeat.js +22 -1
  14. package/dist/bin/exe-kill.js +22 -1
  15. package/dist/bin/exe-launch-agent.js +22 -1
  16. package/dist/bin/exe-link.js +22 -1
  17. package/dist/bin/exe-pending-messages.js +22 -1
  18. package/dist/bin/exe-pending-notifications.js +22 -1
  19. package/dist/bin/exe-pending-reviews.js +22 -1
  20. package/dist/bin/exe-rename.js +22 -1
  21. package/dist/bin/exe-review.js +22 -1
  22. package/dist/bin/exe-search.js +22 -1
  23. package/dist/bin/exe-session-cleanup.js +22 -1
  24. package/dist/bin/exe-start-codex.js +22 -1
  25. package/dist/bin/exe-start-opencode.js +22 -1
  26. package/dist/bin/exe-status.js +22 -1
  27. package/dist/bin/exe-team.js +22 -1
  28. package/dist/bin/git-sweep.js +22 -1
  29. package/dist/bin/graph-backfill.js +22 -1
  30. package/dist/bin/graph-export.js +22 -1
  31. package/dist/bin/scan-tasks.js +22 -1
  32. package/dist/bin/setup.js +22 -1
  33. package/dist/bin/shard-migrate.js +22 -1
  34. package/dist/bin/wiki-sync.js +22 -1
  35. package/dist/gateway/index.js +22 -1
  36. package/dist/hooks/bug-report-worker.js +22 -1
  37. package/dist/hooks/codex-stop-task-finalizer.js +22 -1
  38. package/dist/hooks/commit-complete.js +22 -1
  39. package/dist/hooks/error-recall.js +22 -1
  40. package/dist/hooks/ingest-worker.js +22 -1
  41. package/dist/hooks/ingest.js +3345 -232
  42. package/dist/hooks/instructions-loaded.js +22 -1
  43. package/dist/hooks/notification.js +22 -1
  44. package/dist/hooks/post-compact.js +22 -1
  45. package/dist/hooks/pre-compact.js +22 -1
  46. package/dist/hooks/pre-tool-use.js +22 -1
  47. package/dist/hooks/prompt-ingest-worker.js +22 -1
  48. package/dist/hooks/prompt-submit.js +1700 -1396
  49. package/dist/hooks/response-ingest-worker.js +22 -1
  50. package/dist/hooks/session-end.js +345 -187
  51. package/dist/hooks/session-start.js +304 -15
  52. package/dist/hooks/stop.js +22 -1
  53. package/dist/hooks/subagent-stop.js +22 -1
  54. package/dist/hooks/summary-worker.js +22 -1
  55. package/dist/index.js +22 -1
  56. package/dist/lib/cloud-sync.js +22 -1
  57. package/dist/lib/database.js +22 -1
  58. package/dist/lib/db.js +22 -1
  59. package/dist/lib/device-registry.js +22 -1
  60. package/dist/lib/exe-daemon.js +39 -1
  61. package/dist/lib/hybrid-search.js +22 -1
  62. package/dist/lib/schedules.js +22 -1
  63. package/dist/lib/store.js +22 -1
  64. package/dist/mcp/server.js +126 -1
  65. package/dist/runtime/index.js +22 -1
  66. package/dist/tui/App.js +22 -1
  67. package/package.json +1 -1
@@ -17,6 +17,13 @@ var __export = (target, all) => {
17
17
  // src/lib/secure-files.ts
18
18
  import { chmodSync, existsSync, mkdirSync } from "fs";
19
19
  import { chmod, mkdir } from "fs/promises";
20
+ async function ensurePrivateDir(dirPath) {
21
+ await mkdir(dirPath, { recursive: true, mode: PRIVATE_DIR_MODE });
22
+ try {
23
+ await chmod(dirPath, PRIVATE_DIR_MODE);
24
+ } catch {
25
+ }
26
+ }
20
27
  function ensurePrivateDirSync(dirPath) {
21
28
  mkdirSync(dirPath, { recursive: true, mode: PRIVATE_DIR_MODE });
22
29
  try {
@@ -24,6 +31,12 @@ function ensurePrivateDirSync(dirPath) {
24
31
  } catch {
25
32
  }
26
33
  }
34
+ async function enforcePrivateFile(filePath) {
35
+ try {
36
+ await chmod(filePath, PRIVATE_FILE_MODE);
37
+ } catch {
38
+ }
39
+ }
27
40
  function enforcePrivateFileSync(filePath) {
28
41
  try {
29
42
  if (existsSync(filePath)) chmodSync(filePath, PRIVATE_FILE_MODE);
@@ -110,6 +123,39 @@ function normalizeAutoUpdate(raw) {
110
123
  const userAU = raw.autoUpdate ?? {};
111
124
  raw.autoUpdate = { ...defaultAU, ...userAU };
112
125
  }
126
+ async function loadConfig() {
127
+ const dir = process.env.EXE_OS_DIR ?? process.env.EXE_MEM_DIR ?? EXE_AI_DIR;
128
+ await ensurePrivateDir(dir);
129
+ const configPath = path.join(dir, "config.json");
130
+ if (!existsSync2(configPath)) {
131
+ return { ...DEFAULT_CONFIG, dbPath: path.join(dir, "memories.db") };
132
+ }
133
+ const raw = await readFile(configPath, "utf-8");
134
+ try {
135
+ let parsed = JSON.parse(raw);
136
+ parsed = migrateLegacyConfig(parsed);
137
+ const { config: migratedCfg, migrated, fromVersion } = migrateConfig(parsed);
138
+ if (migrated) {
139
+ process.stderr.write(`[exe-os] Config migrated from v${fromVersion} to v${migratedCfg.config_version}
140
+ `);
141
+ try {
142
+ await writeFile(configPath, JSON.stringify(migratedCfg, null, 2) + "\n");
143
+ await enforcePrivateFile(configPath);
144
+ } catch {
145
+ }
146
+ }
147
+ normalizeScalingRoadmap(migratedCfg);
148
+ normalizeSessionLifecycle(migratedCfg);
149
+ normalizeAutoUpdate(migratedCfg);
150
+ const config = { ...DEFAULT_CONFIG, dbPath: path.join(dir, "memories.db"), ...migratedCfg };
151
+ if (config.dbPath.startsWith("~")) {
152
+ config.dbPath = config.dbPath.replace(/^~/, os.homedir());
153
+ }
154
+ return config;
155
+ } catch {
156
+ return { ...DEFAULT_CONFIG, dbPath: path.join(dir, "memories.db") };
157
+ }
158
+ }
113
159
  function loadConfigSync() {
114
160
  const dir = process.env.EXE_OS_DIR ?? process.env.EXE_MEM_DIR ?? EXE_AI_DIR;
115
161
  const configPath = path.join(dir, "config.json");
@@ -388,219 +434,3243 @@ function validateEmployeeName(name) {
388
434
  error: "Name must start with a letter and contain only lowercase alphanumeric characters"
389
435
  };
390
436
  }
391
- return { valid: true };
437
+ return { valid: true };
438
+ }
439
+ async function loadEmployees(employeesPath = EMPLOYEES_PATH) {
440
+ if (!existsSync4(employeesPath)) {
441
+ return [];
442
+ }
443
+ const raw = await readFile2(employeesPath, "utf-8");
444
+ try {
445
+ return JSON.parse(raw);
446
+ } catch {
447
+ return [];
448
+ }
449
+ }
450
+ async function saveEmployees(employees, employeesPath = EMPLOYEES_PATH) {
451
+ await mkdir2(path3.dirname(employeesPath), { recursive: true });
452
+ await writeFile2(employeesPath, JSON.stringify(employees, null, 2) + "\n", "utf-8");
453
+ }
454
+ function loadEmployeesSync(employeesPath = EMPLOYEES_PATH) {
455
+ if (!existsSync4(employeesPath)) return [];
456
+ try {
457
+ return JSON.parse(readFileSync3(employeesPath, "utf-8"));
458
+ } catch {
459
+ return [];
460
+ }
461
+ }
462
+ function getEmployee(employees, name) {
463
+ return employees.find((e) => e.name.toLowerCase() === name.toLowerCase());
464
+ }
465
+ function getEmployeeByRole(employees, role) {
466
+ const lower = role.toLowerCase();
467
+ return employees.find((e) => e.role.toLowerCase() === lower);
468
+ }
469
+ function getEmployeeNamesByRole(employees, role) {
470
+ const lower = role.toLowerCase();
471
+ return employees.filter((e) => e.role.toLowerCase() === lower).map((e) => e.name);
472
+ }
473
+ function hasRole(agentName, role) {
474
+ const employees = loadEmployeesSync();
475
+ const emp = getEmployee(employees, agentName);
476
+ return emp ? emp.role.toLowerCase() === role.toLowerCase() : false;
477
+ }
478
+ function baseAgentName(name, employees) {
479
+ const match = name.match(/^([a-zA-Z]+)\d+$/);
480
+ if (!match) return name;
481
+ const base = match[1];
482
+ const roster = employees ?? loadEmployeesSync();
483
+ if (getEmployee(roster, base)) return base;
484
+ return name;
485
+ }
486
+ function isMultiInstance(agentName, employees) {
487
+ const roster = employees ?? loadEmployeesSync();
488
+ const emp = getEmployee(roster, agentName);
489
+ if (!emp) return false;
490
+ return MULTI_INSTANCE_ROLES.has(emp.role.toLowerCase());
491
+ }
492
+ function addEmployee(employees, employee) {
493
+ const normalized = { ...employee, name: employee.name.toLowerCase() };
494
+ if (employees.some((e) => e.name.toLowerCase() === normalized.name)) {
495
+ throw new Error(`Employee '${normalized.name}' already exists`);
496
+ }
497
+ return [...employees, normalized];
498
+ }
499
+ function appendToCoordinatorTeam(employee) {
500
+ const coordinator = getCoordinatorEmployee(loadEmployeesSync());
501
+ if (!coordinator) return;
502
+ const idPath = path3.join(IDENTITY_DIR, `${coordinator.name}.md`);
503
+ if (!existsSync4(idPath)) return;
504
+ const content = readFileSync3(idPath, "utf-8");
505
+ if (content.includes(`**${capitalize(employee.name)}`)) return;
506
+ const teamMatch = content.match(TEAM_SECTION_RE);
507
+ if (!teamMatch || teamMatch.index === void 0) return;
508
+ const afterTeam = content.slice(teamMatch.index + teamMatch[0].length);
509
+ const nextHeading = afterTeam.match(/\n## /);
510
+ const entry = `
511
+ **${capitalize(employee.name)} (${employee.role}):** Newly hired. Update this description as the role develops.
512
+ `;
513
+ let updated;
514
+ if (nextHeading && nextHeading.index !== void 0) {
515
+ const insertAt = teamMatch.index + teamMatch[0].length + nextHeading.index;
516
+ updated = content.slice(0, insertAt) + entry + content.slice(insertAt);
517
+ } else {
518
+ updated = content.trimEnd() + "\n" + entry;
519
+ }
520
+ writeFileSync2(idPath, updated, "utf-8");
521
+ }
522
+ function capitalize(s) {
523
+ return s.charAt(0).toUpperCase() + s.slice(1);
524
+ }
525
+ async function hireEmployee(employee) {
526
+ const employees = await loadEmployees();
527
+ const updated = addEmployee(employees, employee);
528
+ await saveEmployees(updated);
529
+ try {
530
+ appendToCoordinatorTeam(employee);
531
+ } catch {
532
+ }
533
+ try {
534
+ const { loadAgentConfig: loadAgentConfig2, saveAgentConfig: saveAgentConfig2 } = await Promise.resolve().then(() => (init_agent_config(), agent_config_exports));
535
+ const config = loadAgentConfig2();
536
+ const name = employee.name.toLowerCase();
537
+ if (!config[name] && config["default"]) {
538
+ config[name] = { ...config["default"] };
539
+ saveAgentConfig2(config);
540
+ }
541
+ } catch {
542
+ }
543
+ return updated;
544
+ }
545
+ async function normalizeRosterCase(rosterPath) {
546
+ const employees = await loadEmployees(rosterPath);
547
+ let changed = false;
548
+ for (const emp of employees) {
549
+ if (emp.name !== emp.name.toLowerCase()) {
550
+ const oldName = emp.name;
551
+ emp.name = emp.name.toLowerCase();
552
+ changed = true;
553
+ try {
554
+ const identityDir = path3.join(os2.homedir(), ".exe-os", "identity");
555
+ const oldPath = path3.join(identityDir, `${oldName}.md`);
556
+ const newPath = path3.join(identityDir, `${emp.name}.md`);
557
+ if (existsSync4(oldPath) && !existsSync4(newPath)) {
558
+ renameSync2(oldPath, newPath);
559
+ } else if (existsSync4(oldPath) && oldPath !== newPath) {
560
+ const content = readFileSync3(oldPath, "utf-8");
561
+ writeFileSync2(newPath, content, "utf-8");
562
+ if (oldPath.toLowerCase() !== newPath.toLowerCase()) {
563
+ unlinkSync(oldPath);
564
+ }
565
+ }
566
+ } catch {
567
+ }
568
+ }
569
+ }
570
+ if (changed) {
571
+ await saveEmployees(employees, rosterPath);
572
+ }
573
+ return changed;
574
+ }
575
+ function findExeBin() {
576
+ try {
577
+ return execSync2(process.platform === "win32" ? "where exe-os" : "which exe-os", { encoding: "utf8" }).trim();
578
+ } catch {
579
+ return null;
580
+ }
581
+ }
582
+ function registerBinSymlinks(name) {
583
+ const created = [];
584
+ const skipped = [];
585
+ const errors = [];
586
+ const exeBinPath = findExeBin();
587
+ if (!exeBinPath) {
588
+ errors.push("Could not find 'exe-os' in PATH");
589
+ return { created, skipped, errors };
590
+ }
591
+ const binDir = path3.dirname(exeBinPath);
592
+ let target;
593
+ try {
594
+ target = readlinkSync(exeBinPath);
595
+ } catch {
596
+ errors.push("Could not read 'exe' symlink");
597
+ return { created, skipped, errors };
598
+ }
599
+ for (const suffix of ["", "-opencode"]) {
600
+ const linkName = `${name}${suffix}`;
601
+ const linkPath = path3.join(binDir, linkName);
602
+ if (existsSync4(linkPath)) {
603
+ skipped.push(linkName);
604
+ continue;
605
+ }
606
+ try {
607
+ symlinkSync(target, linkPath);
608
+ created.push(linkName);
609
+ } catch (err) {
610
+ errors.push(`${linkName}: ${err instanceof Error ? err.message : String(err)}`);
611
+ }
612
+ }
613
+ return { created, skipped, errors };
614
+ }
615
+ var EMPLOYEES_PATH, DEFAULT_COORDINATOR_TEMPLATE_NAME, COORDINATOR_ROLE, MULTI_INSTANCE_ROLES, IDENTITY_DIR, TEAM_SECTION_RE;
616
+ var init_employees = __esm({
617
+ "src/lib/employees.ts"() {
618
+ "use strict";
619
+ init_config();
620
+ EMPLOYEES_PATH = path3.join(EXE_AI_DIR, "exe-employees.json");
621
+ DEFAULT_COORDINATOR_TEMPLATE_NAME = "exe";
622
+ COORDINATOR_ROLE = "COO";
623
+ MULTI_INSTANCE_ROLES = /* @__PURE__ */ new Set(["principal engineer", "content production specialist", "staff code reviewer"]);
624
+ IDENTITY_DIR = path3.join(EXE_AI_DIR, "identity");
625
+ TEAM_SECTION_RE = /^## Team\b.*$/m;
626
+ }
627
+ });
628
+
629
+ // src/lib/mcp-prefix.ts
630
+ function isExeMcpTool(toolName) {
631
+ if (!toolName) return false;
632
+ return MCP_TOOL_PREFIXES.some((p) => toolName.startsWith(p));
633
+ }
634
+ function stripExeMcpPrefix(toolName) {
635
+ for (const p of MCP_TOOL_PREFIXES) {
636
+ if (toolName.startsWith(p)) return toolName.slice(p.length);
637
+ }
638
+ return toolName;
639
+ }
640
+ var MCP_PRIMARY_KEY, MCP_LEGACY_KEY, MCP_TOOL_PREFIXES;
641
+ var init_mcp_prefix = __esm({
642
+ "src/lib/mcp-prefix.ts"() {
643
+ "use strict";
644
+ MCP_PRIMARY_KEY = "exe-os";
645
+ MCP_LEGACY_KEY = "exe-mem";
646
+ MCP_TOOL_PREFIXES = [
647
+ `mcp__${MCP_PRIMARY_KEY}__`,
648
+ `mcp__${MCP_LEGACY_KEY}__`
649
+ ];
650
+ }
651
+ });
652
+
653
+ // src/types/memory.ts
654
+ var EMBEDDING_DIM;
655
+ var init_memory = __esm({
656
+ "src/types/memory.ts"() {
657
+ "use strict";
658
+ EMBEDDING_DIM = 1024;
659
+ }
660
+ });
661
+
662
+ // src/lib/db-retry.ts
663
+ function isBusyError(err) {
664
+ if (err instanceof Error) {
665
+ const msg = err.message.toLowerCase();
666
+ return msg.includes("sqlite_busy") || msg.includes("database is locked");
667
+ }
668
+ return false;
669
+ }
670
+ function delay(ms) {
671
+ return new Promise((resolve) => setTimeout(resolve, ms));
672
+ }
673
+ async function retryOnBusy(fn, label) {
674
+ let lastError;
675
+ for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
676
+ try {
677
+ return await fn();
678
+ } catch (err) {
679
+ lastError = err;
680
+ if (!isBusyError(err) || attempt === MAX_RETRIES) {
681
+ throw err;
682
+ }
683
+ const backoff = BASE_DELAY_MS * Math.pow(2, attempt);
684
+ const jitter = Math.floor(Math.random() * MAX_JITTER_MS);
685
+ process.stderr.write(
686
+ `[exe-os] SQLITE_BUSY ${label} retry ${attempt + 1}/${MAX_RETRIES} \u2014 waiting ${backoff + jitter}ms
687
+ `
688
+ );
689
+ await delay(backoff + jitter);
690
+ }
691
+ }
692
+ throw lastError;
693
+ }
694
+ function wrapWithRetry(client) {
695
+ return new Proxy(client, {
696
+ get(target, prop, receiver) {
697
+ if (prop === "execute") {
698
+ return (sql) => retryOnBusy(() => target.execute(sql), "execute");
699
+ }
700
+ if (prop === "batch") {
701
+ return (stmts, mode) => retryOnBusy(() => target.batch(stmts, mode), "batch");
702
+ }
703
+ return Reflect.get(target, prop, receiver);
704
+ }
705
+ });
706
+ }
707
+ var MAX_RETRIES, BASE_DELAY_MS, MAX_JITTER_MS;
708
+ var init_db_retry = __esm({
709
+ "src/lib/db-retry.ts"() {
710
+ "use strict";
711
+ MAX_RETRIES = 3;
712
+ BASE_DELAY_MS = 200;
713
+ MAX_JITTER_MS = 300;
714
+ }
715
+ });
716
+
717
+ // src/lib/database-adapter.ts
718
+ import os4 from "os";
719
+ import path7 from "path";
720
+ import { createRequire } from "module";
721
+ import { pathToFileURL } from "url";
722
+ function quotedIdentifier(identifier) {
723
+ return `"${identifier.replace(/"/g, '""')}"`;
724
+ }
725
+ function unqualifiedTableName(name) {
726
+ const raw = name.trim().replace(/^"|"$/g, "");
727
+ const parts = raw.split(".");
728
+ return parts[parts.length - 1].replace(/^"|"$/g, "").toLowerCase();
729
+ }
730
+ function stripTrailingSemicolon(sql) {
731
+ return sql.trim().replace(/;+\s*$/u, "");
732
+ }
733
+ function appendClause(sql, clause) {
734
+ const trimmed = stripTrailingSemicolon(sql);
735
+ const returningMatch = /\sRETURNING\b[\s\S]*$/iu.exec(trimmed);
736
+ if (!returningMatch) {
737
+ return `${trimmed}${clause}`;
738
+ }
739
+ const idx = returningMatch.index;
740
+ return `${trimmed.slice(0, idx)}${clause}${trimmed.slice(idx)}`;
741
+ }
742
+ function normalizeStatement(stmt) {
743
+ if (typeof stmt === "string") {
744
+ return { kind: "positional", sql: stmt, args: [] };
745
+ }
746
+ const sql = stmt.sql;
747
+ if (Array.isArray(stmt.args) || stmt.args === void 0) {
748
+ return { kind: "positional", sql, args: stmt.args ?? [] };
749
+ }
750
+ return { kind: "named", sql, args: stmt.args };
751
+ }
752
+ function rewriteBooleanLiterals(sql) {
753
+ let out = sql;
754
+ for (const column of BOOLEAN_COLUMN_NAMES) {
755
+ const scoped = `((?:\\b[a-z_][a-z0-9_]*\\.)?${column})`;
756
+ out = out.replace(new RegExp(`${scoped}\\s*=\\s*0\\b`, "giu"), "$1 = FALSE");
757
+ out = out.replace(new RegExp(`${scoped}\\s*=\\s*1\\b`, "giu"), "$1 = TRUE");
758
+ out = out.replace(new RegExp(`${scoped}\\s*!=\\s*0\\b`, "giu"), "$1 != FALSE");
759
+ out = out.replace(new RegExp(`${scoped}\\s*!=\\s*1\\b`, "giu"), "$1 != TRUE");
760
+ out = out.replace(new RegExp(`${scoped}\\s*<>\\s*0\\b`, "giu"), "$1 <> FALSE");
761
+ out = out.replace(new RegExp(`${scoped}\\s*<>\\s*1\\b`, "giu"), "$1 <> TRUE");
762
+ }
763
+ return out;
764
+ }
765
+ function rewriteInsertOrIgnore(sql) {
766
+ if (!/^\s*INSERT\s+OR\s+IGNORE\s+INTO\b/iu.test(sql)) {
767
+ return sql;
768
+ }
769
+ const replaced = sql.replace(/^\s*INSERT\s+OR\s+IGNORE\s+INTO\b/iu, "INSERT INTO");
770
+ return /\bON\s+CONFLICT\b/iu.test(replaced) ? replaced : appendClause(replaced, " ON CONFLICT DO NOTHING");
771
+ }
772
+ function rewriteInsertOrReplace(sql) {
773
+ const match = /^\s*INSERT\s+OR\s+REPLACE\s+INTO\s+([A-Za-z0-9_."]+)\s*\(([^)]+)\)([\s\S]*)$/iu.exec(sql);
774
+ if (!match) {
775
+ return sql;
776
+ }
777
+ const rawTable = match[1];
778
+ const rawColumns = match[2];
779
+ const remainder = match[3];
780
+ const tableName = unqualifiedTableName(rawTable);
781
+ const conflictKeys = UPSERT_KEYS[tableName];
782
+ if (!conflictKeys?.length) {
783
+ return sql;
784
+ }
785
+ const columns = rawColumns.split(",").map((col) => col.trim().replace(/^"|"$/g, ""));
786
+ const updateColumns = columns.filter((col) => !conflictKeys.includes(col));
787
+ const conflictTarget = conflictKeys.map(quotedIdentifier).join(", ");
788
+ const updateClause = updateColumns.length === 0 ? " DO NOTHING" : ` DO UPDATE SET ${updateColumns.map((col) => `${quotedIdentifier(col)} = EXCLUDED.${quotedIdentifier(col)}`).join(", ")}`;
789
+ return `INSERT INTO ${rawTable} (${rawColumns})${appendClause(remainder, ` ON CONFLICT (${conflictTarget})${updateClause}`)}`;
790
+ }
791
+ function rewriteSql(sql) {
792
+ let out = sql;
793
+ out = out.replace(/\bdatetime\(\s*['"]now['"]\s*\)/giu, "CURRENT_TIMESTAMP");
794
+ out = out.replace(/\bvector32\s*\(\s*\?\s*\)/giu, "?");
795
+ out = rewriteBooleanLiterals(out);
796
+ out = rewriteInsertOrReplace(out);
797
+ out = rewriteInsertOrIgnore(out);
798
+ return stripTrailingSemicolon(out);
799
+ }
800
+ function toBoolean(value) {
801
+ if (value === null || value === void 0) return value;
802
+ if (typeof value === "boolean") return value;
803
+ if (typeof value === "number") return value !== 0;
804
+ if (typeof value === "bigint") return value !== 0n;
805
+ if (typeof value === "string") {
806
+ const normalized = value.trim().toLowerCase();
807
+ if (normalized === "0" || normalized === "false") return false;
808
+ if (normalized === "1" || normalized === "true") return true;
809
+ }
810
+ return Boolean(value);
811
+ }
812
+ function countQuestionMarks(sql, end) {
813
+ let count = 0;
814
+ let inSingle = false;
815
+ let inDouble = false;
816
+ let inLineComment = false;
817
+ let inBlockComment = false;
818
+ for (let i = 0; i < end; i++) {
819
+ const ch = sql[i];
820
+ const next = sql[i + 1];
821
+ if (inLineComment) {
822
+ if (ch === "\n") inLineComment = false;
823
+ continue;
824
+ }
825
+ if (inBlockComment) {
826
+ if (ch === "*" && next === "/") {
827
+ inBlockComment = false;
828
+ i += 1;
829
+ }
830
+ continue;
831
+ }
832
+ if (!inSingle && !inDouble && ch === "-" && next === "-") {
833
+ inLineComment = true;
834
+ i += 1;
835
+ continue;
836
+ }
837
+ if (!inSingle && !inDouble && ch === "/" && next === "*") {
838
+ inBlockComment = true;
839
+ i += 1;
840
+ continue;
841
+ }
842
+ if (!inDouble && ch === "'" && sql[i - 1] !== "\\") {
843
+ inSingle = !inSingle;
844
+ continue;
845
+ }
846
+ if (!inSingle && ch === '"' && sql[i - 1] !== "\\") {
847
+ inDouble = !inDouble;
848
+ continue;
849
+ }
850
+ if (!inSingle && !inDouble && ch === "?") {
851
+ count += 1;
852
+ }
853
+ }
854
+ return count;
855
+ }
856
+ function findBooleanPlaceholderIndexes(sql) {
857
+ const indexes = /* @__PURE__ */ new Set();
858
+ for (const column of BOOLEAN_COLUMN_NAMES) {
859
+ const pattern = new RegExp(`(?:\\b[a-z_][a-z0-9_]*\\.)?${column}\\s*=\\s*\\?`, "giu");
860
+ for (const match of sql.matchAll(pattern)) {
861
+ const matchText = match[0];
862
+ const qIndex = match.index + matchText.lastIndexOf("?");
863
+ indexes.add(countQuestionMarks(sql, qIndex + 1));
864
+ }
865
+ }
866
+ return indexes;
867
+ }
868
+ function coerceInsertBooleanArgs(sql, args) {
869
+ const match = /^\s*INSERT(?:\s+OR\s+(?:IGNORE|REPLACE))?\s+INTO\s+([A-Za-z0-9_."]+)\s*\(([^)]+)\)/iu.exec(sql);
870
+ if (!match) return;
871
+ const rawTable = match[1];
872
+ const rawColumns = match[2];
873
+ const boolColumns = BOOLEAN_COLUMNS_BY_TABLE[unqualifiedTableName(rawTable)];
874
+ if (!boolColumns?.size) return;
875
+ const columns = rawColumns.split(",").map((col) => col.trim().replace(/^"|"$/g, ""));
876
+ for (const [index, column] of columns.entries()) {
877
+ if (boolColumns.has(column) && index < args.length) {
878
+ args[index] = toBoolean(args[index]);
879
+ }
880
+ }
881
+ }
882
+ function coerceUpdateBooleanArgs(sql, args) {
883
+ const match = /^\s*UPDATE\s+([A-Za-z0-9_."]+)\s+SET\s+([\s\S]+?)(?:\s+WHERE\b|$)/iu.exec(sql);
884
+ if (!match) return;
885
+ const rawTable = match[1];
886
+ const setClause = match[2];
887
+ const boolColumns = BOOLEAN_COLUMNS_BY_TABLE[unqualifiedTableName(rawTable)];
888
+ if (!boolColumns?.size) return;
889
+ const assignments = setClause.split(",");
890
+ let placeholderIndex = 0;
891
+ for (const assignment of assignments) {
892
+ if (!assignment.includes("?")) continue;
893
+ placeholderIndex += 1;
894
+ const colMatch = /^\s*(?:[A-Za-z_][A-Za-z0-9_]*\.)?([A-Za-z_][A-Za-z0-9_]*)\s*=\s*\?/iu.exec(assignment);
895
+ if (colMatch && boolColumns.has(colMatch[1])) {
896
+ args[placeholderIndex - 1] = toBoolean(args[placeholderIndex - 1]);
897
+ }
898
+ }
899
+ }
900
+ function coerceBooleanArgs(sql, args) {
901
+ const nextArgs = [...args];
902
+ coerceInsertBooleanArgs(sql, nextArgs);
903
+ coerceUpdateBooleanArgs(sql, nextArgs);
904
+ const placeholderIndexes = findBooleanPlaceholderIndexes(sql);
905
+ for (const index of placeholderIndexes) {
906
+ if (index > 0 && index <= nextArgs.length) {
907
+ nextArgs[index - 1] = toBoolean(nextArgs[index - 1]);
908
+ }
909
+ }
910
+ return nextArgs;
911
+ }
912
+ function convertQuestionMarksToDollarParams(sql) {
913
+ let out = "";
914
+ let placeholder = 0;
915
+ let inSingle = false;
916
+ let inDouble = false;
917
+ let inLineComment = false;
918
+ let inBlockComment = false;
919
+ for (let i = 0; i < sql.length; i++) {
920
+ const ch = sql[i];
921
+ const next = sql[i + 1];
922
+ if (inLineComment) {
923
+ out += ch;
924
+ if (ch === "\n") inLineComment = false;
925
+ continue;
926
+ }
927
+ if (inBlockComment) {
928
+ out += ch;
929
+ if (ch === "*" && next === "/") {
930
+ out += next;
931
+ inBlockComment = false;
932
+ i += 1;
933
+ }
934
+ continue;
935
+ }
936
+ if (!inSingle && !inDouble && ch === "-" && next === "-") {
937
+ out += ch + next;
938
+ inLineComment = true;
939
+ i += 1;
940
+ continue;
941
+ }
942
+ if (!inSingle && !inDouble && ch === "/" && next === "*") {
943
+ out += ch + next;
944
+ inBlockComment = true;
945
+ i += 1;
946
+ continue;
947
+ }
948
+ if (!inDouble && ch === "'" && sql[i - 1] !== "\\") {
949
+ inSingle = !inSingle;
950
+ out += ch;
951
+ continue;
952
+ }
953
+ if (!inSingle && ch === '"' && sql[i - 1] !== "\\") {
954
+ inDouble = !inDouble;
955
+ out += ch;
956
+ continue;
957
+ }
958
+ if (!inSingle && !inDouble && ch === "?") {
959
+ placeholder += 1;
960
+ out += `$${placeholder}`;
961
+ continue;
962
+ }
963
+ out += ch;
964
+ }
965
+ return out;
966
+ }
967
+ function translateStatementForPostgres(stmt) {
968
+ const normalized = normalizeStatement(stmt);
969
+ if (normalized.kind === "named") {
970
+ throw new Error("Named SQL parameters are not supported by the Prisma adapter.");
971
+ }
972
+ const rewrittenSql = rewriteSql(normalized.sql);
973
+ const coercedArgs = coerceBooleanArgs(rewrittenSql, normalized.args);
974
+ return {
975
+ sql: convertQuestionMarksToDollarParams(rewrittenSql),
976
+ args: coercedArgs
977
+ };
978
+ }
979
+ function shouldBypassPostgres(stmt) {
980
+ const normalized = normalizeStatement(stmt);
981
+ if (normalized.kind === "named") {
982
+ return true;
983
+ }
984
+ return IMMEDIATE_FALLBACK_PATTERNS.some((pattern) => pattern.test(normalized.sql));
985
+ }
986
+ function shouldFallbackOnError(error) {
987
+ const message = error instanceof Error ? error.message : String(error);
988
+ return /42P01|42883|42601|does not exist|syntax error|not supported|Named SQL parameters are not supported/iu.test(message);
989
+ }
990
+ function isReadQuery(sql) {
991
+ const trimmed = sql.trimStart();
992
+ return /^(SELECT|WITH|SHOW|EXPLAIN|VALUES)\b/iu.test(trimmed) || /\bRETURNING\b/iu.test(trimmed);
993
+ }
994
+ function buildRow(row, columns) {
995
+ const values = columns.map((column) => row[column]);
996
+ return Object.assign(values, row);
997
+ }
998
+ function buildResultSet(rows, rowsAffected = 0) {
999
+ const columns = rows[0] ? Object.keys(rows[0]) : [];
1000
+ const resultRows = rows.map((row) => buildRow(row, columns));
1001
+ return {
1002
+ columns,
1003
+ columnTypes: columns.map(() => ""),
1004
+ rows: resultRows,
1005
+ rowsAffected,
1006
+ lastInsertRowid: void 0,
1007
+ toJSON() {
1008
+ return {
1009
+ columns,
1010
+ columnTypes: columns.map(() => ""),
1011
+ rows,
1012
+ rowsAffected,
1013
+ lastInsertRowid: void 0
1014
+ };
1015
+ }
1016
+ };
1017
+ }
1018
+ async function loadPrismaClient() {
1019
+ if (!prismaClientPromise) {
1020
+ prismaClientPromise = (async () => {
1021
+ const explicitPath = process.env.EXE_OS_PRISMA_CLIENT_PATH;
1022
+ if (explicitPath) {
1023
+ const module2 = await import(pathToFileURL(explicitPath).href);
1024
+ const PrismaClient2 = module2.PrismaClient ?? module2.default?.PrismaClient;
1025
+ if (!PrismaClient2) {
1026
+ throw new Error(`No PrismaClient export found at ${explicitPath}`);
1027
+ }
1028
+ return new PrismaClient2();
1029
+ }
1030
+ const exeDbRoot = process.env.EXE_DB_ROOT ?? path7.join(os4.homedir(), "exe-db");
1031
+ const requireFromExeDb = createRequire(path7.join(exeDbRoot, "package.json"));
1032
+ const prismaEntry = requireFromExeDb.resolve("@prisma/client");
1033
+ const module = await import(pathToFileURL(prismaEntry).href);
1034
+ const PrismaClient = module.PrismaClient ?? module.default?.PrismaClient;
1035
+ if (!PrismaClient) {
1036
+ throw new Error(`No PrismaClient export found in ${prismaEntry}`);
1037
+ }
1038
+ return new PrismaClient();
1039
+ })();
1040
+ }
1041
+ return prismaClientPromise;
1042
+ }
1043
+ async function ensureCompatibilityViews(prisma) {
1044
+ if (!compatibilityBootstrapPromise) {
1045
+ compatibilityBootstrapPromise = (async () => {
1046
+ for (const mapping of VIEW_MAPPINGS) {
1047
+ const relation = mapping.source.replace(/"/g, "");
1048
+ const rows = await prisma.$queryRawUnsafe(
1049
+ "SELECT to_regclass($1) AS regclass",
1050
+ relation
1051
+ );
1052
+ if (!rows[0]?.regclass) {
1053
+ continue;
1054
+ }
1055
+ await prisma.$executeRawUnsafe(
1056
+ `CREATE OR REPLACE VIEW public.${quotedIdentifier(mapping.view)} AS SELECT * FROM ${mapping.source}`
1057
+ );
1058
+ }
1059
+ })();
1060
+ }
1061
+ return compatibilityBootstrapPromise;
1062
+ }
1063
+ async function executeOnPrisma(executor, stmt) {
1064
+ const translated = translateStatementForPostgres(stmt);
1065
+ if (isReadQuery(translated.sql)) {
1066
+ const rows = await executor.$queryRawUnsafe(
1067
+ translated.sql,
1068
+ ...translated.args
1069
+ );
1070
+ return buildResultSet(rows, /\bRETURNING\b/iu.test(translated.sql) ? rows.length : 0);
1071
+ }
1072
+ const rowsAffected = await executor.$executeRawUnsafe(translated.sql, ...translated.args);
1073
+ return buildResultSet([], rowsAffected);
1074
+ }
1075
+ function splitSqlStatements(sql) {
1076
+ const parts = [];
1077
+ let current = "";
1078
+ let inSingle = false;
1079
+ let inDouble = false;
1080
+ let inLineComment = false;
1081
+ let inBlockComment = false;
1082
+ for (let i = 0; i < sql.length; i++) {
1083
+ const ch = sql[i];
1084
+ const next = sql[i + 1];
1085
+ if (inLineComment) {
1086
+ current += ch;
1087
+ if (ch === "\n") inLineComment = false;
1088
+ continue;
1089
+ }
1090
+ if (inBlockComment) {
1091
+ current += ch;
1092
+ if (ch === "*" && next === "/") {
1093
+ current += next;
1094
+ inBlockComment = false;
1095
+ i += 1;
1096
+ }
1097
+ continue;
1098
+ }
1099
+ if (!inSingle && !inDouble && ch === "-" && next === "-") {
1100
+ current += ch + next;
1101
+ inLineComment = true;
1102
+ i += 1;
1103
+ continue;
1104
+ }
1105
+ if (!inSingle && !inDouble && ch === "/" && next === "*") {
1106
+ current += ch + next;
1107
+ inBlockComment = true;
1108
+ i += 1;
1109
+ continue;
1110
+ }
1111
+ if (!inDouble && ch === "'" && sql[i - 1] !== "\\") {
1112
+ inSingle = !inSingle;
1113
+ current += ch;
1114
+ continue;
1115
+ }
1116
+ if (!inSingle && ch === '"' && sql[i - 1] !== "\\") {
1117
+ inDouble = !inDouble;
1118
+ current += ch;
1119
+ continue;
1120
+ }
1121
+ if (!inSingle && !inDouble && ch === ";") {
1122
+ if (current.trim()) {
1123
+ parts.push(current.trim());
1124
+ }
1125
+ current = "";
1126
+ continue;
1127
+ }
1128
+ current += ch;
1129
+ }
1130
+ if (current.trim()) {
1131
+ parts.push(current.trim());
1132
+ }
1133
+ return parts;
1134
+ }
1135
// Builds a database adapter that routes statements to Prisma/Postgres while
// transparently falling back to the libSQL/SQLite client (`fallbackClient`)
// for statements Postgres cannot serve (see IMMEDIATE_FALLBACK_PATTERNS) or
// for Prisma errors classified as fallback-worthy by shouldFallbackOnError.
// Returns an object matching the libSQL client surface (execute/batch/
// migrate/transaction/executeMultiple/sync/close + closed/protocol getters).
async function createPrismaDbAdapter(fallbackClient) {
  const prisma = await loadPrismaClient();
  // Create the SQLite-compatible views on the Postgres side before first use.
  await ensureCompatibilityViews(prisma);
  let closed = false;
  let adapter;
  // Runs `stmt` on the SQLite fallback. `error` is the Prisma failure (if any)
  // that triggered the fallback; it is logged to stderr for observability.
  // Throws when no fallback client was provided.
  const fallbackExecute = async (stmt, error) => {
    if (!fallbackClient) {
      if (error) throw error;
      throw new Error("No fallback SQLite client is available for this Prisma-routed query.");
    }
    if (error) {
      process.stderr.write(
        `[database-adapter] Falling back to SQLite: ${error instanceof Error ? error.message : String(error)}
`
      );
    }
    return fallbackClient.execute(stmt);
  };
  adapter = {
    // Single-statement execution: bypass Postgres entirely for SQLite-only
    // syntax, otherwise try Prisma and fall back on recoverable errors.
    async execute(stmt) {
      if (shouldBypassPostgres(stmt)) {
        return fallbackExecute(stmt);
      }
      try {
        return await executeOnPrisma(prisma, stmt);
      } catch (error) {
        if (shouldFallbackOnError(error)) {
          return fallbackExecute(stmt, error);
        }
        throw error;
      }
    },
    // Batch execution: if ANY statement is SQLite-only the whole batch goes
    // to the fallback (mixing backends inside one batch is not supported).
    async batch(stmts, mode) {
      if (stmts.some((stmt) => shouldBypassPostgres(stmt))) {
        if (!fallbackClient) {
          throw new Error("Cannot batch unsupported SQLite-only statements without a fallback client.");
        }
        return fallbackClient.batch(stmts, mode);
      }
      try {
        // Prefer an interactive Prisma transaction so the batch is atomic.
        if (prisma.$transaction) {
          return await prisma.$transaction(async (tx) => {
            const results2 = [];
            for (const stmt of stmts) {
              results2.push(await executeOnPrisma(tx, stmt));
            }
            return results2;
          });
        }
        // No $transaction available: execute sequentially, non-atomically.
        const results = [];
        for (const stmt of stmts) {
          results.push(await executeOnPrisma(prisma, stmt));
        }
        return results;
      } catch (error) {
        if (fallbackClient && shouldFallbackOnError(error)) {
          process.stderr.write(
            `[database-adapter] Falling back batch to SQLite: ${error instanceof Error ? error.message : String(error)}
`
          );
          return fallbackClient.batch(stmts, mode);
        }
        throw error;
      }
    },
    // Migrations always run on SQLite when a fallback exists; otherwise they
    // are replayed through this adapter's own batch().
    async migrate(stmts) {
      if (fallbackClient) {
        return fallbackClient.migrate(stmts);
      }
      return adapter.batch(stmts, "deferred");
    },
    // Interactive transactions are not bridged to Prisma at all.
    async transaction(mode) {
      if (!fallbackClient) {
        throw new Error("Interactive transactions are only supported on the SQLite fallback client.");
      }
      return fallbackClient.transaction(mode);
    },
    // Multi-statement scripts: SQLite-only scripts go to the fallback whole;
    // otherwise the script is split and each statement routed via execute().
    async executeMultiple(sql) {
      if (fallbackClient && shouldBypassPostgres(sql)) {
        return fallbackClient.executeMultiple(sql);
      }
      for (const statement of splitSqlStatements(sql)) {
        await adapter.execute(statement);
      }
    },
    // sync() is a libSQL replication concept; report a no-op result when
    // there is no SQLite client to delegate to.
    async sync() {
      if (fallbackClient) {
        return fallbackClient.sync();
      }
      return { frame_no: 0, frames_synced: 0 };
    },
    // Marks the adapter closed, resets the module-level Prisma caches so a
    // later init creates a fresh client, and disconnects best-effort
    // (the returned promise is intentionally not awaited).
    close() {
      closed = true;
      prismaClientPromise = null;
      compatibilityBootstrapPromise = null;
      void prisma.$disconnect?.();
    },
    get closed() {
      return closed;
    },
    get protocol() {
      return "prisma-postgres";
    }
  };
  return adapter;
}
1241
// Module-level state for the database adapter; populated lazily by the
// esbuild module initializer below (hoisted `var` declarations are part of
// the bundler's lazy-ESM pattern).
var VIEW_MAPPINGS, UPSERT_KEYS, BOOLEAN_COLUMNS_BY_TABLE, BOOLEAN_COLUMN_NAMES, IMMEDIATE_FALLBACK_PATTERNS, prismaClientPromise, compatibilityBootstrapPromise;
// Lazy initializer for src/lib/database-adapter.ts (esbuild __esm wrapper):
// runs once, on first import of the module's bindings.
var init_database_adapter = __esm({
  "src/lib/database-adapter.ts"() {
    "use strict";
    // SQLite-visible view name -> schema-qualified Postgres source table.
    VIEW_MAPPINGS = [
      { view: "memories", source: "memory.memory_records" },
      { view: "tasks", source: "memory.tasks" },
      { view: "behaviors", source: "memory.behaviors" },
      { view: "entities", source: "memory.entities" },
      { view: "relationships", source: "memory.relationships" },
      { view: "entity_memories", source: "memory.entity_memories" },
      { view: "entity_aliases", source: "memory.entity_aliases" },
      { view: "notifications", source: "memory.notifications" },
      { view: "messages", source: "memory.messages" },
      { view: "users", source: "wiki.users" },
      { view: "workspaces", source: "wiki.workspaces" },
      { view: "workspace_users", source: "wiki.workspace_users" },
      { view: "documents", source: "wiki.workspace_documents" },
      { view: "chats", source: "wiki.workspace_chats" }
    ];
    // Conflict-target column(s) per table for upsert translation.
    // Note: entity_aliases keys on `alias` rather than `id`.
    UPSERT_KEYS = {
      memories: ["id"],
      tasks: ["id"],
      behaviors: ["id"],
      entities: ["id"],
      relationships: ["id"],
      entity_aliases: ["alias"],
      notifications: ["id"],
      messages: ["id"],
      users: ["id"],
      workspaces: ["id"],
      workspace_users: ["id"],
      documents: ["id"],
      chats: ["id"]
    };
    // Columns stored as 0/1 integers in SQLite that need boolean coercion
    // when routed to Postgres.
    BOOLEAN_COLUMNS_BY_TABLE = {
      memories: /* @__PURE__ */ new Set(["has_error", "draft"]),
      behaviors: /* @__PURE__ */ new Set(["active"]),
      notifications: /* @__PURE__ */ new Set(["read"]),
      users: /* @__PURE__ */ new Set(["has_personal_memory"])
    };
    // Flattened union of all boolean column names across tables.
    BOOLEAN_COLUMN_NAMES = new Set(
      Object.values(BOOLEAN_COLUMNS_BY_TABLE).flatMap((cols) => [...cols])
    );
    // SQL shapes that Postgres cannot serve (PRAGMAs, sqlite_master, FTS5
    // tables/MATCH, SQLite-only functions) — these bypass Prisma entirely.
    IMMEDIATE_FALLBACK_PATTERNS = [
      /\bPRAGMA\b/i,
      /\bsqlite_master\b/i,
      /(?:^|[.\s])(?:memories|conversations|entities)_fts\b/i,
      /\bMATCH\b/i,
      /\bvector_distance_cos\s*\(/i,
      /\bjson_extract\s*\(/i,
      /\bjulianday\s*\(/i,
      /\bstrftime\s*\(/i,
      /\blast_insert_rowid\s*\(/i
    ];
    // Memoization slots for the Prisma client and the one-time
    // compatibility-view bootstrap; reset by adapter.close().
    prismaClientPromise = null;
    compatibilityBootstrapPromise = null;
  }
});
1300
+
1301
+ // src/lib/database.ts
1302
+ import { createClient } from "@libsql/client";
1303
// (Re)initializes the database layer for `config.dbPath`.
// Tears down any previously created clients and timers, opens a fresh libSQL
// client (optionally encrypted), wraps it with retry logic, applies
// best-effort PRAGMA tuning, schedules periodic passive WAL checkpoints, and
// — when DATABASE_URL is set — layers the Prisma/Postgres adapter on top with
// the retry-wrapped SQLite client as its fallback.
// Fix: removed the dead `if (_walCheckpointTimer) clearInterval(...)` re-check
// that sat between client creation and timer scheduling — the timer is
// unconditionally cleared and nulled at the top of this function and is never
// reassigned before that point, so the re-check could never fire.
async function initDatabase(config) {
  // Stop the previous checkpoint timer before swapping clients.
  if (_walCheckpointTimer) {
    clearInterval(_walCheckpointTimer);
    _walCheckpointTimer = null;
  }
  if (_daemonClient) {
    _daemonClient.close();
    _daemonClient = null;
  }
  // Close the adapter only when it is a distinct object (the Prisma adapter);
  // when it merely aliases _resilientClient the close below covers it.
  if (_adapterClient && _adapterClient !== _resilientClient) {
    _adapterClient.close();
  }
  _adapterClient = null;
  if (_client) {
    _client.close();
    _client = null;
    _resilientClient = null;
  }
  const opts = {
    url: `file:${config.dbPath}`
  };
  if (config.encryptionKey) {
    opts.encryptionKey = config.encryptionKey;
  }
  _client = createClient(opts);
  _resilientClient = wrapWithRetry(_client);
  _adapterClient = _resilientClient;
  // Best-effort PRAGMA tuning; failures are intentionally swallowed so init
  // succeeds even on clients that reject PRAGMAs.
  _client.execute("PRAGMA busy_timeout = 30000").catch(() => {
  });
  _client.execute("PRAGMA journal_mode = WAL").catch(() => {
  });
  // Checkpoint the WAL passively every 30s; optional chaining guards against
  // the client being torn down by a concurrent re-init.
  _walCheckpointTimer = setInterval(() => {
    _client?.execute("PRAGMA wal_checkpoint(PASSIVE)").catch(() => {
    });
  }, 3e4);
  // Do not keep the process alive solely for checkpointing.
  _walCheckpointTimer.unref();
  if (process.env.DATABASE_URL) {
    _adapterClient = await createPrismaDbAdapter(_resilientClient);
  }
}
1344
// Selects the database client appropriate for the current process role:
// Prisma adapter when DATABASE_URL is set, the raw retry-wrapped client when
// running inside the daemon, the daemon proxy when one is alive, and the
// retry-wrapped local client otherwise.
function getClient() {
  if (!_adapterClient) {
    throw new Error("Database client not initialized. Call initDatabase() first.");
  }
  const { DATABASE_URL, EXE_IS_DAEMON } = process.env;
  if (DATABASE_URL) {
    return _adapterClient;
  }
  if (EXE_IS_DAEMON === "1") {
    return _resilientClient;
  }
  const daemonIsLive = _daemonClient?._isDaemonActive();
  return daemonIsLive ? _daemonClient : _resilientClient;
}
1359
// Returns the underlying libSQL client directly — no retry wrapper, daemon
// proxy, or Prisma routing. Throws if initDatabase() has not run yet.
function getRawClient() {
  if (_client) {
    return _client;
  }
  throw new Error("Database client not initialized. Call initDatabase() first.");
}
1365
+ async function ensureSchema() {
1366
+ const client = getRawClient();
1367
+ await client.execute("PRAGMA journal_mode = WAL");
1368
+ await client.execute("PRAGMA busy_timeout = 30000");
1369
+ await client.execute("PRAGMA wal_autocheckpoint = 1000");
1370
+ try {
1371
+ await client.execute("PRAGMA libsql_vector_search_ef = 128");
1372
+ } catch {
1373
+ }
1374
+ await client.executeMultiple(`
1375
+ CREATE TABLE IF NOT EXISTS memories (
1376
+ id TEXT PRIMARY KEY,
1377
+ agent_id TEXT NOT NULL,
1378
+ agent_role TEXT NOT NULL,
1379
+ session_id TEXT NOT NULL,
1380
+ timestamp TEXT NOT NULL,
1381
+ tool_name TEXT NOT NULL,
1382
+ project_name TEXT NOT NULL,
1383
+ has_error INTEGER NOT NULL DEFAULT 0,
1384
+ raw_text TEXT NOT NULL,
1385
+ vector F32_BLOB(1024),
1386
+ version INTEGER NOT NULL DEFAULT 0
1387
+ );
1388
+
1389
+ CREATE INDEX IF NOT EXISTS idx_memories_agent
1390
+ ON memories(agent_id);
1391
+
1392
+ CREATE INDEX IF NOT EXISTS idx_memories_timestamp
1393
+ ON memories(timestamp);
1394
+
1395
+ CREATE INDEX IF NOT EXISTS idx_memories_session
1396
+ ON memories(session_id);
1397
+
1398
+ CREATE INDEX IF NOT EXISTS idx_memories_project
1399
+ ON memories(project_name);
1400
+
1401
+ CREATE INDEX IF NOT EXISTS idx_memories_tool
1402
+ ON memories(tool_name);
1403
+
1404
+ CREATE INDEX IF NOT EXISTS idx_memories_version
1405
+ ON memories(version);
1406
+
1407
+ CREATE INDEX IF NOT EXISTS idx_memories_agent_project
1408
+ ON memories(agent_id, project_name);
1409
+ `);
1410
+ await client.executeMultiple(`
1411
+ CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
1412
+ raw_text,
1413
+ content='memories',
1414
+ content_rowid='rowid'
1415
+ );
1416
+
1417
+ CREATE TRIGGER IF NOT EXISTS memories_fts_ai AFTER INSERT ON memories BEGIN
1418
+ INSERT INTO memories_fts(rowid, raw_text) VALUES (new.rowid, new.raw_text);
1419
+ END;
1420
+
1421
+ CREATE TRIGGER IF NOT EXISTS memories_fts_ad AFTER DELETE ON memories BEGIN
1422
+ INSERT INTO memories_fts(memories_fts, rowid, raw_text) VALUES('delete', old.rowid, old.raw_text);
1423
+ END;
1424
+
1425
+ CREATE TRIGGER IF NOT EXISTS memories_fts_au AFTER UPDATE ON memories BEGIN
1426
+ INSERT INTO memories_fts(memories_fts, rowid, raw_text) VALUES('delete', old.rowid, old.raw_text);
1427
+ INSERT INTO memories_fts(rowid, raw_text) VALUES (new.rowid, new.raw_text);
1428
+ END;
1429
+ `);
1430
+ await client.executeMultiple(`
1431
+ CREATE TABLE IF NOT EXISTS sync_meta (
1432
+ key TEXT PRIMARY KEY,
1433
+ value TEXT NOT NULL
1434
+ );
1435
+ `);
1436
+ await client.executeMultiple(`
1437
+ CREATE TABLE IF NOT EXISTS tasks (
1438
+ id TEXT PRIMARY KEY,
1439
+ title TEXT NOT NULL,
1440
+ assigned_to TEXT NOT NULL,
1441
+ assigned_by TEXT NOT NULL,
1442
+ project_name TEXT NOT NULL,
1443
+ priority TEXT NOT NULL DEFAULT 'p1',
1444
+ status TEXT NOT NULL DEFAULT 'open',
1445
+ task_file TEXT,
1446
+ created_at TEXT NOT NULL,
1447
+ updated_at TEXT NOT NULL
1448
+ );
1449
+
1450
+ CREATE INDEX IF NOT EXISTS idx_tasks_assignee_status
1451
+ ON tasks(assigned_to, status);
1452
+ `);
1453
+ await client.executeMultiple(`
1454
+ CREATE TABLE IF NOT EXISTS behaviors (
1455
+ id TEXT PRIMARY KEY,
1456
+ agent_id TEXT NOT NULL,
1457
+ project_name TEXT,
1458
+ domain TEXT,
1459
+ content TEXT NOT NULL,
1460
+ active INTEGER NOT NULL DEFAULT 1,
1461
+ created_at TEXT NOT NULL,
1462
+ updated_at TEXT NOT NULL
1463
+ );
1464
+
1465
+ CREATE INDEX IF NOT EXISTS idx_behaviors_agent
1466
+ ON behaviors(agent_id, active);
1467
+ `);
1468
+ try {
1469
+ const coordinatorName = getCoordinatorName();
1470
+ const existing = await client.execute({
1471
+ sql: "SELECT COUNT(*) as cnt FROM behaviors WHERE agent_id = ?",
1472
+ args: [coordinatorName]
1473
+ });
1474
+ if (Number(existing.rows[0]?.cnt) === 0) {
1475
+ const seededAt = "2026-03-25T00:00:00Z";
1476
+ for (const [domain, content] of [
1477
+ ["workflow", `Don't ask "keep going?" \u2014 just keep executing phases/plans autonomously`],
1478
+ ["tool-use", "Always use create_task MCP tool, never write .md files directly for task creation"],
1479
+ ["workflow", "Auto-start reviewing when idle and reviews are pending \u2014 never ask founder for permission"]
1480
+ ]) {
1481
+ await client.execute({
1482
+ sql: `INSERT INTO behaviors (id, agent_id, project_name, domain, content, active, created_at, updated_at)
1483
+ VALUES (hex(randomblob(16)), ?, NULL, ?, ?, 1, ?, ?)`,
1484
+ args: [coordinatorName, domain, content, seededAt, seededAt]
1485
+ });
1486
+ }
1487
+ }
1488
+ } catch {
1489
+ }
1490
+ try {
1491
+ await client.execute({
1492
+ sql: `ALTER TABLE behaviors ADD COLUMN priority TEXT DEFAULT 'p1'`,
1493
+ args: []
1494
+ });
1495
+ } catch {
1496
+ }
1497
+ try {
1498
+ await client.execute({
1499
+ sql: `ALTER TABLE tasks ADD COLUMN blocked_by TEXT`,
1500
+ args: []
1501
+ });
1502
+ } catch {
1503
+ }
1504
+ try {
1505
+ await client.execute({
1506
+ sql: `ALTER TABLE tasks ADD COLUMN parent_task_id TEXT`,
1507
+ args: []
1508
+ });
1509
+ } catch {
1510
+ }
1511
+ try {
1512
+ await client.execute({
1513
+ sql: `CREATE INDEX IF NOT EXISTS idx_tasks_parent_task_id
1514
+ ON tasks(parent_task_id)
1515
+ WHERE parent_task_id IS NOT NULL`,
1516
+ args: []
1517
+ });
1518
+ } catch {
1519
+ }
1520
+ try {
1521
+ await client.execute({
1522
+ sql: `UPDATE tasks SET status = 'done' WHERE status = 'completed'`,
1523
+ args: []
1524
+ });
1525
+ } catch {
1526
+ }
1527
+ try {
1528
+ await client.execute({
1529
+ sql: `ALTER TABLE tasks ADD COLUMN reviewer TEXT`,
1530
+ args: []
1531
+ });
1532
+ } catch {
1533
+ }
1534
+ try {
1535
+ await client.execute({
1536
+ sql: `ALTER TABLE tasks ADD COLUMN context TEXT`,
1537
+ args: []
1538
+ });
1539
+ } catch {
1540
+ }
1541
+ try {
1542
+ await client.execute({
1543
+ sql: `ALTER TABLE tasks ADD COLUMN result TEXT`,
1544
+ args: []
1545
+ });
1546
+ } catch {
1547
+ }
1548
+ try {
1549
+ await client.execute({
1550
+ sql: `ALTER TABLE tasks ADD COLUMN assigned_tmux TEXT`,
1551
+ args: []
1552
+ });
1553
+ } catch {
1554
+ }
1555
+ try {
1556
+ await client.execute({
1557
+ sql: `ALTER TABLE tasks ADD COLUMN checkpoint TEXT`,
1558
+ args: []
1559
+ });
1560
+ } catch {
1561
+ }
1562
+ try {
1563
+ await client.execute({
1564
+ sql: `ALTER TABLE tasks ADD COLUMN checkpoint_count INTEGER NOT NULL DEFAULT 0`,
1565
+ args: []
1566
+ });
1567
+ } catch {
1568
+ }
1569
+ try {
1570
+ await client.execute({
1571
+ sql: `ALTER TABLE tasks ADD COLUMN complexity TEXT NOT NULL DEFAULT 'standard'`,
1572
+ args: []
1573
+ });
1574
+ } catch {
1575
+ }
1576
+ try {
1577
+ await client.execute({
1578
+ sql: `ALTER TABLE tasks ADD COLUMN session_scope TEXT`,
1579
+ args: []
1580
+ });
1581
+ } catch {
1582
+ }
1583
+ try {
1584
+ await client.execute({
1585
+ sql: `ALTER TABLE memories ADD COLUMN task_id TEXT`,
1586
+ args: []
1587
+ });
1588
+ } catch {
1589
+ }
1590
+ try {
1591
+ await client.execute({
1592
+ sql: `ALTER TABLE memories ADD COLUMN consolidated INTEGER NOT NULL DEFAULT 0`,
1593
+ args: []
1594
+ });
1595
+ } catch {
1596
+ }
1597
+ try {
1598
+ await client.execute({
1599
+ sql: `ALTER TABLE memories ADD COLUMN author_device_id TEXT`,
1600
+ args: []
1601
+ });
1602
+ } catch {
1603
+ }
1604
+ try {
1605
+ await client.execute({
1606
+ sql: `ALTER TABLE memories ADD COLUMN scope TEXT NOT NULL DEFAULT 'business'`,
1607
+ args: []
1608
+ });
1609
+ } catch {
1610
+ }
1611
+ await client.executeMultiple(`
1612
+ CREATE TABLE IF NOT EXISTS consolidations (
1613
+ id TEXT PRIMARY KEY,
1614
+ consolidated_memory_id TEXT NOT NULL,
1615
+ source_memory_id TEXT NOT NULL,
1616
+ created_at TEXT NOT NULL
1617
+ );
1618
+
1619
+ CREATE INDEX IF NOT EXISTS idx_consolidations_source
1620
+ ON consolidations(source_memory_id);
1621
+
1622
+ CREATE INDEX IF NOT EXISTS idx_consolidations_consolidated
1623
+ ON consolidations(consolidated_memory_id);
1624
+ `);
1625
+ await client.executeMultiple(`
1626
+ CREATE TABLE IF NOT EXISTS reminders (
1627
+ id TEXT PRIMARY KEY,
1628
+ text TEXT NOT NULL,
1629
+ created_at TEXT NOT NULL,
1630
+ due_date TEXT,
1631
+ completed_at TEXT
1632
+ );
1633
+ `);
1634
+ await client.executeMultiple(`
1635
+ CREATE TABLE IF NOT EXISTS notifications (
1636
+ id TEXT PRIMARY KEY,
1637
+ agent_id TEXT NOT NULL,
1638
+ agent_role TEXT NOT NULL,
1639
+ event TEXT NOT NULL,
1640
+ project TEXT NOT NULL,
1641
+ summary TEXT NOT NULL,
1642
+ task_file TEXT,
1643
+ session_scope TEXT,
1644
+ read INTEGER NOT NULL DEFAULT 0,
1645
+ created_at TEXT NOT NULL
1646
+ );
1647
+
1648
+ CREATE INDEX IF NOT EXISTS idx_notifications_read
1649
+ ON notifications(read);
1650
+
1651
+ CREATE INDEX IF NOT EXISTS idx_notifications_agent
1652
+ ON notifications(agent_id, session_scope);
1653
+
1654
+ CREATE INDEX IF NOT EXISTS idx_notifications_task_file
1655
+ ON notifications(task_file);
1656
+ `);
1657
+ await client.executeMultiple(`
1658
+ CREATE TABLE IF NOT EXISTS schedules (
1659
+ id TEXT PRIMARY KEY,
1660
+ cron TEXT NOT NULL,
1661
+ description TEXT NOT NULL,
1662
+ job_type TEXT NOT NULL DEFAULT 'report',
1663
+ prompt TEXT,
1664
+ assigned_to TEXT,
1665
+ project_name TEXT,
1666
+ active INTEGER NOT NULL DEFAULT 1,
1667
+ use_crontab INTEGER NOT NULL DEFAULT 0,
1668
+ created_at TEXT NOT NULL
1669
+ );
1670
+ `);
1671
+ await client.executeMultiple(`
1672
+ CREATE TABLE IF NOT EXISTS device_registry (
1673
+ device_id TEXT PRIMARY KEY,
1674
+ friendly_name TEXT NOT NULL,
1675
+ hostname TEXT NOT NULL,
1676
+ projects TEXT NOT NULL DEFAULT '[]',
1677
+ agents TEXT NOT NULL DEFAULT '[]',
1678
+ connected INTEGER DEFAULT 0,
1679
+ last_seen TEXT NOT NULL
1680
+ );
1681
+ `);
1682
+ await client.executeMultiple(`
1683
+ CREATE TABLE IF NOT EXISTS messages (
1684
+ id TEXT PRIMARY KEY,
1685
+ from_agent TEXT NOT NULL,
1686
+ from_device TEXT NOT NULL DEFAULT 'local',
1687
+ target_agent TEXT NOT NULL,
1688
+ target_project TEXT,
1689
+ target_device TEXT NOT NULL DEFAULT 'local',
1690
+ session_scope TEXT,
1691
+ content TEXT NOT NULL,
1692
+ priority TEXT DEFAULT 'normal',
1693
+ status TEXT DEFAULT 'pending',
1694
+ server_seq INTEGER,
1695
+ retry_count INTEGER DEFAULT 0,
1696
+ created_at TEXT NOT NULL,
1697
+ delivered_at TEXT,
1698
+ processed_at TEXT,
1699
+ failed_at TEXT,
1700
+ failure_reason TEXT
1701
+ );
1702
+
1703
+ CREATE INDEX IF NOT EXISTS idx_messages_target
1704
+ ON messages(target_agent, session_scope, status);
1705
+
1706
+ CREATE INDEX IF NOT EXISTS idx_messages_conversation_order
1707
+ ON messages(target_agent, session_scope, from_agent, server_seq);
1708
+ `);
1709
+ try {
1710
+ await client.execute({
1711
+ sql: `ALTER TABLE notifications ADD COLUMN session_scope TEXT`,
1712
+ args: []
1713
+ });
1714
+ } catch {
1715
+ }
1716
+ try {
1717
+ await client.execute({
1718
+ sql: `ALTER TABLE messages ADD COLUMN session_scope TEXT`,
1719
+ args: []
1720
+ });
1721
+ } catch {
1722
+ }
1723
+ await client.executeMultiple(`
1724
+ CREATE INDEX IF NOT EXISTS idx_notifications_agent_scope_read
1725
+ ON notifications(agent_id, session_scope, read, created_at);
1726
+
1727
+ CREATE INDEX IF NOT EXISTS idx_messages_target_scope_status
1728
+ ON messages(target_agent, session_scope, status, created_at);
1729
+ `);
1730
+ try {
1731
+ await client.execute({
1732
+ sql: `UPDATE memories SET project_name = 'exe-create' WHERE project_name = 'web'`,
1733
+ args: []
1734
+ });
1735
+ await client.execute({
1736
+ sql: `UPDATE memories SET project_name = 'exe-os' WHERE project_name = 'worker'`,
1737
+ args: []
1738
+ });
1739
+ await client.execute({
1740
+ sql: `UPDATE tasks SET project_name = 'exe-create' WHERE project_name = 'web'`,
1741
+ args: []
1742
+ });
1743
+ await client.execute({
1744
+ sql: `UPDATE tasks SET project_name = 'exe-os' WHERE project_name = 'worker'`,
1745
+ args: []
1746
+ });
1747
+ } catch {
1748
+ }
1749
+ await client.executeMultiple(`
1750
+ CREATE TABLE IF NOT EXISTS trajectories (
1751
+ id TEXT PRIMARY KEY,
1752
+ task_id TEXT NOT NULL,
1753
+ agent_id TEXT NOT NULL,
1754
+ project_name TEXT NOT NULL,
1755
+ task_title TEXT NOT NULL,
1756
+ signature TEXT NOT NULL,
1757
+ signature_hash TEXT NOT NULL,
1758
+ tool_count INTEGER NOT NULL,
1759
+ skill_id TEXT,
1760
+ created_at TEXT NOT NULL
1761
+ );
1762
+
1763
+ CREATE INDEX IF NOT EXISTS idx_trajectories_hash
1764
+ ON trajectories(signature_hash);
1765
+
1766
+ CREATE INDEX IF NOT EXISTS idx_trajectories_agent
1767
+ ON trajectories(agent_id);
1768
+ `);
1769
+ try {
1770
+ await client.execute("ALTER TABLE trajectories ADD COLUMN skill_id TEXT");
1771
+ } catch {
1772
+ }
1773
+ await client.executeMultiple(`
1774
+ CREATE TABLE IF NOT EXISTS consolidations (
1775
+ id TEXT PRIMARY KEY,
1776
+ consolidated_memory_id TEXT NOT NULL,
1777
+ source_memory_id TEXT NOT NULL,
1778
+ created_at TEXT NOT NULL
1779
+ );
1780
+
1781
+ CREATE INDEX IF NOT EXISTS idx_consolidations_source
1782
+ ON consolidations(source_memory_id);
1783
+ `);
1784
+ await client.executeMultiple(`
1785
+ CREATE TABLE IF NOT EXISTS audit_trail (
1786
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1787
+ timestamp TEXT NOT NULL,
1788
+ session_id TEXT NOT NULL,
1789
+ agent_id TEXT NOT NULL,
1790
+ tool TEXT NOT NULL,
1791
+ input TEXT,
1792
+ decision TEXT NOT NULL,
1793
+ reason TEXT,
1794
+ is_customer_facing INTEGER NOT NULL DEFAULT 0
1795
+ );
1796
+
1797
+ CREATE INDEX IF NOT EXISTS idx_audit_trail_agent
1798
+ ON audit_trail(agent_id, timestamp);
1799
+
1800
+ CREATE INDEX IF NOT EXISTS idx_audit_trail_session
1801
+ ON audit_trail(session_id);
1802
+ `);
1803
+ try {
1804
+ await client.execute({
1805
+ sql: `ALTER TABLE memories ADD COLUMN consolidated INTEGER NOT NULL DEFAULT 0`,
1806
+ args: []
1807
+ });
1808
+ } catch {
1809
+ }
1810
+ try {
1811
+ await client.execute({
1812
+ sql: `ALTER TABLE memories ADD COLUMN importance INTEGER DEFAULT 5`,
1813
+ args: []
1814
+ });
1815
+ } catch {
1816
+ }
1817
+ try {
1818
+ await client.execute({
1819
+ sql: `ALTER TABLE memories ADD COLUMN status TEXT DEFAULT 'active'`,
1820
+ args: []
1821
+ });
1822
+ } catch {
1823
+ }
1824
+ try {
1825
+ await client.execute({
1826
+ sql: `ALTER TABLE memories ADD COLUMN confidence REAL DEFAULT 0.7`,
1827
+ args: []
1828
+ });
1829
+ } catch {
1830
+ }
1831
+ try {
1832
+ await client.execute({
1833
+ sql: `ALTER TABLE memories ADD COLUMN last_accessed TEXT`,
1834
+ args: []
1835
+ });
1836
+ } catch {
1837
+ }
1838
+ try {
1839
+ await client.execute({
1840
+ sql: `UPDATE memories SET last_accessed = timestamp WHERE last_accessed IS NULL`,
1841
+ args: []
1842
+ });
1843
+ } catch {
1844
+ }
1845
+ try {
1846
+ await client.execute({
1847
+ sql: `ALTER TABLE memories ADD COLUMN wiki_synced INTEGER DEFAULT 0`,
1848
+ args: []
1849
+ });
1850
+ } catch {
1851
+ }
1852
+ try {
1853
+ await client.execute({
1854
+ sql: `ALTER TABLE memories ADD COLUMN graph_extracted INTEGER DEFAULT 0`,
1855
+ args: []
1856
+ });
1857
+ } catch {
1858
+ }
1859
+ for (const col of [
1860
+ "ALTER TABLE memories ADD COLUMN content_hash TEXT",
1861
+ "ALTER TABLE memories ADD COLUMN graph_extracted_hash TEXT"
1862
+ ]) {
1863
+ try {
1864
+ await client.execute(col);
1865
+ } catch {
1866
+ }
1867
+ }
1868
+ try {
1869
+ await client.execute(
1870
+ `CREATE INDEX IF NOT EXISTS idx_memories_content_hash ON memories(content_hash, agent_id)`
1871
+ );
1872
+ } catch {
1873
+ }
1874
+ await client.executeMultiple(`
1875
+ CREATE TABLE IF NOT EXISTS entities (
1876
+ id TEXT PRIMARY KEY,
1877
+ name TEXT NOT NULL,
1878
+ type TEXT NOT NULL,
1879
+ first_seen TEXT NOT NULL,
1880
+ last_seen TEXT NOT NULL,
1881
+ properties TEXT DEFAULT '{}',
1882
+ UNIQUE(name, type)
1883
+ );
1884
+
1885
+ CREATE TABLE IF NOT EXISTS relationships (
1886
+ id TEXT PRIMARY KEY,
1887
+ source_entity_id TEXT NOT NULL,
1888
+ target_entity_id TEXT NOT NULL,
1889
+ type TEXT NOT NULL,
1890
+ weight REAL DEFAULT 1.0,
1891
+ timestamp TEXT NOT NULL,
1892
+ properties TEXT DEFAULT '{}',
1893
+ UNIQUE(source_entity_id, target_entity_id, type)
1894
+ );
1895
+
1896
+ CREATE TABLE IF NOT EXISTS entity_memories (
1897
+ entity_id TEXT NOT NULL,
1898
+ memory_id TEXT NOT NULL,
1899
+ PRIMARY KEY (entity_id, memory_id)
1900
+ );
1901
+
1902
+ CREATE TABLE IF NOT EXISTS relationship_memories (
1903
+ relationship_id TEXT NOT NULL,
1904
+ memory_id TEXT NOT NULL,
1905
+ PRIMARY KEY (relationship_id, memory_id)
1906
+ );
1907
+
1908
+ CREATE INDEX IF NOT EXISTS idx_entities_name ON entities(name);
1909
+ CREATE INDEX IF NOT EXISTS idx_entities_type ON entities(type);
1910
+ CREATE INDEX IF NOT EXISTS idx_relationships_source ON relationships(source_entity_id);
1911
+ CREATE INDEX IF NOT EXISTS idx_relationships_target ON relationships(target_entity_id);
1912
+
1913
+ CREATE TABLE IF NOT EXISTS hyperedges (
1914
+ id TEXT PRIMARY KEY,
1915
+ label TEXT NOT NULL,
1916
+ relation TEXT NOT NULL,
1917
+ confidence REAL DEFAULT 1.0,
1918
+ timestamp TEXT NOT NULL
1919
+ );
1920
+
1921
+ CREATE TABLE IF NOT EXISTS hyperedge_nodes (
1922
+ hyperedge_id TEXT NOT NULL,
1923
+ entity_id TEXT NOT NULL,
1924
+ PRIMARY KEY (hyperedge_id, entity_id)
1925
+ );
1926
+
1927
+ CREATE VIRTUAL TABLE IF NOT EXISTS entities_fts USING fts5(
1928
+ name,
1929
+ content=entities,
1930
+ content_rowid=rowid
1931
+ );
1932
+
1933
+ CREATE TRIGGER IF NOT EXISTS entities_fts_ai AFTER INSERT ON entities BEGIN
1934
+ INSERT INTO entities_fts(rowid, name) VALUES (new.rowid, new.name);
1935
+ END;
1936
+
1937
+ CREATE TRIGGER IF NOT EXISTS entities_fts_ad AFTER DELETE ON entities BEGIN
1938
+ INSERT INTO entities_fts(entities_fts, rowid, name) VALUES('delete', old.rowid, old.name);
1939
+ END;
1940
+
1941
+ CREATE TRIGGER IF NOT EXISTS entities_fts_au AFTER UPDATE ON entities BEGIN
1942
+ INSERT INTO entities_fts(entities_fts, rowid, name) VALUES('delete', old.rowid, old.name);
1943
+ INSERT INTO entities_fts(rowid, name) VALUES (new.rowid, new.name);
1944
+ END;
1945
+ `);
1946
+ try {
1947
+ await client.execute("INSERT INTO entities_fts(entities_fts) VALUES('rebuild')");
1948
+ } catch {
1949
+ }
1950
+ await client.executeMultiple(`
1951
+ CREATE TABLE IF NOT EXISTS entity_aliases (
1952
+ alias TEXT NOT NULL PRIMARY KEY,
1953
+ canonical_entity_id TEXT NOT NULL
1954
+ );
1955
+ CREATE INDEX IF NOT EXISTS idx_entity_aliases_canonical ON entity_aliases(canonical_entity_id);
1956
+ `);
1957
+ for (const col of [
1958
+ "ALTER TABLE relationships ADD COLUMN confidence REAL DEFAULT 1.0",
1959
+ "ALTER TABLE relationships ADD COLUMN confidence_label TEXT DEFAULT 'extracted'"
1960
+ ]) {
1961
+ try {
1962
+ await client.execute(col);
1963
+ } catch {
1964
+ }
1965
+ }
1966
+ try {
1967
+ await client.execute(
1968
+ `CREATE INDEX IF NOT EXISTS idx_memories_status ON memories(status)`
1969
+ );
1970
+ } catch {
1971
+ }
1972
+ await client.executeMultiple(`
1973
+ CREATE TABLE IF NOT EXISTS identity (
1974
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1975
+ agent_id TEXT NOT NULL UNIQUE,
1976
+ content_hash TEXT NOT NULL,
1977
+ updated_at TEXT NOT NULL,
1978
+ updated_by TEXT NOT NULL
1979
+ );
1980
+
1981
+ CREATE INDEX IF NOT EXISTS idx_identity_agent ON identity(agent_id);
1982
+ `);
1983
+ await client.executeMultiple(`
1984
+ CREATE TABLE IF NOT EXISTS chat_history (
1985
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1986
+ session_id TEXT NOT NULL,
1987
+ role TEXT NOT NULL,
1988
+ content TEXT NOT NULL,
1989
+ tool_name TEXT,
1990
+ tool_id TEXT,
1991
+ is_error INTEGER NOT NULL DEFAULT 0,
1992
+ timestamp INTEGER NOT NULL
1993
+ );
1994
+
1995
+ CREATE INDEX IF NOT EXISTS idx_chat_history_session
1996
+ ON chat_history(session_id, id);
1997
+ `);
1998
+ await client.executeMultiple(`
1999
+ CREATE TABLE IF NOT EXISTS workspaces (
2000
+ id TEXT PRIMARY KEY,
2001
+ slug TEXT NOT NULL UNIQUE,
2002
+ name TEXT NOT NULL,
2003
+ owner_agent_id TEXT,
2004
+ created_at TEXT NOT NULL,
2005
+ metadata TEXT
2006
+ );
2007
+
2008
+ CREATE INDEX IF NOT EXISTS idx_workspaces_slug
2009
+ ON workspaces(slug);
2010
+ `);
2011
+ await client.executeMultiple(`
2012
+ CREATE TABLE IF NOT EXISTS documents (
2013
+ id TEXT PRIMARY KEY,
2014
+ workspace_id TEXT NOT NULL,
2015
+ filename TEXT NOT NULL,
2016
+ mime TEXT,
2017
+ source_type TEXT,
2018
+ user_id TEXT,
2019
+ uploaded_at TEXT NOT NULL,
2020
+ metadata TEXT,
2021
+ FOREIGN KEY (workspace_id) REFERENCES workspaces(id)
2022
+ );
2023
+
2024
+ CREATE INDEX IF NOT EXISTS idx_documents_workspace
2025
+ ON documents(workspace_id);
2026
+
2027
+ CREATE INDEX IF NOT EXISTS idx_documents_user
2028
+ ON documents(user_id);
2029
+ `);
2030
+ for (const column of [
2031
+ "workspace_id TEXT",
2032
+ "document_id TEXT",
2033
+ "user_id TEXT",
2034
+ "char_offset INTEGER",
2035
+ "page_number INTEGER"
2036
+ ]) {
2037
+ try {
2038
+ await client.execute({
2039
+ sql: `ALTER TABLE memories ADD COLUMN ${column}`,
2040
+ args: []
2041
+ });
2042
+ } catch {
2043
+ }
2044
+ }
2045
+ for (const col of [
2046
+ "ALTER TABLE memories ADD COLUMN source_path TEXT",
2047
+ "ALTER TABLE memories ADD COLUMN source_type TEXT DEFAULT 'text'"
2048
+ ]) {
2049
+ try {
2050
+ await client.execute(col);
2051
+ } catch {
2052
+ }
2053
+ }
2054
+ await client.executeMultiple(`
2055
+ CREATE INDEX IF NOT EXISTS idx_memories_workspace
2056
+ ON memories(workspace_id);
2057
+
2058
+ CREATE INDEX IF NOT EXISTS idx_memories_document
2059
+ ON memories(document_id);
2060
+
2061
+ CREATE INDEX IF NOT EXISTS idx_memories_user
2062
+ ON memories(user_id);
2063
+ `);
2064
+ await client.executeMultiple(`
2065
+ CREATE TABLE IF NOT EXISTS session_kills (
2066
+ id TEXT PRIMARY KEY,
2067
+ session_name TEXT NOT NULL,
2068
+ agent_id TEXT NOT NULL,
2069
+ killed_at TIMESTAMP NOT NULL,
2070
+ reason TEXT NOT NULL,
2071
+ ticks_idle INTEGER,
2072
+ estimated_tokens_saved INTEGER
2073
+ );
2074
+
2075
+ CREATE INDEX IF NOT EXISTS idx_session_kills_killed_at
2076
+ ON session_kills(killed_at);
2077
+
2078
+ CREATE INDEX IF NOT EXISTS idx_session_kills_agent
2079
+ ON session_kills(agent_id);
2080
+ `);
2081
+ await client.execute(`
2082
+ CREATE TABLE IF NOT EXISTS global_procedures (
2083
+ id TEXT PRIMARY KEY,
2084
+ title TEXT NOT NULL,
2085
+ content TEXT NOT NULL,
2086
+ priority TEXT NOT NULL DEFAULT 'p0',
2087
+ domain TEXT,
2088
+ active INTEGER NOT NULL DEFAULT 1,
2089
+ created_at TEXT NOT NULL,
2090
+ updated_at TEXT NOT NULL
2091
+ )
2092
+ `);
2093
+ await client.executeMultiple(`
2094
+ CREATE TABLE IF NOT EXISTS conversations (
2095
+ id TEXT PRIMARY KEY,
2096
+ platform TEXT NOT NULL,
2097
+ external_id TEXT,
2098
+ sender_id TEXT NOT NULL,
2099
+ sender_name TEXT,
2100
+ sender_phone TEXT,
2101
+ sender_email TEXT,
2102
+ recipient_id TEXT,
2103
+ channel_id TEXT NOT NULL,
2104
+ thread_id TEXT,
2105
+ reply_to_id TEXT,
2106
+ content_text TEXT,
2107
+ content_media TEXT,
2108
+ content_metadata TEXT,
2109
+ agent_response TEXT,
2110
+ agent_name TEXT,
2111
+ timestamp TEXT NOT NULL,
2112
+ ingested_at TEXT NOT NULL
2113
+ );
2114
+
2115
+ CREATE INDEX IF NOT EXISTS idx_conversations_platform
2116
+ ON conversations(platform);
2117
+
2118
+ CREATE INDEX IF NOT EXISTS idx_conversations_sender
2119
+ ON conversations(sender_id);
2120
+
2121
+ CREATE INDEX IF NOT EXISTS idx_conversations_timestamp
2122
+ ON conversations(timestamp);
2123
+
2124
+ CREATE INDEX IF NOT EXISTS idx_conversations_thread
2125
+ ON conversations(thread_id);
2126
+
2127
+ CREATE INDEX IF NOT EXISTS idx_conversations_channel
2128
+ ON conversations(channel_id);
2129
+ `);
2130
+ await client.executeMultiple(`
2131
+ CREATE TABLE IF NOT EXISTS session_agent_map (
2132
+ session_uuid TEXT PRIMARY KEY,
2133
+ agent_id TEXT NOT NULL,
2134
+ session_name TEXT,
2135
+ task_id TEXT,
2136
+ project_name TEXT,
2137
+ started_at TEXT NOT NULL,
2138
+ cache_cold_count INTEGER NOT NULL DEFAULT 0
2139
+ );
2140
+
2141
+ CREATE INDEX IF NOT EXISTS idx_session_agent_map_agent
2142
+ ON session_agent_map(agent_id);
2143
+ `);
2144
+ await client.executeMultiple(`
2145
+ CREATE TABLE IF NOT EXISTS agent_file_reads (
2146
+ session_uuid TEXT NOT NULL,
2147
+ agent_id TEXT NOT NULL,
2148
+ file_path TEXT NOT NULL,
2149
+ read_at TEXT NOT NULL,
2150
+ commit_hash TEXT,
2151
+ PRIMARY KEY (session_uuid, file_path)
2152
+ );
2153
+
2154
+ CREATE INDEX IF NOT EXISTS idx_agent_file_reads_agent_read_at
2155
+ ON agent_file_reads(agent_id, read_at);
2156
+ `);
2157
+ try {
2158
+ const mapCount = await client.execute({ sql: `SELECT COUNT(*) as cnt FROM session_agent_map`, args: [] });
2159
+ if (Number(mapCount.rows[0]?.cnt ?? 0) === 0) {
2160
+ await client.execute({
2161
+ sql: `INSERT OR IGNORE INTO session_agent_map (session_uuid, agent_id, session_name, started_at)
2162
+ SELECT session_id, agent_id, '', MIN(timestamp)
2163
+ FROM memories
2164
+ WHERE session_id IS NOT NULL AND session_id != '' AND agent_id IS NOT NULL AND agent_id != ''
2165
+ GROUP BY session_id, agent_id`,
2166
+ args: []
2167
+ });
2168
+ }
2169
+ } catch {
2170
+ }
2171
+ try {
2172
+ await client.execute({
2173
+ sql: `ALTER TABLE session_agent_map ADD COLUMN cache_cold_count INTEGER NOT NULL DEFAULT 0`,
2174
+ args: []
2175
+ });
2176
+ } catch {
2177
+ }
2178
+ try {
2179
+ await client.execute({
2180
+ sql: `ALTER TABLE tasks ADD COLUMN budget_tokens INTEGER`,
2181
+ args: []
2182
+ });
2183
+ } catch {
2184
+ }
2185
+ try {
2186
+ await client.execute({
2187
+ sql: `ALTER TABLE tasks ADD COLUMN budget_fallback_model TEXT`,
2188
+ args: []
2189
+ });
2190
+ } catch {
2191
+ }
2192
+ try {
2193
+ await client.execute({
2194
+ sql: `ALTER TABLE tasks ADD COLUMN tokens_used INTEGER DEFAULT 0`,
2195
+ args: []
2196
+ });
2197
+ } catch {
2198
+ }
2199
+ try {
2200
+ await client.execute({
2201
+ sql: `ALTER TABLE tasks ADD COLUMN tokens_warned_at INTEGER`,
2202
+ args: []
2203
+ });
2204
+ } catch {
2205
+ }
2206
+ await client.executeMultiple(`
2207
+ CREATE VIRTUAL TABLE IF NOT EXISTS conversations_fts USING fts5(
2208
+ content_text,
2209
+ sender_name,
2210
+ agent_response,
2211
+ content='conversations',
2212
+ content_rowid='rowid'
2213
+ );
2214
+
2215
+ CREATE TRIGGER IF NOT EXISTS conversations_fts_ai AFTER INSERT ON conversations BEGIN
2216
+ INSERT INTO conversations_fts(rowid, content_text, sender_name, agent_response)
2217
+ VALUES (new.rowid, new.content_text, new.sender_name, new.agent_response);
2218
+ END;
2219
+
2220
+ CREATE TRIGGER IF NOT EXISTS conversations_fts_ad AFTER DELETE ON conversations BEGIN
2221
+ INSERT INTO conversations_fts(conversations_fts, rowid, content_text, sender_name, agent_response)
2222
+ VALUES('delete', old.rowid, old.content_text, old.sender_name, old.agent_response);
2223
+ END;
2224
+
2225
+ CREATE TRIGGER IF NOT EXISTS conversations_fts_au AFTER UPDATE ON conversations BEGIN
2226
+ INSERT INTO conversations_fts(conversations_fts, rowid, content_text, sender_name, agent_response)
2227
+ VALUES('delete', old.rowid, old.content_text, old.sender_name, old.agent_response);
2228
+ INSERT INTO conversations_fts(rowid, content_text, sender_name, agent_response)
2229
+ VALUES (new.rowid, new.content_text, new.sender_name, new.agent_response);
2230
+ END;
2231
+ `);
2232
+ try {
2233
+ await client.execute({
2234
+ sql: `ALTER TABLE memories ADD COLUMN tier INTEGER DEFAULT 3`,
2235
+ args: []
2236
+ });
2237
+ } catch {
2238
+ }
2239
+ try {
2240
+ await client.execute(
2241
+ `CREATE INDEX IF NOT EXISTS idx_memories_tier ON memories(tier)`
2242
+ );
2243
+ } catch {
2244
+ }
2245
+ try {
2246
+ await client.execute({
2247
+ sql: `UPDATE memories SET tier = 1 WHERE tool_name = 'commit_to_long_term_memory' AND importance >= 8 AND tier = 3`,
2248
+ args: []
2249
+ });
2250
+ await client.execute({
2251
+ sql: `UPDATE memories SET tier = 2 WHERE tool_name IN ('store_memory', 'manual') AND importance >= 5 AND tier = 3`,
2252
+ args: []
2253
+ });
2254
+ } catch {
2255
+ }
2256
+ try {
2257
+ await client.execute({
2258
+ sql: `ALTER TABLE memories ADD COLUMN supersedes_id TEXT`,
2259
+ args: []
2260
+ });
2261
+ } catch {
2262
+ }
2263
+ try {
2264
+ await client.execute(
2265
+ `CREATE INDEX IF NOT EXISTS idx_memories_supersedes ON memories(supersedes_id) WHERE supersedes_id IS NOT NULL`
2266
+ );
2267
+ } catch {
2268
+ }
2269
+ for (const col of [
2270
+ "ALTER TABLE tasks ADD COLUMN checkpoint TEXT",
2271
+ "ALTER TABLE tasks ADD COLUMN checkpoint_count INTEGER DEFAULT 0"
2272
+ ]) {
2273
+ try {
2274
+ await client.execute(col);
2275
+ } catch {
2276
+ }
2277
+ }
2278
+ try {
2279
+ await client.execute({
2280
+ sql: `ALTER TABLE memories ADD COLUMN draft INTEGER DEFAULT 0`,
2281
+ args: []
2282
+ });
2283
+ } catch {
2284
+ }
2285
+ try {
2286
+ await client.execute(
2287
+ `CREATE INDEX IF NOT EXISTS idx_memories_draft ON memories(draft) WHERE draft = 1`
2288
+ );
2289
+ } catch {
2290
+ }
2291
+ try {
2292
+ await client.execute({
2293
+ sql: `ALTER TABLE memories ADD COLUMN memory_type TEXT DEFAULT 'raw'`,
2294
+ args: []
2295
+ });
2296
+ } catch {
2297
+ }
2298
+ try {
2299
+ await client.execute(
2300
+ `CREATE INDEX IF NOT EXISTS idx_memories_type ON memories(memory_type)`
2301
+ );
2302
+ } catch {
2303
+ }
2304
+ try {
2305
+ await client.execute({
2306
+ sql: `ALTER TABLE memories ADD COLUMN trajectory TEXT`,
2307
+ args: []
2308
+ });
2309
+ } catch {
2310
+ }
2311
+ for (const col of [
2312
+ "ALTER TABLE memories ADD COLUMN intent TEXT",
2313
+ "ALTER TABLE memories ADD COLUMN outcome TEXT",
2314
+ "ALTER TABLE memories ADD COLUMN domain TEXT",
2315
+ "ALTER TABLE memories ADD COLUMN referenced_entities TEXT",
2316
+ "ALTER TABLE memories ADD COLUMN retrieval_count INTEGER DEFAULT 0",
2317
+ "ALTER TABLE memories ADD COLUMN chain_position TEXT",
2318
+ "ALTER TABLE memories ADD COLUMN review_status TEXT",
2319
+ "ALTER TABLE memories ADD COLUMN context_window_pct INTEGER",
2320
+ "ALTER TABLE memories ADD COLUMN file_paths TEXT",
2321
+ "ALTER TABLE memories ADD COLUMN commit_hash TEXT",
2322
+ "ALTER TABLE memories ADD COLUMN duration_ms INTEGER",
2323
+ "ALTER TABLE memories ADD COLUMN token_cost REAL",
2324
+ "ALTER TABLE memories ADD COLUMN audience TEXT",
2325
+ "ALTER TABLE memories ADD COLUMN language_type TEXT",
2326
+ "ALTER TABLE memories ADD COLUMN parent_memory_id TEXT"
2327
+ ]) {
2328
+ try {
2329
+ await client.execute(col);
2330
+ } catch {
2331
+ }
2332
+ }
2333
+ try {
2334
+ await client.execute({
2335
+ sql: `UPDATE tasks SET status = 'closed' WHERE status = 'done' AND result IS NOT NULL`,
2336
+ args: []
2337
+ });
2338
+ } catch {
2339
+ }
2340
+ }
2341
+ async function disposeDatabase() {
2342
+ if (_walCheckpointTimer) {
2343
+ clearInterval(_walCheckpointTimer);
2344
+ _walCheckpointTimer = null;
2345
+ }
2346
+ if (_daemonClient) {
2347
+ _daemonClient.close();
2348
+ _daemonClient = null;
2349
+ }
2350
+ if (_adapterClient && _adapterClient !== _resilientClient) {
2351
+ _adapterClient.close();
2352
+ }
2353
+ _adapterClient = null;
2354
+ if (_client) {
2355
+ _client.close();
2356
+ _client = null;
2357
+ _resilientClient = null;
2358
+ }
2359
+ }
2360
+ var _client, _resilientClient, _walCheckpointTimer, _daemonClient, _adapterClient, initTurso, disposeTurso;
2361
+ var init_database = __esm({
2362
+ "src/lib/database.ts"() {
2363
+ "use strict";
2364
+ init_db_retry();
2365
+ init_employees();
2366
+ init_database_adapter();
2367
+ _client = null;
2368
+ _resilientClient = null;
2369
+ _walCheckpointTimer = null;
2370
+ _daemonClient = null;
2371
+ _adapterClient = null;
2372
+ initTurso = initDatabase;
2373
+ disposeTurso = disposeDatabase;
2374
+ }
2375
+ });
2376
+
2377
+ // src/lib/keychain.ts
2378
+ import { readFile as readFile3, writeFile as writeFile3, unlink, mkdir as mkdir3, chmod as chmod2 } from "fs/promises";
2379
+ import { existsSync as existsSync7 } from "fs";
2380
+ import path8 from "path";
2381
+ import os5 from "os";
2382
+ function getKeyDir() {
2383
+ return process.env.EXE_OS_DIR ?? process.env.EXE_MEM_DIR ?? path8.join(os5.homedir(), ".exe-os");
2384
+ }
2385
+ function getKeyPath() {
2386
+ return path8.join(getKeyDir(), "master.key");
2387
+ }
2388
+ async function tryKeytar() {
2389
+ try {
2390
+ return await import("keytar");
2391
+ } catch {
2392
+ return null;
2393
+ }
2394
+ }
2395
+ async function getMasterKey() {
2396
+ const keytar = await tryKeytar();
2397
+ if (keytar) {
2398
+ try {
2399
+ const stored = await keytar.getPassword(SERVICE, ACCOUNT);
2400
+ if (stored) {
2401
+ return Buffer.from(stored, "base64");
2402
+ }
2403
+ } catch {
2404
+ }
2405
+ }
2406
+ const keyPath = getKeyPath();
2407
+ if (!existsSync7(keyPath)) {
2408
+ process.stderr.write(
2409
+ `[keychain] Key not found at ${keyPath} (HOME=${os5.homedir()}, EXE_OS_DIR=${process.env.EXE_OS_DIR ?? "unset"})
2410
+ `
2411
+ );
2412
+ return null;
2413
+ }
2414
+ try {
2415
+ const content = await readFile3(keyPath, "utf-8");
2416
+ return Buffer.from(content.trim(), "base64");
2417
+ } catch (err) {
2418
+ process.stderr.write(
2419
+ `[keychain] Key read failed at ${keyPath}: ${err instanceof Error ? err.message : String(err)}
2420
+ `
2421
+ );
2422
+ return null;
2423
+ }
2424
+ }
2425
+ var SERVICE, ACCOUNT;
2426
+ var init_keychain = __esm({
2427
+ "src/lib/keychain.ts"() {
2428
+ "use strict";
2429
+ SERVICE = "exe-mem";
2430
+ ACCOUNT = "master-key";
2431
+ }
2432
+ });
2433
+
2434
+ // src/lib/state-bus.ts
2435
+ var StateBus, orgBus;
2436
+ var init_state_bus = __esm({
2437
+ "src/lib/state-bus.ts"() {
2438
+ "use strict";
2439
+ StateBus = class {
2440
+ handlers = /* @__PURE__ */ new Map();
2441
+ globalHandlers = /* @__PURE__ */ new Set();
2442
+ /** Emit an event to all subscribers */
2443
+ emit(event) {
2444
+ const typeHandlers = this.handlers.get(event.type);
2445
+ if (typeHandlers) {
2446
+ for (const handler of typeHandlers) {
2447
+ try {
2448
+ handler(event);
2449
+ } catch {
2450
+ }
2451
+ }
2452
+ }
2453
+ for (const handler of this.globalHandlers) {
2454
+ try {
2455
+ handler(event);
2456
+ } catch {
2457
+ }
2458
+ }
2459
+ }
2460
+ /** Subscribe to a specific event type */
2461
+ on(type, handler) {
2462
+ if (!this.handlers.has(type)) {
2463
+ this.handlers.set(type, /* @__PURE__ */ new Set());
2464
+ }
2465
+ this.handlers.get(type).add(handler);
2466
+ }
2467
+ /** Subscribe to ALL events */
2468
+ onAny(handler) {
2469
+ this.globalHandlers.add(handler);
2470
+ }
2471
+ /** Unsubscribe from a specific event type */
2472
+ off(type, handler) {
2473
+ this.handlers.get(type)?.delete(handler);
2474
+ }
2475
+ /** Unsubscribe from ALL events */
2476
+ offAny(handler) {
2477
+ this.globalHandlers.delete(handler);
2478
+ }
2479
+ /** Remove all listeners */
2480
+ clear() {
2481
+ this.handlers.clear();
2482
+ this.globalHandlers.clear();
2483
+ }
2484
+ };
2485
+ orgBus = new StateBus();
2486
+ }
2487
+ });
2488
+
2489
+ // src/lib/shard-manager.ts
2490
+ var shard_manager_exports = {};
2491
+ __export(shard_manager_exports, {
2492
+ disposeShards: () => disposeShards,
2493
+ ensureShardSchema: () => ensureShardSchema,
2494
+ getOpenShardCount: () => getOpenShardCount,
2495
+ getReadyShardClient: () => getReadyShardClient,
2496
+ getShardClient: () => getShardClient,
2497
+ getShardsDir: () => getShardsDir,
2498
+ initShardManager: () => initShardManager,
2499
+ isShardingEnabled: () => isShardingEnabled,
2500
+ listShards: () => listShards,
2501
+ shardExists: () => shardExists
2502
+ });
2503
+ import path9 from "path";
2504
+ import { existsSync as existsSync8, mkdirSync as mkdirSync5, readdirSync as readdirSync3 } from "fs";
2505
+ import { createClient as createClient2 } from "@libsql/client";
2506
+ function initShardManager(encryptionKey) {
2507
+ _encryptionKey = encryptionKey;
2508
+ if (!existsSync8(SHARDS_DIR)) {
2509
+ mkdirSync5(SHARDS_DIR, { recursive: true });
2510
+ }
2511
+ _shardingEnabled = true;
2512
+ if (_evictionTimer) clearInterval(_evictionTimer);
2513
+ _evictionTimer = setInterval(evictIdleShards, EVICTION_INTERVAL_MS);
2514
+ _evictionTimer.unref();
2515
+ }
2516
+ function isShardingEnabled() {
2517
+ return _shardingEnabled;
2518
+ }
2519
+ function getShardsDir() {
2520
+ return SHARDS_DIR;
2521
+ }
2522
+ function getShardClient(projectName) {
2523
+ if (!_encryptionKey) {
2524
+ throw new Error("Shard manager not initialized. Call initShardManager() first.");
2525
+ }
2526
+ const safeName = projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
2527
+ if (!safeName) {
2528
+ throw new Error(`Invalid project name for shard: "${projectName}"`);
2529
+ }
2530
+ const cached = _shards.get(safeName);
2531
+ if (cached) {
2532
+ _shardLastAccess.set(safeName, Date.now());
2533
+ return cached;
2534
+ }
2535
+ while (_shards.size >= MAX_OPEN_SHARDS) {
2536
+ evictLRU();
2537
+ }
2538
+ const dbPath = path9.join(SHARDS_DIR, `${safeName}.db`);
2539
+ const client = createClient2({
2540
+ url: `file:${dbPath}`,
2541
+ encryptionKey: _encryptionKey
2542
+ });
2543
+ _shards.set(safeName, client);
2544
+ _shardLastAccess.set(safeName, Date.now());
2545
+ return client;
2546
+ }
2547
+ function shardExists(projectName) {
2548
+ const safeName = projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
2549
+ return existsSync8(path9.join(SHARDS_DIR, `${safeName}.db`));
2550
+ }
2551
+ function listShards() {
2552
+ if (!existsSync8(SHARDS_DIR)) return [];
2553
+ return readdirSync3(SHARDS_DIR).filter((f) => f.endsWith(".db")).map((f) => f.replace(".db", ""));
2554
+ }
2555
+ async function ensureShardSchema(client) {
2556
+ await client.execute("PRAGMA journal_mode = WAL");
2557
+ await client.execute("PRAGMA busy_timeout = 30000");
2558
+ try {
2559
+ await client.execute("PRAGMA libsql_vector_search_ef = 128");
2560
+ } catch {
2561
+ }
2562
+ await client.executeMultiple(`
2563
+ CREATE TABLE IF NOT EXISTS memories (
2564
+ id TEXT PRIMARY KEY,
2565
+ agent_id TEXT NOT NULL,
2566
+ agent_role TEXT NOT NULL,
2567
+ session_id TEXT NOT NULL,
2568
+ timestamp TEXT NOT NULL,
2569
+ tool_name TEXT NOT NULL,
2570
+ project_name TEXT NOT NULL,
2571
+ has_error INTEGER NOT NULL DEFAULT 0,
2572
+ raw_text TEXT NOT NULL,
2573
+ vector F32_BLOB(1024),
2574
+ version INTEGER NOT NULL DEFAULT 0
2575
+ );
2576
+
2577
+ CREATE INDEX IF NOT EXISTS idx_memories_agent ON memories(agent_id);
2578
+ CREATE INDEX IF NOT EXISTS idx_memories_timestamp ON memories(timestamp);
2579
+ CREATE INDEX IF NOT EXISTS idx_memories_agent_project ON memories(agent_id, project_name);
2580
+ `);
2581
+ await client.executeMultiple(`
2582
+ CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
2583
+ raw_text,
2584
+ content='memories',
2585
+ content_rowid='rowid'
2586
+ );
2587
+
2588
+ CREATE TRIGGER IF NOT EXISTS memories_fts_ai AFTER INSERT ON memories BEGIN
2589
+ INSERT INTO memories_fts(rowid, raw_text) VALUES (new.rowid, new.raw_text);
2590
+ END;
2591
+
2592
+ CREATE TRIGGER IF NOT EXISTS memories_fts_ad AFTER DELETE ON memories BEGIN
2593
+ INSERT INTO memories_fts(memories_fts, rowid, raw_text) VALUES('delete', old.rowid, old.raw_text);
2594
+ END;
2595
+
2596
+ CREATE TRIGGER IF NOT EXISTS memories_fts_au AFTER UPDATE ON memories BEGIN
2597
+ INSERT INTO memories_fts(memories_fts, rowid, raw_text) VALUES('delete', old.rowid, old.raw_text);
2598
+ INSERT INTO memories_fts(rowid, raw_text) VALUES (new.rowid, new.raw_text);
2599
+ END;
2600
+ `);
2601
+ for (const col of [
2602
+ "ALTER TABLE memories ADD COLUMN task_id TEXT",
2603
+ "ALTER TABLE memories ADD COLUMN consolidated INTEGER NOT NULL DEFAULT 0",
2604
+ "ALTER TABLE memories ADD COLUMN author_device_id TEXT",
2605
+ "ALTER TABLE memories ADD COLUMN scope TEXT NOT NULL DEFAULT 'business'",
2606
+ "ALTER TABLE memories ADD COLUMN importance INTEGER DEFAULT 5",
2607
+ "ALTER TABLE memories ADD COLUMN status TEXT DEFAULT 'active'",
2608
+ "ALTER TABLE memories ADD COLUMN wiki_synced INTEGER DEFAULT 0",
2609
+ "ALTER TABLE memories ADD COLUMN graph_extracted INTEGER DEFAULT 0",
2610
+ "ALTER TABLE memories ADD COLUMN content_hash TEXT",
2611
+ "ALTER TABLE memories ADD COLUMN graph_extracted_hash TEXT",
2612
+ "ALTER TABLE memories ADD COLUMN confidence REAL DEFAULT 0.7",
2613
+ "ALTER TABLE memories ADD COLUMN last_accessed TEXT",
2614
+ // Wiki linkage columns (must match database.ts)
2615
+ "ALTER TABLE memories ADD COLUMN workspace_id TEXT",
2616
+ "ALTER TABLE memories ADD COLUMN document_id TEXT",
2617
+ "ALTER TABLE memories ADD COLUMN user_id TEXT",
2618
+ "ALTER TABLE memories ADD COLUMN char_offset INTEGER",
2619
+ "ALTER TABLE memories ADD COLUMN page_number INTEGER",
2620
+ // Source provenance columns (must match database.ts)
2621
+ "ALTER TABLE memories ADD COLUMN source_path TEXT",
2622
+ "ALTER TABLE memories ADD COLUMN source_type TEXT DEFAULT 'text'",
2623
+ "ALTER TABLE memories ADD COLUMN tier INTEGER DEFAULT 3",
2624
+ "ALTER TABLE memories ADD COLUMN supersedes_id TEXT",
2625
+ // MS-11: draft staging, MS-6a: memory_type, MS-7: trajectory
2626
+ "ALTER TABLE memories ADD COLUMN draft INTEGER DEFAULT 0",
2627
+ "ALTER TABLE memories ADD COLUMN memory_type TEXT DEFAULT 'raw'",
2628
+ "ALTER TABLE memories ADD COLUMN trajectory TEXT",
2629
+ // Metadata enrichment columns (must match database.ts)
2630
+ "ALTER TABLE memories ADD COLUMN intent TEXT",
2631
+ "ALTER TABLE memories ADD COLUMN outcome TEXT",
2632
+ "ALTER TABLE memories ADD COLUMN domain TEXT",
2633
+ "ALTER TABLE memories ADD COLUMN referenced_entities TEXT",
2634
+ "ALTER TABLE memories ADD COLUMN retrieval_count INTEGER DEFAULT 0",
2635
+ "ALTER TABLE memories ADD COLUMN chain_position TEXT",
2636
+ "ALTER TABLE memories ADD COLUMN review_status TEXT",
2637
+ "ALTER TABLE memories ADD COLUMN context_window_pct INTEGER",
2638
+ "ALTER TABLE memories ADD COLUMN file_paths TEXT",
2639
+ "ALTER TABLE memories ADD COLUMN commit_hash TEXT",
2640
+ "ALTER TABLE memories ADD COLUMN duration_ms INTEGER",
2641
+ "ALTER TABLE memories ADD COLUMN token_cost REAL",
2642
+ "ALTER TABLE memories ADD COLUMN audience TEXT",
2643
+ "ALTER TABLE memories ADD COLUMN language_type TEXT",
2644
+ "ALTER TABLE memories ADD COLUMN parent_memory_id TEXT"
2645
+ ]) {
2646
+ try {
2647
+ await client.execute(col);
2648
+ } catch {
2649
+ }
2650
+ }
2651
+ for (const idx of [
2652
+ "CREATE INDEX IF NOT EXISTS idx_memories_tier ON memories(tier)",
2653
+ "CREATE INDEX IF NOT EXISTS idx_memories_supersedes ON memories(supersedes_id) WHERE supersedes_id IS NOT NULL"
2654
+ ]) {
2655
+ try {
2656
+ await client.execute(idx);
2657
+ } catch {
2658
+ }
2659
+ }
2660
+ try {
2661
+ await client.execute("CREATE INDEX IF NOT EXISTS idx_memories_status ON memories(status)");
2662
+ } catch {
2663
+ }
2664
+ for (const idx of [
2665
+ "CREATE INDEX IF NOT EXISTS idx_memories_workspace ON memories(workspace_id)",
2666
+ "CREATE INDEX IF NOT EXISTS idx_memories_document ON memories(document_id)",
2667
+ "CREATE INDEX IF NOT EXISTS idx_memories_user ON memories(user_id)"
2668
+ ]) {
2669
+ try {
2670
+ await client.execute(idx);
2671
+ } catch {
2672
+ }
2673
+ }
2674
+ await client.executeMultiple(`
2675
+ CREATE TABLE IF NOT EXISTS entities (
2676
+ id TEXT PRIMARY KEY,
2677
+ name TEXT NOT NULL,
2678
+ type TEXT NOT NULL,
2679
+ first_seen TEXT NOT NULL,
2680
+ last_seen TEXT NOT NULL,
2681
+ properties TEXT DEFAULT '{}',
2682
+ UNIQUE(name, type)
2683
+ );
2684
+
2685
+ CREATE TABLE IF NOT EXISTS relationships (
2686
+ id TEXT PRIMARY KEY,
2687
+ source_entity_id TEXT NOT NULL,
2688
+ target_entity_id TEXT NOT NULL,
2689
+ type TEXT NOT NULL,
2690
+ weight REAL DEFAULT 1.0,
2691
+ timestamp TEXT NOT NULL,
2692
+ properties TEXT DEFAULT '{}',
2693
+ UNIQUE(source_entity_id, target_entity_id, type)
2694
+ );
2695
+
2696
+ CREATE TABLE IF NOT EXISTS entity_memories (
2697
+ entity_id TEXT NOT NULL,
2698
+ memory_id TEXT NOT NULL,
2699
+ PRIMARY KEY (entity_id, memory_id)
2700
+ );
2701
+
2702
+ CREATE TABLE IF NOT EXISTS relationship_memories (
2703
+ relationship_id TEXT NOT NULL,
2704
+ memory_id TEXT NOT NULL,
2705
+ PRIMARY KEY (relationship_id, memory_id)
2706
+ );
2707
+
2708
+ CREATE INDEX IF NOT EXISTS idx_entities_name ON entities(name);
2709
+ CREATE INDEX IF NOT EXISTS idx_entities_type ON entities(type);
2710
+ CREATE INDEX IF NOT EXISTS idx_relationships_source ON relationships(source_entity_id);
2711
+ CREATE INDEX IF NOT EXISTS idx_relationships_target ON relationships(target_entity_id);
2712
+ CREATE INDEX IF NOT EXISTS idx_relationships_type ON relationships(type);
2713
+
2714
+ CREATE TABLE IF NOT EXISTS hyperedges (
2715
+ id TEXT PRIMARY KEY,
2716
+ label TEXT NOT NULL,
2717
+ relation TEXT NOT NULL,
2718
+ confidence REAL DEFAULT 1.0,
2719
+ timestamp TEXT NOT NULL
2720
+ );
2721
+
2722
+ CREATE TABLE IF NOT EXISTS hyperedge_nodes (
2723
+ hyperedge_id TEXT NOT NULL,
2724
+ entity_id TEXT NOT NULL,
2725
+ PRIMARY KEY (hyperedge_id, entity_id)
2726
+ );
2727
+ `);
2728
+ for (const col of [
2729
+ "ALTER TABLE relationships ADD COLUMN confidence REAL DEFAULT 1.0",
2730
+ "ALTER TABLE relationships ADD COLUMN confidence_label TEXT DEFAULT 'extracted'"
2731
+ ]) {
2732
+ try {
2733
+ await client.execute(col);
2734
+ } catch {
2735
+ }
2736
+ }
2737
+ }
2738
+ async function getReadyShardClient(projectName) {
2739
+ const client = getShardClient(projectName);
2740
+ await ensureShardSchema(client);
2741
+ return client;
2742
+ }
2743
+ function evictLRU() {
2744
+ let oldest = null;
2745
+ let oldestTime = Infinity;
2746
+ for (const [name, time] of _shardLastAccess) {
2747
+ if (time < oldestTime) {
2748
+ oldestTime = time;
2749
+ oldest = name;
2750
+ }
2751
+ }
2752
+ if (oldest) {
2753
+ const client = _shards.get(oldest);
2754
+ if (client) {
2755
+ client.close();
2756
+ }
2757
+ _shards.delete(oldest);
2758
+ _shardLastAccess.delete(oldest);
2759
+ }
2760
+ }
2761
+ function evictIdleShards() {
2762
+ const now = Date.now();
2763
+ const toEvict = [];
2764
+ for (const [name, lastAccess] of _shardLastAccess) {
2765
+ if (now - lastAccess > SHARD_IDLE_MS) {
2766
+ toEvict.push(name);
2767
+ }
2768
+ }
2769
+ for (const name of toEvict) {
2770
+ const client = _shards.get(name);
2771
+ if (client) {
2772
+ client.close();
2773
+ }
2774
+ _shards.delete(name);
2775
+ _shardLastAccess.delete(name);
2776
+ }
2777
+ }
2778
+ function getOpenShardCount() {
2779
+ return _shards.size;
2780
+ }
2781
+ function disposeShards() {
2782
+ if (_evictionTimer) {
2783
+ clearInterval(_evictionTimer);
2784
+ _evictionTimer = null;
2785
+ }
2786
+ for (const [, client] of _shards) {
2787
+ client.close();
2788
+ }
2789
+ _shards.clear();
2790
+ _shardLastAccess.clear();
2791
+ _shardingEnabled = false;
2792
+ _encryptionKey = null;
2793
+ }
2794
+ var SHARDS_DIR, SHARD_IDLE_MS, MAX_OPEN_SHARDS, EVICTION_INTERVAL_MS, _shards, _shardLastAccess, _evictionTimer, _encryptionKey, _shardingEnabled;
2795
+ var init_shard_manager = __esm({
2796
+ "src/lib/shard-manager.ts"() {
2797
+ "use strict";
2798
+ init_config();
2799
+ SHARDS_DIR = path9.join(EXE_AI_DIR, "shards");
2800
+ SHARD_IDLE_MS = 5 * 60 * 1e3;
2801
+ MAX_OPEN_SHARDS = 10;
2802
+ EVICTION_INTERVAL_MS = 60 * 1e3;
2803
+ _shards = /* @__PURE__ */ new Map();
2804
+ _shardLastAccess = /* @__PURE__ */ new Map();
2805
+ _evictionTimer = null;
2806
+ _encryptionKey = null;
2807
+ _shardingEnabled = false;
2808
+ }
2809
+ });
2810
+
2811
+ // src/lib/platform-procedures.ts
2812
+ var PLATFORM_PROCEDURES, PLATFORM_PROCEDURE_TITLES;
2813
+ var init_platform_procedures = __esm({
2814
+ "src/lib/platform-procedures.ts"() {
2815
+ "use strict";
2816
+ PLATFORM_PROCEDURES = [
2817
+ // --- Foundation: what is exe-os ---
2818
+ {
2819
+ title: "What is exe-os \u2014 the operating model every agent must understand",
2820
+ domain: "architecture",
2821
+ priority: "p0",
2822
+ content: "Exe OS is an AI employee operating system. A founder runs 5-10 AI agents as a real org: COO, CTO, CMO, engineers, and content production specialists. Each agent has identity, expertise, and experience layers \u2014 persistent memory that makes them better over time. All data is local-first, E2EE, owned by the user. The MCP server is the ONLY data interface \u2014 never access the DB directly."
2823
+ },
2824
+ {
2825
+ title: "Mode 1 \u2014 how exe-os runs inside Claude Code",
2826
+ domain: "architecture",
2827
+ priority: "p0",
2828
+ content: "Mode 1: exe-os runs AS hooks + MCP + skills inside Claude Code, Codex, or OpenCode. The founder picks their default tool at setup. The COO manages employees in tmux sessions. Each coordinator session is a separate window/project. Employees run in their own tmux panes via create_task auto-spawn. The founder talks to the COO; the COO orchestrates the team. The tool is the shell, exe-os is the brain."
2829
+ },
2830
+ {
2831
+ title: "Sessions explained \u2014 coordinator session names and projects",
2832
+ domain: "architecture",
2833
+ priority: "p0",
2834
+ content: "Each coordinator session is an isolated project session. One might be exe-os development, another might be exe-wiki. Each session spawns its own employees using {employee}-{coordinatorSession}. Sessions share the same memory DB but tasks are scoped to the session that created them. A founder can run multiple projects simultaneously. Sessions never interfere with each other."
2835
+ },
2836
+ {
2837
+ title: "Runtime settings \u2014 COO can view and change tools per agent",
2838
+ domain: "workflow",
2839
+ priority: "p1",
2840
+ content: "exe-os supports three tools: Claude Code (Anthropic), Codex (OpenAI), and OpenCode (open source, 75+ providers). Each agent can use a different tool and model. COO uses set_agent_config MCP tool to view or change settings. Call with no args to show all agents. Call with agent_id + runtime + model to change. Users can also run `exe-os settings` from terminal for interactive arrow-key selection."
2841
+ },
2842
+ // --- Hierarchy and dispatch ---
2843
+ {
2844
+ title: "Chain of command \u2014 who talks to whom",
2845
+ domain: "workflow",
2846
+ priority: "p0",
2847
+ content: "Founder -> COO -> CTO/CMO. CTO -> engineers. CMO -> content production. Never skip levels: the COO does not bypass managers for specialist work. Specialists report to their manager. If you need cross-team info, use ask_team_memory \u2014 don't read other agents' task folders. Each level owns dispatch downward and review upward."
2848
+ },
2849
+ {
2850
+ title: "Single dispatch path \u2014 create_task only",
2851
+ domain: "workflow",
2852
+ priority: "p0",
2853
+ content: "create_task is the ONLY way to dispatch work to another agent. No direct ensureEmployee calls, no manual tmux spawns, no send_message for actionable work. create_task \u2192 system auto-spawns \u2192 session correctly named. ONE PATH. No backdoors. No exceptions."
2854
+ },
2855
+ // --- Session isolation ---
2856
+ {
2857
+ title: "Session scoping \u2014 stay in your coordinator boundary",
2858
+ domain: "security",
2859
+ priority: "p0",
2860
+ content: "Session scoping is mandatory. Managers dispatch to workers within their own coordinator session ONLY. Employee sessions use {employee}-{coordinatorSession}. Cross-session dispatch is blocked by the system. Verify session names before dispatch. Tasks are scoped to the creating coordinator session."
2861
+ },
2862
+ {
2863
+ title: "Session isolation \u2014 never touch another session's work",
2864
+ domain: "workflow",
2865
+ priority: "p0",
2866
+ content: "Sessions are isolated. A coordinator session owns ONLY tasks it dispatched. (1) Never close/update/cancel tasks from another coordinator session. (2) Never review work from a different session \u2014 report that it belongs to another session and skip. (3) Ignore other sessions' items in list_tasks results. (4) Employees inherit session: employee sessions work ONLY on their parent coordinator session's tasks. Cross-session work is a system violation."
2867
+ },
2868
+ // --- Engineering: session scoping in code ---
2869
+ {
2870
+ title: "Three-dimensional scoping \u2014 session, project, role \u2014 enforced in every query",
2871
+ domain: "architecture",
2872
+ priority: "p0",
2873
+ content: "Every DB query, notification, review count, and task operation MUST be scoped on 3 dimensions: (1) Session \u2014 filter by session_scope matching the current coordinator session. (2) Project \u2014 filter by project_name. (3) Role \u2014 agents only see data at their hierarchy level. When writing ANY function that touches tasks, reviews, messages, or notifications: always accept a sessionScope parameter and pass it to the SQL WHERE clause. Unscoped queries are bugs. Test by running 2+ coordinator sessions simultaneously."
2874
+ },
2875
+ // --- Hard constraints ---
2876
+ {
2877
+ title: "What you CANNOT do in exe-os \u2014 hard constraints",
2878
+ domain: "security",
2879
+ priority: "p0",
2880
+ content: "NEVER: (1) Access the database directly \u2014 it's SQLCipher encrypted, always fails. Use MCP tools only. (2) Manually spawn tmux sessions \u2014 create_task handles it. (3) Run git checkout main \u2014 agents work in worktrees. (4) Modify another agent's in-progress task. (5) Push to remote \u2014 the COO reviews and pushes. (6) Skip update_task(done) \u2014 it's the ONLY way your work gets reviewed. (7) Run git init."
2881
+ },
2882
+ // --- Operations ---
2883
+ {
2884
+ title: "Managers must supervise deployed workers",
2885
+ domain: "workflow",
2886
+ priority: "p0",
2887
+ content: `Every manager (COO/CTO/CMO) who dispatches work to a worker MUST actively monitor them. Check tmux capture-pane every 10 minutes. Verify they're working, not stuck. If idle at prompt with in_progress task \u2192 send intercom. If stuck \u2192 unblock or escalate. "Standing by" without checking is negligence.`
2888
+ },
2889
+ {
2890
+ title: "COO boot health check \u2014 memory, cloud sync, daemon on every launch",
2891
+ domain: "workflow",
2892
+ priority: "p0",
2893
+ content: "On every /exe boot, COO MUST check system health BEFORE other work: (1) daemon \u2014 is exed PID alive, (2) cloud sync \u2014 grep workers.log for recent cloud-sync errors, (3) memory count \u2014 total in DB, (4) sync delta \u2014 local vs cloud storage_bytes. Report as 4-line status table. If ANY check fails, surface to founder immediately. Do not proceed to tasks until health confirmed."
2894
+ },
2895
+ {
2896
+ title: "exe-build-adv mandatory for 3+ files",
2897
+ domain: "workflow",
2898
+ priority: "p0",
2899
+ content: "exe-build-adv is MANDATORY for ALL work touching 3+ files. Run /exe-build-adv --auto BEFORE implementation. Pipeline: Spec \u2192 AC \u2192 Tests \u2192 Evaluate \u2192 Fix. No multi-file feature ships without pipeline artifacts. No exceptions \u2014 managers reject work without them."
2900
+ },
2901
+ {
2902
+ title: "Desktop and TUI are the same product",
2903
+ domain: "architecture",
2904
+ priority: "p0",
2905
+ content: "Desktop and TUI are the SAME product in different renderers. Same data contracts, same interactions, same acceptance criteria. Desktop tab specs in ARCHITECTURE.md ARE the TUI specs. When building TUI, cross-reference Desktop spec. Different tab names, identical behavior. Never treat them as separate products."
2906
+ },
2907
+ // --- Orchestration golden path ---
2908
+ {
2909
+ title: "Task lifecycle \u2014 the golden path every agent follows",
2910
+ domain: "workflow",
2911
+ priority: "p0",
2912
+ content: "create_task is dispatch + delivery. Task lifecycle: open \u2192 in_progress (you start) \u2192 done (update_task when finished) \u2192 needs_review (reviewer nudged) \u2192 closed (COO only via close_task). DB is the reliable delivery \u2014 intercom is just a speedup nudge. If you finish a task, self-chain: check for next task immediately (step 7). Never wait for a nudge. Never say 'standing by.'"
2913
+ },
2914
+ {
2915
+ title: "Intercom is a speedup, not delivery \u2014 DB is the source of truth",
2916
+ domain: "architecture",
2917
+ priority: "p0",
2918
+ content: "Tasks live in the DB. Intercom (tmux send-keys) is fire-and-forget \u2014 it may fail, get garbled, or arrive mid-work. Never rely on intercom for task delivery. The UserPromptSubmit hook checks the DB for new tasks on every prompt. Your operating procedures step 7 says check for next work. The daemon nudges idle agents as a speedup. If you have no tasks, you found them all."
2919
+ }
2920
+ ];
2921
+ PLATFORM_PROCEDURE_TITLES = new Set(
2922
+ PLATFORM_PROCEDURES.map((p) => p.title)
2923
+ );
2924
+ }
2925
+ });
2926
+
2927
+ // src/lib/global-procedures.ts
2928
+ var global_procedures_exports = {};
2929
+ __export(global_procedures_exports, {
2930
+ deactivateGlobalProcedure: () => deactivateGlobalProcedure,
2931
+ getGlobalProceduresBlock: () => getGlobalProceduresBlock,
2932
+ loadGlobalProcedures: () => loadGlobalProcedures,
2933
+ storeGlobalProcedure: () => storeGlobalProcedure
2934
+ });
2935
+ import { randomUUID } from "crypto";
2936
+ async function loadGlobalProcedures() {
2937
+ const client = getClient();
2938
+ const result = await client.execute({
2939
+ sql: "SELECT * FROM global_procedures WHERE active = 1 ORDER BY priority ASC, created_at ASC",
2940
+ args: []
2941
+ });
2942
+ const allRows = result.rows;
2943
+ const customerOnly = allRows.filter((p) => !PLATFORM_PROCEDURE_TITLES.has(p.title));
2944
+ if (customerOnly.length > 0) {
2945
+ _customerCache = customerOnly.map((p) => `### ${p.title}
2946
+ ${p.content}`).join("\n\n");
2947
+ } else {
2948
+ _customerCache = "";
2949
+ }
2950
+ _cacheLoaded = true;
2951
+ return customerOnly;
2952
+ }
2953
+ function getGlobalProceduresBlock() {
2954
+ const sections = [];
2955
+ if (_platformCache) sections.push(_platformCache);
2956
+ if (_cacheLoaded && _customerCache) sections.push(_customerCache);
2957
+ if (sections.length === 0) return "";
2958
+ return `## Organization-Wide Procedures (MANDATORY \u2014 supersedes all other rules)
2959
+
2960
+ ${sections.join("\n\n")}
2961
+ `;
2962
+ }
2963
+ async function storeGlobalProcedure(input2) {
2964
+ const id = randomUUID();
2965
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2966
+ const client = getClient();
2967
+ await client.execute({
2968
+ sql: `INSERT INTO global_procedures (id, title, content, priority, domain, active, created_at, updated_at)
2969
+ VALUES (?, ?, ?, ?, ?, 1, ?, ?)`,
2970
+ args: [id, input2.title, input2.content, input2.priority ?? "p0", input2.domain ?? null, now, now]
2971
+ });
2972
+ await loadGlobalProcedures();
2973
+ return id;
2974
+ }
2975
+ async function deactivateGlobalProcedure(id) {
2976
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2977
+ const client = getClient();
2978
+ const result = await client.execute({
2979
+ sql: "UPDATE global_procedures SET active = 0, updated_at = ? WHERE id = ?",
2980
+ args: [now, id]
2981
+ });
2982
+ await loadGlobalProcedures();
2983
+ return result.rowsAffected > 0;
2984
+ }
2985
+ var _customerCache, _cacheLoaded, _platformCache;
2986
+ var init_global_procedures = __esm({
2987
+ "src/lib/global-procedures.ts"() {
2988
+ "use strict";
2989
+ init_database();
2990
+ init_platform_procedures();
2991
+ _customerCache = "";
2992
+ _cacheLoaded = false;
2993
+ _platformCache = PLATFORM_PROCEDURES.map((p) => `### ${p.title}
2994
+ ${p.content}`).join("\n\n");
2995
+ }
2996
+ });
2997
+
2998
+ // src/lib/store.ts
2999
+ var store_exports = {};
3000
+ __export(store_exports, {
3001
+ attachDocumentMetadata: () => attachDocumentMetadata,
3002
+ buildWikiScopeFilter: () => buildWikiScopeFilter,
3003
+ classifyTier: () => classifyTier,
3004
+ disposeStore: () => disposeStore,
3005
+ flushBatch: () => flushBatch,
3006
+ flushTier3: () => flushTier3,
3007
+ getMemoryCardinality: () => getMemoryCardinality,
3008
+ initStore: () => initStore,
3009
+ reserveVersions: () => reserveVersions,
3010
+ searchMemories: () => searchMemories,
3011
+ updateMemoryStatus: () => updateMemoryStatus,
3012
+ vectorToBlob: () => vectorToBlob,
3013
+ writeMemory: () => writeMemory
3014
+ });
3015
+ import { createHash } from "crypto";
3016
+ function isBusyError2(err) {
3017
+ if (err instanceof Error) {
3018
+ const msg = err.message.toLowerCase();
3019
+ return msg.includes("sqlite_busy") || msg.includes("database is locked");
3020
+ }
3021
+ return false;
3022
+ }
3023
+ async function retryOnBusy2(fn, label) {
3024
+ for (let attempt = 0; attempt <= INIT_MAX_RETRIES; attempt++) {
3025
+ try {
3026
+ return await fn();
3027
+ } catch (err) {
3028
+ if (!isBusyError2(err) || attempt === INIT_MAX_RETRIES) throw err;
3029
+ process.stderr.write(
3030
+ `[store] SQLITE_BUSY during ${label}, retry ${attempt + 1}/${INIT_MAX_RETRIES}
3031
+ `
3032
+ );
3033
+ await new Promise((r) => setTimeout(r, INIT_RETRY_DELAY_MS * (attempt + 1)));
3034
+ }
3035
+ }
3036
+ throw new Error("unreachable");
392
3037
  }
393
- async function loadEmployees(employeesPath = EMPLOYEES_PATH) {
394
- if (!existsSync4(employeesPath)) {
395
- return [];
3038
+ async function initStore(options) {
3039
+ if (_flushTimer !== null) {
3040
+ clearInterval(_flushTimer);
3041
+ _flushTimer = null;
396
3042
  }
397
- const raw = await readFile2(employeesPath, "utf-8");
398
- try {
399
- return JSON.parse(raw);
400
- } catch {
401
- return [];
3043
+ _pendingRecords = [];
3044
+ _flushing = false;
3045
+ _batchSize = options?.batchSize ?? 20;
3046
+ _flushIntervalMs = options?.flushIntervalMs ?? 1e4;
3047
+ let dbPath = options?.dbPath;
3048
+ if (!dbPath) {
3049
+ const config = await loadConfig();
3050
+ dbPath = config.dbPath;
402
3051
  }
403
- }
404
- async function saveEmployees(employees, employeesPath = EMPLOYEES_PATH) {
405
- await mkdir2(path3.dirname(employeesPath), { recursive: true });
406
- await writeFile2(employeesPath, JSON.stringify(employees, null, 2) + "\n", "utf-8");
407
- }
408
- function loadEmployeesSync(employeesPath = EMPLOYEES_PATH) {
409
- if (!existsSync4(employeesPath)) return [];
410
- try {
411
- return JSON.parse(readFileSync3(employeesPath, "utf-8"));
412
- } catch {
413
- return [];
3052
+ let masterKey = options?.masterKey ?? null;
3053
+ if (!masterKey) {
3054
+ masterKey = await getMasterKey();
3055
+ if (!masterKey) {
3056
+ throw new Error(
3057
+ "No encryption key found. Run /exe-setup to generate one."
3058
+ );
3059
+ }
3060
+ }
3061
+ const hexKey = masterKey.toString("hex");
3062
+ await initTurso({
3063
+ dbPath,
3064
+ encryptionKey: hexKey
3065
+ });
3066
+ await retryOnBusy2(() => ensureSchema(), "ensureSchema");
3067
+ if (!options?.lightweight) {
3068
+ try {
3069
+ const { initShardManager: initShardManager2 } = await Promise.resolve().then(() => (init_shard_manager(), shard_manager_exports));
3070
+ initShardManager2(hexKey);
3071
+ } catch {
3072
+ }
3073
+ const client = getClient();
3074
+ const vResult = await retryOnBusy2(
3075
+ () => client.execute("SELECT MAX(version) as max_v FROM memories"),
3076
+ "version-query"
3077
+ );
3078
+ _nextVersion = (Number(vResult.rows[0]?.max_v) || 0) + 1;
3079
+ try {
3080
+ const { loadGlobalProcedures: loadGlobalProcedures2 } = await Promise.resolve().then(() => (init_global_procedures(), global_procedures_exports));
3081
+ await loadGlobalProcedures2();
3082
+ } catch {
3083
+ }
414
3084
  }
415
3085
  }
416
- function getEmployee(employees, name) {
417
- return employees.find((e) => e.name.toLowerCase() === name.toLowerCase());
418
- }
419
- function getEmployeeByRole(employees, role) {
420
- const lower = role.toLowerCase();
421
- return employees.find((e) => e.role.toLowerCase() === lower);
3086
+ function classifyTier(record) {
3087
+ if (record.tool_name === "commit_to_long_term_memory" && (record.importance ?? 0) >= 8) return 1;
3088
+ if (["store_memory", "manual"].includes(record.tool_name ?? "") && (record.importance ?? 0) >= 5) return 2;
3089
+ return 3;
422
3090
  }
423
- function getEmployeeNamesByRole(employees, role) {
424
- const lower = role.toLowerCase();
425
- return employees.filter((e) => e.role.toLowerCase() === lower).map((e) => e.name);
3091
+ function inferFilePaths(record) {
3092
+ if (!["Read", "Write", "Edit"].includes(record.tool_name)) return null;
3093
+ const firstLine = record.raw_text.split("\n")[0] ?? "";
3094
+ const match = firstLine.match(/(\/[\w./-]+\.\w+)/);
3095
+ return match ? JSON.stringify([match[1]]) : null;
426
3096
  }
427
- function hasRole(agentName, role) {
428
- const employees = loadEmployeesSync();
429
- const emp = getEmployee(employees, agentName);
430
- return emp ? emp.role.toLowerCase() === role.toLowerCase() : false;
3097
+ function inferCommitHash(record) {
3098
+ if (record.tool_name !== "Bash") return null;
3099
+ const match = record.raw_text.match(/\b([a-f0-9]{7,40})\b/);
3100
+ return match ? match[1] : null;
431
3101
  }
432
- function baseAgentName(name, employees) {
433
- const match = name.match(/^([a-zA-Z]+)\d+$/);
434
- if (!match) return name;
435
- const base = match[1];
436
- const roster = employees ?? loadEmployeesSync();
437
- if (getEmployee(roster, base)) return base;
438
- return name;
3102
+ function inferLanguageType(record) {
3103
+ const text = record.raw_text;
3104
+ if (!text || text.length < 10) return null;
3105
+ const trimmed = text.trimStart();
3106
+ if (trimmed.startsWith("{") || trimmed.startsWith("[")) return "json";
3107
+ if (/\b(SELECT|INSERT|UPDATE|DELETE|CREATE TABLE|ALTER TABLE)\b/i.test(text)) return "sql";
3108
+ if (/\b(function |const |import |export |class |def |async |=>)\b/.test(text)) return "code";
3109
+ if (trimmed.startsWith("#") || trimmed.startsWith("*")) return "prose";
3110
+ return "mixed";
439
3111
  }
440
- function isMultiInstance(agentName, employees) {
441
- const roster = employees ?? loadEmployeesSync();
442
- const emp = getEmployee(roster, agentName);
443
- if (!emp) return false;
444
- return MULTI_INSTANCE_ROLES.has(emp.role.toLowerCase());
3112
+ function inferDomain(record) {
3113
+ const proj = (record.project_name ?? "").toLowerCase();
3114
+ if (proj.includes("marketing") || proj.includes("content")) return "marketing";
3115
+ if (proj.includes("crm") || proj.includes("customer")) return "customer";
3116
+ return null;
445
3117
  }
446
- function addEmployee(employees, employee) {
447
- const normalized = { ...employee, name: employee.name.toLowerCase() };
448
- if (employees.some((e) => e.name.toLowerCase() === normalized.name)) {
449
- throw new Error(`Employee '${normalized.name}' already exists`);
3118
+ async function writeMemory(record) {
3119
+ if (record.vector !== null && record.vector.length !== EMBEDDING_DIM) {
3120
+ throw new Error(
3121
+ `Expected ${EMBEDDING_DIM}-dim vector, got ${record.vector.length}`
3122
+ );
450
3123
  }
451
- return [...employees, normalized];
452
- }
453
- function appendToCoordinatorTeam(employee) {
454
- const coordinator = getCoordinatorEmployee(loadEmployeesSync());
455
- if (!coordinator) return;
456
- const idPath = path3.join(IDENTITY_DIR, `${coordinator.name}.md`);
457
- if (!existsSync4(idPath)) return;
458
- const content = readFileSync3(idPath, "utf-8");
459
- if (content.includes(`**${capitalize(employee.name)}`)) return;
460
- const teamMatch = content.match(TEAM_SECTION_RE);
461
- if (!teamMatch || teamMatch.index === void 0) return;
462
- const afterTeam = content.slice(teamMatch.index + teamMatch[0].length);
463
- const nextHeading = afterTeam.match(/\n## /);
464
- const entry = `
465
- **${capitalize(employee.name)} (${employee.role}):** Newly hired. Update this description as the role develops.
466
- `;
467
- let updated;
468
- if (nextHeading && nextHeading.index !== void 0) {
469
- const insertAt = teamMatch.index + teamMatch[0].length + nextHeading.index;
470
- updated = content.slice(0, insertAt) + entry + content.slice(insertAt);
471
- } else {
472
- updated = content.trimEnd() + "\n" + entry;
3124
+ const contentHash = createHash("md5").update(record.raw_text).digest("hex");
3125
+ if (_pendingRecords.some((r) => r.content_hash === contentHash && r.agent_id === record.agent_id)) {
3126
+ return;
473
3127
  }
474
- writeFileSync2(idPath, updated, "utf-8");
475
- }
476
- function capitalize(s) {
477
- return s.charAt(0).toUpperCase() + s.slice(1);
478
- }
479
- async function hireEmployee(employee) {
480
- const employees = await loadEmployees();
481
- const updated = addEmployee(employees, employee);
482
- await saveEmployees(updated);
483
3128
  try {
484
- appendToCoordinatorTeam(employee);
3129
+ const client = getClient();
3130
+ const existing = await client.execute({
3131
+ sql: "SELECT id FROM memories WHERE content_hash = ? AND agent_id = ? LIMIT 1",
3132
+ args: [contentHash, record.agent_id]
3133
+ });
3134
+ if (existing.rows.length > 0) return;
485
3135
  } catch {
486
3136
  }
487
- try {
488
- const { loadAgentConfig: loadAgentConfig2, saveAgentConfig: saveAgentConfig2 } = await Promise.resolve().then(() => (init_agent_config(), agent_config_exports));
489
- const config = loadAgentConfig2();
490
- const name = employee.name.toLowerCase();
491
- if (!config[name] && config["default"]) {
492
- config[name] = { ...config["default"] };
493
- saveAgentConfig2(config);
3137
+ const dbRow = {
3138
+ id: record.id,
3139
+ agent_id: record.agent_id,
3140
+ agent_role: record.agent_role,
3141
+ session_id: record.session_id,
3142
+ timestamp: record.timestamp,
3143
+ tool_name: record.tool_name,
3144
+ project_name: record.project_name,
3145
+ has_error: record.has_error ? 1 : 0,
3146
+ raw_text: record.raw_text,
3147
+ vector: record.vector,
3148
+ version: 0,
3149
+ // Placeholder — assigned atomically at flush time
3150
+ task_id: record.task_id ?? null,
3151
+ importance: record.importance ?? 5,
3152
+ status: record.status ?? "active",
3153
+ confidence: record.confidence ?? 0.7,
3154
+ last_accessed: record.last_accessed ?? record.timestamp,
3155
+ workspace_id: record.workspace_id ?? null,
3156
+ document_id: record.document_id ?? null,
3157
+ user_id: record.user_id ?? null,
3158
+ char_offset: record.char_offset ?? null,
3159
+ page_number: record.page_number ?? null,
3160
+ source_path: record.source_path ?? null,
3161
+ source_type: record.source_type ?? null,
3162
+ tier: record.tier ?? classifyTier(record),
3163
+ supersedes_id: record.supersedes_id ?? null,
3164
+ draft: record.draft ? 1 : 0,
3165
+ memory_type: record.memory_type ?? "raw",
3166
+ trajectory: record.trajectory ? JSON.stringify(record.trajectory) : null,
3167
+ content_hash: contentHash,
3168
+ intent: record.intent ?? null,
3169
+ outcome: record.outcome ?? null,
3170
+ domain: record.domain ?? inferDomain(record),
3171
+ referenced_entities: record.referenced_entities ?? null,
3172
+ retrieval_count: record.retrieval_count ?? 0,
3173
+ chain_position: record.chain_position ?? null,
3174
+ review_status: record.review_status ?? null,
3175
+ context_window_pct: record.context_window_pct ?? null,
3176
+ file_paths: record.file_paths ?? inferFilePaths(record),
3177
+ commit_hash: record.commit_hash ?? inferCommitHash(record),
3178
+ duration_ms: record.duration_ms ?? null,
3179
+ token_cost: record.token_cost ?? null,
3180
+ audience: record.audience ?? null,
3181
+ language_type: record.language_type ?? inferLanguageType(record),
3182
+ parent_memory_id: record.parent_memory_id ?? null
3183
+ };
3184
+ _pendingRecords.push(dbRow);
3185
+ orgBus.emit({
3186
+ type: "memory_stored",
3187
+ agentId: record.agent_id,
3188
+ project: record.project_name,
3189
+ timestamp: record.timestamp
3190
+ });
3191
+ const MAX_PENDING = 1e3;
3192
+ if (_pendingRecords.length > MAX_PENDING) {
3193
+ const dropped = _pendingRecords.length - MAX_PENDING;
3194
+ _pendingRecords = _pendingRecords.slice(-MAX_PENDING);
3195
+ console.warn(`[store] Dropped ${dropped} oldest pending records (overflow)`);
3196
+ }
3197
+ if (_flushTimer === null) {
3198
+ _flushTimer = setInterval(() => {
3199
+ void flushBatch();
3200
+ }, _flushIntervalMs);
3201
+ if (_flushTimer && typeof _flushTimer === "object" && "unref" in _flushTimer) {
3202
+ _flushTimer.unref();
494
3203
  }
495
- } catch {
496
3204
  }
497
- return updated;
3205
+ if (_pendingRecords.length >= _batchSize) {
3206
+ await flushBatch();
3207
+ }
498
3208
  }
499
- async function normalizeRosterCase(rosterPath) {
500
- const employees = await loadEmployees(rosterPath);
501
- let changed = false;
502
- for (const emp of employees) {
503
- if (emp.name !== emp.name.toLowerCase()) {
504
- const oldName = emp.name;
505
- emp.name = emp.name.toLowerCase();
506
- changed = true;
507
- try {
508
- const identityDir = path3.join(os2.homedir(), ".exe-os", "identity");
509
- const oldPath = path3.join(identityDir, `${oldName}.md`);
510
- const newPath = path3.join(identityDir, `${emp.name}.md`);
511
- if (existsSync4(oldPath) && !existsSync4(newPath)) {
512
- renameSync2(oldPath, newPath);
513
- } else if (existsSync4(oldPath) && oldPath !== newPath) {
514
- const content = readFileSync3(oldPath, "utf-8");
515
- writeFileSync2(newPath, content, "utf-8");
516
- if (oldPath.toLowerCase() !== newPath.toLowerCase()) {
517
- unlinkSync(oldPath);
3209
+ async function flushBatch() {
3210
+ if (_flushing || _pendingRecords.length === 0) return 0;
3211
+ _flushing = true;
3212
+ try {
3213
+ const batch = _pendingRecords.slice(0);
3214
+ const client = getClient();
3215
+ const vResult = await client.execute("SELECT MAX(version) as max_v FROM memories");
3216
+ let baseVersion = (Number(vResult.rows[0]?.max_v) || 0) + 1;
3217
+ for (const row of batch) {
3218
+ row.version = baseVersion++;
3219
+ }
3220
+ _nextVersion = baseVersion;
3221
+ const buildStmt = (row) => {
3222
+ const hasVector = row.vector !== null;
3223
+ const taskId = row.task_id ?? null;
3224
+ const importance = row.importance ?? 5;
3225
+ const status = row.status ?? "active";
3226
+ const confidence = row.confidence ?? 0.7;
3227
+ const lastAccessed = row.last_accessed ?? row.timestamp;
3228
+ const workspaceId = row.workspace_id ?? null;
3229
+ const documentId = row.document_id ?? null;
3230
+ const userId = row.user_id ?? null;
3231
+ const charOffset = row.char_offset ?? null;
3232
+ const pageNumber = row.page_number ?? null;
3233
+ const sourcePath = row.source_path ?? null;
3234
+ const sourceType = row.source_type ?? null;
3235
+ const tier = row.tier ?? 3;
3236
+ const supersedesId = row.supersedes_id ?? null;
3237
+ const draft = row.draft ? 1 : 0;
3238
+ const memoryType = row.memory_type ?? "raw";
3239
+ const trajectory = row.trajectory ?? null;
3240
+ const contentHash = row.content_hash ?? null;
3241
+ const intent = row.intent ?? null;
3242
+ const outcome = row.outcome ?? null;
3243
+ const domain = row.domain ?? null;
3244
+ const referencedEntities = row.referenced_entities ?? null;
3245
+ const retrievalCount = row.retrieval_count ?? 0;
3246
+ const chainPosition = row.chain_position ?? null;
3247
+ const reviewStatus = row.review_status ?? null;
3248
+ const contextWindowPct = row.context_window_pct ?? null;
3249
+ const filePaths = row.file_paths ?? null;
3250
+ const commitHash = row.commit_hash ?? null;
3251
+ const durationMs = row.duration_ms ?? null;
3252
+ const tokenCost = row.token_cost ?? null;
3253
+ const audience = row.audience ?? null;
3254
+ const languageType = row.language_type ?? null;
3255
+ const parentMemoryId = row.parent_memory_id ?? null;
3256
+ const cols = `id, agent_id, agent_role, session_id, timestamp,
3257
+ tool_name, project_name,
3258
+ has_error, raw_text, vector, version, task_id, importance, status,
3259
+ confidence, last_accessed,
3260
+ workspace_id, document_id, user_id, char_offset, page_number,
3261
+ source_path, source_type, tier, supersedes_id, draft, memory_type, trajectory, content_hash,
3262
+ intent, outcome, domain, referenced_entities, retrieval_count,
3263
+ chain_position, review_status, context_window_pct, file_paths, commit_hash,
3264
+ duration_ms, token_cost, audience, language_type, parent_memory_id`;
3265
+ const metaArgs = [
3266
+ intent,
3267
+ outcome,
3268
+ domain,
3269
+ referencedEntities,
3270
+ retrievalCount,
3271
+ chainPosition,
3272
+ reviewStatus,
3273
+ contextWindowPct,
3274
+ filePaths,
3275
+ commitHash,
3276
+ durationMs,
3277
+ tokenCost,
3278
+ audience,
3279
+ languageType,
3280
+ parentMemoryId
3281
+ ];
3282
+ const baseArgs = [
3283
+ row.id,
3284
+ row.agent_id,
3285
+ row.agent_role,
3286
+ row.session_id,
3287
+ row.timestamp,
3288
+ row.tool_name,
3289
+ row.project_name,
3290
+ row.has_error,
3291
+ row.raw_text
3292
+ ];
3293
+ const sharedArgs = [
3294
+ row.version,
3295
+ taskId,
3296
+ importance,
3297
+ status,
3298
+ confidence,
3299
+ lastAccessed,
3300
+ workspaceId,
3301
+ documentId,
3302
+ userId,
3303
+ charOffset,
3304
+ pageNumber,
3305
+ sourcePath,
3306
+ sourceType,
3307
+ tier,
3308
+ supersedesId,
3309
+ draft,
3310
+ memoryType,
3311
+ trajectory,
3312
+ contentHash
3313
+ ];
3314
+ return {
3315
+ sql: hasVector ? `INSERT OR IGNORE INTO memories (${cols})
3316
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, vector32(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` : `INSERT OR IGNORE INTO memories (${cols})
3317
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, NULL, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
3318
+ args: hasVector ? [...baseArgs, vectorToBlob(row.vector), ...sharedArgs, ...metaArgs] : [...baseArgs, ...sharedArgs, ...metaArgs]
3319
+ };
3320
+ };
3321
+ const globalClient = getClient();
3322
+ const globalStmts = batch.map(buildStmt);
3323
+ await globalClient.batch(globalStmts, "write");
3324
+ _pendingRecords.splice(0, batch.length);
3325
+ try {
3326
+ const { isShardingEnabled: isShardingEnabled2, getReadyShardClient: getReadyShardClient2 } = await Promise.resolve().then(() => (init_shard_manager(), shard_manager_exports));
3327
+ if (isShardingEnabled2()) {
3328
+ const byProject = /* @__PURE__ */ new Map();
3329
+ for (const row of batch) {
3330
+ const proj = row.project_name || "unknown";
3331
+ if (!byProject.has(proj)) byProject.set(proj, []);
3332
+ byProject.get(proj).push(row);
3333
+ }
3334
+ for (const [project, rows] of byProject) {
3335
+ try {
3336
+ const shardClient = await getReadyShardClient2(project);
3337
+ const shardStmts = rows.map(buildStmt);
3338
+ await shardClient.batch(shardStmts, "write");
3339
+ } catch (err) {
3340
+ process.stderr.write(
3341
+ `[store] Shard write failed for ${project}: ${err instanceof Error ? err.message : String(err)}
3342
+ `
3343
+ );
518
3344
  }
519
3345
  }
520
- } catch {
521
3346
  }
3347
+ } catch {
522
3348
  }
3349
+ return batch.length;
3350
+ } finally {
3351
+ _flushing = false;
523
3352
  }
524
- if (changed) {
525
- await saveEmployees(employees, rosterPath);
3353
+ }
3354
+ function buildWikiScopeFilter(options, columnPrefix) {
3355
+ const args = [];
3356
+ let clause = "";
3357
+ if (options?.workspaceId !== void 0) {
3358
+ clause += ` AND ${columnPrefix}workspace_id = ?`;
3359
+ args.push(options.workspaceId);
526
3360
  }
527
- return changed;
3361
+ if (options?.userId === void 0) {
3362
+ clause += ` AND ${columnPrefix}user_id IS NULL`;
3363
+ } else if (options.userId === null) {
3364
+ clause += ` AND ${columnPrefix}user_id IS NULL`;
3365
+ } else {
3366
+ clause += ` AND (${columnPrefix}user_id = ? OR ${columnPrefix}user_id IS NULL)`;
3367
+ args.push(options.userId);
3368
+ }
3369
+ return { clause, args };
528
3370
  }
529
- function findExeBin() {
3371
+ async function searchMemories(queryVector, agentId, options) {
3372
+ let client;
530
3373
  try {
531
- return execSync2(process.platform === "win32" ? "where exe-os" : "which exe-os", { encoding: "utf8" }).trim();
3374
+ const { isShardingEnabled: isShardingEnabled2, shardExists: shardExists2, getReadyShardClient: getReadyShardClient2 } = await Promise.resolve().then(() => (init_shard_manager(), shard_manager_exports));
3375
+ if (isShardingEnabled2() && options?.projectName && shardExists2(options.projectName)) {
3376
+ client = await getReadyShardClient2(options.projectName);
3377
+ } else {
3378
+ client = getClient();
3379
+ }
532
3380
  } catch {
533
- return null;
3381
+ client = getClient();
534
3382
  }
535
- }
536
- function registerBinSymlinks(name) {
537
- const created = [];
538
- const skipped = [];
539
- const errors = [];
540
- const exeBinPath = findExeBin();
541
- if (!exeBinPath) {
542
- errors.push("Could not find 'exe-os' in PATH");
543
- return { created, skipped, errors };
3383
+ const limit = options?.limit ?? 10;
3384
+ const statusFilter = options?.includeArchived ? "" : `
3385
+ AND COALESCE(status, 'active') = 'active'`;
3386
+ const draftFilter = options?.includeDrafts ? "" : `
3387
+ AND (draft = 0 OR draft IS NULL)`;
3388
+ let sql = `SELECT id, agent_id, agent_role, session_id, timestamp,
3389
+ tool_name, project_name,
3390
+ has_error, raw_text, vector, importance, status,
3391
+ confidence, last_accessed,
3392
+ workspace_id, document_id, user_id,
3393
+ char_offset, page_number,
3394
+ source_path, source_type
3395
+ FROM memories
3396
+ WHERE agent_id = ?
3397
+ AND vector IS NOT NULL${statusFilter}${draftFilter}
3398
+ AND COALESCE(confidence, 0.7) >= 0.3`;
3399
+ const args = [agentId];
3400
+ const scope = buildWikiScopeFilter(options, "");
3401
+ sql += scope.clause;
3402
+ args.push(...scope.args);
3403
+ if (options?.projectName) {
3404
+ sql += ` AND project_name = ?`;
3405
+ args.push(options.projectName);
544
3406
  }
545
- const binDir = path3.dirname(exeBinPath);
546
- let target;
547
- try {
548
- target = readlinkSync(exeBinPath);
549
- } catch {
550
- errors.push("Could not read 'exe' symlink");
551
- return { created, skipped, errors };
3407
+ if (options?.toolName) {
3408
+ sql += ` AND tool_name = ?`;
3409
+ args.push(options.toolName);
552
3410
  }
553
- for (const suffix of ["", "-opencode"]) {
554
- const linkName = `${name}${suffix}`;
555
- const linkPath = path3.join(binDir, linkName);
556
- if (existsSync4(linkPath)) {
557
- skipped.push(linkName);
558
- continue;
3411
+ if (options?.hasError !== void 0) {
3412
+ sql += ` AND has_error = ?`;
3413
+ args.push(options.hasError ? 1 : 0);
3414
+ }
3415
+ if (options?.since) {
3416
+ sql += ` AND timestamp >= ?`;
3417
+ args.push(options.since);
3418
+ }
3419
+ if (options?.memoryType) {
3420
+ sql += ` AND memory_type = ?`;
3421
+ args.push(options.memoryType);
3422
+ }
3423
+ sql += ` ORDER BY vector_distance_cos(vector, vector32(?))`;
3424
+ args.push(vectorToBlob(queryVector));
3425
+ sql += ` LIMIT ?`;
3426
+ args.push(limit);
3427
+ const result = await client.execute({ sql, args });
3428
+ return result.rows.map((row) => ({
3429
+ id: row.id,
3430
+ agent_id: row.agent_id,
3431
+ agent_role: row.agent_role,
3432
+ session_id: row.session_id,
3433
+ timestamp: row.timestamp,
3434
+ tool_name: row.tool_name,
3435
+ project_name: row.project_name,
3436
+ has_error: row.has_error === 1,
3437
+ raw_text: row.raw_text,
3438
+ vector: row.vector == null ? [] : Array.isArray(row.vector) ? row.vector : Array.from(row.vector),
3439
+ importance: row.importance ?? 5,
3440
+ status: row.status ?? "active",
3441
+ confidence: row.confidence ?? 0.7,
3442
+ last_accessed: row.last_accessed ?? row.timestamp,
3443
+ workspace_id: row.workspace_id ?? null,
3444
+ document_id: row.document_id ?? null,
3445
+ user_id: row.user_id ?? null,
3446
+ char_offset: row.char_offset ?? null,
3447
+ page_number: row.page_number ?? null,
3448
+ source_path: row.source_path ?? null,
3449
+ source_type: row.source_type ?? null
3450
+ }));
3451
+ }
3452
+ async function attachDocumentMetadata(records) {
3453
+ const docIds = [
3454
+ ...new Set(
3455
+ records.map((r) => r.document_id).filter((id) => typeof id === "string" && id.length > 0)
3456
+ )
3457
+ ];
3458
+ if (docIds.length === 0) return records;
3459
+ try {
3460
+ const client = getClient();
3461
+ const placeholders = docIds.map(() => "?").join(",");
3462
+ const result = await client.execute({
3463
+ sql: `SELECT id, filename, mime, source_type, uploaded_at
3464
+ FROM documents
3465
+ WHERE id IN (${placeholders})`,
3466
+ args: docIds
3467
+ });
3468
+ const byId = /* @__PURE__ */ new Map();
3469
+ for (const row of result.rows) {
3470
+ const id = row.id;
3471
+ byId.set(id, {
3472
+ document_id: id,
3473
+ filename: row.filename,
3474
+ mime: row.mime ?? null,
3475
+ source_type: row.source_type ?? null,
3476
+ uploaded_at: row.uploaded_at
3477
+ });
559
3478
  }
560
- try {
561
- symlinkSync(target, linkPath);
562
- created.push(linkName);
563
- } catch (err) {
564
- errors.push(`${linkName}: ${err instanceof Error ? err.message : String(err)}`);
3479
+ for (const record of records) {
3480
+ if (!record.document_id) continue;
3481
+ record.document_metadata = byId.get(record.document_id) ?? null;
565
3482
  }
3483
+ } catch {
566
3484
  }
567
- return { created, skipped, errors };
3485
+ return records;
568
3486
  }
569
- var EMPLOYEES_PATH, DEFAULT_COORDINATOR_TEMPLATE_NAME, COORDINATOR_ROLE, MULTI_INSTANCE_ROLES, IDENTITY_DIR, TEAM_SECTION_RE;
570
- var init_employees = __esm({
571
- "src/lib/employees.ts"() {
3487
+ async function flushTier3(agentId, options) {
3488
+ const client = getClient();
3489
+ const maxAge = options?.maxAgeHours ?? 72;
3490
+ const cutoff = new Date(Date.now() - maxAge * 36e5).toISOString();
3491
+ if (options?.dryRun) {
3492
+ const result2 = await client.execute({
3493
+ sql: `SELECT COUNT(*) as cnt FROM memories
3494
+ WHERE agent_id = ? AND tier = 3 AND status = 'active' AND timestamp < ?`,
3495
+ args: [agentId, cutoff]
3496
+ });
3497
+ return { archived: Number(result2.rows[0]?.cnt ?? 0) };
3498
+ }
3499
+ const result = await client.execute({
3500
+ sql: `UPDATE memories SET status = 'archived'
3501
+ WHERE agent_id = ? AND tier = 3 AND status = 'active' AND timestamp < ?`,
3502
+ args: [agentId, cutoff]
3503
+ });
3504
+ return { archived: result.rowsAffected };
3505
+ }
3506
+ async function disposeStore() {
3507
+ if (_flushTimer !== null) {
3508
+ clearInterval(_flushTimer);
3509
+ _flushTimer = null;
3510
+ }
3511
+ if (_pendingRecords.length > 0) {
3512
+ await flushBatch();
3513
+ }
3514
+ await disposeTurso();
3515
+ _pendingRecords = [];
3516
+ _nextVersion = 1;
3517
+ }
3518
+ function vectorToBlob(vector) {
3519
+ const f32 = vector instanceof Float32Array ? vector : new Float32Array(vector);
3520
+ return JSON.stringify(Array.from(f32));
3521
+ }
3522
+ async function updateMemoryStatus(id, status) {
3523
+ const client = getClient();
3524
+ await client.execute({
3525
+ sql: `UPDATE memories SET status = ? WHERE id = ?`,
3526
+ args: [status, id]
3527
+ });
3528
+ }
3529
+ function reserveVersions(count) {
3530
+ const reserved = [];
3531
+ for (let i = 0; i < count; i++) {
3532
+ reserved.push(_nextVersion++);
3533
+ }
3534
+ return reserved;
3535
+ }
3536
+ async function getMemoryCardinality(agentId) {
3537
+ try {
3538
+ const client = getClient();
3539
+ const result = await client.execute({
3540
+ sql: `SELECT COUNT(*) as cnt FROM memories WHERE agent_id = ? AND COALESCE(status, 'active') = 'active'`,
3541
+ args: [agentId]
3542
+ });
3543
+ return Number(result.rows[0]?.cnt) || 0;
3544
+ } catch {
3545
+ return 0;
3546
+ }
3547
+ }
3548
+ var INIT_MAX_RETRIES, INIT_RETRY_DELAY_MS, _pendingRecords, _batchSize, _flushIntervalMs, _flushTimer, _flushing, _nextVersion;
3549
+ var init_store = __esm({
3550
+ "src/lib/store.ts"() {
572
3551
  "use strict";
3552
+ init_memory();
3553
+ init_database();
3554
+ init_keychain();
573
3555
  init_config();
574
- EMPLOYEES_PATH = path3.join(EXE_AI_DIR, "exe-employees.json");
575
- DEFAULT_COORDINATOR_TEMPLATE_NAME = "exe";
576
- COORDINATOR_ROLE = "COO";
577
- MULTI_INSTANCE_ROLES = /* @__PURE__ */ new Set(["principal engineer", "content production specialist", "staff code reviewer"]);
578
- IDENTITY_DIR = path3.join(EXE_AI_DIR, "identity");
579
- TEAM_SECTION_RE = /^## Team\b.*$/m;
3556
+ init_state_bus();
3557
+ INIT_MAX_RETRIES = 3;
3558
+ INIT_RETRY_DELAY_MS = 1e3;
3559
+ _pendingRecords = [];
3560
+ _batchSize = 20;
3561
+ _flushIntervalMs = 1e4;
3562
+ _flushTimer = null;
3563
+ _flushing = false;
3564
+ _nextVersion = 1;
580
3565
  }
581
3566
  });
582
3567
 
583
- // src/lib/mcp-prefix.ts
584
- function isExeMcpTool(toolName) {
585
- if (!toolName) return false;
586
- return MCP_TOOL_PREFIXES.some((p) => toolName.startsWith(p));
3568
+ // src/lib/git-staleness.ts
3569
+ var git_staleness_exports = {};
3570
+ __export(git_staleness_exports, {
3571
+ clearSessionFileReads: () => clearSessionFileReads,
3572
+ detectStaleFiles: () => detectStaleFiles,
3573
+ recordFileRead: () => recordFileRead
3574
+ });
3575
+ import { execSync as execSync4 } from "child_process";
3576
+ import path10 from "path";
3577
+ function getHeadCommit(cwd) {
3578
+ try {
3579
+ return execSync4("git rev-parse --short HEAD", {
3580
+ cwd,
3581
+ timeout: GIT_TIMEOUT_MS,
3582
+ encoding: "utf-8"
3583
+ }).trim() || null;
3584
+ } catch {
3585
+ return null;
3586
+ }
587
3587
  }
588
- function stripExeMcpPrefix(toolName) {
589
- for (const p of MCP_TOOL_PREFIXES) {
590
- if (toolName.startsWith(p)) return toolName.slice(p.length);
3588
+ function normalizeTrackedPath(cwd, filePath) {
3589
+ const resolved = path10.resolve(cwd, filePath);
3590
+ const relative = path10.relative(cwd, resolved);
3591
+ if (!relative || relative.startsWith("..") || path10.isAbsolute(relative)) {
3592
+ return null;
591
3593
  }
592
- return toolName;
3594
+ return relative;
593
3595
  }
594
- var MCP_PRIMARY_KEY, MCP_LEGACY_KEY, MCP_TOOL_PREFIXES;
595
- var init_mcp_prefix = __esm({
596
- "src/lib/mcp-prefix.ts"() {
3596
+ function formatGitSummary(filePath, summary) {
3597
+ const match = summary.trim().match(/^([a-f0-9]+)\s+([^:]+):\s+(.+)$/i);
3598
+ if (!match) return `${filePath} (${summary.trim()})`;
3599
+ const [, hash, author, subject] = match;
3600
+ return `${filePath} (modified by ${author} in commit ${hash}: "${subject}")`;
3601
+ }
3602
+ async function recordFileRead(sessionUuid, agentId, cwd, filePath) {
3603
+ if (!sessionUuid || !filePath) return;
3604
+ const trackedPath = normalizeTrackedPath(cwd, filePath);
3605
+ if (!trackedPath) return;
3606
+ const client = getClient();
3607
+ await client.execute({
3608
+ sql: `INSERT INTO agent_file_reads (session_uuid, agent_id, file_path, read_at, commit_hash)
3609
+ VALUES (?, ?, ?, ?, ?)
3610
+ ON CONFLICT(session_uuid, file_path) DO UPDATE SET
3611
+ read_at = excluded.read_at,
3612
+ commit_hash = excluded.commit_hash`,
3613
+ args: [
3614
+ sessionUuid,
3615
+ agentId,
3616
+ trackedPath,
3617
+ (/* @__PURE__ */ new Date()).toISOString(),
3618
+ getHeadCommit(cwd)
3619
+ ]
3620
+ });
3621
+ }
3622
+ async function detectStaleFiles(agentId, cwd) {
3623
+ const client = getClient();
3624
+ const recentCutoff = new Date(Date.now() - RECENT_READ_LOOKBACK_MS).toISOString();
3625
+ const result = await client.execute({
3626
+ sql: `SELECT file_path, MAX(read_at) AS read_at
3627
+ FROM agent_file_reads
3628
+ WHERE agent_id = ? AND read_at >= ?
3629
+ GROUP BY file_path
3630
+ ORDER BY MAX(read_at) DESC
3631
+ LIMIT 10`,
3632
+ args: [agentId, recentCutoff]
3633
+ });
3634
+ const stale = [];
3635
+ for (const row of result.rows) {
3636
+ if (stale.length >= MAX_STALE_FILES) break;
3637
+ const record = row;
3638
+ const filePath = String(record.file_path ?? "");
3639
+ const readAt = String(record.read_at ?? "");
3640
+ if (!filePath || !readAt) continue;
3641
+ try {
3642
+ const gitSummary = execSync4(
3643
+ `git log -1 --oneline --after=${JSON.stringify(readAt)} --format="%h %an: %s" -- ${JSON.stringify(filePath)}`,
3644
+ {
3645
+ cwd,
3646
+ timeout: GIT_TIMEOUT_MS,
3647
+ encoding: "utf-8"
3648
+ }
3649
+ ).trim();
3650
+ if (gitSummary) {
3651
+ stale.push(formatGitSummary(filePath, gitSummary));
3652
+ }
3653
+ } catch {
3654
+ }
3655
+ }
3656
+ return stale.slice(0, MAX_STALE_FILES);
3657
+ }
3658
+ async function clearSessionFileReads(sessionUuid) {
3659
+ if (!sessionUuid) return;
3660
+ const client = getClient();
3661
+ await client.execute({
3662
+ sql: "DELETE FROM agent_file_reads WHERE session_uuid = ?",
3663
+ args: [sessionUuid]
3664
+ });
3665
+ }
3666
+ var RECENT_READ_LOOKBACK_MS, MAX_STALE_FILES, GIT_TIMEOUT_MS;
3667
+ var init_git_staleness = __esm({
3668
+ "src/lib/git-staleness.ts"() {
597
3669
  "use strict";
598
- MCP_PRIMARY_KEY = "exe-os";
599
- MCP_LEGACY_KEY = "exe-mem";
600
- MCP_TOOL_PREFIXES = [
601
- `mcp__${MCP_PRIMARY_KEY}__`,
602
- `mcp__${MCP_LEGACY_KEY}__`
603
- ];
3670
+ init_database();
3671
+ RECENT_READ_LOOKBACK_MS = 24 * 60 * 60 * 1e3;
3672
+ MAX_STALE_FILES = 10;
3673
+ GIT_TIMEOUT_MS = 400;
604
3674
  }
605
3675
  });
606
3676
 
@@ -637,8 +3707,8 @@ function findContainingChunk(filePath, snippet) {
637
3707
  try {
638
3708
  const ext = filePath.split(".").pop()?.toLowerCase();
639
3709
  if (ext !== "ts" && ext !== "tsx" && ext !== "js" && ext !== "jsx") return "";
640
- const { readFileSync: readFileSync7 } = __require("fs");
641
- const source = readFileSync7(filePath, "utf8");
3710
+ const { readFileSync: readFileSync8 } = __require("fs");
3711
+ const source = readFileSync8(filePath, "utf8");
642
3712
  const lines = source.split("\n");
643
3713
  const lowerSnippet = snippet.toLowerCase().slice(0, 80);
644
3714
  let matchLine = -1;
@@ -704,9 +3774,9 @@ function extractBash(input2, response) {
704
3774
  }
705
3775
  function extractGrep(input2, response) {
706
3776
  const pattern = String(input2.pattern ?? "");
707
- const path9 = input2.path ? String(input2.path) : "";
3777
+ const path14 = input2.path ? String(input2.path) : "";
708
3778
  const output = String(response.text ?? response.content ?? JSON.stringify(response).slice(0, MAX_OUTPUT));
709
- return `Searched for "${pattern}"${path9 ? ` in ${path9}` : ""}
3779
+ return `Searched for "${pattern}"${path14 ? ` in ${path14}` : ""}
710
3780
  ${output.slice(0, MAX_OUTPUT)}`;
711
3781
  }
712
3782
  function extractGlob(input2, response) {
@@ -806,34 +3876,34 @@ __export(project_name_exports, {
806
3876
  _resetCache: () => _resetCache,
807
3877
  getProjectName: () => getProjectName
808
3878
  });
809
- import { execSync as execSync4 } from "child_process";
810
- import path6 from "path";
3879
+ import { execSync as execSync5 } from "child_process";
3880
+ import path11 from "path";
811
3881
  function getProjectName(cwd) {
812
3882
  const dir = cwd ?? process.cwd();
813
3883
  if (_cached2 && _cachedCwd === dir) return _cached2;
814
3884
  try {
815
3885
  let repoRoot;
816
3886
  try {
817
- const gitCommonDir = execSync4("git rev-parse --path-format=absolute --git-common-dir", {
3887
+ const gitCommonDir = execSync5("git rev-parse --path-format=absolute --git-common-dir", {
818
3888
  cwd: dir,
819
3889
  encoding: "utf8",
820
3890
  timeout: 2e3,
821
3891
  stdio: ["pipe", "pipe", "pipe"]
822
3892
  }).trim();
823
- repoRoot = path6.dirname(gitCommonDir);
3893
+ repoRoot = path11.dirname(gitCommonDir);
824
3894
  } catch {
825
- repoRoot = execSync4("git rev-parse --show-toplevel", {
3895
+ repoRoot = execSync5("git rev-parse --show-toplevel", {
826
3896
  cwd: dir,
827
3897
  encoding: "utf8",
828
3898
  timeout: 2e3,
829
3899
  stdio: ["pipe", "pipe", "pipe"]
830
3900
  }).trim();
831
3901
  }
832
- _cached2 = path6.basename(repoRoot);
3902
+ _cached2 = path11.basename(repoRoot);
833
3903
  _cachedCwd = dir;
834
3904
  return _cached2;
835
3905
  } catch {
836
- _cached2 = path6.basename(dir);
3906
+ _cached2 = path11.basename(dir);
837
3907
  _cachedCwd = dir;
838
3908
  return _cached2;
839
3909
  }
@@ -859,8 +3929,8 @@ __export(daemon_auth_exports, {
859
3929
  readDaemonToken: () => readDaemonToken
860
3930
  });
861
3931
  import crypto2 from "crypto";
862
- import path7 from "path";
863
- import { existsSync as existsSync6, readFileSync as readFileSync5, writeFileSync as writeFileSync5 } from "fs";
3932
+ import path12 from "path";
3933
+ import { existsSync as existsSync9, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "fs";
864
3934
  function normalizeToken(token) {
865
3935
  if (!token) return null;
866
3936
  const trimmed = token.trim();
@@ -868,8 +3938,8 @@ function normalizeToken(token) {
868
3938
  }
869
3939
  function readDaemonToken() {
870
3940
  try {
871
- if (!existsSync6(DAEMON_TOKEN_PATH)) return null;
872
- return normalizeToken(readFileSync5(DAEMON_TOKEN_PATH, "utf8"));
3941
+ if (!existsSync9(DAEMON_TOKEN_PATH)) return null;
3942
+ return normalizeToken(readFileSync6(DAEMON_TOKEN_PATH, "utf8"));
873
3943
  } catch {
874
3944
  return null;
875
3945
  }
@@ -879,7 +3949,7 @@ function ensureDaemonToken(seed) {
879
3949
  if (existing) return existing;
880
3950
  const token = normalizeToken(seed) ?? crypto2.randomBytes(32).toString("hex");
881
3951
  ensurePrivateDirSync(EXE_AI_DIR);
882
- writeFileSync5(DAEMON_TOKEN_PATH, `${token}
3952
+ writeFileSync6(DAEMON_TOKEN_PATH, `${token}
883
3953
  `, "utf8");
884
3954
  enforcePrivateFileSync(DAEMON_TOKEN_PATH);
885
3955
  return token;
@@ -890,7 +3960,7 @@ var init_daemon_auth = __esm({
890
3960
  "use strict";
891
3961
  init_config();
892
3962
  init_secure_files();
893
- DAEMON_TOKEN_PATH = path7.join(EXE_AI_DIR, "exed.token");
3963
+ DAEMON_TOKEN_PATH = path12.join(EXE_AI_DIR, "exed.token");
894
3964
  }
895
3965
  });
896
3966
 
@@ -898,8 +3968,8 @@ var init_daemon_auth = __esm({
898
3968
  init_config();
899
3969
  init_config();
900
3970
  import { spawn } from "child_process";
901
- import { readFileSync as readFileSync6, writeFileSync as writeFileSync6, mkdirSync as mkdirSync4, existsSync as existsSync7, openSync, closeSync } from "fs";
902
- import path8 from "path";
3971
+ import { readFileSync as readFileSync7, writeFileSync as writeFileSync7, mkdirSync as mkdirSync6, existsSync as existsSync10, openSync, closeSync } from "fs";
3972
+ import path13 from "path";
903
3973
  import { fileURLToPath } from "url";
904
3974
 
905
3975
  // src/lib/active-agent.ts
@@ -1062,6 +4132,31 @@ function getActiveAgent() {
1062
4132
  };
1063
4133
  }
1064
4134
 
4135
+ // src/lib/cache-warmth.ts
4136
+ import os3 from "os";
4137
+ import path5 from "path";
4138
+ import { existsSync as existsSync5, mkdirSync as mkdirSync3, readFileSync as readFileSync5, unlinkSync as unlinkSync3, writeFileSync as writeFileSync4 } from "fs";
4139
+ var CACHE_TTL_MS = 5 * 60 * 1e3;
4140
+ var CACHE_DIR2 = path5.join(
4141
+ process.env.EXE_OS_DIR ?? path5.join(os3.homedir(), ".exe-os"),
4142
+ "session-cache"
4143
+ );
4144
+ function getStatePath(sessionKey) {
4145
+ return path5.join(CACHE_DIR2, `cache-warmth-${sessionKey}.json`);
4146
+ }
4147
+ function recordApiCall(sessionKey, agentId, sessionUuid) {
4148
+ try {
4149
+ mkdirSync3(CACHE_DIR2, { recursive: true });
4150
+ const state = {
4151
+ lastApiCallAt: (/* @__PURE__ */ new Date()).toISOString(),
4152
+ agentId,
4153
+ sessionUuid
4154
+ };
4155
+ writeFileSync4(getStatePath(sessionKey), JSON.stringify(state));
4156
+ } catch {
4157
+ }
4158
+ }
4159
+
1065
4160
  // src/lib/error-detector.ts
1066
4161
  init_mcp_prefix();
1067
4162
  import crypto from "crypto";
@@ -1201,16 +4296,16 @@ function errorFingerprint(toolName, errorText) {
1201
4296
 
1202
4297
  // src/lib/worker-gate.ts
1203
4298
  init_config();
1204
- import { readdirSync as readdirSync2, writeFileSync as writeFileSync4, unlinkSync as unlinkSync3, mkdirSync as mkdirSync3, existsSync as existsSync5 } from "fs";
1205
- import path5 from "path";
1206
- var WORKER_PID_DIR = path5.join(EXE_AI_DIR, "worker-pids");
4299
+ import { readdirSync as readdirSync2, writeFileSync as writeFileSync5, unlinkSync as unlinkSync4, mkdirSync as mkdirSync4, existsSync as existsSync6 } from "fs";
4300
+ import path6 from "path";
4301
+ var WORKER_PID_DIR = path6.join(EXE_AI_DIR, "worker-pids");
1207
4302
  var MAX_CONCURRENT_WORKERS = 3;
1208
4303
  function tryAcquireWorkerSlot() {
1209
4304
  try {
1210
- mkdirSync3(WORKER_PID_DIR, { recursive: true });
4305
+ mkdirSync4(WORKER_PID_DIR, { recursive: true });
1211
4306
  const reservationId = `res-${process.pid}-${Date.now()}`;
1212
- const reservationPath = path5.join(WORKER_PID_DIR, `${reservationId}.pid`);
1213
- writeFileSync4(reservationPath, String(process.pid));
4307
+ const reservationPath = path6.join(WORKER_PID_DIR, `${reservationId}.pid`);
4308
+ writeFileSync5(reservationPath, String(process.pid));
1214
4309
  const files = readdirSync2(WORKER_PID_DIR);
1215
4310
  let alive = 0;
1216
4311
  for (const f of files) {
@@ -1227,20 +4322,20 @@ function tryAcquireWorkerSlot() {
1227
4322
  alive++;
1228
4323
  } catch {
1229
4324
  try {
1230
- unlinkSync3(path5.join(WORKER_PID_DIR, f));
4325
+ unlinkSync4(path6.join(WORKER_PID_DIR, f));
1231
4326
  } catch {
1232
4327
  }
1233
4328
  }
1234
4329
  }
1235
4330
  if (alive > MAX_CONCURRENT_WORKERS) {
1236
4331
  try {
1237
- unlinkSync3(reservationPath);
4332
+ unlinkSync4(reservationPath);
1238
4333
  } catch {
1239
4334
  }
1240
4335
  return false;
1241
4336
  }
1242
4337
  try {
1243
- unlinkSync3(reservationPath);
4338
+ unlinkSync4(reservationPath);
1244
4339
  } catch {
1245
4340
  }
1246
4341
  return true;
@@ -1250,12 +4345,12 @@ function tryAcquireWorkerSlot() {
1250
4345
  }
1251
4346
  function registerWorkerPid(pid) {
1252
4347
  try {
1253
- mkdirSync3(WORKER_PID_DIR, { recursive: true });
1254
- writeFileSync4(path5.join(WORKER_PID_DIR, `worker-${pid}.pid`), String(pid));
4348
+ mkdirSync4(WORKER_PID_DIR, { recursive: true });
4349
+ writeFileSync5(path6.join(WORKER_PID_DIR, `worker-${pid}.pid`), String(pid));
1255
4350
  } catch {
1256
4351
  }
1257
4352
  }
1258
- var BACKFILL_LOCK = path5.join(WORKER_PID_DIR, "backfill.lock");
4353
+ var BACKFILL_LOCK = path6.join(WORKER_PID_DIR, "backfill.lock");
1259
4354
 
1260
4355
  // src/adapters/claude/hooks/ingest.ts
1261
4356
  init_employees();
@@ -1269,7 +4364,7 @@ if (!loadConfigSync().autoIngestion) {
1269
4364
  if (!process.env.EXE_OS_DIR) {
1270
4365
  process.env.EXE_OS_DIR = EXE_AI_DIR;
1271
4366
  }
1272
- var WORKER_LOG_PATH = path8.join(EXE_AI_DIR, "workers.log");
4367
+ var WORKER_LOG_PATH = path13.join(EXE_AI_DIR, "workers.log");
1273
4368
  function openWorkerLog() {
1274
4369
  try {
1275
4370
  return openSync(WORKER_LOG_PATH, "a");
@@ -1281,13 +4376,13 @@ var ALLOWED_TOOL_RE = /^(Bash|Edit|Write|Read|Glob|Grep|Agent|apply_patch|mcp__.
1281
4376
  var WRITE_TOOL_RE = /^(Bash|Edit|Write|apply_patch)$/;
1282
4377
  var SUMMARY_INTERVAL = 25;
1283
4378
  var MIN_WRITES_FOR_SUMMARY = 3;
1284
- var COUNTER_DIR = path8.join(EXE_AI_DIR, "session-cache");
4379
+ var COUNTER_DIR = path13.join(EXE_AI_DIR, "session-cache");
1285
4380
  function getCounterPath(sessionId) {
1286
- return path8.join(COUNTER_DIR, `counter-${sessionId}.json`);
4381
+ return path13.join(COUNTER_DIR, `counter-${sessionId}.json`);
1287
4382
  }
1288
4383
  function loadCounter(sessionId) {
1289
4384
  try {
1290
- const raw = readFileSync6(getCounterPath(sessionId), "utf8");
4385
+ const raw = readFileSync7(getCounterPath(sessionId), "utf8");
1291
4386
  return JSON.parse(raw);
1292
4387
  } catch {
1293
4388
  return { total: 0, writes: 0, pipelineWrites: 0, lastSummaryAt: 0, pipelineDetected: false };
@@ -1295,8 +4390,8 @@ function loadCounter(sessionId) {
1295
4390
  }
1296
4391
  function saveCounter(sessionId, counter) {
1297
4392
  try {
1298
- mkdirSync4(COUNTER_DIR, { recursive: true });
1299
- writeFileSync6(getCounterPath(sessionId), JSON.stringify(counter));
4393
+ mkdirSync6(COUNTER_DIR, { recursive: true });
4394
+ writeFileSync7(getCounterPath(sessionId), JSON.stringify(counter));
1300
4395
  } catch {
1301
4396
  }
1302
4397
  }
@@ -1312,11 +4407,11 @@ process.stdin.on("end", async () => {
1312
4407
  try {
1313
4408
  if (process.env.EXE_DEBUG_HOOKS) {
1314
4409
  try {
1315
- const debugPath = path8.join(EXE_AI_DIR, "logs", "hook-stdin-ingest.log");
1316
- mkdirSync4(path8.dirname(debugPath), { recursive: true });
4410
+ const debugPath = path13.join(EXE_AI_DIR, "logs", "hook-stdin-ingest.log");
4411
+ mkdirSync6(path13.dirname(debugPath), { recursive: true });
1317
4412
  const ts = (/* @__PURE__ */ new Date()).toISOString();
1318
4413
  const snippet = input.length > 2e3 ? input.slice(0, 2e3) + "...[truncated]" : input;
1319
- writeFileSync6(debugPath, `[${ts}] ${snippet}
4414
+ writeFileSync7(debugPath, `[${ts}] ${snippet}
1320
4415
  `, { flag: "a" });
1321
4416
  } catch {
1322
4417
  }
@@ -1326,6 +4421,24 @@ process.stdin.on("end", async () => {
1326
4421
  process.exit(0);
1327
4422
  }
1328
4423
  const agent = getActiveAgent();
4424
+ recordApiCall(getSessionKey(), agent.agentId, data.session_id ?? "");
4425
+ if (data.tool_name === "Read" && agent.agentId !== "default" && data.session_id) {
4426
+ try {
4427
+ const filePath = data.tool_input?.file_path;
4428
+ if (filePath) {
4429
+ const { initStore: initStore2 } = await Promise.resolve().then(() => (init_store(), store_exports));
4430
+ await initStore2({ lightweight: true });
4431
+ const { recordFileRead: recordFileRead2 } = await Promise.resolve().then(() => (init_git_staleness(), git_staleness_exports));
4432
+ await recordFileRead2(
4433
+ data.session_id,
4434
+ agent.agentId,
4435
+ data.cwd ?? process.cwd(),
4436
+ filePath
4437
+ );
4438
+ }
4439
+ } catch {
4440
+ }
4441
+ }
1329
4442
  if (/^(Read|Write|Edit|apply_patch)$/.test(data.tool_name) && agent.agentRole !== "COO" && agent.agentId !== "default") {
1330
4443
  const filePath = data.tool_input?.file_path ?? "";
1331
4444
  const exeMatch = filePath.match(/exe\/([^/]+)\//);
@@ -1383,14 +4496,14 @@ Your output files must start with: exe/output/${agent.agentId}-`
1383
4496
  const classification = classifyError(errorText);
1384
4497
  if (classification === "system" && data.session_id) {
1385
4498
  const fp = errorFingerprint(data.tool_name, errorText);
1386
- const fpFilePath = path8.join(COUNTER_DIR, `bug-fingerprints-${data.session_id}.json`);
4499
+ const fpFilePath = path13.join(COUNTER_DIR, `bug-fingerprints-${data.session_id}.json`);
1387
4500
  let fpData = {
1388
4501
  seen: {},
1389
4502
  taskCount: 0,
1390
4503
  lastTaskAt: ""
1391
4504
  };
1392
4505
  try {
1393
- fpData = JSON.parse(readFileSync6(fpFilePath, "utf8"));
4506
+ fpData = JSON.parse(readFileSync7(fpFilePath, "utf8"));
1394
4507
  } catch {
1395
4508
  }
1396
4509
  const now = (/* @__PURE__ */ new Date()).toISOString();
@@ -1408,13 +4521,13 @@ Your output files must start with: exe/output/${agent.agentId}-`
1408
4521
  fpData.seen[fp] = { count: 1, firstAt: now, lastAt: now };
1409
4522
  fpData.taskCount++;
1410
4523
  fpData.lastTaskAt = now;
1411
- const bugWorkerPath = path8.resolve(
1412
- path8.dirname(fileURLToPath(import.meta.url)),
4524
+ const bugWorkerPath = path13.resolve(
4525
+ path13.dirname(fileURLToPath(import.meta.url)),
1413
4526
  "bug-report-worker.js"
1414
4527
  );
1415
- if (existsSync7(bugWorkerPath) && tryAcquireWorkerSlot()) {
4528
+ if (existsSync10(bugWorkerPath) && tryAcquireWorkerSlot()) {
1416
4529
  const stderrFd2 = openWorkerLog();
1417
- const projectName = process.cwd().split(path8.sep).pop() ?? "unknown";
4530
+ const projectName = process.cwd().split(path13.sep).pop() ?? "unknown";
1418
4531
  const bugToolInput = data.tool_input ?? {};
1419
4532
  const bugWorker = spawn(process.execPath, [bugWorkerPath], {
1420
4533
  detached: true,
@@ -1442,8 +4555,8 @@ Your output files must start with: exe/output/${agent.agentId}-`
1442
4555
  }
1443
4556
  }
1444
4557
  try {
1445
- mkdirSync4(COUNTER_DIR, { recursive: true });
1446
- writeFileSync6(fpFilePath, JSON.stringify(fpData));
4558
+ mkdirSync6(COUNTER_DIR, { recursive: true });
4559
+ writeFileSync7(fpFilePath, JSON.stringify(fpData));
1447
4560
  } catch {
1448
4561
  }
1449
4562
  }
@@ -1460,11 +4573,11 @@ Your output files must start with: exe/output/${agent.agentId}-`
1460
4573
  }
1461
4574
  const callsSinceLastSummary = counter.total - counter.lastSummaryAt;
1462
4575
  if (callsSinceLastSummary >= SUMMARY_INTERVAL && counter.writes >= MIN_WRITES_FOR_SUMMARY) {
1463
- const summaryWorkerPath = path8.resolve(
1464
- path8.dirname(fileURLToPath(import.meta.url)),
4576
+ const summaryWorkerPath = path13.resolve(
4577
+ path13.dirname(fileURLToPath(import.meta.url)),
1465
4578
  "summary-worker.js"
1466
4579
  );
1467
- if (existsSync7(summaryWorkerPath) && tryAcquireWorkerSlot()) {
4580
+ if (existsSync10(summaryWorkerPath) && tryAcquireWorkerSlot()) {
1468
4581
  const stderrFd2 = openWorkerLog();
1469
4582
  const summaryWorker = spawn(process.execPath, [summaryWorkerPath], {
1470
4583
  detached: true,
@@ -1510,13 +4623,13 @@ WARNING: You are writing code without running the build pipeline. If this task a
1510
4623
  const bashOutput = typeof data.tool_response === "string" ? data.tool_response : JSON.stringify(data.tool_response ?? "");
1511
4624
  const commitMatch = bashOutput.match(/\[(\S+)\s+([a-f0-9]{7,40})\]\s+(.+)/);
1512
4625
  if (commitMatch) {
1513
- const commitWorkerPath = path8.resolve(
1514
- path8.dirname(fileURLToPath(import.meta.url)),
4626
+ const commitWorkerPath = path13.resolve(
4627
+ path13.dirname(fileURLToPath(import.meta.url)),
1515
4628
  "commit-complete.js"
1516
4629
  );
1517
- if (existsSync7(commitWorkerPath) && tryAcquireWorkerSlot()) {
4630
+ if (existsSync10(commitWorkerPath) && tryAcquireWorkerSlot()) {
1518
4631
  const stderrFd2 = openWorkerLog();
1519
- const projectName = process.cwd().split(path8.sep).pop() ?? "unknown";
4632
+ const projectName = process.cwd().split(path13.sep).pop() ?? "unknown";
1520
4633
  const commitWorker = spawn(process.execPath, [commitWorkerPath], {
1521
4634
  detached: true,
1522
4635
  stdio: ["ignore", "ignore", stderrFd2],
@@ -1542,8 +4655,8 @@ WARNING: You are writing code without running the build pipeline. If this task a
1542
4655
  }
1543
4656
  let sentViaDaemon = false;
1544
4657
  try {
1545
- const socketPath = process.env.EXE_DAEMON_SOCK ?? process.env.EXE_EMBED_SOCK ?? path8.join(EXE_AI_DIR, "exed.sock");
1546
- if (existsSync7(socketPath)) {
4658
+ const socketPath = process.env.EXE_DAEMON_SOCK ?? process.env.EXE_EMBED_SOCK ?? path13.join(EXE_AI_DIR, "exed.sock");
4659
+ if (existsSync10(socketPath)) {
1547
4660
  const { extractSemanticText: extractSemanticText2 } = await Promise.resolve().then(() => (init_content_extractor(), content_extractor_exports));
1548
4661
  const { getProjectName: getProjectName2 } = await Promise.resolve().then(() => (init_project_name(), project_name_exports));
1549
4662
  const { canCoordinate: canCoordinate2 } = await Promise.resolve().then(() => (init_employees(), employees_exports));
@@ -1552,9 +4665,9 @@ WARNING: You are writing code without running the build pipeline. If this task a
1552
4665
  if (rawText.length >= 50) {
1553
4666
  let taskId = null;
1554
4667
  try {
1555
- const cachePath = path8.join(EXE_AI_DIR, "session-cache", `current-task-${agent.agentId}.json`);
1556
- if (existsSync7(cachePath)) {
1557
- const cached = JSON.parse(readFileSync6(cachePath, "utf8"));
4668
+ const cachePath = path13.join(EXE_AI_DIR, "session-cache", `current-task-${agent.agentId}.json`);
4669
+ if (existsSync10(cachePath)) {
4670
+ const cached = JSON.parse(readFileSync7(cachePath, "utf8"));
1558
4671
  taskId = cached.taskId ?? null;
1559
4672
  }
1560
4673
  } catch {
@@ -1600,8 +4713,8 @@ WARNING: You are writing code without running the build pipeline. If this task a
1600
4713
  setTimeout(() => process.exit(0), 10);
1601
4714
  return;
1602
4715
  }
1603
- const workerPath = path8.resolve(
1604
- path8.dirname(fileURLToPath(import.meta.url)),
4716
+ const workerPath = path13.resolve(
4717
+ path13.dirname(fileURLToPath(import.meta.url)),
1605
4718
  "ingest-worker.js"
1606
4719
  );
1607
4720
  if (!tryAcquireWorkerSlot()) {