@askexenow/exe-os 0.9.37 → 0.9.39

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/deploy/stack-manifests/v0.9.json +55 -0
  2. package/dist/bin/backfill-conversations.js +36 -9
  3. package/dist/bin/backfill-responses.js +36 -9
  4. package/dist/bin/backfill-vectors.js +36 -9
  5. package/dist/bin/cleanup-stale-review-tasks.js +37 -10
  6. package/dist/bin/cli.js +624 -204
  7. package/dist/bin/exe-agent.js +13 -5
  8. package/dist/bin/exe-assign.js +36 -9
  9. package/dist/bin/exe-boot.js +50 -20
  10. package/dist/bin/exe-call.js +134 -342
  11. package/dist/bin/exe-dispatch.js +36 -9
  12. package/dist/bin/exe-doctor.js +39 -12
  13. package/dist/bin/exe-export-behaviors.js +38 -11
  14. package/dist/bin/exe-forget.js +36 -9
  15. package/dist/bin/exe-gateway.js +64 -15
  16. package/dist/bin/exe-heartbeat.js +37 -10
  17. package/dist/bin/exe-kill.js +36 -9
  18. package/dist/bin/exe-launch-agent.js +287 -1081
  19. package/dist/bin/exe-new-employee.js +100 -14
  20. package/dist/bin/exe-pending-messages.js +36 -9
  21. package/dist/bin/exe-pending-notifications.js +36 -9
  22. package/dist/bin/exe-pending-reviews.js +36 -9
  23. package/dist/bin/exe-rename.js +1780 -204
  24. package/dist/bin/exe-review.js +36 -9
  25. package/dist/bin/exe-search.js +38 -11
  26. package/dist/bin/exe-session-cleanup.js +38 -11
  27. package/dist/bin/exe-start-codex.js +38 -11
  28. package/dist/bin/exe-start-opencode.js +38 -11
  29. package/dist/bin/exe-status.js +37 -10
  30. package/dist/bin/exe-team.js +36 -9
  31. package/dist/bin/git-sweep.js +36 -9
  32. package/dist/bin/graph-backfill.js +36 -9
  33. package/dist/bin/graph-export.js +36 -9
  34. package/dist/bin/install.js +70 -3
  35. package/dist/bin/intercom-check.js +38 -11
  36. package/dist/bin/scan-tasks.js +36 -9
  37. package/dist/bin/setup.js +20 -19
  38. package/dist/bin/shard-migrate.js +36 -9
  39. package/dist/bin/stack-update.js +308 -0
  40. package/dist/gateway/index.js +62 -13
  41. package/dist/hooks/bug-report-worker.js +40 -12
  42. package/dist/hooks/codex-stop-task-finalizer.js +38 -11
  43. package/dist/hooks/commit-complete.js +36 -9
  44. package/dist/hooks/error-recall.js +38 -11
  45. package/dist/hooks/ingest.js +38 -10
  46. package/dist/hooks/instructions-loaded.js +44 -12
  47. package/dist/hooks/notification.js +36 -9
  48. package/dist/hooks/post-compact.js +36 -9
  49. package/dist/hooks/post-tool-combined.js +39 -12
  50. package/dist/hooks/pre-compact.js +37 -10
  51. package/dist/hooks/pre-tool-use.js +38 -10
  52. package/dist/hooks/prompt-submit.js +43 -15
  53. package/dist/hooks/session-end.js +37 -10
  54. package/dist/hooks/session-start.js +49 -16
  55. package/dist/hooks/stop.js +37 -10
  56. package/dist/hooks/subagent-stop.js +36 -9
  57. package/dist/hooks/summary-worker.js +45 -18
  58. package/dist/index.js +60 -11
  59. package/dist/lib/consolidation.js +2 -1
  60. package/dist/lib/employee-templates.js +4 -3
  61. package/dist/lib/employees.js +2 -1
  62. package/dist/lib/exe-daemon.js +11229 -10537
  63. package/dist/lib/hybrid-search.js +38 -11
  64. package/dist/lib/identity.js +8 -3
  65. package/dist/lib/schedules.js +36 -9
  66. package/dist/lib/store.js +36 -9
  67. package/dist/mcp/server.js +6873 -6249
  68. package/dist/mcp/tools/create-task.js +10 -4
  69. package/dist/runtime/index.js +36 -9
  70. package/dist/tui/App.js +42 -13
  71. package/package.json +4 -1
  72. package/stack.release.json +31 -0
  73. package/stack.release.schema.json +31 -0
@@ -0,0 +1,308 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/bin/stack-update.ts
4
+ import { readFileSync as readFileSync2 } from "fs";
5
+
6
+ // src/lib/is-main.ts
7
+ import { realpathSync } from "fs";
8
+ import { fileURLToPath } from "url";
9
/**
 * Decide whether this module is the process entry point.
 * Compares the resolved path of `process.argv[1]` against the module's own
 * `import.meta.url`; falls back to string comparison when either path cannot
 * be resolved on disk. Always false when loaded via the MCP server bundle.
 * @param {string} importMetaUrl - The caller's `import.meta.url`.
 * @returns {boolean} true only when the script was invoked directly.
 */
function isMainModule(importMetaUrl) {
  const entry = process.argv[1];
  if (entry == null) return false;
  // The MCP server imports these bins as libraries; never auto-run there.
  if (entry.includes("mcp/server")) return false;
  try {
    return realpathSync(entry) === realpathSync(fileURLToPath(importMetaUrl));
  } catch {
    // One of the paths does not exist (e.g. virtual entry) — compare textually.
    if (importMetaUrl === `file://${entry}`) return true;
    return importMetaUrl === new URL(entry, "file://").href;
  }
}
20
+
21
+ // src/lib/stack-update.ts
22
+ import { execFileSync } from "child_process";
23
+ import { existsSync, mkdirSync, readFileSync, renameSync, writeFileSync } from "fs";
24
+ import http from "http";
25
+ import https from "https";
26
+ import path from "path";
27
/**
 * Parse and validate a stack manifest JSON document.
 * Requires `schemaVersion === 1`, a `latest` version string, and a `stacks`
 * object; every release must define `services`, each with `image` and `env`.
 * Backfills `release.version` from its key when absent.
 * @param {string} raw - Raw JSON text of the manifest.
 * @returns {object} The validated (and lightly normalized) manifest.
 * @throws {Error} On an unsupported schema version or a malformed manifest.
 */
function parseStackManifest(raw) {
  const manifest = JSON.parse(raw);
  if (manifest.schemaVersion !== 1) {
    throw new Error("Unsupported stack manifest schemaVersion");
  }
  if (!manifest.latest || !manifest.stacks || typeof manifest.stacks !== "object") {
    throw new Error("Invalid stack manifest: latest and stacks are required");
  }
  for (const [version, release] of Object.entries(manifest.stacks)) {
    // Releases may omit their own version; the map key is authoritative.
    if (!release.version) release.version = version;
    if (!release.services || typeof release.services !== "object") {
      throw new Error(`Invalid stack manifest: release ${version} has no services`);
    }
    for (const [serviceName, service] of Object.entries(release.services)) {
      if (!service.image || !service.env) {
        throw new Error(`Invalid stack manifest: ${version}.${serviceName} requires image and env`);
      }
    }
  }
  return manifest;
}
46
/**
 * Load a stack manifest from an http(s) URL or a local file path.
 * @param {string} ref - Manifest location (URL or filesystem path).
 * @param {(ref: string) => Promise<string>} [fetchText] - Injectable fetcher for tests.
 * @returns {Promise<object>} The parsed, validated manifest.
 */
async function loadStackManifest(ref, fetchText = defaultFetchText) {
  const isRemote = /^https?:\/\//.test(ref);
  const raw = isRemote ? await fetchText(ref) : readFileSync(ref, "utf8");
  return parseStackManifest(raw);
}
50
/**
 * Parse dotenv-style text into a key → value Map.
 * Blank lines and `#` comments are skipped; the value keeps everything after
 * the first `=` verbatim (including leading whitespace), while the key is trimmed.
 * @param {string} raw - Raw .env file contents.
 * @returns {Map<string, string>} Parsed entries.
 */
function parseEnv(raw) {
  const entries = new Map();
  for (const line of raw.split(/\r?\n/)) {
    const stripped = line.trim();
    if (stripped === "" || stripped.startsWith("#")) continue;
    const eq = line.indexOf("=");
    // No "=" or an empty key: not an assignment line.
    if (eq <= 0) continue;
    entries.set(line.slice(0, eq).trim(), line.slice(eq + 1));
  }
  return entries;
}
61
/**
 * Apply key → value updates to dotenv-style text, preserving comments,
 * ordering, and unrelated lines. Keys present in `raw` are rewritten in
 * place; keys not yet present are appended. Output always ends with exactly
 * one trailing newline.
 * @param {string} raw - Original .env file contents.
 * @param {Record<string, string>} updates - Keys to set.
 * @returns {string} The patched .env text.
 */
function patchEnv(raw, updates) {
  const applied = new Set();
  const lines = raw.replace(/\n$/, "").split(/\r?\n/);
  const patched = lines.map((line) => {
    const idx = line.indexOf("=");
    if (idx <= 0 || line.trim().startsWith("#")) return line;
    const key = line.slice(0, idx).trim();
    // Object.hasOwn instead of `in`: `in` also matches inherited properties,
    // so a line like `toString=...` would be rewritten with
    // Object.prototype.toString rather than left alone.
    if (!Object.hasOwn(updates, key)) return line;
    applied.add(key);
    return `${key}=${updates[key]}`;
  });
  for (const [key, value] of Object.entries(updates)) {
    if (!applied.has(key)) patched.push(`${key}=${value}`);
  }
  return patched.join("\n").replace(/\n*$/, "\n");
}
77
/**
 * Compute the set of image-tag env changes needed to move the stack to a
 * target release. Pure planning only — nothing is written or executed.
 * @param {object} manifest - Validated stack manifest.
 * @param {string} envRaw - Current .env file contents.
 * @param {string} [targetVersion] - Release to plan for; defaults to `manifest.latest`.
 * @returns {{manifest: object, release: object, targetVersion: string, changes: Array, breakingChanges: Array}}
 * @throws {Error} When the target version is absent from the manifest.
 */
function createStackUpdatePlan(manifest, envRaw, targetVersion) {
  const version = targetVersion ?? manifest.latest;
  const release = manifest.stacks[version];
  if (!release) throw new Error(`Stack version ${version} not found in manifest`);
  const env = parseEnv(envRaw);
  const changes = [];
  for (const [service, spec] of Object.entries(release.services)) {
    const current = env.get(spec.env);
    // Only record keys whose value actually differs from the target image.
    if (current === spec.image) continue;
    changes.push({ key: spec.env, before: current, after: spec.image, service });
  }
  return {
    manifest,
    release,
    targetVersion: version,
    changes,
    breakingChanges: release.breakingChanges ?? []
  };
}
97
/**
 * Ensure every breaking change that requires confirmation has been explicitly
 * acknowledged by id; otherwise throw with a human-readable summary and the
 * exact `--allow-breaking` re-run hint.
 * @param {{targetVersion: string, breakingChanges: Array}} plan
 * @param {string[]} allowedIds - Ids the operator has confirmed.
 * @throws {Error} Listing every unconfirmed breaking change.
 */
function assertBreakingChangesAllowed(plan, allowedIds) {
  const required = plan.breakingChanges.filter((c) => c.requiresConfirmation !== false);
  const missing = required.filter((c) => !allowedIds.includes(c.id));
  if (missing.length === 0) return;
  const details = missing
    .map((c) => `- ${c.id}: ${c.title}\n  ${c.description}\n  Action: ${c.requiredAction ?? "Review release notes."}`)
    .join("\n");
  throw new Error(
    `Stack ${plan.targetVersion} has breaking changes that require confirmation:\n${details}\nRe-run with --allow-breaking ${missing.map((c) => c.id).join(",")}`
  );
}
111
/**
 * Execute a stack update end-to-end: plan, back up .env, patch image tags,
 * `docker compose pull` + `up -d`, verify health, and write a lock file.
 * On any failure the original .env is restored and a best-effort
 * `docker compose up -d` rollback is attempted before rethrowing.
 * @param {object} options - manifestRef, envFile, composeFile, and optional
 *   targetVersion, dryRun, lockFile, exec/now/fetchText injection points,
 *   allowedBreakingChangeIds, healthRetries, healthDelayMs.
 * @returns {Promise<object>} `{status: "planned"|"updated", targetVersion, changes, lockFile, backupEnvFile?}`
 * @throws {Error} When breaking changes are unconfirmed or the update fails.
 */
async function runStackUpdate(options) {
  const exec = options.exec ?? defaultExec;
  const now = options.now ?? (() => new Date());
  const manifest = await loadStackManifest(options.manifestRef, options.fetchText);
  const originalEnv = readFileSync(options.envFile, "utf8");
  const plan = createStackUpdatePlan(manifest, originalEnv, options.targetVersion);
  assertBreakingChangesAllowed(plan, options.allowedBreakingChangeIds ?? []);
  const envDir = path.dirname(options.envFile);
  const lockFile = options.lockFile ?? path.join(envDir, ".exe-stack-lock.json");
  // Nothing to do (or planning only): report without touching the system.
  if (options.dryRun || plan.changes.length === 0) {
    return { status: "planned", targetVersion: plan.targetVersion, changes: plan.changes, lockFile };
  }
  // Snapshot the current .env before any mutation.
  const backupDir = path.join(envDir, ".exe-stack-backups");
  mkdirSync(backupDir, { recursive: true });
  const stamp = now().toISOString().replace(/[:.]/g, "-");
  const backupEnvFile = path.join(backupDir, `env-${stamp}.bak`);
  writeFileSync(backupEnvFile, originalEnv, { mode: 0o600 });
  // Atomically replace .env via write-to-temp + rename.
  const updates = Object.fromEntries(plan.changes.map((c) => [c.key, c.after]));
  const tmp = `${options.envFile}.tmp-${process.pid}`;
  writeFileSync(tmp, patchEnv(originalEnv, updates), { mode: 0o600 });
  renameSync(tmp, options.envFile);
  const composeArgs = ["compose", "--file", options.composeFile, "--env-file", options.envFile];
  try {
    exec("docker", [...composeArgs, "pull"]);
    exec("docker", [...composeArgs, "up", "-d"]);
    await verifyReleaseHealth(plan.release, options.healthRetries ?? 12, options.healthDelayMs ?? 5e3);
    const lock = { stackVersion: plan.targetVersion, updatedAt: now().toISOString(), services: plan.release.services };
    writeFileSync(lockFile, JSON.stringify(lock, null, 2) + "\n");
    return { status: "updated", targetVersion: plan.targetVersion, changes: plan.changes, backupEnvFile, lockFile };
  } catch (err) {
    // Restore the previous .env, then best-effort relaunch the old stack.
    writeFileSync(options.envFile, originalEnv, { mode: 0o600 });
    try {
      exec("docker", [...composeArgs, "up", "-d"]);
    } catch {
      // Rollback `up` is best-effort; the original failure is what we surface.
    }
    const reason = err instanceof Error ? err.message : String(err);
    throw new Error(`Stack update failed and rollback was attempted: ${reason}`);
  }
}
149
/**
 * Poll every service in the release that declares a `healthUrl` until it
 * answers 2xx, sequentially, propagating the first failure.
 * @param {object} release - Release entry from the manifest.
 * @param {number} retries - Attempts per service.
 * @param {number} delayMs - Delay between attempts.
 */
async function verifyReleaseHealth(release, retries, delayMs) {
  for (const [serviceName, service] of Object.entries(release.services)) {
    const url = service.healthUrl;
    if (!url) continue; // services without a health endpoint are trusted
    await waitForHttpOk(url, retries, delayMs, serviceName);
  }
}
155
/**
 * Retry an HTTP GET until it returns a 2xx status, sleeping `delayMs` between
 * attempts. Remembers the last failure (status or error message) for the
 * final error text.
 * @param {string} url - Endpoint to probe.
 * @param {number} retries - Maximum attempts.
 * @param {number} delayMs - Sleep between attempts.
 * @param {string} label - Service name used in the failure message.
 * @throws {Error} After all attempts fail.
 */
async function waitForHttpOk(url, retries, delayMs, label) {
  let lastFailure = "";
  for (let attempt = 0; attempt < retries; attempt++) {
    try {
      const status = await httpStatus(url);
      if (status >= 200 && status < 300) return;
      lastFailure = `HTTP ${status}`;
    } catch (err) {
      lastFailure = err instanceof Error ? err.message : String(err);
    }
    const isLastAttempt = attempt === retries - 1;
    if (!isLastAttempt) await new Promise((resolve) => setTimeout(resolve, delayMs));
  }
  throw new Error(`Health check failed for ${label} (${url}): ${lastFailure}`);
}
169
/**
 * GET a URL and resolve with its HTTP status code (0 when absent).
 * Uses node:http or node:https based on the protocol, with a 5s socket
 * timeout that surfaces as a rejected "timeout" error.
 * @param {string} urlString - Absolute http(s) URL.
 * @returns {Promise<number>} The response status code.
 */
function httpStatus(urlString) {
  return new Promise((resolve, reject) => {
    const target = new URL(urlString);
    const transport = target.protocol === "https:" ? https : http;
    const req = transport.request(target, { method: "GET", timeout: 5e3 }, (res) => {
      res.resume(); // drain the body so the socket is released
      resolve(res.statusCode ?? 0);
    });
    req.on("timeout", () => req.destroy(new Error("timeout")));
    req.on("error", reject);
    req.end();
  });
}
182
/**
 * Default command runner: synchronous execFile with inherited stdio so
 * docker output streams straight to the operator's terminal.
 * Throws when the command exits non-zero.
 * @param {string} cmd - Executable name.
 * @param {string[]} args - Arguments.
 * @param {{cwd?: string}} [opts] - Optional working directory.
 */
function defaultExec(cmd, args, opts) {
  const cwd = opts?.cwd;
  execFileSync(cmd, args, { stdio: "inherit", cwd });
}
185
/**
 * Default manifest fetcher: GET a URL with the global fetch and return the
 * body as text, failing loudly on any non-2xx status.
 * @param {string} ref - URL to fetch.
 * @returns {Promise<string>} Response body text.
 * @throws {Error} When the response is not ok.
 */
async function defaultFetchText(ref) {
  const response = await fetch(ref);
  if (!response.ok) throw new Error(`Failed to fetch ${ref}: HTTP ${response.status}`);
  return response.text();
}
190
/**
 * Resolve default stack file locations. Environment variables win; otherwise
 * prefer files in the current directory, falling back to /opt/exe-stack.
 * @returns {{composeFile: string, envFile: string, manifestRef: string}}
 */
function defaultStackPaths() {
  const localCompose = path.resolve("docker-compose.yml");
  const localEnv = path.resolve(".env");
  // env override → cwd file if present → system-wide install path
  const pick = (override, local, fallback) => override || (existsSync(local) ? local : fallback);
  return {
    composeFile: pick(process.env.EXE_STACK_COMPOSE_FILE, localCompose, "/opt/exe-stack/docker-compose.yml"),
    envFile: pick(process.env.EXE_STACK_ENV_FILE, localEnv, "/opt/exe-stack/.env"),
    manifestRef: process.env.EXE_STACK_MANIFEST || "https://updates.askexe.com/stack-manifest.json"
  };
}
199
+
200
+ // src/bin/stack-update.ts
201
/**
 * Parse CLI arguments for `exe-os stack-update`.
 * Supports both `--flag value` and `--flag=value` forms. Fix over the
 * original: inline values are now taken as everything after the first `=`
 * for every flag — previously `--target=` and `--allow-breaking=` used
 * `split("=")[1]` and silently truncated values containing `=`.
 * `--help`/`-h` prints usage and exits 0; unknown options throw.
 * @param {string[]} args - process.argv.slice(2).
 * @returns {object} Parsed options with defaults from defaultStackPaths().
 * @throws {Error} On an unrecognized option.
 */
function parseArgs(args) {
  const defaults = defaultStackPaths();
  const opts = {
    manifestRef: defaults.manifestRef,
    composeFile: defaults.composeFile,
    envFile: defaults.envFile,
    dryRun: false,
    check: false,
    yes: false,
    allowedBreakingChangeIds: []
  };
  // Everything after the first "=" — values may themselves contain "=".
  const inlineValue = (arg) => arg.slice(arg.indexOf("=") + 1);
  const addBreakingIds = (csv) => {
    opts.allowedBreakingChangeIds.push(...csv.split(",").map((s) => s.trim()).filter(Boolean));
  };
  for (let i = 0; i < args.length; i++) {
    const arg = args[i];
    const next = () => args[++i] ?? "";
    if (arg === "--manifest") opts.manifestRef = next();
    else if (arg.startsWith("--manifest=")) opts.manifestRef = inlineValue(arg);
    else if (arg === "--target") opts.targetVersion = next();
    else if (arg.startsWith("--target=")) opts.targetVersion = inlineValue(arg);
    else if (arg === "--compose-file") opts.composeFile = next();
    else if (arg.startsWith("--compose-file=")) opts.composeFile = inlineValue(arg);
    else if (arg === "--env-file") opts.envFile = next();
    else if (arg.startsWith("--env-file=")) opts.envFile = inlineValue(arg);
    else if (arg === "--lock-file") opts.lockFile = next();
    else if (arg.startsWith("--lock-file=")) opts.lockFile = inlineValue(arg); // parity with the other path flags
    else if (arg === "--dry-run") opts.dryRun = true;
    else if (arg === "--check") opts.check = true;
    else if (arg === "--yes" || arg === "-y") opts.yes = true;
    else if (arg === "--allow-breaking") addBreakingIds(next());
    else if (arg.startsWith("--allow-breaking=")) addBreakingIds(inlineValue(arg));
    else if (arg === "--help" || arg === "-h") {
      printHelp();
      process.exit(0);
    } else {
      throw new Error(`Unknown option: ${arg}`);
    }
  }
  return opts;
}
238
/** Print CLI usage for `exe-os stack-update` to stdout. */
function printHelp() {
  const usage = `exe-os stack-update \u2014 update a self-hosted Exe OS stack from a pinned manifest

Usage:
  exe-os stack-update [--manifest <path-or-url>] [--target <version>] [--yes]

Options:
  --manifest <ref>        Stack manifest JSON path or URL (default: updates.askexe.com)
  --target <version>      Stack version to install (default: manifest.latest)
  --compose-file <path>   docker-compose.yml path (default: ./docker-compose.yml or /opt/exe-stack/docker-compose.yml)
  --env-file <path>       .env path (default: ./.env or /opt/exe-stack/.env)
  --lock-file <path>      Lock file path (default: beside .env)
  --check                 Print available changes only
  --dry-run               Plan only; do not run Docker
  --allow-breaking <ids>  Confirm breaking changes, comma-separated
  -y, --yes               Non-interactive confirmation
`;
  console.log(usage);
}
256
/**
 * Print the planned env-key / image-tag changes, or a success note when the
 * stack already matches the target manifest.
 * @param {Array<{service: string, key: string, before?: string, after: string}>} changes
 */
function printChanges(changes) {
  if (changes.length === 0) {
    console.log("\u2705 Stack already matches target manifest.");
    return;
  }
  console.log("Planned image tag changes:");
  for (const change of changes) {
    console.log(`  - ${change.service}: ${change.key}`);
    console.log(`      ${change.before ?? "<unset>"} \u2192 ${change.after}`);
  }
}
267
/**
 * Print breaking-change notices for the planned release; silent when none.
 * @param {Array<{id: string, title: string, description: string, requiredAction?: string, expectedDowntimeMinutes?: number}>} changes
 */
function printBreaking(changes) {
  if (changes.length === 0) return;
  console.log("\nBreaking-change notices:");
  for (const notice of changes) {
    console.log(`  - ${notice.id}: ${notice.title}`);
    console.log(`    ${notice.description}`);
    if (notice.requiredAction) console.log(`    Action: ${notice.requiredAction}`);
    if (notice.expectedDowntimeMinutes) console.log(`    Expected downtime: ${notice.expectedDowntimeMinutes} minutes`);
  }
}
277
/**
 * CLI entry point: load the manifest, print the plan (changes + breaking
 * notices), then either stop (--check/--dry-run), refuse without --yes
 * (exit 2), or run the update and report the result.
 * @returns {Promise<void>}
 */
async function main() {
  const opts = parseArgs(process.argv.slice(2));
  const manifest = await loadStackManifest(opts.manifestRef);
  const envRaw = readFileSync2(opts.envFile, "utf8");
  const plan = createStackUpdatePlan(manifest, envRaw, opts.targetVersion);
  console.log(`Exe OS stack target: ${plan.targetVersion}`);
  console.log(`Manifest: ${opts.manifestRef}`);
  console.log(`Compose: ${opts.composeFile}`);
  console.log(`Env: ${opts.envFile}\n`);
  printChanges(plan.changes);
  printBreaking(plan.breakingChanges);
  if (opts.check || opts.dryRun) return;
  // Mutating the stack requires an explicit confirmation flag.
  if (!opts.yes) {
    console.error("\nRefusing to update without --yes. Re-run with --yes after reviewing the plan.");
    process.exit(2);
  }
  const result = await runStackUpdate(opts);
  console.log(`\n\u2705 Stack ${result.status}: ${result.targetVersion}`);
  if (result.backupEnvFile) console.log(`Backup env: ${result.backupEnvFile}`);
  console.log(`Lock file: ${result.lockFile}`);
}
300
+ if (isMainModule(import.meta.url)) {
301
+ main().catch((err) => {
302
+ console.error(err instanceof Error ? err.message : String(err));
303
+ process.exit(1);
304
+ });
305
+ }
306
+ export {
307
+ main as runStackUpdateCli
308
+ };
@@ -905,7 +905,8 @@ function isMultiInstance(agentName, employees) {
905
905
  return MULTI_INSTANCE_ROLES.has(emp.role.toLowerCase());
906
906
  }
907
907
  function addEmployee(employees, employee) {
908
- const normalized = { ...employee, name: employee.name.toLowerCase() };
908
+ const { systemPrompt: _legacyPrompt, ...rest } = employee;
909
+ const normalized = { ...rest, name: employee.name.toLowerCase() };
909
910
  if (employees.some((e) => e.name.toLowerCase() === normalized.name)) {
910
911
  throw new Error(`Employee '${normalized.name}' already exists`);
911
912
  }
@@ -3907,7 +3908,7 @@ __export(shard_manager_exports, {
3907
3908
  shardExists: () => shardExists
3908
3909
  });
3909
3910
  import path8 from "path";
3910
- import { existsSync as existsSync8, mkdirSync as mkdirSync2, readdirSync } from "fs";
3911
+ import { existsSync as existsSync8, mkdirSync as mkdirSync2, readdirSync, renameSync as renameSync3, statSync as statSync2 } from "fs";
3911
3912
  import { createClient as createClient2 } from "@libsql/client";
3912
3913
  function initShardManager(encryptionKey) {
3913
3914
  _encryptionKey = encryptionKey;
@@ -3929,7 +3930,7 @@ function getShardClient(projectName) {
3929
3930
  if (!_encryptionKey) {
3930
3931
  throw new Error("Shard manager not initialized. Call initShardManager() first.");
3931
3932
  }
3932
- const safeName = projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
3933
+ const safeName = safeShardName(projectName);
3933
3934
  if (!safeName || safeName === "unknown") {
3934
3935
  throw new Error(`Invalid project name for shard: "${projectName}" (resolved to "${safeName}")`);
3935
3936
  }
@@ -3951,9 +3952,12 @@ function getShardClient(projectName) {
3951
3952
  return client;
3952
3953
  }
3953
3954
  function shardExists(projectName) {
3954
- const safeName = projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
3955
+ const safeName = safeShardName(projectName);
3955
3956
  return existsSync8(path8.join(SHARDS_DIR, `${safeName}.db`));
3956
3957
  }
3958
+ function safeShardName(projectName) {
3959
+ return projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
3960
+ }
3957
3961
  function listShards() {
3958
3962
  if (!existsSync8(SHARDS_DIR)) return [];
3959
3963
  return readdirSync(SHARDS_DIR).filter((f) => f.endsWith(".db")).map((f) => f.replace(".db", ""));
@@ -4047,7 +4051,8 @@ async function ensureShardSchema(client) {
4047
4051
  "ALTER TABLE memories ADD COLUMN token_cost REAL",
4048
4052
  "ALTER TABLE memories ADD COLUMN audience TEXT",
4049
4053
  "ALTER TABLE memories ADD COLUMN language_type TEXT",
4050
- "ALTER TABLE memories ADD COLUMN parent_memory_id TEXT"
4054
+ "ALTER TABLE memories ADD COLUMN parent_memory_id TEXT",
4055
+ "ALTER TABLE memories ADD COLUMN deleted_at TEXT"
4051
4056
  ]) {
4052
4057
  try {
4053
4058
  await client.execute(col);
@@ -4143,9 +4148,32 @@ async function ensureShardSchema(client) {
4143
4148
  }
4144
4149
  }
4145
4150
  async function getReadyShardClient(projectName) {
4146
- const client = getShardClient(projectName);
4147
- await ensureShardSchema(client);
4148
- return client;
4151
+ const safeName = safeShardName(projectName);
4152
+ let client = getShardClient(projectName);
4153
+ try {
4154
+ await ensureShardSchema(client);
4155
+ return client;
4156
+ } catch (err) {
4157
+ const message = err instanceof Error ? err.message : String(err);
4158
+ if (!/SQLITE_NOTADB|file is not a database/i.test(message)) throw err;
4159
+ client.close();
4160
+ _shards.delete(safeName);
4161
+ _shardLastAccess.delete(safeName);
4162
+ const dbPath = path8.join(SHARDS_DIR, `${safeName}.db`);
4163
+ if (existsSync8(dbPath)) {
4164
+ const stat = statSync2(dbPath);
4165
+ const stamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
4166
+ const archivedPath = path8.join(SHARDS_DIR, `${safeName}.db.broken-${stamp}`);
4167
+ renameSync3(dbPath, archivedPath);
4168
+ process.stderr.write(
4169
+ `[shard-manager] Archived unreadable shard ${safeName}: ${archivedPath} (${stat.size} bytes, mtime ${stat.mtime.toISOString()})
4170
+ `
4171
+ );
4172
+ }
4173
+ client = getShardClient(projectName);
4174
+ await ensureShardSchema(client);
4175
+ return client;
4176
+ }
4149
4177
  }
4150
4178
  function evictLRU() {
4151
4179
  let oldest = null;
@@ -4366,7 +4394,7 @@ var init_platform_procedures = __esm({
4366
4394
  title: "MCP tools \u2014 wiki, documents, and content",
4367
4395
  domain: "tool-use",
4368
4396
  priority: "p1",
4369
- content: "create_wiki_page: create a wiki page in exe-wiki. list_wiki_pages: browse wiki pages. get_wiki_page: read a wiki page. update_wiki_page: edit a wiki page. ingest_document: import a file (PDF, MD, etc.) into memory as chunks. list_documents: browse ingested documents by workspace. purge_document: remove a document and its memory chunks. set_document_importance: adjust chunk importance scores. rerank_documents: re-score document relevance for a query."
4397
+ content: "wiki: read/list wiki pages only. Direct wiki write tools are removed; wiki updates flow through raw-data ingestion/projection into the curated wiki store. Legacy aliases: list_wiki_pages/get_wiki_page. crm: read/list/get CRM records from exe-db. raw_data: read capped raw landing-pad events from exe-db with payload opt-in. ingest_document: import a file (PDF, MD, etc.) into memory as chunks. list_documents: browse ingested documents by workspace. purge_document: remove a document and its memory chunks. set_document_importance: adjust chunk importance scores. rerank_documents: re-score document relevance for a query."
4370
4398
  },
4371
4399
  {
4372
4400
  title: "MCP tools \u2014 system, operations, and admin",
@@ -4384,7 +4412,7 @@ var init_platform_procedures = __esm({
4384
4412
  title: "MCP tools \u2014 advanced (triggers, skills, orchestration)",
4385
4413
  domain: "tool-use",
4386
4414
  priority: "p1",
4387
- content: "create_trigger: set up a scheduled recurring agent job (cron). list_triggers: view active triggers. load_skill: load a slash-command skill dynamically. apply_starter_pack: import a pre-built behavior + identity pack for a role. export_orchestration: export full org state (tasks, behaviors, identities) as portable JSON. import_orchestration: import org state into a new instance. deploy_client: deploy a customer client instance. query_company_brain: unified RAG query across all company knowledge. create_reminder: set a text reminder (shown in boot brief). list_reminders: view pending reminders. complete_reminder: mark a reminder done. global_procedure: manage Layer 0 procedures (actions: store, list, deactivate). Legacy aliases: store_global_procedure, list_global_procedures, deactivate_global_procedure."
4415
+ content: "create_trigger: set up a scheduled recurring agent job (cron). list_triggers: view active triggers. load_skill: load a slash-command skill dynamically. apply_starter_pack: import a pre-built behavior + identity pack for a role. export_orchestration: export full org state (tasks, behaviors, identities) as portable JSON. import_orchestration: import org state into a new instance. deploy_client: deploy a customer client instance. query_company_brain: unified RAG query across all company knowledge. create_reminder: set a text reminder (shown in boot brief). list_reminders: view pending reminders. complete_reminder: mark a reminder done. global_procedure: manage customer-owned company procedures (Layer 0; actions: store, list, deactivate). Legacy aliases: store_global_procedure, list_global_procedures, deactivate_global_procedure."
4388
4416
  }
4389
4417
  ];
4390
4418
  PLATFORM_PROCEDURE_TITLES = new Set(
@@ -6487,7 +6515,7 @@ __export(intercom_queue_exports, {
6487
6515
  queueIntercom: () => queueIntercom,
6488
6516
  readQueue: () => readQueue
6489
6517
  });
6490
- import { readFileSync as readFileSync8, writeFileSync as writeFileSync5, renameSync as renameSync3, existsSync as existsSync10, mkdirSync as mkdirSync5 } from "fs";
6518
+ import { readFileSync as readFileSync8, writeFileSync as writeFileSync5, renameSync as renameSync4, existsSync as existsSync10, mkdirSync as mkdirSync5 } from "fs";
6491
6519
  import path11 from "path";
6492
6520
  import os8 from "os";
6493
6521
  function ensureDir() {
@@ -6506,7 +6534,7 @@ function writeQueue(queue) {
6506
6534
  ensureDir();
6507
6535
  const tmp = `${QUEUE_PATH}.tmp`;
6508
6536
  writeFileSync5(tmp, JSON.stringify(queue, null, 2));
6509
- renameSync3(tmp, QUEUE_PATH);
6537
+ renameSync4(tmp, QUEUE_PATH);
6510
6538
  }
6511
6539
  function queueIntercom(targetSession, reason) {
6512
6540
  const queue = readQueue();
@@ -11536,6 +11564,24 @@ var MAX_BACKOFF_MS = 3e5;
11536
11564
  var BACKOFF_MULTIPLIER = 2;
11537
11565
  var JITTER_FACTOR = 0.25;
11538
11566
  var AUTH_DIR = join(homedir(), ".exe-os", "whatsapp-auth");
11567
+ function createBaileysLogger(accountName) {
11568
+ const prefix = accountName ? `[whatsapp:${accountName}]` : "[whatsapp]";
11569
+ let logger;
11570
+ logger = {
11571
+ level: "warn",
11572
+ trace: () => {
11573
+ },
11574
+ debug: () => {
11575
+ },
11576
+ info: () => {
11577
+ },
11578
+ warn: (...args) => console.warn(prefix, ...args),
11579
+ error: (...args) => console.error(prefix, ...args),
11580
+ fatal: (...args) => console.error(prefix, ...args),
11581
+ child: () => logger
11582
+ };
11583
+ return logger;
11584
+ }
11539
11585
  function calculateBackoff(retryCount) {
11540
11586
  const base = Math.min(
11541
11587
  INITIAL_BACKOFF_MS * BACKOFF_MULTIPLIER ** retryCount,
@@ -11551,6 +11597,7 @@ var WhatsAppAdapter = class {
11551
11597
  connected = false;
11552
11598
  abortController = null;
11553
11599
  authDir = AUTH_DIR;
11600
+ baileysLogger = createBaileysLogger();
11554
11601
  // Resilience state
11555
11602
  retryCount = 0;
11556
11603
  disconnectedAt = 0;
@@ -11561,6 +11608,7 @@ var WhatsAppAdapter = class {
11561
11608
  const { makeWASocket, useMultiFileAuthState, fetchLatestBaileysVersion, DisconnectReason, makeCacheableSignalKeyStore } = baileys;
11562
11609
  const { state, saveCreds } = await useMultiFileAuthState(this.authDir);
11563
11610
  const { version } = await fetchLatestBaileysVersion();
11611
+ this.baileysLogger = createBaileysLogger();
11564
11612
  this.abortController = new AbortController();
11565
11613
  let agent;
11566
11614
  const socksProxy = config2.credentials.socksProxy;
@@ -11579,8 +11627,9 @@ var WhatsAppAdapter = class {
11579
11627
  const sock = makeWASocket({
11580
11628
  auth: {
11581
11629
  creds: state.creds,
11582
- keys: makeCacheableSignalKeyStore(state.keys, void 0)
11630
+ keys: makeCacheableSignalKeyStore(state.keys, this.baileysLogger)
11583
11631
  },
11632
+ logger: this.baileysLogger,
11584
11633
  version,
11585
11634
  printQRInTerminal: true,
11586
11635
  browser: ["exe-os", "cli", "1.0"],
@@ -594,7 +594,8 @@ function isMultiInstance(agentName, employees) {
594
594
  return MULTI_INSTANCE_ROLES.has(emp.role.toLowerCase());
595
595
  }
596
596
  function addEmployee(employees, employee) {
597
- const normalized = { ...employee, name: employee.name.toLowerCase() };
597
+ const { systemPrompt: _legacyPrompt, ...rest } = employee;
598
+ const normalized = { ...rest, name: employee.name.toLowerCase() };
598
599
  if (employees.some((e) => e.name.toLowerCase() === normalized.name)) {
599
600
  throw new Error(`Employee '${normalized.name}' already exists`);
600
601
  }
@@ -3649,7 +3650,7 @@ __export(shard_manager_exports, {
3649
3650
  shardExists: () => shardExists
3650
3651
  });
3651
3652
  import path8 from "path";
3652
- import { existsSync as existsSync8, mkdirSync as mkdirSync2, readdirSync } from "fs";
3653
+ import { existsSync as existsSync8, mkdirSync as mkdirSync2, readdirSync, renameSync as renameSync3, statSync as statSync2 } from "fs";
3653
3654
  import { createClient as createClient2 } from "@libsql/client";
3654
3655
  function initShardManager(encryptionKey) {
3655
3656
  _encryptionKey = encryptionKey;
@@ -3671,7 +3672,7 @@ function getShardClient(projectName) {
3671
3672
  if (!_encryptionKey) {
3672
3673
  throw new Error("Shard manager not initialized. Call initShardManager() first.");
3673
3674
  }
3674
- const safeName = projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
3675
+ const safeName = safeShardName(projectName);
3675
3676
  if (!safeName || safeName === "unknown") {
3676
3677
  throw new Error(`Invalid project name for shard: "${projectName}" (resolved to "${safeName}")`);
3677
3678
  }
@@ -3693,9 +3694,12 @@ function getShardClient(projectName) {
3693
3694
  return client;
3694
3695
  }
3695
3696
  function shardExists(projectName) {
3696
- const safeName = projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
3697
+ const safeName = safeShardName(projectName);
3697
3698
  return existsSync8(path8.join(SHARDS_DIR, `${safeName}.db`));
3698
3699
  }
3700
+ function safeShardName(projectName) {
3701
+ return projectName.replace(/[^a-zA-Z0-9_-]/g, "_");
3702
+ }
3699
3703
  function listShards() {
3700
3704
  if (!existsSync8(SHARDS_DIR)) return [];
3701
3705
  return readdirSync(SHARDS_DIR).filter((f) => f.endsWith(".db")).map((f) => f.replace(".db", ""));
@@ -3789,7 +3793,8 @@ async function ensureShardSchema(client) {
3789
3793
  "ALTER TABLE memories ADD COLUMN token_cost REAL",
3790
3794
  "ALTER TABLE memories ADD COLUMN audience TEXT",
3791
3795
  "ALTER TABLE memories ADD COLUMN language_type TEXT",
3792
- "ALTER TABLE memories ADD COLUMN parent_memory_id TEXT"
3796
+ "ALTER TABLE memories ADD COLUMN parent_memory_id TEXT",
3797
+ "ALTER TABLE memories ADD COLUMN deleted_at TEXT"
3793
3798
  ]) {
3794
3799
  try {
3795
3800
  await client.execute(col);
@@ -3885,9 +3890,32 @@ async function ensureShardSchema(client) {
3885
3890
  }
3886
3891
  }
3887
3892
  async function getReadyShardClient(projectName) {
3888
- const client = getShardClient(projectName);
3889
- await ensureShardSchema(client);
3890
- return client;
3893
+ const safeName = safeShardName(projectName);
3894
+ let client = getShardClient(projectName);
3895
+ try {
3896
+ await ensureShardSchema(client);
3897
+ return client;
3898
+ } catch (err) {
3899
+ const message = err instanceof Error ? err.message : String(err);
3900
+ if (!/SQLITE_NOTADB|file is not a database/i.test(message)) throw err;
3901
+ client.close();
3902
+ _shards.delete(safeName);
3903
+ _shardLastAccess.delete(safeName);
3904
+ const dbPath = path8.join(SHARDS_DIR, `${safeName}.db`);
3905
+ if (existsSync8(dbPath)) {
3906
+ const stat = statSync2(dbPath);
3907
+ const stamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
3908
+ const archivedPath = path8.join(SHARDS_DIR, `${safeName}.db.broken-${stamp}`);
3909
+ renameSync3(dbPath, archivedPath);
3910
+ process.stderr.write(
3911
+ `[shard-manager] Archived unreadable shard ${safeName}: ${archivedPath} (${stat.size} bytes, mtime ${stat.mtime.toISOString()})
3912
+ `
3913
+ );
3914
+ }
3915
+ client = getShardClient(projectName);
3916
+ await ensureShardSchema(client);
3917
+ return client;
3918
+ }
3891
3919
  }
3892
3920
  function evictLRU() {
3893
3921
  let oldest = null;
@@ -4108,7 +4136,7 @@ var init_platform_procedures = __esm({
4108
4136
  title: "MCP tools \u2014 wiki, documents, and content",
4109
4137
  domain: "tool-use",
4110
4138
  priority: "p1",
4111
- content: "create_wiki_page: create a wiki page in exe-wiki. list_wiki_pages: browse wiki pages. get_wiki_page: read a wiki page. update_wiki_page: edit a wiki page. ingest_document: import a file (PDF, MD, etc.) into memory as chunks. list_documents: browse ingested documents by workspace. purge_document: remove a document and its memory chunks. set_document_importance: adjust chunk importance scores. rerank_documents: re-score document relevance for a query."
4139
+ content: "wiki: read/list wiki pages only. Direct wiki write tools are removed; wiki updates flow through raw-data ingestion/projection into the curated wiki store. Legacy aliases: list_wiki_pages/get_wiki_page. crm: read/list/get CRM records from exe-db. raw_data: read capped raw landing-pad events from exe-db with payload opt-in. ingest_document: import a file (PDF, MD, etc.) into memory as chunks. list_documents: browse ingested documents by workspace. purge_document: remove a document and its memory chunks. set_document_importance: adjust chunk importance scores. rerank_documents: re-score document relevance for a query."
4112
4140
  },
4113
4141
  {
4114
4142
  title: "MCP tools \u2014 system, operations, and admin",
@@ -4126,7 +4154,7 @@ var init_platform_procedures = __esm({
4126
4154
  title: "MCP tools \u2014 advanced (triggers, skills, orchestration)",
4127
4155
  domain: "tool-use",
4128
4156
  priority: "p1",
4129
- content: "create_trigger: set up a scheduled recurring agent job (cron). list_triggers: view active triggers. load_skill: load a slash-command skill dynamically. apply_starter_pack: import a pre-built behavior + identity pack for a role. export_orchestration: export full org state (tasks, behaviors, identities) as portable JSON. import_orchestration: import org state into a new instance. deploy_client: deploy a customer client instance. query_company_brain: unified RAG query across all company knowledge. create_reminder: set a text reminder (shown in boot brief). list_reminders: view pending reminders. complete_reminder: mark a reminder done. global_procedure: manage Layer 0 procedures (actions: store, list, deactivate). Legacy aliases: store_global_procedure, list_global_procedures, deactivate_global_procedure."
4157
+ content: "create_trigger: set up a scheduled recurring agent job (cron). list_triggers: view active triggers. load_skill: load a slash-command skill dynamically. apply_starter_pack: import a pre-built behavior + identity pack for a role. export_orchestration: export full org state (tasks, behaviors, identities) as portable JSON. import_orchestration: import org state into a new instance. deploy_client: deploy a customer client instance. query_company_brain: unified RAG query across all company knowledge. create_reminder: set a text reminder (shown in boot brief). list_reminders: view pending reminders. complete_reminder: mark a reminder done. global_procedure: manage customer-owned company procedures (Layer 0; actions: store, list, deactivate). Legacy aliases: store_global_procedure, list_global_procedures, deactivate_global_procedure."
4130
4158
  }
4131
4159
  ];
4132
4160
  PLATFORM_PROCEDURE_TITLES = new Set(
@@ -5120,7 +5148,7 @@ __export(intercom_queue_exports, {
5120
5148
  queueIntercom: () => queueIntercom,
5121
5149
  readQueue: () => readQueue
5122
5150
  });
5123
- import { readFileSync as readFileSync7, writeFileSync as writeFileSync5, renameSync as renameSync3, existsSync as existsSync10, mkdirSync as mkdirSync4 } from "fs";
5151
+ import { readFileSync as readFileSync7, writeFileSync as writeFileSync5, renameSync as renameSync4, existsSync as existsSync10, mkdirSync as mkdirSync4 } from "fs";
5124
5152
  import path10 from "path";
5125
5153
  import os7 from "os";
5126
5154
  function ensureDir() {
@@ -5139,7 +5167,7 @@ function writeQueue(queue) {
5139
5167
  ensureDir();
5140
5168
  const tmp = `${QUEUE_PATH}.tmp`;
5141
5169
  writeFileSync5(tmp, JSON.stringify(queue, null, 2));
5142
- renameSync3(tmp, QUEUE_PATH);
5170
+ renameSync4(tmp, QUEUE_PATH);
5143
5171
  }
5144
5172
  function queueIntercom(targetSession, reason) {
5145
5173
  const queue = readQueue();