@sesamespace/hivemind 0.5.4 → 0.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96) hide show
  1. package/.github/workflows/memory-release.yml +89 -0
  2. package/config/default.toml +12 -0
  3. package/data/lancedb/contexts.lance/_transactions/0-c4755ab9-b604-4d90-851f-0491f3cbcbce.txn +2 -0
  4. package/data/lancedb/contexts.lance/_versions/1.manifest +0 -0
  5. package/data/lancedb/episode_access.lance/_transactions/0-407a6366-0dca-490a-868b-ea63bee3b40c.txn +2 -0
  6. package/data/lancedb/episode_access.lance/_versions/1.manifest +0 -0
  7. package/data/lancedb/episode_cooccurrence.lance/_transactions/0-0e103c7f-29d7-4f09-8100-505c076f01ae.txn +1 -0
  8. package/data/lancedb/episode_cooccurrence.lance/_versions/1.manifest +0 -0
  9. package/data/lancedb/episodes.lance/_transactions/0-e678cbac-792b-4a9d-a457-17b0d4d23607.txn +1 -0
  10. package/data/lancedb/episodes.lance/_versions/1.manifest +0 -0
  11. package/data/lancedb/l3_knowledge.lance/_transactions/0-cdb3561f-3a59-4e15-bded-e93c5f9a50e3.txn +1 -0
  12. package/data/lancedb/l3_knowledge.lance/_versions/1.manifest +0 -0
  13. package/data/lancedb/tasks.lance/_transactions/0-d1cf10ec-1eb8-48b4-bbbe-34b3a1083664.txn +4 -0
  14. package/data/lancedb/tasks.lance/_versions/1.manifest +0 -0
  15. package/dist/{chunk-MHDLSAJ3.js → chunk-2OIRJFI5.js} +14 -30
  16. package/dist/chunk-2OIRJFI5.js.map +1 -0
  17. package/dist/chunk-7D4SUZUM.js +38 -0
  18. package/dist/chunk-7D4SUZUM.js.map +1 -0
  19. package/dist/chunk-LRK64BAK.js +3601 -0
  20. package/dist/chunk-LRK64BAK.js.map +1 -0
  21. package/dist/chunk-MBS5A6BZ.js +132 -0
  22. package/dist/chunk-MBS5A6BZ.js.map +1 -0
  23. package/dist/{chunk-LNV373IF.js → chunk-OQ272HKA.js} +3 -28
  24. package/dist/chunk-OQ272HKA.js.map +1 -0
  25. package/dist/{chunk-CGSXJVSS.js → chunk-RXCV57H3.js} +2 -2
  26. package/dist/{chunk-S3RVZBPZ.js → chunk-YEOAEJ62.js} +2 -2
  27. package/dist/commands/fleet.js +4 -3
  28. package/dist/commands/init.js +2 -1
  29. package/dist/commands/service.js +2 -1
  30. package/dist/commands/start.js +4 -3
  31. package/dist/commands/upgrade.js +1 -0
  32. package/dist/index.js +3 -2
  33. package/dist/main.js +7 -6
  34. package/dist/main.js.map +1 -1
  35. package/dist/start.js +2 -1
  36. package/dist/start.js.map +1 -1
  37. package/package.json +1 -1
  38. package/PLANNING.md +0 -383
  39. package/TASKS.md +0 -60
  40. package/dist/chunk-LNV373IF.js.map +0 -1
  41. package/dist/chunk-MHDLSAJ3.js.map +0 -1
  42. package/dist/chunk-PPQGQHXJ.js +0 -151
  43. package/dist/chunk-PPQGQHXJ.js.map +0 -1
  44. package/dist/chunk-YHRGEWAZ.js +0 -2326
  45. package/dist/chunk-YHRGEWAZ.js.map +0 -1
  46. package/install.sh +0 -120
  47. package/npm-package.json +0 -26
  48. package/packages/cli/package.json +0 -23
  49. package/packages/cli/src/commands/fleet.ts +0 -206
  50. package/packages/cli/src/commands/init.ts +0 -252
  51. package/packages/cli/src/commands/service.ts +0 -159
  52. package/packages/cli/src/commands/start.ts +0 -78
  53. package/packages/cli/src/commands/upgrade.ts +0 -158
  54. package/packages/cli/src/main.ts +0 -64
  55. package/packages/cli/tsconfig.json +0 -8
  56. package/packages/memory/Cargo.lock +0 -6480
  57. package/packages/memory/Cargo.toml +0 -21
  58. package/packages/memory/src/context.rs +0 -179
  59. package/packages/memory/src/embeddings.rs +0 -51
  60. package/packages/memory/src/main.rs +0 -626
  61. package/packages/memory/src/promotion.rs +0 -637
  62. package/packages/memory/src/scoring.rs +0 -131
  63. package/packages/memory/src/store.rs +0 -460
  64. package/packages/memory/src/tasks.rs +0 -321
  65. package/packages/runtime/package.json +0 -24
  66. package/packages/runtime/src/__tests__/fleet-integration.test.ts +0 -235
  67. package/packages/runtime/src/__tests__/fleet.test.ts +0 -207
  68. package/packages/runtime/src/__tests__/integration.test.ts +0 -434
  69. package/packages/runtime/src/agent.ts +0 -255
  70. package/packages/runtime/src/config.ts +0 -130
  71. package/packages/runtime/src/context.ts +0 -192
  72. package/packages/runtime/src/fleet/fleet-manager.ts +0 -399
  73. package/packages/runtime/src/fleet/memory-sync.ts +0 -362
  74. package/packages/runtime/src/fleet/primary-client.ts +0 -285
  75. package/packages/runtime/src/fleet/worker-protocol.ts +0 -158
  76. package/packages/runtime/src/fleet/worker-server.ts +0 -246
  77. package/packages/runtime/src/index.ts +0 -57
  78. package/packages/runtime/src/llm-client.ts +0 -65
  79. package/packages/runtime/src/memory-client.ts +0 -309
  80. package/packages/runtime/src/pipeline.ts +0 -187
  81. package/packages/runtime/src/prompt.ts +0 -173
  82. package/packages/runtime/src/sesame.ts +0 -226
  83. package/packages/runtime/src/start.ts +0 -20
  84. package/packages/runtime/src/task-engine.ts +0 -113
  85. package/packages/runtime/src/worker.ts +0 -339
  86. package/packages/runtime/tsconfig.json +0 -8
  87. package/pnpm-workspace.yaml +0 -2
  88. package/run-aidan.sh +0 -23
  89. package/scripts/bootstrap.sh +0 -196
  90. package/scripts/build-npm.sh +0 -92
  91. package/scripts/com.hivemind.agent.plist +0 -44
  92. package/scripts/com.hivemind.memory.plist +0 -31
  93. package/tsconfig.json +0 -22
  94. package/tsup.config.ts +0 -27
  95. /package/dist/{chunk-CGSXJVSS.js.map → chunk-RXCV57H3.js.map} +0 -0
  96. /package/dist/{chunk-S3RVZBPZ.js.map → chunk-YEOAEJ62.js.map} +0 -0
@@ -1,309 +0,0 @@
1
- import type { MemoryConfig } from "./config.js";
2
-
3
- export interface Episode {
4
- id: string;
5
- timestamp: string;
6
- context_name: string;
7
- role: string;
8
- content: string;
9
- access_count?: number;
10
- layer?: string;
11
- }
12
-
13
- export interface ScoredEpisode {
14
- id: string;
15
- timestamp: string;
16
- context_name: string;
17
- role: string;
18
- content: string;
19
- score: number;
20
- source_context?: string;
21
- }
22
-
23
- export interface EpisodeInput {
24
- context_name?: string;
25
- role: string;
26
- content: string;
27
- }
28
-
29
- export interface ContextInfo {
30
- name: string;
31
- description: string;
32
- created_at: string;
33
- episode_count: number;
34
- }
35
-
36
- export interface CrossContextResult {
37
- context: string;
38
- episodes: ScoredEpisode[];
39
- }
40
-
41
- export interface PromotionResult {
42
- promoted_count: number;
43
- episode_ids: string[];
44
- }
45
-
46
- export interface TaskRecord {
47
- id: string;
48
- context_name: string;
49
- title: string;
50
- description: string;
51
- status: "planned" | "active" | "complete" | "archived";
52
- blocked_by: string[];
53
- created_at: string;
54
- updated_at: string;
55
- }
56
-
57
- export interface L3Entry {
58
- id: string;
59
- source_episode_id: string;
60
- context_name: string;
61
- content: string;
62
- promoted_at: string;
63
- access_count: number;
64
- connection_density: number;
65
- }
66
-
67
- export interface TaskInput {
68
- context_name: string;
69
- title: string;
70
- description: string;
71
- status?: string;
72
- blocked_by?: string[];
73
- }
74
-
75
- export class MemoryClient {
76
- private baseUrl: string;
77
- private topK: number;
78
-
79
- constructor(config: MemoryConfig) {
80
- this.baseUrl = config.daemon_url;
81
- this.topK = config.top_k;
82
- }
83
-
84
- async storeEpisode(input: EpisodeInput): Promise<Episode> {
85
- const resp = await fetch(`${this.baseUrl}/episodes`, {
86
- method: "POST",
87
- headers: { "Content-Type": "application/json" },
88
- body: JSON.stringify(input),
89
- });
90
-
91
- if (!resp.ok) {
92
- throw new Error(`Memory store failed: ${resp.status} ${await resp.text()}`);
93
- }
94
-
95
- return resp.json() as Promise<Episode>;
96
- }
97
-
98
- async search(query: string, context?: string, limit?: number): Promise<ScoredEpisode[]> {
99
- const params = new URLSearchParams({ q: query });
100
- if (context) params.set("context", context);
101
- params.set("limit", String(limit ?? this.topK));
102
-
103
- const resp = await fetch(`${this.baseUrl}/search?${params}`);
104
-
105
- if (!resp.ok) {
106
- throw new Error(`Memory search failed: ${resp.status} ${await resp.text()}`);
107
- }
108
-
109
- const data = (await resp.json()) as { episodes: ScoredEpisode[] };
110
- return data.episodes;
111
- }
112
-
113
- async getContext(name: string): Promise<Episode[]> {
114
- const resp = await fetch(`${this.baseUrl}/contexts/${encodeURIComponent(name)}`);
115
-
116
- if (!resp.ok) {
117
- throw new Error(`Get context failed: ${resp.status} ${await resp.text()}`);
118
- }
119
-
120
- return resp.json() as Promise<Episode[]>;
121
- }
122
-
123
- async createContext(name: string, description = ""): Promise<void> {
124
- const resp = await fetch(`${this.baseUrl}/contexts`, {
125
- method: "POST",
126
- headers: { "Content-Type": "application/json" },
127
- body: JSON.stringify({ name, description }),
128
- });
129
-
130
- if (!resp.ok) {
131
- throw new Error(`Create context failed: ${resp.status} ${await resp.text()}`);
132
- }
133
- }
134
-
135
- async deleteContext(name: string): Promise<void> {
136
- const resp = await fetch(`${this.baseUrl}/contexts/${encodeURIComponent(name)}`, {
137
- method: "DELETE",
138
- });
139
-
140
- if (!resp.ok) {
141
- throw new Error(`Delete context failed: ${resp.status} ${await resp.text()}`);
142
- }
143
- }
144
-
145
- async listContexts(): Promise<ContextInfo[]> {
146
- const resp = await fetch(`${this.baseUrl}/contexts`);
147
-
148
- if (!resp.ok) {
149
- throw new Error(`List contexts failed: ${resp.status} ${await resp.text()}`);
150
- }
151
-
152
- const data = (await resp.json()) as { contexts: ContextInfo[] };
153
- return data.contexts;
154
- }
155
-
156
- async searchCrossContext(query: string, limit?: number): Promise<CrossContextResult[]> {
157
- const params = new URLSearchParams({ q: query });
158
- params.set("limit", String(limit ?? this.topK));
159
-
160
- const resp = await fetch(`${this.baseUrl}/search/cross-context?${params}`);
161
-
162
- if (!resp.ok) {
163
- throw new Error(`Cross-context search failed: ${resp.status} ${await resp.text()}`);
164
- }
165
-
166
- const data = (await resp.json()) as { results: CrossContextResult[] };
167
- return data.results;
168
- }
169
-
170
- async shareEpisode(episodeId: string, targetContext: string): Promise<void> {
171
- const resp = await fetch(`${this.baseUrl}/episodes/${encodeURIComponent(episodeId)}/share`, {
172
- method: "POST",
173
- headers: { "Content-Type": "application/json" },
174
- body: JSON.stringify({ target_context: targetContext }),
175
- });
176
-
177
- if (!resp.ok) {
178
- throw new Error(`Share episode failed: ${resp.status} ${await resp.text()}`);
179
- }
180
- }
181
-
182
- async runPromotion(contextName?: string): Promise<PromotionResult> {
183
- const params = new URLSearchParams();
184
- if (contextName) params.set("context", contextName);
185
-
186
- const resp = await fetch(`${this.baseUrl}/promotion/run?${params}`, {
187
- method: "POST",
188
- });
189
-
190
- if (!resp.ok) {
191
- throw new Error(`Promotion failed: ${resp.status} ${await resp.text()}`);
192
- }
193
-
194
- return resp.json() as Promise<PromotionResult>;
195
- }
196
-
197
- async getL3Knowledge(contextName: string): Promise<L3Entry[]> {
198
- const resp = await fetch(
199
- `${this.baseUrl}/promotion/l3?context=${encodeURIComponent(contextName)}`,
200
- );
201
-
202
- if (!resp.ok) {
203
- throw new Error(`Get L3 failed: ${resp.status} ${await resp.text()}`);
204
- }
205
-
206
- const data = (await resp.json()) as { entries: L3Entry[] };
207
- return data.entries;
208
- }
209
-
210
- async setContextScoring(contextName: string, halfLifeHours: number): Promise<void> {
211
- const resp = await fetch(
212
- `${this.baseUrl}/contexts/${encodeURIComponent(contextName)}/scoring`,
213
- {
214
- method: "POST",
215
- headers: { "Content-Type": "application/json" },
216
- body: JSON.stringify({ half_life_hours: halfLifeHours }),
217
- },
218
- );
219
-
220
- if (!resp.ok) {
221
- throw new Error(`Set scoring failed: ${resp.status} ${await resp.text()}`);
222
- }
223
- }
224
-
225
- async recordAccess(episodeId: string): Promise<void> {
226
- const resp = await fetch(`${this.baseUrl}/episodes/${encodeURIComponent(episodeId)}/access`, {
227
- method: "POST",
228
- });
229
-
230
- if (!resp.ok) {
231
- throw new Error(`Record access failed: ${resp.status} ${await resp.text()}`);
232
- }
233
- }
234
-
235
- async recordCoAccess(episodeIds: string[]): Promise<void> {
236
- const resp = await fetch(`${this.baseUrl}/episodes/co-access`, {
237
- method: "POST",
238
- headers: { "Content-Type": "application/json" },
239
- body: JSON.stringify({ episode_ids: episodeIds }),
240
- });
241
-
242
- if (!resp.ok) {
243
- throw new Error(`Co-access failed: ${resp.status} ${await resp.text()}`);
244
- }
245
- }
246
-
247
- // Task engine methods
248
- async createTask(input: TaskInput): Promise<TaskRecord> {
249
- const resp = await fetch(`${this.baseUrl}/tasks`, {
250
- method: "POST",
251
- headers: { "Content-Type": "application/json" },
252
- body: JSON.stringify(input),
253
- });
254
-
255
- if (!resp.ok) {
256
- throw new Error(`Create task failed: ${resp.status} ${await resp.text()}`);
257
- }
258
-
259
- return resp.json() as Promise<TaskRecord>;
260
- }
261
-
262
- async listTasks(contextName: string, status?: string): Promise<TaskRecord[]> {
263
- const params = new URLSearchParams({ context: contextName });
264
- if (status) params.set("status", status);
265
-
266
- const resp = await fetch(`${this.baseUrl}/tasks?${params}`);
267
-
268
- if (!resp.ok) {
269
- throw new Error(`List tasks failed: ${resp.status} ${await resp.text()}`);
270
- }
271
-
272
- const data = (await resp.json()) as { tasks: TaskRecord[] };
273
- return data.tasks;
274
- }
275
-
276
- async updateTask(taskId: string, updates: Partial<Pick<TaskRecord, "status" | "title" | "description" | "blocked_by">>): Promise<TaskRecord> {
277
- const resp = await fetch(`${this.baseUrl}/tasks/${encodeURIComponent(taskId)}`, {
278
- method: "PATCH",
279
- headers: { "Content-Type": "application/json" },
280
- body: JSON.stringify(updates),
281
- });
282
-
283
- if (!resp.ok) {
284
- throw new Error(`Update task failed: ${resp.status} ${await resp.text()}`);
285
- }
286
-
287
- return resp.json() as Promise<TaskRecord>;
288
- }
289
-
290
- async getNextTask(contextName: string): Promise<TaskRecord | null> {
291
- const resp = await fetch(`${this.baseUrl}/tasks/next?context=${encodeURIComponent(contextName)}`);
292
-
293
- if (!resp.ok) {
294
- if (resp.status === 404) return null;
295
- throw new Error(`Get next task failed: ${resp.status} ${await resp.text()}`);
296
- }
297
-
298
- return resp.json() as Promise<TaskRecord>;
299
- }
300
-
301
- async healthCheck(): Promise<boolean> {
302
- try {
303
- const resp = await fetch(`${this.baseUrl}/health`);
304
- return resp.ok;
305
- } catch {
306
- return false;
307
- }
308
- }
309
- }
@@ -1,187 +0,0 @@
1
- import { execSync } from "child_process";
2
- import { Agent } from "./agent.js";
3
- import type { HivemindConfig } from "./config.js";
4
- import { loadConfig } from "./config.js";
5
- import { SesameClient } from "./sesame.js";
6
- import type { UpgradeRequest } from "./sesame.js";
7
- import { MemoryClient } from "./memory-client.js";
8
-
9
- export async function startPipeline(configPath: string): Promise<void> {
10
- const config = loadConfig(configPath);
11
-
12
- console.log(`[hivemind] Starting ${config.agent.name} (pid ${process.pid})`);
13
-
14
- // Verify memory daemon is reachable
15
- const memory = new MemoryClient(config.memory);
16
- const memoryOk = await memory.healthCheck();
17
- if (!memoryOk) {
18
- console.warn("[hivemind] Memory daemon unreachable at", config.memory.daemon_url);
19
- console.warn("[hivemind] Continuing without persistent memory — episodes will not be stored");
20
- } else {
21
- console.log("[hivemind] Memory daemon connected");
22
- }
23
-
24
- // Create the agent
25
- const agent = new Agent(config);
26
- console.log(`[hivemind] Context manager initialized (active: ${agent.getActiveContext()})`);
27
-
28
- // Connect to Sesame if configured
29
- if (config.sesame.api_key) {
30
- await startSesameLoop(config, agent);
31
- } else {
32
- console.log("[hivemind] No Sesame API key configured — running in stdin mode");
33
- await startStdinLoop(agent);
34
- }
35
- }
36
-
37
- async function startSesameLoop(config: HivemindConfig, agent: Agent): Promise<void> {
38
- const sesame = new SesameClient(config.sesame);
39
-
40
- // --- Graceful shutdown ---
41
- let shuttingDown = false;
42
- const shutdown = (signal: string) => {
43
- if (shuttingDown) return;
44
- shuttingDown = true;
45
- console.log(`\n[hivemind] Received ${signal}, shutting down...`);
46
- try {
47
- sesame.updatePresence("offline", { emoji: "⭘" });
48
- sesame.disconnect();
49
- console.log("[hivemind] Sesame disconnected cleanly");
50
- } catch (err) {
51
- console.error("[hivemind] Error during disconnect:", (err as Error).message);
52
- }
53
- process.exit(0);
54
- };
55
- process.on("SIGTERM", () => shutdown("SIGTERM"));
56
- process.on("SIGINT", () => shutdown("SIGINT"));
57
-
58
- // ── Native upgrade handler (no LLM involved) ──
59
- sesame.onUpgrade(async (req: UpgradeRequest) => {
60
- console.log(`[hivemind] Upgrade requested: ${req.packageName}@${req.targetVersion} (by ${req.requestedBy})`);
61
- sesame.updatePresence("working", { detail: `Upgrading to ${req.targetVersion}`, emoji: "⬆️" });
62
-
63
- try {
64
- const target = req.targetVersion === "latest"
65
- ? req.packageName
66
- : `${req.packageName}@${req.targetVersion}`;
67
-
68
- console.log(`[hivemind] Running: npm install -g ${target}`);
69
- execSync(`npm install -g ${target}`, { stdio: "inherit", timeout: 120_000 });
70
- console.log("[hivemind] Package updated successfully");
71
-
72
- // Send confirmation message to the requesting human's DM
73
- // (For now, log it — we'd need the human's channel ID for a proper message)
74
- console.log(`[hivemind] Upgrade to ${req.targetVersion} complete. Restarting...`);
75
-
76
- sesame.updatePresence("working", { detail: "Restarting after upgrade", emoji: "🔄" });
77
-
78
- // Graceful restart — try launchd kickstart, fall back to process.exit
79
- try {
80
- execSync("launchctl kickstart -k gui/$(id -u)/com.hivemind.agent", { timeout: 10_000 });
81
- } catch {
82
- // If launchd isn't managing us, just exit (the wrapper script will restart)
83
- console.log("[hivemind] Exiting for restart...");
84
- process.exit(0);
85
- }
86
- } catch (err) {
87
- console.error(`[hivemind] Upgrade failed: ${(err as Error).message}`);
88
- sesame.updatePresence("online", { detail: "Upgrade failed", emoji: "❌" });
89
- }
90
- });
91
-
92
- sesame.onMessage(async (msg) => {
93
- if (shuttingDown) return;
94
- console.log(`[sesame] ${msg.author.handle} (${msg.channelKind}): ${msg.content}`);
95
-
96
- // Start typing indicator + set presence to thinking
97
- sesame.startTyping(msg.channelId);
98
- sesame.updatePresence("thinking", { detail: `Replying to ${msg.author.handle}`, emoji: "💭" });
99
-
100
- try {
101
- // Prepend sender info so the agent knows who's talking
102
- const prefix = msg.channelKind === "group"
103
- ? `[${msg.author.handle} in group chat]: `
104
- : `[${msg.author.handle}]: `;
105
- const response = await agent.processMessage(prefix + msg.content);
106
-
107
- // Stop typing before sending reply
108
- sesame.stopTyping(msg.channelId);
109
-
110
- // Agent chose not to respond (group chat etiquette)
111
- if (response.content.trim() === "__SKIP__") {
112
- console.log(`[sesame] ${config.agent.name}: skipped (${msg.author.handle} in ${msg.channelKind})`);
113
- sesame.updatePresence("online", { emoji: "🟢" });
114
- return;
115
- }
116
-
117
- const ctxPrefix = response.contextSwitched
118
- ? `[switched to ${response.context}] `
119
- : "";
120
- await sesame.sendMessage(msg.channelId, ctxPrefix + response.content);
121
- console.log(`[sesame] ${config.agent.name} (${response.context}): ${response.content.slice(0, 100)}...`);
122
-
123
- // Back to online
124
- sesame.updatePresence("online", { emoji: "🟢" });
125
- } catch (err) {
126
- sesame.stopTyping(msg.channelId);
127
- sesame.updatePresence("online", { emoji: "🟢" });
128
- console.error("[sesame] Error processing message:", (err as Error).message);
129
- }
130
- });
131
-
132
- await sesame.connect();
133
- console.log("[hivemind] Listening for Sesame messages");
134
-
135
- // Keep alive
136
- await new Promise<void>(() => {});
137
- }
138
-
139
- async function startStdinLoop(agent: Agent): Promise<void> {
140
- const readline = await import("readline");
141
- const rl = readline.createInterface({
142
- input: process.stdin,
143
- output: process.stdout,
144
- });
145
-
146
- console.log("[hivemind] Ready. Type a message (Ctrl+C to exit)");
147
- console.log("[hivemind] Commands: 'switch to <name>', 'list contexts', 'create context <name>'\n");
148
-
149
- rl.on("line", async (line: string) => {
150
- const input = line.trim();
151
- if (!input) return;
152
-
153
- if (input.toLowerCase() === "list contexts") {
154
- const contexts = agent.getContextManager().listContexts();
155
- console.log("\nContexts:");
156
- for (const ctx of contexts) {
157
- const active = ctx.name === agent.getActiveContext() ? " (active)" : "";
158
- console.log(` - ${ctx.name}${active}: ${ctx.description || "(no description)"}`);
159
- }
160
- console.log();
161
- return;
162
- }
163
-
164
- const createMatch = input.match(/^create context (\S+)\s*(.*)?$/i);
165
- if (createMatch) {
166
- const name = createMatch[1];
167
- const desc = createMatch[2] || "";
168
- await agent.getContextManager().createContext(name, desc);
169
- console.log(`\nCreated context: ${name}\n`);
170
- return;
171
- }
172
-
173
- try {
174
- const response = await agent.processMessage(input);
175
- if (response.contextSwitched) {
176
- console.log(`\n[switched to context: ${response.context}]`);
177
- }
178
- console.log(`\n${response.content}\n`);
179
- } catch (err) {
180
- console.error("Error:", (err as Error).message);
181
- }
182
- });
183
-
184
- return new Promise((resolve) => {
185
- rl.on("close", resolve);
186
- });
187
- }
@@ -1,173 +0,0 @@
1
- import { readFileSync, existsSync, readdirSync } from "fs";
2
- import { resolve, basename } from "path";
3
- import type { AgentConfig } from "./config.js";
4
- import type { ChatMessage } from "./llm-client.js";
5
- import type { ScoredEpisode, L3Entry } from "./memory-client.js";
6
-
7
- // Cache loaded charter to avoid re-reading every message
8
- let charterCache: { path: string; content: string } | null = null;
9
-
10
- function loadCharter(path: string): string {
11
- if (charterCache && charterCache.path === path) return charterCache.content;
12
- if (!existsSync(path)) return "";
13
- const content = readFileSync(path, "utf-8");
14
- charterCache = { path, content };
15
- return content;
16
- }
17
-
18
- // ── Workspace file loading ──
19
- // These are identity/config files loaded once from disk into the system prompt.
20
- // They don't consume conversation tokens — they're part of the static system prompt.
21
-
22
- // Priority order for workspace files (loaded in this order)
23
- const WORKSPACE_FILE_PRIORITY = [
24
- "SOUL.md", // Who the agent is (personality, values)
25
- "IDENTITY.md", // Name, avatar, creature type
26
- "AGENTS.md", // Operating instructions
27
- "USER.md", // About the human(s)
28
- "TOOLS.md", // Tool-specific notes
29
- ];
30
-
31
- interface WorkspaceFiles {
32
- dir: string;
33
- files: Map<string, string>; // filename -> content
34
- loadedAt: number;
35
- }
36
-
37
- let workspaceCache: WorkspaceFiles | null = null;
38
- const WORKSPACE_CACHE_TTL_MS = 60_000; // Reload every 60s
39
-
40
- function loadWorkspaceFiles(dir: string): Map<string, string> {
41
- const now = Date.now();
42
- if (workspaceCache && workspaceCache.dir === dir && (now - workspaceCache.loadedAt) < WORKSPACE_CACHE_TTL_MS) {
43
- return workspaceCache.files;
44
- }
45
-
46
- const files = new Map<string, string>();
47
- if (!existsSync(dir)) return files;
48
-
49
- // Load priority files first
50
- for (const filename of WORKSPACE_FILE_PRIORITY) {
51
- const filePath = resolve(dir, filename);
52
- if (existsSync(filePath)) {
53
- try {
54
- files.set(filename, readFileSync(filePath, "utf-8"));
55
- } catch {
56
- // Skip unreadable files
57
- }
58
- }
59
- }
60
-
61
- // Load any other .md files not in the priority list
62
- try {
63
- const entries = readdirSync(dir);
64
- for (const entry of entries) {
65
- if (entry.endsWith(".md") && !files.has(entry) && !entry.startsWith(".")) {
66
- const filePath = resolve(dir, entry);
67
- try {
68
- files.set(entry, readFileSync(filePath, "utf-8"));
69
- } catch {
70
- // Skip
71
- }
72
- }
73
- }
74
- } catch {
75
- // Skip if dir can't be read
76
- }
77
-
78
- workspaceCache = { dir, files, loadedAt: now };
79
- const names = [...files.keys()].join(", ");
80
- if (files.size > 0) {
81
- console.log(`[workspace] Loaded ${files.size} files: ${names}`);
82
- }
83
- return files;
84
- }
85
-
86
- export function buildSystemPrompt(
87
- config: AgentConfig,
88
- episodes: ScoredEpisode[],
89
- contextName = "global",
90
- l3Knowledge: L3Entry[] = [],
91
- ): string {
92
- let prompt = `You are ${config.name}. ${config.personality}\n`;
93
-
94
- // Load workspace identity files (SOUL.md, IDENTITY.md, AGENTS.md, etc.)
95
- // These are loaded from disk and cached — they don't consume conversation tokens.
96
- if (config.workspace) {
97
- const wsFiles = loadWorkspaceFiles(config.workspace);
98
- if (wsFiles.size > 0) {
99
- prompt += "\n# Workspace Context\n";
100
- for (const [filename, content] of wsFiles) {
101
- prompt += `\n## ${filename}\n${content}\n`;
102
- }
103
- }
104
- }
105
-
106
- // Load team charter if configured (legacy — workspace files supersede this)
107
- if (config.team_charter && !config.workspace) {
108
- const charter = loadCharter(config.team_charter);
109
- if (charter) {
110
- prompt += `\n${charter}\n`;
111
- }
112
- }
113
-
114
- prompt += `
115
- ## Communication
116
- Messages are prefixed with [sender_handle]: or [sender_handle in group chat]: to tell you who's talking.
117
- In group chats, multiple people (humans and agents) may be present. Address them by name when relevant.
118
- Don't repeat or quote these prefixes in your responses — just respond naturally.
119
- If you decide not to respond to a group message, reply with exactly: __SKIP__
120
- `;
121
-
122
- if (contextName !== "global") {
123
- prompt += `\n## Active Context: ${contextName}\nYou are currently working in the "${contextName}" project context.\n`;
124
- }
125
-
126
- // L3 semantic knowledge (higher-level patterns, decisions, architecture)
127
- if (l3Knowledge.length > 0) {
128
- prompt += "\n## Established Knowledge (learned patterns)\n\n";
129
- for (const entry of l3Knowledge) {
130
- prompt += `- ${entry.content}\n`;
131
- }
132
- }
133
-
134
- // L2 episodic memories (recent interactions)
135
- if (episodes.length > 0) {
136
- prompt += "\n## Relevant memories from previous conversations\n\n";
137
- for (const ep of episodes) {
138
- const timeAgo = formatTimeAgo(ep.timestamp);
139
- const ctxLabel = ep.context_name !== contextName ? ` [from: ${ep.context_name}]` : "";
140
- prompt += `[${timeAgo}]${ctxLabel} ${ep.role}: ${ep.content}\n`;
141
- }
142
- prompt += "\nUse these memories naturally — reference past conversations when relevant, but don't force it.\n";
143
- }
144
-
145
- return prompt;
146
- }
147
-
148
- export function buildMessages(
149
- systemPrompt: string,
150
- conversationHistory: ChatMessage[],
151
- currentMessage: string,
152
- ): ChatMessage[] {
153
- return [
154
- { role: "system", content: systemPrompt },
155
- ...conversationHistory,
156
- { role: "user", content: currentMessage },
157
- ];
158
- }
159
-
160
- function formatTimeAgo(timestamp: string): string {
161
- const date = new Date(timestamp);
162
- const now = new Date();
163
- const diffMs = now.getTime() - date.getTime();
164
- const diffMins = Math.floor(diffMs / 60_000);
165
- const diffHours = Math.floor(diffMs / 3_600_000);
166
- const diffDays = Math.floor(diffMs / 86_400_000);
167
-
168
- if (diffMins < 1) return "just now";
169
- if (diffMins < 60) return `${diffMins}m ago`;
170
- if (diffHours < 24) return `${diffHours}h ago`;
171
- if (diffDays < 7) return `${diffDays}d ago`;
172
- return date.toLocaleDateString();
173
- }