@slock-ai/daemon 0.38.1 → 0.39.1-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chat-bridge.js +165 -1
- package/dist/{chunk-GCL6DIFU.js → chunk-SDJ4NOR7.js} +843 -288
- package/dist/cli/index.js +1181 -0
- package/dist/core.js +3 -1
- package/dist/index.js +1 -1
- package/package.json +16 -16
|
@@ -4,10 +4,9 @@ import {
|
|
|
4
4
|
} from "./chunk-E6OOH3IC.js";
|
|
5
5
|
|
|
6
6
|
// src/core.ts
|
|
7
|
-
import
|
|
7
|
+
import path11 from "path";
|
|
8
8
|
import os4 from "os";
|
|
9
9
|
import { createRequire } from "module";
|
|
10
|
-
import { execSync as execSync2 } from "child_process";
|
|
11
10
|
import { accessSync } from "fs";
|
|
12
11
|
import { fileURLToPath } from "url";
|
|
13
12
|
|
|
@@ -38,6 +37,9 @@ var TOOL_DISPLAY_METADATA = {
|
|
|
38
37
|
web_fetch: { logLabel: "Fetching web", activityLabel: "Fetching web\u2026", summaryKind: "url" },
|
|
39
38
|
web_search: { logLabel: "Searching web", activityLabel: "Searching web\u2026", summaryKind: "query" },
|
|
40
39
|
todo_write: { logLabel: "Updating tasks", activityLabel: "Updating tasks\u2026", summaryKind: "none" },
|
|
40
|
+
schedule_reminder: { logLabel: "Scheduling reminder", activityLabel: "Scheduling reminder\u2026", summaryKind: "reminder_title" },
|
|
41
|
+
list_reminders: { logLabel: "Listing reminders", activityLabel: "Listing reminders\u2026", summaryKind: "none" },
|
|
42
|
+
cancel_reminder: { logLabel: "Canceling reminder", activityLabel: "Canceling reminder\u2026", summaryKind: "reminder_id" },
|
|
41
43
|
collab_tool_call: { logLabel: "Collaborating", activityLabel: "Collaborating\u2026", summaryKind: "none" }
|
|
42
44
|
};
|
|
43
45
|
var KNOWN_TOOL_ALIASES = {
|
|
@@ -90,6 +92,9 @@ var KNOWN_TOOL_ALIASES = {
|
|
|
90
92
|
SearchWeb: "web_search",
|
|
91
93
|
TodoWrite: "todo_write",
|
|
92
94
|
SetTodoList: "todo_write",
|
|
95
|
+
schedule_reminder: "schedule_reminder",
|
|
96
|
+
list_reminders: "list_reminders",
|
|
97
|
+
cancel_reminder: "cancel_reminder",
|
|
93
98
|
collab_tool_call: "collab_tool_call"
|
|
94
99
|
};
|
|
95
100
|
var MCP_CHAT_NAMESPACE_PREFIXES = ["mcp__chat__", "mcp_chat_"];
|
|
@@ -120,6 +125,198 @@ function resolveToolSemantic(toolName) {
|
|
|
120
125
|
const normalized = normalizeToolLookupName(toolName);
|
|
121
126
|
return KNOWN_TOOL_ALIASES[normalized] ?? null;
|
|
122
127
|
}
|
|
128
|
+
// Splits a shell command string into whitespace-delimited tokens, honoring
// single quotes (literal), double quotes (backslash escapes inside), and
// bare backslash escapes. Returns null when the command ends inside an open
// quote or with a dangling escape (i.e. it is not a complete command).
function tokenizeShellCommand(command) {
  const parts = [];
  let buffer = "";
  let activeQuote = null;
  let pendingEscape = false;
  // Push the accumulated word, if any, and reset the buffer.
  const flush = () => {
    if (buffer) {
      parts.push(buffer);
      buffer = "";
    }
  };
  for (const symbol of command) {
    if (pendingEscape) {
      // The escaped character is taken literally, whatever it is.
      buffer += symbol;
      pendingEscape = false;
    } else if (activeQuote === "'") {
      // Inside single quotes everything is literal until the closing quote.
      if (symbol === "'") activeQuote = null;
      else buffer += symbol;
    } else if (activeQuote === '"') {
      // Inside double quotes a backslash escapes the next character.
      // NOTE(review): real shells only escape $, `, ", \ here — this
      // tokenizer escapes any character; presumed acceptable for its use.
      if (symbol === '"') activeQuote = null;
      else if (symbol === "\\") pendingEscape = true;
      else buffer += symbol;
    } else if (symbol === "'" || symbol === '"') {
      activeQuote = symbol;
    } else if (symbol === "\\") {
      pendingEscape = true;
    } else if (/\s/.test(symbol)) {
      // Unquoted whitespace terminates the current word.
      flush();
    } else {
      buffer += symbol;
    }
  }
  // An unterminated quote or trailing escape means the command is malformed.
  if (pendingEscape || activeQuote) return null;
  flush();
  return parts;
}
|
|
178
|
+
// Reports whether a token is a shell-style environment assignment prefix,
// e.g. `FOO=bar` in `FOO=bar slock ...`. The identifier must start with a
// letter or underscore and contain only word characters before the `=`.
// (The original pattern had a trailing `.*` which is dead weight: with no
// `$` anchor, `.test` succeeds as soon as the `=` matches.)
function isEnvAssignmentToken(token) {
  return /^[A-Za-z_][A-Za-z0-9_]*=/.test(token);
}
|
|
181
|
+
// True when the token's basename (after any POSIX `/` or Windows `\` path
// prefix) is the slock launcher, compared case-insensitively.
function isSlockExecutableToken(token) {
  const sepIndex = Math.max(token.lastIndexOf("/"), token.lastIndexOf("\\"));
  const basename = (sepIndex === -1 ? token : token.slice(sepIndex + 1)).toLowerCase();
  return basename === "slock" || basename === "slock.cmd";
}
|
|
186
|
+
// True when the token's basename (after any POSIX `/` or Windows `\` path
// prefix) is one of the recognized shells, compared case-insensitively.
function isShellExecutableToken(token) {
  const sepIndex = Math.max(token.lastIndexOf("/"), token.lastIndexOf("\\"));
  const basename = (sepIndex === -1 ? token : token.slice(sepIndex + 1)).toLowerCase();
  return ["bash", "zsh", "sh"].includes(basename);
}
|
|
191
|
+
// Scans a tokenized command line for the slock executable. Each simple
// command in a pipeline/sequence (split on `|`, `&&`, `||`, `;` tokens) is
// checked, skipping leading VAR=value assignments. Returns the token index
// of the slock executable, or -1 if none of the commands invoke it.
function findSlockExecutableIndex(tokens) {
  const connectors = new Set(["|", "&&", "||", ";"]);
  // Every command starts at 0 or right after a connector token.
  const starts = [0];
  tokens.forEach((token, i) => {
    if (connectors.has(token)) starts.push(i + 1);
  });
  for (const start of starts) {
    // Step past environment assignments to reach the command word.
    let cursor = start;
    while (cursor < tokens.length && isEnvAssignmentToken(tokens[cursor])) {
      cursor += 1;
    }
    if (cursor < tokens.length && isSlockExecutableToken(tokens[cursor])) {
      return cursor;
    }
  }
  return -1;
}
|
|
209
|
+
// For a `bash -c "..."` style invocation, returns the embedded command
// string; otherwise null. Only fires when the executable token is a known
// shell; flag bundles are scanned left to right until the first non-flag
// word (a script path, not a -c payload) or the first bundle ending in `c`.
function unwrapShellPayload(tokens, executableIndex) {
  if (!isShellExecutableToken(tokens[executableIndex])) return null;
  let i = executableIndex + 1;
  while (i < tokens.length) {
    const flag = tokens[i];
    // A non-flag word means the shell runs a script — nothing to unwrap.
    if (!flag.startsWith("-")) return null;
    // Bundles like -c or -lc carry the command as the next argument.
    // NOTE(review): this also matches any long option ending in "c" —
    // presumed an accepted heuristic; confirm against real inputs.
    if (flag.endsWith("c")) {
      return i + 1 < tokens.length ? tokens[i + 1] : null;
    }
    i += 1;
  }
  return null;
}
|
|
220
|
+
// Collects every value given for `flag` in a CLI argument list, preserving
// order. Both the space-separated form (`--flag value`) and the inline form
// (`--flag=value`) are recognized; a trailing bare flag contributes nothing.
function readOptionValues(args, flag) {
  const collected = [];
  let i = 0;
  while (i < args.length) {
    const token = args[i];
    if (token === flag && i + 1 < args.length) {
      // Space-separated form: consume the next token as the value.
      collected.push(args[i + 1]);
      i += 2;
      continue;
    }
    if (token.startsWith(`${flag}=`)) {
      // Inline form: everything after the first `=` is the value.
      collected.push(token.slice(flag.length + 1));
    }
    i += 1;
  }
  return collected;
}
|
|
235
|
+
// Returns the last value supplied for `flag` (later occurrences override
// earlier ones, matching usual CLI semantics), or undefined when absent.
function readOptionValue(args, flag) {
  const values = readOptionValues(args, flag);
  return values.length > 0 ? values[values.length - 1] : undefined;
}
|
|
238
|
+
// Parses every value given for `flag` as a number and keeps only strictly
// positive integers; malformed, zero, negative, and fractional values are
// silently dropped. (Number.isInteger already rejects NaN and ±Infinity,
// so the former separate Number.isFinite check was redundant.)
function parsePositiveIntegers(args, flag) {
  return readOptionValues(args, flag)
    .map((raw) => Number(raw))
    .filter((num) => Number.isInteger(num) && num > 0);
}
|
|
241
|
+
// Detects when a bash tool call is really a `slock <resource> <action>` CLI
// invocation and translates it to the equivalent MCP-style tool call so
// downstream display logic treats both paths uniformly. Returns
// { toolName, input } on a match, or null when the command is not a
// recognizable slock invocation.
function resolveSlockCliInvocation(toolName, input) {
  // Only bash-semantic tools can carry a shell command at all.
  if (resolveToolSemantic(toolName) !== "bash") return null;
  const payload = asObject(input);
  if (!payload || typeof payload.command !== "string") return null;
  const tokens = tokenizeShellCommand(payload.command);
  if (!tokens || tokens.length === 0) return null;
  // Step past leading VAR=value assignments to find the command word.
  let head = 0;
  while (head < tokens.length && isEnvAssignmentToken(tokens[head])) head += 1;
  if (head >= tokens.length) return null;
  // `bash -c "slock ..."` wrappers: recurse on the embedded command string.
  if (isShellExecutableToken(tokens[head])) {
    const wrapped = unwrapShellPayload(tokens, head);
    if (wrapped) {
      return resolveSlockCliInvocation(toolName, { command: wrapped });
    }
  }
  const slockIndex = findSlockExecutableIndex(tokens);
  if (slockIndex < 0) return null;
  const [resource, action, ...rest] = tokens.slice(slockIndex + 1);
  if (!resource || !action) return null;
  // Dispatch table keyed by "<resource> <action>"; each builder produces the
  // translated tool invocation from the remaining CLI arguments.
  const builders = {
    "message send": () => ({ toolName: "send_message", input: { target: readOptionValue(rest, "--target") } }),
    "message check": () => ({ toolName: "check_messages", input: {} }),
    "message read": () => ({ toolName: "read_history", input: { channel: readOptionValue(rest, "--channel") } }),
    "message search": () => ({ toolName: "search_messages", input: { query: readOptionValue(rest, "--query") } }),
    "server info": () => ({ toolName: "list_server", input: {} }),
    "task list": () => ({ toolName: "list_tasks", input: { channel: readOptionValue(rest, "--channel") } }),
    "task create": () => ({ toolName: "create_tasks", input: { channel: readOptionValue(rest, "--channel") } }),
    "task claim": () => ({
      toolName: "claim_tasks",
      input: {
        channel: readOptionValue(rest, "--channel"),
        task_numbers: parsePositiveIntegers(rest, "--number")
      }
    }),
    "task unclaim": () => ({
      toolName: "unclaim_task",
      input: {
        channel: readOptionValue(rest, "--channel"),
        task_number: parsePositiveIntegers(rest, "--number")[0]
      }
    }),
    "task update": () => ({
      toolName: "update_task_status",
      input: {
        channel: readOptionValue(rest, "--channel"),
        task_number: parsePositiveIntegers(rest, "--number")[0]
      }
    }),
    "attachment upload": () => ({ toolName: "upload_file", input: { path: readOptionValue(rest, "--path") } }),
    "attachment view": () => ({ toolName: "view_file", input: {} }),
    "reminder schedule": () => ({ toolName: "schedule_reminder", input: { title: readOptionValue(rest, "--title") } }),
    "reminder list": () => ({ toolName: "list_reminders", input: {} }),
    "reminder cancel": () => ({ toolName: "cancel_reminder", input: { reminder_id: readOptionValue(rest, "--id") } })
  };
  const key = `${resource} ${action}`;
  // Object.hasOwn guards against inherited Object.prototype keys, since
  // `resource`/`action` come from untrusted command text.
  return Object.hasOwn(builders, key) ? builders[key]() : null;
}
|
|
317
|
+
// Returns the decoded slock CLI form of a tool call when the bash command
// is actually a slock invocation; otherwise passes the call through as-is.
function normalizeToolDisplayInvocation(toolName, input) {
  const cliForm = resolveSlockCliInvocation(toolName, input);
  if (cliForm) return cliForm;
  return { toolName, input };
}
|
|
123
320
|
function getToolActivityLabel(toolName) {
|
|
124
321
|
const semantic = resolveToolSemantic(toolName);
|
|
125
322
|
if (semantic) return TOOL_DISPLAY_METADATA[semantic].activityLabel;
|
|
@@ -158,6 +355,14 @@ function summarizeToolInput(toolName, input) {
|
|
|
158
355
|
return value.channel && value.task_number != null ? `${value.channel} #t${value.task_number}` : "";
|
|
159
356
|
case "target":
|
|
160
357
|
return value.target || "";
|
|
358
|
+
case "reminder_title": {
|
|
359
|
+
const title = value.title;
|
|
360
|
+
return typeof title === "string" ? truncateLabel(title, 40) : "";
|
|
361
|
+
}
|
|
362
|
+
case "reminder_id": {
|
|
363
|
+
const id = value.reminder_id;
|
|
364
|
+
return typeof id === "string" ? `#${id.slice(0, 8)}` : "";
|
|
365
|
+
}
|
|
161
366
|
}
|
|
162
367
|
}
|
|
163
368
|
|
|
@@ -232,7 +437,7 @@ var RUNTIMES = [
|
|
|
232
437
|
{ id: "codex", displayName: "Codex CLI", binary: "codex", supported: true },
|
|
233
438
|
{ id: "kimi", displayName: "Kimi CLI", binary: "kimi", supported: true },
|
|
234
439
|
{ id: "copilot", displayName: "Copilot CLI", binary: "copilot", supported: true },
|
|
235
|
-
{ id: "cursor", displayName: "Cursor CLI", binary: "agent", supported: true },
|
|
440
|
+
{ id: "cursor", displayName: "Cursor CLI", binary: "cursor-agent", supported: true },
|
|
236
441
|
{ id: "gemini", displayName: "Gemini CLI", binary: "gemini", supported: true }
|
|
237
442
|
];
|
|
238
443
|
var PLAN_CONFIG = {
|
|
@@ -271,127 +476,203 @@ var DISPLAY_PLAN_CONFIG = {
|
|
|
271
476
|
|
|
272
477
|
// src/agentProcessManager.ts
|
|
273
478
|
import { mkdir, writeFile, access, readdir as readdir2, stat as stat2, readFile, rm as rm2 } from "fs/promises";
|
|
274
|
-
import
|
|
479
|
+
import path10 from "path";
|
|
275
480
|
import os3 from "os";
|
|
276
481
|
|
|
277
482
|
// src/drivers/claude.ts
|
|
278
483
|
import { spawn } from "child_process";
|
|
279
|
-
import
|
|
280
|
-
|
|
484
|
+
import path3 from "path";
|
|
485
|
+
|
|
486
|
+
// src/drivers/cliTransport.ts
|
|
487
|
+
import { mkdirSync, writeFileSync } from "fs";
|
|
488
|
+
import path from "path";
|
|
281
489
|
|
|
282
490
|
// src/drivers/systemPrompt.ts
|
|
283
491
|
function toolRef(prefix, name) {
|
|
284
492
|
return `${prefix}${name}`;
|
|
285
493
|
}
|
|
286
|
-
function
|
|
494
|
+
function buildPrompt(config, variant, opts) {
|
|
495
|
+
const isCli = variant === "cli";
|
|
287
496
|
const t = (name) => toolRef(opts.toolPrefix, name);
|
|
497
|
+
const sendCmd = isCli ? "`slock message send`" : `\`${t("send_message")}\``;
|
|
498
|
+
const readCmd = isCli ? "`slock message read`" : `\`${t("read_history")}\``;
|
|
499
|
+
const checkCmd = isCli ? "`slock message check`" : `\`${t("check_messages")}\``;
|
|
500
|
+
const taskClaimCmd = isCli ? "`slock task claim`" : `\`${t("claim_tasks")}\``;
|
|
501
|
+
const taskCreateCmd = isCli ? "`slock task create`" : `\`${t("create_tasks")}\``;
|
|
502
|
+
const taskUpdateCmd = isCli ? "`slock task update`" : `\`${t("update_task_status")}\``;
|
|
503
|
+
const serverInfoCmd = isCli ? "`slock server info`" : `\`${t("list_server")}\``;
|
|
288
504
|
const messageDeliveryText = opts.includeStdinNotificationSection ? "New messages may be delivered to you automatically while your process stays alive." : "The daemon will automatically restart you when new messages arrive.";
|
|
289
|
-
const criticalRules = [
|
|
290
|
-
|
|
505
|
+
const criticalRules = isCli ? [
|
|
506
|
+
"- Always communicate through `slock` CLI commands. This is your only output channel.",
|
|
291
507
|
...opts.extraCriticalRules,
|
|
292
|
-
|
|
293
|
-
|
|
508
|
+
"- Use only the provided `slock` CLI commands for messaging.",
|
|
509
|
+
"- Always claim a task via `slock task claim` before starting work on it. If the claim fails, move on to a different task."
|
|
510
|
+
] : [
|
|
511
|
+
`- Always communicate through ${sendCmd}. This is your only output channel.`,
|
|
512
|
+
...opts.extraCriticalRules,
|
|
513
|
+
"- Use only the provided MCP tools for messaging \u2014 they are already available and ready.",
|
|
514
|
+
`- Always claim a task via ${taskClaimCmd} before starting work on it. If the claim fails, move on to a different task.`
|
|
294
515
|
];
|
|
295
|
-
const startupSteps = [
|
|
296
|
-
|
|
297
|
-
|
|
516
|
+
const startupSteps = isCli ? [
|
|
517
|
+
"1. If this turn already includes a concrete incoming message, first decide whether that message needs a visible acknowledgment, blocker question, or ownership signal. If it does, send it early with `slock message send` before deep context gathering.",
|
|
518
|
+
"2. Read MEMORY.md (in your cwd) and then only the additional memory/files you need to handle the current turn well.",
|
|
519
|
+
`3. If there is no concrete incoming message to handle, stop and wait. ${messageDeliveryText}`,
|
|
520
|
+
"4. When you receive a message, process it and reply with `slock message send`.",
|
|
521
|
+
"5. **Complete ALL your work before stopping.** If a task requires multi-step work (research, code changes, testing), finish everything, report results, then stop. New messages arrive automatically \u2014 you do not need to poll or wait for them."
|
|
522
|
+
] : [
|
|
523
|
+
`1. If this turn already includes a concrete incoming message, first decide whether that message needs a visible acknowledgment, blocker question, or ownership signal. If it does, send it early with ${sendCmd} before deep context gathering.`,
|
|
524
|
+
"2. Read MEMORY.md (in your cwd) and then only the additional memory/files you need to handle the current turn well.",
|
|
298
525
|
`3. If there is no concrete incoming message to handle, stop and wait. ${messageDeliveryText}`,
|
|
299
|
-
`4. When you receive a message, process it and reply with ${
|
|
300
|
-
|
|
526
|
+
`4. When you receive a message, process it and reply with ${sendCmd}.`,
|
|
527
|
+
"5. **Complete ALL your work before stopping.** If a task requires multi-step work (research, code changes, testing), finish everything, report results, then stop. New messages arrive automatically \u2014 you do not need to poll or wait for them."
|
|
301
528
|
];
|
|
302
|
-
|
|
529
|
+
const communicationSection = isCli ? `## Communication \u2014 slock CLI ONLY
|
|
530
|
+
|
|
531
|
+
Use the \`slock\` CLI for chat / task / attachment operations. The daemon injects a local \`slock\` wrapper into PATH for you. Use ONLY these commands for communication:
|
|
532
|
+
|
|
533
|
+
1. **\`slock message check\`** \u2014 Non-blocking check for new messages. Use freely during work \u2014 at natural breakpoints or after notifications.
|
|
534
|
+
2. **\`slock message send\`** \u2014 Send a message to a channel or DM.
|
|
535
|
+
3. **\`slock server info\`** \u2014 List channels in this server, which ones you have joined, plus all agents and humans.
|
|
536
|
+
4. **\`slock message read\`** \u2014 Read past messages from a channel, DM, or thread. Supports \`before\` / \`after\` pagination and \`around\` for centered context.
|
|
537
|
+
5. **\`slock message search\`** \u2014 Search messages visible to you, then inspect a hit with \`slock message read\`.
|
|
538
|
+
6. **\`slock task list\`** \u2014 View a channel's task board.
|
|
539
|
+
7. **\`slock task create\`** \u2014 Create new task-messages in a channel (supports batch titles; equivalent to sending a new message and publishing it as a task-message, not claiming it for yourself).
|
|
540
|
+
8. **\`slock task claim\`** \u2014 Claim tasks by number or message ID (supports batch, handles conflicts).
|
|
541
|
+
9. **\`slock task unclaim\`** \u2014 Release your claim on a task.
|
|
542
|
+
10. **\`slock task update\`** \u2014 Change a task's status (e.g. to in_review or done).
|
|
543
|
+
11. **\`slock attachment upload\`** \u2014 Upload a file to attach to a message. Returns an attachment ID to pass to \`slock message send\`.
|
|
544
|
+
12. **\`slock attachment view\`** \u2014 Download an attached file by its attachment ID so you can inspect it locally.
|
|
545
|
+
|
|
546
|
+
The CLI prints human-readable canonical text on success (matching the format you see in received messages and history). On failure it prints JSON to stderr:
|
|
547
|
+
- failure \u2192 stderr \`{"ok":false,"code":"...","message":"..."}\` with non-zero exit
|
|
548
|
+
|
|
549
|
+
Error code prefixes tell you the layer:
|
|
550
|
+
- \`MISSING_*\` / \`TOKEN_*\` = local auth bootstrap
|
|
551
|
+
- \`*_FAILED\` = 4xx from server
|
|
552
|
+
- \`SERVER_5XX\` = server unreachable / crashed` : `## Communication \u2014 MCP tools ONLY
|
|
303
553
|
|
|
304
|
-
|
|
554
|
+
You have MCP tools from the "chat" server. Use ONLY these for communication:
|
|
305
555
|
|
|
306
|
-
|
|
556
|
+
1. **${checkCmd}** \u2014 Non-blocking check for new messages. Use freely during work \u2014 at natural breakpoints or after notifications.
|
|
557
|
+
2. **${sendCmd}** \u2014 Send a message to a channel or DM.
|
|
558
|
+
3. **${serverInfoCmd}** \u2014 List all channels in this server, which ones you have joined, plus all agents and humans.
|
|
559
|
+
4. **${readCmd}** \u2014 Read past messages from a channel, DM, or thread. Supports \`before\` / \`after\` pagination and \`around\` for centered context.
|
|
560
|
+
5. **\`${t("search_messages")}\`** \u2014 Search messages visible to you, then inspect a hit with ${readCmd}.
|
|
561
|
+
6. **\`${t("list_tasks")}\`** \u2014 View a channel's task board.
|
|
562
|
+
7. **${taskCreateCmd}** \u2014 Create new task-messages in a channel (supports batch titles; equivalent to sending a new message and publishing it as a task-message, not claiming it for yourself).
|
|
563
|
+
8. **${taskClaimCmd}** \u2014 Claim tasks by number or message ID (supports batch, handles conflicts).
|
|
564
|
+
9. **\`${t("unclaim_task")}\`** \u2014 Release your claim on a task.
|
|
565
|
+
10. **${taskUpdateCmd}** \u2014 Change a task's status (e.g. to in_review or done).
|
|
566
|
+
11. **\`${t("upload_file")}\`** \u2014 Upload a file to attach to a message. Returns an attachment ID to pass to ${sendCmd}.
|
|
567
|
+
12. **\`${t("view_file")}\`** \u2014 Download an attached file by its attachment ID so you can inspect it locally.`;
|
|
568
|
+
const sendingMessagesSection = isCli ? `### Sending messages
|
|
569
|
+
|
|
570
|
+
- **Reply to a channel**: \`slock message send --target "#channel-name" <<'EOF'\` followed by the message body and \`EOF\`
|
|
571
|
+
- **Reply to a DM**: \`slock message send --target "dm:@peer-name" <<'EOF'\` followed by the message body and \`EOF\`
|
|
572
|
+
- **Reply in a thread**: \`slock message send --target "#channel:shortid" <<'EOF'\` followed by the message body and \`EOF\`
|
|
573
|
+
- **Start a NEW DM**: \`slock message send --target "dm:@person-name" <<'EOF'\` followed by the message body and \`EOF\`
|
|
574
|
+
|
|
575
|
+
Message content is always read from stdin. Use a heredoc so quotes, backticks, code blocks, and newlines are not interpreted by the shell:
|
|
576
|
+
\`\`\`bash
|
|
577
|
+
slock message send --target "#channel-name" <<'EOF'
|
|
578
|
+
Long message with "quotes", $vars, \`backticks\`, and code blocks.
|
|
579
|
+
EOF
|
|
580
|
+
\`\`\`
|
|
307
581
|
|
|
308
|
-
|
|
582
|
+
**IMPORTANT**: To reply to any message, always reuse the exact \`target\` from the received message. This ensures your reply goes to the right place \u2014 whether it's a channel, DM, or thread.` : `### Sending messages
|
|
309
583
|
|
|
310
|
-
|
|
584
|
+
- **Reply to a channel**: \`${t("send_message")}(target="#channel-name", content="...")\`
|
|
585
|
+
- **Reply to a DM**: \`${t("send_message")}(target="dm:@peer-name", content="...")\`
|
|
586
|
+
- **Reply in a thread**: \`${t("send_message")}(target="#channel:shortid", content="...")\` or \`${t("send_message")}(target="dm:@peer:shortid", content="...")\`
|
|
587
|
+
- **Start a NEW DM**: \`${t("send_message")}(target="dm:@person-name", content="...")\`
|
|
311
588
|
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
3. **${t("list_server")}** \u2014 List all channels in this server, which ones you have joined, plus all agents and humans.
|
|
315
|
-
4. **${t("read_history")}** \u2014 Read past messages from a channel, DM, or thread. Supports \`before\` / \`after\` pagination and \`around\` for centered context.
|
|
316
|
-
5. **${t("search_messages")}** \u2014 Search messages visible to you, then inspect a hit with \`${t("read_history")}\`.
|
|
317
|
-
6. **${t("list_tasks")}** \u2014 View a channel's task board.
|
|
318
|
-
7. **${t("create_tasks")}** \u2014 Create new task-messages in a channel (supports batch; equivalent to sending a new message and publishing it as a task-message, not claiming it for yourself).
|
|
319
|
-
8. **${t("claim_tasks")}** \u2014 Claim tasks by number (supports batch, handles conflicts).
|
|
320
|
-
9. **${t("unclaim_task")}** \u2014 Release your claim on a task.
|
|
321
|
-
10. **${t("update_task_status")}** \u2014 Change a task's status (e.g. to in_review or done).
|
|
322
|
-
11. **${t("upload_file")}** \u2014 Upload a file to attach to a message. Returns an attachment ID to pass to send_message.
|
|
323
|
-
12. **${t("view_file")}** \u2014 Download an attached file by its attachment ID so you can inspect it locally.
|
|
589
|
+
**IMPORTANT**: To reply to any message, always reuse the exact \`target\` from the received message. This ensures your reply goes to the right place \u2014 whether it's a channel, DM, or thread.`;
|
|
590
|
+
const threadsSection = isCli ? `### Threads
|
|
324
591
|
|
|
325
|
-
|
|
326
|
-
${criticalRules.join("\n")}
|
|
592
|
+
Threads are sub-conversations attached to a specific message. They let you discuss a topic without cluttering the main channel.
|
|
327
593
|
|
|
328
|
-
|
|
594
|
+
- **Thread targets** have a colon and short ID suffix: \`#general:a1b2c3d4\` (thread in #general) or \`dm:@richard:x9y8z7a0\` (thread in a DM).
|
|
595
|
+
- When you receive a message from a thread (the target has a \`:shortid\` suffix), **always reply using that same target** to keep the conversation in the thread.
|
|
596
|
+
- **Start a new thread**: Use the \`msg=\` field from the header as the thread suffix. For example, if you see \`[target=#general msg=a1b2c3d4 ...]\`, reply with \`slock message send --target "#general:a1b2c3d4" <<'EOF'\` followed by the message body and \`EOF\`. The thread will be auto-created if it doesn't exist yet.
|
|
597
|
+
- When you send a message, the response includes the message ID. You can use it to start a thread on your own message.
|
|
598
|
+
- You can read thread history: \`slock message read --channel "#general:a1b2c3d4"\`
|
|
599
|
+
- Threads cannot be nested \u2014 you cannot start a thread inside a thread.` : `### Threads
|
|
329
600
|
|
|
330
|
-
|
|
331
|
-
if (opts.postStartupNotes.length > 0) {
|
|
332
|
-
prompt += `
|
|
601
|
+
Threads are sub-conversations attached to a specific message. They let you discuss a topic without cluttering the main channel.
|
|
333
602
|
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
603
|
+
- **Thread targets** have a colon and short ID suffix: \`#general:a1b2c3d4\` (thread in #general) or \`dm:@richard:x9y8z7a0\` (thread in a DM).
|
|
604
|
+
- When you receive a message from a thread (the target has a \`:shortid\` suffix), **always reply using that same target** to keep the conversation in the thread.
|
|
605
|
+
- **Start a new thread**: Use the \`msg=\` field from the header as the thread suffix. For example, if you see \`[target=#general msg=a1b2c3d4 ...]\`, call \`${t("send_message")}(target="#general:a1b2c3d4", content="...")\`. The thread will be auto-created if it doesn't exist yet.
|
|
606
|
+
- When you send a message, the response includes the message ID. You can use it to start a thread on your own message.
|
|
607
|
+
- You can read thread history via ${readCmd} with the same thread target.
|
|
608
|
+
- Threads cannot be nested \u2014 you cannot start a thread inside a thread.`;
|
|
609
|
+
const discoverySection = isCli ? `### Discovering people and channels
|
|
337
610
|
|
|
338
|
-
|
|
611
|
+
Call \`slock server info\` to see all channels in this server, which ones you have joined, other agents, and humans.
|
|
612
|
+
Visible public channels may appear even when \`joined=false\`. In that state you can still inspect them with \`slock message read\`, but you cannot send messages there or receive ordinary channel delivery until a human adds you to the channel.` : `### Discovering people and channels
|
|
339
613
|
|
|
340
|
-
|
|
614
|
+
Call ${serverInfoCmd} to see all channels in this server, which ones you have joined, other agents, and humans.
|
|
615
|
+
Visible public channels may appear even when \`joined=false\`. In that state you can still inspect them with ${readCmd}, but you cannot send messages there or receive ordinary channel delivery until a human adds you to the channel.`;
|
|
616
|
+
const channelAwarenessSection = isCli ? `### Channel awareness
|
|
341
617
|
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
[target=#general:a1b2c3d4 msg=f3a4b5c6 time=2026-03-15T01:00:03 type=human] @richard: thread reply
|
|
347
|
-
[target=dm:@richard:x9y8z7a0 msg=d7e8f9a0 time=2026-03-15T01:00:04 type=human] @richard: DM thread reply
|
|
348
|
-
\`\`\`
|
|
618
|
+
Each channel has a **name** and optionally a **description** that define its purpose (visible via \`slock server info\`). Respect them:
|
|
619
|
+
- **Reply in context** \u2014 always respond in the channel/thread the message came from.
|
|
620
|
+
- **Stay on topic** \u2014 when proactively sharing results or updates, post in the channel most relevant to the work. Don't scatter messages across unrelated channels.
|
|
621
|
+
- If unsure where something belongs, call \`slock server info\` to review channel descriptions.` : `### Channel awareness
|
|
349
622
|
|
|
350
|
-
|
|
351
|
-
-
|
|
352
|
-
-
|
|
353
|
-
-
|
|
354
|
-
|
|
623
|
+
Each channel has a **name** and optionally a **description** that define its purpose (visible via ${serverInfoCmd}). Respect them:
|
|
624
|
+
- **Reply in context** \u2014 always respond in the channel/thread the message came from.
|
|
625
|
+
- **Stay on topic** \u2014 when proactively sharing results or updates, post in the channel most relevant to the work. Don't scatter messages across unrelated channels.
|
|
626
|
+
- If unsure where something belongs, call ${serverInfoCmd} to review channel descriptions.`;
|
|
627
|
+
const readingHistorySection = isCli ? `### Reading history
|
|
355
628
|
|
|
356
|
-
|
|
629
|
+
\`slock message read --channel "#channel-name"\` or \`slock message read --channel "dm:@peer-name"\` or \`slock message read --channel "#channel:shortid"\`
|
|
357
630
|
|
|
358
|
-
|
|
359
|
-
- **Reply to a DM**: \`send_message(target="dm:@peer-name", content="...")\`
|
|
360
|
-
- **Reply in a thread**: \`send_message(target="#channel:shortid", content="...")\` or \`send_message(target="dm:@peer:shortid", content="...")\`
|
|
361
|
-
- **Start a NEW DM**: \`send_message(target="dm:@person-name", content="...")\`
|
|
631
|
+
To jump directly to a specific hit with nearby context, use \`slock message read --channel "..." --around "messageId"\` or \`slock message read --channel "..." --around 12345\`.` : `### Reading history
|
|
362
632
|
|
|
363
|
-
|
|
633
|
+
Use ${readCmd} with the \`channel\` parameter set to \`"#channel-name"\`, \`"dm:@peer-name"\`, or a thread target like \`"#channel:shortid"\`.
|
|
364
634
|
|
|
365
|
-
|
|
635
|
+
To jump directly to a specific hit with nearby context, pass \`around\` set to a message ID or seq number.`;
|
|
636
|
+
const tasksSection = isCli ? `### Tasks
|
|
366
637
|
|
|
367
|
-
|
|
638
|
+
When someone sends a message that asks you to do something \u2014 fix a bug, write code, review a PR, deploy, investigate an issue \u2014 that is work. Claim it before you start.
|
|
368
639
|
|
|
369
|
-
|
|
370
|
-
- When you receive a message from a thread (the target has a \`:shortid\` suffix), **always reply using that same target** to keep the conversation in the thread.
|
|
371
|
-
- **Start a new thread**: Use the \`msg=\` field from the header as the thread suffix. For example, if you see \`[target=#general msg=a1b2c3d4 ...]\`, reply with \`send_message(target="#general:a1b2c3d4", content="...")\`. The thread will be auto-created if it doesn't exist yet.
|
|
372
|
-
- When you send a message, the response includes the message ID. You can use it to start a thread on your own message.
|
|
373
|
-
- You can read thread history: \`read_history(channel="#general:a1b2c3d4")\`
|
|
374
|
-
- Threads cannot be nested \u2014 you cannot start a thread inside a thread.
|
|
640
|
+
**Decision rule:** if fulfilling a message requires you to take action beyond just replying (running tools, writing code, making changes), claim the message first. If you're only answering a question or having a conversation, no claim needed.
|
|
375
641
|
|
|
376
|
-
|
|
642
|
+
**What you see in messages:**
|
|
643
|
+
- A message already marked as a task: \`@Alice: Fix the login bug [task #3 status=in_progress]\`
|
|
644
|
+
- A regular message (no task suffix): \`@Alice: Can someone look into the login bug?\`
|
|
645
|
+
- A system notification about task changes: \`\u{1F4CB} Alice converted a message to task #3 "Fix the login bug"\`
|
|
377
646
|
|
|
378
|
-
|
|
379
|
-
Visible public channels may appear even when \`joined=false\`. In that state you can still inspect them with \`read_history\`, but you cannot send messages there or receive ordinary channel delivery until a human adds you to the channel.
|
|
647
|
+
Only top-level channel / DM messages can become tasks. Messages inside threads are discussion context \u2014 reply there, but keep claims and conversions to top-level messages.
|
|
380
648
|
|
|
381
|
-
|
|
649
|
+
\`slock message read\` shows messages in their current state. If a message was later converted to a task, it will show the \`[task #N ...]\` suffix.
|
|
382
650
|
|
|
383
|
-
|
|
384
|
-
- **Reply in context** \u2014 always respond in the channel/thread the message came from.
|
|
385
|
-
- **Stay on topic** \u2014 when proactively sharing results or updates, post in the channel most relevant to the work. Don't scatter messages across unrelated channels.
|
|
386
|
-
- If unsure where something belongs, call \`list_server\` to review channel descriptions.
|
|
651
|
+
**Status flow:** \`todo\` \u2192 \`in_progress\` \u2192 \`in_review\` \u2192 \`done\`
|
|
387
652
|
|
|
388
|
-
|
|
653
|
+
**Assignee** is independent from status \u2014 a task can be claimed or unclaimed at any status except \`done\`.
|
|
389
654
|
|
|
390
|
-
|
|
655
|
+
**Workflow:**
|
|
656
|
+
1. Receive a message that requires action \u2192 claim it first (by task number if already a task, or by message ID if it's a regular message)
|
|
657
|
+
2. If the claim fails, someone else is working on it \u2014 move on to another task
|
|
658
|
+
3. Post updates in the task's thread: \`slock message send --target "#channel:msgShortId" <<'EOF'\` followed by the message body and \`EOF\`
|
|
659
|
+
4. When done, set status to \`in_review\` so a human can validate via \`slock task update\`
|
|
660
|
+
5. After approval (e.g. "looks good", "merge it"), set status to \`done\`
|
|
391
661
|
|
|
392
|
-
|
|
662
|
+
**What \`slock task create\` really means:**
|
|
663
|
+
- Tasks live in the same chat flow as messages. A task is just a message with task metadata, not a separate source of truth.
|
|
664
|
+
- \`slock task create\` is a convenience helper for a specific sequence: create a brand-new message, then publish that new message as a task-message.
|
|
665
|
+
- \`slock task create\` only creates the task \u2014 to own it, call \`slock task claim\` afterward.
|
|
666
|
+
- Typical uses for \`slock task create\` are breaking down a larger task into parallel subtasks, or batch-creating genuinely new work for others to claim.
|
|
667
|
+
- If someone already sent the work item as a message, just claim that existing message/task instead of creating a new one.
|
|
668
|
+
- If the work already exists as a message, reuse it via \`slock task claim --message-id ...\`.
|
|
393
669
|
|
|
394
|
-
|
|
670
|
+
**Creating new tasks:**
|
|
671
|
+
- The task system exists to prevent duplicate work. If you see an existing task for the work, either claim that task or leave it alone.
|
|
672
|
+
- If a message already shows a \`[task #N ...]\` suffix, claim \`#N\` if it is yours to take; otherwise move on.
|
|
673
|
+
- Before calling \`slock task create\`, first check whether the work already exists on the task board or is already being handled.
|
|
674
|
+
- Reuse existing tasks and threads instead of creating duplicates.
|
|
675
|
+
- Use \`slock task create\` only for genuinely new subtasks or follow-up work that does not already have a canonical task.` : `### Tasks
|
|
395
676
|
|
|
396
677
|
When someone sends a message that asks you to do something \u2014 fix a bug, write code, review a PR, deploy, investigate an issue \u2014 that is work. Claim it before you start.
|
|
397
678
|
|
|
@@ -404,7 +685,7 @@ When someone sends a message that asks you to do something \u2014 fix a bug, wri
|
|
|
404
685
|
|
|
405
686
|
Only top-level channel / DM messages can become tasks. Messages inside threads are discussion context \u2014 reply there, but keep claims and conversions to top-level messages.
|
|
406
687
|
|
|
407
|
-
|
|
688
|
+
${readCmd} shows messages in their current state. If a message was later converted to a task, it will show the \`[task #N ...]\` suffix.
|
|
408
689
|
|
|
409
690
|
**Status flow:** \`todo\` \u2192 \`in_progress\` \u2192 \`in_review\` \u2192 \`done\`
|
|
410
691
|
|
|
@@ -413,24 +694,77 @@ Only top-level channel / DM messages can become tasks. Messages inside threads a
|
|
|
413
694
|
**Workflow:**
|
|
414
695
|
1. Receive a message that requires action \u2192 claim it first (by task number if already a task, or by message ID if it's a regular message)
|
|
415
696
|
2. If the claim fails, someone else is working on it \u2014 move on to another task
|
|
416
|
-
3. Post updates in the task's thread
|
|
417
|
-
4. When done, set status to \`in_review\` so a human can validate
|
|
697
|
+
3. Post updates in the task's thread via ${sendCmd} with \`target="#channel:msgShortId"\`
|
|
698
|
+
4. When done, set status to \`in_review\` so a human can validate via ${taskUpdateCmd}
|
|
418
699
|
5. After approval (e.g. "looks good", "merge it"), set status to \`done\`
|
|
419
700
|
|
|
420
|
-
**What
|
|
701
|
+
**What ${taskCreateCmd} really means:**
|
|
421
702
|
- Tasks live in the same chat flow as messages. A task is just a message with task metadata, not a separate source of truth.
|
|
422
|
-
-
|
|
423
|
-
-
|
|
424
|
-
- Typical uses for
|
|
703
|
+
- ${taskCreateCmd} is a convenience helper for a specific sequence: create a brand-new message, then publish that new message as a task-message.
|
|
704
|
+
- ${taskCreateCmd} only creates the task \u2014 to own it, call ${taskClaimCmd} afterward.
|
|
705
|
+
- Typical uses for ${taskCreateCmd} are breaking down a larger task into parallel subtasks, or batch-creating genuinely new work for others to claim.
|
|
425
706
|
- If someone already sent the work item as a message, just claim that existing message/task instead of creating a new one.
|
|
426
|
-
- If the work already exists as a message, reuse it via
|
|
707
|
+
- If the work already exists as a message, reuse it via ${taskClaimCmd} with the message ID.
|
|
427
708
|
|
|
428
709
|
**Creating new tasks:**
|
|
429
710
|
- The task system exists to prevent duplicate work. If you see an existing task for the work, either claim that task or leave it alone.
|
|
430
711
|
- If a message already shows a \`[task #N ...]\` suffix, claim \`#N\` if it is yours to take; otherwise move on.
|
|
431
|
-
- Before calling
|
|
712
|
+
- Before calling ${taskCreateCmd}, first check whether the work already exists on the task board or is already being handled.
|
|
432
713
|
- Reuse existing tasks and threads instead of creating duplicates.
|
|
433
|
-
- Use
|
|
714
|
+
- Use ${taskCreateCmd} only for genuinely new subtasks or follow-up work that does not already have a canonical task.`;
|
|
715
|
+
const claimForEtiquette = isCli ? "`slock task claim`" : taskClaimCmd;
|
|
716
|
+
let prompt = `You are "${config.displayName || config.name}", an AI agent in Slock \u2014 a collaborative platform for human-AI collaboration.
|
|
717
|
+
|
|
718
|
+
## Who you are
|
|
719
|
+
|
|
720
|
+
Your workspace and MEMORY.md persist across turns, so you can recover context when resumed. You will be started, put to sleep when idle, and woken up again when someone sends you a message. Think of yourself as a colleague who is always available, accumulates knowledge over time, and develops expertise through interactions.
|
|
721
|
+
|
|
722
|
+
${communicationSection}
|
|
723
|
+
|
|
724
|
+
CRITICAL RULES:
|
|
725
|
+
${criticalRules.join("\n")}
|
|
726
|
+
|
|
727
|
+
## Startup sequence
|
|
728
|
+
|
|
729
|
+
${startupSteps.join("\n")}`;
|
|
730
|
+
if (opts.postStartupNotes.length > 0) {
|
|
731
|
+
prompt += `
|
|
732
|
+
|
|
733
|
+
${opts.postStartupNotes.join("\n")}`;
|
|
734
|
+
}
|
|
735
|
+
prompt += `
|
|
736
|
+
|
|
737
|
+
## Messaging
|
|
738
|
+
|
|
739
|
+
Messages you receive have a single RFC 5424-style structured data header followed by the sender and content:
|
|
740
|
+
|
|
741
|
+
\`\`\`
|
|
742
|
+
[target=#general msg=a1b2c3d4 time=2026-03-15T01:00:00 type=human] @richard: hello everyone
|
|
743
|
+
[target=#general msg=e5f6a7b8 time=2026-03-15T01:00:01 type=agent] @Alice: hi there
|
|
744
|
+
[target=dm:@richard msg=c9d0e1f2 time=2026-03-15T01:00:02 type=human] @richard: hey, can you help?
|
|
745
|
+
[target=#general:a1b2c3d4 msg=f3a4b5c6 time=2026-03-15T01:00:03 type=human] @richard: thread reply
|
|
746
|
+
[target=dm:@richard:x9y8z7a0 msg=d7e8f9a0 time=2026-03-15T01:00:04 type=human] @richard: DM thread reply
|
|
747
|
+
\`\`\`
|
|
748
|
+
|
|
749
|
+
Header fields:
|
|
750
|
+
- \`target=\` \u2014 where the message came from. Reuse as the \`target\` parameter when replying.
|
|
751
|
+
- \`msg=\` \u2014 message short ID (first 8 chars of UUID). Use as thread suffix to start/reply in a thread.
|
|
752
|
+
- \`time=\` \u2014 timestamp.
|
|
753
|
+
- \`type=\` \u2014 sender kind. Values are \`human\`, \`agent\`, or \`system\`.
|
|
754
|
+
|
|
755
|
+
\`type=system\` messages announce state changes in the channel (task events, channel archived/unarchived, etc.). They are informational \u2014 don't reply to them unless they clearly request action (e.g. a task was just assigned to you). In particular, archive/unarchive notifications do not need any response. If a channel is archived, further writes there will be rejected.
|
|
756
|
+
|
|
757
|
+
${sendingMessagesSection}
|
|
758
|
+
|
|
759
|
+
${threadsSection}
|
|
760
|
+
|
|
761
|
+
${discoverySection}
|
|
762
|
+
|
|
763
|
+
${channelAwarenessSection}
|
|
764
|
+
|
|
765
|
+
${readingHistorySection}
|
|
766
|
+
|
|
767
|
+
${tasksSection}
|
|
434
768
|
|
|
435
769
|
### Splitting tasks for parallel execution
|
|
436
770
|
|
|
@@ -444,8 +778,8 @@ When you receive a notification about new tasks, check the task board and claim
|
|
|
444
778
|
## @Mentions
|
|
445
779
|
|
|
446
780
|
In channel group chats, you can @mention people by their unique name (e.g. @alice or @bob).
|
|
447
|
-
- Your stable Slock @mention handle is
|
|
448
|
-
- Your display name is
|
|
781
|
+
- Your stable Slock @mention handle is \`@${config.name}\`.
|
|
782
|
+
- Your display name is \`${config.displayName || config.name}\`. Treat it as presentation only \u2014 when reasoning about identity and @mentions, prefer your stable \`name\`.
|
|
449
783
|
- Every human and agent has a unique \`name\` \u2014 this is their stable identifier for @mentions.
|
|
450
784
|
- Mention others, not yourself \u2014 assign reviews and follow-ups to teammates.
|
|
451
785
|
- @mentions only reach people inside the channel \u2014 channels are the isolation boundary.
|
|
@@ -462,7 +796,7 @@ Keep the user informed. They cannot see your internal reasoning, so:
|
|
|
462
796
|
|
|
463
797
|
- **Respect ongoing conversations.** If a human is having a back-and-forth with another person (human or agent) on a topic, their follow-up messages are directed at that person \u2014 only join if you are explicitly @mentioned or clearly addressed.
|
|
464
798
|
- **Only the person doing the work should report on it.** If someone else completed a task or submitted a PR, don't echo or summarize their work \u2014 let them respond to questions about it.
|
|
465
|
-
- **Claim before you start.** Always call
|
|
799
|
+
- **Claim before you start.** Always call ${claimForEtiquette} before doing any work on a task. If the claim fails, stop immediately and pick a different task.
|
|
466
800
|
- **Before stopping, check for concrete blockers you own.** If you still owe a specific handoff, review, decision, or reply that is currently blocking a specific person, send one minimal actionable message to that person or channel before stopping.
|
|
467
801
|
- **Skip idle narration.** Only send messages when you have actionable content \u2014 avoid broadcasting that you are waiting or idle.
|
|
468
802
|
|
|
@@ -558,20 +892,21 @@ How to handle these:
|
|
|
558
892
|
- Treat direct follow-up messages as new user input for the same live session.
|
|
559
893
|
- Adapt if the new message changes priority or direction.
|
|
560
894
|
- You do NOT need to poll just because direct follow-up delivery is available.
|
|
561
|
-
- Use
|
|
895
|
+
- Use ${checkCmd} only when you need to inspect other pending channels or recover broader context.`;
|
|
562
896
|
} else {
|
|
897
|
+
const notifyExample = isCli ? `\`[System notification: You have N new message(s) waiting. Call slock message check to read them when you're ready.]\`` : `\`[System notification: You have N new message(s) waiting. Call ${t("check_messages")} to read them when you're ready.]\``;
|
|
563
898
|
prompt += `
|
|
564
899
|
|
|
565
900
|
## Message Notifications
|
|
566
901
|
|
|
567
902
|
While you are busy (executing tools, thinking, etc.), new messages may arrive. When this happens, you will receive a system notification like:
|
|
568
903
|
|
|
569
|
-
|
|
904
|
+
${notifyExample}
|
|
570
905
|
|
|
571
906
|
How to handle these:
|
|
572
|
-
- Call
|
|
907
|
+
- Call ${checkCmd} to check for new messages. You are encouraged to do this frequently \u2014 at natural breakpoints in your work, or whenever you see a notification.
|
|
573
908
|
- If the new message is higher priority, you may pivot to it. If not, continue your current work.
|
|
574
|
-
-
|
|
909
|
+
- ${checkCmd} returns instantly with any pending messages (or "no new messages"). It is always safe to call.`;
|
|
575
910
|
}
|
|
576
911
|
}
|
|
577
912
|
if (config.description) {
|
|
@@ -582,19 +917,93 @@ ${config.description}. This may evolve.`;
|
|
|
582
917
|
}
|
|
583
918
|
return prompt;
|
|
584
919
|
}
|
|
920
|
+
function buildCliSystemPrompt(config, opts) {
|
|
921
|
+
return buildPrompt(config, "cli", opts);
|
|
922
|
+
}
|
|
923
|
+
function buildMcpSystemPrompt(config, opts) {
|
|
924
|
+
return buildPrompt(config, "mcp", opts);
|
|
925
|
+
}
|
|
926
|
+
|
|
927
|
+
// src/drivers/cliTransport.ts
|
|
928
|
+
var shellSingleQuote = (value) => `'${value.replace(/'/g, `'\\''`)}'`;
|
|
929
|
+
function buildCliTransportSystemPrompt(config, opts) {
|
|
930
|
+
return buildCliSystemPrompt(config, opts);
|
|
931
|
+
}
|
|
932
|
+
function prepareCliTransport(ctx, extraEnv = {}, platform = process.platform) {
|
|
933
|
+
if (!ctx.slockCliPath) {
|
|
934
|
+
throw new Error(`${ctx.config.runtime} driver: slockCliPath is required (daemon must inject it)`);
|
|
935
|
+
}
|
|
936
|
+
const slockDir = path.join(ctx.workingDirectory, ".slock");
|
|
937
|
+
mkdirSync(slockDir, { recursive: true });
|
|
938
|
+
const tokenFile = path.join(slockDir, "agent-token");
|
|
939
|
+
writeFileSync(tokenFile, ctx.config.authToken || ctx.daemonApiKey, { mode: 384 });
|
|
940
|
+
const posixWrapper = path.join(slockDir, "slock");
|
|
941
|
+
const posixBody = `#!/usr/bin/env bash
|
|
942
|
+
exec ${shellSingleQuote(process.execPath)} ${shellSingleQuote(ctx.slockCliPath)} "$@"
|
|
943
|
+
`;
|
|
944
|
+
writeFileSync(posixWrapper, posixBody, { mode: 493 });
|
|
945
|
+
if (platform === "win32") {
|
|
946
|
+
const cmdWrapper = path.join(slockDir, "slock.cmd");
|
|
947
|
+
const cmdBody = `@echo off\r
|
|
948
|
+
"${process.execPath}" "${ctx.slockCliPath}" %*\r
|
|
949
|
+
`;
|
|
950
|
+
writeFileSync(cmdWrapper, cmdBody);
|
|
951
|
+
}
|
|
952
|
+
const wrapperPath = platform === "win32" ? path.join(slockDir, "slock.cmd") : posixWrapper;
|
|
953
|
+
const spawnEnv = {
|
|
954
|
+
...process.env,
|
|
955
|
+
FORCE_COLOR: "0",
|
|
956
|
+
...ctx.config.envVars || {},
|
|
957
|
+
...extraEnv,
|
|
958
|
+
SLOCK_AGENT_ID: ctx.agentId,
|
|
959
|
+
SLOCK_SERVER_URL: ctx.config.serverUrl,
|
|
960
|
+
SLOCK_AGENT_TOKEN_FILE: tokenFile,
|
|
961
|
+
PATH: `${slockDir}${path.delimiter}${process.env.PATH ?? ""}`
|
|
962
|
+
};
|
|
963
|
+
delete spawnEnv.SLOCK_AGENT_TOKEN;
|
|
964
|
+
return {
|
|
965
|
+
slockDir,
|
|
966
|
+
tokenFile,
|
|
967
|
+
wrapperPath,
|
|
968
|
+
spawnEnv
|
|
969
|
+
};
|
|
970
|
+
}
|
|
585
971
|
|
|
586
972
|
// src/drivers/probe.ts
|
|
587
973
|
import { execFileSync } from "child_process";
|
|
588
974
|
import { existsSync } from "fs";
|
|
589
|
-
import
|
|
975
|
+
import path2 from "path";
|
|
590
976
|
function normalizeExecOutput(raw) {
|
|
591
977
|
return Buffer.isBuffer(raw) ? raw.toString("utf8") : String(raw ?? "");
|
|
592
978
|
}
|
|
979
|
+
function resolveCommandOnWindows(command, env, execFileSyncFn) {
|
|
980
|
+
const script = "$cmd = Get-Command -Name $args[0] -ErrorAction Stop | Select-Object -First 1; if ($cmd.Path) { $cmd.Path } elseif ($cmd.Source) { $cmd.Source } elseif ($cmd.Definition) { $cmd.Definition }";
|
|
981
|
+
try {
|
|
982
|
+
const output = normalizeExecOutput(execFileSyncFn("powershell.exe", [
|
|
983
|
+
"-NoProfile",
|
|
984
|
+
"-NonInteractive",
|
|
985
|
+
"-Command",
|
|
986
|
+
script,
|
|
987
|
+
"--%",
|
|
988
|
+
command
|
|
989
|
+
], {
|
|
990
|
+
stdio: ["ignore", "pipe", "ignore"],
|
|
991
|
+
env
|
|
992
|
+
}));
|
|
993
|
+
const resolved = output.trim().split(/\r?\n/)[0];
|
|
994
|
+
return resolved || command;
|
|
995
|
+
} catch {
|
|
996
|
+
return null;
|
|
997
|
+
}
|
|
998
|
+
}
|
|
593
999
|
function resolveCommandOnPath(command, deps = {}) {
|
|
594
1000
|
const platform = deps.platform ?? process.platform;
|
|
595
1001
|
const env = deps.env ?? process.env;
|
|
596
1002
|
const execFileSyncFn = deps.execFileSyncFn ?? execFileSync;
|
|
597
|
-
|
|
1003
|
+
if (platform === "win32") {
|
|
1004
|
+
return resolveCommandOnWindows(command, env, execFileSyncFn);
|
|
1005
|
+
}
|
|
1006
|
+
const locator = "which";
|
|
598
1007
|
try {
|
|
599
1008
|
const output = normalizeExecOutput(execFileSyncFn(locator, [command], {
|
|
600
1009
|
stdio: ["ignore", "pipe", "ignore"],
|
|
@@ -629,12 +1038,19 @@ function readCommandVersion(command, args = [], deps = {}) {
|
|
|
629
1038
|
}
|
|
630
1039
|
function resolveHomePath(relativePath, deps = {}) {
|
|
631
1040
|
const homeDir = deps.homeDir ?? deps.env?.HOME ?? process.env.HOME ?? "";
|
|
632
|
-
return
|
|
1041
|
+
return path2.join(homeDir, relativePath);
|
|
633
1042
|
}
|
|
634
1043
|
|
|
635
1044
|
// src/drivers/claude.ts
|
|
636
|
-
var CLAUDE_DESKTOP_CLI_RELATIVE_PATH =
|
|
1045
|
+
var CLAUDE_DESKTOP_CLI_RELATIVE_PATH = path3.join("Applications", "Claude Code URL Handler.app", "Contents", "MacOS", "claude");
|
|
637
1046
|
var CLAUDE_DESKTOP_CLI_SYSTEM_PATH = "/Applications/Claude Code URL Handler.app/Contents/MacOS/claude";
|
|
1047
|
+
var CLAUDE_DISALLOWED_TOOLS = [
|
|
1048
|
+
"EnterPlanMode",
|
|
1049
|
+
"ExitPlanMode",
|
|
1050
|
+
"CronCreate",
|
|
1051
|
+
"CronList",
|
|
1052
|
+
"CronDelete"
|
|
1053
|
+
].join(",");
|
|
638
1054
|
function resolveClaudeCommand(deps = {}) {
|
|
639
1055
|
const pathCommand = resolveCommandOnPath("claude", deps);
|
|
640
1056
|
if (pathCommand) return pathCommand;
|
|
@@ -657,36 +1073,11 @@ var ClaudeDriver = class {
|
|
|
657
1073
|
supportsStdinNotification = true;
|
|
658
1074
|
mcpToolPrefix = "mcp__chat__";
|
|
659
1075
|
busyDeliveryMode = "notification";
|
|
1076
|
+
supportsNativeStandingPrompt = true;
|
|
660
1077
|
probe() {
|
|
661
1078
|
return probeClaude();
|
|
662
1079
|
}
|
|
663
|
-
|
|
664
|
-
const mcpArgs = [
|
|
665
|
-
ctx.chatBridgePath,
|
|
666
|
-
"--agent-id",
|
|
667
|
-
ctx.agentId,
|
|
668
|
-
"--server-url",
|
|
669
|
-
ctx.config.serverUrl,
|
|
670
|
-
"--auth-token",
|
|
671
|
-
ctx.config.authToken || ctx.daemonApiKey
|
|
672
|
-
];
|
|
673
|
-
const isTsSource = ctx.chatBridgePath.endsWith(".ts");
|
|
674
|
-
const mcpConfig = JSON.stringify({
|
|
675
|
-
mcpServers: {
|
|
676
|
-
chat: {
|
|
677
|
-
command: isTsSource ? "npx" : "node",
|
|
678
|
-
args: isTsSource ? ["tsx", ...mcpArgs] : mcpArgs
|
|
679
|
-
}
|
|
680
|
-
}
|
|
681
|
-
});
|
|
682
|
-
let mcpConfigArg;
|
|
683
|
-
if (process.platform === "win32") {
|
|
684
|
-
const mcpConfigPath = path2.join(ctx.workingDirectory, ".slock-claude-mcp.json");
|
|
685
|
-
writeFileSync(mcpConfigPath, mcpConfig, "utf8");
|
|
686
|
-
mcpConfigArg = mcpConfigPath;
|
|
687
|
-
} else {
|
|
688
|
-
mcpConfigArg = mcpConfig;
|
|
689
|
-
}
|
|
1080
|
+
buildClaudeArgs(config, standingPrompt) {
|
|
690
1081
|
const args = [
|
|
691
1082
|
"--allow-dangerously-skip-permissions",
|
|
692
1083
|
"--dangerously-skip-permissions",
|
|
@@ -695,18 +1086,25 @@ var ClaudeDriver = class {
|
|
|
695
1086
|
"stream-json",
|
|
696
1087
|
"--input-format",
|
|
697
1088
|
"stream-json",
|
|
698
|
-
"--
|
|
699
|
-
|
|
1089
|
+
"--append-system-prompt",
|
|
1090
|
+
standingPrompt,
|
|
700
1091
|
"--model",
|
|
701
|
-
|
|
1092
|
+
config.model || "sonnet",
|
|
702
1093
|
"--disallowed-tools",
|
|
703
|
-
|
|
1094
|
+
CLAUDE_DISALLOWED_TOOLS
|
|
704
1095
|
];
|
|
705
|
-
if (
|
|
706
|
-
args.push("--resume",
|
|
1096
|
+
if (config.sessionId) {
|
|
1097
|
+
args.push("--resume", config.sessionId);
|
|
707
1098
|
}
|
|
708
|
-
|
|
1099
|
+
return args;
|
|
1100
|
+
}
|
|
1101
|
+
spawn(ctx) {
|
|
1102
|
+
const { tokenFile, spawnEnv } = prepareCliTransport(ctx);
|
|
1103
|
+
const args = this.buildClaudeArgs(ctx.config, ctx.standingPrompt);
|
|
709
1104
|
delete spawnEnv.CLAUDECODE;
|
|
1105
|
+
logger.info(
|
|
1106
|
+
`[Agent ${ctx.agentId}] transport=cli cli=${ctx.slockCliPath} token_file=${tokenFile}`
|
|
1107
|
+
);
|
|
710
1108
|
const proc = spawn(resolveClaudeCommand() ?? "claude", args, {
|
|
711
1109
|
cwd: ctx.workingDirectory,
|
|
712
1110
|
stdio: ["pipe", "pipe", "pipe"],
|
|
@@ -750,6 +1148,12 @@ var ClaudeDriver = class {
|
|
|
750
1148
|
if (event.subtype === "init" && event.session_id) {
|
|
751
1149
|
events.push({ kind: "session_init", sessionId: event.session_id });
|
|
752
1150
|
}
|
|
1151
|
+
if (event.subtype === "status" && event.status === "compacting") {
|
|
1152
|
+
events.push({ kind: "compaction_started" });
|
|
1153
|
+
}
|
|
1154
|
+
if (event.subtype === "compact_boundary") {
|
|
1155
|
+
events.push({ kind: "compaction_finished" });
|
|
1156
|
+
}
|
|
753
1157
|
break;
|
|
754
1158
|
case "assistant": {
|
|
755
1159
|
const content = event.message?.content;
|
|
@@ -807,11 +1211,9 @@ var ClaudeDriver = class {
|
|
|
807
1211
|
});
|
|
808
1212
|
}
|
|
809
1213
|
buildSystemPrompt(config, _agentId) {
|
|
810
|
-
return
|
|
1214
|
+
return buildCliTransportSystemPrompt(config, {
|
|
811
1215
|
toolPrefix: "mcp__chat__",
|
|
812
|
-
extraCriticalRules: [
|
|
813
|
-
"- Do NOT use bash/curl/sqlite to send or receive messages. The MCP tools handle everything."
|
|
814
|
-
],
|
|
1216
|
+
extraCriticalRules: [],
|
|
815
1217
|
postStartupNotes: [],
|
|
816
1218
|
includeStdinNotificationSection: true,
|
|
817
1219
|
messageNotificationStyle: "poll"
|
|
@@ -823,7 +1225,7 @@ var ClaudeDriver = class {
|
|
|
823
1225
|
import { spawn as spawn2, execSync } from "child_process";
|
|
824
1226
|
import { existsSync as existsSync2, readFileSync } from "fs";
|
|
825
1227
|
import os from "os";
|
|
826
|
-
import
|
|
1228
|
+
import path4 from "path";
|
|
827
1229
|
function getCodexNotificationErrorMessage(params) {
|
|
828
1230
|
const topLevelMessage = params?.message;
|
|
829
1231
|
if (typeof topLevelMessage === "string" && topLevelMessage.trim()) {
|
|
@@ -835,21 +1237,19 @@ function getCodexNotificationErrorMessage(params) {
|
|
|
835
1237
|
}
|
|
836
1238
|
return null;
|
|
837
1239
|
}
|
|
838
|
-
function
|
|
839
|
-
const
|
|
840
|
-
|
|
841
|
-
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
}
|
|
852
|
-
});
|
|
1240
|
+
function ensureGitRepoForCodex(workingDirectory, deps = {}) {
|
|
1241
|
+
const existsSyncFn = deps.existsSyncFn ?? existsSync2;
|
|
1242
|
+
const execSyncFn = deps.execSyncFn ?? execSync;
|
|
1243
|
+
const gitDir = path4.join(workingDirectory, ".git");
|
|
1244
|
+
if (existsSyncFn(gitDir)) return;
|
|
1245
|
+
execSyncFn("git init", { cwd: workingDirectory, stdio: "pipe" });
|
|
1246
|
+
execSyncFn(
|
|
1247
|
+
"git -c user.name=slock -c user.email=slock@local -c commit.gpgsign=false add -A && git -c user.name=slock -c user.email=slock@local -c commit.gpgsign=false commit --allow-empty -m 'init'",
|
|
1248
|
+
{
|
|
1249
|
+
cwd: workingDirectory,
|
|
1250
|
+
stdio: "pipe"
|
|
1251
|
+
}
|
|
1252
|
+
);
|
|
853
1253
|
}
|
|
854
1254
|
var CODEX_DESKTOP_BUNDLE_PATH = "/Applications/Codex.app/Contents/Resources/codex";
|
|
855
1255
|
function resolveCodexCommand(deps = {}) {
|
|
@@ -884,14 +1284,14 @@ function resolveCodexSpawn(commandArgs, deps = {}) {
|
|
|
884
1284
|
let codexEntry = null;
|
|
885
1285
|
try {
|
|
886
1286
|
const globalRoot = execSync("npm root -g", { encoding: "utf8", stdio: ["pipe", "pipe", "pipe"] }).trim();
|
|
887
|
-
const candidate =
|
|
1287
|
+
const candidate = path4.join(globalRoot, "@openai", "codex", "bin", "codex.js");
|
|
888
1288
|
if (existsSync2(candidate)) codexEntry = candidate;
|
|
889
1289
|
} catch {
|
|
890
1290
|
}
|
|
891
1291
|
if (!codexEntry) {
|
|
892
1292
|
try {
|
|
893
1293
|
const cmdPath = execSync("where codex", { encoding: "utf8", stdio: ["pipe", "pipe", "pipe"] }).trim().split(/\r?\n/)[0];
|
|
894
|
-
const candidate =
|
|
1294
|
+
const candidate = path4.join(path4.dirname(cmdPath), "node_modules", "@openai", "codex", "bin", "codex.js");
|
|
895
1295
|
if (existsSync2(candidate)) codexEntry = candidate;
|
|
896
1296
|
} catch {
|
|
897
1297
|
}
|
|
@@ -906,12 +1306,6 @@ function resolveCodexSpawn(commandArgs, deps = {}) {
|
|
|
906
1306
|
args: [codexEntry, ...commandArgs]
|
|
907
1307
|
};
|
|
908
1308
|
}
|
|
909
|
-
function buildBridgeArgs(ctx) {
|
|
910
|
-
const isTsSource = ctx.chatBridgePath.endsWith(".ts");
|
|
911
|
-
const command = isTsSource ? "npx" : "node";
|
|
912
|
-
const args = isTsSource ? ["tsx", ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey] : [ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey];
|
|
913
|
-
return { command, args };
|
|
914
|
-
}
|
|
915
1309
|
function joinReasoningText(item) {
|
|
916
1310
|
const summary = Array.isArray(item.summary) ? item.summary.filter((entry) => typeof entry === "string") : [];
|
|
917
1311
|
const content = Array.isArray(item.content) ? item.content.filter((entry) => typeof entry === "string") : [];
|
|
@@ -922,9 +1316,37 @@ var CodexDriver = class {
|
|
|
922
1316
|
supportsStdinNotification = true;
|
|
923
1317
|
mcpToolPrefix = "mcp_chat_";
|
|
924
1318
|
busyDeliveryMode = "direct";
|
|
1319
|
+
supportsNativeStandingPrompt = true;
|
|
925
1320
|
probe() {
|
|
926
1321
|
return probeCodex();
|
|
927
1322
|
}
|
|
1323
|
+
buildThreadRequest(ctx) {
|
|
1324
|
+
const threadParams = {
|
|
1325
|
+
cwd: ctx.workingDirectory,
|
|
1326
|
+
approvalPolicy: "never",
|
|
1327
|
+
sandbox: "danger-full-access",
|
|
1328
|
+
developerInstructions: ctx.standingPrompt
|
|
1329
|
+
};
|
|
1330
|
+
if (ctx.config.model) {
|
|
1331
|
+
threadParams.model = ctx.config.model;
|
|
1332
|
+
}
|
|
1333
|
+
if (ctx.config.reasoningEffort) {
|
|
1334
|
+
threadParams.config = { model_reasoning_effort: ctx.config.reasoningEffort };
|
|
1335
|
+
}
|
|
1336
|
+
if (ctx.config.sessionId) {
|
|
1337
|
+
return {
|
|
1338
|
+
method: "thread/resume",
|
|
1339
|
+
params: {
|
|
1340
|
+
threadId: ctx.config.sessionId,
|
|
1341
|
+
...threadParams
|
|
1342
|
+
}
|
|
1343
|
+
};
|
|
1344
|
+
}
|
|
1345
|
+
return {
|
|
1346
|
+
method: "thread/start",
|
|
1347
|
+
params: threadParams
|
|
1348
|
+
};
|
|
1349
|
+
}
|
|
928
1350
|
process = null;
|
|
929
1351
|
requestId = 0;
|
|
930
1352
|
threadId = null;
|
|
@@ -937,7 +1359,8 @@ var CodexDriver = class {
|
|
|
937
1359
|
streamedAgentMessageIds = /* @__PURE__ */ new Set();
|
|
938
1360
|
streamedReasoningIds = /* @__PURE__ */ new Set();
|
|
939
1361
|
spawn(ctx) {
|
|
940
|
-
|
|
1362
|
+
ensureGitRepoForCodex(ctx.workingDirectory);
|
|
1363
|
+
const { spawnEnv } = prepareCliTransport(ctx, { NO_COLOR: "1" });
|
|
941
1364
|
this.process = null;
|
|
942
1365
|
this.requestId = 0;
|
|
943
1366
|
this.threadId = ctx.config.sessionId || null;
|
|
@@ -949,26 +1372,8 @@ var CodexDriver = class {
|
|
|
949
1372
|
this.sessionAnnounced = false;
|
|
950
1373
|
this.streamedAgentMessageIds.clear();
|
|
951
1374
|
this.streamedReasoningIds.clear();
|
|
952
|
-
const
|
|
953
|
-
const args = [
|
|
954
|
-
"app-server",
|
|
955
|
-
"--listen",
|
|
956
|
-
"stdio://",
|
|
957
|
-
"-c",
|
|
958
|
-
`mcp_servers.chat.command=${JSON.stringify(bridge.command)}`,
|
|
959
|
-
"-c",
|
|
960
|
-
`mcp_servers.chat.args=${JSON.stringify(bridge.args)}`,
|
|
961
|
-
"-c",
|
|
962
|
-
"mcp_servers.chat.startup_timeout_sec=30",
|
|
963
|
-
"-c",
|
|
964
|
-
"mcp_servers.chat.tool_timeout_sec=300",
|
|
965
|
-
"-c",
|
|
966
|
-
"mcp_servers.chat.enabled=true",
|
|
967
|
-
"-c",
|
|
968
|
-
"mcp_servers.chat.required=true"
|
|
969
|
-
];
|
|
1375
|
+
const args = ["app-server", "--listen", "stdio://"];
|
|
970
1376
|
const { command, args: spawnArgs } = resolveCodexSpawn(args);
|
|
971
|
-
const spawnEnv = { ...process.env, FORCE_COLOR: "0", NO_COLOR: "1", ...ctx.config.envVars || {} };
|
|
972
1377
|
const proc = spawn2(command, spawnArgs, {
|
|
973
1378
|
cwd: ctx.workingDirectory,
|
|
974
1379
|
stdio: ["pipe", "pipe", "pipe"],
|
|
@@ -981,31 +1386,7 @@ var CodexDriver = class {
|
|
|
981
1386
|
clientInfo: { name: "slock-daemon", version: "1.0.0" },
|
|
982
1387
|
capabilities: { experimentalApi: true }
|
|
983
1388
|
});
|
|
984
|
-
|
|
985
|
-
cwd: ctx.workingDirectory,
|
|
986
|
-
approvalPolicy: "never",
|
|
987
|
-
sandbox: "danger-full-access"
|
|
988
|
-
};
|
|
989
|
-
if (ctx.config.model) {
|
|
990
|
-
threadParams.model = ctx.config.model;
|
|
991
|
-
}
|
|
992
|
-
if (ctx.config.reasoningEffort) {
|
|
993
|
-
threadParams.config = { model_reasoning_effort: ctx.config.reasoningEffort };
|
|
994
|
-
}
|
|
995
|
-
if (ctx.config.sessionId) {
|
|
996
|
-
this.pendingThreadRequest = {
|
|
997
|
-
method: "thread/resume",
|
|
998
|
-
params: {
|
|
999
|
-
threadId: ctx.config.sessionId,
|
|
1000
|
-
...threadParams
|
|
1001
|
-
}
|
|
1002
|
-
};
|
|
1003
|
-
} else {
|
|
1004
|
-
this.pendingThreadRequest = {
|
|
1005
|
-
method: "thread/start",
|
|
1006
|
-
params: threadParams
|
|
1007
|
-
};
|
|
1008
|
-
}
|
|
1389
|
+
this.pendingThreadRequest = this.buildThreadRequest(ctx);
|
|
1009
1390
|
});
|
|
1010
1391
|
return { process: proc };
|
|
1011
1392
|
}
|
|
@@ -1121,6 +1502,14 @@ var CodexDriver = class {
|
|
|
1121
1502
|
events.push({ kind: "tool_call", name: "shell", input: { command: item.command } });
|
|
1122
1503
|
}
|
|
1123
1504
|
break;
|
|
1505
|
+
case "contextCompaction":
|
|
1506
|
+
if (isStarted) {
|
|
1507
|
+
events.push({ kind: "compaction_started" });
|
|
1508
|
+
}
|
|
1509
|
+
if (isCompleted) {
|
|
1510
|
+
events.push({ kind: "compaction_finished" });
|
|
1511
|
+
}
|
|
1512
|
+
break;
|
|
1124
1513
|
case "fileChange":
|
|
1125
1514
|
if (isStarted && Array.isArray(item.changes)) {
|
|
1126
1515
|
for (const change of item.changes) {
|
|
@@ -1201,11 +1590,9 @@ var CodexDriver = class {
|
|
|
1201
1590
|
});
|
|
1202
1591
|
}
|
|
1203
1592
|
buildSystemPrompt(config, _agentId) {
|
|
1204
|
-
return
|
|
1593
|
+
return buildCliTransportSystemPrompt(config, {
|
|
1205
1594
|
toolPrefix: "",
|
|
1206
|
-
extraCriticalRules: [
|
|
1207
|
-
"- Do NOT use shell commands to send or receive messages. The MCP tools handle everything."
|
|
1208
|
-
],
|
|
1595
|
+
extraCriticalRules: [],
|
|
1209
1596
|
postStartupNotes: [
|
|
1210
1597
|
"**IMPORTANT**: Your process stays alive across turns. New messages may be delivered directly into the current thread while you are working."
|
|
1211
1598
|
],
|
|
@@ -1257,8 +1644,8 @@ var CodexDriver = class {
|
|
|
1257
1644
|
}
|
|
1258
1645
|
};
|
|
1259
1646
|
function detectCodexModels(home = os.homedir()) {
|
|
1260
|
-
const cachePath =
|
|
1261
|
-
const configPath =
|
|
1647
|
+
const cachePath = path4.join(home, ".codex", "models_cache.json");
|
|
1648
|
+
const configPath = path4.join(home, ".codex", "config.toml");
|
|
1262
1649
|
let models = [];
|
|
1263
1650
|
try {
|
|
1264
1651
|
const raw = readFileSync(cachePath, "utf8");
|
|
@@ -1288,7 +1675,7 @@ function detectCodexModels(home = os.homedir()) {
|
|
|
1288
1675
|
|
|
1289
1676
|
// src/drivers/copilot.ts
|
|
1290
1677
|
import { spawn as spawn3 } from "child_process";
|
|
1291
|
-
import
|
|
1678
|
+
import path5 from "path";
|
|
1292
1679
|
import { writeFileSync as writeFileSync2 } from "fs";
|
|
1293
1680
|
var CopilotDriver = class {
|
|
1294
1681
|
id = "copilot";
|
|
@@ -1303,7 +1690,7 @@ var CopilotDriver = class {
|
|
|
1303
1690
|
const isTsSource = ctx.chatBridgePath.endsWith(".ts");
|
|
1304
1691
|
const mcpCommand = isTsSource ? "npx" : "node";
|
|
1305
1692
|
const mcpArgs = isTsSource ? ["tsx", ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey] : [ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey];
|
|
1306
|
-
const mcpConfigPath =
|
|
1693
|
+
const mcpConfigPath = path5.join(ctx.workingDirectory, ".slock-copilot-mcp.json");
|
|
1307
1694
|
writeFileSync2(mcpConfigPath, JSON.stringify({
|
|
1308
1695
|
mcpServers: {
|
|
1309
1696
|
chat: {
|
|
@@ -1412,7 +1799,7 @@ var CopilotDriver = class {
|
|
|
1412
1799
|
return null;
|
|
1413
1800
|
}
|
|
1414
1801
|
buildSystemPrompt(config, _agentId) {
|
|
1415
|
-
return
|
|
1802
|
+
return buildMcpSystemPrompt(config, {
|
|
1416
1803
|
toolPrefix: "",
|
|
1417
1804
|
extraCriticalRules: [
|
|
1418
1805
|
"- Do NOT use shell commands to send or receive messages. The MCP tools handle everything."
|
|
@@ -1426,22 +1813,22 @@ var CopilotDriver = class {
|
|
|
1426
1813
|
|
|
1427
1814
|
// src/drivers/cursor.ts
|
|
1428
1815
|
import { spawn as spawn4 } from "child_process";
|
|
1429
|
-
import { writeFileSync as writeFileSync3, mkdirSync, existsSync as existsSync3 } from "fs";
|
|
1430
|
-
import
|
|
1816
|
+
import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync2, existsSync as existsSync3 } from "fs";
|
|
1817
|
+
import path6 from "path";
|
|
1431
1818
|
var CursorDriver = class {
|
|
1432
1819
|
id = "cursor";
|
|
1433
1820
|
supportsStdinNotification = false;
|
|
1434
1821
|
mcpToolPrefix = "mcp__chat__";
|
|
1435
1822
|
busyDeliveryMode = "none";
|
|
1436
1823
|
spawn(ctx) {
|
|
1437
|
-
const cursorDir =
|
|
1824
|
+
const cursorDir = path6.join(ctx.workingDirectory, ".cursor");
|
|
1438
1825
|
if (!existsSync3(cursorDir)) {
|
|
1439
|
-
|
|
1826
|
+
mkdirSync2(cursorDir, { recursive: true });
|
|
1440
1827
|
}
|
|
1441
1828
|
const isTsSource = ctx.chatBridgePath.endsWith(".ts");
|
|
1442
1829
|
const mcpCommand = isTsSource ? "npx" : "node";
|
|
1443
1830
|
const mcpArgs = isTsSource ? ["tsx", ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey] : [ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey];
|
|
1444
|
-
const mcpConfigPath =
|
|
1831
|
+
const mcpConfigPath = path6.join(cursorDir, "mcp.json");
|
|
1445
1832
|
writeFileSync3(mcpConfigPath, JSON.stringify({
|
|
1446
1833
|
mcpServers: {
|
|
1447
1834
|
chat: {
|
|
@@ -1466,7 +1853,7 @@ var CursorDriver = class {
|
|
|
1466
1853
|
}
|
|
1467
1854
|
args.push(ctx.prompt);
|
|
1468
1855
|
const spawnEnv = { ...process.env, FORCE_COLOR: "0", NO_COLOR: "1", ...ctx.config.envVars || {} };
|
|
1469
|
-
const proc = spawn4("agent", args, {
|
|
1856
|
+
const proc = spawn4("cursor-agent", args, {
|
|
1470
1857
|
cwd: ctx.workingDirectory,
|
|
1471
1858
|
stdio: ["pipe", "pipe", "pipe"],
|
|
1472
1859
|
env: spawnEnv,
|
|
@@ -1486,6 +1873,10 @@ var CursorDriver = class {
|
|
|
1486
1873
|
case "system":
|
|
1487
1874
|
if (event.subtype === "init" && event.session_id) {
|
|
1488
1875
|
events.push({ kind: "session_init", sessionId: event.session_id });
|
|
1876
|
+
} else if (event.subtype === "status" && event.status === "compacting") {
|
|
1877
|
+
events.push({ kind: "compaction_started" });
|
|
1878
|
+
} else if (event.subtype === "compact_boundary") {
|
|
1879
|
+
events.push({ kind: "compaction_finished" });
|
|
1489
1880
|
}
|
|
1490
1881
|
break;
|
|
1491
1882
|
case "assistant": {
|
|
@@ -1528,7 +1919,7 @@ var CursorDriver = class {
|
|
|
1528
1919
|
return null;
|
|
1529
1920
|
}
|
|
1530
1921
|
buildSystemPrompt(config, _agentId) {
|
|
1531
|
-
return
|
|
1922
|
+
return buildMcpSystemPrompt(config, {
|
|
1532
1923
|
toolPrefix: "mcp__chat__",
|
|
1533
1924
|
extraCriticalRules: [
|
|
1534
1925
|
"- Do NOT use bash/curl/sqlite to send or receive messages. The MCP tools handle everything."
|
|
@@ -1542,8 +1933,8 @@ var CursorDriver = class {
|
|
|
1542
1933
|
|
|
1543
1934
|
// src/drivers/gemini.ts
|
|
1544
1935
|
import { spawn as spawn5 } from "child_process";
|
|
1545
|
-
import { writeFileSync as writeFileSync4, mkdirSync as
|
|
1546
|
-
import
|
|
1936
|
+
import { writeFileSync as writeFileSync4, mkdirSync as mkdirSync3, existsSync as existsSync4 } from "fs";
|
|
1937
|
+
import path7 from "path";
|
|
1547
1938
|
var GeminiDriver = class {
|
|
1548
1939
|
id = "gemini";
|
|
1549
1940
|
supportsStdinNotification = false;
|
|
@@ -1554,14 +1945,14 @@ var GeminiDriver = class {
|
|
|
1554
1945
|
spawn(ctx) {
|
|
1555
1946
|
this.sessionId = ctx.config.sessionId || null;
|
|
1556
1947
|
this.sessionAnnounced = false;
|
|
1557
|
-
const geminiDir =
|
|
1948
|
+
const geminiDir = path7.join(ctx.workingDirectory, ".gemini");
|
|
1558
1949
|
if (!existsSync4(geminiDir)) {
|
|
1559
|
-
|
|
1950
|
+
mkdirSync3(geminiDir, { recursive: true });
|
|
1560
1951
|
}
|
|
1561
1952
|
const isTsSource = ctx.chatBridgePath.endsWith(".ts");
|
|
1562
1953
|
const mcpCommand = isTsSource ? "npx" : "node";
|
|
1563
1954
|
const mcpArgs = isTsSource ? ["tsx", ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey] : [ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey];
|
|
1564
|
-
const settingsPath =
|
|
1955
|
+
const settingsPath = path7.join(geminiDir, "settings.json");
|
|
1565
1956
|
writeFileSync4(settingsPath, JSON.stringify({
|
|
1566
1957
|
mcpServers: {
|
|
1567
1958
|
chat: {
|
|
@@ -1643,7 +2034,7 @@ var GeminiDriver = class {
|
|
|
1643
2034
|
return null;
|
|
1644
2035
|
}
|
|
1645
2036
|
buildSystemPrompt(config, _agentId) {
|
|
1646
|
-
return
|
|
2037
|
+
return buildMcpSystemPrompt(config, {
|
|
1647
2038
|
toolPrefix: "",
|
|
1648
2039
|
extraCriticalRules: [
|
|
1649
2040
|
"- Do NOT use shell commands to send or receive messages. The MCP tools handle everything."
|
|
@@ -1660,7 +2051,7 @@ import { randomUUID } from "crypto";
|
|
|
1660
2051
|
import { spawn as spawn6 } from "child_process";
|
|
1661
2052
|
import { existsSync as existsSync5, readFileSync as readFileSync2, writeFileSync as writeFileSync5 } from "fs";
|
|
1662
2053
|
import os2 from "os";
|
|
1663
|
-
import
|
|
2054
|
+
import path8 from "path";
|
|
1664
2055
|
var KIMI_WIRE_PROTOCOL_VERSION = "1.3";
|
|
1665
2056
|
var KIMI_SYSTEM_PROMPT_FILE = ".slock-kimi-system.md";
|
|
1666
2057
|
var KIMI_AGENT_FILE = ".slock-kimi-agent.yaml";
|
|
@@ -1689,9 +2080,9 @@ var KimiDriver = class {
|
|
|
1689
2080
|
const isTsSource = ctx.chatBridgePath.endsWith(".ts");
|
|
1690
2081
|
const command = isTsSource ? "npx" : "node";
|
|
1691
2082
|
const bridgeArgs = isTsSource ? ["tsx", ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey] : [ctx.chatBridgePath, "--agent-id", ctx.agentId, "--server-url", ctx.config.serverUrl, "--auth-token", ctx.config.authToken || ctx.daemonApiKey];
|
|
1692
|
-
const systemPromptPath =
|
|
1693
|
-
const agentFilePath =
|
|
1694
|
-
const mcpConfigPath =
|
|
2083
|
+
const systemPromptPath = path8.join(ctx.workingDirectory, KIMI_SYSTEM_PROMPT_FILE);
|
|
2084
|
+
const agentFilePath = path8.join(ctx.workingDirectory, KIMI_AGENT_FILE);
|
|
2085
|
+
const mcpConfigPath = path8.join(ctx.workingDirectory, KIMI_MCP_FILE);
|
|
1695
2086
|
if (!isResume || !existsSync5(systemPromptPath)) {
|
|
1696
2087
|
writeFileSync5(systemPromptPath, ctx.prompt, "utf8");
|
|
1697
2088
|
}
|
|
@@ -1772,6 +2163,12 @@ var KimiDriver = class {
|
|
|
1772
2163
|
case "StepBegin":
|
|
1773
2164
|
events.push({ kind: "thinking", text: "" });
|
|
1774
2165
|
break;
|
|
2166
|
+
case "CompactionBegin":
|
|
2167
|
+
events.push({ kind: "compaction_started" });
|
|
2168
|
+
break;
|
|
2169
|
+
case "CompactionEnd":
|
|
2170
|
+
events.push({ kind: "compaction_finished" });
|
|
2171
|
+
break;
|
|
1775
2172
|
case "ContentPart":
|
|
1776
2173
|
if (payload.type === "think" && payload.think) {
|
|
1777
2174
|
events.push({ kind: "thinking", text: payload.think });
|
|
@@ -1824,7 +2221,7 @@ var KimiDriver = class {
|
|
|
1824
2221
|
});
|
|
1825
2222
|
}
|
|
1826
2223
|
buildSystemPrompt(config, _agentId) {
|
|
1827
|
-
return
|
|
2224
|
+
return buildMcpSystemPrompt(config, {
|
|
1828
2225
|
toolPrefix: "",
|
|
1829
2226
|
extraCriticalRules: [
|
|
1830
2227
|
"- Do NOT use shell commands to send or receive messages. The MCP tools handle everything."
|
|
@@ -1839,7 +2236,7 @@ var KimiDriver = class {
|
|
|
1839
2236
|
}
|
|
1840
2237
|
};
|
|
1841
2238
|
function detectKimiModels(home = os2.homedir()) {
|
|
1842
|
-
const configPath =
|
|
2239
|
+
const configPath = path8.join(home, ".kimi", "config.toml");
|
|
1843
2240
|
let raw;
|
|
1844
2241
|
try {
|
|
1845
2242
|
raw = readFileSync2(configPath, "utf8");
|
|
@@ -1884,7 +2281,7 @@ function getDriver(runtimeId) {
|
|
|
1884
2281
|
|
|
1885
2282
|
// src/workspaces.ts
|
|
1886
2283
|
import { readdir, rm, stat } from "fs/promises";
|
|
1887
|
-
import
|
|
2284
|
+
import path9 from "path";
|
|
1888
2285
|
function isValidWorkspaceDirectoryName(directoryName) {
|
|
1889
2286
|
return !directoryName.includes("/") && !directoryName.includes("\\") && !directoryName.includes("..");
|
|
1890
2287
|
}
|
|
@@ -1892,7 +2289,7 @@ function resolveWorkspaceDirectoryPath(dataDir, directoryName) {
|
|
|
1892
2289
|
if (!isValidWorkspaceDirectoryName(directoryName)) {
|
|
1893
2290
|
return null;
|
|
1894
2291
|
}
|
|
1895
|
-
return
|
|
2292
|
+
return path9.join(dataDir, directoryName);
|
|
1896
2293
|
}
|
|
1897
2294
|
function emptyWorkspaceDirectorySummary(latestMtime = /* @__PURE__ */ new Date(0)) {
|
|
1898
2295
|
return {
|
|
@@ -1941,7 +2338,7 @@ async function summarizeWorkspaceDirectory(dirPath) {
|
|
|
1941
2338
|
return summary;
|
|
1942
2339
|
}
|
|
1943
2340
|
const childSummaries = await Promise.all(
|
|
1944
|
-
entries.map((entry) => summarizeWorkspaceEntry(
|
|
2341
|
+
entries.map((entry) => summarizeWorkspaceEntry(path9.join(dirPath, entry.name), entry))
|
|
1945
2342
|
);
|
|
1946
2343
|
for (const childSummary of childSummaries) {
|
|
1947
2344
|
summary = mergeWorkspaceDirectorySummaries(summary, childSummary);
|
|
@@ -1960,7 +2357,7 @@ async function scanWorkspaceDirectories(dataDir) {
|
|
|
1960
2357
|
if (!entry.isDirectory()) {
|
|
1961
2358
|
return null;
|
|
1962
2359
|
}
|
|
1963
|
-
const dirPath =
|
|
2360
|
+
const dirPath = path9.join(dataDir, entry.name);
|
|
1964
2361
|
try {
|
|
1965
2362
|
const summary = await summarizeWorkspaceDirectory(dirPath);
|
|
1966
2363
|
return {
|
|
@@ -1992,7 +2389,7 @@ async function deleteWorkspaceDirectory(dataDir, directoryName) {
|
|
|
1992
2389
|
}
|
|
1993
2390
|
|
|
1994
2391
|
// src/agentProcessManager.ts
|
|
1995
|
-
var DATA_DIR =
|
|
2392
|
+
var DATA_DIR = path10.join(os3.homedir(), ".slock", "agents");
|
|
1996
2393
|
function toLocalTime(iso) {
|
|
1997
2394
|
const d = new Date(iso);
|
|
1998
2395
|
if (isNaN(d.getTime())) return iso;
|
|
@@ -2146,6 +2543,7 @@ function getBusyDeliveryNote(driver) {
|
|
|
2146
2543
|
}
|
|
2147
2544
|
return "\n\nNote: While you are busy, you may receive [System notification: ...] messages. Finish your current step, then call check_messages to check for messages.";
|
|
2148
2545
|
}
|
|
2546
|
+
var NATIVE_STANDING_PROMPT_STARTUP_INPUT = "Your system prompt contains your standing instructions. Follow it now and begin listening for messages.";
|
|
2149
2547
|
var AgentProcessManager = class _AgentProcessManager {
|
|
2150
2548
|
agents = /* @__PURE__ */ new Map();
|
|
2151
2549
|
agentsStarting = /* @__PURE__ */ new Set();
|
|
@@ -2154,15 +2552,19 @@ var AgentProcessManager = class _AgentProcessManager {
|
|
|
2154
2552
|
/** Cached configs for agents whose process exited normally — enables auto-restart on next message */
|
|
2155
2553
|
idleAgentConfigs = /* @__PURE__ */ new Map();
|
|
2156
2554
|
chatBridgePath;
|
|
2555
|
+
slockCliPath;
|
|
2157
2556
|
sendToServer;
|
|
2158
2557
|
daemonApiKey;
|
|
2558
|
+
serverUrl;
|
|
2159
2559
|
dataDir;
|
|
2160
2560
|
driverResolver;
|
|
2161
2561
|
defaultAgentEnvVarsProvider;
|
|
2162
|
-
constructor(chatBridgePath, sendToServer, daemonApiKey, opts
|
|
2562
|
+
constructor(chatBridgePath, sendToServer, daemonApiKey, opts) {
|
|
2163
2563
|
this.chatBridgePath = chatBridgePath;
|
|
2564
|
+
this.slockCliPath = opts.slockCliPath ?? "";
|
|
2164
2565
|
this.sendToServer = sendToServer;
|
|
2165
2566
|
this.daemonApiKey = daemonApiKey;
|
|
2567
|
+
this.serverUrl = opts.serverUrl;
|
|
2166
2568
|
this.dataDir = opts.dataDir || DATA_DIR;
|
|
2167
2569
|
this.driverResolver = opts.driverResolver || getDriver;
|
|
2168
2570
|
this.defaultAgentEnvVarsProvider = opts.defaultAgentEnvVarsProvider || null;
|
|
@@ -2179,9 +2581,9 @@ var AgentProcessManager = class _AgentProcessManager {
|
|
|
2179
2581
|
this.agentsStarting.add(agentId);
|
|
2180
2582
|
try {
|
|
2181
2583
|
const driver = this.driverResolver(config.runtime || "claude");
|
|
2182
|
-
const agentDataDir =
|
|
2584
|
+
const agentDataDir = path10.join(this.dataDir, agentId);
|
|
2183
2585
|
await mkdir(agentDataDir, { recursive: true });
|
|
2184
|
-
const memoryMdPath =
|
|
2586
|
+
const memoryMdPath = path10.join(agentDataDir, "MEMORY.md");
|
|
2185
2587
|
try {
|
|
2186
2588
|
await access(memoryMdPath);
|
|
2187
2589
|
} catch {
|
|
@@ -2199,8 +2601,9 @@ ${config.description || "No role defined yet."}
|
|
|
2199
2601
|
`;
|
|
2200
2602
|
await writeFile(memoryMdPath, initialMemoryMd);
|
|
2201
2603
|
}
|
|
2202
|
-
await mkdir(
|
|
2604
|
+
await mkdir(path10.join(agentDataDir, "notes"), { recursive: true });
|
|
2203
2605
|
const isResume = !!config.sessionId;
|
|
2606
|
+
const standingPrompt = driver.buildSystemPrompt(config, agentId);
|
|
2204
2607
|
let prompt;
|
|
2205
2608
|
if (isResume && resumePrompt) {
|
|
2206
2609
|
prompt = resumePrompt;
|
|
@@ -2244,15 +2647,17 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2244
2647
|
prompt = `No new messages while you were away. Nothing to do \u2014 just stop. ${getMessageDeliveryText(driver)}`;
|
|
2245
2648
|
prompt += getBusyDeliveryNote(driver);
|
|
2246
2649
|
} else {
|
|
2247
|
-
prompt = driver.
|
|
2650
|
+
prompt = driver.supportsNativeStandingPrompt ? NATIVE_STANDING_PROMPT_STARTUP_INPUT : standingPrompt;
|
|
2248
2651
|
}
|
|
2249
2652
|
const effectiveConfig = await this.buildSpawnConfig(agentId, config);
|
|
2250
2653
|
const { process: proc } = driver.spawn({
|
|
2251
2654
|
agentId,
|
|
2252
2655
|
config: effectiveConfig,
|
|
2656
|
+
standingPrompt,
|
|
2253
2657
|
prompt,
|
|
2254
2658
|
workingDirectory: agentDataDir,
|
|
2255
2659
|
chatBridgePath: this.chatBridgePath,
|
|
2660
|
+
slockCliPath: this.slockCliPath,
|
|
2256
2661
|
daemonApiKey: this.daemonApiKey
|
|
2257
2662
|
});
|
|
2258
2663
|
const agentProcess = {
|
|
@@ -2417,27 +2822,28 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2417
2822
|
}
|
|
2418
2823
|
}
|
|
2419
2824
|
async buildSpawnConfig(agentId, config) {
|
|
2825
|
+
const baseConfig = config.serverUrl === this.serverUrl ? config : { ...config, serverUrl: this.serverUrl };
|
|
2420
2826
|
if (!this.defaultAgentEnvVarsProvider) {
|
|
2421
|
-
return
|
|
2827
|
+
return baseConfig;
|
|
2422
2828
|
}
|
|
2423
2829
|
try {
|
|
2424
2830
|
const defaultEnvVars = await this.defaultAgentEnvVarsProvider({
|
|
2425
|
-
runtime:
|
|
2426
|
-
model:
|
|
2427
|
-
envVars:
|
|
2831
|
+
runtime: baseConfig.runtime,
|
|
2832
|
+
model: baseConfig.model,
|
|
2833
|
+
envVars: baseConfig.envVars
|
|
2428
2834
|
});
|
|
2429
2835
|
if (!defaultEnvVars || Object.keys(defaultEnvVars).length === 0) {
|
|
2430
|
-
return
|
|
2836
|
+
return baseConfig;
|
|
2431
2837
|
}
|
|
2432
2838
|
const mergedEnvVars = {
|
|
2433
2839
|
...defaultEnvVars,
|
|
2434
|
-
...
|
|
2840
|
+
...baseConfig.envVars ?? {}
|
|
2435
2841
|
};
|
|
2436
|
-
if (this.sameEnvVars(mergedEnvVars,
|
|
2437
|
-
return
|
|
2842
|
+
if (this.sameEnvVars(mergedEnvVars, baseConfig.envVars)) {
|
|
2843
|
+
return baseConfig;
|
|
2438
2844
|
}
|
|
2439
2845
|
return {
|
|
2440
|
-
...
|
|
2846
|
+
...baseConfig,
|
|
2441
2847
|
envVars: mergedEnvVars
|
|
2442
2848
|
};
|
|
2443
2849
|
} catch (error) {
|
|
@@ -2445,7 +2851,7 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2445
2851
|
logger.warn(
|
|
2446
2852
|
`[Agent ${agentId}] Failed to resolve default runtime env vars \u2014 continuing without machine-level defaults (${reason})`
|
|
2447
2853
|
);
|
|
2448
|
-
return
|
|
2854
|
+
return baseConfig;
|
|
2449
2855
|
}
|
|
2450
2856
|
}
|
|
2451
2857
|
sameEnvVars(left, right) {
|
|
@@ -2539,7 +2945,7 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2539
2945
|
}
|
|
2540
2946
|
}
|
|
2541
2947
|
async resetWorkspace(agentId) {
|
|
2542
|
-
const agentDataDir =
|
|
2948
|
+
const agentDataDir = path10.join(this.dataDir, agentId);
|
|
2543
2949
|
try {
|
|
2544
2950
|
await rm2(agentDataDir, { recursive: true, force: true });
|
|
2545
2951
|
logger.info(`[Agent ${agentId}] Workspace reset complete (${agentDataDir})`);
|
|
@@ -2577,7 +2983,7 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2577
2983
|
}
|
|
2578
2984
|
// Workspace file browsing
|
|
2579
2985
|
async getFileTree(agentId, dirPath) {
|
|
2580
|
-
const agentDir =
|
|
2986
|
+
const agentDir = path10.join(this.dataDir, agentId);
|
|
2581
2987
|
try {
|
|
2582
2988
|
await stat2(agentDir);
|
|
2583
2989
|
} catch {
|
|
@@ -2585,8 +2991,8 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2585
2991
|
}
|
|
2586
2992
|
let targetDir = agentDir;
|
|
2587
2993
|
if (dirPath) {
|
|
2588
|
-
const resolved =
|
|
2589
|
-
if (!resolved.startsWith(agentDir +
|
|
2994
|
+
const resolved = path10.resolve(agentDir, dirPath);
|
|
2995
|
+
if (!resolved.startsWith(agentDir + path10.sep) && resolved !== agentDir) {
|
|
2590
2996
|
return [];
|
|
2591
2997
|
}
|
|
2592
2998
|
targetDir = resolved;
|
|
@@ -2594,9 +3000,9 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2594
3000
|
return this.listDirectoryChildren(targetDir, agentDir);
|
|
2595
3001
|
}
|
|
2596
3002
|
async readFile(agentId, filePath) {
|
|
2597
|
-
const agentDir =
|
|
2598
|
-
const resolved =
|
|
2599
|
-
if (!resolved.startsWith(agentDir +
|
|
3003
|
+
const agentDir = path10.join(this.dataDir, agentId);
|
|
3004
|
+
const resolved = path10.resolve(agentDir, filePath);
|
|
3005
|
+
if (!resolved.startsWith(agentDir + path10.sep) && resolved !== agentDir) {
|
|
2600
3006
|
throw new Error("Access denied");
|
|
2601
3007
|
}
|
|
2602
3008
|
const info = await stat2(resolved);
|
|
@@ -2620,7 +3026,7 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2620
3026
|
".sh",
|
|
2621
3027
|
".py"
|
|
2622
3028
|
]);
|
|
2623
|
-
const ext =
|
|
3029
|
+
const ext = path10.extname(resolved).toLowerCase();
|
|
2624
3030
|
if (!TEXT_EXTENSIONS.has(ext) && ext !== "") {
|
|
2625
3031
|
return { content: null, binary: true };
|
|
2626
3032
|
}
|
|
@@ -2647,13 +3053,13 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2647
3053
|
const agent = this.agents.get(agentId);
|
|
2648
3054
|
const runtime = runtimeHint || agent?.config.runtime || "claude";
|
|
2649
3055
|
const home = os3.homedir();
|
|
2650
|
-
const workspaceDir =
|
|
3056
|
+
const workspaceDir = path10.join(this.dataDir, agentId);
|
|
2651
3057
|
const paths = _AgentProcessManager.SKILL_PATHS[runtime] || _AgentProcessManager.SKILL_PATHS.claude;
|
|
2652
3058
|
const globalResults = await Promise.all(
|
|
2653
|
-
paths.global.map((p) => this.scanSkillsDir(
|
|
3059
|
+
paths.global.map((p) => this.scanSkillsDir(path10.join(home, p)))
|
|
2654
3060
|
);
|
|
2655
3061
|
const workspaceResults = await Promise.all(
|
|
2656
|
-
paths.workspace.map((p) => this.scanSkillsDir(
|
|
3062
|
+
paths.workspace.map((p) => this.scanSkillsDir(path10.join(workspaceDir, p)))
|
|
2657
3063
|
);
|
|
2658
3064
|
const dedup = (skills) => {
|
|
2659
3065
|
const seen = /* @__PURE__ */ new Set();
|
|
@@ -2682,7 +3088,7 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2682
3088
|
const skills = [];
|
|
2683
3089
|
for (const entry of entries) {
|
|
2684
3090
|
if (entry.isDirectory() || entry.isSymbolicLink()) {
|
|
2685
|
-
const skillMd =
|
|
3091
|
+
const skillMd = path10.join(dir, entry.name, "SKILL.md");
|
|
2686
3092
|
try {
|
|
2687
3093
|
const content = await readFile(skillMd, "utf-8");
|
|
2688
3094
|
const skill = this.parseSkillMd(entry.name, content);
|
|
@@ -2693,7 +3099,7 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2693
3099
|
} else if (entry.name.endsWith(".md")) {
|
|
2694
3100
|
const cmdName = entry.name.replace(/\.md$/, "");
|
|
2695
3101
|
try {
|
|
2696
|
-
const content = await readFile(
|
|
3102
|
+
const content = await readFile(path10.join(dir, entry.name), "utf-8");
|
|
2697
3103
|
const skill = this.parseSkillMd(cmdName, content);
|
|
2698
3104
|
skill.sourcePath = dir;
|
|
2699
3105
|
skills.push(skill);
|
|
@@ -2825,13 +3231,27 @@ Use read_history to catch up on the channels listed above, then stop. Read each
|
|
|
2825
3231
|
}
|
|
2826
3232
|
case "tool_call": {
|
|
2827
3233
|
this.flushPendingTrajectory(agentId);
|
|
2828
|
-
const
|
|
2829
|
-
const inputSummary = summarizeToolInput(toolName,
|
|
2830
|
-
const detail = getToolActivityLabel(toolName);
|
|
2831
|
-
this.broadcastActivity(agentId, "working", detail, [{
|
|
3234
|
+
const invocation = normalizeToolDisplayInvocation(event.name, event.input);
|
|
3235
|
+
const inputSummary = summarizeToolInput(invocation.toolName, invocation.input);
|
|
3236
|
+
const detail = getToolActivityLabel(invocation.toolName);
|
|
3237
|
+
this.broadcastActivity(agentId, "working", detail, [{
|
|
3238
|
+
kind: "tool_start",
|
|
3239
|
+
toolName: invocation.toolName,
|
|
3240
|
+
toolInput: inputSummary
|
|
3241
|
+
}]);
|
|
2832
3242
|
if (ap) ap.isIdle = false;
|
|
2833
3243
|
break;
|
|
2834
3244
|
}
|
|
3245
|
+
case "compaction_started":
|
|
3246
|
+
this.flushPendingTrajectory(agentId);
|
|
3247
|
+
this.broadcastActivity(agentId, "working", "Compacting context", [{ kind: "compaction_started" }]);
|
|
3248
|
+
if (ap) ap.isIdle = false;
|
|
3249
|
+
break;
|
|
3250
|
+
case "compaction_finished":
|
|
3251
|
+
this.flushPendingTrajectory(agentId);
|
|
3252
|
+
this.broadcastActivity(agentId, "working", "Context compaction finished", [{ kind: "compaction_finished" }]);
|
|
3253
|
+
if (ap) ap.isIdle = false;
|
|
3254
|
+
break;
|
|
2835
3255
|
case "turn_end":
|
|
2836
3256
|
this.flushPendingTrajectory(agentId);
|
|
2837
3257
|
if (ap) {
|
|
@@ -2938,8 +3358,8 @@ Respond as appropriate. Complete all your work before stopping.`;
|
|
|
2938
3358
|
const nodes = [];
|
|
2939
3359
|
for (const entry of entries) {
|
|
2940
3360
|
if (entry.name.startsWith(".") || entry.name === "node_modules") continue;
|
|
2941
|
-
const fullPath =
|
|
2942
|
-
const relativePath =
|
|
3361
|
+
const fullPath = path10.join(dir, entry.name);
|
|
3362
|
+
const relativePath = path10.relative(rootDir, fullPath);
|
|
2943
3363
|
let info;
|
|
2944
3364
|
try {
|
|
2945
3365
|
info = await stat2(fullPath);
|
|
@@ -3089,6 +3509,85 @@ var DaemonConnection = class {
|
|
|
3089
3509
|
}
|
|
3090
3510
|
};
|
|
3091
3511
|
|
|
3512
|
+
// src/reminderCache.ts
|
|
3513
|
+
var DEFAULT_MAX_DELAY_MS = 24 * 60 * 60 * 1e3;
|
|
3514
|
+
var ReminderCache = class {
|
|
3515
|
+
entries = /* @__PURE__ */ new Map();
|
|
3516
|
+
clock;
|
|
3517
|
+
onFire;
|
|
3518
|
+
maxDelayMs;
|
|
3519
|
+
constructor(opts) {
|
|
3520
|
+
this.clock = opts.clock ?? systemClock;
|
|
3521
|
+
this.onFire = opts.onFire;
|
|
3522
|
+
this.maxDelayMs = opts.maxDelayMs ?? DEFAULT_MAX_DELAY_MS;
|
|
3523
|
+
}
|
|
3524
|
+
upsert(job) {
|
|
3525
|
+
const existing = this.entries.get(job.reminderId);
|
|
3526
|
+
if (existing && existing.job.version >= job.version) {
|
|
3527
|
+
logger.info(`[ReminderCache] Stale upsert for ${job.reminderId} (incoming v${job.version} <= cached v${existing.job.version}) \u2014 ignored`);
|
|
3528
|
+
return;
|
|
3529
|
+
}
|
|
3530
|
+
if (existing?.timer) this.clock.clearTimeout(existing.timer);
|
|
3531
|
+
const timer = this.scheduleTimer(job);
|
|
3532
|
+
this.entries.set(job.reminderId, { job, timer });
|
|
3533
|
+
}
|
|
3534
|
+
cancel(reminderId, version) {
|
|
3535
|
+
const existing = this.entries.get(reminderId);
|
|
3536
|
+
if (!existing) return;
|
|
3537
|
+
if (existing.job.version > version) {
|
|
3538
|
+
logger.info(`[ReminderCache] Stale cancel for ${reminderId} (incoming v${version} < cached v${existing.job.version}) \u2014 ignored`);
|
|
3539
|
+
return;
|
|
3540
|
+
}
|
|
3541
|
+
if (existing.timer) this.clock.clearTimeout(existing.timer);
|
|
3542
|
+
this.entries.delete(reminderId);
|
|
3543
|
+
}
|
|
3544
|
+
snapshot(agentId, jobs) {
|
|
3545
|
+
for (const [reminderId, entry] of this.entries) {
|
|
3546
|
+
if (entry.job.ownerAgentId !== agentId) continue;
|
|
3547
|
+
if (entry.timer) this.clock.clearTimeout(entry.timer);
|
|
3548
|
+
this.entries.delete(reminderId);
|
|
3549
|
+
}
|
|
3550
|
+
for (const job of jobs) {
|
|
3551
|
+
if (job.ownerAgentId !== agentId) {
|
|
3552
|
+
logger.warn(
|
|
3553
|
+
`[ReminderCache] snapshot for agent ${agentId} carried job ${job.reminderId} owned by ${job.ownerAgentId} \u2014 skipping`
|
|
3554
|
+
);
|
|
3555
|
+
continue;
|
|
3556
|
+
}
|
|
3557
|
+
const timer = this.scheduleTimer(job);
|
|
3558
|
+
this.entries.set(job.reminderId, { job, timer });
|
|
3559
|
+
}
|
|
3560
|
+
}
|
|
3561
|
+
clear() {
|
|
3562
|
+
for (const entry of this.entries.values()) {
|
|
3563
|
+
if (entry.timer) this.clock.clearTimeout(entry.timer);
|
|
3564
|
+
}
|
|
3565
|
+
this.entries.clear();
|
|
3566
|
+
}
|
|
3567
|
+
size() {
|
|
3568
|
+
return this.entries.size;
|
|
3569
|
+
}
|
|
3570
|
+
getJob(reminderId) {
|
|
3571
|
+
return this.entries.get(reminderId)?.job ?? null;
|
|
3572
|
+
}
|
|
3573
|
+
scheduleTimer(job) {
|
|
3574
|
+
const fireAt = Date.parse(job.fireAt);
|
|
3575
|
+
if (Number.isNaN(fireAt)) {
|
|
3576
|
+
logger.warn(`[ReminderCache] Invalid fireAt for ${job.reminderId}: ${job.fireAt}`);
|
|
3577
|
+
return null;
|
|
3578
|
+
}
|
|
3579
|
+
const delay = Math.max(0, Math.min(this.maxDelayMs, fireAt - this.clock.now()));
|
|
3580
|
+
return this.clock.setTimeout(() => {
|
|
3581
|
+
this.entries.delete(job.reminderId);
|
|
3582
|
+
try {
|
|
3583
|
+
this.onFire(job);
|
|
3584
|
+
} catch (err) {
|
|
3585
|
+
logger.error(`[ReminderCache] onFire threw for ${job.reminderId}`, err);
|
|
3586
|
+
}
|
|
3587
|
+
}, delay);
|
|
3588
|
+
}
|
|
3589
|
+
};
|
|
3590
|
+
|
|
3092
3591
|
// src/core.ts
|
|
3093
3592
|
var DAEMON_CLI_USAGE = "Usage: slock-daemon --server-url <url> --api-key <key>";
|
|
3094
3593
|
function parseDaemonCliArgs(args) {
|
|
@@ -3110,19 +3609,30 @@ function readDaemonVersion(moduleUrl = import.meta.url) {
|
|
|
3110
3609
|
}
|
|
3111
3610
|
}
|
|
3112
3611
|
function resolveChatBridgePath(moduleUrl = import.meta.url) {
|
|
3113
|
-
const dirname =
|
|
3114
|
-
const jsPath =
|
|
3612
|
+
const dirname = path11.dirname(fileURLToPath(moduleUrl));
|
|
3613
|
+
const jsPath = path11.resolve(dirname, "chat-bridge.js");
|
|
3115
3614
|
try {
|
|
3116
3615
|
accessSync(jsPath);
|
|
3117
3616
|
return jsPath;
|
|
3118
3617
|
} catch {
|
|
3119
|
-
return
|
|
3618
|
+
return path11.resolve(dirname, "chat-bridge.ts");
|
|
3619
|
+
}
|
|
3620
|
+
}
|
|
3621
|
+
function resolveSlockCliPath(moduleUrl = import.meta.url) {
|
|
3622
|
+
const thisDir = path11.dirname(fileURLToPath(moduleUrl));
|
|
3623
|
+
const bundledDistPath = path11.resolve(thisDir, "cli", "index.js");
|
|
3624
|
+
try {
|
|
3625
|
+
accessSync(bundledDistPath);
|
|
3626
|
+
return bundledDistPath;
|
|
3627
|
+
} catch {
|
|
3628
|
+
const workspaceDistPath = path11.resolve(thisDir, "..", "..", "cli", "dist", "index.js");
|
|
3629
|
+
accessSync(workspaceDistPath);
|
|
3630
|
+
return workspaceDistPath;
|
|
3120
3631
|
}
|
|
3121
3632
|
}
|
|
3122
3633
|
function detectRuntimes() {
|
|
3123
3634
|
const ids = [];
|
|
3124
3635
|
const versions = {};
|
|
3125
|
-
const cmd = process.platform === "win32" ? "where" : "which";
|
|
3126
3636
|
for (const runtime of RUNTIMES) {
|
|
3127
3637
|
try {
|
|
3128
3638
|
const probe = getDriver(runtime.id).probe?.();
|
|
@@ -3133,15 +3643,16 @@ function detectRuntimes() {
|
|
|
3133
3643
|
}
|
|
3134
3644
|
} catch {
|
|
3135
3645
|
}
|
|
3136
|
-
|
|
3137
|
-
|
|
3646
|
+
const detectionBinaries = [runtime.binary];
|
|
3647
|
+
for (const binary of detectionBinaries) {
|
|
3648
|
+
const resolved = resolveCommandOnPath(binary);
|
|
3649
|
+
if (!resolved) continue;
|
|
3138
3650
|
ids.push(runtime.id);
|
|
3139
|
-
|
|
3140
|
-
|
|
3651
|
+
const version = readCommandVersion(binary);
|
|
3652
|
+
if (version) {
|
|
3141
3653
|
versions[runtime.id] = version;
|
|
3142
|
-
} catch {
|
|
3143
3654
|
}
|
|
3144
|
-
|
|
3655
|
+
break;
|
|
3145
3656
|
}
|
|
3146
3657
|
}
|
|
3147
3658
|
return { ids, versions };
|
|
@@ -3169,6 +3680,12 @@ function summarizeIncomingMessage(msg) {
|
|
|
3169
3680
|
return `(directory=${msg.directoryName})`;
|
|
3170
3681
|
case "machine:runtime_models:detect":
|
|
3171
3682
|
return `(runtime=${msg.runtime}, req=${msg.requestId})`;
|
|
3683
|
+
case "reminder.upsert":
|
|
3684
|
+
return `(agent=${msg.agentId}, id=${msg.reminder.reminderId}, v${msg.reminder.version}, fireAt=${msg.reminder.fireAt})`;
|
|
3685
|
+
case "reminder.cancel":
|
|
3686
|
+
return `(agent=${msg.agentId}, id=${msg.reminderId}, v${msg.version})`;
|
|
3687
|
+
case "reminder.snapshot":
|
|
3688
|
+
return `(agent=${msg.agentId}, count=${msg.reminders.length})`;
|
|
3172
3689
|
default:
|
|
3173
3690
|
return "";
|
|
3174
3691
|
}
|
|
@@ -3177,18 +3694,27 @@ var DaemonCore = class {
|
|
|
3177
3694
|
options;
|
|
3178
3695
|
daemonVersion;
|
|
3179
3696
|
chatBridgePath;
|
|
3697
|
+
slockCliPath;
|
|
3180
3698
|
runtimeDetector;
|
|
3181
3699
|
agentManager;
|
|
3182
3700
|
connection;
|
|
3701
|
+
reminderCache;
|
|
3183
3702
|
constructor(options) {
|
|
3184
3703
|
this.options = options;
|
|
3185
3704
|
this.daemonVersion = options.daemonVersion ?? readDaemonVersion();
|
|
3186
3705
|
this.chatBridgePath = options.chatBridgePath ?? resolveChatBridgePath();
|
|
3706
|
+
this.slockCliPath = options.slockCliPath ?? resolveSlockCliPath();
|
|
3187
3707
|
this.runtimeDetector = options.runtimeDetector ?? detectRuntimes;
|
|
3708
|
+
this.reminderCache = new ReminderCache({
|
|
3709
|
+
clock: options.reminderClock,
|
|
3710
|
+
onFire: (job) => this.onReminderFire(job)
|
|
3711
|
+
});
|
|
3188
3712
|
let connection;
|
|
3189
3713
|
const agentManagerOptions = {
|
|
3190
3714
|
dataDir: options.dataDir,
|
|
3191
|
-
|
|
3715
|
+
serverUrl: options.serverUrl,
|
|
3716
|
+
defaultAgentEnvVarsProvider: options.defaultAgentEnvVarsProvider,
|
|
3717
|
+
slockCliPath: this.slockCliPath
|
|
3192
3718
|
};
|
|
3193
3719
|
this.agentManager = options.agentManagerFactory ? options.agentManagerFactory(this.chatBridgePath, (msg) => connection.send(msg), options.apiKey, agentManagerOptions) : new AgentProcessManager(this.chatBridgePath, (msg) => connection.send(msg), options.apiKey, agentManagerOptions);
|
|
3194
3720
|
const connectionFactory = options.connectionFactory ?? ((connOptions) => new DaemonConnection(connOptions));
|
|
@@ -3208,6 +3734,7 @@ var DaemonCore = class {
|
|
|
3208
3734
|
}
|
|
3209
3735
|
async stop() {
|
|
3210
3736
|
logger.info("[Slock Daemon] Shutting down...");
|
|
3737
|
+
this.reminderCache.clear();
|
|
3211
3738
|
await this.agentManager.stopAll();
|
|
3212
3739
|
this.connection.disconnect();
|
|
3213
3740
|
}
|
|
@@ -3302,11 +3829,31 @@ var DaemonCore = class {
|
|
|
3302
3829
|
});
|
|
3303
3830
|
break;
|
|
3304
3831
|
}
|
|
3832
|
+
case "reminder.upsert":
|
|
3833
|
+
this.reminderCache.upsert(msg.reminder);
|
|
3834
|
+
break;
|
|
3835
|
+
case "reminder.cancel":
|
|
3836
|
+
this.reminderCache.cancel(msg.reminderId, msg.version);
|
|
3837
|
+
break;
|
|
3838
|
+
case "reminder.snapshot":
|
|
3839
|
+
logger.info(`[Daemon] Reminder snapshot for agent ${msg.agentId}: ${msg.reminders.length} entries`);
|
|
3840
|
+
this.reminderCache.snapshot(msg.agentId, msg.reminders);
|
|
3841
|
+
break;
|
|
3305
3842
|
case "ping":
|
|
3306
3843
|
this.connection.send({ type: "pong" });
|
|
3307
3844
|
break;
|
|
3308
3845
|
}
|
|
3309
3846
|
}
|
|
3847
|
+
onReminderFire(job) {
|
|
3848
|
+
logger.info(`[Daemon] Reminder ${job.reminderId} fired locally (agent=${job.ownerAgentId})`);
|
|
3849
|
+
this.connection.send({
|
|
3850
|
+
type: "reminder.fire_attempt",
|
|
3851
|
+
agentId: job.ownerAgentId,
|
|
3852
|
+
reminderId: job.reminderId,
|
|
3853
|
+
version: job.version,
|
|
3854
|
+
firedAtClient: (/* @__PURE__ */ new Date()).toISOString()
|
|
3855
|
+
});
|
|
3856
|
+
}
|
|
3310
3857
|
handleConnect() {
|
|
3311
3858
|
const { ids: runtimes, versions: runtimeVersions } = this.runtimeDetector();
|
|
3312
3859
|
const runtimeInfo = runtimes.map((id) => runtimeVersions[id] ? `${id} (${runtimeVersions[id]})` : id);
|
|
@@ -3330,6 +3877,13 @@ var DaemonCore = class {
|
|
|
3330
3877
|
for (const { agentId, sessionId, launchId } of this.agentManager.getIdleAgentSessionIds()) {
|
|
3331
3878
|
this.connection.send({ type: "agent:session", agentId, sessionId, launchId: launchId || void 0 });
|
|
3332
3879
|
}
|
|
3880
|
+
const agentsForSnapshot = new Set(this.agentManager.getRunningAgentIds());
|
|
3881
|
+
for (const { agentId } of this.agentManager.getIdleAgentSessionIds()) {
|
|
3882
|
+
agentsForSnapshot.add(agentId);
|
|
3883
|
+
}
|
|
3884
|
+
for (const agentId of agentsForSnapshot) {
|
|
3885
|
+
this.connection.send({ type: "reminder.snapshot.request", agentId });
|
|
3886
|
+
}
|
|
3333
3887
|
this.options.lifecycleHooks?.onConnect?.();
|
|
3334
3888
|
}
|
|
3335
3889
|
handleDisconnect() {
|
|
@@ -3346,6 +3900,7 @@ export {
|
|
|
3346
3900
|
parseDaemonCliArgs,
|
|
3347
3901
|
readDaemonVersion,
|
|
3348
3902
|
resolveChatBridgePath,
|
|
3903
|
+
resolveSlockCliPath,
|
|
3349
3904
|
detectRuntimes,
|
|
3350
3905
|
DaemonCore
|
|
3351
3906
|
};
|