zeitlich 0.2.13 → 0.2.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +61 -50
- package/dist/adapters/sandbox/daytona/index.cjs +205 -0
- package/dist/adapters/sandbox/daytona/index.cjs.map +1 -0
- package/dist/adapters/sandbox/daytona/index.d.cts +86 -0
- package/dist/adapters/sandbox/daytona/index.d.ts +86 -0
- package/dist/adapters/sandbox/daytona/index.js +202 -0
- package/dist/adapters/sandbox/daytona/index.js.map +1 -0
- package/dist/adapters/sandbox/inmemory/index.cjs +174 -0
- package/dist/adapters/sandbox/inmemory/index.cjs.map +1 -0
- package/dist/adapters/sandbox/inmemory/index.d.cts +28 -0
- package/dist/adapters/sandbox/inmemory/index.d.ts +28 -0
- package/dist/adapters/sandbox/inmemory/index.js +172 -0
- package/dist/adapters/sandbox/inmemory/index.js.map +1 -0
- package/dist/adapters/sandbox/virtual/index.cjs +405 -0
- package/dist/adapters/sandbox/virtual/index.cjs.map +1 -0
- package/dist/adapters/sandbox/virtual/index.d.cts +85 -0
- package/dist/adapters/sandbox/virtual/index.d.ts +85 -0
- package/dist/adapters/sandbox/virtual/index.js +400 -0
- package/dist/adapters/sandbox/virtual/index.js.map +1 -0
- package/dist/adapters/thread/google-genai/index.cjs +306 -0
- package/dist/adapters/thread/google-genai/index.cjs.map +1 -0
- package/dist/adapters/thread/google-genai/index.d.cts +145 -0
- package/dist/adapters/thread/google-genai/index.d.ts +145 -0
- package/dist/adapters/thread/google-genai/index.js +300 -0
- package/dist/adapters/thread/google-genai/index.js.map +1 -0
- package/dist/adapters/{langchain → thread/langchain}/index.cjs +29 -9
- package/dist/adapters/thread/langchain/index.cjs.map +1 -0
- package/dist/adapters/{langchain → thread/langchain}/index.d.cts +17 -21
- package/dist/adapters/{langchain → thread/langchain}/index.d.ts +17 -21
- package/dist/adapters/{langchain → thread/langchain}/index.js +29 -9
- package/dist/adapters/thread/langchain/index.js.map +1 -0
- package/dist/index.cjs +866 -567
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +235 -74
- package/dist/index.d.ts +235 -74
- package/dist/index.js +854 -562
- package/dist/index.js.map +1 -1
- package/dist/{thread-manager-qc0g5Rvd.d.cts → types-35POpVfa.d.cts} +7 -6
- package/dist/{thread-manager-qc0g5Rvd.d.ts → types-35POpVfa.d.ts} +7 -6
- package/dist/types-BMXzv7TN.d.cts +476 -0
- package/dist/types-BMXzv7TN.d.ts +476 -0
- package/dist/types-BVP87m_W.d.cts +121 -0
- package/dist/types-BWvIYK28.d.ts +391 -0
- package/dist/types-CDubRtad.d.cts +115 -0
- package/dist/types-CDubRtad.d.ts +115 -0
- package/dist/types-CwwgQ_9H.d.ts +121 -0
- package/dist/types-Dje1TdH6.d.cts +391 -0
- package/dist/workflow.cjs +460 -321
- package/dist/workflow.cjs.map +1 -1
- package/dist/workflow.d.cts +271 -222
- package/dist/workflow.d.ts +271 -222
- package/dist/workflow.js +456 -319
- package/dist/workflow.js.map +1 -1
- package/package.json +65 -8
- package/src/adapters/sandbox/daytona/filesystem.ts +136 -0
- package/src/adapters/sandbox/daytona/index.ts +149 -0
- package/src/adapters/sandbox/daytona/types.ts +34 -0
- package/src/adapters/sandbox/inmemory/index.ts +213 -0
- package/src/adapters/sandbox/virtual/filesystem.ts +345 -0
- package/src/adapters/sandbox/virtual/index.ts +88 -0
- package/src/adapters/sandbox/virtual/mutations.ts +38 -0
- package/src/adapters/sandbox/virtual/provider.ts +101 -0
- package/src/adapters/sandbox/virtual/tree.ts +82 -0
- package/src/adapters/sandbox/virtual/types.ts +127 -0
- package/src/adapters/sandbox/virtual/virtual-sandbox.test.ts +523 -0
- package/src/adapters/sandbox/virtual/with-virtual-sandbox.ts +91 -0
- package/src/adapters/thread/google-genai/activities.ts +132 -0
- package/src/adapters/thread/google-genai/index.ts +41 -0
- package/src/adapters/thread/google-genai/model-invoker.ts +154 -0
- package/src/adapters/thread/google-genai/thread-manager.ts +169 -0
- package/src/adapters/{langchain → thread/langchain}/activities.ts +22 -15
- package/src/adapters/{langchain → thread/langchain}/index.ts +1 -1
- package/src/adapters/{langchain → thread/langchain}/model-invoker.ts +15 -18
- package/src/adapters/{langchain → thread/langchain}/thread-manager.ts +1 -1
- package/src/index.ts +32 -24
- package/src/lib/activity.ts +87 -0
- package/src/lib/hooks/index.ts +11 -0
- package/src/lib/hooks/types.ts +98 -0
- package/src/lib/model/helpers.ts +6 -0
- package/src/lib/model/index.ts +13 -0
- package/src/lib/{model-invoker.ts → model/types.ts} +18 -1
- package/src/lib/sandbox/index.ts +19 -0
- package/src/lib/sandbox/manager.ts +76 -0
- package/src/lib/sandbox/sandbox.test.ts +158 -0
- package/src/lib/{fs.ts → sandbox/tree.ts} +6 -6
- package/src/lib/sandbox/types.ts +164 -0
- package/src/lib/session/index.ts +11 -0
- package/src/lib/{session.ts → session/session.ts} +83 -50
- package/src/lib/session/types.ts +95 -0
- package/src/lib/skills/fs-provider.ts +16 -15
- package/src/lib/skills/handler.ts +31 -0
- package/src/lib/skills/index.ts +5 -1
- package/src/lib/skills/register.ts +20 -0
- package/src/lib/skills/tool.ts +47 -0
- package/src/lib/state/index.ts +9 -0
- package/src/lib/{state-manager.ts → state/manager.ts} +10 -147
- package/src/lib/state/types.ts +134 -0
- package/src/lib/subagent/define.ts +71 -0
- package/src/lib/subagent/handler.ts +99 -0
- package/src/lib/subagent/index.ts +13 -0
- package/src/lib/subagent/register.ts +68 -0
- package/src/lib/subagent/tool.ts +80 -0
- package/src/lib/subagent/types.ts +92 -0
- package/src/lib/thread/index.ts +7 -0
- package/src/lib/{thread-manager.ts → thread/manager.ts} +20 -33
- package/src/lib/thread/types.ts +39 -0
- package/src/lib/tool-router/auto-append.ts +55 -0
- package/src/lib/tool-router/index.ts +41 -0
- package/src/lib/tool-router/router.ts +462 -0
- package/src/lib/tool-router/types.ts +478 -0
- package/src/lib/tool-router/with-sandbox.ts +70 -0
- package/src/lib/types.ts +5 -382
- package/src/tools/bash/bash.test.ts +53 -55
- package/src/tools/bash/handler.ts +23 -51
- package/src/tools/edit/handler.ts +67 -81
- package/src/tools/glob/handler.ts +60 -17
- package/src/tools/read-file/handler.ts +67 -0
- package/src/tools/read-skill/handler.ts +1 -31
- package/src/tools/read-skill/tool.ts +5 -47
- package/src/tools/subagent/handler.ts +1 -100
- package/src/tools/subagent/tool.ts +5 -93
- package/src/tools/task-create/handler.ts +1 -1
- package/src/tools/task-get/handler.ts +1 -1
- package/src/tools/task-list/handler.ts +1 -1
- package/src/tools/task-update/handler.ts +1 -1
- package/src/tools/write-file/handler.ts +47 -0
- package/src/workflow.ts +88 -47
- package/tsup.config.ts +8 -1
- package/dist/adapters/langchain/index.cjs.map +0 -1
- package/dist/adapters/langchain/index.js.map +0 -1
- package/dist/model-invoker-y_zlyMqu.d.cts +0 -892
- package/dist/model-invoker-y_zlyMqu.d.ts +0 -892
- package/src/lib/tool-router.ts +0 -977
- package/src/lib/workflow-helpers.ts +0 -50
- /package/src/lib/{thread-id.ts → thread/id.ts} +0 -0
package/dist/index.cjs
CHANGED
|
@@ -2,157 +2,15 @@
|
|
|
2
2
|
|
|
3
3
|
var workflow = require('@temporalio/workflow');
|
|
4
4
|
var z14 = require('zod');
|
|
5
|
-
var
|
|
6
|
-
var justBash = require('just-bash');
|
|
7
|
-
var promises = require('fs/promises');
|
|
5
|
+
var common = require('@temporalio/common');
|
|
8
6
|
var path = require('path');
|
|
7
|
+
var activity = require('@temporalio/activity');
|
|
9
8
|
|
|
10
9
|
function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
|
|
11
10
|
|
|
12
11
|
var z14__default = /*#__PURE__*/_interopDefault(z14);
|
|
13
12
|
|
|
14
|
-
// src/lib/session.ts
|
|
15
|
-
var SUBAGENT_TOOL_NAME = "Subagent";
|
|
16
|
-
function buildSubagentDescription(subagents) {
|
|
17
|
-
const subagentList = subagents.map((s) => {
|
|
18
|
-
const continuation = s.allowThreadContinuation ? "\n*(Supports thread continuation \u2014 pass a threadId to resume a previous conversation)*" : "";
|
|
19
|
-
return `## ${s.agentName}
|
|
20
|
-
${s.description}${continuation}`;
|
|
21
|
-
}).join("\n\n");
|
|
22
|
-
return `The ${SUBAGENT_TOOL_NAME} tool launches specialized agents (subagents) that autonomously handle complex work. Each agent type has specific capabilities and tools available to it.
|
|
23
|
-
|
|
24
|
-
# Available subagents:
|
|
25
|
-
${subagentList}
|
|
26
|
-
`;
|
|
27
|
-
}
|
|
28
|
-
function createSubagentTool(subagents) {
|
|
29
|
-
if (subagents.length === 0) {
|
|
30
|
-
throw new Error("createTaskTool requires at least one subagent");
|
|
31
|
-
}
|
|
32
|
-
const names = subagents.map((s) => s.agentName);
|
|
33
|
-
const hasThreadContinuation = subagents.some(
|
|
34
|
-
(s) => s.allowThreadContinuation
|
|
35
|
-
);
|
|
36
|
-
const baseFields = {
|
|
37
|
-
subagent: z14__default.default.enum(names).describe("The type of subagent to launch"),
|
|
38
|
-
description: z14__default.default.string().describe("A short (3-5 word) description of the task"),
|
|
39
|
-
prompt: z14__default.default.string().describe("The task for the agent to perform")
|
|
40
|
-
};
|
|
41
|
-
const schema = hasThreadContinuation ? z14__default.default.object({
|
|
42
|
-
...baseFields,
|
|
43
|
-
threadId: z14__default.default.string().nullable().describe(
|
|
44
|
-
"Thread ID to continue an existing conversation, or null to start a new one"
|
|
45
|
-
)
|
|
46
|
-
}) : z14__default.default.object(baseFields);
|
|
47
|
-
return {
|
|
48
|
-
name: SUBAGENT_TOOL_NAME,
|
|
49
|
-
description: buildSubagentDescription(subagents),
|
|
50
|
-
schema
|
|
51
|
-
};
|
|
52
|
-
}
|
|
53
|
-
var BASE62 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
|
54
|
-
function getShortId(length = 12) {
|
|
55
|
-
const hex = workflow.uuid4().replace(/-/g, "");
|
|
56
|
-
let result = "";
|
|
57
|
-
for (let i = 0; i < length; i++) {
|
|
58
|
-
const byte = parseInt(hex.slice(i * 2, i * 2 + 2), 16);
|
|
59
|
-
result += BASE62[byte % BASE62.length];
|
|
60
|
-
}
|
|
61
|
-
return result;
|
|
62
|
-
}
|
|
63
|
-
|
|
64
|
-
// src/tools/subagent/handler.ts
|
|
65
|
-
function createSubagentHandler(subagents) {
|
|
66
|
-
const { taskQueue: parentTaskQueue } = workflow.workflowInfo();
|
|
67
|
-
return async (args) => {
|
|
68
|
-
const config = subagents.find((s) => s.agentName === args.subagent);
|
|
69
|
-
if (!config) {
|
|
70
|
-
throw new Error(
|
|
71
|
-
`Unknown subagent: ${args.subagent}. Available: ${subagents.map((s) => s.agentName).join(", ")}`
|
|
72
|
-
);
|
|
73
|
-
}
|
|
74
|
-
const childWorkflowId = `${args.subagent}-${getShortId()}`;
|
|
75
|
-
const input = {
|
|
76
|
-
prompt: args.prompt,
|
|
77
|
-
...config.context && { context: config.context },
|
|
78
|
-
...args.threadId && config.allowThreadContinuation && { threadId: args.threadId }
|
|
79
|
-
};
|
|
80
|
-
const childOpts = {
|
|
81
|
-
workflowId: childWorkflowId,
|
|
82
|
-
args: [input],
|
|
83
|
-
taskQueue: config.taskQueue ?? parentTaskQueue
|
|
84
|
-
};
|
|
85
|
-
const { toolResponse, data, usage, threadId: childThreadId } = typeof config.workflow === "string" ? await workflow.executeChild(config.workflow, childOpts) : await workflow.executeChild(config.workflow, childOpts);
|
|
86
|
-
if (!toolResponse) {
|
|
87
|
-
return {
|
|
88
|
-
toolResponse: "Subagent workflow returned no response",
|
|
89
|
-
data: null,
|
|
90
|
-
...usage && { usage }
|
|
91
|
-
};
|
|
92
|
-
}
|
|
93
|
-
const validated = config.resultSchema ? config.resultSchema.safeParse(data) : null;
|
|
94
|
-
if (validated && !validated.success) {
|
|
95
|
-
return {
|
|
96
|
-
toolResponse: `Subagent workflow returned invalid data: ${validated.error.message}`,
|
|
97
|
-
data: null,
|
|
98
|
-
...usage && { usage }
|
|
99
|
-
};
|
|
100
|
-
}
|
|
101
|
-
let finalToolResponse = toolResponse;
|
|
102
|
-
if (config.allowThreadContinuation && childThreadId) {
|
|
103
|
-
finalToolResponse = typeof toolResponse === "string" ? `${toolResponse}
|
|
104
|
-
|
|
105
|
-
[Thread ID: ${childThreadId}]` : toolResponse;
|
|
106
|
-
}
|
|
107
|
-
return {
|
|
108
|
-
toolResponse: finalToolResponse,
|
|
109
|
-
data: validated ? validated.data : data,
|
|
110
|
-
...usage && { usage }
|
|
111
|
-
};
|
|
112
|
-
};
|
|
113
|
-
}
|
|
114
|
-
var READ_SKILL_TOOL_NAME = "ReadSkill";
|
|
115
|
-
function buildReadSkillDescription(skills) {
|
|
116
|
-
const skillList = skills.map((s) => `- **${s.name}**: ${s.description}`).join("\n");
|
|
117
|
-
return `Load the full instructions for a skill. Read the skill before following its instructions.
|
|
118
|
-
|
|
119
|
-
# Available skills:
|
|
120
|
-
${skillList}
|
|
121
|
-
`;
|
|
122
|
-
}
|
|
123
|
-
function createReadSkillTool(skills) {
|
|
124
|
-
if (skills.length === 0) {
|
|
125
|
-
throw new Error("createReadSkillTool requires at least one skill");
|
|
126
|
-
}
|
|
127
|
-
const names = skills.map((s) => s.name);
|
|
128
|
-
return {
|
|
129
|
-
name: READ_SKILL_TOOL_NAME,
|
|
130
|
-
description: buildReadSkillDescription(skills),
|
|
131
|
-
schema: z14__default.default.object({
|
|
132
|
-
skill_name: z14__default.default.enum(names).describe("The name of the skill to load")
|
|
133
|
-
})
|
|
134
|
-
};
|
|
135
|
-
}
|
|
136
|
-
|
|
137
|
-
// src/tools/read-skill/handler.ts
|
|
138
|
-
function createReadSkillHandler(skills) {
|
|
139
|
-
const skillMap = new Map(skills.map((s) => [s.name, s]));
|
|
140
|
-
return (args) => {
|
|
141
|
-
const skill = skillMap.get(args.skill_name);
|
|
142
|
-
if (!skill) {
|
|
143
|
-
return {
|
|
144
|
-
toolResponse: JSON.stringify({
|
|
145
|
-
error: `Skill "${args.skill_name}" not found`
|
|
146
|
-
}),
|
|
147
|
-
data: null
|
|
148
|
-
};
|
|
149
|
-
}
|
|
150
|
-
return {
|
|
151
|
-
toolResponse: skill.instructions,
|
|
152
|
-
data: null
|
|
153
|
-
};
|
|
154
|
-
};
|
|
155
|
-
}
|
|
13
|
+
// src/lib/session/session.ts
|
|
156
14
|
function createToolRouter(options) {
|
|
157
15
|
const { appendToolResult } = options;
|
|
158
16
|
const toolMap = /* @__PURE__ */ new Map();
|
|
@@ -160,45 +18,12 @@ function createToolRouter(options) {
|
|
|
160
18
|
toolMap.set(tool.name, tool);
|
|
161
19
|
}
|
|
162
20
|
const isEnabled = (tool) => tool.enabled ?? true;
|
|
163
|
-
if (options.
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
for (const s of options.subagents) {
|
|
167
|
-
if (s.hooks) subagentHooksMap.set(s.agentName, s.hooks);
|
|
168
|
-
}
|
|
169
|
-
const resolveSubagentName = (args) => args.subagent;
|
|
170
|
-
toolMap.set(SUBAGENT_TOOL_NAME, {
|
|
171
|
-
...createSubagentTool(options.subagents),
|
|
172
|
-
handler: createSubagentHandler(options.subagents),
|
|
173
|
-
...subagentHooksMap.size > 0 && {
|
|
174
|
-
hooks: {
|
|
175
|
-
onPreToolUse: async (ctx) => {
|
|
176
|
-
const hooks = subagentHooksMap.get(resolveSubagentName(ctx.args));
|
|
177
|
-
return hooks?.onPreExecution?.(ctx) ?? {};
|
|
178
|
-
},
|
|
179
|
-
onPostToolUse: async (ctx) => {
|
|
180
|
-
const hooks = subagentHooksMap.get(resolveSubagentName(ctx.args));
|
|
181
|
-
await hooks?.onPostExecution?.(ctx);
|
|
182
|
-
},
|
|
183
|
-
onPostToolUseFailure: async (ctx) => {
|
|
184
|
-
const hooks = subagentHooksMap.get(resolveSubagentName(ctx.args));
|
|
185
|
-
return hooks?.onExecutionFailure?.(ctx) ?? {};
|
|
186
|
-
}
|
|
187
|
-
}
|
|
188
|
-
}
|
|
189
|
-
});
|
|
21
|
+
if (options.plugins) {
|
|
22
|
+
for (const plugin of options.plugins) {
|
|
23
|
+
toolMap.set(plugin.name, plugin);
|
|
190
24
|
}
|
|
191
25
|
}
|
|
192
|
-
|
|
193
|
-
toolMap.set(READ_SKILL_TOOL_NAME, {
|
|
194
|
-
...createReadSkillTool(options.skills),
|
|
195
|
-
handler: createReadSkillHandler(options.skills)
|
|
196
|
-
});
|
|
197
|
-
}
|
|
198
|
-
async function processToolCall(toolCall, turn, handlerContext) {
|
|
199
|
-
const startTime = Date.now();
|
|
200
|
-
const tool = toolMap.get(toolCall.name);
|
|
201
|
-
const toolHooks = tool?.hooks;
|
|
26
|
+
async function runPreHooks(toolCall, tool, turn) {
|
|
202
27
|
let effectiveArgs = toolCall.args;
|
|
203
28
|
if (options.hooks?.onPreToolUse) {
|
|
204
29
|
const preResult = await options.hooks.onPreToolUse({
|
|
@@ -206,58 +31,105 @@ function createToolRouter(options) {
|
|
|
206
31
|
threadId: options.threadId,
|
|
207
32
|
turn
|
|
208
33
|
});
|
|
209
|
-
if (preResult?.skip) {
|
|
210
|
-
|
|
211
|
-
threadId: options.threadId,
|
|
212
|
-
toolCallId: toolCall.id,
|
|
213
|
-
toolName: toolCall.name,
|
|
214
|
-
content: JSON.stringify({
|
|
215
|
-
skipped: true,
|
|
216
|
-
reason: "Skipped by PreToolUse hook"
|
|
217
|
-
})
|
|
218
|
-
});
|
|
219
|
-
return null;
|
|
220
|
-
}
|
|
221
|
-
if (preResult?.modifiedArgs !== void 0) {
|
|
34
|
+
if (preResult?.skip) return { skip: true };
|
|
35
|
+
if (preResult?.modifiedArgs !== void 0)
|
|
222
36
|
effectiveArgs = preResult.modifiedArgs;
|
|
223
|
-
}
|
|
224
37
|
}
|
|
225
|
-
if (
|
|
226
|
-
const preResult = await
|
|
38
|
+
if (tool?.hooks?.onPreToolUse) {
|
|
39
|
+
const preResult = await tool.hooks.onPreToolUse({
|
|
227
40
|
args: effectiveArgs,
|
|
228
41
|
threadId: options.threadId,
|
|
229
42
|
turn
|
|
230
43
|
});
|
|
231
|
-
if (preResult?.skip) {
|
|
232
|
-
|
|
233
|
-
threadId: options.threadId,
|
|
234
|
-
toolCallId: toolCall.id,
|
|
235
|
-
toolName: toolCall.name,
|
|
236
|
-
content: JSON.stringify({
|
|
237
|
-
skipped: true,
|
|
238
|
-
reason: "Skipped by tool PreToolUse hook"
|
|
239
|
-
})
|
|
240
|
-
});
|
|
241
|
-
return null;
|
|
242
|
-
}
|
|
243
|
-
if (preResult?.modifiedArgs !== void 0) {
|
|
44
|
+
if (preResult?.skip) return { skip: true };
|
|
45
|
+
if (preResult?.modifiedArgs !== void 0)
|
|
244
46
|
effectiveArgs = preResult.modifiedArgs;
|
|
245
|
-
}
|
|
246
47
|
}
|
|
48
|
+
return { skip: false, args: effectiveArgs };
|
|
49
|
+
}
|
|
50
|
+
async function runFailureHooks(toolCall, tool, error, effectiveArgs, turn) {
|
|
51
|
+
const err = error instanceof Error ? error : new Error(String(error));
|
|
52
|
+
const errorStr = String(error);
|
|
53
|
+
if (tool?.hooks?.onPostToolUseFailure) {
|
|
54
|
+
const r = await tool.hooks.onPostToolUseFailure({
|
|
55
|
+
args: effectiveArgs,
|
|
56
|
+
error: err,
|
|
57
|
+
threadId: options.threadId,
|
|
58
|
+
turn
|
|
59
|
+
});
|
|
60
|
+
if (r?.fallbackContent !== void 0)
|
|
61
|
+
return { content: r.fallbackContent, result: { error: errorStr, recovered: true } };
|
|
62
|
+
if (r?.suppress)
|
|
63
|
+
return {
|
|
64
|
+
content: JSON.stringify({ error: errorStr, suppressed: true }),
|
|
65
|
+
result: { error: errorStr, suppressed: true }
|
|
66
|
+
};
|
|
67
|
+
}
|
|
68
|
+
if (options.hooks?.onPostToolUseFailure) {
|
|
69
|
+
const r = await options.hooks.onPostToolUseFailure({
|
|
70
|
+
toolCall,
|
|
71
|
+
error: err,
|
|
72
|
+
threadId: options.threadId,
|
|
73
|
+
turn
|
|
74
|
+
});
|
|
75
|
+
if (r?.fallbackContent !== void 0)
|
|
76
|
+
return { content: r.fallbackContent, result: { error: errorStr, recovered: true } };
|
|
77
|
+
if (r?.suppress)
|
|
78
|
+
return {
|
|
79
|
+
content: JSON.stringify({ error: errorStr, suppressed: true }),
|
|
80
|
+
result: { error: errorStr, suppressed: true }
|
|
81
|
+
};
|
|
82
|
+
}
|
|
83
|
+
throw workflow.ApplicationFailure.fromError(error, { nonRetryable: true });
|
|
84
|
+
}
|
|
85
|
+
async function runPostHooks(toolCall, tool, toolResult, effectiveArgs, turn, durationMs) {
|
|
86
|
+
if (tool?.hooks?.onPostToolUse) {
|
|
87
|
+
await tool.hooks.onPostToolUse({
|
|
88
|
+
args: effectiveArgs,
|
|
89
|
+
result: toolResult.data,
|
|
90
|
+
threadId: options.threadId,
|
|
91
|
+
turn,
|
|
92
|
+
durationMs
|
|
93
|
+
});
|
|
94
|
+
}
|
|
95
|
+
if (options.hooks?.onPostToolUse) {
|
|
96
|
+
await options.hooks.onPostToolUse({
|
|
97
|
+
toolCall,
|
|
98
|
+
result: toolResult,
|
|
99
|
+
threadId: options.threadId,
|
|
100
|
+
turn,
|
|
101
|
+
durationMs
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
async function processToolCall(toolCall, turn, sandboxId) {
|
|
106
|
+
const startTime = Date.now();
|
|
107
|
+
const tool = toolMap.get(toolCall.name);
|
|
108
|
+
const preResult = await runPreHooks(toolCall, tool, turn);
|
|
109
|
+
if (preResult.skip) {
|
|
110
|
+
await appendToolResult({
|
|
111
|
+
threadId: options.threadId,
|
|
112
|
+
toolCallId: toolCall.id,
|
|
113
|
+
toolName: toolCall.name,
|
|
114
|
+
content: JSON.stringify({ skipped: true, reason: "Skipped by PreToolUse hook" })
|
|
115
|
+
});
|
|
116
|
+
return null;
|
|
117
|
+
}
|
|
118
|
+
const effectiveArgs = preResult.args;
|
|
247
119
|
let result;
|
|
248
120
|
let content;
|
|
249
121
|
let resultAppended = false;
|
|
250
122
|
try {
|
|
251
123
|
if (tool) {
|
|
252
|
-
const
|
|
253
|
-
...handlerContext ?? {},
|
|
124
|
+
const routerContext = {
|
|
254
125
|
threadId: options.threadId,
|
|
255
126
|
toolCallId: toolCall.id,
|
|
256
|
-
toolName: toolCall.name
|
|
127
|
+
toolName: toolCall.name,
|
|
128
|
+
...sandboxId !== void 0 && { sandboxId }
|
|
257
129
|
};
|
|
258
130
|
const response = await tool.handler(
|
|
259
131
|
effectiveArgs,
|
|
260
|
-
|
|
132
|
+
routerContext
|
|
261
133
|
);
|
|
262
134
|
result = response.data;
|
|
263
135
|
content = response.toolResponse;
|
|
@@ -267,47 +139,9 @@ function createToolRouter(options) {
|
|
|
267
139
|
content = JSON.stringify(result, null, 2);
|
|
268
140
|
}
|
|
269
141
|
} catch (error) {
|
|
270
|
-
const
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
const failureResult = await toolHooks.onPostToolUseFailure({
|
|
274
|
-
args: effectiveArgs,
|
|
275
|
-
error: err,
|
|
276
|
-
threadId: options.threadId,
|
|
277
|
-
turn
|
|
278
|
-
});
|
|
279
|
-
if (failureResult?.fallbackContent !== void 0) {
|
|
280
|
-
content = failureResult.fallbackContent;
|
|
281
|
-
result = { error: String(error), recovered: true };
|
|
282
|
-
recovered = true;
|
|
283
|
-
} else if (failureResult?.suppress) {
|
|
284
|
-
content = JSON.stringify({ error: String(error), suppressed: true });
|
|
285
|
-
result = { error: String(error), suppressed: true };
|
|
286
|
-
recovered = true;
|
|
287
|
-
}
|
|
288
|
-
}
|
|
289
|
-
if (!recovered && options.hooks?.onPostToolUseFailure) {
|
|
290
|
-
const failureResult = await options.hooks.onPostToolUseFailure({
|
|
291
|
-
toolCall,
|
|
292
|
-
error: err,
|
|
293
|
-
threadId: options.threadId,
|
|
294
|
-
turn
|
|
295
|
-
});
|
|
296
|
-
if (failureResult?.fallbackContent !== void 0) {
|
|
297
|
-
content = failureResult.fallbackContent;
|
|
298
|
-
result = { error: String(error), recovered: true };
|
|
299
|
-
recovered = true;
|
|
300
|
-
} else if (failureResult?.suppress) {
|
|
301
|
-
content = JSON.stringify({ error: String(error), suppressed: true });
|
|
302
|
-
result = { error: String(error), suppressed: true };
|
|
303
|
-
recovered = true;
|
|
304
|
-
}
|
|
305
|
-
}
|
|
306
|
-
if (!recovered) {
|
|
307
|
-
throw workflow.ApplicationFailure.fromError(error, {
|
|
308
|
-
nonRetryable: true
|
|
309
|
-
});
|
|
310
|
-
}
|
|
142
|
+
const recovery = await runFailureHooks(toolCall, tool, error, effectiveArgs, turn);
|
|
143
|
+
result = recovery.result;
|
|
144
|
+
content = recovery.content;
|
|
311
145
|
}
|
|
312
146
|
if (!resultAppended) {
|
|
313
147
|
await appendToolResult({
|
|
@@ -322,29 +156,10 @@ function createToolRouter(options) {
|
|
|
322
156
|
name: toolCall.name,
|
|
323
157
|
data: result
|
|
324
158
|
};
|
|
325
|
-
|
|
326
|
-
if (toolHooks?.onPostToolUse) {
|
|
327
|
-
await toolHooks.onPostToolUse({
|
|
328
|
-
args: effectiveArgs,
|
|
329
|
-
result,
|
|
330
|
-
threadId: options.threadId,
|
|
331
|
-
turn,
|
|
332
|
-
durationMs
|
|
333
|
-
});
|
|
334
|
-
}
|
|
335
|
-
if (options.hooks?.onPostToolUse) {
|
|
336
|
-
await options.hooks.onPostToolUse({
|
|
337
|
-
toolCall,
|
|
338
|
-
result: toolResult,
|
|
339
|
-
threadId: options.threadId,
|
|
340
|
-
turn,
|
|
341
|
-
durationMs
|
|
342
|
-
});
|
|
343
|
-
}
|
|
159
|
+
await runPostHooks(toolCall, tool, toolResult, effectiveArgs, turn, Date.now() - startTime);
|
|
344
160
|
return toolResult;
|
|
345
161
|
}
|
|
346
162
|
return {
|
|
347
|
-
// --- Methods from registry ---
|
|
348
163
|
hasTools() {
|
|
349
164
|
return Array.from(toolMap.values()).some(isEnabled);
|
|
350
165
|
},
|
|
@@ -368,32 +183,25 @@ function createToolRouter(options) {
|
|
|
368
183
|
return Array.from(toolMap.entries()).filter(([, tool]) => isEnabled(tool)).map(([name]) => name);
|
|
369
184
|
},
|
|
370
185
|
getToolDefinitions() {
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
description: tool.description,
|
|
379
|
-
schema: tool.schema,
|
|
380
|
-
strict: tool.strict,
|
|
381
|
-
max_uses: tool.max_uses
|
|
382
|
-
})),
|
|
383
|
-
...activeSubagents.length > 0 ? [createSubagentTool(activeSubagents)] : [],
|
|
384
|
-
...activeSkills.length > 0 ? [createReadSkillTool(activeSkills)] : []
|
|
385
|
-
];
|
|
186
|
+
return Array.from(toolMap).filter(([, tool]) => isEnabled(tool)).map(([name, tool]) => ({
|
|
187
|
+
name,
|
|
188
|
+
description: tool.description,
|
|
189
|
+
schema: tool.schema,
|
|
190
|
+
strict: tool.strict,
|
|
191
|
+
max_uses: tool.max_uses
|
|
192
|
+
}));
|
|
386
193
|
},
|
|
387
|
-
// --- Methods for processing tool calls ---
|
|
388
194
|
async processToolCalls(toolCalls, context) {
|
|
389
195
|
if (toolCalls.length === 0) {
|
|
390
196
|
return [];
|
|
391
197
|
}
|
|
392
198
|
const turn = context?.turn ?? 0;
|
|
393
|
-
const
|
|
199
|
+
const sandboxId = context?.sandboxId;
|
|
394
200
|
if (options.parallel) {
|
|
395
201
|
const results2 = await Promise.all(
|
|
396
|
-
toolCalls.map(
|
|
202
|
+
toolCalls.map(
|
|
203
|
+
(tc) => processToolCall(tc, turn, sandboxId)
|
|
204
|
+
)
|
|
397
205
|
);
|
|
398
206
|
return results2.filter(
|
|
399
207
|
(r) => r !== null
|
|
@@ -401,7 +209,11 @@ function createToolRouter(options) {
|
|
|
401
209
|
}
|
|
402
210
|
const results = [];
|
|
403
211
|
for (const toolCall of toolCalls) {
|
|
404
|
-
const result = await processToolCall(
|
|
212
|
+
const result = await processToolCall(
|
|
213
|
+
toolCall,
|
|
214
|
+
turn,
|
|
215
|
+
sandboxId
|
|
216
|
+
);
|
|
405
217
|
if (result !== null) {
|
|
406
218
|
results.push(result);
|
|
407
219
|
}
|
|
@@ -413,17 +225,16 @@ function createToolRouter(options) {
|
|
|
413
225
|
if (matchingCalls.length === 0) {
|
|
414
226
|
return [];
|
|
415
227
|
}
|
|
416
|
-
const handlerContext = context?.handlerContext ?? {};
|
|
417
228
|
const processOne = async (toolCall) => {
|
|
418
|
-
const
|
|
419
|
-
...handlerContext ?? {},
|
|
229
|
+
const routerContext = {
|
|
420
230
|
threadId: options.threadId,
|
|
421
231
|
toolCallId: toolCall.id,
|
|
422
|
-
toolName: toolCall.name
|
|
232
|
+
toolName: toolCall.name,
|
|
233
|
+
...context?.sandboxId !== void 0 && { sandboxId: context.sandboxId }
|
|
423
234
|
};
|
|
424
235
|
const response = await handler(
|
|
425
236
|
toolCall.args,
|
|
426
|
-
|
|
237
|
+
routerContext
|
|
427
238
|
);
|
|
428
239
|
if (!response.resultAppended) {
|
|
429
240
|
await appendToolResult({
|
|
@@ -439,59 +250,232 @@ function createToolRouter(options) {
|
|
|
439
250
|
data: response.data
|
|
440
251
|
};
|
|
441
252
|
};
|
|
442
|
-
if (options.parallel) {
|
|
443
|
-
return Promise.all(matchingCalls.map(processOne));
|
|
444
|
-
}
|
|
445
|
-
const results = [];
|
|
446
|
-
for (const toolCall of matchingCalls) {
|
|
447
|
-
results.push(await processOne(toolCall));
|
|
448
|
-
}
|
|
449
|
-
return results;
|
|
253
|
+
if (options.parallel) {
|
|
254
|
+
return Promise.all(matchingCalls.map(processOne));
|
|
255
|
+
}
|
|
256
|
+
const results = [];
|
|
257
|
+
for (const toolCall of matchingCalls) {
|
|
258
|
+
results.push(await processOne(toolCall));
|
|
259
|
+
}
|
|
260
|
+
return results;
|
|
261
|
+
},
|
|
262
|
+
filterByName(toolCalls, name) {
|
|
263
|
+
return toolCalls.filter(
|
|
264
|
+
(tc) => tc.name === name
|
|
265
|
+
);
|
|
266
|
+
},
|
|
267
|
+
hasToolCall(toolCalls, name) {
|
|
268
|
+
return toolCalls.some((tc) => tc.name === name);
|
|
269
|
+
},
|
|
270
|
+
getResultsByName(results, name) {
|
|
271
|
+
return results.filter((r) => r.name === name);
|
|
272
|
+
}
|
|
273
|
+
};
|
|
274
|
+
}
|
|
275
|
+
function defineTool(tool) {
|
|
276
|
+
return tool;
|
|
277
|
+
}
|
|
278
|
+
function hasNoOtherToolCalls(toolCalls, excludeName) {
|
|
279
|
+
return toolCalls.filter((tc) => tc.name !== excludeName).length === 0;
|
|
280
|
+
}
|
|
281
|
+
var BASE62 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
|
282
|
+
function getShortId(length = 12) {
|
|
283
|
+
const hex = workflow.uuid4().replace(/-/g, "");
|
|
284
|
+
let result = "";
|
|
285
|
+
for (let i = 0; i < length; i++) {
|
|
286
|
+
const byte = parseInt(hex.slice(i * 2, i * 2 + 2), 16);
|
|
287
|
+
result += BASE62[byte % BASE62.length];
|
|
288
|
+
}
|
|
289
|
+
return result;
|
|
290
|
+
}
|
|
291
|
+
var SUBAGENT_TOOL_NAME = "Subagent";
|
|
292
|
+
function buildSubagentDescription(subagents) {
|
|
293
|
+
const subagentList = subagents.map((s) => {
|
|
294
|
+
const continuation = s.allowThreadContinuation ? "\n*(Supports thread continuation \u2014 pass a threadId to resume a previous conversation)*" : "";
|
|
295
|
+
return `## ${s.agentName}
|
|
296
|
+
${s.description}${continuation}`;
|
|
297
|
+
}).join("\n\n");
|
|
298
|
+
return `The ${SUBAGENT_TOOL_NAME} tool launches specialized agents (subagents) that autonomously handle complex work. Each agent type has specific capabilities and tools available to it.
|
|
299
|
+
|
|
300
|
+
# Available subagents:
|
|
301
|
+
${subagentList}
|
|
302
|
+
`;
|
|
303
|
+
}
|
|
304
|
+
function createSubagentTool(subagents) {
|
|
305
|
+
if (subagents.length === 0) {
|
|
306
|
+
throw new Error("createSubagentTool requires at least one subagent");
|
|
307
|
+
}
|
|
308
|
+
const names = subagents.map((s) => s.agentName);
|
|
309
|
+
const hasThreadContinuation = subagents.some(
|
|
310
|
+
(s) => s.allowThreadContinuation
|
|
311
|
+
);
|
|
312
|
+
const baseFields = {
|
|
313
|
+
subagent: z14__default.default.enum(names).describe("The type of subagent to launch"),
|
|
314
|
+
description: z14__default.default.string().describe("A short (3-5 word) description of the task"),
|
|
315
|
+
prompt: z14__default.default.string().describe("The task for the agent to perform")
|
|
316
|
+
};
|
|
317
|
+
const schema = hasThreadContinuation ? z14__default.default.object({
|
|
318
|
+
...baseFields,
|
|
319
|
+
threadId: z14__default.default.string().nullable().describe(
|
|
320
|
+
"Thread ID to continue an existing conversation, or null to start a new one"
|
|
321
|
+
)
|
|
322
|
+
}) : z14__default.default.object(baseFields);
|
|
323
|
+
return {
|
|
324
|
+
name: SUBAGENT_TOOL_NAME,
|
|
325
|
+
description: buildSubagentDescription(subagents),
|
|
326
|
+
schema
|
|
327
|
+
};
|
|
328
|
+
}
|
|
329
|
+
/**
 * Builds the Task-tool handler that launches a configured subagent as a
 * Temporal child workflow and normalizes its result for the parent agent.
 * Throws on unknown subagent names; validates the child's `data` against
 * the subagent's `resultSchema` when one is configured.
 */
function createSubagentHandler(subagents) {
  const { taskQueue: parentTaskQueue } = workflow.workflowInfo();
  return async (args, context) => {
    const config = subagents.find((candidate) => candidate.agentName === args.subagent);
    if (!config) {
      const available = subagents.map((candidate) => candidate.agentName).join(", ");
      throw new Error(`Unknown subagent: ${args.subagent}. Available: ${available}`);
    }

    const { sandboxId: parentSandboxId } = context;
    // Share the parent's sandbox unless this subagent insists on its own.
    const inheritSandbox = config.sandbox !== "own" && !!parentSandboxId;
    const continueThread = !!args.threadId && !!config.allowThreadContinuation;

    const input = {
      prompt: args.prompt,
      ...(config.context && { context: config.context }),
      ...(continueThread && { previousThreadId: args.threadId }),
      ...(inheritSandbox && { sandboxId: parentSandboxId })
    };
    const childOpts = {
      workflowId: `${args.subagent}-${getShortId()}`,
      args: [input],
      taskQueue: config.taskQueue ?? parentTaskQueue
    };

    // executeChild accepts either a workflow name or a workflow function,
    // so a single call covers both config shapes.
    const result = await workflow.executeChild(config.workflow, childOpts);
    const { toolResponse, data, usage, threadId: childThreadId } = result;

    if (!toolResponse) {
      return {
        toolResponse: "Subagent workflow returned no response",
        data: null,
        ...(usage && { usage })
      };
    }

    const validated = config.resultSchema ? config.resultSchema.safeParse(data) : null;
    if (validated && !validated.success) {
      return {
        toolResponse: `Subagent workflow returned invalid data: ${validated.error.message}`,
        data: null,
        ...(usage && { usage })
      };
    }

    let finalToolResponse = toolResponse;
    if (config.allowThreadContinuation && childThreadId) {
      // Surface the child's thread id so the model can continue the conversation.
      finalToolResponse = typeof toolResponse === "string" ? `${toolResponse}\n\n[Thread ID: ${childThreadId}]` : toolResponse;
    }
    return {
      toolResponse: finalToolResponse,
      data: validated ? validated.data : data,
      ...(usage && { usage })
    };
  };
}
|
|
386
|
+
|
|
387
|
+
// src/lib/subagent/register.ts
|
|
388
|
+
/**
 * Assembles the subagent tool's plugin registration (name, dynamic
 * description/schema/enabled getters, handler, optional per-subagent hooks),
 * or returns null when there are no subagents to register.
 */
function buildSubagentRegistration(subagents) {
  if (subagents.length === 0) return null;

  // Recomputed on each access so `enabled` flags may change between turns.
  const enabledSubagents = () => subagents.filter((entry) => entry.enabled ?? true);

  // Per-subagent lifecycle hooks, keyed by agent name.
  const hooksByName = new Map();
  for (const entry of subagents) {
    if (entry.hooks) hooksByName.set(entry.agentName, entry.hooks);
  }
  const hooksFor = (args) => hooksByName.get(args.subagent);

  const registration = {
    name: SUBAGENT_TOOL_NAME,
    get enabled() {
      return enabledSubagents().length > 0;
    },
    get description() {
      return createSubagentTool(enabledSubagents()).description;
    },
    get schema() {
      return createSubagentTool(enabledSubagents()).schema;
    },
    handler: createSubagentHandler(subagents)
  };

  if (hooksByName.size > 0) {
    registration.hooks = {
      onPreToolUse: async (ctx) => hooksFor(ctx.args)?.onPreExecution?.(ctx) ?? {},
      onPostToolUse: async (ctx) => {
        await hooksFor(ctx.args)?.onPostExecution?.(ctx);
      },
      onPostToolUseFailure: async (ctx) => hooksFor(ctx.args)?.onExecutionFailure?.(ctx) ?? {}
    };
  }
  return registration;
}
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
426
|
+
var READ_SKILL_TOOL_NAME = "ReadSkill";
|
|
427
|
+
/**
 * Renders the ReadSkill tool description, embedding a markdown bullet list
 * of every available skill name and its summary.
 */
function buildReadSkillDescription(skills) {
  const bullets = skills
    .map((skill) => `- **${skill.name}**: ${skill.description}`)
    .join("\n");
  return `Load the full instructions for a skill. Read the skill before following its instructions.\n\n# Available skills:\n${bullets}\n`;
}
|
|
435
|
+
/**
 * Builds the ReadSkill tool definition (name, description, zod schema).
 * Throws when given an empty skill list, since a zod enum needs members.
 */
function createReadSkillTool(skills) {
  if (skills.length === 0) {
    throw new Error("createReadSkillTool requires at least one skill");
  }
  const skillNames = skills.map((skill) => skill.name);
  const schema = z14__default.default.object({
    skill_name: z14__default.default.enum(skillNames).describe("The name of the skill to load")
  });
  return {
    name: READ_SKILL_TOOL_NAME,
    description: buildReadSkillDescription(skills),
    schema
  };
}
|
|
448
|
+
|
|
449
|
+
// src/lib/skills/handler.ts
|
|
450
|
+
/**
 * Returns the ReadSkill handler: looks the requested skill up by name and
 * replies with its full instructions, or a JSON error payload when unknown.
 */
function createReadSkillHandler(skills) {
  const byName = new Map(skills.map((skill) => [skill.name, skill]));
  return (args) => {
    const skill = byName.get(args.skill_name);
    if (skill) {
      return { toolResponse: skill.instructions, data: null };
    }
    return {
      toolResponse: JSON.stringify({
        error: `Skill "${args.skill_name}" not found`
      }),
      data: null
    };
  };
}
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
return
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
468
|
+
|
|
469
|
+
// src/lib/skills/register.ts
|
|
470
|
+
/**
 * Combines the ReadSkill tool definition and handler into one plugin
 * registration; null when there are no skills to expose.
 */
function buildSkillRegistration(skills) {
  if (skills.length === 0) return null;
  const tool = createReadSkillTool(skills);
  return { ...tool, handler: createReadSkillHandler(skills) };
}
|
|
493
477
|
|
|
494
|
-
// src/lib/session.ts
|
|
478
|
+
// src/lib/session/session.ts
|
|
495
479
|
var createSession = async ({
|
|
496
480
|
threadId: providedThreadId,
|
|
497
481
|
agentName,
|
|
@@ -507,22 +491,34 @@ var createSession = async ({
|
|
|
507
491
|
hooks = {},
|
|
508
492
|
appendSystemPrompt = true,
|
|
509
493
|
continueThread = false,
|
|
510
|
-
waitForInputTimeout = "48h"
|
|
494
|
+
waitForInputTimeout = "48h",
|
|
495
|
+
sandbox: sandboxOps,
|
|
496
|
+
sandboxId: inheritedSandboxId
|
|
511
497
|
}) => {
|
|
512
|
-
const
|
|
498
|
+
const sourceThreadId = continueThread ? providedThreadId : void 0;
|
|
499
|
+
const threadId = continueThread && providedThreadId ? getShortId() : providedThreadId ?? getShortId();
|
|
513
500
|
const {
|
|
514
501
|
appendToolResult,
|
|
515
502
|
appendHumanMessage,
|
|
516
503
|
initializeThread,
|
|
517
|
-
appendSystemMessage
|
|
504
|
+
appendSystemMessage,
|
|
505
|
+
forkThread
|
|
518
506
|
} = threadOps ?? proxyDefaultThreadOps();
|
|
507
|
+
const plugins = [];
|
|
508
|
+
if (subagents) {
|
|
509
|
+
const reg = buildSubagentRegistration(subagents);
|
|
510
|
+
if (reg) plugins.push(reg);
|
|
511
|
+
}
|
|
512
|
+
if (skills) {
|
|
513
|
+
const reg = buildSkillRegistration(skills);
|
|
514
|
+
if (reg) plugins.push(reg);
|
|
515
|
+
}
|
|
519
516
|
const toolRouter = createToolRouter({
|
|
520
517
|
tools,
|
|
521
518
|
appendToolResult,
|
|
522
519
|
threadId,
|
|
523
520
|
hooks,
|
|
524
|
-
|
|
525
|
-
skills,
|
|
521
|
+
plugins,
|
|
526
522
|
parallel: processToolsInParallel
|
|
527
523
|
});
|
|
528
524
|
const callSessionEnd = async (exitReason, turns) => {
|
|
@@ -559,6 +555,17 @@ var createSession = async ({
|
|
|
559
555
|
stateManager.run();
|
|
560
556
|
}
|
|
561
557
|
);
|
|
558
|
+
let sandboxId = inheritedSandboxId;
|
|
559
|
+
const ownsSandbox = !sandboxId && !!sandboxOps;
|
|
560
|
+
if (ownsSandbox) {
|
|
561
|
+
const result = await sandboxOps.createSandbox({ id: threadId });
|
|
562
|
+
sandboxId = result.sandboxId;
|
|
563
|
+
if (result.stateUpdate) {
|
|
564
|
+
stateManager.mergeUpdate(
|
|
565
|
+
result.stateUpdate
|
|
566
|
+
);
|
|
567
|
+
}
|
|
568
|
+
}
|
|
562
569
|
if (hooks.onSessionStart) {
|
|
563
570
|
await hooks.onSessionStart({
|
|
564
571
|
threadId,
|
|
@@ -567,7 +574,9 @@ var createSession = async ({
|
|
|
567
574
|
});
|
|
568
575
|
}
|
|
569
576
|
const systemPrompt = stateManager.getSystemPrompt();
|
|
570
|
-
if (
|
|
577
|
+
if (continueThread && sourceThreadId) {
|
|
578
|
+
await forkThread(sourceThreadId, threadId);
|
|
579
|
+
} else {
|
|
571
580
|
if (appendSystemPrompt) {
|
|
572
581
|
if (!systemPrompt || systemPrompt.trim() === "") {
|
|
573
582
|
throw workflow.ApplicationFailure.create({
|
|
@@ -599,6 +608,7 @@ var createSession = async ({
|
|
|
599
608
|
stateManager.complete();
|
|
600
609
|
exitReason = "completed";
|
|
601
610
|
return {
|
|
611
|
+
threadId,
|
|
602
612
|
finalMessage: message,
|
|
603
613
|
exitReason,
|
|
604
614
|
usage: stateManager.getTotalUsage()
|
|
@@ -622,7 +632,8 @@ var createSession = async ({
|
|
|
622
632
|
const toolCallResults = await toolRouter.processToolCalls(
|
|
623
633
|
parsedToolCalls,
|
|
624
634
|
{
|
|
625
|
-
turn: currentTurn
|
|
635
|
+
turn: currentTurn,
|
|
636
|
+
...sandboxId !== void 0 && { sandboxId }
|
|
626
637
|
}
|
|
627
638
|
);
|
|
628
639
|
for (const result of toolCallResults) {
|
|
@@ -650,8 +661,12 @@ var createSession = async ({
|
|
|
650
661
|
throw workflow.ApplicationFailure.fromError(error);
|
|
651
662
|
} finally {
|
|
652
663
|
await callSessionEnd(exitReason, stateManager.getTurns());
|
|
664
|
+
if (ownsSandbox && sandboxId && sandboxOps) {
|
|
665
|
+
await sandboxOps.destroySandbox(sandboxId);
|
|
666
|
+
}
|
|
653
667
|
}
|
|
654
668
|
return {
|
|
669
|
+
threadId,
|
|
655
670
|
finalMessage: null,
|
|
656
671
|
exitReason,
|
|
657
672
|
usage: stateManager.getTotalUsage()
|
|
@@ -672,16 +687,109 @@ function proxyDefaultThreadOps(options) {
|
|
|
672
687
|
}
|
|
673
688
|
);
|
|
674
689
|
}
|
|
690
|
+
/**
 * Proxies the sandbox lifecycle activities with sensible defaults: a short
 * start-to-close timeout plus a small exponential-backoff retry policy.
 * Caller-supplied `options` replace the defaults entirely.
 */
function proxySandboxOps(options) {
  const defaults = {
    startToCloseTimeout: "30s",
    retry: {
      maximumAttempts: 3,
      initialInterval: "2s",
      maximumInterval: "30s",
      backoffCoefficient: 2
    }
  };
  return workflow.proxyActivities(options ?? defaults);
}
|
|
703
|
+
|
|
704
|
+
// src/lib/thread/manager.ts
|
|
705
|
+
var THREAD_TTL_SECONDS = 60 * 60 * 24 * 90;
|
|
706
|
+
var APPEND_IDEMPOTENT_SCRIPT = `
|
|
707
|
+
if redis.call('EXISTS', KEYS[1]) == 1 then
|
|
708
|
+
return 0
|
|
709
|
+
end
|
|
710
|
+
for i = 2, #ARGV do
|
|
711
|
+
redis.call('RPUSH', KEYS[2], ARGV[i])
|
|
712
|
+
end
|
|
713
|
+
redis.call('EXPIRE', KEYS[2], tonumber(ARGV[1]))
|
|
714
|
+
redis.call('SET', KEYS[1], '1', 'EX', tonumber(ARGV[1]))
|
|
715
|
+
return 1
|
|
716
|
+
`;
|
|
717
|
+
/** Namespaced Redis key for a thread's stored value: `thread:<id>:<key>`. */
function getThreadKey(threadId, key) {
  return ["thread", threadId, key].join(":");
}
|
|
720
|
+
/**
 * Redis-backed thread store. Messages live in a list under
 * `thread:<id>:<key>`; a sibling `<key>:meta` key marks thread existence and
 * carries the TTL. When `idOf` is configured, appends run through a Lua
 * script keyed by the batch's ids so replays are idempotent.
 */
function createThreadManager(config) {
  const {
    redis,
    threadId,
    key = "messages",
    serialize = (m) => JSON.stringify(m),
    deserialize = (raw) => JSON.parse(raw),
    idOf
  } = config;

  const listKey = getThreadKey(threadId, key);
  const existsKey = getThreadKey(threadId, `${key}:meta`);

  // All read/append paths require the meta marker to be present.
  const requireThread = async () => {
    const present = await redis.exists(existsKey);
    if (!present) {
      throw new Error(`Thread "${threadId}" (key: ${key}) does not exist`);
    }
  };

  // Dedup key is derived from the ids of the whole batch; the Lua script
  // skips the append entirely when that key already exists.
  const appendIdempotent = async (messages) => {
    const batchId = messages.map(idOf).join(":");
    await redis.eval(
      APPEND_IDEMPOTENT_SCRIPT,
      2,
      getThreadKey(threadId, `dedup:${batchId}`),
      listKey,
      String(THREAD_TTL_SECONDS),
      ...messages.map(serialize)
    );
  };

  return {
    // Reset the list and (re)create the existence marker with a fresh TTL.
    async initialize() {
      await redis.del(listKey);
      await redis.set(existsKey, "1", "EX", THREAD_TTL_SECONDS);
    },
    // Full message history, deserialized.
    async load() {
      await requireThread();
      const raw = await redis.lrange(listKey, 0, -1);
      return raw.map(deserialize);
    },
    // Append a batch; de-duplicated when `idOf` is configured.
    async append(messages) {
      if (messages.length === 0) return;
      await requireThread();
      if (idOf) {
        await appendIdempotent(messages);
      } else {
        await redis.rpush(listKey, ...messages.map(serialize));
        await redis.expire(listKey, THREAD_TTL_SECONDS);
      }
    },
    // Copy this thread's raw entries into a brand-new thread manager.
    async fork(newThreadId) {
      await requireThread();
      const raw = await redis.lrange(listKey, 0, -1);
      const copy = createThreadManager({ ...config, threadId: newThreadId });
      await copy.initialize();
      if (raw.length > 0) {
        const copyKey = getThreadKey(newThreadId, key);
        await redis.rpush(copyKey, ...raw);
        await redis.expire(copyKey, THREAD_TTL_SECONDS);
      }
      return copy;
    },
    // Remove both the list and its existence marker.
    async delete() {
      await redis.del(listKey, existsKey);
    }
  };
}
|
|
675
786
|
|
|
676
787
|
// src/lib/types.ts
|
|
677
|
-
var agentQueryName = (agentName) => `get${agentName}State`;
|
|
678
|
-
var agentStateChangeUpdateName = (agentName) => `waitFor${agentName}StateChange`;
|
|
679
788
|
function isTerminalStatus(status) {
|
|
680
789
|
return status === "COMPLETED" || status === "FAILED" || status === "CANCELLED";
|
|
681
790
|
}
|
|
682
791
|
function createAgentStateManager({
|
|
683
|
-
initialState
|
|
684
|
-
agentName
|
|
792
|
+
initialState
|
|
685
793
|
}) {
|
|
686
794
|
let status = initialState?.status ?? "RUNNING";
|
|
687
795
|
let version = initialState?.version ?? 0;
|
|
@@ -712,11 +820,9 @@ function createAgentStateManager({
|
|
|
712
820
|
...customState
|
|
713
821
|
};
|
|
714
822
|
}
|
|
715
|
-
const stateQuery = workflow.defineQuery(
|
|
716
|
-
agentQueryName(agentName)
|
|
717
|
-
);
|
|
823
|
+
const stateQuery = workflow.defineQuery("getAgentState");
|
|
718
824
|
const stateChangeUpdate = workflow.defineUpdate(
|
|
719
|
-
|
|
825
|
+
"waitForAgentStateChange"
|
|
720
826
|
);
|
|
721
827
|
workflow.setHandler(stateQuery, () => buildState());
|
|
722
828
|
workflow.setHandler(stateChangeUpdate, async (lastKnownVersion) => {
|
|
@@ -780,6 +886,10 @@ function createAgentStateManager({
|
|
|
780
886
|
customState[key] = value;
|
|
781
887
|
version++;
|
|
782
888
|
},
|
|
889
|
+
mergeUpdate(update) {
|
|
890
|
+
Object.assign(customState, update);
|
|
891
|
+
version++;
|
|
892
|
+
},
|
|
783
893
|
getCurrentState() {
|
|
784
894
|
return buildState();
|
|
785
895
|
},
|
|
@@ -835,6 +945,127 @@ function createAgentStateManager({
|
|
|
835
945
|
};
|
|
836
946
|
}
|
|
837
947
|
|
|
948
|
+
// src/lib/tool-router/auto-append.ts
|
|
949
|
+
/**
 * Wraps a tool handler so its textual response is persisted to the thread
 * via `threadHandler` instead of being returned inline; callers receive a
 * placeholder response flagged with `resultAppended: true`.
 */
function withAutoAppend(threadHandler, handler) {
  return async (args, context) => {
    const { toolResponse, data } = await handler(args, context);
    await threadHandler({
      threadId: context.threadId,
      toolCallId: context.toolCallId,
      toolName: context.toolName,
      content: toolResponse
    });
    return {
      toolResponse: "Response appended via withAutoAppend",
      data,
      resultAppended: true
    };
  };
}
|
|
965
|
+
|
|
966
|
+
// src/lib/tool-router/with-sandbox.ts
|
|
967
|
+
/**
 * Wraps a sandbox-dependent tool handler: resolves `context.sandboxId` to a
 * live sandbox via `manager` before delegating, or returns an error tool
 * response when the agent has no sandbox configured.
 */
function withSandbox(manager, handler) {
  return async (args, context) => {
    const { sandboxId } = context;
    if (!sandboxId) {
      return {
        toolResponse: `Error: No sandbox configured for this agent. The ${context.toolName} tool requires a sandbox.`,
        data: null
      };
    }
    const sandbox = await manager.getSandbox(sandboxId);
    return handler(args, { ...context, sandbox, sandboxId });
  };
}
|
|
979
|
+
|
|
980
|
+
// src/lib/subagent/define.ts
|
|
981
|
+
/**
 * Identity helper: returns its argument unchanged. Exists purely so callers
 * get type inference when declaring a subagent config.
 */
function defineSubagent(config) {
  return config;
}
|
|
984
|
+
/** Non-retryable failure raised when a sandbox adapter lacks a requested capability. */
var SandboxNotSupportedError = class extends common.ApplicationFailure {
  constructor(operation) {
    const message = `Sandbox does not support: ${operation}`;
    // type "SandboxNotSupportedError", nonRetryable = true
    super(message, "SandboxNotSupportedError", true);
  }
};
|
|
993
|
+
/** Non-retryable failure raised when a sandbox id cannot be resolved to a live sandbox. */
var SandboxNotFoundError = class extends common.ApplicationFailure {
  constructor(sandboxId) {
    const message = `Sandbox not found: ${sandboxId}`;
    // type "SandboxNotFoundError", nonRetryable = true
    super(message, "SandboxNotFoundError", true);
  }
};
|
|
998
|
+
|
|
999
|
+
// src/adapters/sandbox/virtual/mutations.ts
|
|
1000
|
+
/**
 * Applies add/remove/update mutations to the virtual sandbox's `fileTree`
 * state entry. The updated tree is written back to state and returned.
 * Entries are matched by `path`; `update` shallow-merges the new fields.
 */
function applyVirtualTreeMutations(stateManager, mutations) {
  let entries = [...stateManager.get("fileTree")];
  for (const mutation of mutations) {
    if (mutation.type === "add") {
      entries.push(mutation.entry);
    } else if (mutation.type === "remove") {
      entries = entries.filter((candidate) => candidate.path !== mutation.path);
    } else if (mutation.type === "update") {
      entries = entries.map(
        (candidate) => candidate.path === mutation.path ? { ...candidate, ...mutation.entry } : candidate
      );
    }
  }
  stateManager.set("fileTree", entries);
  return entries;
}
|
|
1020
|
+
|
|
1021
|
+
// src/adapters/sandbox/virtual/tree.ts
|
|
1022
|
+
/**
 * Builds an in-memory trie from flat path entries. A node that has no
 * children after its path is inserted is treated as a file.
 */
var buildTree = (entries) => {
  const root = { name: "/", children: new Map(), isFile: false };
  for (const entry of entries) {
    let node = root;
    for (const segment of entry.path.split("/").filter(Boolean)) {
      let next = node.children.get(segment);
      if (!next) {
        next = { name: segment, children: new Map(), isFile: false };
        node.children.set(segment, next);
      }
      node = next;
    }
    node.isFile = node.children.size === 0;
  }
  return root;
};
/**
 * Recursively renders one tree level using box-drawing branch characters.
 * When `sort` is on, directories come before files, each group alphabetical.
 */
var printNode = (node, tab, sort) => {
  const children = [...node.children.values()];
  if (sort) {
    children.sort((a, b) => {
      // Directories sort ahead of files; within a group, by name.
      if (a.isFile !== b.isFile) return a.isFile ? 1 : -1;
      return a.name.localeCompare(b.name);
    });
  }
  let rendered = "";
  for (const [index, child] of children.entries()) {
    const last = index === children.length - 1;
    const branch = last ? "\u2514\u2500" : "\u251C\u2500";
    if (child.isFile) {
      rendered += "\n" + tab + branch + " " + child.name;
    } else {
      const nextTab = tab + (last ? " " : "\u2502 ");
      rendered += "\n" + tab + branch + " " + child.name + "/" + printNode(child, nextTab, sort);
    }
  }
  return rendered;
};
/** Formats flat file entries as a text tree rooted at "/". Sorted by default. */
function formatVirtualFileTree(entries, opts = {}) {
  const shouldSort = opts.sort ?? true;
  return "/" + printNode(buildTree(entries), "", shouldSort);
}
|
|
1068
|
+
|
|
838
1069
|
// src/lib/skills/parse.ts
|
|
839
1070
|
function parseSkillFile(raw) {
|
|
840
1071
|
const trimmed = raw.replace(/^\uFEFF/, "");
|
|
@@ -1256,197 +1487,313 @@ var createAskUserQuestionHandler = () => async (args) => {
|
|
|
1256
1487
|
data: { questions: args.questions }
|
|
1257
1488
|
};
|
|
1258
1489
|
};
|
|
1259
|
-
|
|
1260
|
-
|
|
1261
|
-
|
|
1262
|
-
|
|
1263
|
-
|
|
1264
|
-
|
|
1265
|
-
|
|
1266
|
-
|
|
1267
|
-
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
1271
|
-
return 1
|
|
1272
|
-
`;
|
|
1273
|
-
function getThreadKey(threadId, key) {
|
|
1274
|
-
return `thread:${threadId}:${key}`;
|
|
1275
|
-
}
|
|
1276
|
-
function createThreadManager(config) {
|
|
1277
|
-
const {
|
|
1278
|
-
redis,
|
|
1279
|
-
threadId,
|
|
1280
|
-
key = "messages",
|
|
1281
|
-
serialize = (m) => JSON.stringify(m),
|
|
1282
|
-
deserialize = (raw) => JSON.parse(raw),
|
|
1283
|
-
idOf
|
|
1284
|
-
} = config;
|
|
1285
|
-
const redisKey = getThreadKey(threadId, key);
|
|
1286
|
-
const metaKey = getThreadKey(threadId, `${key}:meta`);
|
|
1287
|
-
async function assertThreadExists() {
|
|
1288
|
-
const exists = await redis.exists(metaKey);
|
|
1289
|
-
if (!exists) {
|
|
1290
|
-
throw new Error(`Thread "${threadId}" (key: ${key}) does not exist`);
|
|
1490
|
+
/**
 * Loads skills from `<baseDir>/<skillDir>/SKILL.md` files through a
 * filesystem abstraction. The SKILL.md frontmatter carries the metadata;
 * the markdown body holds the skill's full instructions.
 */
var FileSystemSkillProvider = class {
  constructor(fs, baseDir) {
    this.fs = fs;
    this.baseDir = baseDir;
  }
  /** Raw SKILL.md contents for one skill directory. */
  async readSkillFile(dir) {
    return this.fs.readFile(path.join(this.baseDir, dir, "SKILL.md"));
  }
  /** Frontmatter metadata for every discovered skill. */
  async listSkills() {
    const result = [];
    for (const dir of await this.discoverSkillDirs()) {
      const { frontmatter } = parseSkillFile(await this.readSkillFile(dir));
      result.push(frontmatter);
    }
    return result;
  }
  /** Full skill (metadata + instructions) looked up by directory name. */
  async getSkill(name) {
    const { frontmatter, body } = parseSkillFile(await this.readSkillFile(name));
    if (frontmatter.name !== name) {
      throw new Error(
        `Skill directory "${name}" contains SKILL.md with mismatched name "${frontmatter.name}"`
      );
    }
    return { ...frontmatter, instructions: body };
  }
  /**
   * Convenience method to load all skills with full instructions.
   * Returns `Skill[]` ready to pass into a workflow.
   */
  async loadAll() {
    const result = [];
    for (const dir of await this.discoverSkillDirs()) {
      const { frontmatter, body } = parseSkillFile(await this.readSkillFile(dir));
      result.push({ ...frontmatter, instructions: body });
    }
    return result;
  }
  /** Subdirectories of baseDir that contain a SKILL.md file. */
  async discoverSkillDirs() {
    const found = [];
    for (const entry of await this.fs.readdirWithFileTypes(this.baseDir)) {
      if (!entry.isDirectory) continue;
      const skillPath = path.join(this.baseDir, entry.name, "SKILL.md");
      if (await this.fs.exists(skillPath)) {
        found.push(entry.name);
      }
    }
    return found;
  }
};
|
|
1544
|
+
/**
 * Queries the workflow that scheduled the current activity for its agent
 * state, via the standard "getAgentState" query on that workflow's handle.
 */
async function queryParentWorkflowState(client) {
  const { workflowId, runId } = activity.Context.current().info.workflowExecution;
  const handle = client.getHandle(workflowId, runId);
  return handle.query("getAgentState");
}
|
|
1335
|
-
function createRunAgentActivity(client,
|
|
1552
|
+
/**
 * Wraps an agent handler as an activity function that first snapshots the
 * parent workflow's agent state and merges it into the handler's config.
 */
function createRunAgentActivity(client, handler) {
  return async (config) => {
    const parentState = await queryParentWorkflowState(client);
    return handler({ ...config, state: parentState });
  };
}
|
|
1344
|
-
function
|
|
1345
|
-
return async (
|
|
1346
|
-
|
|
1347
|
-
return {
|
|
1348
|
-
toolResponse: "Hello, world!",
|
|
1349
|
-
data: { files: [] }
|
|
1350
|
-
};
|
|
1558
|
+
/**
 * Tool-handler wrapper that injects the parent workflow's current agent
 * state into the handler context as `context.state`.
 */
function withParentWorkflowState(client, handler) {
  return async (args, context) => {
    const parentState = await queryParentWorkflowState(client);
    return handler(args, { ...context, state: parentState });
  };
}
|
|
1353
1564
|
|
|
1565
|
+
// src/lib/sandbox/manager.ts
|
|
1566
|
+
/**
 * Thin facade over a sandbox provider, exposing lifecycle operations plus a
 * matching set of Temporal activity functions.
 */
var SandboxManager = class {
  constructor(provider) {
    this.provider = provider;
  }
  /** Provision a sandbox; returns its id plus any provider state update. */
  async create(options) {
    const { sandbox, stateUpdate } = await this.provider.create(options);
    const result = { sandboxId: sandbox.id };
    if (stateUpdate) result.stateUpdate = stateUpdate;
    return result;
  }
  /** Resolve an id to a live sandbox instance. */
  async getSandbox(id) {
    return this.provider.get(id);
  }
  /** Tear a sandbox down. */
  async destroy(id) {
    await this.provider.destroy(id);
  }
  /** Capture a restorable snapshot of a sandbox. */
  async snapshot(id) {
    return this.provider.snapshot(id);
  }
  /** Recreate a sandbox from a snapshot; returns the new sandbox id. */
  async restore(snapshot) {
    const restored = await this.provider.restore(snapshot);
    return restored.id;
  }
  /**
   * Returns Temporal activity functions matching {@link SandboxOps}.
   * Spread these into your worker's activity map.
   */
  createActivities() {
    return {
      createSandbox: async (options) => this.create(options),
      destroySandbox: async (sandboxId) => {
        await this.destroy(sandboxId);
      },
      snapshotSandbox: async (sandboxId) => this.snapshot(sandboxId)
    };
  }
};
|
|
1605
|
+
|
|
1606
|
+
// src/tools/bash/handler.ts
|
|
1607
|
+
/**
 * Runs a shell command in the tool's sandbox and reports exit code, stdout
 * and stderr; any thrown error becomes an error tool response with null data.
 */
var bashHandler = async (args, { sandbox }) => {
  try {
    const execution = await sandbox.exec(args.command);
    const report = `Exit code: ${execution.exitCode}\n\nstdout:\n${execution.stdout}\n\nstderr:\n${execution.stderr}`;
    return { toolResponse: report, data: execution };
  } catch (error) {
    const err = error instanceof Error ? error : new Error("Unknown error");
    return {
      toolResponse: `Error executing bash command: ${err.message}`,
      data: null
    };
  }
};
|
|
1628
|
+
|
|
1354
1629
|
// src/tools/edit/handler.ts
|
|
1355
1630
|
/** Escapes regex metacharacters so a literal string can be embedded in a RegExp. */
function escapeRegExp(str) {
  return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
 * String-replacement file editor. Replaces `old_string` with `new_string`
 * in `file_path` on the sandbox fs. Refuses to replace when old/new are
 * equal, the file is missing, the text is absent, or the match is ambiguous
 * (multiple occurrences without `replace_all`).
 */
var editHandler = async (args, { sandbox }) => {
  const { fs } = sandbox;
  const { file_path, old_string, new_string, replace_all = false } = args;

  // Shared shape for every unsuccessful outcome.
  const failure = (toolResponse) => ({
    toolResponse,
    data: { path: file_path, success: false, replacements: 0 }
  });

  if (old_string === new_string) {
    return failure(`Error: old_string and new_string must be different.`);
  }
  try {
    if (!await fs.exists(file_path)) {
      return failure(`Error: File "${file_path}" does not exist.`);
    }
    const content = await fs.readFile(file_path);
    if (!content.includes(old_string)) {
      return failure(`Error: Could not find the specified text in "${file_path}". Make sure old_string matches exactly (whitespace-sensitive).`);
    }
    const literalPattern = new RegExp(escapeRegExp(old_string), "g");
    const occurrences = (content.match(literalPattern) || []).length;
    if (!replace_all && occurrences > 1) {
      return failure(`Error: old_string appears ${occurrences} times in "${file_path}". Either provide more context to make it unique, or use replace_all: true.`);
    }
    let updated;
    let replacements;
    if (replace_all) {
      updated = content.split(old_string).join(new_string);
      replacements = occurrences;
    } else {
      const at = content.indexOf(old_string);
      updated = content.slice(0, at) + new_string + content.slice(at + old_string.length);
      replacements = 1;
    }
    await fs.writeFile(file_path, updated);
    const summary = replace_all ? `Replaced ${replacements} occurrence(s)` : `Replaced 1 occurrence`;
    return {
      toolResponse: `${summary} in ${file_path}`,
      data: { path: file_path, success: true, replacements }
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    return failure(`Error editing file "${file_path}": ${message}`);
  }
};
|
|
1690
|
+
|
|
1691
|
+
// src/tools/glob/handler.ts
// Test a relative path against a simple glob pattern.
// "*" matches within a single path segment; "**" matches across segments.
function matchGlob(pattern, path) {
  // Escape regex metacharacters first (note: "*" is intentionally left alone).
  const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
  // Protect "**" behind a placeholder so the single-star pass cannot touch it.
  const withPlaceholder = escaped.replace(/\*\*/g, "{{GLOBSTAR}}");
  const singleStarExpanded = withPlaceholder.replace(/\*/g, "[^/]*");
  const regexSource = singleStarExpanded.replace(/\{\{GLOBSTAR\}\}/g, ".*");
  return new RegExp(`^${regexSource}$`).test(path);
}
|
|
1416
|
-
|
|
1417
|
-
const
|
|
1418
|
-
const
|
|
1419
|
-
|
|
1420
|
-
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1696
|
+
// Recursively collect the absolute paths of every file under `dir`,
// using the sandbox filesystem's readdirWithFileTypes. Directories are
// descended into sequentially; only file paths appear in the result.
async function walk(fs, dir) {
  const collected = [];
  for (const entry of await fs.readdirWithFileTypes(dir)) {
    const childPath = dir === "/" ? `/${entry.name}` : `${dir}/${entry.name}`;
    if (entry.isDirectory) {
      const nested = await walk(fs, childPath);
      collected.push(...nested);
    } else {
      collected.push(childPath);
    }
  }
  return collected;
}
|
|
1709
|
+
// Tool handler: list files under `root` whose root-relative path matches
// the glob `pattern`. Errors are reported in the tool response, never thrown.
var globHandler = async (args, { sandbox }) => {
  const { fs } = sandbox;
  const { pattern, root = "/" } = args;
  try {
    const prefix = root.endsWith("/") ? root : `${root}/`;
    const matched = [];
    for (const file of await walk(fs, root)) {
      // Strip the root prefix so patterns are matched relative to `root`.
      const relative = file.startsWith(prefix) ? file.slice(prefix.length) : file;
      if (matchGlob(pattern, relative)) {
        matched.push(relative);
      }
    }
    const summary = matched.length > 0
      ? `Found ${matched.length} file(s):\n${matched.join("\n")}`
      : `No files matched pattern "${pattern}"`;
    return {
      toolResponse: summary,
      data: { files: matched }
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    return {
      toolResponse: `Error running glob: ${message}`,
      data: { files: [] }
    };
  }
};
|
|
1435
1729
|
|
|
1436
|
-
|
|
1437
|
-
|
|
1438
|
-
|
|
1730
|
+
// src/tools/read-file/handler.ts
// Tool handler: read a file and return it with cat -n style line numbers.
// Optional 1-based `offset` and `limit` select a window of lines; both the
// windowed and full reads number lines by their position in the whole file.
var readFileHandler = async (args, { sandbox }) => {
  const { fs } = sandbox;
  const { path, offset, limit } = args;
  try {
    if (!(await fs.exists(path))) {
      return {
        toolResponse: `Error: File "${path}" does not exist.`,
        data: null
      };
    }
    const raw = await fs.readFile(path);
    const allLines = raw.split("\n");
    const totalLines = allLines.length;
    // With no offset/limit this degenerates to start=0, end=length,
    // i.e. the whole file numbered from 1 — same output as a windowed read.
    const start = Math.max(0, (offset ?? 1) - 1);
    const end = limit === void 0 ? allLines.length : start + limit;
    const rendered = allLines
      .slice(start, end)
      .map((text, idx) => `${String(start + idx + 1).padStart(6)}|${text}`)
      .join("\n");
    return {
      toolResponse: rendered,
      data: { path, content: rendered, totalLines }
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    return {
      toolResponse: `Error reading file "${path}": ${message}`,
      data: null
    };
  }
};
|
|
1448
1768
|
|
|
1449
|
-
// src/
|
|
1769
|
+
// src/tools/write-file/handler.ts
// Tool handler: write `content` to `file_path`, creating the parent
// directory first when one is named and does not exist. Errors are
// reported in the tool response, never thrown.
var writeFileHandler = async (args, { sandbox }) => {
  const { fs } = sandbox;
  const { file_path, content } = args;
  try {
    const lastSlash = file_path.lastIndexOf("/");
    // lastSlash > 0 skips both bare filenames and paths directly under "/".
    if (lastSlash > 0) {
      const parent = file_path.slice(0, lastSlash);
      if (!(await fs.exists(parent))) {
        await fs.mkdir(parent, { recursive: true });
      }
    }
    await fs.writeFile(file_path, content);
    return {
      toolResponse: `Successfully wrote to ${file_path}`,
      data: { path: file_path, success: true }
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    return {
      toolResponse: `Error writing file "${file_path}": ${message}`,
      data: { path: file_path, success: false }
    };
  }
};
|
|
1795
|
+
|
|
1796
|
+
// src/lib/sandbox/tree.ts
|
|
1450
1797
|
var basename = (path, separator) => {
|
|
1451
1798
|
if (path[path.length - 1] === separator) path = path.slice(0, -1);
|
|
1452
1799
|
const lastSlashIndex = path.lastIndexOf(separator);
|
|
@@ -1475,7 +1822,7 @@ var toTree = async (fs, opts = {}) => {
|
|
|
1475
1822
|
const sort = opts.sort ?? true;
|
|
1476
1823
|
let subtree = " (...)";
|
|
1477
1824
|
if (depth > 0) {
|
|
1478
|
-
const list = await fs.readdirWithFileTypes
|
|
1825
|
+
const list = await fs.readdirWithFileTypes(dir);
|
|
1479
1826
|
if (sort) {
|
|
1480
1827
|
list.sort((a, b) => {
|
|
1481
1828
|
if (a.isDirectory && b.isDirectory) {
|
|
@@ -1509,78 +1856,22 @@ var toTree = async (fs, opts = {}) => {
|
|
|
1509
1856
|
const base = basename(dir, separator) + separator;
|
|
1510
1857
|
return base + subtree;
|
|
1511
1858
|
};
|
|
1512
|
-
// Skill provider backed by a directory tree: each skill lives in
// `<baseDir>/<name>/SKILL.md` with YAML-style frontmatter plus a body.
var FileSystemSkillProvider = class {
  constructor(baseDir) {
    this.baseDir = baseDir;
  }
  /** Read and parse one skill's SKILL.md; returns { frontmatter, body }. */
  async #read(dirName) {
    const raw = await promises.readFile(path.join(this.baseDir, dirName, "SKILL.md"), "utf-8");
    return parseSkillFile(raw);
  }
  async listSkills() {
    const summaries = [];
    for (const dirName of await this.discoverSkillDirs()) {
      const { frontmatter } = await this.#read(dirName);
      summaries.push(frontmatter);
    }
    return summaries;
  }
  async getSkill(name) {
    const { frontmatter, body } = await this.#read(name);
    // The directory name is the skill's identity; a mismatched frontmatter
    // name indicates a misplaced or miscopied SKILL.md.
    if (frontmatter.name !== name) {
      throw new Error(
        `Skill directory "${name}" contains SKILL.md with mismatched name "${frontmatter.name}"`
      );
    }
    return { ...frontmatter, instructions: body };
  }
  /**
   * Convenience method to load all skills with full instructions.
   * Returns `Skill[]` ready to pass into a workflow.
   */
  async loadAll() {
    const skills = [];
    for (const dirName of await this.discoverSkillDirs()) {
      const { frontmatter, body } = await this.#read(dirName);
      skills.push({ ...frontmatter, instructions: body });
    }
    return skills;
  }
  async discoverSkillDirs() {
    const found = [];
    const entries = await promises.readdir(this.baseDir, { withFileTypes: true });
    for (const entry of entries) {
      if (!entry.isDirectory()) continue;
      try {
        // Probe for a readable SKILL.md; directories without one are skipped.
        await promises.readFile(path.join(this.baseDir, entry.name, "SKILL.md"), "utf-8");
        found.push(entry.name);
      } catch {
        // Best-effort discovery: unreadable/missing SKILL.md is not an error.
      }
    }
    return found;
  }
};
|
|
1567
1859
|
|
|
1568
1860
|
exports.FileSystemSkillProvider = FileSystemSkillProvider;
|
|
1569
|
-
exports.
|
|
1570
|
-
exports.
|
|
1861
|
+
exports.SandboxManager = SandboxManager;
|
|
1862
|
+
exports.SandboxNotFoundError = SandboxNotFoundError;
|
|
1863
|
+
exports.SandboxNotSupportedError = SandboxNotSupportedError;
|
|
1864
|
+
exports.applyVirtualTreeMutations = applyVirtualTreeMutations;
|
|
1571
1865
|
exports.askUserQuestionTool = askUserQuestionTool;
|
|
1866
|
+
exports.bashHandler = bashHandler;
|
|
1572
1867
|
exports.bashTool = bashTool;
|
|
1573
1868
|
exports.createAgentStateManager = createAgentStateManager;
|
|
1574
1869
|
exports.createAskUserQuestionHandler = createAskUserQuestionHandler;
|
|
1575
|
-
exports.createBashHandler = createBashHandler;
|
|
1576
1870
|
exports.createBashToolDescription = createBashToolDescription;
|
|
1577
|
-
exports.createEditHandler = createEditHandler;
|
|
1578
|
-
exports.createGlobHandler = createGlobHandler;
|
|
1579
1871
|
exports.createReadSkillHandler = createReadSkillHandler;
|
|
1580
1872
|
exports.createReadSkillTool = createReadSkillTool;
|
|
1581
1873
|
exports.createRunAgentActivity = createRunAgentActivity;
|
|
1582
1874
|
exports.createSession = createSession;
|
|
1583
|
-
exports.createSubagentTool = createSubagentTool;
|
|
1584
1875
|
exports.createTaskCreateHandler = createTaskCreateHandler;
|
|
1585
1876
|
exports.createTaskGetHandler = createTaskGetHandler;
|
|
1586
1877
|
exports.createTaskListHandler = createTaskListHandler;
|
|
@@ -1589,15 +1880,20 @@ exports.createThreadManager = createThreadManager;
|
|
|
1589
1880
|
exports.createToolRouter = createToolRouter;
|
|
1590
1881
|
exports.defineSubagent = defineSubagent;
|
|
1591
1882
|
exports.defineTool = defineTool;
|
|
1883
|
+
exports.editHandler = editHandler;
|
|
1592
1884
|
exports.editTool = editTool;
|
|
1885
|
+
exports.formatVirtualFileTree = formatVirtualFileTree;
|
|
1593
1886
|
exports.getShortId = getShortId;
|
|
1887
|
+
exports.globHandler = globHandler;
|
|
1594
1888
|
exports.globTool = globTool;
|
|
1595
1889
|
exports.grepTool = grepTool;
|
|
1596
1890
|
exports.hasNoOtherToolCalls = hasNoOtherToolCalls;
|
|
1597
1891
|
exports.isTerminalStatus = isTerminalStatus;
|
|
1598
1892
|
exports.parseSkillFile = parseSkillFile;
|
|
1599
1893
|
exports.proxyDefaultThreadOps = proxyDefaultThreadOps;
|
|
1894
|
+
exports.proxySandboxOps = proxySandboxOps;
|
|
1600
1895
|
exports.queryParentWorkflowState = queryParentWorkflowState;
|
|
1896
|
+
exports.readFileHandler = readFileHandler;
|
|
1601
1897
|
exports.readFileTool = readFileTool;
|
|
1602
1898
|
exports.taskCreateTool = taskCreateTool;
|
|
1603
1899
|
exports.taskGetTool = taskGetTool;
|
|
@@ -1605,6 +1901,9 @@ exports.taskListTool = taskListTool;
|
|
|
1605
1901
|
exports.taskUpdateTool = taskUpdateTool;
|
|
1606
1902
|
exports.toTree = toTree;
|
|
1607
1903
|
exports.withAutoAppend = withAutoAppend;
|
|
1904
|
+
exports.withParentWorkflowState = withParentWorkflowState;
|
|
1905
|
+
exports.withSandbox = withSandbox;
|
|
1906
|
+
exports.writeFileHandler = writeFileHandler;
|
|
1608
1907
|
exports.writeFileTool = writeFileTool;
|
|
1609
1908
|
//# sourceMappingURL=index.cjs.map
|
|
1610
1909
|
//# sourceMappingURL=index.cjs.map
|