agent-worker 0.13.0 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -3
- package/dist/{backends-BWzhErjT.mjs → backends-BYWmuyF9.mjs} +1 -1
- package/dist/{backends-CziIqKRg.mjs → backends-C7pQwuAx.mjs} +310 -222
- package/dist/cli/index.mjs +2044 -478
- package/dist/context-CdcZpO-0.mjs +4 -0
- package/dist/create-tool-gcUuI1FD.mjs +32 -0
- package/dist/index.d.mts +65 -87
- package/dist/index.mjs +465 -21
- package/dist/{memory-provider-BtLYtdQH.mjs → memory-provider-ZLOKyCxA.mjs} +8 -3
- package/dist/runner-DB-b57iZ.mjs +670 -0
- package/dist/workflow-DQ6Eju4n.mjs +664 -0
- package/package.json +4 -3
- package/dist/context-BqEyt2SF.mjs +0 -4
- package/dist/logger-Bfdo83xL.mjs +0 -63
- package/dist/runner-CnxROIev.mjs +0 -1496
- package/dist/worker-DBJ8136Q.mjs +0 -448
- package/dist/workflow-CIE3WPNx.mjs +0 -272
- /package/dist/{display-pretty-BCJq5v9d.mjs → display-pretty-Kyd40DEF.mjs} +0 -0
|
@@ -0,0 +1,664 @@
|
|
|
1
|
+
import "./backends-C7pQwuAx.mjs";
|
|
2
|
+
import "./create-tool-gcUuI1FD.mjs";
|
|
3
|
+
import { a as getBackendByType, b as resolveContextDir, c as createAgentLoop, d as generateWorkflowMCPConfig, f as writeBackendMcpConfig, h as LOOP_DEFAULTS, i as createSilentLogger, k as CONTEXT_DEFAULTS, l as runSdkAgent, m as formatInbox, n as createWiredLoop, o as getBackendForModel, p as buildAgentPrompt, r as createChannelLogger, s as checkWorkflowIdle, t as createMinimalRuntime, u as runMockAgent } from "./cli/index.mjs";
|
|
4
|
+
import "./memory-provider-ZLOKyCxA.mjs";
|
|
5
|
+
import { createWorkflowProvider, initWorkflow, n as interpolate, runWorkflowWithLoops, shutdownLoops, t as createContext } from "./runner-DB-b57iZ.mjs";
|
|
6
|
+
import { existsSync, mkdirSync, readFileSync, rmSync } from "node:fs";
|
|
7
|
+
import { basename, dirname, join, resolve } from "node:path";
|
|
8
|
+
import { parse } from "yaml";
|
|
9
|
+
import { homedir } from "node:os";
|
|
10
|
+
import { execFileSync } from "node:child_process";
|
|
11
|
+
import { parseArgs } from "node:util";
|
|
12
|
+
|
|
13
|
+
//#region src/workflow/source.ts
|
|
14
|
+
/**
|
|
15
|
+
* Workflow source resolver — supports local files and remote GitHub references.
|
|
16
|
+
*
|
|
17
|
+
* Formats:
|
|
18
|
+
* Local: ./review.yml, /path/to/review.yml, review.yml
|
|
19
|
+
*
|
|
20
|
+
* Remote (full path):
|
|
21
|
+
* github:owner/repo@ref/path/file.yml (pinned to ref)
|
|
22
|
+
* github:owner/repo/path/file.yml (default branch: main)
|
|
23
|
+
*
|
|
24
|
+
* Remote (shorthand — resolves to workflows/<name>.yml):
|
|
25
|
+
* github:owner/repo@ref#name
|
|
26
|
+
* github:owner/repo#name
|
|
27
|
+
*
|
|
28
|
+
* The @ref is always on the repo segment (format D), keeping repo+version
|
|
29
|
+
* as a single semantic unit.
|
|
30
|
+
*
|
|
31
|
+
* Remote sources are cloned (shallow) to a local cache directory:
|
|
32
|
+
* ~/.cache/agent-worker/sources/{owner}/{repo}/{ref}/
|
|
33
|
+
*
|
|
34
|
+
* The `sourceDir` field exposes the repo root, accessible in workflows
|
|
35
|
+
* as ${{ source.dir }}.
|
|
36
|
+
*/
|
|
37
|
+
/** Prefix that marks a workflow reference as remote (hosted on GitHub). */
const GITHUB_PREFIX = "github:";
/** Branch assumed when a reference does not pin an explicit ref. */
const DEFAULT_REF = "main";
/** Root directory under which remote workflow repos are cached. */
const CACHE_BASE = join(homedir(), ".cache", "agent-worker", "sources");
/** Check if the input is a remote source reference */
function isRemoteSource(input) {
  const prefix = GITHUB_PREFIX;
  return input.startsWith(prefix);
}
|
|
44
|
+
/**
|
|
45
|
+
* Resolve a workflow source — local file or remote GitHub reference.
|
|
46
|
+
* Returns a WorkflowSource that can read the YAML and resolve relative files.
|
|
47
|
+
*/
|
|
48
|
+
async function resolveSource(input) {
  // Dispatch on the reference style: github:… goes to the remote resolver,
  // anything else is treated as a path on the local filesystem.
  return isRemoteSource(input) ? resolveGitHubSource(input) : resolveLocalSource(input);
}
|
|
52
|
+
/**
 * Resolve a workflow file on the local filesystem.
 *
 * @param {string} filePath - Absolute or relative path to the workflow YAML.
 * @returns {object} WorkflowSource: { content, displayPath, inferredName,
 *   sourceDir, readRelativeFile } where sourceDir is the file's directory.
 * @throws {Error} If the file does not exist.
 */
function resolveLocalSource(filePath) {
  const absolutePath = resolve(filePath);
  if (!existsSync(absolutePath)) throw new Error(`Workflow file not found: ${absolutePath}`);
  const content = readFileSync(absolutePath, "utf-8");
  const workflowDir = dirname(absolutePath);
  return {
    content,
    displayPath: absolutePath,
    // Strip only a trailing .yml/.yaml extension. The previous
    // `basename(p, ".yml").replace(".yaml", "")` removed the first ".yaml"
    // occurrence anywhere in the name (e.g. "a.yaml-config.yml" -> "a-config").
    inferredName: basename(absolutePath).replace(/\.ya?ml$/, ""),
    sourceDir: workflowDir,
    // Paths resolve relative to the workflow's directory; absolute paths pass through.
    readRelativeFile: async (relativePath) => {
      const fullPath = relativePath.startsWith("/") ? relativePath : join(workflowDir, relativePath);
      if (existsSync(fullPath)) return readFileSync(fullPath, "utf-8");
      return null;
    }
  };
}
|
|
69
|
+
/**
|
|
70
|
+
* Parse a github: reference string into its components.
|
|
71
|
+
*
|
|
72
|
+
* Supports two formats:
|
|
73
|
+
* Full path: github:owner/repo[@ref]/path/to/file.yml
|
|
74
|
+
* Shorthand: github:owner/repo[@ref]#name -> workflows/name.yml
|
|
75
|
+
*/
|
|
76
|
+
/**
 * Parse a github: reference string into { owner, repo, ref, path }.
 *
 * Supports two formats:
 *   Full path: github:owner/repo[@ref]/path/to/file.yml
 *   Shorthand: github:owner/repo[@ref]#name -> workflows/name.yml
 *
 * @throws {Error} on malformed references.
 */
function parseGitHubRef(input) {
  if (!input.startsWith(GITHUB_PREFIX)) throw new Error(`Not a GitHub reference: "${input}"`);
  // Use the prefix's length instead of a magic `7` so the two stay in sync.
  const rest = input.slice(GITHUB_PREFIX.length);
  const hashIdx = rest.indexOf("#");
  if (hashIdx !== -1) {
    // Shorthand form: everything before '#' is the repo segment,
    // the name after '#' maps to workflows/<name>.yml in the repo.
    const repoStr = rest.slice(0, hashIdx);
    const name = rest.slice(hashIdx + 1);
    if (!name) throw new Error(`Missing workflow name after '#' in: "${input}"`);
    const { owner, repo, ref } = parseRepoSegment(repoStr);
    return {
      owner,
      repo,
      ref,
      path: `workflows/${name}.yml`
    };
  }
  // Full-path form: owner/repo ends at the second slash; the rest is the file path.
  const firstSlash = rest.indexOf("/");
  const secondSlash = firstSlash === -1 ? -1 : rest.indexOf("/", firstSlash + 1);
  if (firstSlash === -1 || secondSlash === -1) throw new Error(`Invalid GitHub reference: "${input}". Expected: github:owner/repo/path or github:owner/repo#name`);
  const repoStr = rest.slice(0, secondSlash);
  const path = rest.slice(secondSlash + 1);
  if (!path) throw new Error(`Missing file path in: "${input}"`);
  const { owner, repo, ref } = parseRepoSegment(repoStr);
  return {
    owner,
    repo,
    ref,
    path
  };
}
|
|
106
|
+
/**
|
|
107
|
+
* Validate a git ref (branch, tag, or SHA) to prevent injection.
|
|
108
|
+
* Allows alphanumeric, hyphens, dots, underscores, slashes — the standard git ref charset.
|
|
109
|
+
*/
|
|
110
|
+
/**
 * Validate a git ref (branch, tag, or SHA) to prevent injection.
 * Allows alphanumeric, hyphens, dots, underscores, slashes — the standard git ref charset.
 * Additionally rejects refs that start with "-" (git would parse them as a
 * CLI option in `fetch`/`clone --branch`) and refs containing ".." (invalid
 * in git ref names, and a path-traversal vector since the ref is joined
 * into the cache directory path).
 *
 * @throws {Error} when the ref is not safe to pass to git / the filesystem.
 */
function validateGitRef(ref) {
  if (!/^[a-zA-Z0-9._\-/]+$/.test(ref)) throw new Error(`Invalid git ref: "${ref}". Only alphanumeric, hyphens, dots, underscores, and slashes are allowed.`);
  if (ref.startsWith("-")) throw new Error(`Invalid git ref: "${ref}". Refs must not start with "-".`);
  if (ref.includes("..")) throw new Error(`Invalid git ref: "${ref}". Refs must not contain "..".`);
}
|
|
113
|
+
/** Parse "owner/repo" or "owner/repo@ref" */
|
|
114
|
+
/**
 * Parse "owner/repo" or "owner/repo@ref" into { owner, repo, ref }.
 *
 * Owner and repo are restricted to the GitHub name charset (alphanumeric,
 * ".", "_", "-") and must not be "..", so they cannot smuggle path
 * separators or traversal segments into the cache-directory path that is
 * later built from them.
 *
 * @throws {Error} on malformed or unsafe input.
 */
function parseRepoSegment(repoStr) {
  let ref = DEFAULT_REF;
  const atIdx = repoStr.indexOf("@");
  let cleanStr = repoStr;
  if (atIdx !== -1) {
    ref = repoStr.slice(atIdx + 1);
    cleanStr = repoStr.slice(0, atIdx);
    if (!ref) throw new Error(`Empty ref after '@' in: "${repoStr}"`);
  }
  const parts = cleanStr.split("/");
  if (parts.length !== 2 || !parts[0] || !parts[1]) throw new Error(`Invalid repository format: "${repoStr}". Expected "owner/repo"`);
  // Reject names that could escape the cache directory or confuse git.
  const NAME_PATTERN = /^[A-Za-z0-9._-]+$/;
  for (const segment of [parts[0], parts[1]]) {
    if (!NAME_PATTERN.test(segment) || segment === "." || segment === "..") throw new Error(`Invalid repository format: "${repoStr}". Owner and repo may only contain alphanumerics, ".", "_" and "-"`);
  }
  validateGitRef(ref);
  return {
    owner: parts[0],
    repo: parts[1],
    ref
  };
}
|
|
132
|
+
/**
|
|
133
|
+
* Get cache directory for a repo+ref combination.
|
|
134
|
+
*/
|
|
135
|
+
/**
 * Cache directory for a repo+ref combination:
 * ~/.cache/agent-worker/sources/{owner}/{repo}/{ref}
 */
function getCacheDir(ref) {
  const { owner, repo, ref: gitRef } = ref;
  return join(CACHE_BASE, owner, repo, gitRef);
}
|
|
138
|
+
/**
|
|
139
|
+
* Build the clone URL. Uses HTTPS; GITHUB_TOKEN auth via git credential helper
|
|
140
|
+
* or the GIT_ASKPASS/GIT_AUTH mechanism handled by git itself.
|
|
141
|
+
*/
|
|
142
|
+
/**
 * Build the clone URL. Uses HTTPS; GITHUB_TOKEN auth via git credential helper
 * or the GIT_ASKPASS/GIT_AUTH mechanism handled by git itself.
 * NOTE(review): when GITHUB_TOKEN is set it is embedded in the URL, which
 * git persists in the cached clone's remote config — confirm this is
 * acceptable for the cache directory.
 */
function getCloneUrl(ref) {
  const repoUrl = `github.com/${ref.owner}/${ref.repo}.git`;
  const token = process.env.GITHUB_TOKEN;
  return token ? `https://${token}@${repoUrl}` : `https://${repoUrl}`;
}
|
|
147
|
+
/**
|
|
148
|
+
* Check if a ref looks like a branch/tag name (mutable) vs a commit SHA (immutable).
|
|
149
|
+
* Commit SHAs are 7-40 hex chars.
|
|
150
|
+
*/
|
|
151
|
+
/**
 * Heuristic: does the ref look like a commit SHA (immutable) rather than a
 * branch/tag name (mutable)? Commit SHAs are 7-40 lowercase hex characters.
 * Note an all-hex branch name of that length (e.g. "decafbad") also matches.
 */
function isImmutableRef(ref) {
  const SHA_PATTERN = /^[0-9a-f]{7,40}$/;
  return SHA_PATTERN.test(ref);
}
|
|
154
|
+
/**
|
|
155
|
+
* Clone or update a remote repo to the cache directory.
|
|
156
|
+
*
|
|
157
|
+
* Strategy:
|
|
158
|
+
* - If cache exists and ref is immutable (SHA): skip, use cached
|
|
159
|
+
* - If cache exists and ref is mutable (branch/tag): git fetch + reset
|
|
160
|
+
* - If cache doesn't exist: shallow clone
|
|
161
|
+
*
|
|
162
|
+
* @returns Absolute path to the cache directory (repo root)
|
|
163
|
+
*/
|
|
164
|
+
/**
 * Clone or update a remote repo into the cache directory.
 *
 * Strategy:
 *  - cache exists + immutable ref (SHA): reuse the cache as-is
 *  - cache exists + mutable ref (branch/tag): shallow fetch + hard reset;
 *    on any git failure fall through to a fresh clone (best-effort update)
 *  - no cache: fresh shallow single-branch clone
 *
 * @returns {string} Absolute path to the cache directory (repo root)
 */
function ensureClone(ref) {
  const cacheDir = getCacheDir(ref);
  if (existsSync(join(cacheDir, ".git"))) {
    if (isImmutableRef(ref.ref)) return cacheDir;
    try {
      const gitOpts = { cwd: cacheDir, stdio: "pipe" };
      execFileSync("git", ["fetch", "origin", ref.ref, "--depth", "1"], { ...gitOpts, timeout: 30000 });
      execFileSync("git", ["reset", "--hard", "FETCH_HEAD"], { ...gitOpts, timeout: 10000 });
      return cacheDir;
    } catch {
      // Update failed (network, force-push, corrupted cache) — fall through
      // and rebuild the cache with a fresh clone below.
    }
  }
  mkdirSync(dirname(cacheDir), { recursive: true });
  if (existsSync(cacheDir)) rmSync(cacheDir, { recursive: true, force: true });
  const url = getCloneUrl(ref);
  const cloneArgs = ["clone", "--depth", "1", "--single-branch", "--branch", ref.ref, url, cacheDir];
  execFileSync("git", cloneArgs, { stdio: "pipe", timeout: 60000 });
  return cacheDir;
}
|
|
213
|
+
/**
 * Resolve a github: reference to a WorkflowSource.
 * Clones (or reuses) the cached repo, then reads the workflow file from it.
 *
 * @throws {Error} if the referenced file does not exist in the clone.
 */
async function resolveGitHubSource(input) {
  const ref = parseGitHubRef(input);
  const repoDir = ensureClone(ref);
  const workflowPath = join(repoDir, ref.path);
  if (!existsSync(workflowPath)) throw new Error(`Remote workflow not found: ${ref.path}\n Source: ${input}\n Parsed: ${ref.owner}/${ref.repo}@${ref.ref} -> ${ref.path}\n Clone: ${repoDir}`);
  const content = readFileSync(workflowPath, "utf-8");
  const workflowDir = dirname(workflowPath);
  // Strip only a trailing .yml/.yaml extension. The previous
  // `basename(p, ".yml").replace(".yaml", "")` removed the first ".yaml"
  // occurrence anywhere in the name, mangling names like "a.yaml-config.yml".
  const inferredName = basename(ref.path).replace(/\.ya?ml$/, "");
  return {
    content,
    displayPath: `${GITHUB_PREFIX}${ref.owner}/${ref.repo}@${ref.ref}/${ref.path}`,
    inferredName,
    sourceDir: repoDir,
    // Paths resolve relative to the workflow file's directory inside the clone.
    readRelativeFile: async (relativePath) => {
      const fullPath = relativePath.startsWith("/") ? relativePath : join(workflowDir, relativePath);
      if (existsSync(fullPath)) return readFileSync(fullPath, "utf-8");
      return null;
    }
  };
}
|
|
234
|
+
|
|
235
|
+
//#endregion
|
|
236
|
+
//#region src/workflow/parser.ts
|
|
237
|
+
/**
|
|
238
|
+
* Workflow file parser
|
|
239
|
+
*/
|
|
240
|
+
/**
 * Parse a workflow file (local or remote) into its resolved form.
 *
 * Supports:
 *   Local:  review.yml, ./path/to/review.yml
 *   Remote: github:owner/repo@ref/path/file.yml
 *           github:owner/repo[@ref]#name
 *
 * @throws {Error} on unreadable sources, YAML syntax errors, or validation failures.
 */
async function parseWorkflowFile(filePath, options) {
  const workflow = options?.workflow ?? "global";
  const tag = options?.tag ?? "main";
  const source = await resolveSource(filePath);
  // Context paths resolve against the workflow's directory for local files,
  // but against the invoking cwd for remote sources (the clone is a cache).
  const contextBaseDir = isRemoteSource(filePath) ? process.cwd() : dirname(resolve(filePath));
  let raw;
  try {
    raw = parse(source.content);
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(`Failed to parse YAML: ${reason}`);
  }
  const validation = validateWorkflow(raw);
  if (!validation.valid) {
    const messages = validation.errors.map((e) => ` - ${e.path}: ${e.message}`).join("\n");
    throw new Error(`Invalid workflow file:\n${messages}`);
  }
  const name = raw.name || source.inferredName;
  const agents = {};
  for (const [agentName, agentDef] of Object.entries(raw.agents)) {
    agents[agentName] = await resolveAgent(agentDef, source.readRelativeFile);
  }
  const context = resolveContext(raw.context, contextBaseDir, name, workflow, tag);
  return {
    name,
    filePath: source.displayPath,
    sourceDir: source.sourceDir,
    agents,
    context,
    params: raw.params,
    setup: raw.setup || [],
    kickoff: raw.kickoff
  };
}
|
|
279
|
+
/**
|
|
280
|
+
* Resolve context configuration
|
|
281
|
+
*
|
|
282
|
+
* - undefined (not set): default file provider enabled
|
|
283
|
+
* - null: default file provider enabled (YAML `context:` syntax)
|
|
284
|
+
* - false: explicitly disabled
|
|
285
|
+
* - { provider: 'file', config?: { dir | bind } }: file provider (ephemeral or persistent)
|
|
286
|
+
* - { provider: 'memory' }: memory provider (for testing)
|
|
287
|
+
*/
|
|
288
|
+
/**
 * Resolve context configuration.
 *
 * - undefined / null: default file provider enabled
 * - false: explicitly disabled (returns undefined)
 * - { provider: 'memory' }: memory provider (for testing)
 * - { provider: 'file', config?: { dir | bind } }: file provider;
 *   `bind` marks the directory as persistent.
 */
function resolveContext(config, workflowDir, workflowName, workflow, tag) {
  // Named to avoid shadowing node:path's `resolve` import.
  const resolveDir = (template) => resolveContextDir(template, {
    workflowName,
    workflow,
    tag,
    baseDir: workflowDir
  });
  if (config === false) return;
  if (config === undefined || config === null) {
    return {
      provider: "file",
      dir: resolveDir(CONTEXT_DEFAULTS.dir)
    };
  }
  if (config.provider === "memory") {
    return {
      provider: "memory",
      documentOwner: config.documentOwner
    };
  }
  const bindPath = config.config?.bind;
  if (bindPath) {
    return {
      provider: "file",
      dir: resolveDir(bindPath),
      persistent: true,
      documentOwner: config.documentOwner
    };
  }
  return {
    provider: "file",
    dir: resolveDir(config.config?.dir || CONTEXT_DEFAULTS.dir),
    documentOwner: config.documentOwner
  };
}
|
|
317
|
+
/**
|
|
318
|
+
* Resolve agent definition (load system prompt from file if needed).
|
|
319
|
+
*
|
|
320
|
+
* Uses a `readRelativeFile` function to abstract local vs remote file access.
|
|
321
|
+
* Also transforms `wakeup` and `wakeup_prompt` fields into a `ScheduleConfig`
|
|
322
|
+
* object, which is the format expected by the daemon and loop layers
|
|
323
|
+
* for setting up periodic wakeup timers.
|
|
324
|
+
*/
|
|
325
|
+
/**
 * Resolve an agent definition: load the system prompt from a relative file
 * when it names a .txt/.md file, and fold wakeup/wakeup_prompt into a
 * schedule object for the daemon/loop layers.
 */
async function resolveAgent(agent, readRelativeFile) {
  let resolvedSystemPrompt = agent.system_prompt;
  const looksLikeFile = resolvedSystemPrompt?.endsWith(".txt") || resolvedSystemPrompt?.endsWith(".md");
  if (looksLikeFile) {
    const fileContent = await readRelativeFile(resolvedSystemPrompt);
    // If the referenced file is absent, keep the literal string as the prompt.
    if (fileContent !== null) resolvedSystemPrompt = fileContent;
  }
  let schedule;
  if (agent.wakeup !== undefined) {
    schedule = agent.wakeup_prompt
      ? { wakeup: agent.wakeup, prompt: agent.wakeup_prompt }
      : { wakeup: agent.wakeup };
  }
  return {
    ...agent,
    resolvedSystemPrompt,
    schedule
  };
}
|
|
342
|
+
/**
|
|
343
|
+
* Validate workflow structure
|
|
344
|
+
*/
|
|
345
|
+
/**
 * Validate workflow structure.
 * @returns {{ valid: boolean, errors: Array<{path: string, message: string}> }}
 */
function validateWorkflow(workflow) {
  const errors = [];
  if (!workflow || typeof workflow !== "object") {
    errors.push({ path: "", message: "Workflow must be an object" });
    return { valid: false, errors };
  }
  const w = workflow;
  if (!w.agents || typeof w.agents !== "object") {
    errors.push({ path: "agents", message: "Required field \"agents\" must be an object" });
  } else {
    for (const [name, agent] of Object.entries(w.agents)) validateAgent(name, agent, errors);
  }
  if (w.context !== undefined && w.context !== null && w.context !== false) {
    validateContext(w.context, errors);
  }
  if (w.params !== undefined) {
    if (!Array.isArray(w.params)) {
      errors.push({ path: "params", message: "Params must be an array" });
    } else {
      // Track seen names/short flags so duplicates across params are caught.
      const names = new Set();
      const shorts = new Set();
      w.params.forEach((p, i) => validateParam(`params[${i}]`, p, errors, names, shorts));
    }
  }
  if (w.setup !== undefined) {
    if (!Array.isArray(w.setup)) {
      errors.push({ path: "setup", message: "Setup must be an array" });
    } else {
      w.setup.forEach((task, i) => validateSetupTask(`setup[${i}]`, task, errors));
    }
  }
  if (w.kickoff !== undefined && typeof w.kickoff !== "string") {
    errors.push({ path: "kickoff", message: "Kickoff must be a string" });
  }
  return { valid: errors.length === 0, errors };
}
|
|
390
|
+
/**
 * Validate the `context` section of a workflow.
 * Appends any problems found to `errors`; returns nothing.
 */
function validateContext(context, errors) {
  if (typeof context !== "object" || context === null) {
    errors.push({ path: "context", message: "Context must be an object or false" });
    return;
  }
  const c = context;
  if (!c.provider || typeof c.provider !== "string") {
    errors.push({ path: "context.provider", message: 'Context requires "provider" field (file or memory)' });
    return;
  }
  if (c.provider !== "file" && c.provider !== "memory") {
    errors.push({ path: "context.provider", message: 'Context provider must be "file" or "memory"' });
    return;
  }
  if (c.documentOwner !== undefined && typeof c.documentOwner !== "string") {
    errors.push({ path: "context.documentOwner", message: "Context documentOwner must be a string" });
  }
  // Only the file provider carries a config sub-object.
  if (c.provider !== "file" || c.config === undefined) return;
  if (typeof c.config !== "object" || c.config === null) {
    errors.push({ path: "context.config", message: "Context config must be an object" });
    return;
  }
  const cfg = c.config;
  if (cfg.dir !== undefined && cfg.bind !== undefined) {
    errors.push({ path: "context.config", message: '"dir" and "bind" are mutually exclusive — use one or the other' });
    return;
  }
  if (cfg.dir !== undefined && typeof cfg.dir !== "string") {
    errors.push({ path: "context.config.dir", message: "Context config dir must be a string" });
  }
  if (cfg.bind !== undefined && typeof cfg.bind !== "string") {
    errors.push({ path: "context.config.bind", message: "Context config bind must be a string path" });
  }
}
|
|
443
|
+
/** Template namespaces provided by the runtime; setup tasks may not claim them. */
const RESERVED_NAMESPACES = [
  "env",
  "workflow",
  "params",
  "source"
];
/** Allowed `type` values for workflow param definitions. */
const VALID_PARAM_TYPES = [
  "string",
  "number",
  "boolean"
];
/**
 * Validate a single setup task entry. Appends problems to `errors`.
 */
function validateSetupTask(path, task, errors) {
  if (!task || typeof task !== "object") {
    errors.push({ path, message: "Setup task must be an object" });
    return;
  }
  const t = task;
  if (!t.shell || typeof t.shell !== "string") {
    errors.push({ path: `${path}.shell`, message: 'Setup task requires "shell" field as string' });
  }
  if (t.as !== undefined && typeof t.as !== "string") {
    errors.push({ path: `${path}.as`, message: 'Setup task "as" field must be a string' });
  }
  if (typeof t.as === "string" && RESERVED_NAMESPACES.includes(t.as)) {
    errors.push({ path: `${path}.as`, message: `"${t.as}" is a reserved namespace and cannot be used as a variable name` });
  }
}
|
|
476
|
+
/**
 * Validate a single param definition. Appends problems to `errors`;
 * `names` and `shorts` accumulate seen identifiers for duplicate detection.
 */
function validateParam(path, param, errors, names, shorts) {
  if (!param || typeof param !== "object") {
    errors.push({ path, message: "Param must be an object" });
    return;
  }
  const p = param;
  if (!p.name || typeof p.name !== "string") {
    errors.push({ path: `${path}.name`, message: 'Param requires "name" field as string' });
    return;
  }
  if (names.has(p.name)) {
    errors.push({ path: `${path}.name`, message: `Duplicate param name: "${p.name}"` });
  }
  names.add(p.name);
  if (p.description !== undefined && typeof p.description !== "string") {
    errors.push({ path: `${path}.description`, message: "Param description must be a string" });
  }
  if (p.type !== undefined && (typeof p.type !== "string" || !VALID_PARAM_TYPES.includes(p.type))) {
    errors.push({ path: `${path}.type`, message: `Param type must be one of: ${VALID_PARAM_TYPES.join(", ")}` });
  }
  if (p.short !== undefined) {
    if (typeof p.short !== "string" || p.short.length !== 1) {
      errors.push({ path: `${path}.short`, message: "Param short must be a single character" });
    } else {
      if (shorts.has(p.short)) {
        errors.push({ path: `${path}.short`, message: `Duplicate param short flag: "-${p.short}"` });
      }
      shorts.add(p.short);
    }
  }
  if (p.required !== undefined && typeof p.required !== "boolean") {
    errors.push({ path: `${path}.required`, message: "Param required must be a boolean" });
  }
}
|
|
523
|
+
/** Backends that don't require an explicit model field */
const CLI_BACKENDS = [
  "claude",
  "cursor",
  "codex",
  "opencode",
  "mock"
];
/**
 * Validate a single agent definition. Appends problems to `errors`.
 */
function validateAgent(name, agent, errors) {
  const path = `agents.${name}`;
  if (!agent || typeof agent !== "object") {
    errors.push({ path, message: "Agent must be an object" });
    return;
  }
  const a = agent;
  const backend = typeof a.backend === "string" ? a.backend : "default";
  if (a.model !== undefined && typeof a.model !== "string") {
    errors.push({ path: `${path}.model`, message: 'Field "model" must be a string' });
  } else if (!a.model && !CLI_BACKENDS.includes(backend)) {
    // Only the default (SDK) backend needs an explicit model.
    errors.push({ path: `${path}.model`, message: 'Required field "model" must be a string (required for default backend)' });
  }
  if (a.system_prompt !== undefined && typeof a.system_prompt !== "string") {
    errors.push({ path: `${path}.system_prompt`, message: 'Optional field "system_prompt" must be a string' });
  }
  if (a.tools !== undefined && !Array.isArray(a.tools)) {
    errors.push({ path: `${path}.tools`, message: 'Optional field "tools" must be an array' });
  }
  if (a.wakeup !== undefined) {
    if (typeof a.wakeup !== "string" && typeof a.wakeup !== "number") {
      errors.push({ path: `${path}.wakeup`, message: 'Field "wakeup" must be a string (duration or cron) or number (ms)' });
    } else if (typeof a.wakeup === "number" && a.wakeup <= 0) {
      errors.push({ path: `${path}.wakeup`, message: 'Field "wakeup" must be a positive number when specified as ms' });
    }
  }
  if (a.wakeup_prompt !== undefined) {
    if (typeof a.wakeup_prompt !== "string") {
      errors.push({ path: `${path}.wakeup_prompt`, message: 'Field "wakeup_prompt" must be a string' });
    }
    if (a.wakeup === undefined) {
      errors.push({ path: `${path}.wakeup_prompt`, message: 'Field "wakeup_prompt" can only be used when "wakeup" is also specified' });
    }
  }
  if (a.provider !== undefined) {
    const isPlainObject = typeof a.provider === "object" && a.provider !== null && !Array.isArray(a.provider);
    if (typeof a.provider === "string") {
      // A bare provider name needs no structural checks.
    } else if (isPlainObject) {
      const p = a.provider;
      if (!p.name || typeof p.name !== "string") {
        errors.push({ path: `${path}.provider.name`, message: 'Field "provider.name" is required and must be a string' });
      }
      if (p.base_url !== undefined && typeof p.base_url !== "string") {
        errors.push({ path: `${path}.provider.base_url`, message: 'Field "provider.base_url" must be a string' });
      }
      if (p.api_key !== undefined && typeof p.api_key !== "string") {
        errors.push({ path: `${path}.provider.api_key`, message: 'Field "provider.api_key" must be a string' });
      }
    } else {
      errors.push({ path: `${path}.provider`, message: 'Field "provider" must be a string or object with { name, base_url?, api_key? }' });
    }
    if (CLI_BACKENDS.includes(backend) && backend !== "mock") {
      errors.push({ path: `${path}.provider`, message: `Field "provider" is ignored for CLI backend "${backend}" (only works with default backend)` });
    }
  }
}
|
|
603
|
+
/**
 * Parse CLI arguments against workflow param definitions using
 * Node's built-in util.parseArgs().
 *
 * @param defs Param definitions from workflow YAML
 * @param argv Raw CLI arguments (everything after the workflow file)
 * @returns Resolved param values as a string map
 * @throws Error if required params are missing or a number param is invalid
 */
function parseWorkflowParams(defs, argv) {
  if (defs.length === 0) return {};
  // Build the parseArgs option table; booleans are native flags, every
  // other type is captured as a string and coerced below.
  const options = {};
  for (const def of defs) {
    const opt = { type: def.type === "boolean" ? "boolean" : "string" };
    if (def.short) opt.short = def.short;
    options[def.name] = opt;
  }
  const { values } = parseArgs({ args: argv, options, strict: true });
  const result = {};
  const missing = [];
  for (const def of defs) {
    let raw = values[def.name];
    if (raw === undefined && def.default !== undefined) raw = String(def.default);
    if (raw === undefined) {
      if (def.required) {
        missing.push(def.short ? `-${def.short}/--${def.name}` : `--${def.name}`);
      }
      continue;
    }
    if (def.type === "number") {
      const num = Number(raw);
      if (Number.isNaN(num)) throw new Error(`Parameter --${def.name} must be a number, got: "${raw}"`);
      result[def.name] = String(num);
    } else {
      result[def.name] = String(raw);
    }
  }
  if (missing.length > 0) throw new Error(`Missing required parameter(s): ${missing.join(", ")}`);
  return result;
}
|
|
646
|
+
/**
 * Render the "Workflow parameters:" help section for a set of param
 * definitions. Returns "" when there are no params.
 */
function formatParamHelp(defs) {
  if (defs.length === 0) return "";
  const rows = defs.map((def) => {
    // Pad the no-short case so long flags align with "-x, --name" rows.
    const flags = def.short ? `-${def.short}, --${def.name}` : `    --${def.name}`;
    const kind = def.type || "string";
    const suffix = `${def.required ? " (required)" : ""}${def.default !== undefined ? ` [default: ${def.default}]` : ""}`;
    return `  ${flags} <${kind}> ${def.description || ""}${suffix}`;
  });
  return ["", "Workflow parameters:", ...rows].join("\n");
}
|
|
662
|
+
|
|
663
|
+
//#endregion
|
|
664
|
+
export { formatParamHelp, parseWorkflowFile, parseWorkflowParams, runWorkflowWithLoops, shutdownLoops };
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "agent-worker",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.15.0",
|
|
4
4
|
"description": "SDK and CLI for creating and testing agent workers with Vercel AI SDK",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.mjs",
|
|
@@ -33,13 +33,13 @@
|
|
|
33
33
|
},
|
|
34
34
|
"dependencies": {
|
|
35
35
|
"@clack/prompts": "^1.0.0",
|
|
36
|
+
"@hono/node-server": "^1.19.9",
|
|
36
37
|
"@modelcontextprotocol/sdk": "^1.26.0",
|
|
37
38
|
"ai": "^6.0.69",
|
|
38
39
|
"bash-tool": "^1.3.12",
|
|
39
40
|
"chalk": "^5.6.2",
|
|
40
41
|
"commander": "^14.0.3",
|
|
41
42
|
"execa": "^9.6.1",
|
|
42
|
-
"@hono/node-server": "^1.19.9",
|
|
43
43
|
"hono": "^4.11.9",
|
|
44
44
|
"just-bash": "^2.8.0",
|
|
45
45
|
"nanoid": "^5.1.6",
|
|
@@ -47,7 +47,7 @@
|
|
|
47
47
|
"string-width": "^8.1.1",
|
|
48
48
|
"wrap-ansi": "^9.0.2",
|
|
49
49
|
"yaml": "^2.7.0",
|
|
50
|
-
"zod": "^3.
|
|
50
|
+
"zod": "^4.3.6"
|
|
51
51
|
},
|
|
52
52
|
"devDependencies": {
|
|
53
53
|
"@ai-sdk/anthropic": "^3.0.0",
|
|
@@ -58,6 +58,7 @@
|
|
|
58
58
|
"@ai-sdk/openai": "^3.0.0",
|
|
59
59
|
"@ai-sdk/xai": "^1.0.0",
|
|
60
60
|
"@types/bun": "latest",
|
|
61
|
+
"@types/node": ">=22",
|
|
61
62
|
"@typescript/native-preview": "^7.0.0-dev.20260203.1",
|
|
62
63
|
"oxfmt": "^0.28.0",
|
|
63
64
|
"oxlint": "^1.43.0",
|
|
@@ -1,4 +0,0 @@
|
|
|
1
|
-
import { C as formatToolParams, S as formatInbox, T as EventLog, _ as shouldUseResource, a as FileStorage, b as formatProposalList, c as CONTEXT_DEFAULTS, d as RESOURCE_PREFIX, f as RESOURCE_SCHEME, g as generateResourceId, h as extractMentions, i as resolveContextDir, l as MENTION_PATTERN, m as createResourceRef, n as createFileContextProvider, o as MemoryStorage, p as calculatePriority, r as getDefaultContextDir, s as ContextProviderImpl, t as FileContextProvider, u as MESSAGE_LENGTH_THRESHOLD, v as createContextMCPServer, w as getAgentId, x as createLogTool, y as formatProposal } from "./cli/index.mjs";
|
|
2
|
-
import { n as createMemoryContextProvider, t as MemoryContextProvider } from "./memory-provider-BtLYtdQH.mjs";
|
|
3
|
-
|
|
4
|
-
export { createFileContextProvider };
|