@flue/sdk 0.3.0 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,1664 @@
1
+ import { a as parseFrontmatterFile, n as createTools, t as BUILTIN_TOOL_NAMES } from "./agent-BB4lwAd5.mjs";
2
+ import * as esbuild from "esbuild";
3
+ import * as fs from "node:fs";
4
+ import * as path from "node:path";
5
+ import { packageUpSync } from "package-up";
6
+ import { spawn } from "node:child_process";
7
+ import { parseEnv } from "node:util";
8
+
9
+ //#region src/cloudflare-wrangler-merge.ts
10
+ /**
11
+ * Merge Flue's Cloudflare additions into the user's wrangler config.
12
+ *
13
+ * Philosophy: the user's wrangler config is the source of truth. Flue contributes
14
+ * the pieces it owns (the Worker entrypoint, its per-agent Durable Object
15
+ * bindings, the Sandbox DO, the migration tag) and leaves everything else
16
+ * untouched. The merged result is written to `dist/wrangler.jsonc` so the
17
+ * deployed Worker sees both.
18
+ *
19
+ * We delegate parsing and normalization to wrangler's own `unstable_readConfig`
20
+ * (lazy-imported so Node-only Flue users don't pay for it). This gets us:
21
+ * - Both jsonc and TOML support for free.
22
+ * - Wrangler's own validation diagnostics (clearer errors than ours).
23
+ * - Path normalization: relative paths in fields like `containers[].image`
24
+ * are resolved to absolute paths against the user's config dir before
25
+ * we merge. This is critical because we write the merged config to
26
+ * `dist/wrangler.jsonc` — wrangler resolves relative paths against the
27
+ * config file's own directory, so without normalization a user's
28
+ * `containers[].image: "./Dockerfile"` would resolve to `dist/Dockerfile`
29
+ * after the move and fail to deploy.
30
+ *
31
+ * Flue still owns merge semantics (DO binding de-dup by `name`, migration
32
+ * append-if-tag-absent) and Flue-specific validation (compat date floor,
33
+ * required compat flags) — wrangler doesn't know about those.
34
+ */
35
/**
 * Minimum compatibility_date Flue supports.
 * Compared lexicographically (string `<`) against the user's date in
 * `validateUserWranglerConfig` — safe because both sides are first validated
 * to be in YYYY-MM-DD form.
 */
const MIN_COMPATIBILITY_DATE = "2026-04-01";
/** compatibility_flag Flue requires for pi-ai's process.env-based API key lookup. */
const REQUIRED_COMPAT_FLAG = "nodejs_compat";
39
/**
 * Read and normalize the user's wrangler config from `outputDir`.
 *
 * Probes for `wrangler.jsonc`, then `wrangler.json`, then `wrangler.toml`
 * (jsonc is Cloudflare's recommended format for new projects, but all three
 * work) and returns `{ config: {}, path: null }` when none is present.
 *
 * Parsing + normalization are delegated to wrangler's `unstable_readConfig`.
 * Wrangler is a peer dependency and is imported lazily here so Node-only Flue
 * users never pay for resolving it — which is the only reason this function is
 * async; the wrangler call itself is synchronous under the hood. A missing
 * install yields an actionable error rather than a bare resolution failure.
 *
 * The returned config has been through wrangler's own validation, which
 * resolves relative paths to absolute (notably `containers[].image`), fills in
 * defaults, and merges `env.*` overrides — exactly what lets us re-emit the
 * merged result under `dist/wrangler.jsonc` without breaking relative paths.
 */
async function readUserWranglerConfig(outputDir) {
  // Probe order mirrors Cloudflare's own preference: jsonc > json > toml.
  const candidateNames = [
    "wrangler.jsonc",
    "wrangler.json",
    "wrangler.toml"
  ];
  const configPath =
    candidateNames
      .map((name) => path.join(outputDir, name))
      .find((candidate) => fs.existsSync(candidate)) ?? null;
  if (configPath === null) {
    return { config: {}, path: null };
  }
  // Lazy peer-dependency import: only Cloudflare builds need wrangler.
  let wrangler;
  try {
    wrangler = await import("wrangler");
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new Error(`[flue] Reading the Cloudflare wrangler config requires the "wrangler" package as a peer dependency.
Install it in your project:

npm install --save-dev wrangler

Underlying error: ${detail}`);
  }
  try {
    const config = wrangler.unstable_readConfig({ config: configPath }, { hideWarnings: true });
    return { config, path: configPath };
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new Error(`[flue] Failed to read ${configPath}: ${detail}`);
  }
}
102
/**
 * Validate that the user's wrangler config meets Flue's minimum runtime
 * requirements; throws a clear, actionable error when it doesn't.
 *
 * Deliberately strict instead of silently patching bad configs: a missing
 * nodejs_compat flag or a too-old compatibility_date produces confusing
 * runtime failures, and a build-time error is far friendlier.
 *
 * With `mergeFlueAdditions` this enforces two invariants on every Flue worker:
 * 1. `nodejs_compat` is present in `compatibility_flags` (added if missing).
 * 2. `compatibility_date >= MIN_COMPATIBILITY_DATE` (defaulted if missing).
 * Those invariants are what let `dev.ts` hardcode `nodejsCompatMode: 'v2'`
 * without re-deriving it from the config on every reload.
 */
function validateUserWranglerConfig(config) {
  const flags = config.compatibility_flags;
  if (Array.isArray(flags) && !flags.includes(REQUIRED_COMPAT_FLAG)) {
    throw new Error(`[flue] Your wrangler config's "compatibility_flags" is missing "${REQUIRED_COMPAT_FLAG}". Flue relies on it at runtime (e.g. for API key resolution via process.env). Add "${REQUIRED_COMPAT_FLAG}" to the list.`);
  }
  const date = config.compatibility_date;
  // Absent date is fine here — mergeFlueAdditions fills in the default.
  if (typeof date !== "string") return;
  if (!/^\d{4}-\d{2}-\d{2}$/.test(date)) {
    throw new Error(`[flue] Your wrangler config's "compatibility_date" ("${date}") is not in YYYY-MM-DD format.`);
  }
  // YYYY-MM-DD strings order lexicographically, so `<` is a valid comparison.
  if (date < MIN_COMPATIBILITY_DATE) {
    throw new Error(`[flue] Your wrangler config's "compatibility_date" is "${date}". Flue requires at least "${MIN_COMPATIBILITY_DATE}" for SQLite-backed Durable Object support, nodejs_compat v2, and AsyncLocalStorage. Bump the date (set it to today unless you have a specific reason).`);
  }
}
129
/**
 * Build the merged wrangler config: start from the user's config and layer
 * Flue's contributions on top. Pure function — the caller handles reading and
 * writing. Flue owns the entrypoint, its per-agent DO bindings (de-duplicated
 * against the user's by binding `name`), the migration (appended only if its
 * tag is absent), and defaults for `name` / `compatibility_date` /
 * `compatibility_flags`; everything else in the user's config is untouched.
 */
function mergeFlueAdditions(userConfig, additions) {
  const out = { ...userConfig };
  // Flue always owns the Worker entrypoint.
  out.main = additions.main;
  if (typeof out.name !== "string" || out.name.length === 0) {
    out.name = additions.defaultName;
  }
  if (typeof out.compatibility_date !== "string") {
    out.compatibility_date = MIN_COMPATIBILITY_DATE;
  }
  // Keep only string flags and guarantee the required flag is present.
  const flags = Array.isArray(out.compatibility_flags)
    ? out.compatibility_flags.filter((flag) => typeof flag === "string")
    : [];
  if (!flags.includes(REQUIRED_COMPAT_FLAG)) flags.push(REQUIRED_COMPAT_FLAG);
  out.compatibility_flags = flags;
  // Append Flue's DO bindings, skipping any name the user already bound.
  const doSection = typeof out.durable_objects === "object" && out.durable_objects !== null ? out.durable_objects : {};
  const userBindings = Array.isArray(doSection.bindings) ? doSection.bindings : [];
  const takenNames = new Set();
  for (const binding of userBindings) {
    if (typeof binding === "object" && binding !== null && typeof binding.name === "string") takenNames.add(binding.name);
  }
  out.durable_objects = {
    ...doSection,
    bindings: [...userBindings, ...additions.doBindings.filter((b) => !takenNames.has(b.name))]
  };
  // Append Flue's migration only when its tag isn't already declared.
  const migrations = Array.isArray(out.migrations) ? [...out.migrations] : [];
  const tagPresent = migrations.some((m) => typeof m === "object" && m !== null && typeof m.tag === "string" && m.tag === additions.migration.tag);
  if (!tagPresent) migrations.push(additions.migration);
  out.migrations = migrations;
  return out;
}
156
/**
 * Remove wrangler-normalizer defaults that cause spurious warnings when
 * wrangler re-parses our generated dist/wrangler.jsonc.
 *
 * Background: `unstable_readConfig` returns a fully-normalized config with
 * every section populated to a default — including `unsafe: {}` — and
 * wrangler warns that `"unsafe" fields are experimental` whenever the key is
 * merely present, empty or not. So the merged file, which inherits the empty
 * default, would trip the warning at every dev start and every deploy.
 *
 * We drop `unsafe` only when it is an empty plain object (the exact shape the
 * normalizer produces). A user-authored, non-empty `unsafe` is left intact so
 * wrangler's intended diagnostic still fires. Other defaulted-empty fields
 * (`vars: {}`, `kv_namespaces: []`, …) are harmless and kept as-is — extend
 * this function if wrangler ever grows its experimental() warning list
 * (today: only `unsafe` and `secrets`).
 *
 * Mutates `merged` in place, matching the shallow-clone pattern of
 * `mergeFlueAdditions`.
 */
function stripNoisyWranglerDefaults(merged) {
  const unsafeSection = merged.unsafe;
  const isEmptyPlainObject =
    typeof unsafeSection === "object" &&
    unsafeSection !== null &&
    !Array.isArray(unsafeSection) &&
    Object.keys(unsafeSection).length === 0;
  if ("unsafe" in merged && isEmptyPlainObject) delete merged.unsafe;
}
188
/**
 * Collect the `class_name`s declared in the user's wrangler
 * `durable_objects.bindings` whose name contains the literal substring
 * `Sandbox` (case-sensitive).
 *
 * This is Flue's convention for wiring `@cloudflare/sandbox`: each such class
 * name gets an automatic re-export in the generated Worker entry
 * (`export { Sandbox as <class_name> } from '@cloudflare/sandbox';`), letting
 * users pick arbitrary class names (e.g. `PyBoxSandbox`) that all alias the
 * single class shipped by the package, each pairable with its own container
 * image in `containers[]`.
 *
 * Returns unique class names in sorted order. Non-object bindings and
 * bindings without a string `class_name` are ignored.
 */
function detectSandboxBindings(userConfig) {
  const durables = userConfig.durable_objects;
  if (typeof durables !== "object" || durables === null) return [];
  if (!Array.isArray(durables.bindings)) return [];
  const sandboxNames = durables.bindings
    .filter((binding) => typeof binding === "object" && binding !== null)
    .map((binding) => binding.class_name)
    .filter((className) => typeof className === "string" && className.includes("Sandbox"));
  return [...new Set(sandboxNames)].sort();
}
221
/**
 * When the user has declared one or more `Sandbox`-named DO bindings, verify
 * that `@cloudflare/sandbox` appears in a package.json found by walking each
 * search dir up toward the filesystem root. Surfaces a friendly, actionable
 * error at build time instead of letting esbuild emit a confusing
 * module-resolution failure.
 *
 * The check is lenient: a package.json that cannot be parsed aborts the check
 * silently, deferring to esbuild's own error path — this avoids false
 * positives in unusual project layouts.
 */
function assertSandboxPackageInstalled(sandboxClassNames, searchDirs) {
  if (sandboxClassNames.length === 0) return;
  for (const startDir of searchDirs) {
    // Walk ancestors; the loop stops before re-visiting the filesystem root.
    for (let dir = startDir; dir !== path.dirname(dir); dir = path.dirname(dir)) {
      const pkgPath = path.join(dir, "package.json");
      if (!fs.existsSync(pkgPath)) continue;
      try {
        const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
        const declared = {
          ...(pkg.dependencies ?? {}),
          ...(pkg.devDependencies ?? {}),
          ...(pkg.peerDependencies ?? {}),
          ...(pkg.optionalDependencies ?? {})
        };
        if ("@cloudflare/sandbox" in declared) return;
      } catch {
        // Unreadable/unparseable package.json: skip the whole check and let
        // esbuild's resolution error speak instead.
        return;
      }
    }
  }
  throw new Error(`[flue] Your wrangler config declares DO binding(s) whose class_name contains "Sandbox" (${sandboxClassNames.join(", ")}), but @cloudflare/sandbox is not in your package.json. Install it: \`npm install @cloudflare/sandbox\`.`);
}
253
/**
 * Write wrangler's deploy-redirect file at
 * `<outputDir>/.wrangler/deploy/config.json` so that `wrangler deploy` run
 * from `outputDir` automatically picks up the generated `dist/wrangler.jsonc`.
 *
 * This is wrangler's own native redirection mechanism (the same one Astro's
 * Cloudflare adapter uses). If a redirect file already exists — e.g. the user
 * set one up themselves — it is left untouched.
 */
function writeDeployRedirectIfMissing(outputDir) {
  const deployDir = path.join(outputDir, ".wrangler", "deploy");
  const redirectFile = path.join(deployDir, "config.json");
  if (fs.existsSync(redirectFile)) return;
  fs.mkdirSync(deployDir, { recursive: true });
  const body = JSON.stringify({ configPath: "../../dist/wrangler.jsonc" }, null, 2) + "\n";
  fs.writeFileSync(redirectFile, body, "utf-8");
}
269
+
270
+ //#endregion
271
+ //#region src/build-plugin-cloudflare.ts
272
/** Cloudflare build plugin. Produces a Worker + DO entry point with SSE/webhook/sync modes. */
var CloudflarePlugin = class {
  // Plugin id the build pipeline uses to select this target.
  name = "cloudflare";
  // "none": this plugin emits source only — contrast NodePlugin, which sets
  // bundle = "esbuild". Presumably wrangler performs the bundling — TODO confirm in build.ts.
  bundle = "none";
  // Filename of the generated Worker entry module in the output dir.
  entryFilename = "_entry.ts";
  /**
   * Per-build cache of the user's wrangler config. Both `generateEntryPoint`
   * and `additionalOutputs` need it (for sandbox detection + the merge), and
   * a fresh `CloudflarePlugin` instance is constructed for each build (see
   * `resolvePlugin` in build.ts), so the cache is implicitly scoped to a
   * single build.
   */
  userConfigCache;
  // Read the user's wrangler config at most once per build; later callers reuse it.
  async getUserConfig(outputDir) {
    if (!this.userConfigCache) this.userConfigCache = await readUserWranglerConfig(outputDir);
    return this.userConfigCache;
  }
  /**
   * Generate the source text of the Worker entry module: one import per agent
   * module, one Durable Object class per webhook-triggered agent, re-exports
   * for Sandbox-named DO bindings from the user's wrangler config, and the
   * top-level fetch handler (health check, manifest, agent routing).
   */
  async generateEntryPoint(ctx) {
    const { agents, roles } = ctx;
    const rolesJson = JSON.stringify(roles);
    // Only agents with a webhook trigger get their own DO class below.
    const webhookAgents = agents.filter((a) => a.triggers.webhook);
    // One import line per agent; backslashes normalized to forward slashes.
    const agentImports = agents.map((a) => {
      return `import ${agentVarName$1(a.name)} from '${a.filePath.replace(/\\/g, "/")}';`;
    }).join("\n");
    const manifest = JSON.stringify({ agents: agents.map((a) => ({
      name: a.name,
      triggers: a.triggers
    })) }, null, 2);
    // Emit one DO class per webhook agent, delegating to the shared
    // handleAgentRequest / handleFlueFiberRecovered defined in the template.
    const agentClasses = webhookAgents.map((a) => {
      const className = agentClassName(a.name);
      const handlerVar = agentVarName$1(a.name);
      return `export class ${className} extends Agent {
async onRequest(request) {
return handleAgentRequest(request, this, ${JSON.stringify(a.name)}, ${handlerVar});
}

async onFiberRecovered(ctx) {
if (ctx.name?.startsWith('flue:')) {
return handleFlueFiberRecovered(ctx, this, ${JSON.stringify(a.name)});
}
if (typeof super.onFiberRecovered === 'function') {
return super.onFiberRecovered(ctx);
}
}
}`;
    }).join("\n\n");
    // Needed for the Sandbox re-export lines interpolated into the template.
    const { config: userConfig } = await this.getUserConfig(ctx.outputDir);
    return `
// Auto-generated by @flue/sdk build (cloudflare)
import { Agent, routeAgentRequest } from 'agents';
import { Bash, InMemoryFs } from 'just-bash';
import {
createFlueContext,
InMemorySessionStore,
bashFactoryToSessionEnv,
resolveModel,
} from '@flue/sdk/internal';
import { runWithCloudflareContext, cfSandboxToSessionEnv } from '@flue/sdk/cloudflare';

${agentImports}

// ─── Config ─────────────────────────────────────────────────────────────────

const roles = ${rolesJson};
const skills = {};
const systemPrompt = '';
const manifest = ${manifest};

// ─── Infrastructure ─────────────────────────────────────────────────────────

// No build-time model default. The user sets model at runtime via
// \`init({ model: "provider/model-id" })\` for an agent default, or via
// \`{ model: "provider/model-id" }\` on any individual prompt/skill/task call.
const model = undefined;

// ─── Sandbox Environments ───────────────────────────────────────────────────

/**
* Create an empty in-memory sandbox (default).
*/
async function createDefaultEnv() {
const fs = new InMemoryFs();
return bashFactoryToSessionEnv(() => new Bash({
fs,
network: { dangerouslyAllowFullInternetAccess: true },
}));
}

/**
* 'local' sandbox is not available on Cloudflare Workers.
*/
async function createLocalEnv() {
throw new Error(
"[flue] 'local' sandbox is not supported on Cloudflare Workers. " +
"Use the default empty sandbox, pass a BashFactory, " +
"or pass a sandbox instance (from any SDK — e.g. @cloudflare/sandbox " +
"or a Flue connector) to init({ sandbox })."
);
}

/**
* Detect and wrap external sandbox instances (e.g. from @cloudflare/sandbox's
* getSandbox()). Returns SessionEnv if the object quacks like a container
* sandbox, null otherwise.
*/
function resolveSandbox(sandbox) {
if (
sandbox && typeof sandbox === 'object' &&
typeof sandbox.exec === 'function' &&
typeof sandbox.readFile === 'function' &&
typeof sandbox.destroy === 'function' &&
!('getCwd' in sandbox) && !('fs' in sandbox)
) {
return cfSandboxToSessionEnv(sandbox);
}
return null;
}

// Fallback in-memory store (used if no DO storage is available).
const memoryStore = new InMemorySessionStore();

// Create a DO-backed session store from the Durable Object's SQL storage.
function createDOStore(sql) {
// Ensure the table exists
sql.exec(
'CREATE TABLE IF NOT EXISTS flue_sessions (id TEXT PRIMARY KEY, data TEXT NOT NULL, updated_at INTEGER NOT NULL)'
);
return {
async save(id, data) {
const json = JSON.stringify(data);
sql.exec(
'INSERT OR REPLACE INTO flue_sessions (id, data, updated_at) VALUES (?, ?, ?)',
id, json, Date.now()
);
},
async load(id) {
const rows = sql.exec('SELECT data FROM flue_sessions WHERE id = ?', id).toArray();
if (rows.length === 0) return null;
return JSON.parse(rows[0].data);
},
async delete(id) {
sql.exec('DELETE FROM flue_sessions WHERE id = ?', id);
},
};
}

function createContextForRequest(id, payload, doInstance) {
// Use DO SQLite storage by default, fall back to in-memory
const defaultStore = doInstance?.ctx?.storage?.sql
? createDOStore(doInstance.ctx.storage.sql)
: memoryStore;

return createFlueContext({
id,
payload,
env: doInstance?.env ?? {},
agentConfig: {
systemPrompt, skills, roles, model, resolveModel,
},
createDefaultEnv,
createLocalEnv,
defaultStore,
resolveSandbox,
});
}

function runWithInstanceContext(doInstance, fn) {
return runWithCloudflareContext(
{ env: doInstance.env, agentInstance: doInstance, storage: doInstance.ctx.storage },
fn,
);
}

function assertAgentsDurabilityApi(doInstance, method) {
if (typeof doInstance[method] !== 'function') {
throw new Error(
'[flue] The installed "agents" package does not provide the required Cloudflare Agents SDK method "' +
method +
'". Install or upgrade the "agents" package in your project.',
);
}
}

function runHandlerWithKeepAlive(doInstance, ctx, handler) {
return runWithInstanceContext(doInstance, () => {
assertAgentsDurabilityApi(doInstance, 'keepAliveWhile');
return doInstance.keepAliveWhile(() => handler(ctx));
});
}

function startWebhookFiber(doInstance, requestId, agentName, id, payload, handler) {
const run = async (fiber) => {
fiber?.stash?.({
version: 1,
kind: 'webhook',
agentName,
id,
requestId,
phase: 'running',
startedAt: Date.now(),
});

const ctx = createContextForRequest(id, payload, doInstance);
return runWithInstanceContext(doInstance, async () => {
try {
return await handler(ctx);
} finally {
ctx.setEventCallback(undefined);
}
});
};

assertAgentsDurabilityApi(doInstance, 'runFiber');
return doInstance.runFiber('flue:webhook:' + requestId, run);
}

async function handleFlueFiberRecovered(ctx, _doInstance, agentName) {
if (!ctx.name || !ctx.name.startsWith('flue:')) return;
console.warn('[flue] Cloudflare fiber interrupted:', agentName, ctx.name, ctx.snapshot ?? null);
}

// ─── Shared Request Handler ────────────────────────────────────────────────

async function handleAgentRequest(request, doInstance, agentName, handler) {
// Agent id is the DO "room name" set by routeAgentRequest
const id = doInstance.name;

// Parse payload
let payload;
try {
payload = await request.json();
} catch {
payload = {};
}

const accept = request.headers.get('accept') || '';
const isWebhook = request.headers.get('x-webhook') === 'true';
const isSSE = accept.includes('text/event-stream') && !isWebhook;

try {
// Fire-and-forget (webhook mode)
if (isWebhook) {
const requestId = crypto.randomUUID();
startWebhookFiber(doInstance, requestId, agentName, id, payload, handler).then(
(result) => {
console.log('[flue] Webhook handler complete:', agentName,
result !== undefined ? JSON.stringify(result) : '(no return)');
},
(err) => {
console.error('[flue] Webhook handler error:', agentName, err);
},
);
return new Response(JSON.stringify({ status: 'accepted', requestId }), {
status: 202,
headers: { 'content-type': 'application/json' },
});
}

// SSE streaming mode
if (isSSE) {
const { readable, writable } = new TransformStream();
const writer = writable.getWriter();
const encoder = new TextEncoder();
let eventId = 0;
let isIdle = false;

const writeSSE = async (data, event) => {
const lines = [];
if (event) lines.push('event: ' + event);
lines.push('id: ' + eventId++);
lines.push('data: ' + JSON.stringify(data));
lines.push('', '');
await writer.write(encoder.encode(lines.join('\\n')));
};

const ctx = createContextForRequest(id, payload, doInstance);
ctx.setEventCallback((event) => {
if (event.type === 'idle') isIdle = true;
writeSSE(event, event.type).catch(() => {});
});

(async () => {
try {
const result = await runHandlerWithKeepAlive(doInstance, ctx, handler);
if (!isIdle) {
await writeSSE({ type: 'idle' }, 'idle');
}
await writeSSE(
{ type: 'result', data: result !== undefined ? result : null },
'result',
);
} catch (err) {
await writeSSE(
{ type: 'error', error: String(err) },
'error',
);
if (!isIdle) {
await writeSSE({ type: 'idle' }, 'idle');
}
} finally {
ctx.setEventCallback(undefined);
await writer.close();
}
})();

return new Response(readable, {
headers: {
'content-type': 'text/event-stream',
'cache-control': 'no-cache',
'connection': 'keep-alive',
},
});
}

// Sync mode (default)
const ctx = createContextForRequest(id, payload, doInstance);
try {
const result = await runHandlerWithKeepAlive(doInstance, ctx, handler);
return new Response(
JSON.stringify({ result: result !== undefined ? result : null }),
{ headers: { 'content-type': 'application/json' } },
);
} finally {
ctx.setEventCallback(undefined);
}
} catch (err) {
console.error('[flue] Agent error:', agentName, err);
return new Response(
JSON.stringify({ error: String(err) }),
{ status: 500, headers: { 'content-type': 'application/json' } },
);
}
}

// ─── Per-Agent Durable Object Classes ──────────────────────────────────────

${agentClasses}

// ─── User-declared Sandbox re-exports ──────────────────────────────────────
// One line per DO binding in the user's wrangler.jsonc whose class_name
// contains "Sandbox". Flue aliases the single \`Sandbox\` class shipped by
// \`@cloudflare/sandbox\` so each user-chosen class_name resolves at the
// bundle's top level. The binding + container image configuration is owned
// by the user's wrangler.jsonc.
${detectSandboxBindings(userConfig).map((name) => `export { Sandbox as ${name} } from '@cloudflare/sandbox';`).join("\n")}

// ─── Worker Fetch Handler ───────────────────────────────────────────────────

export default {
async fetch(request, env) {
const url = new URL(request.url);

// Health check
if (url.pathname === '/health') {
return new Response(JSON.stringify({ status: 'ok' }), {
headers: { 'content-type': 'application/json' },
});
}

// Agent manifest
if (url.pathname === '/agents' && request.method === 'GET') {
return new Response(JSON.stringify(manifest), {
headers: { 'content-type': 'application/json' },
});
}

// Route to per-agent DOs via the Agents SDK
// URL: /agents/<agent-name>/<id>
const response = await routeAgentRequest(request, env);
if (response) return response;

return new Response('Not found', { status: 404 });
},
};
`;
  }
  /**
   * Produce extra build artifacts: the merged `wrangler.jsonc` (returned in
   * `outputs`) plus the `.wrangler/deploy/config.json` redirect written
   * directly into the output dir.
   */
  async additionalOutputs(ctx) {
    const outputs = {};
    // Flue's DO bindings: one per webhook agent; binding name equals class name.
    const flueBindings = ctx.agents.filter((a) => a.triggers.webhook).map((a) => ({
      class_name: agentClassName(a.name),
      name: agentClassName(a.name)
    }));
    const flueSqliteClasses = flueBindings.map((b) => b.class_name);
    const additions = {
      defaultName: path.basename(ctx.outputDir) || "flue-agents",
      main: "_entry.ts",
      doBindings: flueBindings,
      migration: {
        tag: "flue-v1",
        new_sqlite_classes: flueSqliteClasses
      }
    };
    const { config: userConfig, path: userConfigPath } = await this.getUserConfig(ctx.outputDir);
    if (userConfigPath) console.log(`[flue] Merging with user wrangler config: ${userConfigPath}`);
    // Fail fast on configs Flue can't run on (old compat date, missing flag).
    validateUserWranglerConfig(userConfig);
    const sandboxClassNames = detectSandboxBindings(userConfig);
    if (sandboxClassNames.length > 0) {
      // Friendly install hint before esbuild would fail module resolution.
      assertSandboxPackageInstalled(sandboxClassNames, [ctx.outputDir, ctx.workspaceDir]);
      for (const className of sandboxClassNames) console.log(`[flue] Detected Sandbox-named DO binding "${className}" — re-exporting from @cloudflare/sandbox.`);
    }
    const merged = mergeFlueAdditions(userConfig, additions);
    stripNoisyWranglerDefaults(merged);
    if (typeof merged.$schema !== "string") merged.$schema = "https://workers.cloudflare.com/schema/wrangler.json";
    outputs["wrangler.jsonc"] = JSON.stringify(merged, null, 2);
    // Lets `wrangler deploy` run from outputDir find dist/wrangler.jsonc.
    writeDeployRedirectIfMissing(ctx.outputDir);
    return outputs;
  }
};
680
/** Sanitize an agent name into the identifier used for its imported handler. */
function agentVarName$1(name) {
  const safe = name.replace(/[^a-zA-Z0-9]/g, "_");
  return `handler_${safe}`;
}
683
/**
 * Convert agent name to a PascalCase DO class name.
 * "hello" → "Hello", "with-cloudflare" → "WithCloudflare"
 *
 * routeAgentRequest() converts binding names to kebab-case for URL matching,
 * so "WithCloudflare" → "with-cloudflare" → URL /agents/with-cloudflare/:id
 */
function agentClassName(name) {
  let className = "";
  for (const part of name.split(/[-_]/)) {
    className += part.charAt(0).toUpperCase() + part.slice(1);
  }
  return className;
}
693
+
694
+ //#endregion
695
+ //#region src/build-plugin-node.ts
696
+ var NodePlugin = class {
697
+ name = "node";
698
+ bundle = "esbuild";
699
+ generateEntryPoint(ctx) {
700
+ const { agents, roles } = ctx;
701
+ const rolesJson = JSON.stringify(roles);
702
+ const webhookAgents = agents.filter((a) => a.triggers.webhook);
703
+ return `
704
+ // Auto-generated by @flue/sdk build (node)
705
+ import { Hono } from 'hono';
706
+ import { streamSSE } from 'hono/streaming';
707
+ import { serve } from '@hono/node-server';
708
+ import { Bash, InMemoryFs, MountableFs, ReadWriteFs } from 'just-bash';
709
+ import {
710
+ createFlueContext,
711
+ InMemorySessionStore,
712
+ bashFactoryToSessionEnv,
713
+ resolveModel,
714
+ } from '@flue/sdk/internal';
715
+ import { randomUUID } from 'node:crypto';
716
+
717
+ ${agents.map((a) => {
718
+ return `import ${agentVarName(a.name)} from '${a.filePath.replace(/\\/g, "/")}';`;
719
+ }).join("\n")}
720
+
721
+ // ─── Config ─────────────────────────────────────────────────────────────────
722
+
723
+ const skills = {};
724
+ const roles = ${rolesJson};
725
+ const systemPrompt = '';
726
+
727
+ const handlers = {
728
+ ${agents.map((a) => ` ${JSON.stringify(a.name)}: ${agentVarName(a.name)},`).join("\n")}
729
+ };
730
+
731
+ const webhookAgents = new Set(${JSON.stringify(webhookAgents.map((a) => a.name))});
732
+
733
+ // When the CLI starts this server via \`flue run\`, it sets FLUE_MODE=local.
734
+ // In local mode the HTTP route accepts any registered agent (including
735
+ // trigger-less CI-only agents). In any other mode the route is restricted to
736
+ // agents with \`webhook: true\`, preventing accidental public exposure of
737
+ // agents that the user only intended to invoke from their CI pipeline.
738
+ const isLocalMode = process.env.FLUE_MODE === 'local';
739
+
740
+ const manifest = ${JSON.stringify({ agents: agents.map((a) => ({
741
+ name: a.name,
742
+ triggers: a.triggers
743
+ })) }, null, 2)};
744
+
745
+ // ─── Infrastructure ─────────────────────────────────────────────────────────
746
+
747
+ // No build-time model default. The user sets model at runtime via
748
+ // \`init({ model: "provider/model-id" })\` for an agent default, or via
749
+ // \`{ model: "provider/model-id" }\` on any individual prompt/skill/task call.
750
+ const model = undefined;
751
+
752
+ // ─── Sandbox Environments ───────────────────────────────────────────────────
753
+
754
+ /**
755
+ * Create an empty in-memory sandbox (default).
756
+ * Uses InMemoryFs (no real filesystem access) with sensible defaults:
757
+ * cwd = /home/user, /tmp exists, /bin and /usr/bin exist.
758
+ */
759
+ async function createDefaultEnv() {
760
+ const fs = new InMemoryFs();
761
+ return bashFactoryToSessionEnv(() => new Bash({
762
+ fs,
763
+ network: { dangerouslyAllowFullInternetAccess: true },
764
+ }));
765
+ }
766
+
767
+ /**
768
+ * Create a local sandbox backed by the host filesystem.
769
+ * Mounts process.cwd() at /workspace via ReadWriteFs + MountableFs.
770
+ */
771
+ async function createLocalEnv() {
772
+ const rwfs = new ReadWriteFs({ root: process.cwd() });
773
+ const fs = new MountableFs({ base: new InMemoryFs() });
774
+ fs.mount('/workspace', rwfs);
775
+ return bashFactoryToSessionEnv(() => new Bash({
776
+ fs,
777
+ cwd: '/workspace',
778
+ network: { dangerouslyAllowFullInternetAccess: true },
779
+ }));
780
+ }
781
+
782
+ // Default persistence store for Node — in-memory, process lifetime.
783
+ const defaultStore = new InMemorySessionStore();
784
+
785
+ function createContextForRequest(id, payload) {
786
+ return createFlueContext({
787
+ id,
788
+ payload,
789
+ env: process.env,
790
+ agentConfig: {
791
+ systemPrompt, skills, roles, model, resolveModel,
792
+ },
793
+ createDefaultEnv,
794
+ createLocalEnv,
795
+ defaultStore,
796
+ });
797
+ }
798
+
799
+ // ─── Server ─────────────────────────────────────────────────────────────────
800
+
801
+ const app = new Hono();
802
+
803
+ app.get('/health', (c) => c.json({ status: 'ok' }));
804
+ app.get('/agents', (c) => c.json(manifest));
805
+
806
+ // Agent id is required in the URL
807
+ app.post('/agents/:name', (c) => {
808
+ return c.json({
809
+ error: 'Agent id is required. Use /agents/:name/:id',
810
+ }, 400);
811
+ });
812
+
813
+ app.post('/agents/:name/:id', async (c) => {
814
+ const name = c.req.param('name');
815
+ const id = c.req.param('id');
816
+
817
+ if (!handlers[name]) {
818
+ return c.json({ error: 'Agent not found' }, 404);
819
+ }
820
+ if (!webhookAgents.has(name) && !isLocalMode) {
821
+ return c.json({ error: 'Agent "' + name + '" is not web-accessible (no webhook trigger)' }, 404);
822
+ }
823
+
824
+ const handler = handlers[name];
825
+ let payload;
826
+ try {
827
+ payload = await c.req.json();
828
+ } catch {
829
+ payload = {};
830
+ }
831
+
832
+ const accept = c.req.header('accept') || '';
833
+ const isWebhook = c.req.header('x-webhook') === 'true';
834
+ const isSSE = accept.includes('text/event-stream') && !isWebhook;
835
+
836
+ // Fire-and-forget (webhook mode)
837
+ if (isWebhook) {
838
+ const requestId = randomUUID();
839
+ const ctx = createContextForRequest(id, payload);
840
+ handler(ctx).then(
841
+ (result) => {
842
+ ctx.setEventCallback(undefined);
843
+ console.log('[flue] Webhook handler complete:', name, result !== undefined ? JSON.stringify(result) : '(no return)');
844
+ },
845
+ (err) => {
846
+ ctx.setEventCallback(undefined);
847
+ console.error('[flue] Webhook handler error:', name, err);
848
+ },
849
+ );
850
+ return c.json({ status: 'accepted', requestId }, 202);
851
+ }
852
+
853
+ // SSE streaming mode
854
+ if (isSSE) {
855
+ return streamSSE(c, async (stream) => {
856
+ let eventId = 0;
857
+ let isIdle = false;
858
+ const ctx = createContextForRequest(id, payload);
859
+ ctx.setEventCallback((event) => {
860
+ if (event.type === 'idle') isIdle = true;
861
+ stream.writeSSE({ data: JSON.stringify(event), event: event.type, id: String(eventId++) }).catch(() => {});
862
+ });
863
+
864
+ try {
865
+ const result = await handler(ctx);
866
+ if (!isIdle) {
867
+ const idle = { type: 'idle' };
868
+ await stream.writeSSE({ data: JSON.stringify(idle), event: 'idle', id: String(eventId++) });
869
+ }
870
+ await stream.writeSSE({
871
+ data: JSON.stringify({ type: 'result', data: result !== undefined ? result : null }),
872
+ event: 'result',
873
+ id: String(eventId++),
874
+ });
875
+ } catch (err) {
876
+ await stream.writeSSE({
877
+ data: JSON.stringify({ type: 'error', error: String(err) }),
878
+ event: 'error',
879
+ id: String(eventId++),
880
+ });
881
+ if (!isIdle) {
882
+ const idle = { type: 'idle' };
883
+ await stream.writeSSE({ data: JSON.stringify(idle), event: 'idle', id: String(eventId++) });
884
+ }
885
+ } finally {
886
+ ctx.setEventCallback(undefined);
887
+ }
888
+ });
889
+ }
890
+
891
+ // Sync mode (default)
892
+ try {
893
+ const ctx = createContextForRequest(id, payload);
894
+ const result = await handler(ctx);
895
+ ctx.setEventCallback(undefined);
896
+ return c.json({ result: result !== undefined ? result : null });
897
+ } catch (err) {
898
+ console.error('[flue] Agent error:', name, err);
899
+ return c.json({ error: String(err) }, 500);
900
+ }
901
+ });
902
+
903
+ // ─── Start ──────────────────────────────────────────────────────────────────
904
+
905
+ const port = parseInt(process.env.PORT || '3000', 10);
906
+
907
+ const server = serve({ fetch: app.fetch, port });
908
+ console.log('[flue] Server listening on http://localhost:' + port);
909
+ if (isLocalMode) {
910
+ console.log('[flue] Mode: local (all agents invokable, including trigger-less)');
911
+ console.log('[flue] Available agents: ' + ${JSON.stringify(agents.map((a) => a.name).join(", "))});
912
+ } else {
913
+ console.log('[flue] Available agents: ' + ${JSON.stringify(webhookAgents.map((a) => a.name).join(", "))});
914
+ }
915
+
916
+ process.on('SIGINT', () => { server.close(); process.exit(0); });
917
+ process.on('SIGTERM', () => { server.close(); process.exit(0); });
918
+ `;
919
+ }
920
+ esbuildOptions(_ctx) {
921
+ return {
922
+ platform: "node",
923
+ target: "node22",
924
+ external: ["node-liblzma", "@mongodb-js/zstd"]
925
+ };
926
+ }
927
+ };
928
/**
 * Derive a valid, collision-free JavaScript identifier for an agent's
 * imported handler binding in the generated entry file.
 *
 * The previous mapping collapsed every non-alphanumeric character to "_",
 * so distinct agent names such as "a-b", "a.b" and "a_b" all produced the
 * identifier "handler_a_b" — and the generated entry then declared the
 * same binding twice, which is a syntax error. Encoding each special
 * character by its code point (base-36, underscore-delimited) keeps the
 * mapping injective while leaving purely alphanumeric names unchanged
 * (e.g. "hello" -> "handler_hello").
 *
 * @param {string} name - Agent name, derived from its filename.
 * @returns {string} A unique identifier prefixed with "handler_".
 */
function agentVarName(name) {
	return "handler_" + name.replace(/[^a-zA-Z0-9]/g, (c) => "_" + c.codePointAt(0).toString(36) + "_");
}
931
+
932
+ //#endregion
933
+ //#region src/build.ts
934
+ /**
935
+ * Build a workspace into a deployable artifact.
936
+ *
937
+ * `options.workspaceDir` is treated as an explicit workspace root — the directory
938
+ * directly containing agents/ and roles/. No .flue/ waterfall is performed here;
939
+ * callers that want waterfall behavior (e.g. the CLI when --workspace is omitted)
940
+ * should use `resolveWorkspaceFromCwd` first.
941
+ *
942
+ * AGENTS.md and .agents/skills/ are NOT bundled — discovered at runtime from session cwd.
943
+ */
944
+ async function build(options) {
945
+ const workspaceDir = path.resolve(options.workspaceDir);
946
+ const outputDir = path.resolve(options.outputDir);
947
+ const plugin = resolvePlugin(options);
948
+ console.log(`[flue] Building workspace: ${workspaceDir}`);
949
+ console.log(`[flue] Output: ${outputDir}/dist`);
950
+ console.log(`[flue] Target: ${plugin.name}`);
951
+ const roles = discoverRoles(workspaceDir);
952
+ const agents = discoverAgents(workspaceDir);
953
+ if (agents.length === 0) throw new Error(`[flue] No agent files found.\n\nExpected at: ${path.join(workspaceDir, "agents")}/\nAdd at least one agent file (e.g. hello.ts).`);
954
+ const webhookAgents = agents.filter((a) => a.triggers.webhook);
955
+ const cronAgents = agents.filter((a) => a.triggers.cron);
956
+ const triggerlessAgents = agents.filter((a) => !a.triggers.webhook && !a.triggers.cron);
957
+ console.log(`[flue] Found ${Object.keys(roles).length} role(s): ${Object.keys(roles).join(", ") || "(none)"}`);
958
+ console.log(`[flue] Found ${agents.length} agent(s): ${agents.map((a) => a.name).join(", ")}`);
959
+ if (webhookAgents.length > 0) console.log(`[flue] Webhook agents: ${webhookAgents.map((a) => a.name).join(", ")}`);
960
+ if (cronAgents.length > 0) console.log(`[flue] Cron agents (manifest only): ${cronAgents.map((a) => `${a.name} (${a.triggers.cron})`).join(", ")}`);
961
+ if (triggerlessAgents.length > 0) console.log(`[flue] CLI-only agents (no HTTP route in deployed build): ${triggerlessAgents.map((a) => a.name).join(", ")}`);
962
+ console.log(`[flue] AGENTS.md and .agents/skills/ will be discovered at runtime from session cwd`);
963
+ const distDir = path.join(outputDir, "dist");
964
+ fs.mkdirSync(distDir, { recursive: true });
965
+ const manifest = { agents: agents.map((a) => ({
966
+ name: a.name,
967
+ triggers: a.triggers
968
+ })) };
969
+ const manifestPath = path.join(distDir, "manifest.json");
970
+ fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2), "utf-8");
971
+ console.log(`[flue] Generated: ${manifestPath}`);
972
+ const ctx = {
973
+ agents,
974
+ roles,
975
+ workspaceDir,
976
+ outputDir,
977
+ options
978
+ };
979
+ const serverCode = await plugin.generateEntryPoint(ctx);
980
+ const bundleStrategy = plugin.bundle ?? "esbuild";
981
+ let anyChanged = false;
982
+ if (bundleStrategy === "esbuild") {
983
+ const entryPath = path.join(distDir, "_entry_server.ts");
984
+ const outPath = path.join(distDir, "server.mjs");
985
+ fs.writeFileSync(entryPath, serverCode, "utf-8");
986
+ try {
987
+ const nodePathsSet = collectNodePaths(workspaceDir);
988
+ const { external: pluginExternal = [], ...pluginEsbuildOpts } = plugin.esbuildOptions ? plugin.esbuildOptions(ctx) : {};
989
+ const userExternals = getUserExternals(workspaceDir);
990
+ await esbuild.build({
991
+ entryPoints: [entryPath],
992
+ bundle: true,
993
+ outfile: outPath,
994
+ format: "esm",
995
+ external: [...pluginExternal, ...userExternals],
996
+ nodePaths: [...nodePathsSet],
997
+ logLevel: "warning",
998
+ loader: {
999
+ ".ts": "ts",
1000
+ ".node": "empty"
1001
+ },
1002
+ treeShaking: true,
1003
+ sourcemap: true,
1004
+ ...pluginEsbuildOpts
1005
+ });
1006
+ console.log(`[flue] Built: ${outPath}`);
1007
+ anyChanged = true;
1008
+ } finally {
1009
+ try {
1010
+ fs.unlinkSync(entryPath);
1011
+ } catch {}
1012
+ }
1013
+ } else if (bundleStrategy === "none") {
1014
+ if (!plugin.entryFilename) throw new Error(`[flue] Plugin "${plugin.name}" set bundle: 'none' but did not provide entryFilename.`);
1015
+ const outPath = path.join(distDir, plugin.entryFilename);
1016
+ if (!fs.existsSync(outPath) || fs.readFileSync(outPath, "utf-8") !== serverCode) {
1017
+ fs.writeFileSync(outPath, serverCode, "utf-8");
1018
+ console.log(`[flue] Wrote entry: ${outPath} (no bundle — downstream tool handles it)`);
1019
+ anyChanged = true;
1020
+ } else console.log(`[flue] Entry unchanged: ${outPath}`);
1021
+ } else throw new Error(`[flue] Unknown bundle strategy: ${bundleStrategy}`);
1022
+ if (plugin.additionalOutputs) {
1023
+ const outputs = await plugin.additionalOutputs(ctx);
1024
+ for (const [filename, content] of Object.entries(outputs)) {
1025
+ const filePath = path.join(distDir, filename);
1026
+ fs.mkdirSync(path.dirname(filePath), { recursive: true });
1027
+ if (!fs.existsSync(filePath) || fs.readFileSync(filePath, "utf-8") !== content) {
1028
+ fs.writeFileSync(filePath, content, "utf-8");
1029
+ console.log(`[flue] Generated: ${filePath}`);
1030
+ anyChanged = true;
1031
+ }
1032
+ }
1033
+ }
1034
+ console.log(`[flue] Build complete. Output: ${distDir}`);
1035
+ return { changed: anyChanged };
1036
+ }
1037
/**
 * Pick the build plugin for this invocation. An explicit `options.plugin`
 * instance always wins; otherwise `options.target` selects a built-in.
 * Throws a friendly error when no target is given or the target is unknown.
 */
function resolvePlugin(options) {
	if (options.plugin) return options.plugin;
	if (!options.target) throw new Error("[flue] No build target specified. Use --target to choose a target:\n flue build --target node\n flue build --target cloudflare");
	// Map (not a plain object) so hostile/odd target strings like
	// "constructor" can never hit an inherited property.
	const builtins = new Map([
		["node", () => new NodePlugin()],
		["cloudflare", () => new CloudflarePlugin()]
	]);
	const factory = builtins.get(options.target);
	if (!factory) throw new Error(`[flue] Unknown target: "${options.target}". Supported targets: node, cloudflare`);
	return factory();
}
1046
+ /**
1047
+ * Resolve a Flue workspace directory from the current working directory,
1048
+ * using the two-layout convention. Intended for the CLI when `--workspace` is
1049
+ * not provided — callers that pass an explicit workspace path should skip this
1050
+ * and pass the path straight to `build()`.
1051
+ *
1052
+ * Two supported layouts, checked in order:
1053
+ * 1. `<cwd>/.flue/` — use this when Flue is embedded in an existing project.
1054
+ * 2. `<cwd>/` — use this when the project itself is the Flue workspace.
1055
+ *
1056
+ * If `.flue/` exists, it wins unconditionally — no mixing with the bare layout.
1057
+ * Returns null if neither is present so the caller can produce a helpful error.
1058
+ */
1059
+ function resolveWorkspaceFromCwd(cwd) {
1060
+ const dotFlue = path.join(cwd, ".flue");
1061
+ if (fs.existsSync(dotFlue)) return dotFlue;
1062
+ if (fs.existsSync(path.join(cwd, "agents"))) return cwd;
1063
+ return null;
1064
+ }
1065
/**
 * Load role definitions from `<workspaceRoot>/roles`. Every `.md`/`.markdown`
 * file (case-insensitive extension) becomes one role keyed by its basename;
 * the frontmatter supplies `description` and optional `model`, the document
 * body supplies the `instructions`. Returns an empty object when the roles
 * directory does not exist.
 */
function discoverRoles(workspaceRoot) {
	const rolesDir = path.join(workspaceRoot, "roles");
	if (!fs.existsSync(rolesDir)) return {};
	const markdownExt = /\.(md|markdown)$/i;
	const roles = {};
	const entries = fs.readdirSync(rolesDir).filter((entry) => markdownExt.test(entry));
	for (const entry of entries) {
		const name = entry.replace(markdownExt, "");
		const raw = fs.readFileSync(path.join(rolesDir, entry), "utf-8");
		const parsed = parseFrontmatterFile(raw, name);
		roles[name] = {
			name,
			description: parsed.description,
			instructions: parsed.body,
			model: parsed.frontmatter.model
		};
	}
	return roles;
}
1084
/**
 * Enumerate agent source files in `<workspaceRoot>/agents`. Each
 * `.ts`/`.js`/`.mts`/`.mjs` file yields `{ name, filePath, triggers }`,
 * where `name` is the extension-less basename and `triggers` comes from a
 * build-time regex scan (see `parseTriggers`). Returns an empty array when
 * the agents directory does not exist.
 */
function discoverAgents(workspaceRoot) {
	const agentsDir = path.join(workspaceRoot, "agents");
	if (!fs.existsSync(agentsDir)) return [];
	const scriptExt = /\.(ts|js|mts|mjs)$/;
	const found = [];
	for (const entry of fs.readdirSync(agentsDir)) {
		if (!scriptExt.test(entry)) continue;
		const filePath = path.join(agentsDir, entry);
		found.push({
			name: entry.replace(scriptExt, ""),
			filePath,
			triggers: parseTriggers(filePath)
		});
	}
	return found;
}
1097
/** Extract trigger config via regex. Only triggers are parsed at build time (needed for routing). */
function parseTriggers(filePath) {
	const source = fs.readFileSync(filePath, "utf-8");
	const triggers = {};
	// Match a flat `export const triggers = { ... }` literal; nested braces
	// are intentionally unsupported by this build-time scan.
	const exported = source.match(/export\s+const\s+triggers\s*=\s*\{([^}]*)\}/);
	if (!exported) return triggers;
	const body = exported[1] ?? "";
	if (/webhook\s*:\s*true/.test(body)) triggers.webhook = true;
	const cron = body.match(/cron\s*:\s*['"]([^'"]+)['"]/)?.[1];
	if (cron) triggers.cron = cron;
	return triggers;
}
1109
/** Externalize user's direct deps (bare name + subpath wildcard). */
function getUserExternals(workspaceDir) {
	const pkgPath = packageUpSync({ cwd: workspaceDir });
	if (!pkgPath) return [];
	try {
		const { dependencies, devDependencies, peerDependencies } = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
		const names = Object.keys({
			...dependencies,
			...devDependencies,
			...peerDependencies
		});
		// Each dep externalizes both the bare specifier and its subpaths.
		return names.flatMap((dep) => [dep, `${dep}/*`]);
	} catch {
		// Unreadable/invalid package.json → best-effort: no externals.
		return [];
	}
}
1124
/**
 * Collect every `node_modules` directory on the walk from the workspace
 * and from the SDK's own directory up to the filesystem root. The result
 * feeds esbuild's `nodePaths` so bundling resolves dependencies installed
 * at either location (or any ancestor, e.g. a monorepo root).
 */
function collectNodePaths(workspaceDir) {
	const found = /* @__PURE__ */ new Set();
	const walkUp = (startDir) => {
		// `path.dirname(root) === root`, so the loop terminates at the root.
		for (let dir = startDir; dir !== path.dirname(dir); dir = path.dirname(dir)) {
			const candidate = path.join(dir, "node_modules");
			if (fs.existsSync(candidate)) found.add(candidate);
		}
	};
	walkUp(workspaceDir);
	walkUp(getSDKDir());
	return found;
}
1136
/**
 * Directory containing this SDK module, used as a second starting point
 * for the node_modules walk in `collectNodePaths`.
 *
 * Fixes over the previous version: `new URL(...).pathname` keeps
 * percent-escapes (so an install path containing spaces came back as
 * `%20` and `fs.existsSync` checks on it always failed), and on Windows
 * it yields `/C:/...` with a bogus leading slash. We decode the escapes
 * and strip the drive-letter slash. (`url.fileURLToPath` would be the
 * canonical conversion, but it isn't imported in this bundle.)
 *
 * @returns {string} Absolute directory of this module; falls back to
 *   `__dirname` when `import.meta.url` is unavailable (CJS interop).
 */
function getSDKDir() {
	try {
		let p = decodeURIComponent(new URL(import.meta.url).pathname);
		// Windows file URLs look like "/C:/..." — drop the leading slash.
		if (process.platform === "win32" && /^\/[A-Za-z]:/.test(p)) p = p.slice(1);
		return path.dirname(p);
	} catch {
		return __dirname;
	}
}
1143
+
1144
+ //#endregion
1145
+ //#region src/dev.ts
1146
+ /**
1147
+ * Flue dev server.
1148
+ *
1149
+ * Watches the user's workspace, rebuilds on file changes, and reloads the
1150
+ * underlying server. Distinct from `flue run`: dev is the long-running,
1151
+ * edit-and-iterate command, while `flue run` is the one-shot
1152
+ * production-style invoker (build → run → exit).
1153
+ *
1154
+ * # Two very different reload models
1155
+ *
1156
+ * Node and Cloudflare use fundamentally different rebuild strategies, because
1157
+ * what they each provide downstream is fundamentally different:
1158
+ *
1159
+ * - **Node** has no host bundler. Our esbuild pass produces the final
1160
+ * `dist/server.mjs`. On any change in the workspace we rebuild and respawn
1161
+ * the child Node process. Sub-second restart is fine.
1162
+ *
1163
+ * - **Cloudflare** uses Wrangler's bundler (the same one `wrangler dev` and
1164
+ * `wrangler deploy` use). Wrangler watches the entry's transitive import
1165
+ * graph itself and reloads workerd on source edits. So we *don't* need to
1166
+ * rebuild for body edits — wrangler handles it. We only need to act when:
1167
+ * 1. The set of agents changes (added / removed / triggers changed) →
1168
+ * regenerate `dist/_entry.ts`. Wrangler picks up the new entry
1169
+ * automatically because it's already watching it.
1170
+ * 2. The user's `wrangler.jsonc` changes → re-merge our additions and
1171
+ * restart the worker (config changes don't hot-apply).
1172
+ * Pure body edits to agent files: wrangler reloads workerd; we do nothing.
1173
+ *
1174
+ * # Watching
1175
+ *
1176
+ * Watching uses `node:fs.watch` recursive (Node 20+). Debounced 150ms. The
1177
+ * Node path treats every non-ignored change as a rebuild trigger; the
1178
+ * Cloudflare path filters to "structural" changes only.
1179
+ */
1180
/**
 * Default port for `flue dev` when `--port` is not given.
 * Mnemonic: F=3, L=5, U=8, E=3 on a phone keypad → 3583.
 */
const DEFAULT_DEV_PORT = 3583;
1182
+ /**
1183
+ * Start a Flue dev server. Resolves only when the server is shut down (e.g.
1184
+ * via SIGINT). Errors during the initial build/start are thrown synchronously;
1185
+ * errors during subsequent rebuilds are logged but do NOT exit the dev server
1186
+ * — the user is editing code, after all, and we want to recover when they fix it.
1187
+ */
1188
+ async function dev(options) {
1189
+ const workspaceDir = path.resolve(options.workspaceDir);
1190
+ const outputDir = path.resolve(options.outputDir);
1191
+ const port = options.port ?? DEFAULT_DEV_PORT;
1192
+ const envFiles = resolveEnvFiles(options.envFiles, outputDir);
1193
+ for (const f of envFiles) console.error(`[flue] Loading env from: ${f}`);
1194
+ const buildOptions = {
1195
+ workspaceDir,
1196
+ outputDir,
1197
+ target: options.target
1198
+ };
1199
+ console.error(`[flue] Starting dev server (target: ${options.target})`);
1200
+ console.error(`[flue] Watching: ${workspaceDir}`);
1201
+ console.error(`[flue] Building...`);
1202
+ const initialStart = Date.now();
1203
+ try {
1204
+ await build(buildOptions);
1205
+ } catch (err) {
1206
+ throw new Error(`[flue] Initial build failed: ${err instanceof Error ? err.message : String(err)}`);
1207
+ }
1208
+ console.error(`[flue] Built in ${Date.now() - initialStart}ms`);
1209
+ const reloader = options.target === "node" ? new NodeReloader({
1210
+ outputDir,
1211
+ port,
1212
+ envFiles
1213
+ }) : await createCloudflareReloader({
1214
+ outputDir,
1215
+ port,
1216
+ envFiles
1217
+ });
1218
+ await reloader.start();
1219
+ if (reloader.url) {
1220
+ console.error(`[flue] Server: ${reloader.url}`);
1221
+ const exampleAgent = pickExampleAgentName(outputDir, workspaceDir);
1222
+ if (exampleAgent) {
1223
+ console.error(`[flue] Try: curl -X POST ${reloader.url}/agents/${exampleAgent}/test-1 \\`);
1224
+ console.error(` -H 'Content-Type: application/json' -d '{}'`);
1225
+ }
1226
+ }
1227
+ console.error(`[flue] Press Ctrl+C to stop\n`);
1228
+ const rebuilder = createRebuilder(buildOptions, reloader);
1229
+ const envFileSet = new Set(envFiles);
1230
+ const watcher = createWatcher({
1231
+ workspaceDir,
1232
+ outputDir,
1233
+ target: options.target,
1234
+ envFiles,
1235
+ onChange: (relPath) => {
1236
+ if (!reloader.shouldRebuildOn(relPath)) return;
1237
+ const isEnvFile = envFileSet.has(relPath);
1238
+ console.error(`[flue] Change detected: ${relPath}`);
1239
+ rebuilder.schedule(isEnvFile);
1240
+ }
1241
+ });
1242
+ let shuttingDown = false;
1243
+ const shutdown = async (signal, exitCode) => {
1244
+ if (shuttingDown) return;
1245
+ shuttingDown = true;
1246
+ console.error(`\n[flue] Received ${signal}, shutting down...`);
1247
+ watcher.close();
1248
+ try {
1249
+ await reloader.stop();
1250
+ } catch (err) {
1251
+ console.error(`[flue] Error during shutdown: ${err instanceof Error ? err.message : String(err)}`);
1252
+ }
1253
+ console.error(`[flue] Stopped.`);
1254
+ process.exit(exitCode);
1255
+ };
1256
+ process.on("SIGINT", () => void shutdown("SIGINT", 130));
1257
+ process.on("SIGTERM", () => void shutdown("SIGTERM", 143));
1258
+ process.on("exit", () => {
1259
+ try {
1260
+ reloader.killSync?.();
1261
+ } catch {}
1262
+ });
1263
+ await new Promise(() => {});
1264
+ }
1265
/**
 * Single-flight, debounced rebuild scheduler for the dev server.
 *
 * Semantics (order-sensitive — read the flags carefully):
 * - `schedule()` debounces 150ms, so a burst of file events collapses
 *   into one rebuild.
 * - At most one build runs at a time (`running`); a change arriving
 *   mid-build sets `queued` and exactly one follow-up build runs after.
 * - `forceReload` (used for env-file edits) is sticky: it survives the
 *   debounce window (`pendingForce`) and, if a build is in flight, is
 *   carried into the queued run (`queuedForce`) so the reload is never
 *   skipped just because build output didn't change.
 * - Rebuild errors are logged, never thrown — the dev server keeps
 *   running so the user can fix their code.
 */
function createRebuilder(buildOptions, reloader) {
	let running = false;
	let queued = false;
	let queuedForce = false;
	let pendingForce = false;
	let debounceTimer = null;
	// Run one build+reload cycle; chains into the queued run (if any) from
	// `finally`, so the queue drains even after a failed build.
	const runOnce = async (force) => {
		running = true;
		const start = Date.now();
		console.error(`[flue] Rebuilding...`);
		try {
			const { changed } = await build(buildOptions);
			// Reload when the build produced new output OR a force (env
			// change) was requested.
			await reloader.reload(changed || force);
			console.error(`[flue] Reloaded in ${Date.now() - start}ms\n`);
		} catch (err) {
			console.error(`[flue] Rebuild failed: ${err instanceof Error ? err.message : String(err)}\n`);
		} finally {
			running = false;
			if (queued) {
				const nextForce = queuedForce;
				queued = false;
				queuedForce = false;
				// Fire-and-forget: errors are handled inside runOnce itself.
				runOnce(nextForce);
			}
		}
	};
	return { schedule(forceReload = false) {
		if (forceReload) pendingForce = true;
		// Restart the debounce window on every call.
		if (debounceTimer) clearTimeout(debounceTimer);
		debounceTimer = setTimeout(() => {
			debounceTimer = null;
			const force = pendingForce;
			pendingForce = false;
			if (running) {
				// Coalesce into a single queued follow-up run.
				queued = true;
				if (force) queuedForce = true;
			} else runOnce(force);
		}, 150);
	} };
}
1305
+ /**
1306
+ * Watch the workspace for changes. Uses `fs.watch` recursive (Node 20+).
1307
+ *
1308
+ * Watched roots:
1309
+ * - `<workspaceDir>` — agents/, roles/, AGENTS.md, .agents/skills/.
1310
+ * - For Cloudflare: also `<outputDir>/wrangler.jsonc` (and `.json`),
1311
+ * since changes there require a worker restart.
1312
+ *
1313
+ * Ignored:
1314
+ * - `dist/`, `node_modules/`, `.git/`, `.turbo/`
1315
+ * - dotfiles other than the ones we explicitly care about (AGENTS.md is
1316
+ * not a dotfile, so it's fine)
1317
+ * - editor backup/swap suffixes
1318
+ */
1319
+ function createWatcher(options) {
1320
+ const { workspaceDir, outputDir, target, envFiles, onChange } = options;
1321
+ const watchers = [];
1322
+ const isIgnoredPath = (relPath) => {
1323
+ const parts = relPath.replace(/\\/g, "/").split("/");
1324
+ for (const part of parts) {
1325
+ if (part === "node_modules") return true;
1326
+ if (part === "dist") return true;
1327
+ if (part === ".git") return true;
1328
+ if (part === ".turbo") return true;
1329
+ }
1330
+ const base = parts[parts.length - 1] ?? "";
1331
+ if (!base) return true;
1332
+ if (base.startsWith(".") && base !== ".flueignore") return true;
1333
+ if (base.endsWith("~") || base.endsWith(".swp") || base.endsWith(".swx")) return true;
1334
+ if (base === ".DS_Store") return true;
1335
+ return false;
1336
+ };
1337
+ try {
1338
+ const w = fs.watch(workspaceDir, { recursive: true }, (_event, filename) => {
1339
+ if (!filename) return;
1340
+ const rel = filename.toString();
1341
+ if (isIgnoredPath(rel)) return;
1342
+ onChange(rel);
1343
+ });
1344
+ watchers.push(w);
1345
+ } catch (err) {
1346
+ console.error(`[flue] Failed to watch ${workspaceDir}: ${err instanceof Error ? err.message : String(err)}`);
1347
+ }
1348
+ if (target === "cloudflare") for (const cfgName of [
1349
+ "wrangler.jsonc",
1350
+ "wrangler.json",
1351
+ "wrangler.toml"
1352
+ ]) {
1353
+ const cfgPath = path.join(outputDir, cfgName);
1354
+ if (!fs.existsSync(cfgPath)) continue;
1355
+ try {
1356
+ const w = fs.watch(cfgPath, () => onChange(cfgName));
1357
+ watchers.push(w);
1358
+ } catch {}
1359
+ }
1360
+ for (const envPath of envFiles) try {
1361
+ const w = fs.watch(envPath, () => onChange(envPath));
1362
+ watchers.push(w);
1363
+ } catch {}
1364
+ return { close() {
1365
+ for (const w of watchers) try {
1366
+ w.close();
1367
+ } catch {}
1368
+ } };
1369
+ }
1370
var NodeReloader = class {
	// Currently running server child process, or null when stopped.
	child = null;
	// Absolute path to the built entry (dist/server.mjs).
	serverPath;
	outputDir;
	port;
	envFiles;
	// Base URL of the dev server; for Node this is fixed to localhost:<port>.
	url;
	constructor(opts) {
		this.outputDir = opts.outputDir;
		this.port = opts.port;
		this.envFiles = opts.envFiles;
		// The Node build path always emits dist/server.mjs (see build()).
		this.serverPath = path.join(this.outputDir, "dist", "server.mjs");
		this.url = `http://localhost:${this.port}`;
	}
	/** Spawn the server child and wait for /health to answer. */
	async start() {
		await this.spawnAndWait();
	}
	/** Node has no downstream bundler — every watched change triggers a rebuild. */
	shouldRebuildOn(_relPath) {
		return true;
	}
	/** Full restart: kill the old child, then spawn and health-check a new one. */
	async reload(_buildChanged) {
		await this.killChild();
		await this.spawnAndWait();
	}
	async stop() {
		await this.killChild();
	}
	/** Best-effort synchronous kill for the process `exit` handler. */
	killSync() {
		const child = this.child;
		if (!child || child.killed) return;
		try {
			child.kill("SIGKILL");
		} catch {}
	}
	/**
	 * Spawn `node dist/server.mjs` and block until its /health endpoint is
	 * reachable (15s budget). Env-file vars are spread BEFORE process.env so
	 * shell-set values win; PORT and FLUE_MODE=local are always set.
	 * @throws when the server never becomes healthy (child is killed first).
	 */
	async spawnAndWait() {
		const fromFiles = parseEnvFiles(this.envFiles);
		const child = spawn("node", [this.serverPath], {
			stdio: [
				"ignore",
				"pipe",
				"pipe"
			],
			cwd: this.outputDir,
			env: {
				...fromFiles,
				...process.env,
				PORT: String(this.port),
				FLUE_MODE: "local"
			}
		});
		this.child = child;
		// Forward child output to our stderr, dropping the child's startup
		// banner lines (dev() prints its own equivalents).
		const pipe = (data) => {
			const text = data.toString().trimEnd();
			for (const line of text.split("\n")) {
				if (!line.trim()) continue;
				if (line.includes("[flue] Server listening") || line.includes("[flue] Available agents:") || line.includes("[flue] Mode: local")) continue;
				console.error(line);
			}
		};
		child.stdout?.on("data", pipe);
		child.stderr?.on("data", pipe);
		child.on("exit", (code, signal) => {
			// Only report exits of the *current* child. killChild() clears
			// this.child before signalling, so intentional restarts stay quiet.
			if (this.child === child) {
				this.child = null;
				if (code !== 0 && code !== null) console.error(`[flue] Node server exited unexpectedly (code=${code}, signal=${signal ?? "none"})`);
			}
		});
		if (!await waitForHealth(this.url, 15e3)) {
			await this.killChild();
			throw new Error("Node server did not become ready within 15s");
		}
	}
	/**
	 * Terminate the child: SIGTERM first, escalating to SIGKILL after 1s.
	 * Resolves once the child exits (or immediately when none is running).
	 */
	async killChild() {
		const child = this.child;
		if (!child || child.killed) {
			this.child = null;
			return;
		}
		// Clear the reference first so the exit handler above treats this
		// as an intentional shutdown.
		this.child = null;
		await new Promise((resolve) => {
			let resolved = false;
			const done = () => {
				if (!resolved) {
					resolved = true;
					resolve();
				}
			};
			child.once("exit", done);
			try {
				child.kill("SIGTERM");
			} catch {
				// Signalling failed (already dead) — don't wait for "exit".
				done();
				return;
			}
			// Escalation: resolve after 1s even if "exit" never fires.
			setTimeout(() => {
				try {
					if (!child.killed) child.kill("SIGKILL");
				} catch {}
				done();
			}, 1e3);
		});
	}
};
1473
+ /**
1474
+ * Lazy-import wrangler so users targeting only Node don't need it installed.
1475
+ * If the import fails, surface a friendly message pointing at the peer-dep.
1476
+ */
1477
+ async function createCloudflareReloader(opts) {
1478
+ let wrangler;
1479
+ try {
1480
+ wrangler = await import("wrangler");
1481
+ } catch (err) {
1482
+ throw new Error(`[flue] Cloudflare dev requires the "wrangler" package as a peer dependency.
1483
+ Install it in your project:
1484
+
1485
+ npm install --save-dev wrangler
1486
+
1487
+ Underlying error: ${err instanceof Error ? err.message : String(err)}`);
1488
+ }
1489
+ return new CloudflareReloader(wrangler, opts);
1490
+ }
1491
var CloudflareReloader = class {
	// Handle from wrangler's unstable_startWorker, or null when stopped.
	worker = null;
	// The lazily-imported wrangler module (see createCloudflareReloader).
	wrangler;
	outputDir;
	port;
	// Merged config path — the Cloudflare build writes dist/wrangler.jsonc.
	configPath;
	envFiles;
	// Base URL of the running worker; set by startWorker().
	url;
	constructor(wrangler, opts) {
		this.wrangler = wrangler;
		this.outputDir = opts.outputDir;
		this.port = opts.port;
		this.envFiles = opts.envFiles;
		this.configPath = path.join(this.outputDir, "dist", "wrangler.jsonc");
	}
	async start() {
		await this.startWorker();
	}
	/**
	 * On Cloudflare, wrangler watches the entry's transitive imports itself
	 * and hot-reloads workerd when an agent file body changes. We only need
	 * to act when something *structural* changes — i.e. something that
	 * affects what `_entry.ts` or `wrangler.jsonc` look like.
	 *
	 * Concretely, we trigger a Flue-side rebuild for:
	 * - File adds/removes in `agents/` (the agent set determines DO classes
	 *   and binding declarations).
	 * - Changes to `agents/*.ts` — these MAY change the exported `triggers`,
	 *   so we have to re-parse them. (Plain body edits redo a tiny amount
	 *   of work but the rebuild is cheap and idempotent.)
	 * - Changes to `roles/*.md` — roles are baked into the entry as JSON.
	 * - Changes to the user's `wrangler.jsonc` — affects the merged config.
	 *
	 * Notes we explicitly DO ignore for rebuild purposes (wrangler handles
	 * them): edits to imported source files outside of `agents/`/`roles/`,
	 * AGENTS.md, and `.agents/skills/` (those are runtime-discovered, not
	 * baked into the entry).
	 */
	shouldRebuildOn(relPath) {
		// Env files are passed as absolute paths by the watcher.
		if (this.envFiles.includes(relPath)) return true;
		const normalized = relPath.replace(/\\/g, "/");
		if (normalized === "wrangler.jsonc" || normalized === "wrangler.json" || normalized === "wrangler.toml") return true;
		if (normalized.startsWith("agents/")) return true;
		if (normalized.startsWith("roles/")) return true;
		return false;
	}
	/**
	 * Restart workerd only when the build actually produced new output;
	 * otherwise wrangler's own watcher hot-reloads and we do nothing.
	 */
	async reload(buildChanged) {
		if (!buildChanged) {
			console.error(`[flue] No structural change — wrangler will hot-reload\n`);
			return;
		}
		await this.disposeWorker();
		await this.startWorker();
	}
	async stop() {
		await this.disposeWorker();
	}
	// The worker handle has no synchronous dispose; dropping the reference
	// is the best we can do from a process `exit` handler.
	killSync() {
		this.worker = null;
	}
	/**
	 * Boot workerd via wrangler against the merged dist config.
	 * `watch: false` because the outer dev loop owns config-level restarts;
	 * wrangler still watches the entry's import graph for hot reloads.
	 */
	async startWorker() {
		if (!fs.existsSync(this.configPath)) throw new Error(`[flue] Expected ${this.configPath} after build, but it doesn't exist. Did the Cloudflare build succeed?`);
		this.worker = await this.wrangler.unstable_startWorker({
			config: this.configPath,
			envFiles: this.envFiles,
			build: { nodejsCompatMode: "v2" },
			dev: {
				server: {
					hostname: "localhost",
					port: this.port
				},
				watch: false,
				logLevel: "info"
			}
		});
		// worker.url is a promise; fall back to the requested port if it
		// rejects (the URL is only used for log messages and health hints).
		try {
			this.url = (await this.worker.url).toString().replace(/\/$/, "");
		} catch {
			this.url = `http://127.0.0.1:${this.port}`;
		}
	}
	/** Dispose the current worker (if any); errors are logged, not thrown. */
	async disposeWorker() {
		const worker = this.worker;
		this.worker = null;
		if (!worker) return;
		try {
			await worker.dispose();
		} catch (err) {
			console.error(`[flue] Error disposing Cloudflare worker: ${err instanceof Error ? err.message : String(err)}`);
		}
	}
};
1583
+ /**
1584
+ * Resolve and validate a list of env-file paths. Returns absolute paths.
1585
+ *
1586
+ * Throws a friendly `[flue]`-prefixed error if any path doesn't exist. The
1587
+ * goal of `--env` is explicitness — silent skip on a typo would defeat
1588
+ * the purpose.
1589
+ */
1590
+ function resolveEnvFiles(envFiles, cwd) {
1591
+ if (!envFiles || envFiles.length === 0) return [];
1592
+ return envFiles.map((p) => {
1593
+ const abs = path.isAbsolute(p) ? p : path.resolve(cwd, p);
1594
+ if (!fs.existsSync(abs)) throw new Error(`[flue] --env points at a path that doesn't exist: ${p}`);
1595
+ return abs;
1596
+ });
1597
+ }
1598
+ /**
1599
+ * Parse one or more `.env`-format files and return their merged contents.
1600
+ * Later files override earlier files on key collision.
1601
+ *
1602
+ * Uses Node's built-in `util.parseEnv` (Node 20.6+; Flue requires Node 22+).
1603
+ * No `dotenv` package needed.
1604
+ *
1605
+ * Parse-only — doesn't touch `process.env`. Caller composes with
1606
+ * `process.env` as needed (typical pattern: spread file vars first, then
1607
+ * `process.env`, so shell-set values win).
1608
+ */
1609
+ function parseEnvFiles(absolutePaths) {
1610
+ const merged = {};
1611
+ for (const p of absolutePaths) {
1612
+ const parsed = parseEnv(fs.readFileSync(p, "utf-8"));
1613
+ Object.assign(merged, parsed);
1614
+ }
1615
+ return merged;
1616
+ }
1617
/**
 * Poll `${baseUrl}/health` until it responds with a 2xx or `timeoutMs`
 * elapses, pausing 200ms between attempts.
 *
 * Each individual probe is capped at 1s via `AbortSignal.timeout`. The
 * previous setTimeout+AbortController pair only cleared its timer on the
 * success path, so every rejected fetch leaked a live 1s timer;
 * `AbortSignal.timeout` has no timer to clean up.
 *
 * @param {string} baseUrl - Origin to probe (no trailing slash).
 * @param {number} timeoutMs - Total time budget for the whole wait.
 * @returns {Promise<boolean>} true once healthy, false on timeout.
 */
async function waitForHealth(baseUrl, timeoutMs) {
	const start = Date.now();
	while (Date.now() - start < timeoutMs) {
		try {
			const res = await fetch(`${baseUrl}/health`, { signal: AbortSignal.timeout(1e3) });
			if (res.ok) return true;
		} catch {
			// Connection refused / probe timed out — server not up yet; keep polling.
		}
		await new Promise((r) => setTimeout(r, 200));
	}
	return false;
}
1631
/**
 * Pick a webhook agent name to print in the friendly curl example.
 *
 * Prefers an agent with a webhook trigger from the build's
 * `dist/manifest.json`; otherwise any agent listed there (the example
 * would 404 on the dev server in that case, but it still hints at the
 * URL shape); otherwise scans `<workspaceDir>/agents` for an
 * agent-looking filename in case the manifest is somehow missing.
 *
 * Best-effort — returns null (never throws) when nothing can be found.
 */
function pickExampleAgentName(outputDir, workspaceDir) {
	try {
		const manifestPath = path.join(outputDir, "dist", "manifest.json");
		if (fs.existsSync(manifestPath)) {
			const manifest = JSON.parse(fs.readFileSync(manifestPath, "utf-8"));
			const agents = manifest.agents ?? [];
			const withWebhook = agents.find((agent) => agent.triggers?.webhook);
			if (withWebhook) return withWebhook.name;
			if (agents[0]) return agents[0].name;
		}
	} catch {
		// Unreadable or corrupt manifest — fall through to the directory scan.
	}
	try {
		const agentsDir = path.join(workspaceDir, "agents");
		if (!fs.existsSync(agentsDir)) return null;
		const agentFile = /^([a-zA-Z0-9_-]+)\.(ts|js|mts|mjs)$/;
		for (const entry of fs.readdirSync(agentsDir)) {
			const match = agentFile.exec(entry);
			if (match?.[1]) return match[1];
		}
		return null;
	} catch {
		return null;
	}
}
1662
+
1663
+ //#endregion
1664
+ export { BUILTIN_TOOL_NAMES, DEFAULT_DEV_PORT, build, createTools, dev, parseEnvFiles, resolveEnvFiles, resolveWorkspaceFromCwd };