@flue/sdk 0.3.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,1568 @@
1
+ import { a as parseFrontmatterFile, n as createTools, t as BUILTIN_TOOL_NAMES } from "./agent-BB4lwAd5.mjs";
2
+ import * as esbuild from "esbuild";
3
+ import * as fs from "node:fs";
4
+ import * as path from "node:path";
5
+ import { packageUpSync } from "package-up";
6
+ import { spawn } from "node:child_process";
7
+
8
+ //#region src/cloudflare-wrangler-merge.ts
9
+ /**
10
+ * Merge Flue's Cloudflare additions into the user's wrangler config.
11
+ *
12
+ * Philosophy: the user's wrangler config is the source of truth. Flue contributes
13
+ * the pieces it owns (the Worker entrypoint, its per-agent Durable Object
14
+ * bindings, the Sandbox DO, the migration tag) and leaves everything else
15
+ * untouched. The merged result is written to `dist/wrangler.jsonc` so the
16
+ * deployed Worker sees both.
17
+ *
18
+ * We delegate parsing and normalization to wrangler's own `unstable_readConfig`
19
+ * (lazy-imported so Node-only Flue users don't pay for it). This gets us:
20
+ * - Both jsonc and TOML support for free.
21
+ * - Wrangler's own validation diagnostics (clearer errors than ours).
22
+ * - Path normalization: relative paths in fields like `containers[].image`
23
+ * are resolved to absolute paths against the user's config dir before
24
+ * we merge. This is critical because we write the merged config to
25
+ * `dist/wrangler.jsonc` — wrangler resolves relative paths against the
26
+ * config file's own directory, so without normalization a user's
27
+ * `containers[].image: "./Dockerfile"` would resolve to `dist/Dockerfile`
28
+ * after the move and fail to deploy.
29
+ *
30
+ * Flue still owns merge semantics (DO binding de-dup by `name`, migration
31
+ * append-if-tag-absent) and Flue-specific validation (compat date floor,
32
+ * required compat flags) — wrangler doesn't know about those.
33
+ */
34
/**
 * Minimum compatibility_date Flue supports. Enforced by
 * `validateUserWranglerConfig` and used as the default date by
 * `mergeFlueAdditions` when the user's config has none. The floor exists
 * because Flue needs SQLite-backed Durable Object support, nodejs_compat v2,
 * and AsyncLocalStorage (see the validation error message for details).
 */
const MIN_COMPATIBILITY_DATE = "2026-04-01";
/**
 * compatibility_flag Flue requires for pi-ai's process.env-based API key
 * lookup. Checked by `validateUserWranglerConfig` when the user declares
 * compatibility_flags, and appended automatically by `mergeFlueAdditions`
 * when absent.
 */
const REQUIRED_COMPAT_FLAG = "nodejs_compat";
38
+ /**
39
+ * Read and normalize the user's wrangler config from `outputDir`.
40
+ *
41
+ * Looks for `wrangler.jsonc`, `wrangler.json`, then `wrangler.toml` (jsonc is
42
+ * Cloudflare's recommended format for new projects, but all three work).
43
+ * Returns an empty config if no file is present.
44
+ *
45
+ * Delegates parsing + normalization to wrangler via `unstable_readConfig`. This
46
+ * is async only because wrangler is a lazy import (it's a peer dep — Flue users
47
+ * who only target Node should not pay for resolving it). The wrangler call
48
+ * itself is synchronous under the hood.
49
+ *
50
+ * The returned config has been through wrangler's `normalizeAndValidateConfig`,
51
+ * which:
52
+ * - Resolves relative paths to absolute (notably `containers[].image`).
53
+ * - Fills in defaults (`compatibility_date` if absent, etc.).
54
+ * - Merges `env.*` per-environment overrides.
55
+ * - Throws on validation errors via wrangler's own `UserError`.
56
+ *
57
+ * The verbose / defaulted output is intentional — the cost is a slightly bigger
58
+ * `dist/wrangler.jsonc` and the benefit is correctness without us reimplementing
59
+ * wrangler's path-resolution logic.
60
+ */
61
/**
 * Locate and read the user's wrangler config inside `outputDir`.
 *
 * Tries `wrangler.jsonc`, then `wrangler.json`, then `wrangler.toml` and
 * returns `{ config: {}, path: null }` when none exists. Parsing and
 * normalization are delegated to wrangler's `unstable_readConfig`; wrangler
 * itself is lazy-imported so Node-only users never pay for resolving it.
 *
 * @param {string} outputDir directory to probe for a wrangler config file
 * @returns {Promise<{config: object, path: string | null}>} normalized
 *   config plus the path it was read from (null if no file was found)
 * @throws when wrangler cannot be imported (missing peer dependency) or the
 *   config file fails wrangler's own parsing/validation
 */
async function readUserWranglerConfig(outputDir) {
  const fileNames = ["wrangler.jsonc", "wrangler.json", "wrangler.toml"];
  const foundPath =
    fileNames
      .map((fileName) => path.join(outputDir, fileName))
      .find((candidate) => fs.existsSync(candidate)) ?? null;
  if (foundPath === null) {
    return { config: {}, path: null };
  }
  let wrangler;
  try {
    // Lazy import: wrangler is a peer dependency, resolved only on demand.
    wrangler = await import("wrangler");
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    throw new Error(`[flue] Reading the Cloudflare wrangler config requires the "wrangler" package as a peer dependency.
Install it in your project:

npm install --save-dev wrangler

Underlying error: ${reason}`);
  }
  try {
    // unstable_readConfig handles jsonc + TOML, resolves relative paths,
    // fills defaults, and applies wrangler's own validation.
    const parsed = wrangler.unstable_readConfig({ config: foundPath }, { hideWarnings: true });
    return { config: parsed, path: foundPath };
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    throw new Error(`[flue] Failed to read ${foundPath}: ${reason}`);
  }
}
101
+ /**
102
+ * Validate that the user's wrangler config meets Flue's minimum runtime
103
+ * requirements. Throws a clear error describing the fix if it doesn't.
104
+ *
105
+ * We're intentionally strict here rather than silently massaging bad configs —
106
+ * the failure modes when these are wrong (missing nodejs_compat, old
107
+ * compat_date) produce confusing runtime errors, and surfacing the problem at
108
+ * build time is much friendlier.
109
+ *
110
+ * Together with `mergeFlueAdditions`, this enforces two invariants on every
111
+ * Flue worker:
112
+ * 1. `nodejs_compat` is in `compatibility_flags` (added if missing).
113
+ * 2. `compatibility_date >= MIN_COMPATIBILITY_DATE` (defaulted if missing).
114
+ *
115
+ * Those invariants are what let `dev.ts` hardcode `nodejsCompatMode: 'v2'`
116
+ * without re-deriving it from the config on every reload.
117
+ */
118
/**
 * Check the user's wrangler config against Flue's minimum runtime
 * requirements; throws a descriptive error on the first violation.
 *
 * Two checks, each skipped when the relevant field is absent or of an
 * unexpected type (the merge step supplies defaults in that case):
 * 1. `compatibility_flags`, if declared as an array, must contain
 *    REQUIRED_COMPAT_FLAG.
 * 2. `compatibility_date`, if declared as a string, must be YYYY-MM-DD and
 *    at least MIN_COMPATIBILITY_DATE (lexicographic comparison is safe for
 *    zero-padded ISO dates).
 *
 * @param {object} config normalized wrangler config
 * @throws {Error} with a `[flue]`-prefixed, actionable message
 */
function validateUserWranglerConfig(config) {
  const declaredFlags = config.compatibility_flags;
  if (Array.isArray(declaredFlags) && !declaredFlags.includes(REQUIRED_COMPAT_FLAG)) {
    throw new Error(`[flue] Your wrangler config's "compatibility_flags" is missing "${REQUIRED_COMPAT_FLAG}". Flue relies on it at runtime (e.g. for API key resolution via process.env). Add "${REQUIRED_COMPAT_FLAG}" to the list.`);
  }
  const userDate = config.compatibility_date;
  if (typeof userDate !== "string") return;
  if (!/^\d{4}-\d{2}-\d{2}$/.test(userDate)) {
    throw new Error(`[flue] Your wrangler config's "compatibility_date" ("${userDate}") is not in YYYY-MM-DD format.`);
  }
  if (userDate < MIN_COMPATIBILITY_DATE) {
    throw new Error(`[flue] Your wrangler config's "compatibility_date" is "${userDate}". Flue requires at least "${MIN_COMPATIBILITY_DATE}" for SQLite-backed Durable Object support, nodejs_compat v2, and AsyncLocalStorage. Bump the date (set it to today unless you have a specific reason).`);
  }
}
128
+ /**
129
+ * Produce the merged wrangler config: start from the user's, layer Flue's
130
+ * contributions on top. Pure function — caller handles reading and writing.
131
+ */
132
/**
 * Produce the merged wrangler config: the user's config is the base and
 * Flue layers its contributions on top. Pure — neither argument is mutated;
 * the caller handles reading and writing.
 *
 * Merge rules:
 * - `main` is always Flue's entry file; `name` falls back to
 *   `additions.defaultName` only when missing/empty.
 * - `compatibility_date` defaults to MIN_COMPATIBILITY_DATE when absent;
 *   REQUIRED_COMPAT_FLAG is appended to (string-only) compatibility_flags
 *   if not already present.
 * - Flue's DO bindings are appended, de-duplicated by binding `name`
 *   against the user's existing bindings.
 * - Flue's migration is appended only when no existing migration carries
 *   the same tag.
 *
 * @param {object} userConfig normalized user wrangler config
 * @param {object} additions Flue's contributions (main, defaultName,
 *   doBindings, migration)
 * @returns {object} the merged config
 */
function mergeFlueAdditions(userConfig, additions) {
  const merged = { ...userConfig, main: additions.main };

  const hasUsableName = typeof merged.name === "string" && merged.name.length > 0;
  if (!hasUsableName) merged.name = additions.defaultName;
  if (typeof merged.compatibility_date !== "string") {
    merged.compatibility_date = MIN_COMPATIBILITY_DATE;
  }

  // Drop non-string flags, then guarantee the required flag is present.
  const flags = Array.isArray(merged.compatibility_flags)
    ? merged.compatibility_flags.filter((flag) => typeof flag === "string")
    : [];
  merged.compatibility_flags = flags.includes(REQUIRED_COMPAT_FLAG)
    ? flags
    : [...flags, REQUIRED_COMPAT_FLAG];

  // Append Flue's DO bindings, skipping any binding name the user already declared.
  const doConfig = merged.durable_objects !== null && typeof merged.durable_objects === "object"
    ? merged.durable_objects
    : {};
  const currentBindings = Array.isArray(doConfig.bindings) ? doConfig.bindings : [];
  const takenNames = new Set();
  for (const binding of currentBindings) {
    if (binding !== null && typeof binding === "object" && typeof binding.name === "string") {
      takenNames.add(binding.name);
    }
  }
  const newBindings = additions.doBindings.filter((binding) => !takenNames.has(binding.name));
  merged.durable_objects = {
    ...doConfig,
    bindings: currentBindings.concat(newBindings),
  };

  // Append Flue's migration unless its tag is already recorded.
  const currentMigrations = Array.isArray(merged.migrations) ? merged.migrations : [];
  const tagAlreadyPresent = currentMigrations.some(
    (migration) =>
      migration !== null &&
      typeof migration === "object" &&
      typeof migration.tag === "string" &&
      migration.tag === additions.migration.tag,
  );
  merged.migrations = tagAlreadyPresent
    ? [...currentMigrations]
    : [...currentMigrations, additions.migration];

  return merged;
}
155
+ /**
156
+ * Return the list of `class_name`s declared in the user's wrangler
157
+ * `durable_objects.bindings` that contain the literal substring `Sandbox`
158
+ * (case-sensitive).
159
+ *
160
+ * This is Flue's convention for wiring `@cloudflare/sandbox`: any DO binding
161
+ * whose class name contains `Sandbox` triggers an automatic re-export in the
162
+ * generated Worker entry:
163
+ *
164
+ * export { Sandbox as <class_name> } from '@cloudflare/sandbox';
165
+ *
166
+ * The alias lets users pick arbitrary class names (e.g. `PyBoxSandbox`,
167
+ * `SupportSandbox`) while still pointing at the single class shipped by the
168
+ * `@cloudflare/sandbox` package. Each distinct `class_name` can be paired with
169
+ * a different container image in the user's `containers[]` config.
170
+ *
171
+ * Returns unique, sorted class names. Non-object bindings or bindings without
172
+ * a string `class_name` are ignored.
173
+ */
174
/**
 * Collect the `class_name`s from the user's `durable_objects.bindings`
 * whose names contain the literal substring "Sandbox" (case-sensitive).
 *
 * This is Flue's convention for wiring `@cloudflare/sandbox`: each such
 * class name gets an automatic alias re-export in the generated Worker
 * entry (`export { Sandbox as <class_name> } from '@cloudflare/sandbox';`),
 * letting users choose arbitrary class names that all map onto the single
 * class the package ships.
 *
 * @param {object} userConfig normalized user wrangler config
 * @returns {string[]} unique class names, sorted; non-object bindings and
 *   bindings without a string `class_name` are ignored
 */
function detectSandboxBindings(userConfig) {
  const container = userConfig.durable_objects;
  if (container === null || typeof container !== "object") return [];
  if (!Array.isArray(container.bindings)) return [];
  const classNames = container.bindings
    .filter(
      (binding) =>
        binding !== null &&
        typeof binding === "object" &&
        typeof binding.class_name === "string" &&
        binding.class_name.includes("Sandbox"),
    )
    .map((binding) => binding.class_name);
  return [...new Set(classNames)].sort();
}
188
+ /**
189
+ * When the user has declared one or more `Sandbox`-named DO bindings, verify
190
+ * that `@cloudflare/sandbox` is declared in the nearest package.json. Surfaces
191
+ * a friendly, actionable error at build time rather than letting esbuild emit
192
+ * a confusing module-resolution failure.
193
+ *
194
+ * The check is lenient: if no package.json can be located or parsed, we skip
195
+ * silently and let esbuild's own error path take over. This avoids false
196
+ * positives in unusual project layouts.
197
+ */
198
/**
 * When Sandbox-named DO bindings are declared, verify `@cloudflare/sandbox`
 * appears in some package.json reachable by walking up from any of
 * `searchDirs`. Surfaces an actionable build-time error instead of letting
 * esbuild fail later with an opaque module-resolution message.
 *
 * Lenient by design: an unreadable/unparseable package.json aborts the
 * check silently (returns) so unusual project layouts don't produce false
 * positives — esbuild's own error path takes over in that case.
 *
 * @param {string[]} sandboxClassNames detected Sandbox class names; empty
 *   array makes this a no-op
 * @param {string[]} searchDirs directories whose ancestor chains are probed
 *   for package.json (the filesystem root itself is never probed)
 * @throws {Error} when no reachable package.json declares the dependency
 */
function assertSandboxPackageInstalled(sandboxClassNames, searchDirs) {
  if (sandboxClassNames.length === 0) return;
  for (const startDir of searchDirs) {
    for (let dir = startDir; dir !== path.dirname(dir); dir = path.dirname(dir)) {
      const pkgPath = path.join(dir, "package.json");
      if (!fs.existsSync(pkgPath)) continue;
      try {
        const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
        const declared = {
          ...(pkg.dependencies ?? {}),
          ...(pkg.devDependencies ?? {}),
          ...(pkg.peerDependencies ?? {}),
          ...(pkg.optionalDependencies ?? {}),
        };
        if ("@cloudflare/sandbox" in declared) return;
      } catch {
        // Unreadable or malformed package.json: skip the whole check.
        return;
      }
    }
  }
  throw new Error(`[flue] Your wrangler config declares DO binding(s) whose class_name contains "Sandbox" (${sandboxClassNames.join(", ")}), but @cloudflare/sandbox is not in your package.json. Install it: \`npm install @cloudflare/sandbox\`.`);
}
220
+ /**
221
+ * Write the wrangler deploy-redirect file at `<outputDir>/.wrangler/deploy/config.json`
222
+ * so that `wrangler deploy` run from `outputDir` automatically picks up the
223
+ * generated `dist/wrangler.jsonc`.
224
+ *
225
+ * This is wrangler's own native redirection mechanism (the same one Astro's
226
+ * Cloudflare adapter uses). We only write the file if one doesn't already
227
+ * exist — if the user has set one up, respect their intent.
228
+ */
229
/**
 * Write wrangler's deploy-redirect file at
 * `<outputDir>/.wrangler/deploy/config.json`, pointing `wrangler deploy`
 * (run from `outputDir`) at the generated `dist/wrangler.jsonc`.
 *
 * Uses wrangler's native redirection mechanism. A pre-existing redirect
 * file is left untouched — if the user set one up, their intent wins.
 *
 * @param {string} outputDir project directory the redirect lives under
 */
function writeDeployRedirectIfMissing(outputDir) {
  const deployDir = path.join(outputDir, ".wrangler", "deploy");
  const redirectFile = path.join(deployDir, "config.json");
  if (fs.existsSync(redirectFile)) return;
  fs.mkdirSync(deployDir, { recursive: true });
  const body = JSON.stringify({ configPath: "../../dist/wrangler.jsonc" }, null, 2);
  fs.writeFileSync(redirectFile, `${body}\n`, "utf-8");
}
236
+
237
+ //#endregion
238
+ //#region src/build-plugin-cloudflare.ts
239
+ /** Cloudflare build plugin. Produces a Worker + DO entry point with SSE/webhook/sync modes. */
240
var CloudflarePlugin = class {
// Plugin identifier; distinguishes this target from NodePlugin ("node").
name = "cloudflare";
// "none": this plugin emits an entry file but does not bundle it itself
// (contrast with NodePlugin, whose `bundle` is "esbuild"). NOTE(review):
// presumably wrangler performs the bundling — confirm against build.ts.
bundle = "none";
// Filename of the generated Worker entry. Must stay in sync with
// `additions.main` in `additionalOutputs` below.
entryFilename = "_entry.ts";
/**
* Per-build cache of the user's wrangler config. Both `generateEntryPoint`
* and `additionalOutputs` need it (for sandbox detection + the merge), and
* a fresh `CloudflarePlugin` instance is constructed for each build (see
* `resolvePlugin` in build.ts), so the cache is implicitly scoped to a
* single build.
*/
userConfigCache;
// Memoized wrapper around readUserWranglerConfig: the first caller pays
// for the read; later callers get the cached { config, path } pair.
async getUserConfig(outputDir) {
if (!this.userConfigCache) this.userConfigCache = await readUserWranglerConfig(outputDir);
return this.userConfigCache;
}
/**
* Generate the Worker entry module source (written to `entryFilename`).
* The returned string contains: one import per agent handler, one Durable
* Object class per webhook-triggered agent, re-exports for user-declared
* Sandbox DO classes, and a default `fetch` handler routing /health,
* /agents, and per-agent DO requests.
*/
async generateEntryPoint(ctx) {
const { agents, roles } = ctx;
const rolesJson = JSON.stringify(roles);
// Only webhook-triggered agents get a DO class (and an HTTP surface);
// all agents are still imported and listed in the manifest.
const webhookAgents = agents.filter((a) => a.triggers.webhook);
// Import lines for every agent handler; backslashes are normalized so
// Windows paths remain valid module specifiers.
const agentImports = agents.map((a) => {
return `import ${agentVarName$1(a.name)} from '${a.filePath.replace(/\\/g, "/")}';`;
}).join("\n");
// JSON manifest served from GET /agents: agent names and their triggers.
const manifest = JSON.stringify({ agents: agents.map((a) => ({
name: a.name,
triggers: a.triggers
})) }, null, 2);
// One DO class per webhook agent, delegating requests to the shared
// handleAgentRequest and fiber recovery to handleFlueFiberRecovered
// (both defined inside the emitted template below).
const agentClasses = webhookAgents.map((a) => {
const className = agentClassName(a.name);
const handlerVar = agentVarName$1(a.name);
return `export class ${className} extends Agent {
async onRequest(request) {
return handleAgentRequest(request, this, ${JSON.stringify(a.name)}, ${handlerVar});
}

async onFiberRecovered(ctx) {
if (ctx.name?.startsWith('flue:')) {
return handleFlueFiberRecovered(ctx, this, ${JSON.stringify(a.name)});
}
if (typeof super.onFiberRecovered === 'function') {
return super.onFiberRecovered(ctx);
}
}
}`;
}).join("\n\n");
const { config: userConfig } = await this.getUserConfig(ctx.outputDir);
// Everything below is emitted verbatim (plus the interpolations above)
// as the Worker entry module's source text.
return `
// Auto-generated by @flue/sdk build (cloudflare)
import { Agent, routeAgentRequest } from 'agents';
import { Bash, InMemoryFs } from 'just-bash';
import {
createFlueContext,
InMemorySessionStore,
bashFactoryToSessionEnv,
resolveModel,
} from '@flue/sdk/internal';
import { runWithCloudflareContext, cfSandboxToSessionEnv } from '@flue/sdk/cloudflare';

${agentImports}

// ─── Config ─────────────────────────────────────────────────────────────────

const roles = ${rolesJson};
const skills = {};
const systemPrompt = '';
const manifest = ${manifest};

// ─── Infrastructure ─────────────────────────────────────────────────────────

// No build-time model default. The user sets model at runtime via
// \`init({ model: "provider/model-id" })\` for an agent default, or via
// \`{ model: "provider/model-id" }\` on any individual prompt/skill/task call.
const model = undefined;

// ─── Sandbox Environments ───────────────────────────────────────────────────

/**
* Create an empty in-memory sandbox (default).
*/
async function createDefaultEnv() {
const fs = new InMemoryFs();
return bashFactoryToSessionEnv(() => new Bash({
fs,
network: { dangerouslyAllowFullInternetAccess: true },
}));
}

/**
* 'local' sandbox is not available on Cloudflare Workers.
*/
async function createLocalEnv() {
throw new Error(
"[flue] 'local' sandbox is not supported on Cloudflare Workers. " +
"Use the default empty sandbox, pass a BashFactory, " +
"or pass a sandbox instance (from any SDK — e.g. @cloudflare/sandbox " +
"or a Flue connector) to init({ sandbox })."
);
}

/**
* Detect and wrap external sandbox instances (e.g. from @cloudflare/sandbox's
* getSandbox()). Returns SessionEnv if the object quacks like a container
* sandbox, null otherwise.
*/
function resolveSandbox(sandbox) {
if (
sandbox && typeof sandbox === 'object' &&
typeof sandbox.exec === 'function' &&
typeof sandbox.readFile === 'function' &&
typeof sandbox.destroy === 'function' &&
!('getCwd' in sandbox) && !('fs' in sandbox)
) {
return cfSandboxToSessionEnv(sandbox);
}
return null;
}

// Fallback in-memory store (used if no DO storage is available).
const memoryStore = new InMemorySessionStore();

// Create a DO-backed session store from the Durable Object's SQL storage.
function createDOStore(sql) {
// Ensure the table exists
sql.exec(
'CREATE TABLE IF NOT EXISTS flue_sessions (id TEXT PRIMARY KEY, data TEXT NOT NULL, updated_at INTEGER NOT NULL)'
);
return {
async save(id, data) {
const json = JSON.stringify(data);
sql.exec(
'INSERT OR REPLACE INTO flue_sessions (id, data, updated_at) VALUES (?, ?, ?)',
id, json, Date.now()
);
},
async load(id) {
const rows = sql.exec('SELECT data FROM flue_sessions WHERE id = ?', id).toArray();
if (rows.length === 0) return null;
return JSON.parse(rows[0].data);
},
async delete(id) {
sql.exec('DELETE FROM flue_sessions WHERE id = ?', id);
},
};
}

function createContextForRequest(id, payload, doInstance) {
// Use DO SQLite storage by default, fall back to in-memory
const defaultStore = doInstance?.ctx?.storage?.sql
? createDOStore(doInstance.ctx.storage.sql)
: memoryStore;

return createFlueContext({
id,
payload,
env: doInstance?.env ?? {},
agentConfig: {
systemPrompt, skills, roles, model, resolveModel,
},
createDefaultEnv,
createLocalEnv,
defaultStore,
resolveSandbox,
});
}

function runWithInstanceContext(doInstance, fn) {
return runWithCloudflareContext(
{ env: doInstance.env, agentInstance: doInstance, storage: doInstance.ctx.storage },
fn,
);
}

function assertAgentsDurabilityApi(doInstance, method) {
if (typeof doInstance[method] !== 'function') {
throw new Error(
'[flue] The installed "agents" package does not provide the required Cloudflare Agents SDK method "' +
method +
'". Install or upgrade the "agents" package in your project.',
);
}
}

function runHandlerWithKeepAlive(doInstance, ctx, handler) {
return runWithInstanceContext(doInstance, () => {
assertAgentsDurabilityApi(doInstance, 'keepAliveWhile');
return doInstance.keepAliveWhile(() => handler(ctx));
});
}

function startWebhookFiber(doInstance, requestId, agentName, id, payload, handler) {
const run = async (fiber) => {
fiber?.stash?.({
version: 1,
kind: 'webhook',
agentName,
id,
requestId,
phase: 'running',
startedAt: Date.now(),
});

const ctx = createContextForRequest(id, payload, doInstance);
return runWithInstanceContext(doInstance, async () => {
try {
return await handler(ctx);
} finally {
ctx.setEventCallback(undefined);
}
});
};

assertAgentsDurabilityApi(doInstance, 'runFiber');
return doInstance.runFiber('flue:webhook:' + requestId, run);
}

async function handleFlueFiberRecovered(ctx, _doInstance, agentName) {
if (!ctx.name || !ctx.name.startsWith('flue:')) return;
console.warn('[flue] Cloudflare fiber interrupted:', agentName, ctx.name, ctx.snapshot ?? null);
}

// ─── Shared Request Handler ────────────────────────────────────────────────

async function handleAgentRequest(request, doInstance, agentName, handler) {
// Agent id is the DO "room name" set by routeAgentRequest
const id = doInstance.name;

// Parse payload
let payload;
try {
payload = await request.json();
} catch {
payload = {};
}

const accept = request.headers.get('accept') || '';
const isWebhook = request.headers.get('x-webhook') === 'true';
const isSSE = accept.includes('text/event-stream') && !isWebhook;

try {
// Fire-and-forget (webhook mode)
if (isWebhook) {
const requestId = crypto.randomUUID();
startWebhookFiber(doInstance, requestId, agentName, id, payload, handler).then(
(result) => {
console.log('[flue] Webhook handler complete:', agentName,
result !== undefined ? JSON.stringify(result) : '(no return)');
},
(err) => {
console.error('[flue] Webhook handler error:', agentName, err);
},
);
return new Response(JSON.stringify({ status: 'accepted', requestId }), {
status: 202,
headers: { 'content-type': 'application/json' },
});
}

// SSE streaming mode
if (isSSE) {
const { readable, writable } = new TransformStream();
const writer = writable.getWriter();
const encoder = new TextEncoder();
let eventId = 0;
let isIdle = false;

const writeSSE = async (data, event) => {
const lines = [];
if (event) lines.push('event: ' + event);
lines.push('id: ' + eventId++);
lines.push('data: ' + JSON.stringify(data));
lines.push('', '');
await writer.write(encoder.encode(lines.join('\\n')));
};

const ctx = createContextForRequest(id, payload, doInstance);
ctx.setEventCallback((event) => {
if (event.type === 'idle') isIdle = true;
writeSSE(event, event.type).catch(() => {});
});

(async () => {
try {
const result = await runHandlerWithKeepAlive(doInstance, ctx, handler);
if (!isIdle) {
await writeSSE({ type: 'idle' }, 'idle');
}
await writeSSE(
{ type: 'result', data: result !== undefined ? result : null },
'result',
);
} catch (err) {
await writeSSE(
{ type: 'error', error: String(err) },
'error',
);
if (!isIdle) {
await writeSSE({ type: 'idle' }, 'idle');
}
} finally {
ctx.setEventCallback(undefined);
await writer.close();
}
})();

return new Response(readable, {
headers: {
'content-type': 'text/event-stream',
'cache-control': 'no-cache',
'connection': 'keep-alive',
},
});
}

// Sync mode (default)
const ctx = createContextForRequest(id, payload, doInstance);
try {
const result = await runHandlerWithKeepAlive(doInstance, ctx, handler);
return new Response(
JSON.stringify({ result: result !== undefined ? result : null }),
{ headers: { 'content-type': 'application/json' } },
);
} finally {
ctx.setEventCallback(undefined);
}
} catch (err) {
console.error('[flue] Agent error:', agentName, err);
return new Response(
JSON.stringify({ error: String(err) }),
{ status: 500, headers: { 'content-type': 'application/json' } },
);
}
}

// ─── Per-Agent Durable Object Classes ──────────────────────────────────────

${agentClasses}

// ─── User-declared Sandbox re-exports ──────────────────────────────────────
// One line per DO binding in the user's wrangler.jsonc whose class_name
// contains "Sandbox". Flue aliases the single \`Sandbox\` class shipped by
// \`@cloudflare/sandbox\` so each user-chosen class_name resolves at the
// bundle's top level. The binding + container image configuration is owned
// by the user's wrangler.jsonc.
${detectSandboxBindings(userConfig).map((name) => `export { Sandbox as ${name} } from '@cloudflare/sandbox';`).join("\n")}

// ─── Worker Fetch Handler ───────────────────────────────────────────────────

export default {
async fetch(request, env) {
const url = new URL(request.url);

// Health check
if (url.pathname === '/health') {
return new Response(JSON.stringify({ status: 'ok' }), {
headers: { 'content-type': 'application/json' },
});
}

// Agent manifest
if (url.pathname === '/agents' && request.method === 'GET') {
return new Response(JSON.stringify(manifest), {
headers: { 'content-type': 'application/json' },
});
}

// Route to per-agent DOs via the Agents SDK
// URL: /agents/<agent-name>/<id>
const response = await routeAgentRequest(request, env);
if (response) return response;

return new Response('Not found', { status: 404 });
},
};
`;
}
/**
* Produce extra build artifacts: the merged `wrangler.jsonc` (user config +
* Flue's DO bindings and migration) and, as a side effect, the wrangler
* deploy-redirect file under `<outputDir>/.wrangler/deploy/`. Throws when
* the user config fails Flue's validation, or a Sandbox-named binding is
* declared without @cloudflare/sandbox installed.
*/
async additionalOutputs(ctx) {
const outputs = {};
// Flue contributes one DO binding per webhook agent; binding name and
// class name are both the PascalCase agent class name.
const flueBindings = ctx.agents.filter((a) => a.triggers.webhook).map((a) => ({
class_name: agentClassName(a.name),
name: agentClassName(a.name)
}));
const flueSqliteClasses = flueBindings.map((b) => b.class_name);
const additions = {
defaultName: path.basename(ctx.outputDir) || "flue-agents",
main: "_entry.ts",
doBindings: flueBindings,
migration: {
tag: "flue-v1",
new_sqlite_classes: flueSqliteClasses
}
};
const { config: userConfig, path: userConfigPath } = await this.getUserConfig(ctx.outputDir);
if (userConfigPath) console.log(`[flue] Merging with user wrangler config: ${userConfigPath}`);
// Fail fast on configs that would only break later at runtime.
validateUserWranglerConfig(userConfig);
const sandboxClassNames = detectSandboxBindings(userConfig);
if (sandboxClassNames.length > 0) {
assertSandboxPackageInstalled(sandboxClassNames, [ctx.outputDir, ctx.workspaceDir]);
for (const className of sandboxClassNames) console.log(`[flue] Detected Sandbox-named DO binding "${className}" — re-exporting from @cloudflare/sandbox.`);
}
const merged = mergeFlueAdditions(userConfig, additions);
// Point editors at the wrangler schema unless the user already set one.
if (typeof merged.$schema !== "string") merged.$schema = "https://workers.cloudflare.com/schema/wrangler.json";
outputs["wrangler.jsonc"] = JSON.stringify(merged, null, 2);
writeDeployRedirectIfMissing(ctx.outputDir);
return outputs;
}
};
646
/**
 * Derive a safe JS identifier for an agent's imported handler binding:
 * every non-alphanumeric character is replaced with "_" and the result is
 * prefixed with "handler_".
 *
 * @param {string} name agent name
 * @returns {string} e.g. "with-cloudflare" → "handler_with_cloudflare"
 */
function agentVarName$1(name) {
  const sanitized = name.replace(/[^a-zA-Z0-9]/g, "_");
  return `handler_${sanitized}`;
}
649
+ /**
650
+ * Convert agent name to a PascalCase DO class name.
651
+ * "hello" → "Hello", "with-cloudflare" → "WithCloudflare"
652
+ *
653
+ * routeAgentRequest() converts binding names to kebab-case for URL matching,
654
+ * so "WithCloudflare" → "with-cloudflare" → URL /agents/with-cloudflare/:id
655
+ */
656
/**
 * Convert an agent name to a PascalCase DO class name by splitting on "-"
 * and "_" and capitalizing each segment's first character.
 * "hello" → "Hello", "with-cloudflare" → "WithCloudflare"
 *
 * routeAgentRequest() converts binding names back to kebab-case for URL
 * matching, so "WithCloudflare" serves /agents/with-cloudflare/:id.
 *
 * @param {string} name agent name
 * @returns {string} PascalCase class name
 */
function agentClassName(name) {
  let className = "";
  for (const segment of name.split(/[-_]/)) {
    className += segment.charAt(0).toUpperCase() + segment.slice(1);
  }
  return className;
}
659
+
660
+ //#endregion
661
+ //#region src/build-plugin-node.ts
662
+ var NodePlugin = class {
663
+ name = "node";
664
+ bundle = "esbuild";
665
+ generateEntryPoint(ctx) {
666
+ const { agents, roles } = ctx;
667
+ const rolesJson = JSON.stringify(roles);
668
+ const webhookAgents = agents.filter((a) => a.triggers.webhook);
669
+ return `
670
+ // Auto-generated by @flue/sdk build (node)
671
+ import { Hono } from 'hono';
672
+ import { streamSSE } from 'hono/streaming';
673
+ import { serve } from '@hono/node-server';
674
+ import { Bash, InMemoryFs, MountableFs, ReadWriteFs } from 'just-bash';
675
+ import {
676
+ createFlueContext,
677
+ InMemorySessionStore,
678
+ bashFactoryToSessionEnv,
679
+ resolveModel,
680
+ } from '@flue/sdk/internal';
681
+ import { randomUUID } from 'node:crypto';
682
+
683
+ ${agents.map((a) => {
684
+ return `import ${agentVarName(a.name)} from '${a.filePath.replace(/\\/g, "/")}';`;
685
+ }).join("\n")}
686
+
687
+ // ─── Config ─────────────────────────────────────────────────────────────────
688
+
689
+ const skills = {};
690
+ const roles = ${rolesJson};
691
+ const systemPrompt = '';
692
+
693
+ const handlers = {
694
+ ${agents.map((a) => ` ${JSON.stringify(a.name)}: ${agentVarName(a.name)},`).join("\n")}
695
+ };
696
+
697
+ const webhookAgents = new Set(${JSON.stringify(webhookAgents.map((a) => a.name))});
698
+
699
+ // When the CLI starts this server via \`flue run\`, it sets FLUE_MODE=local.
700
+ // In local mode the HTTP route accepts any registered agent (including
701
+ // trigger-less CI-only agents). In any other mode the route is restricted to
702
+ // agents with \`webhook: true\`, preventing accidental public exposure of
703
+ // agents that the user only intended to invoke from their CI pipeline.
704
+ const isLocalMode = process.env.FLUE_MODE === 'local';
705
+
706
+ const manifest = ${JSON.stringify({ agents: agents.map((a) => ({
707
+ name: a.name,
708
+ triggers: a.triggers
709
+ })) }, null, 2)};
710
+
711
+ // ─── Infrastructure ─────────────────────────────────────────────────────────
712
+
713
+ // No build-time model default. The user sets model at runtime via
714
+ // \`init({ model: "provider/model-id" })\` for an agent default, or via
715
+ // \`{ model: "provider/model-id" }\` on any individual prompt/skill/task call.
716
+ const model = undefined;
717
+
718
+ // ─── Sandbox Environments ───────────────────────────────────────────────────
719
+
720
+ /**
721
+ * Create an empty in-memory sandbox (default).
722
+ * Uses InMemoryFs (no real filesystem access) with sensible defaults:
723
+ * cwd = /home/user, /tmp exists, /bin and /usr/bin exist.
724
+ */
725
+ async function createDefaultEnv() {
726
+ const fs = new InMemoryFs();
727
+ return bashFactoryToSessionEnv(() => new Bash({
728
+ fs,
729
+ network: { dangerouslyAllowFullInternetAccess: true },
730
+ }));
731
+ }
732
+
733
+ /**
734
+ * Create a local sandbox backed by the host filesystem.
735
+ * Mounts process.cwd() at /workspace via ReadWriteFs + MountableFs.
736
+ */
737
+ async function createLocalEnv() {
738
+ const rwfs = new ReadWriteFs({ root: process.cwd() });
739
+ const fs = new MountableFs({ base: new InMemoryFs() });
740
+ fs.mount('/workspace', rwfs);
741
+ return bashFactoryToSessionEnv(() => new Bash({
742
+ fs,
743
+ cwd: '/workspace',
744
+ network: { dangerouslyAllowFullInternetAccess: true },
745
+ }));
746
+ }
747
+
748
+ // Default persistence store for Node — in-memory, process lifetime.
749
+ const defaultStore = new InMemorySessionStore();
750
+
751
+ function createContextForRequest(id, payload) {
752
+ return createFlueContext({
753
+ id,
754
+ payload,
755
+ env: process.env,
756
+ agentConfig: {
757
+ systemPrompt, skills, roles, model, resolveModel,
758
+ },
759
+ createDefaultEnv,
760
+ createLocalEnv,
761
+ defaultStore,
762
+ });
763
+ }
764
+
765
+ // ─── Server ─────────────────────────────────────────────────────────────────
766
+
767
+ const app = new Hono();
768
+
769
+ app.get('/health', (c) => c.json({ status: 'ok' }));
770
+ app.get('/agents', (c) => c.json(manifest));
771
+
772
+ // Agent id is required in the URL
773
+ app.post('/agents/:name', (c) => {
774
+ return c.json({
775
+ error: 'Agent id is required. Use /agents/:name/:id',
776
+ }, 400);
777
+ });
778
+
779
+ app.post('/agents/:name/:id', async (c) => {
780
+ const name = c.req.param('name');
781
+ const id = c.req.param('id');
782
+
783
+ if (!handlers[name]) {
784
+ return c.json({ error: 'Agent not found' }, 404);
785
+ }
786
+ if (!webhookAgents.has(name) && !isLocalMode) {
787
+ return c.json({ error: 'Agent "' + name + '" is not web-accessible (no webhook trigger)' }, 404);
788
+ }
789
+
790
+ const handler = handlers[name];
791
+ let payload;
792
+ try {
793
+ payload = await c.req.json();
794
+ } catch {
795
+ payload = {};
796
+ }
797
+
798
+ const accept = c.req.header('accept') || '';
799
+ const isWebhook = c.req.header('x-webhook') === 'true';
800
+ const isSSE = accept.includes('text/event-stream') && !isWebhook;
801
+
802
+ // Fire-and-forget (webhook mode)
803
+ if (isWebhook) {
804
+ const requestId = randomUUID();
805
+ const ctx = createContextForRequest(id, payload);
806
+ handler(ctx).then(
807
+ (result) => {
808
+ ctx.setEventCallback(undefined);
809
+ console.log('[flue] Webhook handler complete:', name, result !== undefined ? JSON.stringify(result) : '(no return)');
810
+ },
811
+ (err) => {
812
+ ctx.setEventCallback(undefined);
813
+ console.error('[flue] Webhook handler error:', name, err);
814
+ },
815
+ );
816
+ return c.json({ status: 'accepted', requestId }, 202);
817
+ }
818
+
819
+ // SSE streaming mode
820
+ if (isSSE) {
821
+ return streamSSE(c, async (stream) => {
822
+ let eventId = 0;
823
+ let isIdle = false;
824
+ const ctx = createContextForRequest(id, payload);
825
+ ctx.setEventCallback((event) => {
826
+ if (event.type === 'idle') isIdle = true;
827
+ stream.writeSSE({ data: JSON.stringify(event), event: event.type, id: String(eventId++) }).catch(() => {});
828
+ });
829
+
830
+ try {
831
+ const result = await handler(ctx);
832
+ if (!isIdle) {
833
+ const idle = { type: 'idle' };
834
+ await stream.writeSSE({ data: JSON.stringify(idle), event: 'idle', id: String(eventId++) });
835
+ }
836
+ await stream.writeSSE({
837
+ data: JSON.stringify({ type: 'result', data: result !== undefined ? result : null }),
838
+ event: 'result',
839
+ id: String(eventId++),
840
+ });
841
+ } catch (err) {
842
+ await stream.writeSSE({
843
+ data: JSON.stringify({ type: 'error', error: String(err) }),
844
+ event: 'error',
845
+ id: String(eventId++),
846
+ });
847
+ if (!isIdle) {
848
+ const idle = { type: 'idle' };
849
+ await stream.writeSSE({ data: JSON.stringify(idle), event: 'idle', id: String(eventId++) });
850
+ }
851
+ } finally {
852
+ ctx.setEventCallback(undefined);
853
+ }
854
+ });
855
+ }
856
+
857
+ // Sync mode (default)
858
+ try {
859
+ const ctx = createContextForRequest(id, payload);
860
+ const result = await handler(ctx);
861
+ ctx.setEventCallback(undefined);
862
+ return c.json({ result: result !== undefined ? result : null });
863
+ } catch (err) {
864
+ console.error('[flue] Agent error:', name, err);
865
+ return c.json({ error: String(err) }, 500);
866
+ }
867
+ });
868
+
869
+ // ─── Start ──────────────────────────────────────────────────────────────────
870
+
871
+ const port = parseInt(process.env.PORT || '3000', 10);
872
+
873
+ const server = serve({ fetch: app.fetch, port });
874
+ console.log('[flue] Server listening on http://localhost:' + port);
875
+ if (isLocalMode) {
876
+ console.log('[flue] Mode: local (all agents invokable, including trigger-less)');
877
+ console.log('[flue] Available agents: ' + ${JSON.stringify(agents.map((a) => a.name).join(", "))});
878
+ } else {
879
+ console.log('[flue] Available agents: ' + ${JSON.stringify(webhookAgents.map((a) => a.name).join(", "))});
880
+ }
881
+
882
+ process.on('SIGINT', () => { server.close(); process.exit(0); });
883
+ process.on('SIGTERM', () => { server.close(); process.exit(0); });
884
+ `;
885
+ }
886
+ esbuildOptions(_ctx) {
887
+ return {
888
+ platform: "node",
889
+ target: "node22",
890
+ external: ["node-liblzma", "@mongodb-js/zstd"]
891
+ };
892
+ }
893
+ };
894
/**
 * Derive a safe JS identifier for an agent's generated handler variable.
 * Every character outside [a-zA-Z0-9] is replaced with "_".
 */
function agentVarName(name) {
	const sanitized = name.replace(/[^a-zA-Z0-9]/g, "_");
	return `handler_${sanitized}`;
}
897
+
898
+ //#endregion
899
+ //#region src/build.ts
900
+ /**
901
+ * Build a workspace into a deployable artifact.
902
+ *
903
+ * `options.workspaceDir` is treated as an explicit workspace root — the directory
904
+ * directly containing agents/ and roles/. No .flue/ waterfall is performed here;
905
+ * callers that want waterfall behavior (e.g. the CLI when --workspace is omitted)
906
+ * should use `resolveWorkspaceFromCwd` first.
907
+ *
908
+ * AGENTS.md and .agents/skills/ are NOT bundled — discovered at runtime from session cwd.
909
+ */
910
async function build(options) {
	// Resolve both roots up front so all logs and generated artifacts use
	// absolute, unambiguous paths.
	const workspaceDir = path.resolve(options.workspaceDir);
	const outputDir = path.resolve(options.outputDir);
	const plugin = resolvePlugin(options);
	console.log(`[flue] Building workspace: ${workspaceDir}`);
	console.log(`[flue] Output: ${outputDir}/dist`);
	console.log(`[flue] Target: ${plugin.name}`);
	// Discover workspace inputs: roles/*.md and agents/*.{ts,js,mts,mjs}.
	const roles = discoverRoles(workspaceDir);
	const agents = discoverAgents(workspaceDir);
	if (agents.length === 0) throw new Error(`[flue] No agent files found.\n\nExpected at: ${path.join(workspaceDir, "agents")}/\nAdd at least one agent file (e.g. hello.ts).`);
	// Partition agents by trigger kind. Used only for logging here — the
	// plugin's entrypoint generator performs its own routing.
	const webhookAgents = agents.filter((a) => a.triggers.webhook);
	const cronAgents = agents.filter((a) => a.triggers.cron);
	const triggerlessAgents = agents.filter((a) => !a.triggers.webhook && !a.triggers.cron);
	console.log(`[flue] Found ${Object.keys(roles).length} role(s): ${Object.keys(roles).join(", ") || "(none)"}`);
	console.log(`[flue] Found ${agents.length} agent(s): ${agents.map((a) => a.name).join(", ")}`);
	if (webhookAgents.length > 0) console.log(`[flue] Webhook agents: ${webhookAgents.map((a) => a.name).join(", ")}`);
	if (cronAgents.length > 0) console.log(`[flue] Cron agents (manifest only): ${cronAgents.map((a) => `${a.name} (${a.triggers.cron})`).join(", ")}`);
	if (triggerlessAgents.length > 0) console.log(`[flue] CLI-only agents (no HTTP route in deployed build): ${triggerlessAgents.map((a) => a.name).join(", ")}`);
	console.log(`[flue] AGENTS.md and .agents/skills/ will be discovered at runtime from session cwd`);
	const distDir = path.join(outputDir, "dist");
	fs.mkdirSync(distDir, { recursive: true });
	// Write the agent manifest (names + triggers) so runtime/dev tooling can
	// enumerate agents without re-parsing source files.
	const manifest = { agents: agents.map((a) => ({
		name: a.name,
		triggers: a.triggers
	})) };
	const manifestPath = path.join(distDir, "manifest.json");
	fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2), "utf-8");
	console.log(`[flue] Generated: ${manifestPath}`);
	// Build context handed to every plugin hook.
	const ctx = {
		agents,
		roles,
		workspaceDir,
		outputDir,
		options
	};
	const serverCode = await plugin.generateEntryPoint(ctx);
	// Plugins may opt out of esbuild bundling (e.g. when a downstream tool
	// like wrangler bundles the entry itself).
	const bundleStrategy = plugin.bundle ?? "esbuild";
	// Tracks whether any artifact on disk actually changed; returned to the
	// caller so `flue dev` can skip no-op reloads.
	let anyChanged = false;
	if (bundleStrategy === "esbuild") {
		// Bundle path: write the generated entry to a temp file, bundle to
		// dist/server.mjs, and always clean the temp entry up afterwards.
		const entryPath = path.join(distDir, "_entry_server.ts");
		const outPath = path.join(distDir, "server.mjs");
		fs.writeFileSync(entryPath, serverCode, "utf-8");
		try {
			const nodePathsSet = collectNodePaths(workspaceDir);
			// Split the plugin's esbuild options so its `external` list can be
			// merged with the user's dependencies rather than overwritten.
			const { external: pluginExternal = [], ...pluginEsbuildOpts } = plugin.esbuildOptions ? plugin.esbuildOptions(ctx) : {};
			const userExternals = getUserExternals(workspaceDir);
			await esbuild.build({
				entryPoints: [entryPath],
				bundle: true,
				outfile: outPath,
				format: "esm",
				external: [...pluginExternal, ...userExternals],
				nodePaths: [...nodePathsSet],
				logLevel: "warning",
				loader: {
					".ts": "ts",
					".node": "empty"
				},
				treeShaking: true,
				sourcemap: true,
				...pluginEsbuildOpts
			});
			console.log(`[flue] Built: ${outPath}`);
			anyChanged = true;
		} finally {
			// Best-effort removal of the temporary entry file.
			try {
				fs.unlinkSync(entryPath);
			} catch {}
		}
	} else if (bundleStrategy === "none") {
		// No-bundle path: emit the raw entry only when its content changed, so
		// downstream watchers (e.g. wrangler) are not poked unnecessarily.
		if (!plugin.entryFilename) throw new Error(`[flue] Plugin "${plugin.name}" set bundle: 'none' but did not provide entryFilename.`);
		const outPath = path.join(distDir, plugin.entryFilename);
		if (!fs.existsSync(outPath) || fs.readFileSync(outPath, "utf-8") !== serverCode) {
			fs.writeFileSync(outPath, serverCode, "utf-8");
			console.log(`[flue] Wrote entry: ${outPath} (no bundle — downstream tool handles it)`);
			anyChanged = true;
		} else console.log(`[flue] Entry unchanged: ${outPath}`);
	} else throw new Error(`[flue] Unknown bundle strategy: ${bundleStrategy}`);
	// Let the plugin emit extra files (e.g. merged wrangler.jsonc); again
	// only rewrite files whose content actually changed.
	if (plugin.additionalOutputs) {
		const outputs = await plugin.additionalOutputs(ctx);
		for (const [filename, content] of Object.entries(outputs)) {
			const filePath = path.join(distDir, filename);
			fs.mkdirSync(path.dirname(filePath), { recursive: true });
			if (!fs.existsSync(filePath) || fs.readFileSync(filePath, "utf-8") !== content) {
				fs.writeFileSync(filePath, content, "utf-8");
				console.log(`[flue] Generated: ${filePath}`);
				anyChanged = true;
			}
		}
	}
	console.log(`[flue] Build complete. Output: ${distDir}`);
	return { changed: anyChanged };
}
1003
/**
 * Resolve the build plugin for a build invocation. An explicitly supplied
 * `options.plugin` always wins; otherwise the `target` name is mapped to a
 * built-in plugin.
 * @throws when no target is given, or the target name is unknown
 */
function resolvePlugin(options) {
	if (options.plugin) return options.plugin;
	const { target } = options;
	if (!target) throw new Error("[flue] No build target specified. Use --target to choose a target:\n flue build --target node\n flue build --target cloudflare");
	if (target === "node") return new NodePlugin();
	if (target === "cloudflare") return new CloudflarePlugin();
	throw new Error(`[flue] Unknown target: "${target}". Supported targets: node, cloudflare`);
}
1012
+ /**
1013
+ * Resolve a Flue workspace directory from the current working directory,
1014
+ * using the two-layout convention. Intended for the CLI when `--workspace` is
1015
+ * not provided — callers that pass an explicit workspace path should skip this
1016
+ * and pass the path straight to `build()`.
1017
+ *
1018
+ * Two supported layouts, checked in order:
1019
+ * 1. `<cwd>/.flue/` — use this when Flue is embedded in an existing project.
1020
+ * 2. `<cwd>/` — use this when the project itself is the Flue workspace.
1021
+ *
1022
+ * If `.flue/` exists, it wins unconditionally — no mixing with the bare layout.
1023
+ * Returns null if neither is present so the caller can produce a helpful error.
1024
+ */
1025
function resolveWorkspaceFromCwd(cwd) {
	// Layout 1: embedded workspace — `<cwd>/.flue` wins unconditionally.
	const embedded = path.join(cwd, ".flue");
	if (fs.existsSync(embedded)) return embedded;
	// Layout 2: the project itself is the workspace (has an agents/ dir).
	const hasAgents = fs.existsSync(path.join(cwd, "agents"));
	return hasAgents ? cwd : null;
}
1031
/**
 * Load role definitions from `<workspaceRoot>/roles/*.md` (or `.markdown`).
 * Each file's frontmatter supplies `description`/`model`; the body becomes
 * the role's instructions. Returns an empty map when roles/ is absent.
 */
function discoverRoles(workspaceRoot) {
	const rolesDir = path.join(workspaceRoot, "roles");
	if (!fs.existsSync(rolesDir)) return {};
	const markdownExt = /\.(md|markdown)$/i;
	const roles = {};
	for (const entry of fs.readdirSync(rolesDir)) {
		if (!markdownExt.test(entry)) continue;
		const filePath = path.join(rolesDir, entry);
		const raw = fs.readFileSync(filePath, "utf-8");
		const name = entry.replace(markdownExt, "");
		const parsed = parseFrontmatterFile(raw, name);
		roles[name] = {
			name,
			description: parsed.description,
			instructions: parsed.body,
			model: parsed.frontmatter.model
		};
	}
	return roles;
}
1050
/**
 * Enumerate agent source files in `<workspaceRoot>/agents/` and parse each
 * file's trigger config. Returns [] when the directory does not exist.
 */
function discoverAgents(workspaceRoot) {
	const agentsDir = path.join(workspaceRoot, "agents");
	if (!fs.existsSync(agentsDir)) return [];
	const scriptExt = /\.(ts|js|mts|mjs)$/;
	const agents = [];
	for (const file of fs.readdirSync(agentsDir)) {
		if (!scriptExt.test(file)) continue;
		const filePath = path.join(agentsDir, file);
		const triggers = parseTriggers(filePath);
		agents.push({
			name: file.replace(scriptExt, ""),
			filePath,
			triggers
		});
	}
	return agents;
}
1063
/** Extract trigger config via regex. Only triggers are parsed at build time (needed for routing). */
function parseTriggers(filePath) {
	const source = fs.readFileSync(filePath, "utf-8");
	const triggers = {};
	// Match the flat body of `export const triggers = { ... }` (no nesting).
	const exported = source.match(/export\s+const\s+triggers\s*=\s*\{([^}]*)\}/);
	if (!exported) return triggers;
	const body = exported[1] ?? "";
	if (/webhook\s*:\s*true/.test(body)) triggers.webhook = true;
	const cron = body.match(/cron\s*:\s*['"]([^'"]+)['"]/)?.[1];
	if (cron) triggers.cron = cron;
	return triggers;
}
1075
/** Externalize user's direct deps (bare name + subpath wildcard). */
function getUserExternals(workspaceDir) {
	const pkgPath = packageUpSync({ cwd: workspaceDir });
	if (!pkgPath) return [];
	try {
		const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
		const depNames = Object.keys({
			...pkg.dependencies,
			...pkg.devDependencies,
			...pkg.peerDependencies
		});
		const externals = [];
		for (const name of depNames) externals.push(name, `${name}/*`);
		return externals;
	} catch {
		// Unreadable/invalid package.json — externalize nothing.
		return [];
	}
}
1090
/**
 * Gather every `node_modules` directory on the ancestor chains of both the
 * user's workspace and the SDK's own install location. Fed to esbuild's
 * `nodePaths` so bundling resolves packages from either tree.
 */
function collectNodePaths(workspaceDir) {
	const nodePaths = new Set();
	for (const startDir of [workspaceDir, getSDKDir()]) {
		// Walk up until the filesystem root (where dirname(dir) === dir).
		for (let dir = startDir; dir !== path.dirname(dir); dir = path.dirname(dir)) {
			const candidate = path.join(dir, "node_modules");
			if (fs.existsSync(candidate)) nodePaths.add(candidate);
		}
	}
	return nodePaths;
}
1102
/**
 * Directory containing the SDK's own module file, used as a secondary root
 * when collecting node_modules resolution paths.
 *
 * `URL.pathname` is percent-encoded (a space becomes "%20") and, on Windows,
 * keeps a leading slash before the drive letter ("/C:/..."), so both are
 * normalized before handing the result to `path.dirname`. Falls back to
 * `__dirname` for CJS bundles where `import.meta.url` is unavailable.
 */
function getSDKDir() {
	try {
		// Decode %xx escapes so install paths containing spaces etc. resolve.
		let pathname = decodeURIComponent(new URL(import.meta.url).pathname);
		// Windows file URLs look like "/C:/..." — strip the spurious slash.
		if (process.platform === "win32") pathname = pathname.replace(/^\/(?=[A-Za-z]:)/, "");
		return path.dirname(pathname);
	} catch {
		return __dirname;
	}
}
1109
+
1110
+ //#endregion
1111
+ //#region src/dev.ts
1112
+ /**
1113
+ * Flue dev server.
1114
+ *
1115
+ * Watches the user's workspace, rebuilds on file changes, and reloads the
1116
+ * underlying server. Distinct from `flue run`: dev is the long-running,
1117
+ * edit-and-iterate command, while `flue run` is the one-shot
1118
+ * production-style invoker (build → run → exit).
1119
+ *
1120
+ * # Two very different reload models
1121
+ *
1122
+ * Node and Cloudflare use fundamentally different rebuild strategies, because
1123
+ * what they each provide downstream is fundamentally different:
1124
+ *
1125
+ * - **Node** has no host bundler. Our esbuild pass produces the final
1126
+ * `dist/server.mjs`. On any change in the workspace we rebuild and respawn
1127
+ * the child Node process. Sub-second restart is fine.
1128
+ *
1129
+ * - **Cloudflare** uses Wrangler's bundler (the same one `wrangler dev` and
1130
+ * `wrangler deploy` use). Wrangler watches the entry's transitive import
1131
+ * graph itself and reloads workerd on source edits. So we *don't* need to
1132
+ * rebuild for body edits — wrangler handles it. We only need to act when:
1133
+ * 1. The set of agents changes (added / removed / triggers changed) →
1134
+ * regenerate `dist/_entry.ts`. Wrangler picks up the new entry
1135
+ * automatically because it's already watching it.
1136
+ * 2. The user's `wrangler.jsonc` changes → re-merge our additions and
1137
+ * restart the worker (config changes don't hot-apply).
1138
+ * Pure body edits to agent files: wrangler reloads workerd; we do nothing.
1139
+ *
1140
+ * # Watching
1141
+ *
1142
+ * Watching uses `node:fs.watch` recursive (Node 20+). Debounced 150ms. The
1143
+ * Node path treats every non-ignored change as a rebuild trigger; the
1144
+ * Cloudflare path filters to "structural" changes only.
1145
+ */
1146
/**
 * Default port for `flue dev`. F=3, L=5, U=8, E=3 on a phone keypad.
 * Callers may override it per-invocation via `options.port` in `dev()`.
 */
const DEFAULT_DEV_PORT = 3583;
1148
+ /**
1149
+ * Start a Flue dev server. Resolves only when the server is shut down (e.g.
1150
+ * via SIGINT). Errors during the initial build/start are thrown synchronously;
1151
+ * errors during subsequent rebuilds are logged but do NOT exit the dev server
1152
+ * — the user is editing code, after all, and we want to recover when they fix it.
1153
+ */
1154
async function dev(options) {
	const workspaceDir = path.resolve(options.workspaceDir);
	const outputDir = path.resolve(options.outputDir);
	const port = options.port ?? DEFAULT_DEV_PORT;
	const buildOptions = {
		workspaceDir,
		outputDir,
		target: options.target
	};
	// All dev-server chatter goes to stderr so stdout stays clean.
	console.error(`[flue] Starting dev server (target: ${options.target})`);
	console.error(`[flue] Watching: ${workspaceDir}`);
	console.error(`[flue] Building...`);
	const initialStart = Date.now();
	// The initial build is fatal on failure (see function contract above);
	// later rebuilds are handled leniently by the rebuilder.
	try {
		await build(buildOptions);
	} catch (err) {
		throw new Error(`[flue] Initial build failed: ${err instanceof Error ? err.message : String(err)}`);
	}
	console.error(`[flue] Built in ${Date.now() - initialStart}ms`);
	// Pick the reload strategy per target (see the module comment for why
	// Node and Cloudflare differ). The Cloudflare reloader lazy-imports wrangler.
	const reloader = options.target === "node" ? new NodeReloader({
		outputDir,
		port
	}) : await createCloudflareReloader({
		outputDir,
		port
	});
	await reloader.start();
	if (reloader.url) {
		console.error(`[flue] Server: ${reloader.url}`);
		// Print a copy-pasteable curl example using a discovered agent name.
		const exampleAgent = pickExampleAgentName(outputDir, workspaceDir);
		if (exampleAgent) {
			console.error(`[flue] Try: curl -X POST ${reloader.url}/agents/${exampleAgent}/test-1 \\`);
			console.error(` -H 'Content-Type: application/json' -d '{}'`);
		}
	}
	console.error(`[flue] Press Ctrl+C to stop\n`);
	// Wire filesystem changes → debounced rebuild → reload.
	const rebuilder = createRebuilder(buildOptions, reloader);
	const watcher = createWatcher({
		workspaceDir,
		outputDir,
		target: options.target,
		onChange: (relPath) => {
			// The reloader decides which paths are worth a rebuild (Node: all;
			// Cloudflare: structural changes only).
			if (!reloader.shouldRebuildOn(relPath)) return;
			console.error(`[flue] Change detected: ${relPath}`);
			rebuilder.schedule();
		}
	});
	// Graceful shutdown: guard against double-entry (both signals can fire),
	// close the watcher first so no rebuilds start mid-teardown.
	let shuttingDown = false;
	const shutdown = async (signal, exitCode) => {
		if (shuttingDown) return;
		shuttingDown = true;
		console.error(`\n[flue] Received ${signal}, shutting down...`);
		watcher.close();
		try {
			await reloader.stop();
		} catch (err) {
			console.error(`[flue] Error during shutdown: ${err instanceof Error ? err.message : String(err)}`);
		}
		console.error(`[flue] Stopped.`);
		process.exit(exitCode);
	};
	// Conventional exit codes: 128 + signal number (SIGINT=2, SIGTERM=15).
	process.on("SIGINT", () => void shutdown("SIGINT", 130));
	process.on("SIGTERM", () => void shutdown("SIGTERM", 143));
	// Last-resort synchronous cleanup if the process exits by any other path.
	process.on("exit", () => {
		try {
			reloader.killSync?.();
		} catch {}
	});
	// Park forever — shutdown() terminates the process via process.exit.
	await new Promise(() => {});
}
1224
/**
 * Serialize rebuilds: debounce change notifications by 150ms, run at most
 * one build at a time, and coalesce changes that arrive mid-build into a
 * single follow-up run. Rebuild failures are logged, never thrown.
 */
function createRebuilder(buildOptions, reloader) {
	let building = false;
	let pendingRun = false;
	let debounce = null;

	const rebuild = async () => {
		building = true;
		const startedAt = Date.now();
		console.error(`[flue] Rebuilding...`);
		try {
			const { changed } = await build(buildOptions);
			await reloader.reload(changed);
			console.error(`[flue] Reloaded in ${Date.now() - startedAt}ms\n`);
		} catch (err) {
			console.error(`[flue] Rebuild failed: ${err instanceof Error ? err.message : String(err)}\n`);
		} finally {
			building = false;
			// A change arrived while building — run once more to pick it up.
			if (pendingRun) {
				pendingRun = false;
				rebuild();
			}
		}
	};

	return {
		schedule() {
			if (debounce) clearTimeout(debounce);
			debounce = setTimeout(() => {
				debounce = null;
				if (building) pendingRun = true;
				else rebuild();
			}, 150);
		}
	};
}
1255
+ /**
1256
+ * Watch the workspace for changes. Uses `fs.watch` recursive (Node 20+).
1257
+ *
1258
+ * Watched roots:
1259
+ * - `<workspaceDir>` — agents/, roles/, AGENTS.md, .agents/skills/.
1260
+ * - For Cloudflare: also `<outputDir>/wrangler.jsonc` (and `.json`),
1261
+ * since changes there require a worker restart.
1262
+ *
1263
+ * Ignored:
1264
+ * - `dist/`, `node_modules/`, `.git/`, `.turbo/`
1265
+ * - dotfiles other than the ones we explicitly care about (AGENTS.md is
1266
+ * not a dotfile, so it's fine)
1267
+ * - editor backup/swap suffixes
1268
+ */
1269
function createWatcher(options) {
	const { workspaceDir, outputDir, target, onChange } = options;
	const active = [];

	// Directory names that never trigger a rebuild, wherever they appear.
	const skippedDirs = new Set([
		"node_modules",
		"dist",
		".git",
		".turbo"
	]);

	const shouldIgnore = (relPath) => {
		const segments = relPath.replace(/\\/g, "/").split("/");
		if (segments.some((segment) => skippedDirs.has(segment))) return true;
		const base = segments[segments.length - 1] ?? "";
		if (!base) return true;
		// Dotfiles are ignored except .flueignore; so are editor droppings.
		if (base.startsWith(".") && base !== ".flueignore") return true;
		if (base.endsWith("~") || base.endsWith(".swp") || base.endsWith(".swx")) return true;
		if (base === ".DS_Store") return true;
		return false;
	};

	try {
		const workspaceWatcher = fs.watch(workspaceDir, { recursive: true }, (_event, filename) => {
			if (!filename) return;
			const rel = filename.toString();
			if (!shouldIgnore(rel)) onChange(rel);
		});
		active.push(workspaceWatcher);
	} catch (err) {
		console.error(`[flue] Failed to watch ${workspaceDir}: ${err instanceof Error ? err.message : String(err)}`);
	}

	// Cloudflare: also watch the user's wrangler config in outputDir, since
	// config changes require a worker restart.
	if (target === "cloudflare") {
		for (const cfgName of [
			"wrangler.jsonc",
			"wrangler.json",
			"wrangler.toml"
		]) {
			const cfgPath = path.join(outputDir, cfgName);
			if (!fs.existsSync(cfgPath)) continue;
			try {
				active.push(fs.watch(cfgPath, () => onChange(cfgName)));
			} catch {}
		}
	}

	return {
		close() {
			for (const watcher of active) {
				try {
					watcher.close();
				} catch {}
			}
		}
	};
}
1316
/**
 * Dev-server reloader for the Node target: runs `dist/server.mjs` in a child
 * Node process and restarts it (kill + respawn) on every rebuild.
 */
var NodeReloader = class {
	// Currently running child process, or null when none is alive/owned.
	child = null;
	// Absolute path to the built server bundle (`<outputDir>/dist/server.mjs`).
	serverPath;
	outputDir;
	port;
	// Base URL the child serves on; also used for health polling.
	url;
	constructor(opts) {
		this.outputDir = opts.outputDir;
		this.port = opts.port;
		this.serverPath = path.join(this.outputDir, "dist", "server.mjs");
		this.url = `http://localhost:${this.port}`;
	}
	async start() {
		await this.spawnAndWait();
	}
	// Node has no downstream bundler watching sources, so every change
	// requires a Flue-side rebuild.
	shouldRebuildOn(_relPath) {
		return true;
	}
	// Full restart on reload: the bundle on disk was rewritten by build().
	async reload(_buildChanged) {
		await this.killChild();
		await this.spawnAndWait();
	}
	async stop() {
		await this.killChild();
	}
	// Synchronous last-resort kill for the process 'exit' handler, where
	// awaiting is impossible.
	killSync() {
		const child = this.child;
		if (!child || child.killed) return;
		try {
			child.kill("SIGKILL");
		} catch {}
	}
	// Spawn the server child, forward its output (minus Flue's own startup
	// banner lines, which dev() already prints in its own form), and block
	// until /health responds or a 15s deadline passes.
	async spawnAndWait() {
		const child = spawn("node", [this.serverPath], {
			stdio: [
				"ignore",
				"pipe",
				"pipe"
			],
			cwd: this.outputDir,
			env: {
				...process.env,
				PORT: String(this.port),
				FLUE_MODE: "local"
			}
		});
		this.child = child;
		// Line-buffered passthrough of child output to stderr, filtering the
		// child's duplicate startup banner.
		const pipe = (data) => {
			const text = data.toString().trimEnd();
			for (const line of text.split("\n")) {
				if (!line.trim()) continue;
				if (line.includes("[flue] Server listening") || line.includes("[flue] Available agents:") || line.includes("[flue] Mode: local")) continue;
				console.error(line);
			}
		};
		child.stdout?.on("data", pipe);
		child.stderr?.on("data", pipe);
		child.on("exit", (code, signal) => {
			// Only report/clear if this child is still the one we own — a
			// deliberate kill during reload swaps this.child first.
			if (this.child === child) {
				this.child = null;
				if (code !== 0 && code !== null) console.error(`[flue] Node server exited unexpectedly (code=${code}, signal=${signal ?? "none"})`);
			}
		});
		if (!await waitForHealth(this.url, 15e3)) {
			await this.killChild();
			throw new Error("Node server did not become ready within 15s");
		}
	}
	// Graceful stop: SIGTERM first, escalate to SIGKILL after 1s. Resolves
	// once the child exits (or immediately if kill() itself fails).
	async killChild() {
		const child = this.child;
		if (!child || child.killed) {
			this.child = null;
			return;
		}
		// Disown before killing so the 'exit' handler above stays quiet.
		this.child = null;
		await new Promise((resolve) => {
			let resolved = false;
			const done = () => {
				if (!resolved) {
					resolved = true;
					resolve();
				}
			};
			child.once("exit", done);
			try {
				child.kill("SIGTERM");
			} catch {
				done();
				return;
			}
			// Escalation: force-kill if SIGTERM didn't land within 1s.
			setTimeout(() => {
				try {
					if (!child.killed) child.kill("SIGKILL");
				} catch {}
				done();
			}, 1e3);
		});
	}
};
1415
+ /**
1416
+ * Lazy-import wrangler so users targeting only Node don't need it installed.
1417
+ * If the import fails, surface a friendly message pointing at the peer-dep.
1418
+ */
1419
async function createCloudflareReloader(opts) {
	let wrangler;
	try {
		wrangler = await import("wrangler");
	} catch (err) {
		const detail = err instanceof Error ? err.message : String(err);
		throw new Error(`[flue] Cloudflare dev requires the "wrangler" package as a peer dependency.
Install it in your project:

  npm install --save-dev wrangler

Underlying error: ${detail}`);
	}
	return new CloudflareReloader(wrangler, opts);
}
1433
/**
 * Dev-server reloader for the Cloudflare target: runs the merged config
 * through wrangler's `unstable_startWorker` and restarts workerd only on
 * structural changes (wrangler itself hot-reloads body edits).
 */
var CloudflareReloader = class {
	// Handle returned by unstable_startWorker, or null when not running.
	worker = null;
	// The lazily-imported wrangler module (see createCloudflareReloader).
	wrangler;
	outputDir;
	port;
	// Path to the merged config the build step writes (dist/wrangler.jsonc).
	configPath;
	url;
	constructor(wrangler, opts) {
		this.wrangler = wrangler;
		this.outputDir = opts.outputDir;
		this.port = opts.port;
		this.configPath = path.join(this.outputDir, "dist", "wrangler.jsonc");
	}
	async start() {
		await this.startWorker();
	}
	/**
	 * On Cloudflare, wrangler watches the entry's transitive imports itself
	 * and hot-reloads workerd when an agent file body changes. We only need
	 * to act when something *structural* changes — i.e. something that
	 * affects what `_entry.ts` or `wrangler.jsonc` look like.
	 *
	 * Concretely, we trigger a Flue-side rebuild for:
	 * - File adds/removes in `agents/` (the agent set determines DO classes
	 *   and binding declarations).
	 * - Changes to `agents/*.ts` — these MAY change the exported `triggers`,
	 *   so we have to re-parse them. (Plain body edits redo a tiny amount
	 *   of work but the rebuild is cheap and idempotent.)
	 * - Changes to `roles/*.md` — roles are baked into the entry as JSON.
	 * - Changes to the user's `wrangler.jsonc` — affects the merged config.
	 *
	 * Notes we explicitly DO ignore for rebuild purposes (wrangler handles
	 * them): edits to imported source files outside of `agents/`/`roles/`,
	 * AGENTS.md, and `.agents/skills/` (those are runtime-discovered, not
	 * baked into the entry).
	 */
	shouldRebuildOn(relPath) {
		// Normalize Windows separators so prefix checks work cross-platform.
		const normalized = relPath.replace(/\\/g, "/");
		if (normalized === "wrangler.jsonc" || normalized === "wrangler.json" || normalized === "wrangler.toml") return true;
		if (normalized.startsWith("agents/")) return true;
		if (normalized.startsWith("roles/")) return true;
		return false;
	}
	async reload(buildChanged) {
		// Build produced no artifact changes → wrangler's own watcher covers it.
		if (!buildChanged) {
			console.error(`[flue] No structural change — wrangler will hot-reload\n`);
			return;
		}
		await this.disposeWorker();
		await this.startWorker();
	}
	async stop() {
		await this.disposeWorker();
	}
	// No synchronous kill for workerd; dropping the handle is the best we can
	// do from a process 'exit' handler.
	killSync() {
		this.worker = null;
	}
	async startWorker() {
		if (!fs.existsSync(this.configPath)) throw new Error(`[flue] Expected ${this.configPath} after build, but it doesn't exist. Did the Cloudflare build succeed?`);
		this.worker = await this.wrangler.unstable_startWorker({
			config: this.configPath,
			build: { nodejsCompatMode: "v2" },
			dev: {
				server: {
					hostname: "localhost",
					port: this.port
				},
				// We restart the worker ourselves on structural changes;
				// wrangler still watches the entry's import graph.
				watch: false,
				logLevel: "info"
			}
		});
		// Prefer wrangler's reported URL (trailing slash stripped); fall back
		// to the requested host/port if the handle can't supply one.
		try {
			this.url = (await this.worker.url).toString().replace(/\/$/, "");
		} catch {
			this.url = `http://127.0.0.1:${this.port}`;
		}
	}
	// Swap the handle out before disposing so concurrent calls are no-ops;
	// disposal failures are logged, not rethrown.
	async disposeWorker() {
		const worker = this.worker;
		this.worker = null;
		if (!worker) return;
		try {
			await worker.dispose();
		} catch (err) {
			console.error(`[flue] Error disposing Cloudflare worker: ${err instanceof Error ? err.message : String(err)}`);
		}
	}
};
1521
/**
 * Poll `GET <baseUrl>/health` until a 2xx response arrives or `timeoutMs`
 * elapses. Each probe is aborted after 1s; probes are spaced 200ms apart.
 * @returns true once a healthy response is seen, false on timeout
 */
async function waitForHealth(baseUrl, timeoutMs) {
	const deadline = Date.now() + timeoutMs;
	while (Date.now() < deadline) {
		try {
			const controller = new AbortController();
			const abortTimer = setTimeout(() => controller.abort(), 1e3);
			const res = await fetch(`${baseUrl}/health`, { signal: controller.signal });
			clearTimeout(abortTimer);
			if (res.ok) return true;
		} catch {
			// Connection refused / aborted — server not up yet; keep polling.
		}
		await new Promise((r) => setTimeout(r, 200));
	}
	return false;
}
1535
+ /**
1536
+ * Pick a webhook agent name to print in the friendly curl example. Falls back
1537
+ * to any agent if none have webhook triggers (the example would 404 on the
1538
+ * dev server in that case, but it's still a hint at the URL shape). Reads the
1539
+ * manifest written by the build, with a directory-scan fallback in case the
1540
+ * manifest is somehow missing.
1541
+ *
1542
+ * Best-effort — silently returns null if anything goes wrong.
1543
+ */
1544
function pickExampleAgentName(outputDir, workspaceDir) {
	// Preferred source: the manifest the build just wrote. A webhook agent
	// makes the curl example actually work; any agent beats nothing.
	try {
		const manifestPath = path.join(outputDir, "dist", "manifest.json");
		if (fs.existsSync(manifestPath)) {
			const agents = JSON.parse(fs.readFileSync(manifestPath, "utf-8")).agents ?? [];
			const webhook = agents.find((a) => a.triggers?.webhook);
			if (webhook) return webhook.name;
			if (agents[0]) return agents[0].name;
		}
	} catch {}
	// Fallback: scan agents/ for the first plainly-named source file.
	try {
		const agentsDir = path.join(workspaceDir, "agents");
		if (!fs.existsSync(agentsDir)) return null;
		for (const entry of fs.readdirSync(agentsDir)) {
			const match = entry.match(/^([a-zA-Z0-9_-]+)\.(ts|js|mts|mjs)$/);
			if (match?.[1]) return match[1];
		}
		return null;
	} catch {
		return null;
	}
}
1566
+
1567
+ //#endregion
1568
+ export { BUILTIN_TOOL_NAMES, DEFAULT_DEV_PORT, build, createTools, dev, resolveWorkspaceFromCwd };