@toist/aja 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +69 -0
- package/migrations/001_initial.sql +111 -0
- package/package.json +27 -0
- package/src/cache-db.ts +17 -0
- package/src/cache.ts +67 -0
- package/src/config.ts +129 -0
- package/src/data-db.ts +21 -0
- package/src/db-handles.ts +70 -0
- package/src/hitl.ts +257 -0
- package/src/index.ts +34 -0
- package/src/instance.ts +64 -0
- package/src/kinds/control.ts +26 -0
- package/src/kinds/custom.ts +19 -0
- package/src/kinds/data.ts +30 -0
- package/src/kinds/db.ts +92 -0
- package/src/kinds/hitl.ts +56 -0
- package/src/kinds/http.ts +134 -0
- package/src/kinds/index.ts +66 -0
- package/src/kinds/runs.ts +130 -0
- package/src/kinds/transform.ts +123 -0
- package/src/kinds/types.ts +16 -0
- package/src/lock.ts +64 -0
- package/src/migrate.ts +204 -0
- package/src/pipeline.ts +601 -0
- package/src/resources.ts +350 -0
- package/src/runs.ts +53 -0
- package/src/runtime-db.ts +48 -0
- package/src/server.ts +537 -0
- package/src/startRunner.ts +87 -0
package/src/server.ts
ADDED
|
@@ -0,0 +1,537 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
import { Hono } from "hono"
|
|
3
|
+
import { cors } from "hono/cors"
|
|
4
|
+
import { existsSync, readFileSync, writeFileSync } from "node:fs"
|
|
5
|
+
import { join } from "node:path"
|
|
6
|
+
import { runtimeDb, dataDb, cacheDb, initDbs } from "./db-handles.ts"
|
|
7
|
+
import { makeCache } from "./cache.ts"
|
|
8
|
+
import { loadAll, watchAll, getPipelines, getPipeline, runPipeline, validateSpec, publicSpec, triggerSubRun, makeLogger, type RunOutcome } from "./pipeline.ts"
|
|
9
|
+
import { parseYaml, type PlatformCtx } from "@toist/spec"
|
|
10
|
+
import { manifest, getKind, type ExecContext } from "./kinds/index.ts"
|
|
11
|
+
import { answerTask, getTask, loadNodeOutputs, persistNodeOutputs } from "./hitl.ts"
|
|
12
|
+
import { loadInstance } from "./instance.ts"
|
|
13
|
+
import {
|
|
14
|
+
buildResourceCtx, listResourceTypes, listResources, getResource,
|
|
15
|
+
upsertResource, patchResource, deleteResource,
|
|
16
|
+
} from "./resources.ts"
|
|
17
|
+
import { makeRunStore } from "./runs.ts"
|
|
18
|
+
import { pipelinesDir, isUiDisabled, isWatchDisabled, getCorsOrigins } from "./config.ts"
|
|
19
|
+
|
|
20
|
+
// Pipeline ids double as filenames (`<id>.yaml`), so the charset is locked
// down: lowercase alphanumerics and hyphens, starting with an alphanumeric.
// This also keeps path separators and dots out of the :id route param.
const VALID_PIPELINE_ID = /^[a-z0-9][a-z0-9-]*$/

// Legacy entry path: when bun runs this module directly (pm2 dev mode), do
// the lifecycle init here. When startRunner imports this module, init has
// already happened — initDbs() is idempotent so the guard below is belt-and-
// suspenders. The conditional avoids unnecessary work when import.meta.main
// is false (the startRunner-imported case).
if (import.meta.main) {
  await initDbs()
}

// apiApp holds every HTTP route. The outer `app` (declared at the bottom)
// mounts apiApp at /api and owns CORS + future static UI serving. Per
// instance-spec.md §9 + §12 Roadmap: routes live under /api/* so the UI
// can mount cleanly at / when packaging lands.
const apiApp = new Hono()

// Shared cache facade over the cache DB; pruned periodically in the
// lifecycle section at the bottom of this file.
const cache = makeCache(cacheDb())

// Load every pipeline spec once at startup; optionally keep watching the
// directory so edits hot-reload (watcher can be disabled via config).
const PIPELINES_DIR = pipelinesDir()
loadAll(PIPELINES_DIR)
if (!isWatchDisabled()) {
  watchAll(PIPELINES_DIR)
}

// ─── instance metadata ───────────────────────────────────────────────────────
// Summary endpoint: identity plus coarse counts of pipelines and kinds.
apiApp.get("/", (c) => {
  const inst = loadInstance()
  return c.json({
    name: inst.instanceName ?? process.env.PLATFORM_INSTANCE_NAME ?? "platform",
    version: "0.1.0",
    platformVersion: inst.platformVersion,
    tier: inst.tier,
    pipelines: getPipelines().length,
    kinds: manifest().length,
  })
})

// Per-instance metadata: tier, teasers, branding. Distinct from runtime
// state in /runs and pipeline state in /pipelines.
apiApp.get("/instance", (c) => c.json(loadInstance()))

// ─── kinds (capability) ──────────────────────────────────────────────────────
// Full capability manifest: every registered kind with its public metadata.
apiApp.get("/manifest", (c) => c.json({ kinds: manifest() }))
|
|
64
|
+
|
|
65
|
+
// Ad-hoc invocation of a single kind outside any pipeline run. Body:
// { params?, input?, confirm? }. Kinds flagged sideEffect require an
// explicit confirm=true so a stray request cannot mutate external state.
apiApp.post("/kinds/:id/invoke", async (c) => {
  const kind = getKind(c.req.param("id"))
  if (!kind) return c.json({ error: "kind not found" }, 404)

  // Absent/malformed JSON degrades to {} — every body field is optional.
  const body = await c.req.json().catch(() => ({}))
  const { params = {}, input = {}, confirm = false } = body as {
    params?: Record<string, unknown>
    input?: Record<string, unknown>
    confirm?: boolean
  }

  if (kind.sideEffect && !confirm) {
    return c.json({
      error: "side_effect_requires_confirm",
      message: `Kind "${kind.id}" mutates external state. Pass { confirm: true } to invoke ad-hoc.`,
    }, 400)
  }

  // Synthetic execution context: no tracked run backs this invocation, so
  // the run-scoped facilities (subRun, suspend) are stubbed to throw.
  const invokeCtx: ExecContext = {
    runId: -1, // synthetic — invoke is not a tracked run
    db: dataDb(),
    cache,
    log: (level, msg) => console.log(`[invoke ${kind.id}] ${level}: ${msg}`),
    resource: buildResourceCtx(runtimeDb()),
    runs: makeRunStore(runtimeDb()),
    subRun: async () => {
      throw new Error("ctx.subRun is not available in ad-hoc kinds.invoke — call from inside a pipeline run instead.")
    },
    step: { nodeId: "<invoke>" },
    suspend: async () => {
      throw new Error("ctx.suspend is not available in ad-hoc kinds.invoke — call from inside a pipeline run instead.")
    },
  }

  // Wall-clock duration is reported in both success and error envelopes.
  const t0 = performance.now()
  try {
    const output = await kind.run(invokeCtx, params, input)
    return c.json({
      kind: kind.id,
      output,
      duration_ms: Math.round(performance.now() - t0),
    })
  } catch (err: unknown) {
    return c.json({
      kind: kind.id,
      error: err instanceof Error ? err.message : String(err),
      duration_ms: Math.round(performance.now() - t0),
    }, 500)
  }
})
|
|
115
|
+
|
|
116
|
+
// ─── resources ───────────────────────────────────────────────────────────────
// CRUD over named, typed resources stored in the runtime DB.

// Catalogue of registered resource types (no DB access needed).
apiApp.get("/resource-types", (c) => c.json(listResourceTypes()))

apiApp.get("/resources", (c) => c.json(listResources(runtimeDb())))

apiApp.get("/resources/:name", (c) => {
  const r = getResource(runtimeDb(), c.req.param("name"))
  if (!r) return c.json({ error: "not found" }, 404)
  return c.json(r)
})

// Create a resource (upsert semantics via upsertResource). 201 on success.
apiApp.post("/resources", async (c) => {
  const body = await c.req.json().catch(() => ({})) as {
    name?: string; type?: string; fields?: Record<string, unknown>
  }
  if (!body.name || typeof body.name !== "string") return c.json({ error: "name required" }, 400)
  if (!body.type || typeof body.type !== "string") return c.json({ error: "type required" }, 400)
  const fields = body.fields ?? {}
  const r = upsertResource(runtimeDb(), body.name, body.type, fields)
  return c.json(r, 201)
})

// PUT supports two body shapes: type+fields = full replacement (upsert —
// creates if absent), fields alone = patch of an existing resource (404 if
// absent). Anything else is a 400.
apiApp.put("/resources/:name", async (c) => {
  const name = c.req.param("name")
  const body = await c.req.json().catch(() => ({})) as {
    type?: string; fields?: Record<string, unknown>
  }
  if (body.type !== undefined && body.fields !== undefined) {
    // Full replacement
    const r = upsertResource(runtimeDb(), name, body.type, body.fields)
    return c.json(r)
  }
  if (body.fields !== undefined) {
    // Patch fields only
    const r = patchResource(runtimeDb(), name, body.fields)
    if (!r) return c.json({ error: "not found" }, 404)
    return c.json(r)
  }
  return c.json({ error: "provide type+fields for replacement or fields for patch" }, 400)
})

apiApp.delete("/resources/:name", (c) => {
  const ok = deleteResource(runtimeDb(), c.req.param("name"))
  if (!ok) return c.json({ error: "not found" }, 404)
  // Bodyless 204 via a raw Response — no JSON envelope for deletes.
  return new Response(null, { status: 204 })
})
|
|
163
|
+
|
|
164
|
+
// ─── pipelines ───────────────────────────────────────────────────────────────
// List view: summary fields only — full specs come from /pipelines/:id.
apiApp.get("/pipelines", (c) => {
  const list = getPipelines().map((p) => ({
    id: p.id,
    label: p.label ?? p.id,
    description: p.description,
    nodeCount: p.nodes.length,
  }))
  return c.json(list)
})

// Parsed spec, filtered through publicSpec before serialization.
apiApp.get("/pipelines/:id", (c) => {
  const spec = getPipeline(c.req.param("id"))
  if (!spec) return c.json({ error: "not found" }, 404)
  return c.json(publicSpec(spec))
})

// Raw YAML source for editing. Returns the file as written to disk (preserves
// comments and formatting), not the parsed spec.
apiApp.get("/pipelines/:id/source", (c) => {
  const id = c.req.param("id")
  // Id doubles as the filename — validate before touching the filesystem.
  if (!VALID_PIPELINE_ID.test(id)) return c.json({ error: "invalid id" }, 400)
  const path = join(PIPELINES_DIR, `${id}.yaml`)
  if (!existsSync(path)) return c.json({ error: "not found" }, 404)
  return c.json({ id, yaml: readFileSync(path, "utf8") })
})

// Create a new pipeline file. The id comes from the YAML's `id:` field — no
// separate body.id, since two sources of truth caused silent overwrites.
// Refuses to overwrite an existing id.
apiApp.post("/pipelines", async (c) => {
  const body = await c.req.json().catch(() => ({})) as { yaml?: string }
  if (!body.yaml) return c.json({ error: "yaml required" }, 400)

  let parsed: unknown
  try { parsed = parseYaml(body.yaml) }
  catch (err) { return c.json({ ok: false, errors: [`YAML parse: ${(err as Error).message}`] }, 400) }

  const id = (parsed as { id?: string })?.id
  if (!id) return c.json({ error: "yaml must contain a top-level `id:` field" }, 400)
  if (!VALID_PIPELINE_ID.test(id)) return c.json({ error: `invalid id "${id}" — lowercase alphanum + hyphens, must start with alphanum` }, 400)

  const path = join(PIPELINES_DIR, `${id}.yaml`)
  if (existsSync(path)) return c.json({ error: `pipeline "${id}" already exists` }, 409)

  // Validate before writing so an invalid spec never lands on disk.
  const result = validateSpec(parsed)
  if (!result.ok) return c.json({ ok: false, errors: result.errors }, 400)

  writeFileSync(path, body.yaml)
  // Sync reload — don't rely on the fs watcher, since the client may navigate
  // to the new pipeline immediately and would otherwise see a 404.
  loadAll(PIPELINES_DIR)
  return c.json({ id, ok: true }, 201)
})

// Update an existing pipeline file. Refuses to change the id (renames are a
// separate operation we don't yet support).
apiApp.put("/pipelines/:id", async (c) => {
  const id = c.req.param("id")
  if (!VALID_PIPELINE_ID.test(id)) return c.json({ error: "invalid id" }, 400)

  const path = join(PIPELINES_DIR, `${id}.yaml`)
  if (!existsSync(path)) return c.json({ error: "not found" }, 404)

  const body = await c.req.json().catch(() => ({})) as { yaml?: string }
  if (!body.yaml) return c.json({ error: "yaml required" }, 400)

  let parsed: unknown
  try { parsed = parseYaml(body.yaml) }
  catch (err) { return c.json({ ok: false, errors: [`YAML parse: ${(err as Error).message}`] }, 400) }

  // A YAML `id:` is optional on update, but if present it must match the URL.
  const yamlId = (parsed as { id?: string })?.id
  if (yamlId && yamlId !== id) {
    return c.json({ error: `id mismatch: url says "${id}", yaml says "${yamlId}". Renames are not supported via edit.` }, 400)
  }

  const result = validateSpec(parsed)
  if (!result.ok) return c.json({ ok: false, errors: result.errors }, 400)

  writeFileSync(path, body.yaml)
  // Same sync-reload rationale as the create route: avoid a watcher race.
  loadAll(PIPELINES_DIR)
  return c.json({ id, ok: true })
})

// Dry-run validation: accepts either raw YAML or an already-parsed spec
// object; reports errors/warnings/edges without touching disk.
apiApp.post("/pipelines/validate", async (c) => {
  const body = await c.req.json().catch(() => ({})) as { yaml?: string; spec?: unknown }
  let spec: unknown
  if (typeof body.yaml === "string") {
    try { spec = parseYaml(body.yaml) }
    catch (err) {
      return c.json({ ok: false, errors: [`YAML parse error: ${(err as Error).message}`] }, 400)
    }
  } else if (body.spec) {
    spec = body.spec
  } else {
    return c.json({ ok: false, errors: ["Provide either `yaml` (string) or `spec` (object)"] }, 400)
  }

  const result = validateSpec(spec)
  return c.json({
    ok: result.ok,
    errors: result.errors,
    warnings: result.warnings ?? [],
    edges: result.edges ?? [],
  }, result.ok ? 200 : 400)
})
|
|
270
|
+
|
|
271
|
+
/** Build the per-run baseCtx for runPipeline. Self-references via subRun
|
|
272
|
+
* so sub-runs threaded from this top-level run carry the right context;
|
|
273
|
+
* triggerSubRun rebinds the same closure to the sub-run's own runId. */
|
|
274
|
+
function buildBaseCtx(runId: number): PlatformCtx {
|
|
275
|
+
const baseCtx: PlatformCtx = {
|
|
276
|
+
runId,
|
|
277
|
+
db: dataDb(),
|
|
278
|
+
cache,
|
|
279
|
+
log: makeLogger(runId),
|
|
280
|
+
resource: buildResourceCtx(runtimeDb()),
|
|
281
|
+
runs: makeRunStore(runtimeDb()),
|
|
282
|
+
subRun: undefined as unknown as PlatformCtx["subRun"],
|
|
283
|
+
}
|
|
284
|
+
baseCtx.subRun = (pid, payload) => triggerSubRun(pid, payload, baseCtx)
|
|
285
|
+
return baseCtx
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
function persistOutcome(runId: number, outcome: RunOutcome): void {
|
|
289
|
+
if (outcome.status === "done") {
|
|
290
|
+
runtimeDb().prepare(
|
|
291
|
+
`UPDATE runs SET status='done', result=?, steps=?, finished_at=datetime('now'),
|
|
292
|
+
updated_at=datetime('now'), current_node=NULL WHERE id=?`,
|
|
293
|
+
).run(JSON.stringify(outcome.output), JSON.stringify(outcome.steps), runId)
|
|
294
|
+
} else {
|
|
295
|
+
persistNodeOutputs(runtimeDb(), runId, outcome.results)
|
|
296
|
+
runtimeDb().prepare(
|
|
297
|
+
`UPDATE runs SET status='suspended', steps=?, current_node=?, updated_at=datetime('now') WHERE id=?`,
|
|
298
|
+
).run(JSON.stringify(outcome.steps), outcome.suspendedAt.nodeId, runId)
|
|
299
|
+
}
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
function outcomeResponse(runId: number, pipelineId: string, outcome: RunOutcome) {
|
|
303
|
+
if (outcome.status === "done") {
|
|
304
|
+
return { id: runId, pipeline: pipelineId, status: "done", result: outcome.output, steps: outcome.steps }
|
|
305
|
+
}
|
|
306
|
+
const task = getTask(runtimeDb(), outcome.suspendedAt.taskId)!
|
|
307
|
+
return {
|
|
308
|
+
id: runId, pipeline: pipelineId, status: "suspended",
|
|
309
|
+
suspendedAt: outcome.suspendedAt.nodeId,
|
|
310
|
+
task: {
|
|
311
|
+
id: task.id,
|
|
312
|
+
kind: task.kind,
|
|
313
|
+
prompt: task.prompt,
|
|
314
|
+
schema: task.schema,
|
|
315
|
+
assignee: task.assignee,
|
|
316
|
+
responseToken: task.responseToken,
|
|
317
|
+
},
|
|
318
|
+
steps: outcome.steps,
|
|
319
|
+
}
|
|
320
|
+
}
|
|
321
|
+
|
|
322
|
+
// Trigger a pipeline run. The run row is inserted as 'running' BEFORE
// execution so the ledger always has a record, even if runPipeline throws;
// the catch below flips it to 'error'.
apiApp.post("/pipelines/:id/run", async (c) => {
  const id = c.req.param("id")
  const spec = getPipeline(id)
  if (!spec) return c.json({ error: "not found" }, 404)

  // Absent/malformed JSON body degrades to an empty payload.
  const payload = await c.req.json().catch(() => ({}))
  const run = runtimeDb().prepare(
    "INSERT INTO runs (pipeline, status, payload, trigger, updated_at) VALUES (?, 'running', ?, 'api', datetime('now')) RETURNING id",
  ).get(id, JSON.stringify(payload)) as { id: number }

  try {
    // NOTE: the run executes synchronously within the request — a long
    // pipeline holds the HTTP connection open until it finishes or suspends.
    const outcome = await runPipeline(spec, payload as Record<string, unknown>, buildBaseCtx(run.id))
    persistOutcome(run.id, outcome)
    return c.json(outcomeResponse(run.id, id, outcome))
  } catch (err: unknown) {
    const message = err instanceof Error ? err.message : String(err)
    runtimeDb().prepare(
      "UPDATE runs SET status='error', error=?, finished_at=datetime('now'), updated_at=datetime('now') WHERE id=?",
    ).run(message, run.id)
    return c.json({ id: run.id, pipeline: id, status: "error", error: message }, 500)
  }
})
|
|
344
|
+
|
|
345
|
+
// ─── HITL — tasks queue + resume ─────────────────────────────────────────────
|
|
346
|
+
|
|
347
|
+
apiApp.get("/tasks", (c) => {
|
|
348
|
+
const status = c.req.query("status") ?? "open"
|
|
349
|
+
const assignee = c.req.query("assignee")
|
|
350
|
+
const runId = c.req.query("run_id")
|
|
351
|
+
const limit = Number(c.req.query("limit") ?? 200)
|
|
352
|
+
|
|
353
|
+
const where: string[] = []
|
|
354
|
+
const args: unknown[] = []
|
|
355
|
+
if (status !== "all") { where.push("t.status = ?"); args.push(status) }
|
|
356
|
+
if (assignee) { where.push("t.assignee = ?"); args.push(assignee) }
|
|
357
|
+
if (runId) { where.push("t.run_id = ?"); args.push(Number(runId)) }
|
|
358
|
+
|
|
359
|
+
const whereSql = where.length ? `WHERE ${where.join(" AND ")}` : ""
|
|
360
|
+
args.push(limit)
|
|
361
|
+
const sql =
|
|
362
|
+
`SELECT t.id, t.run_id, t.node_id, t.kind, t.prompt, t.assignee, t.status,
|
|
363
|
+
t.created_at, t.responded_at, r.pipeline AS pipeline
|
|
364
|
+
FROM tasks t LEFT JOIN runs r ON t.run_id = r.id
|
|
365
|
+
${whereSql}
|
|
366
|
+
ORDER BY t.id DESC LIMIT ?`
|
|
367
|
+
return c.json(runtimeDb().prepare(sql).all(...args))
|
|
368
|
+
})
|
|
369
|
+
|
|
370
|
+
apiApp.get("/tasks/:id", (c) => {
|
|
371
|
+
const task = getTask(runtimeDb(), Number(c.req.param("id")))
|
|
372
|
+
if (!task) return c.json({ error: "not found" }, 404)
|
|
373
|
+
return c.json(task)
|
|
374
|
+
})
|
|
375
|
+
|
|
376
|
+
// Respond to a HITL task and synchronously resume its run. The caller must
// present the task's responseToken (validated inside answerTask). After the
// answer is recorded, the run is re-entered through runPipeline with the
// checkpointed node outputs so already-finished nodes are skipped.
apiApp.post("/runs/:runId/tasks/:taskId/respond", async (c) => {
  const runId = Number(c.req.param("runId"))
  const taskId = Number(c.req.param("taskId"))
  const body = await c.req.json().catch(() => ({})) as {
    token?: string
    response?: unknown
    respondedBy?: string
  }
  if (!body.token) return c.json({ error: "missing token" }, 400)

  // Read the task BEFORE answering so we know its kind (drives resume semantics).
  const taskBefore = getTask(runtimeDb(), taskId)
  if (!taskBefore) return c.json({ error: "task not found" }, 404)

  // answerTask records the response; any failure it throws (e.g. bad token)
  // surfaces as a 400 with the thrown message.
  try {
    answerTask(runtimeDb(), taskId, body.token, body.response, body.respondedBy ?? null)
  } catch (err) {
    return c.json({ error: err instanceof Error ? err.message : String(err) }, 400)
  }

  const runRow = runtimeDb().prepare("SELECT id, pipeline, payload FROM runs WHERE id = ?").get(runId) as
    { id: number; pipeline: string; payload: string | null } | undefined
  if (!runRow) return c.json({ error: "run not found" }, 404)

  // 500 (not 404): the run exists but its pipeline is no longer loaded —
  // a server-side consistency problem, not a bad request.
  const spec = getPipeline(runRow.pipeline)
  if (!spec) return c.json({ error: `pipeline "${runRow.pipeline}" no longer loaded` }, 500)

  const payload = runRow.payload ? JSON.parse(runRow.payload) : {}

  // error_review tasks have a different resume contract: the response carries
  // an `action` (retry | skip | abort), not free-form user input. Translate
  // before re-entering the dispatcher.
  // NOTE(review): an invalid action is rejected only AFTER answerTask has
  // already consumed the task above — confirm that one-shot behavior is
  // intended, since the 400 path leaves the task answered.
  if (taskBefore.kind === "error_review") {
    const action = (body.response as { action?: string } | null)?.action
    const value = (body.response as { value?: unknown } | null)?.value

    if (action === "abort") {
      runtimeDb().prepare(
        "UPDATE runs SET status='error', error=?, finished_at=datetime('now'), updated_at=datetime('now') WHERE id=?",
      ).run(`aborted via error_review task ${taskId}`, runId)
      return c.json({ id: runId, pipeline: runRow.pipeline, status: "error", error: "aborted by reviewer" })
    }

    if (action === "skip") {
      // Insert the supplied value as the failed node's output. The dispatcher
      // resume path will treat the node as already-done.
      persistNodeOutputs(runtimeDb(), runId, { [taskBefore.nodeId]: value ?? null })
    } else if (action !== "retry") {
      return c.json({
        error: `error_review response must include action: "retry" | "skip" | "abort" (got ${JSON.stringify(action)})`,
      }, 400)
    }
    // retry falls through — the failed node has no node_outputs entry, so
    // re-entering the dispatcher will execute it again.
  }

  const resumeOutputs = loadNodeOutputs(runtimeDb(), runId)

  // Flip the ledger back to 'running' before re-entry; the catch below
  // restores 'error' if resumption itself throws.
  runtimeDb().prepare(
    "UPDATE runs SET status='running', trigger='resumed', current_node=NULL, updated_at=datetime('now') WHERE id=?",
  ).run(runId)

  try {
    const outcome = await runPipeline(spec, payload, buildBaseCtx(runId), { resumeOutputs })
    persistOutcome(runId, outcome)
    return c.json(outcomeResponse(runId, runRow.pipeline, outcome))
  } catch (err: unknown) {
    const message = err instanceof Error ? err.message : String(err)
    runtimeDb().prepare(
      "UPDATE runs SET status='error', error=?, finished_at=datetime('now'), updated_at=datetime('now') WHERE id=?",
    ).run(message, runId)
    return c.json({ id: runId, pipeline: runRow.pipeline, status: "error", error: message }, 500)
  }
})
|
|
450
|
+
|
|
451
|
+
// ─── runs (runtime ledger) ───────────────────────────────────────────────────
|
|
452
|
+
apiApp.get("/runs", (c) => {
|
|
453
|
+
const limit = Number(c.req.query("limit") ?? 50)
|
|
454
|
+
const pipelineFilter = c.req.query("pipeline")
|
|
455
|
+
const rows = pipelineFilter
|
|
456
|
+
? runtimeDb().prepare("SELECT * FROM runs WHERE pipeline = ? ORDER BY id DESC LIMIT ?").all(pipelineFilter, limit)
|
|
457
|
+
: runtimeDb().prepare("SELECT * FROM runs ORDER BY id DESC LIMIT ?").all(limit)
|
|
458
|
+
return c.json(rows)
|
|
459
|
+
})
|
|
460
|
+
|
|
461
|
+
apiApp.get("/runs/:id", (c) => {
|
|
462
|
+
const row = runtimeDb().prepare("SELECT * FROM runs WHERE id=?").get(Number(c.req.param("id")))
|
|
463
|
+
if (!row) return c.json({ error: "not found" }, 404)
|
|
464
|
+
return c.json(row)
|
|
465
|
+
})
|
|
466
|
+
|
|
467
|
+
apiApp.get("/runs/:id/logs", (c) => {
|
|
468
|
+
const runId = Number(c.req.param("id"))
|
|
469
|
+
const rows = runtimeDb().prepare(
|
|
470
|
+
"SELECT id, ts, level, msg FROM logs WHERE run_id = ? ORDER BY id ASC",
|
|
471
|
+
).all(runId)
|
|
472
|
+
return c.json(rows)
|
|
473
|
+
})
|
|
474
|
+
|
|
475
|
+
// ─── lifecycle ───────────────────────────────────────────────────────────────
// Periodic cache pruning, every 5 minutes.
// NOTE(review): this interval is never cleared and not unref'd, so it may
// keep the process alive after startRunner().stop() — confirm intended.
setInterval(() => cache.prune(), 5 * 60 * 1000)

const PORT = Number(process.env.PORT ?? 2132)

// Bun's auto-launch path: only emit the port-log when index.ts is the entry
// (i.e. legacy dev mode `bun src/index.ts`). When startRunner.ts imports this
// module to extract the Hono app, it manages its own Bun.serve() and logs
// against options.port, so this log would be wrong.
if (import.meta.main) {
  console.log(`[runner] http://localhost:${PORT}`)
}

// Outer app: owns CORS, the /api mount, and the bundled UI static serving
// (per instance-spec.md §7 lifecycle step 9). Hosts can opt out via
// startRunner({ disableUi: true }) — useful when fronting with a separate UI
// or running headless.
const app = new Hono()
const corsOrigins = getCorsOrigins()
// null means "no explicit CORS config" → fall back to cors() defaults.
app.use("*", corsOrigins !== null ? cors({ origin: corsOrigins }) : cors())
app.route("/api", apiApp)

if (!isUiDisabled()) {
  const { distDir } = await import("@toist/ui")
  const indexPath = join(distDir, "index.html")
  if (existsSync(indexPath)) {
    // SPA serving: any GET maps to a file under dist/; non-asset paths fall
    // back to index.html so the client-side router takes over.
    //
    // Content-Type is set explicitly from BunFile.type. `new Response(file)`
    // alone does not always carry the MIME header through to the wire — at
    // least in some host environments (observed: published @toist/ui in
    // node_modules served from a host's runner) the header arrives empty,
    // which trips the browser's strict MIME check on JS module scripts.
    // Setting headers explicitly is correct and safe regardless of host.
    const respond = (file: ReturnType<typeof Bun.file>) =>
      new Response(file, {
        headers: { "Content-Type": file.type || "application/octet-stream" },
      })
    app.get("/*", async (c) => {
      // URL parsing normalizes dot-segments, so `..` cannot appear here.
      const url = new URL(c.req.url).pathname
      const tryPath = url === "/" ? "/index.html" : url
      const file = Bun.file(join(distDir, tryPath))
      if (await file.exists()) return respond(file)
      // SPA fallback for unknown non-asset routes (no extension)
      if (!tryPath.includes(".")) {
        return respond(Bun.file(indexPath))
      }
      return c.notFound()
    })
  } else {
    console.warn(
      `[runner] UI dist not found at ${distDir}; skipping static mount. ` +
      `Run \`bun run build\` in @toist/ui to populate it.`,
    )
  }
}

// Exported `fetch` lets startRunner.ts (and any future caller) bind the Hono
// app to its own Bun.serve options without going through `default`.
export const fetch = app.fetch

// Default export matches Bun's auto-serve shape: { port, fetch }.
export default { port: PORT, fetch: app.fetch }
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// Public API entry per context/instance-spec.md §4. Hosts that consume
|
|
3
|
+
// `@toist/aja` (post-extraction) call this with options to start a runner
|
|
4
|
+
// instance. The legacy CLI path — `bun src/index.ts` invoked directly by pm2
|
|
5
|
+
// in this repo's dev mode — still works unchanged: index.ts triggers initDbs
|
|
6
|
+
// on its own when import.meta.main is true.
|
|
7
|
+
//
|
|
8
|
+
// Lifecycle (matches instance-spec.md §7, scoped to what this iteration
|
|
9
|
+
// covers):
|
|
10
|
+
//
|
|
11
|
+
// 1. setRootDir(options.rootDir) — config knows the host's root
|
|
12
|
+
// 2. initDbs() — acquire lock, migrate, open dbs
|
|
13
|
+
// 3. dynamic import("./index.ts") — builds the Hono app (its own
|
|
14
|
+
// initDbs guard is a no-op since
|
|
15
|
+
// import.meta.main is false here)
|
|
16
|
+
// 4. Bun.serve({ port, fetch }) — bind app to host's port
|
|
17
|
+
// 5. return { port, stop }
|
|
18
|
+
//
|
|
19
|
+
// On stop():
|
|
20
|
+
// - server.stop() — close the HTTP server
|
|
21
|
+
// - closeDbs() — close all three handles, release the runner lock
|
|
22
|
+
//
|
|
23
|
+
// What this iteration does NOT yet do (Roadmap, Phase F W2 follow-ups):
|
|
24
|
+
// - Per-path overrides (pipelineDir, resourceDir, dataDir) as options —
|
|
25
|
+
// env vars cover them today
|
|
26
|
+
// - disableUi, disableMcp, disableWatch options — no UI/MCP yet served
|
|
27
|
+
// from this process; watch is always on
|
|
28
|
+
// - Custom logger injection — current code goes through console.log
|
|
29
|
+
|
|
30
|
+
import type { Server } from "bun"
|
|
31
|
+
import { setRootDir, setDisableUi, setDisableWatch, setCorsOrigins } from "./config.ts"
|
|
32
|
+
import { initDbs, closeDbs } from "./db-handles.ts"
|
|
33
|
+
|
|
34
|
+
/** Options accepted by startRunner. Every path and behavior toggle for a
 * runner instance is funneled through here (per instance-spec.md §4). */
export interface StartRunnerOptions {
  /** TCP port for the HTTP server. Required — no default. */
  port: number
  /** Root directory for all instance-relative paths. Required.
   * Resolves: pipelines/, resources/, data/, instance.json. */
  rootDir: string
  /** Disable the bundled UI static mount. Default: false (UI is served
   * from @toist/ui/dist when the build artefacts exist). Set true
   * for headless deployments or when fronting with a separate UI server. */
  disableUi?: boolean
  /** Disable the filesystem watcher on pipelineDir/resourceDir. Default:
   * false (watcher is on; pipelines hot-reload on file changes).
   * Production deployments may set true to avoid inotify/fsevent overhead
   * and prevent accidental reloads from concurrent edits. */
  disableWatch?: boolean
  /** CORS allow-origin configuration. `string` for a single origin,
   * `string[]` for an allowlist, `undefined` for default (permissive `*`).
   * Production deployments should typically pin this. */
  corsOrigins?: string | string[]
}
|
|
54
|
+
|
|
55
|
+
/** Handle returned by startRunner; owns the started server's lifecycle. */
export interface RunnerHandle {
  /** The port the runner is actually listening on. */
  port: number
  /** Stop the runner gracefully — close the HTTP server, close DB handles,
   * release the runner lock. */
  stop(): Promise<void>
}
|
|
62
|
+
|
|
63
|
+
export async function startRunner(options: StartRunnerOptions): Promise<RunnerHandle> {
|
|
64
|
+
setRootDir(options.rootDir)
|
|
65
|
+
setDisableUi(options.disableUi ?? false)
|
|
66
|
+
setDisableWatch(options.disableWatch ?? false)
|
|
67
|
+
setCorsOrigins(options.corsOrigins)
|
|
68
|
+
await initDbs()
|
|
69
|
+
|
|
70
|
+
// Dynamic import deferred until after initDbs so server.ts's module-load
|
|
71
|
+
// expressions that call cacheDb() / dataDb() / runtimeDb() see populated
|
|
72
|
+
// handles. The static-import alternative would force module evaluation
|
|
73
|
+
// before initDbs runs.
|
|
74
|
+
const mod = await import("./server.ts")
|
|
75
|
+
const fetch = mod.fetch as (req: Request) => Response | Promise<Response>
|
|
76
|
+
|
|
77
|
+
const server: Server = Bun.serve({ port: options.port, fetch })
|
|
78
|
+
console.log(`[runner] http://localhost:${server.port}`)
|
|
79
|
+
|
|
80
|
+
return {
|
|
81
|
+
port: server.port,
|
|
82
|
+
async stop() {
|
|
83
|
+
server.stop()
|
|
84
|
+
await closeDbs()
|
|
85
|
+
},
|
|
86
|
+
}
|
|
87
|
+
}
|