@toist/aja 0.5.0

@@ -0,0 +1,350 @@
+ // 2121
+ // Resource system — typed external-system handles.
+ //
+ // Three-tier override chain (highest priority first):
+ // 1. ENV-VAR: PLATFORM_RESOURCE_<NAME>_<FIELD>=value
+ // 2. DB: runtime.db resources table (set via UI or POST /resources)
+ // 3. YAML: <runner>/resources/*.yaml (committable; secrets use { $env: "VAR" })
+ //
+ // ctx.resource.<name>.<field> is the pipeline-side surface.
+ // Fields from higher tiers override lower tiers field-by-field.
+ //
+ // Sensitive fields (x-sensitive:true in the Type schema) are stored
+ // clear-text in v1. A load-time warning surfaces this. Encryption at
+ // rest is deferred to pipeline-spec.md §16.1.
+
+ import { existsSync, readdirSync, readFileSync } from "node:fs"
+ import { basename, extname, join, dirname } from "node:path"
+ import { fileURLToPath } from "node:url"
+ import type { Database } from "bun:sqlite"
+ import { parseYaml, YamlError } from "@toist/spec"
+ import type { ResourceTypeDef } from "@toist/spec"
+
+ const __dir = dirname(fileURLToPath(import.meta.url))
+ // Resolved relative to this module (not the CWD): resources/*.yaml lives at <runner>/resources/
+ const RESOURCES_DIR = join(__dir, "..", "..", "resources")
+
+ // ─── Resource Type registry ───────────────────────────────────────────────────
+
+ const typeRegistry = new Map<string, ResourceTypeDef>()
+
+ export function registerResourceType(...types: ResourceTypeDef[]): void {
+   for (const t of types) typeRegistry.set(t.name, t)
+ }
+
+ export function getResourceType(name: string): ResourceTypeDef | undefined {
+   return typeRegistry.get(name)
+ }
+
+ export function listResourceTypes(): ResourceTypeDef[] {
+   return [...typeRegistry.values()]
+ }
+
+ const BUILTIN_TYPES: ResourceTypeDef[] = [
+   {
+     name: "AnthropicApi",
+     description: "Anthropic API credentials and default model config",
+     schema: {
+       $schema: "https://json-schema.org/draft/2020-12/schema",
+       type: "object",
+       properties: {
+         apiKey: { type: "string", "x-sensitive": true, description: "Anthropic API key (sk-ant-…)" },
+         model: { type: "string", default: "claude-sonnet-4-6", description: "Default model ID" },
+       },
+       required: ["apiKey"],
+     },
+   },
+   {
+     name: "OpenAI",
+     description: "OpenAI API credentials",
+     schema: {
+       $schema: "https://json-schema.org/draft/2020-12/schema",
+       type: "object",
+       properties: {
+         apiKey: { type: "string", "x-sensitive": true },
+         model: { type: "string", default: "gpt-4o" },
+         baseUrl: { type: "string", description: "Override for OpenAI-compatible endpoints" },
+       },
+       required: ["apiKey"],
+     },
+   },
+   {
+     name: "Postgres",
+     description: "PostgreSQL database connection",
+     schema: {
+       $schema: "https://json-schema.org/draft/2020-12/schema",
+       type: "object",
+       properties: {
+         host: { type: "string" },
+         port: { type: "number", default: 5432 },
+         database: { type: "string" },
+         user: { type: "string" },
+         password: { type: "string", "x-sensitive": true },
+         ssl: { type: "boolean", default: false },
+       },
+       required: ["host", "database", "user"],
+     },
+   },
+   {
+     name: "S3",
+     description: "AWS S3 or S3-compatible object storage",
+     schema: {
+       $schema: "https://json-schema.org/draft/2020-12/schema",
+       type: "object",
+       properties: {
+         bucket: { type: "string" },
+         region: { type: "string" },
+         accessKeyId: { type: "string", "x-sensitive": true },
+         secretAccessKey: { type: "string", "x-sensitive": true },
+         endpoint: { type: "string", description: "S3-compatible endpoint URL (optional)" },
+       },
+       required: ["bucket", "accessKeyId", "secretAccessKey"],
+     },
+   },
+   {
+     name: "HttpApi",
+     description: "Generic HTTP API with base URL and optional auth token",
+     schema: {
+       $schema: "https://json-schema.org/draft/2020-12/schema",
+       type: "object",
+       properties: {
+         baseUrl: { type: "string" },
+         apiKey: { type: "string", "x-sensitive": true },
+         bearerToken: { type: "string", "x-sensitive": true },
+       },
+       required: ["baseUrl"],
+     },
+   },
+ ]
+
+ registerResourceType(...BUILTIN_TYPES)
+
+ // ─── Sensitive-field detection ────────────────────────────────────────────────
+
+ function sensitiveFields(typeName: string): Set<string> {
+   const type = typeRegistry.get(typeName)
+   if (!type) return new Set()
+   const props = (type.schema?.properties ?? {}) as Record<string, { "x-sensitive"?: boolean }>
+   return new Set(Object.entries(props).filter(([, v]) => v?.["x-sensitive"]).map(([k]) => k))
+ }
+
+ function warnSensitive(name: string, typeName: string, fields: Record<string, unknown>): void {
+   const sensitive = sensitiveFields(typeName)
+   const exposed = Object.keys(fields).filter((k) => sensitive.has(k))
+   if (exposed.length > 0) {
+     console.warn(
+       `[resources] resource "${name}" (${typeName}) has sensitive fields stored unencrypted: ${exposed.join(", ")}. Encryption pending (§16.1).`,
+     )
+   }
+ }
+
+ // ─── $env placeholder resolver ────────────────────────────────────────────────
+
+ function resolveEnvPlaceholders(val: unknown, source: string): unknown {
+   if (val === null || typeof val !== "object") return val
+   if (Array.isArray(val)) return val.map((v) => resolveEnvPlaceholders(v, source))
+   const obj = val as Record<string, unknown>
+   if ("$env" in obj && typeof obj["$env"] === "string") {
+     const envVal = process.env[obj["$env"]]
+     if (envVal === undefined) {
+       throw new Error(`[resources] ${source}: $env placeholder "${obj["$env"]}" is not set at startup`)
+     }
+     return envVal
+   }
+   return Object.fromEntries(
+     Object.entries(obj).map(([k, v]) => [k, resolveEnvPlaceholders(v, source)]),
+   )
+ }
+
+ // ─── YAML resource loader ─────────────────────────────────────────────────────
+
+ interface ResourceYaml {
+   type: string
+   name?: string
+   fields?: Record<string, unknown>
+ }
+
+ function loadResourceYamls(): Map<string, { type: string; fields: Record<string, unknown> }> {
+   const out = new Map<string, { type: string; fields: Record<string, unknown> }>()
+   if (!existsSync(RESOURCES_DIR)) return out
+
+   const files = readdirSync(RESOURCES_DIR).filter((f) => /\.ya?ml$/.test(f))
+   for (const file of files) {
+     const path = join(RESOURCES_DIR, file)
+     try {
+       const raw = readFileSync(path, "utf8")
+       const parsed = parseYaml(raw) as ResourceYaml
+       if (!parsed || typeof parsed !== "object" || !parsed.type) {
+         console.warn(`[resources] ${file}: missing required "type" field — skipped`)
+         continue
+       }
+       const stem = basename(file, extname(file))
+       const name = parsed.name ?? stem
+       const rawFields = parsed.fields ?? {}
+       const fields = resolveEnvPlaceholders(rawFields, file) as Record<string, unknown>
+       out.set(name, { type: parsed.type, fields })
+     } catch (err) {
+       if (err instanceof YamlError) {
+         console.warn(`[resources] ${file}: YAML parse error — ${err.message}`)
+       } else {
+         console.warn(`[resources] ${file}: ${(err as Error).message}`)
+       }
+     }
+   }
+   return out
+ }
+
+ // ─── DB resource loader ───────────────────────────────────────────────────────
+
+ interface ResourceRow {
+   id: number
+   name: string
+   type: string
+   fields_json: string
+   created_at: string
+   updated_at: string
+ }
+
+ function loadResourcesFromDb(db: Database): Map<string, { type: string; fields: Record<string, unknown> }> {
+   const out = new Map<string, { type: string; fields: Record<string, unknown> }>()
+   try {
+     const rows = db.prepare("SELECT name, type, fields_json FROM resources").all() as
+       { name: string; type: string; fields_json: string }[]
+     for (const row of rows) {
+       try {
+         const fields = JSON.parse(row.fields_json) as Record<string, unknown>
+         out.set(row.name, { type: row.type, fields })
+       } catch {
+         console.warn(`[resources] DB row "${row.name}": invalid fields_json — skipped`)
+       }
+     }
+   } catch (err) {
+     // Table may not exist yet (migration pending). Log and continue.
+     console.warn(`[resources] DB read failed: ${(err as Error).message}`)
+   }
+   return out
+ }
+
+ // ─── ENV-VAR resolver ─────────────────────────────────────────────────────────
+ // Scans process.env for PLATFORM_RESOURCE_<NAME>_<FIELD>=value.
+ // NAME and FIELD are uppercased in the env key; both are lowercased when
+ // applied to ctx.resource. Supports single-level fields only (no nested ENV).
+
+ function applyEnvOverrides(result: Map<string, { type: string; fields: Record<string, unknown> }>): void {
+   for (const [key, value] of Object.entries(process.env)) {
+     if (!value) continue
+     const m = key.match(/^PLATFORM_RESOURCE_([A-Z][A-Z0-9_]*)_([A-Z][A-Z0-9_]*)$/)
+     if (!m) continue
+     const name = m[1].toLowerCase()
+     const field = m[2].toLowerCase()
+     const entry = result.get(name)
+     if (entry) {
+       entry.fields[field] = value
+     } else {
+       // ENV-only resource with no YAML or DB entry — create a minimal record.
+       result.set(name, { type: "Unknown", fields: { [field]: value } })
+     }
+   }
+ }
+
+ // ─── Main: build ctx.resource ─────────────────────────────────────────────────
+
+ /** Merges all three tiers and returns the ctx.resource namespace for runPipeline. */
+ export function buildResourceCtx(db: Database): Record<string, Record<string, unknown>> {
+   // Tier 3 (lowest): YAML files
+   const merged = loadResourceYamls()
+
+   // Tier 2: DB overrides YAML field-by-field
+   for (const [name, dbEntry] of loadResourcesFromDb(db)) {
+     const existing = merged.get(name)
+     if (existing) {
+       existing.fields = { ...existing.fields, ...dbEntry.fields }
+     } else {
+       merged.set(name, dbEntry)
+     }
+   }
+
+   // Tier 1 (highest): ENV-VAR overrides
+   applyEnvOverrides(merged)
+
+   // Flatten to Record<name, fields> and warn on exposed sensitive fields
+   const out: Record<string, Record<string, unknown>> = {}
+   for (const [name, { type, fields }] of merged) {
+     warnSensitive(name, type, fields)
+     out[name] = fields
+   }
+   return out
+ }
+
+ // ─── DB CRUD (used by API routes) ─────────────────────────────────────────────
+
+ export interface ResourceRecord {
+   id: number
+   name: string
+   type: string
+   fields: Record<string, unknown>
+   created_at: string
+   updated_at: string
+ }
+
+ export function listResources(db: Database): ResourceRecord[] {
+   const rows = db.prepare(
+     "SELECT id, name, type, fields_json, created_at, updated_at FROM resources ORDER BY name",
+   ).all() as ResourceRow[]
+   return rows.map((r) => ({
+     id: r.id,
+     name: r.name,
+     type: r.type,
+     fields: JSON.parse(r.fields_json) as Record<string, unknown>,
+     created_at: r.created_at,
+     updated_at: r.updated_at,
+   }))
+ }
+
+ export function getResource(db: Database, name: string): ResourceRecord | null {
+   const row = db.prepare(
+     "SELECT id, name, type, fields_json, created_at, updated_at FROM resources WHERE name = ?",
+   ).get(name) as ResourceRow | undefined
+   if (!row) return null
+   return {
+     id: row.id, name: row.name, type: row.type,
+     fields: JSON.parse(row.fields_json) as Record<string, unknown>,
+     created_at: row.created_at, updated_at: row.updated_at,
+   }
+ }
+
+ export function upsertResource(
+   db: Database,
+   name: string,
+   type: string,
+   fields: Record<string, unknown>,
+ ): ResourceRecord {
+   db.prepare(`
+     INSERT INTO resources (name, type, fields_json)
+     VALUES (?, ?, ?)
+     ON CONFLICT(name) DO UPDATE SET
+       type = excluded.type,
+       fields_json = excluded.fields_json,
+       updated_at = datetime('now')
+   `).run(name, type, JSON.stringify(fields))
+   return getResource(db, name)!
+ }
+
+ export function patchResource(
+   db: Database,
+   name: string,
+   fields: Record<string, unknown>,
+ ): ResourceRecord | null {
+   const existing = getResource(db, name)
+   if (!existing) return null
+   const merged = { ...existing.fields, ...fields }
+   db.prepare(
+     "UPDATE resources SET fields_json = ?, updated_at = datetime('now') WHERE name = ?",
+   ).run(JSON.stringify(merged), name)
+   return getResource(db, name)!
+ }
+
+ export function deleteResource(db: Database, name: string): boolean {
+   const result = db.prepare("DELETE FROM resources WHERE name = ?").run(name)
+   return result.changes > 0
+ }
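
Below is a minimal end-to-end sketch of the override chain described in the file's header comment; it is not part of the package. The CREATE TABLE stands in for the real migration (not included in this diff) with columns inferred from the queries above, and the import specifier, resource name, and field values are illustrative assumptions.

import { Database } from "bun:sqlite"
// Assumed import specifier; the package's actual export surface is not shown in this diff.
import { buildResourceCtx, upsertResource } from "@toist/aja"

const db = new Database(":memory:")
// Stand-in for the real migration: columns match what upsertResource/listResources expect.
db.run(`
  CREATE TABLE resources (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL UNIQUE,
    type TEXT NOT NULL,
    fields_json TEXT NOT NULL,
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
  )
`)

// Tier 2 (DB): normally written via the UI or POST /resources.
upsertResource(db, "anthropic", "AnthropicApi", {
  apiKey: "sk-ant-example",
  model: "claude-sonnet-4-6",
})

// Tier 1 (ENV): overrides a single field; NAME and FIELD are lowercased when applied.
process.env.PLATFORM_RESOURCE_ANTHROPIC_MODEL = "model-from-env"

const ctx = buildResourceCtx(db)
// ctx.anthropic is { apiKey: "sk-ant-example", model: "model-from-env" };
// a [resources] warning is logged because apiKey is x-sensitive and stored clear-text.
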
package/src/runs.ts ADDED
@@ -0,0 +1,53 @@
+ // 2121
+ // Read-only capability over the runtime ledger, exposed to kinds as
+ // ctx.runs. Powers the runs.lastOutput and runs.nodeOutput kinds (M3, M4
+ // in pipeline-spec.md §12). Kept here rather than in runtime-db.ts because
+ // the latter is intentionally not exposed to kinds — kinds see ctx.db
+ // (data store) and ctx.cache (capability), not the runtime tables.
+
+ import type { Database } from "bun:sqlite"
+ import type { RunStore } from "@toist/spec"
+
+ interface RunRow {
+   id: number
+   result: string | null
+ }
+
+ interface NodeOutputRow {
+   output_json: string | null
+ }
+
+ export function makeRunStore(runtimeDb: Database): RunStore {
+   return {
+     lastOutput(pipeline, opts) {
+       const status = opts?.status ?? "done"
+       const row = runtimeDb.prepare(
+         "SELECT id, result FROM runs WHERE pipeline = ? AND status = ? ORDER BY id DESC LIMIT 1",
+       ).get(pipeline, status) as RunRow | undefined
+       if (!row || row.result === null) return null
+       try {
+         return JSON.parse(row.result) as unknown
+       } catch {
+         return null
+       }
+     },
+
+     nodeOutput(pipeline, node, opts) {
+       const status = opts?.status ?? "done"
+       const row = runtimeDb.prepare(
+         `SELECT no.output_json
+          FROM node_outputs no
+          JOIN runs r ON no.run_id = r.id
+          WHERE r.pipeline = ? AND r.status = ? AND no.node_id = ?
+          ORDER BY r.id DESC
+          LIMIT 1`,
+       ).get(pipeline, status, node) as NodeOutputRow | undefined
+       if (!row || row.output_json === null) return null
+       try {
+         return JSON.parse(row.output_json) as unknown
+       } catch {
+         return null
+       }
+     },
+   }
+ }
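
A short sketch of how a kind might consume this capability as ctx.runs; the pipeline name, node id, and the shape of the surrounding kind are illustrative, not taken from this package.

import type { RunStore } from "@toist/spec"

// Hypothetical kind body: read earlier runs of a pipeline back out of the ledger.
function summarizeHistory(ctx: { runs: RunStore }) {
  // Result of the most recent completed run (status defaults to "done").
  const lastResult = ctx.runs.lastOutput("nightly-sync")

  // Checkpointed output of a single node from the most recent completed run;
  // null if there is no matching run, no stored output, or the stored JSON is invalid.
  const fetchOutput = ctx.runs.nodeOutput("nightly-sync", "fetch", { status: "done" })

  return { lastResult, fetchOutput }
}
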
@@ -0,0 +1,48 @@
+ // 2121
+ // Platform-internal runtime ledger. Holds:
+ // - runs — every pipeline execution (status, payload, result, steps, timing)
+ // - logs — log lines emitted from kinds during a run, scoped to run_id
+ // - tasks — HITL pending tasks (added in migration 002)
+ // - node_outputs — checkpoint-between-nodes memoization (added in migration 002)
+ //
+ // Schema is owned by the platform and evolved via numbered SQL migrations
+ // in ../migrations/. See migrate.ts for the runner.
+ //
+ // Kinds never receive a handle to this DB — they only see ctx.db (data
+ // store) and ctx.cache (capability), so they cannot accidentally touch
+ // runtime tables.
+ //
+ // Migration policy: by default the runner applies pending migrations at
+ // startup. RUNTIME_AUTO_MIGRATE=false flips to fail-loud — startup aborts if
+ // anything is pending.
+ //
+ // Lifecycle: openRuntimeDb() is a pure factory. The runner lock is acquired
+ // separately by db-handles.initDbs() before this is called, so two callers
+ // cannot race on migrations.
+
+ import { Database } from "bun:sqlite"
+ import { mkdirSync } from "node:fs"
+ import { dirname } from "node:path"
+ import { runMigrations, pendingMigrations } from "./migrate.ts"
+ import { runtimeDbPath } from "./config.ts"
+
+ export function openRuntimeDb(): Database {
+   const path = runtimeDbPath()
+   mkdirSync(dirname(path), { recursive: true })
+
+   if (process.env.RUNTIME_AUTO_MIGRATE === "false") {
+     const pending = pendingMigrations(path)
+     if (pending.length > 0) {
+       console.error(
+         `[runtime-db] ${pending.length} pending migration(s); RUNTIME_AUTO_MIGRATE=false.`,
+       )
+       console.error(`[runtime-db] pending: ${pending.join(", ")}`)
+       console.error(`[runtime-db] run migrations explicitly, or unset RUNTIME_AUTO_MIGRATE.`)
+       process.exit(1)
+     }
+   } else {
+     runMigrations(path)
+   }
+
+   return new Database(path, { create: true })
+ }
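
A brief startup sketch of the migration policy described in the header comment; the module path and entry-point name are assumptions, and initDbs() with its runner lock is referenced above but not part of this diff.

// Default: pending migrations from ../migrations/ are applied, then the ledger DB is opened.
import { openRuntimeDb } from "./runtime-db.ts" // assumed module path

const runtimeDb = openRuntimeDb()

// Fail-loud mode: the same call logs the pending migration list and exits with code 1
// instead of migrating:
//   RUNTIME_AUTO_MIGRATE=false bun run server.ts   (entry-point name is illustrative)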