@toist/aja 0.7.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +49 -1
- package/README.md +62 -0
- package/package.json +6 -4
- package/src/cache-db.ts +1 -9
- package/src/cli.ts +17 -11
- package/src/client.ts +76 -0
- package/src/data-db.ts +1 -13
- package/src/index.ts +35 -2
- package/src/instance-metadata.ts +48 -0
- package/src/kinds/index.ts +23 -61
- package/src/lock.ts +27 -53
- package/src/migrate.ts +3 -3
- package/src/pipeline-store.ts +31 -0
- package/src/resources-fs.ts +43 -0
- package/src/resources.ts +27 -190
- package/src/run-events.ts +42 -0
- package/src/runtime-db.ts +11 -30
- package/src/server.ts +506 -496
- package/src/sqlite-runtime.ts +135 -0
- package/src/startRunner.ts +56 -70
- package/src/stores/sqlite.ts +243 -0
- package/src/stores/types.ts +18 -0
- package/src/config.ts +0 -129
- package/src/db-handles.ts +0 -70
- package/src/hitl.ts +0 -257
- package/src/instance.ts +0 -64
- package/src/kinds/control.ts +0 -26
- package/src/kinds/data.ts +0 -30
- package/src/kinds/db.ts +0 -92
- package/src/kinds/hitl.ts +0 -56
- package/src/kinds/http.ts +0 -134
- package/src/kinds/runs.ts +0 -130
- package/src/kinds/transform.ts +0 -123
- package/src/kinds/types.ts +0 -16
- package/src/pipeline.ts +0 -605
- package/src/runs.ts +0 -53
package/src/migrate.ts
CHANGED
@@ -103,7 +103,7 @@ function backupAndPrune(dbPath: string): string | null {
   const backupPath = join(dir, `${base}.bak-${ts}`)

   copyFileSync(dbPath, backupPath)
-  console.
+  console.error(`[migrate] backup ${backupPath}`)

   const prefix = `${base}.bak-`
   const all = readdirSync(dir)
@@ -113,7 +113,7 @@ function backupAndPrune(dbPath: string): string | null {

   for (const stale of all.slice(BACKUP_RETAIN)) {
     unlinkSync(join(dir, stale.name))
-    console.
+    console.error(`[migrate] pruned old backup ${stale.name}`)
   }

   return backupPath
@@ -191,7 +191,7 @@ export function runMigrations(dbPath: string, opts: MigrateOptions = {}): Migrat
     try {
       apply()
       result.applied.push(file.filename)
-      console.
+      console.error(`[migrate] applied ${file.filename}`)
     } catch (err) {
       const msg = err instanceof Error ? err.message : String(err)
       throw new Error(`[migrate] failed applying "${file.filename}": ${msg}`)

package/src/pipeline-store.ts
ADDED
@@ -0,0 +1,31 @@
+import { join } from "node:path"
+import { getPipeline, getPipelines, loadAll, watchAll, type KindRegistry } from "@toist/core"
+import type { PipelineSpec } from "@toist/spec"
+
+export interface FilesystemPipelineStore {
+  get(id: string): PipelineSpec | null
+  list(): PipelineSpec[]
+  close(): void
+}
+
+export function createFilesystemPipelineStore(options: {
+  rootDir: string
+  watch?: boolean
+  registry?: KindRegistry
+}): FilesystemPipelineStore {
+  const pipelinesDir = join(options.rootDir, "pipelines")
+  loadAll(pipelinesDir, { registry: options.registry })
+  const watcher = options.watch === false ? null : watchAll(pipelinesDir, undefined, { registry: options.registry })
+
+  return {
+    get(id) {
+      return getPipeline(id) ?? null
+    },
+    list() {
+      return getPipelines()
+    },
+    close() {
+      watcher?.close()
+    },
+  }
+}
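
A minimal usage sketch of the new filesystem-backed store (a sketch, not part of the published diff; the rootDir and pipeline id are hypothetical, and pipelines are loaded from <rootDir>/pipelines):

    import { createFilesystemPipelineStore } from "./pipeline-store.ts"

    const store = createFilesystemPipelineStore({ rootDir: "/srv/toist", watch: true })
    const spec = store.get("daily-report")          // PipelineSpec | null
    if (!spec) console.error("no such pipeline")
    console.error(`loaded ${store.list().length} pipeline(s)`)
    store.close()                                   // stops the watchAll() watcher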

package/src/resources-fs.ts
ADDED
@@ -0,0 +1,43 @@
+// 2121
+import { existsSync, readFileSync } from "node:fs"
+import { join } from "node:path"
+import YAML from "yaml"
+
+const ENV_EXPR = /\$\{env:([A-Za-z_][A-Za-z0-9_]*)(?::-(.*?))?\}/g
+
+function isPlainObject(value: unknown): value is Record<string, unknown> {
+  return !!value && typeof value === "object" && !Array.isArray(value)
+}
+
+function substituteEnvInString(value: string): string {
+  return value.replace(ENV_EXPR, (_match, name: string, fallback: string | undefined) => {
+    const resolved = process.env[name]
+    if (resolved !== undefined) return resolved
+    if (fallback !== undefined) return fallback
+    throw new Error(`[resources-fs] missing required environment variable ${name}`)
+  })
+}
+
+function substituteEnv(value: unknown): unknown {
+  if (typeof value === "string") return substituteEnvInString(value)
+  if (Array.isArray(value)) return value.map((entry) => substituteEnv(entry))
+  if (!isPlainObject(value)) return value
+  return Object.fromEntries(
+    Object.entries(value).map(([key, entry]) => [key, substituteEnv(entry)]),
+  )
+}
+
+export async function loadFilesystemResources(
+  rootDir: string,
+  _opts: { env?: string } = {},
+): Promise<Record<string, Record<string, unknown>>> {
+  const candidates = [join(rootDir, "toist.yml"), join(rootDir, "toist.yaml")]
+  const path = candidates.find((candidate) => existsSync(candidate))
+  if (!path) return {}
+
+  const parsed = (YAML.parse(readFileSync(path, "utf8")) ?? {}) as Record<string, unknown>
+  const resources = parsed.resources
+  if (!isPlainObject(resources)) return {}
+
+  return substituteEnv(resources) as Record<string, Record<string, unknown>>
+}
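
A sketch of how the loader resolves ${env:...} expressions, assuming a hypothetical /srv/toist/toist.yml; names and values are illustrative:

    import { loadFilesystemResources } from "./resources-fs.ts"

    // Assume /srv/toist/toist.yml contains:
    //   resources:
    //     anthropic:
    //       apiKey: ${env:ANTHROPIC_API_KEY}
    //       model: ${env:TOIST_MODEL:-claude-sonnet-4-6}
    process.env.ANTHROPIC_API_KEY = "sk-ant-example"
    delete process.env.TOIST_MODEL

    const resources = await loadFilesystemResources("/srv/toist")
    // → { anthropic: { apiKey: "sk-ant-example", model: "claude-sonnet-4-6" } }
    // A ${env:NAME} reference with no :-fallback and no value set throws
    // "[resources-fs] missing required environment variable NAME".
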
package/src/resources.ts
CHANGED
@@ -1,29 +1,14 @@
 // 2121
-// Resource
+// Resource type registry + runtime DB CRUD for the admin API.
 //
-//
-//
-//
-//
-//
-// ctx.resource.<name>.<field> is the pipeline-side surface.
-// Fields from higher tiers override lower tiers field-by-field.
-//
-// Sensitive fields (x-sensitive:true in the Type schema) are stored
-// clear-text in v1. A load-time warning surfaces this. Encryption at
-// rest is deferred to pipeline-spec.md §16.1.
+// Note: execution-time resource resolution no longer reads from the resources
+// table. Phase 4 resolves resources from <rootDir>/toist.yml at runtime
+// construction time via resources-fs.ts. The DB table remains for the admin
+// surface / legacy UI editor until a later UX pass decides its fate.

-import { existsSync, readdirSync, readFileSync } from "node:fs"
-import { basename, extname, join, dirname } from "node:path"
-import { fileURLToPath } from "node:url"
 import type { Database } from "bun:sqlite"
-import { parseYaml, YamlError } from "@toist/spec"
 import type { ResourceTypeDef } from "@toist/spec"

-const __dir = dirname(fileURLToPath(import.meta.url))
-// CWD is apps/runner/server/; resources/*.yaml lives at apps/runner/resources/
-const RESOURCES_DIR = join(__dir, "..", "..", "resources")
-
 // ─── Resource Type registry ───────────────────────────────────────────────────

 const typeRegistry = new Map<string, ResourceTypeDef>()
@@ -49,7 +34,7 @@ const BUILTIN_TYPES: ResourceTypeDef[] = [
     type: "object",
     properties: {
       apiKey: { type: "string", "x-sensitive": true, description: "Anthropic API key (sk-ant-…)" },
-      model:
+      model: { type: "string", default: "claude-sonnet-4-6", description: "Default model ID" },
     },
     required: ["apiKey"],
   },
@@ -61,8 +46,8 @@ const BUILTIN_TYPES: ResourceTypeDef[] = [
     $schema: "https://json-schema.org/draft/2020-12/schema",
     type: "object",
     properties: {
-      apiKey:
-      model:
+      apiKey: { type: "string", "x-sensitive": true },
+      model: { type: "string", default: "gpt-4o" },
       baseUrl: { type: "string", description: "Override for OpenAI-compatible endpoints" },
     },
     required: ["apiKey"],
@@ -75,12 +60,12 @@ const BUILTIN_TYPES: ResourceTypeDef[] = [
     $schema: "https://json-schema.org/draft/2020-12/schema",
     type: "object",
     properties: {
-      host:
-      port:
+      host: { type: "string" },
+      port: { type: "number", default: 5432 },
       database: { type: "string" },
-      user:
+      user: { type: "string" },
       password: { type: "string", "x-sensitive": true },
-      ssl:
+      ssl: { type: "boolean", default: false },
     },
     required: ["host", "database", "user"],
   },
@@ -92,11 +77,11 @@ const BUILTIN_TYPES: ResourceTypeDef[] = [
     $schema: "https://json-schema.org/draft/2020-12/schema",
     type: "object",
     properties: {
-      bucket:
-      region:
-      accessKeyId:
+      bucket: { type: "string" },
+      region: { type: "string" },
+      accessKeyId: { type: "string", "x-sensitive": true },
       secretAccessKey: { type: "string", "x-sensitive": true },
-      endpoint:
+      endpoint: { type: "string", description: "S3-compatible endpoint URL (optional)" },
     },
     required: ["bucket", "accessKeyId", "secretAccessKey"],
   },
@@ -108,8 +93,8 @@ const BUILTIN_TYPES: ResourceTypeDef[] = [
     $schema: "https://json-schema.org/draft/2020-12/schema",
     type: "object",
     properties: {
-      baseUrl:
-      apiKey:
+      baseUrl: { type: "string" },
+      apiKey: { type: "string", "x-sensitive": true },
       bearerToken: { type: "string", "x-sensitive": true },
     },
     required: ["baseUrl"],
@@ -119,82 +104,7 @@ const BUILTIN_TYPES: ResourceTypeDef[] = [

 registerResourceType(...BUILTIN_TYPES)

-// ───
-
-function sensitiveFields(typeName: string): Set<string> {
-  const type = typeRegistry.get(typeName)
-  if (!type) return new Set()
-  const props = (type.schema?.properties ?? {}) as Record<string, { "x-sensitive"?: boolean }>
-  return new Set(Object.entries(props).filter(([, v]) => v?.["x-sensitive"]).map(([k]) => k))
-}
-
-function warnSensitive(name: string, typeName: string, fields: Record<string, unknown>): void {
-  const sensitive = sensitiveFields(typeName)
-  const exposed = Object.keys(fields).filter((k) => sensitive.has(k))
-  if (exposed.length > 0) {
-    console.warn(
-      `[resources] resource "${name}" (${typeName}) has sensitive fields stored unencrypted: ${exposed.join(", ")}. Encryption pending (§16.1).`,
-    )
-  }
-}
-
-// ─── $env placeholder resolver ────────────────────────────────────────────────
-
-function resolveEnvPlaceholders(val: unknown, source: string): unknown {
-  if (val === null || typeof val !== "object") return val
-  if (Array.isArray(val)) return val.map((v) => resolveEnvPlaceholders(v, source))
-  const obj = val as Record<string, unknown>
-  if ("$env" in obj && typeof obj["$env"] === "string") {
-    const envVal = process.env[obj["$env"]]
-    if (envVal === undefined) {
-      throw new Error(`[resources] ${source}: $env placeholder "${obj["$env"]}" is not set at startup`)
-    }
-    return envVal
-  }
-  return Object.fromEntries(
-    Object.entries(obj).map(([k, v]) => [k, resolveEnvPlaceholders(v, source)]),
-  )
-}
-
-// ─── YAML resource loader ─────────────────────────────────────────────────────
-
-interface ResourceYaml {
-  type: string
-  name?: string
-  fields?: Record<string, unknown>
-}
-
-function loadResourceYamls(): Map<string, { type: string; fields: Record<string, unknown> }> {
-  const out = new Map<string, { type: string; fields: Record<string, unknown> }>()
-  if (!existsSync(RESOURCES_DIR)) return out
-
-  const files = readdirSync(RESOURCES_DIR).filter((f) => /\.ya?ml$/.test(f))
-  for (const file of files) {
-    const path = join(RESOURCES_DIR, file)
-    try {
-      const raw = readFileSync(path, "utf8")
-      const parsed = parseYaml(raw) as ResourceYaml
-      if (!parsed || typeof parsed !== "object" || !parsed.type) {
-        console.warn(`[resources] ${file}: missing required "type" field — skipped`)
-        continue
-      }
-      const stem = basename(file, extname(file))
-      const name = parsed.name ?? stem
-      const rawFields = parsed.fields ?? {}
-      const fields = resolveEnvPlaceholders(rawFields, file) as Record<string, unknown>
-      out.set(name, { type: parsed.type, fields })
-    } catch (err) {
-      if (err instanceof YamlError) {
-        console.warn(`[resources] ${file}: YAML parse error — ${err.message}`)
-      } else {
-        console.warn(`[resources] ${file}: ${(err as Error).message}`)
-      }
-    }
-  }
-  return out
-}
-
-// ─── DB resource loader ───────────────────────────────────────────────────────
+// ─── DB CRUD (used by API routes) ─────────────────────────────────────────────

 interface ResourceRow {
   id: number
@@ -205,79 +115,6 @@ interface ResourceRow {
   updated_at: string
 }

-function loadResourcesFromDb(db: Database): Map<string, { type: string; fields: Record<string, unknown> }> {
-  const out = new Map<string, { type: string; fields: Record<string, unknown> }>()
-  try {
-    const rows = db.prepare("SELECT name, type, fields_json FROM resources").all() as
-      { name: string; type: string; fields_json: string }[]
-    for (const row of rows) {
-      try {
-        const fields = JSON.parse(row.fields_json) as Record<string, unknown>
-        out.set(row.name, { type: row.type, fields })
-      } catch {
-        console.warn(`[resources] DB row "${row.name}": invalid fields_json — skipped`)
-      }
-    }
-  } catch (err) {
-    // Table may not exist yet (migration pending). Log and continue.
-    console.warn(`[resources] DB read failed: ${(err as Error).message}`)
-  }
-  return out
-}
-
-// ─── ENV-VAR resolver ─────────────────────────────────────────────────────────
-// Scans process.env for PLATFORM_RESOURCE_<NAME>_<FIELD>=value.
-// NAME and FIELD are uppercased in the env key; both are lowercased when
-// applied to ctx.resource. Supports single-level fields only (no nested ENV).
-
-function applyEnvOverrides(result: Map<string, { type: string; fields: Record<string, unknown> }>): void {
-  for (const [key, value] of Object.entries(process.env)) {
-    if (!value) continue
-    const m = key.match(/^PLATFORM_RESOURCE_([A-Z][A-Z0-9_]*)_([A-Z][A-Z0-9_]*)$/)
-    if (!m) continue
-    const name = m[1].toLowerCase()
-    const field = m[2].toLowerCase()
-    const entry = result.get(name)
-    if (entry) {
-      entry.fields[field] = value
-    } else {
-      // ENV-only resource with no YAML or DB entry — create a minimal record.
-      result.set(name, { type: "Unknown", fields: { [field]: value } })
-    }
-  }
-}
-
-// ─── Main: build ctx.resource ─────────────────────────────────────────────────
-
-/** Merges all three tiers and returns the ctx.resource namespace for runPipeline. */
-export function buildResourceCtx(db: Database): Record<string, Record<string, unknown>> {
-  // Tier 3 (lowest): YAML files
-  const merged = loadResourceYamls()
-
-  // Tier 2: DB overrides YAML field-by-field
-  for (const [name, dbEntry] of loadResourcesFromDb(db)) {
-    const existing = merged.get(name)
-    if (existing) {
-      existing.fields = { ...existing.fields, ...dbEntry.fields }
-    } else {
-      merged.set(name, dbEntry)
-    }
-  }
-
-  // Tier 1 (highest): ENV-VAR overrides
-  applyEnvOverrides(merged)
-
-  // Flatten to Record<name, fields> and warn on exposed sensitive fields
-  const out: Record<string, Record<string, unknown>> = {}
-  for (const [name, { type, fields }] of merged) {
-    warnSensitive(name, type, fields)
-    out[name] = fields
-  }
-  return out
-}
-
-// ─── DB CRUD (used by API routes) ─────────────────────────────────────────────
-
 export interface ResourceRecord {
   id: number
   name: string
@@ -307,9 +144,12 @@ export function getResource(db: Database, name: string): ResourceRecord | null {
   ).get(name) as ResourceRow | undefined
   if (!row) return null
   return {
-    id: row.id,
+    id: row.id,
+    name: row.name,
+    type: row.type,
     fields: JSON.parse(row.fields_json) as Record<string, unknown>,
-    created_at: row.created_at,
+    created_at: row.created_at,
+    updated_at: row.updated_at,
   }
 }

@@ -338,13 +178,10 @@ export function patchResource(
   const existing = getResource(db, name)
   if (!existing) return null
   const merged = { ...existing.fields, ...fields }
-  db.
-    "UPDATE resources SET fields_json = ?, updated_at = datetime('now') WHERE name = ?",
-  ).run(JSON.stringify(merged), name)
-  return getResource(db, name)!
+  return upsertResource(db, name, existing.type, merged)
 }

 export function deleteResource(db: Database, name: string): boolean {
-  const
-  return
+  const changed = db.prepare("DELETE FROM resources WHERE name = ?").run(name).changes
+  return changed > 0
 }
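
The net effect on the admin CRUD surface, sketched under two assumptions: patchResource keeps a (db, name, fields) signature, and upsertResource is the existing helper it now delegates to. The DB path is illustrative:

    import { Database } from "bun:sqlite"
    import { patchResource, deleteResource } from "./resources.ts"

    const db = new Database("/srv/toist/runtime.db")

    // Merges field-by-field, then routes through upsertResource so updates
    // share one code path (and one updated_at refresh) with creates:
    const patched = patchResource(db, "anthropic", { model: "claude-sonnet-4-6" })
    // → null when no resource named "anthropic" exists

    deleteResource(db, "anthropic")   // true only if a row was actually deleted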

package/src/run-events.ts
ADDED
@@ -0,0 +1,42 @@
+import type { ToistEvent } from "@toist/core"
+
+export interface RunEventBroker {
+  emit(event: ToistEvent): Promise<void>
+  finish(): void
+  subscribe(): AsyncIterable<ToistEvent>
+}
+
+export function createRunEventBroker(): RunEventBroker {
+  const events: ToistEvent[] = []
+  let finished = false
+  const waiters = new Set<() => void>()
+
+  return {
+    async emit(event) {
+      events.push(event)
+      for (const notify of waiters) notify()
+      waiters.clear()
+    },
+    finish() {
+      finished = true
+      for (const notify of waiters) notify()
+      waiters.clear()
+    },
+    subscribe() {
+      let index = 0
+      return {
+        [Symbol.asyncIterator]() {
+          return {
+            async next(): Promise<IteratorResult<ToistEvent>> {
+              while (index >= events.length) {
+                if (finished) return { value: undefined as never, done: true }
+                await new Promise<void>((resolve) => waiters.add(resolve))
+              }
+              return { value: events[index++], done: false }
+            },
+          }
+        },
+      }
+    },
+  }
+}
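
A minimal consumption sketch: the broker buffers every event, so a subscriber that attaches mid-run replays history before going live. The event literal is hypothetical, since ToistEvent's real shape is defined in @toist/core:

    import type { ToistEvent } from "@toist/core"
    import { createRunEventBroker } from "./run-events.ts"

    const broker = createRunEventBroker()

    const consumer = (async () => {
      for await (const event of broker.subscribe()) {
        console.error("event:", event)   // replay-then-live
      }
    })()

    await broker.emit({ type: "run:started" } as unknown as ToistEvent)  // hypothetical payload
    broker.finish()   // wakes all subscribers; loops end once the buffer drains
    await consumer
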
package/src/runtime-db.ts
CHANGED
@@ -1,44 +1,25 @@
 // 2121
-// Platform-internal runtime ledger. Holds:
-// - runs — every pipeline execution (status, payload, result, steps, timing)
-// - logs — log lines emitted from kinds during a run, scoped to run_id
-// - tasks — HITL pending tasks (added in migration 002)
-// - node_outputs — checkpoint-between-nodes memoization (added in migration 002)
-//
-// Schema is owned by the platform and evolved via numbered SQL migrations
-// in ../migrations/. See migrate.ts for the runner.
-//
-// Kinds never receive a handle to this DB — they only see ctx.db (data
-// store) and ctx.cache (capability), so they cannot accidentally touch
-// runtime tables.
-//
-// Migration policy: by default the runner applies pending migrations at
-// startup. RUNTIME_AUTO_MIGRATE=false flips to fail-loud — startup aborts if
-// anything is pending.
-//
-// Lifecycle: openRuntimeDb() is a pure factory. The runner lock is acquired
-// separately by db-handles.initDbs() before this is called, so two callers
-// cannot race on migrations.
-
 import { Database } from "bun:sqlite"
 import { mkdirSync } from "node:fs"
 import { dirname } from "node:path"
 import { runMigrations, pendingMigrations } from "./migrate.ts"
-import { runtimeDbPath } from "./config.ts"

-export function openRuntimeDb(): Database {
-  const path = runtimeDbPath()
+export function openRuntimeDb(path: string, opts: { skipMigrations?: boolean } = {}): Database {
   mkdirSync(dirname(path), { recursive: true })

-  if (
+  if (opts.skipMigrations) {
+    const pending = pendingMigrations(path)
+    if (pending.length > 0) {
+      throw new Error(
+        `[runtime-db] ${pending.length} pending migration(s) with skipMigrations=true: ${pending.join(", ")}`,
+      )
+    }
+  } else if (process.env.RUNTIME_AUTO_MIGRATE === "false") {
     const pending = pendingMigrations(path)
     if (pending.length > 0) {
-
-      `[runtime-db] ${pending.length} pending migration(s); RUNTIME_AUTO_MIGRATE=false
+      throw new Error(
+        `[runtime-db] ${pending.length} pending migration(s); RUNTIME_AUTO_MIGRATE=false. Pending: ${pending.join(", ")}`,
       )
-      console.error(`[runtime-db] pending: ${pending.join(", ")}`)
-      console.error(`[runtime-db] run migrations explicitly, or unset RUNTIME_AUTO_MIGRATE.`)
-      process.exit(1)
     }
   } else {
     runMigrations(path)