@toist/aja 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +69 -0
- package/migrations/001_initial.sql +111 -0
- package/package.json +27 -0
- package/src/cache-db.ts +17 -0
- package/src/cache.ts +67 -0
- package/src/config.ts +129 -0
- package/src/data-db.ts +21 -0
- package/src/db-handles.ts +70 -0
- package/src/hitl.ts +257 -0
- package/src/index.ts +34 -0
- package/src/instance.ts +64 -0
- package/src/kinds/control.ts +26 -0
- package/src/kinds/custom.ts +19 -0
- package/src/kinds/data.ts +30 -0
- package/src/kinds/db.ts +92 -0
- package/src/kinds/hitl.ts +56 -0
- package/src/kinds/http.ts +134 -0
- package/src/kinds/index.ts +66 -0
- package/src/kinds/runs.ts +130 -0
- package/src/kinds/transform.ts +123 -0
- package/src/kinds/types.ts +16 -0
- package/src/lock.ts +64 -0
- package/src/migrate.ts +204 -0
- package/src/pipeline.ts +601 -0
- package/src/resources.ts +350 -0
- package/src/runs.ts +53 -0
- package/src/runtime-db.ts +48 -0
- package/src/server.ts +537 -0
- package/src/startRunner.ts +87 -0
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// Generic HTTP kinds. Together with transform.* and db.*, these let pipelines
|
|
3
|
+
// talk to any external API without writing domain-specific kinds first —
|
|
4
|
+
// critical for `pipelines.create` from natural-language conversations.
|
|
5
|
+
|
|
6
|
+
import type { NodeKind } from "./types.ts"
|
|
7
|
+
|
|
8
|
+
// Parameters for the http.fetch kind. `cache` is what distinguishes it from
// http.post: GETs are assumed safe to serve from cache when a TTL is given.
interface FetchParams {
  url: string // target URL (required by the param manifest below)
  headers?: Record<string, string> // extra request headers, name → value
  cache?: string // TTL string, e.g. "5m" — enables transparent caching
  parse?: "json" | "text" | "auto" // default "auto": parse JSON if Content-Type matches
  timeoutMs?: number // abort the request after this many milliseconds
}
|
|
15
|
+
|
|
16
|
+
async function doFetch(
|
|
17
|
+
url: string,
|
|
18
|
+
init: RequestInit,
|
|
19
|
+
parse: FetchParams["parse"],
|
|
20
|
+
timeoutMs?: number,
|
|
21
|
+
): Promise<{ status: number; headers: Record<string, string>; body: unknown }> {
|
|
22
|
+
const controller = timeoutMs != null ? new AbortController() : undefined
|
|
23
|
+
const timer = controller && timeoutMs != null
|
|
24
|
+
? setTimeout(() => controller.abort(), timeoutMs)
|
|
25
|
+
: undefined
|
|
26
|
+
try {
|
|
27
|
+
const r = await fetch(url, { ...init, signal: controller?.signal })
|
|
28
|
+
const headers: Record<string, string> = {}
|
|
29
|
+
r.headers.forEach((v, k) => { headers[k] = v })
|
|
30
|
+
|
|
31
|
+
const ct = r.headers.get("content-type") ?? ""
|
|
32
|
+
const wantJson = parse === "json" || (parse === "auto" && ct.includes("application/json"))
|
|
33
|
+
const body = wantJson ? await r.json() : await r.text()
|
|
34
|
+
return { status: r.status, headers, body }
|
|
35
|
+
} finally {
|
|
36
|
+
if (timer) clearTimeout(timer)
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export const httpFetch: NodeKind<FetchParams, Record<string, never>> = {
|
|
41
|
+
id: "http.fetch",
|
|
42
|
+
category: "io",
|
|
43
|
+
label: "HTTP GET",
|
|
44
|
+
description: "Fetch a URL and return the parsed body. Optionally cached transparently with TTL.",
|
|
45
|
+
icon: "Globe",
|
|
46
|
+
params: {
|
|
47
|
+
url: { type: "string", label: "URL", required: true, placeholder: "https://api.example.com/v1/items" },
|
|
48
|
+
headers: { type: "json", label: "Headers", description: "Object of header name → value" },
|
|
49
|
+
cache: { type: "string", label: "Cache TTL", description: "e.g. '5m', '1h'. Omit to disable caching.", placeholder: "5m" },
|
|
50
|
+
parse: { type: "select", label: "Parse mode", default: "auto",
|
|
51
|
+
options: [
|
|
52
|
+
{ value: "auto", label: "Auto (by Content-Type)" },
|
|
53
|
+
{ value: "json", label: "Always JSON" },
|
|
54
|
+
{ value: "text", label: "Plain text" },
|
|
55
|
+
] },
|
|
56
|
+
timeoutMs: { type: "number", label: "Timeout (ms)", default: 10000 },
|
|
57
|
+
},
|
|
58
|
+
inputs: {},
|
|
59
|
+
outputs: {
|
|
60
|
+
status: { type: "number" },
|
|
61
|
+
headers: { type: "object" },
|
|
62
|
+
body: { type: "any" },
|
|
63
|
+
},
|
|
64
|
+
run: async (ctx, params) => {
|
|
65
|
+
const parseMode = params.parse ?? "auto"
|
|
66
|
+
|
|
67
|
+
if (params.cache) {
|
|
68
|
+
const key = ctx.cache.key("http.fetch", params.url, params.headers ?? {}, parseMode)
|
|
69
|
+
const hit = ctx.cache.get<unknown>(key)
|
|
70
|
+
if (hit) {
|
|
71
|
+
ctx.log("info", `cache hit ${params.url}`)
|
|
72
|
+
return hit
|
|
73
|
+
}
|
|
74
|
+
const result = await doFetch(params.url, {
|
|
75
|
+
method: "GET",
|
|
76
|
+
headers: params.headers,
|
|
77
|
+
}, parseMode, params.timeoutMs)
|
|
78
|
+
ctx.cache.set(key, result, { ttl: params.cache })
|
|
79
|
+
return result
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
return doFetch(params.url, {
|
|
83
|
+
method: "GET",
|
|
84
|
+
headers: params.headers,
|
|
85
|
+
}, parseMode, params.timeoutMs)
|
|
86
|
+
},
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Parameters for the http.post kind. Deliberately has no `cache` field:
// POSTs are side-effecting and are never served from cache.
interface PostParams {
  url: string // target URL (required by the param manifest below)
  headers?: Record<string, string> // extra request headers; may override Content-Type
  parse?: "json" | "text" | "auto" // response parse mode, same semantics as http.fetch
  timeoutMs?: number // abort the request after this many milliseconds
}
|
|
95
|
+
|
|
96
|
+
// Wired input for http.post: the request body, JSON-serialised before sending.
interface PostInput {
  body: unknown
}
|
|
99
|
+
|
|
100
|
+
export const httpPost: NodeKind<PostParams, PostInput> = {
|
|
101
|
+
id: "http.post",
|
|
102
|
+
category: "io",
|
|
103
|
+
label: "HTTP POST",
|
|
104
|
+
description: "POST a JSON body to a URL and return the parsed response. Side-effecting — requires confirm in ad-hoc invokes.",
|
|
105
|
+
icon: "Send",
|
|
106
|
+
sideEffect: true,
|
|
107
|
+
params: {
|
|
108
|
+
url: { type: "string", label: "URL", required: true, placeholder: "https://api.example.com/v1/items" },
|
|
109
|
+
headers: { type: "json", label: "Headers" },
|
|
110
|
+
parse: { type: "select", label: "Parse mode", default: "auto",
|
|
111
|
+
options: [
|
|
112
|
+
{ value: "auto", label: "Auto (by Content-Type)" },
|
|
113
|
+
{ value: "json", label: "Always JSON" },
|
|
114
|
+
{ value: "text", label: "Plain text" },
|
|
115
|
+
] },
|
|
116
|
+
timeoutMs: { type: "number", label: "Timeout (ms)", default: 10000 },
|
|
117
|
+
},
|
|
118
|
+
inputs: {
|
|
119
|
+
body: { type: "any", required: true, label: "Request body (JSON-serialised)" },
|
|
120
|
+
},
|
|
121
|
+
outputs: {
|
|
122
|
+
status: { type: "number" },
|
|
123
|
+
headers: { type: "object" },
|
|
124
|
+
body: { type: "any" },
|
|
125
|
+
},
|
|
126
|
+
run: async (ctx, params, input) => {
|
|
127
|
+
ctx.log("info", `POST ${params.url}`)
|
|
128
|
+
return doFetch(params.url, {
|
|
129
|
+
method: "POST",
|
|
130
|
+
headers: { "Content-Type": "application/json", ...(params.headers ?? {}) },
|
|
131
|
+
body: JSON.stringify(input.body),
|
|
132
|
+
}, params.parse ?? "auto", params.timeoutMs)
|
|
133
|
+
},
|
|
134
|
+
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
import type { NodeKind, NodeKindManifest } from "./types.ts"
|
|
3
|
+
import { dataJson, dataMerge } from "./data.ts"
|
|
4
|
+
import { transformFilter, transformMap, transformSort, transformAggregate } from "./transform.ts"
|
|
5
|
+
import { dbInsert, dbQuery } from "./db.ts"
|
|
6
|
+
import { httpFetch, httpPost } from "./http.ts"
|
|
7
|
+
import { trigger, sink } from "./control.ts"
|
|
8
|
+
import { humanInput } from "./hitl.ts"
|
|
9
|
+
import { runsLastOutput, runsNodeOutput, pipelineRun } from "./runs.ts"
|
|
10
|
+
|
|
11
|
+
const builtins: NodeKind<any, any>[] = [
|
|
12
|
+
trigger,
|
|
13
|
+
sink,
|
|
14
|
+
dataJson,
|
|
15
|
+
dataMerge,
|
|
16
|
+
transformFilter,
|
|
17
|
+
transformMap,
|
|
18
|
+
transformSort,
|
|
19
|
+
transformAggregate,
|
|
20
|
+
dbInsert,
|
|
21
|
+
dbQuery,
|
|
22
|
+
httpFetch,
|
|
23
|
+
httpPost,
|
|
24
|
+
humanInput,
|
|
25
|
+
runsLastOutput,
|
|
26
|
+
runsNodeOutput,
|
|
27
|
+
pipelineRun,
|
|
28
|
+
]
|
|
29
|
+
|
|
30
|
+
export const registry: Map<string, NodeKind<any, any>> = new Map(builtins.map((k) => [k.id, k]))
|
|
31
|
+
|
|
32
|
+
/**
|
|
33
|
+
* Register one or more custom domain kinds. Domain projects edit
|
|
34
|
+
* `runner/src/kinds/custom.ts` (created by `platform init`) and call
|
|
35
|
+
* `register(...)` there with their own kinds. Builtin kinds and domain kinds
|
|
36
|
+
* live in the same registry — `getKind(id)` resolves both transparently.
|
|
37
|
+
*
|
|
38
|
+
* Re-registering an existing id replaces the previous entry; useful for hot
|
|
39
|
+
* iteration but warn loudly so accidental shadows are visible.
|
|
40
|
+
*/
|
|
41
|
+
export function register(...kinds: NodeKind<any, any>[]): void {
|
|
42
|
+
for (const k of kinds) {
|
|
43
|
+
if (registry.has(k.id) && !builtins.find((b) => b.id === k.id)) {
|
|
44
|
+
console.warn(`[kinds] re-registering "${k.id}" — previous registration replaced`)
|
|
45
|
+
}
|
|
46
|
+
registry.set(k.id, k)
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
export function getKind(id: string): NodeKind<any, any> | undefined {
|
|
51
|
+
return registry.get(id)
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
export function manifest(): NodeKindManifest[] {
|
|
55
|
+
return [...registry.values()].map(({ run: _e, ...rest }) => rest)
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
export type { NodeKind, NodeKindManifest, ParamDef, PortDef, ExecContext, PlatformCtx } from "./types.ts"
|
|
59
|
+
|
|
60
|
+
// Side-effect import: domain kinds register themselves when this module loads.
|
|
61
|
+
// Dynamic import + top-level await defers custom.ts evaluation until AFTER
|
|
62
|
+
// `registry` and `register` are initialised — a static `import "./custom.ts"`
|
|
63
|
+
// gets hoisted ahead of the const init, putting `registry` in the temporal
|
|
64
|
+
// dead zone when custom.ts calls register(). The dynamic form runs at
|
|
65
|
+
// statement position, after init, breaking the cycle.
|
|
66
|
+
await import("./custom.ts")
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// Cross-pipeline composition kinds. Per pipeline-spec.md §12 (M2, M3, M4):
|
|
3
|
+
//
|
|
4
|
+
// pipeline.run — invoke another pipeline as a sub-run (M2)
|
|
5
|
+
// runs.lastOutput — final output of the most recent run of a pipeline (M3)
|
|
6
|
+
// runs.nodeOutput — output of a specific node from the most recent run (M4)
|
|
7
|
+
//
|
|
8
|
+
// Read kinds go through ctx.runs (RunStore capability); the sub-run kind
|
|
9
|
+
// goes through ctx.subRun (SubRun capability). Neither sees the runtime
|
|
10
|
+
// DB directly — the surface stays narrow and immutable.
|
|
11
|
+
|
|
12
|
+
import type { NodeKind } from "./types.ts"
|
|
13
|
+
|
|
14
|
+
// runs.lastOutput (M3) — read another pipeline's most recent final output.
// Goes through the ctx.runs capability only; never touches the runtime DB.
export const runsLastOutput: NodeKind<
  { pipeline: string; status?: string },
  Record<string, never>
> = {
  id: "runs.lastOutput",
  category: "io",
  label: "Last run output",
  description:
    "Read the final output of the most recent run of another pipeline. " +
    "Defaults to runs with status \"done\"; pass status to read failed/suspended runs " +
    "(useful for monitoring or replay flows). Returns null when no matching run exists.",
  icon: "History",
  params: {
    pipeline: { type: "string", label: "Pipeline", required: true, placeholder: "callpool" },
    // Run state to match. Defaulted both here (UI) and in run() below, so the
    // behavior is the same whether the param was set via UI or omitted in JSON.
    status: { type: "select", label: "Status", default: "done",
      options: [
        { value: "done", label: "done" },
        { value: "failed", label: "failed" },
        { value: "suspended", label: "suspended" },
      ] },
  },
  inputs: {},
  outputs: { value: { type: "any" } },
  // NOTE(review): the description promises null when no matching run exists —
  // presumably ctx.runs.lastOutput resolves to null then; confirm in RunStore.
  run: (ctx, params) => ctx.runs.lastOutput(params.pipeline, { status: params.status ?? "done" }),
}
|
|
39
|
+
|
|
40
|
+
// runs.nodeOutput (M4) — read one node's output from the most recent run of
// another pipeline, via the same node_outputs store that backs HITL resume.
export const runsNodeOutput: NodeKind<
  { pipeline: string; node: string; status?: string },
  Record<string, never>
> = {
  id: "runs.nodeOutput",
  category: "io",
  label: "Last run node output",
  description:
    "Read the output of a specific node from the most recent run of another pipeline. " +
    "Backed by the same node_outputs table that powers HITL resume — useful for " +
    "consuming intermediate results without rerunning upstream work.",
  icon: "GitBranch",
  params: {
    pipeline: { type: "string", label: "Pipeline", required: true, placeholder: "callpool" },
    // Node id within the target pipeline whose output should be read.
    node: { type: "string", label: "Node id", required: true, placeholder: "buildings" },
    // Run state to match; defaulted both here (UI) and in run() below.
    status: { type: "select", label: "Status", default: "done",
      options: [
        { value: "done", label: "done" },
        { value: "failed", label: "failed" },
        { value: "suspended", label: "suspended" },
      ] },
  },
  inputs: {},
  outputs: { value: { type: "any" } },
  run: (ctx, params) => ctx.runs.nodeOutput(params.pipeline, params.node, { status: params.status ?? "done" }),
}
|
|
66
|
+
|
|
67
|
+
// ─── pipeline.run — sub-pipeline invocation (M2) ─────────────────────────────
|
|
68
|
+
|
|
69
|
+
export const pipelineRun: NodeKind<
|
|
70
|
+
{ pipeline: string; onFailure?: "propagate" | "capture" },
|
|
71
|
+
{ payload: Record<string, unknown> }
|
|
72
|
+
> = {
|
|
73
|
+
id: "pipeline.run",
|
|
74
|
+
category: "control",
|
|
75
|
+
label: "Sub-pipeline",
|
|
76
|
+
description:
|
|
77
|
+
"Invoke another pipeline as a sub-run. Each sub-run gets its own run-id, " +
|
|
78
|
+
"its own node_outputs, and its own lifecycle. The output flows back as the " +
|
|
79
|
+
"kind's value on success. v1 ships two onFailure modes: `propagate` (default — " +
|
|
80
|
+
"sub-run failure raises in the parent) and `capture` (failure becomes the " +
|
|
81
|
+
"node's output as { status, error, partialResults, runId } so downstream can " +
|
|
82
|
+
"branch on shape). `suspend` lands with the recovery model implementation.",
|
|
83
|
+
icon: "Workflow",
|
|
84
|
+
sideEffect: true,
|
|
85
|
+
params: {
|
|
86
|
+
pipeline: { type: "string", label: "Pipeline id", required: true, placeholder: "callpool" },
|
|
87
|
+
onFailure: { type: "select", label: "On failure", default: "propagate",
|
|
88
|
+
options: [
|
|
89
|
+
{ value: "propagate", label: "propagate (parent's onError takes over)" },
|
|
90
|
+
{ value: "capture", label: "capture (failure becomes node output)" },
|
|
91
|
+
] },
|
|
92
|
+
},
|
|
93
|
+
inputs: { payload: { type: "object", label: "Payload", required: true } },
|
|
94
|
+
outputs: { value: { type: "any" } },
|
|
95
|
+
run: async (ctx, params, input) => {
|
|
96
|
+
const onFailure = params.onFailure ?? "propagate"
|
|
97
|
+
const outcome = await ctx.subRun(params.pipeline, input.payload)
|
|
98
|
+
|
|
99
|
+
if (outcome.status === "done") return outcome.output
|
|
100
|
+
|
|
101
|
+
if (outcome.status === "suspended") {
|
|
102
|
+
// Sub-run is parked on a HITL task. Surface the suspension as
|
|
103
|
+
// structured output regardless of onFailure — the sub-run isn't
|
|
104
|
+
// failed (there's no error), it's waiting. Cross-nesting resume is
|
|
105
|
+
// §16.1 work; for v1 this signals "nothing to consume yet."
|
|
106
|
+
return {
|
|
107
|
+
__subRun: {
|
|
108
|
+
status: "suspended",
|
|
109
|
+
runId: outcome.runId,
|
|
110
|
+
suspendedAt: outcome.suspendedAt,
|
|
111
|
+
partialResults: outcome.partialResults,
|
|
112
|
+
},
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
// status === "failed"
|
|
117
|
+
if (onFailure === "capture") {
|
|
118
|
+
return {
|
|
119
|
+
__subRun: {
|
|
120
|
+
status: "failed",
|
|
121
|
+
runId: outcome.runId,
|
|
122
|
+
error: outcome.error,
|
|
123
|
+
partialResults: outcome.partialResults,
|
|
124
|
+
},
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
// propagate (default): re-throw so the parent's onError abort policy applies
|
|
128
|
+
throw new Error(`sub-run "${params.pipeline}" (run ${outcome.runId}) failed: ${outcome.error}`)
|
|
129
|
+
},
|
|
130
|
+
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
import type { NodeKind } from "./types.ts"
|
|
3
|
+
|
|
4
|
+
// Per pipeline-spec v1: `{ expr: "..." }` is evaluated by the runner before
|
|
5
|
+
// the kind sees it, so a kind that needs a callable receives the function
|
|
6
|
+
// directly. The pipeline author writes:
|
|
7
|
+
//
|
|
8
|
+
// condition: { expr: "(item) => item.score >= 0.5" }
|
|
9
|
+
// expression: { expr: "(item) => ({ ...item, doubled: item.x * 2 })" }
|
|
10
|
+
//
|
|
11
|
+
// The discriminator resolves these to actual function values; the kind just
|
|
12
|
+
// invokes them per array item.
|
|
13
|
+
|
|
14
|
+
type ItemFn<R> = (item: unknown, input: unknown) => R
|
|
15
|
+
|
|
16
|
+
function asFn<R>(v: unknown, kindId: string, paramName: string): ItemFn<R> {
|
|
17
|
+
if (typeof v !== "function") {
|
|
18
|
+
throw new Error(
|
|
19
|
+
`${kindId}: params.${paramName} must be a function ` +
|
|
20
|
+
`(authored as { expr: "(item) => ..." }); got ${v === null ? "null" : typeof v}`,
|
|
21
|
+
)
|
|
22
|
+
}
|
|
23
|
+
return v as ItemFn<R>
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export const transformFilter: NodeKind<{ condition: unknown }, { items: unknown[] }> = {
|
|
27
|
+
id: "transform.filter",
|
|
28
|
+
category: "transform",
|
|
29
|
+
label: "Filter",
|
|
30
|
+
description: "Keep array items matching a boolean predicate.",
|
|
31
|
+
icon: "Filter",
|
|
32
|
+
params: {
|
|
33
|
+
condition: { type: "expression", label: "Condition", required: true, placeholder: "(item) => item.score >= 0.5" },
|
|
34
|
+
},
|
|
35
|
+
inputs: { items: { type: "array", required: true } },
|
|
36
|
+
outputs: { items: { type: "array" } },
|
|
37
|
+
run: (_ctx, params, input) => {
|
|
38
|
+
const items = Array.isArray(input.items) ? input.items : []
|
|
39
|
+
const fn = asFn<boolean>(params.condition, "transform.filter", "condition")
|
|
40
|
+
return items.filter((item) => !!fn(item, input))
|
|
41
|
+
},
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
export const transformMap: NodeKind<{ expression: unknown }, { items: unknown[] }> = {
|
|
45
|
+
id: "transform.map",
|
|
46
|
+
category: "transform",
|
|
47
|
+
label: "Map",
|
|
48
|
+
description: "Apply a function to each array item, producing a new item.",
|
|
49
|
+
icon: "MoveRight",
|
|
50
|
+
params: {
|
|
51
|
+
expression: { type: "expression", label: "Expression", required: true, placeholder: "(item) => ({ ...item, score: item.signals / 20 })" },
|
|
52
|
+
},
|
|
53
|
+
inputs: { items: { type: "array", required: true } },
|
|
54
|
+
outputs: { items: { type: "array" } },
|
|
55
|
+
run: (_ctx, params, input) => {
|
|
56
|
+
const items = Array.isArray(input.items) ? input.items : []
|
|
57
|
+
const fn = asFn<unknown>(params.expression, "transform.map", "expression")
|
|
58
|
+
return items.map((item) => fn(item, input))
|
|
59
|
+
},
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
export const transformSort: NodeKind<{ by: string; direction?: "asc" | "desc" }, { items: unknown[] }> = {
|
|
63
|
+
id: "transform.sort",
|
|
64
|
+
category: "transform",
|
|
65
|
+
label: "Sort",
|
|
66
|
+
description: "Sort an array by a field, ascending or descending.",
|
|
67
|
+
icon: "ArrowDownAZ",
|
|
68
|
+
params: {
|
|
69
|
+
by: { type: "string", label: "Sort by", required: true, placeholder: "score" },
|
|
70
|
+
direction: { type: "select", label: "Direction", default: "desc",
|
|
71
|
+
options: [{ value: "asc", label: "Ascending" }, { value: "desc", label: "Descending" }] },
|
|
72
|
+
},
|
|
73
|
+
inputs: { items: { type: "array", required: true } },
|
|
74
|
+
outputs: { items: { type: "array" } },
|
|
75
|
+
run: (_ctx, params, input) => {
|
|
76
|
+
const items = Array.isArray(input.items) ? [...input.items] : []
|
|
77
|
+
const dir = params.direction === "asc" ? 1 : -1
|
|
78
|
+
items.sort((a, b) => {
|
|
79
|
+
const av = (a as Record<string, unknown>)[params.by]
|
|
80
|
+
const bv = (b as Record<string, unknown>)[params.by]
|
|
81
|
+
if ((av as number) < (bv as number)) return -1 * dir
|
|
82
|
+
if ((av as number) > (bv as number)) return 1 * dir
|
|
83
|
+
return 0
|
|
84
|
+
})
|
|
85
|
+
return items
|
|
86
|
+
},
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
export const transformAggregate: NodeKind<
|
|
90
|
+
{ op: "count" | "sum" | "avg" | "min" | "max"; key?: string },
|
|
91
|
+
{ items: unknown[] }
|
|
92
|
+
> = {
|
|
93
|
+
id: "transform.aggregate",
|
|
94
|
+
category: "transform",
|
|
95
|
+
label: "Aggregate",
|
|
96
|
+
description: "Reduce an array to a single value (count, sum, avg, min, max).",
|
|
97
|
+
icon: "Sigma",
|
|
98
|
+
params: {
|
|
99
|
+
op: { type: "select", label: "Operation", required: true, default: "count",
|
|
100
|
+
options: [
|
|
101
|
+
{ value: "count", label: "Count" },
|
|
102
|
+
{ value: "sum", label: "Sum" },
|
|
103
|
+
{ value: "avg", label: "Average" },
|
|
104
|
+
{ value: "min", label: "Min" },
|
|
105
|
+
{ value: "max", label: "Max" },
|
|
106
|
+
] },
|
|
107
|
+
key: { type: "string", label: "Numeric key", description: "Field to aggregate (required for sum/avg/min/max)", placeholder: "score" },
|
|
108
|
+
},
|
|
109
|
+
inputs: { items: { type: "array", required: true } },
|
|
110
|
+
outputs: { value: { type: "number" } },
|
|
111
|
+
run: (_ctx, params, input) => {
|
|
112
|
+
const items = Array.isArray(input.items) ? input.items : []
|
|
113
|
+
if (params.op === "count") return items.length
|
|
114
|
+
if (!params.key) throw new Error(`aggregate.${params.op} requires "key"`)
|
|
115
|
+
const nums = items.map((it) => Number((it as Record<string, unknown>)[params.key!])).filter(Number.isFinite)
|
|
116
|
+
if (nums.length === 0) return 0
|
|
117
|
+
if (params.op === "sum") return nums.reduce((a, b) => a + b, 0)
|
|
118
|
+
if (params.op === "avg") return nums.reduce((a, b) => a + b, 0) / nums.length
|
|
119
|
+
if (params.op === "min") return Math.min(...nums)
|
|
120
|
+
if (params.op === "max") return Math.max(...nums)
|
|
121
|
+
return 0
|
|
122
|
+
},
|
|
123
|
+
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// Backwards-compat re-export shim. Kind contract types now live in
|
|
3
|
+
// `@toist/spec`. Domain projects editing `kinds/custom.ts` can keep
|
|
4
|
+
// importing from `./types.ts`; new runner-internal code should import
|
|
5
|
+
// from `@toist/spec` directly.
|
|
6
|
+
|
|
7
|
+
export type {
|
|
8
|
+
ParamDef,
|
|
9
|
+
PortDef,
|
|
10
|
+
Cache,
|
|
11
|
+
HitlSpec,
|
|
12
|
+
PlatformCtx,
|
|
13
|
+
ExecContext,
|
|
14
|
+
NodeKind,
|
|
15
|
+
NodeKindManifest,
|
|
16
|
+
} from "@toist/spec"
|
package/src/lock.ts
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// Single-leader runner lock. Prevents two runner processes from racing on
|
|
3
|
+
// migrations, scheduled cron-wheel firings, or HITL resume dispatch.
|
|
4
|
+
//
|
|
5
|
+
// The lock guards the data/ directory using a sentinel lockfile at
|
|
6
|
+
// data/.lock. proper-lockfile creates a holder directory that includes mtime;
|
|
7
|
+
// stale locks (from a crashed previous process) are detected after `stale` ms
|
|
8
|
+
// and overtaken automatically.
|
|
9
|
+
//
|
|
10
|
+
// Watch-mode reload tolerance: bun --watch SIGTERMs the old process before
|
|
11
|
+
// spawning the new one. The cleanup handlers below release the lock on
|
|
12
|
+
// SIGTERM/SIGINT/exit. Async lock acquisition with retries handles the brief
|
|
13
|
+
// window where the old process has not yet released.
|
|
14
|
+
|
|
15
|
+
import lockfile from "proper-lockfile"
|
|
16
|
+
import { mkdirSync } from "node:fs"
|
|
17
|
+
import { dataDir, lockfilePath } from "./config.ts"
|
|
18
|
+
|
|
19
|
+
// Release callback handed back by proper-lockfile; null when the lock is not
// currently held by this process.
let release: (() => Promise<void>) | null = null

/**
 * Acquire the single-runner lock over the data/ directory, or exit(1) if
 * another live runner holds it. On success, installs exit/SIGTERM/SIGINT
 * handlers that release the lock best-effort on shutdown.
 *
 * NOTE(review): calling this twice in one process would install duplicate
 * signal handlers — presumably it is invoked exactly once at startup; confirm
 * against startRunner.
 */
export async function acquireRunnerLock(): Promise<void> {
  const dir = dataDir()
  const lockPath = lockfilePath()
  // The directory being locked must exist before proper-lockfile can lock it.
  mkdirSync(dir, { recursive: true })

  try {
    release = await lockfile.lock(dir, {
      lockfilePath: lockPath,
      realpath: false,
      // Per the header comment: locks from a crashed process are considered
      // stale after 10s and overtaken.
      stale: 10_000,
      // Retries cover the brief window during bun --watch reloads where the
      // old process has not yet released (see header comment).
      retries: { retries: 5, factor: 2, minTimeout: 100, maxTimeout: 1000 },
    })
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err)
    console.error(`[runner] failed to acquire ${lockPath}: ${msg}`)
    console.error(`[runner] another runner appears live. Stop it first; only one runner per project is allowed.`)
    // Fail-fast: a second runner must never proceed past this point.
    process.exit(1)
  }

  // Idempotent release: clears `release` before invoking so the exit handler
  // and a signal handler can both fire without double-releasing.
  const cleanup = (): void => {
    if (!release) return
    const r = release
    release = null
    // NOTE(review): on the 'exit' event the async release cannot complete —
    // the stale-lock overtake above is the fallback for that path.
    r().catch(() => { /* best-effort */ })
  }

  process.on("exit", cleanup)
  process.on("SIGTERM", () => { cleanup(); process.exit(0) })
  process.on("SIGINT", () => { cleanup(); process.exit(0) })
}
|
|
51
|
+
|
|
52
|
+
/** Release the runner lock explicitly. Used by `startRunner`'s graceful
|
|
53
|
+
* `stop()` path. Idempotent: no-op if the lock is not held. The signal
|
|
54
|
+
* cleanup handlers installed by acquireRunnerLock remain in place. */
|
|
55
|
+
export async function releaseRunnerLock(): Promise<void> {
|
|
56
|
+
if (!release) return
|
|
57
|
+
const r = release
|
|
58
|
+
release = null
|
|
59
|
+
try {
|
|
60
|
+
await r()
|
|
61
|
+
} catch {
|
|
62
|
+
// best-effort — surfacing this would mask the actual stop reason
|
|
63
|
+
}
|
|
64
|
+
}
|