@aigne/ash 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. package/DESIGN.md +41 -0
  2. package/dist/ai-dev-loop/ash-run-result.cjs +12 -0
  3. package/dist/ai-dev-loop/ash-run-result.d.cts +28 -0
  4. package/dist/ai-dev-loop/ash-run-result.d.cts.map +1 -0
  5. package/dist/ai-dev-loop/ash-run-result.d.mts +28 -0
  6. package/dist/ai-dev-loop/ash-run-result.d.mts.map +1 -0
  7. package/dist/ai-dev-loop/ash-run-result.mjs +11 -0
  8. package/dist/ai-dev-loop/ash-run-result.mjs.map +1 -0
  9. package/dist/ai-dev-loop/ash-typed-error.cjs +51 -0
  10. package/dist/ai-dev-loop/ash-typed-error.d.cts +54 -0
  11. package/dist/ai-dev-loop/ash-typed-error.d.cts.map +1 -0
  12. package/dist/ai-dev-loop/ash-typed-error.d.mts +54 -0
  13. package/dist/ai-dev-loop/ash-typed-error.d.mts.map +1 -0
  14. package/dist/ai-dev-loop/ash-typed-error.mjs +50 -0
  15. package/dist/ai-dev-loop/ash-typed-error.mjs.map +1 -0
  16. package/dist/ai-dev-loop/ash-validate.cjs +27 -0
  17. package/dist/ai-dev-loop/ash-validate.d.cts +7 -0
  18. package/dist/ai-dev-loop/ash-validate.d.cts.map +1 -0
  19. package/dist/ai-dev-loop/ash-validate.d.mts +7 -0
  20. package/dist/ai-dev-loop/ash-validate.d.mts.map +1 -0
  21. package/dist/ai-dev-loop/ash-validate.mjs +28 -0
  22. package/dist/ai-dev-loop/ash-validate.mjs.map +1 -0
  23. package/dist/ai-dev-loop/dev-loop.cjs +134 -0
  24. package/dist/ai-dev-loop/dev-loop.d.cts +28 -0
  25. package/dist/ai-dev-loop/dev-loop.d.cts.map +1 -0
  26. package/dist/ai-dev-loop/dev-loop.d.mts +28 -0
  27. package/dist/ai-dev-loop/dev-loop.d.mts.map +1 -0
  28. package/dist/ai-dev-loop/dev-loop.mjs +135 -0
  29. package/dist/ai-dev-loop/dev-loop.mjs.map +1 -0
  30. package/dist/ai-dev-loop/index.cjs +24 -0
  31. package/dist/ai-dev-loop/index.d.cts +9 -0
  32. package/dist/ai-dev-loop/index.d.mts +9 -0
  33. package/dist/ai-dev-loop/index.mjs +10 -0
  34. package/dist/ai-dev-loop/live-mode.cjs +17 -0
  35. package/dist/ai-dev-loop/live-mode.d.cts +24 -0
  36. package/dist/ai-dev-loop/live-mode.d.cts.map +1 -0
  37. package/dist/ai-dev-loop/live-mode.d.mts +24 -0
  38. package/dist/ai-dev-loop/live-mode.d.mts.map +1 -0
  39. package/dist/ai-dev-loop/live-mode.mjs +17 -0
  40. package/dist/ai-dev-loop/live-mode.mjs.map +1 -0
  41. package/dist/ai-dev-loop/meta-tools.cjs +123 -0
  42. package/dist/ai-dev-loop/meta-tools.d.cts +24 -0
  43. package/dist/ai-dev-loop/meta-tools.d.cts.map +1 -0
  44. package/dist/ai-dev-loop/meta-tools.d.mts +24 -0
  45. package/dist/ai-dev-loop/meta-tools.d.mts.map +1 -0
  46. package/dist/ai-dev-loop/meta-tools.mjs +120 -0
  47. package/dist/ai-dev-loop/meta-tools.mjs.map +1 -0
  48. package/dist/ai-dev-loop/structured-runner.cjs +154 -0
  49. package/dist/ai-dev-loop/structured-runner.d.cts +12 -0
  50. package/dist/ai-dev-loop/structured-runner.d.cts.map +1 -0
  51. package/dist/ai-dev-loop/structured-runner.d.mts +12 -0
  52. package/dist/ai-dev-loop/structured-runner.d.mts.map +1 -0
  53. package/dist/ai-dev-loop/structured-runner.mjs +155 -0
  54. package/dist/ai-dev-loop/structured-runner.mjs.map +1 -0
  55. package/dist/ai-dev-loop/system-prompt.cjs +55 -0
  56. package/dist/ai-dev-loop/system-prompt.d.cts +20 -0
  57. package/dist/ai-dev-loop/system-prompt.d.cts.map +1 -0
  58. package/dist/ai-dev-loop/system-prompt.d.mts +20 -0
  59. package/dist/ai-dev-loop/system-prompt.d.mts.map +1 -0
  60. package/dist/ai-dev-loop/system-prompt.mjs +54 -0
  61. package/dist/ai-dev-loop/system-prompt.mjs.map +1 -0
  62. package/dist/ast.d.cts +140 -0
  63. package/dist/ast.d.cts.map +1 -0
  64. package/dist/ast.d.mts +140 -0
  65. package/dist/ast.d.mts.map +1 -0
  66. package/dist/compiler.cjs +802 -0
  67. package/dist/compiler.d.cts +103 -0
  68. package/dist/compiler.d.cts.map +1 -0
  69. package/dist/compiler.d.mts +103 -0
  70. package/dist/compiler.d.mts.map +1 -0
  71. package/dist/compiler.mjs +802 -0
  72. package/dist/compiler.mjs.map +1 -0
  73. package/dist/index.cjs +14 -0
  74. package/dist/index.d.cts +7 -0
  75. package/dist/index.d.mts +7 -0
  76. package/dist/index.mjs +7 -0
  77. package/dist/lexer.cjs +451 -0
  78. package/dist/lexer.d.cts +14 -0
  79. package/dist/lexer.d.cts.map +1 -0
  80. package/dist/lexer.d.mts +14 -0
  81. package/dist/lexer.d.mts.map +1 -0
  82. package/dist/lexer.mjs +451 -0
  83. package/dist/lexer.mjs.map +1 -0
  84. package/dist/parser.cjs +734 -0
  85. package/dist/parser.d.cts +40 -0
  86. package/dist/parser.d.cts.map +1 -0
  87. package/dist/parser.d.mts +40 -0
  88. package/dist/parser.d.mts.map +1 -0
  89. package/dist/parser.mjs +734 -0
  90. package/dist/parser.mjs.map +1 -0
  91. package/dist/reference.cjs +130 -0
  92. package/dist/reference.d.cts +11 -0
  93. package/dist/reference.d.cts.map +1 -0
  94. package/dist/reference.d.mts +11 -0
  95. package/dist/reference.d.mts.map +1 -0
  96. package/dist/reference.mjs +130 -0
  97. package/dist/reference.mjs.map +1 -0
  98. package/dist/template.cjs +85 -0
  99. package/dist/template.mjs +84 -0
  100. package/dist/template.mjs.map +1 -0
  101. package/dist/type-checker.cjs +582 -0
  102. package/dist/type-checker.d.cts +31 -0
  103. package/dist/type-checker.d.cts.map +1 -0
  104. package/dist/type-checker.d.mts +31 -0
  105. package/dist/type-checker.d.mts.map +1 -0
  106. package/dist/type-checker.mjs +573 -0
  107. package/dist/type-checker.mjs.map +1 -0
  108. package/package.json +29 -0
  109. package/src/ai-dev-loop/ash-run-result.test.ts +113 -0
  110. package/src/ai-dev-loop/ash-run-result.ts +46 -0
  111. package/src/ai-dev-loop/ash-typed-error.test.ts +136 -0
  112. package/src/ai-dev-loop/ash-typed-error.ts +50 -0
  113. package/src/ai-dev-loop/ash-validate.test.ts +54 -0
  114. package/src/ai-dev-loop/ash-validate.ts +34 -0
  115. package/src/ai-dev-loop/dev-loop.test.ts +364 -0
  116. package/src/ai-dev-loop/dev-loop.ts +156 -0
  117. package/src/ai-dev-loop/dry-run.test.ts +107 -0
  118. package/src/ai-dev-loop/e2e-multi-fix.test.ts +473 -0
  119. package/src/ai-dev-loop/e2e.test.ts +324 -0
  120. package/src/ai-dev-loop/index.ts +15 -0
  121. package/src/ai-dev-loop/invariants.test.ts +253 -0
  122. package/src/ai-dev-loop/live-mode.test.ts +63 -0
  123. package/src/ai-dev-loop/live-mode.ts +33 -0
  124. package/src/ai-dev-loop/meta-tools.test.ts +120 -0
  125. package/src/ai-dev-loop/meta-tools.ts +142 -0
  126. package/src/ai-dev-loop/structured-runner.test.ts +159 -0
  127. package/src/ai-dev-loop/structured-runner.ts +209 -0
  128. package/src/ai-dev-loop/system-prompt.test.ts +102 -0
  129. package/src/ai-dev-loop/system-prompt.ts +81 -0
  130. package/src/ast.ts +186 -0
  131. package/src/compiler.test.ts +2933 -0
  132. package/src/compiler.ts +1103 -0
  133. package/src/e2e.test.ts +552 -0
  134. package/src/index.ts +16 -0
  135. package/src/lexer.test.ts +538 -0
  136. package/src/lexer.ts +222 -0
  137. package/src/parser.test.ts +1024 -0
  138. package/src/parser.ts +835 -0
  139. package/src/reference.test.ts +166 -0
  140. package/src/reference.ts +125 -0
  141. package/src/template.test.ts +210 -0
  142. package/src/template.ts +139 -0
  143. package/src/type-checker.test.ts +1494 -0
  144. package/src/type-checker.ts +785 -0
  145. package/tsconfig.json +9 -0
  146. package/tsdown.config.ts +12 -0
@@ -0,0 +1,1103 @@
1
+ import type { Program, JobDeclaration, PipelineStage, WhereClause, FanoutExpression, OutputExpression, TopLevelStatement, QueryCondition, LetStatement, GroupByExpression, ActionExpression, Expression, MapExpression, RouteExpression, LookupExpression, ParamDeclaration, TriggerDeclaration } from "./ast.js";
2
+ import { AshLexer } from "./lexer.js";
3
+ import { AshParser } from "./parser.js";
4
+ import { checkPipelineTypes, checkProhibitedPatterns, checkAnnotations, checkJobCaps, typeErrorsToDiagnostics, compileErrorsToDiagnostics, annotationErrorsToDiagnostics, parseSyntaxError, parseCaps, hasCapFor } from "./type-checker.js";
5
+ import type { AshDiagnostic, CapEntry } from "./type-checker.js";
6
+ import { resolveActionParams, resolveTemplatePath } from "./template.js";
7
+
8
/**
 * Result of compiling ASH source text.
 * Lex/parse failures return diagnostics alone; static-check diagnostics do
 * not prevent `program` from being produced.
 */
export interface CompileResult {
  // Executable program; absent when lexing or parsing failed.
  program?: CompiledProgram;
  // All diagnostics gathered during lexing, parsing, and static checks.
  diagnostics: AshDiagnostic[];
  // FNV-1a hash of the source text. Optional in the type, though
  // compileSource always populates it.
  sourceHash?: string;
}
13
+
14
+ function fnv1aHash(str: string): string {
15
+ let hash = 2166136261;
16
+ for (let i = 0; i < str.length; i++) {
17
+ hash ^= str.charCodeAt(i);
18
+ hash = Math.imul(hash, 16777619) >>> 0;
19
+ }
20
+ return hash.toString(16).padStart(8, "0");
21
+ }
22
+
23
+ function collectActions(stages: PipelineStage[]): ActionExpression[] {
24
+ const actions: ActionExpression[] = [];
25
+ for (const stage of stages) {
26
+ if (stage.kind === "action") actions.push(stage);
27
+ else if (stage.kind === "fanout") {
28
+ for (const branch of stage.branches) actions.push(...collectActions(branch));
29
+ }
30
+ }
31
+ return actions;
32
+ }
33
+
34
/**
 * Compile ASH source text into an executable program plus diagnostics.
 *
 * Phases, in order: lex → parse → static checks (type checks, annotation
 * checks, and a battery of security lints) → compile. A lex or parse
 * failure short-circuits and returns a single syntax diagnostic; static
 * check diagnostics do NOT stop compilation — the program is still compiled
 * and returned alongside them.
 *
 * @param source Raw ASH program text.
 * @returns CompileResult with `sourceHash` always set (FNV-1a of `source`),
 *   `diagnostics` possibly non-empty, and `program` absent only on
 *   lex/parse failure.
 */
export function compileSource(source: string): CompileResult {
  const diagnostics: AshDiagnostic[] = [];
  const sourceHash = fnv1aHash(source);
  const lexer = new AshLexer();
  const parser = new AshParser();
  const compiler = new AshCompiler();

  // Lex
  let tokens;
  try {
    tokens = lexer.tokenize(source);
  } catch (e: any) {
    return { sourceHash, diagnostics: [parseSyntaxError(e.message ?? String(e))] };
  }

  // Parse
  let ast;
  try {
    ast = parser.parse(tokens);
  } catch (e: any) {
    return { sourceHash, diagnostics: [parseSyntaxError(e.message ?? String(e))] };
  }

  // Type check
  const prohibErrors = checkProhibitedPatterns(ast);
  diagnostics.push(...compileErrorsToDiagnostics(prohibErrors));

  for (const job of ast.jobs) {
    const typeErrors = checkPipelineTypes(job.pipeline);
    diagnostics.push(...typeErrorsToDiagnostics(typeErrors));

    const annErrors = checkAnnotations(job);
    diagnostics.push(...annotationErrorsToDiagnostics(annErrors));

    // @caps: static path security check
    diagnostics.push(...checkJobCaps(job));

    // @readonly check: pipeline must not contain save, publish, or tee
    // (action is also treated as a write here).
    const isReadonly = job.annotations.some(a => a.name === "readonly");
    if (isReadonly) {
      for (const stage of job.pipeline) {
        if (stage.kind === "save" || stage.kind === "publish" || stage.kind === "tee" || stage.kind === "action") {
          diagnostics.push({
            code: "ASH_READONLY_VIOLATION",
            message: `@readonly job '${job.name}' contains '${stage.kind}' which writes data`,
          });
        }
      }
      // Check route targets: @readonly must propagate to route-target jobs.
      // Only one level deep — transitive route chains are not followed here.
      for (const stage of job.pipeline) {
        if (stage.kind === "route") {
          const targetNames = [...stage.branches.map(b => b.targetJob), ...(stage.fallback ? [stage.fallback] : [])];
          for (const targetName of targetNames) {
            const targetJob = ast.jobs.find(j => j.name === targetName);
            if (!targetJob) continue;
            for (const tStage of targetJob.pipeline) {
              if (tStage.kind === "save" || tStage.kind === "publish" || tStage.kind === "tee" || tStage.kind === "action") {
                diagnostics.push({
                  code: "ASH_READONLY_VIOLATION",
                  message: `@readonly job '${job.name}' routes to '${targetName}' which contains '${tStage.kind}' (write operation)`,
                });
                break; // one error per target is enough
              }
            }
          }
        }
      }
    }
  }

  // Let pipeline restrictions: writes in let bindings are never legitimate
  const WRITE_STAGES = new Set(["save", "publish", "tee", "action"]);
  const letStatements = ast.statements.filter((s): s is LetStatement => s.kind === "let" && !!s.pipeline);
  for (const letStmt of letStatements) {
    if (letStmt.pipeline) {
      for (const stage of letStmt.pipeline) {
        if (WRITE_STAGES.has(stage.kind)) {
          diagnostics.push({
            code: "ASH_LET_WRITE",
            message: `let '${letStmt.name}' pipeline contains '${stage.kind}' — write operations in let bindings are not permitted`,
          });
        }
      }
    }
  }

  // Let pipeline pre-approval bypass: let pipelines with find/action execute before @approval checks
  const hasApproval = ast.jobs.some(j => j.annotations.some(a => a.name === "approval"));
  if (hasApproval && letStatements.length > 0) {
    const sideEffectLets = letStatements.filter(s =>
      s.pipeline?.some(stage => stage.kind === "find" || stage.kind === "action"),
    );
    if (sideEffectLets.length > 0) {
      diagnostics.push({
        code: "ASH_LET_PRE_APPROVAL",
        severity: "warning",
        message: `let bindings (${sideEffectLets.map(s => s.name).join(", ")}) execute before @approval checks — side effects in let pipelines bypass approval`,
      });
    }
  }

  // Param write-gate: $param in where clause before write operation = logic bomb risk.
  // Only the simple form `where <field> <op> $param` (stage.right is a "$"-string)
  // is detected here.
  const paramNames = new Set(ast.statements.filter((s): s is ParamDeclaration => s.kind === "param").map(s => s.name));
  if (paramNames.size > 0) {
    for (const job of ast.jobs) {
      const hasWrite = job.pipeline.some(s => WRITE_STAGES.has(s.kind));
      if (!hasWrite) continue;
      for (const stage of job.pipeline) {
        if (stage.kind === "where" && typeof stage.right === "string" && stage.right.startsWith("$")) {
          const varName = stage.right.slice(1);
          if (paramNames.has(varName)) {
            diagnostics.push({
              code: "ASH_PARAM_WRITE_GATE",
              severity: "warning",
              message: `Job '${job.name}': param '$${varName}' gates write operations — callers can override param to change behavior`,
            });
            break; // one warning per job
          }
        }
      }
    }
  }

  // Mixed annotation deception check: @readonly jobs alongside unrestricted write jobs
  const readonlyJobs = ast.jobs.filter(j => j.annotations.some(a => a.name === "readonly"));
  if (readonlyJobs.length > 0 && readonlyJobs.length < ast.jobs.length) {
    const nonReadonly = ast.jobs.filter(j => !j.annotations.some(a => a.name === "readonly"));
    const writingJobs = nonReadonly.filter(j =>
      j.pipeline.some(s => s.kind === "save" || s.kind === "publish" || s.kind === "tee" || s.kind === "action"),
    );
    if (writingJobs.length > 0) {
      diagnostics.push({
        code: "ASH_MIXED_SECURITY",
        severity: "warning",
        message: `Program has mixed security posture: ${readonlyJobs.length} @readonly job(s) alongside ${writingJobs.length} unrestricted writing job(s) (${writingJobs.map(j => j.name).join(", ")}) — this can mask malicious intent`,
      });
    }
  }

  // Action hardening: uncapped, amplification (caps+budget gated), cross-provider (caps+budget gated)
  for (const job of ast.jobs) {
    // collectActions descends into fanout branches, so these may include
    // actions that are not directly in job.pipeline.
    const actions = collectActions(job.pipeline);
    if (actions.length === 0) continue;

    const hasCaps = job.annotations.some(a => a.name === "caps");
    const hasBudget = job.annotations.some(a => a.name === "budget");

    // No @caps + action → error (actions require explicit capability declaration)
    if (!hasCaps) {
      diagnostics.push({
        code: "ASH_UNCAPPED_ACTION",
        message: `Job '${job.name}' contains action stages but has no @caps — actions require explicit capability declaration`,
      });
    }

    // Multiple actions → caps+budget gated (warning when gated, error otherwise)
    if (actions.length > 1) {
      if (hasCaps && hasBudget) {
        diagnostics.push({
          code: "ASH_ACTION_AMPLIFICATION",
          severity: "warning",
          message: `Job '${job.name}' contains ${actions.length} action stages — budget-gated multi-action execution permitted`,
        });
      } else {
        diagnostics.push({
          code: "ASH_ACTION_AMPLIFICATION",
          message: `Job '${job.name}' contains ${actions.length} action stages — multi-action jobs require @caps and @budget to prevent amplification attacks`,
        });
      }
    }

    // Actions targeting multiple providers → caps+budget gated.
    // Provider is taken as the second path segment of absolute action paths.
    const providers = new Set(actions.filter(a => !a.relative).map(a => a.path.split("/")[1]).filter(Boolean));
    if (providers.size > 1) {
      if (hasCaps && hasBudget) {
        diagnostics.push({
          code: "ASH_CROSS_PROVIDER_ACTION",
          severity: "warning",
          message: `Job '${job.name}' has actions targeting ${providers.size} providers (${[...providers].join(", ")}) — budget-gated cross-provider execution permitted`,
        });
      } else {
        diagnostics.push({
          code: "ASH_CROSS_PROVIDER_ACTION",
          message: `Job '${job.name}' has actions targeting ${providers.size} providers (${[...providers].join(", ")}) — cross-provider actions require @caps and @budget`,
        });
      }
    }

    // Relative action checks
    const relativeActions = actions.filter(a => a.relative);
    if (relativeActions.length > 0) {
      // Relative action without upstream find → error.
      // NOTE(review): relativeActions[0] may live inside a fanout branch, in
      // which case findIndex over job.pipeline returns -1 and this check
      // always reports "no upstream find" — confirm intended. The findIndex
      // is also re-evaluated per element of some(); hoisting it would be
      // cheaper.
      const hasFindUpstream = job.pipeline.some((s, idx) =>
        s.kind === "find" && job.pipeline.findIndex(p => p === relativeActions[0]) > idx
      );
      if (!hasFindUpstream) {
        diagnostics.push({
          code: "ASH_RELATIVE_ACTION_NO_FIND",
          message: `Job '${job.name}': relative action '${relativeActions[0].path}' has no upstream 'find' — relative actions require a find stage to provide records with paths`,
        });
      }

      // Relative action = inherent amplification (1 stage, N execs)
      if (hasCaps && hasBudget) {
        diagnostics.push({
          code: "ASH_ACTION_AMPLIFICATION",
          severity: "warning",
          message: `Job '${job.name}': relative action '${relativeActions[0].path}' executes per-record — budget-gated amplification permitted`,
        });
      } else if (!hasCaps || !hasBudget) {
        // NOTE(review): this condition is always true in the else branch
        // (it is the negation of the branch above) — a plain `else` would be
        // equivalent.
        diagnostics.push({
          code: "ASH_ACTION_AMPLIFICATION",
          message: `Job '${job.name}': relative action '${relativeActions[0].path}' executes per-record — requires @caps and @budget to prevent amplification`,
        });
      }
    }
  }

  // Write operation hardening: publish and tee require @caps
  // (only when the job also reads data via find/lookup).
  for (const job of ast.jobs) {
    const hasCaps = job.annotations.some(a => a.name === "caps");
    const hasFind = job.pipeline.some(s => s.kind === "find" || s.kind === "lookup");
    const writeStages = job.pipeline.filter(s => s.kind === "publish" || s.kind === "tee");

    if (writeStages.length > 0 && hasFind && !hasCaps) {
      for (const ws of writeStages) {
        diagnostics.push({
          code: "ASH_UNCAPPED_WRITE",
          message: `Job '${job.name}': '${ws.kind}' combined with data reads requires @caps — write operations on external data must declare capabilities`,
        });
      }
    }
  }

  // Budget ceiling: reject unreasonably high budget values.
  // @budget args are flat (dimension, value) pairs, hence the step of 2.
  const BUDGET_CEILINGS: Record<string, number> = { actions: 100, writes: 100, records: 10000 };
  for (const job of ast.jobs) {
    const budgetAnn = job.annotations.find(a => a.name === "budget");
    if (!budgetAnn) continue;
    for (let i = 0; i < budgetAnn.args.length; i += 2) {
      const dim = budgetAnn.args[i];
      const val = Number(budgetAnn.args[i + 1]);
      if (dim in BUDGET_CEILINGS && val > BUDGET_CEILINGS[dim]) {
        diagnostics.push({
          code: "ASH_BUDGET_EXCESSIVE",
          message: `Job '${job.name}': @budget(${dim} ${val}) exceeds ceiling of ${BUDGET_CEILINGS[dim]} — reduce budget or justify with explicit approval`,
        });
      }
    }
  }

  // Compile
  const program = compiler.compile(ast);
  return { program, diagnostics, sourceHash };
}
289
+
290
/** A single text event delivered to an OutputHandler. */
export interface OutputEvent {
  kind: "text";
  content: string;
  context?: LogContext;
}

/** Sink for program output events; faults in `output` are swallowed by safeOutput. */
export interface OutputHandler {
  output(event: OutputEvent): void;
}

/** Everything a compiled job needs at execution time. */
export interface JobContext {
  // Data-access layer (reads, writes, pub/sub, exec, interactive input).
  world: WorldInterface;
  // Caller-provided capabilities; script-declared @caps are the ceiling.
  caps: Set<string>;
  logger: JobLogger;
  // Optional output sink; absent means output goes only to the logger.
  output?: OutputHandler;
}

/** Abstraction over the external world the program operates on. */
export interface WorldInterface {
  // Read records at a path, optionally filtered; may be sync or async.
  read(path: string, query?: QueryCondition): unknown[] | Promise<unknown[]>;
  write(path: string, data: unknown[]): void;
  publish(topic: string, data: unknown[]): void;
  // Optional: execute an external action at a path with an input stream.
  exec?(path: string, input: unknown[], params?: Record<string, unknown>): Promise<unknown[]>;
  // Optional: interactive input (e.g. prompting a user).
  input?(prompt: string): string | Promise<string>;
}

/** Correlation identifiers attached to log/output events. All optional. */
export interface LogContext {
  programId?: string;
  procId?: string;
  agentId?: string;
  sessionId?: string;
  jobName?: string;
  stageIndex?: number;
}

/** Structured logger used by compiled jobs. */
export interface JobLogger {
  // Stage-level event log (enter/exit/skip etc.).
  log(stage: string, action: string, detail?: unknown): void;
  // Optional leveled emit; failures are silently swallowed (see safeEmit).
  emit?(level: "debug" | "info" | "warn" | "error", message: string, context?: LogContext): void;
}

/** Terminal status and counters for one job execution. */
export interface JobResult {
  status: "ok" | "error" | "partial";
  recordCount: number;
  errors: string[];
}

/** Per-stage execution metrics within a job run. */
export interface StageMetrics {
  name: string;
  index: number;
  inputCount: number;
  outputCount: number;
  durationMs: number;
  // Set when the stage threw; absent on success.
  error?: string;
}

/** JobResult enriched with timing and per-stage metrics. */
export interface JobReport extends JobResult {
  jobName: string;
  totalDurationMs: number;
  stages: StageMetrics[];
}

/** Aggregate report across all jobs of a program run. */
export interface ProgramReport {
  jobs: JobReport[];
  totalDurationMs: number;
}

/** A compiled, executable job unit. */
export interface CompiledJob {
  kind: "job";
  name: string;
  trigger?: TriggerDeclaration;
  execute(ctx: JobContext, initialStream?: unknown[]): Promise<JobResult>;
}

/** A compiled top-level output statement. */
export interface CompiledOutput {
  kind: "output";
  message: string;
  execute(ctx: JobContext): Promise<void>;
}

/** Discriminated union of executable units (tag: `kind`). */
export type CompiledUnit = CompiledJob | CompiledOutput;

/** The fully compiled program: units in source order plus lookup structures. */
export interface CompiledProgram {
  jobs: CompiledJob[];
  units: CompiledUnit[];
  jobMap: Map<string, CompiledJob>;
  params: Map<string, string | number>; // param defaults (can be overridden before execution)
  routeTargets?: Set<string>; // jobs that are ONLY route targets (should not auto-execute as top-level)
}
377
+
378
+ function safeOutput(ctx: JobContext, content: string, logContext?: LogContext): void {
379
+ if (ctx.output) {
380
+ try {
381
+ ctx.output.output({ kind: "text", content, context: logContext });
382
+ } catch {
383
+ // Output fault must not crash pipeline
384
+ }
385
+ }
386
+ safeEmit(ctx.logger, "info", content, logContext);
387
+ }
388
+
389
+ function safeEmit(logger: JobLogger, level: "debug" | "info" | "warn" | "error", message: string, context?: LogContext): void {
390
+ if (logger.emit) {
391
+ try {
392
+ logger.emit(level, message, context);
393
+ } catch {
394
+ // Log fault must not crash pipeline (INVARIANT: emit failure is silent)
395
+ }
396
+ }
397
+ }
398
+
399
+ /**
400
+ * Strict equality with boolean coercion only.
401
+ * No string fallback (prevents type confusion like 0 == "0").
402
+ * Boolean coercion: true/"true" and false/"false" are equal.
403
+ */
404
+ function strictEqual(a: unknown, b: unknown): boolean {
405
+ if (a === b) return true;
406
+ // Boolean coercion: "true" matches true, "false" matches false
407
+ if (typeof a === "boolean" && typeof b === "string") return a === (b === "true");
408
+ if (typeof b === "boolean" && typeof a === "string") return b === (a === "true");
409
+ return false;
410
+ }
411
+
412
+ export class AshCompiler {
413
+ private _currentJobName?: string;
414
+ private _currentStageIndex?: number;
415
+ private _jobMap = new Map<string, CompiledJob>();
416
+
417
+ compile(ast: Program): CompiledProgram {
418
+ // Collect param defaults
419
+ const params = new Map<string, string | number>();
420
+ for (const stmt of ast.statements) {
421
+ if (stmt.kind === "param") {
422
+ params.set(stmt.name, stmt.defaultValue);
423
+ }
424
+ }
425
+
426
+ // Collect static let bindings (params first, then lets override)
427
+ const variables = new Map<string, string | number>(params);
428
+ const runtimeLets: LetStatement[] = [];
429
+ for (const stmt of ast.statements) {
430
+ if (stmt.kind === "let") {
431
+ if (stmt.pipeline) {
432
+ runtimeLets.push(stmt);
433
+ } else {
434
+ variables.set(stmt.name, stmt.value);
435
+ }
436
+ }
437
+ }
438
+
439
+ const units: CompiledUnit[] = ast.statements
440
+ .filter((s): s is Exclude<TopLevelStatement, LetStatement | ParamDeclaration> => s.kind !== "let" && s.kind !== "param")
441
+ .map((stmt) => this.compileStatement(stmt, variables, runtimeLets));
442
+ const jobs = units.filter((u): u is CompiledJob => u.kind === "job");
443
+ const jobMap = new Map<string, CompiledJob>();
444
+ for (const job of jobs) jobMap.set(job.name, job);
445
+ this._jobMap = jobMap;
446
+
447
+ // Identify route-target-only jobs: jobs referenced by route stages but with no trigger
448
+ const routeTargetNames = new Set<string>();
449
+ const triggeredJobs = new Set<string>();
450
+ for (const job of ast.jobs) {
451
+ if (job.trigger) triggeredJobs.add(job.name);
452
+ for (const stage of job.pipeline) {
453
+ if (stage.kind === "route") {
454
+ for (const branch of stage.branches) {
455
+ routeTargetNames.add(branch.targetJob);
456
+ }
457
+ if (stage.fallback) routeTargetNames.add(stage.fallback);
458
+ }
459
+ }
460
+ }
461
+ // Exclude triggered jobs — they have independent execution reasons
462
+ for (const name of triggeredJobs) routeTargetNames.delete(name);
463
+ const routeTargets = routeTargetNames.size > 0 ? routeTargetNames : undefined;
464
+
465
+ return { jobs, units, jobMap, params, routeTargets };
466
+ }
467
+
468
+ private compileStatement(stmt: Exclude<TopLevelStatement, LetStatement | ParamDeclaration>, variables: Map<string, string | number>, runtimeLets: LetStatement[] = []): CompiledUnit {
469
+ if (stmt.kind === "output") {
470
+ return this.compileTopLevelOutput(stmt);
471
+ }
472
+ return this.compileJob(stmt, variables, runtimeLets);
473
+ }
474
+
475
+ private compileTopLevelOutput(output: OutputExpression): CompiledOutput {
476
+ return {
477
+ kind: "output",
478
+ message: output.message,
479
+ execute: async (ctx: JobContext): Promise<void> => {
480
+ ctx.logger.log("output", "output", { message: output.message });
481
+ safeOutput(ctx, output.message);
482
+ },
483
+ };
484
+ }
485
+
486
+ private compileJob(job: JobDeclaration, variables: Map<string, string | number> = new Map(), runtimeLets: LetStatement[] = []): CompiledJob {
487
+ const stages = job.pipeline;
488
+ const annotations = job.annotations;
489
+
490
+ // Extract annotation configs
491
+ const retryAnn = annotations.find(a => a.name === "retry");
492
+ const timeoutAnn = annotations.find(a => a.name === "timeout");
493
+ const onErrorAnn = annotations.find(a => a.name === "on_error");
494
+ const budgetAnn = annotations.find(a => a.name === "budget");
495
+ const retryCount = retryAnn ? (retryAnn.args.length > 0 ? Number(retryAnn.args[0]) : 3) : 0;
496
+ const timeoutMs = timeoutAnn ? (timeoutAnn.args.length > 0 ? Number(timeoutAnn.args[0]) : 0) : 0;
497
+ const onErrorStrategy = onErrorAnn?.args[0] as "skip" | "save" | "fail" | undefined;
498
+ const onErrorPath = onErrorStrategy === "save" ? onErrorAnn?.args[1] : undefined;
499
+
500
+ // Parse budget limits
501
+ const budgetLimits: Record<string, number> = {};
502
+ if (budgetAnn) {
503
+ for (let i = 0; i < budgetAnn.args.length; i += 2) {
504
+ budgetLimits[budgetAnn.args[i]] = Number(budgetAnn.args[i + 1]);
505
+ }
506
+ }
507
+
508
+ // Parse @caps for runtime enforcement (script-declared caps are authoritative)
509
+ const capsAnns = annotations.filter(a => a.name === "caps");
510
+ const scriptCaps: CapEntry[] = [];
511
+ for (const ann of capsAnns) {
512
+ if (ann.args.length > 0 && ann.args.length % 2 === 0) {
513
+ scriptCaps.push(...parseCaps(ann));
514
+ }
515
+ }
516
+
517
+ const self = this;
518
+
519
+ const runOnce = async (ctx: JobContext, initialStream?: unknown[]): Promise<JobReport> => {
520
+ const errors: string[] = [];
521
+ const jobInitialStream = initialStream ?? [];
522
+ let stream: unknown[] = [...jobInitialStream];
523
+ const stageMetrics: StageMetrics[] = [];
524
+ const jobStart = performance.now();
525
+ const budgetUsed = { actions: 0, writes: 0, records: 0, tokens: 0, cost: 0 };
526
+ const checkBudget = (dim: string, increment: number) => {
527
+ if (!(dim in budgetLimits)) return;
528
+ (budgetUsed as any)[dim] += increment;
529
+ if ((budgetUsed as any)[dim] > budgetLimits[dim]) {
530
+ throw new Error(`budget exceeded: ${dim} used ${(budgetUsed as any)[dim]}, limit ${budgetLimits[dim]}`);
531
+ }
532
+ };
533
+
534
+ // Runtime caps enforcement: if script declares @caps, enforce them with glob matching.
535
+ // Script-declared caps are the ceiling — ctx.caps (caller-provided) cannot widen them.
536
+ const checkCap = (op: CapEntry["op"], path: string): void => {
537
+ if (scriptCaps.length > 0 && !hasCapFor(scriptCaps, op, path)) {
538
+ throw new Error(`Permission denied: @caps does not allow ${op} '${path}'`);
539
+ }
540
+ };
541
+
542
+ // Resolve runtime lets
543
+ const runtimeVars = new Map(variables);
544
+ for (const rtLet of runtimeLets) {
545
+ if (rtLet.pipeline) {
546
+ try {
547
+ let rtStream: unknown[] = [];
548
+ for (const stage of rtLet.pipeline) {
549
+ rtStream = await self.executeStage(stage, rtStream, ctx, errors, runtimeVars);
550
+ }
551
+ // Bind the result: if single object with a numeric/string value, extract it
552
+ if (rtStream.length === 1 && typeof rtStream[0] === "object" && rtStream[0] !== null) {
553
+ const obj = rtStream[0] as Record<string, unknown>;
554
+ const keys = Object.keys(obj);
555
+ if (keys.length === 1) {
556
+ const val = obj[keys[0]];
557
+ if (typeof val === "number" || typeof val === "string") {
558
+ runtimeVars.set(rtLet.name, val);
559
+ continue;
560
+ }
561
+ }
562
+ }
563
+ // Fallback: use length or first value
564
+ if (rtStream.length === 1) {
565
+ const v = rtStream[0];
566
+ if (typeof v === "number" || typeof v === "string") {
567
+ runtimeVars.set(rtLet.name, v);
568
+ } else {
569
+ runtimeVars.set(rtLet.name, rtStream.length);
570
+ }
571
+ } else {
572
+ runtimeVars.set(rtLet.name, rtStream.length);
573
+ }
574
+ } catch (e: any) {
575
+ errors.push(`Runtime let '${rtLet.name}' failed: ${e.message ?? String(e)}`);
576
+ return { status: "error", recordCount: 0, errors, jobName: job.name, totalDurationMs: performance.now() - jobStart, stages: stageMetrics };
577
+ }
578
+ }
579
+ }
580
+
581
+ for (let i = 0; i < stages.length; i++) {
582
+ const stage = stages[i];
583
+ const inputCount = stream.length;
584
+ const stageStart = performance.now();
585
+ self._currentJobName = job.name;
586
+ self._currentStageIndex = i;
587
+ try {
588
+ // Skip absolute actions on empty downstream stream (not first stage).
589
+ // "action as source" (i === 0) still executes — it generates data from nothing.
590
+ // Relative actions already skip naturally (for-of loop over empty stream).
591
+ if (stage.kind === "action" && !stage.relative && i > 0 && stream.length === 0) {
592
+ const durationMs = performance.now() - stageStart;
593
+ ctx.logger.log(stage.kind, "skip", { reason: "empty-stream" });
594
+ safeEmit(ctx.logger, "debug", `${stage.kind} skip (empty stream)`, { jobName: job.name, stageIndex: i });
595
+ stageMetrics.push({ name: stage.kind, index: i, inputCount, outputCount: 0, durationMs });
596
+ continue;
597
+ }
598
+ ctx.logger.log(stage.kind, "enter", { recordCount: stream.length });
599
+ safeEmit(ctx.logger, "debug", `${stage.kind} enter`, { jobName: job.name, stageIndex: i });
600
+ stream = await self.executeStage(stage, stream, ctx, errors, runtimeVars, checkBudget, checkCap, jobInitialStream);
601
+ // Budget checks after stage execution
602
+ // Relative actions are checked per-record inside executeStage; only check absolute actions here
603
+ // Absolute actions with inline params also check per-record inside executeStage
604
+ const hasInlineParams = stage.kind === "action" && !stage.relative &&
605
+ stage.params && Object.keys(stage.params).length > 0;
606
+ if (stage.kind === "action" && !stage.relative && !hasInlineParams) checkBudget("actions", 1);
607
+ if (stage.kind === "save" || stage.kind === "publish" || stage.kind === "tee") checkBudget("writes", 1);
608
+ checkBudget("records", stream.length);
609
+ const durationMs = performance.now() - stageStart;
610
+ ctx.logger.log(stage.kind, "exit", { recordCount: stream.length });
611
+ safeEmit(ctx.logger, "debug", `${stage.kind} exit`, { jobName: job.name, stageIndex: i });
612
+ stageMetrics.push({ name: stage.kind, index: i, inputCount, outputCount: stream.length, durationMs });
613
+ } catch (e: any) {
614
+ const durationMs = performance.now() - stageStart;
615
+ const errMsg = e.message ?? String(e);
616
+ errors.push(errMsg);
617
+
618
+ if (onErrorStrategy === "skip") {
619
+ // Skip: log error, continue with remaining stream
620
+ stageMetrics.push({ name: stage.kind, index: i, inputCount, outputCount: stream.length, durationMs, error: errMsg });
621
+ continue;
622
+ } else if (onErrorStrategy === "save" && onErrorPath) {
623
+ // Save: write failed items to error path
624
+ try {
625
+ ctx.world.write(onErrorPath, stream.map(item => ({ _error: errMsg, _item: item })));
626
+ } catch {
627
+ errors.push(`Failed to write errors to ${onErrorPath}`);
628
+ }
629
+ stageMetrics.push({ name: stage.kind, index: i, inputCount, outputCount: stream.length, durationMs, error: errMsg });
630
+ continue;
631
+ }
632
+
633
+ // Default (fail): terminate
634
+ stageMetrics.push({ name: stage.kind, index: i, inputCount, outputCount: stream.length, durationMs, error: errMsg });
635
+ return { status: "error", recordCount: stream.length, errors, jobName: job.name, totalDurationMs: performance.now() - jobStart, stages: stageMetrics };
636
+ }
637
+ }
638
+
639
+ return {
640
+ status: errors.length > 0 ? "partial" : "ok",
641
+ recordCount: stream.length,
642
+ errors,
643
+ jobName: job.name,
644
+ totalDurationMs: performance.now() - jobStart,
645
+ stages: stageMetrics,
646
+ };
647
+ };
648
+
649
+ const runWithTimeout = async (ctx: JobContext, initialStream?: unknown[]): Promise<JobResult> => {
650
+ if (timeoutMs <= 0) return runOnce(ctx, initialStream);
651
+ return Promise.race([
652
+ runOnce(ctx, initialStream),
653
+ new Promise<JobResult>((_, reject) =>
654
+ setTimeout(() => reject(new Error(`Timeout: job exceeded ${timeoutMs}ms`)), timeoutMs)
655
+ ),
656
+ ]);
657
+ };
658
+
659
+ return {
660
+ kind: "job",
661
+ name: job.name,
662
+ trigger: job.trigger,
663
+ execute: async (ctx: JobContext, initialStream?: unknown[]): Promise<JobResult> => {
664
+ if (retryCount <= 0 && !retryAnn) {
665
+ // No retry annotation at all
666
+ return runWithTimeout(ctx, initialStream);
667
+ }
668
+
669
+ const maxAttempts = retryCount > 0 ? retryCount : 1;
670
+ const allErrors: string[] = [];
671
+
672
+ for (let attempt = 0; attempt < maxAttempts; attempt++) {
673
+ try {
674
+ const result = await runWithTimeout(ctx, initialStream);
675
+ if (result.status !== "error") return result;
676
+ allErrors.push(...result.errors);
677
+ // If last attempt, return error
678
+ if (attempt === maxAttempts - 1) return { status: "error", recordCount: result.recordCount, errors: allErrors };
679
+ // Backoff (in tests we don't actually wait)
680
+ } catch (e: any) {
681
+ allErrors.push(e.message ?? String(e));
682
+ if (attempt === maxAttempts - 1) {
683
+ return { status: "error", recordCount: 0, errors: allErrors };
684
+ }
685
+ }
686
+ }
687
+
688
+ return { status: "error", recordCount: 0, errors: allErrors };
689
+ },
690
+ };
691
+ }
692
+
693
  /**
   * Execute a single pipeline stage against the current record stream.
   *
   * @param stage - Stage AST node, discriminated by `kind`.
   * @param stream - Records flowing into this stage.
   * @param ctx - Job context supplying world I/O (`read`/`write`/`publish`/`exec`/`input`),
   *   the logger, and the granted capability set (`ctx.caps`).
   * @param errors - Shared accumulator; non-fatal problems (denied save/publish,
   *   failed tee side-write, missing route target) are pushed here without throwing.
   * @param variables - Runtime `$name` bindings used by query/where/expression resolution.
   * @param budgetCheck - Optional callback that throws once a budget dimension
   *   (e.g. "actions") would be exceeded; invoked per record for per-record actions.
   * @param capCheck - Optional callback enforcing script-declared @caps (glob-aware)
   *   on the fully resolved path, in addition to the prefix check against `ctx.caps`.
   * @param initialStream - The job's initial stream (trigger/event context); used to
   *   resolve `${...}` path templates for absolute actions.
   * @returns The stream to feed into the next stage. Terminal stages (save, publish,
   *   route) return `[]`; pass-through stages (tee, output) return the input stream.
   * @throws On permission denial (find/action), budget exhaustion, missing
   *   `world.exec`, malformed relative-action records, or an unknown stage kind.
   *   The job loop maps these into the job's on-error strategy.
   */
  private async executeStage(stage: PipelineStage, stream: unknown[], ctx: JobContext, errors: string[], variables: Map<string, string | number> = new Map(), budgetCheck?: (dim: string, increment: number) => void, capCheck?: (op: CapEntry["op"], path: string) => void, initialStream?: unknown[]): Promise<unknown[]> {
    switch (stage.kind) {
      case "find": {
        // Resolve template path from stream context (e.g. find /msgs/${data.messageId})
        let findPath = stage.path;
        if (findPath.includes("${") && stream.length > 0) {
          const rec = (typeof stream[0] === "object" && stream[0] !== null ? stream[0] : {}) as Record<string, unknown>;
          findPath = resolveTemplatePath(findPath, rec);
        }

        // Runtime @caps enforcement (script-declared caps)
        if (capCheck) capCheck("read", findPath);
        // Cap prefix = first two path segments: "/a/b/c" → ["", "a", "b"] → "/a/b"
        // (leading slash yields an empty first split element).
        const capPath = findPath.split("/").slice(0, 3).join("/");
        if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
          throw new Error(`Permission denied: cannot read '${findPath}'`);
        }
        try {
          const query = stage.query ? this.resolveQueryVars(stage.query, variables) : undefined;
          let result = await ctx.world.read(findPath, query);
          // Fallback: if world.read ignores query, apply in-memory filter
          // (re-filtering an already-filtered result is idempotent, so this is
          // safe even when the world DID honor the query).
          if (query && result.length > 0) {
            result = result.filter((item) => {
              try {
                return this.evaluateWhere(
                  { kind: "where", left: query!.field, op: query!.op, right: query!.value },
                  item,
                );
              } catch {
                return false;
              }
            });
          }
          return result;
        } catch {
          return []; // non-existent path → empty stream
        }
      }

      case "where": {
        // Resolve $variable references in where clause
        const resolvedStage = this.resolveWhereVars(stage, variables);
        return stream.filter((item) => {
          try {
            return this.evaluateWhere(resolvedStage, item);
          } catch {
            return false; // missing field → skip
          }
        });
      }

      case "map": {
        // System fields that map cannot overwrite (identity fields from AFS nodes)
        const IMMUTABLE_FIELDS = ["path", "kind"];

        // Copy identity fields from the original record back onto the mapped
        // result so projections cannot strip or forge them.
        const preserveSystemFields = (original: unknown, mapped: Record<string, unknown>): Record<string, unknown> => {
          if (typeof original === "object" && original !== null) {
            for (const field of IMMUTABLE_FIELDS) {
              if (field in (original as Record<string, unknown>)) {
                mapped[field] = (original as Record<string, unknown>)[field];
              }
            }
          }
          return mapped;
        };

        // Mode precedence: exprMappings > expression > mappings > single field.
        if (stage.exprMappings) {
          // Object projection with expression values: { key: <expr>, ... }
          return stream.map((item) => {
            const result: Record<string, unknown> = {};
            for (const [key, expr] of Object.entries(stage.exprMappings!)) {
              result[key] = this.evaluateExpression(expr, item, variables);
            }
            return preserveSystemFields(item, result);
          });
        }
        if (stage.expression) {
          // Single-expression mode: each record maps to one evaluated value
          // (system fields are NOT preserved here — result may not be an object).
          return stream.map((item) => this.evaluateExpression(stage.expression!, item, variables));
        }
        if (stage.mappings) {
          // Field-rename projection: { newKey: "dotted.source.field", ... }
          return stream.map((item) => {
            const result: Record<string, unknown> = {};
            for (const [key, field] of Object.entries(stage.mappings!)) {
              result[key] = this.resolveField(item, field);
            }
            return preserveSystemFields(item, result);
          });
        }
        // Bare-field mode: project each record down to one field's value.
        return stream.map((item) => this.resolveField(item, stage.field));
      }

      case "save": {
        // Runtime @caps enforcement (script-declared caps)
        if (capCheck) capCheck("write", stage.path);
        const capPath = stage.path.split("/").slice(0, 3).join("/");
        // Unlike find/action, a denied save records an error and passes the
        // stream through instead of throwing.
        if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
          errors.push(`Permission denied: cannot write '${stage.path}'`);
          return stream;
        }
        // NOTE(review): write() result is not awaited — confirm it is
        // synchronous or intentionally fire-and-forget.
        ctx.world.write(stage.path, stream);
        return []; // save is terminal: downstream stages see an empty stream
      }

      case "publish": {
        // Runtime @caps enforcement (script-declared caps)
        if (capCheck) capCheck("write", stage.path);
        const capPath = stage.path.split("/").slice(0, 3).join("/");
        // Same non-throwing denial behavior as save.
        if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
          errors.push(`Permission denied: cannot publish '${stage.path}'`);
          return stream;
        }
        ctx.world.publish(stage.path, stream);
        return []; // publish is terminal, like save
      }

      case "tee": {
        // Side-write a snapshot of the stream, then pass it through unchanged.
        // Runtime @caps enforcement (script-declared caps)
        if (capCheck) capCheck("write", stage.path);
        try {
          ctx.world.write(stage.path, [...stream]);
        } catch (e: any) {
          errors.push(`tee side-write failed: ${e.message}`);
          // Main pipeline continues
        }
        return stream;
      }

      case "fanout": {
        return await this.executeFanout(stage, stream, ctx, errors, variables, budgetCheck, capCheck, initialStream);
      }

      case "output": {
        if (stage.expression) {
          // Expression mode: evaluate per stream item, emit each result as text
          for (const item of stream) {
            const val = this.evaluateExpression(stage.expression, item, variables);
            const text = val == null ? "" : String(val);
            ctx.logger.log("output", "output", { message: text, streamSize: stream.length });
            safeOutput(ctx, text, { jobName: this._currentJobName, stageIndex: this._currentStageIndex });
          }
        } else {
          // Static-message mode: emit once regardless of stream size.
          ctx.logger.log("output", "output", { message: stage.message, streamSize: stream.length });
          safeOutput(ctx, stage.message, { jobName: this._currentJobName, stageIndex: this._currentStageIndex });
        }
        return stream; // pass-through
      }

      case "input": {
        // Prompt the outside world; if the world has no input channel the
        // response defaults to "". Replaces the stream with a single record.
        ctx.logger.log("input", "prompt", { prompt: stage.prompt });
        const response = ctx.world.input
          ? await ctx.world.input(stage.prompt)
          : "";
        return [{ prompt: stage.prompt, response }];
      }

      case "count": {
        // Collapse the stream to a single { count } record.
        return [{ count: stream.length }];
      }

      case "group-by": {
        // Bucket records by a (dotted) field value; emit { key, items } per bucket.
        const groups = new Map<unknown, unknown[]>();
        for (const item of stream) {
          const key = this.resolveField(item, stage.field);
          if (!groups.has(key)) groups.set(key, []);
          groups.get(key)!.push(item);
        }
        return Array.from(groups.entries()).map(([key, items]) => ({ key, items }));
      }

      case "route": {
        // Dispatch items to target jobs based on field value
        const buckets = new Map<string, unknown[]>();
        for (const item of stream) {
          const val = String(this.resolveField(item, stage.field) ?? "");
          const branch = stage.branches.find(b => b.value === val);
          const targetJob = branch ? branch.targetJob : stage.fallback;
          if (targetJob) {
            if (!buckets.has(targetJob)) buckets.set(targetJob, []);
            buckets.get(targetJob)!.push(item);
          }
          // If no match and no fallback, item is dropped (silently)
        }
        // Execute each target job with its bucket as initial stream
        for (const [jobName, items] of buckets) {
          const compiled = this._jobMap.get(jobName);
          if (!compiled) {
            errors.push(`Route target job '${jobName}' not found`);
            continue;
          }
          // NOTE(review): target-job results are discarded here — confirm
          // routed jobs are meant to be side-effect-only.
          await compiled.execute(ctx, items);
        }
        return []; // route is terminal
      }

      case "lookup": {
        // Left join: read lookup source, build index by joinKey, merge into stream items
        const lookupData = await ctx.world.read(stage.path);
        const index = new Map<string, unknown>();
        for (const item of lookupData) {
          const key = String(this.resolveField(item, stage.joinKey) ?? "");
          // First occurrence wins on duplicate join keys.
          if (!index.has(key)) index.set(key, item);
        }
        return stream.map((item) => {
          const key = String(this.resolveField(item, stage.joinKey) ?? "");
          const match = index.get(key);
          if (match && typeof match === "object" && match !== null && typeof item === "object" && item !== null) {
            return { ...match, ...item }; // item fields take precedence
          }
          return item; // LEFT JOIN: unmatched items preserved
        });
      }

      case "action": {
        if (!ctx.world.exec) {
          throw new Error(`WorldInterface.exec not available — cannot execute action '${stage.path}'`);
        }

        const hasParams = stage.params && Object.keys(stage.params).length > 0;
        const hasTemplatePath = stage.path.includes("${");

        if (stage.relative) {
          // Per-record relative action: resolve path from each record
          const results: unknown[] = [];
          for (const record of stream) {
            // Pre-check budget before each execution
            if (budgetCheck) budgetCheck("actions", 1);
            if (typeof record !== "object" || record === null || !("path" in record)) {
              throw new Error(`Relative action '${stage.path}' requires records with a 'path' field`);
            }
            const recordPath = String((record as Record<string, unknown>).path);
            const fullPath = recordPath + "/.actions/" + stage.path;
            // Runtime caps enforcement: verify resolved path against @caps (glob-aware)
            if (capCheck) capCheck("exec", fullPath);
            const capPath = fullPath.split("/").slice(0, 3).join("/");
            if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
              throw new Error(`Permission denied: cannot exec '${fullPath}'`);
            }
            const rec = record as Record<string, unknown>;
            // New merge model: if inline params exist, resolve templates and pass ONLY params (no stream merge)
            const resolvedParams = hasParams ? resolveActionParams(stage.params as Record<string, unknown>, rec) : undefined;
            const input = hasParams ? [] : [record];
            const result = await ctx.world.exec(fullPath, input, resolvedParams);
            // null/undefined results are dropped; arrays are flattened one level.
            if (result != null) {
              if (Array.isArray(result)) results.push(...result);
              else results.push(result);
            }
          }
          return results;
        }

        // Absolute action — with inline params or template path: per-record execution
        if (hasParams || hasTemplatePath) {
          const results: unknown[] = [];
          // Empty stream still executes once with an empty record (action-as-source).
          const records = stream.length > 0 ? stream : [{}];
          // Path templates resolve from initial stream (trigger event / job context)
          // Params templates resolve from current stream record
          const initialRec = (initialStream && initialStream.length > 0
            && typeof initialStream[0] === "object" && initialStream[0] !== null)
            ? initialStream[0] as Record<string, unknown>
            : undefined;
          for (const record of records) {
            if (budgetCheck) budgetCheck("actions", 1);
            const rec = (typeof record === "object" && record !== null ? record : {}) as Record<string, unknown>;
            // Path: prefer initial stream record (event context), fall back to current stream record
            const pathRec = initialRec ?? rec;
            const resolvedPath = hasTemplatePath ? resolveTemplatePath(stage.path, pathRec) : stage.path;
            // Caps check on resolved path
            if (capCheck) capCheck("exec", resolvedPath);
            const capPath = resolvedPath.split("/").slice(0, 3).join("/");
            if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
              throw new Error(`Permission denied: cannot exec '${resolvedPath}'`);
            }
            const resolvedParams = hasParams ? resolveActionParams(stage.params as Record<string, unknown>, rec) : undefined;
            const input = hasParams ? [] : [record];
            const result = await ctx.world.exec(resolvedPath, input, resolvedParams);
            if (result != null) {
              if (Array.isArray(result)) results.push(...result);
              else results.push(result);
            }
          }
          return results;
        }

        // No params, no template path — passthrough (backward compatible)
        // Runtime @caps enforcement (script-declared caps)
        if (capCheck) capCheck("exec", stage.path);
        const capPath = stage.path.split("/").slice(0, 3).join("/");
        if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
          throw new Error(`Permission denied: cannot exec '${stage.path}'`);
        }
        // Whole stream is passed as the action input in a single exec call.
        const result = await ctx.world.exec(stage.path, stream, undefined);
        if (result == null) return [];
        if (!Array.isArray(result)) return [result];
        return result;
      }

      default:
        throw new Error(`Unknown stage kind: ${(stage as any).kind}`);
    }
  }
991
+
992
+ private resolveQueryVars(query: QueryCondition, variables: Map<string, string | number>): QueryCondition {
993
+ if (typeof query.value === "string" && /^\$[a-zA-Z_]\w*$/.test(query.value)) {
994
+ const varName = query.value.slice(1);
995
+ if (!variables.has(varName)) {
996
+ throw new Error(`Undefined variable: $${varName}`);
997
+ }
998
+ return { ...query, value: variables.get(varName)! };
999
+ }
1000
+ return query;
1001
+ }
1002
+
1003
+ private resolveWhereVars(clause: WhereClause, variables: Map<string, string | number>): WhereClause {
1004
+ // Only resolve $varName pattern ($ followed by identifier chars only)
1005
+ if (typeof clause.right === "string" && /^\$[a-zA-Z_]\w*$/.test(clause.right)) {
1006
+ const varName = clause.right.slice(1);
1007
+ if (!variables.has(varName)) {
1008
+ throw new Error(`Undefined variable: $${varName}`);
1009
+ }
1010
+ return { ...clause, right: variables.get(varName)! };
1011
+ }
1012
+ return clause;
1013
+ }
1014
+
1015
+ private evaluateWhere(clause: WhereClause, item: unknown): boolean {
1016
+ const left = this.resolveField(item, clause.left);
1017
+ const right = clause.right;
1018
+
1019
+ switch (clause.op) {
1020
+ case "==": return strictEqual(left, right);
1021
+ case "!=": return !strictEqual(left, right);
1022
+ case ">":
1023
+ case "<":
1024
+ case ">=":
1025
+ case "<=": {
1026
+ const l = Number(left);
1027
+ const r = Number(right);
1028
+ // NaN or Infinity in comparisons → reject (safety: non-finite values should not pass guards)
1029
+ if (!Number.isFinite(l) || !Number.isFinite(r)) return false;
1030
+ if (clause.op === ">") return l > r;
1031
+ if (clause.op === "<") return l < r;
1032
+ if (clause.op === ">=") return l >= r;
1033
+ return l <= r;
1034
+ }
1035
+ default: return false;
1036
+ }
1037
+ }
1038
+
1039
+ private resolveField(item: unknown, field: string): unknown {
1040
+ const parts = field.split(".");
1041
+ let current: any = item;
1042
+ for (const part of parts) {
1043
+ if (current == null) return undefined;
1044
+ if (typeof current !== "object" || !Object.hasOwn(current, part)) return undefined;
1045
+ current = current[part];
1046
+ }
1047
+ return current;
1048
+ }
1049
+
1050
+ private evaluateExpression(expr: Expression, item: unknown, variables: Map<string, string | number>): unknown {
1051
+ switch (expr.kind) {
1052
+ case "literal":
1053
+ return expr.value;
1054
+ case "field_access":
1055
+ return this.resolveField(item, expr.path);
1056
+ case "var_ref": {
1057
+ if (!variables.has(expr.name)) {
1058
+ throw new Error(`Undefined variable: $${expr.name}`);
1059
+ }
1060
+ return variables.get(expr.name)!;
1061
+ }
1062
+ case "binary": {
1063
+ const left = this.evaluateExpression(expr.left, item, variables);
1064
+ const right = this.evaluateExpression(expr.right, item, variables);
1065
+ switch (expr.op) {
1066
+ case "+":
1067
+ // String concat if either side is string
1068
+ if (typeof left === "string" || typeof right === "string") {
1069
+ return String(left ?? "") + String(right ?? "");
1070
+ }
1071
+ return Number(left) + Number(right);
1072
+ case "-":
1073
+ return Number(left) - Number(right);
1074
+ case "*":
1075
+ return Number(left) * Number(right);
1076
+ case "/":
1077
+ return Number(left) / Number(right);
1078
+ default:
1079
+ throw new Error(`Unknown operator: ${(expr as any).op}`);
1080
+ }
1081
+ }
1082
+ default:
1083
+ throw new Error(`Unknown expression kind: ${(expr as any).kind}`);
1084
+ }
1085
+ }
1086
+
1087
+ private async executeFanout(stage: FanoutExpression, stream: unknown[], ctx: JobContext, errors: string[], variables: Map<string, string | number> = new Map(), budgetCheck?: (dim: string, increment: number) => void, capCheck?: (op: CapEntry["op"], path: string) => void, initialStream?: unknown[]): Promise<unknown[]> {
1088
+ const results: unknown[][] = [];
1089
+ for (const branch of stage.branches) {
1090
+ let branchStream = [...stream];
1091
+ try {
1092
+ for (const branchStage of branch) {
1093
+ branchStream = await this.executeStage(branchStage, branchStream, ctx, errors, variables, budgetCheck, capCheck, initialStream);
1094
+ }
1095
+ results.push(branchStream);
1096
+ } catch (e: any) {
1097
+ errors.push(`fanout branch failed: ${e.message}`);
1098
+ }
1099
+ }
1100
+ // Return merged results from all successful branches
1101
+ return results.flat();
1102
+ }
1103
+ }