@aigne/ash 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. package/DESIGN.md +41 -0
  2. package/dist/ai-dev-loop/ash-run-result.cjs +12 -0
  3. package/dist/ai-dev-loop/ash-run-result.d.cts +28 -0
  4. package/dist/ai-dev-loop/ash-run-result.d.cts.map +1 -0
  5. package/dist/ai-dev-loop/ash-run-result.d.mts +28 -0
  6. package/dist/ai-dev-loop/ash-run-result.d.mts.map +1 -0
  7. package/dist/ai-dev-loop/ash-run-result.mjs +11 -0
  8. package/dist/ai-dev-loop/ash-run-result.mjs.map +1 -0
  9. package/dist/ai-dev-loop/ash-typed-error.cjs +51 -0
  10. package/dist/ai-dev-loop/ash-typed-error.d.cts +54 -0
  11. package/dist/ai-dev-loop/ash-typed-error.d.cts.map +1 -0
  12. package/dist/ai-dev-loop/ash-typed-error.d.mts +54 -0
  13. package/dist/ai-dev-loop/ash-typed-error.d.mts.map +1 -0
  14. package/dist/ai-dev-loop/ash-typed-error.mjs +50 -0
  15. package/dist/ai-dev-loop/ash-typed-error.mjs.map +1 -0
  16. package/dist/ai-dev-loop/ash-validate.cjs +27 -0
  17. package/dist/ai-dev-loop/ash-validate.d.cts +7 -0
  18. package/dist/ai-dev-loop/ash-validate.d.cts.map +1 -0
  19. package/dist/ai-dev-loop/ash-validate.d.mts +7 -0
  20. package/dist/ai-dev-loop/ash-validate.d.mts.map +1 -0
  21. package/dist/ai-dev-loop/ash-validate.mjs +28 -0
  22. package/dist/ai-dev-loop/ash-validate.mjs.map +1 -0
  23. package/dist/ai-dev-loop/dev-loop.cjs +134 -0
  24. package/dist/ai-dev-loop/dev-loop.d.cts +28 -0
  25. package/dist/ai-dev-loop/dev-loop.d.cts.map +1 -0
  26. package/dist/ai-dev-loop/dev-loop.d.mts +28 -0
  27. package/dist/ai-dev-loop/dev-loop.d.mts.map +1 -0
  28. package/dist/ai-dev-loop/dev-loop.mjs +135 -0
  29. package/dist/ai-dev-loop/dev-loop.mjs.map +1 -0
  30. package/dist/ai-dev-loop/index.cjs +24 -0
  31. package/dist/ai-dev-loop/index.d.cts +9 -0
  32. package/dist/ai-dev-loop/index.d.mts +9 -0
  33. package/dist/ai-dev-loop/index.mjs +10 -0
  34. package/dist/ai-dev-loop/live-mode.cjs +17 -0
  35. package/dist/ai-dev-loop/live-mode.d.cts +24 -0
  36. package/dist/ai-dev-loop/live-mode.d.cts.map +1 -0
  37. package/dist/ai-dev-loop/live-mode.d.mts +24 -0
  38. package/dist/ai-dev-loop/live-mode.d.mts.map +1 -0
  39. package/dist/ai-dev-loop/live-mode.mjs +17 -0
  40. package/dist/ai-dev-loop/live-mode.mjs.map +1 -0
  41. package/dist/ai-dev-loop/meta-tools.cjs +123 -0
  42. package/dist/ai-dev-loop/meta-tools.d.cts +24 -0
  43. package/dist/ai-dev-loop/meta-tools.d.cts.map +1 -0
  44. package/dist/ai-dev-loop/meta-tools.d.mts +24 -0
  45. package/dist/ai-dev-loop/meta-tools.d.mts.map +1 -0
  46. package/dist/ai-dev-loop/meta-tools.mjs +120 -0
  47. package/dist/ai-dev-loop/meta-tools.mjs.map +1 -0
  48. package/dist/ai-dev-loop/structured-runner.cjs +154 -0
  49. package/dist/ai-dev-loop/structured-runner.d.cts +12 -0
  50. package/dist/ai-dev-loop/structured-runner.d.cts.map +1 -0
  51. package/dist/ai-dev-loop/structured-runner.d.mts +12 -0
  52. package/dist/ai-dev-loop/structured-runner.d.mts.map +1 -0
  53. package/dist/ai-dev-loop/structured-runner.mjs +155 -0
  54. package/dist/ai-dev-loop/structured-runner.mjs.map +1 -0
  55. package/dist/ai-dev-loop/system-prompt.cjs +55 -0
  56. package/dist/ai-dev-loop/system-prompt.d.cts +20 -0
  57. package/dist/ai-dev-loop/system-prompt.d.cts.map +1 -0
  58. package/dist/ai-dev-loop/system-prompt.d.mts +20 -0
  59. package/dist/ai-dev-loop/system-prompt.d.mts.map +1 -0
  60. package/dist/ai-dev-loop/system-prompt.mjs +54 -0
  61. package/dist/ai-dev-loop/system-prompt.mjs.map +1 -0
  62. package/dist/ast.d.cts +140 -0
  63. package/dist/ast.d.cts.map +1 -0
  64. package/dist/ast.d.mts +140 -0
  65. package/dist/ast.d.mts.map +1 -0
  66. package/dist/compiler.cjs +802 -0
  67. package/dist/compiler.d.cts +103 -0
  68. package/dist/compiler.d.cts.map +1 -0
  69. package/dist/compiler.d.mts +103 -0
  70. package/dist/compiler.d.mts.map +1 -0
  71. package/dist/compiler.mjs +802 -0
  72. package/dist/compiler.mjs.map +1 -0
  73. package/dist/index.cjs +14 -0
  74. package/dist/index.d.cts +7 -0
  75. package/dist/index.d.mts +7 -0
  76. package/dist/index.mjs +7 -0
  77. package/dist/lexer.cjs +451 -0
  78. package/dist/lexer.d.cts +14 -0
  79. package/dist/lexer.d.cts.map +1 -0
  80. package/dist/lexer.d.mts +14 -0
  81. package/dist/lexer.d.mts.map +1 -0
  82. package/dist/lexer.mjs +451 -0
  83. package/dist/lexer.mjs.map +1 -0
  84. package/dist/parser.cjs +734 -0
  85. package/dist/parser.d.cts +40 -0
  86. package/dist/parser.d.cts.map +1 -0
  87. package/dist/parser.d.mts +40 -0
  88. package/dist/parser.d.mts.map +1 -0
  89. package/dist/parser.mjs +734 -0
  90. package/dist/parser.mjs.map +1 -0
  91. package/dist/reference.cjs +130 -0
  92. package/dist/reference.d.cts +11 -0
  93. package/dist/reference.d.cts.map +1 -0
  94. package/dist/reference.d.mts +11 -0
  95. package/dist/reference.d.mts.map +1 -0
  96. package/dist/reference.mjs +130 -0
  97. package/dist/reference.mjs.map +1 -0
  98. package/dist/template.cjs +85 -0
  99. package/dist/template.mjs +84 -0
  100. package/dist/template.mjs.map +1 -0
  101. package/dist/type-checker.cjs +582 -0
  102. package/dist/type-checker.d.cts +31 -0
  103. package/dist/type-checker.d.cts.map +1 -0
  104. package/dist/type-checker.d.mts +31 -0
  105. package/dist/type-checker.d.mts.map +1 -0
  106. package/dist/type-checker.mjs +573 -0
  107. package/dist/type-checker.mjs.map +1 -0
  108. package/package.json +29 -0
  109. package/src/ai-dev-loop/ash-run-result.test.ts +113 -0
  110. package/src/ai-dev-loop/ash-run-result.ts +46 -0
  111. package/src/ai-dev-loop/ash-typed-error.test.ts +136 -0
  112. package/src/ai-dev-loop/ash-typed-error.ts +50 -0
  113. package/src/ai-dev-loop/ash-validate.test.ts +54 -0
  114. package/src/ai-dev-loop/ash-validate.ts +34 -0
  115. package/src/ai-dev-loop/dev-loop.test.ts +364 -0
  116. package/src/ai-dev-loop/dev-loop.ts +156 -0
  117. package/src/ai-dev-loop/dry-run.test.ts +107 -0
  118. package/src/ai-dev-loop/e2e-multi-fix.test.ts +473 -0
  119. package/src/ai-dev-loop/e2e.test.ts +324 -0
  120. package/src/ai-dev-loop/index.ts +15 -0
  121. package/src/ai-dev-loop/invariants.test.ts +253 -0
  122. package/src/ai-dev-loop/live-mode.test.ts +63 -0
  123. package/src/ai-dev-loop/live-mode.ts +33 -0
  124. package/src/ai-dev-loop/meta-tools.test.ts +120 -0
  125. package/src/ai-dev-loop/meta-tools.ts +142 -0
  126. package/src/ai-dev-loop/structured-runner.test.ts +159 -0
  127. package/src/ai-dev-loop/structured-runner.ts +209 -0
  128. package/src/ai-dev-loop/system-prompt.test.ts +102 -0
  129. package/src/ai-dev-loop/system-prompt.ts +81 -0
  130. package/src/ast.ts +186 -0
  131. package/src/compiler.test.ts +2933 -0
  132. package/src/compiler.ts +1103 -0
  133. package/src/e2e.test.ts +552 -0
  134. package/src/index.ts +16 -0
  135. package/src/lexer.test.ts +538 -0
  136. package/src/lexer.ts +222 -0
  137. package/src/parser.test.ts +1024 -0
  138. package/src/parser.ts +835 -0
  139. package/src/reference.test.ts +166 -0
  140. package/src/reference.ts +125 -0
  141. package/src/template.test.ts +210 -0
  142. package/src/template.ts +139 -0
  143. package/src/type-checker.test.ts +1494 -0
  144. package/src/type-checker.ts +785 -0
  145. package/tsconfig.json +9 -0
  146. package/tsdown.config.ts +12 -0
@@ -0,0 +1,802 @@
1
+ const require_lexer = require('./lexer.cjs');
2
+ const require_parser = require('./parser.cjs');
3
+ const require_type_checker = require('./type-checker.cjs');
4
+ const require_template = require('./template.cjs');
5
+
6
+ //#region src/compiler.ts
7
/**
 * Computes the 32-bit FNV-1a hash of a string.
 *
 * Used to fingerprint compiled source text so callers can associate
 * diagnostics with the exact input that produced them.
 *
 * @param {string} str - Input text to hash (hashed per UTF-16 code unit).
 * @returns {string} Lowercase hex digest, zero-padded to 8 characters.
 */
function fnv1aHash(str) {
	const FNV_OFFSET_BASIS = 0x811c9dc5;
	const FNV_PRIME = 0x01000193;
	let digest = FNV_OFFSET_BASIS;
	let index = 0;
	while (index < str.length) {
		digest ^= str.charCodeAt(index);
		// Math.imul keeps the multiply in 32-bit space; >>> 0 renormalizes to unsigned.
		digest = Math.imul(digest, FNV_PRIME) >>> 0;
		index += 1;
	}
	return digest.toString(16).padStart(8, "0");
}
/**
 * Recursively gathers every "action" stage from a pipeline, descending
 * into the branches of any "fanout" stage.
 *
 * @param {Array<object>} stages - Pipeline stages to scan.
 * @returns {Array<object>} All action stages, in traversal order.
 */
function collectActions(stages) {
	const found = [];
	for (const stage of stages) {
		if (stage.kind === "action") {
			found.push(stage);
		} else if (stage.kind === "fanout") {
			for (const branch of stage.branches) {
				for (const nested of collectActions(branch)) found.push(nested);
			}
		}
	}
	return found;
}
/**
 * Compiles Ash source text into an executable program plus a flat list of
 * security/lint diagnostics.
 *
 * Pipeline: lex -> parse -> per-job checks (prohibited patterns, pipeline
 * types, annotations, caps), then a series of program-wide security passes
 * (@readonly violations, writes in let bindings, pre-approval side effects,
 * param-gated writes, mixed security posture, uncapped/amplified actions,
 * uncapped writes, excessive budgets).
 *
 * @param {string} source - Raw Ash program text.
 * @returns {object} On lex/parse failure: `{ sourceHash, diagnostics }` (no
 *   program). On success: `{ program, diagnostics, sourceHash }` — note
 *   diagnostics may still be non-empty; callers decide severity handling.
 */
function compileSource(source) {
	const diagnostics = [];
	const sourceHash = fnv1aHash(source);
	const lexer = new require_lexer.AshLexer();
	const parser = new require_parser.AshParser();
	const compiler = new AshCompiler();
	let tokens;
	try {
		tokens = lexer.tokenize(source);
	} catch (e) {
		// Lex failure: report the syntax error; no program is produced.
		return {
			sourceHash,
			diagnostics: [require_type_checker.parseSyntaxError(e.message ?? String(e))]
		};
	}
	let ast;
	try {
		ast = parser.parse(tokens);
	} catch (e) {
		// Parse failure: same shape as a lex failure.
		return {
			sourceHash,
			diagnostics: [require_type_checker.parseSyntaxError(e.message ?? String(e))]
		};
	}
	// Program-wide prohibited-pattern scan, then per-job static checks.
	const prohibErrors = require_type_checker.checkProhibitedPatterns(ast);
	diagnostics.push(...require_type_checker.compileErrorsToDiagnostics(prohibErrors));
	for (const job of ast.jobs) {
		const typeErrors = require_type_checker.checkPipelineTypes(job.pipeline);
		diagnostics.push(...require_type_checker.typeErrorsToDiagnostics(typeErrors));
		const annErrors = require_type_checker.checkAnnotations(job);
		diagnostics.push(...require_type_checker.annotationErrorsToDiagnostics(annErrors));
		diagnostics.push(...require_type_checker.checkJobCaps(job));
		// @readonly jobs may not write directly, nor route to jobs that write.
		if (job.annotations.some((a) => a.name === "readonly")) {
			for (const stage of job.pipeline) if (stage.kind === "save" || stage.kind === "publish" || stage.kind === "tee" || stage.kind === "action") diagnostics.push({
				code: "ASH_READONLY_VIOLATION",
				message: `@readonly job '${job.name}' contains '${stage.kind}' which writes data`
			});
			for (const stage of job.pipeline) if (stage.kind === "route") {
				// Check every route target (branches plus optional fallback).
				const targetNames = [...stage.branches.map((b) => b.targetJob), ...stage.fallback ? [stage.fallback] : []];
				for (const targetName of targetNames) {
					const targetJob = ast.jobs.find((j) => j.name === targetName);
					if (!targetJob) continue;
					// One diagnostic per target job, not per offending stage (break below).
					for (const tStage of targetJob.pipeline) if (tStage.kind === "save" || tStage.kind === "publish" || tStage.kind === "tee" || tStage.kind === "action") {
						diagnostics.push({
							code: "ASH_READONLY_VIOLATION",
							message: `@readonly job '${job.name}' routes to '${targetName}' which contains '${tStage.kind}' (write operation)`
						});
						break;
					}
				}
			}
		}
	}
	// Stage kinds that are considered write operations for the passes below.
	const WRITE_STAGES = new Set([
		"save",
		"publish",
		"tee",
		"action"
	]);
	// Only let bindings that carry a pipeline are of interest here.
	const letStatements = ast.statements.filter((s) => s.kind === "let" && !!s.pipeline);
	for (const letStmt of letStatements) if (letStmt.pipeline) {
		for (const stage of letStmt.pipeline) if (WRITE_STAGES.has(stage.kind)) diagnostics.push({
			code: "ASH_LET_WRITE",
			message: `let '${letStmt.name}' pipeline contains '${stage.kind}' — write operations in let bindings are not permitted`
		});
	}
	// Warn when let pipelines with side effects (find/action) coexist with
	// @approval jobs: the lets run before any approval gate.
	if (ast.jobs.some((j) => j.annotations.some((a) => a.name === "approval")) && letStatements.length > 0) {
		const sideEffectLets = letStatements.filter((s) => s.pipeline?.some((stage) => stage.kind === "find" || stage.kind === "action"));
		if (sideEffectLets.length > 0) diagnostics.push({
			code: "ASH_LET_PRE_APPROVAL",
			severity: "warning",
			message: `let bindings (${sideEffectLets.map((s) => s.name).join(", ")}) execute before @approval checks — side effects in let pipelines bypass approval`
		});
	}
	// Warn when a caller-overridable param gates a writing job via a `where`
	// comparison against "$param" (one warning per job, hence the break).
	const paramNames = new Set(ast.statements.filter((s) => s.kind === "param").map((s) => s.name));
	if (paramNames.size > 0) for (const job of ast.jobs) {
		if (!job.pipeline.some((s) => WRITE_STAGES.has(s.kind))) continue;
		for (const stage of job.pipeline) if (stage.kind === "where" && typeof stage.right === "string" && stage.right.startsWith("$")) {
			const varName = stage.right.slice(1);
			if (paramNames.has(varName)) {
				diagnostics.push({
					code: "ASH_PARAM_WRITE_GATE",
					severity: "warning",
					message: `Job '${job.name}': param '$${varName}' gates write operations — callers can override param to change behavior`
				});
				break;
			}
		}
	}
	// Mixed posture: some-but-not-all jobs are @readonly while others write.
	const readonlyJobs = ast.jobs.filter((j) => j.annotations.some((a) => a.name === "readonly"));
	if (readonlyJobs.length > 0 && readonlyJobs.length < ast.jobs.length) {
		const writingJobs = ast.jobs.filter((j) => !j.annotations.some((a) => a.name === "readonly")).filter((j) => j.pipeline.some((s) => s.kind === "save" || s.kind === "publish" || s.kind === "tee" || s.kind === "action"));
		if (writingJobs.length > 0) diagnostics.push({
			code: "ASH_MIXED_SECURITY",
			severity: "warning",
			message: `Program has mixed security posture: ${readonlyJobs.length} @readonly job(s) alongside ${writingJobs.length} unrestricted writing job(s) (${writingJobs.map((j) => j.name).join(", ")}) — this can mask malicious intent`
		});
	}
	// Action-specific passes: caps requirement, amplification, cross-provider,
	// and relative (per-record) actions. collectActions descends into fanouts.
	for (const job of ast.jobs) {
		const actions = collectActions(job.pipeline);
		if (actions.length === 0) continue;
		const hasCaps = job.annotations.some((a) => a.name === "caps");
		const hasBudget = job.annotations.some((a) => a.name === "budget");
		if (!hasCaps) diagnostics.push({
			code: "ASH_UNCAPPED_ACTION",
			message: `Job '${job.name}' contains action stages but has no @caps — actions require explicit capability declaration`
		});
		// Multi-action jobs: warning when budget-gated, hard diagnostic otherwise.
		if (actions.length > 1) if (hasCaps && hasBudget) diagnostics.push({
			code: "ASH_ACTION_AMPLIFICATION",
			severity: "warning",
			message: `Job '${job.name}' contains ${actions.length} action stages — budget-gated multi-action execution permitted`
		});
		else diagnostics.push({
			code: "ASH_ACTION_AMPLIFICATION",
			message: `Job '${job.name}' contains ${actions.length} action stages — multi-action jobs require @caps and @budget to prevent amplification attacks`
		});
		// Provider = second path segment ("/provider/..."); relative actions excluded.
		const providers = new Set(actions.filter((a) => !a.relative).map((a) => a.path.split("/")[1]).filter(Boolean));
		if (providers.size > 1) if (hasCaps && hasBudget) diagnostics.push({
			code: "ASH_CROSS_PROVIDER_ACTION",
			severity: "warning",
			message: `Job '${job.name}' has actions targeting ${providers.size} providers (${[...providers].join(", ")}) — budget-gated cross-provider execution permitted`
		});
		else diagnostics.push({
			code: "ASH_CROSS_PROVIDER_ACTION",
			message: `Job '${job.name}' has actions targeting ${providers.size} providers (${[...providers].join(", ")}) — cross-provider actions require @caps and @budget`
		});
		const relativeActions = actions.filter((a) => a.relative);
		if (relativeActions.length > 0) {
			// NOTE(review): this upstream-find check uses findIndex on the
			// top-level job.pipeline, but collectActions can return actions
			// nested inside fanout branches, for which findIndex yields -1 and
			// the check can never pass — confirm whether nested relative
			// actions are intended to always trigger this diagnostic.
			if (!job.pipeline.some((s, idx) => s.kind === "find" && job.pipeline.findIndex((p) => p === relativeActions[0]) > idx)) diagnostics.push({
				code: "ASH_RELATIVE_ACTION_NO_FIND",
				message: `Job '${job.name}': relative action '${relativeActions[0].path}' has no upstream 'find' — relative actions require a find stage to provide records with paths`
			});
			if (hasCaps && hasBudget) diagnostics.push({
				code: "ASH_ACTION_AMPLIFICATION",
				severity: "warning",
				message: `Job '${job.name}': relative action '${relativeActions[0].path}' executes per-record — budget-gated amplification permitted`
			});
			else if (!hasCaps || !hasBudget) diagnostics.push({
				code: "ASH_ACTION_AMPLIFICATION",
				message: `Job '${job.name}': relative action '${relativeActions[0].path}' executes per-record — requires @caps and @budget to prevent amplification`
			});
		}
	}
	// publish/tee combined with data reads (find/lookup) requires @caps.
	for (const job of ast.jobs) {
		const hasCaps = job.annotations.some((a) => a.name === "caps");
		const hasFind = job.pipeline.some((s) => s.kind === "find" || s.kind === "lookup");
		const writeStages = job.pipeline.filter((s) => s.kind === "publish" || s.kind === "tee");
		if (writeStages.length > 0 && hasFind && !hasCaps) for (const ws of writeStages) diagnostics.push({
			code: "ASH_UNCAPPED_WRITE",
			message: `Job '${job.name}': '${ws.kind}' combined with data reads requires @caps — write operations on external data must declare capabilities`
		});
	}
	// Hard ceilings for @budget dimensions; values above them are flagged.
	const BUDGET_CEILINGS = {
		actions: 100,
		writes: 100,
		records: 1e4
	};
	for (const job of ast.jobs) {
		const budgetAnn = job.annotations.find((a) => a.name === "budget");
		if (!budgetAnn) continue;
		// @budget args alternate: dimension name, then its numeric limit.
		for (let i = 0; i < budgetAnn.args.length; i += 2) {
			const dim = budgetAnn.args[i];
			const val = Number(budgetAnn.args[i + 1]);
			if (dim in BUDGET_CEILINGS && val > BUDGET_CEILINGS[dim]) diagnostics.push({
				code: "ASH_BUDGET_EXCESSIVE",
				message: `Job '${job.name}': @budget(${dim} ${val}) exceeds ceiling of ${BUDGET_CEILINGS[dim]} — reduce budget or justify with explicit approval`
			});
		}
	}
	// Compilation proceeds even with diagnostics present; the caller decides
	// how to treat them.
	return {
		program: compiler.compile(ast),
		diagnostics,
		sourceHash
	};
}
/**
 * Writes text to the optional output channel, then mirrors it to the logger
 * at "info" level. Failures in the output sink are deliberately swallowed:
 * emitting output must never abort pipeline execution.
 *
 * @param {object} ctx - Execution context carrying optional `output` and a `logger`.
 * @param {string} content - Text to emit.
 * @param {object} [logContext] - Extra context forwarded to both sinks.
 */
function safeOutput(ctx, content, logContext) {
	if (ctx.output) {
		try {
			ctx.output.output({ kind: "text", content, context: logContext });
		} catch {
			// best-effort: a broken output sink must not break the job
		}
	}
	safeEmit(ctx.logger, "info", content, logContext);
}
/**
 * Forwards a structured log record to `logger.emit` when that method exists,
 * ignoring any error it throws.
 *
 * @param {object} logger - Logger that may expose an `emit(level, message, context)` method.
 * @param {string} level - Log level string.
 * @param {string} message - Log message.
 * @param {object} [context] - Extra structured context.
 */
function safeEmit(logger, level, message, context) {
	if (!logger.emit) return;
	try {
		logger.emit(level, message, context);
	} catch {
		// best-effort logging only
	}
}
/**
 * Strict equality with boolean coercion only.
 * No string fallback (prevents type confusion like 0 == "0").
 * Boolean coercion: true/"true" and false/"false" are equal.
 */
function strictEqual(a, b) {
	if (a === b) return true;
	// Only the boolean-vs-string pairing gets coercion, in either order.
	const boolMatchesString = (boolVal, strVal) => typeof boolVal === "boolean" && typeof strVal === "string" && boolVal === (strVal === "true");
	return boolMatchesString(a, b) || boolMatchesString(b, a);
}
+ var AshCompiler = class {
223
+ _currentJobName;
224
+ _currentStageIndex;
225
+ _jobMap = /* @__PURE__ */ new Map();
226
+ compile(ast) {
227
+ const params = /* @__PURE__ */ new Map();
228
+ for (const stmt of ast.statements) if (stmt.kind === "param") params.set(stmt.name, stmt.defaultValue);
229
+ const variables = new Map(params);
230
+ const runtimeLets = [];
231
+ for (const stmt of ast.statements) if (stmt.kind === "let") if (stmt.pipeline) runtimeLets.push(stmt);
232
+ else variables.set(stmt.name, stmt.value);
233
+ const units = ast.statements.filter((s) => s.kind !== "let" && s.kind !== "param").map((stmt) => this.compileStatement(stmt, variables, runtimeLets));
234
+ const jobs = units.filter((u) => u.kind === "job");
235
+ const jobMap = /* @__PURE__ */ new Map();
236
+ for (const job of jobs) jobMap.set(job.name, job);
237
+ this._jobMap = jobMap;
238
+ const routeTargetNames = /* @__PURE__ */ new Set();
239
+ const triggeredJobs = /* @__PURE__ */ new Set();
240
+ for (const job of ast.jobs) {
241
+ if (job.trigger) triggeredJobs.add(job.name);
242
+ for (const stage of job.pipeline) if (stage.kind === "route") {
243
+ for (const branch of stage.branches) routeTargetNames.add(branch.targetJob);
244
+ if (stage.fallback) routeTargetNames.add(stage.fallback);
245
+ }
246
+ }
247
+ for (const name of triggeredJobs) routeTargetNames.delete(name);
248
+ return {
249
+ jobs,
250
+ units,
251
+ jobMap,
252
+ params,
253
+ routeTargets: routeTargetNames.size > 0 ? routeTargetNames : void 0
254
+ };
255
+ }
256
+ compileStatement(stmt, variables, runtimeLets = []) {
257
+ if (stmt.kind === "output") return this.compileTopLevelOutput(stmt);
258
+ return this.compileJob(stmt, variables, runtimeLets);
259
+ }
260
+ compileTopLevelOutput(output) {
261
+ return {
262
+ kind: "output",
263
+ message: output.message,
264
+ execute: async (ctx) => {
265
+ ctx.logger.log("output", "output", { message: output.message });
266
+ safeOutput(ctx, output.message);
267
+ }
268
+ };
269
+ }
270
+ compileJob(job, variables = /* @__PURE__ */ new Map(), runtimeLets = []) {
271
+ const stages = job.pipeline;
272
+ const annotations = job.annotations;
273
+ const retryAnn = annotations.find((a) => a.name === "retry");
274
+ const timeoutAnn = annotations.find((a) => a.name === "timeout");
275
+ const onErrorAnn = annotations.find((a) => a.name === "on_error");
276
+ const budgetAnn = annotations.find((a) => a.name === "budget");
277
+ const retryCount = retryAnn ? retryAnn.args.length > 0 ? Number(retryAnn.args[0]) : 3 : 0;
278
+ const timeoutMs = timeoutAnn ? timeoutAnn.args.length > 0 ? Number(timeoutAnn.args[0]) : 0 : 0;
279
+ const onErrorStrategy = onErrorAnn?.args[0];
280
+ const onErrorPath = onErrorStrategy === "save" ? onErrorAnn?.args[1] : void 0;
281
+ const budgetLimits = {};
282
+ if (budgetAnn) for (let i = 0; i < budgetAnn.args.length; i += 2) budgetLimits[budgetAnn.args[i]] = Number(budgetAnn.args[i + 1]);
283
+ const capsAnns = annotations.filter((a) => a.name === "caps");
284
+ const scriptCaps = [];
285
+ for (const ann of capsAnns) if (ann.args.length > 0 && ann.args.length % 2 === 0) scriptCaps.push(...require_type_checker.parseCaps(ann));
286
+ const self = this;
287
+ const runOnce = async (ctx, initialStream) => {
288
+ const errors = [];
289
+ const jobInitialStream = initialStream ?? [];
290
+ let stream = [...jobInitialStream];
291
+ const stageMetrics = [];
292
+ const jobStart = performance.now();
293
+ const budgetUsed = {
294
+ actions: 0,
295
+ writes: 0,
296
+ records: 0,
297
+ tokens: 0,
298
+ cost: 0
299
+ };
300
+ const checkBudget = (dim, increment) => {
301
+ if (!(dim in budgetLimits)) return;
302
+ budgetUsed[dim] += increment;
303
+ if (budgetUsed[dim] > budgetLimits[dim]) throw new Error(`budget exceeded: ${dim} used ${budgetUsed[dim]}, limit ${budgetLimits[dim]}`);
304
+ };
305
+ const checkCap = (op, path) => {
306
+ if (scriptCaps.length > 0 && !require_type_checker.hasCapFor(scriptCaps, op, path)) throw new Error(`Permission denied: @caps does not allow ${op} '${path}'`);
307
+ };
308
+ const runtimeVars = new Map(variables);
309
+ for (const rtLet of runtimeLets) if (rtLet.pipeline) try {
310
+ let rtStream = [];
311
+ for (const stage of rtLet.pipeline) rtStream = await self.executeStage(stage, rtStream, ctx, errors, runtimeVars);
312
+ if (rtStream.length === 1 && typeof rtStream[0] === "object" && rtStream[0] !== null) {
313
+ const obj = rtStream[0];
314
+ const keys = Object.keys(obj);
315
+ if (keys.length === 1) {
316
+ const val = obj[keys[0]];
317
+ if (typeof val === "number" || typeof val === "string") {
318
+ runtimeVars.set(rtLet.name, val);
319
+ continue;
320
+ }
321
+ }
322
+ }
323
+ if (rtStream.length === 1) {
324
+ const v = rtStream[0];
325
+ if (typeof v === "number" || typeof v === "string") runtimeVars.set(rtLet.name, v);
326
+ else runtimeVars.set(rtLet.name, rtStream.length);
327
+ } else runtimeVars.set(rtLet.name, rtStream.length);
328
+ } catch (e) {
329
+ errors.push(`Runtime let '${rtLet.name}' failed: ${e.message ?? String(e)}`);
330
+ return {
331
+ status: "error",
332
+ recordCount: 0,
333
+ errors,
334
+ jobName: job.name,
335
+ totalDurationMs: performance.now() - jobStart,
336
+ stages: stageMetrics
337
+ };
338
+ }
339
+ for (let i = 0; i < stages.length; i++) {
340
+ const stage = stages[i];
341
+ const inputCount = stream.length;
342
+ const stageStart = performance.now();
343
+ self._currentJobName = job.name;
344
+ self._currentStageIndex = i;
345
+ try {
346
+ if (stage.kind === "action" && !stage.relative && i > 0 && stream.length === 0) {
347
+ const durationMs$1 = performance.now() - stageStart;
348
+ ctx.logger.log(stage.kind, "skip", { reason: "empty-stream" });
349
+ safeEmit(ctx.logger, "debug", `${stage.kind} skip (empty stream)`, {
350
+ jobName: job.name,
351
+ stageIndex: i
352
+ });
353
+ stageMetrics.push({
354
+ name: stage.kind,
355
+ index: i,
356
+ inputCount,
357
+ outputCount: 0,
358
+ durationMs: durationMs$1
359
+ });
360
+ continue;
361
+ }
362
+ ctx.logger.log(stage.kind, "enter", { recordCount: stream.length });
363
+ safeEmit(ctx.logger, "debug", `${stage.kind} enter`, {
364
+ jobName: job.name,
365
+ stageIndex: i
366
+ });
367
+ stream = await self.executeStage(stage, stream, ctx, errors, runtimeVars, checkBudget, checkCap, jobInitialStream);
368
+ const hasInlineParams = stage.kind === "action" && !stage.relative && stage.params && Object.keys(stage.params).length > 0;
369
+ if (stage.kind === "action" && !stage.relative && !hasInlineParams) checkBudget("actions", 1);
370
+ if (stage.kind === "save" || stage.kind === "publish" || stage.kind === "tee") checkBudget("writes", 1);
371
+ checkBudget("records", stream.length);
372
+ const durationMs = performance.now() - stageStart;
373
+ ctx.logger.log(stage.kind, "exit", { recordCount: stream.length });
374
+ safeEmit(ctx.logger, "debug", `${stage.kind} exit`, {
375
+ jobName: job.name,
376
+ stageIndex: i
377
+ });
378
+ stageMetrics.push({
379
+ name: stage.kind,
380
+ index: i,
381
+ inputCount,
382
+ outputCount: stream.length,
383
+ durationMs
384
+ });
385
+ } catch (e) {
386
+ const durationMs = performance.now() - stageStart;
387
+ const errMsg = e.message ?? String(e);
388
+ errors.push(errMsg);
389
+ if (onErrorStrategy === "skip") {
390
+ stageMetrics.push({
391
+ name: stage.kind,
392
+ index: i,
393
+ inputCount,
394
+ outputCount: stream.length,
395
+ durationMs,
396
+ error: errMsg
397
+ });
398
+ continue;
399
+ } else if (onErrorStrategy === "save" && onErrorPath) {
400
+ try {
401
+ ctx.world.write(onErrorPath, stream.map((item) => ({
402
+ _error: errMsg,
403
+ _item: item
404
+ })));
405
+ } catch {
406
+ errors.push(`Failed to write errors to ${onErrorPath}`);
407
+ }
408
+ stageMetrics.push({
409
+ name: stage.kind,
410
+ index: i,
411
+ inputCount,
412
+ outputCount: stream.length,
413
+ durationMs,
414
+ error: errMsg
415
+ });
416
+ continue;
417
+ }
418
+ stageMetrics.push({
419
+ name: stage.kind,
420
+ index: i,
421
+ inputCount,
422
+ outputCount: stream.length,
423
+ durationMs,
424
+ error: errMsg
425
+ });
426
+ return {
427
+ status: "error",
428
+ recordCount: stream.length,
429
+ errors,
430
+ jobName: job.name,
431
+ totalDurationMs: performance.now() - jobStart,
432
+ stages: stageMetrics
433
+ };
434
+ }
435
+ }
436
+ return {
437
+ status: errors.length > 0 ? "partial" : "ok",
438
+ recordCount: stream.length,
439
+ errors,
440
+ jobName: job.name,
441
+ totalDurationMs: performance.now() - jobStart,
442
+ stages: stageMetrics
443
+ };
444
+ };
445
+ const runWithTimeout = async (ctx, initialStream) => {
446
+ if (timeoutMs <= 0) return runOnce(ctx, initialStream);
447
+ return Promise.race([runOnce(ctx, initialStream), new Promise((_, reject) => setTimeout(() => reject(/* @__PURE__ */ new Error(`Timeout: job exceeded ${timeoutMs}ms`)), timeoutMs))]);
448
+ };
449
+ return {
450
+ kind: "job",
451
+ name: job.name,
452
+ trigger: job.trigger,
453
+ execute: async (ctx, initialStream) => {
454
+ if (retryCount <= 0 && !retryAnn) return runWithTimeout(ctx, initialStream);
455
+ const maxAttempts = retryCount > 0 ? retryCount : 1;
456
+ const allErrors = [];
457
+ for (let attempt = 0; attempt < maxAttempts; attempt++) try {
458
+ const result = await runWithTimeout(ctx, initialStream);
459
+ if (result.status !== "error") return result;
460
+ allErrors.push(...result.errors);
461
+ if (attempt === maxAttempts - 1) return {
462
+ status: "error",
463
+ recordCount: result.recordCount,
464
+ errors: allErrors
465
+ };
466
+ } catch (e) {
467
+ allErrors.push(e.message ?? String(e));
468
+ if (attempt === maxAttempts - 1) return {
469
+ status: "error",
470
+ recordCount: 0,
471
+ errors: allErrors
472
+ };
473
+ }
474
+ return {
475
+ status: "error",
476
+ recordCount: 0,
477
+ errors: allErrors
478
+ };
479
+ }
480
+ };
481
+ }
482
+ async executeStage(stage, stream, ctx, errors, variables = /* @__PURE__ */ new Map(), budgetCheck, capCheck, initialStream) {
483
+ switch (stage.kind) {
484
+ case "find": {
485
+ let findPath = stage.path;
486
+ if (findPath.includes("${") && stream.length > 0) {
487
+ const rec = typeof stream[0] === "object" && stream[0] !== null ? stream[0] : {};
488
+ findPath = require_template.resolveTemplatePath(findPath, rec);
489
+ }
490
+ if (capCheck) capCheck("read", findPath);
491
+ const capPath = findPath.split("/").slice(0, 3).join("/");
492
+ if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) throw new Error(`Permission denied: cannot read '${findPath}'`);
493
+ try {
494
+ const query = stage.query ? this.resolveQueryVars(stage.query, variables) : void 0;
495
+ let result = await ctx.world.read(findPath, query);
496
+ if (query && result.length > 0) result = result.filter((item) => {
497
+ try {
498
+ return this.evaluateWhere({
499
+ kind: "where",
500
+ left: query.field,
501
+ op: query.op,
502
+ right: query.value
503
+ }, item);
504
+ } catch {
505
+ return false;
506
+ }
507
+ });
508
+ return result;
509
+ } catch {
510
+ return [];
511
+ }
512
+ }
513
+ case "where": {
514
+ const resolvedStage = this.resolveWhereVars(stage, variables);
515
+ return stream.filter((item) => {
516
+ try {
517
+ return this.evaluateWhere(resolvedStage, item);
518
+ } catch {
519
+ return false;
520
+ }
521
+ });
522
+ }
523
+ case "map": {
524
+ const IMMUTABLE_FIELDS = ["path", "kind"];
525
+ const preserveSystemFields = (original, mapped) => {
526
+ if (typeof original === "object" && original !== null) {
527
+ for (const field of IMMUTABLE_FIELDS) if (field in original) mapped[field] = original[field];
528
+ }
529
+ return mapped;
530
+ };
531
+ if (stage.exprMappings) return stream.map((item) => {
532
+ const result = {};
533
+ for (const [key, expr] of Object.entries(stage.exprMappings)) result[key] = this.evaluateExpression(expr, item, variables);
534
+ return preserveSystemFields(item, result);
535
+ });
536
+ if (stage.expression) return stream.map((item) => this.evaluateExpression(stage.expression, item, variables));
537
+ if (stage.mappings) return stream.map((item) => {
538
+ const result = {};
539
+ for (const [key, field] of Object.entries(stage.mappings)) result[key] = this.resolveField(item, field);
540
+ return preserveSystemFields(item, result);
541
+ });
542
+ return stream.map((item) => this.resolveField(item, stage.field));
543
+ }
544
+ case "save": {
545
+ if (capCheck) capCheck("write", stage.path);
546
+ const capPath = stage.path.split("/").slice(0, 3).join("/");
547
+ if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
548
+ errors.push(`Permission denied: cannot write '${stage.path}'`);
549
+ return stream;
550
+ }
551
+ ctx.world.write(stage.path, stream);
552
+ return [];
553
+ }
554
+ case "publish": {
555
+ if (capCheck) capCheck("write", stage.path);
556
+ const capPath = stage.path.split("/").slice(0, 3).join("/");
557
+ if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) {
558
+ errors.push(`Permission denied: cannot publish '${stage.path}'`);
559
+ return stream;
560
+ }
561
+ ctx.world.publish(stage.path, stream);
562
+ return [];
563
+ }
564
+ case "tee":
565
+ if (capCheck) capCheck("write", stage.path);
566
+ try {
567
+ ctx.world.write(stage.path, [...stream]);
568
+ } catch (e) {
569
+ errors.push(`tee side-write failed: ${e.message}`);
570
+ }
571
+ return stream;
572
+ case "fanout": return await this.executeFanout(stage, stream, ctx, errors, variables, budgetCheck, capCheck, initialStream);
573
+ case "output":
574
+ if (stage.expression) for (const item of stream) {
575
+ const val = this.evaluateExpression(stage.expression, item, variables);
576
+ const text = val == null ? "" : String(val);
577
+ ctx.logger.log("output", "output", {
578
+ message: text,
579
+ streamSize: stream.length
580
+ });
581
+ safeOutput(ctx, text, {
582
+ jobName: this._currentJobName,
583
+ stageIndex: this._currentStageIndex
584
+ });
585
+ }
586
+ else {
587
+ ctx.logger.log("output", "output", {
588
+ message: stage.message,
589
+ streamSize: stream.length
590
+ });
591
+ safeOutput(ctx, stage.message, {
592
+ jobName: this._currentJobName,
593
+ stageIndex: this._currentStageIndex
594
+ });
595
+ }
596
+ return stream;
597
+ case "input": {
598
+ ctx.logger.log("input", "prompt", { prompt: stage.prompt });
599
+ const response = ctx.world.input ? await ctx.world.input(stage.prompt) : "";
600
+ return [{
601
+ prompt: stage.prompt,
602
+ response
603
+ }];
604
+ }
605
+ case "count": return [{ count: stream.length }];
606
+ case "group-by": {
607
+ const groups = /* @__PURE__ */ new Map();
608
+ for (const item of stream) {
609
+ const key = this.resolveField(item, stage.field);
610
+ if (!groups.has(key)) groups.set(key, []);
611
+ groups.get(key).push(item);
612
+ }
613
+ return Array.from(groups.entries()).map(([key, items]) => ({
614
+ key,
615
+ items
616
+ }));
617
+ }
618
+ case "route": {
619
+ const buckets = /* @__PURE__ */ new Map();
620
+ for (const item of stream) {
621
+ const val = String(this.resolveField(item, stage.field) ?? "");
622
+ const branch = stage.branches.find((b) => b.value === val);
623
+ const targetJob = branch ? branch.targetJob : stage.fallback;
624
+ if (targetJob) {
625
+ if (!buckets.has(targetJob)) buckets.set(targetJob, []);
626
+ buckets.get(targetJob).push(item);
627
+ }
628
+ }
629
+ for (const [jobName, items] of buckets) {
630
+ const compiled = this._jobMap.get(jobName);
631
+ if (!compiled) {
632
+ errors.push(`Route target job '${jobName}' not found`);
633
+ continue;
634
+ }
635
+ await compiled.execute(ctx, items);
636
+ }
637
+ return [];
638
+ }
639
+ case "lookup": {
640
+ const lookupData = await ctx.world.read(stage.path);
641
+ const index = /* @__PURE__ */ new Map();
642
+ for (const item of lookupData) {
643
+ const key = String(this.resolveField(item, stage.joinKey) ?? "");
644
+ if (!index.has(key)) index.set(key, item);
645
+ }
646
+ return stream.map((item) => {
647
+ const key = String(this.resolveField(item, stage.joinKey) ?? "");
648
+ const match = index.get(key);
649
+ if (match && typeof match === "object" && match !== null && typeof item === "object" && item !== null) return {
650
+ ...match,
651
+ ...item
652
+ };
653
+ return item;
654
+ });
655
+ }
656
+ case "action": {
657
+ if (!ctx.world.exec) throw new Error(`WorldInterface.exec not available — cannot execute action '${stage.path}'`);
658
+ const hasParams = stage.params && Object.keys(stage.params).length > 0;
659
+ const hasTemplatePath = stage.path.includes("${");
660
+ if (stage.relative) {
661
+ const results = [];
662
+ for (const record of stream) {
663
+ if (budgetCheck) budgetCheck("actions", 1);
664
+ if (typeof record !== "object" || record === null || !("path" in record)) throw new Error(`Relative action '${stage.path}' requires records with a 'path' field`);
665
+ const fullPath = String(record.path) + "/.actions/" + stage.path;
666
+ if (capCheck) capCheck("exec", fullPath);
667
+ const capPath$1 = fullPath.split("/").slice(0, 3).join("/");
668
+ if (ctx.caps.size > 0 && !ctx.caps.has(capPath$1) && !ctx.caps.has("*")) throw new Error(`Permission denied: cannot exec '${fullPath}'`);
669
+ const rec = record;
670
+ const resolvedParams = hasParams ? require_template.resolveActionParams(stage.params, rec) : void 0;
671
+ const input = hasParams ? [] : [record];
672
+ const result$1 = await ctx.world.exec(fullPath, input, resolvedParams);
673
+ if (result$1 != null) if (Array.isArray(result$1)) results.push(...result$1);
674
+ else results.push(result$1);
675
+ }
676
+ return results;
677
+ }
678
+ if (hasParams || hasTemplatePath) {
679
+ const results = [];
680
+ const records = stream.length > 0 ? stream : [{}];
681
+ const initialRec = initialStream && initialStream.length > 0 && typeof initialStream[0] === "object" && initialStream[0] !== null ? initialStream[0] : void 0;
682
+ for (const record of records) {
683
+ if (budgetCheck) budgetCheck("actions", 1);
684
+ const rec = typeof record === "object" && record !== null ? record : {};
685
+ const pathRec = initialRec ?? rec;
686
+ const resolvedPath = hasTemplatePath ? require_template.resolveTemplatePath(stage.path, pathRec) : stage.path;
687
+ if (capCheck) capCheck("exec", resolvedPath);
688
+ const capPath$1 = resolvedPath.split("/").slice(0, 3).join("/");
689
+ if (ctx.caps.size > 0 && !ctx.caps.has(capPath$1) && !ctx.caps.has("*")) throw new Error(`Permission denied: cannot exec '${resolvedPath}'`);
690
+ const resolvedParams = hasParams ? require_template.resolveActionParams(stage.params, rec) : void 0;
691
+ const input = hasParams ? [] : [record];
692
+ const result$1 = await ctx.world.exec(resolvedPath, input, resolvedParams);
693
+ if (result$1 != null) if (Array.isArray(result$1)) results.push(...result$1);
694
+ else results.push(result$1);
695
+ }
696
+ return results;
697
+ }
698
+ if (capCheck) capCheck("exec", stage.path);
699
+ const capPath = stage.path.split("/").slice(0, 3).join("/");
700
+ if (ctx.caps.size > 0 && !ctx.caps.has(capPath) && !ctx.caps.has("*")) throw new Error(`Permission denied: cannot exec '${stage.path}'`);
701
+ const result = await ctx.world.exec(stage.path, stream, void 0);
702
+ if (result == null) return [];
703
+ if (!Array.isArray(result)) return [result];
704
+ return result;
705
+ }
706
+ default: throw new Error(`Unknown stage kind: ${stage.kind}`);
707
+ }
708
+ }
709
+ resolveQueryVars(query, variables) {
710
+ if (typeof query.value === "string" && /^\$[a-zA-Z_]\w*$/.test(query.value)) {
711
+ const varName = query.value.slice(1);
712
+ if (!variables.has(varName)) throw new Error(`Undefined variable: $${varName}`);
713
+ return {
714
+ ...query,
715
+ value: variables.get(varName)
716
+ };
717
+ }
718
+ return query;
719
+ }
720
+ resolveWhereVars(clause, variables) {
721
+ if (typeof clause.right === "string" && /^\$[a-zA-Z_]\w*$/.test(clause.right)) {
722
+ const varName = clause.right.slice(1);
723
+ if (!variables.has(varName)) throw new Error(`Undefined variable: $${varName}`);
724
+ return {
725
+ ...clause,
726
+ right: variables.get(varName)
727
+ };
728
+ }
729
+ return clause;
730
+ }
731
+ evaluateWhere(clause, item) {
732
+ const left = this.resolveField(item, clause.left);
733
+ const right = clause.right;
734
+ switch (clause.op) {
735
+ case "==": return strictEqual(left, right);
736
+ case "!=": return !strictEqual(left, right);
737
+ case ">":
738
+ case "<":
739
+ case ">=":
740
+ case "<=": {
741
+ const l = Number(left);
742
+ const r = Number(right);
743
+ if (!Number.isFinite(l) || !Number.isFinite(r)) return false;
744
+ if (clause.op === ">") return l > r;
745
+ if (clause.op === "<") return l < r;
746
+ if (clause.op === ">=") return l >= r;
747
+ return l <= r;
748
+ }
749
+ default: return false;
750
+ }
751
+ }
752
+ resolveField(item, field) {
753
+ const parts = field.split(".");
754
+ let current = item;
755
+ for (const part of parts) {
756
+ if (current == null) return void 0;
757
+ if (typeof current !== "object" || !Object.hasOwn(current, part)) return void 0;
758
+ current = current[part];
759
+ }
760
+ return current;
761
+ }
762
+ evaluateExpression(expr, item, variables) {
763
+ switch (expr.kind) {
764
+ case "literal": return expr.value;
765
+ case "field_access": return this.resolveField(item, expr.path);
766
+ case "var_ref":
767
+ if (!variables.has(expr.name)) throw new Error(`Undefined variable: $${expr.name}`);
768
+ return variables.get(expr.name);
769
+ case "binary": {
770
+ const left = this.evaluateExpression(expr.left, item, variables);
771
+ const right = this.evaluateExpression(expr.right, item, variables);
772
+ switch (expr.op) {
773
+ case "+":
774
+ if (typeof left === "string" || typeof right === "string") return String(left ?? "") + String(right ?? "");
775
+ return Number(left) + Number(right);
776
+ case "-": return Number(left) - Number(right);
777
+ case "*": return Number(left) * Number(right);
778
+ case "/": return Number(left) / Number(right);
779
+ default: throw new Error(`Unknown operator: ${expr.op}`);
780
+ }
781
+ }
782
+ default: throw new Error(`Unknown expression kind: ${expr.kind}`);
783
+ }
784
+ }
785
+ async executeFanout(stage, stream, ctx, errors, variables = /* @__PURE__ */ new Map(), budgetCheck, capCheck, initialStream) {
786
+ const results = [];
787
+ for (const branch of stage.branches) {
788
+ let branchStream = [...stream];
789
+ try {
790
+ for (const branchStage of branch) branchStream = await this.executeStage(branchStage, branchStream, ctx, errors, variables, budgetCheck, capCheck, initialStream);
791
+ results.push(branchStream);
792
+ } catch (e) {
793
+ errors.push(`fanout branch failed: ${e.message}`);
794
+ }
795
+ }
796
+ return results.flat();
797
+ }
798
+ };
799
+
800
+ //#endregion
801
+ exports.AshCompiler = AshCompiler;
802
+ exports.compileSource = compileSource;