@aigne/ash 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. package/DESIGN.md +41 -0
  2. package/dist/ai-dev-loop/ash-run-result.cjs +12 -0
  3. package/dist/ai-dev-loop/ash-run-result.d.cts +28 -0
  4. package/dist/ai-dev-loop/ash-run-result.d.cts.map +1 -0
  5. package/dist/ai-dev-loop/ash-run-result.d.mts +28 -0
  6. package/dist/ai-dev-loop/ash-run-result.d.mts.map +1 -0
  7. package/dist/ai-dev-loop/ash-run-result.mjs +11 -0
  8. package/dist/ai-dev-loop/ash-run-result.mjs.map +1 -0
  9. package/dist/ai-dev-loop/ash-typed-error.cjs +51 -0
  10. package/dist/ai-dev-loop/ash-typed-error.d.cts +54 -0
  11. package/dist/ai-dev-loop/ash-typed-error.d.cts.map +1 -0
  12. package/dist/ai-dev-loop/ash-typed-error.d.mts +54 -0
  13. package/dist/ai-dev-loop/ash-typed-error.d.mts.map +1 -0
  14. package/dist/ai-dev-loop/ash-typed-error.mjs +50 -0
  15. package/dist/ai-dev-loop/ash-typed-error.mjs.map +1 -0
  16. package/dist/ai-dev-loop/ash-validate.cjs +27 -0
  17. package/dist/ai-dev-loop/ash-validate.d.cts +7 -0
  18. package/dist/ai-dev-loop/ash-validate.d.cts.map +1 -0
  19. package/dist/ai-dev-loop/ash-validate.d.mts +7 -0
  20. package/dist/ai-dev-loop/ash-validate.d.mts.map +1 -0
  21. package/dist/ai-dev-loop/ash-validate.mjs +28 -0
  22. package/dist/ai-dev-loop/ash-validate.mjs.map +1 -0
  23. package/dist/ai-dev-loop/dev-loop.cjs +134 -0
  24. package/dist/ai-dev-loop/dev-loop.d.cts +28 -0
  25. package/dist/ai-dev-loop/dev-loop.d.cts.map +1 -0
  26. package/dist/ai-dev-loop/dev-loop.d.mts +28 -0
  27. package/dist/ai-dev-loop/dev-loop.d.mts.map +1 -0
  28. package/dist/ai-dev-loop/dev-loop.mjs +135 -0
  29. package/dist/ai-dev-loop/dev-loop.mjs.map +1 -0
  30. package/dist/ai-dev-loop/index.cjs +24 -0
  31. package/dist/ai-dev-loop/index.d.cts +9 -0
  32. package/dist/ai-dev-loop/index.d.mts +9 -0
  33. package/dist/ai-dev-loop/index.mjs +10 -0
  34. package/dist/ai-dev-loop/live-mode.cjs +17 -0
  35. package/dist/ai-dev-loop/live-mode.d.cts +24 -0
  36. package/dist/ai-dev-loop/live-mode.d.cts.map +1 -0
  37. package/dist/ai-dev-loop/live-mode.d.mts +24 -0
  38. package/dist/ai-dev-loop/live-mode.d.mts.map +1 -0
  39. package/dist/ai-dev-loop/live-mode.mjs +17 -0
  40. package/dist/ai-dev-loop/live-mode.mjs.map +1 -0
  41. package/dist/ai-dev-loop/meta-tools.cjs +123 -0
  42. package/dist/ai-dev-loop/meta-tools.d.cts +24 -0
  43. package/dist/ai-dev-loop/meta-tools.d.cts.map +1 -0
  44. package/dist/ai-dev-loop/meta-tools.d.mts +24 -0
  45. package/dist/ai-dev-loop/meta-tools.d.mts.map +1 -0
  46. package/dist/ai-dev-loop/meta-tools.mjs +120 -0
  47. package/dist/ai-dev-loop/meta-tools.mjs.map +1 -0
  48. package/dist/ai-dev-loop/structured-runner.cjs +154 -0
  49. package/dist/ai-dev-loop/structured-runner.d.cts +12 -0
  50. package/dist/ai-dev-loop/structured-runner.d.cts.map +1 -0
  51. package/dist/ai-dev-loop/structured-runner.d.mts +12 -0
  52. package/dist/ai-dev-loop/structured-runner.d.mts.map +1 -0
  53. package/dist/ai-dev-loop/structured-runner.mjs +155 -0
  54. package/dist/ai-dev-loop/structured-runner.mjs.map +1 -0
  55. package/dist/ai-dev-loop/system-prompt.cjs +55 -0
  56. package/dist/ai-dev-loop/system-prompt.d.cts +20 -0
  57. package/dist/ai-dev-loop/system-prompt.d.cts.map +1 -0
  58. package/dist/ai-dev-loop/system-prompt.d.mts +20 -0
  59. package/dist/ai-dev-loop/system-prompt.d.mts.map +1 -0
  60. package/dist/ai-dev-loop/system-prompt.mjs +54 -0
  61. package/dist/ai-dev-loop/system-prompt.mjs.map +1 -0
  62. package/dist/ast.d.cts +140 -0
  63. package/dist/ast.d.cts.map +1 -0
  64. package/dist/ast.d.mts +140 -0
  65. package/dist/ast.d.mts.map +1 -0
  66. package/dist/compiler.cjs +802 -0
  67. package/dist/compiler.d.cts +103 -0
  68. package/dist/compiler.d.cts.map +1 -0
  69. package/dist/compiler.d.mts +103 -0
  70. package/dist/compiler.d.mts.map +1 -0
  71. package/dist/compiler.mjs +802 -0
  72. package/dist/compiler.mjs.map +1 -0
  73. package/dist/index.cjs +14 -0
  74. package/dist/index.d.cts +7 -0
  75. package/dist/index.d.mts +7 -0
  76. package/dist/index.mjs +7 -0
  77. package/dist/lexer.cjs +451 -0
  78. package/dist/lexer.d.cts +14 -0
  79. package/dist/lexer.d.cts.map +1 -0
  80. package/dist/lexer.d.mts +14 -0
  81. package/dist/lexer.d.mts.map +1 -0
  82. package/dist/lexer.mjs +451 -0
  83. package/dist/lexer.mjs.map +1 -0
  84. package/dist/parser.cjs +734 -0
  85. package/dist/parser.d.cts +40 -0
  86. package/dist/parser.d.cts.map +1 -0
  87. package/dist/parser.d.mts +40 -0
  88. package/dist/parser.d.mts.map +1 -0
  89. package/dist/parser.mjs +734 -0
  90. package/dist/parser.mjs.map +1 -0
  91. package/dist/reference.cjs +130 -0
  92. package/dist/reference.d.cts +11 -0
  93. package/dist/reference.d.cts.map +1 -0
  94. package/dist/reference.d.mts +11 -0
  95. package/dist/reference.d.mts.map +1 -0
  96. package/dist/reference.mjs +130 -0
  97. package/dist/reference.mjs.map +1 -0
  98. package/dist/template.cjs +85 -0
  99. package/dist/template.mjs +84 -0
  100. package/dist/template.mjs.map +1 -0
  101. package/dist/type-checker.cjs +582 -0
  102. package/dist/type-checker.d.cts +31 -0
  103. package/dist/type-checker.d.cts.map +1 -0
  104. package/dist/type-checker.d.mts +31 -0
  105. package/dist/type-checker.d.mts.map +1 -0
  106. package/dist/type-checker.mjs +573 -0
  107. package/dist/type-checker.mjs.map +1 -0
  108. package/package.json +29 -0
  109. package/src/ai-dev-loop/ash-run-result.test.ts +113 -0
  110. package/src/ai-dev-loop/ash-run-result.ts +46 -0
  111. package/src/ai-dev-loop/ash-typed-error.test.ts +136 -0
  112. package/src/ai-dev-loop/ash-typed-error.ts +50 -0
  113. package/src/ai-dev-loop/ash-validate.test.ts +54 -0
  114. package/src/ai-dev-loop/ash-validate.ts +34 -0
  115. package/src/ai-dev-loop/dev-loop.test.ts +364 -0
  116. package/src/ai-dev-loop/dev-loop.ts +156 -0
  117. package/src/ai-dev-loop/dry-run.test.ts +107 -0
  118. package/src/ai-dev-loop/e2e-multi-fix.test.ts +473 -0
  119. package/src/ai-dev-loop/e2e.test.ts +324 -0
  120. package/src/ai-dev-loop/index.ts +15 -0
  121. package/src/ai-dev-loop/invariants.test.ts +253 -0
  122. package/src/ai-dev-loop/live-mode.test.ts +63 -0
  123. package/src/ai-dev-loop/live-mode.ts +33 -0
  124. package/src/ai-dev-loop/meta-tools.test.ts +120 -0
  125. package/src/ai-dev-loop/meta-tools.ts +142 -0
  126. package/src/ai-dev-loop/structured-runner.test.ts +159 -0
  127. package/src/ai-dev-loop/structured-runner.ts +209 -0
  128. package/src/ai-dev-loop/system-prompt.test.ts +102 -0
  129. package/src/ai-dev-loop/system-prompt.ts +81 -0
  130. package/src/ast.ts +186 -0
  131. package/src/compiler.test.ts +2933 -0
  132. package/src/compiler.ts +1103 -0
  133. package/src/e2e.test.ts +552 -0
  134. package/src/index.ts +16 -0
  135. package/src/lexer.test.ts +538 -0
  136. package/src/lexer.ts +222 -0
  137. package/src/parser.test.ts +1024 -0
  138. package/src/parser.ts +835 -0
  139. package/src/reference.test.ts +166 -0
  140. package/src/reference.ts +125 -0
  141. package/src/template.test.ts +210 -0
  142. package/src/template.ts +139 -0
  143. package/src/type-checker.test.ts +1494 -0
  144. package/src/type-checker.ts +785 -0
  145. package/tsconfig.json +9 -0
  146. package/tsdown.config.ts +12 -0
@@ -0,0 +1,1494 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import { AshLexer } from "./lexer.js";
3
+ import { AshParser } from "./parser.js";
4
+ import { checkPipelineTypes, checkProhibitedPatterns, checkAnnotations } from "./type-checker.js";
5
+ import { compileSource } from "./compiler.js";
6
+ import type { PipelineStage, Program } from "./ast.js";
7
+ import type { AshDiagnostic } from "./type-checker.js";
8
+
9
+ const lexer = new AshLexer();
10
+ const parser = new AshParser();
11
+
12
+ function parse(source: string): Program {
13
+ return parser.parse(lexer.tokenize(source));
14
+ }
15
+
16
+ function stages(source: string): PipelineStage[] {
17
+ return parse(source).jobs[0].pipeline;
18
+ }
19
+
20
+ describe("Phase 2: Type Checker + Prohibited Patterns", () => {
21
+ // ── Happy: type compatibility ──
22
+
23
+ it("find → where → type compatible (object_stream → object_stream)", () => {
24
+ const errors = checkPipelineTypes(stages("job q { find /world/x | where age > 18 }"));
25
+ expect(errors).toHaveLength(0);
26
+ });
27
+
28
+ it("where → map → type compatible", () => {
29
+ const errors = checkPipelineTypes(stages("job q { where x > 1 | map name }"));
30
+ expect(errors).toHaveLength(0);
31
+ });
32
+
33
+ it("map → save → type compatible", () => {
34
+ const errors = checkPipelineTypes(stages("job q { map name | save /world/out }"));
35
+ expect(errors).toHaveLength(0);
36
+ });
37
+
38
+ it("full chain find | where | map | tee | save → passes", () => {
39
+ const errors = checkPipelineTypes(stages("job q { find /world/x | where x > 1 | map name | tee /world/bak | save /world/out }"));
40
+ expect(errors).toHaveLength(0);
41
+ });
42
+
43
+ it("fanout accepts object_stream → passes", () => {
44
+ const errors = checkPipelineTypes(stages("job q { find /world/x | fanout { save /world/a, save /world/b } }"));
45
+ expect(errors).toHaveLength(0);
46
+ });
47
+
48
+ it("publish at end receives object_stream → passes", () => {
49
+ const errors = checkPipelineTypes(stages("job q { find /world/x | publish /topic/out }"));
50
+ expect(errors).toHaveLength(0);
51
+ });
52
+
53
+ // ── Output / Input type checking ──
54
+
55
+ it("output in pipeline: passes through object_stream", () => {
56
+ const errors = checkPipelineTypes(stages('job q { find /world/x | output "msg" | save /world/out }'));
57
+ expect(errors).toHaveLength(0);
58
+ });
59
+
60
+ it("input as source: outputs object_stream", () => {
61
+ const errors = checkPipelineTypes(stages('job q { input "name?" | save /world/out }'));
62
+ expect(errors).toHaveLength(0);
63
+ });
64
+
65
+ // ── Bad: type incompatibility ──
66
+
67
+ it("save → where → type mismatch (none → object_stream)", () => {
68
+ const errors = checkPipelineTypes(stages("job q { save /world/out | where x > 1 }"));
69
+ expect(errors).toHaveLength(1);
70
+ expect(errors[0].message).toMatch(/type mismatch/i);
71
+ expect(errors[0].actual).toBe("none");
72
+ expect(errors[0].expected).toBe("object_stream");
73
+ });
74
+
75
+ it("map receives none input → mismatch", () => {
76
+ const errors = checkPipelineTypes(stages("job q { save /world/out | map name }"));
77
+ expect(errors).toHaveLength(1);
78
+ });
79
+
80
+ it("two saves in sequence → type error", () => {
81
+ const errors = checkPipelineTypes(stages("job q { save /world/a | save /world/b }"));
82
+ expect(errors).toHaveLength(1);
83
+ });
84
+
85
+ // ── Edge ──
86
+
87
+ it("single command pipeline → no type check needed, passes", () => {
88
+ const errors = checkPipelineTypes(stages("job q { find /world/x }"));
89
+ expect(errors).toHaveLength(0);
90
+ });
91
+
92
+ it("tee in middle: object_stream → object_stream → correct", () => {
93
+ const errors = checkPipelineTypes(stages("job q { find /world/x | tee /world/bak | map name }"));
94
+ expect(errors).toHaveLength(0);
95
+ });
96
+
97
+ // ── Security ──
98
+
99
+ it("unknown annotation rejected", () => {
100
+ const prog = parse("@unknown_ann\njob q { find /world/x }");
101
+ const errors = checkAnnotations(prog.jobs[0]);
102
+ expect(errors.length).toBeGreaterThanOrEqual(1);
103
+ expect(errors[0].kind).toBe("prohibited");
104
+ });
105
+
106
+ it("valid annotation accepted", () => {
107
+ const prog = parse("@approval(human)\njob q { find /world/x }");
108
+ const errors = checkAnnotations(prog.jobs[0]);
109
+ expect(errors).toHaveLength(0);
110
+ });
111
+
112
+ // ── Data Loss ──
113
+
114
+ it("type error includes expected/actual type + pipeline position", () => {
115
+ const errors = checkPipelineTypes(stages("job q { save /world/out | where x > 1 }"));
116
+ expect(errors[0].stage).toBe(1);
117
+ expect(errors[0].expected).toBeDefined();
118
+ expect(errors[0].actual).toBeDefined();
119
+ });
120
+ });
121
+
122
+ describe("Phase 8: count/group-by type checking", () => {
123
+ it("find | count → type compatible (object_stream → single_object)", () => {
124
+ const errors = checkPipelineTypes(stages("job q { find /world/x | count }"));
125
+ expect(errors).toHaveLength(0);
126
+ });
127
+
128
+ it("find | count | save → type compatible (single_object auto-promotes)", () => {
129
+ const errors = checkPipelineTypes(stages("job q { find /world/x | count | save /world/out }"));
130
+ expect(errors).toHaveLength(0);
131
+ });
132
+
133
+ it("find | group-by dept → type compatible", () => {
134
+ const errors = checkPipelineTypes(stages("job q { find /world/x | group-by dept }"));
135
+ expect(errors).toHaveLength(0);
136
+ });
137
+
138
+ it("find | group-by dept | save → type compatible", () => {
139
+ const errors = checkPipelineTypes(stages("job q { find /world/x | group-by dept | save /world/out }"));
140
+ expect(errors).toHaveLength(0);
141
+ });
142
+
143
+ it("count is recognized as builtin (not unknown command)", () => {
144
+ const prog = parse("job q { find /world/x | count }");
145
+ const errors = checkProhibitedPatterns(prog);
146
+ expect(errors).toHaveLength(0);
147
+ });
148
+
149
+ it("group-by is recognized as builtin (not unknown command)", () => {
150
+ const prog = parse("job q { find /world/x | group-by dept }");
151
+ const errors = checkProhibitedPatterns(prog);
152
+ expect(errors).toHaveLength(0);
153
+ });
154
+ });
155
+
156
+ describe("Phase 9: Type Checker — annotation validation + enhanced errors", () => {
157
+ // Happy
158
+ it("find → where → map → save full chain passes bidirectional check", () => {
159
+ const errors = checkPipelineTypes(stages("job q { find /world/x | where x > 1 | map name | save /world/out }"));
160
+ expect(errors).toHaveLength(0);
161
+ });
162
+
163
+ it("@approval(human) passes validation", () => {
164
+ const prog = parse("@approval(human)\njob q { find /world/x }");
165
+ const errors = checkAnnotations(prog.jobs[0]);
166
+ expect(errors).toHaveLength(0);
167
+ });
168
+
169
+ it("@approval(auto) passes validation", () => {
170
+ const prog = parse("@approval(auto)\njob q { find /world/x }");
171
+ const errors = checkAnnotations(prog.jobs[0]);
172
+ expect(errors).toHaveLength(0);
173
+ });
174
+
175
+ it("@retry(3) passes validation", () => {
176
+ const prog = parse("@retry(3)\njob q { find /world/x }");
177
+ const errors = checkAnnotations(prog.jobs[0]);
178
+ expect(errors).toHaveLength(0);
179
+ });
180
+
181
+ it("@timeout(5000) passes validation", () => {
182
+ const prog = parse("@timeout(5000)\njob q { find /world/x }");
183
+ const errors = checkAnnotations(prog.jobs[0]);
184
+ expect(errors).toHaveLength(0);
185
+ });
186
+
187
+ // Bad
188
+ it("input → save → map → detects type mismatch after save", () => {
189
+ const errors = checkPipelineTypes(stages('job q { input "x" | save /world/out | map name }'));
190
+ expect(errors.length).toBeGreaterThan(0);
191
+ });
192
+
193
+ it("@approval(robot) → invalid value", () => {
194
+ const prog = parse("@approval(robot)\njob q { find /world/x }");
195
+ const errors = checkAnnotations(prog.jobs[0]);
196
+ expect(errors.length).toBeGreaterThan(0);
197
+ });
198
+
199
+ it("@retry(-1) → rejected (annotation validator rejects non-integer arg)", () => {
200
+ // With MINUS as a valid token, `-1` parses as two tokens: `-`, `1`
201
+ // The annotation sees arg[0] = "-" which is not a valid integer
202
+ const prog = parse("@retry(-1)\njob q { find /world/x }");
203
+ const errors = checkAnnotations(prog.jobs[0]);
204
+ expect(errors.length).toBeGreaterThan(0);
205
+ });
206
+
207
+ it("@retry(abc) → non-numeric argument", () => {
208
+ const prog = parse("@retry(abc)\njob q { find /world/x }");
209
+ const errors = checkAnnotations(prog.jobs[0]);
210
+ expect(errors.length).toBeGreaterThan(0);
211
+ });
212
+
213
+ // Edge
214
+ it("@readonly no args → passes", () => {
215
+ const prog = parse("@readonly\njob q { find /world/x }");
216
+ const errors = checkAnnotations(prog.jobs[0]);
217
+ expect(errors).toHaveLength(0);
218
+ });
219
+
220
+ it("multiple annotations combined → all validated", () => {
221
+ const prog = parse("@approval(human)\n@retry(3)\n@timeout(5000)\njob q { find /world/x }");
222
+ const errors = checkAnnotations(prog.jobs[0]);
223
+ expect(errors).toHaveLength(0);
224
+ });
225
+
226
+ // Security
227
+ it("annotation value cannot contain code execution", () => {
228
+ const prog = parse("@approval(process.exit)\njob q { find /world/x }");
229
+ const errors = checkAnnotations(prog.jobs[0]);
230
+ expect(errors.length).toBeGreaterThan(0);
231
+ });
232
+
233
+ // Data Loss
234
+ it("enhanced checks do not break v0 valid programs (regression)", () => {
235
+ const prog = parse("@approval(human)\n@readonly\njob q { find /world/x | where x > 1 | map name | save /world/out }");
236
+ const typeErrors = checkPipelineTypes(prog.jobs[0].pipeline);
237
+ const annErrors = checkAnnotations(prog.jobs[0]);
238
+ const prohibErrors = checkProhibitedPatterns(prog);
239
+ expect(typeErrors).toHaveLength(0);
240
+ expect(annErrors).toHaveLength(0);
241
+ expect(prohibErrors).toHaveLength(0);
242
+ });
243
+ });
244
+
245
+ describe("Phase v3-0: action type checking", () => {
246
+ it("action stage has stdin=object_stream, stdout=object_stream (find | action passes)", () => {
247
+ const errors = checkPipelineTypes(stages("job q { find /world/x | action /tools/transform }"));
248
+ expect(errors).toHaveLength(0);
249
+ });
250
+
251
+ it("action after find passes type check", () => {
252
+ const errors = checkPipelineTypes(stages("job q { find /world/x | action /tools/x }"));
253
+ expect(errors).toHaveLength(0);
254
+ });
255
+
256
+ it("action before save passes type check", () => {
257
+ const errors = checkPipelineTypes(stages("job q { action /tools/x | save /world/out }"));
258
+ expect(errors).toHaveLength(0);
259
+ });
260
+
261
+ it("find | action | save full chain passes", () => {
262
+ const errors = checkPipelineTypes(stages("job q { find /world/x | action /tools/t | save /world/out }"));
263
+ expect(errors).toHaveLength(0);
264
+ });
265
+
266
+ it("action is recognized as builtin (not unknown command)", () => {
267
+ const prog = parse("job q { find /world/x | action /tools/t }");
268
+ const errors = checkProhibitedPatterns(prog);
269
+ expect(errors).toHaveLength(0);
270
+ });
271
+
272
+ it("action as pipeline source (stdin=object_stream but first stage is allowed)", () => {
273
+ // action as first stage: receives empty stream implicitly
274
+ const errors = checkPipelineTypes(stages("job q { action /tools/generate }"));
275
+ expect(errors).toHaveLength(0);
276
+ });
277
+
278
+ it("save | action → type mismatch (save outputs none, action expects object_stream)", () => {
279
+ const errors = checkPipelineTypes(stages("job q { save /world/x | action /tools/t }"));
280
+ expect(errors).toHaveLength(1);
281
+ expect(errors[0].message).toMatch(/type mismatch/i);
282
+ });
283
+
284
+ it("@on_error(skip) annotation passes validation", () => {
285
+ const prog = parse("@on_error(skip)\njob q { find /world/x | action /tools/t }");
286
+ const errors = checkAnnotations(prog.jobs[0]);
287
+ expect(errors).toHaveLength(0);
288
+ });
289
+
290
+ it("@on_error(fail) annotation passes validation", () => {
291
+ const prog = parse("@on_error(fail)\njob q { find /world/x }");
292
+ const errors = checkAnnotations(prog.jobs[0]);
293
+ expect(errors).toHaveLength(0);
294
+ });
295
+
296
+ it("@on_error(invalid) → annotation error", () => {
297
+ const prog = parse("@on_error(invalid)\njob q { find /world/x }");
298
+ const errors = checkAnnotations(prog.jobs[0]);
299
+ expect(errors.length).toBeGreaterThan(0);
300
+ });
301
+ });
302
+
303
+ describe("Phase 10: Structured Diagnostics (AshDiagnostic)", () => {
304
+ // Happy
305
+ it("compileSource valid program → { program, diagnostics: [] }", () => {
306
+ const result = compileSource("job q { find /world/x | save /world/out }");
307
+ expect(result.program).toBeDefined();
308
+ expect(result.diagnostics).toEqual([]);
309
+ });
310
+
311
+ it("multiple type errors collected as AshDiagnostic[] with correct code", () => {
312
+ // save → where is a type mismatch; also check code field
313
+ const result = compileSource("job q { save /world/out | where x > 1 }");
314
+ expect(result.diagnostics.length).toBeGreaterThan(0);
315
+ expect(result.diagnostics[0].code).toBe("ASH_TYPE_MISMATCH");
316
+ });
317
+
318
+ it("unknown command suggestion includes available commands list", () => {
319
+ // We need a program with an unknown command — but parser won't produce one.
320
+ // Instead, test via checkProhibitedPatterns directly with a crafted program.
321
+ // The compileSource path catches this at the type-check stage.
322
+ // For this test, use the toDiagnostics helper indirectly.
323
+ const prog = parse("job q { find /world/x }");
324
+ // Manually inject unknown stage
325
+ (prog.jobs[0].pipeline as any).push({ kind: "xyzzy" });
326
+ const errors = checkProhibitedPatterns(prog);
327
+ expect(errors.length).toBeGreaterThan(0);
328
+ expect(errors[0].message).toContain("xyzzy");
329
+ });
330
+
331
+ it("annotation error returns ASH_ANNOTATION_INVALID code", () => {
332
+ const result = compileSource("@approval(robot)\njob q { find /world/x }");
333
+ expect(result.diagnostics.some(d => d.code === "ASH_ANNOTATION_INVALID")).toBe(true);
334
+ });
335
+
336
+ // Bad
337
+ it("unterminated string → ASH_SYNTAX_UNTERMINATED + line/column", () => {
338
+ const result = compileSource('job q { output "hello }');
339
+ expect(result.program).toBeUndefined();
340
+ expect(result.diagnostics).toHaveLength(1);
341
+ expect(result.diagnostics[0].code).toBe("ASH_SYNTAX_UNTERMINATED");
342
+ expect(result.diagnostics[0].line).toBeDefined();
343
+ expect(result.diagnostics[0].column).toBeDefined();
344
+ });
345
+
346
+ it("illegal character → ASH_SYNTAX_UNEXPECTED + line/column", () => {
347
+ const result = compileSource("job q { find /world/x & }");
348
+ expect(result.program).toBeUndefined();
349
+ expect(result.diagnostics).toHaveLength(1);
350
+ expect(result.diagnostics[0].code).toBe("ASH_SYNTAX_UNEXPECTED");
351
+ expect(result.diagnostics[0].line).toBeDefined();
352
+ });
353
+
354
+ it("parser error (missing {) → ASH_SYNTAX_UNEXPECTED + location", () => {
355
+ const result = compileSource("job q find /world/x }");
356
+ expect(result.program).toBeUndefined();
357
+ expect(result.diagnostics).toHaveLength(1);
358
+ expect(result.diagnostics[0].code).toBe("ASH_SYNTAX_UNEXPECTED");
359
+ });
360
+
361
+ it("unknown command → ASH_UNKNOWN_COMMAND + suggestion", () => {
362
+ const prog = parse("job q { find /world/x }");
363
+ (prog.jobs[0].pipeline as any).push({ kind: "savee" });
364
+ const errors = checkProhibitedPatterns(prog);
365
+ expect(errors[0].message).toContain("savee");
366
+ });
367
+
368
+ it("type mismatch (save | where) → ASH_TYPE_MISMATCH + stage", () => {
369
+ const result = compileSource("job q { save /world/out | where x > 1 }");
370
+ const diag = result.diagnostics.find(d => d.code === "ASH_TYPE_MISMATCH");
371
+ expect(diag).toBeDefined();
372
+ expect(diag!.stage).toBe(1);
373
+ });
374
+
375
+ it("undefined variable → ASH_UNDEFINED_VAR", () => {
376
+ const result = compileSource("job q { find /world/x | where x > $missing }");
377
+ // This is a runtime error currently, so compileSource won't catch it at compile time.
378
+ // undefined var is detected at runtime. For compile-time, we check let duplicates.
379
+ // Adjust: duplicate variable test
380
+ expect(result.diagnostics).toEqual([]); // no compile-time error for undefined var (runtime)
381
+ });
382
+
383
+ it("duplicate variable → ASH_DUPLICATE_VAR", () => {
384
+ const result = compileSource("let x = 1\nlet x = 2\njob q { find /world/x }");
385
+ expect(result.program).toBeUndefined();
386
+ expect(result.diagnostics[0].code).toBe("ASH_DUPLICATE_VAR");
387
+ });
388
+
389
+ // Edge
390
+ it("empty source → { program, diagnostics: [] }", () => {
391
+ const result = compileSource("");
392
+ expect(result.program).toBeDefined();
393
+ expect(result.diagnostics).toEqual([]);
394
+ });
395
+
396
+ it("multiple stages multiple errors collected in same diagnostics[]", () => {
397
+ // Two type errors: save | where | map (save→where mismatch, and save→map mismatch)
398
+ const result = compileSource("job q { save /world/out | where x > 1 | map name }");
399
+ // At least one type mismatch
400
+ expect(result.diagnostics.filter(d => d.code === "ASH_TYPE_MISMATCH").length).toBeGreaterThanOrEqual(1);
401
+ });
402
+
403
+ it("lexer error stops further processing (no parser/type-check)", () => {
404
+ const result = compileSource('job q { output "unterminated');
405
+ expect(result.program).toBeUndefined();
406
+ expect(result.diagnostics).toHaveLength(1);
407
+ // Only lexer error, no parser error
408
+ expect(result.diagnostics[0].code).toBe("ASH_SYNTAX_UNTERMINATED");
409
+ });
410
+
411
+ // Security
412
+ // Route DAG validation
413
+ it("route: target job not found → error diagnostic", () => {
414
+ const result = compileSource('job main { find /data | route type { "a" -> job nonexistent } }');
415
+ expect(result.diagnostics.some(d => d.message.includes("nonexistent"))).toBe(true);
416
+ });
417
+
418
+ it("route: valid targets → no error", () => {
419
+ const result = compileSource('job main { find /data | route type { "a" -> job handler } }\njob handler { save /world/out }');
420
+ const routeErrors = result.diagnostics.filter(d => d.message.includes("Route"));
421
+ expect(routeErrors).toHaveLength(0);
422
+ });
423
+
424
+ it("route: cycle detection (A→B→A) → error", () => {
425
+ const result = compileSource('job a { find /data | route type { "x" -> job b } }\njob b { find /data | route type { "y" -> job a } }');
426
+ expect(result.diagnostics.some(d => d.message.includes("cycle"))).toBe(true);
427
+ });
428
+
429
+ it("lookup: stdin=object_stream, stdout=object_stream", () => {
430
+ const result = compileSource("job q { find /data | lookup /customers on cust_id | save /out }");
431
+ const typeErrors = result.diagnostics.filter(d => d.code === "ASH_TYPE_MISMATCH");
432
+ expect(typeErrors).toHaveLength(0);
433
+ });
434
+
435
+ it("diagnostic message does not leak internal stack trace", () => {
436
+ const result = compileSource('job q { output "unterminated');
437
+ for (const d of result.diagnostics) {
438
+ expect(d.message).not.toMatch(/at\s+\w+\s*\(/); // no stack frames
439
+ expect(d.message).not.toContain("Error:");
440
+ }
441
+ });
442
+
443
+ it("trigger job compiles without diagnostics", () => {
444
+ const result = compileSource("job handler on /data/inbox:created { find /data/inbox | save /out }");
445
+ const errors = result.diagnostics.filter(d => d.code !== "ASH_TYPE_MISMATCH");
446
+ expect(errors).toHaveLength(0);
447
+ });
448
+
449
+ it("trigger job: compiled program includes trigger metadata", () => {
450
+ const result = compileSource("job handler on /data/inbox:created { find /data/inbox | save /out }");
451
+ expect(result.program?.jobs[0].trigger).toEqual({ kind: "event", path: "/data/inbox", event: "created" });
452
+ });
453
+
454
+ it("cron trigger compiles without diagnostics", () => {
455
+ const result = compileSource('job ticker on cron("*/5 * * * *") { find /data | save /out }');
456
+ const errors = result.diagnostics.filter(d => d.code !== "ASH_TYPE_MISMATCH");
457
+ expect(errors).toHaveLength(0);
458
+ });
459
+
460
+ it("cron trigger: compiled program includes cron metadata", () => {
461
+ const result = compileSource('job ticker on cron("0 * * * *") { find /data | save /out }');
462
+ expect(result.program?.jobs[0].trigger).toEqual({ kind: "cron", expression: "0 * * * *" });
463
+ });
464
+
465
+ it("cron trigger does not trigger event-loop detection", () => {
466
+ const result = compileSource('job writer on cron("*/5 * * * *") { find /data | save /data }');
467
+ const loopErrors = result.diagnostics.filter(d => d.message.includes("event loop"));
468
+ expect(loopErrors).toHaveLength(0);
469
+ });
470
+
471
+ it("event trigger + save to same path still triggers event-loop detection", () => {
472
+ const result = compileSource("job loop on /data:created { find /data | save /data }");
473
+ expect(result.diagnostics.some(d => d.message.includes("event loop"))).toBe(true);
474
+ });
475
+ });
476
+
477
+ describe("Hardening: stress-test edge cases", () => {
478
+ // 1. Duplicate job names
479
+ it("duplicate job name → error", () => {
480
+ const result = compileSource("job q { find /a | save /b }\njob q { find /c | save /d }");
481
+ expect(result.diagnostics.some(d => d.message.includes("Duplicate job name"))).toBe(true);
482
+ });
483
+
484
+ // 2. @retry hard limit (error)
485
+ it("@retry(999999) → error for exceeding hard limit", () => {
486
+ const result = compileSource("@retry(999999)\njob q { find /a | save /b }");
487
+ expect(result.diagnostics.some(d => d.message.includes("retry") && d.severity !== "warning")).toBe(true);
488
+ });
489
+
490
+ it("@retry(10) → passes without warning", () => {
491
+ const result = compileSource("@retry(10)\njob q { find /a | save /b }");
492
+ expect(result.diagnostics).toHaveLength(0);
493
+ });
494
+
495
+ // 3. @approval conflict
496
+ it("@approval(human) + @approval(auto) on same job → error", () => {
497
+ const result = compileSource("@approval(human)\n@approval(auto)\njob q { find /a | save /b }");
498
+ expect(result.diagnostics.some(d => d.message.includes("@approval") && d.message.includes("conflict"))).toBe(true);
499
+ });
500
+
501
+ // 4. Path traversal ../ (error — no legitimate use)
502
+ it("path with .. → error", () => {
503
+ const result = compileSource("job q { find /data/../secrets | save /out }");
504
+ expect(result.diagnostics.some(d => d.message.includes("..") && d.severity !== "warning")).toBe(true);
505
+ });
506
+
507
+ it("normal path → no warning", () => {
508
+ const result = compileSource("job q { find /data/users | save /out }");
509
+ expect(result.diagnostics).toHaveLength(0);
510
+ });
511
+
512
+ // 5. Empty job → warning
513
+ it("empty job body → warning", () => {
514
+ const result = compileSource("job empty { }");
515
+ expect(result.diagnostics.some(d => d.message.includes("empty") && d.severity === "warning")).toBe(true);
516
+ });
517
+
518
+ // 6. Division by zero / Infinity / NaN at compile time (literal expressions)
519
+ it("literal division by zero → warning", () => {
520
+ const result = compileSource("job q { find /a | map { x: 1 / 0 } | save /b }");
521
+ expect(result.diagnostics.some(d => d.message.includes("division by zero") && d.severity === "warning")).toBe(true);
522
+ });
523
+
524
+ // 7. Reserved keywords as job names → error
525
+ it("keyword 'find' as job name → parse error", () => {
526
+ const result = compileSource("job find { find /a | save /b }");
527
+ expect(result.diagnostics.length).toBeGreaterThan(0);
528
+ });
529
+
530
+ it("keyword 'route' as job name → parse error", () => {
531
+ const result = compileSource("job route { find /a | save /b }");
532
+ expect(result.diagnostics.length).toBeGreaterThan(0);
533
+ });
534
+
535
+ it("keyword 'job' as job name → parse error", () => {
536
+ const result = compileSource("job job { find /a | save /b }");
537
+ expect(result.diagnostics.length).toBeGreaterThan(0);
538
+ });
539
+
540
+ it("non-keyword job name → passes", () => {
541
+ const result = compileSource("job my_etl { find /a | save /b }");
542
+ expect(result.diagnostics).toHaveLength(0);
543
+ });
544
+
545
+ // 8. Prototype chain safety tested in compiler.test.ts (runtime behavior)
546
+
547
+ // 9. $ in paths → warning (bare $ only, not ${...} templates)
548
+ it("path containing bare $ → warning", () => {
549
+ const result = compileSource("job q { find /data/$evil | save /out }");
550
+ expect(result.diagnostics.some(d => d.message.includes("$") && d.severity === "warning")).toBe(true);
551
+ });
552
+
553
+ it("path with ${template} → no warning (template interpolation allowed)", () => {
554
+ // Use string concat to avoid JS template literal interpolation
555
+ const src = "job q { find /data/" + "${id}" + " | save /out }";
556
+ const result = compileSource(src);
557
+ const dollarWarnings = result.diagnostics.filter(d =>
558
+ d.message.includes("$") && d.message.includes("variable interpolation"),
559
+ );
560
+ expect(dollarWarnings).toHaveLength(0);
561
+ });
562
+
563
+ it("path with ${nested.field} → no warning", () => {
564
+ const src = "job q { find /data/" + "${data.messageId}" + " | save /out }";
565
+ const result = compileSource(src);
566
+ const dollarWarnings = result.diagnostics.filter(d =>
567
+ d.message.includes("$") && d.message.includes("variable interpolation"),
568
+ );
569
+ expect(dollarWarnings).toHaveLength(0);
570
+ });
571
+
572
+ it("action with template path and caps → skip compile-time cap check", () => {
573
+ const src = "@caps(read /data/*, exec /api/*)\njob q { find /data/x | action /api/" + "${method}" + " }";
574
+ const result = compileSource(src);
575
+ // Template path should NOT trigger CAP_DENIED (runtime check instead)
576
+ const capDenied = result.diagnostics.filter(d =>
577
+ d.code === "ASH_CAP_DENIED" && d.message.includes("${"),
578
+ );
579
+ expect(capDenied).toHaveLength(0);
580
+ });
581
+
582
+ // 10. Pipeline depth limit → warning
583
+ it("pipeline with >30 stages → warning", () => {
584
+ const stages = Array.from({ length: 35 }, () => "where x > 1").join(" | ");
585
+ const result = compileSource(`job q { find /a | ${stages} | save /b }`);
586
+ expect(result.diagnostics.some(d => d.message.includes("stages") && d.severity === "warning")).toBe(true);
587
+ });
588
+
589
+ it("pipeline with 5 stages + @caps → no warning", () => {
590
+ const result = compileSource("@caps(read /a/* write /b/* write /c/*)\njob q { find /a | where x > 1 | map name | tee /b | save /c }");
591
+ expect(result.diagnostics).toHaveLength(0);
592
+ });
593
+
594
+ // 11. Fanout nesting depth → warning
595
+ it("fanout nested >3 levels → warning", () => {
596
+ const result = compileSource(`
597
+ job q {
598
+ find /a | fanout {
599
+ fanout {
600
+ fanout {
601
+ fanout { save /deep1, save /deep2 },
602
+ save /b
603
+ },
604
+ save /c
605
+ },
606
+ save /d
607
+ }
608
+ }
609
+ `);
610
+ expect(result.diagnostics.some(d => d.message.includes("fanout") && d.severity === "warning")).toBe(true);
611
+ });
612
+
613
+ // 12. Job name length limit
614
+ it("job name > 64 chars → warning", () => {
615
+ const longName = "a".repeat(65);
616
+ const result = compileSource(`job ${longName} { find /a | save /b }`);
617
+ expect(result.diagnostics.some(d => d.message.includes("64 characters") && d.severity === "warning")).toBe(true);
618
+ });
619
+
620
+ it("job name 10 chars → no warning", () => {
621
+ const result = compileSource("job short_name { find /a | save /b }");
622
+ expect(result.diagnostics).toHaveLength(0);
623
+ });
624
+
625
+ // 13. Program job count limit
626
+ it("program with >50 jobs → warning", () => {
627
+ const jobs = Array.from({ length: 55 }, (_, i) => `job j${i} { find /a | save /b${i} }`).join("\n");
628
+ const result = compileSource(jobs);
629
+ expect(result.diagnostics.some(d => d.message.includes("50 jobs") && d.severity === "warning")).toBe(true);
630
+ });
631
+
632
+ it("program with 3 jobs → no warning", () => {
633
+ const result = compileSource(`
634
+ job a { find /a | save /b }
635
+ job b { find /c | save /d }
636
+ job c { find /e | save /f }
637
+ `);
638
+ expect(result.diagnostics).toHaveLength(0);
639
+ });
640
+
641
+ // 14. Fanout total branch count limit (anti-DDoS)
642
+ it("fanout with >10 branches → warning", () => {
643
+ const branches = Array.from({ length: 12 }, (_, i) => `save /out${i}`).join(", ");
644
+ const result = compileSource(`job q { find /a | fanout { ${branches} } }`);
645
+ expect(result.diagnostics.some(d => d.message.includes("branches") && d.severity === "warning")).toBe(true);
646
+ });
647
+
648
+ it("fanout with 3 branches → no warning", () => {
649
+ const result = compileSource("job q { find /a | fanout { save /x, save /y, save /z } }");
650
+ expect(result.diagnostics).toHaveLength(0);
651
+ });
652
+ });
653
+
654
+ // ── @caps: capability-based path security ──
655
+
656
+ describe("@caps: capability-based security", () => {
657
+ // ── Annotation validation ──
658
+
659
+ it("@caps with valid args → passes", () => {
660
+ const result = compileSource(`
661
+ @caps(read /data/*, write /out/*)
662
+ job q { find /data/users | save /out/clean }
663
+ `);
664
+ expect(result.diagnostics).toHaveLength(0);
665
+ });
666
+
667
+ it("@caps with no args → error", () => {
668
+ const result = compileSource(`
669
+ @caps()
670
+ job q { find /a | save /b }
671
+ `);
672
+ expect(result.diagnostics.some(d => d.message.includes("@caps"))).toBe(true);
673
+ });
674
+
675
+ it("@caps with odd number of args → error", () => {
676
+ const result = compileSource(`
677
+ @caps(read /data/* write)
678
+ job q { find /data/x | save /out }
679
+ `);
680
+ expect(result.diagnostics.some(d => d.message.includes("pairs"))).toBe(true);
681
+ });
682
+
683
+ it("@caps with invalid operation → error", () => {
684
+ const result = compileSource(`
685
+ @caps(delete /data/*)
686
+ job q { find /data/x | save /out }
687
+ `);
688
+ expect(result.diagnostics.some(d => d.message.includes("delete"))).toBe(true);
689
+ });
690
+
691
+ // ── Static path checking ──
692
+
693
+ it("find path within caps → passes", () => {
694
+ const result = compileSource(`
695
+ @caps(read /data/*)
696
+ job q { find /data/users | save /out }
697
+ `);
698
+ // save /out not in caps → should fail
699
+ expect(result.diagnostics.some(d => d.code === "ASH_CAP_DENIED")).toBe(true);
700
+ });
701
+
702
+ it("find path outside caps → denied", () => {
703
+ const result = compileSource(`
704
+ @caps(read /allowed/*)
705
+ job q { find /secret/users | save /out }
706
+ `);
707
+ expect(result.diagnostics.some(d =>
708
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/secret/users"),
709
+ )).toBe(true);
710
+ });
711
+
712
+ it("save path within caps → passes", () => {
713
+ const result = compileSource(`
714
+ @caps(read /data/*, write /out/*)
715
+ job q { find /data/users | save /out/clean }
716
+ `);
717
+ expect(result.diagnostics).toHaveLength(0);
718
+ });
719
+
720
+ it("save path outside caps → denied", () => {
721
+ const result = compileSource(`
722
+ @caps(read /data/*, write /out/*)
723
+ job q { find /data/users | save /forbidden/path }
724
+ `);
725
+ expect(result.diagnostics.some(d =>
726
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/forbidden/path"),
727
+ )).toBe(true);
728
+ });
729
+
730
+ it("action path needs exec cap", () => {
731
+ const result = compileSource(`
732
+ @caps(read /data/*, exec /api/enrich)
733
+ job q { find /data/in | action /api/enrich | save /out }
734
+ `);
735
+ // action OK but save /out not covered
736
+ expect(result.diagnostics.some(d =>
737
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/out"),
738
+ )).toBe(true);
739
+ // action /api/enrich should NOT be denied
740
+ expect(result.diagnostics.some(d =>
741
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/api/enrich"),
742
+ )).toBe(false);
743
+ });
744
+
745
+ it("action path without exec cap → denied", () => {
746
+ const result = compileSource(`
747
+ @caps(read /data/*)
748
+ job q { find /data/in | action /api/enrich | save /out }
749
+ `);
750
+ expect(result.diagnostics.some(d =>
751
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/api/enrich"),
752
+ )).toBe(true);
753
+ });
754
+
755
+ it("tee path needs write cap", () => {
756
+ const result = compileSource(`
757
+ @caps(read /data/*, write /backup/*, write /out/*)
758
+ job q { find /data/in | tee /backup/snap | save /out/clean }
759
+ `);
760
+ expect(result.diagnostics).toHaveLength(0);
761
+ });
762
+
763
+ it("publish path needs write cap", () => {
764
+ const result = compileSource(`
765
+ @caps(read /data/*, write /events/*)
766
+ job q { find /data/in | publish /events/out }
767
+ `);
768
+ expect(result.diagnostics).toHaveLength(0);
769
+ });
770
+
771
+ it("lookup path needs read cap", () => {
772
+ const result = compileSource(`
773
+ @caps(read /data/*, write /out/*)
774
+ job q { find /data/orders | lookup /data/customers on customer_id | save /out/enriched }
775
+ `);
776
+ expect(result.diagnostics).toHaveLength(0);
777
+ });
778
+
779
+ it("lookup path outside read caps → denied", () => {
780
+ const result = compileSource(`
781
+ @caps(read /data/orders, write /out/*)
782
+ job q { find /data/orders | lookup /data/customers on customer_id | save /out/enriched }
783
+ `);
784
+ expect(result.diagnostics.some(d =>
785
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/data/customers"),
786
+ )).toBe(true);
787
+ });
788
+
789
+ // ── Glob matching ──
790
+
791
+ it("wildcard * matches nested paths", () => {
792
+ const result = compileSource(`
793
+ @caps(read /ha/sensors/*)
794
+ job q { find /ha/sensors/temperature/bedroom | save /out }
795
+ `);
796
+ // find OK, save /out denied
797
+ expect(result.diagnostics.some(d =>
798
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/ha/sensors/"),
799
+ )).toBe(false);
800
+ });
801
+
802
+ it("exact path match without wildcard", () => {
803
+ const result = compileSource(`
804
+ @caps(read /data/users, write /out/clean)
805
+ job q { find /data/users | save /out/clean }
806
+ `);
807
+ expect(result.diagnostics).toHaveLength(0);
808
+ });
809
+
810
+ it("exact path doesn't match sub-paths", () => {
811
+ const result = compileSource(`
812
+ @caps(read /data)
813
+ job q { find /data/users | save /out }
814
+ `);
815
+ expect(result.diagnostics.some(d =>
816
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/data/users"),
817
+ )).toBe(true);
818
+ });
819
+
820
+ // ── No @caps → no check (backwards compatible) ──
821
+
822
+ it("job without @caps → no cap checking", () => {
823
+ const result = compileSource("job q { find /anything | save /anywhere }");
824
+ expect(result.diagnostics.filter(d => d.code === "ASH_CAP_DENIED")).toHaveLength(0);
825
+ });
826
+
827
+ // ── Fanout paths checked recursively ──
828
+
829
+ it("fanout branches checked against caps", () => {
830
+ const result = compileSource(`
831
+ @caps(read /data/*, write /out/*)
832
+ job q { find /data/in | fanout { save /out/a, save /out/b } }
833
+ `);
834
+ expect(result.diagnostics).toHaveLength(0);
835
+ });
836
+
837
+ it("fanout branch with denied path → error", () => {
838
+ const result = compileSource(`
839
+ @caps(read /data/*, write /out/*)
840
+ job q { find /data/in | fanout { save /out/a, save /forbidden/b } }
841
+ `);
842
+ expect(result.diagnostics.some(d =>
843
+ d.code === "ASH_CAP_DENIED" && d.message.includes("/forbidden/b"),
844
+ )).toBe(true);
845
+ });
846
+ });
847
+
848
+ describe("white-hat hardening — round 3", () => {
849
+ // ── Fix 1: @readonly must block tee ──
850
+
851
+ it("@readonly + tee → READONLY_VIOLATION", () => {
852
+ const result = compileSource(`
853
+ @readonly
854
+ job q { find /data/x | tee /data/backup | count | save /out }
855
+ `);
856
+ // tee is a write operation — should trigger readonly violation
857
+ expect(result.diagnostics.some(d =>
858
+ d.code === "ASH_READONLY_VIOLATION" && d.message.includes("tee"),
859
+ )).toBe(true);
860
+ });
861
+
862
+ // ── Fix 2: @readonly propagates to route targets ──
863
+
864
+ it("@readonly job routes to writing job → error", () => {
865
+ const result = compileSource(`
866
+ @readonly
867
+ job safe { find /data/x | route type { "a" -> job writer } }
868
+ job writer { save /out/stolen }
869
+ `);
870
+ expect(result.diagnostics.some(d =>
871
+ d.code === "ASH_READONLY_VIOLATION" && d.message.includes("writer"),
872
+ )).toBe(true);
873
+ });
874
+
875
+ it("@readonly job routes to readonly-safe job → ok", () => {
876
+ const result = compileSource(`
877
+ @readonly
878
+ job safe { find /data/x | route type { "a" -> job counter } }
879
+ job counter { count | output "done" }
880
+ `);
881
+ const readonlyErrors = result.diagnostics.filter(d => d.code === "ASH_READONLY_VIOLATION");
882
+ expect(readonlyErrors).toHaveLength(0);
883
+ });
884
+
885
+ // ── Fix 3: event loop detection — on + save to same path ──
886
+
887
+ it("on /path:created + save /path → event loop warning", () => {
888
+ const result = compileSource(`
889
+ job loop on /data/raw:created { find /data/raw | save /data/raw }
890
+ `);
891
+ expect(result.diagnostics.some(d =>
892
+ d.message.includes("event loop") || d.message.includes("trigger path"),
893
+ )).toBe(true);
894
+ });
895
+
896
+ it("on /path:created + tee /path → event loop warning", () => {
897
+ const result = compileSource(`
898
+ job loop on /data/raw:created { find /data/raw | tee /data/raw | save /data/out }
899
+ `);
900
+ expect(result.diagnostics.some(d =>
901
+ d.message.includes("event loop") || d.message.includes("trigger path"),
902
+ )).toBe(true);
903
+ });
904
+
905
+ it("on /path:created + save /other → no warning", () => {
906
+ const result = compileSource(`
907
+ job ok on /data/raw:created { find /data/raw | save /data/clean }
908
+ `);
909
+ const loopWarnings = result.diagnostics.filter(d =>
910
+ d.message.includes("event loop") || d.message.includes("trigger path"),
911
+ );
912
+ expect(loopWarnings).toHaveLength(0);
913
+ });
914
+
915
+ // ── Fix 4: action calling /.actions/run or /.actions/exec → self-recursion error ──
916
+
917
+ it("action /.actions/run → self-recursion error", () => {
918
+ const result = compileSource(`
919
+ job q { find /data/x | action /.actions/run | save /out }
920
+ `);
921
+ expect(result.diagnostics.some(d =>
922
+ d.message.includes("self-referenc") || d.message.includes("recursion"),
923
+ )).toBe(true);
924
+ });
925
+
926
+ it("action /.actions/exec → self-recursion error", () => {
927
+ const result = compileSource(`
928
+ job q { find /data/x | action /.actions/exec | save /out }
929
+ `);
930
+ expect(result.diagnostics.some(d =>
931
+ d.message.includes("self-referenc") || d.message.includes("recursion"),
932
+ )).toBe(true);
933
+ });
934
+
935
+ it("action /api/normal → no warning", () => {
936
+ const result = compileSource(`
937
+ job q { find /data/x | action /api/normal | save /out }
938
+ `);
939
+ const recursionWarnings = result.diagnostics.filter(d =>
940
+ d.message.includes("self-referenc") || d.message.includes("recursion"),
941
+ );
942
+ expect(recursionWarnings).toHaveLength(0);
943
+ });
944
+
945
+ it("action /.actions/agent-run → allowed (budget-constrained LLM loop)", () => {
946
+ const result = compileSource(`
947
+ job q { find /data/x | action /.actions/agent-run | save /out }
948
+ `);
949
+ const recursionWarnings = result.diagnostics.filter(d =>
950
+ d.message.includes("self-referenc") || d.message.includes("recursion"),
951
+ );
952
+ expect(recursionWarnings).toHaveLength(0);
953
+ });
954
+
955
+ // ── Fix 5: NaN/Infinity expression warning ──
956
+
957
+ it("division that may produce NaN/Infinity → warning", () => {
958
+ const result = compileSource(`
959
+ job q { find /data/x | map { ratio: score / total } | save /out }
960
+ `);
961
+ // Division by a field that could be zero — warning about potential NaN
962
+ expect(result.diagnostics.some(d =>
963
+ d.message.includes("NaN") || d.message.includes("division"),
964
+ )).toBe(true);
965
+ });
966
+ });
967
+
968
+ describe("black-hat hardening — round 4", () => {
969
+ // ── Fix 1: mixed annotation deception warning ──
970
+
971
+ it("@readonly job + non-readonly writing job → mixed security warning", () => {
972
+ const result = compileSource(`
973
+ @readonly
974
+ job monitor { find /data/x | output "status ok" }
975
+ job backdoor { find /data/x | save /secret/out }
976
+ `);
977
+ expect(result.diagnostics.some(d =>
978
+ d.message.includes("mixed security"),
979
+ )).toBe(true);
980
+ });
981
+
982
+ it("@readonly + @approval(auto) + hidden writer → deception warning", () => {
983
+ const result = compileSource(`
984
+ @readonly @approval(auto)
985
+ job safe_monitor { find /data/x | output "all good" }
986
+ job install_backdoor { save /scripts/evil }
987
+ `);
988
+ expect(result.diagnostics.some(d =>
989
+ d.message.includes("mixed security"),
990
+ )).toBe(true);
991
+ });
992
+
993
+ it("all @readonly jobs → no mixed security warning", () => {
994
+ const result = compileSource(`
995
+ @readonly
996
+ job a { find /data/x | count }
997
+ @readonly
998
+ job b { find /data/y | output "done" }
999
+ `);
1000
+ const mixed = result.diagnostics.filter(d => d.message.includes("mixed security"));
1001
+ expect(mixed).toHaveLength(0);
1002
+ });
1003
+
1004
+ it("no @readonly jobs → no mixed security warning", () => {
1005
+ const result = compileSource(`
1006
+ job a { find /data/x | save /out/a }
1007
+ job b { find /data/y | save /out/b }
1008
+ `);
1009
+ const mixed = result.diagnostics.filter(d => d.message.includes("mixed security"));
1010
+ expect(mixed).toHaveLength(0);
1011
+ });
1012
+
1013
+ it("@readonly + non-readonly without writes → no warning", () => {
1014
+ const result = compileSource(`
1015
+ @readonly
1016
+ job monitor { find /data/x | output "ok" }
1017
+ job counter { find /data/y | count }
1018
+ `);
1019
+ const mixed = result.diagnostics.filter(d => d.message.includes("mixed security"));
1020
+ expect(mixed).toHaveLength(0);
1021
+ });
1022
+
1023
+ // ── Fix 2: JS reserved field names → environment fingerprinting ──
1024
+
1025
+ it("map constructor → prototype pollution error", () => {
1026
+ const result = compileSource(`
1027
+ job q { find /data/x | map constructor | save /out }
1028
+ `);
1029
+ expect(result.diagnostics.some(d =>
1030
+ d.message.includes("constructor") && d.message.includes("prototype pollution"),
1031
+ )).toBe(true);
1032
+ });
1033
+
1034
+ it("map __proto__ → prototype pollution error", () => {
1035
+ const result = compileSource(`
1036
+ job q { find /data/x | map __proto__ | save /out }
1037
+ `);
1038
+ expect(result.diagnostics.some(d =>
1039
+ d.message.includes("__proto__") && d.message.includes("prototype pollution"),
1040
+ )).toBe(true);
1041
+ });
1042
+
1043
+ it("map { x: constructor } expression → JS reserved warning", () => {
1044
+ const result = compileSource(`
1045
+ job q { find /data/x | map { x: constructor } | save /out }
1046
+ `);
1047
+ expect(result.diagnostics.some(d =>
1048
+ d.message.includes("constructor") && d.message.includes("JavaScript"),
1049
+ )).toBe(true);
1050
+ });
1051
+
1052
+ it("where constructor > 0 → prototype pollution error", () => {
1053
+ const result = compileSource(`
1054
+ job q { find /data/x | where constructor > 0 | save /out }
1055
+ `);
1056
+ expect(result.diagnostics.some(d =>
1057
+ d.message.includes("constructor") && d.message.includes("prototype pollution"),
1058
+ )).toBe(true);
1059
+ });
1060
+
1061
+ it("map name → no JS reserved warning", () => {
1062
+ const result = compileSource(`
1063
+ job q { find /data/x | map name | save /out }
1064
+ `);
1065
+ const jsWarnings = result.diagnostics.filter(d => d.message.includes("JavaScript"));
1066
+ expect(jsWarnings).toHaveLength(0);
1067
+ });
1068
+
1069
+ it("where age > 18 → no JS reserved warning", () => {
1070
+ const result = compileSource(`
1071
+ job q { find /data/x | where age > 18 | save /out }
1072
+ `);
1073
+ const jsWarnings = result.diagnostics.filter(d => d.message.includes("JavaScript"));
1074
+ expect(jsWarnings).toHaveLength(0);
1075
+ });
1076
+
1077
+ // ── Fix 3: TOCTOU — sourceHash in compileSource result ──
1078
+
1079
+ it("compileSource returns sourceHash", () => {
1080
+ const result = compileSource("job q { find /data/x | save /out }");
1081
+ expect(result.sourceHash).toBeDefined();
1082
+ expect(typeof result.sourceHash).toBe("string");
1083
+ expect(result.sourceHash!.length).toBeGreaterThan(0);
1084
+ });
1085
+
1086
+ it("same source → same sourceHash", () => {
1087
+ const r1 = compileSource("job q { find /data/x | save /out }");
1088
+ const r2 = compileSource("job q { find /data/x | save /out }");
1089
+ expect(r1.sourceHash).toBe(r2.sourceHash);
1090
+ });
1091
+
1092
+ it("different source → different sourceHash", () => {
1093
+ const r1 = compileSource("job q { find /data/x | save /out }");
1094
+ const r2 = compileSource("job q { find /data/x | save /out2 }");
1095
+ expect(r1.sourceHash).not.toBe(r2.sourceHash);
1096
+ });
1097
+ });
1098
+
1099
+ describe("black-hat hardening — round 5 (semantic trust chain)", () => {
1100
+ // ── Fix #15: strict equality in where clauses ──
1101
+
1102
+ it("where with strict equality: same types match", () => {
1103
+ const result = compileSource('job q { find /data/x | where name == "alice" | save /out }');
1104
+ expect(result.diagnostics).toHaveLength(0);
1105
+ });
1106
+
1107
+ // ── Fix #11/#22: let pipeline restrictions ──
1108
+
1109
+ it("let pipeline with save → error (writes in let bindings never legitimate)", () => {
1110
+ const result = compileSource(`
1111
+ let stolen = find /data/secret | save /stolen
1112
+ job q { find /data/x | save /out }
1113
+ `);
1114
+ expect(result.diagnostics.some(d =>
1115
+ d.message.includes("let") && (d.message.includes("write") || d.message.includes("save")),
1116
+ )).toBe(true);
1117
+ });
1118
+
1119
+ it("let pipeline with tee → error", () => {
1120
+ const result = compileSource(`
1121
+ let x = find /data/raw | tee /data/leaked
1122
+ job q { find /data/x | save /out }
1123
+ `);
1124
+ expect(result.diagnostics.some(d =>
1125
+ d.message.includes("let") && (d.message.includes("write") || d.message.includes("tee")),
1126
+ )).toBe(true);
1127
+ });
1128
+
1129
+ it("let pipeline with publish → error", () => {
1130
+ const result = compileSource(`
1131
+ let x = find /data/raw | publish /events/out
1132
+ job q { find /data/x | save /out }
1133
+ `);
1134
+ expect(result.diagnostics.some(d =>
1135
+ d.message.includes("let") && (d.message.includes("write") || d.message.includes("publish")),
1136
+ )).toBe(true);
1137
+ });
1138
+
1139
+ it("let pipeline with find only → no error", () => {
1140
+ const result = compileSource(`
1141
+ let x = find /data/users | count
1142
+ job q { find /data/x | save /out }
1143
+ `);
1144
+ const letErrors = result.diagnostics.filter(d =>
1145
+ d.message.includes("let") && d.message.includes("write"),
1146
+ );
1147
+ expect(letErrors).toHaveLength(0);
1148
+ });
1149
+
1150
+ it("let pipeline with find + @approval job → pre-approval warning", () => {
1151
+ const result = compileSource(`
1152
+ let recon = find /data/secret | count
1153
+ @approval(human)
1154
+ job safe { find /data/x | save /out }
1155
+ `);
1156
+ expect(result.diagnostics.some(d =>
1157
+ d.message.includes("let") && d.message.includes("approval"),
1158
+ )).toBe(true);
1159
+ });
1160
+
1161
+ it("let pipeline with find + no @approval → no pre-approval warning", () => {
1162
+ const result = compileSource(`
1163
+ let x = find /data/users | count
1164
+ job q { find /data/x | save /out }
1165
+ `);
1166
+ const approvalWarnings = result.diagnostics.filter(d =>
1167
+ d.message.includes("let") && d.message.includes("approval"),
1168
+ );
1169
+ expect(approvalWarnings).toHaveLength(0);
1170
+ });
1171
+
1172
+ // ── Fix #18: param write-gate warning ──
1173
+
1174
+ it("$param in where clause before save → param write-gate warning", () => {
1175
+ const result = compileSource(`
1176
+ param mode = "safe"
1177
+ job executor { find /data/x | where mode == $mode | save /data/wiped }
1178
+ `);
1179
+ expect(result.diagnostics.some(d =>
1180
+ d.message.includes("param") && d.message.includes("write"),
1181
+ )).toBe(true);
1182
+ });
1183
+
1184
+ it("$param in where clause without writes → no warning", () => {
1185
+ const result = compileSource(`
1186
+ param threshold = 50
1187
+ job reader { find /data/x | where score > $threshold | count }
1188
+ `);
1189
+ const paramWarnings = result.diagnostics.filter(d =>
1190
+ d.message.includes("param") && d.message.includes("write"),
1191
+ );
1192
+ expect(paramWarnings).toHaveLength(0);
1193
+ });
1194
+
1195
+ it("$param in where clause with downstream tee → warning", () => {
1196
+ const result = compileSource(`
1197
+ param mode = "safe"
1198
+ job q { find /data/x | where status == $mode | tee /out/leak | count }
1199
+ `);
1200
+ expect(result.diagnostics.some(d =>
1201
+ d.message.includes("param") && d.message.includes("write"),
1202
+ )).toBe(true);
1203
+ });
1204
+
1205
+ it("param exists but not used in where → no warning", () => {
1206
+ const result = compileSource(`
1207
+ param limit = 100
1208
+ job q { find /data/x | save /out }
1209
+ `);
1210
+ const paramWarnings = result.diagnostics.filter(d =>
1211
+ d.message.includes("param") && d.message.includes("write"),
1212
+ );
1213
+ expect(paramWarnings).toHaveLength(0);
1214
+ });
1215
+
1216
+ // ── Fix A11: param duplicate + param/let collision ──
1217
+
1218
+ it("duplicate param name → parse error", () => {
1219
+ const result = compileSource("param x = 1\nparam x = 2\njob q { find /data | save /out }");
1220
+ expect(result.diagnostics.some(d => d.message.includes("Duplicate param"))).toBe(true);
1221
+ });
1222
+
1223
+ it("param + let same name → parse error", () => {
1224
+ const result = compileSource("param x = 1\nlet x = 2\njob q { find /data | save /out }");
1225
+ expect(result.diagnostics.some(d => d.message.includes("conflicts"))).toBe(true);
1226
+ });
1227
+
1228
+ it("let + param same name → parse error", () => {
1229
+ const result = compileSource("let x = 1\nparam x = 2\njob q { find /data | save /out }");
1230
+ expect(result.diagnostics.some(d => d.message.includes("conflicts"))).toBe(true);
1231
+ });
1232
+ });
1233
+
1234
+ describe("black-hat hardening — round 6 (compiler implementation attacks)", () => {
1235
+ // ── 核弹4: __proto__ as map OUTPUT key (not just value) ──
1236
+
1237
+ it("map { __proto__: value } → prototype pollution error", () => {
1238
+ const result = compileSource("job q { find /data | map { __proto__: name } | save /out }");
1239
+ expect(result.diagnostics.some(d =>
1240
+ d.message.includes("__proto__") && d.message.includes("prototype pollution"),
1241
+ )).toBe(true);
1242
+ });
1243
+
1244
+ it("map { constructor: value } → prototype pollution error", () => {
1245
+ const result = compileSource("job q { find /data | map { constructor: age } | save /out }");
1246
+ expect(result.diagnostics.some(d =>
1247
+ d.message.includes("constructor") && d.message.includes("prototype pollution"),
1248
+ )).toBe(true);
1249
+ });
1250
+
1251
+ it("map { safe_key: __proto__ } → JS reserved value warning (already worked)", () => {
1252
+ const result = compileSource("job q { find /data | map { x: __proto__ } | save /out }");
1253
+ expect(result.diagnostics.some(d =>
1254
+ d.message.includes("__proto__") && d.message.includes("JavaScript"),
1255
+ )).toBe(true);
1256
+ });
1257
+
1258
+ it("map { normal: name } → no JS reserved warning", () => {
1259
+ const result = compileSource("job q { find /data | map { label: name } | save /out }");
1260
+ const jsWarnings = result.diagnostics.filter(d => d.message.includes("JavaScript") || d.message.includes("pollution"));
1261
+ expect(jsWarnings).toHaveLength(0);
1262
+ });
1263
+
1264
+ // ── 核弹5: fanout duplicate write path ──
1265
+
1266
+ it("fanout with two branches writing to same path → warning", () => {
1267
+ const result = compileSource(`
1268
+ job q { find /data | fanout { save /out/target, save /out/target } }
1269
+ `);
1270
+ expect(result.diagnostics.some(d =>
1271
+ d.message.includes("fanout") && d.message.includes("/out/target") && d.message.includes("branches writing"),
1272
+ )).toBe(true);
1273
+ });
1274
+
1275
+ it("fanout with branches writing to different paths → no warning", () => {
1276
+ const result = compileSource(`
1277
+ job q { find /data | fanout { save /out/a, save /out/b } }
1278
+ `);
1279
+ const dupWarnings = result.diagnostics.filter(d => d.message.includes("branches writing"));
1280
+ expect(dupWarnings).toHaveLength(0);
1281
+ });
1282
+
1283
+ it("fanout: tee + save to same path across branches → warning", () => {
1284
+ const result = compileSource(`
1285
+ job q { find /data | fanout { tee /out/x | save /a, save /out/x } }
1286
+ `);
1287
+ expect(result.diagnostics.some(d =>
1288
+ d.message.includes("/out/x") && d.message.includes("branches writing"),
1289
+ )).toBe(true);
1290
+ });
1291
+
1292
+ // ── 核弹13: tee chain amplification ──
1293
+
1294
+ it("pipeline with >5 tee stages → data amplification warning", () => {
1295
+ const tees = Array.from({ length: 7 }, (_, i) => `tee /out/t${i}`).join(" | ");
1296
+ const result = compileSource(`job q { find /data | ${tees} | save /final }`);
1297
+ expect(result.diagnostics.some(d =>
1298
+ d.message.includes("tee") && d.message.includes("amplification"),
1299
+ )).toBe(true);
1300
+ });
1301
+
1302
+ it("pipeline with 3 tee stages → no warning", () => {
1303
+ const result = compileSource("job q { find /data | tee /a | tee /b | tee /c | save /out }");
1304
+ const teeWarnings = result.diagnostics.filter(d => d.message.includes("amplification"));
1305
+ expect(teeWarnings).toHaveLength(0);
1306
+ });
1307
+ });
1308
+
1309
+ describe("regression round 7: remaining compiler issues", () => {
1310
+ // ── Issue 1: __proto__/constructor/prototype as map key → ERROR (not just warning) ──
1311
+
1312
+ it("map { __proto__: expr } → error (blocks compilation)", () => {
1313
+ const result = compileSource("job q { find /data | map { __proto__: name } | save /out }");
1314
+ const protoErrors = result.diagnostics.filter(d =>
1315
+ d.message.includes("__proto__") && d.message.includes("prototype pollution") && d.severity !== "warning",
1316
+ );
1317
+ expect(protoErrors.length).toBeGreaterThan(0);
1318
+ });
1319
+
1320
+ it("map { constructor: expr } → error (blocks compilation)", () => {
1321
+ const result = compileSource("job q { find /data | map { constructor: age } | save /out }");
1322
+ const protoErrors = result.diagnostics.filter(d =>
1323
+ d.message.includes("constructor") && d.message.includes("prototype pollution") && d.severity !== "warning",
1324
+ );
1325
+ expect(protoErrors.length).toBeGreaterThan(0);
1326
+ });
1327
+
1328
+ it("map { prototype: expr } → error (blocks compilation)", () => {
1329
+ const result = compileSource("job q { find /data | map { prototype: name } | save /out }");
1330
+ const protoErrors = result.diagnostics.filter(d =>
1331
+ d.message.includes("prototype") && d.message.includes("pollution") && d.severity !== "warning",
1332
+ );
1333
+ expect(protoErrors.length).toBeGreaterThan(0);
1334
+ });
1335
+
1336
+ // ── Issue 2: JS reserved words as map key → warning ──
1337
+
1338
+ it("map { delete: expr } → JS reserved warning", () => {
1339
+ const result = compileSource("job q { find /data | map { delete: name } | save /out }");
1340
+ expect(result.diagnostics.some(d =>
1341
+ d.message.includes("delete") && d.message.includes("JavaScript"),
1342
+ )).toBe(true);
1343
+ });
1344
+
1345
+ it("map { return: expr } → JS reserved warning", () => {
1346
+ const result = compileSource("job q { find /data | map { return: name } | save /out }");
1347
+ expect(result.diagnostics.some(d =>
1348
+ d.message.includes("return") && d.message.includes("JavaScript"),
1349
+ )).toBe(true);
1350
+ });
1351
+
1352
+ it("map { class: expr } → JS reserved warning", () => {
1353
+ const result = compileSource("job q { find /data | map { class: name } | save /out }");
1354
+ expect(result.diagnostics.some(d =>
1355
+ d.message.includes("class") && d.message.includes("JavaScript"),
1356
+ )).toBe(true);
1357
+ });
1358
+
1359
+ it("map { safe_key: value } → no warning", () => {
1360
+ const result = compileSource("job q { find /data | map { label: name } | save /out }");
1361
+ const jsWarnings = result.diagnostics.filter(d =>
1362
+ d.message.includes("JavaScript") || d.message.includes("pollution"),
1363
+ );
1364
+ expect(jsWarnings).toHaveLength(0);
1365
+ });
1366
+
1367
+ // ── Issue 3: map duplicate key → parse error ──
1368
+
1369
+ it("map { a: x, a: y } → duplicate key error", () => {
1370
+ const result = compileSource("job q { find /data | map { a: x, a: y } | save /out }");
1371
+ expect(result.diagnostics.some(d => d.message.includes("Duplicate map key"))).toBe(true);
1372
+ });
1373
+
1374
+ it("map { a: x, b: y } → no error", () => {
1375
+ const result = compileSource("job q { find /data | map { a: x, b: y } | save /out }");
1376
+ expect(result.diagnostics).toHaveLength(0);
1377
+ });
1378
+
1379
+ // ── Issue 4: @timeout hard limit ──
1380
+
1381
+ it("@timeout(99999999) → exceeds hard limit error", () => {
1382
+ const result = compileSource("@timeout(99999999)\njob q { find /a | save /b }");
1383
+ expect(result.diagnostics.some(d => d.message.includes("@timeout") && d.message.includes("hard limit"))).toBe(true);
1384
+ });
1385
+
1386
+ it("@timeout(3600001) → exceeds hard limit error", () => {
1387
+ const result = compileSource("@timeout(3600001)\njob q { find /a | save /b }");
1388
+ expect(result.diagnostics.some(d => d.message.includes("@timeout") && d.message.includes("hard limit"))).toBe(true);
1389
+ });
1390
+
1391
+ it("@timeout(60000) → passes", () => {
1392
+ const result = compileSource("@timeout(60000)\njob q { find /a | save /b }");
1393
+ expect(result.diagnostics).toHaveLength(0);
1394
+ });
1395
+
1396
+ it("@timeout(3600000) → passes (exactly 1 hour)", () => {
1397
+ const result = compileSource("@timeout(3600000)\njob q { find /a | save /b }");
1398
+ expect(result.diagnostics).toHaveLength(0);
1399
+ });
1400
+ });
1401
+
1402
+ // ── Regression round 8: combination attacks ──
1403
+
1404
+ describe("regression round 8 — combination attacks", () => {
1405
+
1406
+ // ── Issue 1: Duplicate annotation detection ──
1407
+
1408
+ it("duplicate @timeout → error (audit deception)", () => {
1409
+ const result = compileSource("@timeout(1000)\n@timeout(999999)\njob q { find /a | save /b }");
1410
+ expect(result.diagnostics.some(d =>
1411
+ d.message.includes("@timeout") && d.message.includes("appears 2 times"),
1412
+ )).toBe(true);
1413
+ });
1414
+
1415
+ it("duplicate @retry → error", () => {
1416
+ const result = compileSource("@retry(3)\n@retry(99)\njob q { find /a | save /b }");
1417
+ expect(result.diagnostics.some(d =>
1418
+ d.message.includes("@retry") && d.message.includes("appears 2 times"),
1419
+ )).toBe(true);
1420
+ });
1421
+
1422
+ it("duplicate @on_error → error", () => {
1423
+ const result = compileSource("@on_error(skip)\n@on_error(fail)\njob q { find /a | save /b }");
1424
+ expect(result.diagnostics.some(d =>
1425
+ d.message.includes("@on_error") && d.message.includes("appears 2 times"),
1426
+ )).toBe(true);
1427
+ });
1428
+
1429
+ it("triple @readonly → error", () => {
1430
+ const result = compileSource("@readonly\n@readonly\n@readonly\njob q { find /a | save /b }");
1431
+ expect(result.diagnostics.some(d =>
1432
+ d.message.includes("@readonly") && d.message.includes("appears 3 times"),
1433
+ )).toBe(true);
1434
+ });
1435
+
1436
+ it("single @timeout → no duplicate error", () => {
1437
+ const result = compileSource("@timeout(5000)\njob q { find /a | save /b }");
1438
+ expect(result.diagnostics.filter(d => d.message.includes("appears")).length).toBe(0);
1439
+ });
1440
+
1441
+ // ── Issue 2: Route without fallback → warning ──
1442
+
1443
+ it("route without default branch → warning about silent drop", () => {
1444
+ const result = compileSource('job main { find /data | route level { "safe" -> job handle_safe, "danger" -> job handle_danger } }\njob handle_safe { save /safe }\njob handle_danger { save /danger }');
1445
+ expect(result.diagnostics.some(d =>
1446
+ d.message.includes("no default branch") && d.message.includes("silently dropped"),
1447
+ )).toBe(true);
1448
+ });
1449
+
1450
+ it("route with default branch → no warning", () => {
1451
+ const result = compileSource('job main { find /data | route level { "safe" -> job handle_safe, _ -> job fallback } }\njob handle_safe { save /safe }\njob fallback { save /fallback }');
1452
+ expect(result.diagnostics.filter(d => d.message.includes("no default branch")).length).toBe(0);
1453
+ });
1454
+
1455
+ // ── Issue 3: -Infinity / NaN bypass in where comparisons ──
1456
+
1457
+ // ── Issue 3: -Infinity / NaN / Infinity where bypass — runtime tests in compiler.test.ts ──
1458
+ // (Static analysis can't catch these; they're runtime evaluation guards)
1459
+ });
1460
+
1461
+ describe("Relative action diagnostics", () => {
1462
+ it("relative action without upstream find → ASH_RELATIVE_ACTION_NO_FIND error", () => {
1463
+ const result = compileSource(
1464
+ '@caps(exec /ha/*)\n@budget(actions 10)\njob q { action turn_off }',
1465
+ );
1466
+ expect(result.diagnostics.some(d => d.code === "ASH_RELATIVE_ACTION_NO_FIND")).toBe(true);
1467
+ });
1468
+
1469
+ it("relative action without @caps → ASH_UNCAPPED_ACTION", () => {
1470
+ const result = compileSource(
1471
+ 'job q { find /ha/lights | action turn_off }',
1472
+ );
1473
+ expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_ACTION")).toBe(true);
1474
+ });
1475
+
1476
+ it("relative action without @budget → ASH_ACTION_AMPLIFICATION error", () => {
1477
+ const result = compileSource(
1478
+ '@caps(read /ha/* exec /ha/*)\njob q { find /ha/lights | action turn_off }',
1479
+ );
1480
+ const ampDiag = result.diagnostics.filter(d => d.code === "ASH_ACTION_AMPLIFICATION");
1481
+ // Should have an error-level amplification diagnostic (not just warning)
1482
+ expect(ampDiag.some(d => d.severity !== "warning")).toBe(true);
1483
+ });
1484
+
1485
+ it("relative action with @caps + @budget → warning (permitted)", () => {
1486
+ const result = compileSource(
1487
+ '@caps(read /ha/* exec /ha/*)\n@budget(actions 100)\njob q { find /ha/lights | action turn_off }',
1488
+ );
1489
+ const ampDiag = result.diagnostics.filter(d =>
1490
+ d.code === "ASH_ACTION_AMPLIFICATION" && d.severity === "warning",
1491
+ );
1492
+ expect(ampDiag.length).toBeGreaterThan(0);
1493
+ });
1494
+ });