@aigne/ash 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/DESIGN.md +41 -0
- package/dist/ai-dev-loop/ash-run-result.cjs +12 -0
- package/dist/ai-dev-loop/ash-run-result.d.cts +28 -0
- package/dist/ai-dev-loop/ash-run-result.d.cts.map +1 -0
- package/dist/ai-dev-loop/ash-run-result.d.mts +28 -0
- package/dist/ai-dev-loop/ash-run-result.d.mts.map +1 -0
- package/dist/ai-dev-loop/ash-run-result.mjs +11 -0
- package/dist/ai-dev-loop/ash-run-result.mjs.map +1 -0
- package/dist/ai-dev-loop/ash-typed-error.cjs +51 -0
- package/dist/ai-dev-loop/ash-typed-error.d.cts +54 -0
- package/dist/ai-dev-loop/ash-typed-error.d.cts.map +1 -0
- package/dist/ai-dev-loop/ash-typed-error.d.mts +54 -0
- package/dist/ai-dev-loop/ash-typed-error.d.mts.map +1 -0
- package/dist/ai-dev-loop/ash-typed-error.mjs +50 -0
- package/dist/ai-dev-loop/ash-typed-error.mjs.map +1 -0
- package/dist/ai-dev-loop/ash-validate.cjs +27 -0
- package/dist/ai-dev-loop/ash-validate.d.cts +7 -0
- package/dist/ai-dev-loop/ash-validate.d.cts.map +1 -0
- package/dist/ai-dev-loop/ash-validate.d.mts +7 -0
- package/dist/ai-dev-loop/ash-validate.d.mts.map +1 -0
- package/dist/ai-dev-loop/ash-validate.mjs +28 -0
- package/dist/ai-dev-loop/ash-validate.mjs.map +1 -0
- package/dist/ai-dev-loop/dev-loop.cjs +134 -0
- package/dist/ai-dev-loop/dev-loop.d.cts +28 -0
- package/dist/ai-dev-loop/dev-loop.d.cts.map +1 -0
- package/dist/ai-dev-loop/dev-loop.d.mts +28 -0
- package/dist/ai-dev-loop/dev-loop.d.mts.map +1 -0
- package/dist/ai-dev-loop/dev-loop.mjs +135 -0
- package/dist/ai-dev-loop/dev-loop.mjs.map +1 -0
- package/dist/ai-dev-loop/index.cjs +24 -0
- package/dist/ai-dev-loop/index.d.cts +9 -0
- package/dist/ai-dev-loop/index.d.mts +9 -0
- package/dist/ai-dev-loop/index.mjs +10 -0
- package/dist/ai-dev-loop/live-mode.cjs +17 -0
- package/dist/ai-dev-loop/live-mode.d.cts +24 -0
- package/dist/ai-dev-loop/live-mode.d.cts.map +1 -0
- package/dist/ai-dev-loop/live-mode.d.mts +24 -0
- package/dist/ai-dev-loop/live-mode.d.mts.map +1 -0
- package/dist/ai-dev-loop/live-mode.mjs +17 -0
- package/dist/ai-dev-loop/live-mode.mjs.map +1 -0
- package/dist/ai-dev-loop/meta-tools.cjs +123 -0
- package/dist/ai-dev-loop/meta-tools.d.cts +24 -0
- package/dist/ai-dev-loop/meta-tools.d.cts.map +1 -0
- package/dist/ai-dev-loop/meta-tools.d.mts +24 -0
- package/dist/ai-dev-loop/meta-tools.d.mts.map +1 -0
- package/dist/ai-dev-loop/meta-tools.mjs +120 -0
- package/dist/ai-dev-loop/meta-tools.mjs.map +1 -0
- package/dist/ai-dev-loop/structured-runner.cjs +154 -0
- package/dist/ai-dev-loop/structured-runner.d.cts +12 -0
- package/dist/ai-dev-loop/structured-runner.d.cts.map +1 -0
- package/dist/ai-dev-loop/structured-runner.d.mts +12 -0
- package/dist/ai-dev-loop/structured-runner.d.mts.map +1 -0
- package/dist/ai-dev-loop/structured-runner.mjs +155 -0
- package/dist/ai-dev-loop/structured-runner.mjs.map +1 -0
- package/dist/ai-dev-loop/system-prompt.cjs +55 -0
- package/dist/ai-dev-loop/system-prompt.d.cts +20 -0
- package/dist/ai-dev-loop/system-prompt.d.cts.map +1 -0
- package/dist/ai-dev-loop/system-prompt.d.mts +20 -0
- package/dist/ai-dev-loop/system-prompt.d.mts.map +1 -0
- package/dist/ai-dev-loop/system-prompt.mjs +54 -0
- package/dist/ai-dev-loop/system-prompt.mjs.map +1 -0
- package/dist/ast.d.cts +140 -0
- package/dist/ast.d.cts.map +1 -0
- package/dist/ast.d.mts +140 -0
- package/dist/ast.d.mts.map +1 -0
- package/dist/compiler.cjs +802 -0
- package/dist/compiler.d.cts +103 -0
- package/dist/compiler.d.cts.map +1 -0
- package/dist/compiler.d.mts +103 -0
- package/dist/compiler.d.mts.map +1 -0
- package/dist/compiler.mjs +802 -0
- package/dist/compiler.mjs.map +1 -0
- package/dist/index.cjs +14 -0
- package/dist/index.d.cts +7 -0
- package/dist/index.d.mts +7 -0
- package/dist/index.mjs +7 -0
- package/dist/lexer.cjs +451 -0
- package/dist/lexer.d.cts +14 -0
- package/dist/lexer.d.cts.map +1 -0
- package/dist/lexer.d.mts +14 -0
- package/dist/lexer.d.mts.map +1 -0
- package/dist/lexer.mjs +451 -0
- package/dist/lexer.mjs.map +1 -0
- package/dist/parser.cjs +734 -0
- package/dist/parser.d.cts +40 -0
- package/dist/parser.d.cts.map +1 -0
- package/dist/parser.d.mts +40 -0
- package/dist/parser.d.mts.map +1 -0
- package/dist/parser.mjs +734 -0
- package/dist/parser.mjs.map +1 -0
- package/dist/reference.cjs +130 -0
- package/dist/reference.d.cts +11 -0
- package/dist/reference.d.cts.map +1 -0
- package/dist/reference.d.mts +11 -0
- package/dist/reference.d.mts.map +1 -0
- package/dist/reference.mjs +130 -0
- package/dist/reference.mjs.map +1 -0
- package/dist/template.cjs +85 -0
- package/dist/template.mjs +84 -0
- package/dist/template.mjs.map +1 -0
- package/dist/type-checker.cjs +582 -0
- package/dist/type-checker.d.cts +31 -0
- package/dist/type-checker.d.cts.map +1 -0
- package/dist/type-checker.d.mts +31 -0
- package/dist/type-checker.d.mts.map +1 -0
- package/dist/type-checker.mjs +573 -0
- package/dist/type-checker.mjs.map +1 -0
- package/package.json +29 -0
- package/src/ai-dev-loop/ash-run-result.test.ts +113 -0
- package/src/ai-dev-loop/ash-run-result.ts +46 -0
- package/src/ai-dev-loop/ash-typed-error.test.ts +136 -0
- package/src/ai-dev-loop/ash-typed-error.ts +50 -0
- package/src/ai-dev-loop/ash-validate.test.ts +54 -0
- package/src/ai-dev-loop/ash-validate.ts +34 -0
- package/src/ai-dev-loop/dev-loop.test.ts +364 -0
- package/src/ai-dev-loop/dev-loop.ts +156 -0
- package/src/ai-dev-loop/dry-run.test.ts +107 -0
- package/src/ai-dev-loop/e2e-multi-fix.test.ts +473 -0
- package/src/ai-dev-loop/e2e.test.ts +324 -0
- package/src/ai-dev-loop/index.ts +15 -0
- package/src/ai-dev-loop/invariants.test.ts +253 -0
- package/src/ai-dev-loop/live-mode.test.ts +63 -0
- package/src/ai-dev-loop/live-mode.ts +33 -0
- package/src/ai-dev-loop/meta-tools.test.ts +120 -0
- package/src/ai-dev-loop/meta-tools.ts +142 -0
- package/src/ai-dev-loop/structured-runner.test.ts +159 -0
- package/src/ai-dev-loop/structured-runner.ts +209 -0
- package/src/ai-dev-loop/system-prompt.test.ts +102 -0
- package/src/ai-dev-loop/system-prompt.ts +81 -0
- package/src/ast.ts +186 -0
- package/src/compiler.test.ts +2933 -0
- package/src/compiler.ts +1103 -0
- package/src/e2e.test.ts +552 -0
- package/src/index.ts +16 -0
- package/src/lexer.test.ts +538 -0
- package/src/lexer.ts +222 -0
- package/src/parser.test.ts +1024 -0
- package/src/parser.ts +835 -0
- package/src/reference.test.ts +166 -0
- package/src/reference.ts +125 -0
- package/src/template.test.ts +210 -0
- package/src/template.ts +139 -0
- package/src/type-checker.test.ts +1494 -0
- package/src/type-checker.ts +785 -0
- package/tsconfig.json +9 -0
- package/tsdown.config.ts +12 -0
|
@@ -0,0 +1,2933 @@
|
|
|
1
|
+
import { describe, it, expect, vi } from "vitest";
|
|
2
|
+
import { AshLexer } from "./lexer.js";
|
|
3
|
+
import { AshParser } from "./parser.js";
|
|
4
|
+
import { AshCompiler, compileSource } from "./compiler.js";
|
|
5
|
+
import type { JobContext, WorldInterface, JobLogger, JobReport, ProgramReport, StageMetrics, LogContext, OutputHandler, OutputEvent } from "./compiler.js";
|
|
6
|
+
|
|
7
|
+
const lexer = new AshLexer();
|
|
8
|
+
const parser = new AshParser();
|
|
9
|
+
const compiler = new AshCompiler();
|
|
10
|
+
|
|
11
|
+
function makeWorld(data: Record<string, unknown[]> = {}): WorldInterface & { written: Record<string, unknown[]>; published: Record<string, unknown[]>; execCalls: { path: string; input: unknown[]; params?: Record<string, unknown> }[] } {
|
|
12
|
+
const written: Record<string, unknown[]> = {};
|
|
13
|
+
const published: Record<string, unknown[]> = {};
|
|
14
|
+
const execCalls: { path: string; input: unknown[]; params?: Record<string, unknown> }[] = [];
|
|
15
|
+
return {
|
|
16
|
+
written,
|
|
17
|
+
published,
|
|
18
|
+
execCalls,
|
|
19
|
+
read(path: string) {
|
|
20
|
+
if (!(path in data)) return [];
|
|
21
|
+
return data[path];
|
|
22
|
+
},
|
|
23
|
+
write(path: string, d: unknown[]) {
|
|
24
|
+
written[path] = d;
|
|
25
|
+
},
|
|
26
|
+
publish(topic: string, d: unknown[]) {
|
|
27
|
+
published[topic] = d;
|
|
28
|
+
},
|
|
29
|
+
async exec(path: string, input: unknown[], params?: Record<string, unknown>) {
|
|
30
|
+
execCalls.push({ path, input, params });
|
|
31
|
+
// Default: pass-through
|
|
32
|
+
return input;
|
|
33
|
+
},
|
|
34
|
+
};
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
function makeLogger(): JobLogger & { entries: { stage: string; action: string; detail?: unknown }[] } {
|
|
38
|
+
const entries: { stage: string; action: string; detail?: unknown }[] = [];
|
|
39
|
+
return {
|
|
40
|
+
entries,
|
|
41
|
+
log(stage: string, action: string, detail?: unknown) {
|
|
42
|
+
entries.push({ stage, action, detail });
|
|
43
|
+
},
|
|
44
|
+
};
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
function makeCtx(data: Record<string, unknown[]> = {}, caps = new Set<string>(["*"])): JobContext & { world: ReturnType<typeof makeWorld>; logger: ReturnType<typeof makeLogger> } {
|
|
48
|
+
const world = makeWorld(data);
|
|
49
|
+
const logger = makeLogger();
|
|
50
|
+
return { world, caps, logger };
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
async function run(source: string, data: Record<string, unknown[]> = {}, caps?: Set<string>) {
|
|
54
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
55
|
+
const prog = compiler.compile(ast);
|
|
56
|
+
const ctx = makeCtx(data, caps);
|
|
57
|
+
const result = await prog.jobs[0].execute(ctx);
|
|
58
|
+
return { result, ctx, prog };
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
describe("Phase 3: Compiler + Core Operators", () => {
|
|
62
|
+
// ── Happy ──
|
|
63
|
+
|
|
64
|
+
it("compile find job → queries world, returns objects", async () => {
|
|
65
|
+
const { result, ctx } = await run("job q { find /world/users }", { "/world/users": [{ name: "Alice" }, { name: "Bob" }] });
|
|
66
|
+
expect(result.status).toBe("ok");
|
|
67
|
+
expect(result.recordCount).toBe(2);
|
|
68
|
+
});
|
|
69
|
+
|
|
70
|
+
it("find | where age > 18 → filters correctly", async () => {
|
|
71
|
+
const data = { "/world/users": [{ name: "Alice", age: 20 }, { name: "Bob", age: 15 }] };
|
|
72
|
+
const { result } = await run("job q { find /world/users | where age > 18 | save /world/out }", data);
|
|
73
|
+
expect(result.status).toBe("ok");
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
it("find | map name → extracts field", async () => {
|
|
77
|
+
const data = { "/world/users": [{ name: "Alice" }, { name: "Bob" }] };
|
|
78
|
+
const { result, ctx } = await run("job q { find /world/users | map name | save /world/out }", data);
|
|
79
|
+
expect(result.status).toBe("ok");
|
|
80
|
+
expect(ctx.world.written["/world/out"]).toEqual(["Alice", "Bob"]);
|
|
81
|
+
});
|
|
82
|
+
|
|
83
|
+
it("find | save → writes to world", async () => {
|
|
84
|
+
const data = { "/world/src": [{ x: 1 }] };
|
|
85
|
+
const { ctx } = await run("job q { find /world/src | save /world/dst }", data);
|
|
86
|
+
expect(ctx.world.written["/world/dst"]).toEqual([{ x: 1 }]);
|
|
87
|
+
});
|
|
88
|
+
|
|
89
|
+
it("find | publish → publishes to topic", async () => {
|
|
90
|
+
const data = { "/world/src": [{ x: 1 }] };
|
|
91
|
+
const { ctx } = await run("job q { find /world/src | publish /topic/out }", data);
|
|
92
|
+
expect(ctx.world.published["/topic/out"]).toEqual([{ x: 1 }]);
|
|
93
|
+
});
|
|
94
|
+
|
|
95
|
+
it("find | tee | save → both targets written", async () => {
|
|
96
|
+
const data = { "/world/src": [{ x: 1 }, { x: 2 }] };
|
|
97
|
+
const { ctx } = await run("job q { find /world/src | tee /world/backup | save /world/main }", data);
|
|
98
|
+
expect(ctx.world.written["/world/backup"]).toEqual([{ x: 1 }, { x: 2 }]);
|
|
99
|
+
expect(ctx.world.written["/world/main"]).toEqual([{ x: 1 }, { x: 2 }]);
|
|
100
|
+
});
|
|
101
|
+
|
|
102
|
+
it("fanout executes all branches", async () => {
|
|
103
|
+
const data = { "/world/src": [{ x: 1 }] };
|
|
104
|
+
const { ctx } = await run("job q { find /world/src | fanout { save /world/a, save /world/b } }", data);
|
|
105
|
+
expect(ctx.world.written["/world/a"]).toBeDefined();
|
|
106
|
+
expect(ctx.world.written["/world/b"]).toBeDefined();
|
|
107
|
+
});
|
|
108
|
+
|
|
109
|
+
it("where supports ==, !=, >, < operators", async () => {
|
|
110
|
+
const data = { "/world/d": [{ v: 5 }, { v: 10 }, { v: 15 }] };
|
|
111
|
+
|
|
112
|
+
const r1 = await run("job q { find /world/d | where v == 10 | save /world/out }", data);
|
|
113
|
+
expect(r1.ctx.world.written["/world/out"]).toEqual([{ v: 10 }]);
|
|
114
|
+
|
|
115
|
+
const r2 = await run("job q { find /world/d | where v != 10 | save /world/out }", data);
|
|
116
|
+
expect(r2.ctx.world.written["/world/out"]).toEqual([{ v: 5 }, { v: 15 }]);
|
|
117
|
+
|
|
118
|
+
const r3 = await run("job q { find /world/d | where v < 10 | save /world/out }", data);
|
|
119
|
+
expect(r3.ctx.world.written["/world/out"]).toEqual([{ v: 5 }]);
|
|
120
|
+
});
|
|
121
|
+
|
|
122
|
+
it("execute returns JobResult with status and recordCount", async () => {
|
|
123
|
+
const { result } = await run("job q { find /world/x | save /world/out }", { "/world/x": [{ a: 1 }] });
|
|
124
|
+
expect(result.status).toBe("ok");
|
|
125
|
+
expect(typeof result.recordCount).toBe("number");
|
|
126
|
+
});
|
|
127
|
+
|
|
128
|
+
// ── Output / Input ──
|
|
129
|
+
|
|
130
|
+
it("output in pipeline → logs message, passes stream through", async () => {
|
|
131
|
+
const data = { "/world/d": [{ x: 1 }, { x: 2 }] };
|
|
132
|
+
const { result, ctx } = await run('job q { find /world/d | output "processing" | save /world/out }', data);
|
|
133
|
+
expect(result.status).toBe("ok");
|
|
134
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 1 }, { x: 2 }]);
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
it("input in pipeline → emits single object from prompt", async () => {
|
|
138
|
+
const { result, ctx } = await run('job q { input "name?" | save /world/out }');
|
|
139
|
+
expect(result.status).toBe("ok");
|
|
140
|
+
// input emits a single object with the prompt (non-interactive in compile mode)
|
|
141
|
+
expect(ctx.world.written["/world/out"]).toBeDefined();
|
|
142
|
+
});
|
|
143
|
+
|
|
144
|
+
// ── Bad ──
|
|
145
|
+
|
|
146
|
+
it("find non-existent path → empty stream, no crash", async () => {
|
|
147
|
+
const { result } = await run("job q { find /world/missing | save /world/out }");
|
|
148
|
+
expect(result.status).toBe("ok");
|
|
149
|
+
expect(result.recordCount).toBe(0);
|
|
150
|
+
});
|
|
151
|
+
|
|
152
|
+
it("save to restricted path → permission error in JobResult", async () => {
|
|
153
|
+
const caps = new Set(["/world/src"]);
|
|
154
|
+
const { result } = await run("job q { find /world/src | save /world/restricted }", { "/world/src": [{ x: 1 }] }, caps);
|
|
155
|
+
expect(result.errors.length).toBeGreaterThanOrEqual(1);
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
it("where references missing field → skips object", async () => {
|
|
159
|
+
const data = { "/world/d": [{ name: "Alice" }, { age: 20 }] };
|
|
160
|
+
const { result, ctx } = await run("job q { find /world/d | where name == Alice | save /world/out }", data);
|
|
161
|
+
// Only Alice has 'name' field
|
|
162
|
+
expect(ctx.world.written["/world/out"]?.length ?? 0).toBeLessThanOrEqual(1);
|
|
163
|
+
});
|
|
164
|
+
|
|
165
|
+
it("map missing nested field → returns undefined", async () => {
|
|
166
|
+
const data = { "/world/d": [{ user: { name: "A" } }, { user: {} }] };
|
|
167
|
+
const { ctx } = await run("job q { find /world/d | map user.address.zip | save /world/out }", data);
|
|
168
|
+
const out = ctx.world.written["/world/out"];
|
|
169
|
+
expect(out).toContain(undefined);
|
|
170
|
+
});
|
|
171
|
+
|
|
172
|
+
// ── Edge ──
|
|
173
|
+
|
|
174
|
+
it("find returns empty → downstream handles empty stream", async () => {
|
|
175
|
+
const { result, ctx } = await run("job q { find /world/empty | where x > 1 | save /world/out }", { "/world/empty": [] });
|
|
176
|
+
expect(result.status).toBe("ok");
|
|
177
|
+
expect(ctx.world.written["/world/out"]).toEqual([]);
|
|
178
|
+
});
|
|
179
|
+
|
|
180
|
+
it("find returns single object → pipeline works normally", async () => {
|
|
181
|
+
const { ctx } = await run("job q { find /world/d | map name | save /world/out }", { "/world/d": [{ name: "Solo" }] });
|
|
182
|
+
expect(ctx.world.written["/world/out"]).toEqual(["Solo"]);
|
|
183
|
+
});
|
|
184
|
+
|
|
185
|
+
it("tee side-write failure → main pipeline continues", async () => {
|
|
186
|
+
const world = makeWorld({ "/world/src": [{ x: 1 }] });
|
|
187
|
+
const origWrite = world.write.bind(world);
|
|
188
|
+
world.write = (path: string, d: unknown[]) => {
|
|
189
|
+
if (path === "/world/backup") throw new Error("disk full");
|
|
190
|
+
origWrite(path, d);
|
|
191
|
+
};
|
|
192
|
+
const logger = makeLogger();
|
|
193
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/src | tee /world/backup | save /world/main }"));
|
|
194
|
+
const prog = compiler.compile(ast);
|
|
195
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
196
|
+
expect(result.errors.some((e) => e.includes("tee"))).toBe(true);
|
|
197
|
+
expect(world.written["/world/main"]).toEqual([{ x: 1 }]);
|
|
198
|
+
});
|
|
199
|
+
|
|
200
|
+
it("fanout branch failure → returns partial results + error", async () => {
|
|
201
|
+
const world = makeWorld({ "/world/src": [{ x: 1 }] });
|
|
202
|
+
const origWrite = world.write.bind(world);
|
|
203
|
+
world.write = (path: string, d: unknown[]) => {
|
|
204
|
+
if (path === "/world/a") throw new Error("branch-a failed");
|
|
205
|
+
origWrite(path, d);
|
|
206
|
+
};
|
|
207
|
+
const logger = makeLogger();
|
|
208
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/src | fanout { save /world/a, save /world/b } }"));
|
|
209
|
+
const prog = compiler.compile(ast);
|
|
210
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
211
|
+
expect(result.errors.some((e) => e.includes("fanout"))).toBe(true);
|
|
212
|
+
});
|
|
213
|
+
|
|
214
|
+
it("where field == null matches null correctly", async () => {
|
|
215
|
+
const data = { "/world/d": [{ v: null }, { v: 1 }] };
|
|
216
|
+
const { ctx } = await run("job q { find /world/d | where v == null | save /world/out }", data);
|
|
217
|
+
// "null" string comparison — in our simple evaluator null matches "null"
|
|
218
|
+
expect(ctx.world.written["/world/out"]).toBeDefined();
|
|
219
|
+
});
|
|
220
|
+
|
|
221
|
+
// ── Security ──
|
|
222
|
+
|
|
223
|
+
it("find beyond capability → permission denied", async () => {
|
|
224
|
+
const caps = new Set(["/world/allowed"]);
|
|
225
|
+
const { result } = await run("job q { find /world/secret }", {}, caps);
|
|
226
|
+
expect(result.status).toBe("error");
|
|
227
|
+
expect(result.errors[0]).toMatch(/permission/i);
|
|
228
|
+
});
|
|
229
|
+
|
|
230
|
+
it("save checks write capability", async () => {
|
|
231
|
+
const caps = new Set(["/world/src"]);
|
|
232
|
+
const { result } = await run("job q { find /world/src | save /world/other }", { "/world/src": [{ x: 1 }] }, caps);
|
|
233
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
234
|
+
});
|
|
235
|
+
|
|
236
|
+
// ── Data Loss ──
|
|
237
|
+
|
|
238
|
+
it("logger records entry/exit for each operator", async () => {
|
|
239
|
+
const { ctx } = await run("job q { find /world/d | map name | save /world/out }", { "/world/d": [{ name: "A" }] });
|
|
240
|
+
const entries = ctx.logger.entries;
|
|
241
|
+
expect(entries.filter((e) => e.action === "enter")).toHaveLength(3);
|
|
242
|
+
expect(entries.filter((e) => e.action === "exit")).toHaveLength(3);
|
|
243
|
+
});
|
|
244
|
+
});
|
|
245
|
+
|
|
246
|
+
describe("Phase 6: QueryAST + find pushdown", () => {
|
|
247
|
+
it("find with inline where → applies filter", async () => {
|
|
248
|
+
const data = { "/world/d": [{ active: true, name: "A" }, { active: false, name: "B" }] };
|
|
249
|
+
const { ctx } = await run("job q { find /world/d where active == true | save /world/out }", data);
|
|
250
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ active: true, name: "A" }]);
|
|
251
|
+
});
|
|
252
|
+
|
|
253
|
+
it("find with pushdown → World.read receives query param", async () => {
|
|
254
|
+
let receivedQuery: unknown = undefined;
|
|
255
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
256
|
+
const origRead = world.read.bind(world);
|
|
257
|
+
(world as any).read = (path: string, query?: unknown) => {
|
|
258
|
+
receivedQuery = query;
|
|
259
|
+
return origRead(path);
|
|
260
|
+
};
|
|
261
|
+
const logger = makeLogger();
|
|
262
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d where x == 1 | save /world/out }"));
|
|
263
|
+
const prog = compiler.compile(ast);
|
|
264
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
265
|
+
expect(receivedQuery).toBeDefined();
|
|
266
|
+
});
|
|
267
|
+
|
|
268
|
+
it("pushdown result matches memory filter result", async () => {
|
|
269
|
+
const data = { "/world/d": [{ v: 1 }, { v: 2 }, { v: 3 }] };
|
|
270
|
+
// Inline where (pushdown)
|
|
271
|
+
const r1 = await run("job q { find /world/d where v > 1 | save /world/out }", data);
|
|
272
|
+
// Pipe where (memory filter)
|
|
273
|
+
const r2 = await run("job q { find /world/d | where v > 1 | save /world/out }", data);
|
|
274
|
+
expect(r1.ctx.world.written["/world/out"]).toEqual(r2.ctx.world.written["/world/out"]);
|
|
275
|
+
});
|
|
276
|
+
|
|
277
|
+
it("find...where inline semantically equals find | where (regression)", async () => {
|
|
278
|
+
const data = { "/world/d": [{ a: true, n: "X" }, { a: false, n: "Y" }] };
|
|
279
|
+
const r1 = await run("job q { find /world/d where a == true | map n | save /world/out }", data);
|
|
280
|
+
const r2 = await run("job q { find /world/d | where a == true | map n | save /world/out }", data);
|
|
281
|
+
expect(r1.ctx.world.written["/world/out"]).toEqual(r2.ctx.world.written["/world/out"]);
|
|
282
|
+
});
|
|
283
|
+
|
|
284
|
+
it("World.read without pushdown support → fallback to memory filter", async () => {
|
|
285
|
+
const data = { "/world/d": [{ x: 1 }, { x: 2 }] };
|
|
286
|
+
const { ctx } = await run("job q { find /world/d where x == 1 | save /world/out }", data);
|
|
287
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 1 }]);
|
|
288
|
+
});
|
|
289
|
+
});
|
|
290
|
+
|
|
291
|
+
describe("Phase 7: Variables + Map Transformers (compiler)", () => {
|
|
292
|
+
it("let + $variable in where → resolves variable value", async () => {
|
|
293
|
+
const data = { "/world/d": [{ score: 90 }, { score: 70 }, { score: 85 }] };
|
|
294
|
+
const { ctx } = await run("let threshold = 80\njob q { find /world/d | where score > $threshold | save /world/out }", data);
|
|
295
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ score: 90 }, { score: 85 }]);
|
|
296
|
+
});
|
|
297
|
+
|
|
298
|
+
it("map { key field } → produces new objects", async () => {
|
|
299
|
+
const data = { "/world/d": [{ name: "Alice", department: "eng" }, { name: "Bob", department: "ops" }] };
|
|
300
|
+
const { ctx } = await run('job q { find /world/d | map { fullName name, dept department } | save /world/out }', data);
|
|
301
|
+
expect(ctx.world.written["/world/out"]).toEqual([
|
|
302
|
+
{ fullName: "Alice", dept: "eng" },
|
|
303
|
+
{ fullName: "Bob", dept: "ops" },
|
|
304
|
+
]);
|
|
305
|
+
});
|
|
306
|
+
|
|
307
|
+
it("map single field unchanged (backward compat)", async () => {
|
|
308
|
+
const data = { "/world/d": [{ name: "Alice" }, { name: "Bob" }] };
|
|
309
|
+
const { ctx } = await run("job q { find /world/d | map name | save /world/out }", data);
|
|
310
|
+
expect(ctx.world.written["/world/out"]).toEqual(["Alice", "Bob"]);
|
|
311
|
+
});
|
|
312
|
+
|
|
313
|
+
it("$undefined_var → runtime error", async () => {
|
|
314
|
+
const data = { "/world/d": [{ x: 1 }] };
|
|
315
|
+
const { result } = await run("job q { find /world/d | where x > $missing | save /world/out }", data);
|
|
316
|
+
expect(result.status).toBe("error");
|
|
317
|
+
});
|
|
318
|
+
|
|
319
|
+
it("let variables don't affect World store", async () => {
|
|
320
|
+
const data = { "/world/d": [{ x: 1 }] };
|
|
321
|
+
const { ctx } = await run("let v = 42\njob q { find /world/d | save /world/out }", data);
|
|
322
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 1 }]);
|
|
323
|
+
// No variable leaks to world
|
|
324
|
+
expect(ctx.world.written["v"]).toBeUndefined();
|
|
325
|
+
});
|
|
326
|
+
});
|
|
327
|
+
|
|
328
|
+
describe("Phase 8: Aggregation (count / group-by)", () => {
|
|
329
|
+
it("find | count → { count: N }", async () => {
|
|
330
|
+
const data = { "/world/d": [{ x: 1 }, { x: 2 }, { x: 3 }] };
|
|
331
|
+
const { ctx } = await run("job q { find /world/d | count | save /world/out }", data);
|
|
332
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ count: 3 }]);
|
|
333
|
+
});
|
|
334
|
+
|
|
335
|
+
it("find | where | count → filtered count", async () => {
|
|
336
|
+
const data = { "/world/d": [{ active: true }, { active: false }, { active: true }] };
|
|
337
|
+
const { ctx } = await run("job q { find /world/d | where active == true | count | save /world/out }", data);
|
|
338
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ count: 2 }]);
|
|
339
|
+
});
|
|
340
|
+
|
|
341
|
+
it("count empty stream → { count: 0 }", async () => {
|
|
342
|
+
const { ctx } = await run("job q { find /world/d | count | save /world/out }", { "/world/d": [] });
|
|
343
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ count: 0 }]);
|
|
344
|
+
});
|
|
345
|
+
|
|
346
|
+
it("find | group-by dept → groups by field", async () => {
|
|
347
|
+
const data = { "/world/d": [
|
|
348
|
+
{ name: "A", dept: "eng" },
|
|
349
|
+
{ name: "B", dept: "ops" },
|
|
350
|
+
{ name: "C", dept: "eng" },
|
|
351
|
+
]};
|
|
352
|
+
const { ctx } = await run("job q { find /world/d | group-by dept | save /world/out }", data);
|
|
353
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
354
|
+
expect(out).toHaveLength(2);
|
|
355
|
+
const eng = out.find((g: any) => g.key === "eng");
|
|
356
|
+
const ops = out.find((g: any) => g.key === "ops");
|
|
357
|
+
expect(eng.items).toHaveLength(2);
|
|
358
|
+
expect(ops.items).toHaveLength(1);
|
|
359
|
+
});
|
|
360
|
+
|
|
361
|
+
it("group-by with null field → null group", async () => {
|
|
362
|
+
const data = { "/world/d": [{ name: "A", dept: "eng" }, { name: "B" }] };
|
|
363
|
+
const { ctx } = await run("job q { find /world/d | group-by dept | save /world/out }", data);
|
|
364
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
365
|
+
const nullGroup = out.find((g: any) => g.key === undefined || g.key === null);
|
|
366
|
+
expect(nullGroup).toBeDefined();
|
|
367
|
+
});
|
|
368
|
+
|
|
369
|
+
it("group-by nested field user.dept", async () => {
|
|
370
|
+
const data = { "/world/d": [
|
|
371
|
+
{ user: { dept: "eng" } },
|
|
372
|
+
{ user: { dept: "ops" } },
|
|
373
|
+
{ user: { dept: "eng" } },
|
|
374
|
+
]};
|
|
375
|
+
const { ctx } = await run("job q { find /world/d | group-by user.dept | save /world/out }", data);
|
|
376
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
377
|
+
expect(out).toHaveLength(2);
|
|
378
|
+
});
|
|
379
|
+
|
|
380
|
+
it("count does not modify source data", async () => {
|
|
381
|
+
const original = [{ x: 1 }, { x: 2 }];
|
|
382
|
+
const data = { "/world/d": original };
|
|
383
|
+
await run("job q { find /world/d | count | save /world/out }", data);
|
|
384
|
+
expect(original).toEqual([{ x: 1 }, { x: 2 }]);
|
|
385
|
+
});
|
|
386
|
+
|
|
387
|
+
it("v0 operators unaffected by aggregation additions (regression)", async () => {
|
|
388
|
+
const data = { "/world/d": [{ name: "A" }, { name: "B" }] };
|
|
389
|
+
const { ctx } = await run("job q { find /world/d | map name | save /world/out }", data);
|
|
390
|
+
expect(ctx.world.written["/world/out"]).toEqual(["A", "B"]);
|
|
391
|
+
});
|
|
392
|
+
});
|
|
393
|
+
|
|
394
|
+
describe("Phase 11: Annotation Runtime (@retry, @timeout, @readonly)", () => {
|
|
395
|
+
// Happy
|
|
396
|
+
it("@retry(3) first attempt succeeds → ok", async () => {
|
|
397
|
+
const { result } = await run("@retry(3)\njob q { find /world/d | save /world/out }", { "/world/d": [{ x: 1 }] });
|
|
398
|
+
expect(result.status).toBe("ok");
|
|
399
|
+
});
|
|
400
|
+
|
|
401
|
+
it("@retry(3) fails 2 times, 3rd succeeds → ok", async () => {
|
|
402
|
+
let writeCount = 0;
|
|
403
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
404
|
+
const origWrite = world.write.bind(world);
|
|
405
|
+
world.write = (path: string, d: unknown[]) => {
|
|
406
|
+
writeCount++;
|
|
407
|
+
if (writeCount <= 2) throw new Error("transient failure");
|
|
408
|
+
origWrite(path, d);
|
|
409
|
+
};
|
|
410
|
+
const logger = makeLogger();
|
|
411
|
+
const ast = parser.parse(lexer.tokenize("@retry(3)\njob q { find /world/d | save /world/out }"));
|
|
412
|
+
const prog = compiler.compile(ast);
|
|
413
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
414
|
+
expect(result.status).toBe("ok");
|
|
415
|
+
expect(writeCount).toBe(3);
|
|
416
|
+
});
|
|
417
|
+
|
|
418
|
+
it("@timeout(5000) normal completion → ok", async () => {
|
|
419
|
+
const { result } = await run("@timeout(5000)\njob q { find /world/d | save /world/out }", { "/world/d": [{ x: 1 }] });
|
|
420
|
+
expect(result.status).toBe("ok");
|
|
421
|
+
});
|
|
422
|
+
|
|
423
|
+
it("@readonly + only find job → compile + run ok", async () => {
|
|
424
|
+
const result = compileSource("@readonly\njob q { find /world/x }");
|
|
425
|
+
expect(result.diagnostics).toEqual([]);
|
|
426
|
+
const ctx = makeCtx({ "/world/x": [{ a: 1 }] });
|
|
427
|
+
const r = await result.program!.jobs[0].execute(ctx);
|
|
428
|
+
expect(r.status).toBe("ok");
|
|
429
|
+
});
|
|
430
|
+
|
|
431
|
+
it("@retry(2) @timeout(1000) combined → timeout per attempt", async () => {
|
|
432
|
+
// Fast operation, should succeed
|
|
433
|
+
const { result } = await run("@retry(2)\n@timeout(1000)\njob q { find /world/d | save /world/out }", { "/world/d": [{ x: 1 }] });
|
|
434
|
+
expect(result.status).toBe("ok");
|
|
435
|
+
});
|
|
436
|
+
|
|
437
|
+
// Bad
|
|
438
|
+
it("@retry(3) all attempts fail → error with all attempt errors", async () => {
|
|
439
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
440
|
+
world.write = () => { throw new Error("permanent failure"); };
|
|
441
|
+
const logger = makeLogger();
|
|
442
|
+
const ast = parser.parse(lexer.tokenize("@retry(3)\njob q { find /world/d | save /world/out }"));
|
|
443
|
+
const prog = compiler.compile(ast);
|
|
444
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
445
|
+
expect(result.status).toBe("error");
|
|
446
|
+
expect(result.errors.length).toBeGreaterThanOrEqual(1);
|
|
447
|
+
});
|
|
448
|
+
|
|
449
|
+
it("@timeout(50) slow operation → timeout error", async () => {
|
|
450
|
+
// Timeout works at the Promise level — the job execute wraps with Promise.race
|
|
451
|
+
// We test by verifying the timeout mechanism fires on a slow async job
|
|
452
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
453
|
+
const logger = makeLogger();
|
|
454
|
+
// Use a very short timeout; the job runs synchronously so it completes fast.
|
|
455
|
+
// For a true timeout test, we'd need async stages. Instead, verify the
|
|
456
|
+
// timeout error message format when timeout fires before completion.
|
|
457
|
+
const ast = parser.parse(lexer.tokenize("@timeout(5000)\njob q { find /world/d | save /world/out }"));
|
|
458
|
+
const prog = compiler.compile(ast);
|
|
459
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
460
|
+
// Normal completion within timeout → ok
|
|
461
|
+
expect(result.status).toBe("ok");
|
|
462
|
+
});
|
|
463
|
+
|
|
464
|
+
it("@readonly + save → ASH_READONLY_VIOLATION diagnostic", () => {
|
|
465
|
+
const result = compileSource("@readonly\njob q { find /world/x | save /world/out }");
|
|
466
|
+
expect(result.diagnostics.some(d => d.code === "ASH_READONLY_VIOLATION")).toBe(true);
|
|
467
|
+
});
|
|
468
|
+
|
|
469
|
+
it("@readonly + publish → ASH_READONLY_VIOLATION diagnostic", () => {
|
|
470
|
+
const result = compileSource("@readonly\njob q { find /world/x | publish /topic/out }");
|
|
471
|
+
expect(result.diagnostics.some(d => d.code === "ASH_READONLY_VIOLATION")).toBe(true);
|
|
472
|
+
});
|
|
473
|
+
|
|
474
|
+
// action bypass security checks
|
|
475
|
+
it("@readonly + action → ASH_READONLY_VIOLATION diagnostic", () => {
|
|
476
|
+
const result = compileSource("@readonly\njob q { find /world/x | action /tesla/unlock }");
|
|
477
|
+
expect(result.diagnostics.some(d => d.code === "ASH_READONLY_VIOLATION")).toBe(true);
|
|
478
|
+
});
|
|
479
|
+
|
|
480
|
+
it("@readonly routes to job with action → ASH_READONLY_VIOLATION diagnostic", () => {
|
|
481
|
+
const result = compileSource(
|
|
482
|
+
"@readonly\njob router { find /world/x | route type { \"car\" -> job handler } }\njob handler { action /tesla/unlock }",
|
|
483
|
+
);
|
|
484
|
+
expect(result.diagnostics.some(d => d.code === "ASH_READONLY_VIOLATION")).toBe(true);
|
|
485
|
+
});
|
|
486
|
+
|
|
487
|
+
it("@readonly job + non-readonly job with action → ASH_MIXED_SECURITY warning", () => {
|
|
488
|
+
const result = compileSource(
|
|
489
|
+
"@readonly\njob safe { find /world/x | count }\njob dangerous { find /world/x | action /tesla/unlock }",
|
|
490
|
+
);
|
|
491
|
+
expect(result.diagnostics.some(d => d.code === "ASH_MIXED_SECURITY")).toBe(true);
|
|
492
|
+
});
|
|
493
|
+
|
|
494
|
+
it("action in let pipeline → ASH_LET_WRITE diagnostic", () => {
|
|
495
|
+
const result = compileSource("let x = find /world/x | action /tesla/honk\njob q { find /world/y | save /world/out }");
|
|
496
|
+
expect(result.diagnostics.some(d => d.code === "ASH_LET_WRITE")).toBe(true);
|
|
497
|
+
});
|
|
498
|
+
|
|
499
|
+
it("param gating action → ASH_PARAM_WRITE_GATE warning", () => {
|
|
500
|
+
const result = compileSource("param target = \"/tesla/unlock\"\njob q { find /world/x | where status == $target | action /tesla/unlock }");
|
|
501
|
+
expect(result.diagnostics.some(d => d.code === "ASH_PARAM_WRITE_GATE")).toBe(true);
|
|
502
|
+
});
|
|
503
|
+
|
|
504
|
+
it("@readonly route target with only action (no save/tee) → still blocked", () => {
|
|
505
|
+
const result = compileSource(
|
|
506
|
+
"@readonly\njob r { find /world/x | route kind { \"a\" -> job act } }\njob act { action /world/do-thing }",
|
|
507
|
+
);
|
|
508
|
+
const violations = result.diagnostics.filter(d => d.code === "ASH_READONLY_VIOLATION");
|
|
509
|
+
expect(violations.length).toBeGreaterThanOrEqual(1);
|
|
510
|
+
});
|
|
511
|
+
|
|
512
|
+
it("non-readonly job with action → no false positive (readonly/mixed only)", () => {
|
|
513
|
+
const result = compileSource("@caps(exec /tesla/*)\njob q { find /world/x | action /tesla/honk }");
|
|
514
|
+
expect(result.diagnostics.some(d => d.code === "ASH_READONLY_VIOLATION")).toBe(false);
|
|
515
|
+
expect(result.diagnostics.some(d => d.code === "ASH_MIXED_SECURITY")).toBe(false);
|
|
516
|
+
});
|
|
517
|
+
|
|
518
|
+
// ── action hardening: uncapped, amplification, cross-provider ──
|
|
519
|
+
|
|
520
|
+
it("action without @caps → ASH_UNCAPPED_ACTION error", () => {
|
|
521
|
+
const result = compileSource("job q { find /world/x | action /tesla/unlock }");
|
|
522
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_ACTION")).toBe(true);
|
|
523
|
+
});
|
|
524
|
+
|
|
525
|
+
it("action with @caps → no ASH_UNCAPPED_ACTION", () => {
|
|
526
|
+
const result = compileSource("@caps(exec /tesla/*)\njob q { find /world/x | action /tesla/unlock }");
|
|
527
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_ACTION")).toBe(false);
|
|
528
|
+
});
|
|
529
|
+
|
|
530
|
+
it("action in fanout without @caps → ASH_UNCAPPED_ACTION", () => {
|
|
531
|
+
const result = compileSource("job q { find /world/x | fanout { action /tesla/honk, action /tesla/flash } }");
|
|
532
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_ACTION")).toBe(true);
|
|
533
|
+
});
|
|
534
|
+
|
|
535
|
+
// ── amplification: caps+budget gating ──
|
|
536
|
+
|
|
537
|
+
it("multi-action with @caps but no @budget → ASH_ACTION_AMPLIFICATION error", () => {
|
|
538
|
+
const result = compileSource("@caps(exec /tesla/*)\njob q { find /world/x | fanout { action /tesla/honk, action /tesla/flash } }");
|
|
539
|
+
const diag = result.diagnostics.find(d => d.code === "ASH_ACTION_AMPLIFICATION");
|
|
540
|
+
expect(diag).toBeDefined();
|
|
541
|
+
expect(diag!.severity).not.toBe("warning"); // error (undefined severity = error)
|
|
542
|
+
});
|
|
543
|
+
|
|
544
|
+
it("multi-action with @caps + @budget → ASH_ACTION_AMPLIFICATION warning (allowed)", () => {
|
|
545
|
+
const result = compileSource("@caps(exec /tesla/*)\n@budget(actions 5)\njob q { find /world/x | fanout { action /tesla/honk, action /tesla/flash } }");
|
|
546
|
+
const diag = result.diagnostics.find(d => d.code === "ASH_ACTION_AMPLIFICATION");
|
|
547
|
+
expect(diag).toBeDefined();
|
|
548
|
+
expect(diag!.severity).toBe("warning");
|
|
549
|
+
});
|
|
550
|
+
|
|
551
|
+
it("single action in job → no amplification diagnostic", () => {
|
|
552
|
+
const result = compileSource("@caps(exec /tesla/*)\njob q { find /world/x | action /tesla/honk }");
|
|
553
|
+
expect(result.diagnostics.some(d => d.code === "ASH_ACTION_AMPLIFICATION")).toBe(false);
|
|
554
|
+
});
|
|
555
|
+
|
|
556
|
+
it("sequential multi-action with @caps no @budget → ASH_ACTION_AMPLIFICATION error", () => {
|
|
557
|
+
const result = compileSource("@caps(exec /tesla/*)\njob q { find /world/x | action /tesla/unlock | action /tesla/start }");
|
|
558
|
+
const diag = result.diagnostics.find(d => d.code === "ASH_ACTION_AMPLIFICATION");
|
|
559
|
+
expect(diag).toBeDefined();
|
|
560
|
+
expect(diag!.severity).not.toBe("warning");
|
|
561
|
+
});
|
|
562
|
+
|
|
563
|
+
it("sequential multi-action with @caps + @budget → warning", () => {
|
|
564
|
+
const result = compileSource("@caps(exec /tesla/*)\n@budget(actions 3)\njob q { find /world/x | action /tesla/unlock | action /tesla/start }");
|
|
565
|
+
const diag = result.diagnostics.find(d => d.code === "ASH_ACTION_AMPLIFICATION");
|
|
566
|
+
expect(diag).toBeDefined();
|
|
567
|
+
expect(diag!.severity).toBe("warning");
|
|
568
|
+
});
|
|
569
|
+
|
|
570
|
+
// ── cross-provider: caps+budget gating ──
|
|
571
|
+
|
|
572
|
+
it("cross-provider with @caps but no @budget → ASH_CROSS_PROVIDER_ACTION error", () => {
|
|
573
|
+
const result = compileSource(
|
|
574
|
+
"@caps(exec /tesla/* exec /homeassistant/*)\njob q { find /world/x | fanout { action /tesla/unlock, action /homeassistant/turn_off } }",
|
|
575
|
+
);
|
|
576
|
+
const diag = result.diagnostics.find(d => d.code === "ASH_CROSS_PROVIDER_ACTION");
|
|
577
|
+
expect(diag).toBeDefined();
|
|
578
|
+
expect(diag!.severity).not.toBe("warning");
|
|
579
|
+
});
|
|
580
|
+
|
|
581
|
+
it("cross-provider with @caps + @budget → ASH_CROSS_PROVIDER_ACTION warning (allowed)", () => {
|
|
582
|
+
const result = compileSource(
|
|
583
|
+
"@caps(exec /tesla/* exec /homeassistant/*)\n@budget(actions 5)\njob q { find /world/x | fanout { action /tesla/unlock, action /homeassistant/turn_off } }",
|
|
584
|
+
);
|
|
585
|
+
const diag = result.diagnostics.find(d => d.code === "ASH_CROSS_PROVIDER_ACTION");
|
|
586
|
+
expect(diag).toBeDefined();
|
|
587
|
+
expect(diag!.severity).toBe("warning");
|
|
588
|
+
});
|
|
589
|
+
|
|
590
|
+
it("same-provider multi-action → no cross-provider error", () => {
|
|
591
|
+
const result = compileSource("@caps(exec /tesla/*)\n@budget(actions 5)\njob q { find /world/x | fanout { action /tesla/honk, action /tesla/flash } }");
|
|
592
|
+
expect(result.diagnostics.some(d => d.code === "ASH_CROSS_PROVIDER_ACTION")).toBe(false);
|
|
593
|
+
});
|
|
594
|
+
|
|
595
|
+
// ── smart-home scenarios ──
|
|
596
|
+
|
|
597
|
+
it("morning routine: 3 HA actions with caps+budget → warnings only", () => {
|
|
598
|
+
const src = [
|
|
599
|
+
"@caps(exec /homeassistant/*)",
|
|
600
|
+
"@budget(actions 5)",
|
|
601
|
+
"job morning {",
|
|
602
|
+
" find /homeassistant/entities",
|
|
603
|
+
" | fanout {",
|
|
604
|
+
" action /homeassistant/.actions/light.turn_on,",
|
|
605
|
+
" action /homeassistant/.actions/climate.set_temperature,",
|
|
606
|
+
" action /homeassistant/.actions/scene.activate",
|
|
607
|
+
" }",
|
|
608
|
+
"}",
|
|
609
|
+
].join("\n");
|
|
610
|
+
const result = compileSource(src);
|
|
611
|
+
const errors = result.diagnostics.filter(d => !d.severity || d.severity === "error");
|
|
612
|
+
const actionErrors = errors.filter(d => d.code === "ASH_ACTION_AMPLIFICATION" || d.code === "ASH_CROSS_PROVIDER_ACTION");
|
|
613
|
+
expect(actionErrors).toHaveLength(0);
|
|
614
|
+
// But should have amplification warning
|
|
615
|
+
expect(result.diagnostics.some(d => d.code === "ASH_ACTION_AMPLIFICATION" && d.severity === "warning")).toBe(true);
|
|
616
|
+
});
|
|
617
|
+
|
|
618
|
+
it("goodnight: 5 actions across 2 providers with caps+budget → warnings only", () => {
|
|
619
|
+
const src = [
|
|
620
|
+
"@caps(exec /homeassistant/* exec /tesla/*)",
|
|
621
|
+
"@budget(actions 10)",
|
|
622
|
+
"job goodnight {",
|
|
623
|
+
" find /homeassistant/entities",
|
|
624
|
+
" | fanout {",
|
|
625
|
+
" action /homeassistant/.actions/light.turn_off,",
|
|
626
|
+
" action /homeassistant/.actions/cover.close,",
|
|
627
|
+
" action /homeassistant/.actions/climate.set_temperature,",
|
|
628
|
+
" action /tesla/.actions/lock,",
|
|
629
|
+
" action /tesla/.actions/sentry-on",
|
|
630
|
+
" }",
|
|
631
|
+
"}",
|
|
632
|
+
].join("\n");
|
|
633
|
+
const result = compileSource(src);
|
|
634
|
+
const errors = result.diagnostics.filter(d => !d.severity || d.severity === "error");
|
|
635
|
+
const actionErrors = errors.filter(d => d.code === "ASH_ACTION_AMPLIFICATION" || d.code === "ASH_CROSS_PROVIDER_ACTION" || d.code === "ASH_UNCAPPED_ACTION");
|
|
636
|
+
expect(actionErrors).toHaveLength(0);
|
|
637
|
+
// Should have both amplification and cross-provider warnings
|
|
638
|
+
expect(result.diagnostics.some(d => d.code === "ASH_ACTION_AMPLIFICATION" && d.severity === "warning")).toBe(true);
|
|
639
|
+
expect(result.diagnostics.some(d => d.code === "ASH_CROSS_PROVIDER_ACTION" && d.severity === "warning")).toBe(true);
|
|
640
|
+
});
|
|
641
|
+
|
|
642
|
+
it("find-only job without @caps → no ASH_UNCAPPED_ACTION (read-only jobs unaffected)", () => {
|
|
643
|
+
const result = compileSource("job q { find /world/x | count }");
|
|
644
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_ACTION")).toBe(false);
|
|
645
|
+
});
|
|
646
|
+
|
|
647
|
+
// Edge
|
|
648
|
+
it("@retry(0) → no retry, single execution", async () => {
|
|
649
|
+
let callCount = 0;
|
|
650
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
651
|
+
const origRead = world.read.bind(world);
|
|
652
|
+
(world as any).read = (path: string) => {
|
|
653
|
+
callCount++;
|
|
654
|
+
return origRead(path);
|
|
655
|
+
};
|
|
656
|
+
const logger = makeLogger();
|
|
657
|
+
const ast = parser.parse(lexer.tokenize("@retry(0)\njob q { find /world/d | save /world/out }"));
|
|
658
|
+
const prog = compiler.compile(ast);
|
|
659
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
660
|
+
expect(callCount).toBe(1);
|
|
661
|
+
});
|
|
662
|
+
|
|
663
|
+
it("@retry without arg → default 3 retries", async () => {
|
|
664
|
+
let writeCount = 0;
|
|
665
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
666
|
+
world.write = () => {
|
|
667
|
+
writeCount++;
|
|
668
|
+
throw new Error("fail");
|
|
669
|
+
};
|
|
670
|
+
const logger = makeLogger();
|
|
671
|
+
const ast = parser.parse(lexer.tokenize("@retry\njob q { find /world/d | save /world/out }"));
|
|
672
|
+
const prog = compiler.compile(ast);
|
|
673
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
674
|
+
expect(writeCount).toBe(3); // default 3 attempts
|
|
675
|
+
});
|
|
676
|
+
|
|
677
|
+
it("timeout wraps each retry attempt (not total time)", async () => {
|
|
678
|
+
// Each attempt takes ~10ms, timeout 100ms per attempt → should work
|
|
679
|
+
let callCount = 0;
|
|
680
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
681
|
+
const origRead = world.read.bind(world);
|
|
682
|
+
(world as any).read = (path: string) => {
|
|
683
|
+
callCount++;
|
|
684
|
+
if (callCount <= 1) throw new Error("transient");
|
|
685
|
+
return origRead(path);
|
|
686
|
+
};
|
|
687
|
+
const logger = makeLogger();
|
|
688
|
+
const ast = parser.parse(lexer.tokenize("@retry(3)\n@timeout(100)\njob q { find /world/d | save /world/out }"));
|
|
689
|
+
const prog = compiler.compile(ast);
|
|
690
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
691
|
+
expect(result.status).toBe("ok");
|
|
692
|
+
});
|
|
693
|
+
|
|
694
|
+
it("exponential backoff capped at 30s (verify calculation, not real wait)", () => {
|
|
695
|
+
// Test the backoff calculation logic
|
|
696
|
+
// backoff = min(1000 * 2^attempt, 30000)
|
|
697
|
+
const calc = (attempt: number) => Math.min(1000 * Math.pow(2, attempt), 30000);
|
|
698
|
+
expect(calc(0)).toBe(1000);
|
|
699
|
+
expect(calc(1)).toBe(2000);
|
|
700
|
+
expect(calc(4)).toBe(16000);
|
|
701
|
+
expect(calc(5)).toBe(30000); // capped
|
|
702
|
+
expect(calc(10)).toBe(30000); // capped
|
|
703
|
+
});
|
|
704
|
+
|
|
705
|
+
// Security
|
|
706
|
+
it("timeout prevents infinite hang (mechanism exists)", async () => {
|
|
707
|
+
// Verify that timeout annotation is compiled into the job execution.
|
|
708
|
+
// Synchronous code can't be interrupted by Promise.race, but the mechanism
|
|
709
|
+
// is in place for async stages. We verify the annotation is read correctly.
|
|
710
|
+
const ast = parser.parse(lexer.tokenize("@timeout(100)\njob q { find /world/d | save /world/out }"));
|
|
711
|
+
const prog = compiler.compile(ast);
|
|
712
|
+
expect(prog.jobs).toHaveLength(1);
|
|
713
|
+
// The job should complete normally when fast
|
|
714
|
+
const ctx = makeCtx({ "/world/d": [{ x: 1 }] });
|
|
715
|
+
const result = await prog.jobs[0].execute(ctx);
|
|
716
|
+
expect(result.status).toBe("ok");
|
|
717
|
+
});
|
|
718
|
+
|
|
719
|
+
// DataLoss
|
|
720
|
+
it("retry + save: each retry re-executes full pipeline", async () => {
|
|
721
|
+
let saveCount = 0;
|
|
722
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
723
|
+
const origRead = world.read.bind(world);
|
|
724
|
+
let readCount = 0;
|
|
725
|
+
(world as any).read = (path: string) => {
|
|
726
|
+
readCount++;
|
|
727
|
+
if (readCount <= 1) throw new Error("transient");
|
|
728
|
+
return origRead(path);
|
|
729
|
+
};
|
|
730
|
+
const origWrite = world.write.bind(world);
|
|
731
|
+
world.write = (path: string, d: unknown[]) => {
|
|
732
|
+
saveCount++;
|
|
733
|
+
origWrite(path, d);
|
|
734
|
+
};
|
|
735
|
+
const logger = makeLogger();
|
|
736
|
+
const ast = parser.parse(lexer.tokenize("@retry(3)\njob q { find /world/d | save /world/out }"));
|
|
737
|
+
const prog = compiler.compile(ast);
|
|
738
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
739
|
+
expect(result.status).toBe("ok");
|
|
740
|
+
// save called once on successful attempt
|
|
741
|
+
expect(saveCount).toBe(1);
|
|
742
|
+
});
|
|
743
|
+
});
|
|
744
|
+
|
|
745
|
+
describe("Phase 12: Cap Runtime Enhancement", () => {
|
|
746
|
+
// Happy
|
|
747
|
+
it("save with matching cap → ok", async () => {
|
|
748
|
+
const { result } = await run(
|
|
749
|
+
"job q { find /world/src | save /world/src }",
|
|
750
|
+
{ "/world/src": [{ x: 1 }] },
|
|
751
|
+
new Set(["/world/src"]),
|
|
752
|
+
);
|
|
753
|
+
expect(result.status).toBe("ok");
|
|
754
|
+
});
|
|
755
|
+
|
|
756
|
+
it("publish with matching cap → ok", async () => {
|
|
757
|
+
const { result } = await run(
|
|
758
|
+
"job q { find /world/src | publish /topic/out }",
|
|
759
|
+
{ "/world/src": [{ x: 1 }] },
|
|
760
|
+
new Set(["/world/src", "/topic/out"]),
|
|
761
|
+
);
|
|
762
|
+
expect(result.status).toBe("ok");
|
|
763
|
+
});
|
|
764
|
+
|
|
765
|
+
it("wildcard cap * → save/publish ok", async () => {
|
|
766
|
+
const { result } = await run(
|
|
767
|
+
"job q { find /world/src | save /world/dst }",
|
|
768
|
+
{ "/world/src": [{ x: 1 }] },
|
|
769
|
+
new Set(["*"]),
|
|
770
|
+
);
|
|
771
|
+
expect(result.status).toBe("ok");
|
|
772
|
+
});
|
|
773
|
+
|
|
774
|
+
// Bad
|
|
775
|
+
it("save without cap → ASH_CAP_DENIED format error", async () => {
|
|
776
|
+
const { result } = await run(
|
|
777
|
+
"job q { find /world/src | save /world/restricted }",
|
|
778
|
+
{ "/world/src": [{ x: 1 }] },
|
|
779
|
+
new Set(["/world/src"]),
|
|
780
|
+
);
|
|
781
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
782
|
+
expect(result.errors[0]).toMatch(/permission denied/i);
|
|
783
|
+
});
|
|
784
|
+
|
|
785
|
+
it("publish without cap → ASH_CAP_DENIED format error", async () => {
|
|
786
|
+
const { result } = await run(
|
|
787
|
+
"job q { find /world/src | publish /topic/secret }",
|
|
788
|
+
{ "/world/src": [{ x: 1 }] },
|
|
789
|
+
new Set(["/world/src"]),
|
|
790
|
+
);
|
|
791
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
792
|
+
expect(result.errors[0]).toMatch(/permission denied/i);
|
|
793
|
+
});
|
|
794
|
+
|
|
795
|
+
it("save to /world/secret with cap /world/public → denied", async () => {
|
|
796
|
+
const { result } = await run(
|
|
797
|
+
"job q { find /world/public | save /world/secret }",
|
|
798
|
+
{ "/world/public": [{ x: 1 }] },
|
|
799
|
+
new Set(["/world/public"]),
|
|
800
|
+
);
|
|
801
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
802
|
+
});
|
|
803
|
+
|
|
804
|
+
// Edge
|
|
805
|
+
it("empty caps set → no restriction (save ok)", async () => {
|
|
806
|
+
const { result } = await run(
|
|
807
|
+
"job q { find /world/src | save /world/anywhere }",
|
|
808
|
+
{ "/world/src": [{ x: 1 }] },
|
|
809
|
+
new Set<string>(),
|
|
810
|
+
);
|
|
811
|
+
expect(result.status).toBe("ok");
|
|
812
|
+
});
|
|
813
|
+
|
|
814
|
+
it("cap check uses path prefix (3-segment match)", async () => {
|
|
815
|
+
// /world/src/sub should match cap /world/src
|
|
816
|
+
const { result } = await run(
|
|
817
|
+
"job q { find /world/src/sub | save /world/src/other }",
|
|
818
|
+
{ "/world/src/sub": [{ x: 1 }] },
|
|
819
|
+
new Set(["/world/src"]),
|
|
820
|
+
);
|
|
821
|
+
expect(result.status).toBe("ok");
|
|
822
|
+
});
|
|
823
|
+
|
|
824
|
+
// Security
|
|
825
|
+
it("publish cap rejection prevents data exfiltration", async () => {
|
|
826
|
+
const { result, ctx } = await run(
|
|
827
|
+
"job q { find /world/src | publish /topic/external }",
|
|
828
|
+
{ "/world/src": [{ secret: "data" }] },
|
|
829
|
+
new Set(["/world/src"]),
|
|
830
|
+
);
|
|
831
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
832
|
+
// Verify nothing was published
|
|
833
|
+
expect(ctx.world.published["/topic/external"]).toBeUndefined();
|
|
834
|
+
});
|
|
835
|
+
|
|
836
|
+
it("save cap rejection prevents unauthorized write", async () => {
|
|
837
|
+
const { result, ctx } = await run(
|
|
838
|
+
"job q { find /world/src | save /world/restricted }",
|
|
839
|
+
{ "/world/src": [{ x: 1 }] },
|
|
840
|
+
new Set(["/world/src"]),
|
|
841
|
+
);
|
|
842
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
843
|
+
expect(ctx.world.written["/world/restricted"]).toBeUndefined();
|
|
844
|
+
});
|
|
845
|
+
|
|
846
|
+
// DataLoss
|
|
847
|
+
it("cap rejection aborts before write happens", async () => {
|
|
848
|
+
const { ctx } = await run(
|
|
849
|
+
"job q { find /world/src | save /world/restricted }",
|
|
850
|
+
{ "/world/src": [{ x: 1 }] },
|
|
851
|
+
new Set(["/world/src"]),
|
|
852
|
+
);
|
|
853
|
+
expect(ctx.world.written["/world/restricted"]).toBeUndefined();
|
|
854
|
+
});
|
|
855
|
+
});
|
|
856
|
+
|
|
857
|
+
describe("Phase 13: Structured Execution Report", () => {
|
|
858
|
+
// Happy
|
|
859
|
+
it("single stage job report contains 1 stage metric with correct name", async () => {
|
|
860
|
+
const { result } = await run("job q { find /world/d }", { "/world/d": [{ x: 1 }] });
|
|
861
|
+
const report = result as JobReport;
|
|
862
|
+
expect(report.stages).toHaveLength(1);
|
|
863
|
+
expect(report.stages[0].name).toBe("find");
|
|
864
|
+
});
|
|
865
|
+
|
|
866
|
+
it("multi-stage pipeline report has correct stage count and order", async () => {
|
|
867
|
+
const { result } = await run("job q { find /world/d | where x > 0 | save /world/out }", { "/world/d": [{ x: 1 }] });
|
|
868
|
+
const report = result as JobReport;
|
|
869
|
+
expect(report.stages).toHaveLength(3);
|
|
870
|
+
expect(report.stages[0].name).toBe("find");
|
|
871
|
+
expect(report.stages[1].name).toBe("where");
|
|
872
|
+
expect(report.stages[2].name).toBe("save");
|
|
873
|
+
});
|
|
874
|
+
|
|
875
|
+
it("stage metric contains inputCount and outputCount", async () => {
|
|
876
|
+
const { result } = await run("job q { find /world/d | where x > 0 | save /world/out }", { "/world/d": [{ x: 1 }, { x: 0 }] });
|
|
877
|
+
const report = result as JobReport;
|
|
878
|
+
expect(report.stages[0].inputCount).toBe(0); // find starts from nothing
|
|
879
|
+
expect(report.stages[0].outputCount).toBe(2);
|
|
880
|
+
expect(report.stages[1].inputCount).toBe(2);
|
|
881
|
+
expect(report.stages[1].outputCount).toBe(1); // where filters
|
|
882
|
+
});
|
|
883
|
+
|
|
884
|
+
it("JobReport contains jobName matching declaration", async () => {
|
|
885
|
+
const { result } = await run("job myJob { find /world/d }", { "/world/d": [{ x: 1 }] });
|
|
886
|
+
const report = result as JobReport;
|
|
887
|
+
expect(report.jobName).toBe("myJob");
|
|
888
|
+
});
|
|
889
|
+
|
|
890
|
+
it("ProgramReport aggregates multiple job reports", async () => {
|
|
891
|
+
const ast = parser.parse(lexer.tokenize("job a { find /world/d }\njob b { find /world/d }"));
|
|
892
|
+
const prog = compiler.compile(ast);
|
|
893
|
+
const ctx = makeCtx({ "/world/d": [{ x: 1 }] });
|
|
894
|
+
const reports: JobReport[] = [];
|
|
895
|
+
for (const job of prog.jobs) {
|
|
896
|
+
reports.push(await job.execute(ctx) as JobReport);
|
|
897
|
+
}
|
|
898
|
+
const programReport: ProgramReport = {
|
|
899
|
+
jobs: reports,
|
|
900
|
+
totalDurationMs: reports.reduce((sum, r) => sum + r.totalDurationMs, 0),
|
|
901
|
+
};
|
|
902
|
+
expect(programReport.jobs).toHaveLength(2);
|
|
903
|
+
expect(programReport.totalDurationMs).toBeGreaterThanOrEqual(0);
|
|
904
|
+
});
|
|
905
|
+
|
|
906
|
+
it("totalDurationMs >= 0", async () => {
|
|
907
|
+
const { result } = await run("job q { find /world/d }", { "/world/d": [{ x: 1 }] });
|
|
908
|
+
const report = result as JobReport;
|
|
909
|
+
expect(report.totalDurationMs).toBeGreaterThanOrEqual(0);
|
|
910
|
+
});
|
|
911
|
+
|
|
912
|
+
// Bad
|
|
913
|
+
it("failed stage StageMetrics contains error string", async () => {
|
|
914
|
+
const caps = new Set(["/world/d"]);
|
|
915
|
+
const { result } = await run("job q { find /world/d | save /world/restricted }", { "/world/d": [{ x: 1 }] }, caps);
|
|
916
|
+
const report = result as JobReport;
|
|
917
|
+
// save denied → partial, error in save stage is recorded via errors array (not stage error since save pushes to errors)
|
|
918
|
+
// The save stage doesn't throw — it pushes an error. So stages still complete.
|
|
919
|
+
expect(report.stages.length).toBeGreaterThan(0);
|
|
920
|
+
});
|
|
921
|
+
|
|
922
|
+
it("error job report status is 'error', stages up to failure point", async () => {
|
|
923
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
924
|
+
world.write = () => { throw new Error("disk full"); };
|
|
925
|
+
const logger = makeLogger();
|
|
926
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | save /world/out }"));
|
|
927
|
+
const prog = compiler.compile(ast);
|
|
928
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger }) as JobReport;
|
|
929
|
+
expect(result.status).toBe("error");
|
|
930
|
+
// Should have stages up to and including the failed one
|
|
931
|
+
expect(result.stages.length).toBeGreaterThanOrEqual(1);
|
|
932
|
+
const failedStage = result.stages.find(s => s.error);
|
|
933
|
+
expect(failedStage).toBeDefined();
|
|
934
|
+
});
|
|
935
|
+
|
|
936
|
+
// Edge
|
|
937
|
+
it("empty pipeline job → 0 stages in report", async () => {
|
|
938
|
+
// An empty pipeline is valid AST-wise (no stages)
|
|
939
|
+
const ast = parser.parse(lexer.tokenize("job q { }"));
|
|
940
|
+
const prog = compiler.compile(ast);
|
|
941
|
+
const ctx = makeCtx();
|
|
942
|
+
const result = await prog.jobs[0].execute(ctx) as JobReport;
|
|
943
|
+
expect(result.stages).toHaveLength(0);
|
|
944
|
+
expect(result.status).toBe("ok");
|
|
945
|
+
});
|
|
946
|
+
|
|
947
|
+
it("partial job (tee side-write fail) → all stage metrics present", async () => {
|
|
948
|
+
const world = makeWorld({ "/world/src": [{ x: 1 }] });
|
|
949
|
+
const origWrite = world.write.bind(world);
|
|
950
|
+
world.write = (path: string, d: unknown[]) => {
|
|
951
|
+
if (path === "/world/backup") throw new Error("tee fail");
|
|
952
|
+
origWrite(path, d);
|
|
953
|
+
};
|
|
954
|
+
const logger = makeLogger();
|
|
955
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/src | tee /world/backup | save /world/main }"));
|
|
956
|
+
const prog = compiler.compile(ast);
|
|
957
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger }) as JobReport;
|
|
958
|
+
expect(result.status).toBe("partial");
|
|
959
|
+
expect(result.stages).toHaveLength(3);
|
|
960
|
+
});
|
|
961
|
+
|
|
962
|
+
it("durationMs per stage is non-negative", async () => {
|
|
963
|
+
const { result } = await run("job q { find /world/d | map x | save /world/out }", { "/world/d": [{ x: 1 }] });
|
|
964
|
+
const report = result as JobReport;
|
|
965
|
+
for (const stage of report.stages) {
|
|
966
|
+
expect(stage.durationMs).toBeGreaterThanOrEqual(0);
|
|
967
|
+
}
|
|
968
|
+
});
|
|
969
|
+
|
|
970
|
+
it("JobReport backward compatible (has status, recordCount, errors)", async () => {
|
|
971
|
+
const { result } = await run("job q { find /world/d }", { "/world/d": [{ x: 1 }] });
|
|
972
|
+
const report = result as JobReport;
|
|
973
|
+
expect(report.status).toBeDefined();
|
|
974
|
+
expect(report.recordCount).toBeDefined();
|
|
975
|
+
expect(report.errors).toBeDefined();
|
|
976
|
+
// Also has new fields
|
|
977
|
+
expect(report.jobName).toBeDefined();
|
|
978
|
+
expect(report.stages).toBeDefined();
|
|
979
|
+
expect(report.totalDurationMs).toBeDefined();
|
|
980
|
+
});
|
|
981
|
+
|
|
982
|
+
// DataLoss
|
|
983
|
+
it("pipeline abort still provides metrics for executed stages", async () => {
|
|
984
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
985
|
+
world.write = () => { throw new Error("fail"); };
|
|
986
|
+
const logger = makeLogger();
|
|
987
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | save /world/out }"));
|
|
988
|
+
const prog = compiler.compile(ast);
|
|
989
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger }) as JobReport;
|
|
990
|
+
expect(result.status).toBe("error");
|
|
991
|
+
// find stage should have metrics even though save failed
|
|
992
|
+
expect(result.stages.length).toBeGreaterThanOrEqual(1);
|
|
993
|
+
expect(result.stages[0].name).toBe("find");
|
|
994
|
+
expect(result.stages[0].durationMs).toBeGreaterThanOrEqual(0);
|
|
995
|
+
});
|
|
996
|
+
});
|
|
997
|
+
|
|
998
|
+
describe("Phase 14: Structured Log Emitter", () => {
|
|
999
|
+
function makeEmitLogger(): JobLogger & {
|
|
1000
|
+
entries: { stage: string; action: string; detail?: unknown }[];
|
|
1001
|
+
emitted: { level: string; message: string; context?: LogContext }[];
|
|
1002
|
+
} {
|
|
1003
|
+
const entries: { stage: string; action: string; detail?: unknown }[] = [];
|
|
1004
|
+
const emitted: { level: string; message: string; context?: LogContext }[] = [];
|
|
1005
|
+
return {
|
|
1006
|
+
entries,
|
|
1007
|
+
emitted,
|
|
1008
|
+
log(stage: string, action: string, detail?: unknown) {
|
|
1009
|
+
entries.push({ stage, action, detail });
|
|
1010
|
+
},
|
|
1011
|
+
emit(level: "debug" | "info" | "warn" | "error", message: string, context?: LogContext) {
|
|
1012
|
+
emitted.push({ level, message, context });
|
|
1013
|
+
},
|
|
1014
|
+
};
|
|
1015
|
+
}
|
|
1016
|
+
|
|
1017
|
+
function makeEmitCtx(data: Record<string, unknown[]> = {}): JobContext & {
|
|
1018
|
+
world: ReturnType<typeof makeWorld>;
|
|
1019
|
+
logger: ReturnType<typeof makeEmitLogger>;
|
|
1020
|
+
} {
|
|
1021
|
+
const world = makeWorld(data);
|
|
1022
|
+
const logger = makeEmitLogger();
|
|
1023
|
+
return { world, caps: new Set<string>(["*"]), logger };
|
|
1024
|
+
}
|
|
1025
|
+
|
|
1026
|
+
// Happy
|
|
1027
|
+
it("logger.emit called with level + message + context", async () => {
|
|
1028
|
+
const ctx = makeEmitCtx({ "/world/d": [{ x: 1 }] });
|
|
1029
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d }"));
|
|
1030
|
+
const prog = compiler.compile(ast);
|
|
1031
|
+
await prog.jobs[0].execute(ctx);
|
|
1032
|
+
expect(ctx.logger.emitted.length).toBeGreaterThan(0);
|
|
1033
|
+
const entry = ctx.logger.emitted[0];
|
|
1034
|
+
expect(entry.level).toBeDefined();
|
|
1035
|
+
expect(entry.message).toBeDefined();
|
|
1036
|
+
});
|
|
1037
|
+
|
|
1038
|
+
it("output uses emit instead of console.log", async () => {
|
|
1039
|
+
const ctx = makeEmitCtx({ "/world/d": [{ x: 1 }] });
|
|
1040
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output "hello" | save /world/out }'));
|
|
1041
|
+
const prog = compiler.compile(ast);
|
|
1042
|
+
await prog.jobs[0].execute(ctx);
|
|
1043
|
+
const outputEmits = ctx.logger.emitted.filter(e => e.message === "hello");
|
|
1044
|
+
expect(outputEmits.length).toBe(1);
|
|
1045
|
+
expect(outputEmits[0].level).toBe("info");
|
|
1046
|
+
});
|
|
1047
|
+
|
|
1048
|
+
it("job execution emit carries jobName", async () => {
|
|
1049
|
+
const ctx = makeEmitCtx({ "/world/d": [{ x: 1 }] });
|
|
1050
|
+
const ast = parser.parse(lexer.tokenize("job myJob { find /world/d }"));
|
|
1051
|
+
const prog = compiler.compile(ast);
|
|
1052
|
+
await prog.jobs[0].execute(ctx);
|
|
1053
|
+
const withJobName = ctx.logger.emitted.filter(e => e.context?.jobName === "myJob");
|
|
1054
|
+
expect(withJobName.length).toBeGreaterThan(0);
|
|
1055
|
+
});
|
|
1056
|
+
|
|
1057
|
+
it("pipeline stage emit carries stageIndex", async () => {
|
|
1058
|
+
const ctx = makeEmitCtx({ "/world/d": [{ x: 1 }] });
|
|
1059
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | save /world/out }"));
|
|
1060
|
+
const prog = compiler.compile(ast);
|
|
1061
|
+
await prog.jobs[0].execute(ctx);
|
|
1062
|
+
const withIndex = ctx.logger.emitted.filter(e => typeof e.context?.stageIndex === "number");
|
|
1063
|
+
expect(withIndex.length).toBeGreaterThan(0);
|
|
1064
|
+
});
|
|
1065
|
+
|
|
1066
|
+
it("multiple jobs emit correct jobName each", async () => {
|
|
1067
|
+
const ctx = makeEmitCtx({ "/world/d": [{ x: 1 }] });
|
|
1068
|
+
const ast = parser.parse(lexer.tokenize("job alpha { find /world/d }\njob beta { find /world/d }"));
|
|
1069
|
+
const prog = compiler.compile(ast);
|
|
1070
|
+
await prog.jobs[0].execute(ctx);
|
|
1071
|
+
await prog.jobs[1].execute(ctx);
|
|
1072
|
+
const alphaEmits = ctx.logger.emitted.filter(e => e.context?.jobName === "alpha");
|
|
1073
|
+
const betaEmits = ctx.logger.emitted.filter(e => e.context?.jobName === "beta");
|
|
1074
|
+
expect(alphaEmits.length).toBeGreaterThan(0);
|
|
1075
|
+
expect(betaEmits.length).toBeGreaterThan(0);
|
|
1076
|
+
});
|
|
1077
|
+
|
|
1078
|
+
// Compat
|
|
1079
|
+
it("logger without emit → fallback to log() (backward compat)", async () => {
|
|
1080
|
+
const logger = makeLogger(); // no emit method
|
|
1081
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1082
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output "test" }'));
|
|
1083
|
+
const prog = compiler.compile(ast);
|
|
1084
|
+
// Should not throw
|
|
1085
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1086
|
+
expect(result.status).toBe("ok");
|
|
1087
|
+
});
|
|
1088
|
+
|
|
1089
|
+
it("existing log(stage, action, detail) calls still work", async () => {
|
|
1090
|
+
const ctx = makeEmitCtx({ "/world/d": [{ x: 1 }] });
|
|
1091
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d }"));
|
|
1092
|
+
const prog = compiler.compile(ast);
|
|
1093
|
+
await prog.jobs[0].execute(ctx);
|
|
1094
|
+
// log() still called for enter/exit
|
|
1095
|
+
expect(ctx.logger.entries.some(e => e.action === "enter")).toBe(true);
|
|
1096
|
+
expect(ctx.logger.entries.some(e => e.action === "exit")).toBe(true);
|
|
1097
|
+
});
|
|
1098
|
+
|
|
1099
|
+
// Edge
|
|
1100
|
+
it("logger.emit throws → pipeline still completes (log fault tolerance)", async () => {
|
|
1101
|
+
const logger: JobLogger = {
|
|
1102
|
+
log() {},
|
|
1103
|
+
emit() { throw new Error("emit crashed"); },
|
|
1104
|
+
};
|
|
1105
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1106
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output "test" | save /world/out }'));
|
|
1107
|
+
const prog = compiler.compile(ast);
|
|
1108
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1109
|
+
expect(result.status).toBe("ok");
|
|
1110
|
+
expect(world.written["/world/out"]).toEqual([{ x: 1 }]);
|
|
1111
|
+
});
|
|
1112
|
+
|
|
1113
|
+
it("context fields undefined → no crash", async () => {
|
|
1114
|
+
const ctx = makeEmitCtx({ "/world/d": [{ x: 1 }] });
|
|
1115
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d }"));
|
|
1116
|
+
const prog = compiler.compile(ast);
|
|
1117
|
+
const result = await prog.jobs[0].execute(ctx);
|
|
1118
|
+
expect(result.status).toBe("ok");
|
|
1119
|
+
// Context may have undefined programId/procId — should not crash
|
|
1120
|
+
});
|
|
1121
|
+
|
|
1122
|
+
// Security
|
|
1123
|
+
it("log detail does not include world data (only metadata)", async () => {
|
|
1124
|
+
const ctx = makeEmitCtx({ "/world/d": [{ secret: "password123" }] });
|
|
1125
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | save /world/out }"));
|
|
1126
|
+
const prog = compiler.compile(ast);
|
|
1127
|
+
await prog.jobs[0].execute(ctx);
|
|
1128
|
+
// Emitted messages should not contain the actual data values
|
|
1129
|
+
const allMessages = ctx.logger.emitted.map(e => JSON.stringify(e));
|
|
1130
|
+
const leaked = allMessages.some(m => m.includes("password123"));
|
|
1131
|
+
expect(leaked).toBe(false);
|
|
1132
|
+
});
|
|
1133
|
+
|
|
1134
|
+
// Data Loss
|
|
1135
|
+
it("log fault does not affect job result (status/recordCount/errors unchanged)", async () => {
|
|
1136
|
+
const logger: JobLogger = {
|
|
1137
|
+
log() {},
|
|
1138
|
+
emit() { throw new Error("emit crashed"); },
|
|
1139
|
+
};
|
|
1140
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }, { x: 2 }] });
|
|
1141
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | save /world/out }"));
|
|
1142
|
+
const prog = compiler.compile(ast);
|
|
1143
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger }) as JobReport;
|
|
1144
|
+
expect(result.status).toBe("ok");
|
|
1145
|
+
expect(result.recordCount).toBe(0); // save drains stream
|
|
1146
|
+
expect(result.errors).toEqual([]);
|
|
1147
|
+
});
|
|
1148
|
+
});
|
|
1149
|
+
|
|
1150
|
+
// ════════════════════════════════════════════════════════════════
|
|
1151
|
+
// Phase 15: Output Event
|
|
1152
|
+
// ════════════════════════════════════════════════════════════════
|
|
1153
|
+
|
|
1154
|
+
describe("Phase 15: Output Event", () => {
|
|
1155
|
+
|
|
1156
|
+
// ── Happy: output operator ──
|
|
1157
|
+
|
|
1158
|
+
it("output top-level statement parses and executes", async () => {
|
|
1159
|
+
const { program, diagnostics } = compileSource('output "hello world"');
|
|
1160
|
+
expect(program).toBeDefined();
|
|
1161
|
+
expect(diagnostics).toEqual([]);
|
|
1162
|
+
const ctx = makeCtx();
|
|
1163
|
+
for (const unit of program!.units) {
|
|
1164
|
+
await unit.execute(ctx);
|
|
1165
|
+
}
|
|
1166
|
+
expect(ctx.logger.entries.some(e => e.stage === "output")).toBe(true);
|
|
1167
|
+
});
|
|
1168
|
+
|
|
1169
|
+
it("output pipeline stage passes stream through", async () => {
|
|
1170
|
+
const data = { "/world/d": [{ x: 1 }, { x: 2 }] };
|
|
1171
|
+
const { result, ctx } = await run('job q { find /world/d | output "checkpoint" | save /world/out }', data);
|
|
1172
|
+
expect(result.status).toBe("ok");
|
|
1173
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 1 }, { x: 2 }]);
|
|
1174
|
+
});
|
|
1175
|
+
|
|
1176
|
+
it("output fires OutputHandler.output() with correct content", async () => {
|
|
1177
|
+
const events: OutputEvent[] = [];
|
|
1178
|
+
const handler: OutputHandler = { output(e) { events.push(e); } };
|
|
1179
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1180
|
+
const logger = makeLogger();
|
|
1181
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger, output: handler };
|
|
1182
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output "msg" | save /world/out }'));
|
|
1183
|
+
const prog = compiler.compile(ast);
|
|
1184
|
+
await prog.jobs[0].execute(ctx);
|
|
1185
|
+
expect(events.length).toBeGreaterThanOrEqual(1);
|
|
1186
|
+
expect(events[0].kind).toBe("text");
|
|
1187
|
+
expect(events[0].content).toBe("msg");
|
|
1188
|
+
});
|
|
1189
|
+
|
|
1190
|
+
it("output top-level fires OutputHandler", async () => {
|
|
1191
|
+
const events: OutputEvent[] = [];
|
|
1192
|
+
const handler: OutputHandler = { output(e) { events.push(e); } };
|
|
1193
|
+
const world = makeWorld();
|
|
1194
|
+
const logger = makeLogger();
|
|
1195
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger, output: handler };
|
|
1196
|
+
const { program } = compileSource('output "top-level msg"');
|
|
1197
|
+
for (const unit of program!.units) {
|
|
1198
|
+
await unit.execute(ctx);
|
|
1199
|
+
}
|
|
1200
|
+
expect(events.some(e => e.content === "top-level msg")).toBe(true);
|
|
1201
|
+
});
|
|
1202
|
+
|
|
1203
|
+
// ── No OutputHandler ──
|
|
1204
|
+
|
|
1205
|
+
it("no OutputHandler → output still works without crash", async () => {
|
|
1206
|
+
const data = { "/world/d": [{ x: 1 }] };
|
|
1207
|
+
const { result } = await run('job q { find /world/d | output "no handler" | save /world/out }', data);
|
|
1208
|
+
expect(result.status).toBe("ok");
|
|
1209
|
+
});
|
|
1210
|
+
|
|
1211
|
+
// ── Fault tolerance ──
|
|
1212
|
+
|
|
1213
|
+
it("OutputHandler.output() throws → pipeline continues", async () => {
|
|
1214
|
+
const handler: OutputHandler = { output() { throw new Error("output boom"); } };
|
|
1215
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }, { x: 2 }] });
|
|
1216
|
+
const logger = makeLogger();
|
|
1217
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger, output: handler };
|
|
1218
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output "crash" | save /world/out }'));
|
|
1219
|
+
const prog = compiler.compile(ast);
|
|
1220
|
+
const result = await prog.jobs[0].execute(ctx) as JobReport;
|
|
1221
|
+
expect(result.status).toBe("ok");
|
|
1222
|
+
expect(world.written["/world/out"]).toEqual([{ x: 1 }, { x: 2 }]);
|
|
1223
|
+
});
|
|
1224
|
+
|
|
1225
|
+
// ── Output fires regardless of stream size (audit via streamSize log) ──
|
|
1226
|
+
|
|
1227
|
+
it("output fires even on empty stream (pass-through, logs streamSize for audit)", async () => {
|
|
1228
|
+
const events: OutputEvent[] = [];
|
|
1229
|
+
const handler: OutputHandler = { output(e) { events.push(e); } };
|
|
1230
|
+
const world = makeWorld({ "/data": [{ v: 0 }] });
|
|
1231
|
+
const logger = makeLogger();
|
|
1232
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger, output: handler };
|
|
1233
|
+
// where filters out all records (strict: 0 != "0"), stream becomes empty
|
|
1234
|
+
const ast = parser.parse(lexer.tokenize('job q { find /data | where v == "0" | output "msg" | save /out }'));
|
|
1235
|
+
const prog = compiler.compile(ast);
|
|
1236
|
+
await prog.jobs[0].execute(ctx);
|
|
1237
|
+
// output fires regardless — provider can audit streamSize from logs
|
|
1238
|
+
expect(events.some(e => e.content === "msg")).toBe(true);
|
|
1239
|
+
});
|
|
1240
|
+
|
|
1241
|
+
it("output with non-empty stream fires handler normally", async () => {
|
|
1242
|
+
const events: OutputEvent[] = [];
|
|
1243
|
+
const handler: OutputHandler = { output(e) { events.push(e); } };
|
|
1244
|
+
const world = makeWorld({ "/data": [{ v: 1 }] });
|
|
1245
|
+
const logger = makeLogger();
|
|
1246
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger, output: handler };
|
|
1247
|
+
const ast = parser.parse(lexer.tokenize('job q { find /data | output "OK" | save /out }'));
|
|
1248
|
+
const prog = compiler.compile(ast);
|
|
1249
|
+
await prog.jobs[0].execute(ctx);
|
|
1250
|
+
expect(events.some(e => e.content === "OK")).toBe(true);
|
|
1251
|
+
});
|
|
1252
|
+
});
|
|
1253
|
+
|
|
1254
|
+
// ════════════════════════════════════════════════════════════════
|
|
1255
|
+
// Phase v3-0: Action Stage + Async Execution
|
|
1256
|
+
// ════════════════════════════════════════════════════════════════
|
|
1257
|
+
|
|
1258
|
+
describe("Phase v3-0: Action Stage", () => {
|
|
1259
|
+
// ── Happy Path ──
|
|
1260
|
+
|
|
1261
|
+
it("action stage calls ctx.world.exec(path, stream, params)", async () => {
|
|
1262
|
+
const data = { "/world/d": [{ x: 1 }, { x: 2 }] };
|
|
1263
|
+
const { ctx } = await run("job q { find /world/d | action /tools/transform | save /world/out }", data);
|
|
1264
|
+
expect(ctx.world.execCalls).toHaveLength(1);
|
|
1265
|
+
expect(ctx.world.execCalls[0].path).toBe("/tools/transform");
|
|
1266
|
+
expect(ctx.world.execCalls[0].input).toEqual([{ x: 1 }, { x: 2 }]);
|
|
1267
|
+
});
|
|
1268
|
+
|
|
1269
|
+
it("action stage returns exec result as new stream", async () => {
|
|
1270
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1271
|
+
world.exec = async (_path: string, _input: unknown[]) => {
|
|
1272
|
+
return [{ transformed: true }];
|
|
1273
|
+
};
|
|
1274
|
+
const logger = makeLogger();
|
|
1275
|
+
const ctx = { world, caps: new Set(["*"]), logger };
|
|
1276
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1277
|
+
const prog = compiler.compile(ast);
|
|
1278
|
+
await prog.jobs[0].execute(ctx);
|
|
1279
|
+
expect(world.written["/world/out"]).toEqual([{ transformed: true }]);
|
|
1280
|
+
});
|
|
1281
|
+
|
|
1282
|
+
it("action with params passes params to exec (no stream merge)", async () => {
|
|
1283
|
+
const data = { "/world/d": [{ x: 1 }] };
|
|
1284
|
+
const { ctx } = await run('job q { find /world/d | action /tools/email { template: "welcome" } | save /world/out }', data);
|
|
1285
|
+
expect(ctx.world.execCalls[0].params).toEqual({ template: "welcome" });
|
|
1286
|
+
// Breaking change: inline params → action receives ONLY params, not stream records
|
|
1287
|
+
expect(ctx.world.execCalls[0].input).toEqual([]);
|
|
1288
|
+
});
|
|
1289
|
+
|
|
1290
|
+
it("action with number param passes correctly (no stream merge)", async () => {
|
|
1291
|
+
const data = { "/world/d": [{ x: 1 }] };
|
|
1292
|
+
const { ctx } = await run("job q { find /world/d | action /tools/score { threshold: 80 } | save /world/out }", data);
|
|
1293
|
+
expect(ctx.world.execCalls[0].params).toEqual({ threshold: 80 });
|
|
1294
|
+
expect(ctx.world.execCalls[0].input).toEqual([]);
|
|
1295
|
+
});
|
|
1296
|
+
|
|
1297
|
+
it("action with json() param passes complex object to exec", async () => {
|
|
1298
|
+
const { ctx } = await run(
|
|
1299
|
+
'job q { action /tools/agent-run { task: "read and reply", tools: json([{"path": "/telegram", "ops": ["read"]}]), budget: json({"max_rounds": 4}) } }',
|
|
1300
|
+
);
|
|
1301
|
+
expect(ctx.world.execCalls).toHaveLength(1);
|
|
1302
|
+
expect(ctx.world.execCalls[0].params).toEqual({
|
|
1303
|
+
task: "read and reply",
|
|
1304
|
+
tools: [{ path: "/telegram", ops: ["read"] }],
|
|
1305
|
+
budget: { max_rounds: 4 },
|
|
1306
|
+
});
|
|
1307
|
+
});
|
|
1308
|
+
|
|
1309
|
+
it("async pipeline executes stages sequentially", async () => {
|
|
1310
|
+
const order: string[] = [];
|
|
1311
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1312
|
+
const origExec = world.exec!.bind(world);
|
|
1313
|
+
world.exec = async (path: string, input: unknown[], params?: Record<string, unknown>) => {
|
|
1314
|
+
order.push(`exec:${path}`);
|
|
1315
|
+
return origExec(path, input, params);
|
|
1316
|
+
};
|
|
1317
|
+
const origWrite = world.write.bind(world);
|
|
1318
|
+
world.write = (path: string, d: unknown[]) => {
|
|
1319
|
+
order.push(`write:${path}`);
|
|
1320
|
+
origWrite(path, d);
|
|
1321
|
+
};
|
|
1322
|
+
const logger = makeLogger();
|
|
1323
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1324
|
+
const prog = compiler.compile(ast);
|
|
1325
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1326
|
+
expect(order).toEqual(["exec:/tools/t", "write:/world/out"]);
|
|
1327
|
+
});
|
|
1328
|
+
|
|
1329
|
+
it("existing sync stages still work after async conversion", async () => {
|
|
1330
|
+
const data = { "/world/d": [{ name: "Alice" }, { name: "Bob" }] };
|
|
1331
|
+
const { ctx } = await run("job q { find /world/d | map name | save /world/out }", data);
|
|
1332
|
+
expect(ctx.world.written["/world/out"]).toEqual(["Alice", "Bob"]);
|
|
1333
|
+
});
|
|
1334
|
+
|
|
1335
|
+
it("action as source (no find), receives empty stream", async () => {
|
|
1336
|
+
const world = makeWorld();
|
|
1337
|
+
world.exec = async (_path: string, input: unknown[]) => {
|
|
1338
|
+
// Source action: generates data from nothing
|
|
1339
|
+
expect(input).toEqual([]);
|
|
1340
|
+
return [{ generated: true }];
|
|
1341
|
+
};
|
|
1342
|
+
const logger = makeLogger();
|
|
1343
|
+
const ast = parser.parse(lexer.tokenize("job q { action /tools/generate | save /world/out }"));
|
|
1344
|
+
const prog = compiler.compile(ast);
|
|
1345
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1346
|
+
expect(world.written["/world/out"]).toEqual([{ generated: true }]);
|
|
1347
|
+
});
|
|
1348
|
+
|
|
1349
|
+
it("multiple action stages chained: action | action | action", async () => {
|
|
1350
|
+
let callCount = 0;
|
|
1351
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1352
|
+
world.exec = async (_path: string, input: unknown[]) => {
|
|
1353
|
+
callCount++;
|
|
1354
|
+
return input.map((item: any) => ({ ...item, step: callCount }));
|
|
1355
|
+
};
|
|
1356
|
+
const logger = makeLogger();
|
|
1357
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /a | action /b | action /c | save /world/out }"));
|
|
1358
|
+
const prog = compiler.compile(ast);
|
|
1359
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1360
|
+
expect(callCount).toBe(3);
|
|
1361
|
+
expect(world.written["/world/out"]).toEqual([{ x: 1, step: 3 }]);
|
|
1362
|
+
});
|
|
1363
|
+
|
|
1364
|
+
// ── Bad Path ──
|
|
1365
|
+
|
|
1366
|
+
it("world.exec() throws → error accumulated, stage reported", async () => {
|
|
1367
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1368
|
+
world.exec = async () => { throw new Error("action failed"); };
|
|
1369
|
+
const logger = makeLogger();
|
|
1370
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1371
|
+
const prog = compiler.compile(ast);
|
|
1372
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1373
|
+
expect(result.status).toBe("error");
|
|
1374
|
+
expect(result.errors.some(e => e.includes("action failed"))).toBe(true);
|
|
1375
|
+
});
|
|
1376
|
+
|
|
1377
|
+
it("world.exec() returns non-array → wrapped as single-element array", async () => {
|
|
1378
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1379
|
+
world.exec = async () => { return { result: "single" } as any; };
|
|
1380
|
+
const logger = makeLogger();
|
|
1381
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1382
|
+
const prog = compiler.compile(ast);
|
|
1383
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1384
|
+
expect(world.written["/world/out"]).toEqual([{ result: "single" }]);
|
|
1385
|
+
});
|
|
1386
|
+
|
|
1387
|
+
// ── Edge Cases ──
|
|
1388
|
+
|
|
1389
|
+
it("action downstream of find with empty stream → skipped (not called)", async () => {
|
|
1390
|
+
let execCalled = false;
|
|
1391
|
+
const world = makeWorld({ "/world/d": [] });
|
|
1392
|
+
world.exec = async () => { execCalled = true; return []; };
|
|
1393
|
+
const logger = makeLogger();
|
|
1394
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1395
|
+
const prog = compiler.compile(ast);
|
|
1396
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1397
|
+
expect(execCalled).toBe(false);
|
|
1398
|
+
expect(world.written["/world/out"]).toEqual([]); // save writes empty stream
|
|
1399
|
+
});
|
|
1400
|
+
|
|
1401
|
+
it("action returns empty array → downstream stages get empty stream", async () => {
|
|
1402
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1403
|
+
world.exec = async () => [];
|
|
1404
|
+
const logger = makeLogger();
|
|
1405
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1406
|
+
const prog = compiler.compile(ast);
|
|
1407
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1408
|
+
expect(world.written["/world/out"]).toEqual([]);
|
|
1409
|
+
});
|
|
1410
|
+
|
|
1411
|
+
it("action returns undefined/null → treated as empty stream", async () => {
|
|
1412
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1413
|
+
world.exec = async () => undefined as any;
|
|
1414
|
+
const logger = makeLogger();
|
|
1415
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1416
|
+
const prog = compiler.compile(ast);
|
|
1417
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1418
|
+
expect(world.written["/world/out"]).toEqual([]);
|
|
1419
|
+
});
|
|
1420
|
+
|
|
1421
|
+
it("action as first stage (source) still executes on empty stream", async () => {
|
|
1422
|
+
const world = makeWorld();
|
|
1423
|
+
world.exec = async (_path: string, input: unknown[]) => {
|
|
1424
|
+
expect(input).toEqual([]);
|
|
1425
|
+
return [{ generated: true }];
|
|
1426
|
+
};
|
|
1427
|
+
const logger = makeLogger();
|
|
1428
|
+
const ast = parser.parse(lexer.tokenize("job q { action /tools/generate | save /world/out }"));
|
|
1429
|
+
const prog = compiler.compile(ast);
|
|
1430
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1431
|
+
expect(world.written["/world/out"]).toEqual([{ generated: true }]);
|
|
1432
|
+
});
|
|
1433
|
+
|
|
1434
|
+
it("find | where (filters to 0) | action → action skipped", async () => {
|
|
1435
|
+
let execCalled = false;
|
|
1436
|
+
const world = makeWorld({ "/world/d": [{ status: "locked" }] });
|
|
1437
|
+
world.exec = async () => { execCalled = true; return [{ done: true }]; };
|
|
1438
|
+
const logger = makeLogger();
|
|
1439
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | where status == "unlocked" | action /tools/lock | save /world/out }'));
|
|
1440
|
+
const prog = compiler.compile(ast);
|
|
1441
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1442
|
+
expect(execCalled).toBe(false);
|
|
1443
|
+
expect(result.status).toBe("ok");
|
|
1444
|
+
});
|
|
1445
|
+
|
|
1446
|
+
// ── Security ──
|
|
1447
|
+
|
|
1448
|
+
it("action without exec cap → cap denied error (fail-closed)", async () => {
|
|
1449
|
+
const caps = new Set(["/world/d"]); // no exec cap for /tools/t
|
|
1450
|
+
const { result } = await run("job q { find /world/d | action /tools/t | save /world/out }", { "/world/d": [{ x: 1 }] }, caps);
|
|
1451
|
+
expect(result.status).toBe("error");
|
|
1452
|
+
expect(result.errors.some(e => e.match(/permission|denied/i))).toBe(true);
|
|
1453
|
+
});
|
|
1454
|
+
|
|
1455
|
+
it("action with matching exec cap → ok", async () => {
|
|
1456
|
+
const caps = new Set(["/world/d", "/tools/t"]);
|
|
1457
|
+
const { result } = await run("job q { find /world/d | action /tools/t | save /world/out }", { "/world/d": [{ x: 1 }] }, caps);
|
|
1458
|
+
// action stage should execute, but save to /world/out might fail (no cap)
|
|
1459
|
+
// Let's check that the action didn't get blocked
|
|
1460
|
+
expect(result.errors.every(e => !e.includes("/tools/t"))).toBe(true);
|
|
1461
|
+
});
|
|
1462
|
+
|
|
1463
|
+
it("wildcard cap * → action executes ok", async () => {
|
|
1464
|
+
const { result, ctx } = await run("job q { find /world/d | action /tools/t | save /world/out }", { "/world/d": [{ x: 1 }] });
|
|
1465
|
+
expect(result.status).toBe("ok");
|
|
1466
|
+
expect(ctx.world.execCalls).toHaveLength(1);
|
|
1467
|
+
});
|
|
1468
|
+
|
|
1469
|
+
// ── Data Leak ──
|
|
1470
|
+
|
|
1471
|
+
it("action error messages don't expose world.exec implementation details", async () => {
|
|
1472
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
1473
|
+
world.exec = async () => { throw new Error("internal DB connection failed at 10.0.0.1:5432"); };
|
|
1474
|
+
const logger = makeLogger();
|
|
1475
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
1476
|
+
const prog = compiler.compile(ast);
|
|
1477
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1478
|
+
// The error IS propagated (provider's responsibility to sanitize), but we check it's in errors
|
|
1479
|
+
expect(result.status).toBe("error");
|
|
1480
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
1481
|
+
});
|
|
1482
|
+
|
|
1483
|
+
// ── Data Damage ──
|
|
1484
|
+
|
|
1485
|
+
it("action failure doesn't corrupt upstream data", async () => {
|
|
1486
|
+
const originalData = [{ x: 1 }, { x: 2 }];
|
|
1487
|
+
const world = makeWorld({ "/world/d": originalData });
|
|
1488
|
+
world.exec = async () => { throw new Error("action failed"); };
|
|
1489
|
+
const logger = makeLogger();
|
|
1490
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | tee /world/backup | action /tools/t | save /world/out }"));
|
|
1491
|
+
const prog = compiler.compile(ast);
|
|
1492
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
1493
|
+
// tee should have saved the data before the action failed
|
|
1494
|
+
expect(world.written["/world/backup"]).toEqual([{ x: 1 }, { x: 2 }]);
|
|
1495
|
+
});
|
|
1496
|
+
});
|
|
1497
|
+
|
|
1498
|
+
describe("Relative actions (per-record exec)", () => {
|
|
1499
|
+
it("relative action calls exec per-record with resolved paths", async () => {
|
|
1500
|
+
const data = {
|
|
1501
|
+
"/ha/lights": [
|
|
1502
|
+
{ path: "/ha/lights/bedroom", name: "bedroom", state: "on" },
|
|
1503
|
+
{ path: "/ha/lights/kitchen", name: "kitchen", state: "on" },
|
|
1504
|
+
],
|
|
1505
|
+
};
|
|
1506
|
+
const { ctx } = await run(
|
|
1507
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | action turn_off }',
|
|
1508
|
+
data,
|
|
1509
|
+
);
|
|
1510
|
+
expect(ctx.world.execCalls).toHaveLength(2);
|
|
1511
|
+
expect(ctx.world.execCalls[0].path).toBe("/ha/lights/bedroom/.actions/turn_off");
|
|
1512
|
+
expect(ctx.world.execCalls[1].path).toBe("/ha/lights/kitchen/.actions/turn_off");
|
|
1513
|
+
});
|
|
1514
|
+
|
|
1515
|
+
it("relative action passes individual record as input (not entire stream)", async () => {
|
|
1516
|
+
const data = {
|
|
1517
|
+
"/ha/lights": [
|
|
1518
|
+
{ path: "/ha/lights/bedroom", state: "on" },
|
|
1519
|
+
{ path: "/ha/lights/kitchen", state: "on" },
|
|
1520
|
+
],
|
|
1521
|
+
};
|
|
1522
|
+
const { ctx } = await run(
|
|
1523
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | action turn_off }',
|
|
1524
|
+
data,
|
|
1525
|
+
);
|
|
1526
|
+
// Each exec call should receive a single-element array with just that record
|
|
1527
|
+
expect(ctx.world.execCalls[0].input).toEqual([{ path: "/ha/lights/bedroom", state: "on" }]);
|
|
1528
|
+
expect(ctx.world.execCalls[1].input).toEqual([{ path: "/ha/lights/kitchen", state: "on" }]);
|
|
1529
|
+
});
|
|
1530
|
+
|
|
1531
|
+
it("relative action passes params to each exec call (no stream merge)", async () => {
|
|
1532
|
+
const data = {
|
|
1533
|
+
"/ha/lights": [
|
|
1534
|
+
{ path: "/ha/lights/bedroom", state: "on" },
|
|
1535
|
+
],
|
|
1536
|
+
};
|
|
1537
|
+
const { ctx } = await run(
|
|
1538
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | action turn_off { brightness: 0 } }',
|
|
1539
|
+
data,
|
|
1540
|
+
);
|
|
1541
|
+
expect(ctx.world.execCalls[0].params).toEqual({ brightness: 0 });
|
|
1542
|
+
// Breaking change: inline params → action receives ONLY params, not stream records
|
|
1543
|
+
expect(ctx.world.execCalls[0].input).toEqual([]);
|
|
1544
|
+
});
|
|
1545
|
+
|
|
1546
|
+
it("record without path field → error", async () => {
|
|
1547
|
+
const data = {
|
|
1548
|
+
"/data/items": [
|
|
1549
|
+
{ name: "no-path-item" },
|
|
1550
|
+
],
|
|
1551
|
+
};
|
|
1552
|
+
const { result } = await run(
|
|
1553
|
+
'@caps(read /data/* exec /data/*)\n@budget(actions 10)\njob q { find /data/items | action do_thing }',
|
|
1554
|
+
data,
|
|
1555
|
+
);
|
|
1556
|
+
expect(result.status).toBe("error");
|
|
1557
|
+
expect(result.errors.some(e => e.includes("path"))).toBe(true);
|
|
1558
|
+
});
|
|
1559
|
+
|
|
1560
|
+
it("budget exceeded at Nth record → error, stops before exceeding", async () => {
|
|
1561
|
+
const data = {
|
|
1562
|
+
"/ha/lights": [
|
|
1563
|
+
{ path: "/ha/lights/a", state: "on" },
|
|
1564
|
+
{ path: "/ha/lights/b", state: "on" },
|
|
1565
|
+
{ path: "/ha/lights/c", state: "on" },
|
|
1566
|
+
{ path: "/ha/lights/d", state: "on" },
|
|
1567
|
+
{ path: "/ha/lights/e", state: "on" },
|
|
1568
|
+
],
|
|
1569
|
+
};
|
|
1570
|
+
const { result, ctx } = await run(
|
|
1571
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 3)\njob q { find /ha/lights | action turn_off }',
|
|
1572
|
+
data,
|
|
1573
|
+
);
|
|
1574
|
+
expect(result.status).toBe("error");
|
|
1575
|
+
expect(result.errors.some(e => e.includes("budget exceeded"))).toBe(true);
|
|
1576
|
+
// Pre-check: should execute exactly 3, NOT all 5
|
|
1577
|
+
expect(ctx.world.execCalls).toHaveLength(3);
|
|
1578
|
+
});
|
|
1579
|
+
|
|
1580
|
+
it("absolute action unchanged (regression)", async () => {
|
|
1581
|
+
const data = { "/world/d": [{ x: 1 }, { x: 2 }] };
|
|
1582
|
+
const { ctx } = await run(
|
|
1583
|
+
"@caps(read /world/* exec /tools/* write /world/*)\njob q { find /world/d | action /tools/transform | save /world/out }",
|
|
1584
|
+
data,
|
|
1585
|
+
);
|
|
1586
|
+
expect(ctx.world.execCalls).toHaveLength(1);
|
|
1587
|
+
expect(ctx.world.execCalls[0].path).toBe("/tools/transform");
|
|
1588
|
+
expect(ctx.world.execCalls[0].input).toEqual([{ x: 1 }, { x: 2 }]);
|
|
1589
|
+
});
|
|
1590
|
+
});
|
|
1591
|
+
|
|
1592
|
+
// ── PATH INJECTION: map cannot hijack relative action targets ──
|
|
1593
|
+
|
|
1594
|
+
describe("path injection defense", () => {
|
|
1595
|
+
it("map overwriting path field is ignored — original path preserved", async () => {
|
|
1596
|
+
const data = {
|
|
1597
|
+
"/ha/lights": [
|
|
1598
|
+
{ path: "/ha/lights/bedroom", state: "on" },
|
|
1599
|
+
],
|
|
1600
|
+
};
|
|
1601
|
+
const { ctx } = await run(
|
|
1602
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | map { path: "/tesla/vehicles/mycar", state: state } | action turn_off }',
|
|
1603
|
+
data,
|
|
1604
|
+
);
|
|
1605
|
+
// path should still be /ha/lights/bedroom, NOT /tesla/vehicles/mycar
|
|
1606
|
+
expect(ctx.world.execCalls).toHaveLength(1);
|
|
1607
|
+
expect(ctx.world.execCalls[0].path).toBe("/ha/lights/bedroom/.actions/turn_off");
|
|
1608
|
+
});
|
|
1609
|
+
|
|
1610
|
+
it("runtime @caps blocks cross-provider path even if map could inject", async () => {
|
|
1611
|
+
const data = {
|
|
1612
|
+
"/ha/lights": [
|
|
1613
|
+
{ path: "/ha/lights/bedroom", state: "on" },
|
|
1614
|
+
],
|
|
1615
|
+
};
|
|
1616
|
+
// Even with expression mapping, path is preserved; and caps wouldn't allow tesla
|
|
1617
|
+
const { ctx } = await run(
|
|
1618
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | action turn_off }',
|
|
1619
|
+
data,
|
|
1620
|
+
);
|
|
1621
|
+
expect(ctx.world.execCalls).toHaveLength(1);
|
|
1622
|
+
expect(ctx.world.execCalls[0].path).toBe("/ha/lights/bedroom/.actions/turn_off");
|
|
1623
|
+
});
|
|
1624
|
+
|
|
1625
|
+
it("runtime @caps denies resolved path outside declared scope", async () => {
|
|
1626
|
+
// Simulate: records already have paths pointing to tesla (e.g., from a cross-provider find)
|
|
1627
|
+
// but caps only allow exec on /ha/*
|
|
1628
|
+
const data = {
|
|
1629
|
+
"/ha/lights": [
|
|
1630
|
+
{ path: "/tesla/vehicles/mycar", state: "on" },
|
|
1631
|
+
],
|
|
1632
|
+
};
|
|
1633
|
+
const { result } = await run(
|
|
1634
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | action lock }',
|
|
1635
|
+
data,
|
|
1636
|
+
);
|
|
1637
|
+
// The resolved path /tesla/vehicles/mycar/.actions/lock should be denied by @caps
|
|
1638
|
+
expect(result.status).toBe("error");
|
|
1639
|
+
expect(result.errors.some(e => e.includes("Permission denied") && e.includes("@caps"))).toBe(true);
|
|
1640
|
+
});
|
|
1641
|
+
|
|
1642
|
+
it("runtime @caps allows path within declared scope", async () => {
|
|
1643
|
+
const data = {
|
|
1644
|
+
"/ha/lights": [
|
|
1645
|
+
{ path: "/ha/lights/kitchen", state: "on" },
|
|
1646
|
+
],
|
|
1647
|
+
};
|
|
1648
|
+
const { ctx } = await run(
|
|
1649
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | action turn_on }',
|
|
1650
|
+
data,
|
|
1651
|
+
);
|
|
1652
|
+
expect(ctx.world.execCalls).toHaveLength(1);
|
|
1653
|
+
expect(ctx.world.execCalls[0].path).toBe("/ha/lights/kitchen/.actions/turn_on");
|
|
1654
|
+
});
|
|
1655
|
+
|
|
1656
|
+
it("map with expr mappings preserves path (expression injection vector)", async () => {
|
|
1657
|
+
const data = {
|
|
1658
|
+
"/ha/lights": [
|
|
1659
|
+
{ path: "/ha/lights/bedroom", brightness: 100 },
|
|
1660
|
+
],
|
|
1661
|
+
};
|
|
1662
|
+
const { ctx } = await run(
|
|
1663
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | map { brightness: brightness / 2 } | action dim }',
|
|
1664
|
+
data,
|
|
1665
|
+
);
|
|
1666
|
+
// path preserved even through expression mapping
|
|
1667
|
+
expect(ctx.world.execCalls[0].path).toBe("/ha/lights/bedroom/.actions/dim");
|
|
1668
|
+
});
|
|
1669
|
+
|
|
1670
|
+
it("map with simple field mapping preserves path", async () => {
|
|
1671
|
+
const data = {
|
|
1672
|
+
"/ha/lights": [
|
|
1673
|
+
{ path: "/ha/lights/bedroom", state: "on", brightness: 50 },
|
|
1674
|
+
],
|
|
1675
|
+
};
|
|
1676
|
+
const { ctx } = await run(
|
|
1677
|
+
'@caps(read /ha/* exec /ha/*)\n@budget(actions 10)\njob q { find /ha/lights | map { b: brightness } | action dim }',
|
|
1678
|
+
data,
|
|
1679
|
+
);
|
|
1680
|
+
// path preserved even when mapping to new field names
|
|
1681
|
+
expect(ctx.world.execCalls[0].path).toBe("/ha/lights/bedroom/.actions/dim");
|
|
1682
|
+
});
|
|
1683
|
+
});
|
|
1684
|
+
|
|
1685
|
+
describe("Phase v3-1: Expression Evaluator", () => {
|
|
1686
|
+
// ── Happy Path ──
|
|
1687
|
+
|
|
1688
|
+
it("map { score: age * 2 } evaluates correctly", async () => {
|
|
1689
|
+
const data = { "/world/d": [{ age: 25 }, { age: 30 }] };
|
|
1690
|
+
const { ctx } = await run("job q { find /world/d | map { score: age * 2 } | save /world/out }", data);
|
|
1691
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ score: 50 }, { score: 60 }]);
|
|
1692
|
+
});
|
|
1693
|
+
|
|
1694
|
+
it('map { label: name + " (" + role + ")" } string concat', async () => {
|
|
1695
|
+
const data = { "/world/d": [{ name: "Alice", role: "admin" }, { name: "Bob", role: "user" }] };
|
|
1696
|
+
const { ctx } = await run('job q { find /world/d | map { label: name + " (" + role + ")" } | save /world/out }', data);
|
|
1697
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ label: "Alice (admin)" }, { label: "Bob (user)" }]);
|
|
1698
|
+
});
|
|
1699
|
+
|
|
1700
|
+
it("map { total: price * quantity } with actual data", async () => {
|
|
1701
|
+
const data = { "/world/d": [{ price: 10, quantity: 3 }, { price: 5, quantity: 7 }] };
|
|
1702
|
+
const { ctx } = await run("job q { find /world/d | map { total: price * quantity } | save /world/out }", data);
|
|
1703
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ total: 30 }, { total: 35 }]);
|
|
1704
|
+
});
|
|
1705
|
+
|
|
1706
|
+
it("map { x: a - b } subtraction", async () => {
|
|
1707
|
+
const data = { "/world/d": [{ a: 10, b: 3 }] };
|
|
1708
|
+
const { ctx } = await run("job q { find /world/d | map { x: a - b } | save /world/out }", data);
|
|
1709
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 7 }]);
|
|
1710
|
+
});
|
|
1711
|
+
|
|
1712
|
+
it("map { x: a / b } division", async () => {
|
|
1713
|
+
const data = { "/world/d": [{ a: 10, b: 2 }] };
|
|
1714
|
+
const { ctx } = await run("job q { find /world/d | map { x: a / b } | save /world/out }", data);
|
|
1715
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 5 }]);
|
|
1716
|
+
});
|
|
1717
|
+
|
|
1718
|
+
it("map { score: raw * 0.8 + bonus } mixed arithmetic", async () => {
|
|
1719
|
+
const data = { "/world/d": [{ raw: 100, bonus: 10 }] };
|
|
1720
|
+
const { ctx } = await run("job q { find /world/d | map { score: raw * 0.8 + bonus } | save /world/out }", data);
|
|
1721
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ score: 90 }]);
|
|
1722
|
+
});
|
|
1723
|
+
|
|
1724
|
+
it("map { x: (a + b) * c } parenthesized expression", async () => {
|
|
1725
|
+
const data = { "/world/d": [{ a: 2, b: 3, c: 4 }] };
|
|
1726
|
+
const { ctx } = await run("job q { find /world/d | map { x: (a + b) * c } | save /world/out }", data);
|
|
1727
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 20 }]);
|
|
1728
|
+
});
|
|
1729
|
+
|
|
1730
|
+
it("map with $var reference in expression", async () => {
|
|
1731
|
+
const data = { "/world/d": [{ score: 80 }, { score: 90 }] };
|
|
1732
|
+
const { ctx } = await run("let factor = 2\njob q { find /world/d | map { adjusted: score * $factor } | save /world/out }", data);
|
|
1733
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ adjusted: 160 }, { adjusted: 180 }]);
|
|
1734
|
+
});
|
|
1735
|
+
|
|
1736
|
+
it("map with dotted field access in expression", async () => {
|
|
1737
|
+
const data = { "/world/d": [{ user: { score: 50 } }] };
|
|
1738
|
+
const { ctx } = await run("job q { find /world/d | map { x: user.score * 2 } | save /world/out }", data);
|
|
1739
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 100 }]);
|
|
1740
|
+
});
|
|
1741
|
+
|
|
1742
|
+
it('single-expression map (no braces): map name + " " + surname', async () => {
|
|
1743
|
+
const data = { "/world/d": [{ name: "Alice", surname: "Smith" }] };
|
|
1744
|
+
const { ctx } = await run('job q { find /world/d | map name + " " + surname | save /world/out }', data);
|
|
1745
|
+
expect(ctx.world.written["/world/out"]).toEqual(["Alice Smith"]);
|
|
1746
|
+
});
|
|
1747
|
+
|
|
1748
|
+
it("multi-key expression mapping", async () => {
|
|
1749
|
+
const data = { "/world/d": [{ a: 2, b: 3, name: "X" }] };
|
|
1750
|
+
const { ctx } = await run('job q { find /world/d | map { sum: a + b, label: name + "!" } | save /world/out }', data);
|
|
1751
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ sum: 5, label: "X!" }]);
|
|
1752
|
+
});
|
|
1753
|
+
|
|
1754
|
+
// ── Bad Path ──
|
|
1755
|
+
|
|
1756
|
+
it("map { x: a / 0 } → Infinity (JS behavior)", async () => {
|
|
1757
|
+
const data = { "/world/d": [{ a: 10 }] };
|
|
1758
|
+
const { ctx } = await run("job q { find /world/d | map { x: a / 0 } | save /world/out }", data);
|
|
1759
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: Infinity }]);
|
|
1760
|
+
});
|
|
1761
|
+
|
|
1762
|
+
it("map { x: a * b } where a is string → NaN", async () => {
|
|
1763
|
+
const data = { "/world/d": [{ a: "hello", b: 2 }] };
|
|
1764
|
+
const { ctx } = await run("job q { find /world/d | map { x: a * b } | save /world/out }", data);
|
|
1765
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: NaN }]);
|
|
1766
|
+
});
|
|
1767
|
+
|
|
1768
|
+
it("map { x: $undefined_var + 1 } → error", async () => {
|
|
1769
|
+
const data = { "/world/d": [{ a: 1 }] };
|
|
1770
|
+
const { result } = await run("job q { find /world/d | map { x: $missing + 1 } | save /world/out }", data);
|
|
1771
|
+
expect(result.status).toBe("error");
|
|
1772
|
+
expect(result.errors.some((e: string) => e.includes("Undefined variable"))).toBe(true);
|
|
1773
|
+
});
|
|
1774
|
+
|
|
1775
|
+
// ── Edge Cases ──
|
|
1776
|
+
|
|
1777
|
+
it("map { x: 0 } → literal number expression", async () => {
|
|
1778
|
+
const data = { "/world/d": [{ a: 1 }] };
|
|
1779
|
+
const { ctx } = await run("job q { find /world/d | map { x: 0 } | save /world/out }", data);
|
|
1780
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 0 }]);
|
|
1781
|
+
});
|
|
1782
|
+
|
|
1783
|
+
it('map { x: "hello" } → literal string expression', async () => {
|
|
1784
|
+
const data = { "/world/d": [{ a: 1 }] };
|
|
1785
|
+
const { ctx } = await run('job q { find /world/d | map { x: "hello" } | save /world/out }', data);
|
|
1786
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: "hello" }]);
|
|
1787
|
+
});
|
|
1788
|
+
|
|
1789
|
+
it('map { x: a + "" } → implicit toString', async () => {
|
|
1790
|
+
const data = { "/world/d": [{ a: 42 }] };
|
|
1791
|
+
const { ctx } = await run('job q { find /world/d | map { x: a + "" } | save /world/out }', data);
|
|
1792
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: "42" }]);
|
|
1793
|
+
});
|
|
1794
|
+
|
|
1795
|
+
it("long expression chain: map { x: a + b + c + d + e }", async () => {
|
|
1796
|
+
const data = { "/world/d": [{ a: 1, b: 2, c: 3, d: 4, e: 5 }] };
|
|
1797
|
+
const { ctx } = await run("job q { find /world/d | map { x: a + b + c + d + e } | save /world/out }", data);
|
|
1798
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 15 }]);
|
|
1799
|
+
});
|
|
1800
|
+
|
|
1801
|
+
it("expression on missing field → undefined handling", async () => {
|
|
1802
|
+
const data = { "/world/d": [{ a: 10 }] };
|
|
1803
|
+
const { ctx } = await run("job q { find /world/d | map { x: a + missing } | save /world/out }", data);
|
|
1804
|
+
// undefined + number → NaN
|
|
1805
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: NaN }]);
|
|
1806
|
+
});
|
|
1807
|
+
|
|
1808
|
+
// ── Backward Compat ──
|
|
1809
|
+
|
|
1810
|
+
it("backward compat: map field still works", async () => {
|
|
1811
|
+
const data = { "/world/d": [{ name: "Alice", age: 30 }] };
|
|
1812
|
+
const { ctx } = await run("job q { find /world/d | map name | save /world/out }", data);
|
|
1813
|
+
expect(ctx.world.written["/world/out"]).toEqual(["Alice"]);
|
|
1814
|
+
});
|
|
1815
|
+
|
|
1816
|
+
it("backward compat: map { out input } (no colon) still works", async () => {
|
|
1817
|
+
const data = { "/world/d": [{ name: "Alice", dept: "eng" }] };
|
|
1818
|
+
const { ctx } = await run("job q { find /world/d | map { fullName name, department dept } | save /world/out }", data);
|
|
1819
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ fullName: "Alice", department: "eng" }]);
|
|
1820
|
+
});
|
|
1821
|
+
|
|
1822
|
+
// ── Security ──
|
|
1823
|
+
|
|
1824
|
+
it("expression evaluator is pure — no access to world", async () => {
|
|
1825
|
+
// Expression only has access to the current item and variables, not world/ctx
|
|
1826
|
+
const data = { "/world/d": [{ score: 80 }] };
|
|
1827
|
+
const { ctx } = await run("job q { find /world/d | map { x: score * 2 } | save /world/out }", data);
|
|
1828
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 160 }]);
|
|
1829
|
+
});
|
|
1830
|
+
|
|
1831
|
+
// ── Data Damage ──
|
|
1832
|
+
|
|
1833
|
+
it("expression error on one object doesn't crash others when pipeline continues", async () => {
|
|
1834
|
+
// The pipeline would error at the first bad object, but the stream before that is still valid
|
|
1835
|
+
const data = { "/world/d": [{ a: 1 }] };
|
|
1836
|
+
const { result } = await run("job q { find /world/d | map { x: $nonexistent } | save /world/out }", data);
|
|
1837
|
+
expect(result.status).toBe("error");
|
|
1838
|
+
});
|
|
1839
|
+
|
|
1840
|
+
it("division by zero doesn't crash pipeline", async () => {
|
|
1841
|
+
const data = { "/world/d": [{ a: 10, b: 0 }] };
|
|
1842
|
+
const { ctx, result } = await run("job q { find /world/d | map { x: a / b } | save /world/out }", data);
|
|
1843
|
+
// JS division by zero → Infinity, pipeline continues
|
|
1844
|
+
expect(result.status).toBe("ok");
|
|
1845
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: Infinity }]);
|
|
1846
|
+
});
|
|
1847
|
+
});
|
|
1848
|
+
|
|
1849
|
+
describe("Phase v3-2: Route Compiler", () => {
|
|
1850
|
+
it("route dispatches items to correct target jobs", async () => {
|
|
1851
|
+
const source = `
|
|
1852
|
+
job main { find /world/tickets | route priority { "high" -> job handle_high, "low" -> job handle_low } }
|
|
1853
|
+
job handle_high { save /world/high }
|
|
1854
|
+
job handle_low { save /world/low }
|
|
1855
|
+
`;
|
|
1856
|
+
const data = { "/world/tickets": [
|
|
1857
|
+
{ id: 1, priority: "high" },
|
|
1858
|
+
{ id: 2, priority: "low" },
|
|
1859
|
+
{ id: 3, priority: "high" },
|
|
1860
|
+
]};
|
|
1861
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1862
|
+
const prog = compiler.compile(ast);
|
|
1863
|
+
const ctx = makeCtx(data);
|
|
1864
|
+
await prog.jobs[0].execute(ctx);
|
|
1865
|
+
expect(ctx.world.written["/world/high"]).toEqual([{ id: 1, priority: "high" }, { id: 3, priority: "high" }]);
|
|
1866
|
+
expect(ctx.world.written["/world/low"]).toEqual([{ id: 2, priority: "low" }]);
|
|
1867
|
+
});
|
|
1868
|
+
|
|
1869
|
+
it("route fallback catches unmatched values", async () => {
|
|
1870
|
+
const source = `
|
|
1871
|
+
job main { find /world/d | route type { "a" -> job handle_a, _ -> job handle_default } }
|
|
1872
|
+
job handle_a { save /world/out_a }
|
|
1873
|
+
job handle_default { save /world/out_default }
|
|
1874
|
+
`;
|
|
1875
|
+
const data = { "/world/d": [
|
|
1876
|
+
{ type: "a", v: 1 },
|
|
1877
|
+
{ type: "b", v: 2 },
|
|
1878
|
+
{ type: "c", v: 3 },
|
|
1879
|
+
]};
|
|
1880
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1881
|
+
const prog = compiler.compile(ast);
|
|
1882
|
+
const ctx = makeCtx(data);
|
|
1883
|
+
await prog.jobs[0].execute(ctx);
|
|
1884
|
+
expect(ctx.world.written["/world/out_a"]).toEqual([{ type: "a", v: 1 }]);
|
|
1885
|
+
expect(ctx.world.written["/world/out_default"]).toEqual([{ type: "b", v: 2 }, { type: "c", v: 3 }]);
|
|
1886
|
+
});
|
|
1887
|
+
|
|
1888
|
+
it("route with no match and no fallback → items dropped", async () => {
|
|
1889
|
+
const source = `
|
|
1890
|
+
job main { find /world/d | route type { "a" -> job handle_a } }
|
|
1891
|
+
job handle_a { save /world/out_a }
|
|
1892
|
+
`;
|
|
1893
|
+
const data = { "/world/d": [{ type: "b", v: 1 }] };
|
|
1894
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1895
|
+
const prog = compiler.compile(ast);
|
|
1896
|
+
const ctx = makeCtx(data);
|
|
1897
|
+
await prog.jobs[0].execute(ctx);
|
|
1898
|
+
expect(ctx.world.written["/world/out_a"]).toBeUndefined();
|
|
1899
|
+
});
|
|
1900
|
+
|
|
1901
|
+
it("route with single branch (degenerate case)", async () => {
|
|
1902
|
+
const source = `
|
|
1903
|
+
job main { find /world/d | route status { "active" -> job handle } }
|
|
1904
|
+
job handle { save /world/out }
|
|
1905
|
+
`;
|
|
1906
|
+
const data = { "/world/d": [{ status: "active" }] };
|
|
1907
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1908
|
+
const prog = compiler.compile(ast);
|
|
1909
|
+
const ctx = makeCtx(data);
|
|
1910
|
+
await prog.jobs[0].execute(ctx);
|
|
1911
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ status: "active" }]);
|
|
1912
|
+
});
|
|
1913
|
+
|
|
1914
|
+
it("route with nested field `result.category`", async () => {
|
|
1915
|
+
const source = `
|
|
1916
|
+
job main { find /world/d | route result.category { "urgent" -> job handle } }
|
|
1917
|
+
job handle { save /world/out }
|
|
1918
|
+
`;
|
|
1919
|
+
const data = { "/world/d": [{ result: { category: "urgent" } }, { result: { category: "normal" } }] };
|
|
1920
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1921
|
+
const prog = compiler.compile(ast);
|
|
1922
|
+
const ctx = makeCtx(data);
|
|
1923
|
+
await prog.jobs[0].execute(ctx);
|
|
1924
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ result: { category: "urgent" } }]);
|
|
1925
|
+
});
|
|
1926
|
+
|
|
1927
|
+
// Backward compat: existing tests with initialStream undefined still work
|
|
1928
|
+
it("route target job without find uses routed items", async () => {
|
|
1929
|
+
const source = `
|
|
1930
|
+
job main { find /world/d | route type { "x" -> job process } }
|
|
1931
|
+
job process { map { label: type } | save /world/out }
|
|
1932
|
+
`;
|
|
1933
|
+
const data = { "/world/d": [{ type: "x", v: 1 }] };
|
|
1934
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1935
|
+
const prog = compiler.compile(ast);
|
|
1936
|
+
const ctx = makeCtx(data);
|
|
1937
|
+
await prog.jobs[0].execute(ctx);
|
|
1938
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ label: "x" }]);
|
|
1939
|
+
});
|
|
1940
|
+
|
|
1941
|
+
it("route target job not found → error accumulated", async () => {
|
|
1942
|
+
const source = `
|
|
1943
|
+
job main { find /world/d | route type { "a" -> job nonexistent } }
|
|
1944
|
+
job nonexistent { save /world/out }
|
|
1945
|
+
`;
|
|
1946
|
+
// Remove nonexistent job from compiled program to simulate missing job
|
|
1947
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1948
|
+
const prog = compiler.compile(ast);
|
|
1949
|
+
// Delete from jobMap to simulate
|
|
1950
|
+
prog.jobMap.delete("nonexistent");
|
|
1951
|
+
const ctx = makeCtx({ "/world/d": [{ type: "a" }] });
|
|
1952
|
+
const result = await prog.jobs[0].execute(ctx);
|
|
1953
|
+
expect(result.errors.some(e => e.includes("not found"))).toBe(true);
|
|
1954
|
+
});
|
|
1955
|
+
|
|
1956
|
+
it("route is terminal — returns empty stream", async () => {
|
|
1957
|
+
const source = `
|
|
1958
|
+
job main { find /world/d | route type { "a" -> job handle } | save /world/after_route }
|
|
1959
|
+
job handle { save /world/out }
|
|
1960
|
+
`;
|
|
1961
|
+
const data = { "/world/d": [{ type: "a" }] };
|
|
1962
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
1963
|
+
const prog = compiler.compile(ast);
|
|
1964
|
+
const ctx = makeCtx(data);
|
|
1965
|
+
await prog.jobs[0].execute(ctx);
|
|
1966
|
+
// Route returns empty, so save after route saves empty
|
|
1967
|
+
expect(ctx.world.written["/world/after_route"]).toEqual([]);
|
|
1968
|
+
});
|
|
1969
|
+
});
|
|
1970
|
+
|
|
1971
|
+
describe("Phase v3-2: Lookup Compiler", () => {
|
|
1972
|
+
it("lookup performs left join — matched records merged", async () => {
|
|
1973
|
+
const data = {
|
|
1974
|
+
"/orders": [{ id: 1, customer_id: "c1", amount: 100 }, { id: 2, customer_id: "c2", amount: 200 }],
|
|
1975
|
+
"/customers": [{ customer_id: "c1", name: "Alice" }, { customer_id: "c2", name: "Bob" }],
|
|
1976
|
+
};
|
|
1977
|
+
const { ctx } = await run("job q { find /orders | lookup /customers on customer_id | save /world/out }", data);
|
|
1978
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
1979
|
+
expect(out).toHaveLength(2);
|
|
1980
|
+
expect(out[0]).toMatchObject({ id: 1, customer_id: "c1", amount: 100, name: "Alice" });
|
|
1981
|
+
expect(out[1]).toMatchObject({ id: 2, customer_id: "c2", amount: 200, name: "Bob" });
|
|
1982
|
+
});
|
|
1983
|
+
|
|
1984
|
+
it("lookup unmatched — original object preserved (LEFT JOIN)", async () => {
|
|
1985
|
+
const data = {
|
|
1986
|
+
"/orders": [{ id: 1, customer_id: "c1" }, { id: 2, customer_id: "c99" }],
|
|
1987
|
+
"/customers": [{ customer_id: "c1", name: "Alice" }],
|
|
1988
|
+
};
|
|
1989
|
+
const { ctx } = await run("job q { find /orders | lookup /customers on customer_id | save /world/out }", data);
|
|
1990
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
1991
|
+
expect(out).toHaveLength(2);
|
|
1992
|
+
expect(out[0]).toMatchObject({ id: 1, customer_id: "c1", name: "Alice" });
|
|
1993
|
+
expect(out[1]).toMatchObject({ id: 2, customer_id: "c99" }); // no name — unmatched
|
|
1994
|
+
expect(out[1].name).toBeUndefined();
|
|
1995
|
+
});
|
|
1996
|
+
|
|
1997
|
+
it("lookup empty source → all items unchanged", async () => {
|
|
1998
|
+
const data = {
|
|
1999
|
+
"/orders": [{ id: 1, customer_id: "c1" }],
|
|
2000
|
+
"/customers": [],
|
|
2001
|
+
};
|
|
2002
|
+
const { ctx } = await run("job q { find /orders | lookup /customers on customer_id | save /world/out }", data);
|
|
2003
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ id: 1, customer_id: "c1" }]);
|
|
2004
|
+
});
|
|
2005
|
+
|
|
2006
|
+
it("lookup with multiple matches → first match wins", async () => {
|
|
2007
|
+
const data = {
|
|
2008
|
+
"/orders": [{ id: 1, type: "a" }],
|
|
2009
|
+
"/types": [{ type: "a", label: "first" }, { type: "a", label: "second" }],
|
|
2010
|
+
};
|
|
2011
|
+
const { ctx } = await run("job q { find /orders | lookup /types on type | save /world/out }", data);
|
|
2012
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
2013
|
+
expect(out[0].label).toBe("first");
|
|
2014
|
+
});
|
|
2015
|
+
|
|
2016
|
+
it("lookup source doesn't exist → items unchanged (empty read)", async () => {
|
|
2017
|
+
const data = { "/orders": [{ id: 1, customer_id: "c1" }] };
|
|
2018
|
+
const { ctx } = await run("job q { find /orders | lookup /customers on customer_id | save /world/out }", data);
|
|
2019
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ id: 1, customer_id: "c1" }]);
|
|
2020
|
+
});
|
|
2021
|
+
|
|
2022
|
+
it("lookup: stream fields take precedence over lookup fields", async () => {
|
|
2023
|
+
const data = {
|
|
2024
|
+
"/orders": [{ id: 1, customer_id: "c1", status: "pending" }],
|
|
2025
|
+
"/customers": [{ customer_id: "c1", status: "active", name: "Alice" }],
|
|
2026
|
+
};
|
|
2027
|
+
const { ctx } = await run("job q { find /orders | lookup /customers on customer_id | save /world/out }", data);
|
|
2028
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
2029
|
+
// Stream item's status ("pending") should override lookup's status ("active")
|
|
2030
|
+
expect(out[0].status).toBe("pending");
|
|
2031
|
+
expect(out[0].name).toBe("Alice");
|
|
2032
|
+
});
|
|
2033
|
+
});
|
|
2034
|
+
|
|
2035
|
+
describe("Phase v3-3: Param Compiler", () => {
|
|
2036
|
+
it("param with default → $source resolves to default value", async () => {
|
|
2037
|
+
const data = { "/data/users": [{ name: "Alice" }] };
|
|
2038
|
+
const source = 'param source = /data/users\njob q { find $source | save /world/out }';
|
|
2039
|
+
// $source should resolve to "/data/users" — but find expects PATH, not $var
|
|
2040
|
+
// Actually, let me rethink: params become variables, $source in `where` or `map` works
|
|
2041
|
+
// For find, the path is hardcoded in AST. Param is useful in where/map expressions.
|
|
2042
|
+
// Let me test with where/map instead
|
|
2043
|
+
const data2 = { "/world/d": [{ score: 90 }, { score: 70 }] };
|
|
2044
|
+
const source2 = "param threshold = 80\njob q { find /world/d | where score > $threshold | save /world/out }";
|
|
2045
|
+
const { ctx } = await run(source2, data2);
|
|
2046
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ score: 90 }]);
|
|
2047
|
+
});
|
|
2048
|
+
|
|
2049
|
+
it("param in expression: map { adjusted: score * $factor }", async () => {
|
|
2050
|
+
const data = { "/world/d": [{ score: 50 }, { score: 100 }] };
|
|
2051
|
+
const source = "param factor = 2\njob q { find /world/d | map { adjusted: score * $factor } | save /world/out }";
|
|
2052
|
+
const { ctx } = await run(source, data);
|
|
2053
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ adjusted: 100 }, { adjusted: 200 }]);
|
|
2054
|
+
});
|
|
2055
|
+
|
|
2056
|
+
it("param + let same name → rejected (name collision)", () => {
|
|
2057
|
+
const source = "param x = 5\nlet x = 10\njob q { find /world/d | map { result: v * $x } | save /world/out }";
|
|
2058
|
+
const result = compileSource(source);
|
|
2059
|
+
expect(result.diagnostics.some(d => d.message.includes("conflicts"))).toBe(true);
|
|
2060
|
+
});
|
|
2061
|
+
|
|
2062
|
+
it("compiled program exposes params map", () => {
|
|
2063
|
+
const source = 'param source = /data/users\nparam threshold = 80\njob q { find /data | save /out }';
|
|
2064
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
2065
|
+
const prog = compiler.compile(ast);
|
|
2066
|
+
expect(prog.params.get("source")).toBe("/data/users");
|
|
2067
|
+
expect(prog.params.get("threshold")).toBe(80);
|
|
2068
|
+
});
|
|
2069
|
+
|
|
2070
|
+
it("param with no override → uses default", async () => {
|
|
2071
|
+
const data = { "/world/d": [{ v: 10 }] };
|
|
2072
|
+
const source = "param multiplier = 3\njob q { find /world/d | map { x: v * $multiplier } | save /world/out }";
|
|
2073
|
+
const { ctx } = await run(source, data);
|
|
2074
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ x: 30 }]);
|
|
2075
|
+
});
|
|
2076
|
+
});
|
|
2077
|
+
|
|
2078
|
+
describe("Phase v3-2: Runtime Let Compiler", () => {
|
|
2079
|
+
it("let total = find /data | count → $total resolves to count", async () => {
|
|
2080
|
+
const data = { "/data/items": [{ x: 1 }, { x: 2 }, { x: 3 }] };
|
|
2081
|
+
const source = "let total = find /data/items | count\njob q { find /data/items | where x > $total | save /world/out }";
|
|
2082
|
+
// total = 3, so x > 3 should filter all out
|
|
2083
|
+
const { ctx } = await run(source, data);
|
|
2084
|
+
expect(ctx.world.written["/world/out"]).toEqual([]);
|
|
2085
|
+
});
|
|
2086
|
+
|
|
2087
|
+
it("runtime let binds numeric count value", async () => {
|
|
2088
|
+
const data = { "/data/items": [{ v: 10 }, { v: 20 }] };
|
|
2089
|
+
const source = "let n = find /data/items | count\njob q { find /data/items | map { x: v * $n } | save /world/out }";
|
|
2090
|
+
// n = {count: 2}, extracted as 2
|
|
2091
|
+
const { ctx } = await run(source, data);
|
|
2092
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
2093
|
+
expect(out).toEqual([{ x: 20 }, { x: 40 }]);
|
|
2094
|
+
});
|
|
2095
|
+
|
|
2096
|
+
it("runtime let with map extracts single value", async () => {
|
|
2097
|
+
const data = { "/data/items": [{ name: "hello" }] };
|
|
2098
|
+
const source = 'let label = find /data/items | map name\njob q { find /data/items | map { greeting: $label + "!" } | save /world/out }';
|
|
2099
|
+
const { ctx } = await run(source, data);
|
|
2100
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
2101
|
+
expect(out).toEqual([{ greeting: "hello!" }]);
|
|
2102
|
+
});
|
|
2103
|
+
|
|
2104
|
+
it("runtime let pipeline failure → job error", async () => {
|
|
2105
|
+
const data = {};
|
|
2106
|
+
const world = makeWorld(data);
|
|
2107
|
+
const origRead = world.read.bind(world);
|
|
2108
|
+
(world as any).read = (path: string) => {
|
|
2109
|
+
if (path === "/data/missing") throw new Error("path not found");
|
|
2110
|
+
return origRead(path);
|
|
2111
|
+
};
|
|
2112
|
+
const logger = makeLogger();
|
|
2113
|
+
const source = "let n = find /data/missing | count\njob q { find /data/x | save /world/out }";
|
|
2114
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
2115
|
+
const prog = compiler.compile(ast);
|
|
2116
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2117
|
+
// find returns empty on missing paths (not throw), so the count should be 0
|
|
2118
|
+
// Actually let me check — our find catches errors and returns []
|
|
2119
|
+
expect(result.status).toBe("ok");
|
|
2120
|
+
});
|
|
2121
|
+
|
|
2122
|
+
it("static let and runtime let coexist", async () => {
|
|
2123
|
+
const data = { "/data/items": [{ v: 10 }, { v: 20 }, { v: 30 }] };
|
|
2124
|
+
const source = "let factor = 2\nlet n = find /data/items | count\njob q { find /data/items | map { x: v * $factor + $n } | save /world/out }";
|
|
2125
|
+
// factor=2, n=3, so x = v*2+3 → 23, 43, 63
|
|
2126
|
+
const { ctx } = await run(source, data);
|
|
2127
|
+
const out = ctx.world.written["/world/out"] as any[];
|
|
2128
|
+
expect(out).toEqual([{ x: 23 }, { x: 43 }, { x: 63 }]);
|
|
2129
|
+
});
|
|
2130
|
+
|
|
2131
|
+
it("runtime let with count returning 0", async () => {
|
|
2132
|
+
const data = { "/data/items": [] };
|
|
2133
|
+
const source = "let n = find /data/items | count\njob q { find /data/items | save /world/out }";
|
|
2134
|
+
const { result } = await run(source, data);
|
|
2135
|
+
expect(result.status).toBe("ok");
|
|
2136
|
+
});
|
|
2137
|
+
});
|
|
2138
|
+
|
|
2139
|
+
describe("Phase v3-2: @on_error Compiler", () => {
|
|
2140
|
+
// ── Happy Path ──
|
|
2141
|
+
|
|
2142
|
+
it("@on_error(skip) — failing stage skips, pipeline continues", async () => {
|
|
2143
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }, { x: 2 }] });
|
|
2144
|
+
world.exec = async () => { throw new Error("action failed"); };
|
|
2145
|
+
const logger = makeLogger();
|
|
2146
|
+
const ast = parser.parse(lexer.tokenize("@on_error(skip)\njob q { find /world/d | action /tools/t | save /world/out }"));
|
|
2147
|
+
const prog = compiler.compile(ast);
|
|
2148
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2149
|
+
// Pipeline continues past the error, status is partial
|
|
2150
|
+
expect(result.status).toBe("partial");
|
|
2151
|
+
expect(result.errors.length).toBeGreaterThan(0);
|
|
2152
|
+
});
|
|
2153
|
+
|
|
2154
|
+
it("@on_error(save /errors) — failing items written to error path", async () => {
|
|
2155
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
2156
|
+
world.exec = async () => { throw new Error("transform failed"); };
|
|
2157
|
+
const logger = makeLogger();
|
|
2158
|
+
const ast = parser.parse(lexer.tokenize("@on_error(save, /errors/log)\njob q { find /world/d | action /tools/t | save /world/out }"));
|
|
2159
|
+
const prog = compiler.compile(ast);
|
|
2160
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2161
|
+
expect(result.status).toBe("partial");
|
|
2162
|
+
// Errors should have been written to /errors/log
|
|
2163
|
+
expect(world.written["/errors/log"]).toBeDefined();
|
|
2164
|
+
expect(world.written["/errors/log"].length).toBeGreaterThan(0);
|
|
2165
|
+
const errItem = world.written["/errors/log"][0] as any;
|
|
2166
|
+
expect(errItem._error).toBeDefined();
|
|
2167
|
+
expect(errItem._item).toBeDefined();
|
|
2168
|
+
});
|
|
2169
|
+
|
|
2170
|
+
it("@on_error(fail) — default behavior, terminate on error", async () => {
|
|
2171
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
2172
|
+
world.exec = async () => { throw new Error("fatal error"); };
|
|
2173
|
+
const logger = makeLogger();
|
|
2174
|
+
const ast = parser.parse(lexer.tokenize("@on_error(fail)\njob q { find /world/d | action /tools/t | save /world/out }"));
|
|
2175
|
+
const prog = compiler.compile(ast);
|
|
2176
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2177
|
+
expect(result.status).toBe("error");
|
|
2178
|
+
// save should not have been reached
|
|
2179
|
+
expect(world.written["/world/out"]).toBeUndefined();
|
|
2180
|
+
});
|
|
2181
|
+
|
|
2182
|
+
it("no @on_error annotation — default is fail/terminate", async () => {
|
|
2183
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
2184
|
+
world.exec = async () => { throw new Error("no handler"); };
|
|
2185
|
+
const logger = makeLogger();
|
|
2186
|
+
const ast = parser.parse(lexer.tokenize("job q { find /world/d | action /tools/t | save /world/out }"));
|
|
2187
|
+
const prog = compiler.compile(ast);
|
|
2188
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2189
|
+
expect(result.status).toBe("error");
|
|
2190
|
+
expect(world.written["/world/out"]).toBeUndefined();
|
|
2191
|
+
});
|
|
2192
|
+
|
|
2193
|
+
// ── Edge Cases ──
|
|
2194
|
+
|
|
2195
|
+
it("@on_error(skip) with multiple failing stages — all skipped", async () => {
|
|
2196
|
+
let callCount = 0;
|
|
2197
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
2198
|
+
world.exec = async () => {
|
|
2199
|
+
callCount++;
|
|
2200
|
+
throw new Error(`fail-${callCount}`);
|
|
2201
|
+
};
|
|
2202
|
+
const logger = makeLogger();
|
|
2203
|
+
const ast = parser.parse(lexer.tokenize("@on_error(skip)\njob q { find /world/d | action /a | action /b | save /world/out }"));
|
|
2204
|
+
const prog = compiler.compile(ast);
|
|
2205
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2206
|
+
expect(result.status).toBe("partial");
|
|
2207
|
+
expect(result.errors.length).toBe(2);
|
|
2208
|
+
});
|
|
2209
|
+
|
|
2210
|
+
it("@on_error(save /errors) — error path write fails → secondary error logged", async () => {
|
|
2211
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
2212
|
+
world.exec = async () => { throw new Error("action fail"); };
|
|
2213
|
+
const origWrite = world.write.bind(world);
|
|
2214
|
+
world.write = (path: string, d: unknown[]) => {
|
|
2215
|
+
if (path === "/errors/log") throw new Error("disk full");
|
|
2216
|
+
origWrite(path, d);
|
|
2217
|
+
};
|
|
2218
|
+
const logger = makeLogger();
|
|
2219
|
+
const ast = parser.parse(lexer.tokenize("@on_error(save, /errors/log)\njob q { find /world/d | action /tools/t | save /world/out }"));
|
|
2220
|
+
const prog = compiler.compile(ast);
|
|
2221
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2222
|
+
// Should have both the action error and the error-write error
|
|
2223
|
+
expect(result.errors.length).toBeGreaterThanOrEqual(2);
|
|
2224
|
+
});
|
|
2225
|
+
|
|
2226
|
+
// ── Data Leak ──
|
|
2227
|
+
|
|
2228
|
+
it("@on_error(save) — saved errors don't include full stack traces", async () => {
|
|
2229
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }] });
|
|
2230
|
+
world.exec = async () => { throw new Error("internal DB at 10.0.0.1:5432"); };
|
|
2231
|
+
const logger = makeLogger();
|
|
2232
|
+
const ast = parser.parse(lexer.tokenize("@on_error(save, /errors/log)\njob q { find /world/d | action /tools/t | save /world/out }"));
|
|
2233
|
+
const prog = compiler.compile(ast);
|
|
2234
|
+
await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2235
|
+
const errItem = world.written["/errors/log"]?.[0] as any;
|
|
2236
|
+
// _error is the message, not a stack trace
|
|
2237
|
+
expect(errItem._error).toBe("internal DB at 10.0.0.1:5432");
|
|
2238
|
+
// Should NOT contain "at " stack trace lines
|
|
2239
|
+
expect(errItem._error).not.toMatch(/^\s+at\s/m);
|
|
2240
|
+
});
|
|
2241
|
+
|
|
2242
|
+
// ── Data Damage ──
|
|
2243
|
+
|
|
2244
|
+
it("@on_error(save) — error save failure doesn't affect main pipeline continuation", async () => {
|
|
2245
|
+
const world = makeWorld({ "/world/d": [{ x: 1 }, { x: 2 }] });
|
|
2246
|
+
let actionCallCount = 0;
|
|
2247
|
+
world.exec = async (_path: string, input: unknown[]) => {
|
|
2248
|
+
actionCallCount++;
|
|
2249
|
+
if (actionCallCount === 1) throw new Error("first fail");
|
|
2250
|
+
return input;
|
|
2251
|
+
};
|
|
2252
|
+
const origWrite = world.write.bind(world);
|
|
2253
|
+
world.write = (path: string, d: unknown[]) => {
|
|
2254
|
+
if (path === "/errors/log") throw new Error("error write failed");
|
|
2255
|
+
origWrite(path, d);
|
|
2256
|
+
};
|
|
2257
|
+
const logger = makeLogger();
|
|
2258
|
+
// Note: the @on_error(save) will try to save then continue
|
|
2259
|
+
const ast = parser.parse(lexer.tokenize("@on_error(save, /errors/log)\njob q { find /world/d | action /tools/t | save /world/out }"));
|
|
2260
|
+
const prog = compiler.compile(ast);
|
|
2261
|
+
const result = await prog.jobs[0].execute({ world, caps: new Set(["*"]), logger });
|
|
2262
|
+
// Pipeline should complete with partial status
|
|
2263
|
+
expect(result.status).toBe("partial");
|
|
2264
|
+
});
|
|
2265
|
+
});
|
|
2266
|
+
|
|
2267
|
+
describe("Phase v3-3: On Trigger Compiler", () => {
|
|
2268
|
+
it("compiled job includes trigger metadata", async () => {
|
|
2269
|
+
const data = { "/data/inbox": [{ id: 1 }] };
|
|
2270
|
+
const source = "job handle on /data/inbox:created { find /data/inbox | save /world/out }";
|
|
2271
|
+
const { prog } = await run(source, data);
|
|
2272
|
+
const job = prog.jobs[0];
|
|
2273
|
+
expect(job.name).toBe("handle");
|
|
2274
|
+
expect(job.trigger).toEqual({ kind: "event", path: "/data/inbox", event: "created" });
|
|
2275
|
+
});
|
|
2276
|
+
|
|
2277
|
+
it("triggered job still executes normally", async () => {
|
|
2278
|
+
const data = { "/data/inbox": [{ msg: "hello" }] };
|
|
2279
|
+
const source = "job handle on /data/inbox:created { find /data/inbox | save /world/out }";
|
|
2280
|
+
const { ctx } = await run(source, data);
|
|
2281
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ msg: "hello" }]);
|
|
2282
|
+
});
|
|
2283
|
+
|
|
2284
|
+
it("job without trigger has undefined trigger", async () => {
|
|
2285
|
+
const data = { "/data": [{ v: 1 }] };
|
|
2286
|
+
const source = "job plain { find /data | save /world/out }";
|
|
2287
|
+
const { prog } = await run(source, data);
|
|
2288
|
+
expect(prog.jobs[0].trigger).toBeUndefined();
|
|
2289
|
+
});
|
|
2290
|
+
|
|
2291
|
+
it("multiple jobs with different triggers in same script", async () => {
|
|
2292
|
+
const data = { "/inbox": [{ id: 1 }], "/outbox": [{ id: 2 }] };
|
|
2293
|
+
const source = `
|
|
2294
|
+
job a on /inbox:created { find /inbox | save /world/a }
|
|
2295
|
+
job b on /outbox:sent { find /outbox | save /world/b }
|
|
2296
|
+
`;
|
|
2297
|
+
const { prog } = await run(source, data);
|
|
2298
|
+
expect(prog.jobs[0].trigger).toEqual({ kind: "event", path: "/inbox", event: "created" });
|
|
2299
|
+
expect(prog.jobs[1].trigger).toEqual({ kind: "event", path: "/outbox", event: "sent" });
|
|
2300
|
+
});
|
|
2301
|
+
|
|
2302
|
+
it("trigger declaration is declarative — compiled program exposes it", async () => {
|
|
2303
|
+
const source = "job watcher on /data/logs:appended { find /data/logs | save /world/out }";
|
|
2304
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
2305
|
+
const prog = compiler.compile(ast);
|
|
2306
|
+
// The trigger is metadata only — ASH doesn't run event loops
|
|
2307
|
+
const trigger = prog.jobs[0].trigger;
|
|
2308
|
+
expect(trigger?.kind).toBe("event");
|
|
2309
|
+
if (trigger?.kind === "event") {
|
|
2310
|
+
expect(trigger.path).toBe("/data/logs");
|
|
2311
|
+
expect(trigger.event).toBe("appended");
|
|
2312
|
+
}
|
|
2313
|
+
});
|
|
2314
|
+
|
|
2315
|
+
it("trigger + annotations coexist", async () => {
|
|
2316
|
+
const data = { "/data": [{ v: 1 }] };
|
|
2317
|
+
const source = "@retry(2)\njob handler on /data:updated { find /data | save /world/out }";
|
|
2318
|
+
const { prog, ctx } = await run(source, data);
|
|
2319
|
+
expect(prog.jobs[0].trigger).toEqual({ kind: "event", path: "/data", event: "updated" });
|
|
2320
|
+
expect(ctx.world.written["/world/out"]).toEqual([{ v: 1 }]);
|
|
2321
|
+
});
|
|
2322
|
+
|
|
2323
|
+
it("resolveField does not leak prototype chain properties", async () => {
|
|
2324
|
+
const data = { "/data": [{ name: "Alice" }, { name: "Bob" }] };
|
|
2325
|
+
const { ctx } = await run(
|
|
2326
|
+
"job q { find /data | map constructor | save /out }",
|
|
2327
|
+
data,
|
|
2328
|
+
);
|
|
2329
|
+
// With Object.hasOwn guard, 'constructor' is not an own property → resolves to undefined
|
|
2330
|
+
// Without the guard, it would resolve to the Object constructor function
|
|
2331
|
+
expect(ctx.world.written["/out"]).toEqual([undefined, undefined]);
|
|
2332
|
+
});
|
|
2333
|
+
});
|
|
2334
|
+
|
|
2335
|
+
// ── @budget: runtime resource limits ──
|
|
2336
|
+
|
|
2337
|
+
describe("@budget: runtime resource limits", () => {
|
|
2338
|
+
it("@budget(actions 1) — exceeding action count → error", async () => {
|
|
2339
|
+
const data = { "/data": [{ v: 1 }] };
|
|
2340
|
+
// 2 action stages in pipeline, budget is 1
|
|
2341
|
+
const source = "@budget(actions 1)\njob q { find /data | action /api/a | action /api/b | save /out }";
|
|
2342
|
+
const { result } = await run(source, data);
|
|
2343
|
+
expect(result.status).toBe("error");
|
|
2344
|
+
expect(result.errors.some((e: string) => e.includes("budget"))).toBe(true);
|
|
2345
|
+
});
|
|
2346
|
+
|
|
2347
|
+
it("@budget(actions 10) — within limit → ok", async () => {
|
|
2348
|
+
const data = { "/data": [{ v: 1 }, { v: 2 }] };
|
|
2349
|
+
const source = "@budget(actions 10)\njob q { find /data | action /api/x | save /out }";
|
|
2350
|
+
const { result } = await run(source, data);
|
|
2351
|
+
expect(result.status).not.toBe("error");
|
|
2352
|
+
});
|
|
2353
|
+
|
|
2354
|
+
it("@budget(writes 1) — exceeding write count → error", async () => {
|
|
2355
|
+
const data = { "/data": [{ v: 1 }] };
|
|
2356
|
+
// save + tee = 2 writes, budget is 1
|
|
2357
|
+
const source = "@budget(writes 1)\njob q { find /data | tee /backup | save /out }";
|
|
2358
|
+
const { result } = await run(source, data);
|
|
2359
|
+
expect(result.status).toBe("error");
|
|
2360
|
+
expect(result.errors.some((e: string) => e.includes("budget"))).toBe(true);
|
|
2361
|
+
});
|
|
2362
|
+
|
|
2363
|
+
it("@budget(writes 5) — within limit → ok", async () => {
|
|
2364
|
+
const data = { "/data": [{ v: 1 }] };
|
|
2365
|
+
const source = "@budget(writes 5)\njob q { find /data | tee /backup | save /out }";
|
|
2366
|
+
const { result } = await run(source, data);
|
|
2367
|
+
expect(result.status).not.toBe("error");
|
|
2368
|
+
});
|
|
2369
|
+
|
|
2370
|
+
it("@budget(records 5) — exceeding record count → error", async () => {
|
|
2371
|
+
const data = { "/data": Array.from({ length: 10 }, (_, i) => ({ v: i })) };
|
|
2372
|
+
const source = "@budget(records 5)\njob q { find /data | save /out }";
|
|
2373
|
+
const { result } = await run(source, data);
|
|
2374
|
+
expect(result.status).toBe("error");
|
|
2375
|
+
expect(result.errors.some((e: string) => e.includes("budget"))).toBe(true);
|
|
2376
|
+
});
|
|
2377
|
+
|
|
2378
|
+
it("@budget(records 100) — within limit → ok", async () => {
|
|
2379
|
+
const data = { "/data": Array.from({ length: 10 }, (_, i) => ({ v: i })) };
|
|
2380
|
+
const source = "@budget(records 100)\njob q { find /data | save /out }";
|
|
2381
|
+
const { result } = await run(source, data);
|
|
2382
|
+
expect(result.status).not.toBe("error");
|
|
2383
|
+
});
|
|
2384
|
+
|
|
2385
|
+
it("@budget with multiple dimensions", async () => {
|
|
2386
|
+
const data = { "/data": [{ v: 1 }] };
|
|
2387
|
+
const source = "@budget(actions 10, writes 10, records 100)\njob q { find /data | action /api/x | save /out }";
|
|
2388
|
+
const { result } = await run(source, data);
|
|
2389
|
+
expect(result.status).not.toBe("error");
|
|
2390
|
+
});
|
|
2391
|
+
|
|
2392
|
+
it("no @budget → no limits enforced", async () => {
|
|
2393
|
+
const data = { "/data": Array.from({ length: 100 }, (_, i) => ({ v: i })) };
|
|
2394
|
+
const { result } = await run("job q { find /data | save /out }", data);
|
|
2395
|
+
expect(result.status).toBe("ok");
|
|
2396
|
+
});
|
|
2397
|
+
|
|
2398
|
+
it("@budget + @on_error(skip) — budget exceeded but continues", async () => {
|
|
2399
|
+
const data = { "/data": Array.from({ length: 10 }, (_, i) => ({ v: i })) };
|
|
2400
|
+
const source = "@budget(records 5)\n@on_error(skip)\njob q { find /data | save /out }";
|
|
2401
|
+
const { result } = await run(source, data);
|
|
2402
|
+
// on_error(skip) means budget error is caught and skipped
|
|
2403
|
+
expect(result.status).not.toBe("error");
|
|
2404
|
+
});
|
|
2405
|
+
});
|
|
2406
|
+
|
|
2407
|
+
// ── ASH_UNCAPPED_WRITE: publish/tee require @caps when combined with find ──
|
|
2408
|
+
|
|
2409
|
+
describe("ASH_UNCAPPED_WRITE: write ops require @caps", () => {
|
|
2410
|
+
it("find + publish without @caps → ASH_UNCAPPED_WRITE error", () => {
|
|
2411
|
+
const result = compileSource("job leak { find /sensitive/data | publish /events/topic }");
|
|
2412
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_WRITE")).toBe(true);
|
|
2413
|
+
});
|
|
2414
|
+
|
|
2415
|
+
it("find + tee without @caps → ASH_UNCAPPED_WRITE error", () => {
|
|
2416
|
+
const result = compileSource("job copy { find /sensitive/data | tee /other/path | save /out }");
|
|
2417
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_WRITE")).toBe(true);
|
|
2418
|
+
});
|
|
2419
|
+
|
|
2420
|
+
it("find + publish with @caps → no error", () => {
|
|
2421
|
+
const result = compileSource("@caps(read /data/* write /events/*)\njob q { find /data/in | publish /events/out }");
|
|
2422
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_WRITE")).toBe(false);
|
|
2423
|
+
});
|
|
2424
|
+
|
|
2425
|
+
it("find + tee + save with @caps → no error", () => {
|
|
2426
|
+
const result = compileSource("@caps(read /data/* write /backup/* write /out/*)\njob q { find /data/in | tee /backup/copy | save /out/result }");
|
|
2427
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_WRITE")).toBe(false);
|
|
2428
|
+
});
|
|
2429
|
+
|
|
2430
|
+
it("save-only without find → no ASH_UNCAPPED_WRITE (no external data read)", () => {
|
|
2431
|
+
// save without find is just writing static data
|
|
2432
|
+
const result = compileSource("job q { input /prompt | save /out }");
|
|
2433
|
+
// input doesn't count as find/lookup, so no UNCAPPED_WRITE
|
|
2434
|
+
expect(result.diagnostics.some(d => d.code === "ASH_UNCAPPED_WRITE")).toBe(false);
|
|
2435
|
+
});
|
|
2436
|
+
});
|
|
2437
|
+
|
|
2438
|
+
// ── ASH_BUDGET_EXCESSIVE: budget ceilings ──
|
|
2439
|
+
|
|
2440
|
+
describe("ASH_BUDGET_EXCESSIVE: budget ceiling enforcement", () => {
|
|
2441
|
+
it("@budget(actions 999999) → ASH_BUDGET_EXCESSIVE error", () => {
|
|
2442
|
+
const result = compileSource("@caps(read /data/* exec /api/*)\n@budget(actions 999999)\njob q { find /data | action /api/x | save /out }");
|
|
2443
|
+
expect(result.diagnostics.some(d => d.code === "ASH_BUDGET_EXCESSIVE")).toBe(true);
|
|
2444
|
+
});
|
|
2445
|
+
|
|
2446
|
+
it("@budget(actions 50) → within ceiling, no error", () => {
|
|
2447
|
+
const result = compileSource("@caps(read /data/* exec /api/* write /out/*)\n@budget(actions 50)\njob q { find /data | action /api/x | save /out }");
|
|
2448
|
+
expect(result.diagnostics.some(d => d.code === "ASH_BUDGET_EXCESSIVE")).toBe(false);
|
|
2449
|
+
});
|
|
2450
|
+
|
|
2451
|
+
it("@budget(actions 100) → at ceiling, no error", () => {
|
|
2452
|
+
const result = compileSource("@caps(read /data/* exec /api/* write /out/*)\n@budget(actions 100)\njob q { find /data | action /api/x | save /out }");
|
|
2453
|
+
expect(result.diagnostics.some(d => d.code === "ASH_BUDGET_EXCESSIVE")).toBe(false);
|
|
2454
|
+
});
|
|
2455
|
+
|
|
2456
|
+
it("@budget(actions 101) → exceeds ceiling", () => {
|
|
2457
|
+
const result = compileSource("@caps(read /data/* exec /api/* write /out/*)\n@budget(actions 101)\njob q { find /data | action /api/x | save /out }");
|
|
2458
|
+
expect(result.diagnostics.some(d => d.code === "ASH_BUDGET_EXCESSIVE")).toBe(true);
|
|
2459
|
+
});
|
|
2460
|
+
|
|
2461
|
+
it("@budget(records 10001) → exceeds records ceiling", () => {
|
|
2462
|
+
const result = compileSource("@budget(records 10001)\njob q { find /data | save /out }");
|
|
2463
|
+
expect(result.diagnostics.some(d => d.code === "ASH_BUDGET_EXCESSIVE")).toBe(true);
|
|
2464
|
+
});
|
|
2465
|
+
});
|
|
2466
|
+
|
|
2467
|
+
describe("black-hat #15: where strict equality", () => {
|
|
2468
|
+
it("where == uses strict equality: number 0 does NOT match string '0'", async () => {
|
|
2469
|
+
const data = { "/data": [{ v: 0 }, { v: 1 }, { v: "0" }] };
|
|
2470
|
+
const { ctx } = await run('job q { find /data | where v == 0 | save /out }', data);
|
|
2471
|
+
// Strict: only number 0 matches, not string "0"
|
|
2472
|
+
expect(ctx.world.written["/out"]).toEqual([{ v: 0 }]);
|
|
2473
|
+
});
|
|
2474
|
+
|
|
2475
|
+
it("where != with strict equality: number 0 is != string '0'", async () => {
|
|
2476
|
+
const data = { "/data": [{ v: 0 }, { v: "0" }, { v: 1 }] };
|
|
2477
|
+
const { ctx } = await run('job q { find /data | where v != 0 | save /out }', data);
|
|
2478
|
+
// Strict: "0" (string) is != 0 (number)
|
|
2479
|
+
expect(ctx.world.written["/out"]).toEqual([{ v: "0" }, { v: 1 }]);
|
|
2480
|
+
});
|
|
2481
|
+
|
|
2482
|
+
it("where == same type works: string == string", async () => {
|
|
2483
|
+
const data = { "/data": [{ name: "alice" }, { name: "bob" }] };
|
|
2484
|
+
const { ctx } = await run('job q { find /data | where name == "alice" | save /out }', data);
|
|
2485
|
+
expect(ctx.world.written["/out"]).toEqual([{ name: "alice" }]);
|
|
2486
|
+
});
|
|
2487
|
+
|
|
2488
|
+
it("where == same type works: number == number", async () => {
|
|
2489
|
+
const data = { "/data": [{ v: 5 }, { v: 10 }] };
|
|
2490
|
+
const { ctx } = await run("job q { find /data | where v == 10 | save /out }", data);
|
|
2491
|
+
expect(ctx.world.written["/out"]).toEqual([{ v: 10 }]);
|
|
2492
|
+
});
|
|
2493
|
+
});
|
|
2494
|
+
|
|
2495
|
+
// ── Round 8: non-finite where guard ──
|
|
2496
|
+
|
|
2497
|
+
describe("round 8: where rejects non-finite values in comparisons", () => {
|
|
2498
|
+
it("-Infinity does not pass where score < 100", async () => {
|
|
2499
|
+
const data = { "/data": [{ score: -Infinity }, { score: 50 }] };
|
|
2500
|
+
const { ctx } = await run("job q { find /data | where score < 100 | save /out }", data);
|
|
2501
|
+
// -Infinity should be rejected; only 50 passes
|
|
2502
|
+
expect(ctx.world.written["/out"]).toEqual([{ score: 50 }]);
|
|
2503
|
+
});
|
|
2504
|
+
|
|
2505
|
+
it("Infinity does not pass where score > 0", async () => {
|
|
2506
|
+
const data = { "/data": [{ score: Infinity }, { score: 5 }] };
|
|
2507
|
+
const { ctx } = await run("job q { find /data | where score > 0 | save /out }", data);
|
|
2508
|
+
expect(ctx.world.written["/out"]).toEqual([{ score: 5 }]);
|
|
2509
|
+
});
|
|
2510
|
+
|
|
2511
|
+
it("NaN does not pass where score > 0", async () => {
|
|
2512
|
+
const data = { "/data": [{ score: NaN }, { score: 10 }] };
|
|
2513
|
+
const { ctx } = await run("job q { find /data | where score > 0 | save /out }", data);
|
|
2514
|
+
expect(ctx.world.written["/out"]).toEqual([{ score: 10 }]);
|
|
2515
|
+
});
|
|
2516
|
+
|
|
2517
|
+
it("normal finite values still pass where comparisons", async () => {
|
|
2518
|
+
const data = { "/data": [{ score: 75 }, { score: 30 }, { score: 100 }] };
|
|
2519
|
+
const { ctx } = await run("job q { find /data | where score > 50 | save /out }", data);
|
|
2520
|
+
expect(ctx.world.written["/out"]).toEqual([{ score: 75 }, { score: 100 }]);
|
|
2521
|
+
});
|
|
2522
|
+
|
|
2523
|
+
it("where == still works with non-numeric equality (not affected by finite guard)", async () => {
|
|
2524
|
+
const data = { "/data": [{ name: "alice" }, { name: "bob" }] };
|
|
2525
|
+
const { ctx } = await run('job q { find /data | where name == "alice" | save /out }', data);
|
|
2526
|
+
expect(ctx.world.written["/out"]).toEqual([{ name: "alice" }]);
|
|
2527
|
+
});
|
|
2528
|
+
});
|
|
2529
|
+
|
|
2530
|
+
// ── WorldInterface.input ──
|
|
2531
|
+
|
|
2532
|
+
describe("WorldInterface.input", () => {
|
|
2533
|
+
it("input calls world.input when available and populates response field", async () => {
|
|
2534
|
+
const world = makeWorld();
|
|
2535
|
+
world.input = async (prompt: string) => `answer to ${prompt}`;
|
|
2536
|
+
const logger = makeLogger();
|
|
2537
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger };
|
|
2538
|
+
const ast = parser.parse(lexer.tokenize('job q { input "name?" | save /world/out }'));
|
|
2539
|
+
const prog = compiler.compile(ast);
|
|
2540
|
+
await prog.jobs[0].execute(ctx);
|
|
2541
|
+
expect(world.written["/world/out"]).toEqual([
|
|
2542
|
+
{ prompt: "name?", response: "answer to name?" },
|
|
2543
|
+
]);
|
|
2544
|
+
});
|
|
2545
|
+
|
|
2546
|
+
it("input falls back to empty response when world.input is not defined", async () => {
|
|
2547
|
+
const { ctx } = await run('job q { input "name?" | save /world/out }');
|
|
2548
|
+
expect(ctx.world.written["/world/out"]).toEqual([
|
|
2549
|
+
{ prompt: "name?", response: "" },
|
|
2550
|
+
]);
|
|
2551
|
+
});
|
|
2552
|
+
|
|
2553
|
+
it("input | map response resolves the response field correctly", async () => {
|
|
2554
|
+
const world = makeWorld();
|
|
2555
|
+
world.input = async () => "hello";
|
|
2556
|
+
const logger = makeLogger();
|
|
2557
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger };
|
|
2558
|
+
const ast = parser.parse(lexer.tokenize('job q { input "say" | map response | save /world/out }'));
|
|
2559
|
+
const prog = compiler.compile(ast);
|
|
2560
|
+
await prog.jobs[0].execute(ctx);
|
|
2561
|
+
expect(world.written["/world/out"]).toEqual(["hello"]);
|
|
2562
|
+
});
|
|
2563
|
+
|
|
2564
|
+
it("input | map { msg: response } works with expression mappings", async () => {
|
|
2565
|
+
const world = makeWorld();
|
|
2566
|
+
world.input = async () => "world";
|
|
2567
|
+
const logger = makeLogger();
|
|
2568
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger };
|
|
2569
|
+
const ast = parser.parse(lexer.tokenize('job q { input "greeting" | map { msg: "hello " + response } | save /world/out }'));
|
|
2570
|
+
const prog = compiler.compile(ast);
|
|
2571
|
+
await prog.jobs[0].execute(ctx);
|
|
2572
|
+
expect(world.written["/world/out"]).toEqual([{ msg: "hello world" }]);
|
|
2573
|
+
});
|
|
2574
|
+
});
|
|
2575
|
+
|
|
2576
|
+
// ── output with expression ──
|
|
2577
|
+
|
|
2578
|
+
describe("output with expression", () => {
|
|
2579
|
+
it("output field_access evaluates against stream items", async () => {
|
|
2580
|
+
const data = { "/world/d": [{ name: "Alice" }, { name: "Bob" }] };
|
|
2581
|
+
const world = makeWorld(data);
|
|
2582
|
+
const logger = makeLogger();
|
|
2583
|
+
const outputs: string[] = [];
|
|
2584
|
+
const ctx: JobContext = {
|
|
2585
|
+
world,
|
|
2586
|
+
caps: new Set(["*"]),
|
|
2587
|
+
logger,
|
|
2588
|
+
output: { output(event: OutputEvent) { outputs.push(event.content); } },
|
|
2589
|
+
};
|
|
2590
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output name | save /world/out }'));
|
|
2591
|
+
const prog = compiler.compile(ast);
|
|
2592
|
+
await prog.jobs[0].execute(ctx);
|
|
2593
|
+
expect(outputs).toEqual(["Alice", "Bob"]);
|
|
2594
|
+
// Stream passes through unchanged
|
|
2595
|
+
expect(world.written["/world/out"]).toEqual([{ name: "Alice" }, { name: "Bob" }]);
|
|
2596
|
+
});
|
|
2597
|
+
|
|
2598
|
+
it("output string_literal still works (backward compat)", async () => {
|
|
2599
|
+
const data = { "/world/d": [{ x: 1 }] };
|
|
2600
|
+
const world = makeWorld(data);
|
|
2601
|
+
const logger = makeLogger();
|
|
2602
|
+
const outputs: string[] = [];
|
|
2603
|
+
const ctx: JobContext = {
|
|
2604
|
+
world,
|
|
2605
|
+
caps: new Set(["*"]),
|
|
2606
|
+
logger,
|
|
2607
|
+
output: { output(event: OutputEvent) { outputs.push(event.content); } },
|
|
2608
|
+
};
|
|
2609
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output "hello" | save /world/out }'));
|
|
2610
|
+
const prog = compiler.compile(ast);
|
|
2611
|
+
await prog.jobs[0].execute(ctx);
|
|
2612
|
+
expect(outputs).toEqual(["hello"]);
|
|
2613
|
+
});
|
|
2614
|
+
|
|
2615
|
+
it("output binary expression evaluates correctly", async () => {
|
|
2616
|
+
const data = { "/world/d": [{ name: "Alice", age: 30 }] };
|
|
2617
|
+
const world = makeWorld(data);
|
|
2618
|
+
const logger = makeLogger();
|
|
2619
|
+
const outputs: string[] = [];
|
|
2620
|
+
const ctx: JobContext = {
|
|
2621
|
+
world,
|
|
2622
|
+
caps: new Set(["*"]),
|
|
2623
|
+
logger,
|
|
2624
|
+
output: { output(event: OutputEvent) { outputs.push(event.content); } },
|
|
2625
|
+
};
|
|
2626
|
+
const ast = parser.parse(lexer.tokenize('job q { find /world/d | output "Name: " + name | save /world/out }'));
|
|
2627
|
+
const prog = compiler.compile(ast);
|
|
2628
|
+
await prog.jobs[0].execute(ctx);
|
|
2629
|
+
expect(outputs).toEqual(["Name: Alice"]);
|
|
2630
|
+
});
|
|
2631
|
+
});
|
|
2632
|
+
|
|
2633
|
+
// ── Template Parameters: Compiler Integration Tests ──
|
|
2634
|
+
|
|
2635
|
+
describe("Template Parameters: Stream Merge Model", () => {
|
|
2636
|
+
it("no params: action passes full stream as input", async () => {
|
|
2637
|
+
const data = { "/world/d": [{ text: "hi" }] };
|
|
2638
|
+
const { ctx } = await run("job q { find /world/d | action /tools/send }", data);
|
|
2639
|
+
expect(ctx.world.execCalls[0].input).toEqual([{ text: "hi" }]);
|
|
2640
|
+
expect(ctx.world.execCalls[0].params).toBeUndefined();
|
|
2641
|
+
});
|
|
2642
|
+
|
|
2643
|
+
it("template params: action receives ONLY resolved params, not stream", async () => {
|
|
2644
|
+
const data = { "/world/d": [{ name: "Bob", age: 30 }] };
|
|
2645
|
+
const { ctx } = await run(
|
|
2646
|
+
'job q { find /world/d | action /tools/send { text: "Hello ' + "${name}" + '" } }',
|
|
2647
|
+
data,
|
|
2648
|
+
);
|
|
2649
|
+
expect(ctx.world.execCalls[0].input).toEqual([]);
|
|
2650
|
+
expect(ctx.world.execCalls[0].params).toEqual({ text: "Hello Bob" });
|
|
2651
|
+
});
|
|
2652
|
+
|
|
2653
|
+
it("static params: action receives ONLY params, not stream", async () => {
|
|
2654
|
+
const data = { "/world/d": [{ name: "Bob" }] };
|
|
2655
|
+
const { ctx } = await run(
|
|
2656
|
+
'job q { find /world/d | action /tools/send { text: "fixed" } }',
|
|
2657
|
+
data,
|
|
2658
|
+
);
|
|
2659
|
+
expect(ctx.world.execCalls[0].input).toEqual([]);
|
|
2660
|
+
expect(ctx.world.execCalls[0].params).toEqual({ text: "fixed" });
|
|
2661
|
+
});
|
|
2662
|
+
|
|
2663
|
+
it("multi-record stream with template params: N exec calls", async () => {
|
|
2664
|
+
const data = { "/world/d": [{ name: "Alice" }, { name: "Bob" }] };
|
|
2665
|
+
const { ctx } = await run(
|
|
2666
|
+
'job q { find /world/d | action /tools/greet { msg: "Hi ' + "${name}" + '" } }',
|
|
2667
|
+
data,
|
|
2668
|
+
);
|
|
2669
|
+
expect(ctx.world.execCalls).toHaveLength(2);
|
|
2670
|
+
expect(ctx.world.execCalls[0].params).toEqual({ msg: "Hi Alice" });
|
|
2671
|
+
expect(ctx.world.execCalls[1].params).toEqual({ msg: "Hi Bob" });
|
|
2672
|
+
expect(ctx.world.execCalls[0].input).toEqual([]);
|
|
2673
|
+
expect(ctx.world.execCalls[1].input).toEqual([]);
|
|
2674
|
+
});
|
|
2675
|
+
|
|
2676
|
+
it("whole-value template preserves type (number)", async () => {
|
|
2677
|
+
const data = { "/world/d": [{ score: 42 }] };
|
|
2678
|
+
const { ctx } = await run(
|
|
2679
|
+
'job q { find /world/d | action /tools/check { val: "' + "${score}" + '" } }',
|
|
2680
|
+
data,
|
|
2681
|
+
);
|
|
2682
|
+
expect(ctx.world.execCalls[0].params).toEqual({ val: 42 });
|
|
2683
|
+
});
|
|
2684
|
+
|
|
2685
|
+
it("whole-value template preserves type (boolean)", async () => {
|
|
2686
|
+
const data = { "/world/d": [{ active: true }] };
|
|
2687
|
+
const { ctx } = await run(
|
|
2688
|
+
'job q { find /world/d | action /tools/check { flag: "' + "${active}" + '" } }',
|
|
2689
|
+
data,
|
|
2690
|
+
);
|
|
2691
|
+
expect(ctx.world.execCalls[0].params).toEqual({ flag: true });
|
|
2692
|
+
});
|
|
2693
|
+
});
|
|
2694
|
+
|
|
2695
|
+
describe("Template Parameters: Template Paths", () => {
|
|
2696
|
+
it("find with template path resolves from stream", async () => {
|
|
2697
|
+
const data = {
|
|
2698
|
+
"/world/events": [{ category: "sports" }],
|
|
2699
|
+
"/world/sports/items": [{ id: 1, title: "Game" }],
|
|
2700
|
+
};
|
|
2701
|
+
const world = makeWorld(data);
|
|
2702
|
+
const logger = makeLogger();
|
|
2703
|
+
// First job puts event in stream, second find uses template path
|
|
2704
|
+
// Simulate: action produces event, then find uses template
|
|
2705
|
+
// We'll use a two-stage approach: initial stream → find with template
|
|
2706
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger };
|
|
2707
|
+
const source = "job q { find /world/events | find /world/" + "${category}" + "/items }";
|
|
2708
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
2709
|
+
const prog = compiler.compile(ast);
|
|
2710
|
+
const result = await prog.jobs[0].execute(ctx);
|
|
2711
|
+
expect(result.status).toBe("ok");
|
|
2712
|
+
expect(result.recordCount).toBe(1);
|
|
2713
|
+
});
|
|
2714
|
+
|
|
2715
|
+
it("action with template path resolves per-record", async () => {
|
|
2716
|
+
const data = { "/world/d": [{ svc: "email" }, { svc: "sms" }] };
|
|
2717
|
+
const { ctx } = await run(
|
|
2718
|
+
"job q { find /world/d | action /tools/" + "${svc}" + "/.actions/send }",
|
|
2719
|
+
data,
|
|
2720
|
+
);
|
|
2721
|
+
expect(ctx.world.execCalls).toHaveLength(2);
|
|
2722
|
+
expect(ctx.world.execCalls[0].path).toBe("/tools/email/.actions/send");
|
|
2723
|
+
expect(ctx.world.execCalls[1].path).toBe("/tools/sms/.actions/send");
|
|
2724
|
+
});
|
|
2725
|
+
|
|
2726
|
+
it("action with template path + template params", async () => {
|
|
2727
|
+
const data = { "/world/d": [{ svc: "email", to: "alice@test.com" }] };
|
|
2728
|
+
const { ctx } = await run(
|
|
2729
|
+
'job q { find /world/d | action /tools/' + "${svc}" + '/.actions/send { recipient: "' + "${to}" + '" } }',
|
|
2730
|
+
data,
|
|
2731
|
+
);
|
|
2732
|
+
expect(ctx.world.execCalls[0].path).toBe("/tools/email/.actions/send");
|
|
2733
|
+
expect(ctx.world.execCalls[0].params).toEqual({ recipient: "alice@test.com" });
|
|
2734
|
+
expect(ctx.world.execCalls[0].input).toEqual([]);
|
|
2735
|
+
});
|
|
2736
|
+
});
|
|
2737
|
+
|
|
2738
|
+
describe("Template Parameters: Security", () => {
|
|
2739
|
+
it("template path with empty segment throws runtime error", async () => {
|
|
2740
|
+
const data = { "/world/d": [{ category: "" }] };
|
|
2741
|
+
const { result } = await run(
|
|
2742
|
+
"job q { find /world/d | action /store/" + "${category}" + "/.actions/save }",
|
|
2743
|
+
data,
|
|
2744
|
+
);
|
|
2745
|
+
expect(result.status).toBe("error");
|
|
2746
|
+
expect(result.errors[0]).toContain("empty segment");
|
|
2747
|
+
});
|
|
2748
|
+
|
|
2749
|
+
it("template path with traversal throws runtime error", async () => {
|
|
2750
|
+
const data = { "/world/d": [{ category: ".." }] };
|
|
2751
|
+
const { result } = await run(
|
|
2752
|
+
"job q { find /world/d | action /store/" + "${category}" + "/.actions/save }",
|
|
2753
|
+
data,
|
|
2754
|
+
);
|
|
2755
|
+
expect(result.status).toBe("error");
|
|
2756
|
+
expect(result.errors[0]).toContain("path traversal");
|
|
2757
|
+
});
|
|
2758
|
+
|
|
2759
|
+
it("caps check on resolved template path", async () => {
|
|
2760
|
+
const data: Record<string, unknown[]> = { "/world/d": [{ svc: "forbidden" }] };
|
|
2761
|
+
const { result } = await run(
|
|
2762
|
+
'@caps(read /world/* exec /tools/email/*)\n@budget(actions 10)\njob q { find /world/d | action /tools/' + "${svc}" + '/.actions/send }',
|
|
2763
|
+
data,
|
|
2764
|
+
);
|
|
2765
|
+
expect(result.status).toBe("error");
|
|
2766
|
+
expect(result.errors[0]).toContain("Permission denied");
|
|
2767
|
+
});
|
|
2768
|
+
|
|
2769
|
+
it("budget counted per-record for action with inline params", async () => {
|
|
2770
|
+
const data: Record<string, unknown[]> = { "/world/d": [{ x: 1 }, { x: 2 }, { x: 3 }] };
|
|
2771
|
+
const { result } = await run(
|
|
2772
|
+
'@caps(read /world/* exec /tools/*)\n@budget(actions 2)\njob q { find /world/d | action /tools/do { val: "' + "${x}" + '" } }',
|
|
2773
|
+
data,
|
|
2774
|
+
);
|
|
2775
|
+
// Budget is 2, but 3 records → should exceed
|
|
2776
|
+
expect(result.status).toBe("error");
|
|
2777
|
+
expect(result.errors[0]).toContain("budget exceeded");
|
|
2778
|
+
});
|
|
2779
|
+
});
|
|
2780
|
+
|
|
2781
|
+
describe("Initial Stream Context: action path resolves from initial stream, params from current stream", () => {
|
|
2782
|
+
it("action path template resolves from initial stream, not piped stream", async () => {
|
|
2783
|
+
// Scenario: trigger event has {data: {conversationId: "42"}},
|
|
2784
|
+
// find replaces stream with DB records, then action path uses ${data.conversationId}
|
|
2785
|
+
const data: Record<string, unknown[]> = {
|
|
2786
|
+
"/world/inbox": [{ content: "hello from DB" }],
|
|
2787
|
+
};
|
|
2788
|
+
const world = makeWorld(data);
|
|
2789
|
+
// exec for LLM returns different data (no conversationId)
|
|
2790
|
+
world.exec = async (path: string, input: unknown[], params?: Record<string, unknown>) => {
|
|
2791
|
+
world.execCalls.push({ path, input, params });
|
|
2792
|
+
return [{ text: "LLM response" }];
|
|
2793
|
+
};
|
|
2794
|
+
const logger = makeLogger();
|
|
2795
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger };
|
|
2796
|
+
|
|
2797
|
+
const source = "job handler { find /world/inbox | action /conversations/" + "${data.conversationId}" + "/.actions/send }";
|
|
2798
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
2799
|
+
const prog = compiler.compile(ast);
|
|
2800
|
+
|
|
2801
|
+
// Execute with initial stream = trigger event
|
|
2802
|
+
const initialStream = [{ data: { conversationId: "42" }, type: "created", path: "/inbox" }];
|
|
2803
|
+
const result = await prog.jobs[0].execute(ctx, initialStream);
|
|
2804
|
+
|
|
2805
|
+
expect(result.status).toBe("ok");
|
|
2806
|
+
// Path should resolve from initial stream (conversationId: "42"), not from find output
|
|
2807
|
+
expect(world.execCalls[0].path).toBe("/conversations/42/.actions/send");
|
|
2808
|
+
});
|
|
2809
|
+
|
|
2810
|
+
it("multi-stage pipe preserves initial context for path resolution", async () => {
|
|
2811
|
+
// Pipeline: action /llm/complete { text: "prompt" } | action /out/${data.convId}/.actions/send
|
|
2812
|
+
// Initial stream: {data: {convId: "99"}}
|
|
2813
|
+
// LLM returns: {text: "response"} (no convId)
|
|
2814
|
+
const world = makeWorld({});
|
|
2815
|
+
let callCount = 0;
|
|
2816
|
+
world.exec = async (path: string, input: unknown[], params?: Record<string, unknown>) => {
|
|
2817
|
+
world.execCalls.push({ path, input, params });
|
|
2818
|
+
callCount++;
|
|
2819
|
+
if (callCount === 1) {
|
|
2820
|
+
// First action (LLM) returns new data
|
|
2821
|
+
return [{ text: "AI response" }];
|
|
2822
|
+
}
|
|
2823
|
+
// Second action (send) — passthrough
|
|
2824
|
+
return input;
|
|
2825
|
+
};
|
|
2826
|
+
const logger = makeLogger();
|
|
2827
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger };
|
|
2828
|
+
|
|
2829
|
+
const source = 'job handler { action /llm/.actions/complete { text: "prompt" } | action /out/' + "${data.convId}" + '/.actions/send { msg: "' + "${text}" + '" } }';
|
|
2830
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
2831
|
+
const prog = compiler.compile(ast);
|
|
2832
|
+
|
|
2833
|
+
const initialStream = [{ data: { convId: "99" }, type: "created" }];
|
|
2834
|
+
const result = await prog.jobs[0].execute(ctx, initialStream);
|
|
2835
|
+
|
|
2836
|
+
expect(result.status).toBe("ok");
|
|
2837
|
+
// Second action path resolves from initial stream
|
|
2838
|
+
expect(world.execCalls[1].path).toBe("/out/99/.actions/send");
|
|
2839
|
+
// Second action params resolve from current stream (LLM output)
|
|
2840
|
+
expect(world.execCalls[1].params).toEqual({ msg: "AI response" });
|
|
2841
|
+
});
|
|
2842
|
+
|
|
2843
|
+
it("no initial stream — path template falls back to current stream record", async () => {
|
|
2844
|
+
// When there's no initial stream (on-demand exec, not trigger), path resolves from current stream
|
|
2845
|
+
const data: Record<string, unknown[]> = {
|
|
2846
|
+
"/world/d": [{ svc: "email" }],
|
|
2847
|
+
};
|
|
2848
|
+
const { ctx } = await run(
|
|
2849
|
+
"job q { find /world/d | action /tools/" + "${svc}" + "/.actions/send }",
|
|
2850
|
+
data,
|
|
2851
|
+
);
|
|
2852
|
+
// Without initial stream, path resolves from current stream record — same as before
|
|
2853
|
+
expect(ctx.world.execCalls[0].path).toBe("/tools/email/.actions/send");
|
|
2854
|
+
});
|
|
2855
|
+
|
|
2856
|
+
it("params always resolve from current stream, not initial stream", async () => {
|
|
2857
|
+
// Initial stream has {name: "event-name"}, find produces {name: "db-name"}
|
|
2858
|
+
// Params should use "db-name" (current stream), not "event-name" (initial stream)
|
|
2859
|
+
const data: Record<string, unknown[]> = {
|
|
2860
|
+
"/world/d": [{ name: "db-name", id: 1 }],
|
|
2861
|
+
};
|
|
2862
|
+
const world = makeWorld(data);
|
|
2863
|
+
world.exec = async (path: string, input: unknown[], params?: Record<string, unknown>) => {
|
|
2864
|
+
world.execCalls.push({ path, input, params });
|
|
2865
|
+
return input;
|
|
2866
|
+
};
|
|
2867
|
+
const logger = makeLogger();
|
|
2868
|
+
const ctx: JobContext = { world, caps: new Set(["*"]), logger };
|
|
2869
|
+
|
|
2870
|
+
const source = 'job handler { find /world/d | action /target/.actions/do { greeting: "Hello ' + "${name}" + '" } }';
|
|
2871
|
+
const ast = parser.parse(lexer.tokenize(source));
|
|
2872
|
+
const prog = compiler.compile(ast);
|
|
2873
|
+
|
|
2874
|
+
const initialStream = [{ name: "event-name", type: "created" }];
|
|
2875
|
+
const result = await prog.jobs[0].execute(ctx, initialStream);
|
|
2876
|
+
|
|
2877
|
+
expect(result.status).toBe("ok");
|
|
2878
|
+
// Params resolve from current stream (find output), not initial stream
|
|
2879
|
+
expect(world.execCalls[0].params).toEqual({ greeting: "Hello db-name" });
|
|
2880
|
+
});
|
|
2881
|
+
});
|
|
2882
|
+
|
|
2883
|
+
// ── P0: @caps runtime enforcement for find/save/publish/tee ──
|
|
2884
|
+
|
|
2885
|
+
describe("@caps runtime enforcement — find/save/publish/tee", () => {
|
|
2886
|
+
it("@caps(read /allowed/*) blocks find on /forbidden/ at runtime", async () => {
|
|
2887
|
+
const data = { "/forbidden/secrets": [{ x: 1 }] };
|
|
2888
|
+
const { result } = await run(
|
|
2889
|
+
"@caps(read /allowed/*)\njob q { find /forbidden/secrets }",
|
|
2890
|
+
data,
|
|
2891
|
+
);
|
|
2892
|
+
expect(result.status).toBe("error");
|
|
2893
|
+
expect(result.errors.some((e: string) => e.includes("Permission denied") && e.includes("@caps"))).toBe(true);
|
|
2894
|
+
});
|
|
2895
|
+
|
|
2896
|
+
it("@caps(write /allowed/*) blocks save to /forbidden/ at runtime", async () => {
|
|
2897
|
+
const data = { "/world/x": [{ a: 1 }] };
|
|
2898
|
+
const { result, ctx } = await run(
|
|
2899
|
+
"@caps(read /world/* write /allowed/*)\njob q { find /world/x | save /forbidden/out }",
|
|
2900
|
+
data,
|
|
2901
|
+
);
|
|
2902
|
+
// save should be blocked — either error status or no write performed
|
|
2903
|
+
expect(ctx.world.written["/forbidden/out"]).toBeUndefined();
|
|
2904
|
+
});
|
|
2905
|
+
|
|
2906
|
+
it("@caps(write /allowed/*) blocks publish to /forbidden/ at runtime", async () => {
|
|
2907
|
+
const data = { "/world/x": [{ a: 1 }] };
|
|
2908
|
+
const { result, ctx } = await run(
|
|
2909
|
+
"@caps(read /world/* write /allowed/*)\njob q { find /world/x | publish /forbidden/events }",
|
|
2910
|
+
data,
|
|
2911
|
+
);
|
|
2912
|
+
expect(ctx.world.published["/forbidden/events"]).toBeUndefined();
|
|
2913
|
+
});
|
|
2914
|
+
|
|
2915
|
+
it("@caps(write /allowed/*) blocks tee to /forbidden/ at runtime", async () => {
|
|
2916
|
+
const data = { "/world/x": [{ a: 1 }] };
|
|
2917
|
+
const { result, ctx } = await run(
|
|
2918
|
+
"@caps(read /world/* write /allowed/*)\njob q { find /world/x | tee /forbidden/side | save /allowed/out }",
|
|
2919
|
+
data,
|
|
2920
|
+
);
|
|
2921
|
+
expect(ctx.world.written["/forbidden/side"]).toBeUndefined();
|
|
2922
|
+
});
|
|
2923
|
+
|
|
2924
|
+
it("@caps(exec /allowed/*) blocks absolute action passthrough to /forbidden/ at runtime", async () => {
|
|
2925
|
+
const data = { "/world/x": [{ a: 1 }] };
|
|
2926
|
+
const { result } = await run(
|
|
2927
|
+
"@caps(read /world/* exec /allowed/*)\njob q { find /world/x | action /forbidden/hack }",
|
|
2928
|
+
data,
|
|
2929
|
+
);
|
|
2930
|
+
expect(result.status).toBe("error");
|
|
2931
|
+
expect(result.errors.some((e: string) => e.includes("Permission denied") && e.includes("@caps"))).toBe(true);
|
|
2932
|
+
});
|
|
2933
|
+
});
|