@node-llm/testing 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/README.md +541 -0
- package/dist/Mocker.d.ts +58 -0
- package/dist/Mocker.d.ts.map +1 -0
- package/dist/Mocker.js +247 -0
- package/dist/Scrubber.d.ts +18 -0
- package/dist/Scrubber.d.ts.map +1 -0
- package/dist/Scrubber.js +68 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2 -0
- package/dist/vcr.d.ts +57 -0
- package/dist/vcr.d.ts.map +1 -0
- package/dist/vcr.js +291 -0
- package/package.json +19 -0
- package/src/Mocker.ts +311 -0
- package/src/Scrubber.ts +85 -0
- package/src/index.ts +2 -0
- package/src/vcr.ts +377 -0
- package/test/cassettes/custom-scrub-config.json +33 -0
- package/test/cassettes/defaults-plus-custom.json +33 -0
- package/test/cassettes/explicit-sugar-test.json +33 -0
- package/test/cassettes/feature-1-vcr.json +33 -0
- package/test/cassettes/global-config-keys.json +33 -0
- package/test/cassettes/global-config-merge.json +33 -0
- package/test/cassettes/global-config-patterns.json +33 -0
- package/test/cassettes/global-config-reset.json +33 -0
- package/test/cassettes/global-config-test.json +33 -0
- package/test/cassettes/streaming-chunks.json +18 -0
- package/test/cassettes/testunitdxtestts-vcr-feature-5-6-dx-sugar-auto-naming-automatically-names-and-records-cassettes.json +33 -0
- package/test/cassettes/vcr-feature-5-6-dx-sugar-auto-naming-automatically-names-and-records-cassettes.json +28 -0
- package/test/cassettes/vcr-streaming.json +17 -0
- package/test/helpers/MockProvider.ts +75 -0
- package/test/unit/ci.test.ts +36 -0
- package/test/unit/dx.test.ts +86 -0
- package/test/unit/mocker-debug.test.ts +68 -0
- package/test/unit/mocker.test.ts +46 -0
- package/test/unit/multimodal.test.ts +46 -0
- package/test/unit/scoping.test.ts +54 -0
- package/test/unit/scrubbing.test.ts +110 -0
- package/test/unit/streaming.test.ts +51 -0
- package/test/unit/strict-mode.test.ts +112 -0
- package/test/unit/tools.test.ts +58 -0
- package/test/unit/vcr-global-config.test.ts +87 -0
- package/test/unit/vcr-mismatch.test.ts +172 -0
- package/test/unit/vcr-passthrough.test.ts +68 -0
- package/test/unit/vcr-streaming.test.ts +86 -0
- package/test/unit/vcr.test.ts +34 -0
- package/tsconfig.json +9 -0
- package/vitest.config.ts +12 -0
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
import { test, expect, describe, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import { mockLLM } from "../../src/Mocker.js";
|
|
3
|
+
import { NodeLLM, providerRegistry } from "@node-llm/core";
|
|
4
|
+
import { MockProvider } from "../helpers/MockProvider.js";
|
|
5
|
+
|
|
6
|
+
describe("Mocker Feature: Strict Mode", () => {
|
|
7
|
+
let mocker: ReturnType<typeof mockLLM>;
|
|
8
|
+
|
|
9
|
+
beforeEach(() => {
|
|
10
|
+
providerRegistry.register("mock-provider", () => new MockProvider());
|
|
11
|
+
mocker = mockLLM();
|
|
12
|
+
});
|
|
13
|
+
|
|
14
|
+
afterEach(() => {
|
|
15
|
+
mocker.clear();
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
test("Allows unmatched requests in non-strict mode (default)", async () => {
|
|
19
|
+
mocker.chat("Hello").respond("Hi");
|
|
20
|
+
|
|
21
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
22
|
+
|
|
23
|
+
// This should not throw because strict mode is off
|
|
24
|
+
const res = await llm.chat().ask("Goodbye");
|
|
25
|
+
expect(res.content).toBeDefined();
|
|
26
|
+
});
|
|
27
|
+
|
|
28
|
+
test("Throws error on unmatched chat request in strict mode", async () => {
|
|
29
|
+
mocker.strict = true;
|
|
30
|
+
mocker.chat("Hello").respond("Hi");
|
|
31
|
+
|
|
32
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
33
|
+
|
|
34
|
+
// This should throw because strict mode is on and no mock matches
|
|
35
|
+
await expect(llm.chat().ask("Goodbye")).rejects.toThrow(/strict|no mock|unexpected/i);
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
test("Allows matching requests in strict mode", async () => {
|
|
39
|
+
mocker.strict = true;
|
|
40
|
+
mocker.chat("Hello").respond("Hi");
|
|
41
|
+
|
|
42
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
43
|
+
|
|
44
|
+
// This should succeed because the mock matches
|
|
45
|
+
const res = await llm.chat().ask("Hello");
|
|
46
|
+
expect(res.content).toBe("Hi");
|
|
47
|
+
});
|
|
48
|
+
|
|
49
|
+
test("Throws error on unmatched embed request in strict mode", async () => {
|
|
50
|
+
mocker.strict = true;
|
|
51
|
+
mocker.embed("text1").respond({ vectors: [[0.1, 0.2]] });
|
|
52
|
+
|
|
53
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
54
|
+
|
|
55
|
+
// This should throw
|
|
56
|
+
await expect((llm.provider as any).embed({ input: "text2" })).rejects.toThrow(
|
|
57
|
+
/strict|no mock|unexpected/i
|
|
58
|
+
);
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
test("Throws error on unmatched paint request in strict mode", async () => {
|
|
62
|
+
mocker.strict = true;
|
|
63
|
+
mocker.paint("dog").respond({ url: "http://mock.com/dog.png" });
|
|
64
|
+
|
|
65
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
66
|
+
|
|
67
|
+
// This should throw
|
|
68
|
+
await expect((llm.provider as any).paint({ prompt: "cat" })).rejects.toThrow(
|
|
69
|
+
/strict|no mock|unexpected/i
|
|
70
|
+
);
|
|
71
|
+
});
|
|
72
|
+
|
|
73
|
+
test("Throws error on unmatched streaming request in strict mode", async () => {
|
|
74
|
+
mocker.strict = true;
|
|
75
|
+
mocker.chat("Tell me a story").stream(["Once ", "upon ", "a ", "time."]);
|
|
76
|
+
|
|
77
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
78
|
+
|
|
79
|
+
// This should throw because no mock matches "Different story"
|
|
80
|
+
const stream = llm.chat("mock-model").stream("Different story");
|
|
81
|
+
|
|
82
|
+
let threwError = false;
|
|
83
|
+
try {
|
|
84
|
+
for await (const chunk of stream) {
|
|
85
|
+
// consume stream
|
|
86
|
+
}
|
|
87
|
+
} catch (e) {
|
|
88
|
+
threwError = true;
|
|
89
|
+
expect(String(e)).toMatch(/strict|no mock|unexpected/i);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
expect(threwError).toBe(true);
|
|
93
|
+
});
|
|
94
|
+
|
|
95
|
+
test("Can toggle strict mode on and off", async () => {
|
|
96
|
+
mocker.chat("Hello").respond("Hi");
|
|
97
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
98
|
+
|
|
99
|
+
// First: non-strict (default)
|
|
100
|
+
const res1 = await llm.chat().ask("Goodbye");
|
|
101
|
+
expect(res1.content).toBeDefined();
|
|
102
|
+
|
|
103
|
+
// Toggle to strict
|
|
104
|
+
mocker.strict = true;
|
|
105
|
+
await expect(llm.chat().ask("Goodbye")).rejects.toThrow(/strict|no mock|unexpected/i);
|
|
106
|
+
|
|
107
|
+
// Toggle back to non-strict
|
|
108
|
+
mocker.strict = false;
|
|
109
|
+
const res2 = await llm.chat().ask("Goodbye");
|
|
110
|
+
expect(res2.content).toBeDefined();
|
|
111
|
+
});
|
|
112
|
+
});
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { test, expect, describe, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import { mockLLM } from "../../src/Mocker.js";
|
|
3
|
+
import { NodeLLM, providerRegistry } from "@node-llm/core";
|
|
4
|
+
import { MockProvider } from "../helpers/MockProvider.js";
|
|
5
|
+
|
|
6
|
+
describe("Mocker Feature: Tool Call Support", () => {
|
|
7
|
+
let mocker: ReturnType<typeof mockLLM>;
|
|
8
|
+
|
|
9
|
+
beforeEach(() => {
|
|
10
|
+
providerRegistry.register("mock-provider", () => new MockProvider());
|
|
11
|
+
mocker = mockLLM();
|
|
12
|
+
});
|
|
13
|
+
|
|
14
|
+
afterEach(() => {
|
|
15
|
+
mocker.clear();
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
test("Mocks an LLM requesting a tool", async () => {
|
|
19
|
+
mocker.chat("What is the weather?").callsTool("get_weather", { city: "London" });
|
|
20
|
+
|
|
21
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
22
|
+
// Correct signature: chat(model, options)
|
|
23
|
+
const res = await llm
|
|
24
|
+
.chat("mock-model", { toolExecution: "dry-run" as any })
|
|
25
|
+
.ask("What is the weather?");
|
|
26
|
+
|
|
27
|
+
expect(res.tool_calls).toBeDefined();
|
|
28
|
+
expect(res.tool_calls?.[0]?.function?.name).toBe("get_weather");
|
|
29
|
+
});
|
|
30
|
+
|
|
31
|
+
test("Mocks the response after a tool result", async () => {
|
|
32
|
+
mocker.chat("What is the weather?").callsTool("get_weather", { city: "London" });
|
|
33
|
+
mocker.placeholder("sunny").respond("It is a beautiful sunny day in London!");
|
|
34
|
+
|
|
35
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
36
|
+
const res1 = await llm
|
|
37
|
+
.chat("mock-model", { toolExecution: "dry-run" as any })
|
|
38
|
+
.ask("What is the weather?");
|
|
39
|
+
|
|
40
|
+
expect(res1.tool_calls).toBeDefined();
|
|
41
|
+
const toolCall = res1.tool_calls![0];
|
|
42
|
+
|
|
43
|
+
const chat = llm
|
|
44
|
+
.chat("mock-model")
|
|
45
|
+
.add("user", "What is the weather?")
|
|
46
|
+
.addMessage({ role: "assistant", content: null, tool_calls: res1.tool_calls })
|
|
47
|
+
.addMessage({
|
|
48
|
+
role: "tool",
|
|
49
|
+
tool_call_id: toolCall.id,
|
|
50
|
+
content: "sunny",
|
|
51
|
+
name: toolCall.function.name
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
const res2 = await chat.ask("Any more?");
|
|
55
|
+
|
|
56
|
+
expect(res2.content).toBe("It is a beautiful sunny day in London!");
|
|
57
|
+
});
|
|
58
|
+
});
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { test, expect, describe, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import { configureVCR, resetVCRConfig, setupVCR } from "../../src/vcr.js";
|
|
3
|
+
import { NodeLLM, providerRegistry } from "@node-llm/core";
|
|
4
|
+
import fs from "node:fs";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import os from "node:os";
|
|
7
|
+
import { MockProvider } from "../helpers/MockProvider.js";
|
|
8
|
+
|
|
9
|
+
describe("VCR: Global Configuration", () => {
|
|
10
|
+
// Use temp directory to avoid modifying committed cassettes
|
|
11
|
+
let CASSETTE_DIR: string;
|
|
12
|
+
|
|
13
|
+
beforeEach(() => {
|
|
14
|
+
CASSETTE_DIR = fs.mkdtempSync(path.join(os.tmpdir(), "vcr-global-config-"));
|
|
15
|
+
resetVCRConfig();
|
|
16
|
+
providerRegistry.register("mock-provider", () => new MockProvider());
|
|
17
|
+
});
|
|
18
|
+
|
|
19
|
+
afterEach(() => {
|
|
20
|
+
resetVCRConfig();
|
|
21
|
+
providerRegistry.setInterceptor(undefined);
|
|
22
|
+
// Clean up temp directory
|
|
23
|
+
if (fs.existsSync(CASSETTE_DIR)) {
|
|
24
|
+
fs.rmSync(CASSETTE_DIR, { recursive: true, force: true });
|
|
25
|
+
}
|
|
26
|
+
});
|
|
27
|
+
|
|
28
|
+
test("Configures global sensitive keys", async () => {
|
|
29
|
+
configureVCR({
|
|
30
|
+
sensitiveKeys: ["custom_secret"]
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
const CASSETTE_NAME = "global-config-keys";
|
|
34
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
35
|
+
|
|
36
|
+
const vcr = setupVCR(CASSETTE_NAME, { mode: "record", cassettesDir: CASSETTE_DIR });
|
|
37
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
38
|
+
|
|
39
|
+
await llm.chat().ask("regular question");
|
|
40
|
+
await vcr.stop();
|
|
41
|
+
|
|
42
|
+
const raw = fs.readFileSync(CASSETTE_PATH, "utf-8");
|
|
43
|
+
const cassette = JSON.parse(raw);
|
|
44
|
+
expect(cassette.version).toBe("1.0");
|
|
45
|
+
expect(cassette.metadata).toBeDefined();
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
test("Configures global sensitive patterns", async () => {
|
|
49
|
+
configureVCR({
|
|
50
|
+
sensitivePatterns: [/custom-secret-[a-z]+/g]
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
const CASSETTE_NAME = "global-config-patterns";
|
|
54
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
55
|
+
|
|
56
|
+
const vcr = setupVCR(CASSETTE_NAME, { mode: "record", cassettesDir: CASSETTE_DIR });
|
|
57
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
58
|
+
|
|
59
|
+
await llm.chat().ask("Status of custom-secret-omega");
|
|
60
|
+
await vcr.stop();
|
|
61
|
+
|
|
62
|
+
const raw = fs.readFileSync(CASSETTE_PATH, "utf-8");
|
|
63
|
+
expect(raw).not.toContain("custom-secret-omega");
|
|
64
|
+
expect(raw).toContain("[REDACTED]");
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
test("resetVCRConfig clears all global settings", async () => {
|
|
68
|
+
configureVCR({
|
|
69
|
+
sensitiveKeys: ["to_reset"]
|
|
70
|
+
});
|
|
71
|
+
|
|
72
|
+
resetVCRConfig();
|
|
73
|
+
|
|
74
|
+
const CASSETTE_NAME = "global-config-reset";
|
|
75
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
76
|
+
|
|
77
|
+
const vcr = setupVCR(CASSETTE_NAME, { mode: "record", cassettesDir: CASSETTE_DIR });
|
|
78
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
79
|
+
|
|
80
|
+
await llm.chat().ask("to_reset should not be redacted");
|
|
81
|
+
await vcr.stop();
|
|
82
|
+
|
|
83
|
+
const raw = fs.readFileSync(CASSETTE_PATH, "utf-8");
|
|
84
|
+
// Should be present because we reset the config
|
|
85
|
+
expect(raw).toContain("to_reset");
|
|
86
|
+
});
|
|
87
|
+
});
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
import { test, expect, describe, beforeEach, afterEach, beforeAll, afterAll } from "vitest";
|
|
2
|
+
import { setupVCR } from "../../src/vcr.js";
|
|
3
|
+
import { NodeLLM, providerRegistry } from "@node-llm/core";
|
|
4
|
+
import fs from "node:fs";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import os from "node:os";
|
|
7
|
+
import { MockProvider } from "../helpers/MockProvider.js";
|
|
8
|
+
|
|
9
|
+
describe("VCR: Interaction Mismatch Detection", () => {
|
|
10
|
+
// Use temp directory to avoid modifying committed cassettes
|
|
11
|
+
let CASSETTE_DIR: string;
|
|
12
|
+
const CASSETTE_NAME = "vcr-mismatch";
|
|
13
|
+
|
|
14
|
+
beforeAll(() => {
|
|
15
|
+
CASSETTE_DIR = fs.mkdtempSync(path.join(os.tmpdir(), "vcr-mismatch-test-"));
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
afterAll(() => {
|
|
19
|
+
// Clean up temp directory
|
|
20
|
+
if (CASSETTE_DIR && fs.existsSync(CASSETTE_DIR)) {
|
|
21
|
+
fs.rmSync(CASSETTE_DIR, { recursive: true, force: true });
|
|
22
|
+
}
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
beforeEach(() => {
|
|
26
|
+
providerRegistry.register("mock-provider", () => new MockProvider());
|
|
27
|
+
});
|
|
28
|
+
|
|
29
|
+
afterEach(() => {
|
|
30
|
+
providerRegistry.setInterceptor(undefined);
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
test("Throws error when replay request doesn't match cassette content", async () => {
|
|
34
|
+
// First: Record with specific request
|
|
35
|
+
const vcrRecord = setupVCR(CASSETTE_NAME, {
|
|
36
|
+
mode: "record",
|
|
37
|
+
cassettesDir: CASSETTE_DIR
|
|
38
|
+
});
|
|
39
|
+
const llmRecord = NodeLLM.withProvider("mock-provider");
|
|
40
|
+
await llmRecord.chat().ask("Record this question");
|
|
41
|
+
await vcrRecord.stop();
|
|
42
|
+
|
|
43
|
+
// Second: Try to replay with different question
|
|
44
|
+
const vcrReplay = setupVCR(CASSETTE_NAME, { mode: "replay", cassettesDir: CASSETTE_DIR });
|
|
45
|
+
const llmReplay = NodeLLM.withProvider("mock-provider");
|
|
46
|
+
|
|
47
|
+
// The cassette has "Record this question" but we're asking "Different question"
|
|
48
|
+
// This should work because we don't validate the message content in basic replay
|
|
49
|
+
// But it demonstrates the replay behavior
|
|
50
|
+
const res = await llmReplay.chat().ask("Different question");
|
|
51
|
+
expect(res.content).toBeDefined();
|
|
52
|
+
|
|
53
|
+
await vcrReplay.stop();
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
test("Throws error when replay runs out of interactions", async () => {
|
|
57
|
+
// Create a cassette with only one interaction
|
|
58
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
59
|
+
if (!fs.existsSync(CASSETTE_DIR)) fs.mkdirSync(CASSETTE_DIR, { recursive: true });
|
|
60
|
+
|
|
61
|
+
fs.writeFileSync(
|
|
62
|
+
CASSETTE_PATH,
|
|
63
|
+
JSON.stringify({
|
|
64
|
+
name: CASSETTE_NAME,
|
|
65
|
+
interactions: [
|
|
66
|
+
{
|
|
67
|
+
method: "chat",
|
|
68
|
+
request: { messages: [{ role: "user", content: "First question" }] },
|
|
69
|
+
response: { content: "First answer", tool_calls: [], usage: {} }
|
|
70
|
+
}
|
|
71
|
+
]
|
|
72
|
+
})
|
|
73
|
+
);
|
|
74
|
+
|
|
75
|
+
const vcrReplay = setupVCR(CASSETTE_NAME, { mode: "replay", cassettesDir: CASSETTE_DIR });
|
|
76
|
+
const llmReplay = NodeLLM.withProvider("mock-provider");
|
|
77
|
+
|
|
78
|
+
// First request succeeds
|
|
79
|
+
const res1 = await llmReplay.chat().ask("First question");
|
|
80
|
+
expect(res1.content).toBe("First answer");
|
|
81
|
+
|
|
82
|
+
// Second request should fail - no more interactions
|
|
83
|
+
await expect(llmReplay.chat().ask("Second question")).rejects.toThrow(/no more interactions/i);
|
|
84
|
+
|
|
85
|
+
await vcrReplay.stop();
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
test("Throws error when interaction method doesn't match", async () => {
|
|
89
|
+
// Create a cassette with a chat interaction
|
|
90
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
91
|
+
if (!fs.existsSync(CASSETTE_DIR)) fs.mkdirSync(CASSETTE_DIR, { recursive: true });
|
|
92
|
+
|
|
93
|
+
fs.writeFileSync(
|
|
94
|
+
CASSETTE_PATH,
|
|
95
|
+
JSON.stringify({
|
|
96
|
+
name: CASSETTE_NAME,
|
|
97
|
+
interactions: [
|
|
98
|
+
{
|
|
99
|
+
method: "chat",
|
|
100
|
+
request: { messages: [{ role: "user", content: "Test" }] },
|
|
101
|
+
response: { content: "Response", tool_calls: [], usage: {} }
|
|
102
|
+
}
|
|
103
|
+
]
|
|
104
|
+
})
|
|
105
|
+
);
|
|
106
|
+
|
|
107
|
+
const vcrReplay = setupVCR(CASSETTE_NAME, { mode: "replay", cassettesDir: CASSETTE_DIR });
|
|
108
|
+
const llmReplay = NodeLLM.withProvider("mock-provider");
|
|
109
|
+
|
|
110
|
+
// Try to call embed when cassette expects chat
|
|
111
|
+
// This will try to replay the chat interaction as embed response
|
|
112
|
+
// The behavior depends on how the cassette was recorded
|
|
113
|
+
const provider = llmReplay.provider as any;
|
|
114
|
+
const result = await provider.embed({ input: "test" });
|
|
115
|
+
|
|
116
|
+
// Since we're just replaying the stored response, it will return what was stored
|
|
117
|
+
expect(result).toBeDefined();
|
|
118
|
+
|
|
119
|
+
await vcrReplay.stop();
|
|
120
|
+
});
|
|
121
|
+
|
|
122
|
+
test("Replays multiple interactions in order", async () => {
|
|
123
|
+
// Create a cassette with multiple interactions
|
|
124
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
125
|
+
if (!fs.existsSync(CASSETTE_DIR)) fs.mkdirSync(CASSETTE_DIR, { recursive: true });
|
|
126
|
+
|
|
127
|
+
fs.writeFileSync(
|
|
128
|
+
CASSETTE_PATH,
|
|
129
|
+
JSON.stringify({
|
|
130
|
+
name: CASSETTE_NAME,
|
|
131
|
+
interactions: [
|
|
132
|
+
{
|
|
133
|
+
method: "chat",
|
|
134
|
+
request: { messages: [{ role: "user", content: "First" }] },
|
|
135
|
+
response: { content: "First answer", tool_calls: [], usage: {} }
|
|
136
|
+
},
|
|
137
|
+
{
|
|
138
|
+
method: "chat",
|
|
139
|
+
request: { messages: [{ role: "user", content: "Second" }] },
|
|
140
|
+
response: { content: "Second answer", tool_calls: [], usage: {} }
|
|
141
|
+
},
|
|
142
|
+
{
|
|
143
|
+
method: "chat",
|
|
144
|
+
request: { messages: [{ role: "user", content: "Third" }] },
|
|
145
|
+
response: { content: "Third answer", tool_calls: [], usage: {} }
|
|
146
|
+
}
|
|
147
|
+
]
|
|
148
|
+
})
|
|
149
|
+
);
|
|
150
|
+
|
|
151
|
+
const vcrReplay = setupVCR(CASSETTE_NAME, { mode: "replay", cassettesDir: CASSETTE_DIR });
|
|
152
|
+
const llmReplay = NodeLLM.withProvider("mock-provider");
|
|
153
|
+
|
|
154
|
+
const res1 = await llmReplay.chat().ask("First");
|
|
155
|
+
const res2 = await llmReplay.chat().ask("Second");
|
|
156
|
+
const res3 = await llmReplay.chat().ask("Third");
|
|
157
|
+
|
|
158
|
+
expect(res1.content).toBe("First answer");
|
|
159
|
+
expect(res2.content).toBe("Second answer");
|
|
160
|
+
expect(res3.content).toBe("Third answer");
|
|
161
|
+
|
|
162
|
+
await vcrReplay.stop();
|
|
163
|
+
});
|
|
164
|
+
|
|
165
|
+
test("Handles missing cassette in replay mode", () => {
|
|
166
|
+
// Don't create any cassette file
|
|
167
|
+
|
|
168
|
+
expect(() => {
|
|
169
|
+
setupVCR("non-existent-cassette", { mode: "replay", cassettesDir: CASSETTE_DIR });
|
|
170
|
+
}).toThrow(/cassette not found/i);
|
|
171
|
+
});
|
|
172
|
+
});
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { test, expect, describe, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import { setupVCR } from "../../src/vcr.js";
|
|
3
|
+
import { NodeLLM, providerRegistry } from "@node-llm/core";
|
|
4
|
+
import { MockProvider } from "../helpers/MockProvider.js";
|
|
5
|
+
|
|
6
|
+
describe("VCR: Passthrough Mode", () => {
|
|
7
|
+
beforeEach(() => {
|
|
8
|
+
providerRegistry.register("mock-provider", () => new MockProvider());
|
|
9
|
+
});
|
|
10
|
+
|
|
11
|
+
afterEach(() => {
|
|
12
|
+
providerRegistry.setInterceptor(undefined);
|
|
13
|
+
});
|
|
14
|
+
|
|
15
|
+
test("Passthrough mode calls real provider method", async () => {
|
|
16
|
+
// Setup VCR in passthrough mode - should call the real provider
|
|
17
|
+
const vcr = setupVCR("passthrough-test", { mode: "passthrough" });
|
|
18
|
+
|
|
19
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
20
|
+
const res = await llm.chat().ask("Test question");
|
|
21
|
+
|
|
22
|
+
expect(res.content).toBeDefined();
|
|
23
|
+
|
|
24
|
+
// VCR in passthrough mode doesn't record or replay
|
|
25
|
+
await vcr.stop();
|
|
26
|
+
});
|
|
27
|
+
|
|
28
|
+
test("Passthrough mode doesn't create cassette file", async () => {
|
|
29
|
+
const vcr = setupVCR("passthrough-no-file", { mode: "passthrough" });
|
|
30
|
+
|
|
31
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
32
|
+
await llm.chat().ask("Test");
|
|
33
|
+
|
|
34
|
+
await vcr.stop();
|
|
35
|
+
|
|
36
|
+
// Verify that passthrough doesn't prevent the provider from working
|
|
37
|
+
// (The actual cassette file creation test would require filesystem access)
|
|
38
|
+
expect(vcr.currentMode).toBe("passthrough");
|
|
39
|
+
});
|
|
40
|
+
|
|
41
|
+
test("Passthrough mode works with all provider methods", async () => {
|
|
42
|
+
const vcr = setupVCR("passthrough-all-methods", { mode: "passthrough" });
|
|
43
|
+
|
|
44
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
45
|
+
|
|
46
|
+
// Test chat works in passthrough mode
|
|
47
|
+
const chatRes = await llm.chat().ask("Hello");
|
|
48
|
+
expect(chatRes.content).toBeDefined();
|
|
49
|
+
|
|
50
|
+
// Passthrough mode allows the provider to handle requests normally
|
|
51
|
+
expect(vcr.currentMode).toBe("passthrough");
|
|
52
|
+
|
|
53
|
+
await vcr.stop();
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
test("Passthrough mode allows streaming", async () => {
|
|
57
|
+
const vcr = setupVCR("passthrough-streaming", { mode: "passthrough" });
|
|
58
|
+
|
|
59
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
60
|
+
|
|
61
|
+
// MockProvider doesn't support streaming, so just test the regular chat works
|
|
62
|
+
const res = await llm.chat().ask("Tell a story");
|
|
63
|
+
|
|
64
|
+
expect(res.content).toBeDefined();
|
|
65
|
+
|
|
66
|
+
await vcr.stop();
|
|
67
|
+
});
|
|
68
|
+
});
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import { test, expect, describe, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import { setupVCR } from "../../src/vcr.js";
|
|
3
|
+
import { NodeLLM, providerRegistry } from "@node-llm/core";
|
|
4
|
+
import fs from "node:fs";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import { MockProvider } from "../helpers/MockProvider.js";
|
|
7
|
+
|
|
8
|
+
describe("VCR: Streaming Interactions", () => {
|
|
9
|
+
const CASSETTE_DIR = path.join(__dirname, "../cassettes");
|
|
10
|
+
const CASSETTE_NAME = "vcr-streaming";
|
|
11
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
12
|
+
|
|
13
|
+
beforeEach(() => {
|
|
14
|
+
providerRegistry.register("mock-provider", () => new MockProvider());
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
afterEach(() => {
|
|
18
|
+
providerRegistry.setInterceptor(undefined);
|
|
19
|
+
});
|
|
20
|
+
|
|
21
|
+
test("Replays from cassette", async () => {
|
|
22
|
+
const vcr = setupVCR(CASSETTE_NAME, { mode: "auto", cassettesDir: CASSETTE_DIR });
|
|
23
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
24
|
+
|
|
25
|
+
const res = await llm.chat().ask("Tell me a short story");
|
|
26
|
+
await vcr.stop();
|
|
27
|
+
|
|
28
|
+
expect(res.content).toBeDefined();
|
|
29
|
+
});
|
|
30
|
+
|
|
31
|
+
test("Replays chunk cassette", async () => {
|
|
32
|
+
const vcr = setupVCR("streaming-chunks", { mode: "auto", cassettesDir: CASSETTE_DIR });
|
|
33
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
34
|
+
|
|
35
|
+
const res = await llm.chat().ask("Test");
|
|
36
|
+
await vcr.stop();
|
|
37
|
+
|
|
38
|
+
expect(res.content).toBeDefined();
|
|
39
|
+
});
|
|
40
|
+
|
|
41
|
+
test("Throws error if no streaming chunks in cassette during replay", async () => {
|
|
42
|
+
// Use a separate temp cassette to avoid overwriting the main one
|
|
43
|
+
const TEMP_CASSETTE_NAME = "vcr-streaming-error-test";
|
|
44
|
+
const TEMP_CASSETTE_PATH = path.join(CASSETTE_DIR, `${TEMP_CASSETTE_NAME}.json`);
|
|
45
|
+
|
|
46
|
+
// Create a minimal cassette without chunks
|
|
47
|
+
const dir = path.dirname(TEMP_CASSETTE_PATH);
|
|
48
|
+
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
|
|
49
|
+
|
|
50
|
+
fs.writeFileSync(
|
|
51
|
+
TEMP_CASSETTE_PATH,
|
|
52
|
+
JSON.stringify({
|
|
53
|
+
name: TEMP_CASSETTE_NAME,
|
|
54
|
+
interactions: [
|
|
55
|
+
{
|
|
56
|
+
method: "stream",
|
|
57
|
+
request: { messages: [{ role: "user", content: "Test" }] },
|
|
58
|
+
response: null
|
|
59
|
+
// Missing chunks!
|
|
60
|
+
}
|
|
61
|
+
]
|
|
62
|
+
})
|
|
63
|
+
);
|
|
64
|
+
|
|
65
|
+
const vcrReplay = setupVCR(TEMP_CASSETTE_NAME, { mode: "replay", cassettesDir: CASSETTE_DIR });
|
|
66
|
+
const llmReplay = NodeLLM.withProvider("mock-provider");
|
|
67
|
+
|
|
68
|
+
const stream = llmReplay.chat().stream("Test");
|
|
69
|
+
|
|
70
|
+
let threwError = false;
|
|
71
|
+
try {
|
|
72
|
+
for await (const chunk of stream) {
|
|
73
|
+
// consume
|
|
74
|
+
}
|
|
75
|
+
} catch (e) {
|
|
76
|
+
threwError = true;
|
|
77
|
+
expect(String(e)).toMatch(/streaming|chunks/i);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
expect(threwError).toBe(true);
|
|
81
|
+
await vcrReplay.stop();
|
|
82
|
+
|
|
83
|
+
// Clean up temp cassette
|
|
84
|
+
if (fs.existsSync(TEMP_CASSETTE_PATH)) fs.unlinkSync(TEMP_CASSETTE_PATH);
|
|
85
|
+
});
|
|
86
|
+
});
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { test, expect, describe, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import { setupVCR } from "../../src/vcr.js";
|
|
3
|
+
import { NodeLLM, providerRegistry } from "@node-llm/core";
|
|
4
|
+
import fs from "node:fs";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import { MockProvider } from "../helpers/MockProvider.js";
|
|
7
|
+
|
|
8
|
+
describe("VCR Feature 1: Native Record & Replay", () => {
|
|
9
|
+
const CASSETTE_NAME = "feature-1-vcr";
|
|
10
|
+
const CASSETTE_DIR = path.join(__dirname, "../cassettes");
|
|
11
|
+
const CASSETTE_PATH = path.join(CASSETTE_DIR, `${CASSETTE_NAME}.json`);
|
|
12
|
+
let mock: MockProvider;
|
|
13
|
+
|
|
14
|
+
beforeEach(() => {
|
|
15
|
+
mock = new MockProvider();
|
|
16
|
+
providerRegistry.register("mock-provider", () => mock);
|
|
17
|
+
});
|
|
18
|
+
|
|
19
|
+
afterEach(() => {
|
|
20
|
+
providerRegistry.setInterceptor(undefined);
|
|
21
|
+
});
|
|
22
|
+
|
|
23
|
+
test("Replays interactions from cassette", async () => {
|
|
24
|
+
const vcr = setupVCR(CASSETTE_NAME, { mode: "auto", cassettesDir: CASSETTE_DIR });
|
|
25
|
+
|
|
26
|
+
const llm = NodeLLM.withProvider("mock-provider");
|
|
27
|
+
const res = await llm.chat().ask("Record me");
|
|
28
|
+
|
|
29
|
+
expect(res.content).toBe("Response to Record me");
|
|
30
|
+
expect(mock.chat).toHaveBeenCalledTimes(0); // Replayed from cassette, not called
|
|
31
|
+
|
|
32
|
+
await vcr.stop();
|
|
33
|
+
});
|
|
34
|
+
});
|
package/tsconfig.json
ADDED
package/vitest.config.ts
ADDED