llm-mock-server 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/.github/dependabot.yml +11 -0
  2. package/.github/workflows/test.yml +34 -0
  3. package/.markdownlint.jsonc +11 -0
  4. package/.node-version +1 -0
  5. package/.oxlintrc.json +35 -0
  6. package/ARCHITECTURE.md +125 -0
  7. package/LICENCE +21 -0
  8. package/README.md +448 -0
  9. package/package.json +55 -0
  10. package/src/cli-validators.ts +56 -0
  11. package/src/cli.ts +128 -0
  12. package/src/formats/anthropic/index.ts +14 -0
  13. package/src/formats/anthropic/parse.ts +48 -0
  14. package/src/formats/anthropic/schema.ts +133 -0
  15. package/src/formats/anthropic/serialize.ts +91 -0
  16. package/src/formats/openai/index.ts +14 -0
  17. package/src/formats/openai/parse.ts +34 -0
  18. package/src/formats/openai/schema.ts +147 -0
  19. package/src/formats/openai/serialize.ts +92 -0
  20. package/src/formats/parse-helpers.ts +79 -0
  21. package/src/formats/responses/index.ts +14 -0
  22. package/src/formats/responses/parse.ts +56 -0
  23. package/src/formats/responses/schema.ts +143 -0
  24. package/src/formats/responses/serialize.ts +129 -0
  25. package/src/formats/types.ts +17 -0
  26. package/src/history.ts +66 -0
  27. package/src/index.ts +44 -0
  28. package/src/loader.ts +213 -0
  29. package/src/logger.ts +58 -0
  30. package/src/mock-server.ts +237 -0
  31. package/src/route-handler.ts +113 -0
  32. package/src/rule-engine.ts +119 -0
  33. package/src/sse-writer.ts +35 -0
  34. package/src/types/index.ts +4 -0
  35. package/src/types/reply.ts +49 -0
  36. package/src/types/request.ts +45 -0
  37. package/src/types/rule.ts +74 -0
  38. package/src/types.ts +5 -0
  39. package/test/cli-validators.test.ts +131 -0
  40. package/test/formats/anthropic-schema.test.ts +192 -0
  41. package/test/formats/anthropic.test.ts +260 -0
  42. package/test/formats/openai-schema.test.ts +105 -0
  43. package/test/formats/openai.test.ts +243 -0
  44. package/test/formats/responses-schema.test.ts +114 -0
  45. package/test/formats/responses.test.ts +299 -0
  46. package/test/loader.test.ts +314 -0
  47. package/test/mock-server.test.ts +565 -0
  48. package/test/rule-engine.test.ts +213 -0
  49. package/tsconfig.json +26 -0
  50. package/tsconfig.test.json +11 -0
  51. package/vitest.config.ts +18 -0
@@ -0,0 +1,192 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import { AnthropicRequestSchema } from "../../src/formats/anthropic/schema.js";
3
+
4
+ describe("AnthropicRequestSchema", () => {
5
+ const validRequest = {
6
+ model: "claude-sonnet-4-6",
7
+ max_tokens: 1024,
8
+ messages: [{ role: "user", content: "Hello" }],
9
+ };
10
+
11
+ it("accepts a valid minimal request", () => {
12
+ expect(AnthropicRequestSchema.safeParse(validRequest).success).toBe(true);
13
+ });
14
+
15
+ it("rejects missing model", () => {
16
+ const { model: _model, ...rest } = validRequest;
17
+ expect(AnthropicRequestSchema.safeParse(rest).success).toBe(false);
18
+ });
19
+
20
+ it("rejects empty model string", () => {
21
+ expect(AnthropicRequestSchema.safeParse({
22
+ ...validRequest, model: "",
23
+ }).success).toBe(false);
24
+ });
25
+
26
+ it("rejects missing max_tokens", () => {
27
+ const { max_tokens: _mt, ...rest } = validRequest;
28
+ expect(AnthropicRequestSchema.safeParse(rest).success).toBe(false);
29
+ });
30
+
31
+ it("rejects non-positive max_tokens", () => {
32
+ expect(AnthropicRequestSchema.safeParse({ ...validRequest, max_tokens: 0 }).success).toBe(false);
33
+ expect(AnthropicRequestSchema.safeParse({ ...validRequest, max_tokens: -1 }).success).toBe(false);
34
+ });
35
+
36
+ it("rejects empty messages array", () => {
37
+ expect(AnthropicRequestSchema.safeParse({ ...validRequest, messages: [] }).success).toBe(false);
38
+ });
39
+
40
+ it("rejects missing messages", () => {
41
+ const { messages: _m, ...rest } = validRequest;
42
+ expect(AnthropicRequestSchema.safeParse(rest).success).toBe(false);
43
+ });
44
+
45
+ it("accepts string content shorthand", () => {
46
+ expect(AnthropicRequestSchema.safeParse(validRequest).success).toBe(true);
47
+ });
48
+
49
+ it("accepts array content with text blocks", () => {
50
+ expect(AnthropicRequestSchema.safeParse({
51
+ ...validRequest,
52
+ messages: [{ role: "user", content: [{ type: "text", text: "Hello" }] }],
53
+ }).success).toBe(true);
54
+ });
55
+
56
+ it("accepts array content with tool_use blocks", () => {
57
+ const result = AnthropicRequestSchema.safeParse({
58
+ ...validRequest,
59
+ messages: [{
60
+ role: "assistant",
61
+ content: [{
62
+ type: "tool_use", id: "toolu_01", name: "get_weather", input: { location: "SF" },
63
+ }],
64
+ }],
65
+ });
66
+ expect(result.success).toBe(true);
67
+ if (result.success) {
68
+ expect(result.data.messages[0]!.content).toEqual([
69
+ { type: "tool_use", id: "toolu_01", name: "get_weather", input: { location: "SF" } },
70
+ ]);
71
+ }
72
+ });
73
+
74
+ it("accepts tool_result blocks with string content", () => {
75
+ expect(AnthropicRequestSchema.safeParse({
76
+ ...validRequest,
77
+ messages: [{
78
+ role: "user",
79
+ content: [{ type: "tool_result", tool_use_id: "toolu_01", content: "Sunny, 72F" }],
80
+ }],
81
+ }).success).toBe(true);
82
+ });
83
+
84
+ it("accepts tool_result blocks with TextBlock[] content", () => {
85
+ expect(AnthropicRequestSchema.safeParse({
86
+ ...validRequest,
87
+ messages: [{
88
+ role: "user",
89
+ content: [{ type: "tool_result", tool_use_id: "toolu_02", content: [{ type: "text", text: "Result" }] }],
90
+ }],
91
+ }).success).toBe(true);
92
+ });
93
+
94
+ it("accepts mixed content blocks in a single message", () => {
95
+ const result = AnthropicRequestSchema.safeParse({
96
+ ...validRequest,
97
+ messages: [{
98
+ role: "assistant",
99
+ content: [
100
+ { type: "text", text: "Let me check." },
101
+ { type: "tool_use", id: "toolu_01", name: "get_weather", input: { location: "SF" } },
102
+ ],
103
+ }],
104
+ });
105
+ expect(result.success).toBe(true);
106
+ if (result.success) {
107
+ expect(result.data.messages[0]!.content).toHaveLength(2);
108
+ }
109
+ });
110
+
111
+ it("filters out unknown content block types", () => {
112
+ const result = AnthropicRequestSchema.safeParse({
113
+ ...validRequest,
114
+ messages: [{
115
+ role: "assistant",
116
+ content: [
117
+ { type: "thinking", thinking: "Let me consider..." },
118
+ { type: "text", text: "Here is my answer." },
119
+ ],
120
+ }],
121
+ });
122
+ expect(result.success).toBe(true);
123
+ if (result.success) {
124
+ const blocks = result.data.messages[0]!.content;
125
+ expect(blocks).toHaveLength(1);
126
+ expect(blocks).toEqual([{ type: "text", text: "Here is my answer." }]);
127
+ }
128
+ });
129
+
130
+ it("accepts a message where all blocks are unknown", () => {
131
+ const result = AnthropicRequestSchema.safeParse({
132
+ ...validRequest,
133
+ messages: [{
134
+ role: "assistant",
135
+ content: [
136
+ { type: "thinking", thinking: "hmm" },
137
+ { type: "server_tool_use", id: "st_01", name: "web_search" },
138
+ ],
139
+ }],
140
+ });
141
+ expect(result.success).toBe(true);
142
+ if (result.success) {
143
+ expect(result.data.messages[0]!.content).toHaveLength(0);
144
+ }
145
+ });
146
+
147
+ it("accepts system as string", () => {
148
+ expect(AnthropicRequestSchema.safeParse({
149
+ ...validRequest, system: "You are a helpful assistant.",
150
+ }).success).toBe(true);
151
+ });
152
+
153
+ it("accepts system as TextBlock array", () => {
154
+ expect(AnthropicRequestSchema.safeParse({
155
+ ...validRequest, system: [{ type: "text", text: "You are a helpful assistant." }],
156
+ }).success).toBe(true);
157
+ });
158
+
159
+ it("accepts stream: true", () => {
160
+ const result = AnthropicRequestSchema.safeParse({ ...validRequest, stream: true });
161
+ expect(result.success).toBe(true);
162
+ if (result.success) expect(result.data.stream).toBe(true);
163
+ });
164
+
165
+ it("accepts stream: false", () => {
166
+ const result = AnthropicRequestSchema.safeParse({ ...validRequest, stream: false });
167
+ expect(result.success).toBe(true);
168
+ if (result.success) expect(result.data.stream).toBe(false);
169
+ });
170
+
171
+ it("accepts optional fields", () => {
172
+ expect(AnthropicRequestSchema.safeParse({
173
+ ...validRequest,
174
+ temperature: 0.7,
175
+ top_p: 0.9,
176
+ top_k: 40,
177
+ stop_sequences: ["Human:"],
178
+ metadata: { user_id: "test" },
179
+ }).success).toBe(true);
180
+ });
181
+
182
+ it("accepts tools array", () => {
183
+ expect(AnthropicRequestSchema.safeParse({
184
+ ...validRequest,
185
+ tools: [{
186
+ name: "get_weather",
187
+ description: "Get the weather",
188
+ input_schema: { type: "object", properties: { location: { type: "string" } } },
189
+ }],
190
+ }).success).toBe(true);
191
+ });
192
+ });
@@ -0,0 +1,260 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import { anthropicFormat } from "../../src/formats/anthropic/index.js";
3
+ import type {
4
+ AnthropicMessageStart, AnthropicBlockEvent, AnthropicDelta,
5
+ AnthropicComplete, AnthropicError,
6
+ } from "../../src/formats/anthropic/schema.js";
7
+
8
+ function parse<T>(chunk: { data: string }): T {
9
+ return JSON.parse(chunk.data) as T;
10
+ }
11
+
12
+ describe("Anthropic Format", () => {
13
+ describe("parseRequest", () => {
14
+ it("parses messages with top-level system", () => {
15
+ const req = anthropicFormat.parseRequest({
16
+ model: "claude-sonnet-4-6",
17
+ system: "You are a pirate",
18
+ messages: [{ role: "user", content: "Hello" }],
19
+ max_tokens: 1024,
20
+ stream: true,
21
+ });
22
+ expect(req.format).toBe("anthropic");
23
+ expect(req.model).toBe("claude-sonnet-4-6");
24
+ expect(req.systemMessage).toBe("You are a pirate");
25
+ expect(req.lastMessage).toBe("Hello");
26
+ expect(req.messages).toHaveLength(2);
27
+ });
28
+
29
+ it("parses system as array of blocks", () => {
30
+ const req = anthropicFormat.parseRequest({
31
+ model: "claude-sonnet-4-6",
32
+ max_tokens: 1024,
33
+ system: [{ type: "text", text: "Be helpful" }],
34
+ messages: [{ role: "user", content: "hi" }],
35
+ });
36
+ expect(req.systemMessage).toBe("Be helpful");
37
+ });
38
+
39
+ it("parses content block arrays in messages", () => {
40
+ const req = anthropicFormat.parseRequest({
41
+ model: "claude-sonnet-4-6",
42
+ max_tokens: 1024,
43
+ messages: [{ role: "user", content: [{ type: "text", text: "Hello there" }] }],
44
+ });
45
+ expect(req.lastMessage).toBe("Hello there");
46
+ });
47
+
48
+ it("parses tools with input_schema", () => {
49
+ const req = anthropicFormat.parseRequest({
50
+ model: "claude-sonnet-4-6",
51
+ max_tokens: 1024,
52
+ messages: [{ role: "user", content: "read file" }],
53
+ tools: [{ name: "read_file", description: "Read", input_schema: { type: "object" } }],
54
+ });
55
+ expect(req.tools).toHaveLength(1);
56
+ expect(req.tools![0]!.name).toBe("read_file");
57
+ });
58
+
59
+ it("extracts toolNames from tools array", () => {
60
+ const req = anthropicFormat.parseRequest({
61
+ model: "claude-sonnet-4-6",
62
+ max_tokens: 1024,
63
+ messages: [{ role: "user", content: "hi" }],
64
+ tools: [
65
+ { name: "get_weather", input_schema: {} },
66
+ { name: "search", input_schema: {} },
67
+ ],
68
+ });
69
+ expect(req.toolNames).toEqual(["get_weather", "search"]);
70
+ });
71
+
72
+ it("extracts lastToolCallId from tool_result blocks", () => {
73
+ const req = anthropicFormat.parseRequest({
74
+ model: "claude-sonnet-4-6",
75
+ max_tokens: 1024,
76
+ messages: [
77
+ { role: "user", content: "hi" },
78
+ { role: "user", content: [{ type: "tool_result", tool_use_id: "toolu_123", content: "result" }] },
79
+ ],
80
+ });
81
+ expect(req.lastToolCallId).toBe("toolu_123");
82
+ });
83
+ });
84
+
85
+ describe("serialize (streaming)", () => {
86
+ it("produces correct event sequence for text", () => {
87
+ const chunks = anthropicFormat.serialize({ text: "Hello" }, "claude-sonnet-4-6");
88
+ const events = chunks.map((c) => c.event);
89
+ expect(events).toEqual([
90
+ "message_start",
91
+ "content_block_start",
92
+ "content_block_delta",
93
+ "content_block_stop",
94
+ "message_delta",
95
+ "message_stop",
96
+ ]);
97
+ });
98
+
99
+ it("message_start contains correct structure", () => {
100
+ const chunks = anthropicFormat.serialize({ text: "Hello" }, "claude-sonnet-4-6");
101
+ const msg = parse<AnthropicMessageStart>(chunks[0]!);
102
+ expect(msg.message).toMatchObject({
103
+ type: "message",
104
+ role: "assistant",
105
+ model: "claude-sonnet-4-6",
106
+ content: [],
107
+ stop_reason: null,
108
+ });
109
+ expect(msg.message.usage.input_tokens).toBeTypeOf("number");
110
+ expect(msg.message.usage.output_tokens).toBe(0);
111
+ });
112
+
113
+ it("text block uses index 0 when no reasoning", () => {
114
+ const chunks = anthropicFormat.serialize({ text: "Hello" }, "claude-sonnet-4-6");
115
+ const blockStart = chunks.find((c) => c.event === "content_block_start");
116
+ const data = parse<AnthropicBlockEvent>(blockStart!);
117
+ expect(data.index).toBe(0);
118
+ expect(data.content_block?.type).toBe("text");
119
+ });
120
+
121
+ it("thinking block at index 0 and text block at index 1 when reasoning present", () => {
122
+ const chunks = anthropicFormat.serialize(
123
+ { text: "42", reasoning: "Let me think" },
124
+ "claude-sonnet-4-6",
125
+ );
126
+ const blockStarts = chunks
127
+ .filter((c) => c.event === "content_block_start")
128
+ .map((c) => parse<AnthropicBlockEvent>(c));
129
+
130
+ expect(blockStarts[0]!.index).toBe(0);
131
+ expect(blockStarts[0]!.content_block?.type).toBe("thinking");
132
+ expect(blockStarts[1]!.index).toBe(1);
133
+ expect(blockStarts[1]!.content_block?.type).toBe("text");
134
+ });
135
+
136
+ it("thinking delta has correct type and content", () => {
137
+ const chunks = anthropicFormat.serialize(
138
+ { text: "42", reasoning: "Let me think" },
139
+ "claude-sonnet-4-6",
140
+ );
141
+ const thinkingDelta = chunks.find((c) => {
142
+ if (c.event !== "content_block_delta") return false;
143
+ return parse<AnthropicBlockEvent>(c).delta?.type === "thinking_delta";
144
+ });
145
+ expect(thinkingDelta).toBeDefined();
146
+ expect(parse<AnthropicBlockEvent>(thinkingDelta!).delta?.thinking).toBe("Let me think");
147
+ });
148
+
149
+ it("closes thinking block before text block starts", () => {
150
+ const chunks = anthropicFormat.serialize(
151
+ { text: "answer", reasoning: "think" },
152
+ "claude-sonnet-4-6",
153
+ );
154
+ const events = chunks.map((c) => ({ event: c.event, data: parse<AnthropicBlockEvent>(c) }));
155
+ const thinkingStop = events.findIndex((e) => e.event === "content_block_stop" && e.data.index === 0);
156
+ const textStart = events.findIndex((e) => e.event === "content_block_start" && e.data.content_block?.type === "text");
157
+ expect(thinkingStop).toBeLessThan(textStart);
158
+ });
159
+
160
+ it("includes tool_use blocks with correct structure", () => {
161
+ const chunks = anthropicFormat.serialize(
162
+ { tools: [{ name: "read_file", args: { path: "/tmp" } }] },
163
+ "claude-sonnet-4-6",
164
+ );
165
+ const toolStart = chunks.find((c) => {
166
+ if (c.event !== "content_block_start") return false;
167
+ return parse<AnthropicBlockEvent>(c).content_block?.type === "tool_use";
168
+ });
169
+ expect(toolStart).toBeDefined();
170
+ const block = parse<AnthropicBlockEvent>(toolStart!).content_block!;
171
+ expect(block.name).toBe("read_file");
172
+ expect(block.id).toBeTypeOf("string");
173
+ expect(block.input).toEqual({});
174
+ });
175
+
176
+ it("sets stop_reason to tool_use when tools present", () => {
177
+ const chunks = anthropicFormat.serialize(
178
+ { tools: [{ name: "read_file", args: {} }] },
179
+ "claude-sonnet-4-6",
180
+ );
181
+ const delta = chunks.find((c) => c.event === "message_delta");
182
+ expect(parse<AnthropicDelta>(delta!).delta).toMatchObject({ stop_reason: "tool_use" });
183
+ });
184
+
185
+ it("includes stop_sequence: null in message_delta", () => {
186
+ const chunks = anthropicFormat.serialize({ text: "Hello" }, "claude-sonnet-4-6");
187
+ const delta = chunks.find((c) => c.event === "message_delta");
188
+ expect(parse<AnthropicDelta>(delta!).delta.stop_sequence).toBeNull();
189
+ });
190
+
191
+ it("message_delta includes output_tokens in usage", () => {
192
+ const chunks = anthropicFormat.serialize({ text: "Hello", usage: { input: 20, output: 15 } }, "claude-sonnet-4-6");
193
+ const delta = chunks.find((c) => c.event === "message_delta");
194
+ expect(parse<AnthropicDelta>(delta!).usage.output_tokens).toBe(15);
195
+ });
196
+ });
197
+
198
+ describe("serializeComplete (non-streaming)", () => {
199
+ it("produces correct top-level structure", () => {
200
+ const result = anthropicFormat.serializeComplete({ text: "Hello" }, "claude-sonnet-4-6") as AnthropicComplete;
201
+ expect(result.type).toBe("message");
202
+ expect(result.role).toBe("assistant");
203
+ expect(result.model).toBe("claude-sonnet-4-6");
204
+ expect(result.stop_reason).toBe("end_turn");
205
+ expect(result.stop_sequence).toBeNull();
206
+ });
207
+
208
+ it("includes text content block", () => {
209
+ const result = anthropicFormat.serializeComplete({ text: "Hello, world!" }, "claude-sonnet-4-6") as AnthropicComplete;
210
+ expect(result.content[0]).toMatchObject({ type: "text", text: "Hello, world!" });
211
+ });
212
+
213
+ it("includes thinking before text when reasoning provided", () => {
214
+ const result = anthropicFormat.serializeComplete(
215
+ { text: "42", reasoning: "Thinking..." },
216
+ "claude-sonnet-4-6",
217
+ ) as AnthropicComplete;
218
+ expect(result.content[0]!.type).toBe("thinking");
219
+ expect(result.content[0]!.thinking).toBe("Thinking...");
220
+ expect(result.content[1]!.type).toBe("text");
221
+ });
222
+
223
+ it("includes tool_use with correct structure", () => {
224
+ const result = anthropicFormat.serializeComplete(
225
+ { tools: [{ name: "read_file", args: { path: "/tmp" } }] },
226
+ "claude-sonnet-4-6",
227
+ ) as AnthropicComplete;
228
+ const tool = result.content.find((c) => c.type === "tool_use");
229
+ expect(tool).toBeDefined();
230
+ expect(tool!.name).toBe("read_file");
231
+ expect(tool!.input).toEqual({ path: "/tmp" });
232
+ expect(tool!.id).toBeTypeOf("string");
233
+ });
234
+
235
+ it("sets stop_reason to tool_use when tools present", () => {
236
+ const result = anthropicFormat.serializeComplete(
237
+ { tools: [{ name: "read_file", args: {} }] },
238
+ "claude-sonnet-4-6",
239
+ ) as AnthropicComplete;
240
+ expect(result.stop_reason).toBe("tool_use");
241
+ });
242
+
243
+ it("includes usage tokens", () => {
244
+ const result = anthropicFormat.serializeComplete(
245
+ { text: "hi", usage: { input: 20, output: 15 } },
246
+ "claude-sonnet-4-6",
247
+ ) as AnthropicComplete;
248
+ expect(result.usage).toEqual({ input_tokens: 20, output_tokens: 15 });
249
+ });
250
+ });
251
+
252
+ describe("serializeError", () => {
253
+ it("produces Anthropic error format", () => {
254
+ const result = anthropicFormat.serializeError({ status: 400, message: "Bad request", type: "invalid_request_error" }) as AnthropicError;
255
+ expect(result.type).toBe("error");
256
+ expect(result.error.type).toBe("invalid_request_error");
257
+ expect(result.error.message).toBe("Bad request");
258
+ });
259
+ });
260
+ });
@@ -0,0 +1,105 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import { OpenAIRequestSchema } from "../../src/formats/openai/schema.js";
3
+
4
+ describe("OpenAIRequestSchema", () => {
5
+ const validRequest = {
6
+ model: "gpt-5.4",
7
+ messages: [{ role: "user", content: "Hello" }],
8
+ };
9
+
10
+ it("accepts a valid minimal request", () => {
11
+ expect(OpenAIRequestSchema.safeParse(validRequest).success).toBe(true);
12
+ });
13
+
14
+ it("rejects missing model", () => {
15
+ expect(OpenAIRequestSchema.safeParse({
16
+ messages: [{ role: "user", content: "Hello" }],
17
+ }).success).toBe(false);
18
+ });
19
+
20
+ it("rejects empty model string", () => {
21
+ expect(OpenAIRequestSchema.safeParse({
22
+ model: "",
23
+ messages: [{ role: "user", content: "Hello" }],
24
+ }).success).toBe(false);
25
+ });
26
+
27
+ it("rejects empty messages array", () => {
28
+ expect(OpenAIRequestSchema.safeParse({
29
+ model: "gpt-5.4",
30
+ messages: [],
31
+ }).success).toBe(false);
32
+ });
33
+
34
+ it("rejects missing messages", () => {
35
+ expect(OpenAIRequestSchema.safeParse({
36
+ model: "gpt-5.4",
37
+ }).success).toBe(false);
38
+ });
39
+
40
+ it("accepts array content format in messages", () => {
41
+ expect(OpenAIRequestSchema.safeParse({
42
+ model: "gpt-5.4",
43
+ messages: [{ role: "user", content: [{ type: "text", text: "Hello" }] }],
44
+ }).success).toBe(true);
45
+ });
46
+
47
+ it("accepts null content", () => {
48
+ expect(OpenAIRequestSchema.safeParse({
49
+ model: "gpt-5.4",
50
+ messages: [{ role: "assistant", content: null }],
51
+ }).success).toBe(true);
52
+ });
53
+
54
+ it("accepts optional fields", () => {
55
+ expect(OpenAIRequestSchema.safeParse({
56
+ ...validRequest,
57
+ temperature: 0.7,
58
+ top_p: 0.9,
59
+ max_tokens: 100,
60
+ presence_penalty: 0.5,
61
+ frequency_penalty: 0.5,
62
+ user: "test-user",
63
+ }).success).toBe(true);
64
+ });
65
+
66
+ it("accepts tools array", () => {
67
+ expect(OpenAIRequestSchema.safeParse({
68
+ ...validRequest,
69
+ tools: [{
70
+ type: "function",
71
+ function: {
72
+ name: "get_weather",
73
+ description: "Get the weather",
74
+ parameters: { type: "object", properties: {} },
75
+ },
76
+ }],
77
+ }).success).toBe(true);
78
+ });
79
+
80
+ it("accepts stream: true", () => {
81
+ const result = OpenAIRequestSchema.safeParse({ ...validRequest, stream: true });
82
+ expect(result.success).toBe(true);
83
+ if (result.success) expect(result.data.stream).toBe(true);
84
+ });
85
+
86
+ it("accepts stream: false", () => {
87
+ const result = OpenAIRequestSchema.safeParse({ ...validRequest, stream: false });
88
+ expect(result.success).toBe(true);
89
+ if (result.success) expect(result.data.stream).toBe(false);
90
+ });
91
+
92
+ it("accepts messages with tool_calls", () => {
93
+ expect(OpenAIRequestSchema.safeParse({
94
+ model: "gpt-5.4",
95
+ messages: [{
96
+ role: "assistant",
97
+ tool_calls: [{
98
+ id: "call_1",
99
+ type: "function",
100
+ function: { name: "search", arguments: "{}" },
101
+ }],
102
+ }],
103
+ }).success).toBe(true);
104
+ });
105
+ });