lynkr 2.0.0 → 3.0.0

@@ -0,0 +1,686 @@
+ const assert = require("assert");
+ const { describe, it, beforeEach, afterEach } = require("node:test");
+
+ describe("llama.cpp Integration", () => {
+   let originalEnv;
+
+   beforeEach(() => {
+     originalEnv = { ...process.env };
+
+     // Clear module cache
+     delete require.cache[require.resolve("../src/config")];
+     delete require.cache[require.resolve("../src/clients/routing")];
+     delete require.cache[require.resolve("../src/clients/openrouter-utils")];
+   });
+
+   afterEach(() => {
+     process.env = originalEnv;
+   });
+
+   describe("Configuration", () => {
+     it("should accept llamacpp as a valid MODEL_PROVIDER", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_ENDPOINT = "http://localhost:8080";
+
+       const config = require("../src/config");
+       assert.strictEqual(config.modelProvider.type, "llamacpp");
+     });
+
+     it("should use default endpoint when LLAMACPP_ENDPOINT is not set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       delete process.env.LLAMACPP_ENDPOINT;
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.endpoint, "http://localhost:8080");
+     });
+
+     it("should use custom endpoint when LLAMACPP_ENDPOINT is set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_ENDPOINT = "http://192.168.1.100:9000";
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.endpoint, "http://192.168.1.100:9000");
+     });
+
+     it("should throw error when LLAMACPP_ENDPOINT is invalid URL", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_ENDPOINT = "not-a-valid-url";
+
+       assert.throws(
+         () => require("../src/config"),
+         /LLAMACPP_ENDPOINT must be a valid URL/
+       );
+     });
+
+     it("should use default model when LLAMACPP_MODEL is not set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       delete process.env.LLAMACPP_MODEL;
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.model, "default");
+     });
+
+     it("should use custom model when LLAMACPP_MODEL is set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_MODEL = "qwen2.5-coder-7b";
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.model, "qwen2.5-coder-7b");
+     });
+
+     it("should use default timeout when LLAMACPP_TIMEOUT_MS is not set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       delete process.env.LLAMACPP_TIMEOUT_MS;
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.timeout, 120000);
+     });
+
+     it("should use custom timeout when LLAMACPP_TIMEOUT_MS is set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_TIMEOUT_MS = "300000";
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.timeout, 300000);
+     });
+
+     it("should have null apiKey when LLAMACPP_API_KEY is not set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       delete process.env.LLAMACPP_API_KEY;
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.apiKey, null);
+     });
+
+     it("should store apiKey when LLAMACPP_API_KEY is set", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_API_KEY = "my-secret-key";
+
+       const config = require("../src/config");
+       assert.strictEqual(config.llamacpp.apiKey, "my-secret-key");
+     });
+   });
+
+   describe("Routing", () => {
+     it("should route to llamacpp when MODEL_PROVIDER is llamacpp", () => {
+       process.env.MODEL_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_ENDPOINT = "http://localhost:8080";
+       process.env.PREFER_OLLAMA = "false";
+
+       const config = require("../src/config");
+       const routing = require("../src/clients/routing");
+
+       const payload = { messages: [{ role: "user", content: "test" }] };
+       const provider = routing.determineProvider(payload);
+
+       assert.strictEqual(provider, "llamacpp");
+     });
+
+     it("should route to llamacpp as fallback for heavy tool count", () => {
+       // Clear other API keys to ensure llama.cpp fallback is used
+       delete process.env.OPENROUTER_API_KEY;
+       delete process.env.OPENAI_API_KEY;
+       delete process.env.AZURE_OPENAI_API_KEY;
+
+       process.env.MODEL_PROVIDER = "ollama";
+       process.env.PREFER_OLLAMA = "true";
+       process.env.OLLAMA_MODEL = "qwen2.5-coder:latest";
+       process.env.OLLAMA_MAX_TOOLS_FOR_ROUTING = "2";
+       process.env.OPENROUTER_MAX_TOOLS_FOR_ROUTING = "5";
+       process.env.LLAMACPP_ENDPOINT = "http://localhost:8080";
+       process.env.FALLBACK_ENABLED = "true";
+       process.env.FALLBACK_PROVIDER = "llamacpp";
+
+       const config = require("../src/config");
+       const routing = require("../src/clients/routing");
+
+       // 10 tools - above both thresholds, should go to fallback provider (llamacpp)
+       const payload = {
+         messages: [{ role: "user", content: "test" }],
+         tools: Array.from({ length: 10 }, (_, i) => ({ name: `tool${i}`, description: "test" })),
+       };
+
+       const provider = routing.determineProvider(payload);
+       // Should route to llamacpp as the configured fallback provider
+       assert.strictEqual(provider, "llamacpp");
+     });
+
+     it("should use llamacpp as fallback provider when configured", () => {
+       process.env.MODEL_PROVIDER = "ollama";
+       process.env.PREFER_OLLAMA = "true";
+       process.env.OLLAMA_MODEL = "qwen2.5-coder:latest";
+       process.env.FALLBACK_PROVIDER = "llamacpp";
+       process.env.LLAMACPP_ENDPOINT = "http://localhost:8080";
+       process.env.FALLBACK_ENABLED = "true";
+
+       const config = require("../src/config");
+       const routing = require("../src/clients/routing");
+
+       assert.strictEqual(routing.getFallbackProvider(), "llamacpp");
+     });
+   });
+
+   describe("Response Conversion", () => {
+     // llama.cpp uses OpenAI-compatible format, so we reuse the same converter
+
+     it("should convert llama.cpp text response to Anthropic format", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const llamacppResponse = {
+         id: "chatcmpl-123",
+         object: "chat.completion",
+         created: 1677652288,
+         model: "qwen2.5-coder-7b",
+         choices: [
+           {
+             index: 0,
+             message: {
+               role: "assistant",
+               content: "Hello! I'm running on llama.cpp."
+             },
+             finish_reason: "stop"
+           }
+         ],
+         usage: {
+           prompt_tokens: 9,
+           completion_tokens: 12,
+           total_tokens: 21
+         }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(llamacppResponse, "claude-sonnet-4-5");
+
+       assert.strictEqual(result.role, "assistant");
+       assert.strictEqual(result.model, "claude-sonnet-4-5");
+       assert.strictEqual(Array.isArray(result.content), true);
+       assert.strictEqual(result.content.length, 1);
+       assert.strictEqual(result.content[0].type, "text");
+       assert.strictEqual(result.content[0].text, "Hello! I'm running on llama.cpp.");
+       assert.strictEqual(result.stop_reason, "end_turn");
+       assert.strictEqual(result.usage.input_tokens, 9);
+       assert.strictEqual(result.usage.output_tokens, 12);
+     });
+
+     it("should convert llama.cpp tool call response to Anthropic format", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const llamacppResponse = {
+         id: "chatcmpl-123",
+         object: "chat.completion",
+         model: "qwen2.5-coder-7b",
+         choices: [
+           {
+             index: 0,
+             message: {
+               role: "assistant",
+               content: "I'll read that file for you.",
+               tool_calls: [
+                 {
+                   id: "call_abc123",
+                   type: "function",
+                   function: {
+                     name: "Read",
+                     arguments: JSON.stringify({
+                       file_path: "/tmp/example.txt"
+                     })
+                   }
+                 }
+               ]
+             },
+             finish_reason: "tool_calls"
+           }
+         ],
+         usage: {
+           prompt_tokens: 50,
+           completion_tokens: 30,
+           total_tokens: 80
+         }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(llamacppResponse, "claude-sonnet-4-5");
+
+       assert.strictEqual(result.role, "assistant");
+       assert.strictEqual(result.content.length, 2); // text + tool_use
+       assert.strictEqual(result.content[0].type, "text");
+       assert.strictEqual(result.content[0].text, "I'll read that file for you.");
+       assert.strictEqual(result.content[1].type, "tool_use");
+       assert.strictEqual(result.content[1].name, "Read");
+       assert.strictEqual(result.content[1].id, "call_abc123");
+       assert.deepStrictEqual(result.content[1].input, {
+         file_path: "/tmp/example.txt"
+       });
+       assert.strictEqual(result.stop_reason, "tool_use");
+     });
+
+     it("should convert llama.cpp parallel tool calls to Anthropic format", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const llamacppResponse = {
+         id: "chatcmpl-123",
+         model: "qwen2.5-coder-7b",
+         choices: [
+           {
+             message: {
+               role: "assistant",
+               content: "I'll search for both patterns.",
+               tool_calls: [
+                 {
+                   id: "call_1",
+                   type: "function",
+                   function: {
+                     name: "Grep",
+                     arguments: JSON.stringify({ pattern: "TODO" })
+                   }
+                 },
+                 {
+                   id: "call_2",
+                   type: "function",
+                   function: {
+                     name: "Grep",
+                     arguments: JSON.stringify({ pattern: "FIXME" })
+                   }
+                 }
+               ]
+             },
+             finish_reason: "tool_calls"
+           }
+         ],
+         usage: { prompt_tokens: 30, completion_tokens: 40, total_tokens: 70 }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(llamacppResponse, "claude-sonnet-4-5");
+
+       assert.strictEqual(result.content.length, 3); // text + 2 tool_uses
+       assert.strictEqual(result.content[0].type, "text");
+       assert.strictEqual(result.content[1].type, "tool_use");
+       assert.strictEqual(result.content[1].name, "Grep");
+       assert.strictEqual(result.content[1].id, "call_1");
+       assert.strictEqual(result.content[2].type, "tool_use");
+       assert.strictEqual(result.content[2].name, "Grep");
+       assert.strictEqual(result.content[2].id, "call_2");
+     });
+
+     it("should handle llama.cpp response with only tool calls (no text content)", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const llamacppResponse = {
+         id: "chatcmpl-123",
+         model: "qwen2.5-coder-7b",
+         choices: [
+           {
+             message: {
+               role: "assistant",
+               content: null, // llama.cpp may return null content with tool calls
+               tool_calls: [
+                 {
+                   id: "call_xyz",
+                   type: "function",
+                   function: {
+                     name: "Bash",
+                     arguments: JSON.stringify({ command: "pwd" })
+                   }
+                 }
+               ]
+             },
+             finish_reason: "tool_calls"
+           }
+         ],
+         usage: { prompt_tokens: 20, completion_tokens: 15, total_tokens: 35 }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(llamacppResponse, "claude-sonnet-4-5");
+
+       // Should have tool_use block (at least one)
+       assert.strictEqual(result.role, "assistant");
+       assert.strictEqual(Array.isArray(result.content), true);
+       assert.strictEqual(result.content.length >= 1, true);
+       // Find the tool_use block
+       const toolUseBlock = result.content.find(c => c.type === "tool_use");
+       assert.strictEqual(toolUseBlock !== undefined, true);
+       assert.strictEqual(toolUseBlock.name, "Bash");
+     });
+   });
+
+   describe("Message Conversion", () => {
+     it("should convert Anthropic messages to llama.cpp (OpenAI) format", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertAnthropicMessagesToOpenRouter } = require("../src/clients/openrouter-utils");
+
+       const anthropicMessages = [
+         {
+           role: "user",
+           content: [
+             { type: "text", text: "What is 2 + 2?" }
+           ]
+         },
+         {
+           role: "assistant",
+           content: [
+             { type: "text", text: "2 + 2 equals 4." }
+           ]
+         }
+       ];
+
+       const result = convertAnthropicMessagesToOpenRouter(anthropicMessages);
+
+       assert.strictEqual(result.length, 2);
+       assert.strictEqual(result[0].role, "user");
+       assert.strictEqual(result[0].content, "What is 2 + 2?");
+       assert.strictEqual(result[1].role, "assistant");
+       assert.strictEqual(result[1].content, "2 + 2 equals 4.");
+     });
+
+     it("should convert Anthropic tool_result messages to llama.cpp (OpenAI) format", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertAnthropicMessagesToOpenRouter } = require("../src/clients/openrouter-utils");
+
+       // Must have a preceding assistant message with tool_use for tool_result to be valid
+       const anthropicMessages = [
+         {
+           role: "user",
+           content: [{ type: "text", text: "Run a command" }]
+         },
+         {
+           role: "assistant",
+           content: [
+             { type: "text", text: "I'll run that command." },
+             {
+               type: "tool_use",
+               id: "call_456",
+               name: "Bash",
+               input: { command: "echo hello" }
+             }
+           ]
+         },
+         {
+           role: "user",
+           content: [
+             {
+               type: "tool_result",
+               tool_use_id: "call_456",
+               content: "hello"
+             }
+           ]
+         }
+       ];
+
+       const result = convertAnthropicMessagesToOpenRouter(anthropicMessages);
+
+       // Should have user message, assistant message with tool call, and tool result
+       assert.strictEqual(result.length >= 3, true);
+       // Find the tool result message
+       const toolResultMsg = result.find(m => m.role === "tool");
+       assert.strictEqual(toolResultMsg !== undefined, true);
+       assert.strictEqual(toolResultMsg.tool_call_id, "call_456");
+       assert.strictEqual(toolResultMsg.content, "hello");
+     });
+   });
+
+   describe("Tool Conversion", () => {
+     it("should convert Anthropic tools to llama.cpp (OpenAI) format", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertAnthropicToolsToOpenRouter } = require("../src/clients/openrouter-utils");
+
+       const anthropicTools = [
+         {
+           name: "Read",
+           description: "Read a file from disk",
+           input_schema: {
+             type: "object",
+             properties: {
+               file_path: { type: "string", description: "Path to the file" }
+             },
+             required: ["file_path"]
+           }
+         }
+       ];
+
+       const result = convertAnthropicToolsToOpenRouter(anthropicTools);
+
+       assert.strictEqual(result.length, 1);
+       assert.strictEqual(result[0].type, "function");
+       assert.strictEqual(result[0].function.name, "Read");
+       assert.strictEqual(result[0].function.description, "Read a file from disk");
+       assert.deepStrictEqual(result[0].function.parameters, {
+         type: "object",
+         properties: {
+           file_path: { type: "string", description: "Path to the file" }
+         },
+         required: ["file_path"]
+       });
+     });
+   });
+
+   describe("Error Handling", () => {
+     it("should throw error when llama.cpp response has no choices", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const errorResponse = {
+         error: {
+           message: "Model not loaded",
+           type: "invalid_request_error"
+         }
+       };
+
+       assert.throws(
+         () => convertOpenRouterResponseToAnthropic(errorResponse, "test-model"),
+         /No choices in OpenRouter response/
+       );
+     });
+
+     it("should throw error when llama.cpp response has empty choices array", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const emptyChoicesResponse = {
+         id: "chatcmpl-123",
+         model: "qwen2.5-coder-7b",
+         choices: [],
+         usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 }
+       };
+
+       assert.throws(
+         () => convertOpenRouterResponseToAnthropic(emptyChoicesResponse, "test-model"),
+         /No choices in OpenRouter response/
+       );
+     });
+
+     it("should handle malformed tool call arguments gracefully", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const responseWithBadArgs = {
+         id: "chatcmpl-123",
+         model: "qwen2.5-coder-7b",
+         choices: [
+           {
+             message: {
+               role: "assistant",
+               content: "Using tool",
+               tool_calls: [
+                 {
+                   id: "call_bad",
+                   type: "function",
+                   function: {
+                     name: "Read",
+                     arguments: "invalid json {"
+                   }
+                 }
+               ]
+             },
+             finish_reason: "tool_calls"
+           }
+         ],
+         usage: { prompt_tokens: 10, completion_tokens: 20, total_tokens: 30 }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(responseWithBadArgs, "test-model");
+
+       // Should still convert, but with empty input object
+       assert.strictEqual(result.content[1].type, "tool_use");
+       assert.deepStrictEqual(result.content[1].input, {});
+     });
+   });
+
+   describe("Finish Reason Mapping", () => {
+     it("should map stop finish_reason to end_turn", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const response = {
+         choices: [
+           {
+             message: { role: "assistant", content: "Complete" },
+             finish_reason: "stop"
+           }
+         ],
+         usage: { prompt_tokens: 5, completion_tokens: 1, total_tokens: 6 }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(response, "test-model");
+       assert.strictEqual(result.stop_reason, "end_turn");
+     });
+
+     it("should map tool_calls finish_reason to tool_use", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const response = {
+         choices: [
+           {
+             message: {
+               role: "assistant",
+               content: "Executing tool",
+               tool_calls: [
+                 {
+                   id: "call_1",
+                   type: "function",
+                   function: { name: "test", arguments: "{}" }
+                 }
+               ]
+             },
+             finish_reason: "tool_calls"
+           }
+         ],
+         usage: { prompt_tokens: 5, completion_tokens: 10, total_tokens: 15 }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(response, "test-model");
+       assert.strictEqual(result.stop_reason, "tool_use");
+     });
+
+     it("should map length finish_reason to max_tokens", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const response = {
+         choices: [
+           {
+             message: { role: "assistant", content: "Truncated response..." },
+             finish_reason: "length"
+           }
+         ],
+         usage: { prompt_tokens: 5, completion_tokens: 100, total_tokens: 105 }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(response, "test-model");
+       assert.strictEqual(result.stop_reason, "max_tokens");
+     });
+   });
+
+   describe("Usage Metrics", () => {
+     it("should correctly map llama.cpp usage to Anthropic format", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const response = {
+         choices: [
+           {
+             message: { role: "assistant", content: "Response" },
+             finish_reason: "stop"
+           }
+         ],
+         usage: {
+           prompt_tokens: 200,
+           completion_tokens: 100,
+           total_tokens: 300
+         }
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(response, "test-model");
+
+       assert.strictEqual(result.usage.input_tokens, 200);
+       assert.strictEqual(result.usage.output_tokens, 100);
+     });
+
+     it("should handle missing usage gracefully", () => {
+       process.env.MODEL_PROVIDER = "databricks";
+       process.env.DATABRICKS_API_KEY = "test-key";
+       process.env.DATABRICKS_API_BASE = "http://test.com";
+
+       const { convertOpenRouterResponseToAnthropic } = require("../src/clients/openrouter-utils");
+
+       const response = {
+         choices: [
+           {
+             message: { role: "assistant", content: "Response" },
+             finish_reason: "stop"
+           }
+         ]
+         // No usage field
+       };
+
+       const result = convertOpenRouterResponseToAnthropic(response, "test-model");
+
+       assert.strictEqual(result.usage.input_tokens, 0);
+       assert.strictEqual(result.usage.output_tokens, 0);
+     });
+   });
+ });