@amux.ai/adapter-deepseek 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,634 @@
1
+ 'use strict';
2
+
3
+ var llmBridge = require('@amux.ai/llm-bridge');
4
+
5
// src/inbound/request-parser.ts

/**
 * Parse an OpenAI-compatible chat-completion request into the bridge IR.
 *
 * System messages are lifted out of the message list and newline-joined into
 * a single `system` string; all other messages pass through parseMessage.
 * DeepSeek-specific request fields (`thinking`) are mapped into `generation`.
 *
 * @param {object} request - Raw OpenAI-style request body.
 * @returns {object} Bridge IR request (messages, tools, generation, raw, …).
 */
function parseRequest(request) {
  const req = request;
  let system;
  const messages = [];
  for (const msg of req.messages) {
    if (msg.role === "system") {
      // Fold system prompts into one newline-joined string. Accept both the
      // plain-string form and the array-of-text-parts form so no system
      // instruction is silently dropped (the old code ignored arrays).
      if (typeof msg.content === "string") {
        system = system ? `${system}\n${msg.content}` : msg.content;
      } else if (Array.isArray(msg.content)) {
        const text = msg.content
          .filter((part) => part?.type === "text")
          .map((part) => part.text)
          .join("\n");
        if (text) {
          system = system ? `${system}\n${text}` : text;
        }
      }
    } else {
      messages.push(parseMessage(msg));
    }
  }
  const tools = req.tools?.map((tool) => parseTool(tool));
  const toolChoice = req.tool_choice ? parseToolChoice(req.tool_choice) : void 0;
  return {
    messages,
    model: req.model,
    tools,
    toolChoice,
    stream: req.stream,
    system,
    generation: {
      temperature: req.temperature,
      topP: req.top_p,
      maxTokens: req.max_tokens,
      // `stop` may be a single string or an array; normalize to an array.
      stopSequences: req.stop ? (Array.isArray(req.stop) ? req.stop : [req.stop]) : void 0,
      presencePenalty: req.presence_penalty,
      frequencyPenalty: req.frequency_penalty,
      responseFormat: req.response_format ? { type: req.response_format.type } : void 0,
      logprobs: req.logprobs,
      topLogprobs: req.top_logprobs,
      // DeepSeek-specific: thinking mode
      thinking: req.thinking ? { enabled: req.thinking.type === "enabled" } : void 0,
    },
    raw: request,
  };
}
45
/**
 * Convert one non-system OpenAI-style message into a bridge IR message.
 * Tool-call plumbing passes through unchanged; DeepSeek's snake_case
 * `reasoning_content` is carried as camelCase `reasoningContent`.
 */
function parseMessage(msg) {
  const {
    role,
    name,
    tool_calls: toolCalls,
    tool_call_id: toolCallId,
    // DeepSeek-specific: reasoning content attached to the message
    reasoning_content: reasoningContent,
  } = msg;
  return {
    role,
    content: parseContent(msg.content),
    name,
    toolCalls,
    toolCallId,
    reasoningContent,
  };
}
56
/**
 * Normalize OpenAI-style message content into bridge IR content.
 * Strings pass through, null/undefined become "", and part arrays are mapped
 * to IR text/image parts. Base64 `data:` image URLs are decomposed into a
 * base64 source; anything unrecognized is preserved as serialized text.
 */
function parseContent(content) {
  if (content == null) return "";
  if (typeof content === "string") return content;
  return content.map(toIrPart);

  function toIrPart(part) {
    switch (part.type) {
      case "text":
        return { type: "text", text: part.text };
      case "image_url": {
        const { url } = part.image_url;
        if (url.startsWith("data:")) {
          const parsed = /^data:([^;]+);base64,(.+)$/.exec(url);
          if (parsed) {
            return {
              type: "image",
              source: { type: "base64", mediaType: parsed[1], data: parsed[2] },
            };
          }
        }
        // Plain (or unparseable data:) URL: keep it as a URL source.
        return { type: "image", source: { type: "url", url } };
      }
      default:
        // Unknown part types degrade to their JSON text.
        return { type: "text", text: JSON.stringify(part) };
    }
  }
}
99
/** Map an OpenAI-style tool definition onto the bridge IR tool shape. */
function parseTool(tool) {
  const { name, description, parameters, strict } = tool.function;
  return {
    type: "function",
    function: { name, description, parameters, strict },
  };
}
110
/**
 * Normalize tool_choice: string modes ("auto", "none", …) pass through
 * unchanged; the object form is reduced to the pinned function name.
 */
function parseToolChoice(choice) {
  return typeof choice === "string"
    ? choice
    : { type: "function", function: { name: choice.function.name } };
}
121
/**
 * Parse a non-streaming DeepSeek chat completion into the bridge IR
 * response. DeepSeek cache accounting (prompt_cache_hit/miss_tokens) is
 * surfaced both in usage.details and, when present, under
 * extensions.deepseek.
 */
function parseResponse(response) {
  const res = response;
  const usage = res.usage;
  return {
    id: res.id,
    model: res.model,
    choices: res.choices.map((choice) => ({
      index: choice.index,
      message: {
        role: choice.message.role,
        // Null content (e.g. pure tool-call replies) normalizes to "".
        content: choice.message.content ?? "",
        toolCalls: choice.message.tool_calls,
        // DeepSeek-specific: reasoning content
        reasoningContent: choice.message.reasoning_content,
      },
      finishReason: llmBridge.mapFinishReason(choice.finish_reason),
      logprobs: choice.logprobs,
    })),
    created: res.created,
    systemFingerprint: res.system_fingerprint,
    usage: usage
      ? {
          promptTokens: usage.prompt_tokens,
          completionTokens: usage.completion_tokens,
          totalTokens: usage.total_tokens,
          details: {
            reasoningTokens: usage.completion_tokens_details?.reasoning_tokens,
            // DeepSeek-specific: cache tokens
            cachedTokens: usage.prompt_cache_hit_tokens,
          },
        }
      : undefined,
    // DeepSeek cache info rides along in extensions when reported.
    extensions:
      usage?.prompt_cache_hit_tokens !== undefined
        ? {
            deepseek: {
              promptCacheHitTokens: usage.prompt_cache_hit_tokens,
              promptCacheMissTokens: usage.prompt_cache_miss_tokens,
            },
          }
        : undefined,
    raw: response,
  };
}
161
+
162
// src/inbound/stream-parser.ts

/**
 * Map a DeepSeek stream finish reason onto a bridge finish reason.
 * DeepSeek's `insufficient_system_resource` maps to "error"; anything
 * unrecognized defaults to "stop".
 *
 * Implemented as a switch rather than a bare object-literal lookup: an
 * object lookup inherits Object.prototype keys, so e.g. reason "toString"
 * would have returned a function instead of "stop".
 */
function mapFinishReason2(reason) {
  switch (reason) {
    case "length":
      return "length";
    case "tool_calls":
      return "tool_calls";
    case "content_filter":
      return "content_filter";
    case "insufficient_system_resource":
      return "error";
    default:
      return "stop";
  }
}
173
/**
 * Parse one DeepSeek SSE chunk into bridge stream event(s).
 *
 * Returns:
 * - a single event object when the chunk yields exactly one event,
 * - an array when it yields several (e.g. reasoning + content, or multiple
 *   tool-call deltas — the old code dropped all but tool_calls[0]),
 * - null when the chunk carries nothing actionable.
 *
 * @param {object} chunk - Raw parsed SSE data payload.
 */
function parseStream(chunk) {
  const data = chunk;
  // Choice-less chunk: either the trailing usage-only chunk emitted when
  // stream_options.include_usage is set, or an ignorable keep-alive.
  if (!data.choices || data.choices.length === 0) {
    if (data.usage) {
      return {
        type: "end",
        id: data.id,
        model: data.model,
        usage: {
          promptTokens: data.usage.prompt_tokens,
          completionTokens: data.usage.completion_tokens,
          totalTokens: data.usage.total_tokens,
          details: {
            reasoningTokens: data.usage.completion_tokens_details?.reasoning_tokens,
            cachedTokens: data.usage.prompt_cache_hit_tokens,
          },
        },
        raw: chunk,
      };
    }
    return null;
  }
  const choice = data.choices[0];
  if (!choice) return null;
  // Finish-only chunks may omit delta entirely; treat that as empty.
  const delta = choice.delta ?? {};
  const events = [];
  // A role-only delta marks the start of the stream.
  if (delta.role && !delta.content && !delta.tool_calls && !delta.reasoning_content) {
    return { type: "start", id: data.id, model: data.model, raw: chunk };
  }
  if (delta.reasoning_content) {
    events.push({
      type: "reasoning",
      id: data.id,
      model: data.model,
      reasoning: { type: "reasoning", delta: delta.reasoning_content, index: choice.index },
      raw: chunk,
    });
  }
  if (delta.content) {
    events.push({
      type: "content",
      id: data.id,
      model: data.model,
      content: { type: "content", delta: delta.content, index: choice.index },
      raw: chunk,
    });
  }
  if (delta.tool_calls && delta.tool_calls.length > 0) {
    // Emit one event per tool-call delta; a single chunk may carry several
    // parallel tool calls, each distinguished by its own `index`.
    for (const toolCall of delta.tool_calls) {
      if (!toolCall) continue;
      events.push({
        type: "tool_call",
        id: data.id,
        model: data.model,
        toolCall: {
          type: "tool_call",
          id: toolCall.id,
          name: toolCall.function?.name,
          arguments: toolCall.function?.arguments,
          index: toolCall.index,
        },
        raw: chunk,
      });
    }
  }
  if (choice.finish_reason) {
    events.push({
      type: "end",
      id: data.id,
      model: data.model,
      finishReason: mapFinishReason2(choice.finish_reason),
      usage: data.usage
        ? {
            promptTokens: data.usage.prompt_tokens,
            completionTokens: data.usage.completion_tokens,
            totalTokens: data.usage.total_tokens,
            details: {
              reasoningTokens: data.usage.completion_tokens_details?.reasoning_tokens,
              cachedTokens: data.usage.prompt_cache_hit_tokens,
            },
          }
        : void 0,
      raw: chunk,
    });
  }
  if (events.length === 0) return null;
  return events.length === 1 ? events[0] : events;
}
275
/**
 * Delegate error parsing to the shared OpenAI-compatible error parser from
 * @amux.ai/llm-bridge — DeepSeek error payloads follow the OpenAI envelope.
 */
function parseError(error) {
  return llmBridge.parseOpenAICompatibleError(error);
}
278
+
279
// src/outbound/request-builder.ts

/**
 * Build a DeepSeek chat-completion request body from the bridge IR.
 *
 * For "reasoner" models the system prompt and any system messages are
 * omitted, and reasoning_content is never echoed back in the input. Only
 * generation knobs that are actually set in the IR are copied onto the
 * request; max_tokens is clamped to DeepSeek's [1, 8192] range.
 */
function buildRequest(ir) {
  const isReasonerModel = ir.model?.includes("reasoner") ?? false;
  const messages = [];
  if (ir.system && !isReasonerModel) {
    messages.push({ role: "system", content: ir.system });
  }
  for (const msg of ir.messages) {
    if (isReasonerModel && msg.role === "system") continue;
    const entry = {
      role: msg.role,
      content: buildContent(msg.content),
      name: msg.name,
      tool_calls: msg.toolCalls,
      tool_call_id: msg.toolCallId,
    };
    if (!isReasonerModel && msg.reasoningContent !== undefined) {
      entry.reasoning_content = msg.reasoningContent;
    }
    messages.push(entry);
  }

  const request = {
    model: ir.model ?? "deepseek-chat",
    messages,
    stream: ir.stream,
  };

  if (ir.tools && ir.tools.length > 0) {
    request.tools = ir.tools.map(({ function: fn }) => ({
      type: "function",
      function: {
        name: fn.name,
        description: fn.description,
        parameters: fn.parameters,
        strict: fn.strict,
      },
    }));
  }
  if (ir.toolChoice) {
    request.tool_choice = ir.toolChoice;
  }

  const gen = ir.generation;
  if (gen) {
    if (gen.temperature !== undefined) request.temperature = gen.temperature;
    if (gen.topP !== undefined) request.top_p = gen.topP;
    if (gen.maxTokens !== undefined) {
      // DeepSeek requires 1 <= max_tokens <= 8192.
      request.max_tokens = Math.min(Math.max(gen.maxTokens, 1), 8192);
    }
    if (gen.stopSequences && gen.stopSequences.length > 0) {
      request.stop = gen.stopSequences;
    }
    if (gen.presencePenalty !== undefined) request.presence_penalty = gen.presencePenalty;
    if (gen.frequencyPenalty !== undefined) request.frequency_penalty = gen.frequencyPenalty;
    // Only json_object mode is forwarded; other formats are left unset.
    if (gen.responseFormat?.type === "json_object") {
      request.response_format = { type: "json_object" };
    }
    if (gen.logprobs !== undefined) request.logprobs = gen.logprobs;
    if (gen.topLogprobs !== undefined) request.top_logprobs = gen.topLogprobs;
    if (gen.thinking) {
      request.thinking = { type: gen.thinking.enabled ? "enabled" : "disabled" };
    }
  }

  if (ir.stream) {
    // Ask DeepSeek to append the usage-only terminal chunk.
    request.stream_options = { include_usage: true };
  }
  return request;
}
365
/**
 * Convert bridge IR content back into DeepSeek/OpenAI message content.
 * Empty strings, null, and empty arrays become null; all-text part arrays
 * collapse to a single string; mixed arrays are mapped part-by-part, with
 * base64 image sources re-encoded as data: URLs.
 */
function buildContent(content) {
  if (typeof content === "string") {
    return content.length > 0 ? content : null;
  }
  if (!content || content.length === 0) {
    return null;
  }
  if (content.every((part) => part.type === "text")) {
    return content.map((part) => part.text).join("");
  }
  return content.map((part) => {
    if (part.type === "text") {
      return { type: "text", text: part.text };
    }
    if (part.type === "image") {
      const src = part.source;
      const url = src.type === "url" ? src.url : `data:${src.mediaType};base64,${src.data}`;
      return { type: "image_url", image_url: { url } };
    }
    // Unknown part types degrade to their JSON text.
    return { type: "text", text: JSON.stringify(part) };
  });
}
398
/**
 * Build an OpenAI-style chat.completion payload from a bridge IR response,
 * re-attaching DeepSeek-specific cache and reasoning-token accounting.
 *
 * @param {object} ir - Bridge IR response.
 * @returns {object} OpenAI-compatible response body.
 */
function buildResponse(ir) {
  const reasoningTokens = ir.usage?.details?.reasoningTokens;
  return {
    id: ir.id,
    object: "chat.completion",
    // Fall back to "now" (seconds) when the IR lacks a creation timestamp.
    created: ir.created ?? Math.floor(Date.now() / 1e3),
    model: ir.model,
    system_fingerprint: ir.systemFingerprint,
    choices: ir.choices.map((choice) => ({
      index: choice.index,
      message: {
        role: choice.message.role,
        content: llmBridge.contentToString(choice.message.content),
        tool_calls: choice.message.toolCalls,
        // DeepSeek-specific: reasoning content
        reasoning_content: choice.message.reasoningContent,
      },
      finish_reason: choice.finishReason ?? "stop",
      logprobs: choice.logprobs,
    })),
    usage: ir.usage
      ? {
          prompt_tokens: ir.usage.promptTokens,
          completion_tokens: ir.usage.completionTokens,
          total_tokens: ir.usage.totalTokens,
          // DeepSeek-specific: cache tokens
          prompt_cache_hit_tokens: ir.usage.details?.cachedTokens,
          prompt_cache_miss_tokens: ir.extensions?.deepseek?.promptCacheMissTokens,
          // Explicit undefined check so a legitimate 0 reasoning-token count
          // is still reported (the previous truthy test silently dropped 0).
          completion_tokens_details:
            reasoningTokens !== undefined ? { reasoning_tokens: reasoningTokens } : undefined,
        }
      : undefined,
  };
}
430
+
431
// src/outbound/stream-builder.ts

/**
 * Create a stateful builder that converts bridge stream events back into
 * OpenAI-style SSE chunks for DeepSeek clients.
 *
 * The builder remembers the chunk id / model from earlier events so later
 * chunks stay consistent. The previous version also kept a `toolCallsState`
 * Map that was written on every tool-call event but never read — dead state
 * that only grew for the life of the stream; it has been removed.
 *
 * @returns {{process: Function, finalize: Function}} SSE chunk builder.
 */
function createStreamBuilder() {
  let chunkId = `chatcmpl-${Date.now()}`;
  let model = "";
  const created = Math.floor(Date.now() / 1e3);

  // Wrap a single choice-0 delta in a chat.completion.chunk envelope.
  const makeChunk = (delta, finishReason = null) => ({
    id: chunkId,
    object: "chat.completion.chunk",
    created,
    model,
    choices: [{ index: 0, delta, finish_reason: finishReason }],
  });

  return {
    process(event) {
      const events = [];
      if (event.id) chunkId = event.id;
      if (event.model) model = event.model;
      if (event.type === "start") {
        events.push({ event: "data", data: makeChunk({ role: "assistant", content: "" }) });
      }
      if (event.type === "content" && event.content?.delta) {
        events.push({ event: "data", data: makeChunk({ content: event.content.delta }) });
      }
      if (event.type === "reasoning" && event.reasoning?.delta) {
        // DeepSeek-specific delta field for chain-of-thought text.
        events.push({ event: "data", data: makeChunk({ reasoning_content: event.reasoning.delta }) });
      }
      if (event.type === "tool_call" && event.toolCall) {
        const toolIndex = event.toolCall.index ?? 0;
        const toolCallDelta = { index: toolIndex };
        if (event.toolCall.name) {
          // First delta of a call carries id/type/name; synthesize an id if
          // the upstream event lacks one.
          toolCallDelta.id = event.toolCall.id || `call_${Date.now()}_${toolIndex}`;
          toolCallDelta.type = "function";
          toolCallDelta.function = { name: event.toolCall.name };
        }
        if (event.toolCall.arguments) {
          toolCallDelta.function = {
            ...toolCallDelta.function,
            arguments: event.toolCall.arguments,
          };
        }
        events.push({ event: "data", data: makeChunk({ tool_calls: [toolCallDelta] }) });
      }
      if (event.type === "end") {
        const finalChunk = makeChunk({}, mapFinishReason3(event.finishReason));
        if (event.usage) {
          finalChunk.usage = {
            prompt_tokens: event.usage.promptTokens ?? 0,
            completion_tokens: event.usage.completionTokens ?? 0,
            total_tokens: event.usage.totalTokens ?? 0,
          };
        }
        events.push({ event: "data", data: finalChunk });
      }
      if (event.type === "error" && event.error) {
        events.push({
          event: "data",
          data: {
            error: {
              message: event.error.message,
              type: "server_error",
              code: event.error.code,
            },
          },
        });
      }
      return events;
    },
    // SSE termination sentinel expected by OpenAI-compatible clients.
    finalize() {
      return [{ event: "data", data: "[DONE]" }];
    },
  };
}
564
/**
 * Map a bridge finish reason onto an OpenAI-style finish reason for
 * outbound chunks. Anthropic-style aliases are folded in (end_turn → stop,
 * max_tokens → length); null/undefined/unknown default to "stop".
 *
 * Implemented as a switch rather than a bare object-literal lookup: an
 * object lookup inherits Object.prototype keys, so e.g. reason "toString"
 * would have returned a function instead of "stop".
 */
function mapFinishReason3(reason) {
  if (!reason) return "stop";
  switch (reason) {
    case "length":
    case "max_tokens":
      return "length";
    case "tool_calls":
      return "tool_calls";
    case "content_filter":
      return "content_filter";
    case "stop":
    case "end_turn":
    default:
      return "stop";
  }
}
576
+
577
// src/adapter.ts

/**
 * DeepSeek adapter: wires the inbound parsers and outbound builders above
 * into the llm-bridge adapter contract, and advertises DeepSeek's
 * capabilities and endpoint layout.
 */
var deepseekAdapter = {
  name: "deepseek",
  version: "1.0.0",
  capabilities: {
    streaming: true,
    tools: true,
    vision: false,
    multimodal: false,
    systemPrompt: true,
    toolChoice: true,
    reasoning: true, // deepseek-reasoner emits reasoning_content
    webSearch: false,
    jsonMode: true,
    logprobs: true,
    seed: false,
  },
  inbound: {
    parseRequest: (request) => parseRequest(request),
    parseResponse: (response) => parseResponse(response),
    parseStream: (chunk) => parseStream(chunk),
    parseError: (error) => parseError(error),
  },
  outbound: {
    buildRequest: (ir) => buildRequest(ir),
    buildResponse: (ir) => buildResponse(ir),
    createStreamBuilder,
  },
  getInfo() {
    return {
      name: this.name,
      version: this.version,
      capabilities: this.capabilities,
      endpoint: {
        baseUrl: "https://api.deepseek.com",
        chatPath: "/v1/chat/completions",
        modelsPath: "/v1/models",
      },
    };
  },
};
631
+
632
// CommonJS entry point: expose the DeepSeek adapter implementation.
exports.deepseekAdapter = deepseekAdapter;
633
+ //# sourceMappingURL=index.cjs.map
634
+ //# sourceMappingURL=index.cjs.map