@amux.ai/adapter-deepseek 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,632 @@
+ import { contentToString, parseOpenAICompatibleError, mapFinishReason } from '@amux.ai/llm-bridge';
+
+ // src/inbound/request-parser.ts
+ function parseRequest(request) {
+   const req = request;
+   let system;
+   const messages = [];
+   for (const msg of req.messages) {
+     if (msg.role === "system") {
+       if (typeof msg.content === "string") {
+         system = system ? `${system}
+ ${msg.content}` : msg.content;
+       }
+     } else {
+       messages.push(parseMessage(msg));
+     }
+   }
+   const tools = req.tools?.map((tool) => parseTool(tool));
+   const toolChoice = req.tool_choice ? parseToolChoice(req.tool_choice) : void 0;
+   return {
+     messages,
+     model: req.model,
+     tools,
+     toolChoice,
+     stream: req.stream,
+     system,
+     generation: {
+       temperature: req.temperature,
+       topP: req.top_p,
+       maxTokens: req.max_tokens,
+       stopSequences: req.stop ? Array.isArray(req.stop) ? req.stop : [req.stop] : void 0,
+       presencePenalty: req.presence_penalty,
+       frequencyPenalty: req.frequency_penalty,
+       responseFormat: req.response_format ? { type: req.response_format.type } : void 0,
+       logprobs: req.logprobs,
+       topLogprobs: req.top_logprobs,
+       // DeepSeek-specific: thinking mode
+       thinking: req.thinking ? { enabled: req.thinking.type === "enabled" } : void 0
+     },
+     raw: request
+   };
+ }
+ function parseMessage(msg) {
+   return {
+     role: msg.role,
+     content: parseContent(msg.content),
+     name: msg.name,
+     toolCalls: msg.tool_calls,
+     toolCallId: msg.tool_call_id,
+     // DeepSeek-specific: reasoning content
+     reasoningContent: msg.reasoning_content
+   };
+ }
+ function parseContent(content) {
+   if (content === null || content === void 0) {
+     return "";
+   }
+   if (typeof content === "string") {
+     return content;
+   }
+   return content.map((part) => {
+     if (part.type === "text") {
+       return {
+         type: "text",
+         text: part.text
+       };
+     }
+     if (part.type === "image_url") {
+       const url = part.image_url.url;
+       if (url.startsWith("data:")) {
+         const match = url.match(/^data:([^;]+);base64,(.+)$/);
+         if (match) {
+           return {
+             type: "image",
+             source: {
+               type: "base64",
+               mediaType: match[1],
+               data: match[2]
+             }
+           };
+         }
+       }
+       return {
+         type: "image",
+         source: {
+           type: "url",
+           url
+         }
+       };
+     }
+     return {
+       type: "text",
+       text: JSON.stringify(part)
+     };
+   });
+ }
+ function parseTool(tool) {
+   return {
+     type: "function",
+     function: {
+       name: tool.function.name,
+       description: tool.function.description,
+       parameters: tool.function.parameters,
+       strict: tool.function.strict
+     }
+   };
+ }
+ function parseToolChoice(choice) {
+   if (typeof choice === "string") {
+     return choice;
+   }
+   return {
+     type: "function",
+     function: {
+       name: choice.function.name
+     }
+   };
+ }
+ function parseResponse(response) {
+   const res = response;
+   const choices = res.choices.map((choice) => ({
+     index: choice.index,
+     message: {
+       role: choice.message.role,
+       content: choice.message.content ?? "",
+       toolCalls: choice.message.tool_calls,
+       // DeepSeek-specific: reasoning content
+       reasoningContent: choice.message.reasoning_content
+     },
+     finishReason: mapFinishReason(choice.finish_reason),
+     logprobs: choice.logprobs
+   }));
+   return {
+     id: res.id,
+     model: res.model,
+     choices,
+     created: res.created,
+     systemFingerprint: res.system_fingerprint,
+     usage: res.usage ? {
+       promptTokens: res.usage.prompt_tokens,
+       completionTokens: res.usage.completion_tokens,
+       totalTokens: res.usage.total_tokens,
+       details: {
+         reasoningTokens: res.usage.completion_tokens_details?.reasoning_tokens,
+         // DeepSeek-specific: cache tokens
+         cachedTokens: res.usage.prompt_cache_hit_tokens
+       }
+     } : void 0,
+     // Store DeepSeek-specific cache info in extensions
+     extensions: res.usage?.prompt_cache_hit_tokens !== void 0 ? {
+       deepseek: {
+         promptCacheHitTokens: res.usage.prompt_cache_hit_tokens,
+         promptCacheMissTokens: res.usage.prompt_cache_miss_tokens
+       }
+     } : void 0,
+     raw: response
+   };
+ }
+
+ // src/inbound/stream-parser.ts
+ function mapFinishReason2(reason) {
+   const reasonMap = {
+     stop: "stop",
+     length: "length",
+     tool_calls: "tool_calls",
+     content_filter: "content_filter",
+     insufficient_system_resource: "error"
+   };
+   return reasonMap[reason] ?? "stop";
+ }
+ function parseStream(chunk) {
+   const data = chunk;
+   if (!data.choices || data.choices.length === 0) {
+     if (data.usage) {
+       return {
+         type: "end",
+         id: data.id,
+         model: data.model,
+         usage: {
+           promptTokens: data.usage.prompt_tokens,
+           completionTokens: data.usage.completion_tokens,
+           totalTokens: data.usage.total_tokens,
+           details: {
+             reasoningTokens: data.usage.completion_tokens_details?.reasoning_tokens,
+             cachedTokens: data.usage.prompt_cache_hit_tokens
+           }
+         },
+         raw: chunk
+       };
+     }
+     return null;
+   }
+   const choice = data.choices[0];
+   if (!choice) return null;
+   const delta = choice.delta;
+   const events = [];
+   if (delta.role && !delta.content && !delta.tool_calls && !delta.reasoning_content) {
+     return {
+       type: "start",
+       id: data.id,
+       model: data.model,
+       raw: chunk
+     };
+   }
+   if (delta.reasoning_content) {
+     events.push({
+       type: "reasoning",
+       id: data.id,
+       model: data.model,
+       reasoning: {
+         type: "reasoning",
+         delta: delta.reasoning_content,
+         index: choice.index
+       },
+       raw: chunk
+     });
+   }
+   if (delta.content) {
+     events.push({
+       type: "content",
+       id: data.id,
+       model: data.model,
+       content: {
+         type: "content",
+         delta: delta.content,
+         index: choice.index
+       },
+       raw: chunk
+     });
+   }
+   if (delta.tool_calls && delta.tool_calls.length > 0) {
+     const toolCall = delta.tool_calls[0];
+     if (toolCall) {
+       events.push({
+         type: "tool_call",
+         id: data.id,
+         model: data.model,
+         toolCall: {
+           type: "tool_call",
+           id: toolCall.id,
+           name: toolCall.function?.name,
+           arguments: toolCall.function?.arguments,
+           index: toolCall.index
+         },
+         raw: chunk
+       });
+     }
+   }
+   if (choice.finish_reason) {
+     events.push({
+       type: "end",
+       id: data.id,
+       model: data.model,
+       finishReason: mapFinishReason2(choice.finish_reason),
+       usage: data.usage ? {
+         promptTokens: data.usage.prompt_tokens,
+         completionTokens: data.usage.completion_tokens,
+         totalTokens: data.usage.total_tokens,
+         details: {
+           reasoningTokens: data.usage.completion_tokens_details?.reasoning_tokens,
+           cachedTokens: data.usage.prompt_cache_hit_tokens
+         }
+       } : void 0,
+       raw: chunk
+     });
+   }
+   if (events.length === 0) {
+     return null;
+   }
+   const firstEvent = events[0];
+   return events.length === 1 && firstEvent ? firstEvent : events;
+ }
+ function parseError(error) {
+   return parseOpenAICompatibleError(error);
+ }
+
+ // src/outbound/request-builder.ts
+ function buildRequest(ir) {
+   const messages = [];
+   const isReasonerModel = ir.model?.includes("reasoner");
+   if (ir.system && !isReasonerModel) {
+     messages.push({
+       role: "system",
+       content: ir.system
+     });
+   }
+   for (const msg of ir.messages) {
+     if (isReasonerModel && msg.role === "system") {
+       continue;
+     }
+     const message = {
+       role: msg.role,
+       content: buildContent(msg.content),
+       name: msg.name,
+       tool_calls: msg.toolCalls,
+       tool_call_id: msg.toolCallId
+     };
+     if (!isReasonerModel && msg.reasoningContent !== void 0) {
+       message.reasoning_content = msg.reasoningContent;
+     }
+     messages.push(message);
+   }
+   const request = {
+     model: ir.model ?? "deepseek-chat",
+     messages,
+     stream: ir.stream
+   };
+   if (ir.tools && ir.tools.length > 0) {
+     request.tools = ir.tools.map((tool) => ({
+       type: "function",
+       function: {
+         name: tool.function.name,
+         description: tool.function.description,
+         parameters: tool.function.parameters,
+         strict: tool.function.strict
+       }
+     }));
+   }
+   if (ir.toolChoice) {
+     request.tool_choice = ir.toolChoice;
+   }
+   if (ir.generation) {
+     if (ir.generation.temperature !== void 0) {
+       request.temperature = ir.generation.temperature;
+     }
+     if (ir.generation.topP !== void 0) {
+       request.top_p = ir.generation.topP;
+     }
+     if (ir.generation.maxTokens !== void 0) {
+       request.max_tokens = Math.min(Math.max(ir.generation.maxTokens, 1), 8192);
+     }
+     if (ir.generation.stopSequences && ir.generation.stopSequences.length > 0) {
+       request.stop = ir.generation.stopSequences;
+     }
+     if (ir.generation.presencePenalty !== void 0) {
+       request.presence_penalty = ir.generation.presencePenalty;
+     }
+     if (ir.generation.frequencyPenalty !== void 0) {
+       request.frequency_penalty = ir.generation.frequencyPenalty;
+     }
+     if (ir.generation.responseFormat) {
+       if (ir.generation.responseFormat.type === "json_object") {
+         request.response_format = { type: "json_object" };
+       }
+     }
+     if (ir.generation.logprobs !== void 0) {
+       request.logprobs = ir.generation.logprobs;
+     }
+     if (ir.generation.topLogprobs !== void 0) {
+       request.top_logprobs = ir.generation.topLogprobs;
+     }
+     if (ir.generation.thinking) {
+       request.thinking = {
+         type: ir.generation.thinking.enabled ? "enabled" : "disabled"
+       };
+     }
+   }
+   if (ir.stream) {
+     request.stream_options = { include_usage: true };
+   }
+   return request;
+ }
+ function buildContent(content) {
+   if (typeof content === "string") {
+     return content || null;
+   }
+   if (!content || content.length === 0) {
+     return null;
+   }
+   const allText = content.every((part) => part.type === "text");
+   if (allText) {
+     return content.map((part) => part.type === "text" ? part.text : "").join("");
+   }
+   return content.map((part) => {
+     if (part.type === "text") {
+       return { type: "text", text: part.text };
+     }
+     if (part.type === "image") {
+       const imgPart = part;
+       if (imgPart.source.type === "url") {
+         return {
+           type: "image_url",
+           image_url: { url: imgPart.source.url }
+         };
+       }
+       return {
+         type: "image_url",
+         image_url: {
+           url: `data:${imgPart.source.mediaType};base64,${imgPart.source.data}`
+         }
+       };
+     }
+     return { type: "text", text: JSON.stringify(part) };
+   });
+ }
+ function buildResponse(ir) {
+   return {
+     id: ir.id,
+     object: "chat.completion",
+     created: ir.created ?? Math.floor(Date.now() / 1e3),
+     model: ir.model,
+     system_fingerprint: ir.systemFingerprint,
+     choices: ir.choices.map((choice) => ({
+       index: choice.index,
+       message: {
+         role: choice.message.role,
+         content: contentToString(choice.message.content),
+         tool_calls: choice.message.toolCalls,
+         // DeepSeek-specific: reasoning content
+         reasoning_content: choice.message.reasoningContent
+       },
+       finish_reason: choice.finishReason ?? "stop",
+       logprobs: choice.logprobs
+     })),
+     usage: ir.usage ? {
+       prompt_tokens: ir.usage.promptTokens,
+       completion_tokens: ir.usage.completionTokens,
+       total_tokens: ir.usage.totalTokens,
+       // DeepSeek-specific: cache tokens
+       prompt_cache_hit_tokens: ir.usage.details?.cachedTokens,
+       prompt_cache_miss_tokens: ir.extensions?.deepseek?.promptCacheMissTokens,
+       completion_tokens_details: ir.usage.details?.reasoningTokens ? {
+         reasoning_tokens: ir.usage.details.reasoningTokens
+       } : void 0
+     } : void 0
+   };
+ }
+
+ // src/outbound/stream-builder.ts
+ function createStreamBuilder() {
+   let chunkId = `chatcmpl-${Date.now()}`;
+   let model = "";
+   let created = Math.floor(Date.now() / 1e3);
+   const toolCallsState = /* @__PURE__ */ new Map();
+   return {
+     process(event) {
+       const events = [];
+       if (event.id) chunkId = event.id;
+       if (event.model) model = event.model;
+       if (event.type === "start") {
+         events.push({
+           event: "data",
+           data: {
+             id: chunkId,
+             object: "chat.completion.chunk",
+             created,
+             model,
+             choices: [{
+               index: 0,
+               delta: { role: "assistant", content: "" },
+               finish_reason: null
+             }]
+           }
+         });
+       }
+       if (event.type === "content" && event.content?.delta) {
+         events.push({
+           event: "data",
+           data: {
+             id: chunkId,
+             object: "chat.completion.chunk",
+             created,
+             model,
+             choices: [{
+               index: 0,
+               delta: { content: event.content.delta },
+               finish_reason: null
+             }]
+           }
+         });
+       }
+       if (event.type === "reasoning" && event.reasoning?.delta) {
+         events.push({
+           event: "data",
+           data: {
+             id: chunkId,
+             object: "chat.completion.chunk",
+             created,
+             model,
+             choices: [{
+               index: 0,
+               delta: { reasoning_content: event.reasoning.delta },
+               finish_reason: null
+             }]
+           }
+         });
+       }
+       if (event.type === "tool_call" && event.toolCall) {
+         const toolIndex = event.toolCall.index ?? 0;
+         const toolCallDelta = { index: toolIndex };
+         if (event.toolCall.name) {
+           toolCallDelta.id = event.toolCall.id || `call_${Date.now()}_${toolIndex}`;
+           toolCallDelta.type = "function";
+           toolCallDelta.function = { name: event.toolCall.name };
+           toolCallsState.set(toolIndex, {
+             id: toolCallDelta.id,
+             name: event.toolCall.name
+           });
+         }
+         if (event.toolCall.arguments) {
+           toolCallDelta.function = {
+             ...toolCallDelta.function,
+             arguments: event.toolCall.arguments
+           };
+         }
+         events.push({
+           event: "data",
+           data: {
+             id: chunkId,
+             object: "chat.completion.chunk",
+             created,
+             model,
+             choices: [{
+               index: 0,
+               delta: { tool_calls: [toolCallDelta] },
+               finish_reason: null
+             }]
+           }
+         });
+       }
+       if (event.type === "end") {
+         const finishReason = mapFinishReason3(event.finishReason);
+         const finalChunk = {
+           id: chunkId,
+           object: "chat.completion.chunk",
+           created,
+           model,
+           choices: [{
+             index: 0,
+             delta: {},
+             finish_reason: finishReason
+           }]
+         };
+         if (event.usage) {
+           finalChunk.usage = {
+             prompt_tokens: event.usage.promptTokens ?? 0,
+             completion_tokens: event.usage.completionTokens ?? 0,
+             total_tokens: event.usage.totalTokens ?? 0
+           };
+         }
+         events.push({ event: "data", data: finalChunk });
+       }
+       if (event.type === "error" && event.error) {
+         events.push({
+           event: "data",
+           data: {
+             error: {
+               message: event.error.message,
+               type: "server_error",
+               code: event.error.code
+             }
+           }
+         });
+       }
+       return events;
+     },
+     finalize() {
+       return [{ event: "data", data: "[DONE]" }];
+     }
+   };
+ }
+ function mapFinishReason3(reason) {
+   if (!reason) return "stop";
+   const reasonMap = {
+     stop: "stop",
+     length: "length",
+     tool_calls: "tool_calls",
+     content_filter: "content_filter",
+     end_turn: "stop",
+     max_tokens: "length"
+   };
+   return reasonMap[reason] ?? "stop";
+ }
+
+ // src/adapter.ts
+ var deepseekAdapter = {
+   name: "deepseek",
+   version: "1.0.0",
+   capabilities: {
+     streaming: true,
+     tools: true,
+     vision: false,
+     multimodal: false,
+     systemPrompt: true,
+     toolChoice: true,
+     reasoning: true,
+     // DeepSeek-reasoner supports reasoning
+     webSearch: false,
+     jsonMode: true,
+     logprobs: true,
+     seed: false
+   },
+   inbound: {
+     parseRequest: (request) => {
+       return parseRequest(request);
+     },
+     parseResponse: (response) => {
+       return parseResponse(response);
+     },
+     parseStream: (chunk) => {
+       return parseStream(chunk);
+     },
+     parseError: (error) => {
+       return parseError(error);
+     }
+   },
+   outbound: {
+     buildRequest: (ir) => {
+       return buildRequest(ir);
+     },
+     buildResponse: (ir) => {
+       return buildResponse(ir);
+     },
+     createStreamBuilder
+   },
+   getInfo() {
+     return {
+       name: this.name,
+       version: this.version,
+       capabilities: this.capabilities,
+       endpoint: {
+         baseUrl: "https://api.deepseek.com",
+         chatPath: "/v1/chat/completions",
+         modelsPath: "/v1/models"
+       }
+     };
+   }
+ };
+
+ export { deepseekAdapter };
+ //# sourceMappingURL=index.js.map
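
Usage sketch (not part of the published package): a minimal example of wiring the adapter's inbound and outbound halves together, assuming the request and IR shapes implied by parseRequest and buildRequest above; the HTTP call and API-key handling are placeholders for illustration.

import { deepseekAdapter } from '@amux.ai/adapter-deepseek';

// Parse an OpenAI-style chat request into the bridge IR...
const ir = deepseekAdapter.inbound.parseRequest({
  model: 'deepseek-chat',
  messages: [
    { role: 'system', content: 'You are concise.' },
    { role: 'user', content: 'Hello' }
  ],
  temperature: 0.7
});

// ...then build the outgoing DeepSeek request body from the IR.
const body = deepseekAdapter.outbound.buildRequest(ir);

// Endpoint metadata comes from getInfo(); authentication is assumed here.
const { baseUrl, chatPath } = deepseekAdapter.getInfo().endpoint;
// POST `${baseUrl}${chatPath}` with JSON.stringify(body) and an Authorization
// header, then pass the parsed JSON response to
// deepseekAdapter.inbound.parseResponse(...) to map it back into the IR.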