@langchain/deepseek 0.0.2 → 1.0.0

This diff shows the contents of two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
@@ -1,417 +1,403 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ChatDeepSeek = void 0;
- const env_1 = require("@langchain/core/utils/env");
- const openai_1 = require("@langchain/openai");
+ const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
+ const __langchain_core_utils_env = require_rolldown_runtime.__toESM(require("@langchain/core/utils/env"));
+ const __langchain_openai = require_rolldown_runtime.__toESM(require("@langchain/openai"));
+
+ //#region src/chat_models.ts
  /**
- * DeepSeek chat model integration.
- *
- * The DeepSeek API is compatible with the OpenAI API, albeit with some limitations.
- *
- * Setup:
- * Install `@langchain/deepseek` and set an environment variable named `DEEPSEEK_API_KEY`.
- *
- * ```bash
- * npm install @langchain/deepseek
- * export DEEPSEEK_API_KEY="your-api-key"
- * ```
- *
- * ## [Constructor args](https://api.js.langchain.com/classes/_langchain_deepseek.ChatDeepSeek.html#constructor)
- *
- * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
- *
- * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
- * They can also be passed via `.withConfig`, or as the second argument to `.bindTools`, as shown in the examples below:
- *
- * ```typescript
- * // When calling `.withConfig`, call options should be passed via the first argument
- * const llmWithArgsBound = llm.withConfig({
- *   stop: ["\n"],
- *   tools: [...],
- * });
- *
- * // When calling `.bindTools`, call options should be passed via the second argument
- * const llmWithTools = llm.bindTools(
- *   [...],
- *   {
- *     tool_choice: "auto",
- *   }
- * );
- * ```
- *
- * ## Examples
- *
- * <details open>
- * <summary><strong>Instantiate</strong></summary>
- *
- * ```typescript
- * import { ChatDeepSeek } from '@langchain/deepseek';
- *
- * const llm = new ChatDeepSeek({
- *   model: "deepseek-reasoner",
- *   temperature: 0,
- *   // other params...
- * });
- * ```
- * </details>
- *
- * <br />
- *
- * <details>
- * <summary><strong>Invoking</strong></summary>
- *
- * ```typescript
- * const input = `Translate "I love programming" into French.`;
- *
- * // Models also accept a list of chat messages or a formatted prompt
- * const result = await llm.invoke(input);
- * console.log(result);
- * ```
- *
- * ```txt
- * AIMessage {
- *   "content": "The French translation of \"I love programming\" is \"J'aime programmer\". In this sentence, \"J'aime\" is the first person singular conjugation of the French verb \"aimer\" which means \"to love\", and \"programmer\" is the French infinitive for \"to program\". I hope this helps! Let me know if you have any other questions.",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "tokenUsage": {
- *       "completionTokens": 82,
- *       "promptTokens": 20,
- *       "totalTokens": 102
- *     },
- *     "finish_reason": "stop"
- *   },
- *   "tool_calls": [],
- *   "invalid_tool_calls": []
- * }
- * ```
- * </details>
- *
- * <br />
- *
- * <details>
- * <summary><strong>Streaming Chunks</strong></summary>
- *
- * ```typescript
- * for await (const chunk of await llm.stream(input)) {
- *   console.log(chunk);
- * }
- * ```
- *
- * ```txt
- * AIMessageChunk {
- *   "content": "",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": "The",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": " French",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": " translation",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": " of",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": " \"",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": "I",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": " love",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * ...
- * AIMessageChunk {
- *   "content": ".",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": null
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * AIMessageChunk {
- *   "content": "",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": "stop"
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * ```
- * </details>
- *
- * <br />
- *
- * <details>
- * <summary><strong>Aggregate Streamed Chunks</strong></summary>
- *
- * ```typescript
- * import { AIMessageChunk } from '@langchain/core/messages';
- * import { concat } from '@langchain/core/utils/stream';
- *
- * const stream = await llm.stream(input);
- * let full: AIMessageChunk | undefined;
- * for await (const chunk of stream) {
- *   full = !full ? chunk : concat(full, chunk);
- * }
- * console.log(full);
- * ```
- *
- * ```txt
- * AIMessageChunk {
- *   "content": "The French translation of \"I love programming\" is \"J'aime programmer\". In this sentence, \"J'aime\" is the first person singular conjugation of the French verb \"aimer\" which means \"to love\", and \"programmer\" is the French infinitive for \"to program\". I hope this helps! Let me know if you have any other questions.",
- *   "additional_kwargs": {
- *     "reasoning_content": "...",
- *   },
- *   "response_metadata": {
- *     "finishReason": "stop"
- *   },
- *   "tool_calls": [],
- *   "tool_call_chunks": [],
- *   "invalid_tool_calls": []
- * }
- * ```
- * </details>
- *
- * <br />
- *
- * <details>
- * <summary><strong>Bind tools</strong></summary>
- *
- * ```typescript
- * import { z } from 'zod';
- *
- * const llmForToolCalling = new ChatDeepSeek({
- *   model: "deepseek-chat",
- *   temperature: 0,
- *   // other params...
- * });
- *
- * const GetWeather = {
- *   name: "GetWeather",
- *   description: "Get the current weather in a given location",
- *   schema: z.object({
- *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
- *   }),
- * }
- *
- * const GetPopulation = {
- *   name: "GetPopulation",
- *   description: "Get the current population in a given location",
- *   schema: z.object({
- *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
- *   }),
- * }
- *
- * const llmWithTools = llmForToolCalling.bindTools([GetWeather, GetPopulation]);
- * const aiMsg = await llmWithTools.invoke(
- *   "Which city is hotter today and which is bigger: LA or NY?"
- * );
- * console.log(aiMsg.tool_calls);
- * ```
- *
- * ```txt
- * [
- *   {
- *     name: 'GetWeather',
- *     args: { location: 'Los Angeles, CA' },
- *     type: 'tool_call',
- *     id: 'call_cd34'
- *   },
- *   {
- *     name: 'GetWeather',
- *     args: { location: 'New York, NY' },
- *     type: 'tool_call',
- *     id: 'call_68rf'
- *   },
- *   {
- *     name: 'GetPopulation',
- *     args: { location: 'Los Angeles, CA' },
- *     type: 'tool_call',
- *     id: 'call_f81z'
- *   },
- *   {
- *     name: 'GetPopulation',
- *     args: { location: 'New York, NY' },
- *     type: 'tool_call',
- *     id: 'call_8byt'
- *   }
- * ]
- * ```
- * </details>
- *
- * <br />
- *
- * <details>
- * <summary><strong>Structured Output</strong></summary>
- *
- * ```typescript
- * import { z } from 'zod';
- *
- * const Joke = z.object({
- *   setup: z.string().describe("The setup of the joke"),
- *   punchline: z.string().describe("The punchline to the joke"),
- *   rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
- * }).describe('Joke to tell user.');
- *
- * const structuredLlm = llmForToolCalling.withStructuredOutput(Joke, { name: "Joke" });
- * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
- * console.log(jokeResult);
- * ```
- *
- * ```txt
- * {
- *   setup: "Why don't cats play poker in the wild?",
- *   punchline: 'Because there are too many cheetahs.'
- * }
- * ```
- * </details>
- *
- * <br />
- */
- class ChatDeepSeek extends openai_1.ChatOpenAI {
-   static lc_name() {
-     return "ChatDeepSeek";
-   }
-   _llmType() {
-     return "deepseek";
-   }
-   get lc_secrets() {
-     return {
-       apiKey: "DEEPSEEK_API_KEY",
-     };
-   }
-   constructor(fields) {
-     const apiKey = fields?.apiKey || (0, env_1.getEnvironmentVariable)("DEEPSEEK_API_KEY");
-     if (!apiKey) {
-       throw new Error(`Deepseek API key not found. Please set the DEEPSEEK_API_KEY environment variable or pass the key into "apiKey" field.`);
-     }
-     super({
-       ...fields,
-       apiKey,
-       configuration: {
-         baseURL: "https://api.deepseek.com",
-         ...fields?.configuration,
-       },
-     });
-     Object.defineProperty(this, "lc_serializable", {
-       enumerable: true,
-       configurable: true,
-       writable: true,
-       value: true
-     });
-     Object.defineProperty(this, "lc_namespace", {
-       enumerable: true,
-       configurable: true,
-       writable: true,
-       value: ["langchain", "chat_models", "deepseek"]
-     });
-   }
-   _convertOpenAIDeltaToBaseMessageChunk(
-   // eslint-disable-next-line @typescript-eslint/no-explicit-any
-   delta, rawResponse, defaultRole) {
-     const messageChunk = super._convertOpenAIDeltaToBaseMessageChunk(delta, rawResponse, defaultRole);
-     messageChunk.additional_kwargs.reasoning_content = delta.reasoning_content;
-     return messageChunk;
-   }
-   _convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse) {
-     const langChainMessage = super._convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse);
-     langChainMessage.additional_kwargs.reasoning_content =
-       // eslint-disable-next-line @typescript-eslint/no-explicit-any
-       message.reasoning_content;
-     return langChainMessage;
-   }
-   withStructuredOutput(outputSchema, config) {
-     const ensuredConfig = { ...config };
-     // Deepseek does not support json schema yet
-     if (ensuredConfig?.method === undefined) {
-       ensuredConfig.method = "functionCalling";
-     }
-     return super.withStructuredOutput(outputSchema, ensuredConfig);
-   }
- }
+ * DeepSeek chat model integration.
+ *
+ * The DeepSeek API is compatible with the OpenAI API, albeit with some limitations.
+ *
+ * Setup:
+ * Install `@langchain/deepseek` and set an environment variable named `DEEPSEEK_API_KEY`.
+ *
+ * ```bash
+ * npm install @langchain/deepseek
+ * export DEEPSEEK_API_KEY="your-api-key"
+ * ```
+ *
+ * ## [Constructor args](https://api.js.langchain.com/classes/_langchain_deepseek.ChatDeepSeek.html#constructor)
+ *
+ * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
+ *
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
+ * They can also be passed via `.withConfig`, or as the second argument to `.bindTools`, as shown in the examples below:
+ *
+ * ```typescript
+ * // When calling `.withConfig`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.withConfig({
+ *   stop: ["\n"],
+ *   tools: [...],
+ * });
+ *
+ * // When calling `.bindTools`, call options should be passed via the second argument
+ * const llmWithTools = llm.bindTools(
+ *   [...],
+ *   {
+ *     tool_choice: "auto",
+ *   }
+ * );
+ * ```
+ *
+ * ## Examples
+ *
+ * <details open>
+ * <summary><strong>Instantiate</strong></summary>
+ *
+ * ```typescript
+ * import { ChatDeepSeek } from '@langchain/deepseek';
+ *
+ * const llm = new ChatDeepSeek({
+ *   model: "deepseek-reasoner",
+ *   temperature: 0,
+ *   // other params...
+ * });
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Invoking</strong></summary>
+ *
+ * ```typescript
+ * const input = `Translate "I love programming" into French.`;
+ *
+ * // Models also accept a list of chat messages or a formatted prompt
+ * const result = await llm.invoke(input);
+ * console.log(result);
+ * ```
+ *
+ * ```txt
+ * AIMessage {
+ *   "content": "The French translation of \"I love programming\" is \"J'aime programmer\". In this sentence, \"J'aime\" is the first person singular conjugation of the French verb \"aimer\" which means \"to love\", and \"programmer\" is the French infinitive for \"to program\". I hope this helps! Let me know if you have any other questions.",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "tokenUsage": {
+ *       "completionTokens": 82,
+ *       "promptTokens": 20,
+ *       "totalTokens": 102
+ *     },
+ *     "finish_reason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Streaming Chunks</strong></summary>
+ *
+ * ```typescript
+ * for await (const chunk of await llm.stream(input)) {
+ *   console.log(chunk);
+ * }
+ * ```
+ *
+ * ```txt
+ * AIMessageChunk {
+ *   "content": "",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "The",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " French",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " translation",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " of",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " \"",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "I",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " love",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ...
+ * AIMessageChunk {
+ *   "content": ".",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Aggregate Streamed Chunks</strong></summary>
+ *
+ * ```typescript
+ * import { AIMessageChunk } from '@langchain/core/messages';
+ * import { concat } from '@langchain/core/utils/stream';
+ *
+ * const stream = await llm.stream(input);
+ * let full: AIMessageChunk | undefined;
+ * for await (const chunk of stream) {
+ *   full = !full ? chunk : concat(full, chunk);
+ * }
+ * console.log(full);
+ * ```
+ *
+ * ```txt
+ * AIMessageChunk {
+ *   "content": "The French translation of \"I love programming\" is \"J'aime programmer\". In this sentence, \"J'aime\" is the first person singular conjugation of the French verb \"aimer\" which means \"to love\", and \"programmer\" is the French infinitive for \"to program\". I hope this helps! Let me know if you have any other questions.",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Bind tools</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const llmForToolCalling = new ChatDeepSeek({
+ *   model: "deepseek-chat",
+ *   temperature: 0,
+ *   // other params...
+ * });
+ *
+ * const GetWeather = {
+ *   name: "GetWeather",
+ *   description: "Get the current weather in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const GetPopulation = {
+ *   name: "GetPopulation",
+ *   description: "Get the current population in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const llmWithTools = llmForToolCalling.bindTools([GetWeather, GetPopulation]);
+ * const aiMsg = await llmWithTools.invoke(
+ *   "Which city is hotter today and which is bigger: LA or NY?"
+ * );
+ * console.log(aiMsg.tool_calls);
+ * ```
+ *
+ * ```txt
+ * [
+ *   {
+ *     name: 'GetWeather',
+ *     args: { location: 'Los Angeles, CA' },
+ *     type: 'tool_call',
+ *     id: 'call_cd34'
+ *   },
+ *   {
+ *     name: 'GetWeather',
+ *     args: { location: 'New York, NY' },
+ *     type: 'tool_call',
+ *     id: 'call_68rf'
+ *   },
+ *   {
+ *     name: 'GetPopulation',
+ *     args: { location: 'Los Angeles, CA' },
+ *     type: 'tool_call',
+ *     id: 'call_f81z'
+ *   },
+ *   {
+ *     name: 'GetPopulation',
+ *     args: { location: 'New York, NY' },
+ *     type: 'tool_call',
+ *     id: 'call_8byt'
+ *   }
+ * ]
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Structured Output</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const Joke = z.object({
+ *   setup: z.string().describe("The setup of the joke"),
+ *   punchline: z.string().describe("The punchline to the joke"),
+ *   rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
+ * }).describe('Joke to tell user.');
+ *
+ * const structuredLlm = llmForToolCalling.withStructuredOutput(Joke, { name: "Joke" });
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
+ * console.log(jokeResult);
+ * ```
+ *
+ * ```txt
+ * {
+ *   setup: "Why don't cats play poker in the wild?",
+ *   punchline: 'Because there are too many cheetahs.'
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ */
+ var ChatDeepSeek = class extends __langchain_openai.ChatOpenAICompletions {
+   static lc_name() {
+     return "ChatDeepSeek";
+   }
+   _llmType() {
+     return "deepseek";
+   }
+   get lc_secrets() {
+     return { apiKey: "DEEPSEEK_API_KEY" };
+   }
+   lc_serializable = true;
+   lc_namespace = [
+     "langchain",
+     "chat_models",
+     "deepseek"
+   ];
+   constructor(fields) {
+     const apiKey = fields?.apiKey || (0, __langchain_core_utils_env.getEnvironmentVariable)("DEEPSEEK_API_KEY");
+     if (!apiKey) throw new Error(`Deepseek API key not found. Please set the DEEPSEEK_API_KEY environment variable or pass the key into "apiKey" field.`);
+     super({
+       ...fields,
+       apiKey,
+       configuration: {
+         baseURL: "https://api.deepseek.com",
+         ...fields?.configuration
+       }
+     });
+   }
+   _convertCompletionsDeltaToBaseMessageChunk(delta, rawResponse, defaultRole) {
+     const messageChunk = super._convertCompletionsDeltaToBaseMessageChunk(delta, rawResponse, defaultRole);
+     messageChunk.additional_kwargs.reasoning_content = delta.reasoning_content;
+     return messageChunk;
+   }
+   _convertCompletionsMessageToBaseMessage(message, rawResponse) {
+     const langChainMessage = super._convertCompletionsMessageToBaseMessage(message, rawResponse);
+     langChainMessage.additional_kwargs.reasoning_content = message.reasoning_content;
+     return langChainMessage;
+   }
+   withStructuredOutput(outputSchema, config) {
+     const ensuredConfig = { ...config };
+     if (ensuredConfig?.method === void 0) ensuredConfig.method = "functionCalling";
+     return super.withStructuredOutput(outputSchema, ensuredConfig);
+   }
+ };
+
+ //#endregion
  exports.ChatDeepSeek = ChatDeepSeek;
+ //# sourceMappingURL=chat_models.cjs.map
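
The 1.0.0 build is bundled with Rolldown and extends `ChatOpenAICompletions` instead of `ChatOpenAI`, renaming the internal converter hooks accordingly (`_convertOpenAIDeltaToBaseMessageChunk` becomes `_convertCompletionsDeltaToBaseMessageChunk`, and `_convertOpenAIChatCompletionMessageToBaseMessage` becomes `_convertCompletionsMessageToBaseMessage`). Both versions copy DeepSeek's `reasoning_content` onto `additional_kwargs`, so caller code like the following minimal sketch (not part of either package; assumes `DEEPSEEK_API_KEY` is set, per the setup docs above) should behave the same on either side of the major bump:

```typescript
import { ChatDeepSeek } from "@langchain/deepseek";

// Minimal sketch: the overridden converters in both 0.0.2 and 1.0.0 attach
// the model's reasoning trace to additional_kwargs.reasoning_content.
const llm = new ChatDeepSeek({ model: "deepseek-reasoner", temperature: 0 });

const result = await llm.invoke(`Translate "I love programming" into French.`);
console.log(result.additional_kwargs.reasoning_content); // reasoning trace
console.log(result.content);                             // final answer
```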
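In both constructors the default `baseURL` is spread before `fields?.configuration`, so a caller-supplied `configuration` overrides the default `https://api.deepseek.com` endpoint. A sketch, using a hypothetical gateway URL:

```typescript
import { ChatDeepSeek } from "@langchain/deepseek";

// Sketch only: because the constructor spreads fields?.configuration after
// the default baseURL, the caller's value wins. The URL below is a
// hypothetical placeholder for an OpenAI-compatible gateway.
const llm = new ChatDeepSeek({
  model: "deepseek-chat",
  apiKey: process.env.DEEPSEEK_API_KEY, // still required; the constructor throws without it
  configuration: {
    baseURL: "https://gateway.example.com/v1",
  },
});
```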
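Both versions also force `withStructuredOutput` to function calling when no `method` is given, since, per the comment in 0.0.2, DeepSeek does not support a JSON-schema response mode yet. Passing the method explicitly, as in this sketch, should therefore be equivalent to omitting it:

```typescript
import { z } from "zod";
import { ChatDeepSeek } from "@langchain/deepseek";

const llm = new ChatDeepSeek({ model: "deepseek-chat", temperature: 0 });

// Sketch: the explicit method matches the default the override would pick
// anyway when `method` is undefined.
const Joke = z.object({
  setup: z.string().describe("The setup of the joke"),
  punchline: z.string().describe("The punchline to the joke"),
});

const structuredLlm = llm.withStructuredOutput(Joke, {
  name: "Joke",
  method: "functionCalling",
});
console.log(await structuredLlm.invoke("Tell me a joke about cats"));
```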