@langchain/deepseek 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ The MIT License
+
+ Copyright (c) 2025 LangChain
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,80 @@
+ # @langchain/deepseek
+
+ This package contains the LangChain.js integrations for DeepSeek.
+
+ ## Installation
+
+ ```bash npm2yarn
+ npm install @langchain/deepseek @langchain/core
+ ```
+
+ ## Chat models
+
+ This package adds support for DeepSeek's chat model inference.
+
+ Set the necessary environment variable (or pass it in via the constructor):
+
+ ```bash
+ export DEEPSEEK_API_KEY=
+ ```
+
+ ```typescript
+ import { ChatDeepSeek } from "@langchain/deepseek";
+ import { HumanMessage } from "@langchain/core/messages";
+
+ const model = new ChatDeepSeek({
+   apiKey: process.env.DEEPSEEK_API_KEY, // Default value.
+   model: "<model_name>",
+ });
+
+ const res = await model.invoke([new HumanMessage("Why is the sky blue?")]);
+ ```
+
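+ Streaming is also supported. A minimal sketch, reusing the `model` instance above (the prompt is illustrative):
+
+ ```typescript
+ // `.stream` yields AIMessageChunk objects as tokens arrive.
+ const stream = await model.stream("Why is the sky blue?");
+ for await (const chunk of stream) {
+   console.log(chunk.content);
+ }
+ ```
+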
+ ## Development
+
+ To develop the `@langchain/deepseek` package, you'll need to follow these instructions:
+
+ ### Install dependencies
+
+ ```bash
+ yarn install
+ ```
+
+ ### Build the package
+
+ ```bash
+ yarn build
+ ```
+
+ Or from the repo root:
+
+ ```bash
+ yarn build --filter=@langchain/deepseek
+ ```
+
+ ### Run tests
+
+ Test files should live within a `tests/` directory in the `src/` folder. Unit tests should end in `.test.ts` and integration tests should end in `.int.test.ts`:
+
+ ```bash
+ $ yarn test
+ $ yarn test:int
+ ```
+
+ ### Lint & Format
+
+ Run the linter & formatter to ensure your code is up to standard:
+
+ ```bash
+ yarn lint && yarn format
+ ```
+
+ ### Adding new entrypoints
+
+ If you add a new file to be exported, either import & re-export from `src/index.ts`, or add it to the `entrypoints` field in the `config` variable located inside `langchain.config.js` and run `yarn build` to generate the new entrypoint. A sketch of what that registration might look like is shown below.
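+
+ For example, registering a hypothetical `tools` entrypoint (backed by `src/tools.ts`) might look like this; the exact shape of the `config` object may differ across packages:
+
+ ```js
+ // langchain.config.js (sketch; other config fields elided)
+ export const config = {
+   // ...
+   entrypoints: {
+     index: "index",
+     tools: "tools", // hypothetical new entrypoint at src/tools.ts
+   },
+ };
+ ```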
@@ -0,0 +1,417 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ChatDeepSeek = void 0;
+ const env_1 = require("@langchain/core/utils/env");
+ const openai_1 = require("@langchain/openai");
+ /**
+ * DeepSeek chat model integration.
+ *
+ * The DeepSeek API is compatible with the OpenAI API, with some limitations.
+ *
+ * Setup:
+ * Install `@langchain/deepseek` and set an environment variable named `DEEPSEEK_API_KEY`.
+ *
+ * ```bash
+ * npm install @langchain/deepseek
+ * export DEEPSEEK_API_KEY="your-api-key"
+ * ```
+ *
+ * ## [Constructor args](https://api.js.langchain.com/classes/_langchain_deepseek.ChatDeepSeek.html#constructor)
+ *
+ * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
+ *
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
+ * They can also be passed via `.bind`, or the second arg in `.bindTools`, as shown in the examples below:
+ *
+ * ```typescript
+ * // When calling `.bind`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.bind({
+ *   stop: ["\n"],
+ *   tools: [...],
+ * });
+ *
+ * // When calling `.bindTools`, call options should be passed via the second argument
+ * const llmWithTools = llm.bindTools(
+ *   [...],
+ *   {
+ *     tool_choice: "auto",
+ *   }
+ * );
+ * ```
+ *
+ * ## Examples
+ *
+ * <details open>
+ * <summary><strong>Instantiate</strong></summary>
+ *
+ * ```typescript
+ * import { ChatDeepSeek } from '@langchain/deepseek';
+ *
+ * const llm = new ChatDeepSeek({
+ *   model: "deepseek-reasoner",
+ *   temperature: 0,
+ *   // other params...
+ * });
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Invoking</strong></summary>
+ *
+ * ```typescript
+ * const input = `Translate "I love programming" into French.`;
+ *
+ * // Models also accept a list of chat messages or a formatted prompt
+ * const result = await llm.invoke(input);
+ * console.log(result);
+ * ```
+ *
+ * ```txt
+ * AIMessage {
+ *   "content": "The French translation of \"I love programming\" is \"J'aime programmer\". In this sentence, \"J'aime\" is the first person singular conjugation of the French verb \"aimer\" which means \"to love\", and \"programmer\" is the French infinitive for \"to program\". I hope this helps! Let me know if you have any other questions.",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "tokenUsage": {
+ *       "completionTokens": 82,
+ *       "promptTokens": 20,
+ *       "totalTokens": 102
+ *     },
+ *     "finish_reason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Streaming Chunks</strong></summary>
+ *
+ * ```typescript
+ * for await (const chunk of await llm.stream(input)) {
+ *   console.log(chunk);
+ * }
+ * ```
+ *
+ * ```txt
+ * AIMessageChunk {
+ *   "content": "",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "The",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " French",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " translation",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " of",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " \"",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "I",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": " love",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ...
+ * AIMessageChunk {
+ *   "content": ".",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": null
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Aggregate Streamed Chunks</strong></summary>
+ *
+ * ```typescript
+ * import { AIMessageChunk } from '@langchain/core/messages';
+ * import { concat } from '@langchain/core/utils/stream';
+ *
+ * const stream = await llm.stream(input);
+ * let full: AIMessageChunk | undefined;
+ * for await (const chunk of stream) {
+ *   full = !full ? chunk : concat(full, chunk);
+ * }
+ * console.log(full);
+ * ```
+ *
+ * ```txt
+ * AIMessageChunk {
+ *   "content": "The French translation of \"I love programming\" is \"J'aime programmer\". In this sentence, \"J'aime\" is the first person singular conjugation of the French verb \"aimer\" which means \"to love\", and \"programmer\" is the French infinitive for \"to program\". I hope this helps! Let me know if you have any other questions.",
+ *   "additional_kwargs": {
+ *     "reasoning_content": "...",
+ *   },
+ *   "response_metadata": {
+ *     "finishReason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Bind tools</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const llmForToolCalling = new ChatDeepSeek({
+ *   model: "deepseek-chat",
+ *   temperature: 0,
+ *   // other params...
+ * });
+ *
+ * const GetWeather = {
+ *   name: "GetWeather",
+ *   description: "Get the current weather in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const GetPopulation = {
+ *   name: "GetPopulation",
+ *   description: "Get the current population in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const llmWithTools = llmForToolCalling.bindTools([GetWeather, GetPopulation]);
+ * const aiMsg = await llmWithTools.invoke(
+ *   "Which city is hotter today and which is bigger: LA or NY?"
+ * );
+ * console.log(aiMsg.tool_calls);
+ * ```
+ *
+ * ```txt
+ * [
+ *   {
+ *     name: 'GetWeather',
+ *     args: { location: 'Los Angeles, CA' },
+ *     type: 'tool_call',
+ *     id: 'call_cd34'
+ *   },
+ *   {
+ *     name: 'GetWeather',
+ *     args: { location: 'New York, NY' },
+ *     type: 'tool_call',
+ *     id: 'call_68rf'
+ *   },
+ *   {
+ *     name: 'GetPopulation',
+ *     args: { location: 'Los Angeles, CA' },
+ *     type: 'tool_call',
+ *     id: 'call_f81z'
+ *   },
+ *   {
+ *     name: 'GetPopulation',
+ *     args: { location: 'New York, NY' },
+ *     type: 'tool_call',
+ *     id: 'call_8byt'
+ *   }
+ * ]
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Structured Output</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const Joke = z.object({
+ *   setup: z.string().describe("The setup of the joke"),
+ *   punchline: z.string().describe("The punchline to the joke"),
+ *   rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
+ * }).describe('Joke to tell user.');
+ *
+ * const structuredLlm = llmForToolCalling.withStructuredOutput(Joke, { name: "Joke" });
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
+ * console.log(jokeResult);
+ * ```
+ *
+ * ```txt
+ * {
+ *   setup: "Why don't cats play poker in the wild?",
+ *   punchline: 'Because there are too many cheetahs.'
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ */
+ class ChatDeepSeek extends openai_1.ChatOpenAI {
+     static lc_name() {
+         return "ChatDeepSeek";
+     }
+     _llmType() {
+         return "deepseek";
+     }
+     get lc_secrets() {
+         return {
+             apiKey: "DEEPSEEK_API_KEY",
+         };
+     }
+     constructor(fields) {
+         const apiKey = fields?.apiKey || (0, env_1.getEnvironmentVariable)("DEEPSEEK_API_KEY");
+         if (!apiKey) {
+             throw new Error(`DeepSeek API key not found. Please set the DEEPSEEK_API_KEY environment variable or pass the key in via the "apiKey" field.`);
+         }
+         super({
+             ...fields,
+             apiKey,
+             configuration: {
+                 baseURL: "https://api.deepseek.com",
+                 ...fields?.configuration,
+             },
+         });
+         Object.defineProperty(this, "lc_serializable", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: true
+         });
+         Object.defineProperty(this, "lc_namespace", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: ["langchain", "chat_models", "deepseek"]
+         });
+     }
+     _convertOpenAIDeltaToBaseMessageChunk(
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     delta, rawResponse, defaultRole) {
+         const messageChunk = super._convertOpenAIDeltaToBaseMessageChunk(delta, rawResponse, defaultRole);
+         messageChunk.additional_kwargs.reasoning_content = delta.reasoning_content;
+         return messageChunk;
+     }
+     _convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse) {
+         const langChainMessage = super._convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse);
+         langChainMessage.additional_kwargs.reasoning_content =
+             // eslint-disable-next-line @typescript-eslint/no-explicit-any
+             message.reasoning_content;
+         return langChainMessage;
+     }
+     withStructuredOutput(outputSchema, config) {
+         const ensuredConfig = { ...config };
+         // DeepSeek does not support JSON schema yet
+         if (ensuredConfig?.method === undefined) {
+             ensuredConfig.method = "functionCalling";
+         }
+         return super.withStructuredOutput(outputSchema, ensuredConfig);
+     }
+ }
+ exports.ChatDeepSeek = ChatDeepSeek;
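
The `_convertOpenAIDeltaToBaseMessageChunk` and `_convertOpenAIChatCompletionMessageToBaseMessage` overrides above copy DeepSeek's `reasoning_content` field onto `additional_kwargs`, so a reasoning model's chain-of-thought can be read alongside its final answer. A minimal sketch (the prompt is illustrative; `reasoning_content` is only populated by reasoning models such as `deepseek-reasoner`):

```typescript
import { ChatDeepSeek } from "@langchain/deepseek";

const reasoner = new ChatDeepSeek({ model: "deepseek-reasoner" });
const msg = await reasoner.invoke("What is 119 * 8?");

// Chain-of-thought text surfaced by the overrides above.
console.log(msg.additional_kwargs.reasoning_content);
// The final answer.
console.log(msg.content);
```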