@langchain/deepseek 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -21,11 +21,11 @@ const openai_1 = require("@langchain/openai");
21
21
  * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
22
22
  *
23
23
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
24
- * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below:
24
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below:
25
25
  *
26
26
  * ```typescript
27
- * // When calling `.bind`, call options should be passed via the first argument
28
- * const llmWithArgsBound = llm.bind({
27
+ * // When calling `.withConfig`, call options should be passed via the first argument
28
+ * const llmWithArgsBound = llm.withConfig({
29
29
  * stop: ["\n"],
30
30
  * tools: [...],
31
31
  * });
@@ -353,7 +353,7 @@ const openai_1 = require("@langchain/openai");
353
353
  *
354
354
  * <br />
355
355
  */
356
- class ChatDeepSeek extends openai_1.ChatOpenAI {
356
+ class ChatDeepSeek extends openai_1.ChatOpenAICompletions {
357
357
  static lc_name() {
358
358
  return "ChatDeepSeek";
359
359
  }
@@ -391,15 +391,15 @@ class ChatDeepSeek extends openai_1.ChatOpenAI {
391
391
  value: ["langchain", "chat_models", "deepseek"]
392
392
  });
393
393
  }
394
- _convertOpenAIDeltaToBaseMessageChunk(
394
+ _convertCompletionsDeltaToBaseMessageChunk(
395
395
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
396
396
  delta, rawResponse, defaultRole) {
397
- const messageChunk = super._convertOpenAIDeltaToBaseMessageChunk(delta, rawResponse, defaultRole);
397
+ const messageChunk = super._convertCompletionsDeltaToBaseMessageChunk(delta, rawResponse, defaultRole);
398
398
  messageChunk.additional_kwargs.reasoning_content = delta.reasoning_content;
399
399
  return messageChunk;
400
400
  }
401
- _convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse) {
402
- const langChainMessage = super._convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse);
401
+ _convertCompletionsMessageToBaseMessage(message, rawResponse) {
402
+ const langChainMessage = super._convertCompletionsMessageToBaseMessage(message, rawResponse);
403
403
  langChainMessage.additional_kwargs.reasoning_content =
404
404
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
405
405
  message.reasoning_content;
@@ -1,8 +1,8 @@
1
- import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
1
+ import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
2
2
  import { BaseMessage } from "@langchain/core/messages";
3
3
  import { Runnable } from "@langchain/core/runnables";
4
- import { ChatOpenAI, ChatOpenAICallOptions, ChatOpenAIFields, ChatOpenAIStructuredOutputMethodOptions, OpenAIClient } from "@langchain/openai";
5
- import { z } from "zod";
4
+ import { InteropZodType } from "@langchain/core/utils/types";
5
+ import { ChatOpenAICallOptions, ChatOpenAICompletions, ChatOpenAIFields, OpenAIClient } from "@langchain/openai";
6
6
  export interface ChatDeepSeekCallOptions extends ChatOpenAICallOptions {
7
7
  headers?: Record<string, string>;
8
8
  }
@@ -59,11 +59,11 @@ export interface ChatDeepSeekInput extends ChatOpenAIFields {
59
59
  * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
60
60
  *
61
61
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
62
- * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below:
62
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below:
63
63
  *
64
64
  * ```typescript
65
- * // When calling `.bind`, call options should be passed via the first argument
66
- * const llmWithArgsBound = llm.bind({
65
+ * // When calling `.withConfig`, call options should be passed via the first argument
66
+ * const llmWithArgsBound = llm.withConfig({
67
67
  * stop: ["\n"],
68
68
  * tools: [...],
69
69
  * });
@@ -391,7 +391,7 @@ export interface ChatDeepSeekInput extends ChatOpenAIFields {
391
391
  *
392
392
  * <br />
393
393
  */
394
- export declare class ChatDeepSeek extends ChatOpenAI<ChatDeepSeekCallOptions> {
394
+ export declare class ChatDeepSeek extends ChatOpenAICompletions<ChatDeepSeekCallOptions> {
395
395
  static lc_name(): string;
396
396
  _llmType(): string;
397
397
  get lc_secrets(): {
@@ -400,14 +400,14 @@ export declare class ChatDeepSeek extends ChatOpenAI<ChatDeepSeekCallOptions> {
400
400
  lc_serializable: boolean;
401
401
  lc_namespace: string[];
402
402
  constructor(fields?: Partial<ChatDeepSeekInput>);
403
- protected _convertOpenAIDeltaToBaseMessageChunk(delta: Record<string, any>, rawResponse: OpenAIClient.ChatCompletionChunk, defaultRole?: "function" | "user" | "system" | "developer" | "assistant" | "tool"): import("@langchain/core/messages").AIMessageChunk | import("@langchain/core/messages").HumanMessageChunk | import("@langchain/core/messages").SystemMessageChunk | import("@langchain/core/messages").FunctionMessageChunk | import("@langchain/core/messages").ToolMessageChunk | import("@langchain/core/messages").ChatMessageChunk;
404
- protected _convertOpenAIChatCompletionMessageToBaseMessage(message: OpenAIClient.ChatCompletionMessage, rawResponse: OpenAIClient.ChatCompletion): BaseMessage;
405
- withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
406
- withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
403
+ protected _convertCompletionsDeltaToBaseMessageChunk(delta: Record<string, any>, rawResponse: OpenAIClient.ChatCompletionChunk, defaultRole?: "function" | "user" | "system" | "developer" | "assistant" | "tool"): import("@langchain/core/messages").AIMessageChunk | import("@langchain/core/messages").HumanMessageChunk | import("@langchain/core/messages").SystemMessageChunk | import("@langchain/core/messages").FunctionMessageChunk | import("@langchain/core/messages").ToolMessageChunk | import("@langchain/core/messages").ChatMessageChunk;
404
+ protected _convertCompletionsMessageToBaseMessage(message: OpenAIClient.ChatCompletionMessage, rawResponse: OpenAIClient.ChatCompletion): BaseMessage;
405
+ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
406
+ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
407
407
  raw: BaseMessage;
408
408
  parsed: RunOutput;
409
409
  }>;
410
- withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
410
+ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
411
411
  raw: BaseMessage;
412
412
  parsed: RunOutput;
413
413
  }>;
@@ -1,5 +1,5 @@
1
1
  import { getEnvironmentVariable } from "@langchain/core/utils/env";
2
- import { ChatOpenAI, } from "@langchain/openai";
2
+ import { ChatOpenAICompletions, } from "@langchain/openai";
3
3
  /**
4
4
  * Deepseek chat model integration.
5
5
  *
@@ -18,11 +18,11 @@ import { ChatOpenAI, } from "@langchain/openai";
18
18
  * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
19
19
  *
20
20
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
21
- * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below:
21
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below:
22
22
  *
23
23
  * ```typescript
24
- * // When calling `.bind`, call options should be passed via the first argument
25
- * const llmWithArgsBound = llm.bind({
24
+ * // When calling `.withConfig`, call options should be passed via the first argument
25
+ * const llmWithArgsBound = llm.withConfig({
26
26
  * stop: ["\n"],
27
27
  * tools: [...],
28
28
  * });
@@ -350,7 +350,7 @@ import { ChatOpenAI, } from "@langchain/openai";
350
350
  *
351
351
  * <br />
352
352
  */
353
- export class ChatDeepSeek extends ChatOpenAI {
353
+ export class ChatDeepSeek extends ChatOpenAICompletions {
354
354
  static lc_name() {
355
355
  return "ChatDeepSeek";
356
356
  }
@@ -388,15 +388,15 @@ export class ChatDeepSeek extends ChatOpenAI {
388
388
  value: ["langchain", "chat_models", "deepseek"]
389
389
  });
390
390
  }
391
- _convertOpenAIDeltaToBaseMessageChunk(
391
+ _convertCompletionsDeltaToBaseMessageChunk(
392
392
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
393
393
  delta, rawResponse, defaultRole) {
394
- const messageChunk = super._convertOpenAIDeltaToBaseMessageChunk(delta, rawResponse, defaultRole);
394
+ const messageChunk = super._convertCompletionsDeltaToBaseMessageChunk(delta, rawResponse, defaultRole);
395
395
  messageChunk.additional_kwargs.reasoning_content = delta.reasoning_content;
396
396
  return messageChunk;
397
397
  }
398
- _convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse) {
399
- const langChainMessage = super._convertOpenAIChatCompletionMessageToBaseMessage(message, rawResponse);
398
+ _convertCompletionsMessageToBaseMessage(message, rawResponse) {
399
+ const langChainMessage = super._convertCompletionsMessageToBaseMessage(message, rawResponse);
400
400
  langChainMessage.additional_kwargs.reasoning_content =
401
401
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
402
402
  message.reasoning_content;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@langchain/deepseek",
3
- "version": "0.0.1",
3
+ "version": "0.1.0",
4
4
  "description": "Deepseek integration for LangChain.js",
5
5
  "type": "module",
6
6
  "engines": {
@@ -12,12 +12,12 @@
12
12
  "type": "git",
13
13
  "url": "git@github.com:langchain-ai/langchainjs.git"
14
14
  },
15
- "homepage": "https://github.com/langchain-ai/langchainjs/tree/main/libs/@langchain/deepseek",
15
+ "homepage": "https://github.com/langchain-ai/langchainjs/tree/main/libs/langchain-deepseek",
16
16
  "scripts": {
17
17
  "build": "yarn turbo:command build:internal --filter=@langchain/deepseek",
18
18
  "build:internal": "yarn lc_build --create-entrypoints --pre --tree-shaking",
19
19
  "lint:eslint": "NODE_OPTIONS=--max-old-space-size=4096 eslint --cache --ext .ts,.js src/",
20
- "lint:dpdm": "dpdm --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts",
20
+ "lint:dpdm": "dpdm --skip-dynamic-imports circular --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts",
21
21
  "lint": "yarn lint:eslint && yarn lint:dpdm",
22
22
  "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm",
23
23
  "clean": "rm -rf .turbo dist/",
@@ -32,24 +32,23 @@
32
32
  "author": "LangChain",
33
33
  "license": "MIT",
34
34
  "dependencies": {
35
- "@langchain/openai": "^0.4.2",
36
- "zod": "^3.24.1"
35
+ "@langchain/openai": "^0.6.0"
37
36
  },
38
37
  "peerDependencies": {
39
- "@langchain/core": ">=0.3.0 <0.4.0"
38
+ "@langchain/core": ">=0.3.58 <0.4.0"
40
39
  },
41
40
  "devDependencies": {
42
41
  "@jest/globals": "^29.5.0",
43
- "@langchain/core": "workspace:*",
42
+ "@langchain/core": "0.3.62",
44
43
  "@langchain/scripts": ">=0.1.0 <0.2.0",
45
- "@langchain/standard-tests": "workspace:*",
44
+ "@langchain/standard-tests": "0.0.0",
46
45
  "@swc/core": "^1.3.90",
47
46
  "@swc/jest": "^0.2.29",
48
47
  "@tsconfig/recommended": "^1.0.3",
49
48
  "@typescript-eslint/eslint-plugin": "^6.12.0",
50
49
  "@typescript-eslint/parser": "^6.12.0",
51
50
  "dotenv": "^16.3.1",
52
- "dpdm": "^3.12.0",
51
+ "dpdm": "^3.14.0",
53
52
  "eslint": "^8.33.0",
54
53
  "eslint-config-airbnb-base": "^15.0.0",
55
54
  "eslint-config-prettier": "^8.6.0",
@@ -59,10 +58,10 @@
59
58
  "jest": "^29.5.0",
60
59
  "jest-environment-node": "^29.6.4",
61
60
  "prettier": "^2.8.3",
62
- "release-it": "^15.10.1",
61
+ "release-it": "^18.1.2",
63
62
  "rollup": "^4.5.2",
64
63
  "ts-jest": "^29.1.0",
65
- "typescript": "<5.2.0"
64
+ "typescript": "~5.8.3"
66
65
  },
67
66
  "publishConfig": {
68
67
  "access": "public"
@@ -86,4 +85,4 @@
86
85
  "index.d.ts",
87
86
  "index.d.cts"
88
87
  ]
89
- }
88
+ }