@langchain/deepseek 0.0.1 → 0.0.2

This diff shows the published contents of @langchain/deepseek as they changed between versions 0.0.1 and 0.0.2 on the public registry.
@@ -21,11 +21,11 @@ const openai_1 = require("@langchain/openai");
  * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
  *
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc.
- * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below:
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below:
  *
  * ```typescript
- * // When calling `.bind`, call options should be passed via the first argument
- * const llmWithArgsBound = llm.bind({
+ * // When calling `.withConfig`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.withConfig({
  *   stop: ["\n"],
  *   tools: [...],
  * });
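This doc-comment change (repeated below in the type declarations and the ESM build) moves the recommended way to pre-bind call options from `.bind` to `.withConfig`. A rough sketch of what calling code looks like under the new wording; the model name, temperature, and prompt are illustrative assumptions, not part of this diff:

```typescript
import { ChatDeepSeek } from "@langchain/deepseek";

// Illustrative setup; assumes the DeepSeek API key is configured in the environment.
const llm = new ChatDeepSeek({ model: "deepseek-chat", temperature: 0 });

// 0.0.1 doc comment: pre-bind call options with `.bind`
// const llmWithArgsBound = llm.bind({ stop: ["\n"] });

// 0.0.2 doc comment: pre-bind the same call options with `.withConfig`
const llmWithArgsBound = llm.withConfig({ stop: ["\n"] });

const response = await llmWithArgsBound.invoke("List three colors, one per line.");
```

The runtime args themselves (`stop`, `tools`, and the rest of `ChatDeepSeekCallOptions`) are unchanged; only the binding method named in the docs differs.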
@@ -1,8 +1,8 @@
  import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
  import { BaseMessage } from "@langchain/core/messages";
  import { Runnable } from "@langchain/core/runnables";
+ import { InteropZodType } from "@langchain/core/utils/types";
  import { ChatOpenAI, ChatOpenAICallOptions, ChatOpenAIFields, ChatOpenAIStructuredOutputMethodOptions, OpenAIClient } from "@langchain/openai";
- import { z } from "zod";
  export interface ChatDeepSeekCallOptions extends ChatOpenAICallOptions {
      headers?: Record<string, string>;
  }
@@ -59,11 +59,11 @@ export interface ChatDeepSeekInput extends ChatOpenAIFields {
  * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
  *
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc.
- * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below:
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below:
  *
  * ```typescript
- * // When calling `.bind`, call options should be passed via the first argument
- * const llmWithArgsBound = llm.bind({
+ * // When calling `.withConfig`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.withConfig({
  *   stop: ["\n"],
  *   tools: [...],
  * });
@@ -402,12 +402,12 @@ export declare class ChatDeepSeek extends ChatOpenAI<ChatDeepSeekCallOptions> {
  constructor(fields?: Partial<ChatDeepSeekInput>);
  protected _convertOpenAIDeltaToBaseMessageChunk(delta: Record<string, any>, rawResponse: OpenAIClient.ChatCompletionChunk, defaultRole?: "function" | "user" | "system" | "developer" | "assistant" | "tool"): import("@langchain/core/messages").AIMessageChunk | import("@langchain/core/messages").HumanMessageChunk | import("@langchain/core/messages").SystemMessageChunk | import("@langchain/core/messages").FunctionMessageChunk | import("@langchain/core/messages").ToolMessageChunk | import("@langchain/core/messages").ChatMessageChunk;
  protected _convertOpenAIChatCompletionMessageToBaseMessage(message: OpenAIClient.ChatCompletionMessage, rawResponse: OpenAIClient.ChatCompletion): BaseMessage;
- withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
- withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
+ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
+ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
      raw: BaseMessage;
      parsed: RunOutput;
  }>;
- withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
+ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
      raw: BaseMessage;
      parsed: RunOutput;
  }>;
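In the type declarations, the `withStructuredOutput` overloads now accept `InteropZodType<RunOutput>` (imported from `@langchain/core/utils/types` above) instead of `z.ZodType<RunOutput>`, and the direct `zod` import is dropped. A minimal sketch of the kind of call these overloads are meant to keep accepting, assuming `InteropZodType` remains satisfied by an ordinary Zod object schema; the model name, schema, and prompt are illustrative, not taken from the diff:

```typescript
import { z } from "zod";
import { ChatDeepSeek } from "@langchain/deepseek";

// Illustrative model name; assumes the DeepSeek API key is configured in the environment.
const llm = new ChatDeepSeek({ model: "deepseek-chat" });

// A plain Zod object schema, assumed to satisfy the new InteropZodType parameter.
const jokeSchema = z.object({
  setup: z.string().describe("Setup for the joke"),
  punchline: z.string().describe("Punchline of the joke"),
});

// withStructuredOutput returns a Runnable whose output is parsed against the schema.
const structuredLlm = llm.withStructuredOutput(jokeSchema);
const joke = await structuredLlm.invoke("Tell me a joke about type systems.");
// joke has the schema's inferred shape: { setup: string; punchline: string }
```

Since `zod` is no longer a direct dependency of the package (see the package.json change below), the schema comes from the consumer's own `zod` install; the non-schema path (`Record<string, any>`, e.g. a raw JSON Schema object) is still present in all three overloads.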
@@ -18,11 +18,11 @@ import { ChatOpenAI, } from "@langchain/openai";
  * ## [Runtime args](https://api.js.langchain.com/interfaces/_langchain_deepseek.ChatDeepSeekCallOptions.html)
  *
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc.
- * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below:
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below:
  *
  * ```typescript
- * // When calling `.bind`, call options should be passed via the first argument
- * const llmWithArgsBound = llm.bind({
+ * // When calling `.withConfig`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.withConfig({
  *   stop: ["\n"],
  *   tools: [...],
  * });
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@langchain/deepseek",
-   "version": "0.0.1",
+   "version": "0.0.2",
    "description": "Deepseek integration for LangChain.js",
    "type": "module",
    "engines": {
@@ -12,7 +12,7 @@
      "type": "git",
      "url": "git@github.com:langchain-ai/langchainjs.git"
    },
-   "homepage": "https://github.com/langchain-ai/langchainjs/tree/main/libs/@langchain/deepseek",
+   "homepage": "https://github.com/langchain-ai/langchainjs/tree/main/libs/langchain-deepseek",
    "scripts": {
      "build": "yarn turbo:command build:internal --filter=@langchain/deepseek",
      "build:internal": "yarn lc_build --create-entrypoints --pre --tree-shaking",
@@ -32,11 +32,10 @@
    "author": "LangChain",
    "license": "MIT",
    "dependencies": {
-     "@langchain/openai": "^0.4.2",
-     "zod": "^3.24.1"
+     "@langchain/openai": "^0.5.5"
    },
    "peerDependencies": {
-     "@langchain/core": ">=0.3.0 <0.4.0"
+     "@langchain/core": ">=0.3.58 <0.4.0"
    },
    "devDependencies": {
      "@jest/globals": "^29.5.0",
@@ -59,7 +58,7 @@
      "jest": "^29.5.0",
      "jest-environment-node": "^29.6.4",
      "prettier": "^2.8.3",
-     "release-it": "^15.10.1",
+     "release-it": "^18.1.2",
      "rollup": "^4.5.2",
      "ts-jest": "^29.1.0",
      "typescript": "<5.2.0"