@langchain/anthropic 0.0.6 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -49,7 +49,7 @@ export ANTHROPIC_API_KEY=your-api-key
  Then initialize
 
  ```typescript
- import { ChatAnthropic } from "@langchain/anthropic";
+ import { ChatAnthropicMessages } from "@langchain/anthropic";
 
  const model = new ChatAnthropic({
    anthropicApiKey: process.env.ANTHROPIC_API_KEY,
@@ -60,7 +60,7 @@ const response = await model.invoke(new HumanMessage("Hello world!"));
  ### Streaming
 
  ```typescript
- import { ChatAnthropic } from "@langchain/anthropic";
+ import { ChatAnthropicMessages } from "@langchain/anthropic";
 
  const model = new ChatAnthropic({
    anthropicApiKey: process.env.ANTHROPIC_API_KEY,
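For orientation, a minimal usage sketch after this change (not taken from the README): the README now imports `ChatAnthropicMessages`, while the unchanged lines that follow still construct `ChatAnthropic`. Per the dist hunks later in this diff, 0.0.8 exports both names, with `ChatAnthropic` kept as a subclass alias, so either import works.

```typescript
// Sketch only; assumes the dual export shown elsewhere in this diff.
import { ChatAnthropicMessages } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatAnthropicMessages({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY, // same env var as the README
});

// Same call shape as the README example, using the renamed class.
const response = await model.invoke([new HumanMessage("Hello world!")]);
console.log(response.content);
```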
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.ChatAnthropic = void 0;
+ exports.ChatAnthropic = exports.ChatAnthropicMessages = void 0;
  const sdk_1 = require("@anthropic-ai/sdk");
  const messages_1 = require("@langchain/core/messages");
  const outputs_1 = require("@langchain/core/outputs");
@@ -27,7 +27,7 @@ const chat_models_1 = require("@langchain/core/language_models/chat_models");
  * console.log(res);
  * ```
  */
- class ChatAnthropic extends chat_models_1.BaseChatModel {
+ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  static lc_name() {
  return "ChatAnthropic";
  }
@@ -381,4 +381,7 @@ class ChatAnthropic extends chat_models_1.BaseChatModel {
  return [];
  }
  }
+ exports.ChatAnthropicMessages = ChatAnthropicMessages;
+ class ChatAnthropic extends ChatAnthropicMessages {
+ }
  exports.ChatAnthropic = ChatAnthropic;
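These hunks carry no file header in the extracted diff, but the `require`/`exports` shape indicates the package's compiled CommonJS output. The change appears to rename the implementation class to `ChatAnthropicMessages` and keep `ChatAnthropic` as an empty subclass, exporting both. A simplified sketch of that pattern (class body elided; not the actual compiled code):

```typescript
// Simplified sketch of the aliasing pattern in the CJS hunks above.
// The real class extends BaseChatModel and implements the Anthropic calls.
class ChatAnthropicMessages {
  static lc_name() {
    return "ChatAnthropic"; // unchanged in the diff, presumably so serialized identifiers stay stable
  }
}

// Kept as a thin subclass so existing `ChatAnthropic` imports keep working.
class ChatAnthropic extends ChatAnthropicMessages {}

export { ChatAnthropicMessages, ChatAnthropic };
```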
@@ -87,7 +87,7 @@ type Kwargs = Record<string, any>;
  * console.log(res);
  * ```
  */
- export declare class ChatAnthropic<CallOptions extends BaseLanguageModelCallOptions = BaseLanguageModelCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
+ export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModelCallOptions = BaseLanguageModelCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
  static lc_name(): string;
  get lc_secrets(): {
  [key: string]: string;
@@ -123,7 +123,6 @@ export declare class ChatAnthropic<CallOptions extends BaseLanguageModelCallOpti
  temperature?: number | undefined;
  top_k?: number | undefined;
  top_p?: number | undefined;
- 'x-api-key'?: string | undefined;
  model_name: string;
  };
  /**
@@ -139,7 +138,6 @@ export declare class ChatAnthropic<CallOptions extends BaseLanguageModelCallOpti
  temperature?: number | undefined;
  top_k?: number | undefined;
  top_p?: number | undefined;
- 'x-api-key'?: string | undefined;
  model_name: string;
  };
  _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
@@ -168,4 +166,6 @@ export declare class ChatAnthropic<CallOptions extends BaseLanguageModelCallOpti
  /** @ignore */
  _combineLLMOutput(): never[];
  }
+ export declare class ChatAnthropic extends ChatAnthropicMessages {
+ }
  export {};
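These declaration (`.d.ts`) hunks make two changes: the generic class declaration is renamed to `ChatAnthropicMessages` with `ChatAnthropic` declared as a subclass of it, and the `'x-api-key'` field disappears from two typed params objects. A hypothetical before/after sketch of that field removal (the type names below are illustrative and do not exist in the package):

```typescript
// Illustrative only: models the object shapes shown in the .d.ts hunks above.
type ParamsBefore = {
  temperature?: number;
  top_k?: number;
  top_p?: number;
  "x-api-key"?: string; // present in 0.0.6
  model_name: string;
};

// 0.0.8 drops the header-style field from the typed params object, so
// downstream code that read it from these params now fails to type-check.
type ParamsAfter = Omit<ParamsBefore, "x-api-key">;
```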
@@ -24,7 +24,7 @@ import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
  * console.log(res);
  * ```
  */
- export class ChatAnthropic extends BaseChatModel {
+ export class ChatAnthropicMessages extends BaseChatModel {
  static lc_name() {
  return "ChatAnthropic";
  }
@@ -378,3 +378,5 @@ export class ChatAnthropic extends BaseChatModel {
  return [];
  }
  }
+ export class ChatAnthropic extends ChatAnthropicMessages {
+ }
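The ESM build mirrors the CommonJS change: the class is renamed and `ChatAnthropic` is re-exported as an empty subclass. Assuming that relationship, existing imports keep working and instances of the alias also satisfy `instanceof` checks against the new name:

```typescript
// Sketch; assumes the subclass alias shown in the hunks above.
import { ChatAnthropic, ChatAnthropicMessages } from "@langchain/anthropic";

const model = new ChatAnthropic({ anthropicApiKey: "placeholder-key" }); // placeholder, no call is made

console.log(model instanceof ChatAnthropic);         // true
console.log(model instanceof ChatAnthropicMessages); // true, since ChatAnthropic extends it
```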
@@ -4,9 +4,9 @@ import { HumanMessage } from "@langchain/core/messages";
  import { ChatPromptValue } from "@langchain/core/prompt_values";
  import { PromptTemplate, ChatPromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, } from "@langchain/core/prompts";
  import { CallbackManager } from "@langchain/core/callbacks/manager";
- import { ChatAnthropic } from "../chat_models.js";
- test("Test ChatAnthropic", async () => {
- const chat = new ChatAnthropic({
+ import { ChatAnthropicMessages } from "../chat_models.js";
+ test("Test ChatAnthropicMessages", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  });
@@ -14,8 +14,8 @@ test("Test ChatAnthropic", async () => {
  const res = await chat.call([message]);
  console.log({ res });
  });
- test("Test ChatAnthropic Generate", async () => {
- const chat = new ChatAnthropic({
+ test("Test ChatAnthropicMessages Generate", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  });
@@ -30,8 +30,8 @@ test("Test ChatAnthropic Generate", async () => {
  }
  console.log({ res });
  });
- test("Test ChatAnthropic Generate w/ ClientOptions", async () => {
- const chat = new ChatAnthropic({
+ test("Test ChatAnthropicMessages Generate w/ ClientOptions", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  clientOptions: {
@@ -51,8 +51,8 @@ test("Test ChatAnthropic Generate w/ ClientOptions", async () => {
  }
  console.log({ res });
  });
- test("Test ChatAnthropic Generate with a signal in call options", async () => {
- const chat = new ChatAnthropic({
+ test("Test ChatAnthropicMessages Generate with a signal in call options", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  });
@@ -68,8 +68,8 @@ test("Test ChatAnthropic Generate with a signal in call options", async () => {
  return res;
  }).rejects.toThrow();
  }, 10000);
- test("Test ChatAnthropic tokenUsage with a batch", async () => {
- const model = new ChatAnthropic({
+ test("Test ChatAnthropicMessages tokenUsage with a batch", async () => {
+ const model = new ChatAnthropicMessages({
  temperature: 0,
  maxRetries: 0,
  modelName: "claude-instant-1.2",
@@ -80,10 +80,10 @@ test("Test ChatAnthropic tokenUsage with a batch", async () => {
  ]);
  console.log({ res });
  });
- test("Test ChatAnthropic in streaming mode", async () => {
+ test("Test ChatAnthropicMessages in streaming mode", async () => {
  let nrNewTokens = 0;
  let streamedCompletion = "";
- const model = new ChatAnthropic({
+ const model = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  streaming: true,
@@ -100,10 +100,10 @@ test("Test ChatAnthropic in streaming mode", async () => {
  expect(nrNewTokens > 0).toBe(true);
  expect(res.content).toBe(streamedCompletion);
  });
- test("Test ChatAnthropic in streaming mode with a signal", async () => {
+ test("Test ChatAnthropicMessages in streaming mode with a signal", async () => {
  let nrNewTokens = 0;
  let streamedCompletion = "";
- const model = new ChatAnthropic({
+ const model = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  streaming: true,
@@ -127,8 +127,8 @@ test("Test ChatAnthropic in streaming mode with a signal", async () => {
  }).rejects.toThrow();
  console.log({ nrNewTokens, streamedCompletion });
  }, 5000);
- test("Test ChatAnthropic prompt value", async () => {
- const chat = new ChatAnthropic({
+ test("Test ChatAnthropicMessages prompt value", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  });
@@ -142,8 +142,8 @@ test("Test ChatAnthropic prompt value", async () => {
  }
  console.log({ res });
  });
- test("ChatAnthropic, docs, prompt templates", async () => {
- const chat = new ChatAnthropic({
+ test("ChatAnthropicMessages, docs, prompt templates", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  temperature: 0,
@@ -162,8 +162,8 @@ test("ChatAnthropic, docs, prompt templates", async () => {
  ]);
  console.log(responseA.generations);
  });
- test("ChatAnthropic, longer chain of messages", async () => {
- const chat = new ChatAnthropic({
+ test("ChatAnthropicMessages, longer chain of messages", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-1.3",
  maxRetries: 0,
  temperature: 0,
@@ -180,10 +180,10 @@ test("ChatAnthropic, longer chain of messages", async () => {
  ]);
  console.log(responseA.generations);
  });
- test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () => {
+ test("ChatAnthropicMessages, Anthropic apiUrl set manually via constructor", async () => {
  // Pass the default URL through (should use this, and work as normal)
  const anthropicApiUrl = "https://api.anthropic.com";
- const chat = new ChatAnthropic({
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  anthropicApiUrl,
@@ -192,8 +192,8 @@ test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () =>
  const res = await chat.call([message]);
  console.log({ res });
  });
- test("ChatAnthropic, Claude V2", async () => {
- const chat = new ChatAnthropic({
+ test("ChatAnthropicMessages, Claude V2", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-2.1",
  maxRetries: 0,
  temperature: 0,
@@ -210,8 +210,8 @@ test("ChatAnthropic, Claude V2", async () => {
  ]);
  console.log(responseA.generations);
  });
- test("Test ChatAnthropic stream method", async () => {
- const model = new ChatAnthropic({
+ test("Test ChatAnthropicMessages stream method", async () => {
+ const model = new ChatAnthropicMessages({
  maxTokens: 50,
  maxRetries: 0,
  modelName: "claude-instant-1.2",
@@ -224,9 +224,9 @@ test("Test ChatAnthropic stream method", async () => {
  }
  expect(chunks.length).toBeGreaterThan(1);
  });
- test("Test ChatAnthropic stream method with abort", async () => {
+ test("Test ChatAnthropicMessages stream method with abort", async () => {
  await expect(async () => {
- const model = new ChatAnthropic({
+ const model = new ChatAnthropicMessages({
  maxTokens: 500,
  maxRetries: 0,
  modelName: "claude-instant-1.2",
@@ -239,8 +239,8 @@ test("Test ChatAnthropic stream method with abort", async () => {
  }
  }).rejects.toThrow();
  });
- test("Test ChatAnthropic stream method with early break", async () => {
- const model = new ChatAnthropic({
+ test("Test ChatAnthropicMessages stream method with early break", async () => {
+ const model = new ChatAnthropicMessages({
  maxTokens: 50,
  maxRetries: 0,
  modelName: "claude-instant-1.2",
@@ -255,8 +255,8 @@ test("Test ChatAnthropic stream method with early break", async () => {
  }
  }
  });
- test("Test ChatAnthropic headers passed through", async () => {
- const chat = new ChatAnthropic({
+ test("Test ChatAnthropicMessages headers passed through", async () => {
+ const chat = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxRetries: 0,
  anthropicApiKey: "NOT_REAL",
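The integration tests are renamed to construct `ChatAnthropicMessages` directly; the scenarios covered (generate, streaming, abort signals, prompt templates, custom `anthropicApiUrl`, and header pass-through) are otherwise unchanged. A minimal sketch of the streaming pattern the `stream` tests exercise (the prompt text is illustrative; this is not the test file):

```typescript
// Sketch of the streaming usage exercised by the renamed stream-method tests.
import { ChatAnthropicMessages } from "@langchain/anthropic";

const model = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2", // model used throughout the tests above
  maxTokens: 50,
  maxRetries: 0,
});

const chunks: unknown[] = [];
for await (const chunk of await model.stream("Say hello in one short sentence.")) {
  chunks.push(chunk);
}
// The tests assert that more than one chunk arrives.
console.log(chunks.length > 1);
```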
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/anthropic",
- "version": "0.0.6",
+ "version": "0.0.8",
  "description": "Anthropic integrations for LangChain.js",
  "type": "module",
  "engines": {
@@ -25,7 +25,6 @@
  "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm",
  "clean": "rimraf .turbo/ dist/ && NODE_OPTIONS=--max-old-space-size=4096 node scripts/create-entrypoints.js pre",
  "prepack": "yarn build",
- "release": "release-it --only-version --config .release-it.json",
  "test": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%",
  "test:watch": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts",
  "test:single": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000",
@@ -35,7 +34,7 @@
  "author": "LangChain",
  "license": "MIT",
  "dependencies": {
- "@anthropic-ai/sdk": "^0.11.0",
+ "@anthropic-ai/sdk": "^0.12.0",
  "@langchain/core": "~0.1.3"
  },
  "devDependencies": {