@langchain/anthropic 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/chat_models.d.ts +1 -1
- package/dist/tests/chat_models.int.test.js +15 -33
- package/package.json +3 -3
package/README.md
CHANGED

@@ -64,7 +64,7 @@ import { ChatAnthropicMessages } from "@langchain/anthropic";
 
 const model = new ChatAnthropic({
   anthropicApiKey: process.env.ANTHROPIC_API_KEY,
-  modelName: "claude-
+  modelName: "claude-3-sonnet-20240229",
 });
 const response = await model.stream(new HumanMessage("Hello world!"));
 ```
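
The README change pins the quickstart example to an explicit Claude 3 model name in place of the previous value (truncated in this diff view). For context, a minimal sketch of how the updated snippet runs end to end, assuming the standard `@langchain/core` message import; the import lines and the consuming loop are not part of this diff:

```typescript
// Sketch of the updated README quickstart; imports are assumed, not shown in the diff.
import { ChatAnthropic } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatAnthropic({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY,
  modelName: "claude-3-sonnet-20240229",
});

// `stream` returns an async iterable of message chunks.
const response = await model.stream(new HumanMessage("Hello world!"));
for await (const chunk of response) {
  console.log(chunk.content);
}
```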
package/dist/chat_models.d.ts
CHANGED

@@ -57,7 +57,7 @@ export interface AnthropicInput {
     clientOptions: ClientOptions;
     /** Holds any additional parameters that are valid to pass to {@link
      * https://console.anthropic.com/docs/api/reference |
-     * `anthropic.
+     * `anthropic.messages`} that are not explicitly specified on this class.
      */
     invocationKwargs?: Kwargs;
 }
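
The declaration-file change completes the doc comment on `invocationKwargs` so it points at the `anthropic.messages` API rather than a truncated reference. A hedged sketch of how the two `AnthropicInput` fields shown in this hunk are typically used together; the specific header and parameter values below are illustrative, not defaults from the package:

```typescript
import { ChatAnthropic } from "@langchain/anthropic";

const chat = new ChatAnthropic({
  modelName: "claude-3-sonnet-20240229",
  // clientOptions is forwarded to the underlying @anthropic-ai/sdk client,
  // e.g. to attach extra default headers to every request.
  clientOptions: {
    defaultHeaders: { "X-My-Header": "example-value" },
  },
  // invocationKwargs holds extra anthropic.messages parameters that are not
  // explicitly modeled on the class; `top_k` here is an illustrative example.
  invocationKwargs: { top_k: 5 },
});
```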
package/dist/tests/chat_models.int.test.js
CHANGED

@@ -7,7 +7,7 @@ import { CallbackManager } from "@langchain/core/callbacks/manager";
 import { ChatAnthropic } from "../chat_models.js";
 test.skip("Test ChatAnthropic", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const message = new HumanMessage("Hello!");

@@ -16,7 +16,7 @@ test.skip("Test ChatAnthropic", async () => {
 });
 test("Test ChatAnthropic Generate", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const message = new HumanMessage("Hello!");

@@ -32,7 +32,7 @@ test("Test ChatAnthropic Generate", async () => {
 });
 test("Test ChatAnthropic Generate w/ ClientOptions", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         clientOptions: {
             defaultHeaders: {

@@ -53,7 +53,7 @@ test("Test ChatAnthropic Generate w/ ClientOptions", async () => {
 });
 test("Test ChatAnthropic Generate with a signal in call options", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const controller = new AbortController();

@@ -72,7 +72,7 @@ test("Test ChatAnthropic tokenUsage with a batch", async () => {
     const model = new ChatAnthropic({
         temperature: 0,
         maxRetries: 0,
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
     });
     const res = await model.generate([
         [new HumanMessage(`Hello!`)],

@@ -84,7 +84,7 @@ test("Test ChatAnthropic in streaming mode", async () => {
     let nrNewTokens = 0;
     let streamedCompletion = "";
     const model = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         streaming: true,
         callbacks: CallbackManager.fromHandlers({

@@ -104,7 +104,7 @@ test("Test ChatAnthropic in streaming mode with a signal", async () => {
     let nrNewTokens = 0;
     let streamedCompletion = "";
     const model = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         streaming: true,
         callbacks: CallbackManager.fromHandlers({

@@ -129,7 +129,7 @@ test("Test ChatAnthropic in streaming mode with a signal", async () => {
 }, 5000);
 test("Test ChatAnthropic prompt value", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const message = new HumanMessage("Hello!");

@@ -144,7 +144,7 @@ test("Test ChatAnthropic prompt value", async () => {
 });
 test("ChatAnthropic, docs, prompt templates", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         temperature: 0,
     });

@@ -164,7 +164,7 @@ test("ChatAnthropic, docs, prompt templates", async () => {
 });
 test("ChatAnthropic, longer chain of messages", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         temperature: 0,
     });

@@ -184,7 +184,7 @@ test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () =>
     // Pass the default URL through (should use this, and work as normal)
     const anthropicApiUrl = "https://api.anthropic.com";
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         anthropicApiUrl,
     });

@@ -192,29 +192,11 @@ test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () =>
     const res = await chat.call([message]);
     console.log({ res });
 });
-test("ChatAnthropic, Claude V2", async () => {
-    const chat = new ChatAnthropic({
-        modelName: "claude-2.1",
-        maxRetries: 0,
-        temperature: 0,
-    });
-    const chatPrompt = ChatPromptTemplate.fromMessages([
-        HumanMessagePromptTemplate.fromTemplate(`Hi, my name is Joe!`),
-        AIMessagePromptTemplate.fromTemplate(`Nice to meet you, Joe!`),
-        HumanMessagePromptTemplate.fromTemplate("{text}"),
-    ]);
-    const responseA = await chat.generatePrompt([
-        await chatPrompt.formatPromptValue({
-            text: "What did I just say my name was?",
-        }),
-    ]);
-    console.log(responseA.generations);
-});
 test("Test ChatAnthropic stream method", async () => {
     const model = new ChatAnthropic({
         maxTokens: 50,
         maxRetries: 0,
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
     });
     const stream = await model.stream("Print hello world.");
     const chunks = [];

@@ -229,7 +211,7 @@ test("Test ChatAnthropic stream method with abort", async () => {
     const model = new ChatAnthropic({
         maxTokens: 500,
         maxRetries: 0,
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
     });
     const stream = await model.stream("How is your day going? Be extremely verbose.", {
         signal: AbortSignal.timeout(1000),

@@ -243,7 +225,7 @@ test("Test ChatAnthropic stream method with early break", async () => {
     const model = new ChatAnthropic({
         maxTokens: 50,
         maxRetries: 0,
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
     });
     const stream = await model.stream("How is your day going? Be extremely verbose.");
     let i = 0;

@@ -257,7 +239,7 @@ test("Test ChatAnthropic stream method with early break", async () => {
 });
 test("Test ChatAnthropic headers passed through", async () => {
     const chat = new ChatAnthropic({
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         anthropicApiKey: "NOT_REAL",
         clientOptions: {
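
Beyond switching every integration test to `claude-3-sonnet-20240229`, this file drops the dedicated "ChatAnthropic, Claude V2" test that exercised `claude-2.1` with prompt templates. The abort-signal hunk above is the pattern worth noting; a self-contained sketch of it, where the prompt and timeout come from the diff but the try/catch framing and logging are assumed rather than copied from the test:

```typescript
// Sketch of the stream-abort pattern exercised by the integration tests.
import { ChatAnthropic } from "@langchain/anthropic";

const model = new ChatAnthropic({
  modelName: "claude-3-sonnet-20240229",
  maxTokens: 500,
  maxRetries: 0,
});

try {
  const stream = await model.stream(
    "How is your day going? Be extremely verbose.",
    { signal: AbortSignal.timeout(1000) } // cancel the request after ~1s
  );
  for await (const chunk of stream) {
    console.log(chunk.content);
  }
} catch (err) {
  // Aborting mid-stream rejects the iteration; the real test asserts this throws.
  console.error("stream aborted:", err);
}
```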
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@langchain/anthropic",
-  "version": "0.1.
+  "version": "0.1.1",
   "description": "Anthropic integrations for LangChain.js",
   "type": "module",
   "engines": {

@@ -38,8 +38,8 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.
-    "@langchain/core": "~0.1
+    "@anthropic-ai/sdk": "^0.15.0",
+    "@langchain/core": "~0.1"
   },
   "devDependencies": {
     "@jest/globals": "^29.5.0",