@langchain/anthropic 0.0.6 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/chat_models.cjs +5 -2
- package/dist/chat_models.d.ts +3 -1
- package/dist/chat_models.js +3 -1
- package/dist/tests/chat_models.int.test.js +33 -33
- package/package.json +1 -1
package/README.md
CHANGED
@@ -49,7 +49,7 @@ export ANTHROPIC_API_KEY=your-api-key
 Then initialize

 ```typescript
-import { ChatAnthropic } from "@langchain/anthropic";
+import { ChatAnthropicMessages } from "@langchain/anthropic";

 const model = new ChatAnthropic({
   anthropicApiKey: process.env.ANTHROPIC_API_KEY,
@@ -60,7 +60,7 @@ const response = await model.invoke(new HumanMessage("Hello world!"));
 ### Streaming

 ```typescript
-import { ChatAnthropic } from "@langchain/anthropic";
+import { ChatAnthropicMessages } from "@langchain/anthropic";

 const model = new ChatAnthropic({
   anthropicApiKey: process.env.ANTHROPIC_API_KEY,

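Both README snippets now import `ChatAnthropicMessages`, while the unchanged context below still constructs `ChatAnthropic`; both names are exported after this release (see the dist diffs below). A minimal, consistent version of the updated quickstart could look like the following sketch: the `invoke` line comes from the unchanged README context, and the root re-export of `ChatAnthropicMessages` is assumed from the new import line.

```typescript
// Sketch of the updated quickstart, assuming the package root re-exports
// ChatAnthropicMessages exactly as the README's new import line shows.
import { ChatAnthropicMessages } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatAnthropicMessages({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY,
});

const response = await model.invoke(new HumanMessage("Hello world!"));
console.log(response);
```
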
package/dist/chat_models.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ChatAnthropic = void 0;
+exports.ChatAnthropic = exports.ChatAnthropicMessages = void 0;
 const sdk_1 = require("@anthropic-ai/sdk");
 const messages_1 = require("@langchain/core/messages");
 const outputs_1 = require("@langchain/core/outputs");
@@ -27,7 +27,7 @@ const chat_models_1 = require("@langchain/core/language_models/chat_models");
  * console.log(res);
  * ```
  */
-class ChatAnthropic extends chat_models_1.BaseChatModel {
+class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
     static lc_name() {
         return "ChatAnthropic";
     }
@@ -381,4 +381,7 @@ class ChatAnthropic extends chat_models_1.BaseChatModel {
         return [];
     }
 }
+exports.ChatAnthropicMessages = ChatAnthropicMessages;
+class ChatAnthropic extends ChatAnthropicMessages {
+}
 exports.ChatAnthropic = ChatAnthropic;

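The CommonJS build keeps both exports: the implementation moves to `ChatAnthropicMessages`, and `ChatAnthropic` becomes an empty subclass, so existing code is unaffected. The pattern in isolation, as a self-contained sketch (the stub constructor below stands in for the real `AnthropicInput` fields and is not the library code):

```typescript
// Illustration of the aliasing pattern introduced above: the renamed class
// carries the implementation and the old name is kept as an empty subclass,
// so `new ChatAnthropic(...)` keeps working unchanged.
class ChatAnthropicMessages {
  constructor(public fields: { modelName?: string; maxRetries?: number } = {}) {}
}

class ChatAnthropic extends ChatAnthropicMessages {}

const model = new ChatAnthropic({ modelName: "claude-instant-1.2" });
console.log(model instanceof ChatAnthropic);         // true
console.log(model instanceof ChatAnthropicMessages); // true
```
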
package/dist/chat_models.d.ts
CHANGED
@@ -87,7 +87,7 @@ type Kwargs = Record<string, any>;
  * console.log(res);
  * ```
  */
-export declare class ChatAnthropic<CallOptions extends BaseLanguageModelCallOptions = BaseLanguageModelCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
+export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModelCallOptions = BaseLanguageModelCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
     static lc_name(): string;
     get lc_secrets(): {
         [key: string]: string;
@@ -168,4 +168,6 @@ export declare class ChatAnthropic<CallOptions extends BaseLanguageModelCallOptions = BaseLanguageModelCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
     /** @ignore */
     _combineLLMOutput(): never[];
 }
+export declare class ChatAnthropic extends ChatAnthropicMessages {
+}
 export {};

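One type-level consequence of the declaration change: the `CallOptions` generic now lives on `ChatAnthropicMessages`, while the `ChatAnthropic` alias is declared without a type parameter. Code that previously parameterized `ChatAnthropic` would move the argument to the new name, roughly as sketched below (the `MyCallOptions` interface and the `@langchain/core` import path are assumptions for illustration):

```typescript
// Sketch: custom call options now parameterize ChatAnthropicMessages.
import { ChatAnthropicMessages } from "@langchain/anthropic";
import type { BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";

// Hypothetical call-options extension, purely for illustration.
interface MyCallOptions extends BaseLanguageModelCallOptions {
  traceTag?: string;
}

// Previously written as: new ChatAnthropic<MyCallOptions>({ ... })
const model = new ChatAnthropicMessages<MyCallOptions>({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY,
  modelName: "claude-instant-1.2",
});
```
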
package/dist/chat_models.js
CHANGED
@@ -24,7 +24,7 @@ import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
  * console.log(res);
  * ```
  */
-export class ChatAnthropic extends BaseChatModel {
+export class ChatAnthropicMessages extends BaseChatModel {
     static lc_name() {
         return "ChatAnthropic";
     }
@@ -378,3 +378,5 @@ export class ChatAnthropic extends BaseChatModel {
         return [];
     }
 }
+export class ChatAnthropic extends ChatAnthropicMessages {
+}

package/dist/tests/chat_models.int.test.js
CHANGED

@@ -4,9 +4,9 @@ import { HumanMessage } from "@langchain/core/messages";
 import { ChatPromptValue } from "@langchain/core/prompt_values";
 import { PromptTemplate, ChatPromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, } from "@langchain/core/prompts";
 import { CallbackManager } from "@langchain/core/callbacks/manager";
-import { ChatAnthropic } from "../chat_models.js";
-test("Test ChatAnthropic", async () => {
-    const chat = new ChatAnthropic({
+import { ChatAnthropicMessages } from "../chat_models.js";
+test("Test ChatAnthropicMessages", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
     });
@@ -14,8 +14,8 @@ test("Test ChatAnthropic", async () => {
     const res = await chat.call([message]);
     console.log({ res });
 });
-test("Test ChatAnthropic Generate", async () => {
-    const chat = new ChatAnthropic({
+test("Test ChatAnthropicMessages Generate", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
     });
@@ -30,8 +30,8 @@ test("Test ChatAnthropic Generate", async () => {
     }
     console.log({ res });
 });
-test("Test ChatAnthropic Generate w/ ClientOptions", async () => {
-    const chat = new ChatAnthropic({
+test("Test ChatAnthropicMessages Generate w/ ClientOptions", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
         clientOptions: {
@@ -51,8 +51,8 @@ test("Test ChatAnthropic Generate w/ ClientOptions", async () => {
     }
     console.log({ res });
 });
-test("Test ChatAnthropic Generate with a signal in call options", async () => {
-    const chat = new ChatAnthropic({
+test("Test ChatAnthropicMessages Generate with a signal in call options", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
     });
@@ -68,8 +68,8 @@ test("Test ChatAnthropic Generate with a signal in call options", async () => {
         return res;
     }).rejects.toThrow();
 }, 10000);
-test("Test ChatAnthropic tokenUsage with a batch", async () => {
-    const model = new ChatAnthropic({
+test("Test ChatAnthropicMessages tokenUsage with a batch", async () => {
+    const model = new ChatAnthropicMessages({
         temperature: 0,
         maxRetries: 0,
         modelName: "claude-instant-1.2",
@@ -80,10 +80,10 @@ test("Test ChatAnthropic tokenUsage with a batch", async () => {
     ]);
     console.log({ res });
 });
-test("Test ChatAnthropic in streaming mode", async () => {
+test("Test ChatAnthropicMessages in streaming mode", async () => {
     let nrNewTokens = 0;
     let streamedCompletion = "";
-    const model = new ChatAnthropic({
+    const model = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
         streaming: true,
@@ -100,10 +100,10 @@ test("Test ChatAnthropic in streaming mode", async () => {
     expect(nrNewTokens > 0).toBe(true);
     expect(res.content).toBe(streamedCompletion);
 });
-test("Test ChatAnthropic in streaming mode with a signal", async () => {
+test("Test ChatAnthropicMessages in streaming mode with a signal", async () => {
     let nrNewTokens = 0;
     let streamedCompletion = "";
-    const model = new ChatAnthropic({
+    const model = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
         streaming: true,
@@ -127,8 +127,8 @@ test("Test ChatAnthropic in streaming mode with a signal", async () => {
     }).rejects.toThrow();
     console.log({ nrNewTokens, streamedCompletion });
 }, 5000);
-test("Test ChatAnthropic prompt value", async () => {
-    const chat = new ChatAnthropic({
+test("Test ChatAnthropicMessages prompt value", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
     });
@@ -142,8 +142,8 @@ test("Test ChatAnthropic prompt value", async () => {
     }
     console.log({ res });
 });
-test("ChatAnthropic, docs, prompt templates", async () => {
-    const chat = new ChatAnthropic({
+test("ChatAnthropicMessages, docs, prompt templates", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
         temperature: 0,
@@ -162,8 +162,8 @@ test("ChatAnthropic, docs, prompt templates", async () => {
     ]);
     console.log(responseA.generations);
 });
-test("ChatAnthropic, longer chain of messages", async () => {
-    const chat = new ChatAnthropic({
+test("ChatAnthropicMessages, longer chain of messages", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-1.3",
         maxRetries: 0,
         temperature: 0,
@@ -180,10 +180,10 @@ test("ChatAnthropic, longer chain of messages", async () => {
     ]);
     console.log(responseA.generations);
 });
-test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () => {
+test("ChatAnthropicMessages, Anthropic apiUrl set manually via constructor", async () => {
     // Pass the default URL through (should use this, and work as normal)
     const anthropicApiUrl = "https://api.anthropic.com";
-    const chat = new ChatAnthropic({
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
         anthropicApiUrl,
@@ -192,8 +192,8 @@ test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () => {
     const res = await chat.call([message]);
     console.log({ res });
 });
-test("ChatAnthropic, Claude V2", async () => {
-    const chat = new ChatAnthropic({
+test("ChatAnthropicMessages, Claude V2", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-2.1",
         maxRetries: 0,
         temperature: 0,
@@ -210,8 +210,8 @@ test("ChatAnthropic, Claude V2", async () => {
     ]);
     console.log(responseA.generations);
 });
-test("Test ChatAnthropic stream method", async () => {
-    const model = new ChatAnthropic({
+test("Test ChatAnthropicMessages stream method", async () => {
+    const model = new ChatAnthropicMessages({
         maxTokens: 50,
         maxRetries: 0,
         modelName: "claude-instant-1.2",
@@ -224,9 +224,9 @@ test("Test ChatAnthropic stream method", async () => {
     }
     expect(chunks.length).toBeGreaterThan(1);
 });
-test("Test ChatAnthropic stream method with abort", async () => {
+test("Test ChatAnthropicMessages stream method with abort", async () => {
     await expect(async () => {
-        const model = new ChatAnthropic({
+        const model = new ChatAnthropicMessages({
             maxTokens: 500,
             maxRetries: 0,
             modelName: "claude-instant-1.2",
@@ -239,8 +239,8 @@ test("Test ChatAnthropic stream method with abort", async () => {
         }
     }).rejects.toThrow();
 });
-test("Test ChatAnthropic stream method with early break", async () => {
-    const model = new ChatAnthropic({
+test("Test ChatAnthropicMessages stream method with early break", async () => {
+    const model = new ChatAnthropicMessages({
         maxTokens: 50,
         maxRetries: 0,
         modelName: "claude-instant-1.2",
@@ -255,8 +255,8 @@ test("Test ChatAnthropic stream method with early break", async () => {
         }
     }
 });
-test("Test ChatAnthropic headers passed through", async () => {
-    const chat = new ChatAnthropic({
+test("Test ChatAnthropicMessages headers passed through", async () => {
+    const chat = new ChatAnthropicMessages({
         modelName: "claude-instant-1.2",
         maxRetries: 0,
         anthropicApiKey: "NOT_REAL",
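
The integration tests are renamed one-for-one to construct `ChatAnthropicMessages`; the options they pass (`modelName`, `maxRetries`, `maxTokens`, `streaming`, `anthropicApiUrl`, `clientOptions`) are unchanged. Only test names and constructor calls are visible in the diff, so the following is an illustrative sketch of the `stream` / early-break shape those tests exercise, not the test code itself:

```typescript
// Sketch of the streaming pattern named by the tests above ("stream method",
// "stream method with early break"); the loop body is illustrative.
import { ChatAnthropicMessages } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatAnthropicMessages({
  modelName: "claude-instant-1.2",
  maxTokens: 50,
  maxRetries: 0,
});

const stream = await model.stream([new HumanMessage("Tell me a short joke.")]);
let chunkCount = 0;
for await (const chunk of stream) {
  console.log(chunk.content);
  if (++chunkCount >= 5) break; // early break, as in the test of that name
}
```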