@langchain/anthropic 0.0.10 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/chat_models.cjs +5 -7
- package/dist/chat_models.d.ts +12 -10
- package/dist/chat_models.js +5 -7
- package/dist/tests/chat_models.int.test.js +47 -65
- package/package.json +4 -4
package/README.md
CHANGED
@@ -64,7 +64,7 @@ import { ChatAnthropicMessages } from "@langchain/anthropic";
 
 const model = new ChatAnthropic({
   anthropicApiKey: process.env.ANTHROPIC_API_KEY,
-  modelName: "claude-
+  modelName: "claude-3-sonnet-20240229",
 });
 const response = await model.stream(new HumanMessage("Hello world!"));
 ```

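The README example above now pins `modelName` to `claude-3-sonnet-20240229` and streams the response. As a minimal end-to-end sketch of that usage (assuming `ANTHROPIC_API_KEY` is set; the input is passed as a message array, one of the accepted input forms):

```typescript
// Sketch of the updated README usage against @langchain/anthropic 0.1.1.
// Assumes ANTHROPIC_API_KEY is set in the environment.
import { ChatAnthropic } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatAnthropic({
  anthropicApiKey: process.env.ANTHROPIC_API_KEY,
  modelName: "claude-3-sonnet-20240229",
});

// stream() returns an async iterable of AIMessageChunk objects.
const stream = await model.stream([new HumanMessage("Hello world!")]);
for await (const chunk of stream) {
  console.log(chunk.content);
}
```
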
package/dist/chat_models.cjs
CHANGED
@@ -15,10 +15,12 @@ const chat_models_1 = require("@langchain/core/language_models/chat_models");
  * @remarks
  * Any parameters that are valid to be passed to {@link
  * https://console.anthropic.com/docs/api/reference |
- * `anthropic.
+ * `anthropic.messages`} can be passed through {@link invocationKwargs},
  * even if not explicitly available on this class.
  * @example
  * ```typescript
+ * import { ChatAnthropic } from "@langchain/anthropic";
+ *
  * const model = new ChatAnthropic({
  *   temperature: 0.9,
  *   anthropicApiKey: 'YOUR-API-KEY',
@@ -342,9 +344,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
                 maxRetries: 0,
             });
         }
-        const makeCompletionRequest = async () => this.streamingClient.
-        // TODO: Fix typing once underlying SDK is fixed to not require unnecessary "anthropic-beta" param
-        {
+        const makeCompletionRequest = async () => this.streamingClient.messages.create({
             ...request,
             ...this.invocationKwargs,
             stream: true,
@@ -365,9 +365,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
                 maxRetries: 0,
             });
         }
-        const makeCompletionRequest = async () => this.batchClient.
-        // TODO: Fix typing once underlying SDK is fixed to not require unnecessary "anthropic-beta" param
-        {
+        const makeCompletionRequest = async () => this.batchClient.messages.create({
             ...request,
             ...this.invocationKwargs,
         });

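Both hunks above replace the former beta client call (and its `anthropic-beta` TODO) with the SDK's stable `messages.create`. A rough, illustrative sketch of the non-streaming request shape, assuming `@anthropic-ai/sdk` ^0.15.0 as pinned later in this diff (not the package's exact internals):

```typescript
// Non-streaming call: messages.create without `stream: true` resolves to a
// single Anthropic.Message. Assumes @anthropic-ai/sdk ^0.15.0.
import Anthropic from "@anthropic-ai/sdk";

const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

const message = await client.messages.create({
  model: "claude-3-sonnet-20240229",
  max_tokens: 256,
  messages: [{ role: "user", content: "Hello!" }],
});
console.log(message.content); // array of content blocks
```
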
package/dist/chat_models.d.ts
CHANGED
@@ -5,10 +5,10 @@ import { type BaseMessage } from "@langchain/core/messages";
 import { ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
 import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { type BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";
-type AnthropicMessage = Anthropic.
-type AnthropicMessageCreateParams =
-type AnthropicStreamingMessageCreateParams =
-type AnthropicMessageStreamEvent = Anthropic.
+type AnthropicMessage = Anthropic.MessageParam;
+type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
+type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
+type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
 /**
  * Input to AnthropicChat class.
  */
@@ -57,7 +57,7 @@ export interface AnthropicInput {
     clientOptions: ClientOptions;
     /** Holds any additional parameters that are valid to pass to {@link
      * https://console.anthropic.com/docs/api/reference |
-     * `anthropic.
+     * `anthropic.messages`} that are not explicitly specified on this class.
      */
     invocationKwargs?: Kwargs;
 }
@@ -75,10 +75,12 @@ type Kwargs = Record<string, any>;
  * @remarks
  * Any parameters that are valid to be passed to {@link
  * https://console.anthropic.com/docs/api/reference |
- * `anthropic.
+ * `anthropic.messages`} can be passed through {@link invocationKwargs},
  * even if not explicitly available on this class.
  * @example
  * ```typescript
+ * import { ChatAnthropic } from "@langchain/anthropic";
+ *
  * const model = new ChatAnthropic({
  *   temperature: 0.9,
  *   anthropicApiKey: 'YOUR-API-KEY',
@@ -111,13 +113,13 @@ export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModel
     /**
      * Get the parameters used to invoke the model
     */
-    invocationParams(options?: this["ParsedCallOptions"]): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"
+    invocationParams(options?: this["ParsedCallOptions"]): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs;
     /** @ignore */
     _identifyingParams(): {
+        metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
         stream?: boolean | undefined;
         max_tokens: number;
         model: string;
-        metadata?: Anthropic.Beta.Messages.MessageCreateParams.Metadata | undefined;
         stop_sequences?: string[] | undefined;
         system?: string | undefined;
         temperature?: number | undefined;
@@ -129,10 +131,10 @@ export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModel
      * Get the identifying parameters for the model
     */
    identifyingParams(): {
+        metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
         stream?: boolean | undefined;
         max_tokens: number;
         model: string;
-        metadata?: Anthropic.Beta.Messages.MessageCreateParams.Metadata | undefined;
         stop_sequences?: string[] | undefined;
         system?: string | undefined;
         temperature?: number | undefined;
@@ -161,7 +163,7 @@ export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModel
     /** @ignore */
     protected completionWithRetry(request: AnthropicMessageCreateParams & Kwargs, options: {
         signal?: AbortSignal;
-    }): Promise<Anthropic.
+    }): Promise<Anthropic.Message>;
     _llmType(): string;
     /** @ignore */
     _combineLLMOutput(): never[];

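The declaration file now aliases the SDK's stable `MessageParam`, `MessageCreateParamsNonStreaming`, `MessageCreateParamsStreaming`, and `MessageStreamEvent` types, and `metadata` moves off the `Beta` namespace. A small illustrative sketch of how `invocationKwargs` fits these typings (the `metadata` value is hypothetical; per the compiled hunks, `invocationKwargs` is spread into the underlying `messages.create` call):

```typescript
// Illustrative only; mirrors the 0.1.1 typings shown above.
import { ChatAnthropic } from "@langchain/anthropic";

const model = new ChatAnthropic({
  modelName: "claude-3-sonnet-20240229",
  anthropicApiKey: process.env.ANTHROPIC_API_KEY,
  // Extra Anthropic request fields can ride along via invocationKwargs;
  // `metadata` is typed as Anthropic.Messages.MessageCreateParams.Metadata.
  invocationKwargs: { metadata: { user_id: "example-user" } },
});

// Declared as Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams,
// "messages"> & Kwargs in the d.ts above.
console.log(model.invocationParams());
```
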
package/dist/chat_models.js
CHANGED
@@ -12,10 +12,12 @@ import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
  * @remarks
  * Any parameters that are valid to be passed to {@link
  * https://console.anthropic.com/docs/api/reference |
- * `anthropic.
+ * `anthropic.messages`} can be passed through {@link invocationKwargs},
  * even if not explicitly available on this class.
  * @example
  * ```typescript
+ * import { ChatAnthropic } from "@langchain/anthropic";
+ *
  * const model = new ChatAnthropic({
  *   temperature: 0.9,
  *   anthropicApiKey: 'YOUR-API-KEY',
@@ -339,9 +341,7 @@ export class ChatAnthropicMessages extends BaseChatModel {
                 maxRetries: 0,
             });
         }
-        const makeCompletionRequest = async () => this.streamingClient.
-        // TODO: Fix typing once underlying SDK is fixed to not require unnecessary "anthropic-beta" param
-        {
+        const makeCompletionRequest = async () => this.streamingClient.messages.create({
             ...request,
             ...this.invocationKwargs,
             stream: true,
@@ -362,9 +362,7 @@ export class ChatAnthropicMessages extends BaseChatModel {
                 maxRetries: 0,
             });
         }
-        const makeCompletionRequest = async () => this.batchClient.
-        // TODO: Fix typing once underlying SDK is fixed to not require unnecessary "anthropic-beta" param
-        {
+        const makeCompletionRequest = async () => this.batchClient.messages.create({
             ...request,
             ...this.invocationKwargs,
         });

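The ESM build mirrors the CJS change: the streaming path now calls `messages.create` with `stream: true` on the non-beta client, which in the Anthropic SDK resolves to an async iterable of message stream events (matching the `AnthropicMessageStreamEvent` alias in the d.ts). A hedged sketch of that request shape, again assuming `@anthropic-ai/sdk` ^0.15.0:

```typescript
// Streaming call: with `stream: true`, create() resolves to an async iterable
// of MessageStreamEvent values. Assumes @anthropic-ai/sdk ^0.15.0.
import Anthropic from "@anthropic-ai/sdk";

const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

const stream = await client.messages.create({
  model: "claude-3-sonnet-20240229",
  max_tokens: 256,
  messages: [{ role: "user", content: "Hello!" }],
  stream: true,
});

for await (const event of stream) {
  // Text arrives as content_block_delta events carrying text deltas.
  if (event.type === "content_block_delta" && event.delta.type === "text_delta") {
    process.stdout.write(event.delta.text);
  }
}
```
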
package/dist/tests/chat_models.int.test.js
CHANGED

@@ -4,19 +4,19 @@ import { HumanMessage } from "@langchain/core/messages";
 import { ChatPromptValue } from "@langchain/core/prompt_values";
 import { PromptTemplate, ChatPromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, } from "@langchain/core/prompts";
 import { CallbackManager } from "@langchain/core/callbacks/manager";
-import {
-test("Test
-    const chat = new
-        modelName: "claude-
+import { ChatAnthropic } from "../chat_models.js";
+test.skip("Test ChatAnthropic", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const message = new HumanMessage("Hello!");
-    const res = await chat.
+    const res = await chat.invoke([message]);
     console.log({ res });
 });
-test("Test
-    const chat = new
-        modelName: "claude-
+test("Test ChatAnthropic Generate", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const message = new HumanMessage("Hello!");
@@ -30,9 +30,9 @@ test("Test ChatAnthropicMessages Generate", async () => {
     }
     console.log({ res });
 });
-test("Test
-    const chat = new
-        modelName: "claude-
+test("Test ChatAnthropic Generate w/ ClientOptions", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         clientOptions: {
             defaultHeaders: {
@@ -51,9 +51,9 @@ test("Test ChatAnthropicMessages Generate w/ ClientOptions", async () => {
     }
     console.log({ res });
 });
-test("Test
-    const chat = new
-        modelName: "claude-
+test("Test ChatAnthropic Generate with a signal in call options", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const controller = new AbortController();
@@ -68,11 +68,11 @@ test("Test ChatAnthropicMessages Generate with a signal in call options", async
         return res;
     }).rejects.toThrow();
 }, 10000);
-test("Test
-    const model = new
+test("Test ChatAnthropic tokenUsage with a batch", async () => {
+    const model = new ChatAnthropic({
         temperature: 0,
         maxRetries: 0,
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
     });
     const res = await model.generate([
         [new HumanMessage(`Hello!`)],
@@ -80,11 +80,11 @@ test("Test ChatAnthropicMessages tokenUsage with a batch", async () => {
     ]);
     console.log({ res });
 });
-test("Test
+test("Test ChatAnthropic in streaming mode", async () => {
     let nrNewTokens = 0;
     let streamedCompletion = "";
-    const model = new
-        modelName: "claude-
+    const model = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         streaming: true,
         callbacks: CallbackManager.fromHandlers({
@@ -100,11 +100,11 @@ test("Test ChatAnthropicMessages in streaming mode", async () => {
     expect(nrNewTokens > 0).toBe(true);
     expect(res.content).toBe(streamedCompletion);
 });
-test("Test
+test("Test ChatAnthropic in streaming mode with a signal", async () => {
     let nrNewTokens = 0;
     let streamedCompletion = "";
-    const model = new
-        modelName: "claude-
+    const model = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         streaming: true,
         callbacks: CallbackManager.fromHandlers({
@@ -127,9 +127,9 @@ test("Test ChatAnthropicMessages in streaming mode with a signal", async () => {
     }).rejects.toThrow();
     console.log({ nrNewTokens, streamedCompletion });
 }, 5000);
-test("Test
-    const chat = new
-        modelName: "claude-
+test("Test ChatAnthropic prompt value", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
     });
     const message = new HumanMessage("Hello!");
@@ -142,9 +142,9 @@ test("Test ChatAnthropicMessages prompt value", async () => {
     }
     console.log({ res });
 });
-test("
-    const chat = new
-        modelName: "claude-
+test("ChatAnthropic, docs, prompt templates", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         temperature: 0,
     });
@@ -162,9 +162,9 @@ test("ChatAnthropicMessages, docs, prompt templates", async () => {
     ]);
     console.log(responseA.generations);
 });
-test("
-    const chat = new
-        modelName: "claude-
+test("ChatAnthropic, longer chain of messages", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         temperature: 0,
     });
@@ -180,11 +180,11 @@ test("ChatAnthropicMessages, longer chain of messages", async () => {
     ]);
     console.log(responseA.generations);
 });
-test("
+test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () => {
     // Pass the default URL through (should use this, and work as normal)
     const anthropicApiUrl = "https://api.anthropic.com";
-    const chat = new
-        modelName: "claude-
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         anthropicApiUrl,
     });
@@ -192,29 +192,11 @@ test("ChatAnthropicMessages, Anthropic apiUrl set manually via constructor", asy
     const res = await chat.call([message]);
     console.log({ res });
 });
-test("
-    const
-        modelName: "claude-2.1",
-        maxRetries: 0,
-        temperature: 0,
-    });
-    const chatPrompt = ChatPromptTemplate.fromMessages([
-        HumanMessagePromptTemplate.fromTemplate(`Hi, my name is Joe!`),
-        AIMessagePromptTemplate.fromTemplate(`Nice to meet you, Joe!`),
-        HumanMessagePromptTemplate.fromTemplate("{text}"),
-    ]);
-    const responseA = await chat.generatePrompt([
-        await chatPrompt.formatPromptValue({
-            text: "What did I just say my name was?",
-        }),
-    ]);
-    console.log(responseA.generations);
-});
-test("Test ChatAnthropicMessages stream method", async () => {
-    const model = new ChatAnthropicMessages({
+test("Test ChatAnthropic stream method", async () => {
+    const model = new ChatAnthropic({
         maxTokens: 50,
         maxRetries: 0,
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
     });
     const stream = await model.stream("Print hello world.");
     const chunks = [];
@@ -224,12 +206,12 @@ test("Test ChatAnthropicMessages stream method", async () => {
     }
     expect(chunks.length).toBeGreaterThan(1);
 });
-test("Test
+test("Test ChatAnthropic stream method with abort", async () => {
     await expect(async () => {
-        const model = new
+        const model = new ChatAnthropic({
             maxTokens: 500,
             maxRetries: 0,
-            modelName: "claude-
+            modelName: "claude-3-sonnet-20240229",
         });
         const stream = await model.stream("How is your day going? Be extremely verbose.", {
             signal: AbortSignal.timeout(1000),
@@ -239,11 +221,11 @@ test("Test ChatAnthropicMessages stream method with abort", async () => {
         }
     }).rejects.toThrow();
 });
-test("Test
-    const model = new
+test("Test ChatAnthropic stream method with early break", async () => {
+    const model = new ChatAnthropic({
         maxTokens: 50,
         maxRetries: 0,
-        modelName: "claude-
+        modelName: "claude-3-sonnet-20240229",
     });
     const stream = await model.stream("How is your day going? Be extremely verbose.");
     let i = 0;
@@ -255,9 +237,9 @@ test("Test ChatAnthropicMessages stream method with early break", async () => {
         }
     }
 });
-test("Test
-    const chat = new
-        modelName: "claude-
+test("Test ChatAnthropic headers passed through", async () => {
+    const chat = new ChatAnthropic({
+        modelName: "claude-3-sonnet-20240229",
         maxRetries: 0,
         anthropicApiKey: "NOT_REAL",
         clientOptions: {

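The integration tests are renamed from `ChatAnthropicMessages` to `ChatAnthropic`, switch to `claude-3-sonnet-20240229`, and the first test now calls `invoke` rather than `call`; the old `claude-2.1` prompt-template test is dropped. A minimal sketch of a test in the updated style (hypothetical test, assuming a valid `ANTHROPIC_API_KEY` and Jest via `@jest/globals`, which appears in devDependencies below):

```typescript
// Hypothetical integration test following the renamed ChatAnthropic style above.
import { expect, test } from "@jest/globals";
import { HumanMessage } from "@langchain/core/messages";
import { ChatAnthropic } from "../chat_models.js";

test("ChatAnthropic invoke returns a response", async () => {
  const chat = new ChatAnthropic({
    modelName: "claude-3-sonnet-20240229",
    maxRetries: 0,
  });
  const res = await chat.invoke([new HumanMessage("Hello!")]);
  expect(res.content).toBeDefined();
});
```
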
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/anthropic",
-  "version": "0.
+  "version": "0.1.1",
   "description": "Anthropic integrations for LangChain.js",
   "type": "module",
   "engines": {
@@ -24,7 +24,7 @@
     "lint:dpdm": "dpdm --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts",
     "lint": "yarn lint:eslint && yarn lint:dpdm",
     "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm",
-    "clean": "rimraf .turbo/ dist/ && NODE_OPTIONS=--max-old-space-size=4096 yarn create-entrypoints --
+    "clean": "rimraf .turbo/ dist/ && NODE_OPTIONS=--max-old-space-size=4096 yarn lc-build --config ./langchain.config.js --create-entrypoints --pre",
     "prepack": "yarn build",
     "test": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%",
     "test:watch": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts",
@@ -38,8 +38,8 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.
-    "@langchain/core": "~0.1
+    "@anthropic-ai/sdk": "^0.15.0",
+    "@langchain/core": "~0.1"
   },
   "devDependencies": {
     "@jest/globals": "^29.5.0",