@langchain/anthropic 0.3.26 → 1.0.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -8
- package/dist/_virtual/rolldown_runtime.cjs +25 -0
- package/dist/chat_models.cjs +772 -1000
- package/dist/chat_models.cjs.map +1 -0
- package/dist/chat_models.d.cts +615 -0
- package/dist/chat_models.d.cts.map +1 -0
- package/dist/chat_models.d.ts +222 -206
- package/dist/chat_models.d.ts.map +1 -0
- package/dist/chat_models.js +766 -991
- package/dist/chat_models.js.map +1 -0
- package/dist/index.cjs +6 -20
- package/dist/index.d.cts +4 -0
- package/dist/index.d.ts +4 -3
- package/dist/index.js +4 -2
- package/dist/output_parsers.cjs +65 -104
- package/dist/output_parsers.cjs.map +1 -0
- package/dist/output_parsers.js +64 -100
- package/dist/output_parsers.js.map +1 -0
- package/dist/types.d.cts +32 -0
- package/dist/types.d.cts.map +1 -0
- package/dist/types.d.ts +29 -31
- package/dist/types.d.ts.map +1 -0
- package/dist/utils/content.cjs +153 -0
- package/dist/utils/content.cjs.map +1 -0
- package/dist/utils/content.js +148 -0
- package/dist/utils/content.js.map +1 -0
- package/dist/utils/errors.cjs +16 -27
- package/dist/utils/errors.cjs.map +1 -0
- package/dist/utils/errors.js +17 -25
- package/dist/utils/errors.js.map +1 -0
- package/dist/utils/index.cjs +7 -0
- package/dist/utils/index.cjs.map +1 -0
- package/dist/utils/index.js +6 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/message_inputs.cjs +218 -535
- package/dist/utils/message_inputs.cjs.map +1 -0
- package/dist/utils/message_inputs.js +219 -533
- package/dist/utils/message_inputs.js.map +1 -0
- package/dist/utils/message_outputs.cjs +185 -246
- package/dist/utils/message_outputs.cjs.map +1 -0
- package/dist/utils/message_outputs.js +184 -243
- package/dist/utils/message_outputs.js.map +1 -0
- package/dist/utils/prompts.cjs +46 -45
- package/dist/utils/prompts.cjs.map +1 -0
- package/dist/utils/prompts.d.cts +45 -0
- package/dist/utils/prompts.d.cts.map +1 -0
- package/dist/utils/prompts.d.ts +8 -2
- package/dist/utils/prompts.d.ts.map +1 -0
- package/dist/utils/prompts.js +46 -42
- package/dist/utils/prompts.js.map +1 -0
- package/dist/utils/standard.cjs +127 -0
- package/dist/utils/standard.cjs.map +1 -0
- package/dist/utils/standard.js +127 -0
- package/dist/utils/standard.js.map +1 -0
- package/dist/utils/tools.cjs +14 -25
- package/dist/utils/tools.cjs.map +1 -0
- package/dist/utils/tools.js +14 -23
- package/dist/utils/tools.js.map +1 -0
- package/package.json +30 -53
- package/dist/experimental/index.cjs +0 -17
- package/dist/experimental/index.d.ts +0 -1
- package/dist/experimental/index.js +0 -1
- package/dist/experimental/tool_calling.cjs +0 -318
- package/dist/experimental/tool_calling.d.ts +0 -57
- package/dist/experimental/tool_calling.js +0 -314
- package/dist/experimental/utils/tool_calling.cjs +0 -106
- package/dist/experimental/utils/tool_calling.d.ts +0 -10
- package/dist/experimental/utils/tool_calling.js +0 -101
- package/dist/load/import_constants.cjs +0 -5
- package/dist/load/import_constants.d.ts +0 -1
- package/dist/load/import_constants.js +0 -2
- package/dist/load/import_map.cjs +0 -39
- package/dist/load/import_map.d.ts +0 -2
- package/dist/load/import_map.js +0 -3
- package/dist/load/import_type.cjs +0 -3
- package/dist/load/import_type.d.ts +0 -5
- package/dist/load/import_type.js +0 -2
- package/dist/load/index.cjs +0 -63
- package/dist/load/index.d.ts +0 -14
- package/dist/load/index.js +0 -25
- package/dist/load/map_keys.cjs +0 -2
- package/dist/load/map_keys.d.ts +0 -3
- package/dist/load/map_keys.js +0 -1
- package/dist/load/serializable.cjs +0 -17
- package/dist/load/serializable.d.ts +0 -1
- package/dist/load/serializable.js +0 -1
- package/dist/output_parsers.d.ts +0 -22
- package/dist/types.cjs +0 -48
- package/dist/types.js +0 -45
- package/dist/utils/errors.d.ts +0 -3
- package/dist/utils/message_inputs.d.ts +0 -14
- package/dist/utils/message_outputs.d.ts +0 -14
- package/dist/utils/tools.d.ts +0 -3
- package/experimental.cjs +0 -1
- package/experimental.d.cts +0 -1
- package/experimental.d.ts +0 -1
- package/experimental.js +0 -1
- package/index.cjs +0 -1
- package/index.d.cts +0 -1
- package/index.d.ts +0 -1
- package/index.js +0 -1
@@ -1,246 +1,187 @@
-import { AIMessage, AIMessageChunk, } from "@langchain/core/messages";
 import { extractToolCalls } from "../output_parsers.js";
-[old lines 3-142 deleted; content not rendered in this extract]
-                        {
-                            index: data.index,
-                            input: data.delta.partial_json,
-                            type: data.delta.type,
-                        },
-                    ],
-                additional_kwargs: {},
-                tool_call_chunks: [
-                    {
-                        index: data.index,
-                        args: data.delta.partial_json,
-                    },
-                ],
-            }),
-        };
-    }
-    else if (data.type === "content_block_start" &&
-        data.content_block.type === "text") {
-        const content = data.content_block?.text;
-        if (content !== undefined) {
-            return {
-                chunk: new AIMessageChunk({
-                    content: fields.coerceContentToString
-                        ? content
-                        : [
-                            {
-                                index: data.index,
-                                ...data.content_block,
-                            },
-                        ],
-                    additional_kwargs: {},
-                }),
-            };
-        }
-    }
-    else if (data.type === "content_block_start" &&
-        data.content_block.type === "redacted_thinking") {
-        return {
-            chunk: new AIMessageChunk({
-                content: fields.coerceContentToString
-                    ? ""
-                    : [{ index: data.index, ...data.content_block }],
-            }),
-        };
-    }
-    else if (data.type === "content_block_start" &&
-        data.content_block.type === "thinking") {
-        const content = data.content_block.thinking;
-        return {
-            chunk: new AIMessageChunk({
-                content: fields.coerceContentToString
-                    ? content
-                    : [{ index: data.index, ...data.content_block }],
-            }),
-        };
-    }
-    return null;
+import { AIMessage, AIMessageChunk } from "@langchain/core/messages";
+
+//#region src/utils/message_outputs.ts
+function _makeMessageChunkFromAnthropicEvent(data, fields) {
+  const response_metadata = { model_provider: "anthropic" };
+  if (data.type === "message_start") {
+    const { content, usage, ...additionalKwargs } = data.message;
+    const filteredAdditionalKwargs = {};
+    for (const [key, value] of Object.entries(additionalKwargs)) if (value !== void 0 && value !== null) filteredAdditionalKwargs[key] = value;
+    const { input_tokens, output_tokens, ...rest } = usage ?? {};
+    const usageMetadata = {
+      input_tokens,
+      output_tokens,
+      total_tokens: input_tokens + output_tokens,
+      input_token_details: {
+        cache_creation: rest.cache_creation_input_tokens,
+        cache_read: rest.cache_read_input_tokens
+      }
+    };
+    return { chunk: new AIMessageChunk({
+      content: fields.coerceContentToString ? "" : [],
+      additional_kwargs: filteredAdditionalKwargs,
+      usage_metadata: fields.streamUsage ? usageMetadata : void 0,
+      response_metadata: {
+        ...response_metadata,
+        usage: { ...rest }
+      },
+      id: data.message.id
+    }) };
+  } else if (data.type === "message_delta") {
+    const usageMetadata = {
+      input_tokens: 0,
+      output_tokens: data.usage.output_tokens,
+      total_tokens: data.usage.output_tokens,
+      input_token_details: {
+        cache_creation: data.usage.cache_creation_input_tokens,
+        cache_read: data.usage.cache_read_input_tokens
+      }
+    };
+    return { chunk: new AIMessageChunk({
+      content: fields.coerceContentToString ? "" : [],
+      response_metadata,
+      additional_kwargs: { ...data.delta },
+      usage_metadata: fields.streamUsage ? usageMetadata : void 0
+    }) };
+  } else if (data.type === "content_block_start" && [
+    "tool_use",
+    "document",
+    "server_tool_use",
+    "web_search_tool_result"
+  ].includes(data.content_block.type)) {
+    const contentBlock = data.content_block;
+    let toolCallChunks;
+    if (contentBlock.type === "tool_use") toolCallChunks = [{
+      id: contentBlock.id,
+      index: data.index,
+      name: contentBlock.name,
+      args: ""
+    }];
+    else toolCallChunks = [];
+    return { chunk: new AIMessageChunk({
+      content: fields.coerceContentToString ? "" : [{
+        index: data.index,
+        ...data.content_block,
+        input: contentBlock.type === "server_tool_use" || contentBlock.type === "tool_use" ? "" : void 0
+      }],
+      response_metadata,
+      additional_kwargs: {},
+      tool_call_chunks: toolCallChunks
+    }) };
+  } else if (data.type === "content_block_delta" && [
+    "text_delta",
+    "citations_delta",
+    "thinking_delta",
+    "signature_delta"
+  ].includes(data.delta.type)) if (fields.coerceContentToString && "text" in data.delta) return { chunk: new AIMessageChunk({ content: data.delta.text }) };
+  else {
+    const contentBlock = data.delta;
+    if ("citation" in contentBlock) {
+      contentBlock.citations = [contentBlock.citation];
+      delete contentBlock.citation;
+    }
+    if (contentBlock.type === "thinking_delta" || contentBlock.type === "signature_delta") return { chunk: new AIMessageChunk({
+      content: [{
+        index: data.index,
+        ...contentBlock,
+        type: "thinking"
+      }],
+      response_metadata
+    }) };
+    return { chunk: new AIMessageChunk({
+      content: [{
+        index: data.index,
+        ...contentBlock,
+        type: "text"
+      }],
+      response_metadata
+    }) };
+  }
+  else if (data.type === "content_block_delta" && data.delta.type === "input_json_delta") return { chunk: new AIMessageChunk({
+    content: fields.coerceContentToString ? "" : [{
+      index: data.index,
+      input: data.delta.partial_json,
+      type: data.delta.type
+    }],
+    response_metadata,
+    additional_kwargs: {},
+    tool_call_chunks: [{
+      index: data.index,
+      args: data.delta.partial_json
+    }]
+  }) };
+  else if (data.type === "content_block_start" && data.content_block.type === "text") {
+    const content = data.content_block?.text;
+    if (content !== void 0) return { chunk: new AIMessageChunk({
+      content: fields.coerceContentToString ? content : [{
+        index: data.index,
+        ...data.content_block
+      }],
+      response_metadata,
+      additional_kwargs: {}
+    }) };
+  } else if (data.type === "content_block_start" && data.content_block.type === "redacted_thinking") return { chunk: new AIMessageChunk({
+    content: fields.coerceContentToString ? "" : [{
+      index: data.index,
+      ...data.content_block
+    }],
+    response_metadata
+  }) };
+  else if (data.type === "content_block_start" && data.content_block.type === "thinking") {
+    const content = data.content_block.thinking;
+    return { chunk: new AIMessageChunk({
+      content: fields.coerceContentToString ? content : [{
+        index: data.index,
+        ...data.content_block
+      }],
+      response_metadata
+    }) };
+  }
+  return null;
 }
-[old lines 201-240 deleted; content not rendered in this extract]
-                }),
-            },
-        ];
-        return generations;
-    }
+function anthropicResponseToChatMessages(messages, additionalKwargs) {
+  const response_metadata = {
+    ...additionalKwargs,
+    model_provider: "anthropic"
+  };
+  const usage = additionalKwargs.usage;
+  const usageMetadata = usage != null ? {
+    input_tokens: usage.input_tokens ?? 0,
+    output_tokens: usage.output_tokens ?? 0,
+    total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),
+    input_token_details: {
+      cache_creation: usage.cache_creation_input_tokens,
+      cache_read: usage.cache_read_input_tokens
+    }
+  } : void 0;
+  if (messages.length === 1 && messages[0].type === "text") return [{
+    text: messages[0].text,
+    message: new AIMessage({
+      content: messages[0].text,
+      additional_kwargs: additionalKwargs,
+      usage_metadata: usageMetadata,
+      response_metadata,
+      id: additionalKwargs.id
+    })
+  }];
+  else {
+    const toolCalls = extractToolCalls(messages);
+    const generations = [{
+      text: "",
+      message: new AIMessage({
+        content: messages,
+        additional_kwargs: additionalKwargs,
+        tool_calls: toolCalls,
+        usage_metadata: usageMetadata,
+        response_metadata,
+        id: additionalKwargs.id
+      })
+    }];
+    return generations;
+  }
 }
+
+//#endregion
+export { _makeMessageChunkFromAnthropicEvent, anthropicResponseToChatMessages };
+//# sourceMappingURL=message_outputs.js.map
package/dist/utils/message_outputs.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"message_outputs.js","names":["data: Anthropic.Messages.RawMessageStreamEvent","fields: {\n streamUsage: boolean;\n coerceContentToString: boolean;\n }","filteredAdditionalKwargs: Record<string, any>","usageMetadata: UsageMetadata","toolCallChunks: ToolCallChunk[]","contentBlock: Record<string, any>","messages: AnthropicMessageResponse[]","additionalKwargs: Record<string, unknown>","usage: Record<string, number> | null | undefined","generations: ChatGeneration[]"],"sources":["../../src/utils/message_outputs.ts"],"sourcesContent":["/**\n * This util file contains functions for converting Anthropic messages to LangChain messages.\n */\nimport Anthropic from \"@anthropic-ai/sdk\";\nimport {\n AIMessage,\n AIMessageChunk,\n UsageMetadata,\n} from \"@langchain/core/messages\";\nimport type { ToolCallChunk } from \"@langchain/core/messages/tool\";\nimport { ChatGeneration } from \"@langchain/core/outputs\";\nimport { AnthropicMessageResponse } from \"../types.js\";\nimport { extractToolCalls } from \"../output_parsers.js\";\n\nexport function _makeMessageChunkFromAnthropicEvent(\n data: Anthropic.Messages.RawMessageStreamEvent,\n fields: {\n streamUsage: boolean;\n coerceContentToString: boolean;\n }\n): {\n chunk: AIMessageChunk;\n} | null {\n const response_metadata = { model_provider: \"anthropic\" };\n if (data.type === \"message_start\") {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const { content, usage, ...additionalKwargs } = data.message;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const filteredAdditionalKwargs: Record<string, any> = {};\n for (const [key, value] of Object.entries(additionalKwargs)) {\n if (value !== undefined && value !== null) {\n filteredAdditionalKwargs[key] = value;\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const { input_tokens, output_tokens, ...rest }: Record<string, any> =\n usage ?? {};\n const usageMetadata: UsageMetadata = {\n input_tokens,\n output_tokens,\n total_tokens: input_tokens + output_tokens,\n input_token_details: {\n cache_creation: rest.cache_creation_input_tokens,\n cache_read: rest.cache_read_input_tokens,\n },\n };\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString ? \"\" : [],\n additional_kwargs: filteredAdditionalKwargs,\n usage_metadata: fields.streamUsage ? usageMetadata : undefined,\n response_metadata: {\n ...response_metadata,\n usage: {\n ...rest,\n },\n },\n id: data.message.id,\n }),\n };\n } else if (data.type === \"message_delta\") {\n const usageMetadata: UsageMetadata = {\n input_tokens: 0,\n output_tokens: data.usage.output_tokens,\n total_tokens: data.usage.output_tokens,\n input_token_details: {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n cache_creation: (data.usage as any).cache_creation_input_tokens,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n cache_read: (data.usage as any).cache_read_input_tokens,\n },\n };\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString ? \"\" : [],\n response_metadata,\n additional_kwargs: { ...data.delta },\n usage_metadata: fields.streamUsage ? 
usageMetadata : undefined,\n }),\n };\n } else if (\n data.type === \"content_block_start\" &&\n [\n \"tool_use\",\n \"document\",\n \"server_tool_use\",\n \"web_search_tool_result\",\n ].includes(data.content_block.type)\n ) {\n const contentBlock = data.content_block;\n let toolCallChunks: ToolCallChunk[];\n if (contentBlock.type === \"tool_use\") {\n toolCallChunks = [\n {\n id: contentBlock.id,\n index: data.index,\n name: contentBlock.name,\n args: \"\",\n },\n ];\n } else {\n toolCallChunks = [];\n }\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? \"\"\n : [\n {\n index: data.index,\n ...data.content_block,\n input:\n contentBlock.type === \"server_tool_use\" ||\n contentBlock.type === \"tool_use\"\n ? \"\"\n : undefined,\n },\n ],\n response_metadata,\n additional_kwargs: {},\n tool_call_chunks: toolCallChunks,\n }),\n };\n } else if (\n data.type === \"content_block_delta\" &&\n [\n \"text_delta\",\n \"citations_delta\",\n \"thinking_delta\",\n \"signature_delta\",\n ].includes(data.delta.type)\n ) {\n if (fields.coerceContentToString && \"text\" in data.delta) {\n return {\n chunk: new AIMessageChunk({\n content: data.delta.text,\n }),\n };\n } else {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const contentBlock: Record<string, any> = data.delta;\n if (\"citation\" in contentBlock) {\n contentBlock.citations = [contentBlock.citation];\n delete contentBlock.citation;\n }\n if (\n contentBlock.type === \"thinking_delta\" ||\n contentBlock.type === \"signature_delta\"\n ) {\n return {\n chunk: new AIMessageChunk({\n content: [{ index: data.index, ...contentBlock, type: \"thinking\" }],\n response_metadata,\n }),\n };\n }\n\n return {\n chunk: new AIMessageChunk({\n content: [{ index: data.index, ...contentBlock, type: \"text\" }],\n response_metadata,\n }),\n };\n }\n } else if (\n data.type === \"content_block_delta\" &&\n data.delta.type === \"input_json_delta\"\n ) {\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? \"\"\n : [\n {\n index: data.index,\n input: data.delta.partial_json,\n type: data.delta.type,\n },\n ],\n response_metadata,\n additional_kwargs: {},\n tool_call_chunks: [\n {\n index: data.index,\n args: data.delta.partial_json,\n },\n ],\n }),\n };\n } else if (\n data.type === \"content_block_start\" &&\n data.content_block.type === \"text\"\n ) {\n const content = data.content_block?.text;\n if (content !== undefined) {\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? content\n : [\n {\n index: data.index,\n ...data.content_block,\n },\n ],\n response_metadata,\n additional_kwargs: {},\n }),\n };\n }\n } else if (\n data.type === \"content_block_start\" &&\n data.content_block.type === \"redacted_thinking\"\n ) {\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? \"\"\n : [{ index: data.index, ...data.content_block }],\n response_metadata,\n }),\n };\n } else if (\n data.type === \"content_block_start\" &&\n data.content_block.type === \"thinking\"\n ) {\n const content = data.content_block.thinking;\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? 
content\n : [{ index: data.index, ...data.content_block }],\n response_metadata,\n }),\n };\n }\n return null;\n}\n\nexport function anthropicResponseToChatMessages(\n messages: AnthropicMessageResponse[],\n additionalKwargs: Record<string, unknown>\n): ChatGeneration[] {\n const response_metadata = {\n ...additionalKwargs,\n model_provider: \"anthropic\",\n };\n const usage: Record<string, number> | null | undefined =\n additionalKwargs.usage as Record<string, number> | null | undefined;\n const usageMetadata =\n usage != null\n ? {\n input_tokens: usage.input_tokens ?? 0,\n output_tokens: usage.output_tokens ?? 0,\n total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),\n input_token_details: {\n cache_creation: usage.cache_creation_input_tokens,\n cache_read: usage.cache_read_input_tokens,\n },\n }\n : undefined;\n if (messages.length === 1 && messages[0].type === \"text\") {\n return [\n {\n text: messages[0].text,\n message: new AIMessage({\n content: messages[0].text,\n additional_kwargs: additionalKwargs,\n usage_metadata: usageMetadata,\n response_metadata,\n id: additionalKwargs.id as string,\n }),\n },\n ];\n } else {\n const toolCalls = extractToolCalls(messages);\n const generations: ChatGeneration[] = [\n {\n text: \"\",\n message: new AIMessage({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n content: messages as any,\n additional_kwargs: additionalKwargs,\n tool_calls: toolCalls,\n usage_metadata: usageMetadata,\n response_metadata,\n id: additionalKwargs.id as string,\n }),\n },\n ];\n return generations;\n }\n}\n"],"mappings":";;;;AAcA,SAAgB,oCACdA,MACAC,QAMO;CACP,MAAM,oBAAoB,EAAE,gBAAgB,YAAa;AACzD,KAAI,KAAK,SAAS,iBAAiB;EAEjC,MAAM,EAAE,SAAS,MAAO,GAAG,kBAAkB,GAAG,KAAK;EAErD,MAAMC,2BAAgD,CAAE;AACxD,OAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,iBAAiB,CACzD,KAAI,UAAU,UAAa,UAAU,MACnC,yBAAyB,OAAO;EAIpC,MAAM,EAAE,cAAc,cAAe,GAAG,MAA2B,GACjE,SAAS,CAAE;EACb,MAAMC,gBAA+B;GACnC;GACA;GACA,cAAc,eAAe;GAC7B,qBAAqB;IACnB,gBAAgB,KAAK;IACrB,YAAY,KAAK;GAClB;EACF;AACD,SAAO,EACL,OAAO,IAAI,eAAe;GACxB,SAAS,OAAO,wBAAwB,KAAK,CAAE;GAC/C,mBAAmB;GACnB,gBAAgB,OAAO,cAAc,gBAAgB;GACrD,mBAAmB;IACjB,GAAG;IACH,OAAO,EACL,GAAG,KACJ;GACF;GACD,IAAI,KAAK,QAAQ;EAClB,GACF;CACF,WAAU,KAAK,SAAS,iBAAiB;EACxC,MAAMA,gBAA+B;GACnC,cAAc;GACd,eAAe,KAAK,MAAM;GAC1B,cAAc,KAAK,MAAM;GACzB,qBAAqB;IAEnB,gBAAiB,KAAK,MAAc;IAEpC,YAAa,KAAK,MAAc;GACjC;EACF;AACD,SAAO,EACL,OAAO,IAAI,eAAe;GACxB,SAAS,OAAO,wBAAwB,KAAK,CAAE;GAC/C;GACA,mBAAmB,EAAE,GAAG,KAAK,MAAO;GACpC,gBAAgB,OAAO,cAAc,gBAAgB;EACtD,GACF;CACF,WACC,KAAK,SAAS,yBACd;EACE;EACA;EACA;EACA;CACD,EAAC,SAAS,KAAK,cAAc,KAAK,EACnC;EACA,MAAM,eAAe,KAAK;EAC1B,IAAIC;AACJ,MAAI,aAAa,SAAS,YACxB,iBAAiB,CACf;GACE,IAAI,aAAa;GACjB,OAAO,KAAK;GACZ,MAAM,aAAa;GACnB,MAAM;EACP,CACF;OAED,iBAAiB,CAAE;AAErB,SAAO,EACL,OAAO,IAAI,eAAe;GACxB,SAAS,OAAO,wBACZ,KACA,CACE;IACE,OAAO,KAAK;IACZ,GAAG,KAAK;IACR,OACE,aAAa,SAAS,qBACtB,aAAa,SAAS,aAClB,KACA;GACP,CACF;GACL;GACA,mBAAmB,CAAE;GACrB,kBAAkB;EACnB,GACF;CACF,WACC,KAAK,SAAS,yBACd;EACE;EACA;EACA;EACA;CACD,EAAC,SAAS,KAAK,MAAM,KAAK,CAE3B,KAAI,OAAO,yBAAyB,UAAU,KAAK,MACjD,QAAO,EACL,OAAO,IAAI,eAAe,EACxB,SAAS,KAAK,MAAM,KACrB,GACF;MACI;EAEL,MAAMC,eAAoC,KAAK;AAC/C,MAAI,cAAc,cAAc;GAC9B,aAAa,YAAY,CAAC,aAAa,QAAS;GAChD,OAAO,aAAa;EACrB;AACD,MACE,aAAa,SAAS,oBACtB,aAAa,SAAS,kBAEtB,QAAO,EACL,OAAO,IAAI,eAAe;GACxB,SAAS,CAAC;IAAE,OAAO,KAAK;IAAO,GAAG;IAAc,MAAM;GAAY,CAAC;GACnE;EACD,GACF;AAGH,SAAO,EACL,OAAO,IAAI,eAAe;GACxB,SAAS,CAAC;IAAE,OAAO,KAAK;IAAO,GAAG;IAAc,MAAM;GAAQ,CAAC;GAC/D;EACD,GACF;CACF;UAED,KAAK,SAAS,yBACd,KAAK,MAAM,SAAS,mBAEpB,QAAO,EACL,OAAO,IAAI,eAAe;
EACxB,SAAS,OAAO,wBACZ,KACA,CACE;GACE,OAAO,KAAK;GACZ,OAAO,KAAK,MAAM;GAClB,MAAM,KAAK,MAAM;EAClB,CACF;EACL;EACA,mBAAmB,CAAE;EACrB,kBAAkB,CAChB;GACE,OAAO,KAAK;GACZ,MAAM,KAAK,MAAM;EAClB,CACF;CACF,GACF;UAED,KAAK,SAAS,yBACd,KAAK,cAAc,SAAS,QAC5B;EACA,MAAM,UAAU,KAAK,eAAe;AACpC,MAAI,YAAY,OACd,QAAO,EACL,OAAO,IAAI,eAAe;GACxB,SAAS,OAAO,wBACZ,UACA,CACE;IACE,OAAO,KAAK;IACZ,GAAG,KAAK;GACT,CACF;GACL;GACA,mBAAmB,CAAE;EACtB,GACF;CAEJ,WACC,KAAK,SAAS,yBACd,KAAK,cAAc,SAAS,oBAE5B,QAAO,EACL,OAAO,IAAI,eAAe;EACxB,SAAS,OAAO,wBACZ,KACA,CAAC;GAAE,OAAO,KAAK;GAAO,GAAG,KAAK;EAAe,CAAC;EAClD;CACD,GACF;UAED,KAAK,SAAS,yBACd,KAAK,cAAc,SAAS,YAC5B;EACA,MAAM,UAAU,KAAK,cAAc;AACnC,SAAO,EACL,OAAO,IAAI,eAAe;GACxB,SAAS,OAAO,wBACZ,UACA,CAAC;IAAE,OAAO,KAAK;IAAO,GAAG,KAAK;GAAe,CAAC;GAClD;EACD,GACF;CACF;AACD,QAAO;AACR;AAED,SAAgB,gCACdC,UACAC,kBACkB;CAClB,MAAM,oBAAoB;EACxB,GAAG;EACH,gBAAgB;CACjB;CACD,MAAMC,QACJ,iBAAiB;CACnB,MAAM,gBACJ,SAAS,OACL;EACE,cAAc,MAAM,gBAAgB;EACpC,eAAe,MAAM,iBAAiB;EACtC,eAAe,MAAM,gBAAgB,MAAM,MAAM,iBAAiB;EAClE,qBAAqB;GACnB,gBAAgB,MAAM;GACtB,YAAY,MAAM;EACnB;CACF,IACD;AACN,KAAI,SAAS,WAAW,KAAK,SAAS,GAAG,SAAS,OAChD,QAAO,CACL;EACE,MAAM,SAAS,GAAG;EAClB,SAAS,IAAI,UAAU;GACrB,SAAS,SAAS,GAAG;GACrB,mBAAmB;GACnB,gBAAgB;GAChB;GACA,IAAI,iBAAiB;EACtB;CACF,CACF;MACI;EACL,MAAM,YAAY,iBAAiB,SAAS;EAC5C,MAAMC,cAAgC,CACpC;GACE,MAAM;GACN,SAAS,IAAI,UAAU;IAErB,SAAS;IACT,mBAAmB;IACnB,YAAY;IACZ,gBAAgB;IAChB;IACA,IAAI,iBAAiB;GACtB;EACF,CACF;AACD,SAAO;CACR;AACF"}
package/dist/utils/prompts.cjs
CHANGED
@@ -1,48 +1,49 @@
-[old lines 1-3 deleted; content not rendered in this extract]
-const message_inputs_js_1 = require("./message_inputs.cjs");
+const require_message_inputs = require('./message_inputs.cjs');
+
+//#region src/utils/prompts.ts
 /**
-[old lines 6-40 deleted; content not rendered in this extract]
+ * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
+ * a format expected by Anthropic's JS SDK.
+ *
+ * Requires the "@langchain/anthropic" package to be installed in addition
+ * to the Anthropic SDK.
+ *
+ * @example
+ * ```ts
+ * import { convertPromptToAnthropic } from "langsmith/utils/hub/anthropic";
+ * import { pull } from "langchain/hub";
+ *
+ * import Anthropic from '@anthropic-ai/sdk';
+ *
+ * const prompt = await pull("jacob/joke-generator");
+ * const formattedPrompt = await prompt.invoke({
+ *   topic: "cats",
+ * });
+ *
+ * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
+ *
+ * const anthropicClient = new Anthropic({
+ *   apiKey: 'your_api_key',
+ * });
+ *
+ * const anthropicResponse = await anthropicClient.messages.create({
+ *   model: "claude-3-5-sonnet-20240620",
+ *   max_tokens: 1024,
+ *   stream: false,
+ *   system,
+ *   messages,
+ * });
+ * ```
+ * @param formattedPrompt
+ * @returns A partial Anthropic payload.
+ */
 function convertPromptToAnthropic(formattedPrompt) {
-[old lines 42-45 deleted; content not rendered in this extract]
-    }
-    return anthropicBody;
+  const messages = formattedPrompt.toChatMessages();
+  const anthropicBody = require_message_inputs._convertMessagesToAnthropicPayload(messages);
+  if (anthropicBody.messages === void 0) anthropicBody.messages = [];
+  return anthropicBody;
 }
+
+//#endregion
+exports.convertPromptToAnthropic = convertPromptToAnthropic;
+//# sourceMappingURL=prompts.cjs.map
package/dist/utils/prompts.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"prompts.cjs","names":["formattedPrompt: BasePromptValue","_convertMessagesToAnthropicPayload"],"sources":["../../src/utils/prompts.ts"],"sourcesContent":["import type { BasePromptValue } from \"@langchain/core/prompt_values\";\nimport Anthropic from \"@anthropic-ai/sdk\";\n\nimport { _convertMessagesToAnthropicPayload } from \"./message_inputs.js\";\n\n/**\n * Convert a formatted LangChain prompt (e.g. pulled from the hub) into\n * a format expected by Anthropic's JS SDK.\n *\n * Requires the \"@langchain/anthropic\" package to be installed in addition\n * to the Anthropic SDK.\n *\n * @example\n * ```ts\n * import { convertPromptToAnthropic } from \"langsmith/utils/hub/anthropic\";\n * import { pull } from \"langchain/hub\";\n *\n * import Anthropic from '@anthropic-ai/sdk';\n *\n * const prompt = await pull(\"jacob/joke-generator\");\n * const formattedPrompt = await prompt.invoke({\n * topic: \"cats\",\n * });\n *\n * const { system, messages } = convertPromptToAnthropic(formattedPrompt);\n *\n * const anthropicClient = new Anthropic({\n * apiKey: 'your_api_key',\n * });\n *\n * const anthropicResponse = await anthropicClient.messages.create({\n * model: \"claude-3-5-sonnet-20240620\",\n * max_tokens: 1024,\n * stream: false,\n * system,\n * messages,\n * });\n * ```\n * @param formattedPrompt\n * @returns A partial Anthropic payload.\n */\nexport function convertPromptToAnthropic(\n formattedPrompt: BasePromptValue\n): Anthropic.Messages.MessageCreateParams {\n const messages = formattedPrompt.toChatMessages();\n const anthropicBody = _convertMessagesToAnthropicPayload(messages);\n if (anthropicBody.messages === undefined) {\n anthropicBody.messages = [];\n }\n return anthropicBody;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyCA,SAAgB,yBACdA,iBACwC;CACxC,MAAM,WAAW,gBAAgB,gBAAgB;CACjD,MAAM,gBAAgBC,0DAAmC,SAAS;AAClE,KAAI,cAAc,aAAa,QAC7B,cAAc,WAAW,CAAE;AAE7B,QAAO;AACR"}
package/dist/utils/prompts.d.cts
ADDED
@@ -0,0 +1,45 @@
+import Anthropic from "@anthropic-ai/sdk";
+import { BasePromptValue } from "@langchain/core/prompt_values";
+
+//#region src/utils/prompts.d.ts
+
+/**
+ * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
+ * a format expected by Anthropic's JS SDK.
+ *
+ * Requires the "@langchain/anthropic" package to be installed in addition
+ * to the Anthropic SDK.
+ *
+ * @example
+ * ```ts
+ * import { convertPromptToAnthropic } from "langsmith/utils/hub/anthropic";
+ * import { pull } from "langchain/hub";
+ *
+ * import Anthropic from '@anthropic-ai/sdk';
+ *
+ * const prompt = await pull("jacob/joke-generator");
+ * const formattedPrompt = await prompt.invoke({
+ *   topic: "cats",
+ * });
+ *
+ * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
+ *
+ * const anthropicClient = new Anthropic({
+ *   apiKey: 'your_api_key',
+ * });
+ *
+ * const anthropicResponse = await anthropicClient.messages.create({
+ *   model: "claude-3-5-sonnet-20240620",
+ *   max_tokens: 1024,
+ *   stream: false,
+ *   system,
+ *   messages,
+ * });
+ * ```
+ * @param formattedPrompt
+ * @returns A partial Anthropic payload.
+ */
+declare function convertPromptToAnthropic(formattedPrompt: BasePromptValue): Anthropic.Messages.MessageCreateParams;
+//#endregion
+export { convertPromptToAnthropic };
+//# sourceMappingURL=prompts.d.cts.map
package/dist/utils/prompts.d.cts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"prompts.d.cts","names":["BasePromptValue","Anthropic","convertPromptToAnthropic","Messages","MessageCreateParams"],"sources":["../../src/utils/prompts.d.ts"],"sourcesContent":["import type { BasePromptValue } from \"@langchain/core/prompt_values\";\nimport Anthropic from \"@anthropic-ai/sdk\";\n/**\n * Convert a formatted LangChain prompt (e.g. pulled from the hub) into\n * a format expected by Anthropic's JS SDK.\n *\n * Requires the \"@langchain/anthropic\" package to be installed in addition\n * to the Anthropic SDK.\n *\n * @example\n * ```ts\n * import { convertPromptToAnthropic } from \"langsmith/utils/hub/anthropic\";\n * import { pull } from \"langchain/hub\";\n *\n * import Anthropic from '@anthropic-ai/sdk';\n *\n * const prompt = await pull(\"jacob/joke-generator\");\n * const formattedPrompt = await prompt.invoke({\n * topic: \"cats\",\n * });\n *\n * const { system, messages } = convertPromptToAnthropic(formattedPrompt);\n *\n * const anthropicClient = new Anthropic({\n * apiKey: 'your_api_key',\n * });\n *\n * const anthropicResponse = await anthropicClient.messages.create({\n * model: \"claude-3-5-sonnet-20240620\",\n * max_tokens: 1024,\n * stream: false,\n * system,\n * messages,\n * });\n * ```\n * @param formattedPrompt\n * @returns A partial Anthropic payload.\n */\nexport declare function convertPromptToAnthropic(formattedPrompt: BasePromptValue): Anthropic.Messages.MessageCreateParams;\n"],"mappings":";;;;;;;AAsCA;;;;AAA0H;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBAAlGE,wBAAAA,kBAA0CF,kBAAkBC,SAAAA,CAAUE,QAAAA,CAASC"}
package/dist/utils/prompts.d.ts
CHANGED
@@ -1,5 +1,8 @@
-import type { BasePromptValue } from "@langchain/core/prompt_values";
 import Anthropic from "@anthropic-ai/sdk";
+import { BasePromptValue } from "@langchain/core/prompt_values";
+
+//#region src/utils/prompts.d.ts
+
 /**
  * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
  * a format expected by Anthropic's JS SDK.
@@ -36,4 +39,7 @@ import Anthropic from "@anthropic-ai/sdk";
  * @param formattedPrompt
  * @returns A partial Anthropic payload.
  */
-[old line 39 deleted; content not rendered in this extract]
+declare function convertPromptToAnthropic(formattedPrompt: BasePromptValue): Anthropic.Messages.MessageCreateParams;
+//#endregion
+export { convertPromptToAnthropic };
+//# sourceMappingURL=prompts.d.ts.map
package/dist/utils/prompts.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"prompts.d.ts","names":["BasePromptValue","Anthropic","convertPromptToAnthropic","Messages","MessageCreateParams"],"sources":["../../src/utils/prompts.d.ts"],"sourcesContent":["import type { BasePromptValue } from \"@langchain/core/prompt_values\";\nimport Anthropic from \"@anthropic-ai/sdk\";\n/**\n * Convert a formatted LangChain prompt (e.g. pulled from the hub) into\n * a format expected by Anthropic's JS SDK.\n *\n * Requires the \"@langchain/anthropic\" package to be installed in addition\n * to the Anthropic SDK.\n *\n * @example\n * ```ts\n * import { convertPromptToAnthropic } from \"langsmith/utils/hub/anthropic\";\n * import { pull } from \"langchain/hub\";\n *\n * import Anthropic from '@anthropic-ai/sdk';\n *\n * const prompt = await pull(\"jacob/joke-generator\");\n * const formattedPrompt = await prompt.invoke({\n * topic: \"cats\",\n * });\n *\n * const { system, messages } = convertPromptToAnthropic(formattedPrompt);\n *\n * const anthropicClient = new Anthropic({\n * apiKey: 'your_api_key',\n * });\n *\n * const anthropicResponse = await anthropicClient.messages.create({\n * model: \"claude-3-5-sonnet-20240620\",\n * max_tokens: 1024,\n * stream: false,\n * system,\n * messages,\n * });\n * ```\n * @param formattedPrompt\n * @returns A partial Anthropic payload.\n */\nexport declare function convertPromptToAnthropic(formattedPrompt: BasePromptValue): Anthropic.Messages.MessageCreateParams;\n"],"mappings":";;;;;;;AAsCA;;;;AAA0H;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBAAlGE,wBAAAA,kBAA0CF,kBAAkBC,SAAAA,CAAUE,QAAAA,CAASC"}