@langchain/anthropic 0.3.26 → 1.0.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. package/README.md +8 -8
  2. package/dist/_virtual/rolldown_runtime.cjs +25 -0
  3. package/dist/chat_models.cjs +772 -1000
  4. package/dist/chat_models.cjs.map +1 -0
  5. package/dist/chat_models.d.cts +615 -0
  6. package/dist/chat_models.d.cts.map +1 -0
  7. package/dist/chat_models.d.ts +222 -206
  8. package/dist/chat_models.d.ts.map +1 -0
  9. package/dist/chat_models.js +766 -991
  10. package/dist/chat_models.js.map +1 -0
  11. package/dist/index.cjs +6 -20
  12. package/dist/index.d.cts +4 -0
  13. package/dist/index.d.ts +4 -3
  14. package/dist/index.js +4 -2
  15. package/dist/output_parsers.cjs +65 -104
  16. package/dist/output_parsers.cjs.map +1 -0
  17. package/dist/output_parsers.js +64 -100
  18. package/dist/output_parsers.js.map +1 -0
  19. package/dist/types.d.cts +32 -0
  20. package/dist/types.d.cts.map +1 -0
  21. package/dist/types.d.ts +29 -31
  22. package/dist/types.d.ts.map +1 -0
  23. package/dist/utils/content.cjs +153 -0
  24. package/dist/utils/content.cjs.map +1 -0
  25. package/dist/utils/content.js +148 -0
  26. package/dist/utils/content.js.map +1 -0
  27. package/dist/utils/errors.cjs +16 -27
  28. package/dist/utils/errors.cjs.map +1 -0
  29. package/dist/utils/errors.js +17 -25
  30. package/dist/utils/errors.js.map +1 -0
  31. package/dist/utils/index.cjs +7 -0
  32. package/dist/utils/index.cjs.map +1 -0
  33. package/dist/utils/index.js +6 -0
  34. package/dist/utils/index.js.map +1 -0
  35. package/dist/utils/message_inputs.cjs +218 -535
  36. package/dist/utils/message_inputs.cjs.map +1 -0
  37. package/dist/utils/message_inputs.js +219 -533
  38. package/dist/utils/message_inputs.js.map +1 -0
  39. package/dist/utils/message_outputs.cjs +185 -246
  40. package/dist/utils/message_outputs.cjs.map +1 -0
  41. package/dist/utils/message_outputs.js +184 -243
  42. package/dist/utils/message_outputs.js.map +1 -0
  43. package/dist/utils/prompts.cjs +46 -45
  44. package/dist/utils/prompts.cjs.map +1 -0
  45. package/dist/utils/prompts.d.cts +45 -0
  46. package/dist/utils/prompts.d.cts.map +1 -0
  47. package/dist/utils/prompts.d.ts +8 -2
  48. package/dist/utils/prompts.d.ts.map +1 -0
  49. package/dist/utils/prompts.js +46 -42
  50. package/dist/utils/prompts.js.map +1 -0
  51. package/dist/utils/standard.cjs +127 -0
  52. package/dist/utils/standard.cjs.map +1 -0
  53. package/dist/utils/standard.js +127 -0
  54. package/dist/utils/standard.js.map +1 -0
  55. package/dist/utils/tools.cjs +14 -25
  56. package/dist/utils/tools.cjs.map +1 -0
  57. package/dist/utils/tools.js +14 -23
  58. package/dist/utils/tools.js.map +1 -0
  59. package/package.json +30 -53
  60. package/dist/experimental/index.cjs +0 -17
  61. package/dist/experimental/index.d.ts +0 -1
  62. package/dist/experimental/index.js +0 -1
  63. package/dist/experimental/tool_calling.cjs +0 -318
  64. package/dist/experimental/tool_calling.d.ts +0 -57
  65. package/dist/experimental/tool_calling.js +0 -314
  66. package/dist/experimental/utils/tool_calling.cjs +0 -106
  67. package/dist/experimental/utils/tool_calling.d.ts +0 -10
  68. package/dist/experimental/utils/tool_calling.js +0 -101
  69. package/dist/load/import_constants.cjs +0 -5
  70. package/dist/load/import_constants.d.ts +0 -1
  71. package/dist/load/import_constants.js +0 -2
  72. package/dist/load/import_map.cjs +0 -39
  73. package/dist/load/import_map.d.ts +0 -2
  74. package/dist/load/import_map.js +0 -3
  75. package/dist/load/import_type.cjs +0 -3
  76. package/dist/load/import_type.d.ts +0 -5
  77. package/dist/load/import_type.js +0 -2
  78. package/dist/load/index.cjs +0 -63
  79. package/dist/load/index.d.ts +0 -14
  80. package/dist/load/index.js +0 -25
  81. package/dist/load/map_keys.cjs +0 -2
  82. package/dist/load/map_keys.d.ts +0 -3
  83. package/dist/load/map_keys.js +0 -1
  84. package/dist/load/serializable.cjs +0 -17
  85. package/dist/load/serializable.d.ts +0 -1
  86. package/dist/load/serializable.js +0 -1
  87. package/dist/output_parsers.d.ts +0 -22
  88. package/dist/types.cjs +0 -48
  89. package/dist/types.js +0 -45
  90. package/dist/utils/errors.d.ts +0 -3
  91. package/dist/utils/message_inputs.d.ts +0 -14
  92. package/dist/utils/message_outputs.d.ts +0 -14
  93. package/dist/utils/tools.d.ts +0 -3
  94. package/experimental.cjs +0 -1
  95. package/experimental.d.cts +0 -1
  96. package/experimental.d.ts +0 -1
  97. package/experimental.js +0 -1
  98. package/index.cjs +0 -1
  99. package/index.d.cts +0 -1
  100. package/index.d.ts +0 -1
  101. package/index.js +0 -1
package/dist/utils/message_inputs.cjs
@@ -1,548 +1,231 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports._convertLangChainToolCallToAnthropic = _convertLangChainToolCallToAnthropic;
- exports._convertMessagesToAnthropicPayload = _convertMessagesToAnthropicPayload;
- /**
- * This util file contains functions for converting LangChain messages to Anthropic messages.
- */
- const messages_1 = require("@langchain/core/messages");
- const types_js_1 = require("../types.cjs");
+ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
+ const require_content = require('./content.cjs');
+ const require_standard = require('./standard.cjs');
+ const __langchain_core_messages = require_rolldown_runtime.__toESM(require("@langchain/core/messages"));
+
+ //#region src/utils/message_inputs.ts
  function _formatImage(imageUrl) {
- const parsed = (0, messages_1.parseBase64DataUrl)({ dataUrl: imageUrl });
- if (parsed) {
- return {
- type: "base64",
- media_type: parsed.mime_type,
- data: parsed.data,
- };
- }
- let parsedUrl;
- try {
- parsedUrl = new URL(imageUrl);
- }
- catch {
- throw new Error([
- `Malformed image URL: ${JSON.stringify(imageUrl)}. Content blocks of type 'image_url' must be a valid http, https, or base64-encoded data URL.`,
- "Example: data:image/png;base64,/9j/4AAQSk...",
- "Example: https://example.com/image.jpg",
- ].join("\n\n"));
- }
- if (parsedUrl.protocol === "http:" || parsedUrl.protocol === "https:") {
- return {
- type: "url",
- url: imageUrl,
- };
- }
- throw new Error([
- `Invalid image URL protocol: ${JSON.stringify(parsedUrl.protocol)}. Anthropic only supports images as http, https, or base64-encoded data URLs on 'image_url' content blocks.`,
- "Example: data:image/png;base64,/9j/4AAQSk...",
- "Example: https://example.com/image.jpg",
- ].join("\n\n"));
+ const parsed = (0, __langchain_core_messages.parseBase64DataUrl)({ dataUrl: imageUrl });
+ if (parsed) return {
+ type: "base64",
+ media_type: parsed.mime_type,
+ data: parsed.data
+ };
+ let parsedUrl;
+ try {
+ parsedUrl = new URL(imageUrl);
+ } catch {
+ throw new Error([
+ `Malformed image URL: ${JSON.stringify(imageUrl)}. Content blocks of type 'image_url' must be a valid http, https, or base64-encoded data URL.`,
+ "Example: data:image/png;base64,/9j/4AAQSk...",
+ "Example: https://example.com/image.jpg"
+ ].join("\n\n"));
+ }
+ if (parsedUrl.protocol === "http:" || parsedUrl.protocol === "https:") return {
+ type: "url",
+ url: imageUrl
+ };
+ throw new Error([
+ `Invalid image URL protocol: ${JSON.stringify(parsedUrl.protocol)}. Anthropic only supports images as http, https, or base64-encoded data URLs on 'image_url' content blocks.`,
+ "Example: data:image/png;base64,/9j/4AAQSk...",
+ "Example: https://example.com/image.jpg"
+ ].join("\n\n"));
  }
  function _ensureMessageContents(messages) {
- // Merge runs of human/tool messages into single human messages with content blocks.
- const updatedMsgs = [];
- for (const message of messages) {
- if (message._getType() === "tool") {
- if (typeof message.content === "string") {
- const previousMessage = updatedMsgs[updatedMsgs.length - 1];
- if (previousMessage?._getType() === "human" &&
- Array.isArray(previousMessage.content) &&
- "type" in previousMessage.content[0] &&
- previousMessage.content[0].type === "tool_result") {
- // If the previous message was a tool result, we merge this tool message into it.
- previousMessage.content.push({
- type: "tool_result",
- content: message.content,
- tool_use_id: message.tool_call_id,
- });
- }
- else {
- // If not, we create a new human message with the tool result.
- updatedMsgs.push(new messages_1.HumanMessage({
- content: [
- {
- type: "tool_result",
- content: message.content,
- tool_use_id: message.tool_call_id,
- },
- ],
- }));
- }
- }
- else {
- updatedMsgs.push(new messages_1.HumanMessage({
- content: [
- {
- type: "tool_result",
- // rare case: message.content could be undefined
- ...(message.content != null
- ? { content: _formatContent(message) }
- : {}),
- tool_use_id: message.tool_call_id,
- },
- ],
- }));
- }
- }
- else {
- updatedMsgs.push(message);
- }
- }
- return updatedMsgs;
+ const updatedMsgs = [];
+ for (const message of messages) if (message._getType() === "tool") if (typeof message.content === "string") {
+ const previousMessage = updatedMsgs[updatedMsgs.length - 1];
+ if (previousMessage?._getType() === "human" && Array.isArray(previousMessage.content) && "type" in previousMessage.content[0] && previousMessage.content[0].type === "tool_result") previousMessage.content.push({
+ type: "tool_result",
+ content: message.content,
+ tool_use_id: message.tool_call_id
+ });
+ else updatedMsgs.push(new __langchain_core_messages.HumanMessage({ content: [{
+ type: "tool_result",
+ content: message.content,
+ tool_use_id: message.tool_call_id
+ }] }));
+ } else updatedMsgs.push(new __langchain_core_messages.HumanMessage({ content: [{
+ type: "tool_result",
+ ...message.content != null ? { content: _formatContent(message) } : {},
+ tool_use_id: message.tool_call_id
+ }] }));
+ else updatedMsgs.push(message);
+ return updatedMsgs;
  }
  function _convertLangChainToolCallToAnthropic(toolCall) {
- if (toolCall.id === undefined) {
- throw new Error(`Anthropic requires all tool calls to have an "id".`);
- }
- return {
- type: "tool_use",
- id: toolCall.id,
- name: toolCall.name,
- input: toolCall.args,
- };
+ if (toolCall.id === void 0) throw new Error(`Anthropic requires all tool calls to have an "id".`);
+ return {
+ type: "tool_use",
+ id: toolCall.id,
+ name: toolCall.name,
+ input: toolCall.args
+ };
+ }
+ function* _formatContentBlocks(content) {
+ const toolTypes = [
+ "tool_use",
+ "tool_result",
+ "input_json_delta",
+ "server_tool_use",
+ "web_search_tool_result",
+ "web_search_result"
+ ];
+ const textTypes = ["text", "text_delta"];
+ for (const contentPart of content) {
+ if ((0, __langchain_core_messages.isDataContentBlock)(contentPart)) yield (0, __langchain_core_messages.convertToProviderContentBlock)(contentPart, require_content.standardContentBlockConverter);
+ const cacheControl = "cache_control" in contentPart ? contentPart.cache_control : void 0;
+ if (contentPart.type === "image_url") {
+ let source;
+ if (typeof contentPart.image_url === "string") source = _formatImage(contentPart.image_url);
+ else if (typeof contentPart.image_url === "object" && contentPart.image_url !== null && "url" in contentPart.image_url && typeof contentPart.image_url.url === "string") source = _formatImage(contentPart.image_url.url);
+ if (source) yield {
+ type: "image",
+ source,
+ ...cacheControl ? { cache_control: cacheControl } : {}
+ };
+ } else if (require_content._isAnthropicImageBlockParam(contentPart)) return contentPart;
+ else if (contentPart.type === "document") yield {
+ ...contentPart,
+ ...cacheControl ? { cache_control: cacheControl } : {}
+ };
+ else if (require_content._isAnthropicThinkingBlock(contentPart)) {
+ const block = {
+ type: "thinking",
+ thinking: contentPart.thinking,
+ signature: contentPart.signature,
+ ...cacheControl ? { cache_control: cacheControl } : {}
+ };
+ yield block;
+ } else if (require_content._isAnthropicRedactedThinkingBlock(contentPart)) {
+ const block = {
+ type: "redacted_thinking",
+ data: contentPart.data,
+ ...cacheControl ? { cache_control: cacheControl } : {}
+ };
+ yield block;
+ } else if (require_content._isAnthropicSearchResultBlock(contentPart)) {
+ const block = {
+ type: "search_result",
+ title: contentPart.title,
+ source: contentPart.source,
+ ..."cache_control" in contentPart && contentPart.cache_control ? { cache_control: contentPart.cache_control } : {},
+ ..."citations" in contentPart && contentPart.citations ? { citations: contentPart.citations } : {},
+ content: contentPart.content
+ };
+ yield block;
+ } else if (textTypes.find((t) => t === contentPart.type) && "text" in contentPart) yield {
+ type: "text",
+ text: contentPart.text,
+ ...cacheControl ? { cache_control: cacheControl } : {},
+ ..."citations" in contentPart && contentPart.citations ? { citations: contentPart.citations } : {}
+ };
+ else if (toolTypes.find((t) => t === contentPart.type)) {
+ const contentPartCopy = { ...contentPart };
+ if ("index" in contentPartCopy) delete contentPartCopy.index;
+ if (contentPartCopy.type === "input_json_delta") contentPartCopy.type = "tool_use";
+ if ("input" in contentPartCopy) {
+ if (typeof contentPartCopy.input === "string") try {
+ contentPartCopy.input = JSON.parse(contentPartCopy.input);
+ } catch {
+ contentPartCopy.input = {};
+ }
+ }
+ yield {
+ ...contentPartCopy,
+ ...cacheControl ? { cache_control: cacheControl } : {}
+ };
+ }
+ }
  }
- const standardContentBlockConverter = {
- providerName: "anthropic",
- fromStandardTextBlock(block) {
- return {
- type: "text",
- text: block.text,
- ...("citations" in (block.metadata ?? {})
- ? { citations: block.metadata.citations }
- : {}),
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- };
- },
- fromStandardImageBlock(block) {
- if (block.source_type === "url") {
- const data = (0, messages_1.parseBase64DataUrl)({
- dataUrl: block.url,
- asTypedArray: false,
- });
- if (data) {
- return {
- type: "image",
- source: {
- type: "base64",
- data: data.data,
- media_type: data.mime_type,
- },
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- };
- }
- else {
- return {
- type: "image",
- source: {
- type: "url",
- url: block.url,
- media_type: block.mime_type ?? "",
- },
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- };
- }
- }
- else {
- if (block.source_type === "base64") {
- return {
- type: "image",
- source: {
- type: "base64",
- data: block.data,
- media_type: block.mime_type ?? "",
- },
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- };
- }
- else {
- throw new Error(`Unsupported image source type: ${block.source_type}`);
- }
- }
- },
- fromStandardFileBlock(block) {
- const mime_type = (block.mime_type ?? "").split(";")[0];
- if (block.source_type === "url") {
- if (mime_type === "application/pdf" || mime_type === "") {
- return {
- type: "document",
- source: {
- type: "url",
- url: block.url,
- media_type: block.mime_type ?? "",
- },
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- ...("citations" in (block.metadata ?? {})
- ? { citations: block.metadata.citations }
- : {}),
- ...("context" in (block.metadata ?? {})
- ? { context: block.metadata.context }
- : {}),
- ...("title" in (block.metadata ?? {})
- ? { title: block.metadata.title }
- : {}),
- };
- }
- throw new Error(`Unsupported file mime type for file url source: ${block.mime_type}`);
- }
- else if (block.source_type === "text") {
- if (mime_type === "text/plain" || mime_type === "") {
- return {
- type: "document",
- source: {
- type: "text",
- data: block.text,
- media_type: block.mime_type ?? "",
- },
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- ...("citations" in (block.metadata ?? {})
- ? { citations: block.metadata.citations }
- : {}),
- ...("context" in (block.metadata ?? {})
- ? { context: block.metadata.context }
- : {}),
- ...("title" in (block.metadata ?? {})
- ? { title: block.metadata.title }
- : {}),
- };
- }
- else {
- throw new Error(`Unsupported file mime type for file text source: ${block.mime_type}`);
- }
- }
- else if (block.source_type === "base64") {
- if (mime_type === "application/pdf" || mime_type === "") {
- return {
- type: "document",
- source: {
- type: "base64",
- data: block.data,
- media_type: "application/pdf",
- },
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- ...("citations" in (block.metadata ?? {})
- ? { citations: block.metadata.citations }
- : {}),
- ...("context" in (block.metadata ?? {})
- ? { context: block.metadata.context }
- : {}),
- ...("title" in (block.metadata ?? {})
- ? { title: block.metadata.title }
- : {}),
- };
- }
- else if (["image/jpeg", "image/png", "image/gif", "image/webp"].includes(mime_type)) {
- return {
- type: "document",
- source: {
- type: "content",
- content: [
- {
- type: "image",
- source: {
- type: "base64",
- data: block.data,
- media_type: mime_type,
- },
- },
- ],
- },
- ...("cache_control" in (block.metadata ?? {})
- ? { cache_control: block.metadata.cache_control }
- : {}),
- ...("citations" in (block.metadata ?? {})
- ? { citations: block.metadata.citations }
- : {}),
- ...("context" in (block.metadata ?? {})
- ? { context: block.metadata.context }
- : {}),
- ...("title" in (block.metadata ?? {})
- ? { title: block.metadata.title }
- : {}),
- };
- }
- else {
- throw new Error(`Unsupported file mime type for file base64 source: ${block.mime_type}`);
- }
- }
- else {
- throw new Error(`Unsupported file source type: ${block.source_type}`);
- }
- },
- };
  function _formatContent(message) {
- const toolTypes = [
- "tool_use",
- "tool_result",
- "input_json_delta",
- "server_tool_use",
- "web_search_tool_result",
- "web_search_result",
- ];
- const textTypes = ["text", "text_delta"];
- const { content } = message;
- if (typeof content === "string") {
- return content;
- }
- else {
- const contentBlocks = content.map((contentPart) => {
- if ((0, messages_1.isDataContentBlock)(contentPart)) {
- return (0, messages_1.convertToProviderContentBlock)(contentPart, standardContentBlockConverter);
- }
- const cacheControl = "cache_control" in contentPart ? contentPart.cache_control : undefined;
- if (contentPart.type === "image_url") {
- let source;
- if (typeof contentPart.image_url === "string") {
- source = _formatImage(contentPart.image_url);
- }
- else {
- source = _formatImage(contentPart.image_url.url);
- }
- return {
- type: "image", // Explicitly setting the type as "image"
- source,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- };
- }
- else if ((0, types_js_1.isAnthropicImageBlockParam)(contentPart)) {
- return contentPart;
- }
- else if (contentPart.type === "document") {
- // PDF
- return {
- ...contentPart,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- };
- }
- else if (contentPart.type === "thinking") {
- const block = {
- type: "thinking", // Explicitly setting the type as "thinking"
- thinking: contentPart.thinking,
- signature: contentPart.signature,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- };
- return block;
- }
- else if (contentPart.type === "redacted_thinking") {
- const block = {
- type: "redacted_thinking", // Explicitly setting the type as "redacted_thinking"
- data: contentPart.data,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- };
- return block;
- }
- else if (contentPart.type === "search_result") {
- const block = {
- type: "search_result", // Explicitly setting the type as "search_result"
- title: contentPart.title,
- source: contentPart.source,
- ...("cache_control" in contentPart && contentPart.cache_control
- ? { cache_control: contentPart.cache_control }
- : {}),
- ...("citations" in contentPart && contentPart.citations
- ? { citations: contentPart.citations }
- : {}),
- content: contentPart.content,
- };
- return block;
- }
- else if (textTypes.find((t) => t === contentPart.type) &&
- "text" in contentPart) {
- // Assuming contentPart is of type MessageContentText here
- return {
- type: "text", // Explicitly setting the type as "text"
- text: contentPart.text,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- ...("citations" in contentPart && contentPart.citations
- ? { citations: contentPart.citations }
- : {}),
- };
- }
- else if (toolTypes.find((t) => t === contentPart.type)) {
- const contentPartCopy = { ...contentPart };
- if ("index" in contentPartCopy) {
- // Anthropic does not support passing the index field here, so we remove it.
- delete contentPartCopy.index;
- }
- if (contentPartCopy.type === "input_json_delta") {
- // `input_json_delta` type only represents yielding partial tool inputs
- // and is not a valid type for Anthropic messages.
- contentPartCopy.type = "tool_use";
- }
- if ("input" in contentPartCopy) {
- // Anthropic tool use inputs should be valid objects, when applicable.
- if (typeof contentPartCopy.input === "string") {
- try {
- contentPartCopy.input = JSON.parse(contentPartCopy.input);
- }
- catch {
- contentPartCopy.input = {};
- }
- }
- }
- // TODO: Fix when SDK types are fixed
- return {
- ...contentPartCopy,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- };
- }
- else if ("functionCall" in contentPart &&
- contentPart.functionCall &&
- typeof contentPart.functionCall === "object" &&
- (0, messages_1.isAIMessage)(message)) {
- const correspondingToolCall = message.tool_calls?.find((toolCall) => toolCall.name === contentPart.functionCall.name);
- if (!correspondingToolCall) {
- throw new Error(`Could not find tool call for function call ${contentPart.functionCall.name}`);
- }
- // Google GenAI models include a `functionCall` object inside content. We should ignore it as Anthropic will not support it.
- return {
- id: correspondingToolCall.id,
- type: "tool_use",
- name: correspondingToolCall.name,
- input: contentPart.functionCall.args,
- };
- }
- else {
- throw new Error("Unsupported message content format");
- }
- });
- return contentBlocks;
- }
+ const { content } = message;
+ if (typeof content === "string") return content;
+ else return Array.from(_formatContentBlocks(content));
  }
  /**
- * Formats messages as a prompt for the model.
- * Used in LangSmith, export is important here.
- * @param messages The base messages to format as a prompt.
- * @returns The formatted prompt.
- */
+ * Formats messages as a prompt for the model.
+ * Used in LangSmith, export is important here.
+ * @param messages The base messages to format as a prompt.
+ * @returns The formatted prompt.
+ */
  function _convertMessagesToAnthropicPayload(messages) {
- const mergedMessages = _ensureMessageContents(messages);
- let system;
- if (mergedMessages.length > 0 && mergedMessages[0]._getType() === "system") {
- system = messages[0].content;
- }
- const conversationMessages = system !== undefined ? mergedMessages.slice(1) : mergedMessages;
- const formattedMessages = conversationMessages.map((message) => {
- let role;
- if (message._getType() === "human") {
- role = "user";
- }
- else if (message._getType() === "ai") {
- role = "assistant";
- }
- else if (message._getType() === "tool") {
- role = "user";
- }
- else if (message._getType() === "system") {
- throw new Error("System messages are only permitted as the first passed message.");
- }
- else {
- throw new Error(`Message type "${message._getType()}" is not supported.`);
- }
- if ((0, messages_1.isAIMessage)(message) && !!message.tool_calls?.length) {
- if (typeof message.content === "string") {
- if (message.content === "") {
- return {
- role,
- content: message.tool_calls.map(_convertLangChainToolCallToAnthropic),
- };
- }
- else {
- return {
- role,
- content: [
- { type: "text", text: message.content },
- ...message.tool_calls.map(_convertLangChainToolCallToAnthropic),
- ],
- };
- }
- }
- else {
- const { content } = message;
- const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => (contentPart.type === "tool_use" ||
- contentPart.type === "input_json_delta" ||
- contentPart.type === "server_tool_use") &&
- contentPart.id === toolCall.id));
- if (hasMismatchedToolCalls) {
- console.warn(`The "tool_calls" field on a message is only respected if content is a string.`);
- }
- return {
- role,
- content: _formatContent(message),
- };
- }
- }
- else {
- return {
- role,
- content: _formatContent(message),
- };
- }
- });
- return {
- messages: mergeMessages(formattedMessages),
- system,
- };
+ const mergedMessages = _ensureMessageContents(messages);
+ let system;
+ if (mergedMessages.length > 0 && mergedMessages[0]._getType() === "system") system = messages[0].content;
+ const conversationMessages = system !== void 0 ? mergedMessages.slice(1) : mergedMessages;
+ const formattedMessages = conversationMessages.map((message) => {
+ let role;
+ if (message._getType() === "human") role = "user";
+ else if (message._getType() === "ai") role = "assistant";
+ else if (message._getType() === "tool") role = "user";
+ else if (message._getType() === "system") throw new Error("System messages are only permitted as the first passed message.");
+ else throw new Error(`Message type "${message._getType()}" is not supported.`);
+ if ((0, __langchain_core_messages.isAIMessage)(message) && message.response_metadata?.output_version === "v1") return {
+ role,
+ content: require_standard._formatStandardContent(message)
+ };
+ if ((0, __langchain_core_messages.isAIMessage)(message) && !!message.tool_calls?.length) if (typeof message.content === "string") if (message.content === "") return {
+ role,
+ content: message.tool_calls.map(_convertLangChainToolCallToAnthropic)
+ };
+ else return {
+ role,
+ content: [{
+ type: "text",
+ text: message.content
+ }, ...message.tool_calls.map(_convertLangChainToolCallToAnthropic)]
+ };
+ else {
+ const { content } = message;
+ const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => (contentPart.type === "tool_use" || contentPart.type === "input_json_delta" || contentPart.type === "server_tool_use") && contentPart.id === toolCall.id));
+ if (hasMismatchedToolCalls) console.warn(`The "tool_calls" field on a message is only respected if content is a string.`);
+ return {
+ role,
+ content: _formatContent(message)
+ };
+ }
+ else return {
+ role,
+ content: _formatContent(message)
+ };
+ });
+ return {
+ messages: mergeMessages(formattedMessages),
+ system
+ };
  }
  function mergeMessages(messages) {
- if (!messages || messages.length <= 1) {
- return messages;
- }
- const result = [];
- let currentMessage = messages[0];
- const normalizeContent = (content) => {
- if (typeof content === "string") {
- return [
- {
- type: "text",
- text: content,
- },
- ];
- }
- return content;
- };
- const isToolResultMessage = (msg) => {
- if (msg.role !== "user")
- return false;
- if (typeof msg.content === "string") {
- return false;
- }
- return (Array.isArray(msg.content) &&
- msg.content.every((item) => item.type === "tool_result"));
- };
- for (let i = 1; i < messages.length; i += 1) {
- const nextMessage = messages[i];
- if (isToolResultMessage(currentMessage) &&
- isToolResultMessage(nextMessage)) {
- // Merge the messages by combining their content arrays
- currentMessage = {
- ...currentMessage,
- content: [
- ...normalizeContent(currentMessage.content),
- ...normalizeContent(nextMessage.content),
- ],
- };
- }
- else {
- result.push(currentMessage);
- currentMessage = nextMessage;
- }
- }
- result.push(currentMessage);
- return result;
+ if (!messages || messages.length <= 1) return messages;
+ const result = [];
+ let currentMessage = messages[0];
+ const normalizeContent = (content) => {
+ if (typeof content === "string") return [{
+ type: "text",
+ text: content
+ }];
+ return content;
+ };
+ const isToolResultMessage = (msg) => {
+ if (msg.role !== "user") return false;
+ if (typeof msg.content === "string") return false;
+ return Array.isArray(msg.content) && msg.content.every((item) => item.type === "tool_result");
+ };
+ for (let i = 1; i < messages.length; i += 1) {
+ const nextMessage = messages[i];
+ if (isToolResultMessage(currentMessage) && isToolResultMessage(nextMessage)) currentMessage = {
+ ...currentMessage,
+ content: [...normalizeContent(currentMessage.content), ...normalizeContent(nextMessage.content)]
+ };
+ else {
+ result.push(currentMessage);
+ currentMessage = nextMessage;
+ }
+ }
+ result.push(currentMessage);
+ return result;
  }
+
+ //#endregion
+ exports._convertMessagesToAnthropicPayload = _convertMessagesToAnthropicPayload;
+ //# sourceMappingURL=message_inputs.cjs.map
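
For orientation, a minimal sketch of what the rewritten converter produces for a tool-calling exchange. This is not from the package docs: the deep require path below is an assumption for illustration, since message_inputs.cjs is an internal build artifact rather than a documented entry point, and the tool name and id are made up.

// Minimal sketch of the converter diffed above (assumed internal path).
const { HumanMessage, AIMessage, ToolMessage } = require("@langchain/core/messages");
const { _convertMessagesToAnthropicPayload } = require("@langchain/anthropic/dist/utils/message_inputs.cjs");

const payload = _convertMessagesToAnthropicPayload([
  new HumanMessage("What's the weather in SF?"),
  // An AIMessage with empty string content and tool_calls becomes a lone tool_use block.
  new AIMessage({
    content: "",
    tool_calls: [{ id: "toolu_01", name: "get_weather", args: { city: "SF" } }],
  }),
  // A string ToolMessage is folded into a user message holding a tool_result block.
  new ToolMessage({ tool_call_id: "toolu_01", content: "72°F and sunny" }),
]);

// payload has the shape { messages, system }; system is undefined here because
// no leading system message was passed.
console.log(JSON.stringify(payload.messages, null, 2));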