@librechat/agents 3.0.0-rc11 → 3.0.0-rc12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/agents/AgentContext.cjs +6 -2
- package/dist/cjs/agents/AgentContext.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +16 -1
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/main.cjs +4 -1
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/messages/cache.cjs +49 -0
- package/dist/cjs/messages/cache.cjs.map +1 -0
- package/dist/cjs/messages/content.cjs +53 -0
- package/dist/cjs/messages/content.cjs.map +1 -0
- package/dist/cjs/messages/format.cjs +0 -27
- package/dist/cjs/messages/format.cjs.map +1 -1
- package/dist/esm/agents/AgentContext.mjs +6 -2
- package/dist/esm/agents/AgentContext.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +16 -1
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/main.mjs +3 -1
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/messages/cache.mjs +47 -0
- package/dist/esm/messages/cache.mjs.map +1 -0
- package/dist/esm/messages/content.mjs +51 -0
- package/dist/esm/messages/content.mjs.map +1 -0
- package/dist/esm/messages/format.mjs +1 -27
- package/dist/esm/messages/format.mjs.map +1 -1
- package/dist/types/agents/AgentContext.d.ts +4 -1
- package/dist/types/messages/cache.d.ts +8 -0
- package/dist/types/messages/content.d.ts +7 -0
- package/dist/types/messages/format.d.ts +1 -7
- package/dist/types/messages/index.d.ts +2 -0
- package/dist/types/types/graph.d.ts +2 -0
- package/dist/types/types/messages.d.ts +4 -0
- package/package.json +1 -1
- package/src/agents/AgentContext.ts +8 -0
- package/src/graphs/Graph.ts +23 -1
- package/src/messages/cache.test.ts +262 -0
- package/src/messages/cache.ts +56 -0
- package/src/messages/content.test.ts +362 -0
- package/src/messages/content.ts +63 -0
- package/src/messages/format.ts +0 -34
- package/src/messages/index.ts +2 -0
- package/src/types/graph.ts +2 -0
- package/src/types/messages.ts +4 -0
|
// Unit tests for addCacheControl, which marks the two most recent user
// messages in an Anthropic payload with `cache_control: { type: 'ephemeral' }`
// so prompt caching can reuse the prefix ending at those breakpoints.
import type Anthropic from '@anthropic-ai/sdk';
import type { AnthropicMessages } from '@/types/messages';
import { addCacheControl } from './cache';

describe('addCacheControl', () => {
  test('should add cache control to the last two user messages with array content', () => {
    const messages: AnthropicMessages = [
      { role: 'user', content: [{ type: 'text', text: 'Hello' }] },
      { role: 'assistant', content: [{ type: 'text', text: 'Hi there' }] },
      { role: 'user', content: [{ type: 'text', text: 'How are you?' }] },
      {
        role: 'assistant',
        content: [{ type: 'text', text: 'I\'m doing well, thanks!' }],
      },
      { role: 'user', content: [{ type: 'text', text: 'Great!' }] },
    ];

    const result = addCacheControl(messages);

    // Only the two most recent user messages are marked; earlier ones are not.
    expect(result[0].content[0]).not.toHaveProperty('cache_control');
    expect(
      (result[2].content[0] as Anthropic.TextBlockParam).cache_control
    ).toEqual({ type: 'ephemeral' });
    expect(
      (result[4].content[0] as Anthropic.TextBlockParam).cache_control
    ).toEqual({ type: 'ephemeral' });
  });

  test('should add cache control to the last two user messages with string content', () => {
    const messages: AnthropicMessages = [
      { role: 'user', content: 'Hello' },
      { role: 'assistant', content: 'Hi there' },
      { role: 'user', content: 'How are you?' },
      { role: 'assistant', content: 'I\'m doing well, thanks!' },
      { role: 'user', content: 'Great!' },
    ];

    const result = addCacheControl(messages);

    // String content is wrapped into a text block so the marker can attach.
    expect(result[0].content).toBe('Hello');
    expect(result[2].content[0]).toEqual({
      type: 'text',
      text: 'How are you?',
      cache_control: { type: 'ephemeral' },
    });
    expect(result[4].content[0]).toEqual({
      type: 'text',
      text: 'Great!',
      cache_control: { type: 'ephemeral' },
    });
  });

  test('should handle mixed string and array content', () => {
    const messages: AnthropicMessages = [
      { role: 'user', content: 'Hello' },
      { role: 'assistant', content: 'Hi there' },
      { role: 'user', content: [{ type: 'text', text: 'How are you?' }] },
    ];

    const result = addCacheControl(messages);

    expect(result[0].content[0]).toEqual({
      type: 'text',
      text: 'Hello',
      cache_control: { type: 'ephemeral' },
    });
    expect(
      (result[2].content[0] as Anthropic.TextBlockParam).cache_control
    ).toEqual({ type: 'ephemeral' });
  });

  test('should handle less than two user messages', () => {
    const messages: AnthropicMessages = [
      { role: 'user', content: 'Hello' },
      { role: 'assistant', content: 'Hi there' },
    ];

    const result = addCacheControl(messages);

    // A single user message still receives the marker.
    expect(result[0].content[0]).toEqual({
      type: 'text',
      text: 'Hello',
      cache_control: { type: 'ephemeral' },
    });
    expect(result[1].content).toBe('Hi there');
  });

  test('should return original array if no user messages', () => {
    const messages: AnthropicMessages = [
      { role: 'assistant', content: 'Hi there' },
      { role: 'assistant', content: 'How can I help?' },
    ];

    const result = addCacheControl(messages);

    expect(result).toEqual(messages);
  });

  test('should handle empty array', () => {
    const messages: AnthropicMessages = [];
    const result = addCacheControl(messages);
    expect(result).toEqual([]);
  });

  test('should handle non-array input', () => {
    const messages = 'not an array';
    /** @ts-expect-error - This is a test */
    const result = addCacheControl(messages);
    // Non-array input is returned untouched.
    expect(result).toBe('not an array');
  });

  test('should not modify assistant messages', () => {
    const messages: AnthropicMessages = [
      { role: 'user', content: 'Hello' },
      { role: 'assistant', content: 'Hi there' },
      { role: 'user', content: 'How are you?' },
    ];

    const result = addCacheControl(messages);

    expect(result[1].content).toBe('Hi there');
  });

  test('should handle multiple content items in user messages', () => {
    const messages: AnthropicMessages = [
      {
        role: 'user',
        content: [
          { type: 'text', text: 'Hello' },
          {
            type: 'image',
            source: { type: 'url', url: 'http://example.com/image.jpg' },
          },
          { type: 'text', text: 'This is an image' },
        ],
      },
      { role: 'assistant', content: 'Hi there' },
      { role: 'user', content: 'How are you?' },
    ];

    const result = addCacheControl(messages);

    // Only the LAST text block of a multi-part message receives the marker.
    expect(result[0].content[0]).not.toHaveProperty('cache_control');
    expect(result[0].content[1]).not.toHaveProperty('cache_control');
    expect(
      (result[0].content[2] as Anthropic.TextBlockParam).cache_control
    ).toEqual({ type: 'ephemeral' });
    expect(result[2].content[0]).toEqual({
      type: 'text',
      text: 'How are you?',
      cache_control: { type: 'ephemeral' },
    });
  });

  test('should handle an array with mixed content types', () => {
    const messages: AnthropicMessages = [
      { role: 'user', content: 'Hello' },
      { role: 'assistant', content: 'Hi there' },
      { role: 'user', content: [{ type: 'text', text: 'How are you?' }] },
      { role: 'assistant', content: 'I\'m doing well, thanks!' },
      { role: 'user', content: 'Great!' },
    ];

    const result = addCacheControl(messages);

    expect(result[0].content).toEqual('Hello');
    expect(result[2].content[0]).toEqual({
      type: 'text',
      text: 'How are you?',
      cache_control: { type: 'ephemeral' },
    });
    expect(result[4].content).toEqual([
      {
        type: 'text',
        text: 'Great!',
        cache_control: { type: 'ephemeral' },
      },
    ]);
    // Assistant messages keep their original string content.
    expect(result[1].content).toBe('Hi there');
    expect(result[3].content).toBe('I\'m doing well, thanks!');
  });

  test('should handle edge case with multiple content types', () => {
    const messages: AnthropicMessages = [
      {
        role: 'user',
        content: [
          {
            type: 'image',
            source: {
              type: 'base64',
              media_type: 'image/png',
              data: 'some_base64_string',
            },
          },
          {
            type: 'image',
            source: {
              type: 'base64',
              media_type: 'image/png',
              data: 'another_base64_string',
            },
          },
          { type: 'text', text: 'what do all these images have in common' },
        ],
      },
      { role: 'assistant', content: 'I see multiple images.' },
      { role: 'user', content: 'Correct!' },
    ];

    const result = addCacheControl(messages);

    // Image blocks are never annotated; the trailing text block is.
    expect(result[0].content[0]).not.toHaveProperty('cache_control');
    expect(result[0].content[1]).not.toHaveProperty('cache_control');
    expect(
      (result[0].content[2] as Anthropic.ImageBlockParam).cache_control
    ).toEqual({ type: 'ephemeral' });
    expect(result[2].content[0]).toEqual({
      type: 'text',
      text: 'Correct!',
      cache_control: { type: 'ephemeral' },
    });
  });

  test('should handle user message with no text block', () => {
    const messages: AnthropicMessages = [
      {
        role: 'user',
        content: [
          {
            type: 'image',
            source: {
              type: 'base64',
              media_type: 'image/png',
              data: 'some_base64_string',
            },
          },
          {
            type: 'image',
            source: {
              type: 'base64',
              media_type: 'image/png',
              data: 'another_base64_string',
            },
          },
        ],
      },
      { role: 'assistant', content: 'I see two images.' },
      { role: 'user', content: 'Correct!' },
    ];

    const result = addCacheControl(messages);

    // An image-only user message is skipped (there is no text block to mark).
    expect(result[0].content[0]).not.toHaveProperty('cache_control');
    expect(result[0].content[1]).not.toHaveProperty('cache_control');
    expect(result[2].content[0]).toEqual({
      type: 'text',
      text: 'Correct!',
      cache_control: { type: 'ephemeral' },
    });
  });
});
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import type { AnthropicMessage } from '@/types/messages';
|
|
2
|
+
import type Anthropic from '@anthropic-ai/sdk';
|
|
3
|
+
import { BaseMessage } from '@langchain/core/messages';
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Anthropic API: Adds cache control to the appropriate user messages in the payload.
|
|
7
|
+
* @param messages - The array of message objects.
|
|
8
|
+
* @returns - The updated array of message objects with cache control added.
|
|
9
|
+
*/
|
|
10
|
+
export function addCacheControl<T extends AnthropicMessage | BaseMessage>(
|
|
11
|
+
messages: T[]
|
|
12
|
+
): T[] {
|
|
13
|
+
if (!Array.isArray(messages) || messages.length < 2) {
|
|
14
|
+
return messages;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
const updatedMessages = [...messages];
|
|
18
|
+
let userMessagesModified = 0;
|
|
19
|
+
|
|
20
|
+
for (
|
|
21
|
+
let i = updatedMessages.length - 1;
|
|
22
|
+
i >= 0 && userMessagesModified < 2;
|
|
23
|
+
i--
|
|
24
|
+
) {
|
|
25
|
+
const message = updatedMessages[i];
|
|
26
|
+
if ('getType' in message && message.getType() !== 'human') {
|
|
27
|
+
continue;
|
|
28
|
+
} else if ('role' in message && message.role !== 'user') {
|
|
29
|
+
continue;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
if (typeof message.content === 'string') {
|
|
33
|
+
message.content = [
|
|
34
|
+
{
|
|
35
|
+
type: 'text',
|
|
36
|
+
text: message.content,
|
|
37
|
+
cache_control: { type: 'ephemeral' },
|
|
38
|
+
},
|
|
39
|
+
];
|
|
40
|
+
userMessagesModified++;
|
|
41
|
+
} else if (Array.isArray(message.content)) {
|
|
42
|
+
for (let j = message.content.length - 1; j >= 0; j--) {
|
|
43
|
+
const contentPart = message.content[j];
|
|
44
|
+
if ('type' in contentPart && contentPart.type === 'text') {
|
|
45
|
+
(contentPart as Anthropic.TextBlockParam).cache_control = {
|
|
46
|
+
type: 'ephemeral',
|
|
47
|
+
};
|
|
48
|
+
userMessagesModified++;
|
|
49
|
+
break;
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
return updatedMessages;
|
|
56
|
+
}
|
|
// Unit tests for formatContentStrings, which collapses a message whose
// content array consists entirely of text blocks into a single plain string.
// Messages with mixed content (images, tool calls) are left untouched.
import {
  HumanMessage,
  AIMessage,
  SystemMessage,
} from '@langchain/core/messages';
import { formatContentStrings } from './content';
import { ContentTypes } from '@/common';

describe('formatContentStrings', () => {
  describe('Human messages', () => {
    it('should convert human message with all text blocks to string', () => {
      const messages = [
        new HumanMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'World' },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      // Text blocks are joined with a newline.
      expect(result[0].content).toBe('Hello\nWorld');
    });

    it('should not convert human message with mixed content types (text + image)', () => {
      const messages = [
        new HumanMessage({
          content: [
            { type: ContentTypes.TEXT, text: 'what do you see' },
            {
              type: 'image_url',
              image_url: {
                url: 'data:image/png;base64,iVBO_SOME_BASE64_DATA=',
                detail: 'auto',
              },
            },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      // Mixed content is preserved exactly as provided.
      expect(result[0].content).toEqual([
        { type: ContentTypes.TEXT, text: 'what do you see' },
        {
          type: 'image_url',
          image_url: {
            url: 'data:image/png;base64,iVBO_SOME_BASE64_DATA=',
            detail: 'auto',
          },
        },
      ]);
    });

    it('should leave string content unchanged', () => {
      const messages = [
        new HumanMessage({
          content: 'Hello World',
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      expect(result[0].content).toBe('Hello World');
    });

    it('should handle empty text blocks', () => {
      const messages = [
        new HumanMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: '' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'World' },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      // The empty block still contributes a newline separator.
      expect(result[0].content).toBe('Hello\n\nWorld');
    });

    it('should handle null/undefined text values', () => {
      const messages = [
        new HumanMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: null },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: undefined },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'World' },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      // Nullish text values join as empty strings, keeping their separators.
      expect(result[0].content).toBe('Hello\n\n\nWorld');
    });
  });

  describe('AI messages', () => {
    it('should convert AI message with all text blocks to string', () => {
      const messages = [
        new AIMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Hello' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'World' },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      expect(result[0].content).toBe('Hello\nWorld');
      // Message class (and therefore type) is preserved.
      expect(result[0].getType()).toBe('ai');
    });

    it('should not convert AI message with mixed content types', () => {
      const messages = [
        new AIMessage({
          content: [
            {
              type: ContentTypes.TEXT,
              [ContentTypes.TEXT]: 'Here is an image',
            },
            {
              type: ContentTypes.TOOL_CALL,
              tool_call: { name: 'generate_image' },
            },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      expect(result[0].content).toEqual([
        { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Here is an image' },
        { type: ContentTypes.TOOL_CALL, tool_call: { name: 'generate_image' } },
      ]);
    });
  });

  describe('System messages', () => {
    it('should convert System message with all text blocks to string', () => {
      const messages = [
        new SystemMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'System' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Message' },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      expect(result[0].content).toBe('System\nMessage');
      expect(result[0].getType()).toBe('system');
    });
  });

  describe('Mixed message types', () => {
    it('should process all valid message types in mixed array', () => {
      const messages = [
        new HumanMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Human' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Message' },
          ],
        }),
        new AIMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'AI' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Response' },
          ],
        }),
        new SystemMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'System' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Prompt' },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(3);
      // All messages should be converted
      expect(result[0].content).toBe('Human\nMessage');
      expect(result[0].getType()).toBe('human');

      expect(result[1].content).toBe('AI\nResponse');
      expect(result[1].getType()).toBe('ai');

      expect(result[2].content).toBe('System\nPrompt');
      expect(result[2].getType()).toBe('system');
    });
  });

  describe('Edge cases', () => {
    it('should handle empty array', () => {
      const result = formatContentStrings([]);
      expect(result).toEqual([]);
    });

    it('should handle messages with non-array content', () => {
      const messages = [
        new HumanMessage({
          content: 'This is a string content',
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      expect(result[0].content).toBe('This is a string content');
    });

    it('should trim the final concatenated string', () => {
      const messages = [
        new HumanMessage({
          content: [
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: ' Hello ' },
            { type: ContentTypes.TEXT, [ContentTypes.TEXT]: ' World ' },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(1);
      // Only the outer edges of the joined string are trimmed; interior
      // whitespace around the separator is kept.
      expect(result[0].content).toBe('Hello \n World');
    });
  });

  describe('Real-world scenarios', () => {
    it('should handle the exact scenario from the issue', () => {
      const messages = [
        new HumanMessage({
          content: [
            {
              type: 'text',
              text: 'hi there',
            },
          ],
        }),
        new AIMessage({
          content: [
            {
              type: 'text',
              text: 'Hi Danny! How can I help you today?',
            },
          ],
        }),
        new HumanMessage({
          content: [
            {
              type: 'text',
              text: 'what do you see',
            },
            {
              type: 'image_url',
              image_url: {
                url: 'data:image/png;base64,iVBO_SOME_BASE64_DATA=',
                detail: 'auto',
              },
            },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(3);

      // First human message (all text) should be converted
      expect(result[0].content).toBe('hi there');
      expect(result[0].getType()).toBe('human');

      // AI message (all text) should now also be converted
      expect(result[1].content).toBe('Hi Danny! How can I help you today?');
      expect(result[1].getType()).toBe('ai');

      // Third message (mixed content) should remain unchanged
      expect(result[2].content).toEqual([
        {
          type: 'text',
          text: 'what do you see',
        },
        {
          type: 'image_url',
          image_url: {
            url: 'data:image/png;base64,iVBO_SOME_BASE64_DATA=',
            detail: 'auto',
          },
        },
      ]);
    });

    it('should handle messages with tool calls', () => {
      const messages = [
        new HumanMessage({
          content: [
            {
              type: ContentTypes.TEXT,
              [ContentTypes.TEXT]: 'Please use the calculator',
            },
            {
              type: ContentTypes.TOOL_CALL,
              tool_call: { name: 'calculator', args: '{"a": 1, "b": 2}' },
            },
          ],
        }),
        new AIMessage({
          content: [
            {
              type: ContentTypes.TEXT,
              [ContentTypes.TEXT]: 'I will calculate that for you',
            },
            {
              type: ContentTypes.TOOL_CALL,
              tool_call: { name: 'calculator', args: '{"a": 1, "b": 2}' },
            },
          ],
        }),
      ];

      const result = formatContentStrings(messages);

      expect(result).toHaveLength(2);
      // Should not convert because not all blocks are text
      expect(result[0].content).toEqual([
        {
          type: ContentTypes.TEXT,
          [ContentTypes.TEXT]: 'Please use the calculator',
        },
        {
          type: ContentTypes.TOOL_CALL,
          tool_call: { name: 'calculator', args: '{"a": 1, "b": 2}' },
        },
      ]);
      expect(result[1].content).toEqual([
        {
          type: ContentTypes.TEXT,
          [ContentTypes.TEXT]: 'I will calculate that for you',
        },
        {
          type: ContentTypes.TOOL_CALL,
          tool_call: { name: 'calculator', args: '{"a": 1, "b": 2}' },
        },
      ]);
    });
  });
});