@lobehub/chat 1.15.29 → 1.15.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of @lobehub/chat might be problematic.

package/CHANGELOG.md CHANGED
@@ -2,6 +2,31 @@
 
 # Changelog
 
+### [Version 1.15.30](https://github.com/lobehub/lobe-chat/compare/v1.15.29...v1.15.30)
+
+<sup>Released on **2024-09-09**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix claude 3.5 image with s3 url.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix claude 3.5 image with s3 url, closes [#3870](https://github.com/lobehub/lobe-chat/issues/3870) ([89c8dd4](https://github.com/lobehub/lobe-chat/commit/89c8dd4))
+
+</details>
+
+<div align="right">
+
+[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+</div>
+
 ### [Version 1.15.29](https://github.com/lobehub/lobe-chat/compare/v1.15.28...v1.15.29)
 
 <sup>Released on **2024-09-09**</sup>
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.15.29",
+  "version": "1.15.30",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/src/libs/agent-runtime/anthropic/index.ts CHANGED
@@ -29,7 +29,8 @@ export class LobeAnthropicAI implements LobeRuntimeAI {
 
   async chat(payload: ChatStreamPayload, options?: ChatCompetitionOptions) {
     try {
-      const anthropicPayload = this.buildAnthropicPayload(payload);
+      const anthropicPayload = await this.buildAnthropicPayload(payload);
+
       const response = await this.client.messages.create(
         { ...anthropicPayload, stream: true },
         {
@@ -86,20 +87,17 @@ export class LobeAnthropicAI implements LobeRuntimeAI {
     }
   }
 
-  private buildAnthropicPayload(payload: ChatStreamPayload) {
+  private async buildAnthropicPayload(payload: ChatStreamPayload) {
     const { messages, model, max_tokens = 4096, temperature, top_p, tools } = payload;
     const system_message = messages.find((m) => m.role === 'system');
     const user_messages = messages.filter((m) => m.role !== 'system');
 
     return {
       max_tokens,
-      messages: buildAnthropicMessages(user_messages),
+      messages: await buildAnthropicMessages(user_messages),
       model,
       system: system_message?.content as string,
-      temperature:
-        payload.temperature !== undefined
-          ? temperature / 2
-          : undefined,
+      temperature: payload.temperature !== undefined ? temperature / 2 : undefined,
       tools: buildAnthropicTools(tools),
       top_p,
     } satisfies Anthropic.MessageCreateParams;
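
A note on the reflowed `temperature` line: the change is purely cosmetic and preserves the existing mapping, in which an OpenAI-style value in [0, 2] is halved into Anthropic's [0, 1] range, and an unset value stays unset. A standalone sketch of that mapping (the `mapTemperature` helper is hypothetical, for illustration only):

```ts
// Hypothetical standalone version of the temperature ternary above.
const mapTemperature = (temperature?: number): number | undefined =>
  temperature !== undefined ? temperature / 2 : undefined;

mapTemperature(1.2); // 0.6: an OpenAI-style 0-2 value scaled into Anthropic's 0-1 range
mapTemperature(undefined); // undefined: the SDK default applies
```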
package/src/libs/agent-runtime/bedrock/index.ts CHANGED
@@ -62,7 +62,7 @@ export class LobeBedrockAI implements LobeRuntimeAI {
       body: JSON.stringify({
         anthropic_version: 'bedrock-2023-05-31',
         max_tokens: max_tokens || 4096,
-        messages: buildAnthropicMessages(user_messages),
+        messages: await buildAnthropicMessages(user_messages),
         system: system_message?.content as string,
         temperature: temperature / 2,
         tools: buildAnthropicTools(tools),
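
Both the Anthropic and Bedrock runtimes now await the shared `buildAnthropicMessages` helper, so image parts reach the API in Anthropic's base64 source form whether the client supplied a data URI or a remote (e.g. S3) URL. For reference, the block the helper produces for a fetched URL looks like this (values illustrative):

```ts
import Anthropic from '@anthropic-ai/sdk';

// Illustrative shape of the block buildAnthropicBlock returns for a remote image URL.
const imageBlock: Anthropic.ImageBlockParam = {
  source: {
    data: '<base64-encoded image bytes>', // produced by imageUrlToBase64
    media_type: 'image/png', // falls back to 'image/png' when the URL carries no recognizable mime type
    type: 'base64',
  },
  type: 'image',
};
```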
package/src/libs/agent-runtime/utils/anthropicHelpers.test.ts CHANGED
@@ -1,6 +1,8 @@
 import { OpenAI } from 'openai';
 import { describe, expect, it } from 'vitest';
 
+import { imageUrlToBase64 } from '@/utils/imageToBase64';
+
 import { OpenAIChatMessage, UserMessageContentPart } from '../types/chat';
 import {
   buildAnthropicBlock,
@@ -10,28 +12,30 @@ import {
 } from './anthropicHelpers';
 import { parseDataUri } from './uriParser';
 
-describe('anthropicHelpers', () => {
-  // Mock the parseDataUri function since it's an implementation detail
-  vi.mock('./uriParser', () => ({
-    parseDataUri: vi.fn().mockReturnValue({
-      mimeType: 'image/jpeg',
-      base64: 'base64EncodedString',
-    }),
-  }));
+// Mock the parseDataUri function since it's an implementation detail
+vi.mock('./uriParser', () => ({
+  parseDataUri: vi.fn().mockReturnValue({
+    mimeType: 'image/jpeg',
+    base64: 'base64EncodedString',
+    type: 'base64',
+  }),
+}));
+vi.mock('@/utils/imageToBase64');
 
+describe('anthropicHelpers', () => {
   describe('buildAnthropicBlock', () => {
-    it('should return the content as is for text type', () => {
+    it('should return the content as is for text type', async () => {
       const content: UserMessageContentPart = { type: 'text', text: 'Hello!' };
-      const result = buildAnthropicBlock(content);
+      const result = await buildAnthropicBlock(content);
       expect(result).toEqual(content);
     });
 
-    it('should transform an image URL into an Anthropic.ImageBlockParam', () => {
+    it('should transform an image URL into an Anthropic.ImageBlockParam', async () => {
       const content: UserMessageContentPart = {
         type: 'image_url',
         image_url: { url: 'data:image/jpeg;base64,base64EncodedString' },
       };
-      const result = buildAnthropicBlock(content);
+      const result = await buildAnthropicBlock(content);
       expect(parseDataUri).toHaveBeenCalledWith(content.image_url.url);
       expect(result).toEqual({
         source: {
@@ -42,28 +46,96 @@ describe('anthropicHelpers', () => {
         type: 'image',
       });
     });
+
+    it('should transform a regular image URL into an Anthropic.ImageBlockParam', async () => {
+      vi.mocked(parseDataUri).mockReturnValueOnce({
+        mimeType: 'image/png',
+        base64: null,
+        type: 'url',
+      });
+      vi.mocked(imageUrlToBase64).mockResolvedValue('convertedBase64String');
+
+      const content = {
+        type: 'image_url',
+        image_url: { url: 'https://example.com/image.png' },
+      } as const;
+
+      const result = await buildAnthropicBlock(content);
+
+      expect(parseDataUri).toHaveBeenCalledWith(content.image_url.url);
+      expect(imageUrlToBase64).toHaveBeenCalledWith(content.image_url.url);
+      expect(result).toEqual({
+        source: {
+          data: 'convertedBase64String',
+          media_type: 'image/png',
+          type: 'base64',
+        },
+        type: 'image',
+      });
+    });
+
+    it('should use default media_type for URL images when mimeType is not provided', async () => {
+      vi.mocked(parseDataUri).mockReturnValueOnce({
+        mimeType: null,
+        base64: null,
+        type: 'url',
+      });
+      vi.mocked(imageUrlToBase64).mockResolvedValue('convertedBase64String');
+
+      const content = {
+        type: 'image_url',
+        image_url: { url: 'https://example.com/image' },
+      } as const;
+
+      const result = await buildAnthropicBlock(content);
+
+      expect(result).toEqual({
+        source: {
+          data: 'convertedBase64String',
+          media_type: 'image/png',
+          type: 'base64',
+        },
+        type: 'image',
+      });
+    });
+
+    it('should throw an error for invalid image URLs', async () => {
+      vi.mocked(parseDataUri).mockReturnValueOnce({
+        mimeType: null,
+        base64: null,
+        // @ts-ignore
+        type: 'invalid',
+      });
+
+      const content = {
+        type: 'image_url',
+        image_url: { url: 'invalid-url' },
+      } as const;
+
+      await expect(buildAnthropicBlock(content)).rejects.toThrow('Invalid image URL: invalid-url');
+    });
   });
 
   describe('buildAnthropicMessage', () => {
-    it('should correctly convert system message to assistant message', () => {
+    it('should correctly convert system message to assistant message', async () => {
       const message: OpenAIChatMessage = {
         content: [{ type: 'text', text: 'Hello!' }],
         role: 'system',
       };
-      const result = buildAnthropicMessage(message);
+      const result = await buildAnthropicMessage(message);
       expect(result).toEqual({ content: [{ type: 'text', text: 'Hello!' }], role: 'user' });
     });
 
-    it('should correctly convert user message with string content', () => {
+    it('should correctly convert user message with string content', async () => {
       const message: OpenAIChatMessage = {
         content: 'Hello!',
         role: 'user',
       };
-      const result = buildAnthropicMessage(message);
+      const result = await buildAnthropicMessage(message);
       expect(result).toEqual({ content: 'Hello!', role: 'user' });
     });
 
-    it('should correctly convert user message with content parts', () => {
+    it('should correctly convert user message with content parts', async () => {
       const message: OpenAIChatMessage = {
         content: [
           { type: 'text', text: 'Check out this image:' },
@@ -71,19 +143,19 @@ describe('anthropicHelpers', () => {
         ],
         role: 'user',
       };
-      const result = buildAnthropicMessage(message);
+      const result = await buildAnthropicMessage(message);
       expect(result.role).toBe('user');
       expect(result.content).toHaveLength(2);
       expect((result.content[1] as any).type).toBe('image');
     });
 
-    it('should correctly convert tool message', () => {
+    it('should correctly convert tool message', async () => {
       const message: OpenAIChatMessage = {
         content: 'Tool result content',
         role: 'tool',
         tool_call_id: 'tool123',
       };
-      const result = buildAnthropicMessage(message);
+      const result = await buildAnthropicMessage(message);
       expect(result.role).toBe('user');
       expect(result.content).toEqual([
         {
@@ -94,7 +166,7 @@ describe('anthropicHelpers', () => {
       ]);
     });
 
-    it('should correctly convert assistant message with tool calls', () => {
+    it('should correctly convert assistant message with tool calls', async () => {
       const message: OpenAIChatMessage = {
         content: 'Here is the result:',
         role: 'assistant',
@@ -109,7 +181,7 @@ describe('anthropicHelpers', () => {
          },
        ],
      };
-      const result = buildAnthropicMessage(message);
+      const result = await buildAnthropicMessage(message);
       expect(result.role).toBe('assistant');
       expect(result.content).toEqual([
         { text: 'Here is the result:', type: 'text' },
@@ -122,12 +194,12 @@ describe('anthropicHelpers', () => {
       ]);
     });
 
-    it('should correctly convert function message', () => {
+    it('should correctly convert function message', async () => {
       const message: OpenAIChatMessage = {
         content: 'def hello(name):\n return f"Hello {name}"',
         role: 'function',
       };
-      const result = buildAnthropicMessage(message);
+      const result = await buildAnthropicMessage(message);
       expect(result).toEqual({
         content: 'def hello(name):\n return f"Hello {name}"',
         role: 'assistant',
@@ -136,13 +208,13 @@ describe('anthropicHelpers', () => {
   });
 
   describe('buildAnthropicMessages', () => {
-    it('should correctly convert OpenAI Messages to Anthropic Messages', () => {
+    it('should correctly convert OpenAI Messages to Anthropic Messages', async () => {
      const messages: OpenAIChatMessage[] = [
        { content: 'Hello', role: 'user' },
        { content: 'Hi', role: 'assistant' },
      ];
 
-      const result = buildAnthropicMessages(messages);
+      const result = await buildAnthropicMessages(messages);
       expect(result).toHaveLength(2);
       expect(result).toEqual([
         { content: 'Hello', role: 'user' },
@@ -150,14 +222,14 @@ describe('anthropicHelpers', () => {
       ]);
     });
 
-    it('messages should end with user', () => {
+    it('messages should end with user', async () => {
       const messages: OpenAIChatMessage[] = [
         { content: 'Hello', role: 'user' },
         { content: 'Hello', role: 'user' },
         { content: 'Hi', role: 'assistant' },
       ];
 
-      const contents = buildAnthropicMessages(messages);
+      const contents = await buildAnthropicMessages(messages);
 
       expect(contents).toHaveLength(4);
       expect(contents).toEqual([
@@ -168,7 +240,7 @@ describe('anthropicHelpers', () => {
       ]);
     });
 
-    it('messages should pair', () => {
+    it('messages should pair', async () => {
       const messages: OpenAIChatMessage[] = [
         { content: 'a', role: 'assistant' },
         { content: 'b', role: 'assistant' },
@@ -177,7 +249,7 @@ describe('anthropicHelpers', () => {
         { content: '你好', role: 'user' },
       ];
 
-      const contents = buildAnthropicMessages(messages);
+      const contents = await buildAnthropicMessages(messages);
 
       expect(contents).toHaveLength(9);
       expect(contents).toEqual([
@@ -193,7 +265,7 @@ describe('anthropicHelpers', () => {
       ]);
     });
 
-    it('should correctly convert OpenAI tool message to Anthropic format', () => {
+    it('should correctly convert OpenAI tool message to Anthropic format', async () => {
       const messages: OpenAIChatMessage[] = [
         {
           content: '告诉我杭州和北京的天气,先回答我好的',
@@ -242,7 +314,7 @@ describe('anthropicHelpers', () => {
        },
      ];
 
-      const contents = buildAnthropicMessages(messages);
+      const contents = await buildAnthropicMessages(messages);
 
       expect(contents).toEqual([
         { content: '告诉我杭州和北京的天气,先回答我好的', role: 'user' },
package/src/libs/agent-runtime/utils/anthropicHelpers.ts CHANGED
@@ -1,35 +1,52 @@
 import Anthropic from '@anthropic-ai/sdk';
 import OpenAI from 'openai';
 
+import { imageUrlToBase64 } from '@/utils/imageToBase64';
+
 import { OpenAIChatMessage, UserMessageContentPart } from '../types';
 import { parseDataUri } from './uriParser';
 
-export const buildAnthropicBlock = (
+export const buildAnthropicBlock = async (
   content: UserMessageContentPart,
-): Anthropic.ContentBlock | Anthropic.ImageBlockParam => {
+): Promise<Anthropic.ContentBlock | Anthropic.ImageBlockParam> => {
   switch (content.type) {
     case 'text': {
       return content;
     }
 
     case 'image_url': {
-      const { mimeType, base64 } = parseDataUri(content.image_url.url);
+      const { mimeType, base64, type } = parseDataUri(content.image_url.url);
 
-      return {
-        source: {
-          data: base64 as string,
-          media_type: mimeType as Anthropic.ImageBlockParam.Source['media_type'],
-          type: 'base64',
-        },
-        type: 'image',
-      };
+      if (type === 'base64')
+        return {
+          source: {
+            data: base64 as string,
+            media_type: mimeType as Anthropic.ImageBlockParam.Source['media_type'],
+            type: 'base64',
+          },
+          type: 'image',
+        };
+
+      if (type === 'url') {
+        const base64 = await imageUrlToBase64(content.image_url.url);
+        return {
+          source: {
+            data: base64 as string,
+            media_type: (mimeType as Anthropic.ImageBlockParam.Source['media_type']) || 'image/png',
+            type: 'base64',
+          },
+          type: 'image',
+        };
+      }
+
+      throw new Error(`Invalid image URL: ${content.image_url.url}`);
     }
   }
 };
 
-export const buildAnthropicMessage = (
+export const buildAnthropicMessage = async (
   message: OpenAIChatMessage,
-): Anthropic.Messages.MessageParam => {
+): Promise<Anthropic.Messages.MessageParam> => {
   const content = message.content as string | UserMessageContentPart[];
 
   switch (message.role) {
@@ -39,7 +56,10 @@ export const buildAnthropicMessage = (
 
     case 'user': {
       return {
-        content: typeof content === 'string' ? content : content.map((c) => buildAnthropicBlock(c)),
+        content:
+          typeof content === 'string'
+            ? content
+            : await Promise.all(content.map(async (c) => await buildAnthropicBlock(c))),
         role: 'user',
       };
     }
@@ -90,14 +110,15 @@ export const buildAnthropicMessage = (
   }
 };
 
-export const buildAnthropicMessages = (
+export const buildAnthropicMessages = async (
   oaiMessages: OpenAIChatMessage[],
-): Anthropic.Messages.MessageParam[] => {
+): Promise<Anthropic.Messages.MessageParam[]> => {
   const messages: Anthropic.Messages.MessageParam[] = [];
   let lastRole = 'assistant';
   let pendingToolResults: Anthropic.ToolResultBlockParam[] = [];
 
-  oaiMessages.forEach((message, index) => {
+  for (const message of oaiMessages) {
+    const index = oaiMessages.indexOf(message);
     // refs: https://docs.anthropic.com/claude/docs/tool-use#tool-use-and-tool-result-content-blocks
     if (message.role === 'tool') {
       pendingToolResults.push({
@@ -117,7 +138,7 @@ export const buildAnthropicMessages = (
         lastRole = 'user';
       }
     } else {
-      const anthropicMessage = buildAnthropicMessage(message);
+      const anthropicMessage = await buildAnthropicMessage(message);
 
       if (lastRole === anthropicMessage.role) {
         messages.push({ content: '_', role: lastRole === 'user' ? 'assistant' : 'user' });
@@ -126,7 +147,7 @@ export const buildAnthropicMessages = (
       lastRole = anthropicMessage.role;
       messages.push(anthropicMessage);
     }
-  });
+  }
 
   return messages;
 };
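
Two notes on the helper changes. First, the new branch keys off the `type` field that `parseDataUri` reports; the parser itself is untouched by this diff, but the test mocks imply a result shape along these lines (inferred, not authoritative):

```ts
// Inferred from the vi.mock calls in anthropicHelpers.test.ts; the real
// definition lives in ./uriParser and may differ.
interface ParsedDataUri {
  base64: string | null; // payload when the input is a data: URI
  mimeType: string | null; // e.g. 'image/jpeg'
  type: 'base64' | 'url' | null; // 'url' covers http(s) links such as S3 URLs
}
```

Second, `buildAnthropicMessages` switches from `forEach` to `for...of` because an async callback passed to `forEach` is fire-and-forget; `for...of` lets each `await buildAnthropicMessage(...)` finish before the next message is processed. The `oaiMessages.indexOf(message)` line preserves the old `index` variable, at the cost of an O(n²) scan.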
package/src/utils/imageToBase64.test.ts ADDED
@@ -0,0 +1,90 @@
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { imageToBase64, imageUrlToBase64 } from './imageToBase64';
+
+describe('imageToBase64', () => {
+  let mockImage: HTMLImageElement;
+  let mockCanvas: HTMLCanvasElement;
+  let mockContext: CanvasRenderingContext2D;
+
+  beforeEach(() => {
+    mockImage = {
+      width: 200,
+      height: 100,
+    } as HTMLImageElement;
+
+    mockContext = {
+      drawImage: vi.fn(),
+    } as unknown as CanvasRenderingContext2D;
+
+    mockCanvas = {
+      width: 0,
+      height: 0,
+      getContext: vi.fn().mockReturnValue(mockContext),
+      toDataURL: vi.fn().mockReturnValue('data:image/webp;base64,mockBase64Data'),
+    } as unknown as HTMLCanvasElement;
+
+    vi.spyOn(document, 'createElement').mockReturnValue(mockCanvas);
+  });
+
+  afterEach(() => {
+    vi.restoreAllMocks();
+  });
+
+  it('should convert image to base64 with correct size and type', () => {
+    const result = imageToBase64({ img: mockImage, size: 100, type: 'image/jpeg' });
+
+    expect(document.createElement).toHaveBeenCalledWith('canvas');
+    expect(mockCanvas.width).toBe(100);
+    expect(mockCanvas.height).toBe(100);
+    expect(mockCanvas.getContext).toHaveBeenCalledWith('2d');
+    expect(mockContext.drawImage).toHaveBeenCalledWith(mockImage, 50, 0, 100, 100, 0, 0, 100, 100);
+    expect(mockCanvas.toDataURL).toHaveBeenCalledWith('image/jpeg');
+    expect(result).toBe('data:image/webp;base64,mockBase64Data');
+  });
+
+  it('should use default type when not specified', () => {
+    imageToBase64({ img: mockImage, size: 100 });
+    expect(mockCanvas.toDataURL).toHaveBeenCalledWith('image/webp');
+  });
+
+  it('should handle taller images correctly', () => {
+    mockImage.width = 100;
+    mockImage.height = 200;
+    imageToBase64({ img: mockImage, size: 100 });
+    expect(mockContext.drawImage).toHaveBeenCalledWith(mockImage, 0, 50, 100, 100, 0, 0, 100, 100);
+  });
+});
+
+describe('imageUrlToBase64', () => {
+  const mockFetch = vi.fn();
+  const mockArrayBuffer = new ArrayBuffer(8);
+
+  beforeEach(() => {
+    global.fetch = mockFetch;
+    global.btoa = vi.fn().mockReturnValue('mockBase64String');
+  });
+
+  afterEach(() => {
+    vi.restoreAllMocks();
+  });
+
+  it('should convert image URL to base64 string', async () => {
+    mockFetch.mockResolvedValue({
+      arrayBuffer: () => Promise.resolve(mockArrayBuffer),
+    });
+
+    const result = await imageUrlToBase64('https://example.com/image.jpg');
+
+    expect(mockFetch).toHaveBeenCalledWith('https://example.com/image.jpg');
+    expect(global.btoa).toHaveBeenCalled();
+    expect(result).toBe('mockBase64String');
+  });
+
+  it('should throw an error when fetch fails', async () => {
+    const mockError = new Error('Fetch failed');
+    mockFetch.mockRejectedValue(mockError);
+
+    await expect(imageUrlToBase64('https://example.com/image.jpg')).rejects.toThrow('Fetch failed');
+  });
+});
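
Taken together, these tests pin down `imageUrlToBase64`'s contract: fetch the URL, read the body as an ArrayBuffer, and base64-encode it with `btoa`. A minimal sketch consistent with those assertions (the shipped implementation in `src/utils/imageToBase64.ts` may differ in detail):

```ts
// Minimal sketch of imageUrlToBase64, matching the behavior the tests assert.
export const imageUrlToBase64 = async (imageUrl: string): Promise<string> => {
  const res = await fetch(imageUrl);
  const buffer = await res.arrayBuffer();

  // btoa expects a binary string, so map the bytes to char codes first.
  return btoa(String.fromCharCode(...new Uint8Array(buffer)));
};
```

One caveat with this approach: spreading a large `Uint8Array` into `String.fromCharCode` can overflow the call stack for big images, so a chunked loop (or `Buffer.from(buffer).toString('base64')` in Node) is the more robust encoding path.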