@lobehub/chat 1.70.11 → 1.71.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/sync-database-schema.yml +25 -0
- package/CHANGELOG.md +25 -0
- package/changelog/v1.json +9 -0
- package/docs/developer/database-schema.dbml +569 -0
- package/locales/ar/models.json +3 -0
- package/locales/bg-BG/models.json +3 -0
- package/locales/de-DE/models.json +3 -0
- package/locales/en-US/models.json +3 -0
- package/locales/es-ES/models.json +3 -0
- package/locales/fa-IR/models.json +3 -0
- package/locales/fr-FR/models.json +3 -0
- package/locales/it-IT/models.json +3 -0
- package/locales/ja-JP/models.json +3 -0
- package/locales/ko-KR/models.json +3 -0
- package/locales/nl-NL/models.json +3 -0
- package/locales/pl-PL/models.json +3 -0
- package/locales/pt-BR/models.json +3 -0
- package/locales/ru-RU/models.json +3 -0
- package/locales/tr-TR/models.json +3 -0
- package/locales/vi-VN/models.json +3 -0
- package/locales/zh-CN/models.json +3 -0
- package/locales/zh-TW/models.json +3 -0
- package/package.json +6 -2
- package/scripts/dbmlWorkflow/index.ts +11 -0
- package/src/config/aiModels/google.ts +17 -0
- package/src/database/client/migrations.json +4 -4
- package/src/database/server/models/message.ts +20 -9
- package/src/database/server/models/user.test.ts +58 -0
- package/src/features/AlertBanner/CloudBanner.tsx +1 -1
- package/src/features/Conversation/Messages/Assistant/index.tsx +4 -1
- package/src/features/Conversation/Messages/User/index.tsx +4 -4
- package/src/libs/agent-runtime/google/index.ts +8 -2
- package/src/libs/agent-runtime/utils/streams/google-ai.test.ts +99 -0
- package/src/libs/agent-runtime/utils/streams/google-ai.ts +69 -23
- package/src/libs/agent-runtime/utils/streams/protocol.ts +2 -0
- package/src/services/chat.ts +33 -15
- package/src/services/file/client.ts +3 -1
- package/src/services/message/server.ts +2 -2
- package/src/services/message/type.ts +2 -2
- package/src/services/upload.ts +82 -1
- package/src/store/chat/slices/aiChat/actions/generateAIChat.ts +44 -4
- package/src/store/chat/slices/message/action.ts +3 -0
- package/src/store/file/slices/upload/action.ts +36 -13
- package/src/store/file/store.ts +2 -0
- package/src/types/files/upload.ts +7 -0
- package/src/types/message/base.ts +22 -1
- package/src/types/message/chat.ts +1 -6
- package/src/types/message/image.ts +11 -0
- package/src/types/message/index.ts +1 -0
- package/src/utils/fetch/fetchSSE.ts +24 -1
package/src/services/chat.ts
CHANGED
@@ -438,6 +438,8 @@ class ChatService {
       provider: params.provider!,
     });
 
+    // remove plugins
+    delete params.plugins;
     await this.getChatCompletion(
       { ...params, messages: oaiMessages, tools },
       {
@@ -474,7 +476,7 @@ class ChatService {
     // handle content type for vision model
     // for the models with visual ability, add image url to content
     // refs: https://platform.openai.com/docs/guides/vision/quick-start
-    const
+    const getUserContent = (m: ChatMessage) => {
       // only if message doesn't have images and files, then return the plain content
       if ((!m.imageList || m.imageList.length === 0) && (!m.fileList || m.fileList.length === 0))
         return m.content;
@@ -490,27 +492,43 @@ class ChatService {
       ] as UserMessageContentPart[];
     };
 
+    const getAssistantContent = (m: ChatMessage) => {
+      // signature is a signal of anthropic thinking mode
+      const shouldIncludeThinking = m.reasoning && !!m.reasoning?.signature;
+
+      if (shouldIncludeThinking) {
+        return [
+          {
+            signature: m.reasoning!.signature,
+            thinking: m.reasoning!.content,
+            type: 'thinking',
+          },
+          { text: m.content, type: 'text' },
+        ] as UserMessageContentPart[];
+      }
+      // only if message doesn't have images and files, then return the plain content
+
+      if (m.imageList && m.imageList.length > 0) {
+        return [
+          !!m.content ? { text: m.content, type: 'text' } : undefined,
+          ...m.imageList.map(
+            (i) => ({ image_url: { detail: 'auto', url: i.url }, type: 'image_url' }) as const,
+          ),
+        ].filter(Boolean) as UserMessageContentPart[];
+      }
+
+      return m.content;
+    };
+
     let postMessages = messages.map((m): OpenAIChatMessage => {
       const supportTools = isCanUseFC(model, provider);
       switch (m.role) {
         case 'user': {
-          return { content:
+          return { content: getUserContent(m), role: m.role };
         }
 
         case 'assistant': {
-
-          const shouldIncludeThinking = m.reasoning && !!m.reasoning?.signature;
-
-          const content = shouldIncludeThinking
-            ? [
-                {
-                  signature: m.reasoning!.signature,
-                  thinking: m.reasoning!.content,
-                  type: 'thinking',
-                } as any,
-                { text: m.content, type: 'text' },
-              ]
-            : m.content;
+          const content = getAssistantContent(m);
 
           if (!supportTools) {
             return { content, role: m.role };
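For orientation (not part of the diff): the refactor above extracts `getUserContent` and `getAssistantContent` so the user and assistant branches of `postMessages` share one code path. When a stored assistant message carries a `reasoning.signature` (Anthropic thinking mode), it is serialized as a `thinking` part followed by a `text` part. A minimal sketch of that shape, using a hypothetical fixture rather than real data:

```ts
// Hypothetical assistant message and the content parts getAssistantContent would produce for it.
const assistantMessage = {
  content: 'The integral evaluates to π/4.',
  reasoning: { content: 'Substitute u = tan(x) and simplify…', signature: 'sig_abc123' },
};

const expectedContentParts = [
  {
    signature: assistantMessage.reasoning.signature,
    thinking: assistantMessage.reasoning.content,
    type: 'thinking',
  },
  { text: assistantMessage.content, type: 'text' },
];
```

Assistant messages that carry an `imageList` are serialized the same way as vision user content: an optional `text` part plus one `image_url` part per image.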
package/src/services/file/client.ts
CHANGED
@@ -11,6 +11,8 @@ export class ClientService extends BaseClientService implements IFileService {
   }
 
   createFile: IFileService['createFile'] = async (file) => {
+    const { isExist } = await this.fileModel.checkHash(file.hash!);
+
     // save to local storage
     // we may want to save to a remote server later
     const res = await this.fileModel.create(
@@ -23,7 +25,7 @@ export class ClientService extends BaseClientService implements IFileService {
         size: file.size,
         url: file.url!,
       },
-
+      !isExist,
     );
 
     // get file to base64 url
package/src/services/message/server.ts
CHANGED
@@ -64,8 +64,8 @@ export class ServerService implements IMessageService {
    return lambdaClient.message.updateMessagePlugin.mutate({ id, value: { arguments: args } });
  };
 
-  updateMessage: IMessageService['updateMessage'] = async (id,
-    return lambdaClient.message.update.mutate({ id, value
+  updateMessage: IMessageService['updateMessage'] = async (id, value) => {
+    return lambdaClient.message.update.mutate({ id, value });
  };
 
  updateMessageTranslate: IMessageService['updateMessageTranslate'] = async (id, translate) => {
package/src/services/message/type.ts
CHANGED
@@ -8,7 +8,7 @@ import {
   ChatTranslate,
   CreateMessageParams,
   MessageItem,
-  ModelRankItem,
+  ModelRankItem, UpdateMessageParams,
 } from '@/types/message';
 
 /* eslint-disable typescript-sort-keys/interface */
@@ -33,7 +33,7 @@ export interface IMessageService {
   rankModels(): Promise<ModelRankItem[]>;
   getHeatmaps(): Promise<HeatmapsProps['data']>;
   updateMessageError(id: string, error: ChatMessageError): Promise<any>;
-  updateMessage(id: string, message: Partial<
+  updateMessage(id: string, message: Partial<UpdateMessageParams>): Promise<any>;
   updateMessageTTS(id: string, tts: Partial<ChatTTS> | false): Promise<any>;
   updateMessageTranslate(id: string, translate: Partial<ChatTranslate> | false): Promise<any>;
   updateMessagePluginState(id: string, value: Record<string, any>): Promise<any>;
package/src/services/upload.ts
CHANGED
@@ -1,14 +1,95 @@
+import dayjs from 'dayjs';
+import { sha256 } from 'js-sha256';
+
 import { fileEnv } from '@/config/file';
+import { isServerMode } from '@/const/version';
+import { parseDataUri } from '@/libs/agent-runtime/utils/uriParser';
 import { edgeClient } from '@/libs/trpc/client';
 import { API_ENDPOINTS } from '@/services/_url';
 import { clientS3Storage } from '@/services/file/ClientS3';
-import { FileMetadata } from '@/types/files';
+import { FileMetadata, UploadBase64ToS3Result } from '@/types/files';
 import { FileUploadState, FileUploadStatus } from '@/types/files/upload';
 import { uuid } from '@/utils/uuid';
 
 export const UPLOAD_NETWORK_ERROR = 'NetWorkError';
 
+interface UploadFileToS3Options {
+  directory?: string;
+  filename?: string;
+  onProgress?: (status: FileUploadStatus, state: FileUploadState) => void;
+}
+
 class UploadService {
+  /**
+   * uniform upload method for both server and client
+   */
+  uploadFileToS3 = async (
+    file: File,
+    options: UploadFileToS3Options = {},
+  ): Promise<FileMetadata> => {
+    const { directory, onProgress } = options;
+
+    if (isServerMode) {
+      return this.uploadWithProgress(file, { directory, onProgress });
+    } else {
+      const fileArrayBuffer = await file.arrayBuffer();
+
+      // 1. check file hash
+      const hash = sha256(fileArrayBuffer);
+
+      return this.uploadToClientS3(hash, file);
+    }
+  };
+
+  uploadBase64ToS3 = async (
+    base64Data: string,
+    options: UploadFileToS3Options = {},
+  ): Promise<UploadBase64ToS3Result> => {
+    // parse the base64 data
+    const { base64, mimeType, type } = parseDataUri(base64Data);
+
+    if (!base64 || !mimeType || type !== 'base64') {
+      throw new Error('Invalid base64 data for image');
+    }
+
+    // convert the base64 string into a Blob
+    const byteCharacters = atob(base64);
+    const byteArrays = [];
+
+    // process in chunks to avoid memory issues
+    for (let offset = 0; offset < byteCharacters.length; offset += 1024) {
+      const slice = byteCharacters.slice(offset, offset + 1024);
+
+      const byteNumbers: number[] = Array.from({ length: slice.length });
+      for (let i = 0; i < slice.length; i++) {
+        byteNumbers[i] = slice.charCodeAt(i);
+      }
+
+      const byteArray = new Uint8Array(byteNumbers);
+      byteArrays.push(byteArray);
+    }
+
+    const blob = new Blob(byteArrays, { type: mimeType });
+
+    // determine the file extension
+    const fileExtension = mimeType.split('/')[1] || 'png';
+    const fileName = `${options.filename || `image_${dayjs().format('YYYY-MM-DD-hh-mm-ss')}`}.${fileExtension}`;
+
+    // create the File object
+    const file = new File([blob], fileName, { type: mimeType });
+
+    // upload with the uniform upload method
+    const metadata = await this.uploadFileToS3(file, options);
+    const hash = sha256(await file.arrayBuffer());
+
+    return {
+      fileType: mimeType,
+      hash,
+      metadata,
+      size: file.size,
+    };
+  };
+
   uploadWithProgress = async (
     file: File,
     {
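As a usage sketch (not part of the diff), `uploadBase64ToS3` accepts a data URI, converts it into a `File`, and routes it through the same `uploadFileToS3` path used for regular uploads. The call below assumes the module's existing `uploadService` singleton export and uses an illustrative 1×1 PNG data URI:

```ts
import { uploadService } from '@/services/upload';

// Illustrative 1x1 transparent PNG; any data:<mime>;base64,<payload> URI works.
const dataUri =
  'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg==';

const uploadGeneratedImage = async () => {
  const { fileType, hash, metadata, size } = await uploadService.uploadBase64ToS3(dataUri, {
    // optional: used as the stored file name instead of the image_<timestamp> default
    filename: 'generated-image',
  });

  console.log(fileType, size, hash, metadata.path);
};
```

In server mode the file goes through `uploadWithProgress`; otherwise it is hashed with `sha256` and stored via the client-side S3 path.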
package/src/store/chat/slices/aiChat/actions/generateAIChat.ts
CHANGED
@@ -18,9 +18,11 @@ import { getAiInfraStoreState } from '@/store/aiInfra/store';
 import { chatHelpers } from '@/store/chat/helpers';
 import { ChatStore } from '@/store/chat/store';
 import { messageMapKey } from '@/store/chat/utils/messageMapKey';
+import { getFileStoreState } from '@/store/file/store';
 import { useSessionStore } from '@/store/session';
 import { WebBrowsingManifest } from '@/tools/web-browsing';
 import { ChatMessage, CreateMessageParams, SendMessageParams } from '@/types/message';
+import { ChatImageItem } from '@/types/message/image';
 import { MessageSemanticSearchChunk } from '@/types/rag';
 import { setNamespace } from '@/utils/storeDebug';
 
@@ -533,6 +535,8 @@ export const generateAIChat: StateCreator<
      let thinking = '';
      let thinkingStartAt: number;
      let duration: number;
+      // to upload image
+      const uploadTasks: Map<string, Promise<{ id?: string; url?: string }>> = new Map();
 
      const historySummary = topicSelectors.currentActiveTopicSummary(get());
      await chatService.createAssistantMessageStream({
@@ -569,6 +573,21 @@ export const generateAIChat: StateCreator<
          });
        }
 
+        // wait for all image uploads to finish
+        let finalImages: ChatImageItem[] = [];
+
+        if (uploadTasks.size > 0) {
+          try {
+            // wait for every upload task to complete
+            const uploadResults = await Promise.all(uploadTasks.values());
+
+            // replace the original image data with the uploaded S3 URLs
+            finalImages = uploadResults.filter((i) => !!i.url) as ChatImageItem[];
+          } catch (error) {
+            console.error('Error waiting for image uploads:', error);
+          }
+        }
+
        if (toolCalls && toolCalls.length > 0) {
          internal_toggleToolCallingStreaming(messageId, undefined);
        }
@@ -579,6 +598,7 @@ export const generateAIChat: StateCreator<
          reasoning: !!reasoning ? { ...reasoning, duration } : undefined,
          search: !!grounding?.citations ? grounding : undefined,
          metadata: usage,
+          imageList: finalImages.length > 0 ? finalImages : undefined,
        });
      },
      onMessageHandle: async (chunk) => {
@@ -605,6 +625,29 @@ export const generateAIChat: StateCreator<
            break;
          }
 
+          case 'base64_image': {
+            internal_dispatchMessage({
+              id: messageId,
+              type: 'updateMessage',
+              value: {
+                imageList: chunk.images.map((i) => ({ id: i.id, url: i.data, alt: i.id })),
+              },
+            });
+            const image = chunk.image;
+
+            const task = getFileStoreState()
+              .uploadBase64FileWithProgress(image.data)
+              .then((value) => ({
+                id: value?.id,
+                url: value?.url,
+                alt: value?.filename || value?.id,
+              }));
+
+            uploadTasks.set(image.id, task);
+
+            break;
+          }
+
          case 'text': {
            output += chunk.text;
 
@@ -658,10 +701,7 @@ export const generateAIChat: StateCreator<
 
      internal_toggleChatLoading(false, messageId, n('generateMessage(end)') as string);
 
-      return {
-        isFunctionCall,
-        traceId: msgTraceId,
-      };
+      return { isFunctionCall, traceId: msgTraceId };
    },
 
    internal_resendMessage: async (
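The flow above follows a "render now, persist later" pattern: each streamed `base64_image` chunk is dispatched into the message immediately (using the base64 payload as a temporary URL) while an upload task is registered in `uploadTasks`; `onFinish` then awaits every task and persists only the images that resolved to a real URL. A simplified, standalone restatement of that pattern (names are local to the sketch):

```ts
// One pending upload per streamed image id.
const uploadTasks = new Map<string, Promise<{ id?: string; url?: string }>>();

const onImageChunk = (
  image: { id: string; data: string },
  upload: (data: string) => Promise<{ id?: string; url?: string }>,
) => {
  // 1. the caller renders image.data right away; 2. the upload runs in the background
  uploadTasks.set(image.id, upload(image.data));
};

const finalizeImages = async () => {
  if (uploadTasks.size === 0) return [];

  // Keep only uploads that produced a persistent URL.
  const results = await Promise.all(uploadTasks.values());
  return results.filter((r): r is { id: string; url: string } => !!r.url);
};
```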
package/src/store/chat/slices/message/action.ts
CHANGED
@@ -21,6 +21,7 @@ import {
   MessageToolCall,
   ModelReasoning,
 } from '@/types/message';
+import { ChatImageItem } from '@/types/message/image';
 import { GroundingSearch } from '@/types/search';
 import { TraceEventPayloads } from '@/types/trace';
 import { setNamespace } from '@/utils/storeDebug';
@@ -81,6 +82,7 @@ export interface ChatMessageAction {
      reasoning?: ModelReasoning;
      search?: GroundingSearch;
      metadata?: MessageMetadata;
+      imageList?: ChatImageItem[];
      model?: string;
      provider?: string;
    },
@@ -319,6 +321,7 @@ export const chatMessage: StateCreator<
      metadata: extra?.metadata,
      model: extra?.model,
      provider: extra?.provider,
+      imageList: extra?.imageList,
    });
    await refreshMessages();
  },
package/src/store/file/slices/upload/action.ts
CHANGED
@@ -11,21 +11,23 @@ import { FileMetadata, UploadFileItem } from '@/types/files';
 
 import { FileStore } from '../../store';
 
+type OnStatusUpdate = (
+  data:
+    | {
+        id: string;
+        type: 'updateFile';
+        value: Partial<UploadFileItem>;
+      }
+    | {
+        id: string;
+        type: 'removeFile';
+      },
+) => void;
+
 interface UploadWithProgressParams {
   file: File;
   knowledgeBaseId?: string;
-  onStatusUpdate?:
-    data:
-      | {
-          id: string;
-          type: 'updateFile';
-          value: Partial<UploadFileItem>;
-        }
-      | {
-          id: string;
-          type: 'removeFile';
-        },
-  ) => void;
+  onStatusUpdate?: OnStatusUpdate;
   /**
    * Optional flag to indicate whether to skip the file type check.
    * When set to `true`, any file type checks will be bypassed.
@@ -35,11 +37,19 @@ interface UploadWithProgressParams {
 }
 
 interface UploadWithProgressResult {
+  filename?: string;
   id: string;
   url: string;
 }
 
 export interface FileUploadAction {
+  uploadBase64FileWithProgress: (
+    base64: string,
+    params?: {
+      onStatusUpdate?: OnStatusUpdate;
+    },
+  ) => Promise<UploadWithProgressResult | undefined>;
+
   uploadWithProgress: (
     params: UploadWithProgressParams,
   ) => Promise<UploadWithProgressResult | undefined>;
@@ -51,6 +61,19 @@ export const createFileUploadSlice: StateCreator<
   [],
   FileUploadAction
 > = () => ({
+  uploadBase64FileWithProgress: async (base64) => {
+    const { metadata, fileType, size, hash } = await uploadService.uploadBase64ToS3(base64);
+
+    const res = await fileService.createFile({
+      fileType,
+      hash,
+      metadata,
+      name: metadata.filename,
+      size: size,
+      url: metadata.path,
+    });
+    return { ...res, filename: metadata.filename };
+  },
  uploadWithProgress: async ({ file, onStatusUpdate, knowledgeBaseId, skipCheckFileType }) => {
    const fileArrayBuffer = await file.arrayBuffer();
 
@@ -135,6 +158,6 @@ export const createFileUploadSlice: StateCreator<
      },
    });
 
-    return data;
+    return { ...data, filename: file.name };
  },
 });
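A hypothetical caller of the new `uploadBase64FileWithProgress` action, mirroring how the `base64_image` branch in `generateAIChat` uses it through the `getFileStoreState` helper added below:

```ts
import { getFileStoreState } from '@/store/file/store';

// Upload a streamed base64 image and map the result to the ChatImageItem shape.
const persistStreamedImage = async (base64DataUri: string) => {
  const result = await getFileStoreState().uploadBase64FileWithProgress(base64DataUri);
  if (!result) return;

  return { alt: result.filename || result.id, id: result.id, url: result.url };
};
```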
package/src/store/file/store.ts
CHANGED
@@ -32,3 +32,5 @@ const createStore: StateCreator<FileStore, [['zustand/devtools', never]]> = (...
 const devtools = createDevtools('file');
 
 export const useFileStore = createWithEqualityFn<FileStore>()(devtools(createStore), shallow);
+
+export const getFileStoreState = () => useFileStore.getState();
package/src/types/message/base.ts
CHANGED
@@ -1,3 +1,6 @@
+import { ChatMessageError } from '@/types/message/chat';
+import { ChatImageItem } from '@/types/message/image';
+import { ChatToolPayload, MessageToolCall } from '@/types/message/tools';
 import { GroundingSearch } from '@/types/search';
 
 export interface CitationItem {
@@ -22,12 +25,17 @@ export interface ModelTokensUsage {
    * currently only pplx has citation_tokens
    */
   inputCitationTokens?: number;
+  /**
+   * user prompt image
+   */
+  inputImageTokens?: number;
   /**
    * user prompt input
    */
   inputTextTokens?: number;
   inputWriteCacheTokens?: number;
   outputAudioTokens?: number;
+  outputImageTokens?: number;
   outputReasoningTokens?: number;
   outputTextTokens?: number;
   rejectedPredictionTokens?: number;
@@ -61,7 +69,6 @@ export interface MessageItem {
   search: GroundingSearch | null;
   sessionId: string | null;
   threadId: string | null;
-  // jsonb type
   tools: any | null;
   topicId: string | null;
   // jsonb type
@@ -96,3 +103,17 @@ export interface NewMessage {
   updatedAt?: Date;
   userId: string; // optional because it's generated
 }
+
+export interface UpdateMessageParams {
+  content?: string;
+  error?: ChatMessageError | null;
+  imageList?: ChatImageItem[];
+  metadata?: MessageMetadata;
+  model?: string;
+  provider?: string;
+  reasoning?: ModelReasoning;
+  role?: string;
+  search?: GroundingSearch;
+  toolCalls?: MessageToolCall[];
+  tools?: ChatToolPayload[] | null;
+}
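A small example of the new `UpdateMessageParams` shape, which `IMessageService.updateMessage` now accepts (values are illustrative only):

```ts
import { UpdateMessageParams } from '@/types/message';

const params: UpdateMessageParams = {
  content: 'Here is the generated image.',
  imageList: [{ alt: 'generated-image.png', id: 'file_123', url: 'https://example.com/generated.png' }],
};
```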
package/src/types/message/chat.ts
CHANGED
@@ -7,6 +7,7 @@ import { MessageSemanticSearchChunk } from '@/types/rag';
 import { GroundingSearch } from '@/types/search';
 
 import { MessageMetadata, MessageRoleType, ModelReasoning } from './base';
+import { ChatImageItem } from './image';
 import { ChatPluginPayload, ChatToolPayload } from './tools';
 import { Translate } from './translate';
 
@@ -37,12 +38,6 @@ export interface ChatFileItem {
   url: string;
 }
 
-export interface ChatImageItem {
-  alt: string;
-  id: string;
-  url: string;
-}
-
 export interface ChatFileChunk {
   fileId: string;
   fileType: string;
package/src/utils/fetch/fetchSSE.ts
CHANGED
@@ -12,7 +12,9 @@ import {
   ModelReasoning,
   ModelTokensUsage,
 } from '@/types/message';
+import { ChatImageChunk } from '@/types/message/image';
 import { GroundingSearch } from '@/types/search';
+import { nanoid } from '@/utils/uuid';
 
 import { fetchEventSource } from './fetchEventSource';
 import { getMessageError } from './parseError';
@@ -24,6 +26,7 @@ export type OnFinishHandler = (
   text: string,
   context: {
     grounding?: GroundingSearch;
+    images?: ChatImageChunk[];
     observationId?: string | null;
     reasoning?: ModelReasoning;
     toolCalls?: MessageToolCall[];
@@ -43,6 +46,13 @@ export interface MessageTextChunk {
   type: 'text';
 }
 
+export interface MessageBase64ImageChunk {
+  id: string;
+  image: ChatImageChunk;
+  images: ChatImageChunk[];
+  type: 'base64_image';
+}
+
 export interface MessageReasoningChunk {
   signature?: string;
   text?: string;
@@ -71,7 +81,8 @@ export interface FetchSSEOptions {
      | MessageToolCallsChunk
      | MessageReasoningChunk
      | MessageGroundingChunk
-      | MessageUsageChunk
+      | MessageUsageChunk
+      | MessageBase64ImageChunk,
  ) => void;
  smoothing?: SmoothingParams | boolean;
 }
@@ -330,6 +341,8 @@ export const fetchSSE = async (url: string, options: RequestInit & FetchSSEOptions) => {
 
  let grounding: GroundingSearch | undefined = undefined;
  let usage: ModelTokensUsage | undefined = undefined;
+  let images: ChatImageChunk[] = [];
+
  await fetchEventSource(url, {
    body: options.body,
    fetch: options?.fetcher,
@@ -389,6 +402,15 @@ export const fetchSSE = async (url: string, options: RequestInit & FetchSSEOptions) => {
          break;
        }
 
+        case 'base64_image': {
+          const id = 'tmp_img_' + nanoid();
+          const item = { data, id, isBase64: true };
+          images.push(item);
+
+          options.onMessageHandle?.({ id, image: item, images, type: 'base64_image' });
+          break;
+        }
+
        case 'text': {
          // skip empty text
          if (!data) break;
@@ -492,6 +514,7 @@ export const fetchSSE = async (url: string, options: RequestInit & FetchSSEOptions) => {
 
  await options?.onFinish?.(output, {
    grounding,
+    images: images.length > 0 ? images : undefined,
    observationId,
    reasoning: !!thinking ? { content: thinking, signature: thinkingSignature } : undefined,
    toolCalls,