@lobehub/chat 1.85.7 → 1.85.8
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +25 -0
- package/changelog/v1.json +9 -0
- package/package.json +1 -1
- package/packages/file-loaders/package.json +1 -1
- package/packages/file-loaders/test/setup.ts +1 -1
- package/src/features/DataImporter/index.tsx +0 -1
- package/src/server/routers/lambda/__tests__/importer.test.ts +3 -0
- package/src/server/routers/lambda/importer.ts +10 -2
- package/src/services/__tests__/upload.test.ts +4 -6
- package/src/services/import/server.ts +20 -71
- package/src/services/ragEval.ts +1 -1
- package/src/services/upload.ts +52 -18
- package/src/store/chat/slices/builtinTool/actions/dalle.test.ts +2 -0
- package/src/store/file/slices/upload/action.ts +14 -27
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,31 @@
 
 # Changelog
 
+### [Version 1.85.8](https://github.com/lobehub/lobe-chat/compare/v1.85.7...v1.85.8)
+
+<sup>Released on **2025-05-11**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix config import issue in the desktop version.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix config import issue in the desktop version, closes [#7800](https://github.com/lobehub/lobe-chat/issues/7800) ([2cb8635](https://github.com/lobehub/lobe-chat/commit/2cb8635))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.85.7](https://github.com/lobehub/lobe-chat/compare/v1.85.6...v1.85.7)
 
 <sup>Released on **2025-05-11**</sup>
package/changelog/v1.json
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.85.7",
+  "version": "1.85.8",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/packages/file-loaders/package.json
CHANGED
@@ -26,6 +26,7 @@
   "dependencies": {
     "@langchain/community": "^0.3.41",
     "@langchain/core": "^0.3.45",
+    "@napi-rs/canvas": "^0.1.70",
     "@xmldom/xmldom": "^0.9.8",
     "concat-stream": "^2.0.0",
     "mammoth": "^1.8.0",
@@ -37,7 +38,6 @@
   "devDependencies": {
     "@types/concat-stream": "^2.0.3",
     "@types/yauzl": "^2.10.3",
-    "canvas": "^3.1.0",
     "typescript": "^5"
   },
   "peerDependencies": {
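The hunks above replace the native `canvas` devDependency with `@napi-rs/canvas` as a runtime dependency; the latter ships prebuilt N-API binaries, so installs no longer need a node-gyp toolchain. A minimal usage sketch, assuming the common node-canvas-style API surface (the rendering code is illustrative, not taken from the package):

```ts
// Hypothetical sketch: @napi-rs/canvas exposes a node-canvas-compatible API,
// the kind of surface file loaders typically use to rasterize page content.
import { createCanvas } from '@napi-rs/canvas';

const canvas = createCanvas(200, 100);
const ctx = canvas.getContext('2d');

ctx.fillStyle = '#ffffff';
ctx.fillRect(0, 0, 200, 100);
ctx.fillStyle = '#000000';
ctx.fillText('hello', 10, 50);

// Encode to PNG; no native build step is required at install time.
const png = canvas.toBuffer('image/png');
console.log(png.byteLength);
```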
package/src/server/routers/lambda/__tests__/importer.test.ts
CHANGED
@@ -8,6 +8,7 @@ import { ImportResultData } from '@/types/importer';
 import { importerRouter } from '../importer';
 
 const mockGetFileContent = vi.fn();
+const mockDeleteFile = vi.fn();
 const mockImportData = vi.fn();
 const mockImportPgData = vi.fn();
 
@@ -21,6 +22,7 @@ vi.mock('@/database/repositories/dataImporter', () => ({
 vi.mock('@/server/services/file', () => ({
   FileService: vi.fn().mockImplementation(() => ({
     getFileContent: mockGetFileContent,
+    deleteFile: mockDeleteFile,
   })),
 }));
 
@@ -74,6 +76,7 @@ describe('importerRouter', () => {
     expect(result).toEqual(mockImportResult);
     expect(mockGetFileContent).toHaveBeenCalledWith('test.json');
     expect(mockImportData).toHaveBeenCalledWith(JSON.parse(mockFileContent));
+    expect(mockDeleteFile).toHaveBeenCalledWith('test.json');
   });
 
   it('should handle PG data import', async () => {
package/src/server/routers/lambda/importer.ts
CHANGED
@@ -39,11 +39,19 @@ export const importerRouter = router({
       });
     }
 
+    let result: ImportResultData;
     if ('schemaHash' in data) {
-
+      result = await ctx.dataImporterService.importPgData(
+        data as unknown as ImportPgDataStructure,
+      );
+    } else {
+      result = await ctx.dataImporterService.importData(data);
     }
 
-
+    // clean file after upload
+    await ctx.fileService.deleteFile(input.pathname);
+
+    return result;
   }),
 
   importByPost: importProcedure
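The `importByFile` procedure now awaits either import path, deletes the uploaded `import_config/*.json` object, and only then returns; previously the temporary file was left behind in storage. A minimal sketch of the resulting flow, with the context shape assumed from the mocks in the test file above:

```ts
// Sketch only: the tRPC procedure wiring is omitted and the context interface is
// inferred from the diff (getFileContent / deleteFile / importData / importPgData).
type ImportResult = { results?: unknown; success?: boolean };

interface ImporterCtx {
  dataImporterService: {
    importData: (data: object) => Promise<ImportResult>;
    importPgData: (data: object) => Promise<ImportResult>;
  };
  fileService: {
    deleteFile: (pathname: string) => Promise<void>;
    getFileContent: (pathname: string) => Promise<string>;
  };
}

export const importByFile = async (ctx: ImporterCtx, input: { pathname: string }) => {
  const data = JSON.parse(await ctx.fileService.getFileContent(input.pathname));

  // 'schemaHash' marks the Postgres-shaped export format
  const result =
    'schemaHash' in data
      ? await ctx.dataImporterService.importPgData(data)
      : await ctx.dataImporterService.importData(data);

  // the uploaded import_config/*.json object is no longer needed once imported
  await ctx.fileService.deleteFile(input.pathname);

  return result;
};
```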
package/src/services/__tests__/upload.test.ts
CHANGED
@@ -69,7 +69,7 @@ describe('UploadService', () => {
       }
     });
 
-    const result = await uploadService.
+    const result = await uploadService.uploadToServerS3(mockFile, { onProgress });
 
     expect(result).toEqual({
       date: '1',
@@ -91,9 +91,7 @@ describe('UploadService', () => {
       }
     });
 
-    await expect(uploadService.
-      UPLOAD_NETWORK_ERROR,
-    );
+    await expect(uploadService.uploadToServerS3(mockFile, {})).rejects.toBe(UPLOAD_NETWORK_ERROR);
   });
 
   it('should handle upload error', async () => {
@@ -109,7 +107,7 @@ describe('UploadService', () => {
       }
     });
 
-    await expect(uploadService.
+    await expect(uploadService.uploadToServerS3(mockFile, {})).rejects.toBe('Bad Request');
   });
 });
 
@@ -125,7 +123,7 @@ describe('UploadService', () => {
 
     (clientS3Storage.putObject as any).mockResolvedValue(undefined);
 
-    const result = await uploadService
+    const result = await uploadService['uploadToClientS3'](hash, mockFile);
 
     expect(clientS3Storage.putObject).toHaveBeenCalledWith(hash, mockFile);
     expect(result).toEqual(expectedResult);
package/src/services/import/server.ts
CHANGED
@@ -1,6 +1,7 @@
 import { DefaultErrorShape } from '@trpc/server/unstable-core-do-not-import';
 
-import {
+import { lambdaClient } from '@/libs/trpc/client';
+import { uploadService } from '@/services/upload';
 import { useUserStore } from '@/store/user';
 import { ImportPgDataStructure } from '@/types/export';
 import { ImportStage, OnImportCallbacks } from '@/types/importer';
@@ -48,30 +49,7 @@ export class ServerService implements IImportService {
       return;
     }
 
-
-    const filename = `${uuid()}.json`;
-
-    const pathname = `import_config/${filename}`;
-
-    const url = await edgeClient.upload.createS3PreSignedUrl.mutate({ pathname });
-
-    try {
-      callbacks?.onStageChange?.(ImportStage.Uploading);
-      await this.uploadWithProgress(url, data, callbacks?.onFileUploading);
-    } catch {
-      throw new Error('Upload Error');
-    }
-
-    callbacks?.onStageChange?.(ImportStage.Importing);
-    const time = Date.now();
-    try {
-      const result = await lambdaClient.importer.importByFile.mutate({ pathname });
-      const duration = Date.now() - time;
-      callbacks?.onStageChange?.(ImportStage.Success);
-      callbacks?.onSuccess?.(result.results, duration);
-    } catch (e) {
-      handleError(e);
-    }
+    await this.uploadData(data, { callbacks, handleError });
   };
 
   importPgData: IImportService['importPgData'] = async (
@@ -115,16 +93,28 @@ export class ServerService implements IImportService {
       return;
     }
 
+    await this.uploadData(data, { callbacks, handleError });
+  };
+
+  private uploadData = async (
+    data: object,
+    { callbacks, handleError }: { callbacks?: OnImportCallbacks; handleError: (e: unknown) => any },
+  ) => {
     // if the data is too large, upload it to S3 and upload by file
     const filename = `${uuid()}.json`;
 
-
-
-    const url = await edgeClient.upload.createS3PreSignedUrl.mutate({ pathname });
-
+    let pathname;
     try {
       callbacks?.onStageChange?.(ImportStage.Uploading);
-      await
+      const result = await uploadService.uploadDataToS3(data, {
+        filename,
+        onProgress: (status, state) => {
+          callbacks?.onFileUploading?.(state);
+        },
+        pathname: `import_config/${filename}`,
+      });
+      pathname = result.data.path;
+      console.log(pathname);
     } catch {
       throw new Error('Upload Error');
     }
@@ -140,45 +130,4 @@ export class ServerService implements IImportService {
       handleError(e);
     }
   };
-
-  private uploadWithProgress = async (
-    url: string,
-    data: object,
-    onProgress: OnImportCallbacks['onFileUploading'],
-  ) => {
-    const xhr = new XMLHttpRequest();
-
-    let startTime = Date.now();
-    xhr.upload.addEventListener('progress', (event) => {
-      if (event.lengthComputable) {
-        const progress = Number(((event.loaded / event.total) * 100).toFixed(1));
-
-        const speedInByte = event.loaded / ((Date.now() - startTime) / 1000);
-
-        onProgress?.({
-          // if the progress is 100, it means the file is uploaded
-          // but the server is still processing it
-          // so make it as 99.5 and let users think it's still uploading
-          progress: progress === 100 ? 99.5 : progress,
-          restTime: (event.total - event.loaded) / speedInByte,
-          speed: speedInByte / 1024,
-        });
-      }
-    });
-
-    xhr.open('PUT', url);
-    xhr.setRequestHeader('Content-Type', 'application/json');
-
-    return new Promise((resolve, reject) => {
-      xhr.addEventListener('load', () => {
-        if (xhr.status >= 200 && xhr.status < 300) {
-          resolve(xhr.response);
-        } else {
-          reject(xhr.statusText);
-        }
-      });
-      xhr.addEventListener('error', () => reject(xhr.statusText));
-      xhr.send(JSON.stringify(data));
-    });
-  };
 }
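Both `importData` and `importPgData` now funnel through the new private `uploadData` helper: the JSON payload is handed to `uploadService.uploadDataToS3` (defined in the `upload.ts` diff further below), and the returned `path` is passed to `lambdaClient.importer.importByFile`. A simplified sketch of that upload-then-import sequence, assuming the callback and client shapes shown in this diff (error handling and stage bookkeeping are trimmed, and `crypto.randomUUID()` stands in for the package's `uuid()` helper):

```ts
import { lambdaClient } from '@/libs/trpc/client';
import { uploadService } from '@/services/upload';
import { ImportStage, OnImportCallbacks } from '@/types/importer';

// Sketch of the shared flow behind the private uploadData helper.
export const uploadAndImport = async (data: object, callbacks?: OnImportCallbacks) => {
  const filename = `${crypto.randomUUID()}.json`;

  callbacks?.onStageChange?.(ImportStage.Uploading);
  const { data: metadata } = await uploadService.uploadDataToS3(data, {
    filename,
    onProgress: (_status, state) => callbacks?.onFileUploading?.(state),
    pathname: `import_config/${filename}`,
  });

  callbacks?.onStageChange?.(ImportStage.Importing);
  const start = Date.now();
  const result = await lambdaClient.importer.importByFile.mutate({ pathname: metadata.path });

  callbacks?.onStageChange?.(ImportStage.Success);
  callbacks?.onSuccess?.(result.results, Date.now() - start);
};
```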
package/src/services/ragEval.ts
CHANGED
@@ -40,7 +40,7 @@ class RAGEvalService {
   };
 
   importDatasetRecords = async (datasetId: number, file: File): Promise<void> => {
-    const { path } = await uploadService.
+    const { path } = await uploadService.uploadToServerS3(file, { directory: 'ragEval' });
 
     await lambdaClient.ragEval.importDatasetRecords.mutate({ datasetId, pathname: path });
   };
package/src/services/upload.ts
CHANGED
@@ -2,7 +2,7 @@ import dayjs from 'dayjs';
 import { sha256 } from 'js-sha256';
 
 import { fileEnv } from '@/config/file';
-import { isServerMode } from '@/const/version';
+import { isDesktop, isServerMode } from '@/const/version';
 import { parseDataUri } from '@/libs/agent-runtime/utils/uriParser';
 import { edgeClient } from '@/libs/trpc/client';
 import { API_ENDPOINTS } from '@/services/_url';
@@ -16,7 +16,10 @@ export const UPLOAD_NETWORK_ERROR = 'NetWorkError';
 interface UploadFileToS3Options {
   directory?: string;
   filename?: string;
+  onNotSupported?: () => void;
   onProgress?: (status: FileUploadStatus, state: FileUploadState) => void;
+  pathname?: string;
+  skipCheckFileType?: boolean;
 }
 
 class UploadService {
@@ -25,20 +28,43 @@ class UploadService {
    */
   uploadFileToS3 = async (
     file: File,
-
-  ): Promise<FileMetadata> => {
-    const {
+    { onProgress, directory, skipCheckFileType, onNotSupported, pathname }: UploadFileToS3Options,
+  ): Promise<{ data: FileMetadata; success: boolean }> => {
+    const { getElectronStoreState } = await import('@/store/electron');
+    const { electronSyncSelectors } = await import('@/store/electron/selectors');
+    // only if not enable sync
+    const state = getElectronStoreState();
+    const isSyncActive = electronSyncSelectors.isSyncActive(state);
+
+    // 桌面端上传逻辑(并且没开启 sync 同步)
+    if (isDesktop && !isSyncActive) {
+      const data = await this.uploadToDesktopS3(file);
+      return { data, success: true };
+    }
 
+    // 服务端上传逻辑
     if (isServerMode) {
-
-    } else {
-      const fileArrayBuffer = await file.arrayBuffer();
+      // if is server mode, upload to server s3,
 
-
-
+      const data = await this.uploadToServerS3(file, { directory, onProgress, pathname });
+      return { data, success: true };
+    }
 
-
+    // upload to client s3
+    // 客户端上传逻辑
+    if (!skipCheckFileType && !file.type.startsWith('image')) {
+      onNotSupported?.();
+      return { data: undefined as unknown as FileMetadata, success: false };
     }
+
+    const fileArrayBuffer = await file.arrayBuffer();
+
+    // 1. check file hash
+    const hash = sha256(fileArrayBuffer);
+    // Upload to the indexeddb in the browser
+    const data = await this.uploadToClientS3(hash, file);
+
+    return { data, success: true };
   };
 
   uploadBase64ToS3 = async (
@@ -79,7 +105,7 @@
     const file = new File([blob], fileName, { type: mimeType });
 
     // 使用统一的上传方法
-    const metadata = await this.uploadFileToS3(file, options);
+    const { data: metadata } = await this.uploadFileToS3(file, options);
     const hash = sha256(await file.arrayBuffer());
 
     return {
@@ -90,19 +116,27 @@
     };
   };
 
-
+  uploadDataToS3 = async (data: object, options: UploadFileToS3Options = {}) => {
+    const blob = new Blob([JSON.stringify(data)], { type: 'application/json' });
+    const file = new File([blob], options.filename || 'data.json', { type: 'application/json' });
+    return await this.uploadFileToS3(file, options);
+  };
+
+  uploadToServerS3 = async (
     file: File,
     {
       onProgress,
       directory,
+      pathname,
     }: {
       directory?: string;
       onProgress?: (status: FileUploadStatus, state: FileUploadState) => void;
+      pathname?: string;
     },
   ): Promise<FileMetadata> => {
     const xhr = new XMLHttpRequest();
 
-    const { preSignUrl, ...result } = await this.getSignedUploadUrl(file, directory);
+    const { preSignUrl, ...result } = await this.getSignedUploadUrl(file, { directory, pathname });
     let startTime = Date.now();
     xhr.upload.addEventListener('progress', (event) => {
       if (event.lengthComputable) {
@@ -148,7 +182,7 @@
     return result;
   };
 
-
+  private uploadToDesktopS3 = async (file: File) => {
     const fileArrayBuffer = await file.arrayBuffer();
     const hash = sha256(fileArrayBuffer);
 
@@ -157,7 +191,7 @@
     return metadata;
   };
 
-  uploadToClientS3 = async (hash: string, file: File): Promise<FileMetadata> => {
+  private uploadToClientS3 = async (hash: string, file: File): Promise<FileMetadata> => {
     await clientS3Storage.putObject(hash, file);
 
     return {
@@ -183,7 +217,7 @@
 
   private getSignedUploadUrl = async (
     file: File,
-    directory?: string,
+    options: { directory?: string; pathname?: string } = {},
   ): Promise<
     FileMetadata & {
       preSignUrl: string;
@@ -193,8 +227,8 @@
 
     // 精确到以 h 为单位的 path
     const date = (Date.now() / 1000 / 60 / 60).toFixed(0);
-    const dirname = `${directory || fileEnv.NEXT_PUBLIC_S3_FILE_PATH}/${date}`;
-    const pathname = `${dirname}/${filename}`;
+    const dirname = `${options.directory || fileEnv.NEXT_PUBLIC_S3_FILE_PATH}/${date}`;
+    const pathname = options.pathname ?? `${dirname}/${filename}`;
 
     const preSignUrl = await edgeClient.upload.createS3PreSignedUrl.mutate({ pathname });
 
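`uploadFileToS3` now returns `{ data, success }` and routes the upload itself: Electron desktop builds without sync enabled go to the desktop backend, server mode goes to server S3 via a pre-signed URL, and the browser build falls back to the IndexedDB-backed client storage (rejecting non-image files unless `skipCheckFileType` is set). A hedged consumer sketch of the new return shape and callbacks (option names follow `UploadFileToS3Options` above; the logging is illustrative):

```ts
import { uploadService } from '@/services/upload';

// Sketch: consuming the { data, success } result instead of a bare FileMetadata.
const uploadWithFeedback = async (file: File) => {
  const { data, success } = await uploadService.uploadFileToS3(file, {
    onNotSupported: () => {
      // client-side storage only accepts images unless skipCheckFileType is passed
      console.warn('This deployment only supports image uploads');
    },
    onProgress: (status, state) => {
      console.log(`${status}: ${state.progress}%`);
    },
  });

  if (!success) return;

  // data is the FileMetadata returned by whichever backend handled the upload
  console.log('stored at', data.path);
  return data;
};
```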
package/src/store/chat/slices/builtinTool/actions/dalle.test.ts
CHANGED
@@ -41,6 +41,7 @@ describe('chatToolSlice - dalle', () => {
     vi.spyOn(uploadService, 'getImageFileByUrlWithCORS').mockResolvedValue(
       new File(['1'], 'file.png', { type: 'image/png' }),
     );
+    // @ts-ignore
     vi.spyOn(uploadService, 'uploadToClientS3').mockResolvedValue({} as any);
     vi.spyOn(ClientService.prototype, 'createFile').mockResolvedValue({
       id: mockId,
@@ -56,6 +57,7 @@ describe('chatToolSlice - dalle', () => {
     });
     // For each prompt, loading is toggled on and then off
     expect(imageGenerationService.generateImage).toHaveBeenCalledTimes(prompts.length);
+    // @ts-ignore
     expect(uploadService.uploadToClientS3).toHaveBeenCalledTimes(prompts.length);
     expect(result.current.toggleDallEImageLoading).toHaveBeenCalledTimes(prompts.length * 2);
   });
package/src/store/file/slices/upload/action.ts
CHANGED
@@ -4,11 +4,8 @@ import { StateCreator } from 'zustand/vanilla';
 
 import { message } from '@/components/AntdStaticMethods';
 import { LOBE_CHAT_CLOUD } from '@/const/branding';
-import { isDesktop, isServerMode } from '@/const/version';
 import { fileService } from '@/services/file';
 import { uploadService } from '@/services/upload';
-import { getElectronStoreState } from '@/store/electron';
-import { electronSyncSelectors } from '@/store/electron/selectors';
 import { FileMetadata, UploadFileItem } from '@/types/files';
 
 import { FileStore } from '../../store';
@@ -96,25 +93,8 @@ export const createFileUploadSlice: StateCreator<
       }
       // 2. if file don't exist, need upload files
       else {
-
-
-        const isSyncActive = electronSyncSelectors.isSyncActive(state);
-
-        if (isDesktop && !isSyncActive) {
-          metadata = await uploadService.uploadToDesktop(file);
-        } else if (isServerMode) {
-          // if is server mode, upload to server s3, or upload to client s3
-          metadata = await uploadService.uploadWithProgress(file, {
-            onProgress: (status, upload) => {
-              onStatusUpdate?.({
-                id: file.name,
-                type: 'updateFile',
-                value: { status: status === 'success' ? 'processing' : status, uploadState: upload },
-              });
-            },
-          });
-        } else {
-          if (!skipCheckFileType && !file.type.startsWith('image')) {
+        const { data, success } = await uploadService.uploadFileToS3(file, {
+          onNotSupported: () => {
             onStatusUpdate?.({ id: file.name, type: 'removeFile' });
             message.info({
               content: t('upload.fileOnlySupportInServerMode', {
@@ -124,12 +104,19 @@ export const createFileUploadSlice: StateCreator<
              }),
              duration: 5,
            });
-
-
+          },
+          onProgress: (status, upload) => {
+            onStatusUpdate?.({
+              id: file.name,
+              type: 'updateFile',
+              value: { status: status === 'success' ? 'processing' : status, uploadState: upload },
+            });
+          },
+          skipCheckFileType,
+        });
+        if (!success) return;
 
-
-        metadata = await uploadService.uploadToClientS3(hash, file);
-        }
+        metadata = data;
       }
 
       // 3. use more powerful file type detector to get file type