@uploadista/vue 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-check.log +240 -0
- package/LICENSE +21 -0
- package/README.md +554 -0
- package/package.json +36 -0
- package/src/components/FlowUploadList.vue +342 -0
- package/src/components/FlowUploadZone.vue +305 -0
- package/src/components/UploadList.vue +303 -0
- package/src/components/UploadZone.vue +254 -0
- package/src/components/index.ts +11 -0
- package/src/composables/index.ts +44 -0
- package/src/composables/plugin.ts +76 -0
- package/src/composables/useDragDrop.ts +343 -0
- package/src/composables/useFlowUpload.ts +431 -0
- package/src/composables/useMultiFlowUpload.ts +322 -0
- package/src/composables/useMultiUpload.ts +546 -0
- package/src/composables/useUpload.ts +300 -0
- package/src/composables/useUploadMetrics.ts +502 -0
- package/src/composables/useUploadistaClient.ts +73 -0
- package/src/index.ts +28 -0
- package/src/providers/UploadistaProvider.vue +69 -0
- package/src/providers/index.ts +1 -0
- package/src/utils/index.ts +156 -0
- package/src/utils/is-browser-file.ts +2 -0
- package/tsconfig.json +15 -0
package/src/composables/useMultiFlowUpload.ts
@@ -0,0 +1,322 @@
+import type {
+  BrowserUploadInput,
+  FlowUploadItem,
+  MultiFlowUploadOptions,
+  MultiFlowUploadState,
+} from "@uploadista/client-browser";
+import type { UploadFile } from "@uploadista/core/types";
+import { computed, readonly, ref } from "vue";
+import { useUploadistaClient } from "./useUploadistaClient";
+
+/**
+ * Vue composable for uploading multiple files through a flow.
+ *
+ * Must be used within a component tree that has the Uploadista plugin installed.
+ *
+ * @example
+ * ```vue
+ * <script setup lang="ts">
+ * import { useMultiFlowUpload } from '@uploadista/vue';
+ *
+ * const multiFlowUpload = useMultiFlowUpload({
+ *   flowConfig: {
+ *     flowId: "batch-upload-flow",
+ *     inputNodeId: "upload-node",
+ *     storageId: "my-storage",
+ *   },
+ *   maxConcurrent: 3,
+ *   onComplete: (items) => {
+ *     console.log("All uploads complete:", items);
+ *   },
+ * });
+ *
+ * const handleFileChange = (event: Event) => {
+ *   const files = (event.target as HTMLInputElement).files;
+ *   if (files) {
+ *     multiFlowUpload.addFiles(files);
+ *     multiFlowUpload.startUpload();
+ *   }
+ * };
+ * </script>
+ *
+ * <template>
+ *   <div>
+ *     <input type="file" multiple @change="handleFileChange" />
+ *
+ *     <div v-for="item in multiFlowUpload.state.items" :key="item.id">
+ *       <span>{{ item.file.name }}</span>
+ *       <progress :value="item.progress" :max="100" />
+ *       <button
+ *         v-if="item.status === 'uploading'"
+ *         @click="multiFlowUpload.abortUpload(item.id)"
+ *       >
+ *         Cancel
+ *       </button>
+ *     </div>
+ *   </div>
+ * </template>
+ * ```
+ */
+export function useMultiFlowUpload(
+  options: MultiFlowUploadOptions<BrowserUploadInput>,
+) {
+  const client = useUploadistaClient();
+  const items = ref<FlowUploadItem<BrowserUploadInput>[]>([]);
+  const abortFns = ref<Map<string, () => void>>(new Map());
+  const queue = ref<string[]>([]);
+  const activeCount = ref(0);
+
+  const maxConcurrent = options.maxConcurrent ?? 3;
+
+  const calculateTotalProgress = (
+    items: FlowUploadItem<BrowserUploadInput>[],
+  ) => {
+    if (items.length === 0) return 0;
+    const totalProgress = items.reduce((sum, item) => sum + item.progress, 0);
+    return Math.round(totalProgress / items.length);
+  };
+
+  const processQueue = async () => {
+    if (activeCount.value >= maxConcurrent || queue.value.length === 0) {
+      return;
+    }
+
+    const itemId = queue.value.shift();
+    if (!itemId) return;
+
+    const item = items.value.find((i) => i.id === itemId);
+    if (!item || item.status !== "pending") {
+      processQueue();
+      return;
+    }
+
+    activeCount.value++;
+
+    items.value = items.value.map((i) =>
+      i.id === itemId ? { ...i, status: "uploading" as const } : i,
+    );
+
+    try {
+      const { abort, jobId } = await client.client.uploadWithFlow(
+        item.file,
+        options.flowConfig,
+        {
+          onJobStart: (jobId: string) => {
+            items.value = items.value.map((i) =>
+              i.id === itemId ? { ...i, jobId } : i,
+            );
+          },
+          onProgress: (
+            _uploadId: string,
+            bytesUploaded: number,
+            totalBytes: number | null,
+          ) => {
+            const progress = totalBytes
+              ? Math.round((bytesUploaded / totalBytes) * 100)
+              : 0;
+
+            items.value = items.value.map((i) => {
+              if (i.id === itemId) {
+                const updated = {
+                  ...i,
+                  progress,
+                  bytesUploaded,
+                  totalBytes: totalBytes || 0,
+                };
+                options.onItemProgress?.(updated);
+                return updated;
+              }
+              return i;
+            });
+          },
+          onSuccess: (result: UploadFile) => {
+            items.value = items.value.map((i) => {
+              if (i.id === itemId) {
+                const updated = {
+                  ...i,
+                  status: "success" as const,
+                  result,
+                  progress: 100,
+                };
+                options.onItemSuccess?.(updated);
+                return updated;
+              }
+              return i;
+            });
+
+            // Check if all uploads are complete
+            const allComplete = items.value.every(
+              (i) =>
+                i.status === "success" ||
+                i.status === "error" ||
+                i.status === "aborted",
+            );
+            if (allComplete) {
+              options.onComplete?.(items.value);
+            }
+
+            abortFns.value.delete(itemId);
+            activeCount.value--;
+            processQueue();
+          },
+          onError: (error: Error) => {
+            items.value = items.value.map((i) => {
+              if (i.id === itemId) {
+                const updated = { ...i, status: "error" as const, error };
+                options.onItemError?.(updated, error);
+                return updated;
+              }
+              return i;
+            });
+
+            // Check if all uploads are complete
+            const allComplete = items.value.every(
+              (i) =>
+                i.status === "success" ||
+                i.status === "error" ||
+                i.status === "aborted",
+            );
+            if (allComplete) {
+              options.onComplete?.(items.value);
+            }
+
+            abortFns.value.delete(itemId);
+            activeCount.value--;
+            processQueue();
+          },
+          onShouldRetry: options.onShouldRetry,
+        },
+      );
+
+      abortFns.value.set(itemId, abort);
+
+      items.value = items.value.map((i) =>
+        i.id === itemId ? { ...i, jobId } : i,
+      );
+    } catch (error) {
+      items.value = items.value.map((i) =>
+        i.id === itemId
+          ? { ...i, status: "error" as const, error: error as Error }
+          : i,
+      );
+
+      activeCount.value--;
+      processQueue();
+    }
+  };
+
+  const addFiles = (files: File[] | FileList) => {
+    const fileArray = Array.from(files);
+    const newItems: FlowUploadItem<BrowserUploadInput>[] = fileArray.map(
+      (file) => ({
+        id: `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
+        file,
+        status: "pending",
+        progress: 0,
+        bytesUploaded: 0,
+        totalBytes: file.size,
+        error: null,
+        result: null,
+        jobId: null,
+      }),
+    );
+
+    items.value = [...items.value, ...newItems];
+  };
+
+  const removeFile = (id: string) => {
+    const abortFn = abortFns.value.get(id);
+    if (abortFn) {
+      abortFn();
+      abortFns.value.delete(id);
+    }
+
+    items.value = items.value.filter((item) => item.id !== id);
+    queue.value = queue.value.filter((queueId) => queueId !== id);
+  };
+
+  const startUpload = () => {
+    const pendingItems = items.value.filter(
+      (item) => item.status === "pending",
+    );
+    queue.value.push(...pendingItems.map((item) => item.id));
+
+    for (let i = 0; i < maxConcurrent; i++) {
+      processQueue();
+    }
+  };
+
+  const abortUpload = (id: string) => {
+    const abortFn = abortFns.value.get(id);
+    if (abortFn) {
+      abortFn();
+      abortFns.value.delete(id);
+
+      items.value = items.value.map((item) =>
+        item.id === id ? { ...item, status: "aborted" as const } : item,
+      );
+
+      activeCount.value--;
+      processQueue();
+    }
+  };
+
+  const abortAll = () => {
+    for (const abortFn of abortFns.value.values()) {
+      abortFn();
+    }
+    abortFns.value.clear();
+    queue.value = [];
+    activeCount.value = 0;
+
+    items.value = items.value.map((item) =>
+      item.status === "uploading"
+        ? { ...item, status: "aborted" as const }
+        : item,
+    );
+  };
+
+  const clear = () => {
+    abortAll();
+    items.value = [];
+  };
+
+  const retryUpload = (id: string) => {
+    items.value = items.value.map((item) =>
+      item.id === id
+        ? {
+            ...item,
+            status: "pending" as const,
+            progress: 0,
+            bytesUploaded: 0,
+            error: null,
+          }
+        : item,
+    );
+
+    queue.value.push(id);
+    processQueue();
+  };
+
+  const state = computed<MultiFlowUploadState<BrowserUploadInput>>(() => ({
+    items: items.value,
+    totalProgress: calculateTotalProgress(items.value),
+    activeUploads: items.value.filter((item) => item.status === "uploading")
+      .length,
+    completedUploads: items.value.filter((item) => item.status === "success")
+      .length,
+    failedUploads: items.value.filter((item) => item.status === "error").length,
+  }));
+
+  return {
+    state: readonly(state),
+    addFiles,
+    removeFile,
+    startUpload,
+    abortUpload,
+    abortAll,
+    clear,
+    retryUpload,
+    isUploading: computed(() => state.value.activeUploads > 0),
+  };
+}
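
For orientation, a minimal consumer sketch of the composable added above. It only uses members visible in this diff (addFiles, startUpload, retryUpload, and the readonly state); the upload and retryFailed helper names are hypothetical, and the flowConfig IDs are the placeholders from the JSDoc example.

```ts
// Hypothetical usage sketch; must run inside a component tree that has the
// Uploadista plugin installed, as the JSDoc above requires.
import { useMultiFlowUpload } from "@uploadista/vue";

const multiFlowUpload = useMultiFlowUpload({
  flowConfig: {
    flowId: "batch-upload-flow",   // placeholder IDs from the JSDoc example
    inputNodeId: "upload-node",
    storageId: "my-storage",
  },
  maxConcurrent: 3,
});

// Queue files from an <input type="file" multiple> and start the upload queue.
function upload(files: FileList) {
  multiFlowUpload.addFiles(files);
  multiFlowUpload.startUpload();
}

// Put every item that ended in an error state back into the queue.
function retryFailed() {
  for (const item of multiFlowUpload.state.value.items) {
    if (item.status === "error") {
      multiFlowUpload.retryUpload(item.id);
    }
  }
}
```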