@uploadista/vue 0.0.3

@@ -0,0 +1,546 @@
+ import type { UploadOptions } from "@uploadista/client-browser";
+ import type { UploadFile } from "@uploadista/core/types";
+ import { computed, readonly, ref } from "vue";
+ import type {
+   UploadInput,
+   UploadMetrics,
+   UploadState,
+   UploadStatus,
+ } from "./useUpload";
+ import { useUploadistaClient } from "./useUploadistaClient";
+
+ export interface UploadItem {
+   id: string;
+   file: UploadInput;
+   state: UploadState;
+ }
+
+ export interface MultiUploadOptions
+   extends Omit<UploadOptions, "onSuccess" | "onError" | "onProgress"> {
+   /**
+    * Maximum number of concurrent uploads
+    */
+   maxConcurrent?: number;
+
+   /**
+    * Called when an individual file upload starts
+    */
+   onUploadStart?: (item: UploadItem) => void;
+
+   /**
+    * Called when an individual file upload progresses
+    */
+   onUploadProgress?: (
+     item: UploadItem,
+     progress: number,
+     bytesUploaded: number,
+     totalBytes: number | null,
+   ) => void;
+
+   /**
+    * Called when an individual file upload succeeds
+    */
+   onUploadSuccess?: (item: UploadItem, result: UploadFile) => void;
+
+   /**
+    * Called when an individual file upload fails
+    */
+   onUploadError?: (item: UploadItem, error: Error) => void;
+
+   /**
+    * Called when all uploads complete (successfully or with errors)
+    */
+   onComplete?: (results: {
+     successful: UploadItem[];
+     failed: UploadItem[];
+     total: number;
+   }) => void;
+ }
+
+ export interface MultiUploadState {
+   /**
+    * Total number of uploads
+    */
+   total: number;
+
+   /**
+    * Number of completed uploads (successful + failed)
+    */
+   completed: number;
+
+   /**
+    * Number of successful uploads
+    */
+   successful: number;
+
+   /**
+    * Number of failed uploads
+    */
+   failed: number;
+
+   /**
+    * Number of currently uploading files
+    */
+   uploading: number;
+
+   /**
+    * Overall progress as a percentage (0-100)
+    */
+   progress: number;
+
+   /**
+    * Total bytes uploaded across all files
+    */
+   totalBytesUploaded: number;
+
+   /**
+    * Total bytes to upload across all files
+    */
+   totalBytes: number;
+
+   /**
+    * Whether any uploads are currently active
+    */
+   isUploading: boolean;
+
+   /**
+    * Whether all uploads have completed
+    */
+   isComplete: boolean;
+ }
+
+ /**
+  * Vue composable for managing multiple file uploads with queue management,
+  * concurrent upload limits, and batch operations.
+  *
+  * Must be used within a component tree that has the Uploadista plugin installed.
+  *
+  * @param options - Multi-upload configuration and event handlers
+  * @returns Multi-upload state and control methods
+  *
+  * @example
+  * ```vue
+  * <script setup lang="ts">
+  * import { useMultiUpload } from '@uploadista/vue';
+  *
+  * const { state, items, addFiles, startAll, abortAll, retryFailed } = useMultiUpload({
+  *   maxConcurrent: 3,
+  *   onUploadSuccess: (item, result) => {
+  *     console.log(`${item.file.name} uploaded successfully`);
+  *   },
+  *   onComplete: (results) => {
+  *     console.log(`Upload batch complete: ${results.successful.length}/${results.total} successful`);
+  *   },
+  * });
+  *
+  * const handleFileChange = (event: Event) => {
+  *   const files = (event.target as HTMLInputElement).files;
+  *   if (files) {
+  *     addFiles(Array.from(files));
+  *   }
+  * };
+  * </script>
+  *
+  * <template>
+  *   <div>
+  *     <input type="file" multiple @change="handleFileChange" />
+  *
+  *     <div>Progress: {{ state.progress }}%</div>
+  *     <div>
+  *       {{ state.uploading }} uploading,
+  *       {{ state.successful }} successful,
+  *       {{ state.failed }} failed
+  *     </div>
+  *
+  *     <button @click="startAll" :disabled="state.isUploading">
+  *       Start All
+  *     </button>
+  *     <button @click="abortAll" :disabled="!state.isUploading">
+  *       Abort All
+  *     </button>
+  *     <button @click="retryFailed" :disabled="state.failed === 0">
+  *       Retry Failed
+  *     </button>
+  *
+  *     <div v-for="item in items" :key="item.id">
+  *       {{ item.file.name }}: {{ item.state.status }} ({{ item.state.progress }}%)
+  *     </div>
+  *   </div>
+  * </template>
+  * ```
+  */
+ export function useMultiUpload(options: MultiUploadOptions = {}) {
+   const uploadClient = useUploadistaClient();
+   const { maxConcurrent = 3 } = options;
+   const items = ref<UploadItem[]>([]);
+   const nextId = ref(0);
+   const activeUploads = ref(new Set<string>());
+
+   // Store abort controllers for each upload
+   const abortControllers = ref<Map<string, { abort: () => void }>>(new Map());
+
+   // Generate a unique ID for each upload item
+   const generateId = () => {
+     return `upload-${Date.now()}-${nextId.value++}`;
+   };
+
+   // State update callback for individual uploads
+   const onStateUpdate = (id: string, state: Partial<UploadState>) => {
+     items.value = items.value.map((item) =>
+       item.id === id ? { ...item, state: { ...item.state, ...state } } : item,
+     );
+   };
+
+   // Check if all uploads are complete and trigger completion callback
+   const checkForCompletion = () => {
+     const allComplete = items.value.every((item) =>
+       ["success", "error", "aborted"].includes(item.state.status),
+     );
+
+     if (allComplete && items.value.length > 0) {
+       const successful = items.value.filter(
+         (item) => item.state.status === "success",
+       );
+       const failed = items.value.filter((item) =>
+         ["error", "aborted"].includes(item.state.status),
+       );
+
+       options.onComplete?.({
+         successful,
+         failed,
+         total: items.value.length,
+       });
+     }
+   };
+
+   // Start the next available upload if we have capacity
+   const startNextUpload = async () => {
+     if (activeUploads.value.size >= maxConcurrent) {
+       return;
+     }
+
+     const nextItem = items.value.find(
+       (item) =>
+         item.state.status === "idle" && !activeUploads.value.has(item.id),
+     );
+
+     if (!nextItem) {
+       return;
+     }
+
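+     // Reserve the slot synchronously (before the first await) so repeated
+     // calls from startAll never pick the same queued item twice.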
+     activeUploads.value.add(nextItem.id);
+     options.onUploadStart?.(nextItem);
+
+     // Update state to uploading
+     onStateUpdate(nextItem.id, { status: "uploading" });
+
+     try {
+       const controller = await uploadClient.client.upload(nextItem.file, {
+         metadata: options.metadata,
+         uploadLengthDeferred: options.uploadLengthDeferred,
+         uploadSize: options.uploadSize,
+
+         onProgress: (
+           _uploadId: string,
+           bytesUploaded: number,
+           totalBytes: number | null,
+         ) => {
+           const progress = totalBytes
+             ? Math.round((bytesUploaded / totalBytes) * 100)
+             : 0;
+
+           onStateUpdate(nextItem.id, {
+             progress,
+             bytesUploaded,
+             totalBytes,
+           });
+
+           options.onUploadProgress?.(
+             nextItem,
+             progress,
+             bytesUploaded,
+             totalBytes,
+           );
+         },
+
+         onChunkComplete: () => {
+           // Optional: could expose this as an option
+         },
+
+         onSuccess: (result: UploadFile) => {
+           onStateUpdate(nextItem.id, {
+             status: "success",
+             result,
+             progress: 100,
+           });
+
+           const updatedItem = {
+             ...nextItem,
+             state: { ...nextItem.state, status: "success" as const, result },
+           };
+           options.onUploadSuccess?.(updatedItem, result);
+
+           // Mark complete and start next
+           activeUploads.value.delete(nextItem.id);
+           abortControllers.value.delete(nextItem.id);
+           startNextUpload();
+           checkForCompletion();
+         },
+
+         onError: (error: Error) => {
+           onStateUpdate(nextItem.id, {
+             status: "error",
+             error,
+           });
+
+           const updatedItem = {
+             ...nextItem,
+             state: { ...nextItem.state, status: "error" as const, error },
+           };
+           options.onUploadError?.(updatedItem, error);
+
+           // Mark complete and start next
+           activeUploads.value.delete(nextItem.id);
+           abortControllers.value.delete(nextItem.id);
+           startNextUpload();
+           checkForCompletion();
+         },
+
+         onShouldRetry: options.onShouldRetry,
+       });
+
+       // Store abort controller
+       abortControllers.value.set(nextItem.id, controller);
+     } catch (error) {
+       onStateUpdate(nextItem.id, {
+         status: "error",
+         error: error as Error,
+       });
+
+       const updatedItem = {
+         ...nextItem,
+         state: {
+           ...nextItem.state,
+           status: "error" as const,
+           error: error as Error,
+         },
+       };
+       options.onUploadError?.(updatedItem, error as Error);
+
+       // Mark complete and start next
+       activeUploads.value.delete(nextItem.id);
+       abortControllers.value.delete(nextItem.id);
+       startNextUpload();
+       checkForCompletion();
+     }
+   };
+
+   // Calculate overall state
+   const state = computed<MultiUploadState>(() => {
+     const itemsList = items.value;
+     return {
+       total: itemsList.length,
+       completed: itemsList.filter((item) =>
+         ["success", "error", "aborted"].includes(item.state.status),
+       ).length,
+       successful: itemsList.filter((item) => item.state.status === "success")
+         .length,
+       failed: itemsList.filter((item) =>
+         ["error", "aborted"].includes(item.state.status),
+       ).length,
+       uploading: itemsList.filter((item) => item.state.status === "uploading")
+         .length,
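+       // Overall progress is the unweighted mean of per-item percentages,
+       // not a byte-weighted figure.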
+       progress:
+         itemsList.length > 0
+           ? Math.round(
+               itemsList.reduce((sum, item) => sum + item.state.progress, 0) /
+                 itemsList.length,
+             )
+           : 0,
+       totalBytesUploaded: itemsList.reduce(
+         (sum, item) => sum + item.state.bytesUploaded,
+         0,
+       ),
+       totalBytes: itemsList.reduce(
+         (sum, item) => sum + (item.state.totalBytes || 0),
+         0,
+       ),
+       isUploading: itemsList.some((item) => item.state.status === "uploading"),
+       isComplete:
+         itemsList.length > 0 &&
+         itemsList.every((item) =>
+           ["success", "error", "aborted"].includes(item.state.status),
+         ),
+     };
+   });
+
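+   // Queue files as idle items; nothing uploads until startAll() is called.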
+   const addFiles = (files: UploadInput[]) => {
+     const newItems: UploadItem[] = files.map((file) => {
+       const id = generateId();
+       return {
+         id,
+         file,
+         state: {
+           status: "idle",
+           progress: 0,
+           bytesUploaded: 0,
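+           // Size is only known up front for File inputs; for other input
+           // types it stays null until the client reports it via onProgress.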
+           totalBytes: file instanceof File ? file.size : null,
+           error: null,
+           result: null,
+         },
+       };
+     });
+
+     items.value = [...items.value, ...newItems];
+   };
+
+   const removeItem = (id: string) => {
+     const item = items.value.find((i) => i.id === id);
+     if (item && item.state.status === "uploading") {
+       // Abort before removing
+       const controller = abortControllers.value.get(id);
+       if (controller) {
+         controller.abort();
+         abortControllers.value.delete(id);
+       }
+     }
+
+     items.value = items.value.filter((item) => item.id !== id);
+     activeUploads.value.delete(id);
+   };
+
+   const abortUpload = (id: string) => {
+     const item = items.value.find((i) => i.id === id);
+     if (item && item.state.status === "uploading") {
+       const controller = abortControllers.value.get(id);
+       if (controller) {
+         controller.abort();
+         abortControllers.value.delete(id);
+       }
+
+       activeUploads.value.delete(id);
+
+       items.value = items.value.map((i) =>
+         i.id === id
+           ? { ...i, state: { ...i.state, status: "aborted" as const } }
+           : i,
+       );
+
+       // Try to start next upload in queue
+       startNextUpload();
+     }
+   };
+
+   const retryUpload = (id: string) => {
+     const item = items.value.find((i) => i.id === id);
+     if (item && ["error", "aborted"].includes(item.state.status)) {
+       items.value = items.value.map((i) =>
+         i.id === id
+           ? {
+               ...i,
+               state: { ...i.state, status: "idle" as const, error: null },
+             }
+           : i,
+       );
+
+       // Auto-start the upload
+       setTimeout(() => startNextUpload(), 0);
+     }
+   };
+
+   const startAll = () => {
+     // Start as many uploads as we can up to the concurrent limit
+     const idleItems = items.value.filter(
+       (item) => item.state.status === "idle",
+     );
+     const slotsAvailable = maxConcurrent - activeUploads.value.size;
+     const itemsToStart = idleItems.slice(0, slotsAvailable);
+
+     for (const _item of itemsToStart) {
+       startNextUpload();
+     }
+   };
+
+   const abortAll = () => {
+     items.value
+       .filter((item) => item.state.status === "uploading")
+       .forEach((item) => {
+         const controller = abortControllers.value.get(item.id);
+         if (controller) {
+           controller.abort();
+           abortControllers.value.delete(item.id);
+         }
+       });
+
+     activeUploads.value.clear();
+
+     // Update all uploading items to aborted status
+     items.value = items.value.map((item) =>
+       item.state.status === "uploading"
+         ? { ...item, state: { ...item.state, status: "aborted" as const } }
+         : item,
+     );
+   };
+
+   const retryFailed = () => {
+     const failedItems = items.value.filter((item) =>
+       ["error", "aborted"].includes(item.state.status),
+     );
+
+     if (failedItems.length > 0) {
+       items.value = items.value.map((item) =>
+         failedItems.some((f) => f.id === item.id)
+           ? {
+               ...item,
+               state: { ...item.state, status: "idle" as const, error: null },
+             }
+           : item,
+       );
+
+       // Auto-start uploads if we have capacity
+       setTimeout(startAll, 0);
+     }
+   };
+
+   const clearCompleted = () => {
+     items.value = items.value.filter(
+       (item) => !["success", "error", "aborted"].includes(item.state.status),
+     );
+   };
+
+   const clearAll = () => {
+     abortAll();
+     items.value = [];
+     activeUploads.value.clear();
+   };
+
+   const getItemsByStatus = (status: UploadStatus) => {
+     return items.value.filter((item) => item.state.status === status);
+   };
+
+   // Create aggregated metrics object that delegates to the upload client
+   const metrics: UploadMetrics = {
+     getInsights: () => uploadClient.client.getChunkingInsights(),
+     exportMetrics: () => uploadClient.client.exportMetrics(),
+     getNetworkMetrics: () => uploadClient.client.getNetworkMetrics(),
+     getNetworkCondition: () => uploadClient.client.getNetworkCondition(),
+     resetMetrics: () => uploadClient.client.resetMetrics(),
+   };
+
+   return {
+     state: readonly(state),
+     items: readonly(items),
+     addFiles,
+     removeItem,
+     removeFile: removeItem, // Alias for consistency
+     startAll,
+     abortUpload,
+     abortAll,
+     retryUpload,
+     retryFailed,
+     clearCompleted,
+     clearAll,
+     getItemsByStatus,
+     metrics,
+   };
+ }