nuxt-upload-kit 0.1.4 → 0.1.8
This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
- package/README.md +5 -74
- package/dist/module.json +1 -1
- package/dist/runtime/composables/useUploadKit/index.js +27 -21
- package/dist/runtime/composables/useUploadKit/plugins/storage/aws-s3.d.ts +63 -0
- package/dist/runtime/composables/useUploadKit/plugins/storage/aws-s3.js +111 -0
- package/dist/runtime/composables/useUploadKit/plugins/storage/azure-datalake.js +2 -2
- package/dist/runtime/composables/useUploadKit/plugins/storage/cloudflare-r2.d.ts +63 -0
- package/dist/runtime/composables/useUploadKit/plugins/storage/cloudflare-r2.js +111 -0
- package/dist/runtime/composables/useUploadKit/plugins/storage/firebase-storage.d.ts +71 -0
- package/dist/runtime/composables/useUploadKit/plugins/storage/firebase-storage.js +122 -0
- package/dist/runtime/composables/useUploadKit/plugins/storage/index.d.ts +15 -0
- package/dist/runtime/composables/useUploadKit/plugins/storage/index.js +3 -0
- package/dist/runtime/composables/useUploadKit/types.d.ts +11 -5
- package/dist/runtime/composables/useUploadKit/types.js +2 -1
- package/package.json +5 -2
package/README.md
CHANGED

@@ -6,6 +6,9 @@
 
 <p align="center">A powerful, plugin-based file upload manager for Nuxt applications.</p>
 
+> [!WARNING]
+> This module is experimental and under active development. The API may change between versions without notice. Use in production at your own risk.
+
 ## Features
 
 - 🔌 **Plugin System** - Extensible architecture with built-in plugins for validation, compression, and storage
@@ -14,7 +17,6 @@
 - 🎥 **Video Compression** - FFmpeg-powered video compression (optional)
 - ✅ **Validation** - File type, size, and count validation out of the box
 - 📊 **Progress Tracking** - Real-time upload progress with events
-- 🔄 **File Lifecycle** - Complete control over file preprocessing, processing, and post-upload
 
 ## Installation
 
@@ -91,80 +93,9 @@ const handleUpload = () => uploader.upload()
 </template>
 ```
 
-##
-
-### Azure Data Lake Storage
-
-```typescript
-import { PluginAzureDataLake } from "nuxt-upload-kit"
-
-const uploader = useUploadKit({
-  storage: PluginAzureDataLake({
-    sasURL: "https://your-storage.blob.core.windows.net/container?sv=...",
-    path: "uploads/images",
-  }),
-  thumbnails: true,
-})
-```
-
-## Configuration Options
-
-| Option             | Type                            | Default | Description                     |
-| ------------------ | ------------------------------- | ------- | ------------------------------- |
-| `storage`          | `StoragePlugin`                 | -       | Storage plugin for file uploads |
-| `plugins`          | `ProcessingPlugin[]`            | `[]`    | Additional processing plugins   |
-| `maxFiles`         | `number \| false`               | `false` | Maximum number of files         |
-| `maxFileSize`      | `number \| false`               | `false` | Maximum file size in bytes      |
-| `allowedFileTypes` | `string[] \| false`             | `false` | Allowed MIME types              |
-| `thumbnails`       | `boolean \| ThumbnailOptions`   | `false` | Enable thumbnail generation     |
-| `imageCompression` | `boolean \| CompressionOptions` | `false` | Enable image compression        |
-| `autoProceed`      | `boolean`                       | `false` | Auto-upload after adding files  |
-
-## Events
-
-```typescript
-uploader.on("file:added", (file) => console.log("Added:", file.name))
-uploader.on("file:removed", (file) => console.log("Removed:", file.name))
-uploader.on("file:error", ({ file, error }) => console.error(error))
-uploader.on("upload:start", (files) => console.log("Starting upload"))
-uploader.on("upload:progress", ({ file, progress }) => console.log(progress))
-uploader.on("upload:complete", (files) => console.log("Complete!"))
-```
-
-## Creating Custom Plugins
-
-```typescript
-import { defineProcessingPlugin } from "nuxt-upload-kit"
-
-const MyPlugin = defineProcessingPlugin<{ option: string }>((options) => ({
-  id: "my-plugin",
-  hooks: {
-    validate: async (file, context) => {
-      // Validation logic
-      return file
-    },
-    process: async (file, context) => {
-      // Processing logic
-      context.emit("processed", { file })
-      return file
-    },
-  },
-}))
-```
-
-## Optional Dependencies
+## Documentation
 
-For
-
-```bash
-pnpm add @ffmpeg/ffmpeg @ffmpeg/util
-```
-
-For Azure storage, install the Azure SDK:
-
-```bash
-pnpm add @azure/storage-file-datalake
-```
+For full documentation, visit [nuxt-upload-kit.vercel.app](https://nuxt-upload-kit.vercel.app)
 
 ## License
 
package/dist/runtime/composables/useUploadKit/index.js
CHANGED

@@ -21,7 +21,7 @@ const defaultOptions = {
   maxFiles: false,
   thumbnails: false,
   imageCompression: false,
-
+  autoUpload: false
 };
 export const useUploadKit = (_options = {}) => {
   const options = { ...defaultOptions, ..._options };
@@ -29,6 +29,7 @@ export const useUploadKit = (_options = {}) => {
   const emitter = mitt();
   const status = ref("waiting");
   const createdObjectURLs = /* @__PURE__ */ new Map();
+  let hasEmittedFilesUploaded = false;
   const pluginEmitFunctions = /* @__PURE__ */ new Map();
   const getPluginEmitFn = (pluginId) => {
     let emitFn = pluginEmitFunctions.get(pluginId);
@@ -199,7 +200,8 @@ This is deprecated. Use the 'storage' option instead:
     const fileToAdd = preprocessedFile || validatedFile;
     files.value.push(fileToAdd);
     emitter.emit("file:added", fileToAdd);
-
+    hasEmittedFilesUploaded = false;
+    if (options.autoUpload) {
       upload();
     }
     return validatedFile;
@@ -320,7 +322,8 @@ This is deprecated. Use the 'storage' option instead:
     files.value[index] = finalFile;
     emitter.emit("file:replaced", finalFile);
     emitter.emit("file:added", finalFile);
-
+    hasEmittedFilesUploaded = false;
+    const shouldUpload = shouldAutoUpload ?? options.autoUpload;
     if (shouldUpload) {
       upload();
     }
@@ -400,6 +403,11 @@ This is deprecated. Use the 'storage' option instead:
     }
     const completed = files.value.filter((f) => f.status === "complete");
     emitter.emit("upload:complete", completed);
+    const allComplete = files.value.every((f) => f.status === "complete");
+    if (allComplete && files.value.length > 0 && !hasEmittedFilesUploaded) {
+      hasEmittedFilesUploaded = true;
+      emitter.emit("files:uploaded", files.value);
+    }
   };
   const reset = () => {
     cleanupObjectURLs(createdObjectURLs);
@@ -431,25 +439,23 @@ This is deprecated. Use the 'storage' option instead:
     let currentFile = file;
     for (const plugin of options.plugins) {
       const hook = plugin.hooks[stage];
-      if (hook)
-
-
-
-
-
-
-
-
-
-
-
-
-        emitter.emit("file:error", { file: currentFile, error });
-      }
-      console.error(`Plugin ${plugin.id} ${stage} hook error:`, error);
-      return null;
+      if (!hook) continue;
+      try {
+        const context = {
+          files: files.value,
+          options,
+          emit: getPluginEmitFn(plugin.id)
+        };
+        const result = await callPluginHook(hook, stage, currentFile, context);
+        if (result && currentFile && "id" in result) {
+          currentFile = result;
+        }
+      } catch (error) {
+        if (currentFile) {
+          emitter.emit("file:error", { file: currentFile, error });
        }
+        console.error(`Plugin ${plugin.id} ${stage} hook error:`, error);
+        return null;
      }
    }
    return currentFile;
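The hunks above do three related things: `autoUpload` becomes a first-class default option, a `hasEmittedFilesUploaded` flag is introduced, and a new `files:uploaded` event fires exactly once when every file in the uploader has reached `complete` status (adding or replacing a file re-arms the flag). A minimal listening sketch using the `uploader.on(...)` API from the README; the handler bodies are illustrative only:

```typescript
import { useUploadKit } from "nuxt-upload-kit"

const uploader = useUploadKit({
  // storage: ... (any storage adapter)
  autoUpload: true, // option name as of 0.1.8
})

// Fires per upload() run, with the files that completed in that run
uploader.on("upload:complete", (completed) => {
  console.log(`${completed.length} file(s) finished in this batch`)
})

// New in this range: fires once when *all* files are complete,
// and re-arms when another file is added or replaced
uploader.on("files:uploaded", (files) => {
  console.log("Everything uploaded:", files.map((f) => f.name))
})
```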
package/dist/runtime/composables/useUploadKit/plugins/storage/aws-s3.d.ts
ADDED

@@ -0,0 +1,63 @@
+export interface AWSS3Options {
+    /**
+     * Function to get a presigned URL for uploading a file
+     * Your backend should generate this using AWS SDK's getSignedUrl
+     *
+     * @example
+     * ```typescript
+     * getPresignedUploadUrl: async (fileId, contentType) => {
+     *   const response = await fetch('/api/s3/presign', {
+     *     method: 'POST',
+     *     body: JSON.stringify({ key: fileId, contentType })
+     *   })
+     *   const { uploadUrl, publicUrl } = await response.json()
+     *   return { uploadUrl, publicUrl }
+     * }
+     * ```
+     */
+    getPresignedUploadUrl: (fileId: string, contentType: string, metadata: {
+        fileName: string;
+        fileSize: number;
+    }) => Promise<{
+        /** Presigned URL for PUT upload */
+        uploadUrl: string;
+        /** Public URL where the file will be accessible after upload */
+        publicUrl: string;
+    }>;
+    /**
+     * Optional function to get a presigned URL for downloading/reading a file
+     * Required if you want to use getRemoteFile hook
+     */
+    getPresignedDownloadUrl?: (fileId: string) => Promise<string>;
+    /**
+     * Optional function to delete a file
+     * Your backend should handle the actual deletion
+     */
+    deleteFile?: (fileId: string) => Promise<void>;
+    /**
+     * Number of retry attempts for failed operations
+     * @default 3
+     */
+    retries?: number;
+    /**
+     * Initial delay between retries in milliseconds
+     * Uses exponential backoff: delay * (2 ^ attempt)
+     * @default 1000 (1 second)
+     */
+    retryDelay?: number;
+}
+export interface AWSS3UploadResult {
+    /**
+     * Public URL to the uploaded file
+     */
+    url: string;
+    /**
+     * S3 object key (file ID used for upload)
+     */
+    key: string;
+    /**
+     * ETag of the uploaded object (from response headers)
+     */
+    etag?: string;
+}
+export declare const PluginAWSS3: (options: AWSS3Options) => import("../../types.js").StoragePlugin<AWSS3UploadResult, Record<string, never>>;
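Wired up on the client, `AWSS3Options` might look like the sketch below. It assumes `PluginAWSS3` is re-exported from the package root the way the README imports `PluginAzureDataLake`, and it uses the `/api/s3/presign` endpoint from the adapter's own `@example`:

```typescript
import { useUploadKit, PluginAWSS3 } from "nuxt-upload-kit"

const uploader = useUploadKit({
  storage: PluginAWSS3({
    // Called once per file; the backend answers with both URLs
    getPresignedUploadUrl: async (fileId, contentType, { fileName, fileSize }) => {
      const response = await fetch("/api/s3/presign", {
        method: "POST",
        body: JSON.stringify({ key: fileId, contentType, fileName, fileSize }),
      })
      // Must resolve to { uploadUrl, publicUrl } per AWSS3Options
      return response.json()
    },
    retries: 3,       // defaults shown explicitly
    retryDelay: 1000, // ms, doubled on each retry
  }),
})
```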
package/dist/runtime/composables/useUploadKit/plugins/storage/aws-s3.js
ADDED

@@ -0,0 +1,111 @@
+import { defineStorageAdapter } from "../../types.js";
+export const PluginAWSS3 = defineStorageAdapter((options) => {
+  const maxRetries = options.retries ?? 3;
+  const initialRetryDelay = options.retryDelay ?? 1e3;
+  async function withRetry(operation, operationName) {
+    let lastError;
+    for (let attempt = 0; attempt <= maxRetries; attempt++) {
+      try {
+        return await operation();
+      } catch (error) {
+        lastError = error;
+        if (attempt === maxRetries) {
+          break;
+        }
+        const delay = initialRetryDelay * Math.pow(2, attempt);
+        if (import.meta.dev) {
+          console.warn(
+            `[S3 Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
+            error
+          );
+        }
+        await new Promise((resolve) => setTimeout(resolve, delay));
+      }
+    }
+    throw new Error(`[S3 Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
+  }
+  return {
+    id: "aws-s3-storage",
+    hooks: {
+      /**
+       * Upload file to S3 using presigned URL
+       */
+      async upload(file, context) {
+        if (file.source !== "local" || file.data === null) {
+          throw new Error("Cannot upload remote file - no local data available");
+        }
+        return withRetry(async () => {
+          const { uploadUrl, publicUrl } = await options.getPresignedUploadUrl(file.id, file.mimeType, {
+            fileName: file.name,
+            fileSize: file.size
+          });
+          const etag = await uploadWithProgress(uploadUrl, file.data, file.mimeType, context.onProgress);
+          return {
+            url: publicUrl,
+            key: file.id,
+            etag
+          };
+        }, `Upload file "${file.name}"`);
+      },
+      /**
+       * Get remote file metadata
+       */
+      async getRemoteFile(fileId, _context) {
+        if (!options.getPresignedDownloadUrl) {
+          throw new Error("[S3 Storage] getPresignedDownloadUrl is required to fetch remote files");
+        }
+        return withRetry(async () => {
+          const downloadUrl = await options.getPresignedDownloadUrl(fileId);
+          const response = await fetch(downloadUrl, { method: "HEAD" });
+          if (!response.ok) {
+            throw new Error(`Failed to get file metadata: ${response.status}`);
+          }
+          return {
+            size: Number.parseInt(response.headers.get("content-length") || "0", 10),
+            mimeType: response.headers.get("content-type") || "application/octet-stream",
+            remoteUrl: downloadUrl
+          };
+        }, `Get remote file "${fileId}"`);
+      },
+      /**
+       * Delete file from S3
+       */
+      async remove(file, _context) {
+        if (!options.deleteFile) {
+          throw new Error("[S3 Storage] deleteFile callback is required to delete files");
+        }
+        return withRetry(async () => {
+          await options.deleteFile(file.id);
+        }, `Delete file "${file.name}"`);
+      }
+    }
+  };
+});
+function uploadWithProgress(url, data, contentType, onProgress) {
+  return new Promise((resolve, reject) => {
+    const xhr = new XMLHttpRequest();
+    xhr.upload.addEventListener("progress", (event) => {
+      if (event.lengthComputable) {
+        const percentage = Math.round(event.loaded / event.total * 100);
+        onProgress(percentage);
+      }
+    });
+    xhr.addEventListener("load", () => {
+      if (xhr.status >= 200 && xhr.status < 300) {
+        const etag = xhr.getResponseHeader("ETag")?.replaceAll('"', "");
+        resolve(etag);
+      } else {
+        reject(new Error(`Upload failed with status ${xhr.status}: ${xhr.statusText}`));
+      }
+    });
+    xhr.addEventListener("error", () => {
+      reject(new Error("Upload failed due to network error"));
+    });
+    xhr.addEventListener("abort", () => {
+      reject(new Error("Upload was aborted"));
+    });
+    xhr.open("PUT", url);
+    xhr.setRequestHeader("Content-Type", contentType);
+    xhr.send(data);
+  });
+}
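Note the adapter never sees AWS credentials: it simply PUTs to whatever presigned URL the backend returns, reporting progress through an `XMLHttpRequest` upload listener. The backend half, which the JSDoc says should use the AWS SDK's `getSignedUrl`, could be a Nitro route like this hypothetical sketch. The `@aws-sdk/s3-request-presigner` package, bucket name, region, and public-URL scheme are all assumptions (the diff itself only adds `@aws-sdk/client-s3` and `@aws-sdk/lib-storage` as devDependencies):

```typescript
// server/api/s3/presign.post.ts (hypothetical route)
import { defineEventHandler, readBody } from "h3"
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

const s3 = new S3Client({ region: "us-east-1" }) // assumption
const BUCKET = "my-uploads" // assumption

export default defineEventHandler(async (event) => {
  const { key, contentType } = await readBody(event)
  const command = new PutObjectCommand({ Bucket: BUCKET, Key: key, ContentType: contentType })
  // Short-lived PUT URL consumed by uploadWithProgress above
  const uploadUrl = await getSignedUrl(s3, command, { expiresIn: 300 })
  return {
    uploadUrl,
    publicUrl: `https://${BUCKET}.s3.amazonaws.com/${key}`,
  }
})
```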
package/dist/runtime/composables/useUploadKit/plugins/storage/azure-datalake.js
CHANGED

@@ -1,7 +1,7 @@
 import { ref } from "vue";
 import { DataLakeDirectoryClient } from "@azure/storage-file-datalake";
-import {
-export const PluginAzureDataLake =
+import { defineStorageAdapter } from "../../types.js";
+export const PluginAzureDataLake = defineStorageAdapter((options) => {
   const sasURL = ref(options.sasURL || "");
   let refreshPromise = null;
   const directoryCheckedCache = /* @__PURE__ */ new Set();
package/dist/runtime/composables/useUploadKit/plugins/storage/cloudflare-r2.d.ts
ADDED

@@ -0,0 +1,63 @@
+export interface CloudflareR2Options {
+    /**
+     * Function to get a presigned URL for uploading a file
+     * Your backend should generate this using AWS SDK's getSignedUrl with R2 endpoint
+     *
+     * @example
+     * ```typescript
+     * getPresignedUploadUrl: async (fileId, contentType) => {
+     *   const response = await fetch('/api/r2/presign', {
+     *     method: 'POST',
+     *     body: JSON.stringify({ key: fileId, contentType })
+     *   })
+     *   const { uploadUrl, publicUrl } = await response.json()
+     *   return { uploadUrl, publicUrl }
+     * }
+     * ```
+     */
+    getPresignedUploadUrl: (fileId: string, contentType: string, metadata: {
+        fileName: string;
+        fileSize: number;
+    }) => Promise<{
+        /** Presigned URL for PUT upload */
+        uploadUrl: string;
+        /** Public URL where the file will be accessible after upload (r2.dev or custom domain) */
+        publicUrl: string;
+    }>;
+    /**
+     * Optional function to get a presigned URL for downloading/reading a file
+     * Required if you want to use getRemoteFile hook
+     */
+    getPresignedDownloadUrl?: (fileId: string) => Promise<string>;
+    /**
+     * Optional function to delete a file
+     * Your backend should handle the actual deletion
+     */
+    deleteFile?: (fileId: string) => Promise<void>;
+    /**
+     * Number of retry attempts for failed operations
+     * @default 3
+     */
+    retries?: number;
+    /**
+     * Initial delay between retries in milliseconds
+     * Uses exponential backoff: delay * (2 ^ attempt)
+     * @default 1000 (1 second)
+     */
+    retryDelay?: number;
+}
+export interface CloudflareR2UploadResult {
+    /**
+     * Public URL to the uploaded file
+     */
+    url: string;
+    /**
+     * R2 object key (file ID used for upload)
+     */
+    key: string;
+    /**
+     * ETag of the uploaded object (from response headers)
+     */
+    etag?: string;
+}
+export declare const PluginCloudflareR2: (options: CloudflareR2Options) => import("../../types.js").StoragePlugin<CloudflareR2UploadResult, Record<string, never>>;
package/dist/runtime/composables/useUploadKit/plugins/storage/cloudflare-r2.js
ADDED

@@ -0,0 +1,111 @@
+import { defineStorageAdapter } from "../../types.js";
+export const PluginCloudflareR2 = defineStorageAdapter((options) => {
+  const maxRetries = options.retries ?? 3;
+  const initialRetryDelay = options.retryDelay ?? 1e3;
+  async function withRetry(operation, operationName) {
+    let lastError;
+    for (let attempt = 0; attempt <= maxRetries; attempt++) {
+      try {
+        return await operation();
+      } catch (error) {
+        lastError = error;
+        if (attempt === maxRetries) {
+          break;
+        }
+        const delay = initialRetryDelay * Math.pow(2, attempt);
+        if (import.meta.dev) {
+          console.warn(
+            `[R2 Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
+            error
+          );
+        }
+        await new Promise((resolve) => setTimeout(resolve, delay));
+      }
+    }
+    throw new Error(`[R2 Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
+  }
+  return {
+    id: "cloudflare-r2-storage",
+    hooks: {
+      /**
+       * Upload file to R2 using presigned URL
+       */
+      async upload(file, context) {
+        if (file.source !== "local" || file.data === null) {
+          throw new Error("Cannot upload remote file - no local data available");
+        }
+        return withRetry(async () => {
+          const { uploadUrl, publicUrl } = await options.getPresignedUploadUrl(file.id, file.mimeType, {
+            fileName: file.name,
+            fileSize: file.size
+          });
+          const etag = await uploadWithProgress(uploadUrl, file.data, file.mimeType, context.onProgress);
+          return {
+            url: publicUrl,
+            key: file.id,
+            etag
+          };
+        }, `Upload file "${file.name}"`);
+      },
+      /**
+       * Get remote file metadata from R2
+       */
+      async getRemoteFile(fileId, _context) {
+        if (!options.getPresignedDownloadUrl) {
+          throw new Error("[R2 Storage] getPresignedDownloadUrl is required to fetch remote files");
+        }
+        return withRetry(async () => {
+          const downloadUrl = await options.getPresignedDownloadUrl(fileId);
+          const response = await fetch(downloadUrl, { method: "HEAD" });
+          if (!response.ok) {
+            throw new Error(`Failed to get file metadata: ${response.status}`);
+          }
+          return {
+            size: Number.parseInt(response.headers.get("content-length") || "0", 10),
+            mimeType: response.headers.get("content-type") || "application/octet-stream",
+            remoteUrl: downloadUrl
+          };
+        }, `Get remote file "${fileId}"`);
+      },
+      /**
+       * Delete file from R2
+       */
+      async remove(file, _context) {
+        if (!options.deleteFile) {
+          throw new Error("[R2 Storage] deleteFile callback is required to delete files");
+        }
+        return withRetry(async () => {
+          await options.deleteFile(file.id);
+        }, `Delete file "${file.name}"`);
+      }
+    }
+  };
+});
+function uploadWithProgress(url, data, contentType, onProgress) {
+  return new Promise((resolve, reject) => {
+    const xhr = new XMLHttpRequest();
+    xhr.upload.addEventListener("progress", (event) => {
+      if (event.lengthComputable) {
+        const percentage = Math.round(event.loaded / event.total * 100);
+        onProgress(percentage);
+      }
+    });
+    xhr.addEventListener("load", () => {
+      if (xhr.status >= 200 && xhr.status < 300) {
+        const etag = xhr.getResponseHeader("ETag")?.replaceAll('"', "");
+        resolve(etag);
+      } else {
+        reject(new Error(`Upload failed with status ${xhr.status}: ${xhr.statusText}`));
+      }
+    });
+    xhr.addEventListener("error", () => {
+      reject(new Error("Upload failed due to network error"));
+    });
+    xhr.addEventListener("abort", () => {
+      reject(new Error("Upload was aborted"));
+    });
+    xhr.open("PUT", url);
+    xhr.setRequestHeader("Content-Type", contentType);
+    xhr.send(data);
+  });
+}
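Apart from its `id` and log prefix, this file is identical to the S3 adapter, which works because R2 exposes an S3-compatible API; only the server-side presigner changes. A sketch of pointing an `S3Client` at R2 for that purpose, with hypothetical environment variable names:

```typescript
import { S3Client } from "@aws-sdk/client-s3"

// R2 is addressed through its S3-compatible endpoint; the region is the
// literal string "auto". Account/credential variables are placeholders.
const r2 = new S3Client({
  region: "auto",
  endpoint: `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`,
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID!,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
  },
})
// Presigning then proceeds exactly as in the S3 sketch earlier.
```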
package/dist/runtime/composables/useUploadKit/plugins/storage/firebase-storage.d.ts
ADDED

@@ -0,0 +1,71 @@
+import { type FirebaseStorage } from "firebase/storage";
+export interface FirebaseStorageOptions {
+    /**
+     * Firebase Storage instance
+     * You must initialize Firebase and pass the storage instance
+     * @example
+     * ```typescript
+     * import { getStorage } from 'firebase/storage'
+     * import { initializeApp } from 'firebase/app'
+     *
+     * const app = initializeApp({ ... })
+     * const storage = getStorage(app)
+     *
+     * PluginFirebaseStorage({ storage })
+     * ```
+     */
+    storage: FirebaseStorage;
+    /**
+     * Optional path prefix (folder) for uploaded files
+     * @example "uploads/images"
+     */
+    path?: string;
+    /**
+     * Custom metadata to attach to uploaded files
+     */
+    customMetadata?: Record<string, string>;
+    /**
+     * Cache-Control header for uploaded files
+     * @example "max-age=31536000" for 1 year caching
+     */
+    cacheControl?: string;
+    /**
+     * Content-Disposition header
+     * @example "attachment; filename=file.pdf"
+     */
+    contentDisposition?: string;
+    /**
+     * Number of retry attempts for failed operations
+     * @default 3
+     */
+    retries?: number;
+    /**
+     * Initial delay between retries in milliseconds
+     * Uses exponential backoff: delay * (2 ^ attempt)
+     * @default 1000 (1 second)
+     */
+    retryDelay?: number;
+}
+export interface FirebaseStorageUploadResult {
+    /**
+     * Public download URL for the uploaded file
+     */
+    url: string;
+    /**
+     * Full path in Firebase Storage
+     */
+    fullPath: string;
+    /**
+     * Storage bucket name
+     */
+    bucket: string;
+    /**
+     * File generation (version identifier)
+     */
+    generation?: string;
+    /**
+     * MD5 hash of the uploaded content
+     */
+    md5Hash?: string;
+}
+export declare const PluginFirebaseStorage: (options: FirebaseStorageOptions) => import("../../types.js").StoragePlugin<FirebaseStorageUploadResult, Record<string, never>>;
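Unlike the presigned-URL adapters, the Firebase adapter talks to Firebase Storage through the SDK itself, so you hand it a live `FirebaseStorage` instance. Expanding the `@example` above into a fuller wiring sketch (the config object is a placeholder, and the root re-export of `PluginFirebaseStorage` is assumed):

```typescript
import { initializeApp } from "firebase/app"
import { getStorage } from "firebase/storage"
import { useUploadKit, PluginFirebaseStorage } from "nuxt-upload-kit"

const app = initializeApp({ /* your Firebase config */ })
const storage = getStorage(app)

const uploader = useUploadKit({
  storage: PluginFirebaseStorage({
    storage,
    path: "uploads/images",           // optional folder prefix
    cacheControl: "max-age=31536000", // optional, per FirebaseStorageOptions
  }),
})
```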
package/dist/runtime/composables/useUploadKit/plugins/storage/firebase-storage.js
ADDED

@@ -0,0 +1,122 @@
+import {
+  ref as storageRef,
+  uploadBytesResumable,
+  getDownloadURL,
+  getMetadata,
+  deleteObject
+} from "firebase/storage";
+import { defineStorageAdapter } from "../../types.js";
+export const PluginFirebaseStorage = defineStorageAdapter((options) => {
+  const maxRetries = options.retries ?? 3;
+  const initialRetryDelay = options.retryDelay ?? 1e3;
+  async function withRetry(operation, operationName) {
+    let lastError;
+    for (let attempt = 0; attempt <= maxRetries; attempt++) {
+      try {
+        return await operation();
+      } catch (error) {
+        lastError = error;
+        if (attempt === maxRetries) {
+          break;
+        }
+        const delay = initialRetryDelay * Math.pow(2, attempt);
+        if (import.meta.dev) {
+          console.warn(
+            `[Firebase Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
+            error
+          );
+        }
+        await new Promise((resolve) => setTimeout(resolve, delay));
+      }
+    }
+    throw new Error(`[Firebase Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
+  }
+  const buildPath = (fileId) => {
+    if (options.path) {
+      const cleanPath = options.path.replace(/^\/+/, "").replace(/\/+$/, "");
+      return `${cleanPath}/${fileId}`;
+    }
+    return fileId;
+  };
+  const getStorageRef = (fileId) => {
+    const path = buildPath(fileId);
+    return storageRef(options.storage, path);
+  };
+  const uploadToFirebase = (fileId, data, mimeType, fileName, onProgress) => {
+    const fileRef = getStorageRef(fileId);
+    const metadata = {
+      contentType: mimeType,
+      cacheControl: options.cacheControl,
+      contentDisposition: options.contentDisposition,
+      customMetadata: {
+        ...options.customMetadata,
+        originalName: fileName,
+        size: String(data.size)
+      }
+    };
+    return new Promise((resolve, reject) => {
+      const uploadTask = uploadBytesResumable(fileRef, data, metadata);
+      const handleProgress = (snapshot) => {
+        const percentage = Math.round(snapshot.bytesTransferred / snapshot.totalBytes * 100);
+        onProgress(percentage);
+      };
+      const handleError = (error) => reject(error);
+      const handleComplete = async () => {
+        try {
+          const downloadURL = await getDownloadURL(uploadTask.snapshot.ref);
+          const uploadMetadata = uploadTask.snapshot.metadata;
+          resolve({
+            url: downloadURL,
+            fullPath: uploadMetadata.fullPath,
+            bucket: uploadMetadata.bucket,
+            generation: uploadMetadata.generation,
+            md5Hash: uploadMetadata.md5Hash
+          });
+        } catch (error) {
+          reject(error);
+        }
+      };
+      uploadTask.on("state_changed", handleProgress, handleError, handleComplete);
+    });
+  };
+  return {
+    id: "firebase-storage",
+    hooks: {
+      /**
+       * Upload file to Firebase Storage
+       */
+      async upload(file, context) {
+        if (file.source !== "local" || file.data === null) {
+          throw new Error("Cannot upload remote file - no local data available");
+        }
+        return withRetry(
+          () => uploadToFirebase(file.id, file.data, file.mimeType, file.name, context.onProgress),
+          `Upload file "${file.name}"`
+        );
+      },
+      /**
+       * Get remote file metadata from Firebase Storage
+       */
+      async getRemoteFile(fileId, _context) {
+        return withRetry(async () => {
+          const fileRef = getStorageRef(fileId);
+          const [metadata, downloadURL] = await Promise.all([getMetadata(fileRef), getDownloadURL(fileRef)]);
+          return {
+            size: metadata.size,
+            mimeType: metadata.contentType || "application/octet-stream",
+            remoteUrl: downloadURL
+          };
+        }, `Get remote file "${fileId}"`);
+      },
+      /**
+       * Delete file from Firebase Storage
+       */
+      async remove(file, _context) {
+        return withRetry(async () => {
+          const fileRef = getStorageRef(file.id);
+          await deleteObject(fileRef);
+        }, `Delete file "${file.name}"`);
+      }
+    }
+  };
+});
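All three new adapters share the same `withRetry` helper: `retries` bounds the extra attempts and `retryDelay` seeds an exponential backoff of `delay * 2^attempt`. With the defaults (`retries: 3`, `retryDelay: 1000`), the schedule works out to:

```typescript
const retries = 3
const retryDelay = 1000 // ms
const delays = Array.from({ length: retries }, (_, attempt) => retryDelay * 2 ** attempt)
console.log(delays) // [1000, 2000, 4000]; a 4th failure throws "failed after 4 attempts"
```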
package/dist/runtime/composables/useUploadKit/plugins/storage/index.d.ts
CHANGED

@@ -8,3 +8,18 @@
  * If you need multiple storage destinations, create multiple uploader instances.
  */
 export { PluginAzureDataLake, type AzureDataLakeOptions, type AzureUploadResult } from "./azure-datalake.js";
+/**
+ * AWS S3 Storage Adapter (also works with S3-compatible services: MinIO, DigitalOcean Spaces, Wasabi, Backblaze B2)
+ * @experimental This adapter is experimental and may change in future releases.
+ */
+export { PluginAWSS3, type AWSS3Options, type AWSS3UploadResult } from "./aws-s3.js";
+/**
+ * Cloudflare R2 Storage Adapter
+ * @experimental This adapter is experimental and may change in future releases.
+ */
+export { PluginCloudflareR2, type CloudflareR2Options, type CloudflareR2UploadResult } from "./cloudflare-r2.js";
+/**
+ * Firebase Storage Adapter
+ * @experimental This adapter is experimental and may change in future releases.
+ */
+export { PluginFirebaseStorage, type FirebaseStorageOptions, type FirebaseStorageUploadResult } from "./firebase-storage.js";
package/dist/runtime/composables/useUploadKit/types.d.ts
CHANGED

@@ -230,7 +230,7 @@ export interface UploadOptions {
      * Automatically start upload after files are added
      * @default false
      */
-
+    autoUpload?: boolean;
 }
 export interface ThumbnailOptions {
     width?: number;
@@ -265,6 +265,8 @@ type CoreUploaderEvents<TUploadResult = any> = {
         oldIndex: number;
         newIndex: number;
     };
+    /** Emitted when all files have finished uploading (all files have 'complete' status) */
+    "files:uploaded": Array<Readonly<UploadFile<TUploadResult>>>;
 };
 export type UploaderEvents<TUploadResult = any> = CoreUploaderEvents<TUploadResult>;
 /**
@@ -390,13 +392,13 @@ export interface Plugin<TUploadResult = any, TPluginEvents extends Record<string
  */
 export declare function defineProcessingPlugin<TPluginOptions = unknown, TPluginEvents extends Record<string, any> = Record<string, never>>(factory: (options: TPluginOptions) => ProcessingPlugin<any, TPluginEvents>): (options: TPluginOptions) => ProcessingPlugin<any, TPluginEvents>;
 /**
- * Define a storage
+ * Define a storage adapter (Azure, S3, GCS, etc.)
  *
- * Storage
+ * Storage adapters MUST implement the `upload` hook and should return an object with a `url` property.
  *
  * @example Azure Storage
  * ```typescript
- * export const PluginAzureDataLake =
+ * export const PluginAzureDataLake = defineStorageAdapter<AzureOptions, AzureEvents>((options) => ({
  *   id: 'azure-datalake-storage',
  *   hooks: {
  *     upload: async (file, context) => {
@@ -414,7 +416,11 @@ export declare function defineProcessingPlugin<TPluginOptions = unknown, TPlugin
  * }))
 * ```
 */
-export declare function
+export declare function defineStorageAdapter<TPluginOptions = unknown, TUploadResult = any, TPluginEvents extends Record<string, any> = Record<string, never>>(factory: (options: TPluginOptions) => StoragePlugin<TUploadResult, TPluginEvents>): (options: TPluginOptions) => StoragePlugin<TUploadResult, TPluginEvents>;
+/**
+ * @deprecated Use `defineStorageAdapter` instead
+ */
+export declare const defineStoragePlugin: typeof defineStorageAdapter;
 /**
  * Define an uploader plugin with type safety, context access, and custom events.
  * This is the universal plugin factory for all plugin types (storage, validators, processors).
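`defineStorageAdapter` (with `defineStoragePlugin` kept as a deprecated alias) is also the extension point for custom backends. A minimal adapter against the contract documented above, implementing only the mandatory `upload` hook; the endpoint and response shape are hypothetical:

```typescript
import { defineStorageAdapter } from "nuxt-upload-kit"

export const PluginMyBackend = defineStorageAdapter<{ endpoint: string }>((options) => ({
  id: "my-backend-storage",
  hooks: {
    upload: async (file, context) => {
      // Same guard the built-in adapters use for remote files
      if (file.source !== "local" || file.data === null) {
        throw new Error("Cannot upload remote file - no local data available")
      }
      const body = new FormData()
      body.append("file", file.data, file.name)
      const res = await fetch(options.endpoint, { method: "POST", body })
      if (!res.ok) throw new Error(`Upload failed: ${res.status}`)
      context.onProgress(100) // fetch offers no streaming upload progress
      // Must resolve to an object with a `url` property
      return { url: (await res.json()).url }
    },
  },
}))
```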
package/dist/runtime/composables/useUploadKit/types.js
CHANGED

@@ -1,9 +1,10 @@
 export function defineProcessingPlugin(factory) {
   return factory;
 }
-export function
+export function defineStorageAdapter(factory) {
   return factory;
 }
+export const defineStoragePlugin = defineStorageAdapter;
 export function defineUploaderPlugin(factory) {
   return factory;
 }
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "nuxt-upload-kit",
-  "version": "0.1.4",
+  "version": "0.1.8",
   "description": "A powerful, plugin-based file upload manager for Nuxt applications",
   "license": "MIT",
   "repository": "https://github.com/genu/nuxt-upload-kit.git",
@@ -27,6 +27,8 @@
     "mitt": "^3.0.1"
   },
   "devDependencies": {
+    "@aws-sdk/client-s3": "^3.969.0",
+    "@aws-sdk/lib-storage": "^3.969.0",
     "@azure/storage-file-datalake": "^12.28.1",
     "@ffmpeg/ffmpeg": "0.12.15",
     "@ffmpeg/util": "0.12.2",
@@ -42,7 +44,8 @@
     "eslint": "^9.39.2",
     "eslint-config-prettier": "10.1.8",
     "eslint-plugin-prettier": "5.5.5",
-    "
+    "firebase": "^12.7.0",
+    "happy-dom": "^20.2.0",
     "nuxt": "^4.2.2",
     "prettier": "^3.7.4",
     "typescript": "~5.9.3",