nuxt-upload-kit 0.1.5 → 0.1.8

package/README.md CHANGED
@@ -6,6 +6,9 @@

  <p align="center">A powerful, plugin-based file upload manager for Nuxt applications.</p>

+ > [!WARNING]
+ > This module is experimental and under active development. The API may change between versions without notice. Use in production at your own risk.
+
  ## Features

  - 🔌 **Plugin System** - Extensible architecture with built-in plugins for validation, compression, and storage
package/dist/module.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "nuxt-upload-kit",
    "configKey": "uploadKit",
-   "version": "0.1.5",
+   "version": "0.1.8",
    "builder": {
      "@nuxt/module-builder": "1.0.2",
      "unbuild": "unknown"
package/dist/…/aws-s3.d.ts ADDED
@@ -0,0 +1,63 @@
+ export interface AWSS3Options {
+   /**
+    * Function to get a presigned URL for uploading a file
+    * Your backend should generate this using AWS SDK's getSignedUrl
+    *
+    * @example
+    * ```typescript
+    * getPresignedUploadUrl: async (fileId, contentType) => {
+    *   const response = await fetch('/api/s3/presign', {
+    *     method: 'POST',
+    *     body: JSON.stringify({ key: fileId, contentType })
+    *   })
+    *   const { uploadUrl, publicUrl } = await response.json()
+    *   return { uploadUrl, publicUrl }
+    * }
+    * ```
+    */
+   getPresignedUploadUrl: (fileId: string, contentType: string, metadata: {
+     fileName: string;
+     fileSize: number;
+   }) => Promise<{
+     /** Presigned URL for PUT upload */
+     uploadUrl: string;
+     /** Public URL where the file will be accessible after upload */
+     publicUrl: string;
+   }>;
+   /**
+    * Optional function to get a presigned URL for downloading/reading a file
+    * Required if you want to use getRemoteFile hook
+    */
+   getPresignedDownloadUrl?: (fileId: string) => Promise<string>;
+   /**
+    * Optional function to delete a file
+    * Your backend should handle the actual deletion
+    */
+   deleteFile?: (fileId: string) => Promise<void>;
+   /**
+    * Number of retry attempts for failed operations
+    * @default 3
+    */
+   retries?: number;
+   /**
+    * Initial delay between retries in milliseconds
+    * Uses exponential backoff: delay * (2 ^ attempt)
+    * @default 1000 (1 second)
+    */
+   retryDelay?: number;
+ }
+ export interface AWSS3UploadResult {
+   /**
+    * Public URL to the uploaded file
+    */
+   url: string;
+   /**
+    * S3 object key (file ID used for upload)
+    */
+   key: string;
+   /**
+    * ETag of the uploaded object (from response headers)
+    */
+   etag?: string;
+ }
+ export declare const PluginAWSS3: (options: AWSS3Options) => import("../../types.js").StoragePlugin<AWSS3UploadResult, Record<string, never>>;
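
The adapter delegates presigning entirely to your backend. For context, a minimal Nitro route that could serve the `/api/s3/presign` endpoint used in the JSDoc example might look like the sketch below. It assumes `@aws-sdk/s3-request-presigner` (not among the devDependencies added in this diff), hypothetical `S3_BUCKET`/`S3_REGION` environment variables, and a publicly readable bucket for `publicUrl`.

```typescript
// server/api/s3/presign.post.ts (hypothetical backend route, not shipped by this package)
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
import { defineEventHandler, readBody } from 'h3' // auto-imported in Nuxt server routes

const client = new S3Client({ region: process.env.S3_REGION }) // credentials via the default provider chain
const bucket = process.env.S3_BUCKET! // assumed env var

export default defineEventHandler(async (event) => {
  // Body shape matches what the adapter's JSDoc example sends
  const { key, contentType } = await readBody<{ key: string; contentType: string }>(event)

  const uploadUrl = await getSignedUrl(
    client,
    new PutObjectCommand({ Bucket: bucket, Key: key, ContentType: contentType }),
    { expiresIn: 300 }, // presigned URL valid for 5 minutes
  )

  // Assumes a publicly readable bucket; substitute a CDN or custom domain otherwise
  const publicUrl = `https://${bucket}.s3.${process.env.S3_REGION}.amazonaws.com/${key}`
  return { uploadUrl, publicUrl }
})
```

Because `ContentType` is part of the signature, the `Content-Type` header the adapter sets on its PUT request must match the value presigned here.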
package/dist/…/aws-s3.js ADDED
@@ -0,0 +1,111 @@
+ import { defineStorageAdapter } from "../../types.js";
+ export const PluginAWSS3 = defineStorageAdapter((options) => {
+   const maxRetries = options.retries ?? 3;
+   const initialRetryDelay = options.retryDelay ?? 1e3;
+   async function withRetry(operation, operationName) {
+     let lastError;
+     for (let attempt = 0; attempt <= maxRetries; attempt++) {
+       try {
+         return await operation();
+       } catch (error) {
+         lastError = error;
+         if (attempt === maxRetries) {
+           break;
+         }
+         const delay = initialRetryDelay * Math.pow(2, attempt);
+         if (import.meta.dev) {
+           console.warn(
+             `[S3 Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
+             error
+           );
+         }
+         await new Promise((resolve) => setTimeout(resolve, delay));
+       }
+     }
+     throw new Error(`[S3 Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
+   }
+   return {
+     id: "aws-s3-storage",
+     hooks: {
+       /**
+        * Upload file to S3 using presigned URL
+        */
+       async upload(file, context) {
+         if (file.source !== "local" || file.data === null) {
+           throw new Error("Cannot upload remote file - no local data available");
+         }
+         return withRetry(async () => {
+           const { uploadUrl, publicUrl } = await options.getPresignedUploadUrl(file.id, file.mimeType, {
+             fileName: file.name,
+             fileSize: file.size
+           });
+           const etag = await uploadWithProgress(uploadUrl, file.data, file.mimeType, context.onProgress);
+           return {
+             url: publicUrl,
+             key: file.id,
+             etag
+           };
+         }, `Upload file "${file.name}"`);
+       },
+       /**
+        * Get remote file metadata
+        */
+       async getRemoteFile(fileId, _context) {
+         if (!options.getPresignedDownloadUrl) {
+           throw new Error("[S3 Storage] getPresignedDownloadUrl is required to fetch remote files");
+         }
+         return withRetry(async () => {
+           const downloadUrl = await options.getPresignedDownloadUrl(fileId);
+           const response = await fetch(downloadUrl, { method: "HEAD" });
+           if (!response.ok) {
+             throw new Error(`Failed to get file metadata: ${response.status}`);
+           }
+           return {
+             size: Number.parseInt(response.headers.get("content-length") || "0", 10),
+             mimeType: response.headers.get("content-type") || "application/octet-stream",
+             remoteUrl: downloadUrl
+           };
+         }, `Get remote file "${fileId}"`);
+       },
+       /**
+        * Delete file from S3
+        */
+       async remove(file, _context) {
+         if (!options.deleteFile) {
+           throw new Error("[S3 Storage] deleteFile callback is required to delete files");
+         }
+         return withRetry(async () => {
+           await options.deleteFile(file.id);
+         }, `Delete file "${file.name}"`);
+       }
+     }
+   };
+ });
+ function uploadWithProgress(url, data, contentType, onProgress) {
+   return new Promise((resolve, reject) => {
+     const xhr = new XMLHttpRequest();
+     xhr.upload.addEventListener("progress", (event) => {
+       if (event.lengthComputable) {
+         const percentage = Math.round(event.loaded / event.total * 100);
+         onProgress(percentage);
+       }
+     });
+     xhr.addEventListener("load", () => {
+       if (xhr.status >= 200 && xhr.status < 300) {
+         const etag = xhr.getResponseHeader("ETag")?.replaceAll('"', "");
+         resolve(etag);
+       } else {
+         reject(new Error(`Upload failed with status ${xhr.status}: ${xhr.statusText}`));
+       }
+     });
+     xhr.addEventListener("error", () => {
+       reject(new Error("Upload failed due to network error"));
+     });
+     xhr.addEventListener("abort", () => {
+       reject(new Error("Upload was aborted"));
+     });
+     xhr.open("PUT", url);
+     xhr.setRequestHeader("Content-Type", contentType);
+     xhr.send(data);
+   });
+ }
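
Two implementation details worth noting: with the defaults (`retries: 3`, `retryDelay: 1000`), a failing operation is attempted 4 times with waits of 1000 ms, 2000 ms, and 4000 ms between attempts (delay * 2^attempt); and the upload uses `XMLHttpRequest` rather than `fetch` so `context.onProgress` can be fed from upload progress events. Wiring the adapter on the client is then a matter of supplying the documented callbacks. A sketch, with an assumed import path and a hypothetical delete endpoint:

```typescript
import { PluginAWSS3 } from 'nuxt-upload-kit' // import path assumed

// $fetch is globally available in Nuxt (from ofetch)
const s3Storage = PluginAWSS3({
  getPresignedUploadUrl: async (fileId, contentType, { fileName, fileSize }) => {
    // Hypothetical endpoint; see the Nitro sketch above
    const { uploadUrl, publicUrl } = await $fetch('/api/s3/presign', {
      method: 'POST',
      body: { key: fileId, contentType, fileName, fileSize },
    })
    return { uploadUrl, publicUrl }
  },
  deleteFile: async (fileId) => {
    await $fetch(`/api/s3/files/${fileId}`, { method: 'DELETE' }) // hypothetical route
  },
  retries: 3, // 4 attempts total
  retryDelay: 1000, // 1 s, then 2 s, then 4 s
})
```

How the resulting plugin is registered with an uploader instance is outside this diff.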
package/dist/…/azure-datalake.js CHANGED
@@ -1,7 +1,7 @@
  import { ref } from "vue";
  import { DataLakeDirectoryClient } from "@azure/storage-file-datalake";
- import { defineStoragePlugin } from "../../types.js";
- export const PluginAzureDataLake = defineStoragePlugin((options) => {
+ import { defineStorageAdapter } from "../../types.js";
+ export const PluginAzureDataLake = defineStorageAdapter((options) => {
    const sasURL = ref(options.sasURL || "");
    let refreshPromise = null;
    const directoryCheckedCache = /* @__PURE__ */ new Set();
package/dist/…/cloudflare-r2.d.ts ADDED
@@ -0,0 +1,63 @@
+ export interface CloudflareR2Options {
+   /**
+    * Function to get a presigned URL for uploading a file
+    * Your backend should generate this using AWS SDK's getSignedUrl with R2 endpoint
+    *
+    * @example
+    * ```typescript
+    * getPresignedUploadUrl: async (fileId, contentType) => {
+    *   const response = await fetch('/api/r2/presign', {
+    *     method: 'POST',
+    *     body: JSON.stringify({ key: fileId, contentType })
+    *   })
+    *   const { uploadUrl, publicUrl } = await response.json()
+    *   return { uploadUrl, publicUrl }
+    * }
+    * ```
+    */
+   getPresignedUploadUrl: (fileId: string, contentType: string, metadata: {
+     fileName: string;
+     fileSize: number;
+   }) => Promise<{
+     /** Presigned URL for PUT upload */
+     uploadUrl: string;
+     /** Public URL where the file will be accessible after upload (r2.dev or custom domain) */
+     publicUrl: string;
+   }>;
+   /**
+    * Optional function to get a presigned URL for downloading/reading a file
+    * Required if you want to use getRemoteFile hook
+    */
+   getPresignedDownloadUrl?: (fileId: string) => Promise<string>;
+   /**
+    * Optional function to delete a file
+    * Your backend should handle the actual deletion
+    */
+   deleteFile?: (fileId: string) => Promise<void>;
+   /**
+    * Number of retry attempts for failed operations
+    * @default 3
+    */
+   retries?: number;
+   /**
+    * Initial delay between retries in milliseconds
+    * Uses exponential backoff: delay * (2 ^ attempt)
+    * @default 1000 (1 second)
+    */
+   retryDelay?: number;
+ }
+ export interface CloudflareR2UploadResult {
+   /**
+    * Public URL to the uploaded file
+    */
+   url: string;
+   /**
+    * R2 object key (file ID used for upload)
+    */
+   key: string;
+   /**
+    * ETag of the uploaded object (from response headers)
+    */
+   etag?: string;
+ }
+ export declare const PluginCloudflareR2: (options: CloudflareR2Options) => import("../../types.js").StoragePlugin<CloudflareR2UploadResult, Record<string, never>>;
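
Since R2 exposes the S3 API, the backend presigner differs from the S3 sketch above only in client construction: point an `S3Client` at the account's R2 endpoint with `region: 'auto'`. A sketch; the account ID, credential env vars, bucket name, and public host are all assumptions:

```typescript
// server/utils/r2-presign.ts (hypothetical helper, not shipped by this package)
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'

// R2 is S3-compatible: same SDK, custom endpoint, region 'auto'
const r2 = new S3Client({
  region: 'auto',
  endpoint: `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`,
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID!,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
  },
})

export async function presignR2Upload(key: string, contentType: string) {
  const uploadUrl = await getSignedUrl(
    r2,
    new PutObjectCommand({ Bucket: process.env.R2_BUCKET!, Key: key, ContentType: contentType }),
    { expiresIn: 300 }, // presigned URL valid for 5 minutes
  )
  // publicUrl should be the bucket's r2.dev subdomain or a custom domain, per the option docs
  return { uploadUrl, publicUrl: `https://${process.env.R2_PUBLIC_HOST}/${key}` }
}
```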
package/dist/…/cloudflare-r2.js ADDED
@@ -0,0 +1,111 @@
+ import { defineStorageAdapter } from "../../types.js";
+ export const PluginCloudflareR2 = defineStorageAdapter((options) => {
+   const maxRetries = options.retries ?? 3;
+   const initialRetryDelay = options.retryDelay ?? 1e3;
+   async function withRetry(operation, operationName) {
+     let lastError;
+     for (let attempt = 0; attempt <= maxRetries; attempt++) {
+       try {
+         return await operation();
+       } catch (error) {
+         lastError = error;
+         if (attempt === maxRetries) {
+           break;
+         }
+         const delay = initialRetryDelay * Math.pow(2, attempt);
+         if (import.meta.dev) {
+           console.warn(
+             `[R2 Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
+             error
+           );
+         }
+         await new Promise((resolve) => setTimeout(resolve, delay));
+       }
+     }
+     throw new Error(`[R2 Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
+   }
+   return {
+     id: "cloudflare-r2-storage",
+     hooks: {
+       /**
+        * Upload file to R2 using presigned URL
+        */
+       async upload(file, context) {
+         if (file.source !== "local" || file.data === null) {
+           throw new Error("Cannot upload remote file - no local data available");
+         }
+         return withRetry(async () => {
+           const { uploadUrl, publicUrl } = await options.getPresignedUploadUrl(file.id, file.mimeType, {
+             fileName: file.name,
+             fileSize: file.size
+           });
+           const etag = await uploadWithProgress(uploadUrl, file.data, file.mimeType, context.onProgress);
+           return {
+             url: publicUrl,
+             key: file.id,
+             etag
+           };
+         }, `Upload file "${file.name}"`);
+       },
+       /**
+        * Get remote file metadata from R2
+        */
+       async getRemoteFile(fileId, _context) {
+         if (!options.getPresignedDownloadUrl) {
+           throw new Error("[R2 Storage] getPresignedDownloadUrl is required to fetch remote files");
+         }
+         return withRetry(async () => {
+           const downloadUrl = await options.getPresignedDownloadUrl(fileId);
+           const response = await fetch(downloadUrl, { method: "HEAD" });
+           if (!response.ok) {
+             throw new Error(`Failed to get file metadata: ${response.status}`);
+           }
+           return {
+             size: Number.parseInt(response.headers.get("content-length") || "0", 10),
+             mimeType: response.headers.get("content-type") || "application/octet-stream",
+             remoteUrl: downloadUrl
+           };
+         }, `Get remote file "${fileId}"`);
+       },
+       /**
+        * Delete file from R2
+        */
+       async remove(file, _context) {
+         if (!options.deleteFile) {
+           throw new Error("[R2 Storage] deleteFile callback is required to delete files");
+         }
+         return withRetry(async () => {
+           await options.deleteFile(file.id);
+         }, `Delete file "${file.name}"`);
+       }
+     }
+   };
+ });
+ function uploadWithProgress(url, data, contentType, onProgress) {
+   return new Promise((resolve, reject) => {
+     const xhr = new XMLHttpRequest();
+     xhr.upload.addEventListener("progress", (event) => {
+       if (event.lengthComputable) {
+         const percentage = Math.round(event.loaded / event.total * 100);
+         onProgress(percentage);
+       }
+     });
+     xhr.addEventListener("load", () => {
+       if (xhr.status >= 200 && xhr.status < 300) {
+         const etag = xhr.getResponseHeader("ETag")?.replaceAll('"', "");
+         resolve(etag);
+       } else {
+         reject(new Error(`Upload failed with status ${xhr.status}: ${xhr.statusText}`));
+       }
+     });
+     xhr.addEventListener("error", () => {
+       reject(new Error("Upload failed due to network error"));
+     });
+     xhr.addEventListener("abort", () => {
+       reject(new Error("Upload was aborted"));
+     });
+     xhr.open("PUT", url);
+     xhr.setRequestHeader("Content-Type", contentType);
+     xhr.send(data);
+   });
+ }
package/dist/…/firebase-storage.d.ts ADDED
@@ -0,0 +1,71 @@
+ import { type FirebaseStorage } from "firebase/storage";
+ export interface FirebaseStorageOptions {
+   /**
+    * Firebase Storage instance
+    * You must initialize Firebase and pass the storage instance
+    * @example
+    * ```typescript
+    * import { getStorage } from 'firebase/storage'
+    * import { initializeApp } from 'firebase/app'
+    *
+    * const app = initializeApp({ ... })
+    * const storage = getStorage(app)
+    *
+    * PluginFirebaseStorage({ storage })
+    * ```
+    */
+   storage: FirebaseStorage;
+   /**
+    * Optional path prefix (folder) for uploaded files
+    * @example "uploads/images"
+    */
+   path?: string;
+   /**
+    * Custom metadata to attach to uploaded files
+    */
+   customMetadata?: Record<string, string>;
+   /**
+    * Cache-Control header for uploaded files
+    * @example "max-age=31536000" for 1 year caching
+    */
+   cacheControl?: string;
+   /**
+    * Content-Disposition header
+    * @example "attachment; filename=file.pdf"
+    */
+   contentDisposition?: string;
+   /**
+    * Number of retry attempts for failed operations
+    * @default 3
+    */
+   retries?: number;
+   /**
+    * Initial delay between retries in milliseconds
+    * Uses exponential backoff: delay * (2 ^ attempt)
+    * @default 1000 (1 second)
+    */
+   retryDelay?: number;
+ }
+ export interface FirebaseStorageUploadResult {
+   /**
+    * Public download URL for the uploaded file
+    */
+   url: string;
+   /**
+    * Full path in Firebase Storage
+    */
+   fullPath: string;
+   /**
+    * Storage bucket name
+    */
+   bucket: string;
+   /**
+    * File generation (version identifier)
+    */
+   generation?: string;
+   /**
+    * MD5 hash of the uploaded content
+    */
+   md5Hash?: string;
+ }
+ export declare const PluginFirebaseStorage: (options: FirebaseStorageOptions) => import("../../types.js").StoragePlugin<FirebaseStorageUploadResult, Record<string, never>>;
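
Unlike the presigned-URL adapters, this one talks to Firebase Storage directly through the SDK instance you pass in, so no backend endpoint is needed. A minimal setup combining the documented options (the config values, custom metadata, and import path are placeholders):

```typescript
import { initializeApp } from 'firebase/app'
import { getStorage } from 'firebase/storage'
import { PluginFirebaseStorage } from 'nuxt-upload-kit' // import path assumed

// Placeholder config: substitute your project's values
const app = initializeApp({ apiKey: '…', projectId: '…', storageBucket: '…' })

const firebaseStorage = PluginFirebaseStorage({
  storage: getStorage(app),
  path: 'uploads/images', // files land under this folder prefix
  cacheControl: 'max-age=31536000', // cache for one year
  customMetadata: { uploadedVia: 'nuxt-upload-kit' }, // example metadata
})
```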
package/dist/…/firebase-storage.js ADDED
@@ -0,0 +1,122 @@
+ import {
+   ref as storageRef,
+   uploadBytesResumable,
+   getDownloadURL,
+   getMetadata,
+   deleteObject
+ } from "firebase/storage";
+ import { defineStorageAdapter } from "../../types.js";
+ export const PluginFirebaseStorage = defineStorageAdapter((options) => {
+   const maxRetries = options.retries ?? 3;
+   const initialRetryDelay = options.retryDelay ?? 1e3;
+   async function withRetry(operation, operationName) {
+     let lastError;
+     for (let attempt = 0; attempt <= maxRetries; attempt++) {
+       try {
+         return await operation();
+       } catch (error) {
+         lastError = error;
+         if (attempt === maxRetries) {
+           break;
+         }
+         const delay = initialRetryDelay * Math.pow(2, attempt);
+         if (import.meta.dev) {
+           console.warn(
+             `[Firebase Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
+             error
+           );
+         }
+         await new Promise((resolve) => setTimeout(resolve, delay));
+       }
+     }
+     throw new Error(`[Firebase Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
+   }
+   const buildPath = (fileId) => {
+     if (options.path) {
+       const cleanPath = options.path.replace(/^\/+/, "").replace(/\/+$/, "");
+       return `${cleanPath}/${fileId}`;
+     }
+     return fileId;
+   };
+   const getStorageRef = (fileId) => {
+     const path = buildPath(fileId);
+     return storageRef(options.storage, path);
+   };
+   const uploadToFirebase = (fileId, data, mimeType, fileName, onProgress) => {
+     const fileRef = getStorageRef(fileId);
+     const metadata = {
+       contentType: mimeType,
+       cacheControl: options.cacheControl,
+       contentDisposition: options.contentDisposition,
+       customMetadata: {
+         ...options.customMetadata,
+         originalName: fileName,
+         size: String(data.size)
+       }
+     };
+     return new Promise((resolve, reject) => {
+       const uploadTask = uploadBytesResumable(fileRef, data, metadata);
+       const handleProgress = (snapshot) => {
+         const percentage = Math.round(snapshot.bytesTransferred / snapshot.totalBytes * 100);
+         onProgress(percentage);
+       };
+       const handleError = (error) => reject(error);
+       const handleComplete = async () => {
+         try {
+           const downloadURL = await getDownloadURL(uploadTask.snapshot.ref);
+           const uploadMetadata = uploadTask.snapshot.metadata;
+           resolve({
+             url: downloadURL,
+             fullPath: uploadMetadata.fullPath,
+             bucket: uploadMetadata.bucket,
+             generation: uploadMetadata.generation,
+             md5Hash: uploadMetadata.md5Hash
+           });
+         } catch (error) {
+           reject(error);
+         }
+       };
+       uploadTask.on("state_changed", handleProgress, handleError, handleComplete);
+     });
+   };
+   return {
+     id: "firebase-storage",
+     hooks: {
+       /**
+        * Upload file to Firebase Storage
+        */
+       async upload(file, context) {
+         if (file.source !== "local" || file.data === null) {
+           throw new Error("Cannot upload remote file - no local data available");
+         }
+         return withRetry(
+           () => uploadToFirebase(file.id, file.data, file.mimeType, file.name, context.onProgress),
+           `Upload file "${file.name}"`
+         );
+       },
+       /**
+        * Get remote file metadata from Firebase Storage
+        */
+       async getRemoteFile(fileId, _context) {
+         return withRetry(async () => {
+           const fileRef = getStorageRef(fileId);
+           const [metadata, downloadURL] = await Promise.all([getMetadata(fileRef), getDownloadURL(fileRef)]);
+           return {
+             size: metadata.size,
+             mimeType: metadata.contentType || "application/octet-stream",
+             remoteUrl: downloadURL
+           };
+         }, `Get remote file "${fileId}"`);
+       },
+       /**
+        * Delete file from Firebase Storage
+        */
+       async remove(file, _context) {
+         return withRetry(async () => {
+           const fileRef = getStorageRef(file.id);
+           await deleteObject(fileRef);
+         }, `Delete file "${file.name}"`);
+       }
+     }
+   };
+ });
package/dist/…/index.d.ts CHANGED
@@ -8,3 +8,18 @@
   * If you need multiple storage destinations, create multiple uploader instances.
   */
  export { PluginAzureDataLake, type AzureDataLakeOptions, type AzureUploadResult } from "./azure-datalake.js";
+ /**
+  * AWS S3 Storage Adapter (also works with S3-compatible services: MinIO, DigitalOcean Spaces, Wasabi, Backblaze B2)
+  * @experimental This adapter is experimental and may change in future releases.
+  */
+ export { PluginAWSS3, type AWSS3Options, type AWSS3UploadResult } from "./aws-s3.js";
+ /**
+  * Cloudflare R2 Storage Adapter
+  * @experimental This adapter is experimental and may change in future releases.
+  */
+ export { PluginCloudflareR2, type CloudflareR2Options, type CloudflareR2UploadResult } from "./cloudflare-r2.js";
+ /**
+  * Firebase Storage Adapter
+  * @experimental This adapter is experimental and may change in future releases.
+  */
+ export { PluginFirebaseStorage, type FirebaseStorageOptions, type FirebaseStorageUploadResult } from "./firebase-storage.js";
package/dist/…/index.js CHANGED
@@ -1 +1,4 @@
  export { PluginAzureDataLake } from "./azure-datalake.js";
+ export { PluginAWSS3 } from "./aws-s3.js";
+ export { PluginCloudflareR2 } from "./cloudflare-r2.js";
+ export { PluginFirebaseStorage } from "./firebase-storage.js";
package/dist/…/types.d.ts CHANGED
@@ -392,13 +392,13 @@ export interface Plugin<TUploadResult = any, TPluginEvents extends Record<string
   */
  export declare function defineProcessingPlugin<TPluginOptions = unknown, TPluginEvents extends Record<string, any> = Record<string, never>>(factory: (options: TPluginOptions) => ProcessingPlugin<any, TPluginEvents>): (options: TPluginOptions) => ProcessingPlugin<any, TPluginEvents>;
  /**
-  * Define a storage plugin (Azure, S3, GCS, etc.)
+  * Define a storage adapter (Azure, S3, GCS, etc.)
   *
-  * Storage plugins MUST implement the `upload` hook and should return an object with a `url` property.
+  * Storage adapters MUST implement the `upload` hook and should return an object with a `url` property.
   *
   * @example Azure Storage
   * ```typescript
-  * export const PluginAzureDataLake = defineStoragePlugin<AzureOptions, AzureEvents>((options) => ({
+  * export const PluginAzureDataLake = defineStorageAdapter<AzureOptions, AzureEvents>((options) => ({
   *   id: 'azure-datalake-storage',
   *   hooks: {
   *     upload: async (file, context) => {
@@ -416,7 +416,11 @@ export declare function defineProcessingPlugin<TPluginOptions = unknown, TPlugin
   * }))
   * ```
   */
- export declare function defineStoragePlugin<TPluginOptions = unknown, TUploadResult = any, TPluginEvents extends Record<string, any> = Record<string, never>>(factory: (options: TPluginOptions) => StoragePlugin<TUploadResult, TPluginEvents>): (options: TPluginOptions) => StoragePlugin<TUploadResult, TPluginEvents>;
+ export declare function defineStorageAdapter<TPluginOptions = unknown, TUploadResult = any, TPluginEvents extends Record<string, any> = Record<string, never>>(factory: (options: TPluginOptions) => StoragePlugin<TUploadResult, TPluginEvents>): (options: TPluginOptions) => StoragePlugin<TUploadResult, TPluginEvents>;
+ /**
+  * @deprecated Use `defineStorageAdapter` instead
+  */
+ export declare const defineStoragePlugin: typeof defineStorageAdapter;
  /**
   * Define an uploader plugin with type safety, context access, and custom events.
   * This is the universal plugin factory for all plugin types (storage, validators, processors).
package/dist/…/types.js CHANGED
@@ -1,9 +1,10 @@
  export function defineProcessingPlugin(factory) {
    return factory;
  }
- export function defineStoragePlugin(factory) {
+ export function defineStorageAdapter(factory) {
    return factory;
  }
+ export const defineStoragePlugin = defineStorageAdapter;
  export function defineUploaderPlugin(factory) {
    return factory;
  }
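
For plugin authors, the rename is mechanical: `defineStoragePlugin` survives as a deprecated alias with an identical signature, so existing adapters keep compiling, and switching is a one-word change. A sketch of a custom adapter under the new name (the import path is assumed; the minimal hook shape mirrors the shipped adapters above):

```typescript
import { defineStorageAdapter } from 'nuxt-upload-kit' // import path assumed

// Previously written as defineStoragePlugin(...); that still works via the deprecated alias
export const PluginMyStorage = defineStorageAdapter((options: { baseUrl: string }) => ({
  id: 'my-storage',
  hooks: {
    // Storage adapters MUST implement `upload` and should return an object with a `url`
    upload: async (file, context) => {
      context.onProgress(100) // context/onProgress shape mirrored from the shipped adapters
      return { url: `${options.baseUrl}/${file.id}` }
    },
  },
}))
```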
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "nuxt-upload-kit",
-   "version": "0.1.5",
+   "version": "0.1.8",
    "description": "A powerful, plugin-based file upload manager for Nuxt applications",
    "license": "MIT",
    "repository": "https://github.com/genu/nuxt-upload-kit.git",
@@ -27,6 +27,8 @@
      "mitt": "^3.0.1"
    },
    "devDependencies": {
+     "@aws-sdk/client-s3": "^3.969.0",
+     "@aws-sdk/lib-storage": "^3.969.0",
      "@azure/storage-file-datalake": "^12.28.1",
      "@ffmpeg/ffmpeg": "0.12.15",
      "@ffmpeg/util": "0.12.2",
@@ -42,7 +44,8 @@
      "eslint": "^9.39.2",
      "eslint-config-prettier": "10.1.8",
      "eslint-plugin-prettier": "5.5.5",
-     "happy-dom": "^20.1.0",
+     "firebase": "^12.7.0",
+     "happy-dom": "^20.2.0",
      "nuxt": "^4.2.2",
      "prettier": "^3.7.4",
      "typescript": "~5.9.3",