nuxt-upload-kit 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. package/README.md +171 -0
  2. package/dist/module.d.mts +14 -0
  3. package/dist/module.json +9 -0
  4. package/dist/module.mjs +29 -0
  5. package/dist/runtime/composables/useFFMpeg.d.ts +14 -0
  6. package/dist/runtime/composables/useFFMpeg.js +66 -0
  7. package/dist/runtime/composables/useUploadKit/index.d.ts +471 -0
  8. package/dist/runtime/composables/useUploadKit/index.js +486 -0
  9. package/dist/runtime/composables/useUploadKit/plugins/image-compressor.d.ts +56 -0
  10. package/dist/runtime/composables/useUploadKit/plugins/image-compressor.js +137 -0
  11. package/dist/runtime/composables/useUploadKit/plugins/index.d.ts +4 -0
  12. package/dist/runtime/composables/useUploadKit/plugins/index.js +4 -0
  13. package/dist/runtime/composables/useUploadKit/plugins/storage/azure-datalake.d.ts +55 -0
  14. package/dist/runtime/composables/useUploadKit/plugins/storage/azure-datalake.js +137 -0
  15. package/dist/runtime/composables/useUploadKit/plugins/storage/index.d.ts +10 -0
  16. package/dist/runtime/composables/useUploadKit/plugins/storage/index.js +1 -0
  17. package/dist/runtime/composables/useUploadKit/plugins/thumbnail-generator.d.ts +8 -0
  18. package/dist/runtime/composables/useUploadKit/plugins/thumbnail-generator.js +99 -0
  19. package/dist/runtime/composables/useUploadKit/plugins/video-compressor.d.ts +72 -0
  20. package/dist/runtime/composables/useUploadKit/plugins/video-compressor.js +111 -0
  21. package/dist/runtime/composables/useUploadKit/types.d.ts +488 -0
  22. package/dist/runtime/composables/useUploadKit/types.js +9 -0
  23. package/dist/runtime/composables/useUploadKit/utils.d.ts +23 -0
  24. package/dist/runtime/composables/useUploadKit/utils.js +45 -0
  25. package/dist/runtime/composables/useUploadKit/validators/allowed-file-types.d.ts +5 -0
  26. package/dist/runtime/composables/useUploadKit/validators/allowed-file-types.js +17 -0
  27. package/dist/runtime/composables/useUploadKit/validators/duplicate-file.d.ts +13 -0
  28. package/dist/runtime/composables/useUploadKit/validators/duplicate-file.js +27 -0
  29. package/dist/runtime/composables/useUploadKit/validators/index.d.ts +4 -0
  30. package/dist/runtime/composables/useUploadKit/validators/index.js +4 -0
  31. package/dist/runtime/composables/useUploadKit/validators/max-file-size.d.ts +5 -0
  32. package/dist/runtime/composables/useUploadKit/validators/max-file-size.js +17 -0
  33. package/dist/runtime/composables/useUploadKit/validators/max-files.d.ts +5 -0
  34. package/dist/runtime/composables/useUploadKit/validators/max-files.js +17 -0
  35. package/dist/runtime/types/index.d.ts +3 -0
  36. package/dist/runtime/types/index.js +3 -0
  37. package/dist/types.d.mts +5 -0
  38. package/package.json +84 -0
@@ -0,0 +1,55 @@
1
import { type PathHttpHeaders } from "@azure/storage-file-datalake";

/**
 * Configuration for the Azure Data Lake storage plugin.
 * Provide either a static `sasURL` or a `getSASUrl` fetcher (or both —
 * the fetcher takes over once the static token expires).
 */
export interface AzureDataLakeOptions {
  /**
   * Static SAS URL for Azure Data Lake Storage
   */
  sasURL?: string;
  /**
   * Function to dynamically fetch SAS URL
   * Use this to handle token expiration/refreshing.
   * If provided, it will be called before every file operation.
   */
  getSASUrl?: () => Promise<string>;
  /**
   * Optional subdirectory path within the container
   * @example "uploads/images"
   */
  path?: string;
  /**
   * Custom metadata to attach to uploaded files
   */
  metadata?: Record<string, string>;
  /**
   * Custom HTTP headers for uploaded files
   * (contentType is excluded — the plugin always sets it from the file's mimeType)
   */
  pathHttpHeaders?: Omit<PathHttpHeaders, "contentType">;
  /**
   * Automatically try to create the directory if it doesn't exist.
   * Disable this if your SAS token only has 'Write' (Blob) permissions
   * and not 'Create' (Directory) permissions.
   * @default true
   */
  autoCreateDirectory?: boolean;
  /**
   * Number of retry attempts for failed operations
   * @default 3
   */
  retries?: number;
  /**
   * Initial delay between retries in milliseconds
   * Uses exponential backoff: delay * (2 ^ attempt)
   * @default 1000 (1 second)
   */
  retryDelay?: number;
}

/**
 * Result returned by the upload hook for each stored file.
 */
export interface AzureUploadResult {
  /**
   * Full URL to the uploaded file
   */
  url: string;
  /**
   * File name/path in the storage
   */
  blobPath: string;
}

/** Storage plugin factory for Azure Data Lake (upload / getRemoteFile / remove). */
export declare const PluginAzureDataLake: (options: AzureDataLakeOptions) => import("../../types.js").StoragePlugin<AzureUploadResult, Record<string, never>>;
@@ -0,0 +1,137 @@
1
+ import { ref } from "vue";
2
+ import { DataLakeDirectoryClient } from "@azure/storage-file-datalake";
3
+ import { defineStoragePlugin } from "../../types.js";
4
/**
 * Azure Data Lake storage plugin.
 *
 * Handles upload, remote-metadata lookup, and deletion against a container
 * addressed by a SAS URL. The SAS token is refreshed lazily through
 * `options.getSASUrl` when it is missing or within 5 minutes of its `se`
 * expiry, and every remote operation is retried with exponential backoff.
 */
export const PluginAzureDataLake = defineStoragePlugin((options) => {
  // Current SAS URL; starts from the static option and is refreshed lazily.
  const sasURL = ref(options.sasURL || "");
  // Shared in-flight refresh so concurrent operations fetch the token once.
  let refreshPromise = null;
  // Subdirectories already verified/created — avoids one round-trip per file.
  const directoryCheckedCache = /* @__PURE__ */ new Set();
  const maxRetries = options.retries ?? 3;
  const initialRetryDelay = options.retryDelay ?? 1e3;

  /**
   * Runs `operation`, retrying up to `maxRetries` extra times with
   * exponential backoff (initialRetryDelay * 2^attempt). Throws a
   * descriptive error carrying the last failure message when exhausted.
   */
  async function withRetry(operation, operationName) {
    let lastError;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        return await operation();
      } catch (error) {
        lastError = error;
        if (attempt === maxRetries) {
          break;
        }
        const delay = initialRetryDelay * Math.pow(2, attempt);
        if (import.meta.dev) {
          console.warn(
            `[Azure Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
            error
          );
        }
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
    throw new Error(`[Azure Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
  }

  // Eagerly warm the SAS URL when only a fetcher was provided.
  // FIX: the original fire-and-forget .then() had no rejection handler,
  // so a failing initial fetch surfaced as an unhandled promise rejection.
  // A failure here is non-fatal: getFileClient() re-fetches lazily.
  if (options.getSASUrl && !options.sasURL) {
    options.getSASUrl().then((url) => {
      sasURL.value = url;
    }).catch((error) => {
      if (import.meta.dev) {
        console.warn("[Azure Storage] Initial SAS URL fetch failed; will retry on first operation.", error);
      }
    });
  }

  /**
   * Returns true when the SAS token's `se` (signed expiry) query parameter
   * is absent, unparsable, or within `bufferMinutes` of expiring.
   */
  const isTokenExpired = (urlStr, bufferMinutes = 5) => {
    if (!urlStr) return true;
    try {
      const url = new URL(urlStr);
      const expiryStr = url.searchParams.get("se");
      if (!expiryStr) return true;
      const expiry = new Date(expiryStr);
      const now = /* @__PURE__ */ new Date();
      return now.getTime() + bufferMinutes * 60 * 1e3 > expiry.getTime();
    } catch {
      return true;
    }
  };

  /**
   * Builds a DataLakeFileClient for `blobName`, refreshing the SAS token if
   * needed and (optionally) ensuring the configured subdirectory exists.
   */
  const getFileClient = async (blobName) => {
    if (options.getSASUrl && isTokenExpired(sasURL.value)) {
      if (!refreshPromise) {
        // FIX: clear the cached promise in finally — the original cleared it
        // only inside .then(), so a *rejected* refresh stayed cached forever
        // and every subsequent operation re-awaited the same failed promise.
        refreshPromise = options.getSASUrl().finally(() => {
          refreshPromise = null;
        });
      }
      sasURL.value = await refreshPromise;
    }
    let dir = new DataLakeDirectoryClient(sasURL.value);
    if (options.path) {
      const cleanPath = options.path.replace(/^\/+|\/+$/g, "");
      dir = dir.getSubdirectoryClient(cleanPath);
      const shouldCreateDir = options.autoCreateDirectory ?? true;
      if (shouldCreateDir && !directoryCheckedCache.has(cleanPath)) {
        try {
          await dir.createIfNotExists();
          directoryCheckedCache.add(cleanPath);
        } catch (error) {
          // Best-effort: a SAS token without 'Create' permission lands here.
          if (import.meta.dev) {
            console.debug(`Azure directory already exists or couldn't be created: ${cleanPath}`, error);
          }
        }
      }
    }
    return dir.getFileClient(blobName);
  };

  return {
    id: "azure-datalake-storage",
    hooks: {
      /**
       * Upload file to Azure Blob Storage
       */
      async upload(file, context) {
        if (file.source !== "local" || file.data === null) {
          throw new Error("Cannot upload remote file - no local data available");
        }
        return withRetry(async () => {
          const fileClient = await getFileClient(file.id);
          await fileClient.upload(file.data, {
            metadata: {
              ...options.metadata,
              mimeType: file.mimeType,
              size: String(file.size),
              originalName: file.name
            },
            pathHttpHeaders: {
              ...options.pathHttpHeaders,
              contentType: file.mimeType
            },
            onProgress: ({ loadedBytes }) => {
              const uploadedPercentage = Math.round(loadedBytes / file.size * 100);
              context.onProgress(uploadedPercentage);
            }
          });
          return {
            url: fileClient.url,
            blobPath: fileClient.name
          };
        }, `Upload file "${file.name}"`);
      },
      /**
       * Get remote file metadata from Azure
       */
      async getRemoteFile(fileId, _context) {
        return withRetry(async () => {
          const fileClient = await getFileClient(fileId);
          const properties = await fileClient.getProperties();
          return {
            size: properties.contentLength || 0,
            mimeType: properties.contentType || "application/octet-stream",
            remoteUrl: fileClient.url
          };
        }, `Get remote file "${fileId}"`);
      },
      /**
       * Delete file from Azure Blob Storage
       */
      async remove(file, _context) {
        return withRetry(async () => {
          const fileClient = await getFileClient(file.id);
          await fileClient.deleteIfExists();
        }, `Delete file "${file.name}"`);
      }
    }
  };
});
@@ -0,0 +1,10 @@
1
/**
 * Storage Adapter Plugins
 *
 * These plugins handle the actual upload, download, and deletion of files
 * to/from various cloud storage providers.
 *
 * Only ONE storage plugin should be active per uploader instance.
 * If you need multiple storage destinations, create multiple uploader instances.
 */
// Currently the only bundled storage adapter; new providers should be
// re-exported from this barrel alongside their option/result types.
export { PluginAzureDataLake, type AzureDataLakeOptions, type AzureUploadResult } from "./azure-datalake.js";
@@ -0,0 +1 @@
1
// Runtime barrel for storage adapter plugins (types live in index.d.ts).
export { PluginAzureDataLake } from "./azure-datalake.js";
@@ -0,0 +1,8 @@
1
interface ThumbnailGeneratorOptions {
  /** Maximum thumbnail width in pixels (aspect ratio is preserved). */
  maxWidth?: number;
  /** Maximum thumbnail height in pixels (aspect ratio is preserved). */
  maxHeight?: number;
  /** JPEG encoding quality for the generated preview, 0–1. */
  quality?: number;
  /** Timestamp (seconds) at which to capture the video frame used as preview. */
  videoCaptureTime?: number;
}
/** Processing plugin that attaches a data-URL preview to local image/video files. */
export declare const PluginThumbnailGenerator: (options: ThumbnailGeneratorOptions) => import("../types.js").ProcessingPlugin<any, Record<string, never>>;
export {};
@@ -0,0 +1,99 @@
1
+ import { defineProcessingPlugin } from "../types.js";
2
+ import { calculateThumbnailDimensions } from "../utils.js";
3
/**
 * Processing plugin that generates a small JPEG preview (as a data URL) for
 * local image and video files and stores it on `file.preview`.
 *
 * Animated GIFs and SVGs are passed through untouched (rasterising them would
 * lose animation / scalability), as are non-media files and remote files that
 * carry no local data. Thumbnail failures are logged and never block the file.
 */
export const PluginThumbnailGenerator = defineProcessingPlugin((pluginOptions) => ({
  id: "thumbnail-generator",
  hooks: {
    preprocess: async (file, _context) => {
      const { maxWidth = 200, maxHeight = 200, quality = 0.7, videoCaptureTime = 1 } = pluginOptions;
      const isImage = file.mimeType.startsWith("image/");
      const isVideo = file.mimeType.startsWith("video/");
      // Single consolidated guard: only local image/video files with data,
      // excluding formats we deliberately leave alone.
      const shouldSkip =
        (!isImage && !isVideo) ||
        file.mimeType === "image/gif" ||
        file.mimeType === "image/svg+xml" ||
        file.source !== "local" ||
        !file.data;
      if (shouldSkip) {
        return file;
      }
      const sourceUrl = URL.createObjectURL(file.data);
      try {
        const preview = isImage
          ? await generateImageThumbnail(sourceUrl, maxWidth, maxHeight, quality)
          : await generateVideoThumbnail(sourceUrl, maxWidth, maxHeight, quality, videoCaptureTime);
        if (preview) {
          file.preview = preview;
        }
      } catch (error) {
        // Best-effort: a failed thumbnail must not fail the upload pipeline.
        console.warn(`[ThumbnailGenerator] Failed for ${file.name}:`, error);
      } finally {
        URL.revokeObjectURL(sourceUrl);
      }
      return file;
    }
  }
}));
42
/**
 * Renders the image at `sourceUrl` onto an off-screen canvas, scaled to fit
 * within maxWidth × maxHeight, and resolves with a JPEG data URL.
 * Rejects if the image fails to load or a 2D context is unavailable.
 */
async function generateImageThumbnail(sourceUrl, maxWidth, maxHeight, quality) {
  return new Promise((resolve, reject) => {
    const img = new Image();
    img.onerror = () => reject(new Error("Failed to load image"));
    img.onload = () => {
      try {
        const dims = calculateThumbnailDimensions(img.width, img.height, maxWidth, maxHeight);
        const canvas = document.createElement("canvas");
        canvas.width = dims.width;
        canvas.height = dims.height;
        const ctx = canvas.getContext("2d");
        if (!ctx) {
          throw new Error("Failed to get canvas context");
        }
        ctx.drawImage(img, 0, 0, dims.width, dims.height);
        resolve(canvas.toDataURL("image/jpeg", quality));
      } catch (err) {
        reject(err);
      }
    };
    img.src = sourceUrl;
  });
}
68
/**
 * Captures a single frame from the video at `sourceUrl` (at `captureTime`
 * seconds, clamped to 10% of the duration), draws it onto an off-screen
 * canvas scaled to fit maxWidth × maxHeight, and resolves a JPEG data URL.
 * Rejects if the video fails to load or a 2D context is unavailable.
 */
async function generateVideoThumbnail(sourceUrl, maxWidth, maxHeight, quality, captureTime) {
  return new Promise((resolve, reject) => {
    const video = document.createElement("video");
    video.preload = "metadata";
    // Muted so the element can be processed without autoplay restrictions.
    video.muted = true;
    video.onloadedmetadata = () => {
      // FIX: video.duration can be NaN or Infinity (e.g. some streams).
      // The original Math.min(captureTime, duration * 0.1) then produced a
      // NaN seek target, so `seeked` never fired and the promise hung.
      const seekTime = Number.isFinite(video.duration)
        ? Math.min(captureTime, video.duration * 0.1)
        : captureTime;
      video.currentTime = seekTime;
    };
    video.onseeked = () => {
      try {
        const { width, height } = calculateThumbnailDimensions(video.videoWidth, video.videoHeight, maxWidth, maxHeight);
        const canvas = document.createElement("canvas");
        canvas.width = width;
        canvas.height = height;
        const ctx = canvas.getContext("2d");
        if (!ctx) {
          throw new Error("Failed to get canvas context");
        }
        ctx.drawImage(video, 0, 0, width, height);
        const thumbnailUrl = canvas.toDataURL("image/jpeg", quality);
        resolve(thumbnailUrl);
      } catch (error) {
        reject(error);
      }
    };
    video.onerror = () => {
      reject(new Error("Failed to load video"));
    };
    video.src = sourceUrl;
  });
}
@@ -0,0 +1,72 @@
1
interface VideoCompressorOptions {
  /**
   * Target video codec
   * @default 'libx264'
   */
  codec?: string;
  /**
   * Constant Rate Factor (0-51, lower = better quality, larger file)
   * @default 28
   */
  crf?: number;
  /**
   * Target bitrate (e.g., '1M' = 1 megabit/sec)
   * If specified, overrides CRF
   */
  bitrate?: string;
  /**
   * Maximum width (maintains aspect ratio)
   */
  maxWidth?: number;
  /**
   * Maximum height (maintains aspect ratio)
   */
  maxHeight?: number;
  /**
   * Output format
   * @default 'mp4'
   */
  format?: "mp4" | "webm" | "mov";
  /**
   * Minimum file size to compress (in bytes)
   * Files smaller than this will be skipped
   * @default 10MB
   */
  minSizeToCompress?: number;
  /**
   * Audio codec
   * @default 'aac'
   */
  audioCodec?: string;
  /**
   * Audio bitrate
   * @default '128k'
   */
  audioBitrate?: string;
}
/**
 * Events emitted by the video compressor during processing.
 * All payloads carry the file being processed; sizes are in bytes.
 */
type VideoCompressorEvents = {
  /** Compression is about to begin for a file. */
  start: {
    file: any;
    originalSize: number;
  };
  /** Periodic progress update, percentage in 0–100. */
  progress: {
    file: any;
    percentage: number;
  };
  /** Compression succeeded and produced a smaller file. */
  complete: {
    file: any;
    originalSize: number;
    compressedSize: number;
    savedBytes: number;
  };
  /** File was left untouched; `reason` explains why. */
  skip: {
    file: any;
    reason: string;
  };
  /** Compression failed; the original file is passed through. */
  error: {
    file: any;
    error: Error;
  };
};
/** Processing plugin that transcodes large local videos with ffmpeg.wasm. */
export declare const PluginVideoCompressor: (options: VideoCompressorOptions) => import("../types.js").ProcessingPlugin<any, VideoCompressorEvents>;
export {};
@@ -0,0 +1,111 @@
1
+ import { defineProcessingPlugin } from "../types.js";
2
+ import { useFFMpeg } from "../../useFFMpeg.js";
3
+ import { watch } from "vue";
4
/**
 * Processing plugin that compresses large local video files with ffmpeg.
 *
 * Files that are not videos, have no local data, are below
 * `minSizeToCompress`, or compress to a *larger* output are passed through
 * unchanged (with a `skip` event). Compression errors are reported via the
 * `error` event and never block the file.
 */
export const PluginVideoCompressor = defineProcessingPlugin(
  (pluginOptions = {}) => {
    return {
      id: "video-compressor",
      hooks: {
        process: async (file, context) => {
          const {
            codec = "libx264",
            crf = 28,
            bitrate,
            maxWidth,
            maxHeight,
            format = "mp4",
            minSizeToCompress = 10 * 1024 * 1024,
            // 10MB
            audioCodec = "aac",
            audioBitrate = "128k"
          } = pluginOptions;
          if (!file.mimeType.startsWith("video/")) {
            return file;
          }
          // FIX: also guard against a local file whose data is missing —
          // the original passed null into URL.createObjectURL and surfaced
          // the resulting throw as a compression "error" instead of a skip.
          if (file.source !== "local" || !file.data) {
            context.emit("skip", { file, reason: "Remote file, no local data to compress" });
            return file;
          }
          if (file.size < minSizeToCompress) {
            context.emit("skip", {
              file,
              reason: `File size (${(file.size / 1024 / 1024).toFixed(2)}MB) is below minimum (${(minSizeToCompress / 1024 / 1024).toFixed(2)}MB)`
            });
            return file;
          }
          let inputUrl;
          let stopProgressWatch;
          let ffmpeg;
          try {
            context.emit("start", { file, originalSize: file.size });
            inputUrl = URL.createObjectURL(file.data);
            ffmpeg = useFFMpeg({ inputUrl });
            await ffmpeg.load();
            // Bridge ffmpeg's reactive progress (0..1) into plugin events (0..100).
            stopProgressWatch = watch(
              () => ffmpeg.progress.value,
              (progress) => {
                context.emit("progress", { file, percentage: Math.round(progress * 100) });
              }
            );
            // Video codec + rate control: explicit bitrate wins over CRF.
            const convertOptions = ["-c:v", codec];
            if (bitrate) {
              convertOptions.push("-b:v", bitrate);
            } else {
              convertOptions.push("-crf", crf.toString());
            }
            if (maxWidth || maxHeight) {
              let scaleFilter = "";
              if (maxWidth && maxHeight) {
                // Shrink to fit within the box, never upscale.
                scaleFilter = `scale='min(${maxWidth},iw)':'min(${maxHeight},ih)':force_original_aspect_ratio=decrease`;
              } else if (maxWidth) {
                // -2 keeps the other dimension even (required by libx264).
                scaleFilter = `scale=${maxWidth}:-2`;
              } else if (maxHeight) {
                scaleFilter = `scale=-2:${maxHeight}`;
              }
              convertOptions.push("-vf", scaleFilter);
            }
            convertOptions.push("-c:a", audioCodec, "-b:a", audioBitrate);
            const compressedFile = await ffmpeg.convert(convertOptions);
            if (!compressedFile) {
              context.emit("error", { file, error: new Error("Compression failed: no output file") });
              return file;
            }
            const originalSize = file.size;
            const compressedSize = compressedFile.size;
            const savedBytes = originalSize - compressedSize;
            if (compressedSize < originalSize) {
              context.emit("complete", {
                file,
                originalSize,
                compressedSize,
                savedBytes
              });
              return {
                ...file,
                data: compressedFile,
                size: compressedFile.size,
                name: file.name.replace(/\.[^.]+$/, `.${format}`),
                mimeType: `video/${format}`
              };
            } else {
              context.emit("skip", {
                file,
                reason: `Compressed size (${(compressedSize / 1024 / 1024).toFixed(2)}MB) is larger than original`
              });
              return file;
            }
          } catch (error) {
            context.emit("error", { file, error });
            console.error(`Video compression error for ${file.name}:`, error);
            return file;
          } finally {
            // FIX: unload ffmpeg here instead of only on the success path —
            // the original leaked the ffmpeg instance whenever convert() threw.
            ffmpeg?.unload();
            stopProgressWatch?.();
            if (inputUrl) {
              URL.revokeObjectURL(inputUrl);
            }
          }
        }
      }
    };
  }
);