nuxt-upload-kit 0.1.21 → 0.1.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/module.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "nuxt-upload-kit",
   "configKey": "uploadKit",
-  "version": "0.1.21",
+  "version": "0.1.22",
   "builder": {
     "@nuxt/module-builder": "1.0.2",
     "unbuild": "3.6.1"
@@ -5,11 +5,15 @@ export interface AzureDataLakeOptions {
    */
   sasURL?: string;
   /**
-   * Function to dynamically fetch SAS URL
-   * Use this to handle token expiration/refreshing.
-   * If provided, it will be called before every file operation.
+   * Function to dynamically fetch SAS URL.
+   *
+   * The plugin auto-detects whether you return a directory or file SAS:
+   * - Directory SAS (sr=d): Cached and reused for batch uploads
+   * - File SAS (sr=b): Called per file for granular access control
+   *
+   * @param storageKey - The intended storage path for the file
    */
-  getSASUrl?: () => Promise<string>;
+  getSASUrl?: (storageKey: string) => Promise<string>;
   /**
    * Optional subdirectory path within the container
    * @example "uploads/images"
@@ -25,6 +29,7 @@ export interface AzureDataLakeOptions {
   pathHttpHeaders?: Omit<PathHttpHeaders, "contentType">;
   /**
    * Automatically try to create the directory if it doesn't exist.
+   * Only applies when using directory-level SAS.
    * Disable this if your SAS token only has 'Write' (Blob) permissions
    * and not 'Create' (Directory) permissions.
    * @default true
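
The widened `getSASUrl` signature is what enables per-file tokens: the backend can now mint a SAS scoped to exactly the blob being written. A minimal consumer sketch, assuming a hypothetical `/api/azure/sas` endpoint and an import path that mirrors the firebase provider export — neither is confirmed by this diff:

```ts
import { PluginAzureDataLake } from "nuxt-upload-kit/providers/azure"; // path assumed

const azureAdapter = PluginAzureDataLake({
  path: "uploads/images",
  // Called with the intended storage path. Returning a URL whose `sr`
  // query parameter is "b" switches the adapter into per-file SAS mode;
  // `sr=d` keeps the cached-directory behavior.
  getSASUrl: async (storageKey) => {
    // Hypothetical endpoint that signs a short-lived SAS for one blob.
    const res = await fetch(`/api/azure/sas?key=${encodeURIComponent(storageKey)}`);
    const { url } = await res.json();
    return url;
  },
});
```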
@@ -1,11 +1,19 @@
 import { ref } from "vue";
-import { DataLakeDirectoryClient } from "@azure/storage-file-datalake";
+import { DataLakeDirectoryClient, DataLakeFileClient } from "@azure/storage-file-datalake";
 import { defineStorageAdapter } from "../../types.js";
 export const PluginAzureDataLake = defineStorageAdapter((options) => {
   const sasURL = ref(options.sasURL || "");
   let refreshPromise = null;
-  const directoryCheckedCache = /* @__PURE__ */ new Set();
-  const getBasePathFromSasUrl = (url) => {
+  let detectedMode = null;
+  const detectSasMode = (url) => {
+    try {
+      const sr = new URL(url).searchParams.get("sr");
+      return sr === "d" ? "directory" : "file";
+    } catch {
+      return "directory";
+    }
+  };
+  const getBlobPathFromUrl = (url) => {
     try {
       const parsed = new URL(url);
       const parts = parsed.pathname.split("/").filter(Boolean);
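
Mode detection hinges entirely on the `sr` (signed resource) query parameter of the SAS URL: anything other than `d` falls through to file mode, and an unparseable URL defaults to directory mode. Illustrative URLs — the account, container, and token values here are invented:

```ts
// sr=d → directory SAS: cached and reused across the batch
const dirSas = "https://acct.dfs.core.windows.net/container/uploads?sp=rcw&sr=d&sig=abc";
// sr=b → file (blob) SAS: getSASUrl is invoked once per file
const fileSas = "https://acct.dfs.core.windows.net/container/uploads/a.jpg?sp=cw&sr=b&sig=def";

new URL(dirSas).searchParams.get("sr");  // "d" → "directory"
new URL(fileSas).searchParams.get("sr"); // "b" → "file"
```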
@@ -14,16 +22,15 @@ export const PluginAzureDataLake = defineStorageAdapter((options) => {
       return "";
     }
   };
-  const buildFullStorageKey = (filename) => {
-    const basePath = getBasePathFromSasUrl(sasURL.value);
+  const buildFullStorageKey = (filename, forRequest = false) => {
+    if (forRequest && detectedMode === "file") {
+      const parts2 = [options.path, filename].filter(Boolean);
+      return parts2.join("/");
+    }
+    const basePath = getBlobPathFromUrl(sasURL.value);
     const parts = [basePath, options.path, filename].filter(Boolean);
     return parts.join("/");
   };
-  if (options.getSASUrl && !options.sasURL) {
-    options.getSASUrl().then((url) => {
-      sasURL.value = url;
-    });
-  }
   const isTokenExpired = (urlStr, bufferMinutes = 5) => {
     if (!urlStr) return true;
     try {
@@ -37,36 +44,57 @@ export const PluginAzureDataLake = defineStorageAdapter((options) => {
       return true;
     }
   };
-  const getFileClient = async (fullBlobPath) => {
-    if (options.getSASUrl && isTokenExpired(sasURL.value)) {
-      refreshPromise ??= options.getSASUrl().then((url) => {
+  const getSasUrlForFile = async (storageKey) => {
+    if (options.sasURL) {
+      detectedMode ??= detectSasMode(options.sasURL);
+      return options.sasURL;
+    }
+    if (!options.getSASUrl) {
+      throw new Error("Either sasURL or getSASUrl must be provided");
+    }
+    if (detectedMode === "file") return options.getSASUrl(storageKey);
+    if (!detectedMode) {
+      const url = await options.getSASUrl(storageKey);
+      detectedMode = detectSasMode(url);
+      sasURL.value = url;
+      if (import.meta.dev) console.debug(`[Azure Storage] Auto-detected SAS mode: ${detectedMode}`);
+      if (detectedMode === "file") return url;
+    }
+    if (isTokenExpired(sasURL.value)) {
+      refreshPromise ??= options.getSASUrl(storageKey).then((url) => {
         refreshPromise = null;
+        sasURL.value = url;
         return url;
       });
-      sasURL.value = await refreshPromise;
+      await refreshPromise;
     }
-    const basePath = getBasePathFromSasUrl(sasURL.value);
+    return sasURL.value;
+  };
+  const getFileClientFromDirectory = async (sasUrl, fullBlobPath) => {
+    const basePath = getBlobPathFromUrl(sasUrl);
     const relativePath = basePath && fullBlobPath.startsWith(basePath + "/") ? fullBlobPath.slice(basePath.length + 1) : fullBlobPath;
     const pathParts = relativePath.split("/");
     const filename = pathParts.pop();
     const dirPath = pathParts.join("/");
-    let dir = new DataLakeDirectoryClient(sasURL.value);
+    let dir = new DataLakeDirectoryClient(sasUrl);
     if (dirPath) {
       dir = dir.getSubdirectoryClient(dirPath);
-      const shouldCreateDir = options.autoCreateDirectory ?? true;
-      if (shouldCreateDir && !directoryCheckedCache.has(dirPath)) {
+      if (options.autoCreateDirectory ?? true) {
         try {
           await dir.createIfNotExists();
-          directoryCheckedCache.add(dirPath);
-        } catch (error) {
-          if (import.meta.dev) {
-            console.debug(`Azure directory already exists or couldn't be created: ${dirPath}`, error);
-          }
+        } catch {
         }
       }
     }
     return dir.getFileClient(filename);
   };
+  const getFileClient = async (storageKey) => {
+    const sasUrl = await getSasUrlForFile(storageKey);
+    if (detectedMode === "file") {
+      return new DataLakeFileClient(sasUrl);
+    }
+    return getFileClientFromDirectory(sasUrl, storageKey);
+  };
   return {
     id: "azure-datalake-storage",
     hooks: {
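
One detail worth calling out in `getSasUrlForFile`: the `refreshPromise ??=` assignment is a single-flight guard, so a batch of concurrent uploads hitting an expired token triggers exactly one call to `getSASUrl` rather than one per file. The pattern in isolation, as a sketch:

```ts
// Single-flight token refresh: the first caller starts the fetch, the rest
// await the same promise; the slot is cleared on success so a later expiry
// can trigger a fresh fetch.
let refreshPromise: Promise<string> | null = null;

async function getFreshSas(fetchSas: () => Promise<string>): Promise<string> {
  refreshPromise ??= fetchSas().then((url) => {
    refreshPromise = null;
    return url;
  });
  return refreshPromise;
}
```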
@@ -77,8 +105,8 @@ export const PluginAzureDataLake = defineStorageAdapter((options) => {
         if (file.source !== "local" || file.data === null) {
           throw new Error("Cannot upload remote file - no local data available");
         }
-        const storageKey = buildFullStorageKey(file.id);
-        const fileClient = await getFileClient(storageKey);
+        const requestKey = buildFullStorageKey(file.id, true);
+        const fileClient = await getFileClient(requestKey);
         await fileClient.upload(file.data, {
           metadata: {
             ...options.metadata,
@@ -95,9 +123,10 @@ export const PluginAzureDataLake = defineStorageAdapter((options) => {
             context.onProgress(uploadedPercentage);
           }
         });
+        const actualStorageKey = getBlobPathFromUrl(fileClient.url) || requestKey;
         return {
           url: fileClient.url,
-          storageKey
+          storageKey: actualStorageKey
        };
      },
      /**
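
The `storageKey` returned from an upload is now recomputed from `fileClient.url` because, under a directory SAS, the base path baked into the SAS URL is prepended to whatever key was requested, so the request-time key is not the full container-relative path. A worked example with assumed values (the exact slicing inside `getBlobPathFromUrl` is elided from this diff):

```ts
// Directory SAS scoped to ".../container/uploads" (sr=d), requestKey "images/abc123":
const finalUrl = "https://acct.dfs.core.windows.net/container/uploads/images/abc123";
const segments = new URL(finalUrl).pathname.split("/").filter(Boolean);
// ["container", "uploads", "images", "abc123"] — dropping the filesystem
// segment yields the container-relative key.
console.log(segments.slice(1).join("/")); // "uploads/images/abc123"
```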
@@ -34,17 +34,6 @@ export interface FirebaseStorageOptions {
    * @example "attachment; filename=file.pdf"
    */
   contentDisposition?: string;
-  /**
-   * Number of retry attempts for failed operations
-   * @default 3
-   */
-  retries?: number;
-  /**
-   * Initial delay between retries in milliseconds
-   * Uses exponential backoff: delay * (2 ^ attempt)
-   * @default 1000 (1 second)
-   */
-  retryDelay?: number;
 }
 export interface FirebaseStorageUploadResult {
   /**
@@ -7,30 +7,6 @@ import {
 } from "firebase/storage";
 import { defineStorageAdapter } from "../../types.js";
 export const PluginFirebaseStorage = defineStorageAdapter((options) => {
-  const maxRetries = options.retries ?? 3;
-  const initialRetryDelay = options.retryDelay ?? 1e3;
-  async function withRetry(operation, operationName) {
-    let lastError;
-    for (let attempt = 0; attempt <= maxRetries; attempt++) {
-      try {
-        return await operation();
-      } catch (error) {
-        lastError = error;
-        if (attempt === maxRetries) {
-          break;
-        }
-        const delay = initialRetryDelay * Math.pow(2, attempt);
-        if (import.meta.dev) {
-          console.warn(
-            `[Firebase Storage] ${operationName} failed (attempt ${attempt + 1}/${maxRetries + 1}). Retrying in ${delay}ms...`,
-            error
-          );
-        }
-        await new Promise((resolve) => setTimeout(resolve, delay));
-      }
-    }
-    throw new Error(`[Firebase Storage] ${operationName} failed after ${maxRetries + 1} attempts: ${lastError?.message}`);
-  }
   const buildFullStorageKey = (filename) => {
     if (options.path) {
       const cleanPath = options.path.replace(/^\/+/, "").replace(/\/+$/, "");
@@ -89,32 +65,27 @@ export const PluginFirebaseStorage = defineStorageAdapter((options) => {
         throw new Error("Cannot upload remote file - no local data available");
       }
       const storageKey = buildFullStorageKey(file.id);
-      return withRetry(
-        () => uploadToFirebase(storageKey, file.data, file.mimeType, file.name, context.onProgress),
-        `Upload file "${file.name}"`
-      );
+      return uploadToFirebase(storageKey, file.data, file.mimeType, file.name, context.onProgress);
     },
     /**
      * Get remote file metadata from Firebase Storage.
      * Expects the full storageKey (e.g., "uploads/images/filename.jpg").
      */
     async getRemoteFile(storageKey, _context) {
-      return withRetry(async () => {
-        const fileRef = getStorageRef(storageKey);
-        const [metadata, downloadURL] = await Promise.all([getMetadata(fileRef), getDownloadURL(fileRef)]);
-        return {
-          size: metadata.size,
-          mimeType: metadata.contentType || "application/octet-stream",
-          remoteUrl: downloadURL,
-          uploadResult: {
-            url: downloadURL,
-            storageKey,
-            bucket: metadata.bucket,
-            generation: metadata.generation,
-            md5Hash: metadata.md5Hash
-          }
-        };
-      }, `Get remote file "${storageKey}"`);
+      const fileRef = getStorageRef(storageKey);
+      const [metadata, downloadURL] = await Promise.all([getMetadata(fileRef), getDownloadURL(fileRef)]);
+      return {
+        size: metadata.size,
+        mimeType: metadata.contentType || "application/octet-stream",
+        remoteUrl: downloadURL,
+        uploadResult: {
+          url: downloadURL,
+          storageKey,
+          bucket: metadata.bucket,
+          generation: metadata.generation,
+          md5Hash: metadata.md5Hash
+        }
+      };
     },
     /**
      * Delete file from Firebase Storage.
@@ -123,15 +94,11 @@ export const PluginFirebaseStorage = defineStorageAdapter((options) => {
     async remove(file, _context) {
       const storageKey = file.storageKey;
       if (!storageKey) {
-        if (import.meta.dev) {
-          console.debug(`[Firebase Storage] Skipping delete for file "${file.name}" - no storageKey`);
-        }
+        if (import.meta.dev) console.debug(`[Firebase Storage] Skipping delete for file "${file.name}" - no storageKey`);
         return;
       }
-      return withRetry(async () => {
-        const fileRef = getStorageRef(storageKey);
-        await deleteObject(fileRef);
-      }, `Delete file "${file.name}"`);
+      const fileRef = getStorageRef(storageKey);
+      await deleteObject(fileRef);
     }
   }
 };
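
With `retries`/`retryDelay` gone from the Firebase adapter, failed operations now surface immediately. Callers that depended on the old behavior can keep an equivalent wrapper in userland; this sketch mirrors the deleted helper's exponential backoff (`delay * 2^attempt`):

```ts
// Userland replacement for the removed withRetry helper.
async function withRetry<T>(op: () => Promise<T>, retries = 3, delayMs = 1000): Promise<T> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      return await op();
    } catch (error) {
      lastError = error;
      if (attempt === retries) break;
      // Exponential backoff, matching the old defaults (3 retries, 1s base delay).
      await new Promise((resolve) => setTimeout(resolve, delayMs * 2 ** attempt));
    }
  }
  throw lastError;
}

// e.g. await withRetry(() => uploader.upload(file)); // uploader/file are placeholders
```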
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nuxt-upload-kit",
-  "version": "0.1.21",
+  "version": "0.1.22",
   "description": "A powerful, plugin-based file upload manager for Nuxt applications",
   "license": "MIT",
   "repository": "https://github.com/genu/nuxt-upload-kit.git",
@@ -34,12 +34,27 @@
       "import": "./dist/providers/firebase.mjs"
     }
   },
+  "scripts": {
+    "prepack": "nuxt-module-build build",
+    "dev": "pnpm dev:prepare && nuxi dev playground",
+    "dev:build": "nuxi build playground",
+    "dev:prepare": "nuxt-module-build build --stub && nuxt-module-build prepare && nuxi prepare playground",
+    "lint": "eslint .",
+    "lint:fix": "eslint . --fix",
+    "format": "prettier --write .",
+    "format:check": "prettier --check .",
+    "test": "vitest run",
+    "test:watch": "vitest watch",
+    "test:coverage": "vitest run --coverage",
+    "test:types": "vue-tsc --noEmit && cd playground && vue-tsc --noEmit",
+    "docs:dev": "nuxt dev docs --extends docus"
+  },
   "dependencies": {
     "mitt": "^3.0.1"
   },
   "devDependencies": {
-    "@aws-sdk/client-s3": "^3.980.0",
-    "@aws-sdk/lib-storage": "^3.980.0",
+    "@aws-sdk/client-s3": "3.982.0",
+    "@aws-sdk/lib-storage": "3.982.0",
     "@azure/storage-file-datalake": "^12.29.0",
     "@ffmpeg/ffmpeg": "0.12.15",
     "@ffmpeg/util": "0.12.2",
@@ -52,12 +67,11 @@
     "@types/node": "latest",
     "@vitejs/plugin-vue": "^6.0.4",
     "@vitest/coverage-v8": "^4.0.18",
-    "changelogen": "^0.6.2",
     "eslint": "^9.39.2",
     "eslint-config-prettier": "10.1.8",
     "eslint-plugin-prettier": "5.5.5",
     "firebase": "^12.8.0",
-    "happy-dom": "^20.4.0",
+    "happy-dom": "20.5.0",
     "nuxt": "^4.3.0",
     "prettier": "^3.8.1",
     "typescript": "~5.9.3",
@@ -86,19 +100,5 @@
       "optional": true
     }
   },
-  "scripts": {
-    "dev": "pnpm dev:prepare && nuxi dev playground",
-    "dev:build": "nuxi build playground",
-    "dev:prepare": "nuxt-module-build build --stub && nuxt-module-build prepare && nuxi prepare playground",
-    "lint": "eslint .",
-    "lint:fix": "eslint . --fix",
-    "format": "prettier --write .",
-    "format:check": "prettier --check .",
-    "release": "pnpm lint && pnpm test && pnpm prepack && changelogen --release && pnpm publish && git push --follow-tags",
-    "test": "vitest run",
-    "test:watch": "vitest watch",
-    "test:coverage": "vitest run --coverage",
-    "test:types": "vue-tsc --noEmit && cd playground && vue-tsc --noEmit",
-    "docs:dev": "nuxt dev docs --extends docus"
-  }
-}
+  "packageManager": "pnpm@10.28.2"
+}