n8n-nodes-binary-to-url 0.0.9 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/drivers/MemoryStorage.d.ts ADDED
@@ -0,0 +1,21 @@
+ export declare class MemoryStorage {
+     private static cache;
+     private static readonly DEFAULT_TTL;
+     private static readonly MAX_CACHE_SIZE;
+     private static currentCacheSize;
+     static generateFileKey(): string;
+     static upload(data: Buffer, contentType: string, ttl?: number): Promise<{
+         fileKey: string;
+         contentType: string;
+     }>;
+     static download(fileKey: string): Promise<{
+         data: Buffer;
+         contentType: string;
+     } | null>;
+     static delete(fileKey: string): Promise<boolean>;
+     static cleanupExpired(): void;
+     static cleanupOldest(requiredSpace: number): void;
+     static getCacheSize(): number;
+     static getCacheCount(): number;
+     static clear(): void;
+ }
package/dist/drivers/MemoryStorage.js ADDED
@@ -0,0 +1,92 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.MemoryStorage = void 0;
+ // Simple in-memory storage with TTL
+ class MemoryStorage {
+     static generateFileKey() {
+         const timestamp = Date.now();
+         const random = Math.random().toString(36).substring(2, 15);
+         return `${timestamp}-${random}`;
+     }
+     static async upload(data, contentType, ttl) {
+         const fileKey = this.generateFileKey();
+         const now = Date.now();
+         const expiresAt = now + (ttl || this.DEFAULT_TTL);
+         const fileSize = data.length;
+         // Check if adding this file would exceed max cache size
+         if (this.currentCacheSize + fileSize > this.MAX_CACHE_SIZE) {
+             // Clean up expired files first
+             this.cleanupExpired();
+             // If still too large, remove oldest files
+             if (this.currentCacheSize + fileSize > this.MAX_CACHE_SIZE) {
+                 this.cleanupOldest(fileSize);
+             }
+         }
+         const file = {
+             data,
+             contentType,
+             uploadedAt: now,
+             expiresAt,
+         };
+         this.cache.set(fileKey, file);
+         this.currentCacheSize += fileSize;
+         return { fileKey, contentType };
+     }
+     static async download(fileKey) {
+         const file = this.cache.get(fileKey);
+         if (!file) {
+             return null;
+         }
+         // Check if expired
+         if (Date.now() > file.expiresAt) {
+             this.delete(fileKey);
+             return null;
+         }
+         return {
+             data: file.data,
+             contentType: file.contentType,
+         };
+     }
+     static async delete(fileKey) {
+         const file = this.cache.get(fileKey);
+         if (!file)
+             return false;
+         this.currentCacheSize -= file.data.length;
+         return this.cache.delete(fileKey);
+     }
+     static cleanupExpired() {
+         const now = Date.now();
+         for (const [key, file] of this.cache.entries()) {
+             if (now > file.expiresAt) {
+                 this.delete(key);
+             }
+         }
+     }
+     static cleanupOldest(requiredSpace) {
+         const entries = Array.from(this.cache.entries());
+         // Sort by upload time (oldest first)
+         entries.sort((a, b) => a[1].uploadedAt - b[1].uploadedAt);
+         let freedSpace = 0;
+         for (const [key, file] of entries) {
+             if (freedSpace >= requiredSpace)
+                 break;
+             freedSpace += file.data.length;
+             this.delete(key);
+         }
+     }
+     static getCacheSize() {
+         return this.currentCacheSize;
+     }
+     static getCacheCount() {
+         return this.cache.size;
+     }
+     static clear() {
+         this.cache.clear();
+         this.currentCacheSize = 0;
+     }
+ }
+ exports.MemoryStorage = MemoryStorage;
+ MemoryStorage.cache = new Map();
+ MemoryStorage.DEFAULT_TTL = 60 * 60 * 1000; // 1 hour
+ MemoryStorage.MAX_CACHE_SIZE = 100 * 1024 * 1024; // 100 MB
+ MemoryStorage.currentCacheSize = 0;
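
For reference, a minimal TypeScript sketch (not part of the published package) of how the MemoryStorage API above can be driven. The payload, content type, and TTL are illustrative, and the import path assumes the snippet sits next to MemoryStorage.js; note that upload() adds the TTL straight to Date.now(), so it is expressed in milliseconds here.

```ts
import { MemoryStorage } from './MemoryStorage';

async function demo(): Promise<void> {
    // Keep the entry for 5 minutes (TTL in milliseconds, per `expiresAt = now + ttl` above).
    const { fileKey } = await MemoryStorage.upload(Buffer.from('hello'), 'text/plain', 5 * 60 * 1000);

    // download() returns { data, contentType } while the entry is alive, or null once it has expired.
    const file = await MemoryStorage.download(fileKey);
    console.log(file?.contentType, file?.data.toString());

    // delete() removes the entry and keeps the tracked cache size in sync.
    await MemoryStorage.delete(fileKey);
}

void demo();
```
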
package/dist/drivers/index.d.ts CHANGED
@@ -1,15 +1 @@
- import { IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow';
- export { S3Storage } from './S3Storage';
- export type { StorageConfig as S3StorageConfig } from './S3Storage';
- export interface StorageDriver {
-     uploadStream(data: Buffer, contentType: string, metadata?: Record<string, string>): Promise<{
-         fileKey: string;
-         contentType: string;
-     }>;
-     downloadStream(fileKey: string): Promise<{
-         data: Buffer;
-         contentType: string;
-     }>;
-     deleteFile(fileKey: string): Promise<void>;
- }
- export declare function createStorageDriver(context: IExecuteFunctions | IWebhookFunctions, bucket: string): Promise<StorageDriver>;
+ export { MemoryStorage } from './MemoryStorage';
package/dist/drivers/index.js CHANGED
@@ -1,48 +1,5 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.S3Storage = void 0;
- exports.createStorageDriver = createStorageDriver;
- const S3Storage_1 = require("./S3Storage");
- var S3Storage_2 = require("./S3Storage");
- Object.defineProperty(exports, "S3Storage", { enumerable: true, get: function () { return S3Storage_2.S3Storage; } });
- async function createStorageDriver(context, bucket) {
-     const credentials = await context.getCredentials('s3Api');
-     if (!credentials) {
-         throw new Error('No S3 credentials found. Please configure S3 credentials.');
-     }
-     const region = context.getNodeParameter('region', 0);
-     const endpoint = context.getNodeParameter('endpoint', 0);
-     const forcePathStyle = context.getNodeParameter('forcePathStyle', 0);
-     // Extract credentials from S3 API credential
-     const creds = credentials;
-     const accessKeyId = creds.accessKeyId || '';
-     const secretAccessKey = creds.secretAccessKey || '';
-     const credentialEndpoint = creds.endpoint;
-     const credentialRegion = creds.region;
-     // Convert forcePathStyle from credential (could be string or boolean)
-     const credentialForcePathStyle = String(creds.forcePathStyle) === 'true';
-     // Use credential values if node parameters are empty
-     const finalEndpoint = endpoint || credentialEndpoint;
-     const finalRegion = region || credentialRegion || 'us-east-1';
-     // Use boolean OR to combine forcePathStyle from node and credential
-     const finalForcePathStyle = forcePathStyle || credentialForcePathStyle || false;
-     if (!accessKeyId || !secretAccessKey) {
-         throw new Error('Invalid credentials. Missing access key or secret key.');
-     }
-     // Auto-determine if path style should be forced
-     let shouldForcePathStyle = finalForcePathStyle;
-     // Force path style by default if custom endpoint is provided
-     // This is needed for MinIO, Wasabi, DigitalOcean Spaces, Alibaba OSS, Tencent COS, etc.
-     if (finalEndpoint && finalEndpoint !== '' && !finalForcePathStyle) {
-         shouldForcePathStyle = true;
-     }
-     const config = {
-         accessKeyId: accessKeyId,
-         secretAccessKey: secretAccessKey,
-         region: finalRegion,
-         bucket,
-         endpoint: finalEndpoint || undefined,
-         forcePathStyle: shouldForcePathStyle,
-     };
-     return new S3Storage_1.S3Storage(config);
- }
+ exports.MemoryStorage = void 0;
+ var MemoryStorage_1 = require("./MemoryStorage");
+ Object.defineProperty(exports, "MemoryStorage", { enumerable: true, get: function () { return MemoryStorage_1.MemoryStorage; } });
package/dist/nodes/BinaryToUrl/BinaryToUrl.node.js CHANGED
@@ -2,7 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.BinaryToUrl = void 0;
  const n8n_workflow_1 = require("n8n-workflow");
- const drivers_1 = require("../../drivers");
+ const MemoryStorage_1 = require("../../drivers/MemoryStorage");
  const MAX_FILE_SIZE = 100 * 1024 * 1024;
  const ALLOWED_MIME_TYPES = [
      'image/jpeg',
@@ -42,18 +42,12 @@ class BinaryToUrl
              group: ['transform'],
              version: 1,
              subtitle: '={{$parameter["operation"]}}',
-             description: 'Upload binary files to S3 storage and proxy them via public URL',
+             description: 'Upload binary files to memory storage and proxy them via public URL',
              defaults: {
                  name: 'Binary to URL',
              },
              inputs: ['main'],
              outputs: ['main'],
-             credentials: [
-                 {
-                     name: 's3Api',
-                     required: true,
-                 },
-             ],
              webhooks: [
                  {
                      name: 'default',
@@ -73,13 +67,13 @@ class BinaryToUrl
                          {
                              name: 'Upload',
                              value: 'upload',
-                             description: 'Upload binary file to storage',
+                             description: 'Upload binary file to memory storage',
                              action: 'Upload file',
                          },
                          {
                              name: 'Delete',
                              value: 'delete',
-                             description: 'Delete file from storage',
+                             description: 'Delete file from memory storage',
                              action: 'Delete file',
                          },
                      ],
@@ -98,51 +92,29 @@ class BinaryToUrl
                      description: 'Name of binary property containing the file to upload',
                  },
                  {
-                     displayName: 'File Key',
-                     name: 'fileKey',
-                     type: 'string',
+                     displayName: 'File Expiration Time (Seconds)',
+                     name: 'ttl',
+                     type: 'number',
                      displayOptions: {
                          show: {
-                             operation: ['delete'],
+                             operation: ['upload'],
                          },
                      },
-                     default: '',
-                     description: 'Key of the file to delete from storage',
+                     default: 3600,
+                     description: 'How long to keep the file in memory (default: 3600 seconds = 1 hour)',
+                     hint: 'Files are automatically deleted after this time',
                  },
                  {
-                     displayName: 'Bucket',
-                     name: 'bucket',
-                     type: 'string',
-                     default: '',
-                     required: true,
-                     description: 'Storage bucket name',
-                 },
-                 {
-                     displayName: 'Region',
-                     name: 'region',
-                     type: 'string',
-                     default: 'us-east-1',
-                     required: true,
-                     description: 'AWS region (leave empty for some S3-compatible services)',
-                 },
-                 {
-                     displayName: 'Custom Endpoint',
-                     name: 'endpoint',
+                     displayName: 'File Key',
+                     name: 'fileKey',
                      type: 'string',
-                     default: '',
-                     description: 'Custom S3 endpoint URL (required for MinIO, DigitalOcean Spaces, Wasabi, etc.)',
                      displayOptions: {
                          show: {
-                             operation: ['upload', 'delete'],
+                             operation: ['delete'],
                          },
                      },
-                 },
-                 {
-                     displayName: 'Force Path Style',
-                     name: 'forcePathStyle',
-                     type: 'boolean',
-                     default: false,
-                     description: 'Whether to use path-style addressing (required for MinIO, DigitalOcean Spaces, etc.)',
+                     default: '',
+                     description: 'Key of the file to delete from storage',
                  },
              ],
              usableAsTool: true,
@@ -151,26 +123,13 @@ class BinaryToUrl
      async execute() {
          const items = this.getInputData();
          const operation = this.getNodeParameter('operation', 0);
-         const bucket = this.getNodeParameter('bucket', 0);
-         if (!bucket) {
-             throw new n8n_workflow_1.NodeOperationError(this.getNode(), 'Bucket name is required');
+         if (operation === 'upload') {
+             return handleUpload(this, items);
          }
-         try {
-             const storage = await (0, drivers_1.createStorageDriver)(this, bucket);
-             if (operation === 'upload') {
-                 return handleUpload(this, items, storage);
-             }
-             else if (operation === 'delete') {
-                 return handleDelete(this, items, storage);
-             }
-             throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Unknown operation: ${operation}`);
-         }
-         catch (error) {
-             if (error instanceof Error) {
-                 throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Operation failed: ${error.message}`);
-             }
-             throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Operation failed: ${String(error)}`);
+         else if (operation === 'delete') {
+             return handleDelete(this, items);
          }
+         throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Unknown operation: ${operation}`);
      }
      async webhook() {
          const req = this.getRequestObject();
@@ -197,52 +156,36 @@ class BinaryToUrl
                  },
              };
          }
-         const bucket = this.getNodeParameter('bucket', 0);
-         if (!bucket) {
-             return {
-                 webhookResponse: {
-                     status: 500,
-                     body: JSON.stringify({ error: 'Node configuration is incomplete' }),
-                     headers: {
-                         'Content-Type': 'application/json',
-                     },
-                 },
-             };
-         }
-         let storage;
          try {
-             storage = await (0, drivers_1.createStorageDriver)(this, bucket);
-         }
-         catch (error) {
-             return {
-                 webhookResponse: {
-                     status: 500,
-                     body: JSON.stringify({ error: error instanceof Error ? error.message : String(error) }),
-                     headers: {
-                         'Content-Type': 'application/json',
+             const result = await MemoryStorage_1.MemoryStorage.download(fileKey);
+             if (!result) {
+                 return {
+                     webhookResponse: {
+                         status: 404,
+                         body: JSON.stringify({ error: 'File not found or expired' }),
+                         headers: {
+                             'Content-Type': 'application/json',
+                         },
                      },
-                 },
-             };
-         }
-         try {
-             const { data, contentType } = await storage.downloadStream(fileKey);
+                 };
+             }
              return {
                  webhookResponse: {
                      status: 200,
-                     body: data.toString('base64'),
+                     body: result.data.toString('base64'),
                      headers: {
-                         'Content-Type': contentType,
+                         'Content-Type': result.contentType,
                          'Cache-Control': 'public, max-age=86400',
                          'Content-Disposition': 'inline',
                      },
                  },
             };
         }
-         catch {
+         catch (error) {
             return {
                 webhookResponse: {
-                     status: 404,
-                     body: JSON.stringify({ error: 'File not found' }),
+                     status: 500,
+                     body: JSON.stringify({ error: error instanceof Error ? error.message : String(error) }),
                     headers: {
                         'Content-Type': 'application/json',
                     },
@@ -252,8 +195,9 @@ class BinaryToUrl
      }
  }
  exports.BinaryToUrl = BinaryToUrl;
- async function handleUpload(context, items, storage) {
+ async function handleUpload(context, items) {
      const binaryPropertyName = context.getNodeParameter('binaryPropertyName', 0);
+     const ttl = context.getNodeParameter('ttl', 0);
      // Build webhook URL using n8n's instance base URL and workflow ID
      // Format: {baseUrl}/webhook/{workflowId}/file/:fileKey
      const baseUrl = context.getInstanceBaseUrl();
@@ -276,7 +220,7 @@ async function handleUpload(context, items, storage) {
          if (fileSize > MAX_FILE_SIZE) {
              throw new n8n_workflow_1.NodeOperationError(context.getNode(), `File size exceeds maximum limit of ${MAX_FILE_SIZE / 1024 / 1024}MB`);
          }
-         const result = await storage.uploadStream(buffer, contentType);
+         const result = await MemoryStorage_1.MemoryStorage.upload(buffer, contentType, ttl);
          // Replace the :fileKey placeholder with the actual file key
          const proxyUrl = webhookUrl.replace(':fileKey', result.fileKey);
          returnData.push({
@@ -291,14 +235,14 @@ async function handleUpload(context, items, storage) {
      }
      return [returnData];
  }
- async function handleDelete(context, items, storage) {
+ async function handleDelete(context, items) {
      const returnData = [];
      for (const item of items) {
          const fileKey = (item.json.fileKey || context.getNodeParameter('fileKey', 0));
          if (!fileKey) {
              throw new n8n_workflow_1.NodeOperationError(context.getNode(), 'File key is required for delete operation');
          }
-         await storage.deleteFile(fileKey);
+         await MemoryStorage_1.MemoryStorage.delete(fileKey);
          returnData.push({
              json: {
                  success: true,
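
As a rough illustration (not part of the package) of the public URL the upload path produces: the TypeScript sketch below mirrors the `{baseUrl}/webhook/{workflowId}/file/:fileKey` format noted in handleUpload and its `webhookUrl.replace(':fileKey', ...)` call; the base URL and workflow ID are made-up values for an n8n instance, and the file key follows the `${timestamp}-${random}` pattern from MemoryStorage.generateFileKey().

```ts
// Hypothetical values standing in for getInstanceBaseUrl() and the workflow ID.
const baseUrl = 'https://n8n.example.com';
const workflowId = 'abc123';

// Format documented in handleUpload: {baseUrl}/webhook/{workflowId}/file/:fileKey
const webhookUrl = `${baseUrl}/webhook/${workflowId}/file/:fileKey`;

// File keys are generated as `${timestamp}-${random}`.
const fileKey = `${Date.now()}-${Math.random().toString(36).substring(2, 15)}`;

const proxyUrl = webhookUrl.replace(':fileKey', fileKey);
// e.g. https://n8n.example.com/webhook/abc123/file/1700000000000-k3j2h1g9f8d
console.log(proxyUrl);
```

A GET on that URL hits the node's webhook handler above, which serves the cached bytes with the stored Content-Type until the TTL expires, after which it responds 404.
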
package/package.json CHANGED
@@ -1,14 +1,14 @@
  {
    "name": "n8n-nodes-binary-to-url",
-   "version": "0.0.9",
-   "description": "n8n community node for binary file to public URL bridge with S3 storage",
+   "version": "0.0.10",
+   "description": "n8n community node for binary file to public URL bridge with in-memory storage",
    "keywords": [
      "n8n-community-node-package",
      "n8n",
      "binary",
      "file",
      "storage",
-     "s3",
+     "memory",
      "proxy",
      "webhook",
      "to-url"
@@ -39,9 +39,6 @@
    "n8n": {
      "n8nNodesApiVersion": 1,
      "strict": true,
-     "credentials": [
-       "dist/credentials/S3Api.credentials.js"
-     ],
      "nodes": [
        "dist/nodes/BinaryToUrl/BinaryToUrl.node.js"
      ]
@@ -1,15 +0,0 @@
1
- import type { ICredentialType, INodeProperties, Icon } from 'n8n-workflow';
2
- export declare class S3Api implements ICredentialType {
3
- name: string;
4
- displayName: string;
5
- icon: Icon;
6
- documentationUrl: string;
7
- properties: INodeProperties[];
8
- test: {
9
- request: {
10
- baseURL: string;
11
- url: string;
12
- method: "GET";
13
- };
14
- };
15
- }
package/dist/credentials/S3Api.credentials.js DELETED
@@ -1,57 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.S3Api = void 0;
- class S3Api {
-     constructor() {
-         this.name = 's3Api';
-         this.displayName = 'S3';
-         this.icon = 'file:../icons/BinaryToUrl.svg';
-         this.documentationUrl = 'https://docs.aws.amazon.com/AmazonS3/latest/userguide/AccessCredentials.html';
-         this.properties = [
-             {
-                 displayName: 'S3 Endpoint',
-                 name: 'endpoint',
-                 type: 'string',
-                 default: '',
-                 description: 'S3-compatible service endpoint (e.g., https://s3.amazonaws.com, https://minio.example.com)',
-             },
-             {
-                 displayName: 'Region',
-                 name: 'region',
-                 type: 'string',
-                 default: 'us-east-1',
-                 description: 'AWS region or custom region for S3-compatible service',
-             },
-             {
-                 displayName: 'Access Key ID',
-                 name: 'accessKeyId',
-                 type: 'string',
-                 default: '',
-             },
-             {
-                 displayName: 'Secret Access Key',
-                 name: 'secretAccessKey',
-                 type: 'string',
-                 default: '',
-                 typeOptions: {
-                     password: true,
-                 },
-             },
-             {
-                 displayName: 'Force Path Style',
-                 name: 'forcePathStyle',
-                 type: 'boolean',
-                 default: false,
-                 description: 'Use path-style addressing (required for MinIO, DigitalOcean Spaces, etc.)',
-             },
-         ];
-         this.test = {
-             request: {
-                 baseURL: '={{$credentials.endpoint}}',
-                 url: '=/',
-                 method: 'GET',
-             },
-         };
-     }
- }
- exports.S3Api = S3Api;
package/dist/drivers/S3Storage.d.ts DELETED
@@ -1,35 +0,0 @@
- export interface StorageConfig {
-     accessKeyId: string;
-     secretAccessKey: string;
-     region: string;
-     bucket: string;
-     endpoint?: string;
-     forcePathStyle?: boolean;
- }
- export interface UploadResult {
-     fileKey: string;
-     contentType: string;
- }
- export interface DownloadResult {
-     data: Buffer;
-     contentType: string;
- }
- export declare class S3Storage {
-     private config;
-     constructor(config: StorageConfig);
-     uploadStream(data: Buffer, contentType: string, metadata?: Record<string, string>): Promise<UploadResult>;
-     downloadStream(fileKey: string): Promise<DownloadResult>;
-     deleteFile(fileKey: string): Promise<void>;
-     private getEndpoint;
-     private generateAuthorization;
-     private getAmzDate;
-     private getDateStamp;
-     private getCanonicalHeaders;
-     private getSignedHeaders;
-     private sha256;
-     private hmac;
-     private getSigningKey;
-     private hmacSha256;
-     private generateFileKey;
-     private getExtensionFromMimeType;
- }
package/dist/drivers/S3Storage.js DELETED
@@ -1,298 +0,0 @@
- "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-     if (k2 === undefined) k2 = k;
-     var desc = Object.getOwnPropertyDescriptor(m, k);
-     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-         desc = { enumerable: true, get: function() { return m[k]; } };
-     }
-     Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
-     if (k2 === undefined) k2 = k;
-     o[k2] = m[k];
- }));
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-     Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
-     o["default"] = v;
- });
- var __importStar = (this && this.__importStar) || (function () {
-     var ownKeys = function(o) {
-         ownKeys = Object.getOwnPropertyNames || function (o) {
-             var ar = [];
-             for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-             return ar;
-         };
-         return ownKeys(o);
-     };
-     return function (mod) {
-         if (mod && mod.__esModule) return mod;
-         var result = {};
-         if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-         __setModuleDefault(result, mod);
-         return result;
-     };
- })();
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.S3Storage = void 0;
- // Use Node.js crypto in Node environment, Web Crypto API in browser
- const crypto = __importStar(require("node:crypto"));
- let cryptoInstance;
- if (typeof window !== 'undefined' && window?.crypto) {
-     // Browser environment (n8n Cloud)
-     cryptoInstance = window.crypto;
- }
- else {
-     // Node.js environment
-     // Create a Web Crypto API compatible wrapper
-     cryptoInstance = {
-         subtle: {
-             digest: async (algorithm, data) => {
-                 const hash = crypto.createHash(algorithm.replace('-', '').toLowerCase());
-                 hash.update(Buffer.from(data));
-                 return Buffer.from(hash.digest()).buffer;
-             },
-             importKey: async (format, keyData, algorithm, extractable, usages) => {
-                 return {
-                     algorithm,
-                     extractable,
-                     usages,
-                     data: format === 'raw' ? keyData : keyData,
-                 };
-             },
-             sign: async (algorithm, key, data) => {
-                 const hmac = crypto.createHmac('sha256', key.data);
-                 hmac.update(Buffer.from(data));
-                 return Buffer.from(hmac.digest()).buffer;
-             },
-         },
-     };
- }
- class S3Storage {
-     constructor(config) {
-         this.config = config;
-     }
-     async uploadStream(data, contentType, metadata) {
-         const fileKey = this.generateFileKey(contentType);
-         const endpoint = this.getEndpoint();
-         const url = `${endpoint}/${this.config.bucket}/${fileKey}`;
-         const headers = {
-             'Content-Type': contentType,
-             'x-amz-content-sha256': 'UNSIGNED-PAYLOAD',
-         };
-         if (metadata) {
-             Object.entries(metadata).forEach(([key, value]) => {
-                 headers[`x-amz-meta-${key}`] = value;
-             });
-         }
-         const authorization = await this.generateAuthorization('PUT', `/${this.config.bucket}/${fileKey}`, headers);
-         try {
-             const response = await fetch(url, {
-                 method: 'PUT',
-                 headers: {
-                     ...headers,
-                     Authorization: authorization,
-                 },
-                 body: data,
-             });
-             if (!response.ok) {
-                 const errorText = await response.text();
-                 throw new Error(`S3 upload failed: ${response.status} ${response.statusText} - ${errorText}`);
-             }
-             return {
-                 fileKey,
-                 contentType,
-             };
-         }
-         catch (error) {
-             if (error instanceof Error) {
-                 throw error;
-             }
-             throw new Error(`S3 upload failed: ${String(error)}`);
-         }
-     }
-     async downloadStream(fileKey) {
-         const endpoint = this.getEndpoint();
-         const url = `${endpoint}/${this.config.bucket}/${fileKey}`;
-         const authorization = await this.generateAuthorization('GET', `/${this.config.bucket}/${fileKey}`, {});
-         try {
-             const response = await fetch(url, {
-                 method: 'GET',
-                 headers: {
-                     Authorization: authorization,
-                 },
-             });
-             if (!response.ok) {
-                 if (response.status === 404) {
-                     throw new Error(`File not found: ${fileKey}`);
-                 }
-                 if (response.status === 403) {
-                     throw new Error(`Access denied to bucket "${this.config.bucket}". Check your credentials`);
-                 }
-                 throw new Error(`S3 download failed: ${response.status} ${response.statusText}`);
-             }
-             const contentType = response.headers.get('content-type') || 'application/octet-stream';
-             const arrayBuffer = await response.arrayBuffer();
-             const data = Buffer.from(arrayBuffer);
-             return {
-                 data,
-                 contentType,
-             };
-         }
-         catch (error) {
-             if (error instanceof Error) {
-                 throw error;
-             }
-             throw new Error(`S3 download failed: ${String(error)}`);
-         }
-     }
-     async deleteFile(fileKey) {
-         const endpoint = this.getEndpoint();
-         const url = `${endpoint}/${this.config.bucket}/${fileKey}`;
-         const authorization = await this.generateAuthorization('DELETE', `/${this.config.bucket}/${fileKey}`, {});
-         try {
-             const response = await fetch(url, {
-                 method: 'DELETE',
-                 headers: {
-                     Authorization: authorization,
-                 },
-             });
-             if (!response.ok && response.status !== 204) {
-                 throw new Error(`S3 delete failed: ${response.status} ${response.statusText}`);
-             }
-         }
-         catch (error) {
-             if (error instanceof Error) {
-                 throw error;
-             }
-             throw new Error(`S3 delete failed: ${String(error)}`);
-         }
-     }
-     getEndpoint() {
-         if (this.config.endpoint) {
-             return this.config.endpoint;
-         }
-         if (this.config.forcePathStyle) {
-             return `https://s3.${this.config.region}.amazonaws.com`;
-         }
-         return `https://${this.config.bucket}.s3.${this.config.region}.amazonaws.com`;
-     }
-     async generateAuthorization(method, path, headers) {
-         const now = new Date();
-         const amzDate = this.getAmzDate(now);
-         const dateStamp = this.getDateStamp(now);
-         // Canonical request
-         const canonicalHeaders = this.getCanonicalHeaders(headers);
-         const signedHeaders = this.getSignedHeaders(headers);
-         const payloadHash = 'UNSIGNED-PAYLOAD';
-         const canonicalRequest = [
-             method,
-             path,
-             '', // Query string
-             canonicalHeaders,
-             signedHeaders,
-             payloadHash,
-         ].join('\n');
-         const canonicalRequestHash = await this.sha256(canonicalRequest);
-         // String to sign
-         const credentialScope = `${dateStamp}/${this.config.region}/s3/aws4_request`;
-         const stringToSign = ['AWS4-HMAC-SHA256', amzDate, credentialScope, canonicalRequestHash].join('\n');
-         // Calculate signature
-         const signingKey = await this.getSigningKey(dateStamp);
-         const signature = await this.hmac(signingKey, stringToSign);
-         // Authorization header
-         return `AWS4-HMAC-SHA256 Credential=${this.config.accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
-     }
-     getAmzDate(date) {
-         return date
-             .toISOString()
-             .replace(/[:-]|.\d{3}/g, '')
-             .replace(/T/, 'T');
-     }
-     getDateStamp(date) {
-         return date.toISOString().substring(0, 10).replace(/-/g, '');
-     }
-     getCanonicalHeaders(headers) {
-         const canonicalHeaders = [];
-         const lowerCaseHeaders = {};
-         for (const [key, value] of Object.entries(headers)) {
-             lowerCaseHeaders[key.toLowerCase()] = value.trim();
-         }
-         for (const [key, value] of Object.entries(lowerCaseHeaders).sort()) {
-             canonicalHeaders.push(`${key}:${value}\n`);
-         }
-         return canonicalHeaders.join('');
-     }
-     getSignedHeaders(headers) {
-         const lowerCaseHeaders = Object.keys(headers).map((h) => h.toLowerCase());
-         return lowerCaseHeaders.sort().join(';');
-     }
-     async sha256(message) {
-         const encoder = new TextEncoder();
-         const data = encoder.encode(message);
-         const hashBuffer = await cryptoInstance.subtle.digest('SHA-256', data);
-         const hashArray = Array.from(new Uint8Array(hashBuffer));
-         return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
-     }
-     async hmac(key, message) {
-         const cryptoKey = await cryptoInstance.subtle.importKey('raw', key, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']);
-         const encoder = new TextEncoder();
-         const data = encoder.encode(message);
-         const signature = await cryptoInstance.subtle.sign('HMAC', cryptoKey, data);
-         const signatureArray = Array.from(new Uint8Array(signature));
-         return signatureArray.map((b) => b.toString(16).padStart(2, '0')).join('');
-     }
-     async getSigningKey(dateStamp) {
-         const kDate = await this.hmacSha256(`AWS4${this.config.secretAccessKey}`, dateStamp);
-         const kRegion = await this.hmacSha256(kDate, this.config.region);
-         const kService = await this.hmacSha256(kRegion, 's3');
-         const kSigning = await this.hmacSha256(kService, 'aws4_request');
-         return kSigning;
-     }
-     async hmacSha256(key, message) {
-         const keyBuffer = typeof key === 'string' ? Buffer.from(key) : key;
-         const encoder = new TextEncoder();
-         const data = encoder.encode(message);
-         const cryptoKey = await cryptoInstance.subtle.importKey('raw', keyBuffer, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']);
-         const signature = await cryptoInstance.subtle.sign('HMAC', cryptoKey, data);
-         return Buffer.from(signature);
-     }
-     generateFileKey(contentType) {
-         const ext = this.getExtensionFromMimeType(contentType);
-         const timestamp = Date.now();
-         const random = Math.random().toString(36).substring(2, 15);
-         return `${timestamp}-${random}${ext}`;
-     }
-     getExtensionFromMimeType(mimeType) {
-         const mimeToExt = {
-             'image/jpeg': '.jpg',
-             'image/png': '.png',
-             'image/gif': '.gif',
-             'image/webp': '.webp',
-             'image/svg+xml': '.svg',
-             'image/bmp': '.bmp',
-             'image/tiff': '.tiff',
-             'image/avif': '.avif',
-             'video/mp4': '.mp4',
-             'video/webm': '.webm',
-             'video/quicktime': '.mov',
-             'video/x-msvideo': '.avi',
-             'video/x-matroska': '.mkv',
-             'application/pdf': '.pdf',
-             'application/zip': '.zip',
-             'application/x-rar-compressed': '.rar',
-             'application/x-7z-compressed': '.7z',
-             'audio/mpeg': '.mp3',
-             'audio/wav': '.wav',
-             'audio/ogg': '.ogg',
-             'audio/flac': '.flac',
-             'text/plain': '.txt',
-             'text/csv': '.csv',
-             'application/json': '.json',
-             'application/xml': '.xml',
-             'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx',
-             'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx',
-         };
-         return mimeToExt[mimeType] || '.bin';
-     }
- }
- exports.S3Storage = S3Storage;
package/dist/index.d.ts DELETED
@@ -1,2 +0,0 @@
- import { BinaryToUrl } from './nodes/BinaryToUrl/BinaryToUrl.node';
- export declare const nodeClasses: (typeof BinaryToUrl)[];
package/dist/index.js DELETED
@@ -1,5 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.nodeClasses = void 0;
- const BinaryToUrl_node_1 = require("./nodes/BinaryToUrl/BinaryToUrl.node");
- exports.nodeClasses = [BinaryToUrl_node_1.BinaryToUrl];