n8n-nodes-binary-to-url 0.0.3 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/credentials/S3StorageApi.credentials.d.ts +15 -0
- package/dist/credentials/S3StorageApi.credentials.js +35 -0
- package/dist/drivers/S3Storage.js +71 -5
- package/dist/drivers/index.js +10 -33
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/nodes/{BinaryBridge/BinaryBridge.node.d.ts → BinaryToUrl/BinaryToUrl.node.d.ts} +1 -1
- package/dist/nodes/{BinaryBridge/BinaryBridge.node.js → BinaryToUrl/BinaryToUrl.node.js} +10 -15
- package/package.json +18 -20
- package/dist/nodes/BinaryBridge/BinaryBridge.node.ts +0 -376
- package/nodes/BinaryBridge/BinaryBridge.node.ts +0 -376
- /package/dist/{nodes/BinaryBridge/BinaryBridge.svg → icons/BinaryToUrl.svg} +0 -0
- /package/{nodes/BinaryBridge → dist/nodes/BinaryToUrl}/BinaryBridge.svg +0 -0
package/dist/credentials/S3StorageApi.credentials.d.ts
ADDED
@@ -0,0 +1,15 @@
+import type { Icon, ICredentialType, INodeProperties } from 'n8n-workflow';
+export declare class S3StorageApi implements ICredentialType {
+    name: string;
+    displayName: string;
+    icon: Icon;
+    documentationUrl: string;
+    properties: INodeProperties[];
+    test: {
+        request: {
+            baseURL: string;
+            url: string;
+            method: "GET";
+        };
+    };
+}

package/dist/credentials/S3StorageApi.credentials.js
ADDED
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.S3StorageApi = void 0;
+class S3StorageApi {
+    constructor() {
+        this.name = 's3StorageApi';
+        this.displayName = 'S3 Storage API';
+        this.icon = 'file:../icons/BinaryToUrl.svg';
+        this.documentationUrl = 'https://docs.aws.amazon.com/AmazonS3/latest/userguide/AccessCredentials.html';
+        this.properties = [
+            {
+                displayName: 'Access Key ID',
+                name: 'accessKeyId',
+                type: 'string',
+                typeOptions: { password: true },
+                default: '',
+            },
+            {
+                displayName: 'Secret Access Key',
+                name: 'secretAccessKey',
+                type: 'string',
+                typeOptions: { password: true },
+                default: '',
+            },
+        ];
+        this.test = {
+            request: {
+                baseURL: '={{$credentials.endpoint}}',
+                url: '=/',
+                method: 'GET',
+            },
+        };
+    }
+}
+exports.S3StorageApi = S3StorageApi;

package/dist/drivers/S3Storage.js
CHANGED
@@ -1,6 +1,72 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.S3Storage = void 0;
+// Use Node.js crypto in Node environment, Web Crypto API in browser
+const crypto = __importStar(require("node:crypto"));
+let cryptoInstance;
+if (typeof window !== 'undefined' && window?.crypto) {
+    // Browser environment (n8n Cloud)
+    cryptoInstance = window.crypto;
+}
+else {
+    // Node.js environment
+    // Create a Web Crypto API compatible wrapper
+    cryptoInstance = {
+        subtle: {
+            digest: async (algorithm, data) => {
+                const hash = crypto.createHash(algorithm.replace('-', '').toLowerCase());
+                hash.update(Buffer.from(data));
+                return Buffer.from(hash.digest()).buffer;
+            },
+            importKey: async (format, keyData, algorithm, extractable, usages) => {
+                return {
+                    algorithm,
+                    extractable,
+                    usages,
+                    data: format === 'raw' ? keyData : keyData,
+                };
+            },
+            sign: async (algorithm, key, data) => {
+                const hmac = crypto.createHmac('sha256', key.data);
+                hmac.update(Buffer.from(data));
+                return Buffer.from(hmac.digest()).buffer;
+            },
+        },
+    };
+}
 class S3Storage {
     constructor(config) {
         this.config = config;
@@ -163,15 +229,15 @@ class S3Storage {
     async sha256(message) {
         const encoder = new TextEncoder();
         const data = encoder.encode(message);
-        const hashBuffer = await
+        const hashBuffer = await cryptoInstance.subtle.digest('SHA-256', data);
         const hashArray = Array.from(new Uint8Array(hashBuffer));
         return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
     }
     async hmac(key, message) {
-        const cryptoKey = await
+        const cryptoKey = await cryptoInstance.subtle.importKey('raw', key, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']);
         const encoder = new TextEncoder();
         const data = encoder.encode(message);
-        const signature = await
+        const signature = await cryptoInstance.subtle.sign('HMAC', cryptoKey, data);
         const signatureArray = Array.from(new Uint8Array(signature));
         return signatureArray.map((b) => b.toString(16).padStart(2, '0')).join('');
     }
@@ -186,8 +252,8 @@ class S3Storage {
         const keyBuffer = typeof key === 'string' ? Buffer.from(key) : key;
         const encoder = new TextEncoder();
         const data = encoder.encode(message);
-        const cryptoKey = await
-        const signature = await
+        const cryptoKey = await cryptoInstance.subtle.importKey('raw', keyBuffer, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']);
+        const signature = await cryptoInstance.subtle.sign('HMAC', cryptoKey, data);
         return Buffer.from(signature);
     }
     generateFileKey(contentType) {

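The hunks above replace direct Node crypto calls in sha256(), hmac(), and the key-signing helper with a Web Crypto-style cryptoInstance.subtle wrapper, so the same code path can run in both Node.js and browser (n8n Cloud) environments. A minimal standalone sketch of that call pattern, exercised here against Node's built-in webcrypto implementation (helper names are illustrative, not part of the package):

const { webcrypto } = require('node:crypto');

// SHA-256 of a string as lowercase hex, the way the patched sha256() does it.
async function sha256Hex(subtle, message) {
    const data = new TextEncoder().encode(message);
    const hashBuffer = await subtle.digest('SHA-256', data); // ArrayBuffer
    return Array.from(new Uint8Array(hashBuffer))
        .map((b) => b.toString(16).padStart(2, '0'))
        .join('');
}

// HMAC-SHA256 of a string, mirroring the importKey/sign sequence in hmac().
async function hmacSha256Hex(subtle, key, message) {
    const cryptoKey = await subtle.importKey(
        'raw',
        Buffer.from(key),
        { name: 'HMAC', hash: 'SHA-256' },
        false,
        ['sign'],
    );
    const signature = await subtle.sign('HMAC', cryptoKey, new TextEncoder().encode(message));
    return Buffer.from(signature).toString('hex');
}

(async () => {
    console.log(await sha256Hex(webcrypto.subtle, 'hello'));
    console.log(await hmacSha256Hex(webcrypto.subtle, 'secret-key', 'hello'));
})();
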
package/dist/drivers/index.js
CHANGED
@@ -6,50 +6,27 @@ const S3Storage_1 = require("./S3Storage");
 var S3Storage_2 = require("./S3Storage");
 Object.defineProperty(exports, "S3Storage", { enumerable: true, get: function () { return S3Storage_2.S3Storage; } });
 async function createStorageDriver(context, bucket) {
-
-    let credentials = null;
-    let isAwsS3 = false;
-    try {
-        credentials = await context.getCredentials('awsS3');
-        if (credentials) {
-            isAwsS3 = false;
-        }
-    }
-    catch (error) {
-        // S3 Compatible credentials not found, try AWS S3
-    }
-    // If S3 Compatible credentials not found, try AWS S3 API credentials
-    if (!credentials) {
-        try {
-            credentials = await context.getCredentials('awsS3Api');
-            if (credentials) {
-                isAwsS3 = true;
-            }
-        }
-        catch (error) {
-            // AWS S3 credentials not found
-        }
-    }
+    const credentials = await context.getCredentials('s3StorageApi');
     if (!credentials) {
-        throw new Error('No S3 credentials found. Please configure
+        throw new Error('No S3 credentials found. Please configure S3 credentials.');
     }
     const region = context.getNodeParameter('region', 0);
     const endpoint = context.getNodeParameter('endpoint', 0);
     const forcePathStyle = context.getNodeParameter('forcePathStyle', 0);
-    // Extract credentials -
-    const
-
+    // Extract credentials - handle both direct access and data wrapper
+    const creds = (credentials.data || credentials);
+    // Support multiple field naming conventions
+    const accessKeyId = creds.accessKeyId || creds.access_key_id;
+    const secretAccessKey = creds.secretAccessKey || creds.secret_access_key;
     if (!accessKeyId || !secretAccessKey) {
         throw new Error('Invalid credentials. Missing access key or secret key.');
     }
     // Auto-determine if path style should be forced
     let shouldForcePathStyle = forcePathStyle;
-    //
+    // Force path style by default if custom endpoint is provided
     // This is needed for MinIO, Wasabi, DigitalOcean Spaces, Alibaba OSS, Tencent COS, etc.
-    if (
-
-        shouldForcePathStyle = true;
-    }
+    if (endpoint && endpoint !== '') {
+        shouldForcePathStyle = true;
     }
     const config = {
         accessKeyId: accessKeyId,

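The rewritten createStorageDriver() above reads a single s3StorageApi credential, accepts either camelCase or snake_case key fields, and switches to path-style addressing whenever a custom endpoint is configured. A minimal sketch of that normalization step pulled out as a standalone helper (hypothetical function name, not part of the package):

// Illustrative only: mirrors the credential unwrapping and path-style decision
// made by the patched createStorageDriver().
function normalizeS3Config(credentials, { region, endpoint, forcePathStyle }) {
    const creds = credentials.data || credentials; // unwrap optional { data: ... } wrapper
    const accessKeyId = creds.accessKeyId || creds.access_key_id;
    const secretAccessKey = creds.secretAccessKey || creds.secret_access_key;
    if (!accessKeyId || !secretAccessKey) {
        throw new Error('Invalid credentials. Missing access key or secret key.');
    }
    // A custom endpoint (MinIO, Wasabi, DigitalOcean Spaces, ...) implies path-style URLs.
    const shouldForcePathStyle = endpoint && endpoint !== '' ? true : forcePathStyle;
    return { accessKeyId, secretAccessKey, region, endpoint, forcePathStyle: shouldForcePathStyle };
}

// Example call (placeholder values):
// normalizeS3Config(
//     { accessKeyId: 'AKIA...', secretAccessKey: 'secret' },
//     { region: 'us-east-1', endpoint: 'https://minio.example.com:9000', forcePathStyle: false },
// );
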
package/dist/index.d.ts
CHANGED
@@ -1,2 +1,2 @@
-import {
-export declare const nodeClasses: (typeof
+import { BinaryToUrl } from './nodes/BinaryToUrl/BinaryToUrl.node';
+export declare const nodeClasses: (typeof BinaryToUrl)[];

package/dist/index.js
CHANGED
@@ -1,5 +1,5 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.nodeClasses = void 0;
-const
-exports.nodeClasses = [
+const BinaryToUrl_node_1 = require("./nodes/BinaryToUrl/BinaryToUrl.node");
+exports.nodeClasses = [BinaryToUrl_node_1.BinaryToUrl];

package/dist/nodes/{BinaryBridge/BinaryBridge.node.d.ts → BinaryToUrl/BinaryToUrl.node.d.ts}
RENAMED
@@ -1,5 +1,5 @@
 import { INodeType, INodeTypeDescription, IExecuteFunctions, IWebhookFunctions, IWebhookResponseData, INodeExecutionData } from 'n8n-workflow';
-export declare class
+export declare class BinaryToUrl implements INodeType {
     description: INodeTypeDescription;
     execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
     webhook(this: IWebhookFunctions): Promise<IWebhookResponseData>;

package/dist/nodes/{BinaryBridge/BinaryBridge.node.js → BinaryToUrl/BinaryToUrl.node.js}
RENAMED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.BinaryToUrl = void 0;
 const n8n_workflow_1 = require("n8n-workflow");
 const drivers_1 = require("../../drivers");
 const MAX_FILE_SIZE = 100 * 1024 * 1024;
@@ -33,30 +33,24 @@ const ALLOWED_MIME_TYPES = [
     'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
     'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
 ];
-class
+class BinaryToUrl {
     constructor() {
         this.description = {
-            displayName: 'Binary
-            name: '
+            displayName: 'Binary to URL',
+            name: 'binaryToUrl',
             icon: 'file:BinaryBridge.svg',
             group: ['transform'],
             version: 1,
             subtitle: '={{$parameter["operation"]}}',
             description: 'Upload binary files to S3 storage and proxy them via public URL',
             defaults: {
-                name: 'Binary
+                name: 'Binary to URL',
             },
            inputs: ['main'],
            outputs: ['main'],
            credentials: [
                {
-                    name: '
-                    displayName: 'S3 Compatible',
-                    required: true,
-                },
-                {
-                    name: 'awsS3Api',
-                    displayName: 'AWS S3',
+                    name: 's3StorageApi',
                     required: true,
                 },
             ],
@@ -148,9 +142,10 @@ class BinaryBridge {
                     name: 'forcePathStyle',
                     type: 'boolean',
                     default: false,
-                    description: '
+                    description: 'Whether to use path-style addressing (required for MinIO, DigitalOcean Spaces, etc.)',
                 },
             ],
+            usableAsTool: true,
         };
     }
     async execute() {
@@ -243,7 +238,7 @@ class BinaryBridge {
                 },
             };
         }
-        catch
+        catch {
             return {
                 webhookResponse: {
                     status: 404,
@@ -256,7 +251,7 @@ class BinaryBridge {
         }
     }
 }
-exports.
+exports.BinaryToUrl = BinaryToUrl;
 async function handleUpload(context, items, storage) {
     const binaryPropertyName = context.getNodeParameter('binaryPropertyName', 0);
     const webhookBaseUrl = buildWebhookUrl(context, 'default', 'file');

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "n8n-nodes-binary-to-url",
-  "version": "0.0.
+  "version": "0.0.6",
   "description": "n8n community node for binary file to public URL bridge with S3 storage",
   "keywords": [
     "n8n-community-node-package",
@@ -24,37 +24,35 @@
   },
   "main": "dist/index.js",
   "scripts": {
-    "build": "
-    "
+    "build": "n8n-node build",
+    "build:watch": "tsc --watch",
+    "dev": "n8n-node dev",
     "format": "prettier --write \"**/*.{js,ts,json,md}\"",
-    "lint": "
-    "
-    "
-    "
+    "lint": "n8n-node lint",
+    "lint:fix": "n8n-node lint --fix",
+    "release": "n8n-node release",
+    "prepublishOnly": "n8n-node prerelease"
   },
   "files": [
-    "dist"
-    "nodes"
+    "dist"
   ],
   "n8n": {
     "n8nNodesApiVersion": 1,
-    "
+    "strict": true,
+    "credentials": [
+      "dist/credentials/S3StorageApi.credentials.js"
+    ],
     "nodes": [
-      "dist/nodes/
+      "dist/nodes/BinaryToUrl/BinaryToUrl.node.js"
     ]
   },
   "dependencies": {},
   "devDependencies": {
-    "@
-    "
-    "@typescript-eslint/eslint-plugin": "^6.0.0",
-    "@typescript-eslint/parser": "^6.0.0",
-    "eslint": "^8.50.0",
+    "@n8n/node-cli": "*",
+    "eslint": "9.32.0",
     "eslint-plugin-n8n-nodes-base": "^1.11.0",
-    "
-    "
-    "ts-jest": "^29.4.6",
-    "typescript": "^5.2.0"
+    "prettier": "3.6.2",
+    "typescript": "5.9.2"
   },
   "peerDependencies": {
     "n8n-workflow": "*"

package/dist/nodes/BinaryBridge/BinaryBridge.node.ts
DELETED
@@ -1,376 +0,0 @@
-import {
-  INodeType,
-  INodeTypeDescription,
-  IExecuteFunctions,
-  IWebhookFunctions,
-  IWebhookResponseData,
-  INodeExecutionData,
-  NodeOperationError,
-} from 'n8n-workflow';
-import { createStorageDriver, StorageDriver } from '../../drivers';
-
-const MAX_FILE_SIZE = 100 * 1024 * 1024;
-const ALLOWED_MIME_TYPES = [
-  'image/jpeg',
-  'image/png',
-  'image/gif',
-  'image/webp',
-  'image/svg+xml',
-  'image/bmp',
-  'image/tiff',
-  'image/avif',
-  'video/mp4',
-  'video/webm',
-  'video/quicktime',
-  'video/x-msvideo',
-  'video/x-matroska',
-  'application/pdf',
-  'application/zip',
-  'application/x-rar-compressed',
-  'application/x-7z-compressed',
-  'audio/mpeg',
-  'audio/wav',
-  'audio/ogg',
-  'audio/flac',
-  'text/plain',
-  'text/csv',
-  'application/json',
-  'application/xml',
-  'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
-  'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
-];
-
-export class BinaryBridge implements INodeType {
-  description: INodeTypeDescription = {
-    displayName: 'Binary Bridge',
-    name: 'binaryBridge',
-    icon: 'file:BinaryBridge.svg',
-    group: ['transform'],
-    version: 1,
-    subtitle: '={{$parameter["operation"]}}',
-    description: 'Upload binary files to S3 storage and proxy them via public URL',
-    defaults: {
-      name: 'Binary Bridge',
-    },
-    inputs: ['main'],
-    outputs: ['main'],
-    credentials: [
-      {
-        name: 'awsS3',
-        displayName: 'S3 Compatible',
-        required: true,
-      },
-      {
-        name: 'awsS3Api',
-        displayName: 'AWS S3',
-        required: true,
-      },
-    ],
-    webhooks: [
-      {
-        name: 'default',
-        httpMethod: 'GET',
-        responseMode: 'onReceived',
-        path: 'file/:fileKey',
-        isFullPath: true,
-      },
-    ],
-    properties: [
-      {
-        displayName: 'Operation',
-        name: 'operation',
-        type: 'options',
-        noDataExpression: true,
-        options: [
-          {
-            name: 'Upload',
-            value: 'upload',
-            description: 'Upload binary file to storage',
-            action: 'Upload file',
-          },
-          {
-            name: 'Delete',
-            value: 'delete',
-            description: 'Delete file from storage',
-            action: 'Delete file',
-          },
-        ],
-        default: 'upload',
-      },
-      {
-        displayName: 'Binary Property',
-        name: 'binaryPropertyName',
-        type: 'string',
-        displayOptions: {
-          show: {
-            operation: ['upload'],
-          },
-        },
-        default: 'data',
-        description: 'Name of binary property containing the file to upload',
-      },
-      {
-        displayName: 'File Key',
-        name: 'fileKey',
-        type: 'string',
-        displayOptions: {
-          show: {
-            operation: ['delete'],
-          },
-        },
-        default: '',
-        description: 'Key of the file to delete from storage',
-      },
-      {
-        displayName: 'Bucket',
-        name: 'bucket',
-        type: 'string',
-        default: '',
-        required: true,
-        description: 'Storage bucket name',
-      },
-      {
-        displayName: 'Region',
-        name: 'region',
-        type: 'string',
-        default: 'us-east-1',
-        required: true,
-        description: 'AWS region (leave empty for some S3-compatible services)',
-      },
-      {
-        displayName: 'Custom Endpoint',
-        name: 'endpoint',
-        type: 'string',
-        default: '',
-        description:
-          'Custom S3 endpoint URL (required for MinIO, DigitalOcean Spaces, Wasabi, etc.)',
-        displayOptions: {
-          show: {
-            operation: ['upload', 'delete'],
-          },
-        },
-      },
-      {
-        displayName: 'Force Path Style',
-        name: 'forcePathStyle',
-        type: 'boolean',
-        default: false,
-        description: 'Use path-style addressing (required for MinIO, DigitalOcean Spaces, etc.)',
-      },
-    ],
-  };
-
-  async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
-    const items = this.getInputData();
-    const operation = this.getNodeParameter('operation', 0) as string;
-    const bucket = this.getNodeParameter('bucket', 0) as string;
-
-    if (!bucket) {
-      throw new NodeOperationError(this.getNode(), 'Bucket name is required');
-    }
-
-    try {
-      const storage = await createStorageDriver(this, bucket);
-
-      if (operation === 'upload') {
-        return handleUpload(this, items, storage);
-      } else if (operation === 'delete') {
-        return handleDelete(this, items, storage);
-      }
-
-      throw new NodeOperationError(this.getNode(), `Unknown operation: ${operation}`);
-    } catch (error) {
-      if (error instanceof Error) {
-        throw new NodeOperationError(this.getNode(), `Operation failed: ${error.message}`);
-      }
-      throw new NodeOperationError(this.getNode(), `Operation failed: ${String(error)}`);
-    }
-  }
-
-  async webhook(this: IWebhookFunctions): Promise<IWebhookResponseData> {
-    const req = this.getRequestObject();
-    const fileKey = req.params.fileKey as string;
-
-    if (!fileKey) {
-      return {
-        webhookResponse: {
-          status: 400,
-          body: JSON.stringify({ error: 'Missing fileKey' }),
-          headers: {
-            'Content-Type': 'application/json',
-          },
-        },
-      };
-    }
-
-    if (!isValidFileKey(fileKey)) {
-      return {
-        webhookResponse: {
-          status: 400,
-          body: JSON.stringify({ error: 'Invalid fileKey' }),
-          headers: {
-            'Content-Type': 'application/json',
-          },
-        },
-      };
-    }
-
-    const bucket = this.getNodeParameter('bucket', 0) as string;
-
-    if (!bucket) {
-      return {
-        webhookResponse: {
-          status: 500,
-          body: JSON.stringify({ error: 'Node configuration is incomplete' }),
-          headers: {
-            'Content-Type': 'application/json',
-          },
-        },
-      };
-    }
-
-    let storage;
-    try {
-      storage = await createStorageDriver(this, bucket);
-    } catch (error) {
-      return {
-        webhookResponse: {
-          status: 500,
-          body: JSON.stringify({ error: error instanceof Error ? error.message : String(error) }),
-          headers: {
-            'Content-Type': 'application/json',
-          },
-        },
-      };
-    }
-
-    try {
-      const { data, contentType } = await storage.downloadStream(fileKey);
-
-      return {
-        webhookResponse: {
-          status: 200,
-          body: data.toString('base64'),
-          headers: {
-            'Content-Type': contentType,
-            'Cache-Control': 'public, max-age=86400',
-            'Content-Disposition': 'inline',
-          },
-        },
-      };
-    } catch (error) {
-      return {
-        webhookResponse: {
-          status: 404,
-          body: JSON.stringify({ error: 'File not found' }),
-          headers: {
-            'Content-Type': 'application/json',
-          },
-        },
-      };
-    }
-  }
-}
-
-async function handleUpload(
-  context: IExecuteFunctions,
-  items: INodeExecutionData[],
-  storage: StorageDriver
-): Promise<INodeExecutionData[][]> {
-  const binaryPropertyName = context.getNodeParameter('binaryPropertyName', 0) as string;
-  const webhookBaseUrl = buildWebhookUrl(context, 'default', 'file');
-
-  const returnData: INodeExecutionData[] = [];
-
-  for (const item of items) {
-    const binaryData = item.binary?.[binaryPropertyName];
-
-    if (!binaryData) {
-      throw new NodeOperationError(
-        context.getNode(),
-        `No binary data found in property "${binaryPropertyName}"`
-      );
-    }
-
-    const buffer = Buffer.from(binaryData.data, 'base64');
-
-    // Use provided MIME type or default
-    const contentType = binaryData.mimeType || 'application/octet-stream';
-
-    if (!ALLOWED_MIME_TYPES.includes(contentType)) {
-      throw new NodeOperationError(
-        context.getNode(),
-        `MIME type "${contentType}" is not allowed. Allowed types: ${ALLOWED_MIME_TYPES.join(', ')}`
-      );
-    }
-
-    const fileSize = buffer.length;
-    if (fileSize > MAX_FILE_SIZE) {
-      throw new NodeOperationError(
-        context.getNode(),
-        `File size exceeds maximum limit of ${MAX_FILE_SIZE / 1024 / 1024}MB`
-      );
-    }
-
-    const result = await storage.uploadStream(buffer, contentType);
-
-    const proxyUrl = `${webhookBaseUrl}/${result.fileKey}`;
-
-    returnData.push({
-      json: {
-        fileKey: result.fileKey,
-        proxyUrl,
-        contentType,
-        fileSize,
-      },
-      binary: item.binary,
-    });
-  }
-
-  return [returnData];
-}
-
-async function handleDelete(
-  context: IExecuteFunctions,
-  items: INodeExecutionData[],
-  storage: StorageDriver
-): Promise<INodeExecutionData[][]> {
-  const returnData: INodeExecutionData[] = [];
-
-  for (const item of items) {
-    const fileKey = (item.json.fileKey || context.getNodeParameter('fileKey', 0)) as string;
-
-    if (!fileKey) {
-      throw new NodeOperationError(context.getNode(), 'File key is required for delete operation');
-    }
-
-    await storage.deleteFile(fileKey);
-
-    returnData.push({
-      json: {
-        success: true,
-        deleted: fileKey,
-      },
-    });
-  }
-
-  return [returnData];
-}
-
-function buildWebhookUrl(context: IExecuteFunctions, webhookName: string, path: string): string {
-  const baseUrl = context.getInstanceBaseUrl();
-  const node = context.getNode();
-  const workflow = context.getWorkflow();
-  const workflowId = workflow.id;
-  const nodeName = encodeURIComponent(node.name.toLowerCase());
-  return `${baseUrl}/webhook/${workflowId}/${nodeName}/${path}`;
-}
-
-function isValidFileKey(fileKey: string): boolean {
-  if (!fileKey || typeof fileKey !== 'string') {
-    return false;
-  }
-
-  const fileKeyPattern = /^[0-9]+-[a-z0-9]+\.[a-z0-9]+$/i;
-  return fileKeyPattern.test(fileKey);
-}

package/nodes/BinaryBridge/BinaryBridge.node.ts
DELETED
@@ -1,376 +0,0 @@
(All 376 removed lines are identical to package/dist/nodes/BinaryBridge/BinaryBridge.node.ts, shown in full above.)

/package/dist/{nodes/BinaryBridge/BinaryBridge.svg → icons/BinaryToUrl.svg}
File without changes

/package/{nodes/BinaryBridge → dist/nodes/BinaryToUrl}/BinaryBridge.svg
File without changes