n8n-nodes-binary-to-url 0.0.1

package/README.md ADDED
@@ -0,0 +1,89 @@
+ # n8n-nodes-binary-bridge
+
+ An n8n community node that bridges binary files to public URLs, backed by S3 storage.
+
+ ## Features
+
+ - **Upload Mode**: Upload binary files to S3 storage and get a public proxy URL back
+ - **Delete Mode**: Delete files from S3 storage
+ - **Webhook Proxy**: A built-in webhook acts as a file-streaming proxy server
+ - **Streaming**: Streams data end to end to avoid buffering whole files in memory
+ - **S3 Compatible**: Supports AWS S3 and S3-compatible services (MinIO, DigitalOcean Spaces, etc.)
+ - **Custom Endpoint**: Configure a custom S3 endpoint for S3-compatible services
+ - **Path Style**: Supports both virtual-hosted-style and path-style addressing
+
+ ## Installation
+
+ ```bash
+ npm install n8n-nodes-binary-bridge
+ ```
+
+ ## Usage
+
+ ### Upload Mode
+
+ 1. Add a **Binary Bridge** node to your workflow
+ 2. Configure AWS S3 credentials
+ 3. Set the operation to **Upload**
+ 4. Configure the bucket name and region
+ 5. (Optional) Set a custom endpoint for S3-compatible services
+ 6. (Optional) Enable force path style if needed
+ 7. Connect a node with binary data to the Binary Bridge node
+ 8. Execute the workflow
+ 9. The node returns (see the sample below):
+    - `fileKey`: Unique file identifier
+    - `proxyUrl`: Public URL to access the file
+    - `contentType`: MIME type of the file
+
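For orientation, an Upload-mode item looks roughly like this; all values are illustrative, and the URL shape follows the webhook pattern described under Webhook Proxy below:

```typescript
// Illustrative Upload-mode result (field names from the list above; values made up):
const uploadResult = {
  fileKey: '1718000000000-k3j9x2abc.png',
  proxyUrl: 'https://your-n8n-instance/webhook/wf-123/binarybridge/file/1718000000000-k3j9x2abc.png',
  contentType: 'image/png',
};
```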
+ ### Delete Mode
+
+ 1. Add a **Binary Bridge** node to your workflow
+ 2. Configure AWS S3 credentials
+ 3. Set the operation to **Delete**
+ 4. Configure the bucket name and region
+ 5. Set the file key to delete (or take it from a previous upload)
+ 6. Execute the workflow
+ 7. The node returns (see the sample below):
+    - `success`: `true` if the deletion succeeded
+    - `deleted`: The file key that was deleted
+
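And the corresponding Delete-mode item, again with illustrative values:

```typescript
// Illustrative Delete-mode result:
const deleteResult = {
  success: true,
  deleted: '1718000000000-k3j9x2abc.png',
};
```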
+ ### Webhook Proxy
+
+ The webhook URL is generated automatically and can be used to access uploaded files:
+
+ ```
+ https://your-n8n-instance/webhook/{workflowId}/binarybridge/file/{fileKey}
+ ```
+
+ The webhook (a fetch example follows this list):
+
+ - Serves **GET** requests to download files
+ - Sets the **Content-Type** header to the file's MIME type
+ - Sends **Cache-Control** with a 24-hour cache lifetime
+ - Sends **Content-Disposition: inline** so browsers preview files instead of downloading them
+
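A minimal consumer sketch, assuming a Node 18+ runtime with global `fetch` and an illustrative URL:

```typescript
// Fetch an uploaded file through the webhook proxy (run inside an async context).
const res = await fetch(
  'https://your-n8n-instance/webhook/wf-123/binarybridge/file/1718000000000-k3j9x2abc.png',
);
console.log(res.status, res.headers.get('content-type')); // e.g. 200 image/png
const bytes = Buffer.from(await res.arrayBuffer()); // or consume res.body as a stream
```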
+ ## Architecture
+
+ This node implements a **single-node proxy** architecture:
+
+ - Handles file uploads to S3 storage
+ - Acts as a webhook server for file streaming (sketched below)
+ - Closes the upload-and-serve loop without external dependencies
+ - Uses streaming to avoid memory issues in n8n Cloud
+
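The serve side of that loop can be pictured as follows. This is a conceptual sketch, not the node's actual webhook code, and the `Driver` interface here is a stand-in for the storage classes shipped in `dist`:

```typescript
import type { ServerResponse } from 'http';
import type { Readable } from 'stream';

// Stand-in for the package's storage drivers (see the StorageDriver interface below).
interface Driver {
  downloadStream(fileKey: string): Promise<{ stream: Readable; contentType: string }>;
}

// GET .../file/:fileKey -> look the file up in storage and stream it straight back.
async function serveFile(driver: Driver, fileKey: string, res: ServerResponse): Promise<void> {
  const { stream, contentType } = await driver.downloadStream(fileKey);
  res.setHeader('Content-Type', contentType);              // correct MIME type
  res.setHeader('Cache-Control', 'public, max-age=86400'); // 24-hour cache
  res.setHeader('Content-Disposition', 'inline');          // browser preview
  stream.pipe(res); // bytes flow from storage to the client without full buffering
}
```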
+ ## Technical Details
+
+ - **Node Type**: Transform
+ - **Version**: 1
+ - **n8n Version**: >= 1.0.0
+ - **Dependencies**: `@aws-sdk/client-s3`, plus `@supabase/supabase-js` for the Supabase storage driver (kept minimal for n8n compatibility)
+ - **Streaming**: Uses Node.js `Readable` streams for efficient file handling
+ - **File Key Generation**: Timestamp plus a random suffix, which keeps keys unique and hard to guess (note that `Math.random()` is not cryptographically secure)
+
+ ## License
+
+ MIT
+
+ ## Repository
+
+ https://cnb.cool/ksxh-wwrs/n8n-nodes-binary-bridge
@@ -0,0 +1,27 @@
+ import { Readable } from 'stream';
+ export interface StorageConfig {
+     accessKeyId: string;
+     secretAccessKey: string;
+     region: string;
+     bucket: string;
+     endpoint?: string;
+     forcePathStyle?: boolean;
+ }
+ export interface UploadResult {
+     fileKey: string;
+     contentType: string;
+ }
+ export interface DownloadResult {
+     stream: Readable;
+     contentType: string;
+ }
+ export declare class S3Storage {
+     private s3Client;
+     private bucket;
+     constructor(config: StorageConfig);
+     uploadStream(stream: Readable, contentType: string, metadata?: Record<string, string>): Promise<UploadResult>;
+     downloadStream(fileKey: string): Promise<DownloadResult>;
+     deleteFile(fileKey: string): Promise<void>;
+     private generateFileKey;
+     private getExtensionFromMimeType;
+ }
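A minimal usage sketch against this declaration; the import path and every config value are placeholders:

```typescript
import { createReadStream } from 'fs';
import { S3Storage } from './S3Storage'; // adjust to the actual build layout

async function demo(): Promise<void> {
  const storage = new S3Storage({
    accessKeyId: 'AKIA...',  // placeholder
    secretAccessKey: '...',  // placeholder
    region: 'us-east-1',
    bucket: 'my-bucket',
    // endpoint: 'http://localhost:9000', // only for S3-compatible services
    // forcePathStyle: true,              // often required by MinIO
  });

  const { fileKey } = await storage.uploadStream(createReadStream('./photo.png'), 'image/png');
  const { stream, contentType } = await storage.downloadStream(fileKey);
  console.log(fileKey, contentType);
  stream.pipe(process.stdout);
  // await storage.deleteFile(fileKey); // clean up once the stream is consumed
}
```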
@@ -0,0 +1,124 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.S3Storage = void 0;
+ const client_s3_1 = require("@aws-sdk/client-s3");
+ const stream_1 = require("stream");
+ class S3Storage {
+     constructor(config) {
+         this.s3Client = new client_s3_1.S3Client({
+             region: config.region,
+             credentials: {
+                 accessKeyId: config.accessKeyId,
+                 secretAccessKey: config.secretAccessKey,
+             },
+             endpoint: config.endpoint,
+             forcePathStyle: config.forcePathStyle ?? false,
+         });
+         this.bucket = config.bucket;
+     }
+     async uploadStream(stream, contentType, metadata) {
+         const fileKey = this.generateFileKey(contentType);
+         try {
+             const command = new client_s3_1.PutObjectCommand({
+                 Bucket: this.bucket,
+                 Key: fileKey,
+                 Body: stream,
+                 ContentType: contentType,
+                 Metadata: metadata || {},
+             });
+             await this.s3Client.send(command);
+             return {
+                 fileKey,
+                 contentType,
+             };
+         }
+         catch (error) {
+             if (error instanceof client_s3_1.S3ServiceException) {
+                 if (error.name === 'NoSuchBucket') {
+                     throw new Error(`S3 bucket "${this.bucket}" does not exist or is not accessible`);
+                 }
+                 if (error.name === 'AccessDenied') {
+                     throw new Error(`Access denied to S3 bucket "${this.bucket}". Check your credentials and bucket permissions`);
+                 }
+                 throw new Error(`S3 upload failed: ${error.message}`);
+             }
+             throw error;
+         }
+     }
+     async downloadStream(fileKey) {
+         try {
+             const command = new client_s3_1.GetObjectCommand({
+                 Bucket: this.bucket,
+                 Key: fileKey,
+             });
+             const response = await this.s3Client.send(command);
+             if (!response.Body) {
+                 throw new Error(`File not found: ${fileKey}`);
+             }
+             const body = response.Body;
+             const stream = body instanceof stream_1.Readable ? body : stream_1.Readable.from(response.Body);
+             return {
+                 stream,
+                 contentType: response.ContentType || 'application/octet-stream',
+             };
+         }
+         catch (error) {
+             if (error instanceof client_s3_1.S3ServiceException) {
+                 if (error.name === 'NoSuchKey' || error.name === 'NotFound') {
+                     throw new Error(`File not found: ${fileKey}`);
+                 }
+                 if (error.name === 'AccessDenied') {
+                     throw new Error(`Access denied to S3 bucket "${this.bucket}". Check your credentials and bucket permissions`);
+                 }
+                 throw new Error(`S3 download failed: ${error.message}`);
+             }
+             throw error;
+         }
+     }
+     async deleteFile(fileKey) {
+         const command = new client_s3_1.DeleteObjectCommand({
+             Bucket: this.bucket,
+             Key: fileKey,
+         });
+         await this.s3Client.send(command);
+     }
+     generateFileKey(contentType) {
+         const ext = this.getExtensionFromMimeType(contentType);
+         const timestamp = Date.now();
+         const random = Math.random().toString(36).substring(2, 15);
+         return `${timestamp}-${random}${ext}`;
+     }
+     getExtensionFromMimeType(mimeType) {
+         const mimeToExt = {
+             'image/jpeg': '.jpg',
+             'image/png': '.png',
+             'image/gif': '.gif',
+             'image/webp': '.webp',
+             'image/svg+xml': '.svg',
+             'image/bmp': '.bmp',
+             'image/tiff': '.tiff',
+             'image/avif': '.avif',
+             'video/mp4': '.mp4',
+             'video/webm': '.webm',
+             'video/quicktime': '.mov',
+             'video/x-msvideo': '.avi',
+             'video/x-matroska': '.mkv',
+             'application/pdf': '.pdf',
+             'application/zip': '.zip',
+             'application/x-rar-compressed': '.rar',
+             'application/x-7z-compressed': '.7z',
+             'audio/mpeg': '.mp3',
+             'audio/wav': '.wav',
+             'audio/ogg': '.ogg',
+             'audio/flac': '.flac',
+             'text/plain': '.txt',
+             'text/csv': '.csv',
+             'application/json': '.json',
+             'application/xml': '.xml',
+             'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx',
+             'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx',
+         };
+         return mimeToExt[mimeType] || '.bin';
+     }
+ }
+ exports.S3Storage = S3Storage;
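One caveat worth flagging about the implementation above: in AWS SDK v3, `PutObjectCommand` generally requires a known `Content-Length` when `Body` is a stream, so uploads of streams with unknown length can fail at runtime. A common alternative (not used by this package) is the multipart `Upload` helper from `@aws-sdk/lib-storage`, sketched here:

```typescript
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
import type { Readable } from 'stream';

// Streams a body of unknown length to S3 by uploading it in parts.
async function uploadUnknownLength(
  client: S3Client,
  bucket: string,
  key: string,
  body: Readable,
  contentType: string,
): Promise<void> {
  const upload = new Upload({
    client,
    params: { Bucket: bucket, Key: key, Body: body, ContentType: contentType },
  });
  await upload.done(); // resolves once all parts are uploaded
}
```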
@@ -0,0 +1,26 @@
+ import { Readable } from 'stream';
+ export interface SupabaseConfig {
+     projectUrl: string;
+     apiKey: string;
+     bucket: string;
+ }
+ export interface UploadResult {
+     fileKey: string;
+     contentType: string;
+ }
+ export interface DownloadResult {
+     stream: Readable;
+     contentType: string;
+ }
+ export declare class SupabaseStorage {
+     private client;
+     private bucket;
+     constructor(config: SupabaseConfig);
+     uploadStream(stream: Readable, contentType: string, metadata?: Record<string, string>): Promise<UploadResult>;
+     downloadStream(fileKey: string): Promise<DownloadResult>;
+     deleteFile(fileKey: string): Promise<void>;
+     private streamToBuffer;
+     private generateFileKey;
+     private getExtensionFromMimeType;
+     private getContentTypeFromKey;
+ }
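Usage mirrors `S3Storage`, with a project URL and API key in place of S3 credentials. A sketch with placeholder values (note that, as the implementation below shows, uploads are buffered in memory, so S3 remains the better fit for large files):

```typescript
import { SupabaseStorage } from './SupabaseStorage'; // adjust to the actual build layout

const storage = new SupabaseStorage({
  projectUrl: 'https://abcd1234.supabase.co', // placeholder project URL
  apiKey: 'service-role-or-anon-key',         // placeholder key
  bucket: 'my-bucket',
});
// storage.uploadStream / downloadStream / deleteFile behave as in S3Storage.
```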
@@ -0,0 +1,206 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.SupabaseStorage = void 0;
+ const supabase_js_1 = require("@supabase/supabase-js");
+ const stream_1 = require("stream");
+ class SupabaseStorage {
+     constructor(config) {
+         this.client = (0, supabase_js_1.createClient)(config.projectUrl, config.apiKey);
+         this.bucket = config.bucket;
+     }
+     async uploadStream(stream, contentType, metadata) {
+         const fileKey = this.generateFileKey(contentType);
+         try {
+             // Supabase SDK doesn't support streaming uploads directly
+             // We need to buffer the stream, but add a warning about memory usage
+             // For large files, consider using S3 instead which supports true streaming
+             const buffer = await this.streamToBuffer(stream);
+             // Warn if file is large (> 10MB)
+             if (buffer.length > 10 * 1024 * 1024) {
+                 console.warn(`Large file (${(buffer.length / 1024 / 1024).toFixed(2)}MB) being uploaded to Supabase. Consider using S3 for better streaming support.`);
+             }
+             const { error } = await this.client.storage.from(this.bucket).upload(fileKey, buffer, {
+                 contentType,
+                 upsert: false,
+                 cacheControl: '86400',
+                 metadata,
+             });
+             if (error) {
+                 if (error.message.includes('Bucket not found')) {
+                     throw new Error(`Supabase bucket "${this.bucket}" does not exist or is not accessible`);
+                 }
+                 if (error.message.includes('Permission denied')) {
+                     throw new Error(`Access denied to Supabase bucket "${this.bucket}". Check your API key and bucket permissions`);
+                 }
+                 throw new Error(`Supabase upload failed: ${error.message}`);
+             }
+             return {
+                 fileKey,
+                 contentType,
+             };
+         }
+         catch (error) {
+             if (error instanceof Error) {
+                 throw error;
+             }
+             throw new Error(`Supabase upload failed: ${String(error)}`);
+         }
+     }
+     async downloadStream(fileKey) {
+         try {
+             // Use signed URL for true streaming download
+             const { data: signedUrlData, error: signedUrlError } = await this.client.storage
+                 .from(this.bucket)
+                 .createSignedUrl(fileKey, 60); // 60 seconds validity
+             if (signedUrlError || !signedUrlData) {
+                 throw new Error(`Failed to create signed URL: ${signedUrlError?.message || 'Unknown error'}`);
+             }
+             // Fetch the file using the signed URL to get proper streaming support
+             const response = await fetch(signedUrlData.signedUrl);
+             if (!response.ok) {
+                 throw new Error(`Failed to download file: ${response.statusText}`);
+             }
+             if (!response.body) {
+                 throw new Error(`Response body is empty`);
+             }
+             // Convert Web Stream to Node Stream
+             const nodeStream = stream_1.Readable.fromWeb(response.body);
+             const contentType = response.headers.get('content-type') || this.getContentTypeFromKey(fileKey);
+             return {
+                 stream: nodeStream,
+                 contentType,
+             };
+         }
+         catch (error) {
+             // Fallback to the old method if signed URL fails
+             try {
+                 const { data, error } = await this.client.storage.from(this.bucket).download(fileKey);
+                 if (error) {
+                     if (error.message.includes('Object not found') || error.message.includes('Not Found')) {
+                         throw new Error(`File not found: ${fileKey}`);
+                     }
+                     if (error.message.includes('Permission denied')) {
+                         throw new Error(`Access denied to Supabase bucket "${this.bucket}". Check your API key and bucket permissions`);
+                     }
+                     throw new Error(`Supabase download failed: ${error.message}`);
+                 }
+                 if (!data) {
+                     throw new Error(`File not found: ${fileKey}`);
+                 }
+                 const buffer = await data.arrayBuffer();
+                 const stream = stream_1.Readable.from(Buffer.from(buffer));
+                 const contentType = this.getContentTypeFromKey(fileKey);
+                 return {
+                     stream,
+                     contentType,
+                 };
+             }
+             catch (fallbackError) {
+                 if (fallbackError instanceof Error) {
+                     throw fallbackError;
+                 }
+                 throw new Error(`Supabase download failed: ${String(fallbackError)}`);
+             }
+         }
+     }
+     async deleteFile(fileKey) {
+         try {
+             const { error } = await this.client.storage.from(this.bucket).remove([fileKey]);
+             if (error) {
+                 if (error.message.includes('Object not found')) {
+                     return;
+                 }
+                 if (error.message.includes('Permission denied')) {
+                     throw new Error(`Access denied to Supabase bucket "${this.bucket}". Check your API key and bucket permissions`);
+                 }
+                 throw new Error(`Supabase delete failed: ${error.message}`);
+             }
+         }
+         catch (error) {
+             if (error instanceof Error) {
+                 throw error;
+             }
+             throw new Error(`Supabase delete failed: ${String(error)}`);
+         }
+     }
+     async streamToBuffer(stream) {
+         const chunks = [];
+         for await (const chunk of stream) {
+             chunks.push(chunk);
+         }
+         return Buffer.concat(chunks);
+     }
+     generateFileKey(contentType) {
+         const ext = this.getExtensionFromMimeType(contentType);
+         const timestamp = Date.now();
+         const random = Math.random().toString(36).substring(2, 15);
+         return `${timestamp}-${random}${ext}`;
+     }
+     getExtensionFromMimeType(mimeType) {
+         const mimeToExt = {
+             'image/jpeg': '.jpg',
+             'image/png': '.png',
+             'image/gif': '.gif',
+             'image/webp': '.webp',
+             'image/svg+xml': '.svg',
+             'image/bmp': '.bmp',
+             'image/tiff': '.tiff',
+             'image/avif': '.avif',
+             'video/mp4': '.mp4',
+             'video/webm': '.webm',
+             'video/quicktime': '.mov',
+             'video/x-msvideo': '.avi',
+             'video/x-matroska': '.mkv',
+             'application/pdf': '.pdf',
+             'application/zip': '.zip',
+             'application/x-rar-compressed': '.rar',
+             'application/x-7z-compressed': '.7z',
+             'audio/mpeg': '.mp3',
+             'audio/wav': '.wav',
+             'audio/ogg': '.ogg',
+             'audio/flac': '.flac',
+             'text/plain': '.txt',
+             'text/csv': '.csv',
+             'application/json': '.json',
+             'application/xml': '.xml',
+             'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': '.xlsx',
+             'application/vnd.openxmlformats-officedocument.wordprocessingml.document': '.docx',
+         };
+         return mimeToExt[mimeType] || '.bin';
+     }
+     getContentTypeFromKey(fileKey) {
+         const ext = fileKey.split('.').pop()?.toLowerCase() || 'bin';
+         const extToMime = {
+             jpg: 'image/jpeg',
+             jpeg: 'image/jpeg',
+             png: 'image/png',
+             gif: 'image/gif',
+             webp: 'image/webp',
+             svg: 'image/svg+xml',
+             bmp: 'image/bmp',
+             tiff: 'image/tiff',
+             avif: 'image/avif',
+             mp4: 'video/mp4',
+             webm: 'video/webm',
+             mov: 'video/quicktime',
+             avi: 'video/x-msvideo',
+             mkv: 'video/x-matroska',
+             pdf: 'application/pdf',
+             zip: 'application/zip',
+             rar: 'application/x-rar-compressed',
+             '7z': 'application/x-7z-compressed',
+             mp3: 'audio/mpeg',
+             wav: 'audio/wav',
+             ogg: 'audio/ogg',
+             flac: 'audio/flac',
+             txt: 'text/plain',
+             csv: 'text/csv',
+             json: 'application/json',
+             xml: 'application/xml',
+             xlsx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
+             docx: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
+         };
+         return extToMime[ext] || 'application/octet-stream';
+     }
+ }
+ exports.SupabaseStorage = SupabaseStorage;
@@ -0,0 +1,17 @@
+ import { IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow';
+ export { S3Storage } from './S3Storage';
+ export type { StorageConfig as S3StorageConfig } from './S3Storage';
+ export { SupabaseStorage } from './SupabaseStorage';
+ export type { SupabaseConfig } from './SupabaseStorage';
+ export interface StorageDriver {
+     uploadStream(stream: any, contentType: string, metadata?: Record<string, string>): Promise<{
+         fileKey: string;
+         contentType: string;
+     }>;
+     downloadStream(fileKey: string): Promise<{
+         stream: any;
+         contentType: string;
+     }>;
+     deleteFile(fileKey: string): Promise<void>;
+ }
+ export declare function createStorageDriver(context: IExecuteFunctions | IWebhookFunctions, storageDriver: string, bucket: string): Promise<StorageDriver>;
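Because both storage classes satisfy this interface, calling code can stay driver-agnostic. A sketch (import path assumed):

```typescript
import type { Readable } from 'stream';
import type { StorageDriver } from './index'; // adjust to the actual build layout

// Works identically with S3Storage or SupabaseStorage.
async function roundTrip(driver: StorageDriver, input: Readable): Promise<void> {
  const { fileKey, contentType } = await driver.uploadStream(input, 'application/pdf');
  console.log(`stored ${fileKey} as ${contentType}`);
  const download = await driver.downloadStream(fileKey);
  download.stream.resume(); // consume it (in practice, pipe it to a response)
  await driver.deleteFile(fileKey);
}
```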
@@ -0,0 +1,44 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.SupabaseStorage = exports.S3Storage = void 0;
+ exports.createStorageDriver = createStorageDriver;
+ const S3Storage_1 = require("./S3Storage");
+ const SupabaseStorage_1 = require("./SupabaseStorage");
+ var S3Storage_2 = require("./S3Storage");
+ Object.defineProperty(exports, "S3Storage", { enumerable: true, get: function () { return S3Storage_2.S3Storage; } });
+ var SupabaseStorage_2 = require("./SupabaseStorage");
+ Object.defineProperty(exports, "SupabaseStorage", { enumerable: true, get: function () { return SupabaseStorage_2.SupabaseStorage; } });
+ async function createStorageDriver(context, storageDriver, bucket) {
+     if (storageDriver === 's3') {
+         const credentials = await context.getCredentials('awsS3Api');
+         if (!credentials) {
+             throw new Error('AWS S3 credentials are required');
+         }
+         const region = context.getNodeParameter('region', 0);
+         const endpoint = context.getNodeParameter('endpoint', 0);
+         const forcePathStyle = context.getNodeParameter('forcePathStyle', 0);
+         const config = {
+             accessKeyId: credentials.accessKeyId,
+             secretAccessKey: credentials.secretAccessKey,
+             region,
+             bucket,
+             endpoint: endpoint || undefined,
+             forcePathStyle,
+         };
+         return new S3Storage_1.S3Storage(config);
+     }
+     else if (storageDriver === 'supabase') {
+         const credentials = await context.getCredentials('supabaseApi');
+         if (!credentials) {
+             throw new Error('Supabase credentials are required');
+         }
+         const projectUrl = context.getNodeParameter('projectUrl', 0);
+         const config = {
+             projectUrl,
+             apiKey: credentials.apiKey,
+             bucket,
+         };
+         return new SupabaseStorage_1.SupabaseStorage(config);
+     }
+     throw new Error(`Unknown storage driver: ${storageDriver}`);
+ }
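The node source itself is not part of this diff, so exactly how `execute()` invokes this factory is an assumption; here is a hedged sketch, with the `storageDriver` and `bucket` parameter names hypothetical:

```typescript
import type { IExecuteFunctions } from 'n8n-workflow';
import type { Readable } from 'stream';
import { createStorageDriver } from './storage'; // import path assumed

// Hypothetical helper mirroring how execute() presumably drives the factory.
async function uploadViaDriver(
  ctx: IExecuteFunctions,
  input: Readable,
  mimeType: string,
): Promise<{ fileKey: string; contentType: string }> {
  const driverName = ctx.getNodeParameter('storageDriver', 0) as string; // 's3' | 'supabase' (assumed name)
  const bucket = ctx.getNodeParameter('bucket', 0) as string;            // assumed name
  const driver = await createStorageDriver(ctx, driverName, bucket);
  return driver.uploadStream(input, mimeType);
}
```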
@@ -0,0 +1,2 @@
+ import { BinaryBridge } from './nodes/BinaryBridge/BinaryBridge.node';
+ export declare const nodeClasses: (typeof BinaryBridge)[];
package/dist/index.js ADDED
@@ -0,0 +1,5 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.nodeClasses = void 0;
+ const BinaryBridge_node_1 = require("./nodes/BinaryBridge/BinaryBridge.node");
+ exports.nodeClasses = [BinaryBridge_node_1.BinaryBridge];
@@ -0,0 +1,6 @@
+ import { INodeType, INodeTypeDescription, IExecuteFunctions, IWebhookFunctions, IWebhookResponseData, INodeExecutionData } from 'n8n-workflow';
+ export declare class BinaryBridge implements INodeType {
+     description: INodeTypeDescription;
+     execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]>;
+     webhook(this: IWebhookFunctions): Promise<IWebhookResponseData>;
+ }