@5minds/node-red-contrib-processcube-tools 1.0.5 → 1.1.0-feature-6eab97-mg0ov11s
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.template +11 -0
- package/email-receiver/email-receiver.html +24 -2
- package/email-receiver/email-receiver.js +34 -0
- package/file-storage/file-storage.html +97 -0
- package/file-storage/file-storage.js +70 -0
- package/package.json +9 -2
- package/storage/providers/fs.js +117 -0
- package/storage/providers/postgres.js +159 -0
- package/storage/storage-core.js +73 -0
package/.env.template
CHANGED
@@ -2,3 +2,14 @@ EMAIL_SEND_PORT=
 EMAIL_SEND_HOST=
 EMAIL_SEND_USER=
 EMAIL_SEND_PASSWORD=
+
+EMAIL_RECEIVE_PORT=
+EMAIL_RECEIVE_HOST=
+EMAIL_RECEIVE_USER=
+EMAIL_RECEIVE_PASSWORD=
+
+PGUSER=postgres
+PGPASSWORD=postgres
+PGHOST=localhost
+PGPORT=5432
+PGDATABASE=enginedb
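
The new PGUSER/PGPASSWORD/PGHOST/PGPORT/PGDATABASE entries use the standard node-postgres (libpq-style) environment variable names. That matters because the PostgreSQL provider added below constructs its pool as new Pool() with no arguments, so it is configured entirely through these variables. A minimal sketch, assuming the values from this template are exported into the process environment (for example via dotenv or the shell):

// Sketch only: pg's Pool reads PGHOST, PGPORT, PGUSER, PGPASSWORD and PGDATABASE
// from the environment when no explicit configuration is passed.
const { Pool } = require('pg');

const pool = new Pool(); // no connection string needed once the PG* variables are set

pool.query('SELECT current_database() AS db')
    .then((res) => console.log('connected to', res.rows[0].db))
    .finally(() => pool.end());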

package/email-receiver/email-receiver.html
CHANGED
@@ -12,15 +12,28 @@
             tlsType: { value: 'bool' },
             user: { value: '', required: true, validate: RED.validators.typedInput('userType') },
             userType: { value: 'str' },
-            password: { value: '', required: true
+            password: { value: '', required: true },
             passwordType: { value: 'env', required: true },
             folder: { value: '', required: true, validate: RED.validators.typedInput('folderType') },
             folderType: { value: 'json' },
             markseen: { value: true, validate: RED.validators.typedInput('markseenType') },
             markseenType: { value: 'bool' },
+            sendstatus: { value: true },
         },
         inputs: 1,
-        outputs:
+        // outputs: function () {
+        //     const sendstatus = this.sendstatus === true || this.sendstatus === 'true';
+        //     return sendstatus ? 2 : 1;
+        // },
+        outputs: 2,
+        outputLabels: function (index) {
+            if (index === 0) {
+                return 'Email Message';
+            }
+            if (index === 1) {
+                return 'Status';
+            }
+        },
         icon: 'font-awesome/fa-inbox',
         label: function () {
             return this.name || 'E-Mail Receiver';
@@ -118,6 +131,15 @@
         <input type="text" id="node-input-markseen" />
         <input type="hidden" id="node-input-markseenType" />
     </div>
+    <div class="form-row" style="margin-bottom: 3px;">
+        <input
+            type="checkbox"
+            checked
+            id="node-input-sendstatus"
+            style="display: inline-block; width: auto; vertical-align: top; margin-left: 5px;"
+        />
+        <label style="width:auto" for="node-input-sendstatus">Send status</label>
+    </div>
 </script>

 <script type="text/html" data-help-name="email-receiver">

package/email-receiver/email-receiver.js
CHANGED
@@ -24,6 +24,7 @@ module.exports = function (RED) {
         const imap_tls = RED.util.evaluateNodeProperty(config.tls, config.tlsType, node, msg);
         const imap_user = RED.util.evaluateNodeProperty(config.user, config.userType, node, msg);
         const imap_password = RED.util.evaluateNodeProperty(config.password, config.passwordType, node, msg);
+        const sendstatus = config.sendstatus === true || config.sendstatus === 'true';

         // Check if the folder is actually an array
         const imap_folder = RED.util.evaluateNodeProperty(config.folder, config.folderType, node, msg);
@@ -127,18 +128,51 @@ module.exports = function (RED) {
             if (error) {
                 node.error('IMAP session terminated: ' + error.message);
                 node.status({ fill: 'red', shape: 'ring', text: 'connection error' });
+                if (sendstatus) {
+                    node.send([null, {
+                        payload: {
+                            status: 'error',
+                            message: error.message,
+                            errors: state.errors,
+                        }
+                    }]);
+                }
             } else if (state.failures > 0) {
                 node.status({
                     fill: 'red',
                     shape: 'dot',
                     text: `Done, ${state.totalMails} mails from ${state.successes}/${state.totalFolders} folders. ${state.failures} failed.`,
                 });
+                if (sendstatus) {
+                    node.send([null, {
+                        payload: {
+                            status: 'warning',
+                            total: state.totalMails,
+                            successes: state.successes,
+                            failures: state.failures,
+                            totalFolders: state.totalFolders,
+                            errors: state.errors,
+                        }
+                    }]);
+                }
+
             } else {
                 node.status({
                     fill: 'green',
                     shape: 'dot',
                     text: `Done, fetched ${state.totalMails} mails from ${folders.join(', ')}.`,
                 });
+
+                if (sendstatus) {
+                    node.send([null, {
+                        payload: {
+                            status: 'success',
+                            total: state.totalMails,
+                            folders: folders.join(', '),
+                        }
+                    }]);
+                }
+
             }
             if (imap && imap.state !== 'disconnected') {
                 imap.end();
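
With the new sendstatus option enabled, the node keeps emitting parsed mails on the first output and sends one summary message on the new second output, with payload.status set to 'error', 'warning' or 'success' as shown above. A minimal sketch of a Function node wired to that second output (the handling itself is illustrative):

// Function node connected to the second ("Status") output of email-receiver.
// The payload shapes follow the diff above.
switch (msg.payload.status) {
    case 'success':
        node.status({ fill: 'green', shape: 'dot', text: `${msg.payload.total} mails` });
        break;
    case 'warning':
        node.warn(`${msg.payload.failures} of ${msg.payload.totalFolders} folders failed`);
        break;
    case 'error':
        node.error(msg.payload.message, msg);
        break;
}
return msg;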

package/file-storage/file-storage.html
ADDED
@@ -0,0 +1,97 @@
+<script type="text/javascript">
+    RED.nodes.registerType('file-storage', {
+        category: 'ProcessCube Tools',
+        color: '#02AFD6',
+        defaults: {
+            name: { value: '' },
+            provider: { value: 'fs' },
+            baseDir: { value: '' },
+            pgConnectionString: { value: '' },
+            pgSchema: { value: 'public' },
+            pgTable: { value: 'files' },
+            outputAs: { value: 'stream' },
+            defaultAction: { value: 'store' },
+        },
+        inputs: 1,
+        outputs: 1,
+        icon: 'file.png',
+        label: function () {
+            return this.name || 'file-storage';
+        },
+    });
+</script>
+
+<script type="text/html" data-template-name="file-storage">
+    <div class="form-row">
+        <label for="node-input-name"><i class="fa fa-tag"></i> Name</label>
+        <input type="text" id="node-input-name" placeholder="file-storage" />
+    </div>
+    <div class="form-row">
+        <label for="node-input-provider"><i class="fa fa-database"></i> Provider</label>
+        <select id="node-input-provider">
+            <option value="fs">Filesystem</option>
+            <option value="pg">PostgreSQL</option>
+        </select>
+    </div>
+    <div class="form-row">
+        <label for="node-input-outputAs"><i class="fa fa-share-square-o"></i> Output</label>
+        <select id="node-input-outputAs">
+            <option value="stream">Stream</option>
+            <option value="buffer">Buffer</option>
+            <option value="path">Path (nur FS)</option>
+        </select>
+    </div>
+    <hr />
+    <div class="form-tips">Filesystem</div>
+    <div class="form-row">
+        <label for="node-input-baseDir"><i class="fa fa-folder-open"></i> Base Dir</label>
+        <input type="text" id="node-input-baseDir" placeholder="/data/files" />
+    </div>
+    <hr />
+    <div class="form-tips">PostgreSQL</div>
+    <div class="form-row">
+        <label for="node-input-pgConnectionString"><i class="fa fa-plug"></i> Connection</label>
+        <input type="text" id="node-input-pgConnectionString" placeholder="postgres://user:pass@host:5432/db" />
+    </div>
+    <div class="form-row">
+        <label for="node-input-pgSchema"><i class="fa fa-sitemap"></i> Schema</label>
+        <input type="text" id="node-input-pgSchema" placeholder="public" />
+    </div>
+    <div class="form-row">
+        <label for="node-input-pgTable"><i class="fa fa-table"></i> Tabelle</label>
+        <input type="text" id="node-input-pgTable" placeholder="files" />
+    </div>
+    <hr />
+    <div class="form-row">
+        <label for="node-input-defaultAction"><i class="fa fa-cog"></i> Default Action</label>
+        <select id="node-input-defaultAction">
+            <option value="store">store</option>
+            <option value="get">get</option>
+            <option value="delete">delete</option>
+        </select>
+    </div>
+</script>
+
+<script type="text/html" data-help-name="file-storage">
+    <p>
+        File-Storage-Node zum Speichern/Abrufen/Löschen von Dateien inkl. Metadaten. Provider: Filesystem (Datei + JSON)
+        oder PostgreSQL (Large Objects + Metadaten-Tabelle).
+    </p>
+    <h3>Input</h3>
+    <pre>
+        msg.action = "store" | "get" | "delete"
+        msg.payload = Buffer | Readable | String (bei store)
+        msg.file = {
+            id?: string (bei get/delete),
+            filename?: string,
+            contentType?: string,
+            metadata?: object
+        }
+    </pre
+    >
+    <h3>Output</h3>
+    <p>
+        Bei <code>store</code>: <code>msg.payload</code> enthält Metadaten inkl. <code>id</code>. Bei <code>get</code>:
+        <code>msg.payload</code> ist Stream/Buffer/Pfad (je nach Option), Metadaten in <code>msg.file</code>.
+    </p>
+</script>
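
The help text (in German) documents the message contract: msg.action selects store, get or delete; msg.payload carries the content for store (Buffer, Readable or string); msg.file carries the id for get/delete plus optional filename, contentType and metadata. On store the metadata including the id comes back in msg.payload; on get the content comes back as stream, buffer or path, with the metadata in msg.file. A minimal sketch of a store message built in a Function node (the concrete values are illustrative):

// Builds a 'store' request matching the contract documented above.
msg.action = 'store';
msg.payload = Buffer.from('hello world');      // Buffer | Readable | String
msg.file = {
    filename: 'hello.txt',                     // illustrative values
    contentType: 'text/plain',
    metadata: { source: 'example-flow' },
};
return msg;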

package/file-storage/file-storage.js
ADDED
@@ -0,0 +1,70 @@
+module.exports = function (RED) {
+    const StorageCore = require('../storage/storage-core');
+
+    function FileStorageNode(config) {
+        RED.nodes.createNode(this, config);
+        const node = this;
+
+        // Node-Konfiguration
+        node.provider = config.provider || 'fs';
+        node.baseDir = config.baseDir;
+        node.pg = {
+            connectionString: config.pgConnectionString,
+            schema: config.pgSchema || 'public',
+            table: config.pgTable || 'files',
+        };
+        node.outputAs = config.outputAs || 'stream'; // 'stream' | 'buffer' | 'path' (path nur fs)
+
+        // Storage-Kern
+        const storage = new StorageCore({
+            provider: node.provider,
+            fs: { baseDir: node.baseDir },
+            pg: node.pg,
+        });
+
+        storage.init().catch((err) => node.error(err));
+
+        node.on('input', async function (msg, send, done) {
+            try {
+                const action = msg.action || config.defaultAction || 'store';
+                if (action === 'store') {
+                    const file = msg.file || {};
+                    const result = await storage.store(msg.payload, file);
+                    msg.payload = result;
+                    msg.file = { ...file, ...result };
+                    send(msg);
+                    done();
+                    return;
+                }
+
+                if (action === 'get') {
+                    const id = msg.file && msg.file.id;
+                    if (!id) throw new Error('file.id is required for get');
+                    const { meta, payload } = await storage.get(id, { as: node.outputAs });
+                    msg.file = { ...meta, id: meta.id };
+                    msg.payload = payload;
+                    send(msg);
+                    done();
+                    return;
+                }
+
+                if (action === 'delete') {
+                    const id = msg.file && msg.file.id;
+                    if (!id) throw new Error('file.id is required for delete');
+                    const result = await storage.delete(id);
+                    msg.payload = result;
+                    send(msg);
+                    done();
+                    return;
+                }
+
+                throw new Error(`Unknown action: ${action}`);
+            } catch (err) {
+                node.error(err, msg);
+                if (done) done(err);
+            }
+        });
+    }
+
+    RED.nodes.registerType('file-storage', FileStorageNode);
+};
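
Because the store branch copies the result into msg.payload and msg.file, a downstream node can pick up the generated id from msg.payload.id and issue a follow-up get or delete with nothing but that id. A minimal sketch of such a follow-up request in a Function node placed after a store (illustrative wiring):

// Turn the result of a 'store' into a 'get' request for the same file.
// A 'delete' works the same way, with msg.action = 'delete'.
msg.action = 'get';
msg.file = { id: msg.payload.id };   // id generated by the store action
return msg;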
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@5minds/node-red-contrib-processcube-tools",
-    "version": "1.0.5",
+    "version": "1.1.0-feature-6eab97-mg0ov11s",
     "license": "MIT",
     "description": "Node-RED tools nodes for ProcessCube",
     "scripts": {
@@ -19,6 +19,10 @@
         {
             "name": "Diana Stefan",
             "email": "Diana.Stefan@5Minds.de"
+        },
+        {
+            "name": "Thorsten Kallweit",
+            "email": "Thorsten.Kallweit@5Minds.de"
         }
     ],
     "repository": {
@@ -38,7 +42,8 @@
         "nodes": {
             "EmailReceiver": "email-receiver/email-receiver.js",
             "EmailSender": "email-sender/email-sender.js",
-            "HtmlToText": "processcube-html-to-text/processcube-html-to-text.js"
+            "HtmlToText": "processcube-html-to-text/processcube-html-to-text.js",
+            "FileStorage": "file-storage/file-storage.js"
         },
         "examples": "examples"
     },
@@ -47,6 +52,8 @@
         "mailparser": "^3.6.8",
         "node-imap": "^0.9.6",
         "nodemailer": "^7.0.6",
+        "pg": "^8.16.3",
+        "pg-large-object": "^2.0.0",
         "utf7": "^1.0.2"
     },
     "devDependencies": {

package/storage/providers/fs.js
ADDED
@@ -0,0 +1,117 @@
+const fs = require('fs');
+const fsp = require('fs/promises');
+const path = require('path');
+const { pipeline } = require('stream');
+const { createHash } = require('crypto');
+const { promisify } = require('util');
+const pump = promisify(pipeline);
+
+class FsProvider {
+    constructor(opts = {}) {
+        this.baseDir = opts.baseDir || path.resolve(process.cwd(), 'data');
+    }
+
+    async init() {
+        await fsp.mkdir(this.baseDir, { recursive: true });
+    }
+
+    _buildPaths(id) {
+        const d = new Date();
+        const parts = [
+            String(d.getUTCFullYear()),
+            String(d.getUTCMonth() + 1).padStart(2, '0'),
+            String(d.getUTCDate()).padStart(2, '0'),
+        ];
+        const dir = path.join(this.baseDir, ...parts);
+        const filePath = path.join(dir, id);
+        const metaPath = path.join(dir, `${id}.json`);
+        return { dir, filePath, metaPath };
+    }
+
+    async store(readable, info) {
+        const { id, filename, contentType, metadata, createdAt } = info;
+        const { dir, filePath, metaPath } = this._buildPaths(id);
+        await fsp.mkdir(dir, { recursive: true });
+
+        const hash = createHash('sha256');
+        let size = 0;
+
+        const out = fs.createWriteStream(filePath);
+        readable.on('data', (chunk) => {
+            hash.update(chunk);
+            size += chunk.length;
+        });
+
+        await pump(readable, out);
+
+        const sha256 = hash.digest('hex');
+        const meta = { id, filename, contentType, size, sha256, metadata, createdAt };
+        await fsp.writeFile(metaPath, JSON.stringify(meta, null, 2));
+
+        return { size, sha256, path: filePath };
+    }
+
+    async get(id, options = { as: 'stream' }) {
+        // Find meta file by searching dated folders
+        const meta = await this._findMeta(id);
+        if (!meta) throw new Error(`File not found: ${id}`);
+        const filePath = meta.__filePath;
+
+        if (options.as === 'path') {
+            return { meta, payload: filePath };
+        }
+
+        if (options.as === 'buffer') {
+            const buf = await fsp.readFile(filePath);
+            return { meta, payload: buf };
+        }
+
+        // default: stream
+        const stream = fs.createReadStream(filePath);
+        return { meta, payload: stream };
+    }
+
+    async delete(id) {
+        const meta = await this._findMeta(id);
+        if (!meta) return; // idempotent
+        await fsp.unlink(meta.__filePath).catch(() => {});
+        await fsp.unlink(meta.__metaPath).catch(() => {});
+    }
+
+    async _findMeta(id) {
+        // Walk date folders (YYYY/MM/DD). For Performance: keep index/cache in prod.
+        const years = await this._ls(this.baseDir);
+        for (const y of years) {
+            const yearDir = path.join(this.baseDir, y);
+            const months = await this._ls(yearDir);
+            for (const m of months) {
+                const monthDir = path.join(yearDir, m);
+                const days = await this._ls(monthDir);
+                for (const d of days) {
+                    const dir = path.join(monthDir, d);
+                    const metaPath = path.join(dir, `${id}.json`);
+                    try {
+                        const raw = await fsp.readFile(metaPath, 'utf-8');
+                        const meta = JSON.parse(raw);
+                        meta.__metaPath = metaPath;
+                        meta.__filePath = path.join(dir, id);
+                        return meta;
+                    } catch (_) {
+                        /* continue */
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+    async _ls(dir) {
+        try {
+            return (await fsp.readdir(dir)).filter((n) => !n.startsWith('.'));
+        } catch {
+            return [];
+        }
+    }
+}
+
+module.exports = FsProvider;
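
The filesystem provider writes each file under a UTC date hierarchy, <baseDir>/YYYY/MM/DD/<id>, with the metadata in a sibling <id>.json, and _findMeta later recovers it by walking those date folders. A minimal standalone sketch of a store call (the directory and file names are illustrative, and the require path assumes the package root as working directory):

// Sketch: storing one file with FsProvider outside of Node-RED.
const { randomUUID } = require('crypto');
const { Readable } = require('stream');
const FsProvider = require('./storage/providers/fs');

(async () => {
    const provider = new FsProvider({ baseDir: './data-example' });
    await provider.init();

    const id = randomUUID();
    const result = await provider.store(Readable.from(Buffer.from('hello')), {
        id,
        filename: 'note.txt',
        contentType: 'text/plain',
        metadata: {},
        createdAt: new Date().toISOString(),
    });

    // result.path is ./data-example/YYYY/MM/DD/<id>; the metadata sits next to it in <id>.json.
    console.log(result);
})();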

package/storage/providers/postgres.js
ADDED
@@ -0,0 +1,159 @@
+const { Pool } = require('pg');
+const { LargeObjectManager } = require('pg-large-object');
+const { pipeline, PassThrough } = require('stream');
+const { createHash } = require('crypto');
+const { promisify } = require('util');
+const pump = promisify(pipeline);
+
+class PgProvider {
+    constructor(opts = {}) {
+        // this.connectionString = opts.connectionString || process.env.PG_URL || 'postgres://localhost/postgres';
+        this.schema = opts.schema || 'public';
+        this.table = opts.table || 'files';
+        // this.pool = new Pool({ connectionString: this.connectionString });
+        this.pool = new Pool();
+    }
+
+    async init() {
+        const client = await this.pool.connect();
+        try {
+            await client.query(`CREATE TABLE IF NOT EXISTS ${this.schema}.${this.table} (
+                id UUID PRIMARY KEY,
+                loid OID NOT NULL,
+                filename TEXT,
+                content_type TEXT,
+                size BIGINT,
+                sha256 TEXT,
+                metadata JSONB,
+                created_at TIMESTAMPTZ NOT NULL DEFAULT now()
+            )`);
+            await client.query(
+                `CREATE INDEX IF NOT EXISTS idx_${this.table}_created_at ON ${this.schema}.${this.table}(created_at)`,
+            );
+        } finally {
+            client.release();
+        }
+    }
+
+    async store(readable, info) {
+        const { id, filename, contentType, metadata, createdAt } = info;
+        const client = await this.pool.connect();
+        try {
+            const lom = new LargeObjectManager({pg: client});
+            await client.query('BEGIN');
+            const bufSize = 16384;
+
+            const result = await lom.createAndWritableStreamAsync(bufSize);
+            const oid = result[0];
+            const stream = result[1];
+            if (!oid || !stream) {
+                throw new Error('Failed to create large object');
+            }
+            const hash = createHash('sha256');
+            let size = 0;
+
+            // Wir berechnen Hash und Größe "on the fly", während wir in das Large Object schreiben
+            const hashAndSize = new PassThrough();
+            hashAndSize.on('data', (chunk) => {
+                hash.update(chunk);
+                size += chunk.length;
+            });
+
+            // readable -> hashAndSize -> stream
+            const pipelinePromise = pump(readable, hashAndSize, stream);
+
+            await pipelinePromise;
+
+            const sha256 = hash.digest('hex');
+
+            await client.query(
+                `INSERT INTO ${this.schema}.${this.table} (id, loid, filename, content_type, size, sha256, metadata, created_at)
+                 VALUES ($1, $2, $3, $4, $5, $6, $7::jsonb, $8)`,
+                [id, oid, filename, contentType, size, sha256, JSON.stringify(metadata || {}), createdAt],
+            );
+
+            await client.query('COMMIT');
+            return { size, sha256, oid };
+        } catch (err) {
+            await client.query('ROLLBACK').catch(() => {});
+            throw err;
+        } finally {
+            client.release();
+        }
+    }
+
+    async get(id, options = { as: 'stream' }) {
+        const client = await this.pool.connect();
+        try {
+            const { rows } = await client.query(`SELECT * FROM ${this.schema}.${this.table} WHERE id=$1`, [id]);
+            if (rows.length === 0) throw new Error(`File not found: ${id}`);
+            const meta = rows[0];
+
+            const bufSize = 16384;
+            if (options.as === 'buffer') {
+                // Stream LO into memory
+                await client.query('BEGIN');
+                const lom = new LargeObjectManager({ pg: client });
+
+                const ro = await lom.openAndReadableStreamAsync(meta.loid, bufSize);
+                const totalSize = ro[0];
+                const stream = ro[1];
+                if (!stream) {
+                    throw new Error('Failed to open large object for reading');
+                }
+                const chunks = [];
+                stream.on('data', (c) => chunks.push(c));
+                await new Promise((res, rej) => stream.on('end', res).on('error', rej));
+                await client.query('COMMIT');
+                client.release();
+                return { meta, payload: Buffer.concat(chunks) };
+            }
+
+            if (options.as === 'path') {
+                throw new Error('options.as="path" is not supported by Postgres provider');
+            }
+
+            // default: stream – wrap LO stream so we can close txn when done
+            await client.query('BEGIN');
+            const lom = new LargeObjectManager({ pg: client });
+            const ro = await lom.openAndReadableStreamAsync(meta.loid, bufSize);
+            const totalSize = ro[0];
+            const stream = ro[1];
+            const pass = new PassThrough();
+            stream.pipe(pass);
+            const done = new Promise((res, rej) => pass.on('end', res).on('error', rej));
+            done.finally(async () => {
+                await client.query('COMMIT').catch(() => {});
+                client.release();
+            });
+
+            // Do not release here; we release in finally of wrapper. We return early, so prevent double release.
+            return { meta, payload: pass };
+        } catch (err) {
+
+            throw err;
+        }
+    }
+
+    async delete(id) {
+        const client = await this.pool.connect();
+        try {
+            await client.query('BEGIN');
+            const { rows } = await client.query(`DELETE FROM ${this.schema}.${this.table} WHERE id=$1 RETURNING loid`, [
+                id,
+            ]);
+            if (rows.length) {
+                const lom = new LargeObjectManager({ pg: client });
+                await lom.unlinkAsync(rows[0].loid);
+            }
+            await client.query('COMMIT');
+        } catch (err) {
+            await client.query('ROLLBACK').catch(() => {});
+            throw err;
+        } finally {
+            client.release();
+        }
+    }
+}
+
+module.exports = PgProvider;
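
The Postgres provider keeps the raw bytes in a PostgreSQL Large Object and the metadata as a row in <schema>.<table>, created on demand by init(); hash and size are computed while piping through a PassThrough, and reads run inside a transaction. A minimal standalone sketch of a store followed by a buffered get, assuming the PG* variables from the .env template are set and the database is reachable:

// Sketch: exercising PgProvider directly; connection settings come from the PG* env vars.
const { randomUUID } = require('crypto');
const { Readable } = require('stream');
const PgProvider = require('./storage/providers/postgres');

(async () => {
    const provider = new PgProvider({ schema: 'public', table: 'files' });
    await provider.init(); // creates the metadata table and index if they do not exist

    const id = randomUUID();
    const stored = await provider.store(Readable.from(Buffer.from('example content')), {
        id,
        filename: 'example.txt',
        contentType: 'text/plain',
        metadata: {},
        createdAt: new Date().toISOString(),
    });
    console.log(stored); // { size, sha256, oid }

    const { meta, payload } = await provider.get(id, { as: 'buffer' });
    console.log(meta.filename, payload.length);

    await provider.delete(id); // removes the row and unlinks the Large Object
})();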

package/storage/storage-core.js
ADDED
@@ -0,0 +1,73 @@
+const { v4: uuidv4 } = require('uuid');
+const { Readable } = require('stream');
+const FsProvider = require('./providers/fs');
+const PgProvider = require('./providers/postgres');
+
+function ensureReadable(payload) {
+    if (!payload && payload !== 0) {
+        throw new Error('No payload provided for storage');
+    }
+    if (Buffer.isBuffer(payload) || typeof payload === 'string' || typeof payload === 'number') {
+        return Readable.from(Buffer.isBuffer(payload) ? payload : Buffer.from(String(payload)));
+    }
+    if (payload && typeof payload.pipe === 'function') {
+        return payload; // Readable stream
+    }
+    throw new Error('Unsupported payload type. Use Buffer, string, number, or Readable stream.');
+}
+
+class StorageCore {
+    /**
+     * @param {Object} config
+     * @param {('fs'|'pg')} config.provider
+     * @param {Object} [config.fs]
+     * @param {string} [config.fs.baseDir]
+     * @param {Object} [config.pg]
+     * @param {string} [config.pg.connectionString]
+     * @param {string} [config.pg.schema]
+     * @param {string} [config.pg.table]
+     */
+    constructor(config = {}) {
+        this.config = config;
+        const p = config.provider || 'fs';
+        if (p === 'fs') this.provider = new FsProvider(config.fs || {});
+        else if (p === 'pg') this.provider = new PgProvider(config.pg || {});
+        else throw new Error(`Unknown provider: ${p}`);
+    }
+
+    async init() {
+        if (typeof this.provider.init === 'function') {
+            await this.provider.init();
+        }
+    }
+
+    /** Store a file */
+    async store(payload, file = {}) {
+        const stream = ensureReadable(payload);
+        const id = uuidv4();
+        const info = {
+            id,
+            filename: file.filename || id,
+            contentType: file.contentType || 'application/octet-stream',
+            metadata: file.metadata || {},
+            createdAt: new Date().toISOString(),
+        };
+        const result = await this.provider.store(stream, info);
+        return { ...info, ...result, storage: this.config.provider || 'fs' };
+    }
+
+    /** Get a file by id */
+    async get(id, options = { as: 'stream' }) {
+        if (!id) throw new Error('id is required');
+        return this.provider.get(id, options);
+    }
+
+    /** Delete a file by id */
+    async delete(id) {
+        if (!id) throw new Error('id is required');
+        await this.provider.delete(id);
+        return { id, deleted: true };
+    }
+}
+
+module.exports = StorageCore;
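
StorageCore is the facade the file-storage node talks to: it normalizes the payload into a Readable, generates the UUID and createdAt timestamp, delegates to the chosen provider and merges the provider result into the returned metadata. A minimal sketch of using it directly with the filesystem provider (the base directory is illustrative):

// Sketch: StorageCore round trip with the 'fs' provider.
const StorageCore = require('./storage/storage-core');

(async () => {
    const storage = new StorageCore({ provider: 'fs', fs: { baseDir: './data-example' } });
    await storage.init();

    const stored = await storage.store('hello from storage-core', {
        filename: 'greeting.txt',
        contentType: 'text/plain',
    });
    console.log(stored.id, stored.sha256, stored.storage); // storage === 'fs'

    const { meta, payload } = await storage.get(stored.id, { as: 'buffer' });
    console.log(meta.filename, payload.toString());

    console.log(await storage.delete(stored.id)); // { id, deleted: true }
})();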