@lilaquadrat/studio 9.20.2 → 10.0.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cjs/helpers.d.ts +3 -1
- package/lib/cjs/helpers.js +5 -1
- package/lib/cjs/helpers.js.map +1 -1
- package/lib/cjs/models.d.ts +3 -2
- package/lib/cjs/models.js +5 -3
- package/lib/cjs/models.js.map +1 -1
- package/lib/cjs/services.d.ts +5 -2
- package/lib/cjs/services.js +9 -3
- package/lib/cjs/services.js.map +1 -1
- package/lib/cjs/src/AzureBlobStorage.share.js +0 -1
- package/lib/cjs/src/AzureBlobStorage.share.js.map +1 -1
- package/lib/cjs/src/AzureFileStorage.share.js +0 -2
- package/lib/cjs/src/AzureFileStorage.share.js.map +1 -1
- package/lib/cjs/src/Immutable.class.js +1 -2
- package/lib/cjs/src/Immutable.class.js.map +1 -1
- package/lib/cjs/src/aggegations/structures.aggregation.d.ts +6 -2
- package/lib/cjs/src/aggegations/structures.aggregation.js +3 -1
- package/lib/cjs/src/aggegations/structures.aggregation.js.map +1 -1
- package/lib/cjs/src/classes/models.class.js +0 -1
- package/lib/cjs/src/classes/models.class.js.map +1 -1
- package/lib/cjs/src/docker-compose.class.js +1 -1
- package/lib/cjs/src/docker-compose.class.js.map +1 -1
- package/lib/cjs/src/helpers/HttpStatusCode.enum.js +0 -1
- package/lib/cjs/src/helpers/HttpStatusCode.enum.js.map +1 -1
- package/lib/cjs/src/helpers/batch.d.ts +9 -0
- package/lib/cjs/src/helpers/batch.js +38 -0
- package/lib/cjs/src/helpers/batch.js.map +1 -0
- package/lib/cjs/src/helpers/cleanObject.d.ts +1 -1
- package/lib/cjs/src/helpers/cleanObject.js +1 -0
- package/lib/cjs/src/helpers/cleanObject.js.map +1 -1
- package/lib/cjs/src/helpers/filenameHelper.d.ts +9 -0
- package/lib/cjs/src/helpers/filenameHelper.js +21 -0
- package/lib/cjs/src/helpers/filenameHelper.js.map +1 -0
- package/lib/cjs/src/helpers/isDeepEmpty.js +2 -0
- package/lib/cjs/src/helpers/isDeepEmpty.js.map +1 -1
- package/lib/cjs/src/models/permissions.model.js +23 -0
- package/lib/cjs/src/models/permissions.model.js.map +1 -1
- package/lib/cjs/src/models/storage.model.d.ts +4 -0
- package/lib/cjs/src/models/storage.model.js +112 -0
- package/lib/cjs/src/models/storage.model.js.map +1 -0
- package/lib/cjs/src/models/upload.model.d.ts +4 -0
- package/lib/cjs/src/models/upload.model.js +86 -0
- package/lib/cjs/src/models/upload.model.js.map +1 -0
- package/lib/cjs/src/services/ai.service.d.ts +1 -1
- package/lib/cjs/src/services/ai.service.js +6 -4
- package/lib/cjs/src/services/ai.service.js.map +1 -1
- package/lib/cjs/src/services/certificates.service.js +0 -1
- package/lib/cjs/src/services/certificates.service.js.map +1 -1
- package/lib/cjs/src/services/certificatesAction.service.js +1 -1
- package/lib/cjs/src/services/certificatesAction.service.js.map +1 -1
- package/lib/cjs/src/services/customers.service.js +1 -2
- package/lib/cjs/src/services/customers.service.js.map +1 -1
- package/lib/cjs/src/services/domains.service.js +2 -2
- package/lib/cjs/src/services/domains.service.js.map +1 -1
- package/lib/cjs/src/services/editorBase.service.js +7 -4
- package/lib/cjs/src/services/editorBase.service.js.map +1 -1
- package/lib/cjs/src/services/history.service.js +0 -1
- package/lib/cjs/src/services/history.service.js.map +1 -1
- package/lib/cjs/src/services/jetstream.service.d.ts +21 -0
- package/lib/cjs/src/services/jetstream.service.js +100 -0
- package/lib/cjs/src/services/jetstream.service.js.map +1 -0
- package/lib/cjs/src/services/listParticipants.service.js +7 -7
- package/lib/cjs/src/services/listParticipants.service.js.map +1 -1
- package/lib/cjs/src/services/lists.service.js +2 -2
- package/lib/cjs/src/services/lists.service.js.map +1 -1
- package/lib/cjs/src/services/me.service.d.ts +20 -1
- package/lib/cjs/src/services/me.service.js +42 -1
- package/lib/cjs/src/services/me.service.js.map +1 -1
- package/lib/cjs/src/services/media.service.d.ts +7 -2
- package/lib/cjs/src/services/media.service.js +57 -14
- package/lib/cjs/src/services/media.service.js.map +1 -1
- package/lib/cjs/src/services/payments.service.js +1 -1
- package/lib/cjs/src/services/payments.service.js.map +1 -1
- package/lib/cjs/src/services/share.service.d.ts +1 -6
- package/lib/cjs/src/services/share.service.js +0 -18
- package/lib/cjs/src/services/share.service.js.map +1 -1
- package/lib/cjs/src/services/storage.service.d.ts +192 -0
- package/lib/cjs/src/services/storage.service.js +861 -0
- package/lib/cjs/src/services/storage.service.js.map +1 -0
- package/lib/cjs/src/services/structures.service.js +2 -2
- package/lib/cjs/src/services/structures.service.js.map +1 -1
- package/lib/cjs/src/services/upload.service.d.ts +11 -0
- package/lib/cjs/src/services/upload.service.js +72 -0
- package/lib/cjs/src/services/upload.service.js.map +1 -0
- package/lib/esm/frontend.d.ts +2 -1
- package/lib/esm/frontend.js +2 -1
- package/lib/esm/frontend.js.map +1 -1
- package/lib/esm/src/helpers/batch.d.ts +9 -0
- package/lib/esm/src/helpers/batch.js +46 -0
- package/lib/esm/src/helpers/batch.js.map +1 -0
- package/lib/esm/src/helpers/isDeepEmpty.js +2 -0
- package/lib/esm/src/helpers/isDeepEmpty.js.map +1 -1
- package/package.json +34 -14
- package/lib/cjs/src/models/media.model.d.ts +0 -4
- package/lib/cjs/src/models/media.model.js +0 -139
- package/lib/cjs/src/models/media.model.js.map +0 -1

package/lib/cjs/src/services/storage.service.js
@@ -0,0 +1,861 @@

```js
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.StorageService = void 0;
const fs_1 = __importDefault(require("fs"));
const promises_1 = require("stream/promises");
const crypto_1 = require("crypto");
const promises_2 = require("fs/promises");
const child_process_1 = require("child_process");
const Immutable_class_1 = __importDefault(require("../Immutable.class"));
const storage_model_1 = __importDefault(require("../models/storage.model"));
const jetstream_service_1 = __importDefault(require("./jetstream.service"));
const upload_service_1 = __importDefault(require("./upload.service"));
const filenameHelper_1 = __importDefault(require("../helpers/filenameHelper"));
class StorageService extends Immutable_class_1.default {
    constructor(options) {
        super();
        this.buckets = {
            cdn: {
                public: true,
                overwrite: false,
                cache: 31536000,
            },
            hosting: {
                public: false,
                overwrite: true,
                cache: 0,
            },
            secure: {
                public: false,
                overwrite: false,
                cache: 31536000,
            },
            internal: {
                public: false,
                overwrite: false,
                cache: 31536000,
            },
        };
        this.model = storage_model_1.default;
        this.options = options;
    }
```
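The constructor pins down four bucket profiles: only `cdn` is publicly readable, only `hosting` allows overwriting, and the `cache` value of 31536000 seconds is exactly one year, while `hosting` disables caching entirely.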
```js
    /**
     * Generates a secure, time-limited token for accessing files in secure storage buckets.
     *
     * The token is cryptographically signed using HMAC-SHA256 and base64url-encoded to be
     * non-human readable and URL-safe.
     *
     * @param fileId - The file identifier (ObjectId or string) to grant access to
     * @param expiresIn - Time in seconds until the token expires (default: 3600 = 1 hour)
     * @param limits - Optional restrictions to apply to the token
     * @param limits.company - Restrict token to specific company
     * @param limits.project - Restrict token to specific project
     * @param limits.app - Restrict token to specific app
     * @param limits.url - Restrict token to specific URL path prefix
     * @param limits.prefix - Restrict token to specific file prefix
     *
     * @returns Base64url-encoded token containing: fileId:expires[:limits]:signature
     *
     * @example
     * // Generate token valid for 1 hour with no restrictions
     * const { token } = getSecureToken('507f1f77bcf86cd799439011', 3600);
     *
     * @example
     * // Generate token limited to specific company and project
     * const { token } = getSecureToken('507f1f77bcf86cd799439011', 3600, {
     *   company: 'acme',
     *   project: 'website'
     * });
     *
     * @example
     * // Generate token limited to specific URL path
     * const { token } = getSecureToken('507f1f77bcf86cd799439011', 3600, {
     *   url: '/company/project/files/'
     * });
     */
    getSecureToken(fileId, expiresIn = 3600, limits) {
        const secret = process.env.SERVER_SECRET || 'secret';
        const now = Math.floor(Date.now() / 1000);
        const expires = now + expiresIn;
        // Build payload: fileId:expires[:base64(limits)]
        let payload = `${fileId}:${expires}`;
        if (limits && Object.keys(limits).length > 0) {
            // Base64 encode the JSON to avoid : separator conflicts
            const limitsJson = JSON.stringify(limits);
            const limitsBase64 = Buffer.from(limitsJson).toString('base64');
            payload += `:${limitsBase64}`;
        }
        // Sign the payload with HMAC-SHA256
        const signature = (0, crypto_1.createHmac)('sha256', secret).update(payload).digest('hex');
        const combined = `${payload}:${signature}`;
        // Encode as base64url for URL-safe, non-human readable format
        const token = Buffer.from(combined).toString('base64url');
        return {
            token,
            expiresIn,
            createdAt: now,
            expiresAt: expires,
        };
    }
```
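A minimal round-trip sketch of the token layout, assuming the compiled module is required directly (the `storage` handle, import path, and example id are hypothetical); the decoded shape follows from the payload construction above. Note the HMAC secret falls back to the literal string `'secret'` whenever SERVER_SECRET is unset.

```js
// Sketch only: mint a token and inspect its decoded payload.
const storage = require('./storage.service').default; // hypothetical import path

const { token } = storage.getSecureToken('507f1f77bcf86cd799439011', 600, { company: 'acme' });
const decoded = Buffer.from(token, 'base64url').toString('utf-8');
// decoded reads "<fileId>:<expires>:<base64(limits)>:<hmac-hex>" - four
// colon-separated parts with limits, three without (base64 JSON and hex
// signatures never contain ':').
```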
```js
    /**
     * Validates a secure token and optionally checks if it matches provided limits.
     *
     * Performs validations:
     * 1. Signature verification using HMAC-SHA256
     * 2. Expiration timestamp check
     * 3. Optional limit validation against provided check values
     *
     * @param token - The base64url-encoded token to validate
     * @param checkLimits - Optional values to verify against token's restrictions
     * @param checkLimits.company - Company to check against token limit
     * @param checkLimits.project - Project to check against token limit
     * @param checkLimits.app - App to check against token limit
     * @param checkLimits.url - URL path to check against token limit
     * @param checkLimits.prefix - File prefix to check against token limit
     *
     * @returns true if token is valid and all checks pass, false otherwise
     *
     * @example
     * // Validate token without checking limits
     * const isValid = validateSecureToken(token);
     *
     * @example
     * // Validate token and check company/project access
     * const isValid = validateSecureToken(token, {
     *   company: 'acme',
     *   project: 'website'
     * });
     */
    validateSecureToken(token, checkLimits) {
        const secret = process.env.SERVER_SECRET || 'secret';
        let decoded;
        // Decode from base64url format
        try {
            decoded = Buffer.from(token, 'base64url').toString('utf-8');
        }
        catch {
            return false;
        }
        // Extract payload and signature (signature is after last colon)
        const lastColonIndex = decoded.lastIndexOf(':');
        if (lastColonIndex === -1)
            return false;
        const payload = decoded.substring(0, lastColonIndex);
        const signature = decoded.substring(lastColonIndex + 1);
        // Verify signature matches expected HMAC
        const expectedSignature = (0, crypto_1.createHmac)('sha256', secret).update(payload).digest('hex');
        if (signature !== expectedSignature)
            return false;
        // Parse payload structure: fileId:expires[:base64(limits)]
        const parts = payload.split(':');
        // parts[0] is fileId
        // parts[1] is expires timestamp
        // parts[2] is optional base64-encoded JSON limits object
        if (parts.length < 2)
            return false;
        // Check if token has expired
        const expires = parseInt(parts[1], 10);
        if (Date.now() / 1000 > expires)
            return false;
        // If token has limits, validate against checkLimits
        if (parts.length > 2) {
            let tokenLimits;
            try {
                // Decode from base64 first, then parse JSON
                const limitsJson = Buffer.from(parts[2], 'base64').toString('utf-8');
                tokenLimits = JSON.parse(limitsJson);
            }
            catch {
                return false;
            }
            // Token has limits but no values provided to check
            if (!checkLimits)
                return false;
            // Validate each limit field if present in token
            if (tokenLimits.company && tokenLimits.company !== checkLimits.company)
                return false;
            if (tokenLimits.project && tokenLimits.project !== checkLimits.project)
                return false;
            if (tokenLimits.app && tokenLimits.app !== checkLimits.app)
                return false;
            if (tokenLimits.prefix && tokenLimits.prefix !== checkLimits.prefix)
                return false;
            // For URL limit, check if requested URL starts with the token's allowed prefix
            if (tokenLimits.url && checkLimits.url && !checkLimits.url.startsWith(tokenLimits.url))
                return false;
        }
        else if (checkLimits) {
            return false;
        }
        return true;
    }
```
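Validation is deliberately symmetric: a token that carries limits is rejected when no `checkLimits` are supplied, and a limitless token is rejected whenever any `checkLimits` are; the `url` limit alone is a `startsWith` prefix check rather than strict equality. A quick sketch (both token handles are hypothetical):

```js
storage.validateSecureToken(limitedToken);                        // false: token has limits, no checks given
storage.validateSecureToken(unlimitedToken, { company: 'acme' }); // false: checks given, token has none
```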
```js
    /**
     * Creates a secure download URL with time-limited token for a storage file.
     *
     * @param fileId - The ObjectId of the storage document
     * @param options - Required options containing company, project
     * @param expiresIn - Time in seconds until the token expires (default: 3600 = 1 hour)
     * @param limits - Optional additional restrictions for the token
     *
     * @returns Object containing the full download URL with token and expiration timestamp
     *
     * @example
     * const download = await createSecureDownload(
     *   new ObjectId('507f1f77bcf86cd799439011'),
     *   { company: 'acme', project: 'website' },
     *   3600
     * );
     * // Returns: { url: 'https://...?token=...', expiresAt: 1234567890 }
     */
    async createSecureDownload(fileId, options, expiresIn = 3600, limits) {
        // Fetch storage document to get file details
        const storage = await this.getByInternalId(fileId, options);
        if (!storage)
            throw new Error('STORAGE_FILE_NOT_FOUND');
        // Merge provided limits with storage document context
        const tokenLimits = {
            company: limits?.company || storage.company,
            project: limits?.project || storage.project,
            app: limits?.app || storage.app,
            prefix: limits?.prefix || storage.prefix,
            url: limits?.url,
        };
        // Generate secure token
        const tokenData = this.getSecureToken(fileId, expiresIn, tokenLimits);
        // Construct download URL
        const baseUrl = process.env.SECURE_URL;
        const path = storage.prefix ? `${storage.prefix}/${storage.filename}` : storage.filename;
        const url = `${baseUrl}/${storage.app}/${storage.company}/${storage.project}/${path}?token=${tokenData.token}`;
        return {
            url,
            expiresAt: tokenData.expiresAt,
        };
    }
```
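The link is rooted at the SECURE_URL environment variable and always embeds app, company and project, so a file stored for acme/website under prefix `docs` would come back, illustratively, as `<SECURE_URL>/<app>/acme/website/docs/<filename>?token=…`, with `expiresAt` echoing the token's expiry.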
```js
    async handleFile(uploadInternalId, index, file) {
        if (!file)
            throw new Error('FILE_MISSING');
        /**
         * each upload needs to be registered with createUpload before uploading an actual file
         */
        const uploadFile = await upload_service_1.default.single(uploadInternalId);
        if (!uploadFile)
            throw new Error('UPLOAD_NOT_FOUND');
        /**
         * write the file to the disk.
         * after writing the stream we can check if the filesize was exceeded and the file truncated
         */
        const path = await this.writeFile(file.file, index, uploadInternalId, 'upload');
        if (file.file.truncated) {
            await fs_1.default.promises.unlink(path.file);
            throw new Error('FILE_SIZE');
        }
        const uploadState = await upload_service_1.default.addPathToUpload(uploadInternalId, index, path.file);
        /**
         * if all chunks are there, complete the file and the upload
         */
        if (uploadState.chunks === uploadState.paths?.length) {
            await upload_service_1.default.setState(uploadState._id, 'waiting');
            await jetstream_service_1.default.publish('actions.files.uploaded', uploadState);
        }
        return uploadState;
    }
    complete(upload, options) {
        return this.writeFile(upload.paths, undefined, upload._id, 'file', options);
    }
```
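Pieced together from `handleFile` and the JetStream publish, the chunked-upload flow looks roughly like the sketch below; the `chunks` source and the registered `uploadId` are hypothetical stand-ins, and the `truncated` flag suggests a busboy-style file stream, which is an assumption.

```js
// Sketch of the chunk loop, assuming the upload was registered with the upload service first.
for (const [index, chunkStream] of chunks.entries()) {
    // chunkStream: a file stream carrying a `truncated` flag (busboy-style, assumed)
    await storage.handleFile(uploadId, index, { file: chunkStream });
}
// Once every chunk has a path, handleFile flips the upload to 'waiting' and
// publishes 'actions.files.uploaded'; a consumer then calls complete() to
// concatenate the parts into the final fan-out location.
```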
```js
    async writeFile(data, index, internalId, type, options) {
        const pathAndFile = type === 'upload' && index || index === 0
            ? this.uploadPath(internalId.toString(), index)
            : this.fanoutPath(internalId.toString());
        await (0, promises_2.mkdir)(`${pathAndFile.dir}`, { recursive: true });
        if (Array.isArray(data)) {
            if (options?.useNode) {
                await this.concatenateFiles(data, pathAndFile.file);
            }
            else {
                await this.concatenateFilesWithCat(data, pathAndFile.file);
            }
        }
        else {
            const writeStream = fs_1.default.createWriteStream(pathAndFile.file);
            await (0, promises_1.pipeline)(data, writeStream);
        }
        return pathAndFile;
    }
    async writeFileVersion(data, internalId, version) {
        const pathAndFile = this.fanoutPath(internalId.toString(), version);
        await (0, promises_2.mkdir)(`${pathAndFile.dir}`, { recursive: true });
        await fs_1.default.promises.writeFile(pathAndFile.file, Uint8Array.from(data));
        return pathAndFile;
    }
    async concatenateFiles(paths, outputPath) {
        const writeStream = fs_1.default.createWriteStream(outputPath);
        const sortedPaths = paths.sort((a, b) => a.index - b.index);
        for (const single of sortedPaths) {
            const readStream = fs_1.default.createReadStream(single.path);
            await (0, promises_1.pipeline)(readStream, writeStream, { end: false });
        }
        writeStream.end();
        await new Promise((resolve, reject) => {
            writeStream.on('finish', resolve);
            writeStream.on('error', reject);
        });
    }
    async concatenateFilesWithCat(paths, outputPath) {
        const sortedPaths = paths.sort((a, b) => a.index - b.index);
        const filePaths = sortedPaths.map(single => single.path);
        return new Promise((resolve, reject) => {
            const outputStream = fs_1.default.createWriteStream(outputPath);
            const cat = (0, child_process_1.spawn)('cat', filePaths);
            cat.stdout.pipe(outputStream);
            cat.on('error', (error) => {
                reject(new Error(`cat process error: ${error.message}`));
            });
            cat.on('close', (code) => {
                if (code === 0) {
                    resolve();
                }
                else {
                    reject(new Error(`cat process exited with code ${code}`));
                }
            });
            outputStream.on('error', (error) => {
                reject(new Error(`write stream error: ${error.message}`));
            });
        });
    }
```
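Two concatenation strategies coexist: `concatenateFiles` streams each sorted part through Node's promise `pipeline` with `{ end: false }` so the target stream stays open between parts, while the default path shells out to `cat`, which avoids copying every byte through the JavaScript heap; `options.useNode` in `writeFile` selects between them.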
```js
    async removeUploadFiles(upload) {
        const promises = upload.paths?.map(async (single) => {
            try {
                await fs_1.default.promises.unlink(single.path);
            }
            catch (error) {
                console.error(`failed to remove ${single.path}:`, error);
            }
        });
        if (promises) {
            await Promise.all(promises);
            await fs_1.default.promises.rmdir(`${this.options.uploadFolder}/${upload._id}`);
        }
    }
    async removeLocalFile(relativePath) {
        const fullPath = `${this.options.dataFolder}/${relativePath}`;
        try {
            await fs_1.default.promises.access(fullPath);
            return await fs_1.default.promises.unlink(fullPath);
        }
        catch (error) {
            if (error.code === 'ENOENT') {
                console.warn(`File not found: ${fullPath}`);
                return;
            }
            throw error;
        }
    }
    /**
     * create a directory structure for the given file
     * e.g. ``/b1/f1/68fb87d6db4fb20117bcd3ce.000001``
     */
    fanoutPath(uploadId, version) {
        const baseFolder = this.options.dataFolder;
        /**
         * upload needs to have the part and index in the filename
         */
        const filename = uploadId;
        // Stable, uniform fan-out from the id (or use sha256 of expected filename+size)
        const hash = (0, crypto_1.createHash)('sha256').update(`${version || ''}${uploadId}`).digest('hex');
        const lvl1 = hash.slice(0, 2);
        const lvl2 = hash.slice(2, 4);
        const dir = [baseFolder, lvl1, lvl2].join('/');
        const file = [dir, filename].join('/');
        const relativePath = [lvl1, lvl2, filename].join('/');
        return { dir, file, relativePath };
    }
```
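A quick sketch of how the two fan-out levels fall out of the id, mirroring `fanoutPath` exactly; the example id comes from the doc comment above and the resulting directories are illustrative.

```js
// Sketch: derive the fan-out directories the same way fanoutPath does.
const { createHash } = require('crypto');

const id = '68fb87d6db4fb20117bcd3ce';
const hash = createHash('sha256').update(id).digest('hex'); // version omitted
const dir = [hash.slice(0, 2), hash.slice(2, 4)].join('/');
// A hash starting "b1f1..." would place the blob at <dataFolder>/b1/f1/68fb87d6db4fb20117bcd3ce
console.log(dir);
```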
```js
    uploadPath(uploadId, index) {
        const baseFolder = `${this.options.uploadFolder}/${uploadId}`;
        const filename = `${uploadId}.${index?.toString().padStart(6, '0')}`;
        const file = [baseFolder, filename].join('/');
        return { dir: baseFolder, file, relativePath: file };
    }
    // async createUpload(filename: string, size: number, chunks: number, mimetype: string, options: UserAppOptionsRequired) {
    //   const useFilename = this.sanitizeName(filename);
    //   if (await this.exists(options.company, options.project, useFilename)) throw new Error('FILE_EXISTS');
    //   if (await UploadService.uploadExists(options.company, options.project, useFilename)) throw new Error('UPLOAD_EXISTS');
    //   return UploadService.createUpload(useFilename, size, chunks, mimetype, options);
    // }
    static sanitizeName(name) {
        return name.replace(/[^a-z0-9-_.]/gi, '').toLowerCase();
    }
```
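`sanitizeName` strips every character outside `[a-z0-9-_.]` (case-insensitively) before lower-casing, so for example:

```js
StorageService.sanitizeName('My Report (final).PDF'); // => 'myreportfinal.pdf'
```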
```js
    getMetadata(upload, metadata) {
        const useMetadata = metadata || {};
        useMetadata.size = upload.size;
        useMetadata.mimetype = upload.mimetype;
        if (upload.mimetype) {
            if (upload.mimetype.match(/^image/)) {
                if (!Array.isArray(useMetadata.tags))
                    useMetadata.tags = [];
                useMetadata.tags?.push('image');
            }
            if (upload.mimetype.match(/^video/)) {
                if (!Array.isArray(useMetadata.tags))
                    useMetadata.tags = [];
                useMetadata.tags?.push('video');
            }
        }
        return useMetadata;
    }
    get(company, project, app, query, limit) {
        const realQuery = { company, project, app, parent: { $exists: false } };
        let tags;
        if (query.tags) {
            if (!Array.isArray(query.tags)) {
                tags = [query.tags];
            }
            else {
                tags = query.tags;
            }
            realQuery['metadata.tags'] = { $in: tags };
        }
        if (query.search) {
            realQuery.filename = { $regex: query.search, $options: 'i' };
        }
        // If ignorePrefix is given, return all matching files without prefix filtering
        if (query.ignorePrefix) {
            return storage_model_1.default.db.aggregate([
                {
                    $match: realQuery,
                },
                {
                    $facet: {
                        metadata: [{ $count: 'all' }],
                        data: [
                            { $sort: { _id: -1 } },
                            { $skip: limit?.skip || 0 },
                            { $limit: limit?.limit || 50 },
                            { $project: { path: 0, bucket: 0 } },
                            { $addFields: { type: 'file' } },
                        ],
                    },
                },
                {
                    $project: {
                        all: { $arrayElemAt: ['$metadata.all', 0] },
                        data: 1,
                        count: { $size: '$data' },
                    },
                },
            ])
                .toArray()
                .then((result) => result[0] || { all: 0, data: [], count: 0 });
        }
        // If search or tags are given, show only files at the current level (no folders)
        if (query.search || query.tags) {
            const isRootLevel = query.prefix === undefined || query.prefix === '';
            if (isRootLevel) {
                // Root level: only files without prefix
                realQuery.$or = [
                    { prefix: { $exists: false } },
                    { prefix: null },
                    { prefix: '' },
                ];
            }
            else {
                // Non-root: only files with exact prefix match
                realQuery.prefix = query.prefix;
            }
            return storage_model_1.default.db.aggregate([
                {
                    $match: realQuery,
                },
                {
                    $facet: {
                        metadata: [{ $count: 'all' }],
                        data: [
                            { $sort: { _id: -1 } },
                            { $skip: limit?.skip || 0 },
                            { $limit: limit?.limit || 50 },
                            { $project: { path: 0, bucket: 0 } },
                            { $addFields: { type: 'file' } },
                        ],
                    },
                },
                {
                    $project: {
                        all: { $arrayElemAt: ['$metadata.all', 0] },
                        data: 1,
                        count: { $size: '$data' },
                    },
                },
            ])
                .toArray()
                .then((result) => result[0] || { all: 0, data: [], count: 0 });
        }
        // Treat undefined prefix same as empty string (root level)
        const isRootLevel = query.prefix === undefined || query.prefix === '';
        if (isRootLevel) {
            // Root level: show files without prefix and extract virtual folders
            return storage_model_1.default.db.aggregate([
                {
                    $match: realQuery,
                },
                {
                    $addFields: {
                        // Extract first segment of prefix for root-level folders
                        immediateSubfolder: {
                            $cond: {
                                if: {
                                    $or: [
                                        { $eq: ['$prefix', null] },
                                        { $eq: ['$prefix', ''] },
                                        { $eq: [{ $type: '$prefix' }, 'missing'] },
                                    ],
                                },
                                then: null, // File at root level (no prefix)
                                else: {
                                    // Extract first segment of prefix
                                    $arrayElemAt: [{ $split: ['$prefix', '/'] }, 0],
                                },
                            },
                        },
                    },
                },
                {
                    $facet: {
                        files: [
                            { $match: { immediateSubfolder: null } }, // Files at root level
                            { $sort: { _id: -1 } },
                            { $skip: limit?.skip || 0 },
                            { $limit: limit?.limit || 50 },
                            { $project: { path: 0, bucket: 0, immediateSubfolder: 0 } },
                            { $addFields: { type: 'file' } },
                        ],
                        folders: [
                            { $match: { immediateSubfolder: { $ne: null } } },
                            { $group: { _id: '$immediateSubfolder' } }, // Unique folder names
                            { $sort: { _id: 1 } },
                            {
                                $project: {
                                    _id: 0,
                                    prefix: '$_id',
                                    name: '$_id',
                                    type: { $literal: 'folder' },
                                },
                            },
                        ],
                        metadata: [
                            { $match: { immediateSubfolder: null } },
                            { $count: 'all' },
                        ],
                    },
                },
                {
                    $project: {
                        all: { $arrayElemAt: ['$metadata.all', 0] },
                        data: { $concatArrays: ['$folders', '$files'] },
                        count: { $add: [{ $size: '$folders' }, { $size: '$files' }] },
                    },
                },
            ])
                .toArray()
                .then((result) => result[0] || { all: 0, data: [], count: 0 });
        }
        // Non-root: show exact prefix match and extract subfolders
        const prefixWithSlash = query.prefix.endsWith('/') ? query.prefix : `${query.prefix}/`;
        const escapedPrefix = prefixWithSlash.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
        // Build non-root query by adding prefix conditions to realQuery
        const nonRootQuery = {
            ...realQuery,
            $or: [
                { prefix: query.prefix }, // Exact match files
                { prefix: { $regex: `^${escapedPrefix}` } }, // Deeper files for folder extraction
            ],
        };
        return storage_model_1.default.db.aggregate([
            {
                $match: nonRootQuery,
            },
            {
                $addFields: {
                    // Extract immediate subfolder name after current prefix
                    immediateSubfolder: {
                        $cond: {
                            if: { $eq: ['$prefix', query.prefix] },
                            then: null, // File at exact level
                            else: {
                                // Extract first segment after prefix
                                $arrayElemAt: [
                                    { $split: [{ $substr: ['$prefix', prefixWithSlash.length, -1] }, '/'] },
                                    0,
                                ],
                            },
                        },
                    },
                },
            },
            {
                $facet: {
                    files: [
                        { $match: { immediateSubfolder: null } }, // Files at exact level
                        { $sort: { _id: -1 } },
                        { $skip: limit?.skip || 0 },
                        { $limit: limit?.limit || 50 },
                        { $project: { path: 0, bucket: 0, immediateSubfolder: 0 } },
                        { $addFields: { type: 'file' } },
                    ],
                    folders: [
                        { $match: { immediateSubfolder: { $ne: null } } },
                        { $group: { _id: '$immediateSubfolder' } }, // Unique folder names
                        { $sort: { _id: 1 } },
                        {
                            $project: {
                                _id: 0,
                                prefix: { $concat: [prefixWithSlash, '$_id'] },
                                name: '$_id',
                                type: { $literal: 'folder' },
                            },
                        },
                    ],
                    metadata: [
                        { $match: { immediateSubfolder: null } },
                        { $count: 'all' },
                    ],
                },
            },
            {
                $project: {
                    all: { $arrayElemAt: ['$metadata.all', 0] },
                    data: { $concatArrays: ['$folders', '$files'] },
                    count: { $add: [{ $size: '$folders' }, { $size: '$files' }] },
                },
            },
        ])
            .toArray()
            .then((result) => result[0] || { all: 0, data: [], count: 0 });
    }
```
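The listing endpoint derives virtual folders purely from each document's `prefix`: folders are emitted before files, `all` counts only files at the current level, and `search`, `tags`, or `ignorePrefix` each collapse the result to a flat file list. Illustratively, with documents whose prefixes are `''`, `'docs'`, and `'docs/2024'`, a root-level call returns something shaped like:

```js
// Illustrative result shape only (field values are hypothetical):
// {
//   all: 1,                                          // files at this level
//   data: [
//     { prefix: 'docs', name: 'docs', type: 'folder' },
//     { filename: 'readme.txt', type: 'file', ... },
//   ],
//   count: 2,                                        // folders + files returned
// }
```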
```js
    getTags(company, project, search) {
        let cleanSearch = search.toLowerCase().trim();
        let sortString = cleanSearch;
        let splitSearch = [];
        if (cleanSearch.includes(':')) {
            splitSearch = cleanSearch.split(':');
            cleanSearch = new RegExp(`^${splitSearch[0]}:.*?(${splitSearch[1]})`);
            sortString = splitSearch[1];
        }
        else {
            cleanSearch = new RegExp(`^(?!([a-z]+):).*(${cleanSearch}).*`);
        }
        return storage_model_1.default.db.find({ company, project, tags: { $regex: cleanSearch, $options: 'i' } }, { projection: { tags: 1 } })
            .toArray()
            .then((matchedDocuments) => this.matchSortTags(matchedDocuments, cleanSearch, sortString));
    }
```
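Tag search has two modes: a namespaced query such as `color:re` only matches tags inside that namespace (`^color:.*?(re)`), while a bare query such as `re` deliberately skips namespaced tags via the negative lookahead `^(?!([a-z]+):)`; the extracted search term is then used by `matchSortTags` to rank results by match position.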
```js
    single(company, project, filename) {
        return storage_model_1.default.db.aggregate([
            {
                $match: {
                    company,
                    project,
                    id: filename,
                },
            },
            {
                $lookup: {
                    from: 'editor',
                    as: 'baseContentData',
                    let: {
                        pid: '$baseContent.id',
                        company: '$company',
                        project: '$project',
                    },
                    pipeline: [
                        {
                            $match: {
                                $and: [
                                    {
                                        $expr: { $eq: ['$_id', '$$pid'] },
                                    },
                                    {
                                        $expr: { $eq: ['$company', '$$company'] },
                                    },
                                    {
                                        $expr: { $eq: ['$project', '$$project'] },
                                    },
                                ],
                            },
                        },
                        {
                            $project: {
                                id: 1,
                                history: 1,
                            },
                        },
                    ],
                },
            },
            {
                $unwind: {
                    path: '$baseContentData',
                    preserveNullAndEmptyArrays: true,
                },
            },
            {
                $addFields: {
                    'baseContent.content': '$baseContentData',
                },
            },
            {
                $project: {
                    baseContentData: 0,
                },
            },
        ])
            .toArray()
            .then((data) => (data.length ? data[0] : null));
    }
    content(company, project, id) {
        return storage_model_1.default.db.aggregate([
            {
                $match: {
                    company,
                    project,
                    'baseContent.id': id,
                },
            },
            {
                $setWindowFields: {
                    partitionBy: '$baseContent.id',
                    sortBy: {
                        'baseContent.version': -1,
                    },
                    output: {
                        availableVersions: {
                            $count: {},
                            window: {
                                documents: [
                                    'unbounded', 'unbounded',
                                ],
                            },
                        },
                    },
                },
            },
            {
                $limit: 1,
            },
        ])
            .toArray()
            .then((data) => (data.length ? data[0] : null));
    }
```
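`single()` joins the matching storage document against the `editor` collection to inline `baseContent.content`, while `content()` uses `$setWindowFields` to attach an `availableVersions` count across every document sharing the same `baseContent.id` before returning the highest-versioned one.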
```js
    /**
     * checks if the file was already successfully uploaded
     */
    exists(filename, prefix, app, options) {
        return storage_model_1.default.db.countDocuments({
            company: options.company,
            project: options.project,
            filename,
            prefix,
            app,
        })
            .then((exists) => exists >= 1);
    }
    matchSortTags(documents, search, sort) {
        const mergedArray = [];
        const returnArray = [];
        documents.forEach((document) => {
            document.tags.forEach((tag) => {
                if (mergedArray.find((tagArray) => tagArray.tag === tag))
                    return;
                if (!tag.match(search))
                    return;
                mergedArray.push({ tag, rating: tag.indexOf(sort) });
            });
        });
        mergedArray.sort((a, b) => (a.rating > b.rating ? 1 : -1));
        mergedArray.forEach((tag) => {
            returnArray.push(tag.tag);
        });
        return returnArray;
    }
    checkAllowedSize(filesize, allowedSize) {
        return filesize < allowedSize;
    }
    stats(company, project) {
        const aggregation = [
            {
                $match: {
                    company,
                    project,
                    parent: { $exists: false },
                },
            },
            {
                $group: {
                    _id: null,
                    used: {
                        $sum: '$metadata.size',
                    },
                    files: {
                        $sum: 1,
                    },
                },
            },
            {
                $project: {
                    _id: 0,
                    used: 1,
                    files: 1,
                },
            },
        ];
        return storage_model_1.default.db.aggregate(aggregation).toArray()
            .then((value) => value[0] || { files: 0, used: 0 });
    }
    getByInternalId(internalId, options) {
        return storage_model_1.default.db.findOne({ _id: internalId, company: options.company, project: options.project });
    }
    getPathByFilename(filename, app, options) {
        const filenameObject = (0, filenameHelper_1.default)(filename);
        return storage_model_1.default.db.findOne({
            filename: filenameObject.filename,
            prefix: filenameObject.directory,
            company: options.company,
            project: options.project,
            app,
        }, {
            projection: { path: 1, metadata: 1 },
        });
    }
    getByFilename(filename, prefix, bucket, options) {
        return storage_model_1.default.db.findOne({
            filename,
            prefix,
            company: options.company,
            project: options.project,
            bucket,
        });
    }
    async removeBlob(internalId, options) {
        const storage = await this.model.db.findOne({ _id: internalId, company: options.company, project: options.project });
        if (!storage)
            throw new Error('BLOB_NOT_FOUND');
        await fs_1.default.promises.unlink(`${this.options.dataFolder}/${storage.path}`);
        await this.removeBlobVersion(internalId, options);
        return this.delete(internalId, options.user, options.app);
    }
    async removeBlobVersion(parentId, options) {
        const versions = await this.model.db.find({ parent: parentId, company: options.company, project: options.project }).toArray();
        if (versions.length) {
            await Promise.allSettled(versions.map(async (single) => {
                await fs_1.default.promises.unlink(`${this.options.dataFolder}/${single.path}`);
                return this.delete(single._id, options.user, options.app);
            }));
        }
    }
}
exports.StorageService = StorageService;
exports.default = new StorageService({
    dataFolder: process.env.MEDIA_FOLDERS_DATA,
    uploadFolder: process.env.MEDIA_FOLDERS_UPLOAD,
});
//# sourceMappingURL=storage.service.js.map
```
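The default export instantiates the service with MEDIA_FOLDERS_DATA and MEDIA_FOLDERS_UPLOAD as its data and upload roots; together with SERVER_SECRET and SECURE_URL, these four environment variables are the service's only external configuration, and SERVER_SECRET silently falls back to the literal `'secret'` when unset.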