emailengine-app 2.61.5 → 2.62.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/CHANGELOG.md +88 -0
  2. package/data/google-crawlers.json +1 -1
  3. package/lib/account.js +20 -7
  4. package/lib/api-routes/account-routes.js +28 -5
  5. package/lib/api-routes/chat-routes.js +1 -1
  6. package/lib/api-routes/export-routes.js +316 -0
  7. package/lib/api-routes/message-routes.js +28 -23
  8. package/lib/api-routes/template-routes.js +28 -7
  9. package/lib/arf-detect.js +1 -1
  10. package/lib/autodetect-imap-settings.js +5 -5
  11. package/lib/consts.js +16 -0
  12. package/lib/db.js +3 -0
  13. package/lib/email-client/base-client.js +6 -4
  14. package/lib/email-client/gmail-client.js +205 -35
  15. package/lib/email-client/imap/mailbox.js +99 -8
  16. package/lib/email-client/imap/subconnection.js +5 -5
  17. package/lib/email-client/imap-client.js +76 -19
  18. package/lib/email-client/message-builder.js +3 -1
  19. package/lib/email-client/notification-handler.js +12 -9
  20. package/lib/email-client/outlook-client.js +364 -73
  21. package/lib/email-client/smtp-pool-manager.js +1 -1
  22. package/lib/export.js +528 -0
  23. package/lib/oauth/gmail.js +24 -16
  24. package/lib/oauth/mail-ru.js +26 -13
  25. package/lib/oauth/outlook.js +29 -19
  26. package/lib/oauth/pubsub/google.js +5 -0
  27. package/lib/routes-ui.js +268 -9
  28. package/lib/schemas.js +274 -81
  29. package/lib/stream-encrypt.js +263 -0
  30. package/lib/sub-script.js +2 -2
  31. package/lib/tools.js +194 -12
  32. package/lib/ui-routes/account-routes.js +23 -0
  33. package/lib/ui-routes/admin-config-routes.js +13 -6
  34. package/lib/ui-routes/admin-entities-routes.js +18 -0
  35. package/lib/webhooks.js +16 -20
  36. package/package.json +20 -20
  37. package/sbom.json +1 -1
  38. package/server.js +66 -7
  39. package/static/js/ace/ace.js +1 -1
  40. package/static/js/ace/ext-language_tools.js +1 -1
  41. package/static/licenses.html +118 -149
  42. package/translations/de.mo +0 -0
  43. package/translations/de.po +63 -36
  44. package/translations/en.mo +0 -0
  45. package/translations/en.po +64 -37
  46. package/translations/et.mo +0 -0
  47. package/translations/et.po +63 -36
  48. package/translations/fr.mo +0 -0
  49. package/translations/fr.po +63 -36
  50. package/translations/ja.mo +0 -0
  51. package/translations/ja.po +63 -36
  52. package/translations/messages.pot +84 -51
  53. package/translations/nl.mo +0 -0
  54. package/translations/nl.po +63 -36
  55. package/translations/pl.mo +0 -0
  56. package/translations/pl.po +63 -36
  57. package/views/accounts/account.hbs +375 -2
  58. package/views/config/network.hbs +45 -0
  59. package/views/config/service.hbs +35 -0
  60. package/workers/api.js +130 -47
  61. package/workers/documents.js +3 -2
  62. package/workers/export.js +933 -0
  63. package/workers/imap.js +34 -1
  64. package/workers/submit.js +33 -6
  65. package/workers/webhooks.js +20 -4
@@ -0,0 +1,933 @@
1
'use strict';

// Export worker thread: processes jobs from the "export" queue and writes
// mailbox contents to a gzip-compressed (optionally encrypted) NDJSON file.

const { parentPort } = require('worker_threads');

const packageData = require('../package.json');
const config = require('@zone-eu/wild-config');
const logger = require('../lib/logger');
const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream');

const {
    REDIS_PREFIX,
    EXPORT_COMPLETED_NOTIFY,
    EXPORT_FAILED_NOTIFY,
    DEFAULT_EXPORT_MAX_MESSAGE_SIZE,
    DEFAULT_EXPORT_MAX_MESSAGES,
    DEFAULT_EXPORT_MAX_SIZE
} = require('../lib/consts');
const { getDuration, readEnvValue, threadStats, reloadHttpProxyAgent } = require('../lib/tools');
const { webhooks: Webhooks } = require('../lib/webhooks');
const settings = require('../lib/settings');
const { Export } = require('../lib/export');

// Optional Bugsnag crash reporting, enabled only when an API key is configured.
// Bugsnag's internal log output is routed through the shared application logger.
const Bugsnag = require('@bugsnag/js');
if (readEnvValue('BUGSNAG_API_KEY')) {
    Bugsnag.start({
        apiKey: readEnvValue('BUGSNAG_API_KEY'),
        appVersion: packageData.version,
        logger: {
            debug(...args) {
                logger.debug({ msg: args.shift(), worker: 'export', source: 'bugsnag', args: args.length ? args : undefined });
            },
            // NOTE(review): info-level Bugsnag output is downgraded to debug here — confirm intentional
            info(...args) {
                logger.debug({ msg: args.shift(), worker: 'export', source: 'bugsnag', args: args.length ? args : undefined });
            },
            warn(...args) {
                logger.warn({ msg: args.shift(), worker: 'export', source: 'bugsnag', args: args.length ? args : undefined });
            },
            error(...args) {
                logger.error({ msg: args.shift(), worker: 'export', source: 'bugsnag', args: args.length ? args : undefined });
            }
        }
    });
    // Allow other modules to report caught errors through the shared logger handle
    logger.notifyError = Bugsnag.notify.bind(Bugsnag);
}
47
+
48
const { redis, queueConf } = require('../lib/db');
const { Worker } = require('bullmq');
const { Account } = require('../lib/account');
const getSecret = require('../lib/get-secret');

// Default queue concurrency settings when not provided by wild-config
config.queues = config.queues || {
    export: 1
};

config.service = config.service || {};

// Timeout (ms) for individual parent-thread command calls made via call()
const DEFAULT_EENGINE_TIMEOUT = 10 * 1000;
const EENGINE_TIMEOUT = getDuration(readEnvValue('EENGINE_TIMEOUT') || config.service.commandTimeout) || DEFAULT_EENGINE_TIMEOUT;

// Timeout (ms) for account operations performed during an export
const DEFAULT_EXPORT_TIMEOUT = 5 * 60 * 1000;
const EXPORT_TIMEOUT = getDuration(readEnvValue('EENGINE_EXPORT_TIMEOUT')) || DEFAULT_EXPORT_TIMEOUT;

// Export queue concurrency: env override > config value > 1
const EXPORT_QC = (readEnvValue('EENGINE_EXPORT_QC') && Number(readEnvValue('EENGINE_EXPORT_QC'))) || config.queues.export || 1;

const BATCH_SIZE = 100; // queued-message entries pulled per processing round in exportMessages()
const LIST_PAGE_SIZE = 1000; // page size when listing mailbox messages during indexing
const FOLDER_INDEX_MAX_RETRIES = 3; // attempts per folder before indexing gives up on it
const FOLDER_INDEX_RETRY_DELAY_MS = 1000; // base delay (ms) for folder retry exponential backoff

const IMAP_MESSAGE_MAX_RETRIES = 3; // per-message fetch attempts for non-API (IMAP) accounts
const IMAP_MESSAGE_RETRY_BASE_DELAY = 2000; // base delay (ms) for per-message retry backoff
const ACCOUNT_CHECK_INTERVAL = 60 * 1000; // how often (ms) to re-verify the account still exists
const LOCK_EXTENSION_INTERVAL = 5 * 60 * 1000; // how often (ms) to extend the BullMQ job lock
76
+
77
/**
 * Checks whether an error is likely transient (network failure, upstream 5xx,
 * or a timeout) and therefore worth retrying.
 *
 * @param {Error & { code?: string, statusCode?: number }} err - Error to inspect
 * @returns {boolean} True when the failed operation may succeed on retry
 */
function isTransientError(err) {
    // Common Node.js network-level error codes
    if (['ETIMEDOUT', 'ECONNRESET', 'ENOTFOUND', 'EAI_AGAIN', 'ECONNREFUSED', 'EPIPE', 'EHOSTUNREACH'].includes(err.code)) {
        return true;
    }
    // Any 5xx response from an upstream service is treated as retryable
    if (err.statusCode >= 500 && err.statusCode < 600) {
        return true;
    }
    // Internal command timeouts (see call()) and generic timeout messages.
    // Compare case-insensitively: messages such as "Timeout waiting for command
    // response [T6]" start with a capital T and would otherwise be missed.
    if (err.code === 'Timeout' || err.message?.toLowerCase().includes('timeout')) {
        return true;
    }
    return false;
}
89
+
90
/**
 * Determines whether a failed message fetch should be skipped (counted and
 * moved past) rather than aborting the whole export.
 *
 * @param {Error & { code?: string, statusCode?: number }} err - Fetch error
 * @returns {boolean|undefined} Truthy when the message can safely be skipped
 */
function isSkippableError(err) {
    // The message disappeared from the mailbox or the backend cannot address it
    const missing = err.code === 'MessageNotFound' || err.statusCode === 404;
    return missing || err.message?.includes('Failed to generate message ID');
}
93
+
94
// Pending parent-thread RPC calls, keyed by message id
const callQueue = new Map();
let mids = 0;

/**
 * Sends a command to the parent thread and resolves with its response.
 * The matching 'resp' message is routed back via the parentPort 'message'
 * handler; if no response arrives within the effective TTL the call rejects
 * with a 504 'Timeout' error.
 *
 * @param {object} message - Command payload; may carry its own `timeout` (ms)
 * @param {object[]} [transferList] - Optional transferable objects
 * @returns {Promise<any>} Response from the parent thread
 */
async function call(message, transferList) {
    return new Promise((resolve, reject) => {
        const mid = `${Date.now()}:${++mids}`;
        // Effective TTL is the larger of the per-message timeout and the global one
        const ttl = Math.max(message.timeout || 0, EENGINE_TIMEOUT || 0);

        const timer = setTimeout(() => {
            callQueue.delete(mid);
            const err = new Error('Timeout waiting for command response [T6]');
            err.statusCode = 504;
            err.code = 'Timeout';
            err.ttl = ttl;
            reject(err);
        }, ttl);

        callQueue.set(mid, { resolve, reject, timer });

        try {
            parentPort.postMessage({ cmd: 'call', mid, message }, transferList);
        } catch (err) {
            // postMessage failed synchronously — undo the bookkeeping and bail out
            clearTimeout(timer);
            callQueue.delete(mid);
            reject(err);
        }
    });
}
129
+
130
/**
 * Forwards a metrics operation to the parent thread.
 * Delivery failures are logged and otherwise ignored — metrics must never
 * interrupt export processing.
 *
 * @param {object} logger - Logger used to report delivery failures
 * @param {string} key - Metric key
 * @param {string} method - Metric method to invoke (e.g. 'inc')
 * @param {...any} args - Arguments for the metric method
 */
async function metrics(logger, key, method, ...args) {
    const payload = { cmd: 'metrics', key, method, args };
    try {
        parentPort.postMessage(payload);
    } catch (err) {
        logger.error({ msg: 'Failed to post metrics to parent', err });
    }
}
142
+
143
/**
 * Emits a webhook notification for an export lifecycle event and counts the
 * event in metrics.
 *
 * @param {string} account - Account id the event belongs to
 * @param {string} event - Event type constant (e.g. EXPORT_COMPLETED_NOTIFY)
 * @param {object} data - Event-specific payload
 */
async function notify(account, event, data) {
    // Track the event in metrics before queueing the webhook
    metrics(logger, 'events', 'inc', { event });

    const payload = {
        serviceUrl: (await settings.get('serviceUrl')) || null,
        account,
        date: new Date().toISOString(),
        event,
        data
    };

    const formatted = await Webhooks.formatPayload(event, payload);
    await Webhooks.pushToQueue(event, formatted);
}
157
+
158
/**
 * Indexing phase of an export job: lists messages in each selected folder and
 * queues their metadata (folder, id, uid, size, date) for the export phase.
 *
 * Folders are resolved via resolveFolders(); each folder is retried up to
 * FOLDER_INDEX_MAX_RETRIES times with exponential backoff, and a folder that
 * still fails is logged and skipped rather than failing the job. Indexing
 * stops early once the configured message limit is reached (the export is
 * then marked truncated).
 *
 * @param {object} job - BullMQ job (provides data, token, extendLock)
 * @param {object} exportData - Export state hash read from Redis
 * @throws {Error} AccountNotFound, ExportCancelled, or mailbox listing errors
 */
async function indexMessages(job, exportData) {
    const { account, exportId } = job.data;
    const folders = JSON.parse(exportData.folders || '[]');
    const startDate = new Date(Number(exportData.startDate));
    const endDate = new Date(Number(exportData.endDate));
    const indexingStartTime = new Date();

    // Bail out early if the account has been removed since the job was queued
    const accountData = await redis.hgetall(`${REDIS_PREFIX}iad:${account}`);
    if (!accountData || !accountData.account) {
        const err = new Error('Account not found or has been deleted');
        err.code = 'AccountNotFound';
        err.statusCode = 404;
        throw err;
    }

    const accountObject = new Account({
        account,
        redis,
        call,
        secret: await getSecret(),
        timeout: EXPORT_TIMEOUT
    });

    let mailboxes;
    try {
        mailboxes = await accountObject.listMailboxes();
    } catch (err) {
        throw new Error(`Failed to list mailboxes: ${err.message}`, { cause: err });
    }

    const foldersToProcess = resolveFolders(folders, mailboxes);

    await Export.update(account, exportId, { foldersTotal: foldersToProcess.length });

    // 0/undefined means unlimited
    const maxMessages = Number(await settings.get('exportMaxMessages')) || DEFAULT_EXPORT_MAX_MESSAGES;

    logger.info({ msg: 'Starting export indexing', account, exportId, foldersToProcess: foldersToProcess.length, maxMessages: maxMessages || 'unlimited' });

    let totalIndexed = 0;
    let truncated = false;
    let lastLockExtension = Date.now();

    for (let i = 0; i < foldersToProcess.length; i++) {
        // Keep the BullMQ lock alive during long indexing runs; a failed
        // extension is only logged (the job may still finish before it stalls)
        if (Date.now() - lastLockExtension > LOCK_EXTENSION_INTERVAL) {
            await job.extendLock(job.token, 10 * 60 * 1000).catch(err => {
                logger.warn({ msg: 'Failed to extend job lock during indexing', account, exportId, err });
            });
            lastLockExtension = Date.now();
        }

        // Honor user-initiated cancellation between folders
        if (await Export.isCancelled(account, exportId)) {
            const err = new Error('Export cancelled by user');
            err.code = 'ExportCancelled';
            throw err;
        }

        const folderPath = foldersToProcess[i];
        let retries = FOLDER_INDEX_MAX_RETRIES;
        let lastError = null;

        // Retry loop per folder with exponential backoff
        while (retries > 0) {
            try {
                // Remaining quota passed to indexFolder; 0 means unlimited
                const remaining = maxMessages ? maxMessages - totalIndexed : 0;
                const queued = await indexFolder(accountObject, account, exportId, folderPath, startDate, endDate, indexingStartTime, remaining);
                totalIndexed += queued;

                await Export.update(account, exportId, { foldersScanned: i + 1 });

                logger.trace({
                    msg: 'Folder indexed',
                    account,
                    exportId,
                    folder: folderPath,
                    foldersScanned: i + 1,
                    foldersTotal: foldersToProcess.length,
                    totalIndexed
                });

                lastError = null;
                break;
            } catch (err) {
                lastError = err;
                retries--;
                if (retries > 0) {
                    const attemptNumber = FOLDER_INDEX_MAX_RETRIES - retries;
                    const delay = FOLDER_INDEX_RETRY_DELAY_MS * Math.pow(2, attemptNumber - 1);
                    logger.warn({
                        msg: 'Folder indexing failed, retrying',
                        account,
                        exportId,
                        folder: folderPath,
                        retriesLeft: retries,
                        delayMs: delay,
                        err
                    });
                    await new Promise(resolve => setTimeout(resolve, delay));
                }
            }
        }

        // A folder that keeps failing is skipped, not fatal for the export
        if (lastError) {
            logger.warn({
                msg: 'Failed to index folder after retries',
                account,
                exportId,
                folder: folderPath,
                maxRetries: FOLDER_INDEX_MAX_RETRIES,
                err: lastError
            });
        }

        if (maxMessages && totalIndexed >= maxMessages) {
            truncated = true;
            logger.warn({
                msg: 'Export indexing truncated: message limit reached',
                account,
                exportId,
                totalIndexed,
                limit: maxMessages
            });
            break;
        }
    }

    if (truncated) {
        await Export.update(account, exportId, { truncated: '1' });
    }
}
286
+
287
/**
 * Maps the user-requested folder list to concrete mailbox paths.
 *
 * Entries starting with a backslash (e.g. "\Sent") are treated as special-use
 * flags and resolved against the mailbox listing; flags with no matching
 * mailbox are dropped. With no explicit selection, the "\All" folder is used
 * when present (Gmail/Outlook); otherwise every folder except Junk and Trash.
 *
 * @param {string[]|null} folders - Requested folders (paths or \Flags); may be empty
 * @param {Array<{path: string, specialUse?: string}>} mailboxes - Mailbox listing
 * @returns {string[]} Resolved folder paths, in request order
 */
function resolveFolders(folders, mailboxes) {
    if (!folders || folders.length === 0) {
        // No explicit selection: prefer the \All folder when the server has one
        const allMail = mailboxes.find(mb => mb.specialUse === '\\All');
        if (allMail) {
            return [allMail.path];
        }
        const excluded = new Set(['\\Junk', '\\Trash']);
        return mailboxes.filter(mb => !excluded.has(mb.specialUse)).map(mb => mb.path);
    }

    const resolved = [];
    for (const folder of folders) {
        if (!folder.startsWith('\\')) {
            // Plain path, use as-is
            resolved.push(folder);
            continue;
        }
        const match = mailboxes.find(mb => mb.specialUse === folder);
        if (match) {
            resolved.push(match.path);
        }
    }
    return resolved;
}
307
+
308
/**
 * Indexes a single folder: pages through its message listing (filtered by the
 * export date range) and queues each message's metadata for the export phase.
 *
 * @param {Account} accountObject - Account API wrapper
 * @param {string} account - Account id
 * @param {string} exportId - Export id
 * @param {string} folderPath - Mailbox path to index
 * @param {Date} startDate - Inclusive lower bound for message dates
 * @param {Date} endDate - Upper bound; only applied when in the past
 * @param {Date} indexingStartTime - Timestamp when indexing began
 * @param {number} maxMessages - Remaining message quota; 0 means unlimited
 * @returns {Promise<number>} Number of messages queued from this folder
 */
async function indexFolder(accountObject, account, exportId, folderPath, startDate, endDate, indexingStartTime, maxMessages) {
    let pageCursor = null;
    let queuedCount = 0;
    const limitHit = () => maxMessages && queuedCount >= maxMessages;

    do {
        // Build the search window fresh for every page request
        const search = { since: startDate };
        if (endDate < indexingStartTime) {
            // Only constrain the upper bound when it is actually in the past
            search.before = endDate;
        }

        const page = await accountObject.listMessages({
            path: folderPath,
            pageSize: LIST_PAGE_SIZE,
            search,
            metadataOnly: true,
            cursor: pageCursor
        });

        for (const msg of page.messages || []) {
            if (limitHit()) {
                return queuedCount;
            }
            await Export.queueMessage(account, exportId, {
                folder: folderPath,
                messageId: msg.id || msg.emailId,
                uid: msg.uid,
                size: msg.size || 0,
                date: msg.date ? new Date(msg.date).getTime() : Date.now()
            });
            queuedCount++;
        }

        if (limitHit()) {
            return queuedCount;
        }

        pageCursor = page.nextPageCursor;
    } while (pageCursor);

    return queuedCount;
}
354
+
355
/**
 * Export phase of an export job: drains the queued-message index (built by
 * indexMessages) and writes each message as one JSON line into a gzip stream,
 * optionally piped through an encryption stream, ending in the export file.
 *
 * Progress is checkpointed via the last processed sorted-set score so a
 * restarted job can resume. API accounts fetch messages in parallel batches
 * with rate-limit backoff; other (IMAP) accounts fetch one message at a time
 * with transient-error retries. The loop also extends the job lock, honors
 * cancellation, and re-verifies the account periodically.
 *
 * @param {object} job - BullMQ job (provides data, token, extendLock)
 * @param {object} exportData - Export state hash read from Redis
 * @throws {Error} ExportCancelled, AccountDeleted, stream or fetch errors
 */
async function exportMessages(job, exportData) {
    const { account, exportId } = job.data;
    const { filePath } = exportData;
    const includeAttachments = exportData.includeAttachments === '1';
    const textType = exportData.textType || '*';
    const maxBytes = Number(exportData.maxBytes) || 5 * 1024 * 1024;
    const maxMessageSize = (await settings.get('exportMaxMessageSize')) || DEFAULT_EXPORT_MAX_MESSAGE_SIZE;
    const maxExportSize = Number(await settings.get('exportMaxSize')) || DEFAULT_EXPORT_MAX_SIZE;
    const isEncrypted = exportData.isEncrypted === '1';

    const accountObject = new Account({
        account,
        redis,
        call,
        secret: await getSecret(),
        timeout: EXPORT_TIMEOUT
    });

    // Output pipeline: gzip -> (optional encrypt) -> file (owner-only mode)
    const gzipStream = zlib.createGzip();
    const fileStream = fs.createWriteStream(filePath, { mode: 0o600 });

    let encryptStream = null;
    // First stream error wins; checked before every write and at the end
    let streamError = null;

    const secret = isEncrypted ? await getSecret() : null;

    const streams = [gzipStream];
    if (secret) {
        const { createEncryptStream } = require('../lib/stream-encrypt');
        encryptStream = await createEncryptStream(secret);
        streams.push(encryptStream);
    }
    streams.push(fileStream);

    // pipeline() handles error propagation and teardown across the chain
    pipeline(...streams, err => {
        if (err && !streamError) {
            streamError = err;
        }
    });

    // Writes to the head of the pipeline, resolving only after 'drain' when
    // the gzip stream signals backpressure
    function writeWithBackpressure(data) {
        if (streamError) {
            return Promise.reject(streamError);
        }

        if (gzipStream.write(data)) {
            return Promise.resolve();
        }

        return new Promise((resolve, reject) => {
            const cleanup = () => {
                gzipStream.removeListener('drain', onDrain);
                gzipStream.removeListener('error', onError);
            };
            const onDrain = () => {
                cleanup();
                resolve();
            };
            const onError = err => {
                cleanup();
                reject(err);
            };
            gzipStream.once('drain', onDrain);
            gzipStream.once('error', onError);
        });
    }

    // Resume point: the score of the last fully processed queue entry
    let lastScore = Number(exportData.lastProcessedScore) || 0;
    let processed = 0;
    let totalBytesWritten = 0;
    let processingError = null;
    let sizeLimitReached = false;

    let lastAccountCheck = Date.now();
    let lastLockExtension = Date.now();

    const accountData = await accountObject.loadAccountData(account);
    const isApiAccount = await accountObject.isApiClient(accountData);
    const MESSAGE_FETCH_BATCH_SIZE = 10; // Batch size for parallel message fetching
    const MAX_RATE_LIMIT_RETRIES = 5; // Max retries for rate-limited messages
    const RATE_LIMIT_BASE_DELAY = 5000; // Base delay for rate limit backoff (5 seconds)

    // Serializes one fetched message (optionally with inlined base64
    // attachments) as a JSON line and updates progress counters
    async function processMessage(message, entry) {
        message.path = entry.folder;

        if (includeAttachments && message.attachments && message.attachments.length) {
            for (const attachment of message.attachments) {
                try {
                    if (attachment.size && attachment.size > maxMessageSize) {
                        // Too large to inline; record the reason instead of the content
                        attachment.contentError = `Attachment too large (${attachment.size} bytes, limit ${maxMessageSize})`;
                        continue;
                    }

                    const stream = await accountObject.getAttachment(attachment.id);
                    const chunks = [];
                    let totalSize = 0;

                    for await (const chunk of stream) {
                        totalSize += chunk.length;
                        if (totalSize > maxMessageSize) {
                            // Reported size was wrong or missing — abort the download
                            if (typeof stream.destroy === 'function') {
                                stream.destroy();
                                // Give the stream a moment to close, but never hang on it
                                await new Promise(resolve => {
                                    const CLEANUP_TIMEOUT_MS = 1000;
                                    stream.once('close', resolve);
                                    setTimeout(resolve, CLEANUP_TIMEOUT_MS);
                                });
                            }
                            throw new Error(`Attachment exceeds size limit (>${maxMessageSize} bytes)`);
                        }
                        chunks.push(chunk);
                    }

                    attachment.content = Buffer.concat(chunks).toString('base64');
                } catch (attachErr) {
                    // Attachment failures are recorded in-line, never fatal
                    attachment.contentError = attachErr.message;
                }
            }
        }

        const line = JSON.stringify(message) + '\n';
        await writeWithBackpressure(line);
        totalBytesWritten += Buffer.byteLength(line);

        await Export.incrementExported(account, exportId, Buffer.byteLength(line));
        processed++;
    }

    try {
        while (true) {
            if (streamError) {
                throw streamError;
            }

            if (sizeLimitReached) {
                break;
            }

            // Keep the BullMQ lock alive during long export runs
            if (Date.now() - lastLockExtension > LOCK_EXTENSION_INTERVAL) {
                await job.extendLock(job.token, 10 * 60 * 1000).catch(err => {
                    logger.warn({ msg: 'Failed to extend job lock', account, exportId, err });
                });
                lastLockExtension = Date.now();
            }

            if (await Export.isCancelled(account, exportId)) {
                const err = new Error('Export cancelled by user');
                err.code = 'ExportCancelled';
                throw err;
            }

            // Periodically make sure the account was not deleted mid-export
            if (Date.now() - lastAccountCheck > ACCOUNT_CHECK_INTERVAL) {
                const accountCheck = await redis.hgetall(`${REDIS_PREFIX}iad:${account}`);
                if (!accountCheck || !accountCheck.account) {
                    const err = new Error('Account was deleted during export');
                    err.code = 'AccountDeleted';
                    throw err;
                }
                lastAccountCheck = Date.now();
            }

            const batch = await Export.getNextBatch(account, exportId, lastScore, BATCH_SIZE);
            if (batch.length === 0) {
                break;
            }

            // Pre-filter: messages whose attachments would exceed the per-message
            // limit are counted as skipped without fetching
            const entriesToFetch = [];
            for (const entry of batch) {
                if (includeAttachments && entry.size > maxMessageSize) {
                    await Export.incrementSkipped(account, exportId);
                    lastScore = entry.score;
                } else {
                    entriesToFetch.push(entry);
                }
            }

            if (entriesToFetch.length === 0) {
                await Export.updateLastProcessedScore(account, exportId, lastScore);
                continue;
            }

            if (isApiAccount && entriesToFetch.length > 1) {
                // API accounts: fetch in parallel sub-batches with rate-limit retries
                for (let i = 0; i < entriesToFetch.length; i += MESSAGE_FETCH_BATCH_SIZE) {
                    if (streamError) {
                        throw streamError;
                    }
                    if (sizeLimitReached) {
                        break;
                    }

                    let fetchBatch = entriesToFetch.slice(i, i + MESSAGE_FETCH_BATCH_SIZE);
                    let rateLimitRetry = 0;

                    while (fetchBatch.length > 0) {
                        const messageIds = fetchBatch.map(e => e.messageId);
                        const messageResults = await accountObject.getMessages(messageIds, { textType, maxBytes });

                        const resultMap = new Map();
                        for (const result of messageResults) {
                            resultMap.set(result.messageId, result);
                        }

                        // Entries to retry in the next round after backoff
                        const rateLimitedEntries = [];

                        for (const entry of fetchBatch) {
                            const result = resultMap.get(entry.messageId);

                            if (result && result.error) {
                                const err = result.error;
                                const isRateLimited = err.statusCode === 429 || err.code === 'rateLimitExceeded' || err.code === 'userRateLimitExceeded';

                                if (isSkippableError(err)) {
                                    logger.warn({
                                        msg: 'Skipping message during export',
                                        account,
                                        exportId,
                                        messageId: entry.messageId,
                                        folder: entry.folder,
                                        reason: err.message || err.code
                                    });
                                    await Export.incrementSkipped(account, exportId);
                                    lastScore = entry.score;
                                } else if (isRateLimited && rateLimitRetry < MAX_RATE_LIMIT_RETRIES) {
                                    rateLimitedEntries.push(entry);
                                } else {
                                    // Non-recoverable error: rebuild and rethrow to fail the job
                                    const error = new Error(err.message);
                                    error.code = err.code;
                                    error.statusCode = err.statusCode;
                                    throw error;
                                }
                            } else if (result && result.data) {
                                await processMessage(result.data, entry);
                                lastScore = entry.score;
                                if (maxExportSize && totalBytesWritten >= maxExportSize) {
                                    sizeLimitReached = true;
                                    break;
                                }
                            } else {
                                // No result returned for this id — treat as skipped
                                logger.warn({
                                    msg: 'Skipping message during export',
                                    account,
                                    exportId,
                                    messageId: entry.messageId,
                                    folder: entry.folder,
                                    reason: 'Message not found in batch results'
                                });
                                await Export.incrementSkipped(account, exportId);
                                lastScore = entry.score;
                            }
                        }

                        if (sizeLimitReached) {
                            break;
                        }

                        if (rateLimitedEntries.length > 0) {
                            // Exponential backoff with jitter before retrying the limited subset
                            rateLimitRetry++;
                            const delay = RATE_LIMIT_BASE_DELAY * Math.pow(2, rateLimitRetry - 1) + Math.random() * 1000;
                            logger.warn({
                                msg: 'Rate limited during export, retrying batch',
                                account,
                                exportId,
                                rateLimitedCount: rateLimitedEntries.length,
                                attempt: rateLimitRetry,
                                maxAttempts: MAX_RATE_LIMIT_RETRIES,
                                delayMs: Math.round(delay)
                            });
                            await new Promise(resolve => setTimeout(resolve, delay));
                            fetchBatch = rateLimitedEntries;
                        } else {
                            break;
                        }
                    }
                }
            } else {
                // Non-API (IMAP) path: fetch one message at a time with retries
                for (const entry of entriesToFetch) {
                    if (streamError) {
                        throw streamError;
                    }

                    let message = null;
                    let fetchError = null;

                    for (let attempt = 1; attempt <= IMAP_MESSAGE_MAX_RETRIES; attempt++) {
                        try {
                            message = await accountObject.getMessage(entry.messageId, { textType, maxBytes });
                            break; // Success - exit retry loop
                        } catch (err) {
                            if (isSkippableError(err)) {
                                fetchError = err;
                                break;
                            }

                            if (isTransientError(err) && attempt < IMAP_MESSAGE_MAX_RETRIES) {
                                const delay = IMAP_MESSAGE_RETRY_BASE_DELAY * Math.pow(2, attempt - 1);
                                logger.warn({
                                    msg: 'Message fetch failed, retrying',
                                    account,
                                    exportId,
                                    messageId: entry.messageId,
                                    folder: entry.folder,
                                    attempt,
                                    maxAttempts: IMAP_MESSAGE_MAX_RETRIES,
                                    delayMs: delay,
                                    errorCode: err.code,
                                    errorMessage: err.message
                                });
                                await new Promise(resolve => setTimeout(resolve, delay));
                                continue;
                            }

                            fetchError = err;
                            break;
                        }
                    }

                    if (message) {
                        await processMessage(message, entry);
                        if (maxExportSize && totalBytesWritten >= maxExportSize) {
                            sizeLimitReached = true;
                            lastScore = entry.score;
                            break;
                        }
                    } else if (fetchError && isSkippableError(fetchError)) {
                        logger.warn({
                            msg: 'Skipping message during export',
                            account,
                            exportId,
                            messageId: entry.messageId,
                            folder: entry.folder,
                            reason: fetchError.message || fetchError.code
                        });
                        await Export.incrementSkipped(account, exportId);
                    } else if (fetchError) {
                        throw fetchError;
                    }

                    lastScore = entry.score;
                }
            }

            // Persist the resume checkpoint after every batch
            await Export.updateLastProcessedScore(account, exportId, lastScore);
            logger.trace({ msg: 'Export batch processed', account, exportId, messagesExported: processed });
        }
    } catch (err) {
        // Defer the error so the streams are finalized before rethrowing
        processingError = err;
    }

    if (sizeLimitReached) {
        logger.warn({
            msg: 'Export truncated: size limit reached',
            account,
            exportId,
            totalBytesWritten,
            limit: maxExportSize,
            messagesExported: processed
        });
        await Export.update(account, exportId, { truncated: '1' });
    }

    // Flush and close the pipeline; destroy everything if it does not finish in time
    const FINALIZATION_TIMEOUT = 30000;
    await new Promise((resolve, reject) => {
        const timeout = setTimeout(() => {
            gzipStream.destroy();
            if (encryptStream) encryptStream.destroy();
            fileStream.destroy();
            reject(streamError || new Error('Stream finalization timed out'));
        }, FINALIZATION_TIMEOUT);
        gzipStream.end();
        fileStream.once('finish', () => {
            clearTimeout(timeout);
            resolve();
        });
        fileStream.once('error', err => {
            clearTimeout(timeout);
            reject(streamError || err);
        });
    });

    if (processingError) {
        throw processingError;
    }
    if (streamError) {
        throw streamError;
    }

    logger.info({ msg: 'Export messages completed', account, exportId, messagesExported: processed, bytesWritten: totalBytesWritten });
}
743
+
744
// BullMQ worker that processes export jobs: the indexing phase first, then the
// export (write-to-file) phase, followed by completion/failure notifications.
const exportWorker = new Worker(
    'export',
    async job => {
        const { account, exportId } = job.data;
        const startTime = Date.now();

        logger.info({ msg: 'Processing export job', account, exportId, job: job.id });

        try {
            const exportData = await redis.hgetall(`${REDIS_PREFIX}exp:${account}:${exportId}`);
            if (!exportData || !exportData.exportId) {
                throw new Error('Export not found');
            }

            await Export.update(account, exportId, { status: 'processing', phase: 'indexing' });
            await indexMessages(job, exportData);
            await Export.update(account, exportId, { phase: 'exporting' });

            // Re-read export state: indexing updated counters and flags in Redis
            const currentExportData = await redis.hgetall(`${REDIS_PREFIX}exp:${account}:${exportId}`);
            await exportMessages(job, currentExportData);

            await Export.complete(account, exportId);

            // Final counters for the completion webhook payload
            const finalData = await redis.hgetall(`${REDIS_PREFIX}exp:${account}:${exportId}`);

            await notify(account, EXPORT_COMPLETED_NOTIFY, {
                exportId,
                folders: JSON.parse(exportData.folders || '[]'),
                startDate: new Date(Number(exportData.startDate)).toISOString(),
                endDate: new Date(Number(exportData.endDate)).toISOString(),
                messagesExported: Number(finalData.messagesExported) || 0,
                messagesSkipped: Number(finalData.messagesSkipped) || 0,
                bytesWritten: Number(finalData.bytesWritten) || 0,
                duration: Date.now() - startTime,
                expiresAt: new Date(Number(finalData.expiresAt)).toISOString()
            });

            logger.info({ msg: 'Export job completed', account, exportId, duration: Date.now() - startTime });
        } catch (err) {
            logger.error({ msg: 'Export job failed', account, exportId, err });

            const exportData = await redis.hgetall(`${REDIS_PREFIX}exp:${account}:${exportId}`).catch(() => ({}));

            // Remove the partial export file, if one was created
            if (exportData.filePath) {
                await fs.promises.unlink(exportData.filePath).catch(() => {});
            }

            await Export.fail(account, exportId, err.message);

            // No failure webhook for deleted accounts or user-initiated cancellations
            if (err.code !== 'AccountDeleted' && err.code !== 'AccountNotFound' && err.code !== 'ExportCancelled') {
                await notify(account, EXPORT_FAILED_NOTIFY, {
                    exportId,
                    error: err.message,
                    errorCode: err.code,
                    phase: exportData.phase || 'unknown',
                    messagesExported: Number(exportData.messagesExported) || 0,
                    messagesQueued: Number(exportData.messagesQueued) || 0
                });
            }

            throw err;
        }
    },
    {
        concurrency: EXPORT_QC,
        lockDuration: 10 * 60 * 1000, // long-running phases extend the lock periodically
        stalledInterval: 2 * 60 * 1000,
        maxStalledCount: 5,
        ...queueConf
    }
);
815
+
816
// Count and log successfully finished export queue entries
exportWorker.on('completed', async job => {
    metrics(logger, 'queuesProcessed', 'inc', { queue: 'export', status: 'completed' });
    const { account, exportId } = job.data;
    logger.info({ msg: 'Export queue entry completed', queue: job.queue.name, job: job.id, account, exportId });
});
820
+
821
// Count and log failed export queue entries
exportWorker.on('failed', async (job, err) => {
    metrics(logger, 'queuesProcessed', 'inc', { queue: 'export', status: 'failed' });

    if (!job) {
        // The job reference can be missing; log the error alone
        logger.error({ msg: 'Export queue entry failed', err: err.message });
        return;
    }

    const details = {
        msg: 'Export queue entry failed',
        queue: job.queue.name,
        job: job.id,
        account: job.data.account,
        exportId: job.data.exportId,
        failedReason: job.failedReason
    };
    logger.error(details);
});
836
+
837
/**
 * Handles a command sent from the parent thread.
 * Only 'resource-usage' is supported; anything else is logged and answered
 * with the sentinel value 999.
 *
 * @param {object} command - Command payload with a `cmd` field
 * @returns {object|number} Thread resource usage, or 999 for unknown commands
 */
function onCommand(command) {
    switch (command.cmd) {
        case 'resource-usage':
            return threadStats.usage();
        default:
            logger.debug({ msg: 'Unhandled command', command });
            return 999;
    }
}
844
+
845
// Startup sequence: recover state left over from a previous run, start
// periodic maintenance timers, then signal readiness to the parent thread.
(async () => {
    try {
        // Exports left mid-run by a crashed worker are marked as failed
        await Export.markInterruptedAsFailed();
        logger.info({ msg: 'Checked for interrupted exports' });
    } catch (err) {
        logger.error({ msg: 'Failed to check for interrupted exports', err });
    }

    try {
        // Remove orphaned export files from previous runs
        const cleaned = await Export.cleanup();
        if (cleaned > 0) {
            logger.info({ msg: 'Cleaned up orphaned export files', count: cleaned });
        }
    } catch (err) {
        logger.error({ msg: 'Failed to clean up export files', err });
    }

    // Heartbeat so the parent can detect a hung worker thread
    setInterval(() => {
        try {
            parentPort.postMessage({ cmd: 'heartbeat' });
        } catch {
            // Ignore errors, parent might be shutting down
        }
    }, 10 * 1000).unref();

    // Periodically drop active-set entries whose backing export hash is gone
    setInterval(
        async () => {
            try {
                const activeExports = await redis.smembers(`${REDIS_PREFIX}exp:active`);
                for (const entry of activeExports) {
                    // Entries are `<account>:exp_<id>`; split on the ':exp_' marker
                    // rather than the first colon, as account ids may contain colons
                    const separatorIndex = entry.indexOf(':exp_');
                    if (separatorIndex === -1) continue;
                    const entryAccount = entry.substring(0, separatorIndex);
                    const entryExportId = entry.substring(separatorIndex + 1);
                    const exists = await redis.exists(`${REDIS_PREFIX}exp:${entryAccount}:${entryExportId}`);
                    if (!exists) {
                        await redis.srem(`${REDIS_PREFIX}exp:active`, entry);
                        logger.info({ msg: 'Cleaned stale active set entry', account: entryAccount, exportId: entryExportId });
                    }
                }
            } catch (err) {
                logger.error({ msg: 'Failed to clean stale active set entries', err });
            }
        },
        5 * 60 * 1000
    ).unref();

    parentPort.postMessage({ cmd: 'ready' });
})();
894
+
895
// Message router for the parent-thread channel:
// - 'resp': response to one of our pending call() requests
// - 'call': a command from the parent that this worker must answer
// - 'settings': runtime settings changed; react when relevant
parentPort.on('message', message => {
    if (message && message.cmd === 'resp' && message.mid && callQueue.has(message.mid)) {
        const { resolve, reject, timer } = callQueue.get(message.mid);
        clearTimeout(timer);
        callQueue.delete(message.mid);

        if (message.error) {
            // Rebuild an Error object from the serialized fields
            const err = new Error(message.error);
            if (message.code) err.code = message.code;
            if (message.statusCode) err.statusCode = message.statusCode;
            if (message.info) err.info = message.info;
            return reject(err);
        }
        return resolve(message.response);
    }

    if (message && message.cmd === 'call' && message.mid) {
        // Answer parent commands asynchronously; serialize errors by field
        Promise.resolve(onCommand(message.message))
            .then(response => parentPort.postMessage({ cmd: 'resp', mid: message.mid, response }))
            .catch(err => parentPort.postMessage({ cmd: 'resp', mid: message.mid, error: err.message, code: err.code, statusCode: err.statusCode }));
    }

    if (message && message.cmd === 'settings') {
        let d = message.data || {};
        // Proxy configuration changed — rebuild the shared HTTP proxy agent
        if ('httpProxyEnabled' in d || 'httpProxyUrl' in d) {
            reloadHttpProxyAgent().catch(err => logger.error({ msg: 'Failed to reload HTTP proxy agent', err }));
        }
    }
});
924
+
925
logger.info({ msg: 'Started export worker thread', version: packageData.version });

// Exposed for unit testing; the worker itself runs via module side effects
module.exports = {
    isTransientError,
    isSkippableError,
    IMAP_MESSAGE_MAX_RETRIES,
    IMAP_MESSAGE_RETRY_BASE_DELAY,
    ACCOUNT_CHECK_INTERVAL
};