emailengine-app 2.61.5 → 2.62.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/CHANGELOG.md +78 -0
  2. package/data/google-crawlers.json +1 -1
  3. package/lib/account.js +20 -7
  4. package/lib/api-routes/account-routes.js +28 -5
  5. package/lib/api-routes/chat-routes.js +1 -1
  6. package/lib/api-routes/export-routes.js +316 -0
  7. package/lib/api-routes/message-routes.js +28 -23
  8. package/lib/api-routes/template-routes.js +28 -7
  9. package/lib/arf-detect.js +1 -1
  10. package/lib/consts.js +16 -0
  11. package/lib/db.js +3 -0
  12. package/lib/email-client/base-client.js +6 -4
  13. package/lib/email-client/gmail-client.js +204 -33
  14. package/lib/email-client/imap/mailbox.js +99 -8
  15. package/lib/email-client/imap/subconnection.js +5 -5
  16. package/lib/email-client/imap-client.js +76 -16
  17. package/lib/email-client/message-builder.js +3 -1
  18. package/lib/email-client/notification-handler.js +12 -9
  19. package/lib/email-client/outlook-client.js +362 -69
  20. package/lib/email-client/smtp-pool-manager.js +1 -1
  21. package/lib/export.js +528 -0
  22. package/lib/oauth/gmail.js +21 -13
  23. package/lib/oauth/mail-ru.js +23 -10
  24. package/lib/oauth/outlook.js +26 -16
  25. package/lib/oauth/pubsub/google.js +5 -0
  26. package/lib/routes-ui.js +235 -1
  27. package/lib/schemas.js +260 -80
  28. package/lib/stream-encrypt.js +263 -0
  29. package/lib/tools.js +30 -4
  30. package/lib/ui-routes/account-routes.js +23 -0
  31. package/lib/ui-routes/admin-config-routes.js +11 -4
  32. package/lib/ui-routes/admin-entities-routes.js +18 -0
  33. package/lib/webhooks.js +16 -20
  34. package/package.json +16 -16
  35. package/sbom.json +1 -1
  36. package/server.js +41 -5
  37. package/static/js/ace/ace.js +1 -1
  38. package/static/js/ace/ext-language_tools.js +1 -1
  39. package/static/licenses.html +52 -62
  40. package/translations/de.mo +0 -0
  41. package/translations/de.po +63 -36
  42. package/translations/en.mo +0 -0
  43. package/translations/en.po +64 -37
  44. package/translations/et.mo +0 -0
  45. package/translations/et.po +63 -36
  46. package/translations/fr.mo +0 -0
  47. package/translations/fr.po +63 -36
  48. package/translations/ja.mo +0 -0
  49. package/translations/ja.po +63 -36
  50. package/translations/messages.pot +80 -47
  51. package/translations/nl.mo +0 -0
  52. package/translations/nl.po +63 -36
  53. package/translations/pl.mo +0 -0
  54. package/translations/pl.po +63 -36
  55. package/views/accounts/account.hbs +375 -2
  56. package/views/config/service.hbs +35 -0
  57. package/workers/api.js +123 -44
  58. package/workers/documents.js +1 -0
  59. package/workers/export.js +926 -0
  60. package/workers/imap.js +29 -0
  61. package/workers/submit.js +25 -5
  62. package/workers/webhooks.js +11 -2
package/lib/export.js ADDED
@@ -0,0 +1,528 @@
1
+ 'use strict';
2
+
3
+ const os = require('os');
4
+ const crypto = require('crypto');
5
+ const fs = require('fs');
6
+ const pathlib = require('path');
7
+ const msgpack = require('msgpack5')();
8
+ const { redis, exportQueue } = require('./db');
9
+ const settings = require('./settings');
10
+ const logger = require('./logger');
11
+ const getSecret = require('./get-secret');
12
+
13
+ const {
14
+ REDIS_PREFIX,
15
+ DEFAULT_EXPORT_MAX_AGE,
16
+ DEFAULT_EXPORT_MAX_CONCURRENT,
17
+ DEFAULT_EXPORT_MAX_GLOBAL_CONCURRENT,
18
+ DEFAULT_EXPORT_MAX_MESSAGE_SIZE
19
+ } = require('./consts');
20
+
21
const EXPORT_ID_PREFIX = 'exp_';
const ACTIVE_EXPORTS_KEY = `${REDIS_PREFIX}exp:active`;

// Atomically check concurrent limits AND add to active set if under limits.
// Combining the check and the add in a single Lua script eliminates the TOCTOU
// race a separate "check, then SADD" sequence would have.
const CONCURRENT_CHECK_AND_ADD_SCRIPT = `
local activeKey = KEYS[1]
local maxConcurrent = tonumber(ARGV[1])
local maxGlobal = tonumber(ARGV[2])
local accountPrefix = ARGV[3]
local activeEntry = ARGV[4]

-- Check the global limit first using O(1) SCARD, so we avoid fetching the
-- whole member list when the instance is already saturated
if redis.call('SCARD', activeKey) >= maxGlobal then
    return 0
end

-- Check the per-account limit; entries are "<account>:<exportId>"
local members = redis.call('SMEMBERS', activeKey)
local accountCount = 0
for _, member in ipairs(members) do
    if string.sub(member, 1, #accountPrefix) == accountPrefix then
        accountCount = accountCount + 1
    end
end

if accountCount >= maxConcurrent then
    return 0
end

-- Under both limits - atomically add to active set
redis.call('SADD', activeKey, activeEntry)
return 1
`;
57
+
58
// Generate a unique export job identifier, e.g. "exp_3f9c1a...".
function generateExportId() {
    const token = crypto.randomBytes(12).toString('hex');
    return `${EXPORT_ID_PREFIX}${token}`;
}
61
+
62
// Build an Error that carries an application error code and an HTTP status
// code, so API routes can map it to a response directly.
function createError(message, code, statusCode) {
    const err = new Error(message);
    Object.assign(err, { code, statusCode });
    return err;
}
68
+
69
// Reserve a concurrency slot for a new export run.
// Adds "<account>:<exportId>" to the active set when both the per-account and
// the global concurrent limits allow it; otherwise throws a 429
// "TooManyExports" error. Returns the entry that was added so the caller can
// release it later.
async function tryAddToActiveSet(account, exportId) {
    const perAccountLimit = (await settings.get('exportMaxConcurrent')) || DEFAULT_EXPORT_MAX_CONCURRENT;
    const globalLimit = (await settings.get('exportMaxGlobalConcurrent')) || DEFAULT_EXPORT_MAX_GLOBAL_CONCURRENT;

    const entry = `${account}:${exportId}`;
    const prefix = `${account}:`;

    const wasAdded = await redis.eval(CONCURRENT_CHECK_AND_ADD_SCRIPT, 1, ACTIVE_EXPORTS_KEY, perAccountLimit, globalLimit, prefix, entry);

    if (!wasAdded) {
        throw createError('Maximum concurrent exports reached', 'TooManyExports', 429);
    }

    return entry;
}
84
+
85
// Collect every Redis key matching a pattern using cursor-based SCAN
// (non-blocking, unlike KEYS).
async function scanKeys(pattern) {
    const found = [];
    let cursor = '0';
    for (;;) {
        const [nextCursor, batch] = await redis.scan(cursor, 'MATCH', pattern, 'COUNT', 100);
        found.push(...batch);
        cursor = nextCursor;
        if (cursor === '0') {
            break;
        }
    }
    return found;
}
95
+
96
// Redis hash key that holds the metadata/state of a single export job
function getExportKey(account, exportId) {
    const key = `${REDIS_PREFIX}exp:${account}:${exportId}`;
    return key;
}
99
+
100
// Redis sorted-set key that holds the queued message references of an export
function getExportQueueKey(account, exportId) {
    const key = `${REDIS_PREFIX}exq:${account}:${exportId}`;
    return key;
}
103
+
104
// Directory for export output files.
// Precedence: "exportPath" setting → EENGINE_EXPORT_PATH env var → OS tmp dir.
async function getExportPath() {
    const configured = await settings.get('exportPath');
    if (configured) {
        return configured;
    }
    return process.env.EENGINE_EXPORT_PATH || os.tmpdir();
}
107
+
108
// Maximum lifetime (in milliseconds) of an export before it expires.
// Precedence: "exportMaxAge" setting → EENGINE_EXPORT_MAX_AGE env var →
// built-in default. Non-numeric or falsy candidates are skipped.
async function getExportMaxAge() {
    const candidates = [await settings.get('exportMaxAge'), process.env.EENGINE_EXPORT_MAX_AGE];
    for (const value of candidates) {
        if (value && !isNaN(value)) {
            return Number(value);
        }
    }
    return DEFAULT_EXPORT_MAX_AGE;
}
116
+
117
// Convert a date-like value (Date, ISO string, epoch milliseconds) into epoch
// milliseconds. Throws when the value cannot be parsed as a date.
function toTimestamp(date) {
    const millis = new Date(date).getTime();
    if (Number.isNaN(millis)) {
        throw new Error('Invalid date value');
    }
    return millis;
}
124
+
125
// Build a sortable sorted-set score for a queued message.
// The high digits encode the message timestamp in whole seconds; the low six
// digits are a deterministic hash-derived tiebreaker so messages sharing a
// timestamp still get distinct, stable ordering.
function calculateScore(timestamp, folder, messageId, uid) {
    let baseMillis;
    if (timestamp instanceof Date) {
        baseMillis = timestamp.getTime();
    } else {
        // NOTE: falsy/NaN timestamps (including 0) fall back to "now"
        baseMillis = Number(timestamp) || Date.now();
    }
    const baseSeconds = Math.floor(baseMillis / 1000);

    const identity = [folder || '', messageId || '', uid || ''].join(':');
    const digest = crypto.createHash('sha256').update(identity).digest();
    // First 4 digest bytes as an unsigned 32-bit integer, reduced below 1e6
    const tiebreaker = digest.readUInt32BE(0) % 1000000;

    return baseSeconds * 1000000 + tiebreaker;
}
135
+
136
/**
 * Manages the lifecycle of mailbox export jobs: creation, progress tracking,
 * message queueing for the export worker, completion/failure bookkeeping, and
 * cleanup of interrupted runs and orphaned files.
 *
 * State lives in Redis: one hash per export (getExportKey) plus one sorted set
 * of queued message references (getExportQueueKey), and a global active set
 * used for concurrency limiting.
 */
class Export {
    /**
     * Create and enqueue a new export job for an account.
     *
     * Validates the account and the requested date range, reserves a
     * concurrency slot, persists job state as a Redis hash (TTL = export max
     * age) and adds a job to the export queue.
     *
     * If anything fails after the concurrency slot was reserved, the slot and
     * any partially written state are rolled back, so a failed create() can
     * never leak a slot (the original only rolled back on queue-add failure,
     * and parsed the dates after reserving the slot).
     *
     * @param {String} account - account ID
     * @param {Object} options - export options
     * @param {Array} [options.folders] - folder paths to export (empty = all)
     * @param {Date|String|Number} options.startDate - range start
     * @param {Date|String|Number} options.endDate - range end
     * @param {String} [options.textType='*'] - text parts to include
     * @param {Number} [options.maxBytes] - per-message limit, default 5 MiB
     * @param {Boolean} [options.includeAttachments]
     * @returns {Promise<Object>} { exportId, status, created }
     * @throws AccountNotFound (404), TooManyExports (429), invalid date Error
     */
    static async create(account, options) {
        const accountData = await redis.hgetall(`${REDIS_PREFIX}iad:${account}`);
        if (!accountData || !accountData.account) {
            throw createError('Account not found', 'AccountNotFound', 404);
        }

        // Validate user-supplied dates BEFORE reserving a concurrency slot so
        // bad input cannot leak a reserved slot
        const startDate = toTimestamp(options.startDate);
        const endDate = toTimestamp(options.endDate);

        const exportId = generateExportId();
        const exportKey = getExportKey(account, exportId);

        const maxAge = await getExportMaxAge();
        const exportPath = await getExportPath();
        const now = Date.now();
        const expiresAt = now + maxAge;

        // Throws TooManyExports (429) when limits are reached
        const activeEntry = await tryAddToActiveSet(account, exportId);

        try {
            await fs.promises.mkdir(exportPath, { recursive: true });

            // Output file is encrypted only when a service secret is configured
            const secret = await getSecret();
            const isEncrypted = !!secret;
            const fileExtension = isEncrypted ? '.ndjson.gz.enc' : '.ndjson.gz';
            const filePath = pathlib.join(exportPath, `${exportId}${fileExtension}`);

            const exportData = {
                exportId,
                account,
                status: 'queued',
                phase: 'pending',
                folders: JSON.stringify(options.folders || []),
                startDate,
                endDate,
                textType: options.textType || '*',
                maxBytes: options.maxBytes || 5 * 1024 * 1024,
                includeAttachments: options.includeAttachments ? '1' : '0',
                isEncrypted: isEncrypted ? '1' : '0',
                foldersScanned: 0,
                foldersTotal: 0,
                messagesQueued: 0,
                messagesExported: 0,
                messagesSkipped: 0,
                bytesWritten: 0,
                filePath,
                lastProcessedScore: 0,
                created: now,
                expiresAt,
                error: ''
            };

            const ttl = Math.ceil(maxAge / 1000);

            await redis.multi().hmset(exportKey, exportData).expire(exportKey, ttl).exec();

            await exportQueue.add('export', { account, exportId }, { jobId: exportId, removeOnComplete: true, removeOnFail: true });
        } catch (err) {
            // Roll back the slot reservation and any partial state; keep the
            // original error even if the rollback itself fails
            await redis
                .multi()
                .srem(ACTIVE_EXPORTS_KEY, activeEntry)
                .del(exportKey)
                .exec()
                .catch(() => {});
            throw err;
        }

        logger.info({
            msg: 'Export job created',
            account,
            exportId,
            folders: options.folders,
            startDate: new Date(startDate).toISOString(),
            endDate: new Date(endDate).toISOString()
        });

        return {
            exportId,
            status: 'queued',
            created: new Date(now).toISOString()
        };
    }

    /**
     * Check whether an export has been cancelled.
     * A missing status (expired or deleted hash) also counts as cancelled so
     * workers stop processing jobs whose state has disappeared.
     */
    static async isCancelled(account, exportId) {
        const status = await redis.hget(getExportKey(account, exportId), 'status');
        return status === 'cancelled' || !status;
    }

    /**
     * Fetch the formatted status of a single export, or null when not found.
     */
    static async get(account, exportId) {
        const data = await redis.hgetall(getExportKey(account, exportId));
        return data && data.exportId ? Export.formatStatus(data) : null;
    }

    /**
     * Convert the raw Redis hash (all string values) into the API response
     * shape: ISO dates, numeric progress counters, boolean flags.
     */
    static formatStatus(data) {
        const toIsoDate = value => (value ? new Date(Number(value)).toISOString() : undefined);

        const result = {
            exportId: data.exportId,
            status: data.status,
            // "pending" is the initial placeholder phase; hide it from clients
            phase: data.phase !== 'pending' ? data.phase : undefined,
            folders: data.folders ? JSON.parse(data.folders) : [],
            startDate: toIsoDate(data.startDate),
            endDate: toIsoDate(data.endDate),
            isEncrypted: data.isEncrypted === '1',
            progress: {
                foldersScanned: Number(data.foldersScanned) || 0,
                foldersTotal: Number(data.foldersTotal) || 0,
                messagesQueued: Number(data.messagesQueued) || 0,
                messagesExported: Number(data.messagesExported) || 0,
                messagesSkipped: Number(data.messagesSkipped) || 0,
                bytesWritten: Number(data.bytesWritten) || 0
            },
            created: toIsoDate(data.created),
            expiresAt: toIsoDate(data.expiresAt),
            error: data.error || null
        };

        return result;
    }

    /**
     * List exports for an account, newest first, paginated.
     * @param {String} account
     * @param {Object} [options] - { page (0-based), pageSize (default 20) }
     * @returns {Promise<Object>} { total, page, pages, exports }
     */
    static async list(account, options = {}) {
        const page = Number(options.page) || 0;
        const pageSize = Number(options.pageSize) || 20;

        const pattern = `${REDIS_PREFIX}exp:${account}:${EXPORT_ID_PREFIX}*`;
        const keys = await scanKeys(pattern);

        // Fetch all export hashes in parallel instead of one sequential
        // round-trip per key
        const entries = await Promise.all(keys.map(key => redis.hgetall(key)));

        const exports = [];
        for (const data of entries) {
            if (data && data.exportId) {
                exports.push({
                    exportId: data.exportId,
                    status: data.status,
                    created: data.created ? new Date(Number(data.created)).toISOString() : undefined,
                    expiresAt: data.expiresAt ? new Date(Number(data.expiresAt)).toISOString() : undefined
                });
            }
        }

        // Newest first
        exports.sort((a, b) => new Date(b.created) - new Date(a.created));

        const total = exports.length;
        const pages = Math.ceil(total / pageSize) || 1;

        return {
            total,
            page,
            pages,
            exports: exports.slice(page * pageSize, (page + 1) * pageSize)
        };
    }

    /**
     * Delete an export. Returns false when the export does not exist.
     *
     * Exports in "processing" state are only marked cancelled here; the
     * worker performs the actual cleanup when it notices the cancellation.
     * All other exports are cleaned up immediately (Redis keys, active-set
     * entry, output file).
     */
    static async delete(account, exportId) {
        const exportKey = getExportKey(account, exportId);
        const queueKey = getExportQueueKey(account, exportId);

        const data = await redis.hgetall(exportKey);
        if (!data || !data.exportId) {
            return false;
        }

        const isActive = data.status === 'processing';

        // Signal cancellation for active exports
        if (isActive) {
            await redis.hset(exportKey, 'status', 'cancelled');
        }

        const job = await exportQueue.getJob(exportId).catch(() => null);
        if (job) {
            await job.remove().catch(() => {});
        }

        // Active exports: worker will clean up when it detects cancellation
        // Non-active exports: clean up immediately
        if (!isActive) {
            await redis.multi().del(exportKey).del(queueKey).srem(ACTIVE_EXPORTS_KEY, `${account}:${exportId}`).exec();

            if (data.filePath) {
                await fs.promises.unlink(data.filePath).catch(() => {});
            }
        }

        logger.info({ msg: 'Export deleted', account, exportId });
        return true;
    }

    /**
     * Merge arbitrary fields into the export hash (no-op for empty updates).
     */
    static async update(account, exportId, updates) {
        if (Object.keys(updates).length > 0) {
            await redis.hmset(getExportKey(account, exportId), updates);
        }
    }

    /**
     * Queue a message reference for later export. The sorted-set score orders
     * messages by timestamp (with a deterministic tiebreaker) and the member
     * value is a base64url-encoded msgpack record identifying the message.
     * The queue key inherits the remaining TTL of the export hash.
     */
    static async queueMessage(account, exportId, messageInfo) {
        const queueKey = getExportQueueKey(account, exportId);
        const exportKey = getExportKey(account, exportId);

        const score = calculateScore(messageInfo.date, messageInfo.folder, messageInfo.messageId, messageInfo.uid);

        const value = msgpack
            .encode({
                folder: messageInfo.folder,
                messageId: messageInfo.messageId,
                uid: messageInfo.uid,
                size: messageInfo.size || 0
            })
            .toString('base64url');

        const ttl = await redis.ttl(exportKey);
        const multi = redis.multi().zadd(queueKey, score, value).hincrby(exportKey, 'messagesQueued', 1);
        if (ttl > 0) {
            multi.expire(queueKey, ttl);
        }
        await multi.exec();
    }

    /**
     * Fetch up to `limit` queued messages with score above `lastScore`.
     * Messages that fail to decode are logged and skipped.
     * @returns {Promise<Array>} decoded message records with their scores
     */
    static async getNextBatch(account, exportId, lastScore, limit) {
        const queueKey = getExportQueueKey(account, exportId);
        // Use exclusive lower bound to avoid re-processing messages at batch boundaries
        const minScore = lastScore > 0 ? '(' + lastScore : lastScore;
        const results = await redis.zrangebyscore(queueKey, minScore, '+inf', 'WITHSCORES', 'LIMIT', 0, limit);

        const messages = [];
        for (let i = 0; i < results.length; i += 2) {
            try {
                const info = msgpack.decode(Buffer.from(results[i], 'base64url'));
                messages.push({ ...info, score: Number(results[i + 1]) });
            } catch (err) {
                logger.error({ msg: 'Failed to decode message info', account, exportId, err });
            }
        }

        return messages;
    }

    // Record one successfully exported message and the bytes it added
    static async incrementExported(account, exportId, bytesWritten = 0) {
        const exportKey = getExportKey(account, exportId);
        await redis.multi().hincrby(exportKey, 'messagesExported', 1).hincrby(exportKey, 'bytesWritten', bytesWritten).exec();
    }

    // Record one skipped message (e.g. oversized or failed to fetch)
    static async incrementSkipped(account, exportId) {
        await redis.hincrby(getExportKey(account, exportId), 'messagesSkipped', 1);
    }

    // Persist the resume point so an interrupted batch loop can continue
    static async updateLastProcessedScore(account, exportId, score) {
        await redis.hset(getExportKey(account, exportId), 'lastProcessedScore', score);
    }

    /**
     * Mark an export as completed: drop the message queue and release the
     * concurrency slot. The export hash stays (with its TTL) so the file can
     * be downloaded until it expires.
     */
    static async complete(account, exportId) {
        const exportKey = getExportKey(account, exportId);
        const queueKey = getExportQueueKey(account, exportId);

        await redis
            .multi()
            .hmset(exportKey, { status: 'completed', phase: 'complete' })
            .del(queueKey)
            .srem(ACTIVE_EXPORTS_KEY, `${account}:${exportId}`)
            .exec();

        logger.info({ msg: 'Export completed', account, exportId });
    }

    /**
     * Mark an export as failed with an error message, drop the queue and
     * release the concurrency slot. Tolerates an already-expired export hash.
     */
    static async fail(account, exportId, error) {
        const exportKey = getExportKey(account, exportId);
        const queueKey = getExportQueueKey(account, exportId);

        const exportData = await redis.hgetall(exportKey);
        if (!exportData || !exportData.exportId) {
            // Key already deleted, just clean up active set
            await redis.srem(ACTIVE_EXPORTS_KEY, `${account}:${exportId}`);
            return;
        }

        await redis
            .multi()
            .hmset(exportKey, {
                status: 'failed',
                error: error || 'Unknown error'
            })
            .del(queueKey)
            .srem(ACTIVE_EXPORTS_KEY, `${account}:${exportId}`)
            .exec();

        logger.error({ msg: 'Export failed', account, exportId, error });
    }

    /**
     * On application start: mark every export still listed in the active set
     * as failed (it was interrupted by the restart), remove its queue job and
     * partial output file, and drop stale active-set entries whose export
     * hash has already expired.
     */
    static async markInterruptedAsFailed() {
        const activeExports = await redis.smembers(ACTIVE_EXPORTS_KEY);

        for (const entry of activeExports) {
            try {
                // Find ':exp_' as separator since account IDs may contain colons
                const separatorIndex = entry.indexOf(':exp_');
                if (separatorIndex === -1) continue;
                const account = entry.substring(0, separatorIndex);
                const exportId = entry.substring(separatorIndex + 1);
                if (!account || !exportId) continue;

                const data = await redis.hgetall(getExportKey(account, exportId));

                if (!data || !data.exportId) {
                    // Stale entry -- export hash expired, clean up active set
                    await redis.srem(ACTIVE_EXPORTS_KEY, entry);
                    continue;
                }

                if (['processing', 'queued', 'indexing', 'cancelled'].includes(data.status)) {
                    const job = await exportQueue.getJob(exportId).catch(() => null);
                    if (job) {
                        await job.remove().catch(() => {});
                        logger.info({ msg: 'Removed interrupted export job from queue', account, exportId });
                    }

                    await Export.fail(account, exportId, 'Export interrupted by application restart');

                    if (data.filePath) {
                        await fs.promises.unlink(data.filePath).catch(() => {});
                    }
                }
            } catch (err) {
                logger.error({ msg: 'Failed to mark interrupted export as failed', entry, err });
            }
        }
    }

    /**
     * Remove export files on disk that no longer have a matching Redis hash
     * (expired exports). Returns the number of files removed.
     */
    static async cleanup() {
        const exportPath = await getExportPath();
        let cleaned = 0;

        try {
            const files = await fs.promises.readdir(exportPath);

            for (const file of files) {
                if (!file.startsWith(EXPORT_ID_PREFIX)) continue;

                const exportId = file.split('.')[0];
                const pattern = `${REDIS_PREFIX}exp:*:${exportId}`;
                const keys = await scanKeys(pattern);

                if (keys.length === 0) {
                    try {
                        await fs.promises.unlink(pathlib.join(exportPath, file));
                        cleaned++;
                        logger.info({ msg: 'Cleaned up orphaned export file', file });
                    } catch (err) {
                        logger.error({ msg: 'Failed to clean up export file', file, err });
                    }
                }
            }
        } catch (err) {
            logger.error({ msg: 'Failed to list export directory', exportPath, err });
        }

        return cleaned;
    }

    /**
     * Resolve the downloadable file of a completed export.
     * @returns {Promise<Object|null>} { filePath, filename, isEncrypted } or
     *   null when the export does not exist
     * @throws ExportNotReady (400) when not completed, FileNotFound (404)
     *   when the file is missing or unreadable
     */
    static async getFile(account, exportId) {
        const data = await redis.hgetall(getExportKey(account, exportId));

        if (!data || !data.exportId) {
            return null;
        }

        if (data.status !== 'completed') {
            throw createError('Export not completed', 'ExportNotReady', 400);
        }

        if (!data.filePath) {
            throw createError('Export file not found', 'FileNotFound', 404);
        }

        try {
            await fs.promises.access(data.filePath, fs.constants.R_OK);
        } catch {
            throw createError('Export file not found', 'FileNotFound', 404);
        }

        const isEncrypted = data.isEncrypted === '1';

        return {
            filePath: data.filePath,
            // NOTE(review): download name omits the ".enc" suffix even for
            // encrypted files -- presumably the route decrypts on the fly;
            // confirm against the download handler
            filename: `${exportId}.ndjson.gz`,
            isEncrypted
        };
    }
}
518
+
519
// Public interface: the Export job manager plus the helpers reused by the
// export worker and the API routes. DEFAULT_EXPORT_MAX_MESSAGE_SIZE is
// re-exported from consts so consumers don't need a separate import.
module.exports = {
    Export,
    generateExportId,
    calculateScore,
    getExportKey,
    getExportQueueKey,
    getExportPath,
    getExportMaxAge,
    DEFAULT_EXPORT_MAX_MESSAGE_SIZE
};
@@ -1,7 +1,7 @@
1
1
  'use strict';
2
2
 
3
3
  const packageData = require('../../package.json');
4
- const { formatPartialSecretKey, structuredClone, retryAgent } = require('../tools');
4
+ const { formatPartialSecretKey, structuredClone, fetchAgent, retryAgent, formatTokenError } = require('../tools');
5
5
  const crypto = require('crypto');
6
6
 
7
7
  const { fetch: fetchCmd } = require('undici');
@@ -125,10 +125,10 @@ const checkForUserFlags = err => {
125
125
  };
126
126
 
127
127
  const formatFetchBody = (searchParams, logRaw) => {
128
- let data = Object.fromEntries(searchParams);
128
+ let entries = typeof searchParams === 'string' ? new URLSearchParams(searchParams) : searchParams;
129
+ let data = Object.fromEntries(entries);
129
130
 
130
131
  if (logRaw) {
131
- // no changes needed
132
132
  return data;
133
133
  }
134
134
 
@@ -138,7 +138,7 @@ const formatFetchBody = (searchParams, logRaw) => {
138
138
  }
139
139
  }
140
140
 
141
- if (data.assertion && !logRaw) {
141
+ if (data.assertion) {
142
142
  let [payload, signature] = data.assertion.toString().split('.');
143
143
  data.assertion = [payload, formatPartialSecretKey(signature)].join('.');
144
144
  }
@@ -256,7 +256,7 @@ class GmailOauth {
256
256
 
257
257
  return {
258
258
  url: url.origin + url.pathname,
259
- body: url.searchParams
259
+ body: url.searchParams.toString()
260
260
  };
261
261
  }
262
262
 
@@ -326,9 +326,9 @@ class GmailOauth {
326
326
  code
327
327
  };
328
328
  try {
329
- err.tokenRequest.response = responseJson;
329
+ err.tokenRequest.response = responseJson || { error: 'Failed to parse response' };
330
330
 
331
- if (this.clientSecret && EXPOSE_PARTIAL_SECRET_KEY_REGEX.test(err.tokenRequest.response.error_description)) {
331
+ if (this.clientSecret && EXPOSE_PARTIAL_SECRET_KEY_REGEX.test(err.tokenRequest.response?.error_description)) {
332
332
  // key might have been invalidated or renewed
333
333
  err.tokenRequest.clientSecret = formatPartialSecretKey(this.clientSecret);
334
334
  }
@@ -342,6 +342,7 @@ class GmailOauth {
342
342
  // ignore
343
343
  }
344
344
 
345
+ err.message = formatTokenError(this.provider, err.tokenRequest);
345
346
  throw err;
346
347
  }
347
348
 
@@ -381,13 +382,14 @@ class GmailOauth {
381
382
  let requestUrl = url.origin + url.pathname;
382
383
  let method = 'post';
383
384
 
385
+ const bodyString = url.searchParams.toString();
384
386
  const fetchOpts = {
385
387
  method,
386
388
  headers: {
387
389
  'Content-Type': 'application/x-www-form-urlencoded',
388
390
  'User-Agent': `${packageData.name}/${packageData.version} (+${packageData.homepage})`
389
391
  },
390
- body: url.searchParams
392
+ body: bodyString
391
393
  };
392
394
 
393
395
  let res = await fetchCmd(
@@ -438,9 +440,9 @@ class GmailOauth {
438
440
  scopes: this.scopes
439
441
  };
440
442
  try {
441
- err.tokenRequest.response = responseJson;
443
+ err.tokenRequest.response = responseJson || { error: 'Failed to parse response' };
442
444
 
443
- if (this.clientSecret && EXPOSE_PARTIAL_SECRET_KEY_REGEX.test(err.tokenRequest.response.error_description)) {
445
+ if (this.clientSecret && EXPOSE_PARTIAL_SECRET_KEY_REGEX.test(err.tokenRequest.response?.error_description)) {
444
446
  // key might have been invalidated or renewed
445
447
  err.tokenRequest.clientSecret = formatPartialSecretKey(this.clientSecret);
446
448
  }
@@ -459,13 +461,14 @@ class GmailOauth {
459
461
  } catch (e) {
460
462
  // ignore
461
463
  }
464
+ err.message = formatTokenError(this.provider, err.tokenRequest);
462
465
  throw err;
463
466
  }
464
467
 
465
468
  // clear potential auth flag
466
469
  await this.setFlag();
467
470
 
468
- return await responseJson;
471
+ return responseJson;
469
472
  }
470
473
 
471
474
  async request(accessToken, url, method, payload, options) {
@@ -485,11 +488,16 @@ class GmailOauth {
485
488
  if (!Buffer.isBuffer(payload)) {
486
489
  reqData.headers.Accept = 'application/json';
487
490
  reqData.headers['Content-Type'] = options?.contentType || 'application/json';
488
- payload = Buffer.from(JSON.stringify(payload));
491
+ // Use string body instead of Buffer to avoid ArrayBuffer detachment on retry
492
+ reqData.body = JSON.stringify(payload);
489
493
  } else {
490
494
  reqData.headers['Content-Type'] = options?.contentType || 'application/x-www-form-urlencoded';
495
+ reqData.body = payload;
496
+ if (payload.length > 0) {
497
+ // Non-empty buffers use non-retry dispatcher to prevent ArrayBuffer detachment
498
+ reqData.dispatcher = fetchAgent;
499
+ }
491
500
  }
492
- reqData.body = payload;
493
501
  } else if (payload && method === 'get') {
494
502
  let parsedUrl = new URL(url);
495
503
  for (let key of Object.keys(payload)) {