@itsliaaa/baileys 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. package/LICENSE +22 -0
  2. package/README.md +1078 -0
  3. package/WAProto/index.js +100441 -0
  4. package/engine-requirements.js +10 -0
  5. package/lib/Defaults/index.js +144 -0
  6. package/lib/Signal/Group/ciphertext-message.js +11 -0
  7. package/lib/Signal/Group/group-session-builder.js +29 -0
  8. package/lib/Signal/Group/group_cipher.js +81 -0
  9. package/lib/Signal/Group/index.js +11 -0
  10. package/lib/Signal/Group/keyhelper.js +17 -0
  11. package/lib/Signal/Group/sender-chain-key.js +25 -0
  12. package/lib/Signal/Group/sender-key-distribution-message.js +62 -0
  13. package/lib/Signal/Group/sender-key-message.js +65 -0
  14. package/lib/Signal/Group/sender-key-name.js +47 -0
  15. package/lib/Signal/Group/sender-key-record.js +40 -0
  16. package/lib/Signal/Group/sender-key-state.js +83 -0
  17. package/lib/Signal/Group/sender-message-key.js +25 -0
  18. package/lib/Signal/libsignal.js +402 -0
  19. package/lib/Signal/lid-mapping.js +270 -0
  20. package/lib/Socket/Client/index.js +2 -0
  21. package/lib/Socket/Client/types.js +10 -0
  22. package/lib/Socket/Client/websocket.js +53 -0
  23. package/lib/Socket/business.js +378 -0
  24. package/lib/Socket/chats.js +1048 -0
  25. package/lib/Socket/communities.js +430 -0
  26. package/lib/Socket/groups.js +328 -0
  27. package/lib/Socket/index.js +11 -0
  28. package/lib/Socket/messages-recv.js +1442 -0
  29. package/lib/Socket/messages-send.js +1153 -0
  30. package/lib/Socket/mex.js +41 -0
  31. package/lib/Socket/newsletter.js +227 -0
  32. package/lib/Socket/socket.js +936 -0
  33. package/lib/Store/index.js +3 -0
  34. package/lib/Store/make-in-memory-store.js +421 -0
  35. package/lib/Store/make-ordered-dictionary.js +78 -0
  36. package/lib/Store/object-repository.js +23 -0
  37. package/lib/Types/Auth.js +1 -0
  38. package/lib/Types/Bussines.js +1 -0
  39. package/lib/Types/Call.js +1 -0
  40. package/lib/Types/Chat.js +7 -0
  41. package/lib/Types/Contact.js +1 -0
  42. package/lib/Types/Events.js +1 -0
  43. package/lib/Types/GroupMetadata.js +1 -0
  44. package/lib/Types/Label.js +24 -0
  45. package/lib/Types/LabelAssociation.js +6 -0
  46. package/lib/Types/Message.js +17 -0
  47. package/lib/Types/Newsletter.js +33 -0
  48. package/lib/Types/Product.js +1 -0
  49. package/lib/Types/Signal.js +1 -0
  50. package/lib/Types/Socket.js +2 -0
  51. package/lib/Types/State.js +12 -0
  52. package/lib/Types/USync.js +1 -0
  53. package/lib/Types/index.js +25 -0
  54. package/lib/Utils/auth-utils.js +289 -0
  55. package/lib/Utils/browser-utils.js +28 -0
  56. package/lib/Utils/business.js +230 -0
  57. package/lib/Utils/chat-utils.js +811 -0
  58. package/lib/Utils/crypto.js +117 -0
  59. package/lib/Utils/decode-wa-message.js +282 -0
  60. package/lib/Utils/event-buffer.js +573 -0
  61. package/lib/Utils/generics.js +385 -0
  62. package/lib/Utils/history.js +130 -0
  63. package/lib/Utils/identity-change-handler.js +48 -0
  64. package/lib/Utils/index.js +19 -0
  65. package/lib/Utils/link-preview.js +84 -0
  66. package/lib/Utils/logger.js +2 -0
  67. package/lib/Utils/lt-hash.js +7 -0
  68. package/lib/Utils/make-mutex.js +32 -0
  69. package/lib/Utils/message-retry-manager.js +224 -0
  70. package/lib/Utils/messages-media.js +789 -0
  71. package/lib/Utils/messages.js +1832 -0
  72. package/lib/Utils/noise-handler.js +200 -0
  73. package/lib/Utils/pre-key-manager.js +105 -0
  74. package/lib/Utils/process-message.js +527 -0
  75. package/lib/Utils/reporting-utils.js +257 -0
  76. package/lib/Utils/signal.js +158 -0
  77. package/lib/Utils/sync-action-utils.js +47 -0
  78. package/lib/Utils/tc-token-utils.js +17 -0
  79. package/lib/Utils/use-multi-file-auth-state.js +120 -0
  80. package/lib/Utils/validate-connection.js +206 -0
  81. package/lib/WABinary/constants.js +1300 -0
  82. package/lib/WABinary/decode.js +261 -0
  83. package/lib/WABinary/encode.js +219 -0
  84. package/lib/WABinary/generic-utils.js +197 -0
  85. package/lib/WABinary/index.js +5 -0
  86. package/lib/WABinary/jid-utils.js +95 -0
  87. package/lib/WABinary/types.js +1 -0
  88. package/lib/WAM/BinaryInfo.js +9 -0
  89. package/lib/WAM/constants.js +22852 -0
  90. package/lib/WAM/encode.js +149 -0
  91. package/lib/WAM/index.js +3 -0
  92. package/lib/WAUSync/Protocols/USyncContactProtocol.js +28 -0
  93. package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +53 -0
  94. package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +26 -0
  95. package/lib/WAUSync/Protocols/USyncStatusProtocol.js +37 -0
  96. package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +50 -0
  97. package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +28 -0
  98. package/lib/WAUSync/Protocols/index.js +4 -0
  99. package/lib/WAUSync/USyncQuery.js +93 -0
  100. package/lib/WAUSync/USyncUser.js +22 -0
  101. package/lib/WAUSync/index.js +3 -0
  102. package/lib/index.js +11 -0
  103. package/package.json +72 -0
@@ -0,0 +1,789 @@
1
+ import { Boom } from '@hapi/boom';
2
+ import { spawn } from 'child_process';
3
+ import * as Crypto from 'crypto';
4
+ import { once } from 'events';
5
+ import { createReadStream, createWriteStream, promises as fs, WriteStream } from 'fs';
6
+ import { tmpdir } from 'os';
7
+ import { join } from 'path';
8
+ import { Readable, Transform } from 'stream';
9
+ import { URL } from 'url';
10
+ import { proto } from '../../WAProto/index.js';
11
+ import { DEFAULT_ORIGIN, MEDIA_HKDF_KEY_MAPPING, MEDIA_PATH_MAP, NEWSLETTER_MEDIA_PATH_MAP } from '../Defaults/index.js';
12
+ import { getBinaryNodeChild, getBinaryNodeChildBuffer, jidNormalizedUser } from '../WABinary/index.js';
13
+ import { aesDecryptGCM, aesEncryptGCM, hkdf } from './crypto.js';
14
+ import { generateMessageIDV2 } from './generics.js';
15
/** Directory used for temporary media files (the OS temp dir). */
function getTmpFilesDirectory() {
    return tmpdir();
}
/**
 * Resolves whichever optional image library is installed.
 * Prefers sharp over jimp; throws when neither can be imported.
 * @returns {{ sharp }} or {{ jimp }} — the raw module namespace
 */
export const getImageProcessingLibrary = async () => {
    //@ts-ignore optional dependencies — a failed import simply resolves to undefined
    const tryImport = (name) => import(name).catch(() => undefined);
    const [jimpModule, sharpModule] = await Promise.all([tryImport('jimp'), tryImport('sharp')]);
    if (sharpModule) {
        return { sharp: sharpModule };
    }
    if (jimpModule) {
        return { jimp: jimpModule };
    }
    throw new Boom('No image processing library available');
};
/** Builds the HKDF "info" string for a media type, e.g. "WhatsApp Image Keys". */
export const hkdfInfoKey = (type) => `WhatsApp ${MEDIA_HKDF_KEY_MAPPING[type]} Keys`;
/**
 * Streams a media upload (buffer / stream / URL) to a temp file while
 * computing its SHA-256 and total byte length in a single pass — no
 * encryption is applied.
 *
 * @param media input accepted by getStream (Buffer, stream holder, or URL)
 * @param mediaType used only to name the temp file
 * @param logger optional pino-style logger
 * @returns { filePath, fileSha256, fileLength } — caller owns (and must delete) filePath
 * @throws rethrows any stream/write error after best-effort temp-file cleanup
 */
export const getRawMediaUploadData = async (media, mediaType, logger) => {
    const { stream } = await getStream(media);
    logger?.debug('got stream for raw upload');
    const hasher = Crypto.createHash('sha256');
    const filePath = join(tmpdir(), mediaType + generateMessageIDV2());
    const fileWriteStream = createWriteStream(filePath);
    let fileLength = 0;
    try {
        for await (const data of stream) {
            fileLength += data.length;
            hasher.update(data);
            // respect backpressure: wait for 'drain' when the write buffer is full
            if (!fileWriteStream.write(data)) {
                await once(fileWriteStream, 'drain');
            }
        }
        fileWriteStream.end();
        // wait until everything is flushed before hashing is finalized
        await once(fileWriteStream, 'finish');
        stream.destroy();
        const fileSha256 = hasher.digest();
        logger?.debug('hashed data for raw upload');
        return {
            filePath: filePath,
            fileSha256,
            fileLength
        };
    }
    catch (error) {
        // tear down both streams and best-effort delete the partial temp file
        fileWriteStream.destroy();
        stream.destroy();
        try {
            await fs.unlink(filePath);
        }
        catch {
            // ignore — the file may never have been created
        }
        throw error;
    }
};
/** generates all the keys required to encrypt/decrypt & sign a media message */
export async function getMediaKeys(buffer, mediaType) {
    if (!buffer) {
        throw new Boom('Cannot derive from empty media key');
    }
    if (typeof buffer === 'string') {
        // media keys may arrive base64-encoded with a data-URI prefix
        buffer = Buffer.from(buffer.replace('data:;base64,', ''), 'base64');
    }
    // expand using HKDF to 112 bytes, also pass in the relevant app info
    // NOTE(review): hkdf is used here without await (as it also is in
    // getMediaRetryKey below), so the local helper is presumably synchronous —
    // confirm against ./crypto.js. Only the first 80 bytes are consumed;
    // the remaining 32 appear unused.
    const expandedMediaKey = hkdf(buffer, 112, { info: hkdfInfoKey(mediaType) });
    return {
        iv: expandedMediaKey.slice(0, 16),
        cipherKey: expandedMediaKey.slice(16, 48),
        macKey: expandedMediaKey.slice(48, 80)
    };
}
/**
 * Extracts video thumb using FFMPEG.
 * Grabs one frame at `time`, scaled to `size.width` (height keeps aspect),
 * encoded as MJPEG and returned as a Buffer from ffmpeg's stdout.
 * @throws Boom with ffmpeg's stderr output when the process exits non-zero
 */
export const extractVideoThumb = async (path, time, size) => {
    const args = [
        '-loglevel', 'error',
        '-ss', String(time),
        '-i', path,
        '-map_metadata', '-1',
        '-vf', `scale=${size.width}:-1`,
        '-frames:v', '1',
        '-c:v', 'mjpeg',
        '-f', 'image2pipe',
        'pipe:1'
    ];
    const ff = spawn('ffmpeg', args, { stdio: ['ignore', 'pipe', 'pipe'] });
    const outChunks = [];
    const errChunks = [];
    ff.stdout.on('data', piece => outChunks.push(piece));
    ff.stderr.on('data', piece => errChunks.push(piece));
    const [exitCode] = await once(ff, 'close');
    if (exitCode !== 0) {
        const stderrText = Buffer.concat(errChunks).toString('utf8');
        throw new Boom(`FFmpeg failed (code ${exitCode}):\n` + stderrText);
    }
    return Buffer.concat(outChunks);
};
/**
 * Generates a small JPEG thumbnail (default 32px wide) for an image, using
 * sharp when installed and falling back to jimp. Stream inputs are fully
 * buffered first.
 *
 * @param bufferOrFilePath Buffer, file path, or Readable stream
 * @param width target thumbnail width in pixels
 * @returns { buffer, original: { width, height } } — original dimensions may
 *          be undefined if the library could not determine them
 * @throws Boom when neither sharp nor jimp is available
 */
export const extractImageThumb = async (bufferOrFilePath, width = 32) => {
    // TODO: Move entirely to sharp, removing jimp as it supports readable streams
    // This will have positive speed and performance impacts as well as minimizing RAM usage.
    if (bufferOrFilePath instanceof Readable) {
        bufferOrFilePath = await toBuffer(bufferOrFilePath);
    }
    const lib = await getImageProcessingLibrary();
    if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
        const img = lib.sharp.default(bufferOrFilePath);
        // read dimensions before resizing so we report the source size
        const dimensions = await img.metadata();
        const buffer = await img.resize(width).jpeg({ quality: 50 }).toBuffer();
        return {
            buffer,
            original: {
                width: dimensions.width,
                height: dimensions.height
            }
        };
    }
    else if ('jimp' in lib && typeof lib.jimp?.Jimp === 'function') {
        const jimp = await lib.jimp.Jimp.read(bufferOrFilePath);
        const dimensions = {
            width: jimp.width,
            height: jimp.height
        };
        const buffer = await jimp
            .resize({ w: width, mode: lib.jimp.ResizeStrategy.BILINEAR })
            .getBuffer('image/jpeg', { quality: 50 });
        return {
            buffer,
            original: dimensions
        };
    }
    else {
        throw new Boom('No image processing library available');
    }
};
/**
 * Converts a standard base64 string into the URL-safe form WhatsApp's upload
 * endpoint expects: '+' -> '-', '/' -> '_', trailing '=' padding stripped,
 * then percent-encoded.
 */
export const encodeBase64EncodedStringForUpload = (b64) => {
    const urlSafe = b64
        .replaceAll('+', '-')
        .replaceAll('/', '_')
        .replace(/=+$/, '');
    return encodeURIComponent(urlSafe);
};
/**
 * Generates a profile-picture JPEG from any supported media input.
 *
 * @param mediaUpload Buffer, stream holder, or URL input (anything getStream accepts)
 * @param dimensions optional target size; defaults to 720x720
 * @returns { img: Buffer } the encoded JPEG
 *
 * NOTE(review): the sharp path resizes directly to w x h, while the jimp path
 * first crops a top-left square — results differ between the two libraries
 * for non-square inputs; confirm whether that asymmetry is intended.
 */
export const generateProfilePicture = async (mediaUpload, dimensions) => {
    let buffer;
    const { width: w = 720, height: h = 720 } = dimensions || {};
    if (Buffer.isBuffer(mediaUpload)) {
        buffer = mediaUpload;
    }
    else {
        // Use getStream to handle all WAMediaUpload types (Buffer, Stream, URL)
        const { stream } = await getStream(mediaUpload);
        // Convert the resulting stream to a buffer
        buffer = await toBuffer(stream);
    }
    const lib = await getImageProcessingLibrary();
    let img;
    if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
        // img holds a pending promise here; awaited at the return below
        img = lib.sharp
            .default(buffer)
            .resize(w, h)
            .jpeg({
            quality: 80
        })
            .toBuffer();
    }
    else if ('jimp' in lib && typeof lib.jimp?.Jimp === 'function') {
        const jimp = await lib.jimp.Jimp.read(buffer);
        // crop to the largest square anchored at the top-left corner
        const min = Math.min(jimp.width, jimp.height);
        const cropped = jimp.crop({ x: 0, y: 0, w: min, h: min });
        img = cropped.resize({ w, h, mode: lib.jimp.ResizeStrategy.BILINEAR }).getBuffer('image/jpeg', { quality: 80 });
    }
    else {
        throw new Boom('No image processing library available');
    }
    return {
        img: await img
    };
};
/** gets the SHA256 of the given media message (base64-encoded), if present */
export const mediaMessageSHA256B64 = (message) => {
    const [media] = Object.values(message);
    if (!media?.fileSha256) {
        // preserve the original falsy value (undefined, null, ...)
        return media?.fileSha256;
    }
    return Buffer.from(media.fileSha256).toString('base64');
};
/**
 * Reads the duration of an audio input via the music-metadata package.
 * Accepts a Buffer, a file path string, or a readable stream.
 * @returns duration in seconds, or undefined if the parser could not determine it
 */
export async function getAudioDuration(buffer) {
    const musicMetadata = await import('music-metadata');
    let metadata;
    const options = {
        duration: true
    };
    if (Buffer.isBuffer(buffer)) {
        metadata = await musicMetadata.parseBuffer(buffer, undefined, options);
    }
    else if (typeof buffer === 'string') {
        // strings are treated as file paths
        metadata = await musicMetadata.parseFile(buffer, options);
    }
    else {
        metadata = await musicMetadata.parseStream(buffer, undefined, options);
    }
    return metadata.format.duration;
}
/**
referenced from and modifying https://github.com/wppconnect-team/wa-js/blob/main/src/chat/functions/prepareAudioWaveform.ts
*/
/**
 * Computes a 64-bucket waveform (values 0-100) for an audio input, in the
 * format WhatsApp uses for voice notes. Accepts a Buffer, a file path, or a
 * readable stream. Returns undefined (after logging) when decoding fails —
 * waveforms are best-effort and must never block sending the audio.
 */
export async function getAudioWaveform(buffer, logger) {
    try {
        // @ts-ignore
        const { default: decoder } = await import('audio-decode');
        let audioData;
        if (Buffer.isBuffer(buffer)) {
            audioData = buffer;
        }
        else if (typeof buffer === 'string') {
            // string input is a file path
            const rStream = createReadStream(buffer);
            audioData = await toBuffer(rStream);
        }
        else {
            audioData = await toBuffer(buffer);
        }
        const audioBuffer = await decoder(audioData);
        const rawData = audioBuffer.getChannelData(0); // We only need to work with one channel of data
        const samples = 64; // Number of samples we want to have in our final data set
        const blockSize = Math.floor(rawData.length / samples); // the number of samples in each subdivision
        const filteredData = [];
        for (let i = 0; i < samples; i++) {
            const blockStart = blockSize * i; // the location of the first sample in the block
            let sum = 0;
            for (let j = 0; j < blockSize; j++) {
                sum = sum + Math.abs(rawData[blockStart + j]); // find the sum of all the samples in the block
            }
            filteredData.push(sum / blockSize); // divide the sum by the block size to get the average
        }
        // This guarantees that the largest data point will be set to 1, and the rest of the data will scale proportionally.
        // NOTE(review): silent audio (max === 0) makes multiplier Infinity and the
        // scaled values NaN; Uint8Array coerces NaN to 0 below, so the result is
        // still a valid all-zero waveform.
        const multiplier = Math.pow(Math.max(...filteredData), -1);
        const normalizedData = filteredData.map(n => n * multiplier);
        // Generate waveform like WhatsApp
        const waveform = new Uint8Array(normalizedData.map(n => Math.floor(100 * n)));
        return waveform;
    }
    catch (e) {
        // deliberate swallow: log and return undefined instead of failing the send
        logger?.debug('Failed to generate waveform: ' + e);
    }
}
/** Wraps an in-memory buffer in a Readable stream that emits it once, then ends. */
export const toReadable = (buffer) => {
    const readable = new Readable({
        // no-op pull: all data is pushed up-front below
        read() { }
    });
    readable.push(buffer);
    readable.push(null); // signal end-of-stream
    return readable;
};
/** Drains a readable stream into a single Buffer, destroying the stream afterwards. */
export const toBuffer = async (stream) => {
    const collected = [];
    for await (const piece of stream) {
        collected.push(piece);
    }
    // release the underlying resource once fully consumed
    stream.destroy();
    return Buffer.concat(collected);
};
/**
 * Normalizes any WAMediaUpload input into a readable stream.
 *
 * @returns { stream, type } where type is one of:
 *   'buffer'   — input was a Buffer or a data: URL
 *   'readable' — input already carried a stream property
 *   'remote'   — input was an http(s) URL (fetched)
 *   'file'     — any other URL string is treated as a local file path
 */
export const getStream = async (item, opts) => {
    if (Buffer.isBuffer(item)) {
        return { stream: toReadable(item), type: 'buffer' };
    }
    if ('stream' in item) {
        return { stream: item.stream, type: 'readable' };
    }
    const urlStr = item.url.toString();
    if (urlStr.startsWith('data:')) {
        // inline base64 payload: everything after the first comma
        const buffer = Buffer.from(urlStr.split(',')[1], 'base64');
        return { stream: toReadable(buffer), type: 'buffer' };
    }
    if (urlStr.startsWith('http://') || urlStr.startsWith('https://')) {
        return { stream: await getHttpStream(item.url, opts), type: 'remote' };
    }
    return { stream: createReadStream(item.url), type: 'file' };
};
/** generates a thumbnail for a given media, if required */
export async function generateThumbnail(file, mediaType, options) {
    let thumbnail;
    let originalImageDimensions;
    if (mediaType === 'image') {
        const { buffer, original } = await extractImageThumb(file);
        thumbnail = buffer;
        // only report dimensions the library actually determined
        if (original.width && original.height) {
            originalImageDimensions = {
                width: original.width,
                height: original.height
            };
        }
    }
    else if (mediaType === 'video') {
        try {
            // grab the very first frame, scaled to 32px wide
            const buffer = await extractVideoThumb(file, '00:00:00', { width: 32, height: 32 });
            thumbnail = buffer;
        }
        catch (err) {
            // best-effort: a missing/broken ffmpeg must not fail the message send
            options.logger?.debug('could not generate video thumb: ' + err);
        }
    }
    // both fields may be undefined (e.g. for audio/document media types)
    return {
        thumbnail,
        originalImageDimensions
    };
}
/**
 * Fetches a URL and returns the response body as a Node Readable stream.
 * @param options may carry an undici dispatcher (proxy/agent) and extra headers
 * @throws Boom carrying the HTTP status code when the response is not ok
 */
export const getHttpStream = async (url, options = {}) => {
    const response = await fetch(url.toString(), {
        // undici-specific option; ignored by other fetch implementations
        dispatcher: options.dispatcher,
        method: 'GET',
        headers: options.headers
    });
    if (!response.ok) {
        throw new Boom(`Failed to fetch stream from ${url}`, { statusCode: response.status, data: { url } });
    }
    // @ts-ignore Node18+ Readable.fromWeb exists
    return response.body instanceof Readable ? response.body : Readable.fromWeb(response.body);
};
/**
 * Encrypts a media stream with the WhatsApp media scheme (AES-256-CBC with a
 * random media key, HMAC-SHA256 over IV + ciphertext truncated to 10 bytes),
 * writing the ciphertext to a temp file while computing all hashes in one pass.
 *
 * @param media any input accepted by getStream
 * @param mediaType selects the HKDF info string for key derivation
 * @param saveOriginalFileIfRequired also write the plaintext to a second temp file
 * @param opts may carry maxContentLength (enforced for remote streams only)
 * @returns { mediaKey, originalFilePath?, encFilePath, mac, fileEncSha256, fileSha256, fileLength }
 *          — caller owns (and must delete) the returned temp files
 * @throws rethrows stream/crypto errors after deleting the temp files
 */
export const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
    const { stream, type } = await getStream(media, opts);
    logger?.debug('fetched media stream');
    const mediaKey = Crypto.randomBytes(32);
    const { cipherKey, iv, macKey } = await getMediaKeys(mediaKey, mediaType);
    const encFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageIDV2() + '-enc');
    const encFileWriteStream = createWriteStream(encFilePath);
    let originalFileStream;
    let originalFilePath;
    if (saveOriginalFileIfRequired) {
        originalFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageIDV2() + '-original');
        originalFileStream = createWriteStream(originalFilePath);
    }
    let fileLength = 0;
    const aes = Crypto.createCipheriv('aes-256-cbc', cipherKey, iv);
    // the MAC covers the IV followed by all ciphertext
    const hmac = Crypto.createHmac('sha256', macKey).update(iv);
    const sha256Plain = Crypto.createHash('sha256');
    const sha256Enc = Crypto.createHash('sha256');
    const onChunk = async (buff) => {
        sha256Enc.update(buff);
        hmac.update(buff);
        // Handle backpressure: if write returns false, wait for drain
        if (!encFileWriteStream.write(buff)) {
            await once(encFileWriteStream, 'drain');
        }
    };
    try {
        for await (const data of stream) {
            // FIX: enforce the size limit BEFORE counting this chunk. The previous
            // version incremented fileLength first and then compared
            // `fileLength + data.length`, double-counting the current chunk and
            // rejecting streams one chunk too early.
            if (type === 'remote' &&
                opts?.maxContentLength &&
                fileLength + data.length > opts.maxContentLength) {
                throw new Boom(`content length exceeded when encrypting "${type}"`, {
                    data: { media, type }
                });
            }
            fileLength += data.length;
            if (originalFileStream) {
                if (!originalFileStream.write(data)) {
                    await once(originalFileStream, 'drain');
                }
            }
            sha256Plain.update(data);
            await onChunk(aes.update(data));
        }
        await onChunk(aes.final());
        // WhatsApp truncates the HMAC to its first 10 bytes
        const mac = hmac.digest().slice(0, 10);
        sha256Enc.update(mac);
        const fileSha256 = sha256Plain.digest();
        const fileEncSha256 = sha256Enc.digest();
        encFileWriteStream.write(mac);
        // register 'finish' listeners before ending the streams to avoid a race
        const encFinishPromise = once(encFileWriteStream, 'finish');
        const originalFinishPromise = originalFileStream ? once(originalFileStream, 'finish') : Promise.resolve();
        encFileWriteStream.end();
        originalFileStream?.end?.();
        stream.destroy();
        // Wait for write streams to fully flush to disk
        // This helps reduce memory pressure by allowing OS to release buffers
        await encFinishPromise;
        await originalFinishPromise;
        logger?.debug('encrypted data successfully');
        return {
            mediaKey,
            originalFilePath,
            encFilePath,
            mac,
            fileEncSha256,
            fileSha256,
            fileLength
        };
    }
    catch (error) {
        // destroy all streams with error
        encFileWriteStream.destroy();
        originalFileStream?.destroy?.();
        aes.destroy();
        hmac.destroy();
        sha256Plain.destroy();
        sha256Enc.destroy();
        stream.destroy();
        try {
            await fs.unlink(encFilePath);
            if (originalFilePath) {
                await fs.unlink(originalFilePath);
            }
        }
        catch (err) {
            logger?.error({ err }, 'failed deleting tmp files');
        }
        throw error;
    }
};
// Host that serves WhatsApp media downloads.
const DEF_HOST = 'mmg.whatsapp.net';
// AES block size in bytes; ranged downloads must be aligned to this.
const AES_CHUNK_SIZE = 16;
/** Rounds num down to the nearest multiple of the AES block size. */
const toSmallestChunkSize = (num) => Math.floor(num / AES_CHUNK_SIZE) * AES_CHUNK_SIZE;
/** Builds a full media download URL from a relative direct path. */
export const getUrlFromDirectPath = (directPath) => `https://${DEF_HOST}${directPath}`;
/**
 * Downloads and decrypts the media referenced by a message.
 * Prefers the message's own URL when it already points at the WA media host,
 * otherwise rebuilds a URL from directPath.
 *
 * @throws Boom(400) when neither a valid URL nor a directPath is available
 */
export const downloadContentFromMessage = async ({ mediaKey, directPath, url }, type, opts = {}) => {
    const isValidMediaUrl = url?.startsWith('https://mmg.whatsapp.net/');
    // FIX: previously getUrlFromDirectPath was called even with an undefined
    // directPath, producing a truthy garbage URL ("https://mmg.whatsapp.netundefined")
    // that made the guard below unreachable; only build the fallback URL when
    // directPath is actually present
    const downloadUrl = isValidMediaUrl ? url : directPath ? getUrlFromDirectPath(directPath) : undefined;
    if (!downloadUrl) {
        throw new Boom('No valid media URL or directPath present in message', { statusCode: 400 });
    }
    const keys = await getMediaKeys(mediaKey, type);
    return downloadEncryptedContent(downloadUrl, keys, opts);
};
/**
 * Decrypts and downloads an AES256-CBC encrypted file given the keys.
 * Assumes the SHA256 of the plaintext is appended to the end of the ciphertext
 *
 * Supports partial downloads via startByte/endByte: the request is aligned to
 * AES block boundaries with an HTTP Range header, the block preceding the
 * requested range is used as the IV, and pushBytes trims the decrypted output
 * back to the exact byte range the caller asked for.
 * */
export const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startByte, endByte, options } = {}) => {
    let bytesFetched = 0;
    let startChunk = 0;
    let firstBlockIsIV = false;
    // if a start byte is specified -- then we need to fetch the previous chunk as that will form the IV
    if (startByte) {
        const chunk = toSmallestChunkSize(startByte || 0);
        if (chunk) {
            startChunk = chunk - AES_CHUNK_SIZE;
            bytesFetched = chunk;
            firstBlockIsIV = true;
        }
    }
    // fetch one extra block past endByte so the last partial block can be decrypted
    const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined;
    const headersInit = options?.headers ? options.headers : undefined;
    // normalize caller headers (array-of-pairs or object) and force the WA Origin
    const headers = {
        ...(headersInit
            ? Array.isArray(headersInit)
                ? Object.fromEntries(headersInit)
                : headersInit
            : {}),
        Origin: DEFAULT_ORIGIN
    };
    if (startChunk || endChunk) {
        headers.Range = `bytes=${startChunk}-`;
        if (endChunk) {
            headers.Range += endChunk;
        }
    }
    // download the message
    const fetched = await getHttpStream(downloadUrl, {
        ...(options || {}),
        headers
    });
    // carry-over for bytes that do not fill a whole AES block yet
    let remainingBytes = Buffer.from([]);
    let aes;
    // pushes decrypted bytes downstream, trimmed to [startByte, endByte)
    const pushBytes = (bytes, push) => {
        if (startByte || endByte) {
            const start = bytesFetched >= startByte ? undefined : Math.max(startByte - bytesFetched, 0);
            const end = bytesFetched + bytes.length < endByte ? undefined : Math.max(endByte - bytesFetched, 0);
            push(bytes.slice(start, end));
            bytesFetched += bytes.length;
        }
        else {
            push(bytes);
        }
    };
    const output = new Transform({
        transform(chunk, _, callback) {
            let data = Buffer.concat([remainingBytes, chunk]);
            // only decrypt whole AES blocks; keep the tail for the next call
            const decryptLength = toSmallestChunkSize(data.length);
            remainingBytes = data.slice(decryptLength);
            data = data.slice(0, decryptLength);
            if (!aes) {
                let ivValue = iv;
                if (firstBlockIsIV) {
                    // ranged download: the first fetched block is the IV, not payload
                    ivValue = data.slice(0, AES_CHUNK_SIZE);
                    data = data.slice(AES_CHUNK_SIZE);
                }
                aes = Crypto.createDecipheriv('aes-256-cbc', cipherKey, ivValue);
                // if an end byte that is not EOF is specified
                // stop auto padding (PKCS7) -- otherwise throws an error for decryption
                if (endByte) {
                    aes.setAutoPadding(false);
                }
            }
            try {
                pushBytes(aes.update(data), b => this.push(b));
                callback();
            }
            catch (error) {
                callback(error);
            }
        },
        final(callback) {
            try {
                pushBytes(aes.final(), b => this.push(b));
                callback();
            }
            catch (error) {
                callback(error);
            }
        }
    });
    return fetched.pipe(output, { end: true });
};
/**
 * Infers a file extension for a media message. Location and product messages
 * always carry a JPEG thumbnail; everything else derives the extension from
 * the content's mimetype (the subtype of "type/subtype", parameters stripped).
 */
export function extensionForMediaMessage(message) {
    const jpegTypes = new Set(['locationMessage', 'liveLocationMessage', 'productMessage']);
    const [type] = Object.keys(message);
    if (jpegTypes.has(type)) {
        return '.jpeg';
    }
    const { mimetype } = message[type];
    return mimetype.split(';')[0]?.split('/')[1];
}
/**
 * True when running on plain Node.js — i.e. a `process.versions.node` string
 * exists and neither the Bun marker (`process.versions.bun`) nor the Deno
 * global is present.
 */
const isNodeRuntime = () => {
    // FIX: the previous check was `process.versions?.node !== null`, which is
    // always true because versions.node is a string or undefined, never null;
    // an explicit string check makes the Node detection actually meaningful
    return (typeof process !== 'undefined' &&
        typeof process.versions?.node === 'string' &&
        typeof process.versions.bun === 'undefined' &&
        typeof globalThis.Deno === 'undefined');
};
/**
 * Streams a file to `url` via Node's http/https modules (POST), following up
 * to 5 redirects. Used instead of fetch on Node because undici buffers
 * streamed request bodies in memory (see the comment above uploadMedia).
 *
 * @returns the parsed JSON response body, or undefined when it is not JSON
 * @throws Error on too many redirects, request error, or timeout
 */
export const uploadWithNodeHttp = async ({ url, filePath, headers, timeoutMs, agent }, redirectCount = 0) => {
    if (redirectCount > 5) {
        throw new Error('Too many redirects');
    }
    const parsedUrl = new URL(url);
    const httpModule = parsedUrl.protocol === 'https:' ? await import('https') : await import('http');
    // Get file size for Content-Length header (required for Node.js streaming)
    const fileStats = await fs.stat(filePath);
    const fileSize = fileStats.size;
    return new Promise((resolve, reject) => {
        const req = httpModule.request({
            hostname: parsedUrl.hostname,
            port: parsedUrl.port || (parsedUrl.protocol === 'https:' ? 443 : 80),
            path: parsedUrl.pathname + parsedUrl.search,
            method: 'POST',
            headers: {
                ...headers,
                'Content-Length': fileSize
            },
            agent,
            timeout: timeoutMs
        }, res => {
            // Handle redirects (3xx)
            if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
                res.resume(); // Consume response to free resources
                // resolve relative redirect targets against the current URL
                const newUrl = new URL(res.headers.location, url).toString();
                resolve(uploadWithNodeHttp({
                    url: newUrl,
                    filePath,
                    headers,
                    timeoutMs,
                    agent
                }, redirectCount + 1));
                return;
            }
            let body = '';
            res.on('data', chunk => (body += chunk));
            res.on('end', () => {
                try {
                    resolve(JSON.parse(body));
                }
                catch {
                    // a non-JSON response is treated as "no result"
                    resolve(undefined);
                }
            });
        });
        req.on('error', reject);
        req.on('timeout', () => {
            req.destroy();
            reject(new Error('Upload timeout'));
        });
        const stream = createReadStream(filePath);
        stream.pipe(req);
        stream.on('error', err => {
            req.destroy();
            reject(err);
        });
    });
};
/**
 * Streams a file to `url` with the web-standard Fetch API — used on runtimes
 * (non-Node) whose fetch streams request bodies without buffering.
 * @returns the parsed JSON response body, or undefined when it is not JSON
 */
const uploadWithFetch = async ({ url, filePath, headers, timeoutMs, agent }) => {
    // Convert Node.js Readable to Web ReadableStream
    const nodeStream = createReadStream(filePath);
    const webStream = Readable.toWeb(nodeStream);
    const response = await fetch(url, {
        // undici-specific option; ignored by other fetch implementations
        dispatcher: agent,
        method: 'POST',
        body: webStream,
        headers,
        // required by the fetch spec when sending a streamed request body
        duplex: 'half',
        signal: timeoutMs ? AbortSignal.timeout(timeoutMs) : undefined
    });
    try {
        return (await response.json());
    }
    catch {
        // a non-JSON response is treated as "no result"
        return undefined;
    }
};
/**
 * Uploads media to WhatsApp servers, picking the transport per runtime.
 *
 * Two implementations exist because Node's built-in fetch (undici) buffers the
 * whole request body in memory even for streamed bodies, which blows up memory
 * usage for large files (e.g. a 1GB file costs 1GB+ of RAM).
 * See: https://github.com/nodejs/undici/issues/4058
 *
 * Other runtimes (Bun, Deno, browsers) stream fetch request bodies correctly,
 * so the web-standard Fetch API is used there. Once the undici bug is fixed,
 * this can collapse to a single fetch-based implementation — monitor the
 * GitHub issue for updates.
 */
const uploadMedia = async (params, logger) => {
    if (!isNodeRuntime()) {
        logger?.debug('Using web-standard Fetch API for upload');
        return uploadWithFetch(params);
    }
    logger?.debug('Using Node.js https module for upload (avoids undici buffering bug)');
    return uploadWithNodeHttp(params);
};
/**
 * Creates the media upload function used by the socket: uploads an encrypted
 * media file to the WA upload hosts, trying each host in order (custom hosts
 * first, then the hosts returned by the media connection) until one succeeds.
 *
 * @param refreshMediaConn fetches (pass true to force-refresh) media conn info
 * @returns async (filePath, { mediaType, fileEncSha256B64, timeoutMs, newsletter }) => urls
 * @throws Boom(500) when every host fails
 */
export const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options }, refreshMediaConn) => {
    return async (filePath, { mediaType, fileEncSha256B64, timeoutMs, newsletter }) => {
        // send a query JSON to obtain the url & auth token to upload our media
        let uploadInfo = await refreshMediaConn(false);
        let urls;
        const hosts = [...customUploadHosts, ...uploadInfo.hosts];
        fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64);
        // Prepare common headers
        const customHeaders = (() => {
            const hdrs = options?.headers;
            if (!hdrs)
                return {};
            return Array.isArray(hdrs) ? Object.fromEntries(hdrs) : hdrs;
        })();
        const headers = {
            ...customHeaders,
            'Content-Type': 'application/octet-stream',
            Origin: DEFAULT_ORIGIN
        };
        for (const { hostname } of hosts) {
            logger.debug(`uploading to "${hostname}"`);
            const auth = encodeURIComponent(uploadInfo.auth);
            // Lia@Changes 06-02-26 --- Switch media path map for newsletter uploads
            const mediaPathMap = newsletter ? NEWSLETTER_MEDIA_PATH_MAP : MEDIA_PATH_MAP;
            const url = `https://${hostname}${mediaPathMap[mediaType]}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`;
            let result;
            try {
                result = await uploadMedia({
                    url,
                    filePath,
                    headers,
                    timeoutMs,
                    agent: fetchAgent
                }, logger);
                if (result?.url || result?.direct_path) {
                    urls = {
                        mediaUrl: result.url,
                        directPath: result.direct_path,
                        meta_hmac: result.meta_hmac,
                        fbid: result.fbid,
                        ts: result.ts,
                        handle: result.handle
                    };
                    break;
                }
                else {
                    // server rejected the upload: refresh auth before the next host
                    uploadInfo = await refreshMediaConn(true);
                    throw new Error(`upload failed, reason: ${JSON.stringify(result)}`);
                }
            }
            catch (error) {
                // FIX: compare against the last entry of the combined host list.
                // Previously this indexed `hosts` with `uploadInfo.hosts.length - 1`,
                // which points at the wrong element whenever customUploadHosts is
                // non-empty, so the "retrying..." suffix was logged incorrectly.
                const isLast = hostname === hosts[hosts.length - 1]?.hostname;
                logger.warn({ trace: error?.stack, uploadResult: result }, `Error in uploading to ${hostname} ${isLast ? '' : ', retrying...'}`);
            }
        }
        if (!urls) {
            throw new Boom('Media upload failed on all hosts', { statusCode: 500 });
        }
        return urls;
    };
};
/** Derives the 32-byte key used to encrypt media-retry receipts from the media key. */
const getMediaRetryKey = (mediaKey) => hkdf(mediaKey, 32, { info: 'WhatsApp Media Retry Notification' });
/**
 * Generate a binary node that will request the phone to re-upload the media & return the newly uploaded URL
 *
 * @param key the message key (id / remoteJid / fromMe / participant) of the media message
 * @param mediaKey the original media key; a retry key is derived from it via HKDF
 * @param meId our own JID (receipt is addressed to ourselves, normalized)
 * @returns a 'receipt' binary node of type 'server-error'
 */
export const encryptMediaRetryRequest = (key, mediaKey, meId) => {
    const recp = { stanzaId: key.id };
    const recpBuffer = proto.ServerErrorReceipt.encode(recp).finish();
    // 12-byte IV for AES-GCM; the message id doubles as the auth data
    const iv = Crypto.randomBytes(12);
    const retryKey = getMediaRetryKey(mediaKey);
    const ciphertext = aesEncryptGCM(recpBuffer, retryKey, iv, Buffer.from(key.id));
    const req = {
        tag: 'receipt',
        attrs: {
            id: key.id,
            to: jidNormalizedUser(meId),
            type: 'server-error'
        },
        content: [
            // this encrypt node is actually pretty useless
            // the media is returned even without this node
            // keeping it here to maintain parity with WA Web
            {
                tag: 'encrypt',
                attrs: {},
                content: [
                    { tag: 'enc_p', attrs: {}, content: ciphertext },
                    { tag: 'enc_iv', attrs: {}, content: iv }
                ]
            },
            {
                tag: 'rmr',
                attrs: {
                    jid: key.remoteJid,
                    from_me: (!!key.fromMe).toString(),
                    // @ts-ignore
                    participant: key.participant || undefined
                }
            }
        ]
    };
    return req;
};
/**
 * Parses an incoming media-retry notification node into an event object:
 * always carries the original message key, plus either the re-uploaded media
 * ciphertext/iv (to be decrypted with decryptMediaRetryData) or an error.
 */
export const decodeMediaRetryNode = (node) => {
    const rmrNode = getBinaryNodeChild(node, 'rmr');
    const event = {
        key: {
            id: node.attrs.id,
            remoteJid: rmrNode.attrs.jid,
            fromMe: rmrNode.attrs.from_me === 'true',
            participant: rmrNode.attrs.participant
        }
    };
    const errorNode = getBinaryNodeChild(node, 'error');
    if (errorNode) {
        // the server reported a failure; map its code to an HTTP-like status
        const errorCode = +errorNode.attrs.code;
        event.error = new Boom(`Failed to re-upload media (${errorCode})`, {
            data: errorNode.attrs,
            statusCode: getStatusCodeForMediaRetry(errorCode)
        });
    }
    else {
        const encryptedInfoNode = getBinaryNodeChild(node, 'encrypt');
        const ciphertext = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_p');
        const iv = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_iv');
        if (ciphertext && iv) {
            event.media = { ciphertext, iv };
        }
        else {
            event.error = new Boom('Failed to re-upload media (missing ciphertext)', { statusCode: 404 });
        }
    }
    return event;
};
/**
 * Decrypts the payload of a media-retry notification: AES-GCM with the key
 * derived from the original media key and the message id as auth data.
 * @returns the decoded proto.MediaRetryNotification
 */
export const decryptMediaRetryData = ({ ciphertext, iv }, mediaKey, msgId) => {
    const retryKey = getMediaRetryKey(mediaKey);
    const plaintext = aesDecryptGCM(ciphertext, retryKey, iv, Buffer.from(msgId));
    return proto.MediaRetryNotification.decode(plaintext);
};
/** Maps a MediaRetryNotification result code to an HTTP-like status code. */
export const getStatusCodeForMediaRetry = (code) => MEDIA_RETRY_STATUS_MAP[code];
// defined after the function above; module evaluation initializes the const
// before any export can be invoked, so there is no TDZ issue in practice
const MEDIA_RETRY_STATUS_MAP = {
    [proto.MediaRetryNotification.ResultType.SUCCESS]: 200,
    [proto.MediaRetryNotification.ResultType.DECRYPTION_ERROR]: 412,
    [proto.MediaRetryNotification.ResultType.NOT_FOUND]: 404,
    [proto.MediaRetryNotification.ResultType.GENERAL_ERROR]: 418
};