@sgintokic/baileys 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of @sgintokic/baileys might be problematic. Click here for more details.

Files changed (106) hide show
  1. package/LICENSE +22 -0
  2. package/README.md +1097 -0
  3. package/WAProto/index.js +2 -0
  4. package/engine-requirements.js +1 -0
  5. package/lib/Defaults/index.js +155 -0
  6. package/lib/Signal/Group/ciphertext-message.js +11 -0
  7. package/lib/Signal/Group/group-session-builder.js +41 -0
  8. package/lib/Signal/Group/group_cipher.js +108 -0
  9. package/lib/Signal/Group/index.js +11 -0
  10. package/lib/Signal/Group/keyhelper.js +14 -0
  11. package/lib/Signal/Group/sender-chain-key.js +31 -0
  12. package/lib/Signal/Group/sender-key-distribution-message.js +66 -0
  13. package/lib/Signal/Group/sender-key-message.js +79 -0
  14. package/lib/Signal/Group/sender-key-name.js +49 -0
  15. package/lib/Signal/Group/sender-key-record.js +46 -0
  16. package/lib/Signal/Group/sender-key-state.js +104 -0
  17. package/lib/Signal/Group/sender-message-key.js +29 -0
  18. package/lib/Signal/libsignal.js +485 -0
  19. package/lib/Signal/lid-mapping.js +291 -0
  20. package/lib/Socket/Client/index.js +2 -0
  21. package/lib/Socket/Client/types.js +10 -0
  22. package/lib/Socket/Client/websocket.js +64 -0
  23. package/lib/Socket/business.js +293 -0
  24. package/lib/Socket/chats.js +1068 -0
  25. package/lib/Socket/communities.js +476 -0
  26. package/lib/Socket/groups.js +383 -0
  27. package/lib/Socket/index.js +8 -0
  28. package/lib/Socket/messages-recv.js +1830 -0
  29. package/lib/Socket/messages-send.js +1462 -0
  30. package/lib/Socket/mex.js +55 -0
  31. package/lib/Socket/newsletter.js +277 -0
  32. package/lib/Socket/socket.js +1087 -0
  33. package/lib/Store/index.js +3 -0
  34. package/lib/Store/make-in-memory-store.js +517 -0
  35. package/lib/Store/make-ordered-dictionary.js +75 -0
  36. package/lib/Store/object-repository.js +23 -0
  37. package/lib/Types/Auth.js +1 -0
  38. package/lib/Types/Bussines.js +1 -0
  39. package/lib/Types/Call.js +1 -0
  40. package/lib/Types/Chat.js +7 -0
  41. package/lib/Types/Contact.js +1 -0
  42. package/lib/Types/Events.js +1 -0
  43. package/lib/Types/GroupMetadata.js +1 -0
  44. package/lib/Types/Label.js +24 -0
  45. package/lib/Types/LabelAssociation.js +6 -0
  46. package/lib/Types/Message.js +18 -0
  47. package/lib/Types/Newsletter.js +33 -0
  48. package/lib/Types/Product.js +1 -0
  49. package/lib/Types/Signal.js +1 -0
  50. package/lib/Types/Socket.js +2 -0
  51. package/lib/Types/State.js +15 -0
  52. package/lib/Types/USync.js +1 -0
  53. package/lib/Types/index.js +31 -0
  54. package/lib/Utils/auth-utils.js +293 -0
  55. package/lib/Utils/browser-utils.js +32 -0
  56. package/lib/Utils/business.js +245 -0
  57. package/lib/Utils/chat-utils.js +959 -0
  58. package/lib/Utils/crypto.js +133 -0
  59. package/lib/Utils/decode-wa-message.js +376 -0
  60. package/lib/Utils/event-buffer.js +620 -0
  61. package/lib/Utils/generics.js +417 -0
  62. package/lib/Utils/history.js +150 -0
  63. package/lib/Utils/identity-change-handler.js +63 -0
  64. package/lib/Utils/index.js +21 -0
  65. package/lib/Utils/link-preview.js +91 -0
  66. package/lib/Utils/logger.js +2 -0
  67. package/lib/Utils/lt-hash.js +6 -0
  68. package/lib/Utils/make-mutex.js +31 -0
  69. package/lib/Utils/message-retry-manager.js +240 -0
  70. package/lib/Utils/messages-media.js +901 -0
  71. package/lib/Utils/messages.js +2052 -0
  72. package/lib/Utils/noise-handler.js +229 -0
  73. package/lib/Utils/offline-node-processor.js +50 -0
  74. package/lib/Utils/pre-key-manager.js +119 -0
  75. package/lib/Utils/process-message.js +641 -0
  76. package/lib/Utils/reporting-utils.js +346 -0
  77. package/lib/Utils/signal.js +188 -0
  78. package/lib/Utils/stanza-ack.js +33 -0
  79. package/lib/Utils/sync-action-utils.js +53 -0
  80. package/lib/Utils/tc-token-utils.js +15 -0
  81. package/lib/Utils/use-multi-file-auth-state.js +116 -0
  82. package/lib/Utils/use-single-file-auth-state.js +94 -0
  83. package/lib/Utils/validate-connection.js +235 -0
  84. package/lib/WABinary/constants.js +1300 -0
  85. package/lib/WABinary/decode.js +258 -0
  86. package/lib/WABinary/encode.js +219 -0
  87. package/lib/WABinary/generic-utils.js +203 -0
  88. package/lib/WABinary/index.js +5 -0
  89. package/lib/WABinary/jid-utils.js +93 -0
  90. package/lib/WABinary/types.js +1 -0
  91. package/lib/WAM/BinaryInfo.js +9 -0
  92. package/lib/WAM/constants.js +20669 -0
  93. package/lib/WAM/encode.js +151 -0
  94. package/lib/WAM/index.js +3 -0
  95. package/lib/WAUSync/Protocols/USyncContactProtocol.js +21 -0
  96. package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +50 -0
  97. package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +20 -0
  98. package/lib/WAUSync/Protocols/USyncStatusProtocol.js +29 -0
  99. package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +59 -0
  100. package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +21 -0
  101. package/lib/WAUSync/Protocols/index.js +4 -0
  102. package/lib/WAUSync/USyncQuery.js +103 -0
  103. package/lib/WAUSync/USyncUser.js +22 -0
  104. package/lib/WAUSync/index.js +3 -0
  105. package/lib/index.js +11 -0
  106. package/package.json +58 -0
@@ -0,0 +1,901 @@
1
+ import { Boom } from "@hapi/boom";
2
+ import { spawn } from "child_process";
3
+ import * as Crypto from "crypto";
4
+ import { once } from "events";
5
+ import {
6
+ createReadStream,
7
+ createWriteStream,
8
+ promises as fs,
9
+ WriteStream,
10
+ } from "fs";
11
+ import { tmpdir } from "os";
12
+ import { join } from "path";
13
+ import { Readable, Transform } from "stream";
14
+ import { URL } from "url";
15
+ import { proto } from "../../WAProto/index.js";
16
+ import {
17
+ DEFAULT_ORIGIN,
18
+ MEDIA_HKDF_KEY_MAPPING,
19
+ MEDIA_PATH_MAP,
20
+ NEWSLETTER_MEDIA_PATH_MAP,
21
+ } from "../Defaults/index.js";
22
+ import {
23
+ getBinaryNodeChild,
24
+ getBinaryNodeChildBuffer,
25
+ jidNormalizedUser,
26
+ } from "../WABinary/index.js";
27
+ import { aesDecryptGCM, aesEncryptGCM, hkdf } from "./crypto.js";
28
+ import { generateMessageIDV2 } from "./generics.js";
29
// Resolve the directory used for intermediate media files.
const getTmpFilesDirectory = () => tmpdir();
// Module-level cache so the dynamic-import probing below runs only once.
let imageProcessingLibrary;
/**
 * Lazily detects which optional image library is installed.
 * Probes sharp, then @napi-rs/image, then jimp (in that priority order)
 * and caches the first one that imports successfully.
 * @throws {Boom} when none of the three libraries can be imported
 */
export const getImageProcessingLibrary = async () => {
  if (!imageProcessingLibrary) {
    // each probe resolves to undefined when the package is not installed
    //@ts-ignore
    const [sharp, image, jimp] = await Promise.all([
      import("sharp").catch(() => {}),
      import("@napi-rs/image").catch(() => {}),
      import("jimp").catch(() => {}),
    ]);
    if (sharp) {
      imageProcessingLibrary = { sharp };
    } else if (image) {
      imageProcessingLibrary = { image };
    } else if (jimp) {
      imageProcessingLibrary = { jimp };
    } else {
      throw new Boom("No image processing library available");
    }
  }
  return imageProcessingLibrary;
};
52
/**
 * Builds the HKDF "info" string for a media type,
 * e.g. "image" -> "WhatsApp Image Keys".
 */
export const hkdfInfoKey = (type) =>
  `WhatsApp ${MEDIA_HKDF_KEY_MAPPING[type]} Keys`;
56
/**
 * Streams `media` to a temp file while hashing it, for unencrypted uploads.
 * @returns the temp file path, its SHA-256 digest and total byte length
 * On any failure the partial temp file is removed (best effort) and the
 * error is rethrown.
 */
export const getRawMediaUploadData = async (media, mediaType, logger) => {
  const { stream } = await getStream(media);
  logger?.debug("got stream for raw upload");
  const hasher = Crypto.createHash("sha256");
  // unique temp path; generateMessageIDV2() keeps concurrent uploads apart
  const filePath = join(tmpdir(), mediaType + generateMessageIDV2());
  const fileWriteStream = createWriteStream(filePath);
  let fileLength = 0;
  try {
    for await (const data of stream) {
      fileLength += data.length;
      hasher.update(data);
      // respect backpressure: wait for "drain" when the write buffer is full
      if (!fileWriteStream.write(data)) {
        await once(fileWriteStream, "drain");
      }
    }
    fileWriteStream.end();
    await once(fileWriteStream, "finish");
    stream.destroy();
    const fileSha256 = hasher.digest();
    logger?.debug("hashed data for raw upload");
    return {
      filePath: filePath,
      fileSha256: fileSha256,
      fileLength: fileLength,
    };
  } catch (error) {
    fileWriteStream.destroy();
    stream.destroy();
    try {
      await fs.unlink(filePath);
    } catch {
      // best-effort cleanup; the file may never have been created
    }
    throw error;
  }
};
92
/**
 * Generates all the keys required to encrypt/decrypt & sign a media message.
 * @param {Uint8Array|string} buffer - the media key (or its base64 string form)
 * @param {string} mediaType - selects the HKDF info string via hkdfInfoKey
 * @returns {{iv, cipherKey, macKey}} 16-byte IV, 32-byte AES key, 32-byte HMAC key
 * @throws {Boom} when no media key is supplied
 */
export async function getMediaKeys(buffer, mediaType) {
  if (!buffer) {
    throw new Boom("Cannot derive from empty media key");
  }
  if (typeof buffer === "string") {
    buffer = Buffer.from(buffer.replace("data:;base64,", ""), "base64");
  }
  // expand using HKDF to 112 bytes, also pass in the relevant app info.
  // fix: `await` the result — if this hkdf implementation returns a Promise
  // (as the upstream async hkdf does), slicing it directly would fail;
  // awaiting a synchronous return value is a harmless no-op.
  const expandedMediaKey = await hkdf(buffer, 112, {
    info: hkdfInfoKey(mediaType),
  });
  return {
    iv: expandedMediaKey.slice(0, 16),
    cipherKey: expandedMediaKey.slice(16, 48),
    macKey: expandedMediaKey.slice(48, 80),
  };
}
110
/**
 * Extracts video thumb using FFMPEG.
 * @param path  input file path (passed straight to ffmpeg's -i)
 * @param time  seek position, e.g. "00:00:00"
 * @param size  { width } — frame is scaled to this width with proportional
 *              height (the ":-1" scale filter); size.height is not used
 * @returns Buffer containing the single JPEG frame read from ffmpeg stdout
 * @throws {Boom} with ffmpeg's stderr output when the process exits non-zero
 */
export const extractVideoThumb = async (path, time, size) => {
  const ffmpeg = spawn(
    "ffmpeg",
    [
      "-loglevel",
      "error",
      "-ss",
      String(time),
      "-i",
      path,
      "-an", // drop audio
      "-sn", // drop subtitles
      "-dn", // drop data streams
      "-map_metadata",
      "-1", // strip all metadata
      "-vf",
      `scale=${size.width}:-1`,
      "-frames:v",
      "1", // emit a single frame
      "-c:v",
      "mjpeg",
      "-f",
      "image2pipe",
      "pipe:1", // write the JPEG to stdout
    ],
    { stdio: ["ignore", "pipe", "pipe"] },
  );
  let buffer = Buffer.alloc(0);
  const stderrChunks = [];
  ffmpeg.stdout.on("data", (chunk) => {
    buffer = Buffer.concat([buffer, chunk]);
  });
  // collect stderr so a failure can report ffmpeg's own error text
  ffmpeg.stderr.on("data", (chunk) => stderrChunks.push(chunk));
  const [code] = await once(ffmpeg, "close");
  if (code !== 0) {
    throw new Boom(
      `FFmpeg failed (code ${code}):\n` +
        Buffer.concat(stderrChunks).toString("utf8"),
    );
  }
  return buffer;
};
153
/**
 * Produces a small JPEG thumbnail (quality 50) of the given image.
 * Accepts a Buffer, a file path, or a Readable (buffered into memory first).
 * Uses whichever library getImageProcessingLibrary() detected.
 * @param width  target thumbnail width in px (default 32)
 * @returns { buffer, original: { width, height } } — original source
 *          dimensions (fields may be undefined depending on the decoder)
 * @throws {Boom} when no image processing library is available
 */
export const extractImageThumb = async (bufferOrFilePath, width = 32) => {
  // TODO: Move entirely to sharp, removing jimp as it supports readable streams
  // This will have positive speed and performance impacts as well as minimizing RAM usage.
  if (bufferOrFilePath instanceof Readable) {
    bufferOrFilePath = await toBuffer(bufferOrFilePath);
  }
  const lib = await getImageProcessingLibrary();
  if ("sharp" in lib && lib.sharp?.default) {
    // sharp accepts both Buffers and file paths directly
    const img = lib.sharp.default(bufferOrFilePath);
    const dimensions = await img.metadata();
    const buffer = await img.resize(width).jpeg({ quality: 50 }).toBuffer();
    return {
      buffer: buffer,
      original: { width: dimensions.width, height: dimensions.height },
    };
  } else if ("image" in lib && lib.image?.Transformer) {
    // @napi-rs/image only takes Buffers, so read file paths into memory
    if (!Buffer.isBuffer(bufferOrFilePath)) {
      bufferOrFilePath = await fs.readFile(bufferOrFilePath);
    }
    const img = new lib.image.Transformer(bufferOrFilePath);
    const dimensions = await img.metadata();
    const buffer = await img.resize(width, undefined, 0).jpeg(50);
    return {
      buffer: buffer,
      original: { width: dimensions.width, height: dimensions.height },
    };
  } else if ("jimp" in lib && lib.jimp?.Jimp) {
    const jimp = await lib.jimp.Jimp.read(bufferOrFilePath);
    const dimensions = { width: jimp.width, height: jimp.height };
    const buffer = await jimp
      .resize({ w: width, mode: lib.jimp.ResizeStrategy.BILINEAR })
      .getBuffer("image/jpeg", { quality: 50 });
    return { buffer: buffer, original: dimensions };
  } else {
    throw new Boom("No image processing library available");
  }
};
190
/**
 * Converts a standard base64 string into the URL-safe form the upload
 * endpoints expect: '+' -> '-', '/' -> '_', trailing '=' padding stripped,
 * then percent-encoded for embedding in a URL.
 */
export const encodeBase64EncodedStringForUpload = (b64) => {
  const urlSafe = b64
    .replaceAll("+", "-")
    .replaceAll("/", "_")
    .replace(/=+$/, "");
  return encodeURIComponent(urlSafe);
};
194
/**
 * Renders a profile-picture JPEG (quality 80) from any media source.
 * @param dimensions  optional { width, height }; defaults to 720x720
 * @returns {{ img: Buffer }}
 * @throws {Boom} when no image processing library is available
 */
export const generateProfilePicture = async (mediaUpload, dimensions) => {
  let buffer;
  const { width: w = 720, height: h = 720 } = dimensions || {};
  if (Buffer.isBuffer(mediaUpload)) {
    buffer = mediaUpload;
  } else {
    // Use getStream to handle all WAMediaUpload types (Buffer, Stream, URL)
    const { stream } = await getStream(mediaUpload);
    // Convert the resulting stream to a buffer
    buffer = await toBuffer(stream);
  }
  const lib = await getImageProcessingLibrary();
  let img;
  if ("sharp" in lib && lib.sharp?.default) {
    img = lib.sharp
      .default(buffer)
      .resize(w, h)
      .jpeg({ quality: 80 })
      .toBuffer();
  } else if ("image" in lib && lib.image?.Transformer) {
    img = new lib.image.Transformer(buffer).resize(w, h, 0).jpeg(80);
  } else if ("jimp" in lib && lib.jimp?.Jimp) {
    const jimp = await lib.jimp.Jimp.read(buffer);
    const min = Math.min(jimp.width, jimp.height);
    // crop the top-left square first so the resize does not distort
    const cropped = jimp.crop({ x: 0, y: 0, w: min, h: min });
    img = cropped
      .resize({ w: w, h: h, mode: lib.jimp.ResizeStrategy.BILINEAR })
      .getBuffer("image/jpeg", { quality: 80 });
  } else {
    throw new Boom("No image processing library available");
  }
  // every branch above leaves `img` as a Promise/thenable of a Buffer
  return { img: await img };
};
227
/**
 * gets the SHA256 of the given media message, base64-encoded.
 * Returns undefined when the message has no media content or no fileSha256.
 */
export const mediaMessageSHA256B64 = (message) => {
  const [media] = Object.values(message);
  return media?.fileSha256 && Buffer.from(media.fileSha256).toString("base64");
};
234
/**
 * Reads the duration (in seconds) of an audio file via music-metadata.
 * Accepts a Buffer, a file path string, or a readable stream.
 */
export async function getAudioDuration(buffer) {
  const musicMetadata = await import("music-metadata");
  const options = { duration: true };
  // dispatch on the input's shape: buffer, path, or stream
  const metadata = Buffer.isBuffer(buffer)
    ? await musicMetadata.parseBuffer(buffer, undefined, options)
    : typeof buffer === "string"
      ? await musicMetadata.parseFile(buffer, options)
      : await musicMetadata.parseStream(buffer, undefined, options);
  return metadata.format.duration;
}
247
/**
referenced from and modifying https://github.com/wppconnect-team/wa-js/blob/main/src/chat/functions/prepareAudioWaveform.ts

Computes a 64-bucket amplitude waveform (values 0..100, Uint8Array) like
the one WhatsApp renders for voice notes. Accepts a Buffer, a file path,
or a readable stream. Returns undefined on any decode failure — errors
are only logged, never thrown.
*/ export async function getAudioWaveform(buffer, logger) {
  try {
    // @ts-ignore
    const { default: decoder } = await import("audio-decode");
    let audioData;
    if (Buffer.isBuffer(buffer)) {
      audioData = buffer;
    } else if (typeof buffer === "string") {
      const rStream = createReadStream(buffer);
      audioData = await toBuffer(rStream);
    } else {
      audioData = await toBuffer(buffer);
    }
    const audioBuffer = await decoder(audioData);
    const rawData = audioBuffer.getChannelData(0); // We only need to work with one channel of data
    const samples = 64; // Number of samples we want to have in our final data set
    const blockSize = Math.floor(rawData.length / samples); // the number of samples in each subdivision
    const filteredData = [];
    for (let i = 0; i < samples; i++) {
      const blockStart = blockSize * i; // the location of the first sample in the block
      let sum = 0;
      for (let j = 0; j < blockSize; j++) {
        sum = sum + Math.abs(rawData[blockStart + j]); // find the sum of all the samples in the block
      }
      filteredData.push(sum / blockSize); // divide the sum by the block size to get the average
    }
    // This guarantees that the largest data point will be set to 1, and the rest of the data will scale proportionally.
    // NOTE(review): for pure silence (max == 0) the multiplier is Infinity and
    // the buckets become NaN, which Uint8Array coerces to 0 — the caller then
    // gets an all-zero waveform rather than an error. Confirm this is intended.
    const multiplier = Math.pow(Math.max(...filteredData), -1);
    const normalizedData = filteredData.map((n) => n * multiplier);
    // Generate waveform like WhatsApp
    const waveform = new Uint8Array(
      normalizedData.map((n) => Math.floor(100 * n)),
    );
    return waveform;
  } catch (e) {
    logger?.debug("Failed to generate waveform: " + e);
  }
}
287
/** Wraps a Buffer in a one-shot Readable stream (single push, then EOF). */
export const toReadable = (buffer) => {
  const stream = new Readable({ read() {} });
  stream.push(buffer);
  stream.push(null);
  return stream;
};
293
/** Drains a readable stream into a single Buffer, then destroys the stream. */
export const toBuffer = async (stream) => {
  const collected = [];
  for await (const piece of stream) {
    collected.push(piece);
  }
  stream.destroy();
  return Buffer.concat(collected);
};
301
/**
 * Normalizes any media upload descriptor into a Readable stream.
 * @returns {{ stream, type }} where type is one of
 *          "buffer" | "readable" | "remote" | "file", describing the source
 */
export const getStream = async (item, opts) => {
  if (Buffer.isBuffer(item)) {
    return { stream: toReadable(item), type: "buffer" };
  }
  if ("stream" in item) {
    return { stream: item.stream, type: "readable" };
  }
  const urlStr = item.url.toString();
  // data: URLs carry the payload inline as base64 after the comma
  if (urlStr.startsWith("data:")) {
    const buffer = Buffer.from(urlStr.split(",")[1], "base64");
    return { stream: toReadable(buffer), type: "buffer" };
  }
  if (urlStr.startsWith("http://") || urlStr.startsWith("https://")) {
    return { stream: await getHttpStream(item.url, opts), type: "remote" };
  }
  // anything else is treated as a local file path
  return { stream: createReadStream(item.url), type: "file" };
};
318
/**
 * generates a thumbnail for a given media, if required.
 * Images use extractImageThumb; videos shell out to ffmpeg via
 * extractVideoThumb (failure there is non-fatal and only logged).
 * @returns {{ thumbnail, originalImageDimensions }} — either field may be
 *          undefined (e.g. for non-image/video media or on ffmpeg failure)
 */
export async function generateThumbnail(file, mediaType, options) {
  let thumbnail;
  let originalImageDimensions;
  if (mediaType === "image") {
    const { buffer, original } = await extractImageThumb(file);
    thumbnail = buffer;
    if (original.width && original.height) {
      originalImageDimensions = {
        width: original.width,
        height: original.height,
      };
    }
  } else if (mediaType === "video") {
    try {
      const buffer = await extractVideoThumb(file, "00:00:00", {
        width: 32,
        height: 32,
      });
      thumbnail = buffer;
    } catch (err) {
      // best effort — missing ffmpeg or a bad file should not fail the send
      options.logger?.debug("could not generate video thumb: " + err);
    }
  }
  return {
    thumbnail: thumbnail,
    originalImageDimensions: originalImageDimensions,
  };
}
350
/**
 * Opens a GET request and returns the response body as a Node Readable.
 * `options.dispatcher` is forwarded to fetch (undici proxy/agent support).
 * @throws {Boom} carrying the HTTP status code when the response is not ok
 */
export const getHttpStream = async (url, options = {}) => {
  const response = await fetch(url.toString(), {
    dispatcher: options.dispatcher,
    method: "GET",
    headers: options.headers,
  });
  if (!response.ok) {
    // NOTE(review): the response body is not consumed or cancelled before
    // throwing — confirm this cannot hold the underlying connection open.
    throw new Boom(`Failed to fetch stream from ${url}`, {
      statusCode: response.status,
      data: { url: url },
    });
  }
  // @ts-ignore Node18+ Readable.fromWeb exists
  return response.body instanceof Readable
    ? response.body
    : Readable.fromWeb(response.body);
};
367
/**
 * Encrypts a media source with AES-256-CBC while streaming it to a temp
 * file, computing the WhatsApp media MAC and content hashes on the fly.
 *
 * Encrypted file layout: ciphertext followed by the first 10 bytes of
 * HMAC-SHA256(iv + ciphertext).
 *
 * @returns mediaKey (32 random bytes), encFilePath (and originalFilePath
 *          when saveOriginalFileIfRequired), mac, fileSha256 (plaintext),
 *          fileEncSha256 (ciphertext + mac) and fileLength.
 * On error every stream is destroyed, temp files are deleted (best effort)
 * and the error is rethrown.
 */
export const encryptedStream = async (
  media,
  mediaType,
  { logger, saveOriginalFileIfRequired, opts } = {},
) => {
  const { stream, type } = await getStream(media, opts);
  logger?.debug("fetched media stream");
  const mediaKey = Crypto.randomBytes(32);
  const { cipherKey, iv, macKey } = await getMediaKeys(mediaKey, mediaType);
  const encFilePath = join(
    getTmpFilesDirectory(),
    mediaType + generateMessageIDV2() + "-enc",
  );
  const encFileWriteStream = createWriteStream(encFilePath);
  let originalFileStream;
  let originalFilePath;
  if (saveOriginalFileIfRequired) {
    originalFilePath = join(
      getTmpFilesDirectory(),
      mediaType + generateMessageIDV2() + "-original",
    );
    originalFileStream = createWriteStream(originalFilePath);
  }
  let fileLength = 0;
  const aes = Crypto.createCipheriv("aes-256-cbc", cipherKey, iv);
  // the MAC covers the IV followed by the entire ciphertext
  const hmac = Crypto.createHmac("sha256", macKey).update(iv);
  const sha256Plain = Crypto.createHash("sha256");
  const sha256Enc = Crypto.createHash("sha256");
  // feed one ciphertext chunk into the enc-hash, the MAC and the output file
  const onChunk = async (buff) => {
    sha256Enc.update(buff);
    hmac.update(buff);
    // Handle backpressure: if write returns false, wait for drain
    if (!encFileWriteStream.write(buff)) {
      await once(encFileWriteStream, "drain");
    }
  };
  try {
    for await (const data of stream) {
      fileLength += data.length;
      // NOTE(review): fileLength already includes this chunk, so the
      // `fileLength + data.length` comparison counts the chunk twice and
      // trips up to one chunk early — confirm whether this slack is intended.
      if (
        type === "remote" &&
        opts?.maxContentLength &&
        fileLength + data.length > opts.maxContentLength
      ) {
        throw new Boom(`content length exceeded when encrypting "${type}"`, {
          data: { media: media, type: type },
        });
      }
      if (originalFileStream) {
        if (!originalFileStream.write(data)) {
          await once(originalFileStream, "drain");
        }
      }
      sha256Plain.update(data);
      await onChunk(aes.update(data));
    }
    // flush the final (PKCS7-padded) AES block
    await onChunk(aes.final());
    const mac = hmac.digest().slice(0, 10);
    sha256Enc.update(mac);
    const fileSha256 = sha256Plain.digest();
    const fileEncSha256 = sha256Enc.digest();
    // the truncated MAC is appended to the encrypted file
    encFileWriteStream.write(mac);
    const encFinishPromise = once(encFileWriteStream, "finish");
    const originalFinishPromise = originalFileStream
      ? once(originalFileStream, "finish")
      : Promise.resolve();
    encFileWriteStream.end();
    originalFileStream?.end?.();
    stream.destroy();
    // Wait for write streams to fully flush to disk
    // This helps reduce memory pressure by allowing OS to release buffers
    await encFinishPromise;
    await originalFinishPromise;
    logger?.debug("encrypted data successfully");
    return {
      mediaKey: mediaKey,
      originalFilePath: originalFilePath,
      encFilePath: encFilePath,
      mac: mac,
      fileEncSha256: fileEncSha256,
      fileSha256: fileSha256,
      fileLength: fileLength,
    };
  } catch (error) {
    // destroy all streams with error
    encFileWriteStream.destroy();
    originalFileStream?.destroy?.();
    aes.destroy();
    hmac.destroy();
    sha256Plain.destroy();
    sha256Enc.destroy();
    stream.destroy();
    try {
      await fs.unlink(encFilePath);
      if (originalFilePath) {
        await fs.unlink(originalFilePath);
      }
    } catch (err) {
      logger?.error({ err: err }, "failed deleting tmp files");
    }
    throw error;
  }
};
470
// Default WhatsApp media CDN host, used when only a directPath is known.
const DEF_HOST = "mmg.whatsapp.net";
// AES-256-CBC block size in bytes; ranged downloads must align to it.
const AES_CHUNK_SIZE = 16;
// Rounds `num` down to the nearest AES block boundary.
const toSmallestChunkSize = (num) =>
  Math.floor(num / AES_CHUNK_SIZE) * AES_CHUNK_SIZE;
/** Builds the full media CDN URL for a given directPath. */
export const getUrlFromDirectPath = (directPath) =>
  `https://${DEF_HOST}${directPath}`;
477
/**
 * Downloads and decrypts a media message's content.
 * Prefers the message's own URL when it points at the official media CDN,
 * otherwise rebuilds the URL from directPath.
 * @throws {Boom} 400 when neither a usable URL nor a directPath is present
 */
export const downloadContentFromMessage = async (
  { mediaKey, directPath, url },
  type,
  opts = {},
) => {
  let downloadUrl;
  if (url?.startsWith("https://mmg.whatsapp.net/")) {
    downloadUrl = url;
  } else {
    downloadUrl = getUrlFromDirectPath(directPath);
  }
  if (!downloadUrl) {
    throw new Boom("No valid media URL or directPath present in message", {
      statusCode: 400,
    });
  }
  const keys = await getMediaKeys(mediaKey, type);
  return downloadEncryptedContent(downloadUrl, keys, opts);
};
492
/**
 * Decrypts and downloads an AES256-CBC encrypted file given the keys.
 * Assumes the SHA256 of the plaintext is appended to the end of the ciphertext
 *
 * Supports partial downloads: startByte/endByte are rounded to AES block
 * boundaries, the ciphertext block preceding the range is fetched to serve
 * as the IV, and the decrypted output is trimmed back to the exact range.
 * @returns a Readable (Transform) emitting decrypted plaintext
 * */ export const downloadEncryptedContent = async (
  downloadUrl,
  { cipherKey, iv },
  { startByte, endByte, options } = {},
) => {
  let bytesFetched = 0;
  let startChunk = 0;
  let firstBlockIsIV = false;
  // if a start byte is specified -- then we need to fetch the previous chunk as that will form the IV
  if (startByte) {
    const chunk = toSmallestChunkSize(startByte || 0);
    if (chunk) {
      startChunk = chunk - AES_CHUNK_SIZE;
      bytesFetched = chunk;
      firstBlockIsIV = true;
    }
  }
  const endChunk = endByte
    ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE
    : undefined;
  // normalize caller headers (array-of-pairs or object) before adding Origin
  const headersInit = options?.headers ? options.headers : undefined;
  const headers = {
    ...(headersInit
      ? Array.isArray(headersInit)
        ? Object.fromEntries(headersInit)
        : headersInit
      : {}),
    Origin: DEFAULT_ORIGIN,
  };
  // build an HTTP Range header "bytes=start-" or "bytes=start-end"
  if (startChunk || endChunk) {
    headers.Range = `bytes=${startChunk}-`;
    if (endChunk) {
      headers.Range += endChunk;
    }
  }
  // download the message
  const fetched = await getHttpStream(downloadUrl, {
    ...(options || {}),
    headers: headers,
  });
  let remainingBytes = Buffer.from([]);
  let aes;
  // trims decrypted output down to the byte range the caller asked for
  const pushBytes = (bytes, push) => {
    if (startByte || endByte) {
      const start =
        bytesFetched >= startByte
          ? undefined
          : Math.max(startByte - bytesFetched, 0);
      const end =
        bytesFetched + bytes.length < endByte
          ? undefined
          : Math.max(endByte - bytesFetched, 0);
      push(bytes.slice(start, end));
      bytesFetched += bytes.length;
    } else {
      push(bytes);
    }
  };
  const output = new Transform({
    transform(chunk, _, callback) {
      // only decrypt whole AES blocks; carry the remainder to the next chunk
      let data = Buffer.concat([remainingBytes, chunk]);
      const decryptLength = toSmallestChunkSize(data.length);
      remainingBytes = data.slice(decryptLength);
      data = data.slice(0, decryptLength);
      if (!aes) {
        let ivValue = iv;
        if (firstBlockIsIV) {
          // ranged download: the first fetched block is the IV, not payload
          ivValue = data.slice(0, AES_CHUNK_SIZE);
          data = data.slice(AES_CHUNK_SIZE);
        }
        aes = Crypto.createDecipheriv("aes-256-cbc", cipherKey, ivValue);
        // if an end byte that is not EOF is specified
        // stop auto padding (PKCS7) -- otherwise throws an error for decryption
        if (endByte) {
          aes.setAutoPadding(false);
        }
      }
      try {
        pushBytes(aes.update(data), (b) => this.push(b));
        callback();
      } catch (error) {
        callback(error);
      }
    },
    final(callback) {
      try {
        pushBytes(aes.final(), (b) => this.push(b));
        callback();
      } catch (error) {
        callback(error);
      }
    },
  });
  return fetched.pipe(output, { end: true });
};
590
/**
 * Infers a file extension for a media message.
 * Location/product messages always carry a JPEG thumbnail; everything else
 * derives the extension from the content's mimetype
 * (e.g. "audio/ogg; codecs=opus" -> "ogg").
 * Note: the location/product branch returns ".jpeg" (with a dot) while the
 * mimetype branch returns the bare subtype — preserved for compatibility.
 * @returns {string|undefined} undefined when no mimetype is present
 */
export function extensionForMediaMessage(message) {
  // strip any ";codecs=..." suffix, then take the subtype after the slash.
  // fix: guard with ?. so a message without a mimetype yields undefined
  // instead of throwing a TypeError
  const getExtension = (mimetype) => mimetype?.split(";")[0]?.split("/")[1];
  const type = Object.keys(message)[0];
  let extension;
  if (
    type === "locationMessage" ||
    type === "liveLocationMessage" ||
    type === "productMessage"
  ) {
    extension = ".jpeg";
  } else {
    const messageContent = message[type];
    extension = getExtension(messageContent?.mimetype);
  }
  return extension;
}
606
/**
 * True when running on plain Node.js (not Bun or Deno).
 * Used to pick the upload transport, since Node's undici-backed fetch
 * buffers streamed request bodies (see uploadMedia).
 */
const isNodeRuntime = () => {
  return (
    typeof process !== "undefined" &&
    // fix: the old `!== null` check was always true — a missing
    // process.versions.node is `undefined`, not `null`. Use a nullish
    // comparison so non-Node environments are detected correctly.
    process.versions?.node != null &&
    typeof process.versions.bun === "undefined" &&
    typeof globalThis.Deno === "undefined"
  );
};
614
/**
 * Uploads a file via Node's raw http/https modules, streaming from disk.
 * Follows up to 5 redirects. Resolves with the parsed JSON response body,
 * or undefined when the body is not valid JSON.
 * NOTE(review): non-2xx, non-redirect status codes are not treated as
 * errors here — the caller inspects the parsed body instead; confirm that
 * is the intended contract.
 */
export const uploadWithNodeHttp = async (
  { url, filePath, headers, timeoutMs, agent },
  redirectCount = 0,
) => {
  if (redirectCount > 5) {
    throw new Error("Too many redirects");
  }
  const parsedUrl = new URL(url);
  const httpModule =
    parsedUrl.protocol === "https:"
      ? await import("https")
      : await import("http");
  // Get file size for Content-Length header (required for Node.js streaming)
  const fileStats = await fs.stat(filePath);
  const fileSize = fileStats.size;
  return new Promise((resolve, reject) => {
    const req = httpModule.request(
      {
        hostname: parsedUrl.hostname,
        port: parsedUrl.port || (parsedUrl.protocol === "https:" ? 443 : 80),
        path: parsedUrl.pathname + parsedUrl.search,
        method: "POST",
        headers: { ...headers, "Content-Length": fileSize },
        agent: agent,
        timeout: timeoutMs,
      },
      (res) => {
        // Handle redirects (3xx)
        if (
          res.statusCode &&
          res.statusCode >= 300 &&
          res.statusCode < 400 &&
          res.headers.location
        ) {
          res.resume(); // Consume response to free resources
          // resolve relative Location headers against the current URL
          const newUrl = new URL(res.headers.location, url).toString();
          resolve(
            uploadWithNodeHttp(
              {
                url: newUrl,
                filePath: filePath,
                headers: headers,
                timeoutMs: timeoutMs,
                agent: agent,
              },
              redirectCount + 1,
            ),
          );
          return;
        }
        // accumulate the (small, JSON) response body as a string
        let body = "";
        res.on("data", (chunk) => (body += chunk));
        res.on("end", () => {
          try {
            resolve(JSON.parse(body));
          } catch {
            resolve(undefined);
          }
        });
      },
    );
    req.on("error", reject);
    req.on("timeout", () => {
      req.destroy();
      reject(new Error("Upload timeout"));
    });
    // stream the file from disk; destroy the request on read failure
    const stream = createReadStream(filePath);
    stream.pipe(req);
    stream.on("error", (err) => {
      req.destroy();
      reject(err);
    });
  });
};
688
/**
 * Streams a file upload using the web-standard Fetch API. Used on runtimes
 * (Bun, Deno, browsers) that stream request bodies correctly.
 * Resolves with the parsed JSON response, or undefined when the body is
 * not valid JSON.
 */
const uploadWithFetch = async ({ url, filePath, headers, timeoutMs, agent }) => {
  // Convert Node.js Readable to Web ReadableStream
  const body = Readable.toWeb(createReadStream(filePath));
  const requestInit = {
    dispatcher: agent,
    method: "POST",
    body,
    headers: headers,
    duplex: "half",
    signal: timeoutMs ? AbortSignal.timeout(timeoutMs) : undefined,
  };
  const response = await fetch(url, requestInit);
  try {
    return await response.json();
  } catch {
    return undefined;
  }
};
712
/**
 * Uploads media to WhatsApp servers.
 *
 * ## Why we have two upload implementations:
 *
 * Node.js's native `fetch` (powered by undici) has a known bug where it buffers
 * the entire request body in memory before sending, even when using streams.
 * This causes memory issues with large files (e.g., 1GB file = 1GB+ memory usage).
 * See: https://github.com/nodejs/undici/issues/4058
 *
 * Other runtimes (Bun, Deno, browsers) correctly stream the request body without
 * buffering, so we can use the web-standard Fetch API there.
 *
 * ## Future considerations:
 * Once the undici bug is fixed, we can simplify this to use only the Fetch API
 * across all runtimes. Monitor the GitHub issue for updates.
 */
const uploadMedia = async (params, logger) => {
  const onPlainNode = isNodeRuntime();
  logger?.debug(
    onPlainNode
      ? "Using Node.js https module for upload (avoids undici buffering bug)"
      : "Using web-standard Fetch API for upload",
  );
  return onPlainNode ? uploadWithNodeHttp(params) : uploadWithFetch(params);
};
739
/**
 * Creates the upload function used to push encrypted media to WhatsApp's
 * upload hosts. Tries every host (custom hosts first) until one returns a
 * url/direct_path; refreshes the media connection after a failed attempt.
 * @throws {Boom} 500 when every host fails
 */
export const getWAUploadToServer = (
  { customUploadHosts, fetchAgent, logger, options },
  refreshMediaConn,
) => {
  return async (
    filePath,
    { mediaType, fileEncSha256B64, timeoutMs, newsletter },
  ) => {
    // send a query JSON to obtain the url & auth token to upload our media
    let uploadInfo = await refreshMediaConn(false);
    let urls;
    const hosts = [...customUploadHosts, ...uploadInfo.hosts];
    fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64);
    // Prepare common headers
    const customHeaders = (() => {
      const hdrs = options?.headers;
      if (!hdrs) return {};
      return Array.isArray(hdrs) ? Object.fromEntries(hdrs) : hdrs;
    })();
    const headers = {
      ...customHeaders,
      "Content-Type": "application/octet-stream",
      Origin: DEFAULT_ORIGIN,
    };
    for (const { hostname } of hosts) {
      logger.debug(`uploading to "${hostname}"`);
      const auth = encodeURIComponent(uploadInfo.auth);
      // newsletter uploads use their own path map and request a
      // server-generated thumbnail
      const mediaPathMap =
        (newsletter ? NEWSLETTER_MEDIA_PATH_MAP : undefined) || MEDIA_PATH_MAP;
      const serverThumb = newsletter ? "&server_thumb_gen=1" : "";
      const url = `https://${hostname}${mediaPathMap[mediaType]}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}${serverThumb}`;
      let result;
      try {
        result = await uploadMedia(
          {
            url: url,
            filePath: filePath,
            headers: headers,
            timeoutMs: timeoutMs,
            agent: fetchAgent,
          },
          logger,
        );
        if (result?.url || result?.direct_path) {
          urls = {
            mediaUrl: result.url,
            directPath: result.direct_path,
            meta_hmac: result.meta_hmac,
            fbid: result.fbid,
            ts: result.ts,
            thumbnailDirectPath: result.thumbnail_info?.thumbnail_direct_path,
            thumbnailSha256: result.thumbnail_info?.thumbnail_sha256,
          };
          break;
        } else {
          // no usable result: refresh the media connection and record the
          // failure so the next host is tried with fresh auth
          uploadInfo = await refreshMediaConn(true);
          throw new Error(`upload failed, reason: ${JSON.stringify(result)}`);
        }
      } catch (error) {
        // fix: compare against the last entry of the combined `hosts` list —
        // the old code indexed with uploadInfo.hosts.length, which is wrong
        // whenever customUploadHosts is non-empty
        const isLast = hostname === hosts[hosts.length - 1]?.hostname;
        logger.warn(
          { trace: error?.stack, uploadResult: result },
          `Error in uploading to ${hostname} ${isLast ? "" : ", retrying..."}`,
        );
      }
    }
    if (!urls) {
      throw new Boom("Media upload failed on all hosts", { statusCode: 500 });
    }
    return urls;
  };
};
814
// Derives the 32-byte key used to encrypt/decrypt media retry payloads
// via HKDF with the WhatsApp-defined info string.
const getMediaRetryKey = (mediaKey) =>
  hkdf(mediaKey, 32, { info: "WhatsApp Media Retry Notification" });
817
/**
 * Generate a binary node that will request the phone to re-upload the media & return the newly uploaded URL
 */
export const encryptMediaRetryRequest = (key, mediaKey, meId) => {
  // encode the server-error receipt that references the failed stanza
  const receiptPayload = proto.ServerErrorReceipt.encode({
    stanzaId: key.id,
  }).finish();
  const retryKey = getMediaRetryKey(mediaKey);
  const iv = Crypto.randomBytes(12);
  // AES-GCM with the message id as additional authenticated data
  const ciphertext = aesEncryptGCM(
    receiptPayload,
    retryKey,
    iv,
    Buffer.from(key.id),
  );
  return {
    tag: "receipt",
    attrs: { id: key.id, to: jidNormalizedUser(meId), type: "server-error" },
    content: [
      // this encrypt node is actually pretty useless
      // the media is returned even without this node
      // keeping it here to maintain parity with WA Web
      {
        tag: "encrypt",
        attrs: {},
        content: [
          { tag: "enc_p", attrs: {}, content: ciphertext },
          { tag: "enc_iv", attrs: {}, content: iv },
        ],
      },
      {
        tag: "rmr",
        attrs: {
          jid: key.remoteJid,
          from_me: (!!key.fromMe).toString(),
          // @ts-ignore
          participant: key.participant || undefined,
        },
      },
    ],
  };
};
858
/**
 * Decodes an incoming media-retry receipt node into an event object:
 * `{ key, media: { ciphertext, iv } }` on success, or `{ key, error }`
 * when the phone reported a failure or the payload is incomplete.
 */
export const decodeMediaRetryNode = (node) => {
  const rmrNode = getBinaryNodeChild(node, "rmr");
  const event = {
    key: {
      id: node.attrs.id,
      remoteJid: rmrNode.attrs.jid,
      fromMe: rmrNode.attrs.from_me === "true",
      participant: rmrNode.attrs.participant,
    },
  };
  const errorNode = getBinaryNodeChild(node, "error");
  if (errorNode) {
    // phone reported a failure; map its numeric code onto an HTTP-ish status
    const errorCode = +errorNode.attrs.code;
    event.error = new Boom(`Failed to re-upload media (${errorCode})`, {
      data: errorNode.attrs,
      statusCode: getStatusCodeForMediaRetry(errorCode),
    });
    return event;
  }
  const encryptedInfoNode = getBinaryNodeChild(node, "encrypt");
  const ciphertext = getBinaryNodeChildBuffer(encryptedInfoNode, "enc_p");
  const iv = getBinaryNodeChildBuffer(encryptedInfoNode, "enc_iv");
  if (ciphertext && iv) {
    event.media = { ciphertext, iv };
  } else {
    event.error = new Boom("Failed to re-upload media (missing ciphertext)", {
      statusCode: 404,
    });
  }
  return event;
};
889
/**
 * Decrypts a media-retry payload with the key derived from the original
 * message's media key; the message id is the AES-GCM associated data.
 */
export const decryptMediaRetryData = ({ ciphertext, iv }, mediaKey, msgId) => {
  const plaintext = aesDecryptGCM(
    ciphertext,
    getMediaRetryKey(mediaKey),
    iv,
    Buffer.from(msgId),
  );
  return proto.MediaRetryNotification.decode(plaintext);
};
894
// Translates a MediaRetryNotification result type into an HTTP-like
// status code (undefined for unknown codes).
export const getStatusCodeForMediaRetry = (code) => {
  return MEDIA_RETRY_STATUS_MAP[code];
};

// Result-type → status-code lookup, keyed by the protobuf enum values.
const MEDIA_RETRY_STATUS_MAP = {
  [proto.MediaRetryNotification.ResultType.SUCCESS]: 200,
  [proto.MediaRetryNotification.ResultType.DECRYPTION_ERROR]: 412,
  [proto.MediaRetryNotification.ResultType.NOT_FOUND]: 404,
  [proto.MediaRetryNotification.ResultType.GENERAL_ERROR]: 418,
};