@nexustechpro/baileys 2.0.1 → 2.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +924 -1299
- package/lib/Defaults/baileys-version.json +6 -2
- package/lib/Defaults/index.js +172 -172
- package/lib/Signal/libsignal.js +380 -292
- package/lib/Signal/lid-mapping.js +264 -171
- package/lib/Socket/Client/index.js +2 -2
- package/lib/Socket/Client/types.js +10 -10
- package/lib/Socket/Client/websocket.js +45 -310
- package/lib/Socket/business.js +375 -375
- package/lib/Socket/chats.js +909 -963
- package/lib/Socket/communities.js +430 -430
- package/lib/Socket/groups.js +342 -342
- package/lib/Socket/index.js +22 -22
- package/lib/Socket/messages-recv.js +777 -743
- package/lib/Socket/messages-send.js +667 -393
- package/lib/Socket/mex.js +50 -50
- package/lib/Socket/newsletter.js +148 -148
- package/lib/Socket/nexus-handler.js +75 -261
- package/lib/Socket/socket.js +709 -1201
- package/lib/Store/index.js +5 -5
- package/lib/Store/make-cache-manager-store.js +81 -81
- package/lib/Store/make-in-memory-store.js +416 -416
- package/lib/Store/make-ordered-dictionary.js +81 -81
- package/lib/Store/object-repository.js +30 -30
- package/lib/Types/Auth.js +1 -1
- package/lib/Types/Bussines.js +1 -1
- package/lib/Types/Call.js +1 -1
- package/lib/Types/Chat.js +7 -7
- package/lib/Types/Contact.js +1 -1
- package/lib/Types/Events.js +1 -1
- package/lib/Types/GroupMetadata.js +1 -1
- package/lib/Types/Label.js +24 -24
- package/lib/Types/LabelAssociation.js +6 -6
- package/lib/Types/Message.js +10 -10
- package/lib/Types/Newsletter.js +28 -28
- package/lib/Types/Product.js +1 -1
- package/lib/Types/Signal.js +1 -1
- package/lib/Types/Socket.js +2 -2
- package/lib/Types/State.js +12 -12
- package/lib/Types/USync.js +1 -1
- package/lib/Types/index.js +25 -25
- package/lib/Utils/auth-utils.js +264 -256
- package/lib/Utils/baileys-event-stream.js +55 -55
- package/lib/Utils/browser-utils.js +27 -27
- package/lib/Utils/business.js +228 -230
- package/lib/Utils/chat-utils.js +694 -764
- package/lib/Utils/crypto.js +109 -135
- package/lib/Utils/decode-wa-message.js +310 -314
- package/lib/Utils/event-buffer.js +547 -547
- package/lib/Utils/generics.js +297 -297
- package/lib/Utils/history.js +91 -83
- package/lib/Utils/index.js +21 -20
- package/lib/Utils/key-store.js +17 -0
- package/lib/Utils/link-preview.js +97 -88
- package/lib/Utils/logger.js +2 -2
- package/lib/Utils/lt-hash.js +47 -47
- package/lib/Utils/make-mutex.js +39 -39
- package/lib/Utils/message-retry-manager.js +148 -148
- package/lib/Utils/messages-media.js +534 -532
- package/lib/Utils/messages.js +705 -705
- package/lib/Utils/noise-handler.js +255 -255
- package/lib/Utils/pre-key-manager.js +105 -105
- package/lib/Utils/process-message.js +412 -412
- package/lib/Utils/signal.js +160 -158
- package/lib/Utils/use-multi-file-auth-state.js +120 -120
- package/lib/Utils/validate-connection.js +194 -194
- package/lib/WABinary/constants.js +1300 -1300
- package/lib/WABinary/decode.js +237 -237
- package/lib/WABinary/encode.js +232 -232
- package/lib/WABinary/generic-utils.js +252 -211
- package/lib/WABinary/index.js +5 -5
- package/lib/WABinary/jid-utils.js +279 -95
- package/lib/WABinary/types.js +1 -1
- package/lib/WAM/BinaryInfo.js +9 -9
- package/lib/WAM/constants.js +22852 -22852
- package/lib/WAM/encode.js +149 -149
- package/lib/WAM/index.js +3 -3
- package/lib/WAUSync/Protocols/USyncContactProtocol.js +28 -28
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +53 -53
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +26 -26
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js +37 -37
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +50 -50
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +28 -28
- package/lib/WAUSync/Protocols/index.js +4 -4
- package/lib/WAUSync/USyncQuery.js +93 -93
- package/lib/WAUSync/USyncUser.js +22 -22
- package/lib/WAUSync/index.js +3 -3
- package/lib/index.js +66 -66
- package/package.json +171 -144
- package/lib/Signal/Group/ciphertext-message.js +0 -12
- package/lib/Signal/Group/group-session-builder.js +0 -30
- package/lib/Signal/Group/group_cipher.js +0 -100
- package/lib/Signal/Group/index.js +0 -12
- package/lib/Signal/Group/keyhelper.js +0 -18
- package/lib/Signal/Group/sender-chain-key.js +0 -26
- package/lib/Signal/Group/sender-key-distribution-message.js +0 -63
- package/lib/Signal/Group/sender-key-message.js +0 -66
- package/lib/Signal/Group/sender-key-name.js +0 -48
- package/lib/Signal/Group/sender-key-record.js +0 -41
- package/lib/Signal/Group/sender-key-state.js +0 -84
- package/lib/Signal/Group/sender-message-key.js +0 -26
|
@@ -1,533 +1,535 @@
|
|
|
1
|
-
import { Boom } from '@hapi/boom';
|
|
2
|
-
import { exec } from 'child_process';
|
|
3
|
-
import * as Crypto from 'crypto';
|
|
4
|
-
import { once } from 'events';
|
|
5
|
-
import { createReadStream, createWriteStream, promises as fs } from 'fs';
|
|
6
|
-
import { tmpdir } from 'os';
|
|
7
|
-
import { join } from 'path';
|
|
8
|
-
import { Readable, Transform } from 'stream';
|
|
9
|
-
import { URL } from 'url';
|
|
10
|
-
import { proto } from '../../WAProto/index.js';
|
|
11
|
-
import { DEFAULT_ORIGIN, MEDIA_HKDF_KEY_MAPPING, MEDIA_PATH_MAP } from '../Defaults/index.js';
|
|
12
|
-
import { getBinaryNodeChild, getBinaryNodeChildBuffer, jidNormalizedUser } from '../WABinary/index.js';
|
|
13
|
-
import { aesDecryptGCM, aesEncryptGCM, hkdf } from './crypto.js';
|
|
14
|
-
import { generateMessageIDV2 } from './generics.js';
|
|
15
|
-
|
|
16
|
-
export const getImageProcessingLibrary = async () => {
|
|
17
|
-
const [jimp, sharp] = await Promise.all([
|
|
18
|
-
import('jimp').catch(() => null),
|
|
19
|
-
import('sharp').catch(() => null)
|
|
20
|
-
]);
|
|
21
|
-
if (sharp) return { sharp };
|
|
22
|
-
if (jimp) return { jimp };
|
|
23
|
-
throw new Boom('No image processing library available');
|
|
24
|
-
};
|
|
25
|
-
|
|
26
|
-
export const hkdfInfoKey = (type) => `WhatsApp ${MEDIA_HKDF_KEY_MAPPING[type]} Keys`;
|
|
27
|
-
|
|
28
|
-
export const getRawMediaUploadData = async (media, mediaType, logger) => {
|
|
29
|
-
const { stream } = await getStream(media);
|
|
30
|
-
const hasher = Crypto.createHash('sha256');
|
|
31
|
-
const filePath = join(tmpdir(), mediaType + generateMessageIDV2());
|
|
32
|
-
const fileWriteStream = createWriteStream(filePath);
|
|
33
|
-
let fileLength = 0;
|
|
34
|
-
try {
|
|
35
|
-
for await (const data of stream) {
|
|
36
|
-
fileLength += data.length;
|
|
37
|
-
hasher.update(data);
|
|
38
|
-
if (!fileWriteStream.write(data)) await once(fileWriteStream, 'drain');
|
|
39
|
-
}
|
|
40
|
-
fileWriteStream.end();
|
|
41
|
-
await once(fileWriteStream, 'finish');
|
|
42
|
-
stream.destroy();
|
|
43
|
-
logger?.debug('hashed data for raw upload');
|
|
44
|
-
return { filePath, fileSha256: hasher.digest(), fileLength };
|
|
45
|
-
} catch (error) {
|
|
46
|
-
fileWriteStream.destroy();
|
|
47
|
-
stream.destroy();
|
|
48
|
-
try { await fs.unlink(filePath); } catch { }
|
|
49
|
-
throw error;
|
|
50
|
-
}
|
|
51
|
-
};
|
|
52
|
-
|
|
53
|
-
export async function getMediaKeys(buffer, mediaType) {
|
|
54
|
-
if (!buffer) throw new Boom('Cannot derive from empty media key');
|
|
55
|
-
if (typeof buffer === 'string') buffer = Buffer.from(buffer.replace('data:;base64,', ''), 'base64');
|
|
56
|
-
const expandedMediaKey = hkdf(buffer, 112, { info: hkdfInfoKey(mediaType) });
|
|
57
|
-
return {
|
|
58
|
-
iv: expandedMediaKey.slice(0, 16),
|
|
59
|
-
cipherKey: expandedMediaKey.slice(16, 48),
|
|
60
|
-
macKey: expandedMediaKey.slice(48, 80)
|
|
61
|
-
};
|
|
62
|
-
}
|
|
63
|
-
|
|
64
|
-
const extractVideoThumb = (path, destPath, time, size) => new Promise((resolve, reject) => {
|
|
65
|
-
exec(`ffmpeg -ss ${time} -i ${path} -y -vf scale=${size.width}:-1 -vframes 1 -f image2 ${destPath}`, err => err ? reject(err) : resolve());
|
|
66
|
-
});
|
|
67
|
-
|
|
68
|
-
export const extractImageThumb = async (bufferOrFilePath, width = 32) => {
|
|
69
|
-
if (bufferOrFilePath instanceof Readable) bufferOrFilePath = await toBuffer(bufferOrFilePath);
|
|
70
|
-
const lib = await getImageProcessingLibrary();
|
|
71
|
-
if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
|
|
72
|
-
const img = lib.sharp.default(bufferOrFilePath);
|
|
73
|
-
const dimensions = await img.metadata();
|
|
74
|
-
const buffer = await img.resize(width).jpeg({ quality:
|
|
75
|
-
return { buffer, original: { width: dimensions.width, height: dimensions.height } };
|
|
76
|
-
} else if ('jimp' in lib && typeof lib.jimp?.Jimp === 'object') {
|
|
77
|
-
const jimp = await lib.jimp.Jimp.read(bufferOrFilePath);
|
|
78
|
-
const buffer = await jimp.resize({ w: width, mode: lib.jimp.ResizeStrategy.BILINEAR }).getBuffer('image/jpeg', { quality:
|
|
79
|
-
return { buffer, original: { width: jimp.width, height: jimp.height } };
|
|
80
|
-
}
|
|
81
|
-
throw new Boom('No image processing library available');
|
|
82
|
-
};
|
|
83
|
-
|
|
84
|
-
export const encodeBase64EncodedStringForUpload = (b64) => encodeURIComponent(b64.replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, ''));
|
|
85
|
-
|
|
86
|
-
export const generateProfilePicture = async (mediaUpload) => {
|
|
87
|
-
let bufferOrFilePath = Buffer.isBuffer(mediaUpload) ? mediaUpload : 'url' in mediaUpload ? mediaUpload.url.toString() : await toBuffer(mediaUpload.stream);
|
|
88
|
-
const lib = await getImageProcessingLibrary();
|
|
89
|
-
if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
|
|
90
|
-
const img = await lib.sharp.default(bufferOrFilePath).resize(720, 720, { fit: 'inside' }).jpeg({ quality: 50 }).toBuffer();
|
|
91
|
-
return { img };
|
|
92
|
-
} else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
|
|
93
|
-
const { read, MIME_JPEG } = lib.jimp;
|
|
94
|
-
const image = await read(bufferOrFilePath);
|
|
95
|
-
const min = image.getWidth(), max = image.getHeight();
|
|
96
|
-
const img = await image.crop(0, 0, min, max).scaleToFit(720, 720).getBufferAsync(MIME_JPEG);
|
|
97
|
-
return { img };
|
|
98
|
-
}
|
|
99
|
-
throw new Boom('No image processing library available');
|
|
100
|
-
};
|
|
101
|
-
|
|
102
|
-
export const mediaMessageSHA256B64 = (message) => {
|
|
103
|
-
const media = Object.values(message)[0];
|
|
104
|
-
return media?.fileSha256 && Buffer.from(media.fileSha256).toString('base64');
|
|
105
|
-
};
|
|
106
|
-
|
|
107
|
-
export async function getAudioDuration(buffer) {
|
|
108
|
-
const musicMetadata = await import('music-metadata');
|
|
109
|
-
if (Buffer.isBuffer(buffer)) return (await musicMetadata.parseBuffer(buffer, undefined, { duration: true })).format.duration;
|
|
110
|
-
if (typeof buffer === 'string') return (await musicMetadata.parseFile(buffer, { duration: true })).format.duration;
|
|
111
|
-
return (await musicMetadata.parseStream(buffer, undefined, { duration: true })).format.duration;
|
|
112
|
-
}
|
|
113
|
-
|
|
114
|
-
export async function getAudioWaveform(buffer, logger) {
|
|
115
|
-
try {
|
|
116
|
-
const { default: decoder } = await import('audio-decode');
|
|
117
|
-
let audioData = Buffer.isBuffer(buffer) ? buffer : typeof buffer === 'string' ? await toBuffer(createReadStream(buffer)) : await toBuffer(buffer);
|
|
118
|
-
const audioBuffer = await decoder(audioData);
|
|
119
|
-
const rawData = audioBuffer.getChannelData(0);
|
|
120
|
-
const samples = 64, blockSize = Math.floor(rawData.length / samples);
|
|
121
|
-
const filteredData = [];
|
|
122
|
-
for (let i = 0; i < samples; i++) {
|
|
123
|
-
let sum = 0;
|
|
124
|
-
for (let j = 0; j < blockSize; j++) sum += Math.abs(rawData[i * blockSize + j]);
|
|
125
|
-
filteredData.push(sum / blockSize);
|
|
126
|
-
}
|
|
127
|
-
const multiplier = Math.pow(Math.max(...filteredData), -1);
|
|
128
|
-
return new Uint8Array(filteredData.map(n => Math.floor(100 * n * multiplier)));
|
|
129
|
-
} catch (e) {
|
|
130
|
-
logger?.debug('Failed to generate waveform: ' + e);
|
|
131
|
-
return new Uint8Array([0,99,0,99,0,99,0,99,88,99,0,99,0,55,0,99,0,99,0,99,0,99,0,99,88,99,0,99,0,55,0,99]);
|
|
132
|
-
}
|
|
133
|
-
}
|
|
134
|
-
|
|
135
|
-
const convertToOpusBuffer = (buffer, logger) => new Promise((resolve, reject) => {
|
|
136
|
-
const ffmpeg = exec('ffmpeg -i pipe:0 -c:a libopus -b:a 64k -vbr on -compression_level 10 -frame_duration 20 -application voip -f ogg pipe:1');
|
|
137
|
-
const chunks = [];
|
|
138
|
-
ffmpeg.stdin.write(buffer);
|
|
139
|
-
ffmpeg.stdin.end();
|
|
140
|
-
ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
|
|
141
|
-
ffmpeg.stderr.on('data', () => {});
|
|
142
|
-
ffmpeg.on('close', code => code === 0 ? resolve(Buffer.concat(chunks)) : reject(new Error(`FFmpeg Opus conversion exited with code ${code}`)));
|
|
143
|
-
ffmpeg.on('error', reject);
|
|
144
|
-
});
|
|
145
|
-
|
|
146
|
-
const convertToMp4Buffer = (buffer, logger) => new Promise((resolve, reject) => {
|
|
147
|
-
const ffmpeg = exec('ffmpeg -i pipe:0 -c:v libx264 -preset veryfast -crf 23 -c:a aac -b:a 128k -movflags faststart -f mp4 pipe:1');
|
|
148
|
-
const chunks = [];
|
|
149
|
-
ffmpeg.stdin.write(buffer);
|
|
150
|
-
ffmpeg.stdin.end();
|
|
151
|
-
ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
|
|
152
|
-
ffmpeg.stderr.on('data', () => {});
|
|
153
|
-
ffmpeg.on('close', code => code === 0 ? resolve(Buffer.concat(chunks)) : reject(new Error(`FFmpeg MP4 conversion exited with code ${code}`)));
|
|
154
|
-
ffmpeg.on('error', reject);
|
|
155
|
-
});
|
|
156
|
-
|
|
157
|
-
export const toReadable = (buffer) => {
|
|
158
|
-
const readable = new Readable({ read: () => {} });
|
|
159
|
-
readable.push(buffer);
|
|
160
|
-
readable.push(null);
|
|
161
|
-
return readable;
|
|
162
|
-
};
|
|
163
|
-
|
|
164
|
-
export const toBuffer = async (stream) => {
|
|
165
|
-
const chunks = [];
|
|
166
|
-
for await (const chunk of stream) chunks.push(chunk);
|
|
167
|
-
stream.destroy();
|
|
168
|
-
return Buffer.concat(chunks);
|
|
169
|
-
};
|
|
170
|
-
|
|
171
|
-
export const getStream = async (item, opts) => {
|
|
172
|
-
if (!item) throw new Boom('Item is required for getStream', { statusCode: 400 });
|
|
173
|
-
if (Buffer.isBuffer(item)) return { stream: toReadable(item), type: 'buffer' };
|
|
174
|
-
if (item?.stream?.pipe) return { stream: item.stream, type: 'readable' };
|
|
175
|
-
if (item?.pipe) return { stream: item, type: 'readable' };
|
|
176
|
-
if (item && typeof item === 'object' && 'url' in item) {
|
|
177
|
-
const urlStr = item.url.toString();
|
|
178
|
-
if (Buffer.isBuffer(item.url)) return { stream: toReadable(item.url), type: 'buffer' };
|
|
179
|
-
if (urlStr.startsWith('data:')) return { stream: toReadable(Buffer.from(urlStr.split(',')[1], 'base64')), type: 'buffer' };
|
|
180
|
-
if (urlStr.startsWith('http')) return { stream: await getHttpStream(item.url, opts), type: 'remote' };
|
|
181
|
-
return { stream: createReadStream(item.url), type: 'file' };
|
|
182
|
-
}
|
|
183
|
-
if (typeof item === 'string') {
|
|
184
|
-
if (item.startsWith('data:')) return { stream: toReadable(Buffer.from(item.split(',')[1], 'base64')), type: 'buffer' };
|
|
185
|
-
if (item.startsWith('http')) return { stream: await getHttpStream(item, opts), type: 'remote' };
|
|
186
|
-
return { stream: createReadStream(item), type: 'file' };
|
|
187
|
-
}
|
|
188
|
-
throw new Boom(`Invalid input type for getStream: ${typeof item}`, { statusCode: 400 });
|
|
189
|
-
};
|
|
190
|
-
|
|
191
|
-
export async function generateThumbnail(file, mediaType, options) {
|
|
192
|
-
let thumbnail, originalImageDimensions;
|
|
193
|
-
if (mediaType === 'image') {
|
|
194
|
-
const { buffer, original } = await extractImageThumb(file);
|
|
195
|
-
thumbnail = buffer.toString('base64');
|
|
196
|
-
if (original.width && original.height) originalImageDimensions = original;
|
|
197
|
-
} else if (mediaType === 'video') {
|
|
198
|
-
const imgFilename = join(tmpdir(), generateMessageIDV2() + '.jpg');
|
|
199
|
-
try {
|
|
200
|
-
await extractVideoThumb(file, imgFilename, '00:00:00', { width: 32, height: 32 });
|
|
201
|
-
thumbnail = (await fs.readFile(imgFilename)).toString('base64');
|
|
202
|
-
await fs.unlink(imgFilename);
|
|
203
|
-
} catch (err) {
|
|
204
|
-
options.logger?.debug('could not generate video thumb: ' + err);
|
|
205
|
-
}
|
|
206
|
-
}
|
|
207
|
-
return { thumbnail, originalImageDimensions };
|
|
208
|
-
}
|
|
209
|
-
|
|
210
|
-
export const getHttpStream = async (url, options = {}) => {
|
|
211
|
-
const response = await fetch(url.toString(), { dispatcher: options.dispatcher, method: 'GET', headers: options.headers });
|
|
212
|
-
if (!response.ok) throw new Boom(`Failed to fetch stream from ${url}`, { statusCode: response.status, data: { url } });
|
|
213
|
-
const body = response.body;
|
|
214
|
-
if (body && typeof body === 'object' && 'pipeTo' in body && typeof body.pipeTo === 'function') return Readable.fromWeb(body);
|
|
215
|
-
if (body && typeof body.pipe === 'function' && typeof body.read === 'function') return body;
|
|
216
|
-
throw new Error('Response body is not a readable stream');
|
|
217
|
-
};
|
|
218
|
-
|
|
219
|
-
export const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts, convertVideo } = {}) => {
|
|
220
|
-
const { stream, type } = await getStream(media, opts);
|
|
221
|
-
logger?.debug('fetched media stream');
|
|
222
|
-
let buffer = await toBuffer(stream);
|
|
223
|
-
if (mediaType === 'video' && convertVideo) {
|
|
224
|
-
try { buffer = await convertToMp4Buffer(buffer, logger); logger?.debug('converted video to mp4 for newsletter'); }
|
|
225
|
-
catch (e) { logger?.error('failed to convert video for newsletter:', e); }
|
|
226
|
-
}
|
|
227
|
-
let bodyPath, didSaveToTmpPath = false;
|
|
228
|
-
try {
|
|
229
|
-
if (type === 'file') bodyPath = media.url;
|
|
230
|
-
else if (saveOriginalFileIfRequired) {
|
|
231
|
-
bodyPath = join(tmpdir(), mediaType + generateMessageIDV2());
|
|
232
|
-
await fs.writeFile(bodyPath, buffer);
|
|
233
|
-
didSaveToTmpPath = true;
|
|
234
|
-
}
|
|
235
|
-
return { mediaKey: undefined, encWriteStream: buffer, fileLength: buffer.length, fileSha256: Crypto.createHash('sha256').update(buffer).digest(), fileEncSha256: undefined, bodyPath, didSaveToTmpPath };
|
|
236
|
-
} catch (error) {
|
|
237
|
-
if (didSaveToTmpPath && bodyPath) try { await fs.unlink(bodyPath); } catch { }
|
|
238
|
-
throw error;
|
|
239
|
-
}
|
|
240
|
-
};
|
|
241
|
-
|
|
242
|
-
export const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts, mediaKey: providedMediaKey, isPtt, forceOpus, convertVideo } = {}) => {
|
|
243
|
-
const { stream, type } = await getStream(media, opts);
|
|
244
|
-
let finalStream = stream, opusConverted = false;
|
|
245
|
-
|
|
246
|
-
if (mediaType === 'audio' && (isPtt === true || forceOpus === true)) {
|
|
247
|
-
try {
|
|
248
|
-
finalStream = toReadable(await convertToOpusBuffer(await toBuffer(stream), logger));
|
|
249
|
-
opusConverted = true;
|
|
250
|
-
logger?.debug('converted audio to Opus');
|
|
251
|
-
} catch (error) {
|
|
252
|
-
logger?.error('failed to convert audio to Opus, using original');
|
|
253
|
-
finalStream = (await getStream(media, opts)).stream;
|
|
254
|
-
}
|
|
255
|
-
}
|
|
256
|
-
|
|
257
|
-
if (mediaType === 'video' && convertVideo === true) {
|
|
258
|
-
try {
|
|
259
|
-
finalStream = toReadable(await convertToMp4Buffer(await toBuffer(finalStream), logger));
|
|
260
|
-
logger?.debug('converted video to mp4');
|
|
261
|
-
} catch (error) {
|
|
262
|
-
logger?.error('failed to convert video to mp4, using original');
|
|
263
|
-
finalStream = (await getStream(media, opts)).stream;
|
|
264
|
-
}
|
|
265
|
-
}
|
|
266
|
-
|
|
267
|
-
const mediaKey = providedMediaKey || Crypto.randomBytes(32);
|
|
268
|
-
const { cipherKey, iv, macKey } = await getMediaKeys(mediaKey, mediaType);
|
|
269
|
-
const encFilePath = join(tmpdir(), mediaType + generateMessageIDV2() + '-enc');
|
|
270
|
-
const encFileWriteStream = createWriteStream(encFilePath);
|
|
271
|
-
let originalFileStream, originalFilePath;
|
|
272
|
-
|
|
273
|
-
if (saveOriginalFileIfRequired) {
|
|
274
|
-
originalFilePath = join(tmpdir(), mediaType + generateMessageIDV2() + '-original');
|
|
275
|
-
originalFileStream = createWriteStream(originalFilePath);
|
|
276
|
-
}
|
|
277
|
-
|
|
278
|
-
let fileLength = 0;
|
|
279
|
-
const aes = Crypto.createCipheriv('aes-256-cbc', cipherKey, iv);
|
|
280
|
-
const hmac = Crypto.createHmac('sha256', macKey).update(iv);
|
|
281
|
-
const sha256Plain = Crypto.createHash('sha256');
|
|
282
|
-
const sha256Enc = Crypto.createHash('sha256');
|
|
283
|
-
|
|
284
|
-
try {
|
|
285
|
-
for await (const data of finalStream) {
|
|
286
|
-
fileLength += data.length;
|
|
287
|
-
if (type === 'remote' && opts?.maxContentLength && fileLength > opts.maxContentLength) throw new Boom('content length exceeded', { data: { media, type } });
|
|
288
|
-
if (originalFileStream && !originalFileStream.write(data)) await once(originalFileStream, 'drain');
|
|
289
|
-
sha256Plain.update(data);
|
|
290
|
-
const encrypted = aes.update(data);
|
|
291
|
-
sha256Enc.update(encrypted);
|
|
292
|
-
hmac.update(encrypted);
|
|
293
|
-
encFileWriteStream.write(encrypted);
|
|
294
|
-
}
|
|
295
|
-
const finalData = aes.final();
|
|
296
|
-
sha256Enc.update(finalData);
|
|
297
|
-
hmac.update(finalData);
|
|
298
|
-
encFileWriteStream.write(finalData);
|
|
299
|
-
const mac = hmac.digest().slice(0, 10);
|
|
300
|
-
sha256Enc.update(mac);
|
|
301
|
-
encFileWriteStream.write(mac);
|
|
302
|
-
encFileWriteStream.end();
|
|
303
|
-
originalFileStream?.end?.();
|
|
304
|
-
finalStream.destroy();
|
|
305
|
-
logger?.debug('encrypted data successfully');
|
|
306
|
-
return { mediaKey, bodyPath: originalFilePath, encFilePath, mac, fileEncSha256: sha256Enc.digest(), fileSha256: sha256Plain.digest(), fileLength, opusConverted };
|
|
307
|
-
} catch (error) {
|
|
308
|
-
encFileWriteStream.destroy();
|
|
309
|
-
originalFileStream?.destroy?.();
|
|
310
|
-
aes.destroy();
|
|
311
|
-
hmac.destroy();
|
|
312
|
-
sha256Plain.destroy();
|
|
313
|
-
sha256Enc.destroy();
|
|
314
|
-
finalStream.destroy();
|
|
315
|
-
try { await fs.unlink(encFilePath); if (originalFilePath) await fs.unlink(originalFilePath); } catch (err) { logger?.error({ err }, 'failed deleting tmp files'); }
|
|
316
|
-
throw error;
|
|
317
|
-
}
|
|
318
|
-
};
|
|
319
|
-
|
|
320
|
-
const DEF_HOST = 'mmg.whatsapp.net';
|
|
321
|
-
const AES_CHUNK_SIZE = 16;
|
|
322
|
-
const toSmallestChunkSize = (num) => Math.floor(num / AES_CHUNK_SIZE) * AES_CHUNK_SIZE;
|
|
323
|
-
|
|
324
|
-
export const getUrlFromDirectPath = (directPath) => `https://${DEF_HOST}${directPath}`;
|
|
325
|
-
|
|
326
|
-
export const downloadContentFromMessage = async ({ mediaKey, directPath, url }, type, opts = {}) => {
|
|
327
|
-
const isValidMediaUrl = url?.startsWith('https://mmg.whatsapp.net/');
|
|
328
|
-
const downloadUrl = isValidMediaUrl ? url : getUrlFromDirectPath(directPath);
|
|
329
|
-
if (!downloadUrl) throw new Boom('No valid media URL or directPath present', { statusCode: 400 });
|
|
330
|
-
return downloadEncryptedContent(downloadUrl, await getMediaKeys(mediaKey, type), opts);
|
|
331
|
-
};
|
|
332
|
-
|
|
333
|
-
export const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startByte, endByte, options } = {}) => {
|
|
334
|
-
let bytesFetched = 0, startChunk = 0, firstBlockIsIV = false;
|
|
335
|
-
if (startByte) {
|
|
336
|
-
const chunk = toSmallestChunkSize(startByte || 0);
|
|
337
|
-
if (chunk) { startChunk = chunk - AES_CHUNK_SIZE; bytesFetched = chunk; firstBlockIsIV = true; }
|
|
338
|
-
}
|
|
339
|
-
const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined;
|
|
340
|
-
const headers = { ...(options?.headers ? (Array.isArray(options.headers) ? Object.fromEntries(options.headers) : options.headers) : {}), Origin: DEFAULT_ORIGIN };
|
|
341
|
-
if (startChunk || endChunk) headers.Range = `bytes=${startChunk}-${endChunk || ''}`;
|
|
342
|
-
|
|
343
|
-
const fetched = await getHttpStream(downloadUrl, { ...(options || {}), headers });
|
|
344
|
-
let remainingBytes = Buffer.from([]), aes;
|
|
345
|
-
|
|
346
|
-
const pushBytes = (bytes, push) => {
|
|
347
|
-
if (startByte || endByte) {
|
|
348
|
-
const start = bytesFetched >= startByte ? undefined : Math.max(startByte - bytesFetched, 0);
|
|
349
|
-
const end = bytesFetched + bytes.length < endByte ? undefined : Math.max(endByte - bytesFetched, 0);
|
|
350
|
-
push(bytes.slice(start, end));
|
|
351
|
-
bytesFetched += bytes.length;
|
|
352
|
-
} else {
|
|
353
|
-
push(bytes);
|
|
354
|
-
}
|
|
355
|
-
};
|
|
356
|
-
|
|
357
|
-
const output = new Transform({
|
|
358
|
-
transform(chunk, _, callback) {
|
|
359
|
-
let data = Buffer.concat([remainingBytes, chunk]);
|
|
360
|
-
const decryptLength = toSmallestChunkSize(data.length);
|
|
361
|
-
remainingBytes = data.slice(decryptLength);
|
|
362
|
-
data = data.slice(0, decryptLength);
|
|
363
|
-
if (!aes) {
|
|
364
|
-
let ivValue = iv;
|
|
365
|
-
if (firstBlockIsIV) { ivValue = data.slice(0, AES_CHUNK_SIZE); data = data.slice(AES_CHUNK_SIZE); }
|
|
366
|
-
aes = Crypto.createDecipheriv('aes-256-cbc', cipherKey, ivValue);
|
|
367
|
-
if (endByte) aes.setAutoPadding(false);
|
|
368
|
-
}
|
|
369
|
-
try { pushBytes(aes.update(data), b => this.push(b)); callback(); } catch (error) { callback(error); }
|
|
370
|
-
},
|
|
371
|
-
final(callback) {
|
|
372
|
-
try { pushBytes(aes.final(), b => this.push(b)); callback(); } catch (error) { callback(error); }
|
|
373
|
-
}
|
|
374
|
-
});
|
|
375
|
-
return fetched.pipe(output, { end: true });
|
|
376
|
-
};
|
|
377
|
-
|
|
378
|
-
export function extensionForMediaMessage(message) {
|
|
379
|
-
const getExtension = (mimetype) => mimetype.split(';')[0]?.split('/')[1];
|
|
380
|
-
const type = Object.keys(message)[0];
|
|
381
|
-
if (type === 'locationMessage' || type === 'liveLocationMessage' || type === 'productMessage') return '.jpeg';
|
|
382
|
-
return getExtension(message[type].mimetype);
|
|
383
|
-
}
|
|
384
|
-
|
|
385
|
-
export const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options }, refreshMediaConn) => {
|
|
386
|
-
return async (stream, { mediaType, fileEncSha256B64, newsletter, timeoutMs }) => {
|
|
387
|
-
// Accepts Buffer, file path, Node stream, Web ReadableStream, or async iterable.
|
|
388
|
-
// File paths are streamed directly from disk — no RAM cost for large files.
|
|
389
|
-
const toUploadBody = async (input) => {
|
|
390
|
-
if (!input) throw new Boom('Upload input is null or undefined', { statusCode: 400 });
|
|
391
|
-
if (Buffer.isBuffer(input)) return input;
|
|
392
|
-
if (typeof input === 'string') return createReadStream(input);
|
|
393
|
-
if (typeof ReadableStream !== 'undefined' && input instanceof ReadableStream) return Readable.fromWeb(input);
|
|
394
|
-
if (typeof input.pipe === 'function' || typeof input[Symbol.asyncIterator] === 'function') return input;
|
|
395
|
-
throw new Boom(`Unsupported upload input type: ${Object.prototype.toString.call(input)}`, { statusCode: 400 });
|
|
396
|
-
};
|
|
397
|
-
|
|
398
|
-
let reqBody;
|
|
399
|
-
try { reqBody = await toUploadBody(stream); }
|
|
400
|
-
catch (err) { logger?.error({ err: err.message }, 'failed to prepare upload body'); throw err; }
|
|
401
|
-
|
|
402
|
-
fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64);
|
|
403
|
-
|
|
404
|
-
let media = MEDIA_PATH_MAP[mediaType];
|
|
405
|
-
if (newsletter) media = media?.replace('/mms/', '/newsletter/newsletter-');
|
|
406
|
-
if (!media) throw new Boom(`No media path found for type: ${mediaType}`, { statusCode: 400 });
|
|
407
|
-
|
|
408
|
-
// Force-refresh auth upfront to avoid stale token failures
|
|
409
|
-
let uploadInfo = await refreshMediaConn(true);
|
|
410
|
-
const hosts = [...(customUploadHosts ?? []), ...(uploadInfo.hosts ?? [])];
|
|
411
|
-
if (!hosts.length) throw new Boom('No upload hosts available', { statusCode: 503 });
|
|
412
|
-
|
|
413
|
-
const MAX_RETRIES = 2;
|
|
414
|
-
let urls, lastError;
|
|
415
|
-
|
|
416
|
-
for (const { hostname, maxContentLengthBytes } of hosts) {
|
|
417
|
-
for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
|
|
418
|
-
try {
|
|
419
|
-
if (attempt > 1) {
|
|
420
|
-
uploadInfo = await refreshMediaConn(true);
|
|
421
|
-
reqBody = await toUploadBody(stream);
|
|
422
|
-
}
|
|
423
|
-
|
|
424
|
-
if (maxContentLengthBytes && Buffer.isBuffer(reqBody) && reqBody.length > maxContentLengthBytes) {
|
|
425
|
-
logger?.warn({ hostname, maxContentLengthBytes }, 'body too large for host, skipping');
|
|
426
|
-
break;
|
|
427
|
-
}
|
|
428
|
-
|
|
429
|
-
const auth = encodeURIComponent(uploadInfo.auth);
|
|
430
|
-
const url = `https://${hostname}${media}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`;
|
|
431
|
-
const controller = new AbortController();
|
|
432
|
-
const timer = timeoutMs ? setTimeout(() => controller.abort(), timeoutMs) : null;
|
|
433
|
-
|
|
434
|
-
let response;
|
|
435
|
-
try {
|
|
436
|
-
response = await fetch(url, {
|
|
437
|
-
dispatcher: fetchAgent,
|
|
438
|
-
method: 'POST',
|
|
439
|
-
body: reqBody,
|
|
440
|
-
headers: {
|
|
441
|
-
...(Array.isArray(options?.headers) ? Object.fromEntries(options.headers) : (options?.headers ?? {})),
|
|
442
|
-
'Content-Type': 'application/octet-stream',
|
|
443
|
-
Origin: DEFAULT_ORIGIN
|
|
444
|
-
},
|
|
445
|
-
duplex: 'half',
|
|
446
|
-
signal: controller.signal
|
|
447
|
-
});
|
|
448
|
-
} finally {
|
|
449
|
-
if (timer) clearTimeout(timer);
|
|
450
|
-
}
|
|
451
|
-
|
|
452
|
-
let result;
|
|
453
|
-
try { result = await response.json(); } catch { result = null; }
|
|
454
|
-
|
|
455
|
-
if (result?.url || result?.directPath) {
|
|
456
|
-
urls = { mediaUrl: result.url, directPath: result.direct_path, handle: result.handle };
|
|
457
|
-
break;
|
|
458
|
-
}
|
|
459
|
-
|
|
460
|
-
lastError = new Error(`${hostname} rejected upload (HTTP ${response.status}): ${JSON.stringify(result)}`);
|
|
461
|
-
logger?.warn({ hostname, attempt, status: response.status, result }, 'upload rejected');
|
|
462
|
-
|
|
463
|
-
} catch (err) {
|
|
464
|
-
lastError = err;
|
|
465
|
-
logger?.warn({ hostname, attempt, err: err.message, timedOut: err.name === 'AbortError' }, 'upload attempt failed');
|
|
466
|
-
if (attempt < MAX_RETRIES) await new Promise(r => setTimeout(r, 500 * attempt));
|
|
467
|
-
}
|
|
468
|
-
}
|
|
469
|
-
if (urls) break;
|
|
470
|
-
}
|
|
471
|
-
|
|
472
|
-
if (!urls) {
|
|
473
|
-
const msg = `Media upload failed on all hosts. Last error: ${lastError?.message ?? 'unknown'}`;
|
|
474
|
-
logger?.error({ hosts: hosts.map(h => h.hostname), lastError: lastError?.message }, msg);
|
|
475
|
-
throw new Boom(msg, { statusCode: 500, data: { lastError: lastError?.message } });
|
|
476
|
-
}
|
|
477
|
-
|
|
478
|
-
return urls;
|
|
479
|
-
};
|
|
480
|
-
};
|
|
481
|
-
|
|
482
|
-
const getMediaRetryKey = (mediaKey) => hkdf(mediaKey, 32, { info: 'WhatsApp Media Retry Notification' });
|
|
483
|
-
|
|
484
|
-
export const encryptMediaRetryRequest = async (key, mediaKey, meId) => {
|
|
485
|
-
const recp = { stanzaId: key.id };
|
|
486
|
-
const recpBuffer = proto.ServerErrorReceipt.encode(recp).finish();
|
|
487
|
-
const iv = Crypto.randomBytes(12);
|
|
488
|
-
const retryKey = await getMediaRetryKey(mediaKey);
|
|
489
|
-
const ciphertext = aesEncryptGCM(recpBuffer, retryKey, iv, Buffer.from(key.id));
|
|
490
|
-
return {
|
|
491
|
-
tag: 'receipt',
|
|
492
|
-
attrs: { id: key.id, to: jidNormalizedUser(meId), type: 'server-error' },
|
|
493
|
-
content: [
|
|
494
|
-
{
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
const
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
}
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
const
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
[proto.MediaRetryNotification.ResultType.
|
|
532
|
-
[proto.MediaRetryNotification.ResultType.
|
|
1
|
+
import { Boom } from '@hapi/boom';
|
|
2
|
+
import { exec } from 'child_process';
|
|
3
|
+
import * as Crypto from 'crypto';
|
|
4
|
+
import { once } from 'events';
|
|
5
|
+
import { createReadStream, createWriteStream, promises as fs } from 'fs';
|
|
6
|
+
import { tmpdir } from 'os';
|
|
7
|
+
import { join } from 'path';
|
|
8
|
+
import { Readable, Transform } from 'stream';
|
|
9
|
+
import { URL } from 'url';
|
|
10
|
+
import { proto } from '../../WAProto/index.js';
|
|
11
|
+
import { DEFAULT_ORIGIN, MEDIA_HKDF_KEY_MAPPING, MEDIA_PATH_MAP } from '../Defaults/index.js';
|
|
12
|
+
import { getBinaryNodeChild, getBinaryNodeChildBuffer, jidNormalizedUser } from '../WABinary/index.js';
|
|
13
|
+
import { aesDecryptGCM, aesEncryptGCM, hkdf } from './crypto.js';
|
|
14
|
+
import { generateMessageIDV2 } from './generics.js';
|
|
15
|
+
|
|
16
|
+
export const getImageProcessingLibrary = async () => {
    // Probe both optional image libraries concurrently; a failed import
    // simply resolves to null instead of rejecting.
    const [jimpModule, sharpModule] = await Promise.all([
        import('jimp').catch(() => null),
        import('sharp').catch(() => null)
    ]);
    // Prefer sharp when installed (native, faster); fall back to jimp.
    if (sharpModule) {
        return { sharp: sharpModule };
    }
    if (jimpModule) {
        return { jimp: jimpModule };
    }
    throw new Boom('No image processing library available');
};
|
|
25
|
+
|
|
26
|
+
export const hkdfInfoKey = (type) => {
    // HKDF "info" label WhatsApp uses when deriving keys for this media type.
    const label = MEDIA_HKDF_KEY_MAPPING[type];
    return `WhatsApp ${label} Keys`;
};
|
|
27
|
+
|
|
28
|
+
// Streams media to a temp file while computing its SHA-256 and length in a
// single pass, so nothing is held fully in memory.
// Returns { filePath, fileSha256, fileLength }; the caller owns (and must
// delete) the temp file. On any failure the temp file is removed best-effort
// and the error is rethrown.
export const getRawMediaUploadData = async (media, mediaType, logger) => {
    const { stream } = await getStream(media);
    const hasher = Crypto.createHash('sha256');
    const filePath = join(tmpdir(), mediaType + generateMessageIDV2());
    const fileWriteStream = createWriteStream(filePath);
    let fileLength = 0;
    try {
        for await (const data of stream) {
            fileLength += data.length;
            hasher.update(data);
            // Respect backpressure: wait for 'drain' when the write buffer is full.
            if (!fileWriteStream.write(data)) await once(fileWriteStream, 'drain');
        }
        fileWriteStream.end();
        // Ensure everything is flushed to disk before reporting the path.
        await once(fileWriteStream, 'finish');
        stream.destroy();
        logger?.debug('hashed data for raw upload');
        return { filePath, fileSha256: hasher.digest(), fileLength };
    } catch (error) {
        // Tear down both streams and remove the partial temp file (best-effort).
        fileWriteStream.destroy();
        stream.destroy();
        try { await fs.unlink(filePath); } catch { }
        throw error;
    }
};
|
|
52
|
+
|
|
53
|
+
/**
 * Expands a 32-byte media key into the IV, cipher key and MAC key used for
 * WhatsApp media encryption. Accepts a Buffer or a (possibly data-URI
 * prefixed) base64 string.
 */
export async function getMediaKeys(buffer, mediaType) {
    if (!buffer) {
        throw new Boom('Cannot derive from empty media key');
    }
    // String keys are base64 (optionally carrying a data-URI prefix).
    const keyMaterial = typeof buffer === 'string'
        ? Buffer.from(buffer.replace('data:;base64,', ''), 'base64')
        : buffer;
    // NOTE(review): hkdf is awaited at other call sites but not here —
    // presumably the local implementation is synchronous; confirm.
    const expanded = hkdf(keyMaterial, 112, { info: hkdfInfoKey(mediaType) });
    const iv = expanded.slice(0, 16);
    const cipherKey = expanded.slice(16, 48);
    const macKey = expanded.slice(48, 80);
    return { iv, cipherKey, macKey };
}
|
|
63
|
+
|
|
64
|
+
// Extracts a single frame from a video at `time` and writes it to `destPath`
// as a JPEG scaled to `size.width` (height preserved by aspect ratio).
// Fix: the input and output paths are now quoted — previously a path
// containing spaces or shell metacharacters broke the command (and allowed
// shell injection through attacker-controlled file names).
const extractVideoThumb = (path, destPath, time, size) => new Promise((resolve, reject) => {
    const cmd = `ffmpeg -ss ${time} -i "${path}" -y -vf scale=${size.width}:-1 -vframes 1 -f image2 "${destPath}"`;
    exec(cmd, err => err ? reject(err) : resolve());
});
|
|
67
|
+
|
|
68
|
+
/**
 * Generates a JPEG thumbnail (default 32px wide) from a buffer, file path or
 * readable stream, using whichever image library is installed.
 * Returns { buffer, original: { width, height } }.
 */
export const extractImageThumb = async (bufferOrFilePath, width = 32) => {
    // Streams are materialised to a buffer first.
    if (bufferOrFilePath instanceof Readable) {
        bufferOrFilePath = await toBuffer(bufferOrFilePath);
    }
    const lib = await getImageProcessingLibrary();
    if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
        const image = lib.sharp.default(bufferOrFilePath);
        const dimensions = await image.metadata();
        const thumb = await image.resize(width).jpeg({ quality: 95 }).toBuffer();
        return {
            buffer: thumb,
            original: { width: dimensions.width, height: dimensions.height }
        };
    } else if ('jimp' in lib && typeof lib.jimp?.Jimp === 'object') {
        const img = await lib.jimp.Jimp.read(bufferOrFilePath);
        const thumb = await img
            .resize({ w: width, mode: lib.jimp.ResizeStrategy.BILINEAR })
            .getBuffer('image/jpeg', { quality: 95 });
        return { buffer: thumb, original: { width: img.width, height: img.height } };
    }
    throw new Boom('No image processing library available');
};
|
|
83
|
+
|
|
84
|
+
export const encodeBase64EncodedStringForUpload = (b64) => {
    // Convert standard base64 to the URL-safe alphabet, drop the '='
    // padding, then percent-encode for embedding in the upload URL.
    const urlSafe = b64
        .replace(/\+/g, '-')
        .replace(/\//g, '_')
        .replace(/=+$/, '');
    return encodeURIComponent(urlSafe);
};
|
|
85
|
+
|
|
86
|
+
// Produces the JPEG used as a profile picture ({ img: Buffer }) from a
// Buffer, { url }, or { stream } input, via sharp or jimp.
export const generateProfilePicture = async (mediaUpload) => {
    let bufferOrFilePath = Buffer.isBuffer(mediaUpload) ? mediaUpload : 'url' in mediaUpload ? mediaUpload.url.toString() : await toBuffer(mediaUpload.stream);
    const lib = await getImageProcessingLibrary();
    if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
        // Fit inside 720x720, heavy compression (quality 50).
        const img = await lib.sharp.default(bufferOrFilePath).resize(720, 720, { fit: 'inside' }).jpeg({ quality: 50 }).toBuffer();
        return { img };
    } else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
        // NOTE(review): this branch expects the legacy jimp API (read /
        // getWidth / getBufferAsync) while extractImageThumb uses the v1 API
        // (Jimp.read / getBuffer) — confirm which jimp version is supported.
        const { read, MIME_JPEG } = lib.jimp;
        const image = await read(bufferOrFilePath);
        // NOTE(review): `min` is the width and `max` the height, so this crop
        // covers the whole image (a no-op) — presumably a square crop was
        // intended; verify against the upstream implementation.
        const min = image.getWidth(), max = image.getHeight();
        const img = await image.crop(0, 0, min, max).scaleToFit(720, 720).getBufferAsync(MIME_JPEG);
        return { img };
    }
    throw new Boom('No image processing library available');
};
|
|
101
|
+
|
|
102
|
+
export const mediaMessageSHA256B64 = (message) => {
    // A media message wrapper holds exactly one payload; take it.
    const [media] = Object.values(message);
    const sha = media?.fileSha256;
    // Falsy when absent (mirrors the payload's missing-hash value).
    return sha && Buffer.from(sha).toString('base64');
};
|
|
106
|
+
|
|
107
|
+
/**
 * Returns the duration (seconds) of an audio input, which may be a Buffer,
 * a file path string, or a readable stream.
 */
export async function getAudioDuration(buffer) {
    // Loaded lazily so music-metadata stays an optional dependency.
    const mm = await import('music-metadata');
    if (Buffer.isBuffer(buffer)) {
        const { format } = await mm.parseBuffer(buffer, undefined, { duration: true });
        return format.duration;
    }
    if (typeof buffer === 'string') {
        const { format } = await mm.parseFile(buffer, { duration: true });
        return format.duration;
    }
    const { format } = await mm.parseStream(buffer, undefined, { duration: true });
    return format.duration;
}
|
|
113
|
+
|
|
114
|
+
/**
 * Computes a 64-bucket amplitude waveform (values 0–100) for an audio input.
 * Falls back to a fixed decorative waveform when decoding fails.
 */
export async function getAudioWaveform(buffer, logger) {
    try {
        const { default: decodeAudio } = await import('audio-decode');
        // Normalise buffer / file path / stream inputs to a single Buffer.
        let raw;
        if (Buffer.isBuffer(buffer)) {
            raw = buffer;
        } else if (typeof buffer === 'string') {
            raw = await toBuffer(createReadStream(buffer));
        } else {
            raw = await toBuffer(buffer);
        }
        const decoded = await decodeAudio(raw);
        const channel = decoded.getChannelData(0);
        // Downsample: average the absolute amplitude over 64 equal blocks.
        const samples = 64;
        const blockSize = Math.floor(channel.length / samples);
        const averages = [];
        for (let bucket = 0; bucket < samples; bucket++) {
            let total = 0;
            for (let offset = 0; offset < blockSize; offset++) {
                total += Math.abs(channel[bucket * blockSize + offset]);
            }
            averages.push(total / blockSize);
        }
        // Scale so the loudest bucket maps to 100.
        const scale = Math.pow(Math.max(...averages), -1);
        return new Uint8Array(averages.map(v => Math.floor(100 * v * scale)));
    } catch (e) {
        logger?.debug('Failed to generate waveform: ' + e);
        return new Uint8Array([0, 99, 0, 99, 0, 99, 0, 99, 88, 99, 0, 99, 0, 55, 0, 99, 0, 99, 0, 99, 0, 99, 0, 99, 88, 99, 0, 99, 0, 55, 0, 99]);
    }
}
|
|
134
|
+
|
|
135
|
+
// Converts an audio buffer to Ogg/Opus (voice-note settings) by piping it
// through ffmpeg. Resolves with the converted buffer, rejects on a non-zero
// exit or spawn failure.
// Fixes: (1) an 'error' handler on stdin — if ffmpeg exits early (bad input,
// missing binary) the stdin write raised an unhandled EPIPE that crashed the
// process; (2) stderr is now captured and logged so failures are diagnosable
// instead of being silently discarded.
const convertToOpusBuffer = (buffer, logger) => new Promise((resolve, reject) => {
    const ffmpeg = exec('ffmpeg -i pipe:0 -c:a libopus -b:a 64k -vbr on -compression_level 10 -frame_duration 20 -application voip -f ogg pipe:1');
    const chunks = [];
    const stderrChunks = [];
    // Swallow EPIPE so the 'close' handler reports the real failure.
    ffmpeg.stdin.on('error', () => { });
    ffmpeg.stdin.write(buffer);
    ffmpeg.stdin.end();
    ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
    ffmpeg.stderr.on('data', chunk => stderrChunks.push(chunk));
    ffmpeg.on('close', code => {
        if (code === 0) {
            resolve(Buffer.concat(chunks));
        } else {
            const detail = Buffer.concat(stderrChunks).toString().slice(-512);
            logger?.debug({ detail }, 'ffmpeg opus conversion failed');
            reject(new Error(`FFmpeg Opus conversion exited with code ${code}`));
        }
    });
    ffmpeg.on('error', reject);
});
|
|
145
|
+
|
|
146
|
+
// Converts a video buffer to MP4 (H.264 + AAC, faststart) via ffmpeg pipes.
// Fixes mirror convertToOpusBuffer: stdin gets an 'error' handler so an early
// ffmpeg exit no longer raises an unhandled EPIPE, and stderr is captured for
// diagnostics instead of being discarded.
const convertToMp4Buffer = (buffer, logger) => new Promise((resolve, reject) => {
    const ffmpeg = exec('ffmpeg -i pipe:0 -c:v libx264 -preset veryfast -crf 23 -c:a aac -b:a 128k -movflags faststart -f mp4 pipe:1');
    const chunks = [];
    const stderrChunks = [];
    // Swallow EPIPE so the 'close' handler reports the real failure.
    ffmpeg.stdin.on('error', () => { });
    ffmpeg.stdin.write(buffer);
    ffmpeg.stdin.end();
    ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
    ffmpeg.stderr.on('data', chunk => stderrChunks.push(chunk));
    ffmpeg.on('close', code => {
        if (code === 0) {
            resolve(Buffer.concat(chunks));
        } else {
            const detail = Buffer.concat(stderrChunks).toString().slice(-512);
            logger?.debug({ detail }, 'ffmpeg mp4 conversion failed');
            reject(new Error(`FFmpeg MP4 conversion exited with code ${code}`));
        }
    });
    ffmpeg.on('error', reject);
});
|
|
156
|
+
|
|
157
|
+
export const toReadable = (buffer) => {
    // One-shot stream: emit the whole buffer as a single chunk, then EOF.
    const stream = new Readable({ read: () => { } });
    stream.push(buffer);
    stream.push(null);
    return stream;
};
|
|
163
|
+
|
|
164
|
+
export const toBuffer = async (stream) => {
    // Drain the stream fully, then release the underlying resource.
    const collected = [];
    for await (const piece of stream) {
        collected.push(piece);
    }
    stream.destroy();
    return Buffer.concat(collected);
};
|
|
170
|
+
|
|
171
|
+
// Normalises any supported media input into a Node readable stream.
// Returns { stream, type } where type is one of:
//   'buffer'   - input was already fully in memory
//   'readable' - input was (or wrapped) a stream
//   'remote'   - fetched over HTTP(S)
//   'file'     - read from the local filesystem
export const getStream = async (item, opts) => {
    if (!item) throw new Boom('Item is required for getStream', { statusCode: 400 });
    if (Buffer.isBuffer(item)) return { stream: toReadable(item), type: 'buffer' };
    // { stream } wrapper, or a bare readable (duck-typed via .pipe).
    if (item?.stream?.pipe) return { stream: item.stream, type: 'readable' };
    if (item?.pipe) return { stream: item, type: 'readable' };
    if (item && typeof item === 'object' && 'url' in item) {
        const urlStr = item.url.toString();
        // A Buffer stored under `url` is treated as in-memory data.
        if (Buffer.isBuffer(item.url)) return { stream: toReadable(item.url), type: 'buffer' };
        // data: URI → decode the base64 payload after the comma.
        if (urlStr.startsWith('data:')) return { stream: toReadable(Buffer.from(urlStr.split(',')[1], 'base64')), type: 'buffer' };
        if (urlStr.startsWith('http')) return { stream: await getHttpStream(item.url, opts), type: 'remote' };
        // Anything else is assumed to be a local file path.
        return { stream: createReadStream(item.url), type: 'file' };
    }
    // Bare strings follow the same data:/http/file-path dispatch.
    if (typeof item === 'string') {
        if (item.startsWith('data:')) return { stream: toReadable(Buffer.from(item.split(',')[1], 'base64')), type: 'buffer' };
        if (item.startsWith('http')) return { stream: await getHttpStream(item, opts), type: 'remote' };
        return { stream: createReadStream(item), type: 'file' };
    }
    throw new Boom(`Invalid input type for getStream: ${typeof item}`, { statusCode: 400 });
};
|
|
190
|
+
|
|
191
|
+
// Produces a base64 JPEG thumbnail for image/video media; other media types
// yield { thumbnail: undefined }. Image thumbs also report the original
// dimensions when available.
export async function generateThumbnail(file, mediaType, options) {
    let thumbnail, originalImageDimensions;
    if (mediaType === 'image') {
        const { buffer, original } = await extractImageThumb(file);
        thumbnail = buffer.toString('base64');
        if (original.width && original.height) originalImageDimensions = original;
    } else if (mediaType === 'video') {
        const imgFilename = join(tmpdir(), generateMessageIDV2() + '.jpg');
        try {
            // Grab the first frame at 32px wide via ffmpeg, then clean up.
            await extractVideoThumb(file, imgFilename, '00:00:00', { width: 32, height: 32 });
            thumbnail = (await fs.readFile(imgFilename)).toString('base64');
            await fs.unlink(imgFilename);
        } catch (err) {
            // Best-effort: a missing ffmpeg or bad video just means no thumbnail.
            options.logger?.debug('could not generate video thumb: ' + err);
        }
    }
    return { thumbnail, originalImageDimensions };
}
|
|
209
|
+
|
|
210
|
+
// Fetches a URL and returns its body as a Node readable stream, adapting a
// Web ReadableStream (global fetch) or passing through a Node readable
// (custom dispatchers).
// Fix: on a non-OK response the body is now cancelled before throwing —
// undici keeps the connection allocated until the body is consumed or
// cancelled, so the previous code leaked connections on every failed fetch.
export const getHttpStream = async (url, options = {}) => {
    const response = await fetch(url.toString(), { dispatcher: options.dispatcher, method: 'GET', headers: options.headers });
    if (!response.ok) {
        // Release the underlying connection; cancel failures are irrelevant
        // since we are about to throw anyway.
        try { await response.body?.cancel(); } catch { }
        throw new Boom(`Failed to fetch stream from ${url}`, { statusCode: response.status, data: { url } });
    }
    const body = response.body;
    // Web ReadableStream → adapt to a Node stream.
    if (body && typeof body === 'object' && 'pipeTo' in body && typeof body.pipeTo === 'function') return Readable.fromWeb(body);
    // Already a Node readable.
    if (body && typeof body.pipe === 'function' && typeof body.read === 'function') return body;
    throw new Error('Response body is not a readable stream');
};
|
|
218
|
+
|
|
219
|
+
// Materialises media into a plain (unencrypted) buffer, optionally converting
// video to MP4, and computes its SHA-256 — used for uploads that need the raw
// payload. When saveOriginalFileIfRequired is set (and the input was not
// already a file), the buffer is also written to a temp file the caller must
// delete.
export const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts, convertVideo } = {}) => {
    const { stream, type } = await getStream(media, opts);
    logger?.debug('fetched media stream');
    let buffer = await toBuffer(stream);
    if (mediaType === 'video' && convertVideo) {
        // Best-effort conversion; on failure the original bytes are kept.
        try { buffer = await convertToMp4Buffer(buffer, logger); logger?.debug('converted video to mp4 for newsletter'); }
        catch (e) { logger?.error('failed to convert video for newsletter:', e); }
    }
    let bodyPath, didSaveToTmpPath = false;
    try {
        // File inputs reuse their own path; otherwise optionally persist a copy.
        if (type === 'file') bodyPath = media.url;
        else if (saveOriginalFileIfRequired) {
            bodyPath = join(tmpdir(), mediaType + generateMessageIDV2());
            await fs.writeFile(bodyPath, buffer);
            didSaveToTmpPath = true;
        }
        // NOTE(review): `encWriteStream` carries the *plain* buffer here; the
        // key name mirrors encryptedStream's return shape for callers.
        return { mediaKey: undefined, encWriteStream: buffer, fileLength: buffer.length, fileSha256: Crypto.createHash('sha256').update(buffer).digest(), fileEncSha256: undefined, bodyPath, didSaveToTmpPath };
    } catch (error) {
        // Remove the temp copy if we created one before failing.
        if (didSaveToTmpPath && bodyPath) try { await fs.unlink(bodyPath); } catch { }
        throw error;
    }
};
|
|
241
|
+
|
|
242
|
+
// Encrypts media for upload in a single streaming pass:
//   - optional Opus (voice note) / MP4 conversion first,
//   - AES-256-CBC encryption with keys derived from `mediaKey`,
//   - HMAC-SHA256 over IV + ciphertext (first 10 bytes appended as the MAC),
//   - SHA-256 of both plaintext and ciphertext(+MAC).
// The ciphertext is written to a temp file (encFilePath); when
// saveOriginalFileIfRequired is set the plaintext is also saved (bodyPath).
// The caller owns both temp files. The update order of aes/hmac/hashes below
// is part of the wire format — do not reorder.
export const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts, mediaKey: providedMediaKey, isPtt, forceOpus, convertVideo } = {}) => {
    const { stream, type } = await getStream(media, opts);
    let finalStream = stream, opusConverted = false;

    // Voice notes are transcoded to Opus; on failure fall back to a fresh
    // copy of the original stream (the first one was consumed by toBuffer).
    if (mediaType === 'audio' && (isPtt === true || forceOpus === true)) {
        try {
            finalStream = toReadable(await convertToOpusBuffer(await toBuffer(stream), logger));
            opusConverted = true;
            logger?.debug('converted audio to Opus');
        } catch (error) {
            logger?.error('failed to convert audio to Opus, using original');
            finalStream = (await getStream(media, opts)).stream;
        }
    }

    // Same best-effort pattern for MP4 conversion.
    if (mediaType === 'video' && convertVideo === true) {
        try {
            finalStream = toReadable(await convertToMp4Buffer(await toBuffer(finalStream), logger));
            logger?.debug('converted video to mp4');
        } catch (error) {
            logger?.error('failed to convert video to mp4, using original');
            finalStream = (await getStream(media, opts)).stream;
        }
    }

    const mediaKey = providedMediaKey || Crypto.randomBytes(32);
    const { cipherKey, iv, macKey } = await getMediaKeys(mediaKey, mediaType);
    const encFilePath = join(tmpdir(), mediaType + generateMessageIDV2() + '-enc');
    const encFileWriteStream = createWriteStream(encFilePath);
    let originalFileStream, originalFilePath;

    if (saveOriginalFileIfRequired) {
        originalFilePath = join(tmpdir(), mediaType + generateMessageIDV2() + '-original');
        originalFileStream = createWriteStream(originalFilePath);
    }

    let fileLength = 0;
    const aes = Crypto.createCipheriv('aes-256-cbc', cipherKey, iv);
    // The HMAC is seeded with the IV before any ciphertext.
    const hmac = Crypto.createHmac('sha256', macKey).update(iv);
    const sha256Plain = Crypto.createHash('sha256');
    const sha256Enc = Crypto.createHash('sha256');

    try {
        for await (const data of finalStream) {
            fileLength += data.length;
            // Enforce the remote size cap while streaming, not after.
            if (type === 'remote' && opts?.maxContentLength && fileLength > opts.maxContentLength) throw new Boom('content length exceeded', { data: { media, type } });
            if (originalFileStream && !originalFileStream.write(data)) await once(originalFileStream, 'drain');
            sha256Plain.update(data);
            const encrypted = aes.update(data);
            sha256Enc.update(encrypted);
            hmac.update(encrypted);
            encFileWriteStream.write(encrypted);
        }
        // Flush the final (padded) cipher block through the same pipeline.
        const finalData = aes.final();
        sha256Enc.update(finalData);
        hmac.update(finalData);
        encFileWriteStream.write(finalData);
        // First 10 bytes of the HMAC are appended to the ciphertext and are
        // included in fileEncSha256.
        const mac = hmac.digest().slice(0, 10);
        sha256Enc.update(mac);
        encFileWriteStream.write(mac);
        encFileWriteStream.end();
        originalFileStream?.end?.();
        finalStream.destroy();
        logger?.debug('encrypted data successfully');
        return { mediaKey, bodyPath: originalFilePath, encFilePath, mac, fileEncSha256: sha256Enc.digest(), fileSha256: sha256Plain.digest(), fileLength, opusConverted };
    } catch (error) {
        // Tear everything down and remove partial temp files (best-effort).
        encFileWriteStream.destroy();
        originalFileStream?.destroy?.();
        aes.destroy();
        hmac.destroy();
        sha256Plain.destroy();
        sha256Enc.destroy();
        finalStream.destroy();
        try { await fs.unlink(encFilePath); if (originalFilePath) await fs.unlink(originalFilePath); } catch (err) { logger?.error({ err }, 'failed deleting tmp files'); }
        throw error;
    }
};
|
|
319
|
+
|
|
320
|
+
// WhatsApp media CDN host, used when only a direct path is available.
const DEF_HOST = 'mmg.whatsapp.net';
// AES block size in bytes; ranged downloads are aligned to this.
const AES_CHUNK_SIZE = 16;
// Rounds `num` down to the nearest AES block boundary.
const toSmallestChunkSize = (num) => Math.floor(num / AES_CHUNK_SIZE) * AES_CHUNK_SIZE;

// Builds a full CDN URL from a WhatsApp "direct path".
export const getUrlFromDirectPath = (directPath) => `https://${DEF_HOST}${directPath}`;
|
|
325
|
+
|
|
326
|
+
/**
 * Downloads and decrypts a media message's content, returning the decrypted
 * readable stream from downloadEncryptedContent.
 */
export const downloadContentFromMessage = async ({ mediaKey, directPath, url }, type, opts = {}) => {
    // Trust the embedded URL only when it points at WhatsApp's media CDN;
    // otherwise rebuild it from the direct path.
    const isValidMediaUrl = url?.startsWith('https://mmg.whatsapp.net/');
    const downloadUrl = isValidMediaUrl ? url : getUrlFromDirectPath(directPath);
    if (!downloadUrl) {
        throw new Boom('No valid media URL or directPath present', { statusCode: 400 });
    }
    const keys = await getMediaKeys(mediaKey, type);
    return downloadEncryptedContent(downloadUrl, keys, opts);
};
|
|
332
|
+
|
|
333
|
+
// Streams an encrypted WhatsApp media blob and decrypts it on the fly
// (AES-256-CBC). Supports ranged downloads: the requested byte range is
// widened to AES block boundaries and the block *before* the range is also
// fetched, since in CBC it acts as the IV for the first decrypted block.
// Returns a Transform stream emitting only the bytes the caller asked for.
export const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startByte, endByte, options } = {}) => {
    let bytesFetched = 0, startChunk = 0, firstBlockIsIV = false;
    if (startByte) {
        const chunk = toSmallestChunkSize(startByte || 0);
        // Fetch one extra block before the range to serve as the CBC IV.
        if (chunk) { startChunk = chunk - AES_CHUNK_SIZE; bytesFetched = chunk; firstBlockIsIV = true; }
    }
    const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined;
    const headers = { ...(options?.headers ? (Array.isArray(options.headers) ? Object.fromEntries(options.headers) : options.headers) : {}), Origin: DEFAULT_ORIGIN };
    if (startChunk || endChunk) headers.Range = `bytes=${startChunk}-${endChunk || ''}`;

    const fetched = await getHttpStream(downloadUrl, { ...(options || {}), headers });
    let remainingBytes = Buffer.from([]), aes;

    // Trims decrypted output back to the exact [startByte, endByte) window.
    const pushBytes = (bytes, push) => {
        if (startByte || endByte) {
            const start = bytesFetched >= startByte ? undefined : Math.max(startByte - bytesFetched, 0);
            const end = bytesFetched + bytes.length < endByte ? undefined : Math.max(endByte - bytesFetched, 0);
            push(bytes.slice(start, end));
            bytesFetched += bytes.length;
        } else {
            push(bytes);
        }
    };

    const output = new Transform({
        transform(chunk, _, callback) {
            // Carry partial AES blocks over between network chunks.
            let data = Buffer.concat([remainingBytes, chunk]);
            const decryptLength = toSmallestChunkSize(data.length);
            remainingBytes = data.slice(decryptLength);
            data = data.slice(0, decryptLength);
            if (!aes) {
                let ivValue = iv;
                // Ranged fetch: the first block is the previous ciphertext
                // block, consumed here as the IV rather than emitted.
                if (firstBlockIsIV) { ivValue = data.slice(0, AES_CHUNK_SIZE); data = data.slice(AES_CHUNK_SIZE); }
                aes = Crypto.createDecipheriv('aes-256-cbc', cipherKey, ivValue);
                // A mid-file range does not end on the padding block, so
                // padding must not be stripped.
                if (endByte) aes.setAutoPadding(false);
            }
            try { pushBytes(aes.update(data), b => this.push(b)); callback(); } catch (error) { callback(error); }
        },
        final(callback) {
            try { pushBytes(aes.final(), b => this.push(b)); callback(); } catch (error) { callback(error); }
        }
    });
    return fetched.pipe(output, { end: true });
};
|
|
377
|
+
|
|
378
|
+
// Derives a file extension for a media message: location/product previews are
// always '.jpeg'; otherwise the mimetype subtype is used ("image/png" → "png",
// parameters after ';' ignored).
// Fix: a media payload without a mimetype previously threw
// `TypeError: Cannot read properties of undefined (reading 'split')`; it now
// returns undefined.
export function extensionForMediaMessage(message) {
    const getExtension = (mimetype) => mimetype.split(';')[0]?.split('/')[1];
    const type = Object.keys(message)[0];
    if (type === 'locationMessage' || type === 'liveLocationMessage' || type === 'productMessage') return '.jpeg';
    const mimetype = message[type]?.mimetype;
    return mimetype ? getExtension(mimetype) : undefined;
}
|
|
384
|
+
|
|
385
|
+
// Factory returning the media-upload function. The returned async function
// POSTs an encrypted payload to WhatsApp's upload hosts, trying each host up
// to MAX_RETRIES times (refreshing auth between attempts) and returning
// { mediaUrl, directPath, handle } from the first host that accepts it.
export const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options }, refreshMediaConn) => {
    return async (stream, { mediaType, fileEncSha256B64, newsletter, timeoutMs }) => {
        // Accepts Buffer, file path, Node stream, Web ReadableStream, or async iterable.
        // File paths are streamed directly from disk — no RAM cost for large files.
        const toUploadBody = async (input) => {
            if (!input) throw new Boom('Upload input is null or undefined', { statusCode: 400 });
            if (Buffer.isBuffer(input)) return input;
            if (typeof input === 'string') return createReadStream(input);
            if (typeof ReadableStream !== 'undefined' && input instanceof ReadableStream) return Readable.fromWeb(input);
            if (typeof input.pipe === 'function' || typeof input[Symbol.asyncIterator] === 'function') return input;
            throw new Boom(`Unsupported upload input type: ${Object.prototype.toString.call(input)}`, { statusCode: 400 });
        };

        let reqBody;
        try { reqBody = await toUploadBody(stream); }
        catch (err) { logger?.error({ err: err.message }, 'failed to prepare upload body'); throw err; }

        // The base64 hash doubles as both path segment and token.
        fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64);

        let media = MEDIA_PATH_MAP[mediaType];
        // Newsletter uploads use a different path prefix on the same hosts.
        if (newsletter) media = media?.replace('/mms/', '/newsletter/newsletter-');
        if (!media) throw new Boom(`No media path found for type: ${mediaType}`, { statusCode: 400 });

        // Force-refresh auth upfront to avoid stale token failures
        let uploadInfo = await refreshMediaConn(true);
        // Caller-supplied hosts take priority over the server-provided list.
        const hosts = [...(customUploadHosts ?? []), ...(uploadInfo.hosts ?? [])];
        if (!hosts.length) throw new Boom('No upload hosts available', { statusCode: 503 });

        const MAX_RETRIES = 2;
        let urls, lastError;

        for (const { hostname, maxContentLengthBytes } of hosts) {
            for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
                try {
                    if (attempt > 1) {
                        // Fresh auth and a fresh body for the retry.
                        // NOTE(review): for Node-stream / iterable inputs,
                        // toUploadBody returns the same (already consumed)
                        // object — only Buffer and file-path inputs are truly
                        // retryable; confirm upstream intent.
                        uploadInfo = await refreshMediaConn(true);
                        reqBody = await toUploadBody(stream);
                    }

                    // Size cap can only be checked for in-memory bodies.
                    if (maxContentLengthBytes && Buffer.isBuffer(reqBody) && reqBody.length > maxContentLengthBytes) {
                        logger?.warn({ hostname, maxContentLengthBytes }, 'body too large for host, skipping');
                        break;
                    }

                    const auth = encodeURIComponent(uploadInfo.auth);
                    const url = `https://${hostname}${media}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`;
                    // Optional per-request timeout via AbortController.
                    const controller = new AbortController();
                    const timer = timeoutMs ? setTimeout(() => controller.abort(), timeoutMs) : null;

                    let response;
                    try {
                        response = await fetch(url, {
                            dispatcher: fetchAgent,
                            method: 'POST',
                            body: reqBody,
                            headers: {
                                ...(Array.isArray(options?.headers) ? Object.fromEntries(options.headers) : (options?.headers ?? {})),
                                'Content-Type': 'application/octet-stream',
                                Origin: DEFAULT_ORIGIN
                            },
                            // Required by fetch for streaming request bodies.
                            duplex: 'half',
                            signal: controller.signal
                        });
                    } finally {
                        if (timer) clearTimeout(timer);
                    }

                    // Non-JSON responses are treated as a rejection below.
                    let result;
                    try { result = await response.json(); } catch { result = null; }

                    if (result?.url || result?.directPath) {
                        urls = { mediaUrl: result.url, directPath: result.direct_path, handle: result.handle };
                        break;
                    }

                    lastError = new Error(`${hostname} rejected upload (HTTP ${response.status}): ${JSON.stringify(result)}`);
                    logger?.warn({ hostname, attempt, status: response.status, result }, 'upload rejected');

                } catch (err) {
                    lastError = err;
                    logger?.warn({ hostname, attempt, err: err.message, timedOut: err.name === 'AbortError' }, 'upload attempt failed');
                    // Linear backoff between attempts on the same host.
                    if (attempt < MAX_RETRIES) await new Promise(r => setTimeout(r, 500 * attempt));
                }
            }
            if (urls) break;
        }

        if (!urls) {
            const msg = `Media upload failed on all hosts. Last error: ${lastError?.message ?? 'unknown'}`;
            logger?.error({ hosts: hosts.map(h => h.hostname), lastError: lastError?.message }, msg);
            throw new Boom(msg, { statusCode: 500, data: { lastError: lastError?.message } });
        }

        return urls;
    };
};
|
|
481
|
+
|
|
482
|
+
// Derives the 32-byte key used for AES-GCM encryption of media-retry
// receipts from the original message's media key.
const getMediaRetryKey = (mediaKey) => hkdf(mediaKey, 32, { info: 'WhatsApp Media Retry Notification' });
|
|
483
|
+
|
|
484
|
+
/**
 * Builds the encrypted 'receipt' binary node that asks WhatsApp to re-upload
 * expired media for the message identified by `key`.
 */
export const encryptMediaRetryRequest = async (key, mediaKey, meId) => {
    // Serialise the server-error receipt for the failed message.
    const receiptBytes = proto.ServerErrorReceipt.encode({ stanzaId: key.id }).finish();
    // Encrypt with a key derived from the media key; the message id acts as
    // the AES-GCM additional authenticated data.
    const iv = Crypto.randomBytes(12);
    const retryKey = await getMediaRetryKey(mediaKey);
    const ciphertext = aesEncryptGCM(receiptBytes, retryKey, iv, Buffer.from(key.id));
    const encNode = {
        tag: 'encrypt',
        attrs: {},
        content: [
            { tag: 'enc_p', attrs: {}, content: ciphertext },
            { tag: 'enc_iv', attrs: {}, content: iv }
        ]
    };
    const rmrNode = {
        tag: 'rmr',
        attrs: { jid: key.remoteJid, from_me: (!!key.fromMe).toString(), participant: key.participant }
    };
    return {
        tag: 'receipt',
        attrs: { id: key.id, to: jidNormalizedUser(meId), type: 'server-error' },
        content: [encNode, rmrNode]
    };
};
|
|
504
|
+
|
|
505
|
+
/**
 * Parses a media-retry notification node into { key, media? , error? }:
 * either the encrypted payload ({ ciphertext, iv }) or a Boom error.
 */
export const decodeMediaRetryNode = (node) => {
    const rmr = getBinaryNodeChild(node, 'rmr');
    const event = {
        key: {
            id: node.attrs.id,
            remoteJid: rmr.attrs.jid,
            fromMe: rmr.attrs.from_me === 'true',
            participant: rmr.attrs.participant
        }
    };
    // Server-reported failure: surface it as an error with a mapped status.
    const errorNode = getBinaryNodeChild(node, 'error');
    if (errorNode) {
        const code = +errorNode.attrs.code;
        event.error = new Boom(`Failed to re-upload media (${code})`, { data: errorNode.attrs, statusCode: getStatusCodeForMediaRetry(code) });
        return event;
    }
    // Otherwise extract the encrypted retry payload.
    const encryptedInfoNode = getBinaryNodeChild(node, 'encrypt');
    const ciphertext = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_p');
    const iv = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_iv');
    if (ciphertext && iv) {
        event.media = { ciphertext, iv };
    } else {
        event.error = new Boom('Failed to re-upload media (missing ciphertext)', { statusCode: 404 });
    }
    return event;
};
|
|
522
|
+
|
|
523
|
+
/**
 * Decrypts a media-retry payload (see decodeMediaRetryNode) and decodes it
 * into a MediaRetryNotification protobuf message.
 */
export const decryptMediaRetryData = async ({ ciphertext, iv }, mediaKey, msgId) => {
    // The message id is the AES-GCM AAD, mirroring the encrypt side.
    const retryKey = await getMediaRetryKey(mediaKey);
    const plaintext = aesDecryptGCM(ciphertext, retryKey, iv, Buffer.from(msgId));
    return proto.MediaRetryNotification.decode(plaintext);
};
|
|
527
|
+
|
|
528
|
+
// Maps a MediaRetryNotification result code to an HTTP-like status code
// (undefined for unknown codes).
export const getStatusCodeForMediaRetry = (code) => MEDIA_RETRY_STATUS_MAP[code];

// Result-type → status-code table; 418 marks the generic-failure case.
const MEDIA_RETRY_STATUS_MAP = {
    [proto.MediaRetryNotification.ResultType.SUCCESS]: 200,
    [proto.MediaRetryNotification.ResultType.DECRYPTION_ERROR]: 412,
    [proto.MediaRetryNotification.ResultType.NOT_FOUND]: 404,
    [proto.MediaRetryNotification.ResultType.GENERAL_ERROR]: 418
};
|