@nexustechpro/baileys 1.1.9 → 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1745 -1745
- package/lib/Socket/messages-send.js +629 -345
- package/lib/Utils/link-preview.js +46 -36
- package/lib/Utils/messages-media.js +155 -311
- package/lib/Utils/messages.js +43 -43
- package/lib/index.js +2 -2
- package/package.json +20 -20
|
@@ -2,7 +2,7 @@ import { Boom } from '@hapi/boom';
|
|
|
2
2
|
import { exec } from 'child_process';
|
|
3
3
|
import * as Crypto from 'crypto';
|
|
4
4
|
import { once } from 'events';
|
|
5
|
-
import { createReadStream, createWriteStream, promises as fs
|
|
5
|
+
import { createReadStream, createWriteStream, promises as fs } from 'fs';
|
|
6
6
|
import { tmpdir } from 'os';
|
|
7
7
|
import { join } from 'path';
|
|
8
8
|
import { Readable, Transform } from 'stream';
|
|
@@ -31,7 +31,6 @@ export const getRawMediaUploadData = async (media, mediaType, logger) => {
|
|
|
31
31
|
const filePath = join(tmpdir(), mediaType + generateMessageIDV2());
|
|
32
32
|
const fileWriteStream = createWriteStream(filePath);
|
|
33
33
|
let fileLength = 0;
|
|
34
|
-
|
|
35
34
|
try {
|
|
36
35
|
for await (const data of stream) {
|
|
37
36
|
fileLength += data.length;
|
|
@@ -54,7 +53,6 @@ export const getRawMediaUploadData = async (media, mediaType, logger) => {
|
|
|
54
53
|
export async function getMediaKeys(buffer, mediaType) {
|
|
55
54
|
if (!buffer) throw new Boom('Cannot derive from empty media key');
|
|
56
55
|
if (typeof buffer === 'string') buffer = Buffer.from(buffer.replace('data:;base64,', ''), 'base64');
|
|
57
|
-
|
|
58
56
|
const expandedMediaKey = hkdf(buffer, 112, { info: hkdfInfoKey(mediaType) });
|
|
59
57
|
return {
|
|
60
58
|
iv: expandedMediaKey.slice(0, 16),
|
|
@@ -64,22 +62,20 @@ export async function getMediaKeys(buffer, mediaType) {
|
|
|
64
62
|
}
|
|
65
63
|
|
|
66
64
|
const extractVideoThumb = (path, destPath, time, size) => new Promise((resolve, reject) => {
|
|
67
|
-
|
|
68
|
-
exec(cmd, err => err ? reject(err) : resolve());
|
|
65
|
+
exec(`ffmpeg -ss ${time} -i ${path} -y -vf scale=${size.width}:-1 -vframes 1 -f image2 ${destPath}`, err => err ? reject(err) : resolve());
|
|
69
66
|
});
|
|
70
67
|
|
|
71
68
|
export const extractImageThumb = async (bufferOrFilePath, width = 32) => {
|
|
72
69
|
if (bufferOrFilePath instanceof Readable) bufferOrFilePath = await toBuffer(bufferOrFilePath);
|
|
73
|
-
|
|
74
70
|
const lib = await getImageProcessingLibrary();
|
|
75
71
|
if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
|
|
76
72
|
const img = lib.sharp.default(bufferOrFilePath);
|
|
77
73
|
const dimensions = await img.metadata();
|
|
78
|
-
const buffer = await img.resize(width).jpeg({ quality:
|
|
74
|
+
const buffer = await img.resize(width).jpeg({ quality: 95 }).toBuffer();
|
|
79
75
|
return { buffer, original: { width: dimensions.width, height: dimensions.height } };
|
|
80
76
|
} else if ('jimp' in lib && typeof lib.jimp?.Jimp === 'object') {
|
|
81
77
|
const jimp = await lib.jimp.Jimp.read(bufferOrFilePath);
|
|
82
|
-
const buffer = await jimp.resize({ w: width, mode: lib.jimp.ResizeStrategy.BILINEAR }).getBuffer('image/jpeg', { quality:
|
|
78
|
+
const buffer = await jimp.resize({ w: width, mode: lib.jimp.ResizeStrategy.BILINEAR }).getBuffer('image/jpeg', { quality: 95 });
|
|
83
79
|
return { buffer, original: { width: jimp.width, height: jimp.height } };
|
|
84
80
|
}
|
|
85
81
|
throw new Boom('No image processing library available');
|
|
@@ -89,20 +85,18 @@ export const encodeBase64EncodedStringForUpload = (b64) => encodeURIComponent(b6
|
|
|
89
85
|
|
|
90
86
|
export const generateProfilePicture = async (mediaUpload) => {
|
|
91
87
|
let bufferOrFilePath = Buffer.isBuffer(mediaUpload) ? mediaUpload : 'url' in mediaUpload ? mediaUpload.url.toString() : await toBuffer(mediaUpload.stream);
|
|
92
|
-
|
|
93
88
|
const lib = await getImageProcessingLibrary();
|
|
94
|
-
let img;
|
|
95
89
|
if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
|
|
96
|
-
img = await lib.sharp.default(bufferOrFilePath).resize(720, 720, { fit: 'inside' }).jpeg({ quality: 50 }).toBuffer();
|
|
90
|
+
const img = await lib.sharp.default(bufferOrFilePath).resize(720, 720, { fit: 'inside' }).jpeg({ quality: 50 }).toBuffer();
|
|
91
|
+
return { img };
|
|
97
92
|
} else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
|
|
98
93
|
const { read, MIME_JPEG } = lib.jimp;
|
|
99
94
|
const image = await read(bufferOrFilePath);
|
|
100
95
|
const min = image.getWidth(), max = image.getHeight();
|
|
101
|
-
img = await image.crop(0, 0, min, max).scaleToFit(720, 720).getBufferAsync(MIME_JPEG);
|
|
102
|
-
|
|
103
|
-
throw new Boom('No image processing library available');
|
|
96
|
+
const img = await image.crop(0, 0, min, max).scaleToFit(720, 720).getBufferAsync(MIME_JPEG);
|
|
97
|
+
return { img };
|
|
104
98
|
}
|
|
105
|
-
|
|
99
|
+
throw new Boom('No image processing library available');
|
|
106
100
|
};
|
|
107
101
|
|
|
108
102
|
export const mediaMessageSHA256B64 = (message) => {
|
|
@@ -112,11 +106,9 @@ export const mediaMessageSHA256B64 = (message) => {
|
|
|
112
106
|
|
|
113
107
|
export async function getAudioDuration(buffer) {
|
|
114
108
|
const musicMetadata = await import('music-metadata');
|
|
115
|
-
|
|
116
|
-
if (
|
|
117
|
-
|
|
118
|
-
else metadata = await musicMetadata.parseStream(buffer, undefined, { duration: true });
|
|
119
|
-
return metadata.format.duration;
|
|
109
|
+
if (Buffer.isBuffer(buffer)) return (await musicMetadata.parseBuffer(buffer, undefined, { duration: true })).format.duration;
|
|
110
|
+
if (typeof buffer === 'string') return (await musicMetadata.parseFile(buffer, { duration: true })).format.duration;
|
|
111
|
+
return (await musicMetadata.parseStream(buffer, undefined, { duration: true })).format.duration;
|
|
120
112
|
}
|
|
121
113
|
|
|
122
114
|
export async function getAudioWaveform(buffer, logger) {
|
|
@@ -136,74 +128,31 @@ export async function getAudioWaveform(buffer, logger) {
|
|
|
136
128
|
return new Uint8Array(filteredData.map(n => Math.floor(100 * n * multiplier)));
|
|
137
129
|
} catch (e) {
|
|
138
130
|
logger?.debug('Failed to generate waveform: ' + e);
|
|
139
|
-
|
|
140
|
-
return new Uint8Array([0,99,0,99,0,99,0,99,88,99,0,99,0,55,0,99,0,99,0,99,0,99,0,99,88,99,0,99,0,55,0,99]);
|
|
131
|
+
return new Uint8Array([0, 99, 0, 99, 0, 99, 0, 99, 88, 99, 0, 99, 0, 55, 0, 99, 0, 99, 0, 99, 0, 99, 0, 99, 88, 99, 0, 99, 0, 55, 0, 99]);
|
|
141
132
|
}
|
|
142
133
|
}
|
|
143
134
|
|
|
144
|
-
const convertToOpusBuffer =
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
'-f', 'ogg',
|
|
155
|
-
'pipe:1'
|
|
156
|
-
];
|
|
157
|
-
|
|
158
|
-
const ffmpeg = exec(`ffmpeg ${args.join(' ')}`);
|
|
159
|
-
const chunks = [];
|
|
160
|
-
|
|
161
|
-
ffmpeg.stdin.write(buffer);
|
|
162
|
-
ffmpeg.stdin.end();
|
|
163
|
-
|
|
164
|
-
ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
|
|
165
|
-
ffmpeg.stderr.on('data', () => {}); // Ignore stderr
|
|
166
|
-
|
|
167
|
-
ffmpeg.on('close', code => {
|
|
168
|
-
if (code === 0) resolve(Buffer.concat(chunks));
|
|
169
|
-
else reject(new Error(`FFmpeg Opus conversion exited with code ${code}`));
|
|
170
|
-
});
|
|
171
|
-
|
|
172
|
-
ffmpeg.on('error', err => reject(err));
|
|
173
|
-
});
|
|
174
|
-
};
|
|
135
|
+
const convertToOpusBuffer = (buffer, logger) => new Promise((resolve, reject) => {
|
|
136
|
+
const ffmpeg = exec('ffmpeg -i pipe:0 -c:a libopus -b:a 64k -vbr on -compression_level 10 -frame_duration 20 -application voip -f ogg pipe:1');
|
|
137
|
+
const chunks = [];
|
|
138
|
+
ffmpeg.stdin.write(buffer);
|
|
139
|
+
ffmpeg.stdin.end();
|
|
140
|
+
ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
|
|
141
|
+
ffmpeg.stderr.on('data', () => { });
|
|
142
|
+
ffmpeg.on('close', code => code === 0 ? resolve(Buffer.concat(chunks)) : reject(new Error(`FFmpeg Opus conversion exited with code ${code}`)));
|
|
143
|
+
ffmpeg.on('error', reject);
|
|
144
|
+
});
|
|
175
145
|
|
|
176
|
-
const convertToMp4Buffer =
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
'-f', 'mp4',
|
|
187
|
-
'pipe:1'
|
|
188
|
-
];
|
|
189
|
-
|
|
190
|
-
const ffmpeg = exec(`ffmpeg ${args.join(' ')}`);
|
|
191
|
-
const chunks = [];
|
|
192
|
-
|
|
193
|
-
ffmpeg.stdin.write(buffer);
|
|
194
|
-
ffmpeg.stdin.end();
|
|
195
|
-
|
|
196
|
-
ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
|
|
197
|
-
ffmpeg.stderr.on('data', () => {}); // Ignore stderr
|
|
198
|
-
|
|
199
|
-
ffmpeg.on('close', code => {
|
|
200
|
-
if (code === 0) resolve(Buffer.concat(chunks));
|
|
201
|
-
else reject(new Error(`FFmpeg MP4 conversion exited with code ${code}`));
|
|
202
|
-
});
|
|
203
|
-
|
|
204
|
-
ffmpeg.on('error', err => reject(err));
|
|
205
|
-
});
|
|
206
|
-
};
|
|
146
|
+
const convertToMp4Buffer = (buffer, logger) => new Promise((resolve, reject) => {
|
|
147
|
+
const ffmpeg = exec('ffmpeg -i pipe:0 -c:v libx264 -preset veryfast -crf 23 -c:a aac -b:a 128k -movflags faststart -f mp4 pipe:1');
|
|
148
|
+
const chunks = [];
|
|
149
|
+
ffmpeg.stdin.write(buffer);
|
|
150
|
+
ffmpeg.stdin.end();
|
|
151
|
+
ffmpeg.stdout.on('data', chunk => chunks.push(chunk));
|
|
152
|
+
ffmpeg.stderr.on('data', () => { });
|
|
153
|
+
ffmpeg.on('close', code => code === 0 ? resolve(Buffer.concat(chunks)) : reject(new Error(`FFmpeg MP4 conversion exited with code ${code}`)));
|
|
154
|
+
ffmpeg.on('error', reject);
|
|
155
|
+
});
|
|
207
156
|
|
|
208
157
|
export const toReadable = (buffer) => {
|
|
209
158
|
const readable = new Readable({ read: () => { } });
|
|
@@ -221,11 +170,9 @@ export const toBuffer = async (stream) => {
|
|
|
221
170
|
|
|
222
171
|
export const getStream = async (item, opts) => {
|
|
223
172
|
if (!item) throw new Boom('Item is required for getStream', { statusCode: 400 });
|
|
224
|
-
|
|
225
173
|
if (Buffer.isBuffer(item)) return { stream: toReadable(item), type: 'buffer' };
|
|
226
174
|
if (item?.stream?.pipe) return { stream: item.stream, type: 'readable' };
|
|
227
175
|
if (item?.pipe) return { stream: item, type: 'readable' };
|
|
228
|
-
|
|
229
176
|
if (item && typeof item === 'object' && 'url' in item) {
|
|
230
177
|
const urlStr = item.url.toString();
|
|
231
178
|
if (Buffer.isBuffer(item.url)) return { stream: toReadable(item.url), type: 'buffer' };
|
|
@@ -233,19 +180,16 @@ export const getStream = async (item, opts) => {
|
|
|
233
180
|
if (urlStr.startsWith('http')) return { stream: await getHttpStream(item.url, opts), type: 'remote' };
|
|
234
181
|
return { stream: createReadStream(item.url), type: 'file' };
|
|
235
182
|
}
|
|
236
|
-
|
|
237
183
|
if (typeof item === 'string') {
|
|
238
184
|
if (item.startsWith('data:')) return { stream: toReadable(Buffer.from(item.split(',')[1], 'base64')), type: 'buffer' };
|
|
239
185
|
if (item.startsWith('http')) return { stream: await getHttpStream(item, opts), type: 'remote' };
|
|
240
186
|
return { stream: createReadStream(item), type: 'file' };
|
|
241
187
|
}
|
|
242
|
-
|
|
243
188
|
throw new Boom(`Invalid input type for getStream: ${typeof item}`, { statusCode: 400 });
|
|
244
189
|
};
|
|
245
190
|
|
|
246
191
|
export async function generateThumbnail(file, mediaType, options) {
|
|
247
192
|
let thumbnail, originalImageDimensions;
|
|
248
|
-
|
|
249
193
|
if (mediaType === 'image') {
|
|
250
194
|
const { buffer, original } = await extractImageThumb(file);
|
|
251
195
|
thumbnail = buffer.toString('base64');
|
|
@@ -254,8 +198,7 @@ export async function generateThumbnail(file, mediaType, options) {
|
|
|
254
198
|
const imgFilename = join(tmpdir(), generateMessageIDV2() + '.jpg');
|
|
255
199
|
try {
|
|
256
200
|
await extractVideoThumb(file, imgFilename, '00:00:00', { width: 32, height: 32 });
|
|
257
|
-
|
|
258
|
-
thumbnail = buff.toString('base64');
|
|
201
|
+
thumbnail = (await fs.readFile(imgFilename)).toString('base64');
|
|
259
202
|
await fs.unlink(imgFilename);
|
|
260
203
|
} catch (err) {
|
|
261
204
|
options.logger?.debug('could not generate video thumb: ' + err);
|
|
@@ -265,17 +208,10 @@ export async function generateThumbnail(file, mediaType, options) {
|
|
|
265
208
|
}
|
|
266
209
|
|
|
267
210
|
export const getHttpStream = async (url, options = {}) => {
|
|
268
|
-
const response = await fetch(url.toString(), {
|
|
269
|
-
dispatcher: options.dispatcher,
|
|
270
|
-
method: 'GET',
|
|
271
|
-
headers: options.headers
|
|
272
|
-
});
|
|
211
|
+
const response = await fetch(url.toString(), { dispatcher: options.dispatcher, method: 'GET', headers: options.headers });
|
|
273
212
|
if (!response.ok) throw new Boom(`Failed to fetch stream from ${url}`, { statusCode: response.status, data: { url } });
|
|
274
|
-
|
|
275
213
|
const body = response.body;
|
|
276
|
-
if (body && typeof body === 'object' && 'pipeTo' in body && typeof body.pipeTo === 'function')
|
|
277
|
-
return Readable.fromWeb(body);
|
|
278
|
-
}
|
|
214
|
+
if (body && typeof body === 'object' && 'pipeTo' in body && typeof body.pipeTo === 'function') return Readable.fromWeb(body);
|
|
279
215
|
if (body && typeof body.pipe === 'function' && typeof body.read === 'function') return body;
|
|
280
216
|
throw new Error('Response body is not a readable stream');
|
|
281
217
|
};
|
|
@@ -283,82 +219,48 @@ export const getHttpStream = async (url, options = {}) => {
|
|
|
283
219
|
export const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts, convertVideo } = {}) => {
|
|
284
220
|
const { stream, type } = await getStream(media, opts);
|
|
285
221
|
logger?.debug('fetched media stream');
|
|
286
|
-
|
|
287
222
|
let buffer = await toBuffer(stream);
|
|
288
|
-
|
|
289
|
-
// Convert video to mp4 if needed for newsletter
|
|
290
223
|
if (mediaType === 'video' && convertVideo) {
|
|
291
|
-
try {
|
|
292
|
-
|
|
293
|
-
logger?.debug('converted video to mp4 for newsletter');
|
|
294
|
-
} catch (e) {
|
|
295
|
-
logger?.error('failed to convert video for newsletter:', e);
|
|
296
|
-
}
|
|
224
|
+
try { buffer = await convertToMp4Buffer(buffer, logger); logger?.debug('converted video to mp4 for newsletter'); }
|
|
225
|
+
catch (e) { logger?.error('failed to convert video for newsletter:', e); }
|
|
297
226
|
}
|
|
298
|
-
|
|
299
|
-
let bodyPath;
|
|
300
|
-
let didSaveToTmpPath = false;
|
|
301
|
-
|
|
227
|
+
let bodyPath, didSaveToTmpPath = false;
|
|
302
228
|
try {
|
|
303
|
-
if (type === 'file')
|
|
304
|
-
|
|
305
|
-
} else if (saveOriginalFileIfRequired) {
|
|
229
|
+
if (type === 'file') bodyPath = media.url;
|
|
230
|
+
else if (saveOriginalFileIfRequired) {
|
|
306
231
|
bodyPath = join(tmpdir(), mediaType + generateMessageIDV2());
|
|
307
232
|
await fs.writeFile(bodyPath, buffer);
|
|
308
233
|
didSaveToTmpPath = true;
|
|
309
234
|
}
|
|
310
|
-
|
|
311
|
-
const fileLength = buffer.length;
|
|
312
|
-
const fileSha256 = Crypto.createHash('sha256').update(buffer).digest();
|
|
313
|
-
|
|
314
|
-
return {
|
|
315
|
-
mediaKey: undefined,
|
|
316
|
-
encWriteStream: buffer,
|
|
317
|
-
fileLength,
|
|
318
|
-
fileSha256,
|
|
319
|
-
fileEncSha256: undefined,
|
|
320
|
-
bodyPath,
|
|
321
|
-
didSaveToTmpPath
|
|
322
|
-
};
|
|
235
|
+
return { mediaKey: undefined, encWriteStream: buffer, fileLength: buffer.length, fileSha256: Crypto.createHash('sha256').update(buffer).digest(), fileEncSha256: undefined, bodyPath, didSaveToTmpPath };
|
|
323
236
|
} catch (error) {
|
|
324
|
-
if (didSaveToTmpPath && bodyPath) {
|
|
325
|
-
try { await fs.unlink(bodyPath); } catch { }
|
|
326
|
-
}
|
|
237
|
+
if (didSaveToTmpPath && bodyPath) try { await fs.unlink(bodyPath); } catch { }
|
|
327
238
|
throw error;
|
|
328
239
|
}
|
|
329
240
|
};
|
|
330
241
|
|
|
331
242
|
export const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts, mediaKey: providedMediaKey, isPtt, forceOpus, convertVideo } = {}) => {
|
|
332
243
|
const { stream, type } = await getStream(media, opts);
|
|
333
|
-
let finalStream = stream;
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
// Convert audio to opus if needed
|
|
244
|
+
let finalStream = stream, opusConverted = false;
|
|
245
|
+
|
|
337
246
|
if (mediaType === 'audio' && (isPtt === true || forceOpus === true)) {
|
|
338
247
|
try {
|
|
339
|
-
|
|
340
|
-
const opusBuffer = await convertToOpusBuffer(buffer, logger);
|
|
341
|
-
finalStream = toReadable(opusBuffer);
|
|
248
|
+
finalStream = toReadable(await convertToOpusBuffer(await toBuffer(stream), logger));
|
|
342
249
|
opusConverted = true;
|
|
343
250
|
logger?.debug('converted audio to Opus');
|
|
344
251
|
} catch (error) {
|
|
345
252
|
logger?.error('failed to convert audio to Opus, using original');
|
|
346
|
-
|
|
347
|
-
finalStream = newStream;
|
|
253
|
+
finalStream = (await getStream(media, opts)).stream;
|
|
348
254
|
}
|
|
349
255
|
}
|
|
350
256
|
|
|
351
|
-
// Convert video to mp4 if needed
|
|
352
257
|
if (mediaType === 'video' && convertVideo === true) {
|
|
353
258
|
try {
|
|
354
|
-
|
|
355
|
-
const mp4Buffer = await convertToMp4Buffer(buffer, logger);
|
|
356
|
-
finalStream = toReadable(mp4Buffer);
|
|
259
|
+
finalStream = toReadable(await convertToMp4Buffer(await toBuffer(finalStream), logger));
|
|
357
260
|
logger?.debug('converted video to mp4');
|
|
358
261
|
} catch (error) {
|
|
359
262
|
logger?.error('failed to convert video to mp4, using original');
|
|
360
|
-
|
|
361
|
-
finalStream = newStream;
|
|
263
|
+
finalStream = (await getStream(media, opts)).stream;
|
|
362
264
|
}
|
|
363
265
|
}
|
|
364
266
|
|
|
@@ -367,7 +269,7 @@ export const encryptedStream = async (media, mediaType, { logger, saveOriginalFi
|
|
|
367
269
|
const encFilePath = join(tmpdir(), mediaType + generateMessageIDV2() + '-enc');
|
|
368
270
|
const encFileWriteStream = createWriteStream(encFilePath);
|
|
369
271
|
let originalFileStream, originalFilePath;
|
|
370
|
-
|
|
272
|
+
|
|
371
273
|
if (saveOriginalFileIfRequired) {
|
|
372
274
|
originalFilePath = join(tmpdir(), mediaType + generateMessageIDV2() + '-original');
|
|
373
275
|
originalFileStream = createWriteStream(originalFilePath);
|
|
@@ -382,9 +284,7 @@ export const encryptedStream = async (media, mediaType, { logger, saveOriginalFi
|
|
|
382
284
|
try {
|
|
383
285
|
for await (const data of finalStream) {
|
|
384
286
|
fileLength += data.length;
|
|
385
|
-
if (type === 'remote' && opts?.maxContentLength && fileLength > opts.maxContentLength) {
|
|
386
|
-
throw new Boom('content length exceeded', { data: { media, type } });
|
|
387
|
-
}
|
|
287
|
+
if (type === 'remote' && opts?.maxContentLength && fileLength > opts.maxContentLength) throw new Boom('content length exceeded', { data: { media, type } });
|
|
388
288
|
if (originalFileStream && !originalFileStream.write(data)) await once(originalFileStream, 'drain');
|
|
389
289
|
sha256Plain.update(data);
|
|
390
290
|
const encrypted = aes.update(data);
|
|
@@ -392,30 +292,18 @@ export const encryptedStream = async (media, mediaType, { logger, saveOriginalFi
|
|
|
392
292
|
hmac.update(encrypted);
|
|
393
293
|
encFileWriteStream.write(encrypted);
|
|
394
294
|
}
|
|
395
|
-
|
|
396
295
|
const finalData = aes.final();
|
|
397
296
|
sha256Enc.update(finalData);
|
|
398
297
|
hmac.update(finalData);
|
|
399
298
|
encFileWriteStream.write(finalData);
|
|
400
|
-
|
|
401
299
|
const mac = hmac.digest().slice(0, 10);
|
|
402
300
|
sha256Enc.update(mac);
|
|
403
301
|
encFileWriteStream.write(mac);
|
|
404
302
|
encFileWriteStream.end();
|
|
405
303
|
originalFileStream?.end?.();
|
|
406
304
|
finalStream.destroy();
|
|
407
|
-
|
|
408
305
|
logger?.debug('encrypted data successfully');
|
|
409
|
-
return {
|
|
410
|
-
mediaKey,
|
|
411
|
-
bodyPath: originalFilePath, // ✅ Add this for consistency
|
|
412
|
-
encFilePath,
|
|
413
|
-
mac,
|
|
414
|
-
fileEncSha256: sha256Enc.digest(),
|
|
415
|
-
fileSha256: sha256Plain.digest(),
|
|
416
|
-
fileLength,
|
|
417
|
-
opusConverted
|
|
418
|
-
};
|
|
306
|
+
return { mediaKey, bodyPath: originalFilePath, encFilePath, mac, fileEncSha256: sha256Enc.digest(), fileSha256: sha256Plain.digest(), fileLength, opusConverted };
|
|
419
307
|
} catch (error) {
|
|
420
308
|
encFileWriteStream.destroy();
|
|
421
309
|
originalFileStream?.destroy?.();
|
|
@@ -424,12 +312,7 @@ export const encryptedStream = async (media, mediaType, { logger, saveOriginalFi
|
|
|
424
312
|
sha256Plain.destroy();
|
|
425
313
|
sha256Enc.destroy();
|
|
426
314
|
finalStream.destroy();
|
|
427
|
-
try {
|
|
428
|
-
await fs.unlink(encFilePath);
|
|
429
|
-
if (originalFilePath) await fs.unlink(originalFilePath);
|
|
430
|
-
} catch (err) {
|
|
431
|
-
logger?.error({ err }, 'failed deleting tmp files');
|
|
432
|
-
}
|
|
315
|
+
try { await fs.unlink(encFilePath); if (originalFilePath) await fs.unlink(originalFilePath); } catch (err) { logger?.error({ err }, 'failed deleting tmp files'); }
|
|
433
316
|
throw error;
|
|
434
317
|
}
|
|
435
318
|
};
|
|
@@ -444,32 +327,18 @@ export const downloadContentFromMessage = async ({ mediaKey, directPath, url },
|
|
|
444
327
|
const isValidMediaUrl = url?.startsWith('https://mmg.whatsapp.net/');
|
|
445
328
|
const downloadUrl = isValidMediaUrl ? url : getUrlFromDirectPath(directPath);
|
|
446
329
|
if (!downloadUrl) throw new Boom('No valid media URL or directPath present', { statusCode: 400 });
|
|
447
|
-
|
|
448
|
-
const keys = await getMediaKeys(mediaKey, type);
|
|
449
|
-
return downloadEncryptedContent(downloadUrl, keys, opts);
|
|
330
|
+
return downloadEncryptedContent(downloadUrl, await getMediaKeys(mediaKey, type), opts);
|
|
450
331
|
};
|
|
451
332
|
|
|
452
333
|
export const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startByte, endByte, options } = {}) => {
|
|
453
334
|
let bytesFetched = 0, startChunk = 0, firstBlockIsIV = false;
|
|
454
|
-
|
|
455
335
|
if (startByte) {
|
|
456
336
|
const chunk = toSmallestChunkSize(startByte || 0);
|
|
457
|
-
if (chunk) {
|
|
458
|
-
startChunk = chunk - AES_CHUNK_SIZE;
|
|
459
|
-
bytesFetched = chunk;
|
|
460
|
-
firstBlockIsIV = true;
|
|
461
|
-
}
|
|
337
|
+
if (chunk) { startChunk = chunk - AES_CHUNK_SIZE; bytesFetched = chunk; firstBlockIsIV = true; }
|
|
462
338
|
}
|
|
463
|
-
|
|
464
339
|
const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined;
|
|
465
|
-
const headers = {
|
|
466
|
-
|
|
467
|
-
Origin: DEFAULT_ORIGIN
|
|
468
|
-
};
|
|
469
|
-
|
|
470
|
-
if (startChunk || endChunk) {
|
|
471
|
-
headers.Range = `bytes=${startChunk}-${endChunk || ''}`;
|
|
472
|
-
}
|
|
340
|
+
const headers = { ...(options?.headers ? (Array.isArray(options.headers) ? Object.fromEntries(options.headers) : options.headers) : {}), Origin: DEFAULT_ORIGIN };
|
|
341
|
+
if (startChunk || endChunk) headers.Range = `bytes=${startChunk}-${endChunk || ''}`;
|
|
473
342
|
|
|
474
343
|
const fetched = await getHttpStream(downloadUrl, { ...(options || {}), headers });
|
|
475
344
|
let remainingBytes = Buffer.from([]), aes;
|
|
@@ -491,31 +360,16 @@ export const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, {
|
|
|
491
360
|
const decryptLength = toSmallestChunkSize(data.length);
|
|
492
361
|
remainingBytes = data.slice(decryptLength);
|
|
493
362
|
data = data.slice(0, decryptLength);
|
|
494
|
-
|
|
495
363
|
if (!aes) {
|
|
496
364
|
let ivValue = iv;
|
|
497
|
-
if (firstBlockIsIV) {
|
|
498
|
-
ivValue = data.slice(0, AES_CHUNK_SIZE);
|
|
499
|
-
data = data.slice(AES_CHUNK_SIZE);
|
|
500
|
-
}
|
|
365
|
+
if (firstBlockIsIV) { ivValue = data.slice(0, AES_CHUNK_SIZE); data = data.slice(AES_CHUNK_SIZE); }
|
|
501
366
|
aes = Crypto.createDecipheriv('aes-256-cbc', cipherKey, ivValue);
|
|
502
367
|
if (endByte) aes.setAutoPadding(false);
|
|
503
368
|
}
|
|
504
|
-
|
|
505
|
-
try {
|
|
506
|
-
pushBytes(aes.update(data), b => this.push(b));
|
|
507
|
-
callback();
|
|
508
|
-
} catch (error) {
|
|
509
|
-
callback(error);
|
|
510
|
-
}
|
|
369
|
+
try { pushBytes(aes.update(data), b => this.push(b)); callback(); } catch (error) { callback(error); }
|
|
511
370
|
},
|
|
512
371
|
final(callback) {
|
|
513
|
-
try {
|
|
514
|
-
pushBytes(aes.final(), b => this.push(b));
|
|
515
|
-
callback();
|
|
516
|
-
} catch (error) {
|
|
517
|
-
callback(error);
|
|
518
|
-
}
|
|
372
|
+
try { pushBytes(aes.final(), b => this.push(b)); callback(); } catch (error) { callback(error); }
|
|
519
373
|
}
|
|
520
374
|
});
|
|
521
375
|
return fetched.pipe(output, { end: true });
|
|
@@ -524,106 +378,103 @@ export const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, {
|
|
|
524
378
|
export function extensionForMediaMessage(message) {
|
|
525
379
|
const getExtension = (mimetype) => mimetype.split(';')[0]?.split('/')[1];
|
|
526
380
|
const type = Object.keys(message)[0];
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
if (type === 'locationMessage' || type === 'liveLocationMessage' || type === 'productMessage') {
|
|
530
|
-
extension = '.jpeg';
|
|
531
|
-
} else {
|
|
532
|
-
const messageContent = message[type];
|
|
533
|
-
extension = getExtension(messageContent.mimetype);
|
|
534
|
-
}
|
|
535
|
-
return extension;
|
|
381
|
+
if (type === 'locationMessage' || type === 'liveLocationMessage' || type === 'productMessage') return '.jpeg';
|
|
382
|
+
return getExtension(message[type].mimetype);
|
|
536
383
|
}
|
|
537
384
|
|
|
538
385
|
export const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options }, refreshMediaConn) => {
|
|
539
386
|
return async (stream, { mediaType, fileEncSha256B64, newsletter, timeoutMs }) => {
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
const
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
387
|
+
// Accepts Buffer, file path, Node stream, Web ReadableStream, or async iterable.
|
|
388
|
+
// File paths are streamed directly from disk — no RAM cost for large files.
|
|
389
|
+
const toUploadBody = async (input) => {
|
|
390
|
+
if (!input) throw new Boom('Upload input is null or undefined', { statusCode: 400 });
|
|
391
|
+
if (Buffer.isBuffer(input)) return input;
|
|
392
|
+
if (typeof input === 'string') return createReadStream(input);
|
|
393
|
+
if (typeof ReadableStream !== 'undefined' && input instanceof ReadableStream) return Readable.fromWeb(input);
|
|
394
|
+
if (typeof input.pipe === 'function' || typeof input[Symbol.asyncIterator] === 'function') return input;
|
|
395
|
+
throw new Boom(`Unsupported upload input type: ${Object.prototype.toString.call(input)}`, { statusCode: 400 });
|
|
396
|
+
};
|
|
397
|
+
|
|
550
398
|
let reqBody;
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
// If it's a file path, read it
|
|
555
|
-
const fs = await import('fs');
|
|
556
|
-
reqBody = await fs.promises.readFile(stream);
|
|
557
|
-
} else if (stream && typeof stream[Symbol.asyncIterator] === 'function') {
|
|
558
|
-
// It's an async iterable (stream)
|
|
559
|
-
const chunks = [];
|
|
560
|
-
for await (const chunk of stream) {
|
|
561
|
-
chunks.push(chunk);
|
|
562
|
-
}
|
|
563
|
-
reqBody = Buffer.concat(chunks);
|
|
564
|
-
} else if (stream && typeof stream.pipe === 'function') {
|
|
565
|
-
// It's a readable stream
|
|
566
|
-
const chunks = [];
|
|
567
|
-
for await (const chunk of stream) {
|
|
568
|
-
chunks.push(chunk);
|
|
569
|
-
}
|
|
570
|
-
reqBody = Buffer.concat(chunks);
|
|
571
|
-
} else {
|
|
572
|
-
throw new Boom(`Invalid stream type: ${typeof stream}`, { statusCode: 400 });
|
|
573
|
-
}
|
|
574
|
-
|
|
399
|
+
try { reqBody = await toUploadBody(stream); }
|
|
400
|
+
catch (err) { logger?.error({ err: err.message }, 'failed to prepare upload body'); throw err; }
|
|
401
|
+
|
|
575
402
|
fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64);
|
|
576
|
-
|
|
403
|
+
|
|
577
404
|
let media = MEDIA_PATH_MAP[mediaType];
|
|
578
|
-
if (newsletter)
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
405
|
+
if (newsletter) media = media?.replace('/mms/', '/newsletter/newsletter-');
|
|
406
|
+
if (!media) throw new Boom(`No media path found for type: ${mediaType}`, { statusCode: 400 });
|
|
407
|
+
|
|
408
|
+
// Force-refresh auth upfront to avoid stale token failures
|
|
409
|
+
let uploadInfo = await refreshMediaConn(true);
|
|
410
|
+
const hosts = [...(customUploadHosts ?? []), ...(uploadInfo.hosts ?? [])];
|
|
411
|
+
if (!hosts.length) throw new Boom('No upload hosts available', { statusCode: 503 });
|
|
412
|
+
|
|
413
|
+
const MAX_RETRIES = 2;
|
|
414
|
+
let urls, lastError;
|
|
415
|
+
|
|
582
416
|
for (const { hostname, maxContentLengthBytes } of hosts) {
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
417
|
+
for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
|
|
418
|
+
try {
|
|
419
|
+
if (attempt > 1) {
|
|
420
|
+
uploadInfo = await refreshMediaConn(true);
|
|
421
|
+
reqBody = await toUploadBody(stream);
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
if (maxContentLengthBytes && Buffer.isBuffer(reqBody) && reqBody.length > maxContentLengthBytes) {
|
|
425
|
+
logger?.warn({ hostname, maxContentLengthBytes }, 'body too large for host, skipping');
|
|
426
|
+
break;
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
const auth = encodeURIComponent(uploadInfo.auth);
|
|
430
|
+
const url = `https://${hostname}${media}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`;
|
|
431
|
+
const controller = new AbortController();
|
|
432
|
+
const timer = timeoutMs ? setTimeout(() => controller.abort(), timeoutMs) : null;
|
|
433
|
+
|
|
434
|
+
let response;
|
|
435
|
+
try {
|
|
436
|
+
response = await fetch(url, {
|
|
437
|
+
dispatcher: fetchAgent,
|
|
438
|
+
method: 'POST',
|
|
439
|
+
body: reqBody,
|
|
440
|
+
headers: {
|
|
441
|
+
...(Array.isArray(options?.headers) ? Object.fromEntries(options.headers) : (options?.headers ?? {})),
|
|
442
|
+
'Content-Type': 'application/octet-stream',
|
|
443
|
+
Origin: DEFAULT_ORIGIN
|
|
444
|
+
},
|
|
445
|
+
duplex: 'half',
|
|
446
|
+
signal: controller.signal
|
|
447
|
+
});
|
|
448
|
+
} finally {
|
|
449
|
+
if (timer) clearTimeout(timer);
|
|
450
|
+
}
|
|
451
|
+
|
|
452
|
+
let result;
|
|
453
|
+
try { result = await response.json(); } catch { result = null; }
|
|
454
|
+
|
|
455
|
+
if (result?.url || result?.directPath) {
|
|
456
|
+
urls = { mediaUrl: result.url, directPath: result.direct_path, handle: result.handle };
|
|
457
|
+
break;
|
|
458
|
+
}
|
|
459
|
+
|
|
460
|
+
lastError = new Error(`${hostname} rejected upload (HTTP ${response.status}): ${JSON.stringify(result)}`);
|
|
461
|
+
logger?.warn({ hostname, attempt, status: response.status, result }, 'upload rejected');
|
|
462
|
+
|
|
463
|
+
} catch (err) {
|
|
464
|
+
lastError = err;
|
|
465
|
+
logger?.warn({ hostname, attempt, err: err.message, timedOut: err.name === 'AbortError' }, 'upload attempt failed');
|
|
466
|
+
if (attempt < MAX_RETRIES) await new Promise(r => setTimeout(r, 500 * attempt));
|
|
590
467
|
}
|
|
591
|
-
|
|
592
|
-
const response = await fetch(url, {
|
|
593
|
-
dispatcher: fetchAgent,
|
|
594
|
-
method: 'POST',
|
|
595
|
-
body: reqBody,
|
|
596
|
-
headers: {
|
|
597
|
-
...(options?.headers ? (Array.isArray(options.headers) ? Object.fromEntries(options.headers) : options.headers) : {}),
|
|
598
|
-
'Content-Type': 'application/octet-stream',
|
|
599
|
-
Origin: DEFAULT_ORIGIN
|
|
600
|
-
},
|
|
601
|
-
duplex: 'half',
|
|
602
|
-
signal: timeoutMs ? AbortSignal.timeout(timeoutMs) : undefined
|
|
603
|
-
});
|
|
604
|
-
|
|
605
|
-
let result;
|
|
606
|
-
try { result = await response.json(); }
|
|
607
|
-
catch { result = undefined; }
|
|
608
|
-
|
|
609
|
-
if (result?.url || result?.directPath) {
|
|
610
|
-
urls = {
|
|
611
|
-
mediaUrl: result.url,
|
|
612
|
-
directPath: result.direct_path,
|
|
613
|
-
handle: result.handle
|
|
614
|
-
};
|
|
615
|
-
break;
|
|
616
|
-
} else {
|
|
617
|
-
uploadInfo = await refreshMediaConn(true);
|
|
618
|
-
throw new Error(`upload failed: ${JSON.stringify(result)}`);
|
|
619
|
-
}
|
|
620
|
-
} catch (error) {
|
|
621
|
-
const isLast = hostname === hosts[uploadInfo.hosts.length - 1]?.hostname;
|
|
622
|
-
logger.warn({ trace: error?.stack }, `Error uploading to ${hostname}${isLast ? '' : ', retrying...'}`);
|
|
623
468
|
}
|
|
469
|
+
if (urls) break;
|
|
470
|
+
}
|
|
471
|
+
|
|
472
|
+
if (!urls) {
|
|
473
|
+
const msg = `Media upload failed on all hosts. Last error: ${lastError?.message ?? 'unknown'}`;
|
|
474
|
+
logger?.error({ hosts: hosts.map(h => h.hostname), lastError: lastError?.message }, msg);
|
|
475
|
+
throw new Boom(msg, { statusCode: 500, data: { lastError: lastError?.message } });
|
|
624
476
|
}
|
|
625
|
-
|
|
626
|
-
if (!urls) throw new Boom('Media upload failed on all hosts', { statusCode: 500 });
|
|
477
|
+
|
|
627
478
|
return urls;
|
|
628
479
|
};
|
|
629
480
|
};
|
|
@@ -636,15 +487,16 @@ export const encryptMediaRetryRequest = async (key, mediaKey, meId) => {
|
|
|
636
487
|
const iv = Crypto.randomBytes(12);
|
|
637
488
|
const retryKey = await getMediaRetryKey(mediaKey);
|
|
638
489
|
const ciphertext = aesEncryptGCM(recpBuffer, retryKey, iv, Buffer.from(key.id));
|
|
639
|
-
|
|
640
490
|
return {
|
|
641
491
|
tag: 'receipt',
|
|
642
492
|
attrs: { id: key.id, to: jidNormalizedUser(meId), type: 'server-error' },
|
|
643
493
|
content: [
|
|
644
|
-
{
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
494
|
+
{
|
|
495
|
+
tag: 'encrypt', attrs: {}, content: [
|
|
496
|
+
{ tag: 'enc_p', attrs: {}, content: ciphertext },
|
|
497
|
+
{ tag: 'enc_iv', attrs: {}, content: iv }
|
|
498
|
+
]
|
|
499
|
+
},
|
|
648
500
|
{ tag: 'rmr', attrs: { jid: key.remoteJid, from_me: (!!key.fromMe).toString(), participant: key.participant } }
|
|
649
501
|
]
|
|
650
502
|
};
|
|
@@ -653,18 +505,11 @@ export const encryptMediaRetryRequest = async (key, mediaKey, meId) => {
|
|
|
653
505
|
export const decodeMediaRetryNode = (node) => {
|
|
654
506
|
const rmrNode = getBinaryNodeChild(node, 'rmr');
|
|
655
507
|
const event = {
|
|
656
|
-
key: {
|
|
657
|
-
id: node.attrs.id,
|
|
658
|
-
remoteJid: rmrNode.attrs.jid,
|
|
659
|
-
fromMe: rmrNode.attrs.from_me === 'true',
|
|
660
|
-
participant: rmrNode.attrs.participant
|
|
661
|
-
}
|
|
508
|
+
key: { id: node.attrs.id, remoteJid: rmrNode.attrs.jid, fromMe: rmrNode.attrs.from_me === 'true', participant: rmrNode.attrs.participant }
|
|
662
509
|
};
|
|
663
|
-
|
|
664
510
|
const errorNode = getBinaryNodeChild(node, 'error');
|
|
665
511
|
if (errorNode) {
|
|
666
|
-
|
|
667
|
-
event.error = new Boom(`Failed to re-upload media (${errorCode})`, { data: errorNode.attrs, statusCode: getStatusCodeForMediaRetry(errorCode) });
|
|
512
|
+
event.error = new Boom(`Failed to re-upload media (${+errorNode.attrs.code})`, { data: errorNode.attrs, statusCode: getStatusCodeForMediaRetry(+errorNode.attrs.code) });
|
|
668
513
|
} else {
|
|
669
514
|
const encryptedInfoNode = getBinaryNodeChild(node, 'encrypt');
|
|
670
515
|
const ciphertext = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_p');
|
|
@@ -676,8 +521,7 @@ export const decodeMediaRetryNode = (node) => {
|
|
|
676
521
|
};
|
|
677
522
|
|
|
678
523
|
/**
 * Decrypt the encrypted payload of a media-retry notification.
 *
 * @param {{ ciphertext: Buffer, iv: Buffer }} enc - ciphertext and GCM IV taken from the `encrypt` node
 * @param {Buffer|string} mediaKey - media key of the original message; expanded via getMediaRetryKey
 * @param {string} msgId - message ID, bound into the decryption as additional authenticated data
 * @returns decoded proto.MediaRetryNotification
 */
export const decryptMediaRetryData = async ({ ciphertext, iv }, mediaKey, msgId) => {
    // derive the retry-specific key from the original media key
    const retryKey = await getMediaRetryKey(mediaKey);
    // the message ID is the AAD — decryption fails if it does not match the sender's
    const aad = Buffer.from(msgId);
    const plaintext = aesDecryptGCM(ciphertext, retryKey, iv, aad);
    return proto.MediaRetryNotification.decode(plaintext);
};
|
|
683
527
|
|