@hansaka02/baileys 7.3.4 → 7.3.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +203 -247
- package/lib/Defaults/baileys-version.json +2 -2
- package/lib/Defaults/connection.js +1 -1
- package/lib/Defaults/constants.js +13 -1
- package/lib/Defaults/history.js +3 -1
- package/lib/Signal/Group/sender-chain-key.js +1 -14
- package/lib/Signal/Group/sender-key-distribution-message.js +2 -2
- package/lib/Signal/Group/sender-key-record.js +2 -11
- package/lib/Signal/Group/sender-key-state.js +11 -57
- package/lib/Signal/libsignal.js +200 -116
- package/lib/Signal/lid-mapping.js +121 -68
- package/lib/Socket/Client/websocket.js +9 -2
- package/lib/Socket/business.js +5 -1
- package/lib/Socket/chats.js +180 -89
- package/lib/Socket/community.js +169 -41
- package/lib/Socket/groups.js +25 -21
- package/lib/Socket/messages-recv.js +458 -333
- package/lib/Socket/messages-send.js +517 -572
- package/lib/Socket/mex.js +61 -0
- package/lib/Socket/newsletter.js +159 -252
- package/lib/Socket/socket.js +283 -100
- package/lib/Types/Newsletter.js +32 -25
- package/lib/Utils/auth-utils.js +189 -354
- package/lib/Utils/browser-utils.js +43 -0
- package/lib/Utils/chat-utils.js +166 -41
- package/lib/Utils/decode-wa-message.js +77 -35
- package/lib/Utils/event-buffer.js +80 -24
- package/lib/Utils/generics.js +28 -128
- package/lib/Utils/history.js +10 -8
- package/lib/Utils/index.js +1 -1
- package/lib/Utils/link-preview.js +17 -32
- package/lib/Utils/lt-hash.js +28 -22
- package/lib/Utils/make-mutex.js +26 -28
- package/lib/Utils/message-retry-manager.js +51 -3
- package/lib/Utils/messages-media.js +343 -151
- package/lib/Utils/messages.js +806 -792
- package/lib/Utils/noise-handler.js +33 -2
- package/lib/Utils/pre-key-manager.js +126 -0
- package/lib/Utils/process-message.js +115 -55
- package/lib/Utils/signal.js +45 -18
- package/lib/Utils/validate-connection.js +52 -29
- package/lib/WABinary/constants.js +1268 -1268
- package/lib/WABinary/decode.js +58 -4
- package/lib/WABinary/encode.js +54 -7
- package/lib/WABinary/jid-utils.js +58 -11
- package/lib/WAM/constants.js +19064 -11563
- package/lib/WAM/encode.js +57 -8
- package/lib/WAUSync/USyncQuery.js +35 -19
- package/package.json +9 -8
- package/lib/Socket/usync.js +0 -83
package/lib/Utils/messages-media.js

@@ -3,7 +3,7 @@
 Object.defineProperty(exports, "__esModule", { value: true })
 
 const { Boom } = require("@hapi/boom")
-const {
+const { exec } = require("child_process")
 const { once } = require("events")
 const {
     createHash,
@@ -17,13 +17,20 @@ const {
     createReadStream,
     createWriteStream
 } = require("fs")
+const {
+    parseBuffer,
+    parseFile,
+    parseStream
+} = require('music-metadata')
+const {
+    default: decoder
+} = require("audio-decode")
 const { tmpdir } = require("os")
 const { join } = require("path")
 const {
     Readable,
     Transform
 } = require("stream")
-const axios_1 = require("axios")
 const { proto } = require("../../WAProto")
 const {
     MEDIA_PATH_MAP,
@@ -42,24 +49,27 @@ const {
 } = require("./crypto")
 const { generateMessageID } = require("./generics")
 
-const
-
-
-
-
-
-
-
-})()
-])
+const getTmpFilesDirectory = () => tmpdir()
+
+const getImageProcessingLibrary = () => {
+    let sharp, jimp
+
+    try {
+        sharp = require('sharp')
+    } catch {}
+
     if (sharp) {
         return { sharp }
     }
-
+
+    try {
+        jimp = require('jimp')
+    } catch {}
+
     if (jimp) {
         return { jimp }
     }
+
     throw new Boom('No image processing library available')
 }
 
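The new `getImageProcessingLibrary` treats `sharp` and `jimp` as optional dependencies: each `require` is wrapped in try/catch and the first library that resolves wins, with `sharp` preferred. A minimal standalone sketch of the same fallback pattern (the `optionalRequire` helper is illustrative, not part of the package):

```js
// Illustrative sketch of the optional-dependency fallback used above.
// optionalRequire is a hypothetical helper, not an export of this package.
const optionalRequire = name => {
    try {
        return require(name) // resolves only if the optional dependency is installed
    } catch {
        return undefined // a missing module is a fallback signal, not an error
    }
}

const sharp = optionalRequire('sharp')
const jimp = sharp ? undefined : optionalRequire('jimp')
if (!sharp && !jimp) {
    throw new Error('No image processing library available')
}
```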
@@ -74,7 +84,7 @@ const getRawMediaUploadData = async (media, mediaType, logger) => {
     logger?.debug('got stream for raw upload')
 
     const hasher = createHash('sha256')
-    const filePath = join(
+    const filePath = join(getTmpFilesDirectory(), mediaType + generateMessageID())
     const fileWriteStream = createWriteStream(filePath)
 
     let fileLength = 0
@@ -130,75 +140,50 @@ async function getMediaKeys(buffer, mediaType) {
     return {
         iv: expandedMediaKey.slice(0, 16),
         cipherKey: expandedMediaKey.slice(16, 48),
-        macKey: expandedMediaKey.slice(48, 80)
+        macKey: expandedMediaKey.slice(48, 80)
     }
 }
 
 /** Extracts video thumb using FFMPEG */
-const extractVideoThumb = (
-
-
-
-
-
-
-
-        '-vcodec', 'mjpeg',
-        'pipe:1'
-    ]
-
-    const ffmpeg = spawn('ffmpeg', args)
-    const chunks = []
-    let errorOutput = ''
-
-    ffmpeg.stdout.on('data', chunk => chunks.push(chunk))
-    ffmpeg.stderr.on('data', data => {
-        errorOutput += data.toString()
-    })
-    ffmpeg.on('error', reject)
-    ffmpeg.on('close', code => {
-        if (code === 0) return resolve(Buffer.concat(chunks))
-        reject(new Error(`ffmpeg exited with code ${code}\n${errorOutput}`))
+const extractVideoThumb = async (path, destPath, time, size) => new Promise((resolve, reject) => {
+    const cmd = `ffmpeg -ss ${time} -i ${path} -y -vf scale=${size.width}:-1 -vframes 1 -f image2 ${destPath}`
+    exec(cmd, err => {
+        if (err) {
+            reject(err)
+        }
+        else {
+            resolve()
+        }
     })
-}
+})
 
 const extractImageThumb = async (bufferOrFilePath, width = 32, quality = 50) => {
-
-
-        bufferOrFilePath = Buffer.from(response.data)
-    }
+    // TODO: Move entirely to sharp, removing jimp as it supports readable streams
+    // This will have positive speed and performance impacts as well as minimizing RAM usage.
     if (bufferOrFilePath instanceof Readable) {
         bufferOrFilePath = await toBuffer(bufferOrFilePath)
     }
+
     const lib = await getImageProcessingLibrary()
-
-
+
+    if ('sharp' in lib && typeof lib.sharp === 'function') {
+        const img = lib.sharp(bufferOrFilePath)
         const dimensions = await img.metadata()
-        const buffer = await img
-            .resize({
-                width,
-                height: width,
-                fit: 'contain',
-                background: { r: 255, g: 255, b: 255, alpha: 0 }
-            })
-            .jpeg({ quality })
-            .toBuffer()
+        const buffer = await img.resize(width).jpeg({ quality: 50 }).toBuffer()
         return {
             buffer,
             original: {
                 width: dimensions.width,
-                height: dimensions.height
-            }
+                height: dimensions.height
+            }
         }
     }
-    else if ('jimp' in lib && typeof lib.jimp
-
+    else if ('jimp' in lib && typeof lib.jimp.read === 'function') {
+        const { read, MIME_JPEG, RESIZE_BEZIER, AUTO } = lib.jimp
         const jimp = await read(bufferOrFilePath)
         const dimensions = {
             width: jimp.getWidth(),
-            height: jimp.getHeight()
+            height: jimp.getHeight()
         }
         const buffer = await jimp
             .quality(quality)
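For context on the `iv`/`cipherKey`/`macKey` slices at the top of the hunk above: the 32-byte media key is expanded to 112 bytes and the encrypt/decrypt paths consume fixed offsets of that expansion. A sketch under the usual WhatsApp media-key assumptions (HKDF-SHA256, empty salt, a media-type-specific info string such as 'WhatsApp Image Keys') — these parameters are not visible in this diff:

```js
// Sketch of the key layout consumed by getMediaKeys above. The HKDF salt and
// info string are assumptions based on the usual WhatsApp scheme.
const { hkdfSync } = require('crypto')

const expandMediaKey = (mediaKey, info /* e.g. 'WhatsApp Image Keys' */) => {
    const expanded = Buffer.from(hkdfSync('sha256', mediaKey, Buffer.alloc(0), info, 112))
    return {
        iv: expanded.subarray(0, 16),         // AES-256-CBC IV
        cipherKey: expanded.subarray(16, 48), // 32-byte AES key
        macKey: expanded.subarray(48, 80)     // 32-byte HMAC-SHA256 key; trailing 32 bytes unused here
    }
}
```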
@@ -219,52 +204,53 @@ const encodeBase64EncodedStringForUpload = (b64) => (encodeURIComponent(b64
     .replace(/\//g, '_')
     .replace(/\=+$/, '')))
 
-const generateProfilePicture = async (mediaUpload) => {
-    let
+const generateProfilePicture = async (mediaUpload, dimensions) => {
+    let buffer
+
+    const { width: w = 640, height: h = 640 } = dimensions || {}
+
     if (Buffer.isBuffer(mediaUpload)) {
-
-    }
-    else if ('url' in mediaUpload) {
-        bufferOrFilePath = mediaUpload.url.toString()
+        buffer = mediaUpload
     }
     else {
-
+        // Use getStream to handle all WAMediaUpload types (Buffer, Stream, URL)
+        const { stream } = await getStream(mediaUpload)
+        // Convert the resulting stream to a buffer
+        buffer = await toBuffer(stream)
     }
     const lib = await getImageProcessingLibrary()
+
     let img
+
     if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
-        img =
-
-
-
-
-            .toBuffer()
+        img = lib.sharp
+            .default(buffer)
+            .resize(w, h)
+            .jpeg({
+                quality: 50
+            }).toBuffer()
     }
     else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
-
-        const
-        const
-
-        const cropped = image.crop(0, 0, min, max)
-        img = await cropped.scaleToFit(720, 720).getBufferAsync(MIME_JPEG)
+        const jimp = await lib.jimp.read(buffer)
+        const min = Math.min(jimp.width, jimp.height)
+        const cropped = jimp.crop({ x: 0, y: 0, w: min, h: min })
+        img = cropped.resize({ w, h, mode: lib.jimp.ResizeStrategy.BILINEAR }).getBuffer('image/jpeg', { quality: 50 })
     }
     else {
-        throw new Boom('No image processing library available')
+        throw new Boom('No image processing library available');
     }
     return {
-        img: await img
+        img: await img
     }
 }
 
-
 /** gets the SHA256 of the given media message */
 const mediaMessageSHA256B64 = (message) => {
     const media = Object.values(message)[0]
-    return
+    return media?.fileSha256 && Buffer.from(media.fileSha256).toString('base64')
 }
 
 async function getAudioDuration(buffer) {
-    const musicMetadata = await Promise.resolve().then(() => __importStar(require('music-metadata')))
     const options = {
         duration: true
     }
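`generateProfilePicture` now accepts an optional `dimensions` argument, defaulting to 640×640 instead of the old hard-coded crop-and-scale. A usage sketch, assuming the function is re-exported from the package root as in upstream Baileys:

```js
// Usage sketch for the updated generateProfilePicture (export path assumed).
const { readFileSync } = require('fs')
const { generateProfilePicture } = require('@hansaka02/baileys')

const main = async () => {
    // pass explicit dimensions, or omit the second argument for the 640x640 default
    const { img } = await generateProfilePicture(readFileSync('./avatar.png'), { width: 96, height: 96 })
    console.log('JPEG bytes:', img.length)
}

main()
```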
@@ -272,15 +258,15 @@ async function getAudioDuration(buffer) {
     let metadata
 
     if (Buffer.isBuffer(buffer)) {
-        metadata = await
+        metadata = await parseBuffer(buffer, undefined, options)
     }
     else if (typeof buffer === 'string') {
-        metadata = await
+        metadata = await parseFile(buffer, options)
     }
     else {
-        metadata = await
+        metadata = await parseStream(buffer, undefined, options)
     }
-    return metadata.format
+    return metadata.format?.duration
 }
 
 /**
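`getAudioDuration` now calls the statically imported `parseBuffer`/`parseFile`/`parseStream` and returns `format?.duration` (in seconds) instead of the whole format object. A usage sketch, assuming a CommonJS-compatible `music-metadata` release and a local MP3 file:

```js
// Usage sketch for the duration path above; the input file is illustrative.
const { parseBuffer } = require('music-metadata')
const { readFileSync } = require('fs')

const buffer = readFileSync('./voice-note.mp3')

parseBuffer(buffer, undefined, { duration: true }).then(metadata => {
    // duration is in seconds and may be undefined for some containers
    console.log('duration (s):', metadata.format?.duration)
})
```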
@@ -288,8 +274,8 @@ async function getAudioDuration(buffer) {
 */
 async function getAudioWaveform(buffer, logger) {
     try {
-        const { default: decoder } = await eval('import(\'audio-decode\')')
         let audioData
+
         if (Buffer.isBuffer(buffer)) {
             audioData = buffer
         }
@@ -300,11 +286,13 @@ async function getAudioWaveform(buffer, logger) {
         else {
             audioData = await toBuffer(buffer)
         }
+
         const audioBuffer = await decoder(audioData)
         const rawData = audioBuffer.getChannelData(0) // We only need to work with one channel of data
         const samples = 64 // Number of samples we want to have in our final data set
         const blockSize = Math.floor(rawData.length / samples) // the number of samples in each subdivision
         const filteredData = []
+
         for (let i = 0; i < samples; i++) {
             const blockStart = blockSize * i // the location of the first sample in the block
             let sum = 0
@@ -313,9 +301,11 @@ async function getAudioWaveform(buffer, logger) {
             }
             filteredData.push(sum / blockSize) // divide the sum by the block size to get the average
         }
+
         // This guarantees that the largest data point will be set to 1, and the rest of the data will scale proportionally.
         const multiplier = Math.pow(Math.max(...filteredData), -1)
         const normalizedData = filteredData.map((n) => n * multiplier)
+
         // Generate waveform like WhatsApp
         const waveform = new Uint8Array(normalizedData.map((n) => Math.floor(100 * n)))
         return waveform
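The waveform pipeline above downsamples one PCM channel into 64 buckets, averages each block, normalises so the loudest bucket becomes 1, then scales to WhatsApp's 0–100 range. A self-contained sketch of that arithmetic; the abs-average in the inner loop is assumed from upstream Baileys, since this hunk elides the `sum +=` line:

```js
// Downsample-and-normalise sketch matching the loop structure above.
const computeWaveform = rawData /* Float32Array of PCM samples */ => {
    const samples = 64
    const blockSize = Math.floor(rawData.length / samples)
    const filteredData = []

    for (let i = 0; i < samples; i++) {
        let sum = 0
        for (let j = 0; j < blockSize; j++) {
            sum += Math.abs(rawData[blockSize * i + j]) // assumed abs-average
        }
        filteredData.push(sum / blockSize)
    }

    // scale so the largest bucket becomes 1, then map to WhatsApp's 0-100 range
    const multiplier = Math.pow(Math.max(...filteredData), -1)
    return new Uint8Array(filteredData.map(n => Math.floor(100 * n * multiplier)))
}
```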
@@ -368,23 +358,31 @@ const getStream = async (item, opts) => {
 async function generateThumbnail(file, mediaType, options) {
     let thumbnail
     let originalImageDimensions
+
     if (mediaType === 'image') {
-        const { buffer, original } = await extractImageThumb(file
+        const { buffer, original } = await extractImageThumb(file)
+
         thumbnail = buffer.toString('base64')
+
         if (original.width && original.height) {
             originalImageDimensions = {
                 width: original.width,
-                height: original.height
+                height: original.height
             }
         }
     }
     else if (mediaType === 'video') {
+        const imgFilename = join(getTmpFilesDirectory(), generateMessageID() + '.jpg')
         try {
-
+            await extractVideoThumb(file, imgFilename, '00:00:00', { width: 32, height: 32 })
+            const buff = await promises.readFile(imgFilename)
+
             thumbnail = buff.toString('base64')
+
+            await promises.unlink(imgFilename)
         }
         catch (err) {
-            options
+            options.logger?.debug('could not generate video thumb: ' + err)
         }
     }
     return {
@@ -394,11 +392,20 @@ async function generateThumbnail(file, mediaType, options) {
 }
 
 const getHttpStream = async (url, options = {}) => {
-    const
-
+    const response = await fetch(url.toString(), {
+        dispatcher: options.dispatcher,
+        method: 'GET',
+        headers: options.headers
+    })
+
+    if (!response.ok) {
+        throw new Boom(`Failed to fetch stream from ${url}`, { statusCode: response.status, data: { url } })
+    }
+
+    return response.body instanceof Readable ? response.body : Readable.fromWeb(response.body)
 }
 
-const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
+/*const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
     const { stream, type } = await getStream(media, opts)
     logger?.debug('fetched media stream')
 
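`getHttpStream` drops axios for Node's global `fetch` and bridges the web `ReadableStream` body back to a Node `Readable`, so downstream `pipe()` consumers keep working. A minimal sketch of that bridge on Node 18+ (the URL in the usage note is illustrative):

```js
// Sketch of the fetch-to-Readable bridge used by the new getHttpStream.
const { Readable } = require('stream')

const getBodyStream = async url => {
    const response = await fetch(url) // Node 18+ global fetch (undici)
    if (!response.ok) {
        throw new Error(`request failed with status ${response.status}`)
    }
    // undici yields a web ReadableStream; convert it for pipe()-style consumers
    return response.body instanceof Readable ? response.body : Readable.fromWeb(response.body)
}

// usage: (await getBodyStream('https://example.com/file')).pipe(process.stdout)
```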
@@ -470,59 +477,90 @@ const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequi
         }
         throw error
     }
-}
+}*/
 
 const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
     const { stream, type } = await getStream(media, opts)
+
     logger?.debug('fetched media stream')
+
     const mediaKey = randomBytes(32)
     const { cipherKey, iv, macKey } = await getMediaKeys(mediaKey, mediaType)
-    const encFilePath = join(
+    const encFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageID() + '-enc')
     const encFileWriteStream = createWriteStream(encFilePath)
-
+
+    let originalFileStream;
     let originalFilePath
+
     if (saveOriginalFileIfRequired) {
-        originalFilePath = join(
+        originalFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageID() + '-original')
         originalFileStream = createWriteStream(originalFilePath)
     }
+
     let fileLength = 0
-
+
+    const aes = createCipheriv('aes-256-cbc', cipherKey, iv)
     const hmac = createHmac('sha256', macKey).update(iv)
-    const sha256Plain = createHash('sha256')
+    const sha256Plain = createHash('sha256');
     const sha256Enc = createHash('sha256')
-
+
+    const onChunk = async (buff) => {
         sha256Enc.update(buff)
         hmac.update(buff)
-
+
+        // Handle backpressure: if write returns false, wait for drain
+        if (!encFileWriteStream.write(buff)) {
+            await once(encFileWriteStream, 'drain')
+        }
     }
+
     try {
         for await (const data of stream) {
             fileLength += data.length
-
-
-
+
+            if (type === 'remote' &&
+                opts?.maxContentLength &&
+                fileLength + data.length > opts.maxContentLength) {
                 throw new Boom(`content length exceeded when encrypting "${type}"`, {
                     data: { media, type }
                 })
             }
+
             if (originalFileStream) {
                 if (!originalFileStream.write(data)) {
                     await once(originalFileStream, 'drain')
                 }
             }
+
             sha256Plain.update(data)
-
+
+            await onChunk(aes.update(data))
         }
-
+
+        await onChunk(aes.final())
         const mac = hmac.digest().slice(0, 10)
+
         sha256Enc.update(mac)
+
         const fileSha256 = sha256Plain.digest()
         const fileEncSha256 = sha256Enc.digest()
+
         encFileWriteStream.write(mac)
+
+        const encFinishPromise = once(encFileWriteStream, 'finish')
+        const originalFinishPromise = originalFileStream ? once(originalFileStream, 'finish') : Promise.resolve()
+
         encFileWriteStream.end()
-        originalFileStream?.end?.
+        originalFileStream?.end?.()
         stream.destroy()
+
+        // Wait for write streams to fully flush to disk
+        // This helps reduce memory pressure by allowing OS to release buffers
+        await encFinishPromise
+        await originalFinishPromise
+
         logger?.debug('encrypted data successfully')
+
         return {
             mediaKey,
             originalFilePath,
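The `onChunk` helper above is the key memory fix in `encryptedStream`: instead of letting `write()` queue unbounded ciphertext in the stream's internal buffer, it honours the `false` return value and waits for `'drain'`. The same pattern in isolation:

```js
// Minimal backpressure sketch mirroring onChunk above.
const { once } = require('events')

const writeWithBackpressure = async (writable, chunk) => {
    if (!writable.write(chunk)) {
        // the internal buffer is full; wait until it flushes before writing more
        await once(writable, 'drain')
    }
}

// usage: for await (const chunk of source) { await writeWithBackpressure(out, chunk) }
```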
@@ -536,14 +574,16 @@ const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfReq
     catch (error) {
         // destroy all streams with error
         encFileWriteStream.destroy()
-        originalFileStream?.destroy?.
+        originalFileStream?.destroy?.()
         aes.destroy()
         hmac.destroy()
         sha256Plain.destroy()
         sha256Enc.destroy()
         stream.destroy()
+
         try {
             await promises.unlink(encFilePath)
+
             if (originalFilePath) {
                 await promises.unlink(originalFilePath)
             }
@@ -584,39 +624,51 @@ const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startB
     let bytesFetched = 0
     let startChunk = 0
     let firstBlockIsIV = false
+
     // if a start byte is specified -- then we need to fetch the previous chunk as that will form the IV
     if (startByte) {
         const chunk = toSmallestChunkSize(startByte || 0)
+
         if (chunk) {
             startChunk = chunk - AES_CHUNK_SIZE
             bytesFetched = chunk
             firstBlockIsIV = true
         }
     }
+
     const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined
+    const headersInit = options?.headers ? options.headers : undefined
     const headers = {
-        ...(
+        ...(headersInit
+            ? Array.isArray(headersInit)
+                ? Object.fromEntries(headersInit)
+                : headersInit
+            : {}),
         Origin: DEFAULT_ORIGIN
     }
+
     if (startChunk || endChunk) {
         headers.Range = `bytes=${startChunk}-`
+
         if (endChunk) {
             headers.Range += endChunk
         }
     }
+
     // download the message
     const fetched = await getHttpStream(downloadUrl, {
-        ...options || {},
-        headers
-        maxBodyLength: Infinity,
-        maxContentLength: Infinity,
+        ...(options || {}),
+        headers
     })
+
     let remainingBytes = Buffer.from([])
     let aes
+
     const pushBytes = (bytes, push) => {
         if (startByte || endByte) {
             const start = bytesFetched >= startByte ? undefined : Math.max(startByte - bytesFetched, 0)
             const end = bytesFetched + bytes.length < endByte ? undefined : Math.max(endByte - bytesFetched, 0)
+
             push(bytes.slice(start, end))
             bytesFetched += bytes.length
         }
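A worked example of the range alignment above: CBC decryption of a mid-file slice needs the preceding ciphertext block as its IV, so the request is rewound one chunk and `firstBlockIsIV` tells the transform to peel that block off. Assuming `AES_CHUNK_SIZE` is the 16-byte AES block size:

```js
// Worked example of the startChunk arithmetic (AES_CHUNK_SIZE assumed = 16).
const AES_CHUNK_SIZE = 16
const toSmallestChunkSize = n => Math.floor(n / AES_CHUNK_SIZE) * AES_CHUNK_SIZE

const startByte = 1000
const chunk = toSmallestChunkSize(startByte) // 992: nearest block boundary at or below 1000
const startChunk = chunk - AES_CHUNK_SIZE    // 976: the fetch begins one block early
// bytes 976-991 become the IV (firstBlockIsIV); decrypted output resumes at byte 992
```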
@@ -624,19 +676,26 @@ const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startB
             push(bytes)
         }
     }
+
     const output = new Transform({
         transform(chunk, _, callback) {
             let data = Buffer.concat([remainingBytes, chunk])
+
             const decryptLength = toSmallestChunkSize(data.length)
+
             remainingBytes = data.slice(decryptLength)
             data = data.slice(0, decryptLength)
+
             if (!aes) {
                 let ivValue = iv
+
                 if (firstBlockIsIV) {
                     ivValue = data.slice(0, AES_CHUNK_SIZE)
                     data = data.slice(AES_CHUNK_SIZE)
                 }
+
                 aes = createDecipheriv('aes-256-cbc', cipherKey, ivValue)
+
                 // if an end byte that is not EOF is specified
                 // stop auto padding (PKCS7) -- otherwise throws an error for decryption
                 if (endByte) {
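On the auto-padding comment above: when the download stops before the end of the file there is no PKCS#7 padding block in the slice, which is why the code disables auto padding for ranged reads. A runnable illustration:

```js
// Why setAutoPadding(false) matters for a mid-stream CBC slice.
const { createCipheriv, createDecipheriv, randomBytes } = require('crypto')

const key = randomBytes(32)
const iv = randomBytes(16)

const cipher = createCipheriv('aes-256-cbc', key, iv)
const ciphertext = Buffer.concat([cipher.update(Buffer.alloc(64, 1)), cipher.final()])

const decipher = createDecipheriv('aes-256-cbc', key, iv)
decipher.setAutoPadding(false) // without this, finishing on a partial slice throws
const partial = decipher.update(ciphertext.subarray(0, 32)) // first two blocks only
console.log(partial.length) // 32
```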
@@ -659,8 +718,9 @@ const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startB
             catch (error) {
                 callback(error)
             }
-        }
+        }
     })
+
     return fetched.pipe(output, { end: true })
 }
 
@@ -680,43 +740,176 @@ function extensionForMediaMessage(message) {
     return extension
 }
 
+const isNodeRuntime = () => {
+    return (typeof process !== 'undefined' &&
+        process.versions?.node !== null &&
+        typeof process.versions.bun === 'undefined' &&
+        typeof globalThis.Deno === 'undefined')
+}
+
+const uploadWithNodeHttp = async ({ url, filePath, headers, timeoutMs, agent }, redirectCount = 0) => {
+    if (redirectCount > 5) {
+        throw new Error('Too many redirects')
+    }
+
+    const parsedUrl = new URL(url)
+    const httpModule = parsedUrl.protocol === 'https:' ? require('https') : require('http')
+
+    // Get file size for Content-Length header (required for Node.js streaming)
+    const fileStats = await promises.stat(filePath)
+    const fileSize = fileStats.size
+
+    return new Promise((resolve, reject) => {
+        const req = httpModule.request({
+            hostname: parsedUrl.hostname,
+            port: parsedUrl.port || (parsedUrl.protocol === 'https:' ? 443 : 80),
+            path: parsedUrl.pathname + parsedUrl.search,
+            method: 'POST',
+            headers: {
+                ...headers,
+                'Content-Length': fileSize
+            },
+            agent,
+            timeout: timeoutMs
+        }, res => {
+            // Handle redirects (3xx)
+            if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
+                res.resume() // Consume response to free resources
+
+                const newUrl = new URL(res.headers.location, url).toString()
+
+                resolve(uploadWithNodeHttp({
+                    url: newUrl,
+                    filePath,
+                    headers,
+                    timeoutMs,
+                    agent
+                }, redirectCount + 1))
+                return
+            }
+
+            let body = ''
+
+            res.on('data', chunk => (body += chunk))
+            res.on('end', () => {
+                try {
+                    resolve(JSON.parse(body))
+                }
+                catch {
+                    resolve(undefined)
+                }
+            })
+        })
+
+        req.on('error', reject)
+        req.on('timeout', () => {
+            req.destroy()
+            reject(new Error('Upload timeout'))
+        })
+
+        const stream = createReadStream(filePath)
+
+        stream.pipe(req)
+        stream.on('error', err => {
+            req.destroy()
+            reject(err)
+        })
+    })
+}
+
+const uploadWithFetch = async ({ url, filePath, headers, timeoutMs, agent }) => {
+    // Convert Node.js Readable to Web ReadableStream
+    const nodeStream = createReadStream(filePath)
+    const webStream = Readable.toWeb(nodeStream)
+    const response = await fetch(url, {
+        dispatcher: agent,
+        method: 'POST',
+        body: webStream,
+        headers,
+        duplex: 'half',
+        signal: timeoutMs ? AbortSignal.timeout(timeoutMs) : undefined
+    })
+
+    try {
+        return (await response.json())
+    }
+    catch {
+        return undefined
+    }
+}
+
+/**
+ * Uploads media to WhatsApp servers.
+ *
+ * ## Why we have two upload implementations:
+ *
+ * Node.js's native `fetch` (powered by undici) has a known bug where it buffers
+ * the entire request body in memory before sending, even when using streams.
+ * This causes memory issues with large files (e.g., 1GB file = 1GB+ memory usage).
+ * See: https://github.com/nodejs/undici/issues/4058
+ *
+ * Other runtimes (Bun, Deno, browsers) correctly stream the request body without
+ * buffering, so we can use the web-standard Fetch API there.
+ *
+ * ## Future considerations:
+ * Once the undici bug is fixed, we can simplify this to use only the Fetch API
+ * across all runtimes. Monitor the GitHub issue for updates.
+ */
+const uploadMedia = async (params, logger) => {
+    if (isNodeRuntime()) {
+        logger?.debug('Using Node.js https module for upload (avoids undici buffering bug)')
+        return uploadWithNodeHttp(params)
+    }
+    else {
+        logger?.debug('Using web-standard Fetch API for upload');
+        return uploadWithFetch(params)
+    }
+}
+
 const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options }, refreshMediaConn) => {
-    return async (filePath, { mediaType, fileEncSha256B64,
+    return async (filePath, { mediaType, fileEncSha256B64, timeoutMs }) => {
         // send a query JSON to obtain the url & auth token to upload our media
         let uploadInfo = await refreshMediaConn(false)
         let urls
-
+
         const hosts = [...customUploadHosts, ...uploadInfo.hosts]
+
         fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64)
-
-
+
+        // Prepare common headers
+        const customHeaders = (() => {
+            const hdrs = options?.headers;
+            if (!hdrs)
+                return {};
+            return Array.isArray(hdrs) ? Object.fromEntries(hdrs) : hdrs
+        })()
+
+        const headers = {
+            ...customHeaders,
+            'Content-Type': 'application/octet-stream',
+            Origin: DEFAULT_ORIGIN
         }
+
         for (const { hostname } of hosts) {
             logger.debug(`uploading to "${hostname}"`)
-
-            const
-
+
+            const auth = encodeURIComponent(uploadInfo.auth)
+            const url = `https://${hostname}${MEDIA_PATH_MAP[mediaType]}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`
+
             let result
+
             try {
-
-
-
-                headers
-
-
-
-
-                httpsAgent: fetchAgent,
-                timeout: timeoutMs,
-                responseType: 'json',
-                maxBodyLength: Infinity,
-                maxContentLength: Infinity,
-            })
-                result = body.data
-            if (result?.url || result?.directPath) {
+                result = await uploadMedia({
+                    url,
+                    filePath,
+                    headers,
+                    timeoutMs,
+                    agent: fetchAgent
+                }, logger);
+                if (result?.url || result?.direct_path) {
                     urls = {
                         mediaUrl: result.url,
-                        directPath: result.direct_path,
+                        directPath: result.direct_path,
                         meta_hmac: result.meta_hmac,
                         fbid: result.fbid,
                         ts: result.ts
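The runtime gate for the two upload paths is worth reproducing on its own. One nit when porting it: `process.versions?.node !== null` is effectively always true (the property is a string or `undefined`, never `null`), so the sketch below uses a plain truthiness check instead:

```js
// Runtime-detection sketch for the dual upload strategy above. Node keeps the
// http(s)-module path until undici stops buffering streamed request bodies
// (https://github.com/nodejs/undici/issues/4058).
const isNodeRuntime = () =>
    typeof process !== 'undefined' &&
    !!process.versions?.node && // truthiness check; the original compares against null
    typeof process.versions.bun === 'undefined' &&
    typeof globalThis.Deno === 'undefined'

console.log(isNodeRuntime() ? 'upload via node http(s)' : 'upload via fetch')
```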
@@ -729,16 +922,15 @@ const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options },
                 }
             }
             catch (error) {
-                if (axios_1.default.isAxiosError(error)) {
-                    result = error.response?.data
-                }
                 const isLast = hostname === hosts[uploadInfo.hosts.length - 1]?.hostname
-                logger.warn({ trace: error
+                logger.warn({ trace: error?.stack, uploadResult: result }, `Error in uploading to ${hostname} ${isLast ? '' : ', retrying...'}`)
             }
         }
+
         if (!urls) {
             throw new Boom('Media upload failed on all hosts', { statusCode: 500 })
         }
+
         return urls
     }
 }
@@ -778,8 +970,7 @@ const encryptMediaRetryRequest = async (key, mediaKey, meId) => {
             tag: 'rmr',
             attrs: {
                 jid: key.remoteJid,
-
-                // @ts-ignore
+                from_me: (!!key.fromMe).toString(),
                 participant: key.participant || undefined
             }
         }
@@ -847,12 +1038,13 @@ module.exports = {
     getStream,
     generateThumbnail,
     getHttpStream,
-    prepareStream,
+    //prepareStream,
     encryptedStream,
     getUrlFromDirectPath,
     downloadContentFromMessage,
     downloadEncryptedContent,
     extensionForMediaMessage,
+    uploadWithNodeHttp,
     getRawMediaUploadData,
     getWAUploadToServer,
     getMediaRetryKey,