@fhynella/baileys 2.1.7 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/Defaults/baileys-version.json +2 -2
- package/lib/Defaults/connection.js +1 -1
- package/lib/Defaults/constants.js +13 -1
- package/lib/Defaults/history.js +3 -1
- package/lib/Signal/Group/sender-chain-key.js +1 -14
- package/lib/Signal/Group/sender-key-distribution-message.js +2 -2
- package/lib/Signal/Group/sender-key-record.js +2 -11
- package/lib/Signal/Group/sender-key-state.js +11 -57
- package/lib/Signal/libsignal.js +200 -116
- package/lib/Signal/lid-mapping.js +121 -68
- package/lib/Socket/Client/websocket.js +9 -2
- package/lib/Socket/business.js +5 -1
- package/lib/Socket/chats.js +180 -89
- package/lib/Socket/community.js +169 -41
- package/lib/Socket/groups.js +25 -21
- package/lib/Socket/messages-recv.js +458 -333
- package/lib/Socket/messages-send.js +517 -572
- package/lib/Socket/mex.js +61 -0
- package/lib/Socket/newsletter.js +159 -252
- package/lib/Socket/socket.js +283 -100
- package/lib/Types/Newsletter.js +32 -25
- package/lib/Utils/auth-utils.js +189 -354
- package/lib/Utils/browser-utils.js +43 -0
- package/lib/Utils/chat-utils.js +166 -41
- package/lib/Utils/decode-wa-message.js +77 -35
- package/lib/Utils/event-buffer.js +80 -24
- package/lib/Utils/generics.js +28 -128
- package/lib/Utils/history.js +10 -8
- package/lib/Utils/index.js +1 -1
- package/lib/Utils/link-preview.js +17 -32
- package/lib/Utils/lt-hash.js +28 -22
- package/lib/Utils/make-mutex.js +26 -28
- package/lib/Utils/message-retry-manager.js +51 -3
- package/lib/Utils/messages-media.js +364 -173
- package/lib/Utils/messages.js +735 -727
- package/lib/Utils/noise-handler.js +33 -2
- package/lib/Utils/pre-key-manager.js +126 -0
- package/lib/Utils/process-message.js +115 -55
- package/lib/Utils/signal.js +45 -18
- package/lib/Utils/validate-connection.js +52 -29
- package/lib/WABinary/constants.js +1268 -1268
- package/lib/WABinary/decode.js +58 -4
- package/lib/WABinary/encode.js +54 -7
- package/lib/WABinary/jid-utils.js +58 -11
- package/lib/WAM/constants.js +19064 -11563
- package/lib/WAM/encode.js +57 -8
- package/lib/WAUSync/USyncQuery.js +35 -19
- package/package.json +26 -7
- package/lib/Socket/usync.js +0 -83
The diff content shown below is for package/lib/Utils/messages-media.js (+364 -173):

```diff
@@ -3,7 +3,7 @@
 Object.defineProperty(exports, "__esModule", { value: true })
 
 const { Boom } = require("@hapi/boom")
-const {
+const { exec } = require("child_process")
 const { once } = require("events")
 const {
     createHash,
```
```diff
@@ -17,13 +17,17 @@ const {
     createReadStream,
     createWriteStream
 } = require("fs")
+const {
+    parseBuffer,
+    parseFile,
+    parseStream
+} = require('music-metadata')
 const { tmpdir } = require("os")
 const { join } = require("path")
 const {
     Readable,
     Transform
 } = require("stream")
-const axios_1 = require("axios")
 const { proto } = require("../../WAProto")
 const {
     MEDIA_PATH_MAP,
```
```diff
@@ -42,24 +46,27 @@ const {
 } = require("./crypto")
 const { generateMessageID } = require("./generics")
 
-const
-
-
-
-
-
-
-
-
-})()
-])
+const getTmpFilesDirectory = () => tmpdir()
+
+const getImageProcessingLibrary = () => {
+    let sharp, jimp
+
+    try {
+        sharp = require('sharp')
+    } catch {}
+
     if (sharp) {
         return { sharp }
     }
-
+
+    try {
+        jimp = require('jimp')
+    } catch {}
+
     if (jimp) {
         return { jimp }
     }
+
     throw new Boom('No image processing library available')
 }
 
```
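The new `getImageProcessingLibrary` helper turns `sharp` and `jimp` into optional dependencies probed with `try`/`require`; every caller then branches on which one was found. A minimal caller-side sketch (the `thumb` helper and its jimp calls are illustrative, using the classic jimp API whose constants the diff destructures below):

```js
// Hypothetical caller: resize to a 32px-wide JPEG with whichever
// library getImageProcessingLibrary() found at runtime.
const thumb = async (input) => {
    const lib = await getImageProcessingLibrary()
    if ('sharp' in lib) {
        // native libvips path: sharp(input) returns a chainable pipeline
        return lib.sharp(input).resize(32).jpeg({ quality: 50 }).toBuffer()
    }
    // pure-JS fallback: jimp decodes the whole image into memory first
    const img = await lib.jimp.read(input)
    return img.resize(32, lib.jimp.AUTO).quality(50).getBufferAsync(lib.jimp.MIME_JPEG)
}
```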
```diff
@@ -70,33 +77,33 @@ const hkdfInfoKey = (type) => {
 
 const getRawMediaUploadData = async (media, mediaType, logger) => {
     const { stream } = await getStream(media)
-
+
     logger?.debug('got stream for raw upload')
-
+
     const hasher = createHash('sha256')
-    const filePath = join(
+    const filePath = join(getTmpFilesDirectory(), mediaType + generateMessageID())
     const fileWriteStream = createWriteStream(filePath)
-
+
     let fileLength = 0
-
+
     try {
         for await (const data of stream) {
             fileLength += data.length
             hasher.update(data)
-
+
             if (!fileWriteStream.write(data)) {
                 await once(fileWriteStream, 'drain')
             }
         }
-
+
         fileWriteStream.end()
         await once(fileWriteStream, 'finish')
         stream.destroy()
-
+
         const fileSha256 = hasher.digest()
-
+
         logger?.debug('hashed data for raw upload')
-
+
         return {
             filePath: filePath,
             fileSha256,
```
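The `write()` / `'drain'` pairing used throughout `getRawMediaUploadData` is Node's standard backpressure pattern. A self-contained sketch of the same idea (names are illustrative):

```js
// Pump an async-iterable source into a writable stream without letting the
// writable's internal buffer grow unbounded: write() returning false means
// the buffer is full, so wait for 'drain' before writing more.
const { once } = require('events')

const pump = async (source, dest) => {
    for await (const chunk of source) {
        if (!dest.write(chunk)) {
            await once(dest, 'drain')
        }
    }
    dest.end()
    await once(dest, 'finish') // data fully flushed
}
```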
```diff
@@ -106,7 +113,7 @@ const getRawMediaUploadData = async (media, mediaType, logger) => {
     catch (error) {
         fileWriteStream.destroy()
         stream.destroy()
-
+
         try {
             await promises.unlink(filePath)
         }
```
```diff
@@ -130,75 +137,50 @@ async function getMediaKeys(buffer, mediaType) {
     return {
         iv: expandedMediaKey.slice(0, 16),
         cipherKey: expandedMediaKey.slice(16, 48),
-        macKey: expandedMediaKey.slice(48, 80)
+        macKey: expandedMediaKey.slice(48, 80)
     }
 }
 
 /** Extracts video thumb using FFMPEG */
-const extractVideoThumb = (
-
-
-
-
-
-
-
-        '-vcodec', 'mjpeg',
-        'pipe:1'
-    ]
-
-    const ffmpeg = spawn('ffmpeg', args)
-    const chunks = []
-    let errorOutput = ''
-
-    ffmpeg.stdout.on('data', chunk => chunks.push(chunk))
-    ffmpeg.stderr.on('data', data => {
-        errorOutput += data.toString()
-    })
-    ffmpeg.on('error', reject)
-    ffmpeg.on('close', code => {
-        if (code === 0) return resolve(Buffer.concat(chunks))
-        reject(new Error(`ffmpeg exited with code ${code}\n${errorOutput}`))
-    })
+const extractVideoThumb = async (path, destPath, time, size) => new Promise((resolve, reject) => {
+    const cmd = `ffmpeg -ss ${time} -i ${path} -y -vf scale=${size.width}:-1 -vframes 1 -f image2 ${destPath}`
+    exec(cmd, err => {
+        if (err) {
+            reject(err)
+        }
+        else {
+            resolve()
+        }
     })
-}
+})
 
 const extractImageThumb = async (bufferOrFilePath, width = 32, quality = 50) => {
-
-
-        bufferOrFilePath = Buffer.from(response.data)
-    }
+    // TODO: Move entirely to sharp, removing jimp as it supports readable streams
+    // This will have positive speed and performance impacts as well as minimizing RAM usage.
     if (bufferOrFilePath instanceof Readable) {
         bufferOrFilePath = await toBuffer(bufferOrFilePath)
     }
+
     const lib = await getImageProcessingLibrary()
-
-
+
+    if ('sharp' in lib && typeof lib.sharp === 'function') {
+        const img = lib.sharp(bufferOrFilePath)
         const dimensions = await img.metadata()
-        const buffer = await img
-            .resize({
-                width,
-                height: width,
-                fit: 'contain',
-                background: { r: 255, g: 255, b: 255, alpha: 0 }
-            })
-            .jpeg({ quality })
-            .toBuffer()
+        const buffer = await img.resize(width).jpeg({ quality: 50 }).toBuffer()
         return {
             buffer,
             original: {
                 width: dimensions.width,
-                height: dimensions.height
-            }
+                height: dimensions.height
+            }
         }
     }
-    else if ('jimp' in lib && typeof lib.jimp
-
+    else if ('jimp' in lib && typeof lib.jimp.read === 'function') {
+        const { read, MIME_JPEG, RESIZE_BEZIER, AUTO } = lib.jimp
         const jimp = await read(bufferOrFilePath)
         const dimensions = {
             width: jimp.getWidth(),
-            height: jimp.getHeight()
+            height: jimp.getHeight()
         }
         const buffer = await jimp
             .quality(quality)
```
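The slice offsets in `getMediaKeys` (0-16 IV, 16-48 cipher key, 48-80 MAC key) come from WhatsApp's media-key expansion. A sketch of that derivation using Node's built-in HKDF; the 112-byte output length, the empty salt, and the exact info string are assumptions, since the diff only shows the slicing:

```js
const { hkdfSync } = require('crypto')

// mediaKey: the random 32-byte key; info: a media-type-specific string,
// e.g. 'WhatsApp Image Keys' (assumed -- produced by hkdfInfoKey(type)).
const expandMediaKey = (mediaKey, info) => {
    const expanded = Buffer.from(hkdfSync('sha256', mediaKey, Buffer.alloc(0), info, 112))
    return {
        iv: expanded.subarray(0, 16),         // AES-CBC IV
        cipherKey: expanded.subarray(16, 48), // AES-256 key
        macKey: expanded.subarray(48, 80)     // HMAC-SHA256 key
    }
}
```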
```diff
@@ -218,69 +200,70 @@ const encodeBase64EncodedStringForUpload = (b64) => (encodeURIComponent(b64
     .replace(/\+/g, '-')
     .replace(/\//g, '_')
     .replace(/\=+$/, '')))
-
-const generateProfilePicture = async (mediaUpload) => {
-    let
+
+const generateProfilePicture = async (mediaUpload, dimensions) => {
+    let buffer
+
+    const { width: w = 640, height: h = 640 } = dimensions || {}
+
     if (Buffer.isBuffer(mediaUpload)) {
-
-    }
-    else if ('url' in mediaUpload) {
-        bufferOrFilePath = mediaUpload.url.toString()
+        buffer = mediaUpload
     }
     else {
-
+        // Use getStream to handle all WAMediaUpload types (Buffer, Stream, URL)
+        const { stream } = await getStream(mediaUpload)
+        // Convert the resulting stream to a buffer
+        buffer = await toBuffer(stream)
     }
     const lib = await getImageProcessingLibrary()
+
     let img
+
     if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
-        img =
-
-
-
-
-            .toBuffer()
+        img = lib.sharp
+            .default(buffer)
+            .resize(w, h)
+            .jpeg({
+                quality: 50
+            }).toBuffer()
     }
     else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
-
-        const
-        const
-
-        const cropped = image.crop(0, 0, min, max)
-        img = await cropped.scaleToFit(720, 720).getBufferAsync(MIME_JPEG)
+        const jimp = await lib.jimp.read(buffer)
+        const min = Math.min(jimp.width, jimp.height)
+        const cropped = jimp.crop({ x: 0, y: 0, w: min, h: min })
+        img = cropped.resize({ w, h, mode: lib.jimp.ResizeStrategy.BILINEAR }).getBuffer('image/jpeg', { quality: 50 })
     }
     else {
-        throw new Boom('No image processing library available')
+        throw new Boom('No image processing library available');
     }
     return {
-        img: await img
+        img: await img
     }
 }
 
-
 /** gets the SHA256 of the given media message */
 const mediaMessageSHA256B64 = (message) => {
     const media = Object.values(message)[0]
-    return
+    return media?.fileSha256 && Buffer.from(media.fileSha256).toString('base64')
 }
 
 async function getAudioDuration(buffer) {
-    const musicMetadata = await Promise.resolve().then(() => __importStar(require('music-metadata')))
     const options = {
-
+        duration: true
     }
-
+
     let metadata
-
+
     if (Buffer.isBuffer(buffer)) {
-        metadata = await
+        metadata = await parseBuffer(buffer, undefined, options)
     }
     else if (typeof buffer === 'string') {
-        metadata = await
+        metadata = await parseFile(buffer, options)
     }
     else {
-        metadata = await
+        metadata = await parseStream(buffer, undefined, options)
     }
-    return metadata.format
+    return metadata.format?.duration
 }
 
 /**
```
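With the dynamic `__importStar(require('music-metadata'))` gone, duration probing now uses the statically imported `parseBuffer`/`parseFile`/`parseStream`. A usage sketch, assuming a CommonJS-compatible music-metadata build (the same one this file itself requires):

```js
const { parseFile } = require('music-metadata')

const audioSeconds = async (path) => {
    const metadata = await parseFile(path, { duration: true })
    // format.duration is in seconds; may be undefined for unparseable files
    return metadata.format?.duration
}
```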
```diff
@@ -289,7 +272,9 @@ async function getAudioDuration(buffer) {
 async function getAudioWaveform(buffer, logger) {
     try {
         const { default: decoder } = await eval('import(\'audio-decode\')')
+
         let audioData
+
         if (Buffer.isBuffer(buffer)) {
             audioData = buffer
         }
```
```diff
@@ -300,11 +285,13 @@ async function getAudioWaveform(buffer, logger) {
         else {
             audioData = await toBuffer(buffer)
         }
+
         const audioBuffer = await decoder(audioData)
         const rawData = audioBuffer.getChannelData(0) // We only need to work with one channel of data
         const samples = 64 // Number of samples we want to have in our final data set
         const blockSize = Math.floor(rawData.length / samples) // the number of samples in each subdivision
         const filteredData = []
+
         for (let i = 0; i < samples; i++) {
             const blockStart = blockSize * i // the location of the first sample in the block
             let sum = 0
```
```diff
@@ -313,9 +300,11 @@ async function getAudioWaveform(buffer, logger) {
             }
             filteredData.push(sum / blockSize) // divide the sum by the block size to get the average
         }
+
         // This guarantees that the largest data point will be set to 1, and the rest of the data will scale proportionally.
         const multiplier = Math.pow(Math.max(...filteredData), -1)
         const normalizedData = filteredData.map((n) => n * multiplier)
+
         // Generate waveform like WhatsApp
         const waveform = new Uint8Array(normalizedData.map((n) => Math.floor(100 * n)))
         return waveform
```
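For the bucketing in `getAudioWaveform`: each of the 64 output bars is the mean absolute amplitude of one block of samples, normalization rescales so the loudest bar is 1.0, and the final step quantizes to 0-100. A worked example of the arithmetic (values are illustrative):

```js
// A 3 s mono clip at 48 kHz has 144 000 samples, so each of the 64 bars
// averages blockSize = 2250 absolute sample values; after dividing by the
// maximum bar and multiplying by 100, every bar lands in 0..100.
const samples = 64
const rawLength = 3 * 48000                        // 144000
const blockSize = Math.floor(rawLength / samples)  // 2250
console.log(blockSize)
```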
```diff
@@ -345,22 +334,22 @@ const getStream = async (item, opts) => {
     if (Buffer.isBuffer(item)) {
         return { stream: toReadable(item), type: 'buffer' }
     }
-
+
     if ('stream' in item) {
         return { stream: item.stream, type: 'readable' }
     }
-
+
     const urlStr = item.url.toString()
-
+
     if (urlStr.startsWith('data:')) {
         const buffer = Buffer.from(urlStr.split(',')[1], 'base64')
         return { stream: await toReadable(buffer), type: 'buffer' }
     }
-
+
     if (urlStr.startsWith('http://') || urlStr.startsWith('https://')) {
         return { stream: await getHttpStream(item.url, opts), type: 'remote' }
     }
-
+
     return { stream: createReadStream(item.url), type: 'file' }
 }
 
```
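Usage sketch for `getStream`: every WAMediaUpload shape normalizes to a Readable plus a type tag, and `encryptedStream` later uses `type === 'remote'` to decide whether `opts.maxContentLength` applies. The import path is assumed from this file's location in the package:

```js
const { getStream } = require('@fhynella/baileys/lib/Utils/messages-media')

const demo = async () => {
    const buf = await getStream(Buffer.from('hello'))                  // type: 'buffer'
    const web = await getStream({ url: 'https://example.com/a.jpg' })  // type: 'remote'
    const file = await getStream({ url: './a.jpg' })                   // type: 'file'
    for (const { stream } of [buf, web, file]) {
        stream.destroy() // callers own the returned stream
    }
}
```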
```diff
@@ -368,23 +357,31 @@ const getStream = async (item, opts) => {
 async function generateThumbnail(file, mediaType, options) {
     let thumbnail
     let originalImageDimensions
+
     if (mediaType === 'image') {
-        const { buffer, original } = await extractImageThumb(file
+        const { buffer, original } = await extractImageThumb(file)
+
         thumbnail = buffer.toString('base64')
+
         if (original.width && original.height) {
             originalImageDimensions = {
                 width: original.width,
-                height: original.height
+                height: original.height
             }
         }
     }
     else if (mediaType === 'video') {
+        const imgFilename = join(getTmpFilesDirectory(), generateMessageID() + '.jpg')
         try {
-
+            await extractVideoThumb(file, imgFilename, '00:00:00', { width: 32, height: 32 })
+            const buff = await promises.readFile(imgFilename)
+
             thumbnail = buff.toString('base64')
+
+            await promises.unlink(imgFilename)
         }
         catch (err) {
-            options
+            options.logger?.debug('could not generate video thumb: ' + err)
         }
     }
     return {
```
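The rewritten `extractVideoThumb` builds a shell command with `exec` and interpolates `path`/`destPath` directly, so file names containing spaces or shell metacharacters need care. An alternative sketch using `execFile`, which passes arguments without a shell (not what the package does; shown for comparison):

```js
const { execFile } = require('child_process')

const safeVideoThumb = (path, destPath, time, width) =>
    new Promise((resolve, reject) => {
        execFile('ffmpeg', [
            '-ss', time,               // seek position, e.g. '00:00:00'
            '-i', path,                // input video
            '-y',                      // overwrite output
            '-vf', `scale=${width}:-1`, // keep aspect ratio
            '-vframes', '1',           // single frame
            '-f', 'image2',
            destPath
        ], err => (err ? reject(err) : resolve()))
    })
```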
```diff
@@ -394,11 +391,20 @@ async function generateThumbnail(file, mediaType, options) {
     }
 }
 
 const getHttpStream = async (url, options = {}) => {
-    const
-
+    const response = await fetch(url.toString(), {
+        dispatcher: options.dispatcher,
+        method: 'GET',
+        headers: options.headers
+    })
+
+    if (!response.ok) {
+        throw new Boom(`Failed to fetch stream from ${url}`, { statusCode: response.status, data: { url } })
+    }
+
+    return response.body instanceof Readable ? response.body : Readable.fromWeb(response.body)
 }
 
-const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
+/*const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
     const { stream, type } = await getStream(media, opts)
     logger?.debug('fetched media stream')
 
```
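Note that `dispatcher` is an undici extension to Node's global `fetch` (other runtimes ignore it). A sketch of threading a proxy through `getHttpStream`'s options; `ProxyAgent` comes from the undici package, which is an assumption, as the diff never names a concrete dispatcher:

```js
const { ProxyAgent } = require('undici')

const fetchViaProxy = async () => {
    const stream = await getHttpStream('https://mmg.whatsapp.net/some/path', {
        dispatcher: new ProxyAgent('http://127.0.0.1:8080')
    })
    stream.on('data', () => { /* consume */ })
}
```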
```diff
@@ -470,59 +476,90 @@ const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequi
         }
         throw error
     }
-}
+}*/
 
 const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
     const { stream, type } = await getStream(media, opts)
+
     logger?.debug('fetched media stream')
+
     const mediaKey = randomBytes(32)
     const { cipherKey, iv, macKey } = await getMediaKeys(mediaKey, mediaType)
-    const encFilePath = join(
+    const encFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageID() + '-enc')
     const encFileWriteStream = createWriteStream(encFilePath)
-
+
+    let originalFileStream;
     let originalFilePath
+
     if (saveOriginalFileIfRequired) {
-        originalFilePath = join(
+        originalFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageID() + '-original')
         originalFileStream = createWriteStream(originalFilePath)
     }
+
     let fileLength = 0
-
+
+    const aes = createCipheriv('aes-256-cbc', cipherKey, iv)
     const hmac = createHmac('sha256', macKey).update(iv)
-    const sha256Plain = createHash('sha256')
+    const sha256Plain = createHash('sha256');
     const sha256Enc = createHash('sha256')
-
+
+    const onChunk = async (buff) => {
         sha256Enc.update(buff)
         hmac.update(buff)
-
+
+        // Handle backpressure: if write returns false, wait for drain
+        if (!encFileWriteStream.write(buff)) {
+            await once(encFileWriteStream, 'drain')
+        }
     }
+
     try {
         for await (const data of stream) {
             fileLength += data.length
-
-
-
+
+            if (type === 'remote' &&
+                opts?.maxContentLength &&
+                fileLength + data.length > opts.maxContentLength) {
                 throw new Boom(`content length exceeded when encrypting "${type}"`, {
                     data: { media, type }
                 })
             }
+
             if (originalFileStream) {
                 if (!originalFileStream.write(data)) {
                     await once(originalFileStream, 'drain')
                 }
             }
+
             sha256Plain.update(data)
-
+
+            await onChunk(aes.update(data))
         }
-
+
+        await onChunk(aes.final())
         const mac = hmac.digest().slice(0, 10)
+
         sha256Enc.update(mac)
+
         const fileSha256 = sha256Plain.digest()
         const fileEncSha256 = sha256Enc.digest()
+
         encFileWriteStream.write(mac)
+
+        const encFinishPromise = once(encFileWriteStream, 'finish')
+        const originalFinishPromise = originalFileStream ? once(originalFileStream, 'finish') : Promise.resolve()
+
         encFileWriteStream.end()
-        originalFileStream?.end?.
+        originalFileStream?.end?.()
         stream.destroy()
+
+        // Wait for write streams to fully flush to disk
+        // This helps reduce memory pressure by allowing OS to release buffers
+        await encFinishPromise
+        await originalFinishPromise
+
         logger?.debug('encrypted data successfully')
+
         return {
             mediaKey,
             originalFilePath,
```
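The `.enc` temp file written above is AES-256-CBC ciphertext followed by a 10-byte truncated HMAC-SHA256 computed over `iv || ciphertext` with `macKey`. A sketch of how a receiving side can validate such a blob:

```js
const { createHmac } = require('crypto')

const verifyMediaBlob = (blob, iv, macKey) => {
    const ciphertext = blob.subarray(0, blob.length - 10)
    const mac = blob.subarray(blob.length - 10)
    const expected = createHmac('sha256', macKey)
        .update(iv)          // the HMAC above is seeded with the IV...
        .update(ciphertext)  // ...then fed every ciphertext chunk
        .digest()
        .subarray(0, 10)     // and truncated to 10 bytes
    return expected.equals(mac)
}
```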
```diff
@@ -536,14 +573,16 @@ const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfReq
     catch (error) {
         // destroy all streams with error
         encFileWriteStream.destroy()
-        originalFileStream?.destroy?.
+        originalFileStream?.destroy?.()
         aes.destroy()
         hmac.destroy()
         sha256Plain.destroy()
         sha256Enc.destroy()
         stream.destroy()
+
         try {
             await promises.unlink(encFilePath)
+
             if (originalFilePath) {
                 await promises.unlink(originalFilePath)
             }
```
```diff
@@ -565,13 +604,13 @@ const toSmallestChunkSize = (num) => {
 const getUrlFromDirectPath = (directPath) => `https://${DEF_HOST}${directPath}`
 
 const downloadContentFromMessage = async ({ mediaKey, directPath, url }, type, opts = {}) => {
-
+    const isValidMediaUrl = url?.startsWith('https://mmg.whatsapp.net/')
     const downloadUrl = isValidMediaUrl ? url : getUrlFromDirectPath(directPath)
-
+
     if (!downloadUrl) {
-
+        throw new Boom('No valid media URL or directPath present in message', { statusCode: 400 })
     }
-
+
     const keys = await getMediaKeys(mediaKey, type)
     return downloadEncryptedContent(downloadUrl, keys, opts)
 }
```
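Consumer-side sketch of the hardened `downloadContentFromMessage` (the import is the package's public export; the message shape is abbreviated):

```js
const { downloadContentFromMessage } = require('@fhynella/baileys')

// imageMessage: the proto.Message.ImageMessage carrying mediaKey,
// directPath and url fields
const fetchImage = async (imageMessage) => {
    const stream = await downloadContentFromMessage(imageMessage, 'image')
    const chunks = []
    for await (const chunk of stream) {
        chunks.push(chunk)
    }
    return Buffer.concat(chunks) // decrypted media bytes
}
```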
```diff
@@ -584,39 +623,51 @@ const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startB
     let bytesFetched = 0
     let startChunk = 0
     let firstBlockIsIV = false
+
     // if a start byte is specified -- then we need to fetch the previous chunk as that will form the IV
     if (startByte) {
         const chunk = toSmallestChunkSize(startByte || 0)
+
         if (chunk) {
             startChunk = chunk - AES_CHUNK_SIZE
             bytesFetched = chunk
             firstBlockIsIV = true
         }
     }
+
     const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined
+    const headersInit = options?.headers ? options.headers : undefined
     const headers = {
-        ...(
+        ...(headersInit
+            ? Array.isArray(headersInit)
+                ? Object.fromEntries(headersInit)
+                : headersInit
+            : {}),
         Origin: DEFAULT_ORIGIN
     }
+
     if (startChunk || endChunk) {
         headers.Range = `bytes=${startChunk}-`
+
         if (endChunk) {
             headers.Range += endChunk
         }
     }
+
     // download the message
     const fetched = await getHttpStream(downloadUrl, {
-        ...options || {},
-        headers
-        maxBodyLength: Infinity,
-        maxContentLength: Infinity,
+        ...(options || {}),
+        headers
     })
+
     let remainingBytes = Buffer.from([])
     let aes
+
     const pushBytes = (bytes, push) => {
         if (startByte || endByte) {
             const start = bytesFetched >= startByte ? undefined : Math.max(startByte - bytesFetched, 0)
             const end = bytesFetched + bytes.length < endByte ? undefined : Math.max(endByte - bytesFetched, 0)
+
             push(bytes.slice(start, end))
             bytesFetched += bytes.length
         }
```
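The range bookkeeping above exploits CBC structure: to start decrypting mid-file you only need the previous ciphertext block, which acts as the IV. A worked example, assuming `AES_CHUNK_SIZE` is 16 (the AES block size) and `toSmallestChunkSize` rounds down to a multiple of it:

```js
// startByte = 100:
//   chunk          = toSmallestChunkSize(100) // 96
//   startChunk     = 96 - 16                  // 80 -> headers.Range = 'bytes=80-'
//   bytesFetched   = 96
//   firstBlockIsIV = true
// Bytes 80..95 are the ciphertext block preceding the requested data; in
// CBC, decrypting block n needs only the key and ciphertext block n-1.
const AES_CHUNK_SIZE = 16
const toSmallestChunkSize = (num) => Math.floor(num / AES_CHUNK_SIZE) * AES_CHUNK_SIZE // assumed definition
console.log(toSmallestChunkSize(100) - AES_CHUNK_SIZE) // 80
```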
```diff
@@ -624,19 +675,26 @@ const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startB
             push(bytes)
         }
     }
+
     const output = new Transform({
         transform(chunk, _, callback) {
             let data = Buffer.concat([remainingBytes, chunk])
+
             const decryptLength = toSmallestChunkSize(data.length)
+
             remainingBytes = data.slice(decryptLength)
             data = data.slice(0, decryptLength)
+
             if (!aes) {
                 let ivValue = iv
+
                 if (firstBlockIsIV) {
                     ivValue = data.slice(0, AES_CHUNK_SIZE)
                     data = data.slice(AES_CHUNK_SIZE)
                 }
+
                 aes = createDecipheriv('aes-256-cbc', cipherKey, ivValue)
+
                 // if an end byte that is not EOF is specified
                 // stop auto padding (PKCS7) -- otherwise throws an error for decryption
                 if (endByte) {
```
```diff
@@ -659,8 +717,9 @@ const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startB
             catch (error) {
                 callback(error)
             }
-        }
+        }
     })
+
     return fetched.pipe(output, { end: true })
 }
 
```
```diff
@@ -680,43 +739,176 @@ function extensionForMediaMessage(message) {
     return extension
 }
 
+const isNodeRuntime = () => {
+    return (typeof process !== 'undefined' &&
+        process.versions?.node !== null &&
+        typeof process.versions.bun === 'undefined' &&
+        typeof globalThis.Deno === 'undefined')
+}
+
+const uploadWithNodeHttp = async ({ url, filePath, headers, timeoutMs, agent }, redirectCount = 0) => {
+    if (redirectCount > 5) {
+        throw new Error('Too many redirects')
+    }
+
+    const parsedUrl = new URL(url)
+    const httpModule = parsedUrl.protocol === 'https:' ? require('https') : require('http')
+
+    // Get file size for Content-Length header (required for Node.js streaming)
+    const fileStats = await promises.stat(filePath)
+    const fileSize = fileStats.size
+
+    return new Promise((resolve, reject) => {
+        const req = httpModule.request({
+            hostname: parsedUrl.hostname,
+            port: parsedUrl.port || (parsedUrl.protocol === 'https:' ? 443 : 80),
+            path: parsedUrl.pathname + parsedUrl.search,
+            method: 'POST',
+            headers: {
+                ...headers,
+                'Content-Length': fileSize
+            },
+            agent,
+            timeout: timeoutMs
+        }, res => {
+            // Handle redirects (3xx)
+            if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
+                res.resume() // Consume response to free resources
+
+                const newUrl = new URL(res.headers.location, url).toString()
+
+                resolve(uploadWithNodeHttp({
+                    url: newUrl,
+                    filePath,
+                    headers,
+                    timeoutMs,
+                    agent
+                }, redirectCount + 1))
+                return
+            }
+
+            let body = ''
+
+            res.on('data', chunk => (body += chunk))
+            res.on('end', () => {
+                try {
+                    resolve(JSON.parse(body))
+                }
+                catch {
+                    resolve(undefined)
+                }
+            })
+        })
+
+        req.on('error', reject)
+        req.on('timeout', () => {
+            req.destroy()
+            reject(new Error('Upload timeout'))
+        })
+
+        const stream = createReadStream(filePath)
+
+        stream.pipe(req)
+        stream.on('error', err => {
+            req.destroy()
+            reject(err)
+        })
+    })
+}
+
+const uploadWithFetch = async ({ url, filePath, headers, timeoutMs, agent }) => {
+    // Convert Node.js Readable to Web ReadableStream
+    const nodeStream = createReadStream(filePath)
+    const webStream = Readable.toWeb(nodeStream)
+    const response = await fetch(url, {
+        dispatcher: agent,
+        method: 'POST',
+        body: webStream,
+        headers,
+        duplex: 'half',
+        signal: timeoutMs ? AbortSignal.timeout(timeoutMs) : undefined
+    })
+
+    try {
+        return (await response.json())
+    }
+    catch {
+        return undefined
+    }
+}
+
+/**
+ * Uploads media to WhatsApp servers.
+ *
+ * ## Why we have two upload implementations:
+ *
+ * Node.js's native `fetch` (powered by undici) has a known bug where it buffers
+ * the entire request body in memory before sending, even when using streams.
+ * This causes memory issues with large files (e.g., 1GB file = 1GB+ memory usage).
+ * See: https://github.com/nodejs/undici/issues/4058
+ *
+ * Other runtimes (Bun, Deno, browsers) correctly stream the request body without
+ * buffering, so we can use the web-standard Fetch API there.
+ *
+ * ## Future considerations:
+ * Once the undici bug is fixed, we can simplify this to use only the Fetch API
+ * across all runtimes. Monitor the GitHub issue for updates.
+ */
+const uploadMedia = async (params, logger) => {
+    if (isNodeRuntime()) {
+        logger?.debug('Using Node.js https module for upload (avoids undici buffering bug)')
+        return uploadWithNodeHttp(params)
+    }
+    else {
+        logger?.debug('Using web-standard Fetch API for upload');
+        return uploadWithFetch(params)
+    }
+}
+
 const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options }, refreshMediaConn) => {
-    return async (filePath, { mediaType, fileEncSha256B64,
+    return async (filePath, { mediaType, fileEncSha256B64, timeoutMs }) => {
         // send a query JSON to obtain the url & auth token to upload our media
         let uploadInfo = await refreshMediaConn(false)
         let urls
-
+
         const hosts = [...customUploadHosts, ...uploadInfo.hosts]
+
         fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64)
-
-
+
+        // Prepare common headers
+        const customHeaders = (() => {
+            const hdrs = options?.headers;
+            if (!hdrs)
+                return {};
+            return Array.isArray(hdrs) ? Object.fromEntries(hdrs) : hdrs
+        })()
+
+        const headers = {
+            ...customHeaders,
+            'Content-Type': 'application/octet-stream',
+            Origin: DEFAULT_ORIGIN
         }
+
         for (const { hostname } of hosts) {
             logger.debug(`uploading to "${hostname}"`)
-
-            const
-
+
+            const auth = encodeURIComponent(uploadInfo.auth)
+            const url = `https://${hostname}${MEDIA_PATH_MAP[mediaType]}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`
+
             let result
+
             try {
-
-
-
-                headers
-
-
-
-
-                httpsAgent: fetchAgent,
-                timeout: timeoutMs,
-                responseType: 'json',
-                maxBodyLength: Infinity,
-                maxContentLength: Infinity,
-            })
-            result = body.data
-            if (result?.url || result?.directPath) {
+                result = await uploadMedia({
+                    url,
+                    filePath,
+                    headers,
+                    timeoutMs,
+                    agent: fetchAgent
+                }, logger);
+                if (result?.url || result?.direct_path) {
                     urls = {
                         mediaUrl: result.url,
-                        directPath: result.direct_path,
+                        directPath: result.direct_path,
                         meta_hmac: result.meta_hmac,
                         fbid: result.fbid,
                         ts: result.ts
```
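A hedged sketch of calling the new `uploadMedia` directly; the URL shape and header set mirror what `getWAUploadToServer` builds, the hash/auth values are placeholders, and the Origin value is assumed to be the package's `DEFAULT_ORIGIN`:

```js
const upload = async (logger) => {
    const result = await uploadMedia({
        url: 'https://mmg.whatsapp.net/mms/image/<enc-sha256-b64>?auth=<auth>&token=<enc-sha256-b64>',
        filePath: '/tmp/imageABC123-enc', // the '-enc' temp file from encryptedStream
        headers: {
            'Content-Type': 'application/octet-stream',
            Origin: 'https://web.whatsapp.com' // assumed DEFAULT_ORIGIN
        },
        timeoutMs: 30_000
    }, logger)
    // on success the parsed JSON includes url / direct_path (snake_case),
    // which the caller maps into { mediaUrl, directPath, ... }
    return result
}
```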
```diff
@@ -729,16 +921,15 @@ const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options },
                 }
             }
             catch (error) {
-                if (axios_1.default.isAxiosError(error)) {
-                    result = error.response?.data
-                }
                 const isLast = hostname === hosts[uploadInfo.hosts.length - 1]?.hostname
-                logger.warn({ trace: error
+                logger.warn({ trace: error?.stack, uploadResult: result }, `Error in uploading to ${hostname} ${isLast ? '' : ', retrying...'}`)
             }
         }
+
         if (!urls) {
             throw new Boom('Media upload failed on all hosts', { statusCode: 500 })
         }
+
         return urls
     }
 }
```
```diff
@@ -778,8 +969,7 @@ const encryptMediaRetryRequest = async (key, mediaKey, meId) => {
             tag: 'rmr',
             attrs: {
                 jid: key.remoteJid,
-
-                // @ts-ignore
+                from_me: (!!key.fromMe).toString(),
                 participant: key.participant || undefined
             }
         }
```
```diff
@@ -847,12 +1037,13 @@ module.exports = {
     getStream,
     generateThumbnail,
     getHttpStream,
-    prepareStream,
+    //prepareStream,
     encryptedStream,
     getUrlFromDirectPath,
     downloadContentFromMessage,
     downloadEncryptedContent,
     extensionForMediaMessage,
+    uploadWithNodeHttp,
     getRawMediaUploadData,
     getWAUploadToServer,
     getMediaRetryKey,
```