xmd-baileys 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +10 -0
- package/WAProto/index.js +169661 -0
- package/engine-requirements.js +10 -0
- package/lib/Defaults/baileys-version.json +3 -0
- package/lib/Defaults/constants.js +74 -0
- package/lib/Defaults/index.d.ts +53 -0
- package/lib/Defaults/index.js +147 -0
- package/lib/Defaults/media.js +48 -0
- package/lib/Defaults/phonenumber-mcc.json +223 -0
- package/lib/Signal/Group/ciphertext-message.d.ts +9 -0
- package/lib/Signal/Group/ciphertext-message.js +15 -0
- package/lib/Signal/Group/group-session-builder.d.ts +14 -0
- package/lib/Signal/Group/group-session-builder.js +64 -0
- package/lib/Signal/Group/group_cipher.d.ts +17 -0
- package/lib/Signal/Group/group_cipher.js +96 -0
- package/lib/Signal/Group/index.d.ts +11 -0
- package/lib/Signal/Group/index.js +57 -0
- package/lib/Signal/Group/keyhelper.d.ts +10 -0
- package/lib/Signal/Group/keyhelper.js +55 -0
- package/lib/Signal/Group/queue-job.d.ts +1 -0
- package/lib/Signal/Group/queue-job.js +57 -0
- package/lib/Signal/Group/sender-chain-key.d.ts +13 -0
- package/lib/Signal/Group/sender-chain-key.js +34 -0
- package/lib/Signal/Group/sender-key-distribution-message.d.ts +16 -0
- package/lib/Signal/Group/sender-key-distribution-message.js +66 -0
- package/lib/Signal/Group/sender-key-message.d.ts +18 -0
- package/lib/Signal/Group/sender-key-message.js +69 -0
- package/lib/Signal/Group/sender-key-name.d.ts +17 -0
- package/lib/Signal/Group/sender-key-name.js +51 -0
- package/lib/Signal/Group/sender-key-record.d.ts +30 -0
- package/lib/Signal/Group/sender-key-record.js +53 -0
- package/lib/Signal/Group/sender-key-state.d.ts +38 -0
- package/lib/Signal/Group/sender-key-state.js +99 -0
- package/lib/Signal/Group/sender-message-key.d.ts +11 -0
- package/lib/Signal/Group/sender-message-key.js +29 -0
- package/lib/Signal/Group/tmp +1 -0
- package/lib/Signal/libsignal.d.ts +3 -0
- package/lib/Signal/libsignal.js +174 -0
- package/lib/Socket/Client/abstract-socket-client.d.ts +17 -0
- package/lib/Socket/Client/abstract-socket-client.js +13 -0
- package/lib/Socket/Client/index.d.ts +3 -0
- package/lib/Socket/Client/index.js +19 -0
- package/lib/Socket/Client/mobile-socket-client.d.ts +13 -0
- package/lib/Socket/Client/mobile-socket-client.js +65 -0
- package/lib/Socket/Client/tmp +1 -0
- package/lib/Socket/Client/web-socket-client.d.ts +12 -0
- package/lib/Socket/Client/web-socket-client.js +62 -0
- package/lib/Socket/business.d.ts +171 -0
- package/lib/Socket/business.js +260 -0
- package/lib/Socket/chats.d.ts +267 -0
- package/lib/Socket/chats.js +970 -0
- package/lib/Socket/groups.d.ts +115 -0
- package/lib/Socket/groups.js +317 -0
- package/lib/Socket/index.d.ts +173 -0
- package/lib/Socket/index.js +11 -0
- package/lib/Socket/luxu.d.ts +268 -0
- package/lib/Socket/luxu.js +591 -0
- package/lib/Socket/messages-recv.d.ts +161 -0
- package/lib/Socket/messages-recv.js +1110 -0
- package/lib/Socket/messages-send.d.ts +149 -0
- package/lib/Socket/messages-send.js +912 -0
- package/lib/Socket/newsletter.d.ts +134 -0
- package/lib/Socket/newsletter.js +315 -0
- package/lib/Socket/registration.d.ts +267 -0
- package/lib/Socket/registration.js +166 -0
- package/lib/Socket/socket.d.ts +43 -0
- package/lib/Socket/socket.js +665 -0
- package/lib/Socket/usync.d.ts +36 -0
- package/lib/Socket/usync.js +70 -0
- package/lib/Store/index.d.ts +3 -0
- package/lib/Store/index.js +10 -0
- package/lib/Store/make-cache-manager-store.d.ts +13 -0
- package/lib/Store/make-cache-manager-store.js +83 -0
- package/lib/Store/make-in-memory-store.d.ts +118 -0
- package/lib/Store/make-in-memory-store.js +427 -0
- package/lib/Store/make-ordered-dictionary.d.ts +13 -0
- package/lib/Store/make-ordered-dictionary.js +81 -0
- package/lib/Store/object-repository.d.ts +10 -0
- package/lib/Store/object-repository.js +27 -0
- package/lib/Store/tmp +1 -0
- package/lib/Types/Auth.d.ts +110 -0
- package/lib/Types/Auth.js +2 -0
- package/lib/Types/Call.d.ts +13 -0
- package/lib/Types/Call.js +2 -0
- package/lib/Types/Chat.d.ts +102 -0
- package/lib/Types/Chat.js +4 -0
- package/lib/Types/Contact.d.ts +19 -0
- package/lib/Types/Contact.js +2 -0
- package/lib/Types/Events.d.ts +157 -0
- package/lib/Types/Events.js +2 -0
- package/lib/Types/GroupMetadata.d.ts +55 -0
- package/lib/Types/GroupMetadata.js +2 -0
- package/lib/Types/Label.d.ts +35 -0
- package/lib/Types/Label.js +27 -0
- package/lib/Types/LabelAssociation.d.ts +29 -0
- package/lib/Types/LabelAssociation.js +9 -0
- package/lib/Types/Message.d.ts +273 -0
- package/lib/Types/Message.js +9 -0
- package/lib/Types/Newsletter.d.ts +103 -0
- package/lib/Types/Newsletter.js +38 -0
- package/lib/Types/Product.d.ts +78 -0
- package/lib/Types/Product.js +2 -0
- package/lib/Types/Signal.d.ts +57 -0
- package/lib/Types/Signal.js +2 -0
- package/lib/Types/Socket.d.ts +111 -0
- package/lib/Types/Socket.js +2 -0
- package/lib/Types/State.d.ts +27 -0
- package/lib/Types/State.js +2 -0
- package/lib/Types/USync.d.ts +25 -0
- package/lib/Types/USync.js +2 -0
- package/lib/Types/index.d.ts +57 -0
- package/lib/Types/index.js +42 -0
- package/lib/Types/tmp +1 -0
- package/lib/Utils/auth-utils.d.ts +18 -0
- package/lib/Utils/auth-utils.js +206 -0
- package/lib/Utils/baileys-event-stream.d.ts +16 -0
- package/lib/Utils/baileys-event-stream.js +63 -0
- package/lib/Utils/business.d.ts +22 -0
- package/lib/Utils/business.js +234 -0
- package/lib/Utils/chat-utils.d.ts +71 -0
- package/lib/Utils/chat-utils.js +729 -0
- package/lib/Utils/crypto.d.ts +41 -0
- package/lib/Utils/crypto.js +151 -0
- package/lib/Utils/decode-wa-message.d.ts +19 -0
- package/lib/Utils/decode-wa-message.js +198 -0
- package/lib/Utils/event-buffer.d.ts +35 -0
- package/lib/Utils/event-buffer.js +514 -0
- package/lib/Utils/generics.d.ts +92 -0
- package/lib/Utils/generics.js +423 -0
- package/lib/Utils/history.d.ts +15 -0
- package/lib/Utils/history.js +96 -0
- package/lib/Utils/index.d.ts +17 -0
- package/lib/Utils/index.js +33 -0
- package/lib/Utils/link-preview.d.ts +21 -0
- package/lib/Utils/link-preview.js +93 -0
- package/lib/Utils/logger.d.ts +4 -0
- package/lib/Utils/logger.js +7 -0
- package/lib/Utils/lt-hash.d.ts +12 -0
- package/lib/Utils/lt-hash.js +51 -0
- package/lib/Utils/make-mutex.d.ts +7 -0
- package/lib/Utils/make-mutex.js +43 -0
- package/lib/Utils/messages-media.d.ts +116 -0
- package/lib/Utils/messages-media.js +1109 -0
- package/lib/Utils/messages.d.ts +77 -0
- package/lib/Utils/messages.js +1858 -0
- package/lib/Utils/noise-handler.d.ts +21 -0
- package/lib/Utils/noise-handler.js +155 -0
- package/lib/Utils/process-message.d.ts +41 -0
- package/lib/Utils/process-message.js +321 -0
- package/lib/Utils/signal.d.ts +32 -0
- package/lib/Utils/signal.js +153 -0
- package/lib/Utils/tmp +1 -0
- package/lib/Utils/use-multi-file-auth-state.d.ts +13 -0
- package/lib/Utils/use-multi-file-auth-state.js +119 -0
- package/lib/Utils/validate-connection.d.ts +11 -0
- package/lib/Utils/validate-connection.js +229 -0
- package/lib/WABinary/constants.d.ts +30 -0
- package/lib/WABinary/constants.js +40 -0
- package/lib/WABinary/decode.d.ts +7 -0
- package/lib/WABinary/decode.js +252 -0
- package/lib/WABinary/encode.d.ts +3 -0
- package/lib/WABinary/encode.js +265 -0
- package/lib/WABinary/generic-utils.d.ts +17 -0
- package/lib/WABinary/generic-utils.js +198 -0
- package/lib/WABinary/index.d.ts +5 -0
- package/lib/WABinary/index.js +21 -0
- package/lib/WABinary/jid-utils.d.ts +31 -0
- package/lib/WABinary/jid-utils.js +65 -0
- package/lib/WABinary/tmp +1 -0
- package/lib/WABinary/types.d.ts +18 -0
- package/lib/WABinary/types.js +2 -0
- package/lib/WAM/BinaryInfo.d.ts +17 -0
- package/lib/WAM/BinaryInfo.js +13 -0
- package/lib/WAM/constants.d.ts +38 -0
- package/lib/WAM/constants.js +15350 -0
- package/lib/WAM/encode.d.ts +3 -0
- package/lib/WAM/encode.js +155 -0
- package/lib/WAM/index.d.ts +3 -0
- package/lib/WAM/index.js +19 -0
- package/lib/WAM/tmp +1 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.d.ts +9 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.js +32 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.d.ts +22 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +57 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.d.ts +12 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +30 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.d.ts +12 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js +42 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.d.ts +25 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +53 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.d.ts +8 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +24 -0
- package/lib/WAUSync/Protocols/index.d.ts +4 -0
- package/lib/WAUSync/Protocols/index.js +20 -0
- package/lib/WAUSync/Protocols/tmp +1 -0
- package/lib/WAUSync/USyncQuery.d.ts +28 -0
- package/lib/WAUSync/USyncQuery.js +89 -0
- package/lib/WAUSync/USyncUser.d.ts +12 -0
- package/lib/WAUSync/USyncUser.js +26 -0
- package/lib/WAUSync/index.js +19 -0
- package/lib/index.d.ts +12 -0
- package/lib/index.js +39 -0
- package/lib/temp +1 -0
- package/package.json +109 -0
|
@@ -0,0 +1,1109 @@
|
|
|
1
|
+
"use strict"
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true })
|
|
4
|
+
|
|
5
|
+
const { Boom } = require("@hapi/boom")
|
|
6
|
+
const { exec } = require("child_process")
|
|
7
|
+
const { once } = require("events")
|
|
8
|
+
const {
|
|
9
|
+
createHash,
|
|
10
|
+
randomBytes,
|
|
11
|
+
createHmac,
|
|
12
|
+
createCipheriv,
|
|
13
|
+
createDecipheriv
|
|
14
|
+
} = require("crypto")
|
|
15
|
+
const {
|
|
16
|
+
promises,
|
|
17
|
+
createReadStream,
|
|
18
|
+
createWriteStream
|
|
19
|
+
} = require("fs")
|
|
20
|
+
const {
|
|
21
|
+
parseBuffer,
|
|
22
|
+
parseFile,
|
|
23
|
+
parseStream
|
|
24
|
+
} = require('music-metadata')
|
|
25
|
+
const { tmpdir } = require("os")
|
|
26
|
+
const { join } = require("path")
|
|
27
|
+
const {
|
|
28
|
+
Readable,
|
|
29
|
+
Transform
|
|
30
|
+
} = require("stream")
|
|
31
|
+
const { proto } = require("../../WAProto")
|
|
32
|
+
const {
|
|
33
|
+
MEDIA_PATH_MAP,
|
|
34
|
+
MEDIA_HKDF_KEY_MAPPING
|
|
35
|
+
} = require("../Defaults/media")
|
|
36
|
+
const { DEFAULT_ORIGIN } = require("../Defaults/constants")
|
|
37
|
+
const {
|
|
38
|
+
getBinaryNodeChild,
|
|
39
|
+
getBinaryNodeChildBuffer,
|
|
40
|
+
jidNormalizedUser
|
|
41
|
+
} = require("../WABinary")
|
|
42
|
+
const {
|
|
43
|
+
aesDecryptGCM,
|
|
44
|
+
aesEncryptGCM,
|
|
45
|
+
hkdf
|
|
46
|
+
} = require("./crypto")
|
|
47
|
+
const { generateMessageID } = require("./generics")
|
|
48
|
+
|
|
49
|
+
/** Returns the directory used for temporary media files (the OS temp dir). */
const getTmpFilesDirectory = () => tmpdir()
|
|
50
|
+
|
|
51
|
+
/**
 * Picks the available image-processing backend.
 * Prefers `sharp`; falls back to `jimp`; throws when neither optional
 * dependency is installed. Returns `{ sharp }` or `{ jimp }`.
 */
const getImageProcessingLibrary = () => {
	const tryRequire = (name) => {
		try {
			return require(name)
		} catch {
			return undefined
		}
	}

	const sharp = tryRequire('sharp')
	if (sharp) {
		return { sharp }
	}

	const jimp = tryRequire('jimp')
	if (jimp) {
		return { jimp }
	}

	throw new Boom('No image processing library available')
}
|
|
72
|
+
|
|
73
|
+
/** Builds the HKDF `info` string for a media type, e.g. "WhatsApp Image Keys". */
const hkdfInfoKey = (type) => `WhatsApp ${MEDIA_HKDF_KEY_MAPPING[type]} Keys`
|
|
77
|
+
|
|
78
|
+
/**
 * Streams a media source to a temp file while computing its SHA-256 and length
 * (used for unencrypted/raw uploads).
 *
 * @param media WAMediaUpload (buffer / stream / url) — resolved via getStream
 * @param mediaType used only to name the temp file
 * @param logger optional pino-style logger
 * @returns { filePath, fileSha256, fileLength } — caller owns (and must delete) filePath
 * @throws rethrows any stream/write error after cleaning up the temp file
 */
const getRawMediaUploadData = async (media, mediaType, logger) => {
    const { stream } = await getStream(media)

    logger?.debug('got stream for raw upload')

    const hasher = createHash('sha256')
    // temp file named after the media type + a random message id to avoid collisions
    const filePath = join(getTmpFilesDirectory(), mediaType + generateMessageID())
    const fileWriteStream = createWriteStream(filePath)

    let fileLength = 0

    try {
        for await (const data of stream) {
            fileLength += data.length
            hasher.update(data)

            // respect backpressure: pause until the file stream drains
            if (!fileWriteStream.write(data)) {
                await once(fileWriteStream, 'drain')
            }
        }

        fileWriteStream.end()
        // wait until all bytes are flushed to disk before hashing is considered final
        await once(fileWriteStream, 'finish')
        stream.destroy()

        const fileSha256 = hasher.digest()

        logger?.debug('hashed data for raw upload')

        return {
            filePath: filePath,
            fileSha256,
            fileLength
        }
    }
    catch (error) {
        // tear down both ends, then best-effort delete of the partial temp file
        fileWriteStream.destroy()
        stream.destroy()

        try {
            await promises.unlink(filePath)
        }
        catch {
            // ignore: file may never have been created
        }
        throw error
    }
}
|
|
126
|
+
|
|
127
|
+
/** generates all the keys required to encrypt/decrypt & sign a media message */
|
|
128
|
+
/**
 * Generates all the keys required to encrypt/decrypt & sign a media message.
 * Expands the 32-byte media key to 112 bytes via HKDF (with a per-media-type
 * info string) and slices out iv / cipher key / mac key.
 */
async function getMediaKeys(buffer, mediaType) {
	if (!buffer) {
		throw new Boom('Cannot derive from empty media key')
	}

	// accept a base64 data-URI string as well as a raw buffer
	const keyMaterial = typeof buffer === 'string'
		? Buffer.from(buffer.replace('data:base64,', ''), 'base64')
		: buffer

	const expandedMediaKey = await hkdf(keyMaterial, 112, { info: hkdfInfoKey(mediaType) })

	return {
		iv: expandedMediaKey.slice(0, 16),
		cipherKey: expandedMediaKey.slice(16, 48),
		macKey: expandedMediaKey.slice(48, 80)
	}
}
|
|
143
|
+
|
|
144
|
+
/** Extracts video thumb using FFMPEG */
|
|
145
|
+
/**
 * Extracts a single video frame using ffmpeg.
 *
 * Fixed: the original built one shell string with `exec`, interpolating
 * `path`/`destPath` unquoted — it broke on paths containing spaces and allowed
 * shell injection through attacker-controlled file names. `execFile` passes an
 * argument vector and spawns no shell.
 *
 * @param path source video file path
 * @param destPath where the jpeg frame is written
 * @param time seek position (e.g. '00:00:00')
 * @param size only `size.width` is used; height preserves aspect ratio (scale=W:-1)
 */
const extractVideoThumb = async (path, destPath, time, size) => new Promise((resolve, reject) => {
	const { execFile } = require('child_process')
	const args = [
		'-ss', String(time),
		'-i', path,
		'-y',
		'-vf', `scale=${size.width}:-1`,
		'-vframes', '1',
		'-f', 'image2',
		destPath
	]
	execFile('ffmpeg', args, (err) => {
		if (err) {
			reject(err)
		}
		else {
			resolve()
		}
	})
})
|
|
156
|
+
|
|
157
|
+
/**
 * Generates a small jpeg thumbnail plus the original image dimensions.
 *
 * Fixed: the sharp path ignored the `quality` parameter (it hardcoded
 * `{ quality: 50 }`), while the jimp path honored it — both now use `quality`.
 *
 * @param bufferOrFilePath buffer, file path, or readable stream of the image
 * @param width thumbnail width in px (height scales proportionally)
 * @param quality jpeg quality 0-100
 * @returns { buffer, original: { width, height } }
 */
const extractImageThumb = async (bufferOrFilePath, width = 32, quality = 50) => {
	// TODO: Move entirely to sharp, removing jimp as it supports readable streams
	// This will have positive speed and performance impacts as well as minimizing RAM usage.
	if (bufferOrFilePath instanceof Readable) {
		bufferOrFilePath = await toBuffer(bufferOrFilePath)
	}

	const lib = await getImageProcessingLibrary()

	if ('sharp' in lib && typeof lib.sharp === 'function') {
		const img = lib.sharp(bufferOrFilePath)
		const dimensions = await img.metadata()
		// honor the caller-supplied quality (was hardcoded to 50)
		const buffer = await img.resize(width).jpeg({ quality }).toBuffer()
		return {
			buffer,
			original: {
				width: dimensions.width,
				height: dimensions.height
			}
		}
	}
	else if ('jimp' in lib && typeof lib.jimp.read === 'function') {
		const { read, MIME_JPEG, RESIZE_BEZIER, AUTO } = lib.jimp
		const jimp = await read(bufferOrFilePath)
		const dimensions = {
			width: jimp.getWidth(),
			height: jimp.getHeight()
		}
		const buffer = await jimp
			.quality(quality)
			.resize(width, AUTO, RESIZE_BEZIER)
			.getBufferAsync(MIME_JPEG)
		return {
			buffer,
			original: dimensions
		}
	}
	else {
		throw new Boom('No image processing library available')
	}
}
|
|
198
|
+
|
|
199
|
+
/**
 * Converts standard base64 to the URL-safe variant expected by the upload
 * endpoint: '+' -> '-', '/' -> '_', trailing '=' padding stripped, then
 * percent-encoded.
 */
const encodeBase64EncodedStringForUpload = (b64) => {
	const urlSafe = b64
		.replace(/\+/g, '-')
		.replace(/\//g, '_')
		.replace(/=+$/, '')
	return encodeURIComponent(urlSafe)
}
|
|
203
|
+
|
|
204
|
+
/**
 * Renders a profile picture (default 640x640 jpeg) from any WAMediaUpload.
 *
 * Fixed: the sharp branch only accepted `lib.sharp.default` even though
 * `getImageProcessingLibrary` returns the CommonJS export (a plain function —
 * the same shape `extractImageThumb` checks), so the sharp path could never be
 * taken. Both the plain-function and `.default` (ESM-interop) shapes are now
 * accepted.
 *
 * @param mediaUpload buffer / stream / url media source
 * @param dimensions optional { width, height }, defaults to 640x640
 * @returns { img: Buffer } the encoded jpeg
 */
const generateProfilePicture = async (mediaUpload, dimensions) => {
	let buffer

	const { width: w = 640, height: h = 640 } = dimensions || {}

	if (Buffer.isBuffer(mediaUpload)) {
		buffer = mediaUpload
	}
	else {
		// Use getStream to handle all WAMediaUpload types (Buffer, Stream, URL)
		const { stream } = await getStream(mediaUpload)
		// Convert the resulting stream to a buffer
		buffer = await toBuffer(stream)
	}

	const lib = await getImageProcessingLibrary()

	// resolve sharp whether it is exported directly (CJS) or under `.default`
	const sharpFn = 'sharp' in lib
		? (typeof lib.sharp === 'function' ? lib.sharp : (typeof lib.sharp?.default === 'function' ? lib.sharp.default : undefined))
		: undefined

	let img

	if (sharpFn) {
		img = sharpFn(buffer)
			.resize(w, h)
			.jpeg({
				quality: 50
			}).toBuffer()
	}
	else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
		const jimp = await lib.jimp.read(buffer)
		// square-crop to the smaller side before resizing
		const min = Math.min(jimp.width, jimp.height)
		const cropped = jimp.crop({ x: 0, y: 0, w: min, h: min })
		img = cropped.resize({ w, h, mode: lib.jimp.ResizeStrategy.BILINEAR }).getBuffer('image/jpeg', { quality: 50 })
	}
	else {
		throw new Boom('No image processing library available');
	}

	return {
		img: await img
	}
}
|
|
243
|
+
|
|
244
|
+
/** gets the SHA256 of the given media message */
|
|
245
|
+
/** Gets the base64 SHA-256 of the (single) media content inside a message. */
const mediaMessageSHA256B64 = (message) => {
	const [content] = Object.values(message)
	const sha = content?.fileSha256
	return sha && Buffer.from(sha).toString('base64')
}
|
|
249
|
+
|
|
250
|
+
/**
 * Reads the duration (seconds) of an audio source via music-metadata.
 * Accepts a buffer, a file path string, or a readable stream.
 * Returns undefined when the format carries no duration.
 */
async function getAudioDuration(buffer) {
	const options = {
		duration: true
	}

	let metadata

	if (Buffer.isBuffer(buffer)) {
		metadata = await parseBuffer(buffer, undefined, options)
	}
	else if (typeof buffer === 'string') {
		metadata = await parseFile(buffer, options)
	}
	else {
		// anything else is treated as a readable stream
		metadata = await parseStream(buffer, undefined, options)
	}

	return metadata.format?.duration
}
|
|
268
|
+
|
|
269
|
+
/**
|
|
270
|
+
referenced from and modifying https://github.com/wppconnect-team/wa-js/blob/main/src/chat/functions/prepareAudioWaveform.ts
|
|
271
|
+
*/
|
|
272
|
+
/**
 * Computes a WhatsApp-style 64-bucket waveform (values 0..100) for an audio
 * source (buffer, file path, or stream). Returns undefined on failure.
 *
 * Fixed two NaN sources in the original:
 *  - clips shorter than 64 frames made `blockSize` 0 (division by zero);
 *  - pure silence made the peak 0, so `Math.pow(0, -1)` produced Infinity.
 */
async function getAudioWaveform(buffer, logger) {
	try {
		// dynamic ESM import from CJS; eval keeps bundlers from rewriting it
		const { default: decoder } = await eval('import(\'audio-decode\')')

		let audioData

		if (Buffer.isBuffer(buffer)) {
			audioData = buffer
		}
		else if (typeof buffer === 'string') {
			const rStream = createReadStream(buffer)
			audioData = await toBuffer(rStream)
		}
		else {
			audioData = await toBuffer(buffer)
		}

		const audioBuffer = await decoder(audioData)
		const rawData = audioBuffer.getChannelData(0) // We only need to work with one channel of data
		const samples = 64 // Number of samples we want to have in our final data set
		// guard: clips shorter than `samples` frames would give blockSize 0
		const blockSize = Math.max(1, Math.floor(rawData.length / samples))
		const filteredData = []

		for (let i = 0; i < samples; i++) {
			const blockStart = blockSize * i // the location of the first sample in the block
			let sum = 0
			for (let j = 0; j < blockSize; j++) {
				// `|| 0` skips out-of-range reads on short clips
				sum = sum + Math.abs(rawData[blockStart + j] || 0)
			}
			filteredData.push(sum / blockSize) // divide the sum by the block size to get the average
		}

		// This guarantees that the largest data point will be set to 1, and the rest of the data will scale proportionally.
		const peak = Math.max(...filteredData)
		// guard: silence would otherwise yield an Infinity multiplier (NaN waveform)
		const multiplier = peak > 0 ? 1 / peak : 0
		const normalizedData = filteredData.map((n) => n * multiplier)

		// Generate waveform like WhatsApp
		const waveform = new Uint8Array(normalizedData.map((n) => Math.floor(100 * n)))
		return waveform
	}
	catch (e) {
		logger?.debug('Failed to generate waveform: ' + e)
	}
}
|
|
316
|
+
|
|
317
|
+
/**
 * Transcodes an in-memory audio buffer to ogg/opus via fluent-ffmpeg,
 * piping through PassThrough streams so no temp files are needed.
 * Resolves with the full transcoded buffer; rethrows ffmpeg errors after logging.
 */
async function convertToOpusBuffer(buffer, logger) {
    try {
        const { PassThrough } = require('stream');
        const ff = require('fluent-ffmpeg');

        return await new Promise((resolve, reject) => {
            const inStream = new PassThrough();
            const outStream = new PassThrough();
            const chunks = [];
            // feed the whole source buffer and signal EOF
            inStream.end(buffer);

            // NOTE(review): '-b:a 64k' / '-ac 2' in outputOptions override
            // audioBitrate('48k') / audioChannels(1) set above — confirm which
            // settings are actually intended.
            ff(inStream)
                .noVideo()
                .audioCodec('libopus')
                .format('ogg')
                .audioBitrate('48k')
                .audioChannels(1)
                .audioFrequency(48000)
                .outputOptions([
                    '-vn',
                    '-b:a 64k',
                    '-ac 2',
                    '-ar 48000',
                    '-map_metadata', '-1',
                    '-application', 'voip'
                ])
                .on('error', reject)
                .on('end', () => resolve(Buffer.concat(chunks)))
                .pipe(outStream, {
                    end: true
                });
            // collect transcoded output as it streams out
            outStream.on('data', c => chunks.push(c));
        });
    } catch (e) {
        logger?.debug(e);
        throw e;
    }
}
|
|
355
|
+
|
|
356
|
+
/** Wraps a buffer in a single-shot Readable: emit the payload, then EOF. */
const toReadable = (buffer) => {
	const source = new Readable({ read() { } })
	source.push(buffer)
	source.push(null)
	return source
}
|
|
362
|
+
|
|
363
|
+
/** Drains a readable stream into one Buffer, destroying the stream afterwards. */
const toBuffer = async (stream) => {
	const collected = []
	for await (const piece of stream) {
		collected.push(piece)
	}

	stream.destroy()
	return Buffer.concat(collected)
}
|
|
371
|
+
|
|
372
|
+
/**
 * Normalizes a WAMediaUpload (Buffer | { stream } | { url }) into
 * `{ stream, type }` where type is 'buffer' | 'readable' | 'remote' | 'file'.
 * data: URLs are decoded in-memory; http(s) URLs are fetched; anything else
 * is treated as a local file path.
 */
const getStream = async (item, opts) => {
	if (Buffer.isBuffer(item)) {
		return { stream: toReadable(item), type: 'buffer' }
	}

	if ('stream' in item) {
		return { stream: item.stream, type: 'readable' }
	}

	const urlStr = item.url.toString()

	if (urlStr.startsWith('data:')) {
		const payload = Buffer.from(urlStr.split(',')[1], 'base64')
		return { stream: toReadable(payload), type: 'buffer' }
	}

	if (urlStr.startsWith('http://') || urlStr.startsWith('https://')) {
		return { stream: await getHttpStream(item.url, opts), type: 'remote' }
	}

	return { stream: createReadStream(item.url), type: 'file' }
}
|
|
394
|
+
|
|
395
|
+
/** generates a thumbnail for a given media, if required */
|
|
396
|
+
/**
 * Generates a thumbnail for a given media, if required.
 * Images: in-process thumb + original dimensions. Videos: ffmpeg frame grab
 * via a temp jpg (failures are logged, not thrown). Other types: no thumb.
 */
async function generateThumbnail(file, mediaType, options) {
	let thumbnail
	let originalImageDimensions

	if (mediaType === 'image') {
		const { buffer, original } = await extractImageThumb(file)
		thumbnail = buffer.toString('base64')

		const { width, height } = original
		if (width && height) {
			originalImageDimensions = { width, height }
		}
	}
	else if (mediaType === 'video') {
		const imgFilename = join(getTmpFilesDirectory(), generateMessageID() + '.jpg')
		try {
			await extractVideoThumb(file, imgFilename, '00:00:00', { width: 32, height: 32 })

			const buff = await promises.readFile(imgFilename)
			thumbnail = buff.toString('base64')

			// temp frame is no longer needed once encoded
			await promises.unlink(imgFilename)
		}
		catch (err) {
			// best-effort: a missing video thumb is not fatal
			options.logger?.debug('could not generate video thumb: ' + err)
		}
	}

	return {
		thumbnail,
		originalImageDimensions
	}
}
|
|
431
|
+
|
|
432
|
+
/**
 * Opens a GET request and returns its body as a Node Readable.
 * Throws a Boom carrying the HTTP status on non-2xx responses.
 * `options.dispatcher` is forwarded to fetch (undici custom agent/proxy).
 */
const getHttpStream = async (url, options = {}) => {
	const response = await fetch(url.toString(), {
		method: 'GET',
		headers: options.headers,
		dispatcher: options.dispatcher
	})

	if (!response.ok) {
		throw new Boom(`Failed to fetch stream from ${url}`, { statusCode: response.status, data: { url } })
	}

	// fetch may yield a web stream; adapt it to a Node Readable if needed
	const { body } = response
	return body instanceof Readable ? body : Readable.fromWeb(body)
}
|
|
445
|
+
|
|
446
|
+
/*const prepareStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts } = {}) => {
|
|
447
|
+
const { stream, type } = await getStream(media, opts)
|
|
448
|
+
logger?.debug('fetched media stream')
|
|
449
|
+
|
|
450
|
+
const encFilePath = join(tmpdir(), mediaType + generateMessageID() + '-plain')
|
|
451
|
+
const encFileWriteStream = createWriteStream(encFilePath)
|
|
452
|
+
|
|
453
|
+
let originalFilePath
|
|
454
|
+
let originalFileStream
|
|
455
|
+
|
|
456
|
+
if (type === 'file') {
|
|
457
|
+
originalFilePath = media.url.toString()
|
|
458
|
+
} else if (saveOriginalFileIfRequired) {
|
|
459
|
+
originalFilePath = join(tmpdir(), mediaType + generateMessageID() + '-original')
|
|
460
|
+
originalFileStream = createWriteStream(originalFilePath)
|
|
461
|
+
}
|
|
462
|
+
|
|
463
|
+
let fileLength = 0
|
|
464
|
+
const sha256 = createHash('sha256')
|
|
465
|
+
|
|
466
|
+
try {
|
|
467
|
+
for await (const data of stream) {
|
|
468
|
+
fileLength += data.length
|
|
469
|
+
|
|
470
|
+
if (type === 'remote'
|
|
471
|
+
&& opts?.maxContentLength
|
|
472
|
+
&& fileLength + data.length > opts.maxContentLength) {
|
|
473
|
+
throw new Boom(`content length exceeded when preparing "${type}"`, {
|
|
474
|
+
data: { media, type }
|
|
475
|
+
})
|
|
476
|
+
}
|
|
477
|
+
|
|
478
|
+
sha256.update(data)
|
|
479
|
+
encFileWriteStream.write(data)
|
|
480
|
+
|
|
481
|
+
if (originalFileStream && !originalFileStream.write(data)) {
|
|
482
|
+
await once(originalFileStream, 'drain')
|
|
483
|
+
}
|
|
484
|
+
}
|
|
485
|
+
|
|
486
|
+
const fileSha256 = sha256.digest()
|
|
487
|
+
encFileWriteStream.end()
|
|
488
|
+
originalFileStream?.end?.call(originalFileStream)
|
|
489
|
+
stream.destroy()
|
|
490
|
+
|
|
491
|
+
logger?.debug('prepared plain stream successfully')
|
|
492
|
+
|
|
493
|
+
return {
|
|
494
|
+
mediaKey: undefined,
|
|
495
|
+
originalFilePath,
|
|
496
|
+
encFilePath,
|
|
497
|
+
mac: undefined,
|
|
498
|
+
fileEncSha256: undefined,
|
|
499
|
+
fileSha256,
|
|
500
|
+
fileLength
|
|
501
|
+
}
|
|
502
|
+
}
|
|
503
|
+
catch (error) {
|
|
504
|
+
encFileWriteStream.destroy()
|
|
505
|
+
originalFileStream?.destroy?.call(originalFileStream)
|
|
506
|
+
sha256.destroy()
|
|
507
|
+
stream.destroy()
|
|
508
|
+
try {
|
|
509
|
+
await promises.unlink(encFilePath)
|
|
510
|
+
if (originalFilePath && didSaveToTmpPath) {
|
|
511
|
+
await promises.unlink(originalFilePath)
|
|
512
|
+
}
|
|
513
|
+
} catch (err) {
|
|
514
|
+
logger?.error({ err }, 'failed deleting tmp files')
|
|
515
|
+
}
|
|
516
|
+
throw error
|
|
517
|
+
}
|
|
518
|
+
}*/
|
|
519
|
+
|
|
520
|
+
/**
 * Encrypts a media source (AES-256-CBC + HMAC-SHA256 per the WhatsApp media
 * scheme) into a temp file, optionally saving the plaintext alongside and/or
 * transcoding audio to opus first.
 *
 * Fixed: the encryption loop iterated (and later destroyed) `stream` instead
 * of `finalStream`, so when opus conversion ran, the converted audio was
 * discarded and the already-consumed source stream was re-read (producing an
 * empty/incorrect upload). The maxContentLength check also added the current
 * chunk's length twice; the cap is now enforced before counting the chunk.
 *
 * @returns { mediaKey, originalFilePath, encFilePath, mac, fileEncSha256, fileSha256, fileLength }
 *          — caller owns (and must delete) the temp files
 */
const encryptedStream = async (media, mediaType, { logger, saveOriginalFileIfRequired, opts, isPtt, forceOpus } = {}) => {
	const { stream, type } = await getStream(media, opts)

	// everything below must read from finalStream: the audio branch may replace
	// the source with an opus-converted stream
	let finalStream = stream
	if (mediaType === 'audio' && (isPtt === true || forceOpus === true)) {
		try {
			const buffer = await toBuffer(stream)
			const opusBuffer = await convertToOpusBuffer(buffer, logger)
			finalStream = toReadable(opusBuffer)
		} catch (error) {
			// conversion is mandatory for PTT voice notes, best-effort otherwise
			if (isPtt) {
				throw error
			}

			// source stream was consumed by toBuffer — re-open it
			const { stream: newStream } = await getStream(media, opts)
			finalStream = newStream
		}
	}

	logger?.debug('fetched media stream')

	const mediaKey = randomBytes(32)
	const { cipherKey, iv, macKey } = await getMediaKeys(mediaKey, mediaType)
	const encFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageID() + '-enc')
	const encFileWriteStream = createWriteStream(encFilePath)

	let originalFileStream
	let originalFilePath

	if (saveOriginalFileIfRequired) {
		originalFilePath = join(getTmpFilesDirectory(), mediaType + generateMessageID() + '-original')
		originalFileStream = createWriteStream(originalFilePath)
	}

	let fileLength = 0

	const aes = createCipheriv('aes-256-cbc', cipherKey, iv)
	// media MAC covers iv || ciphertext
	const hmac = createHmac('sha256', macKey).update(iv)
	const sha256Plain = createHash('sha256')
	const sha256Enc = createHash('sha256')

	// fold an encrypted chunk into the mac + enc hash and write it,
	// honoring backpressure from the file stream
	const onChunk = async (buff) => {
		sha256Enc.update(buff)
		hmac.update(buff)

		if (!encFileWriteStream.write(buff)) {
			await once(encFileWriteStream, 'drain')
		}
	}

	try {
		for await (const data of finalStream) {
			// enforce the cap BEFORE counting this chunk (the old check added
			// data.length twice and rejected one chunk too early)
			if (type === 'remote' &&
				opts?.maxContentLength &&
				fileLength + data.length > opts.maxContentLength) {
				throw new Boom(`content length exceeded when encrypting "${type}"`, {
					data: { media, type }
				})
			}

			fileLength += data.length

			if (originalFileStream) {
				if (!originalFileStream.write(data)) {
					await once(originalFileStream, 'drain')
				}
			}

			sha256Plain.update(data)

			await onChunk(aes.update(data))
		}

		await onChunk(aes.final())
		// WhatsApp truncates the HMAC to its first 10 bytes
		const mac = hmac.digest().slice(0, 10)

		// fileEncSha256 covers ciphertext || mac
		sha256Enc.update(mac)

		const fileSha256 = sha256Plain.digest()
		const fileEncSha256 = sha256Enc.digest()

		encFileWriteStream.write(mac)

		const encFinishPromise = once(encFileWriteStream, 'finish')
		const originalFinishPromise = originalFileStream ? once(originalFileStream, 'finish') : Promise.resolve()

		encFileWriteStream.end()
		originalFileStream?.end?.()
		finalStream.destroy()

		// Wait for write streams to fully flush to disk
		// This helps reduce memory pressure by allowing OS to release buffers
		await encFinishPromise
		await originalFinishPromise

		logger?.debug('encrypted data successfully')

		return {
			mediaKey,
			originalFilePath,
			encFilePath,
			mac,
			fileEncSha256,
			fileSha256,
			fileLength
		}
	}
	catch (error) {
		// destroy all streams with error
		encFileWriteStream.destroy()
		originalFileStream?.destroy?.()
		aes.destroy()
		hmac.destroy()
		sha256Plain.destroy()
		sha256Enc.destroy()
		finalStream.destroy()

		try {
			await promises.unlink(encFilePath)

			if (originalFilePath) {
				await promises.unlink(originalFilePath)
			}
		}
		catch (err) {
			logger?.error({ err }, 'failed deleting tmp files')
		}
		throw error
	}
}
|
|
650
|
+
|
|
651
|
+
// Default WhatsApp media host, used when a message only carries a direct path.
const DEF_HOST = 'mmg.whatsapp.net'

// AES-256-CBC block size in bytes; ranged downloads must be block-aligned.
const AES_CHUNK_SIZE = 16

/** Rounds `num` down to the nearest multiple of the AES block size. */
const toSmallestChunkSize = (num) => Math.floor(num / AES_CHUNK_SIZE) * AES_CHUNK_SIZE

/** Builds a full media URL on the default host from a direct path. */
const getUrlFromDirectPath = (directPath) => `https://${DEF_HOST}${directPath}`
|
|
659
|
+
|
|
660
|
+
/**
 * Resolves the download URL for a media message and streams its decrypted content.
 *
 * Fix: previously, when `url` was invalid and `directPath` was nullish,
 * `getUrlFromDirectPath(undefined)` produced the truthy string
 * "https://mmg.whatsapp.net/undefined", making the 400 guard unreachable.
 * We now only build a URL from `directPath` when it is actually present.
 *
 * @param {{ mediaKey: Uint8Array, directPath?: string, url?: string }} message media descriptor
 * @param {string} type media type used to derive the decryption keys
 * @param {object} [opts] range/request options forwarded to downloadEncryptedContent
 * @returns decrypted content stream
 * @throws {Boom} 400 when neither a valid URL nor a directPath is present
 */
const downloadContentFromMessage = async ({ mediaKey, directPath, url }, type, opts = {}) => {
    const isValidMediaUrl = url?.startsWith('https://mmg.whatsapp.net/')
    const downloadUrl = isValidMediaUrl ? url : directPath ? getUrlFromDirectPath(directPath) : undefined

    if (!downloadUrl) {
        throw new Boom('No valid media URL or directPath present in message', { statusCode: 400 })
    }

    const keys = await getMediaKeys(mediaKey, type)
    return downloadEncryptedContent(downloadUrl, keys, opts)
}
|
|
671
|
+
|
|
672
|
+
/**
 * Decrypts and downloads an AES256-CBC encrypted file given the keys.
 * Assumes the SHA256 of the plaintext is appended to the end of the ciphertext.
 *
 * Supports ranged downloads: when `startByte`/`endByte` are given, the HTTP
 * request is aligned to AES block boundaries and the block preceding the
 * requested range is used as the IV for CBC decryption.
 * */
const downloadEncryptedContent = async (downloadUrl, { cipherKey, iv }, { startByte, endByte, options } = {}) => {
    let bytesFetched = 0
    let startChunk = 0
    let firstBlockIsIV = false

    // if a start byte is specified -- then we need to fetch the previous chunk as that will form the IV
    if (startByte) {
        const chunk = toSmallestChunkSize(startByte || 0)

        if (chunk) {
            // fetch one extra AES block before the aligned start: it becomes the IV
            startChunk = chunk - AES_CHUNK_SIZE
            bytesFetched = chunk
            firstBlockIsIV = true
        }
    }

    // round the end byte UP to the next block boundary so the last partial block is included
    const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined
    // caller headers may be an array of [key, value] pairs or a plain object
    const headersInit = options?.headers ? options.headers : undefined
    const headers = {
        ...(headersInit
            ? Array.isArray(headersInit)
                ? Object.fromEntries(headersInit)
                : headersInit
            : {}),
        Origin: DEFAULT_ORIGIN
    }

    if (startChunk || endChunk) {
        headers.Range = `bytes=${startChunk}-`

        if (endChunk) {
            // appends the end offset, producing e.g. "bytes=0-1024"
            headers.Range += endChunk
        }
    }

    // download the message
    const fetched = await getHttpStream(downloadUrl, {
        ...(options || {}),
        headers
    })

    // carries bytes that don't fill a whole AES block into the next transform call
    let remainingBytes = Buffer.from([])
    // decipher is created lazily on the first chunk (the IV may come from the stream)
    let aes

    // trims decrypted bytes to the exact [startByte, endByte) window requested by the caller
    const pushBytes = (bytes, push) => {
        if (startByte || endByte) {
            const start = bytesFetched >= startByte ? undefined : Math.max(startByte - bytesFetched, 0)
            const end = bytesFetched + bytes.length < endByte ? undefined : Math.max(endByte - bytesFetched, 0)

            push(bytes.slice(start, end))
            bytesFetched += bytes.length
        }
        else {
            push(bytes)
        }
    }

    const output = new Transform({
        transform(chunk, _, callback) {
            let data = Buffer.concat([remainingBytes, chunk])

            // only decrypt a whole number of AES blocks; keep the remainder for later
            const decryptLength = toSmallestChunkSize(data.length)

            remainingBytes = data.slice(decryptLength)
            data = data.slice(0, decryptLength)

            if (!aes) {
                let ivValue = iv

                if (firstBlockIsIV) {
                    // the extra block fetched before the range is the CBC IV
                    // NOTE(review): assumes the first aggregated chunk holds at least one full block — TODO confirm
                    ivValue = data.slice(0, AES_CHUNK_SIZE)
                    data = data.slice(AES_CHUNK_SIZE)
                }

                aes = createDecipheriv('aes-256-cbc', cipherKey, ivValue)

                // if an end byte that is not EOF is specified
                // stop auto padding (PKCS7) -- otherwise throws an error for decryption
                if (endByte) {
                    aes.setAutoPadding(false)
                }
            }
            try {
                pushBytes(aes.update(data), b => this.push(b))
                callback()
            }
            catch (error) {
                callback(error)
            }
        },
        final(callback) {
            // NOTE(review): if the source stream emits no data, `aes` is still undefined here — verify upstream guarantees
            try {
                pushBytes(aes.final(), b => this.push(b))
                callback()
            }
            catch (error) {
                callback(error)
            }
        }
    })

    return fetched.pipe(output, { end: true })
}
|
|
779
|
+
|
|
780
|
+
/**
 * Determines the file extension for a media message.
 *
 * Fix: the extension was derived with `mimetype.split('')` (split on the empty
 * string), which yields the first *character* of the mimetype and therefore
 * always produced `undefined`. The correct delimiter is ';', which strips
 * mimetype parameters (e.g. "video/mp4; codecs=avc1" -> "mp4").
 *
 * @param {object} message message content keyed by message type
 * @returns {string|undefined} the extension (without a dot for mimetype-derived
 *   values; '.jpeg' for location/product thumbnails, mirroring the original contract)
 */
function extensionForMediaMessage(message) {
    // strip any mimetype parameters, then take the subtype as the extension
    const getExtension = (mimetype) => mimetype.split(';')[0].split('/')[1]
    const type = Object.keys(message)[0]
    let extension
    if (type === 'locationMessage' ||
        type === 'liveLocationMessage' ||
        type === 'productMessage') {
        // these carry JPEG thumbnails rather than a mimetype
        extension = '.jpeg'
    }
    else {
        const messageContent = message[type]
        extension = getExtension(messageContent.mimetype)
    }

    return extension
}
|
|
795
|
+
|
|
796
|
+
/**
 * Detects whether we are running on plain Node.js (not Bun or Deno).
 *
 * Fix: the check `process.versions?.node !== null` was always true, because
 * `process.versions?.node` is either a string (Node) or `undefined` (elsewhere)
 * and never strictly equals `null`. Use a loose `!= null` so both `null` and
 * `undefined` are rejected.
 *
 * @returns {boolean} true only on a genuine Node.js runtime
 */
const isNodeRuntime = () => {
    return (typeof process !== 'undefined' &&
        process.versions?.node != null &&
        typeof process.versions.bun === 'undefined' &&
        typeof globalThis.Deno === 'undefined')
}
|
|
802
|
+
|
|
803
|
+
/**
 * Uploads a file to `url` via POST using Node's raw http/https modules,
 * streaming the file from disk so it is never fully buffered in memory.
 * Follows up to 5 redirects (3xx with a Location header) recursively.
 *
 * @param {{ url: string, filePath: string, headers: object, timeoutMs?: number, agent?: object }} params
 * @param {number} [redirectCount] internal recursion counter for redirect following
 * @returns {Promise<object|undefined>} the parsed JSON response body, or
 *   `undefined` when the body is not valid JSON
 * @throws {Error} on too many redirects, request errors, or timeout
 */
const uploadWithNodeHttp = async ({ url, filePath, headers, timeoutMs, agent }, redirectCount = 0) => {
    if (redirectCount > 5) {
        throw new Error('Too many redirects')
    }

    const parsedUrl = new URL(url)
    const httpModule = parsedUrl.protocol === 'https:' ? require('https') : require('http')

    // Get file size for Content-Length header (required for Node.js streaming)
    const fileStats = await promises.stat(filePath)
    const fileSize = fileStats.size

    return new Promise((resolve, reject) => {
        const req = httpModule.request({
            hostname: parsedUrl.hostname,
            port: parsedUrl.port || (parsedUrl.protocol === 'https:' ? 443 : 80),
            path: parsedUrl.pathname + parsedUrl.search,
            method: 'POST',
            headers: {
                ...headers,
                'Content-Length': fileSize
            },
            agent,
            timeout: timeoutMs
        }, res => {
            // Handle redirects (3xx)
            if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
                res.resume() // Consume response to free resources

                // Location may be relative; resolve it against the current URL
                const newUrl = new URL(res.headers.location, url).toString()

                resolve(uploadWithNodeHttp({
                    url: newUrl,
                    filePath,
                    headers,
                    timeoutMs,
                    agent
                }, redirectCount + 1))
                return
            }

            // NOTE(review): non-2xx responses are not rejected here — the body is
            // still parsed as JSON and the caller inspects the result; confirm intended
            let body = ''

            res.on('data', chunk => (body += chunk))
            res.on('end', () => {
                try {
                    resolve(JSON.parse(body))
                }
                catch {
                    // non-JSON body resolves to undefined rather than rejecting
                    resolve(undefined)
                }
            })
        })

        req.on('error', reject)
        req.on('timeout', () => {
            req.destroy()
            reject(new Error('Upload timeout'))
        })

        // stream the file body into the request
        const stream = createReadStream(filePath)

        stream.pipe(req)
        stream.on('error', err => {
            req.destroy()
            reject(err)
        })
    })
}
|
|
872
|
+
|
|
873
|
+
/**
 * Uploads a file via the web-standard Fetch API, streaming the body from disk
 * as a Web ReadableStream. Used on runtimes that stream request bodies
 * correctly (Bun, Deno, browsers).
 *
 * @returns {Promise<object|undefined>} parsed JSON response, or `undefined`
 *   when the body is not valid JSON
 */
const uploadWithFetch = async ({ url, filePath, headers, timeoutMs, agent }) => {
    // Convert Node.js Readable to Web ReadableStream
    const body = Readable.toWeb(createReadStream(filePath))
    const abortSignal = timeoutMs ? AbortSignal.timeout(timeoutMs) : undefined

    const response = await fetch(url, {
        method: 'POST',
        headers,
        body,
        duplex: 'half',
        dispatcher: agent,
        signal: abortSignal
    })

    try {
        return (await response.json())
    }
    catch {
        return undefined
    }
}
|
|
893
|
+
|
|
894
|
+
/**
 * Uploads media to WhatsApp servers, picking an upload implementation per runtime.
 *
 * ## Why we have two upload implementations:
 *
 * Node.js's native `fetch` (powered by undici) has a known bug where it buffers
 * the entire request body in memory before sending, even when using streams.
 * This causes memory issues with large files (e.g., 1GB file = 1GB+ memory usage).
 * See: https://github.com/nodejs/undici/issues/4058
 *
 * Other runtimes (Bun, Deno, browsers) correctly stream the request body without
 * buffering, so we can use the web-standard Fetch API there.
 *
 * ## Future considerations:
 * Once the undici bug is fixed, we can simplify this to use only the Fetch API
 * across all runtimes. Monitor the GitHub issue for updates.
 */
const uploadMedia = async (params, logger) => {
    if (!isNodeRuntime()) {
        logger?.debug('Using web-standard Fetch API for upload')
        return uploadWithFetch(params)
    }

    logger?.debug('Using Node.js https module for upload (avoids undici buffering bug)')
    return uploadWithNodeHttp(params)
}
|
|
921
|
+
|
|
922
|
+
/**
 * Returns an upload function that pushes an encrypted media file to WhatsApp's
 * upload hosts, trying custom hosts first and falling back across the host list.
 *
 * Fix: the "is this the last host?" check indexed `hosts` with
 * `uploadInfo.hosts.length - 1`, but `hosts` is the concatenation of
 * `customUploadHosts` and `uploadInfo.hosts`, so the index pointed at the wrong
 * element whenever custom hosts were configured. It now uses `hosts.length - 1`.
 *
 * @param {{ customUploadHosts: Array, fetchAgent?: object, logger: object, options?: object }} config
 * @param {(refresh: boolean) => Promise<object>} refreshMediaConn fetches/refreshes the media connection (hosts + auth)
 * @returns async `(filePath, { mediaType, fileEncSha256B64, timeoutMs })` uploader
 * @throws {Boom} 500 when every host fails
 */
const getWAUploadToServer = ({ customUploadHosts, fetchAgent, logger, options }, refreshMediaConn) => {
    return async (filePath, { mediaType, fileEncSha256B64, timeoutMs }) => {
        // send a query JSON to obtain the url & auth token to upload our media
        let uploadInfo = await refreshMediaConn(false)
        let urls

        // custom hosts take priority over the server-provided ones
        const hosts = [...customUploadHosts, ...uploadInfo.hosts]

        fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64)

        // Prepare common headers; caller headers may be entry pairs or an object
        const customHeaders = (() => {
            const hdrs = options?.headers
            if (!hdrs) {
                return {}
            }

            return Array.isArray(hdrs) ? Object.fromEntries(hdrs) : hdrs
        })()

        const headers = {
            ...customHeaders,
            'Content-Type': 'application/octet-stream',
            Origin: DEFAULT_ORIGIN
        }

        for (const { hostname } of hosts) {
            logger.debug(`uploading to "${hostname}"`)

            const auth = encodeURIComponent(uploadInfo.auth)
            const url = `https://${hostname}${MEDIA_PATH_MAP[mediaType]}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`

            let result

            try {
                result = await uploadMedia({
                    url,
                    filePath,
                    headers,
                    timeoutMs,
                    agent: fetchAgent
                }, logger)

                if (result?.url || result?.direct_path) {
                    urls = {
                        mediaUrl: result.url,
                        directPath: result.direct_path,
                        meta_hmac: result.meta_hmac,
                        fbid: result.fbid,
                        ts: result.ts
                    }
                    break
                }
                else {
                    // refresh the media conn for the next attempt, then surface the failure
                    uploadInfo = await refreshMediaConn(true)
                    throw new Error(`upload failed, reason: ${JSON.stringify(result)}`)
                }
            }
            catch (error) {
                // fixed: compare against the combined hosts list, not just uploadInfo.hosts
                const isLast = hostname === hosts[hosts.length - 1]?.hostname
                logger.warn({ trace: error?.stack, uploadResult: result }, `Error in uploading to ${hostname} ${isLast ? '' : ', retrying...'}`)
            }
        }

        if (!urls) {
            throw new Boom('Media upload failed on all hosts', { statusCode: 500 })
        }

        return urls
    }
}
|
|
990
|
+
|
|
991
|
+
/** Derives the 32-byte HKDF key used for media-retry notification payloads. */
const getMediaRetryKey = (mediaKey) => hkdf(mediaKey, 32, { info: 'WhatsApp Media Retry Notification' })
|
|
994
|
+
/**
 * Generate a binary node that will request the phone to re-upload the media & return the newly uploaded URL
 */
const encryptMediaRetryRequest = async (key, mediaKey, meId) => {
    const receiptBytes = proto.ServerErrorReceipt.encode({ stanzaId: key.id }).finish()
    const iv = randomBytes(12)
    const retryKey = await getMediaRetryKey(mediaKey)
    const ciphertext = aesEncryptGCM(receiptBytes, retryKey, iv, Buffer.from(key.id))

    // this encrypt node is actually pretty useless:
    // the media is returned even without it, but we keep it
    // to maintain parity with WA Web
    const encryptNode = {
        tag: 'encrypt',
        attrs: {},
        content: [
            { tag: 'enc_p', attrs: {}, content: ciphertext },
            { tag: 'enc_iv', attrs: {}, content: iv }
        ]
    }

    // identifies which message's media should be re-uploaded
    const rmrNode = {
        tag: 'rmr',
        attrs: {
            jid: key.remoteJid,
            from_me: (!!key.fromMe).toString(),
            participant: key.participant || undefined
        }
    }

    return {
        tag: 'receipt',
        attrs: {
            id: key.id,
            to: jidNormalizedUser(meId),
            type: 'server-error'
        },
        content: [encryptNode, rmrNode]
    }
}
|
|
1034
|
+
|
|
1035
|
+
/**
 * Decodes a media-retry receipt node into an event object containing the
 * message key plus either the encrypted media payload or an error.
 */
const decodeMediaRetryNode = (node) => {
    const rmr = getBinaryNodeChild(node, 'rmr')
    const event = {
        key: {
            id: node.attrs.id,
            remoteJid: rmr.attrs.jid,
            fromMe: rmr.attrs.from_me === 'true',
            participant: rmr.attrs.participant
        }
    }

    const errorNode = getBinaryNodeChild(node, 'error')
    if (errorNode) {
        const errorCode = +errorNode.attrs.code
        event.error = new Boom(`Failed to re-upload media (${errorCode})`, { data: errorNode.attrs, statusCode: getStatusCodeForMediaRetry(errorCode) })
        return event
    }

    const encryptedInfoNode = getBinaryNodeChild(node, 'encrypt')
    const ciphertext = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_p')
    const iv = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_iv')

    if (ciphertext && iv) {
        event.media = { ciphertext, iv }
    }
    else {
        event.error = new Boom('Failed to re-upload media (missing ciphertext)', { statusCode: 404 })
    }

    return event
}
|
|
1063
|
+
|
|
1064
|
+
/**
 * Decrypts a media-retry payload (AES-256-GCM, keyed off the media key with
 * the message id as AAD) and decodes it as a MediaRetryNotification.
 */
const decryptMediaRetryData = async ({ ciphertext, iv }, mediaKey, msgId) => {
    const retryKey = await getMediaRetryKey(mediaKey)
    const plaintext = aesDecryptGCM(ciphertext, retryKey, iv, Buffer.from(msgId))

    return proto.MediaRetryNotification.decode(plaintext)
}
|
|
1069
|
+
|
|
1070
|
+
// Maps a MediaRetryNotification result code to an HTTP-like status code
// (undefined for unknown codes).
const getStatusCodeForMediaRetry = (code) => MEDIA_RETRY_STATUS_MAP[code]

// Retry-result -> status-code table used by getStatusCodeForMediaRetry.
const MEDIA_RETRY_STATUS_MAP = {
    [proto.MediaRetryNotification.ResultType.SUCCESS]: 200,
    [proto.MediaRetryNotification.ResultType.DECRYPTION_ERROR]: 412,
    [proto.MediaRetryNotification.ResultType.NOT_FOUND]: 404,
    [proto.MediaRetryNotification.ResultType.GENERAL_ERROR]: 418,
}
|
|
1078
|
+
|
|
1079
|
+
// Public API of the media utilities module: key derivation, thumbnails,
// stream helpers, encrypted upload/download, and the media-retry protocol.
module.exports = {
    hkdfInfoKey,
    getMediaKeys,
    extractVideoThumb,
    extractImageThumb,
    encodeBase64EncodedStringForUpload,
    generateProfilePicture,
    mediaMessageSHA256B64,
    getAudioDuration,
    getAudioWaveform,
    toReadable,
    toBuffer,
    getStream,
    generateThumbnail,
    getHttpStream,
    //prepareStream,
    encryptedStream,
    getUrlFromDirectPath,
    downloadContentFromMessage,
    downloadEncryptedContent,
    extensionForMediaMessage,
    uploadWithNodeHttp,
    getRawMediaUploadData,
    getWAUploadToServer,
    getMediaRetryKey,
    encryptMediaRetryRequest,
    decodeMediaRetryNode,
    decryptMediaRetryData,
    getStatusCodeForMediaRetry,
    MEDIA_RETRY_STATUS_MAP
}
|