@hansaka02/baileys 7.3.4 → 7.3.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +203 -247
- package/lib/Defaults/baileys-version.json +2 -2
- package/lib/Defaults/connection.js +1 -1
- package/lib/Defaults/constants.js +13 -1
- package/lib/Defaults/history.js +3 -1
- package/lib/Signal/Group/sender-chain-key.js +1 -14
- package/lib/Signal/Group/sender-key-distribution-message.js +2 -2
- package/lib/Signal/Group/sender-key-record.js +2 -11
- package/lib/Signal/Group/sender-key-state.js +11 -57
- package/lib/Signal/libsignal.js +200 -116
- package/lib/Signal/lid-mapping.js +121 -68
- package/lib/Socket/Client/websocket.js +9 -2
- package/lib/Socket/business.js +5 -1
- package/lib/Socket/chats.js +180 -89
- package/lib/Socket/community.js +169 -41
- package/lib/Socket/groups.js +25 -21
- package/lib/Socket/messages-recv.js +458 -333
- package/lib/Socket/messages-send.js +517 -572
- package/lib/Socket/mex.js +61 -0
- package/lib/Socket/newsletter.js +159 -252
- package/lib/Socket/socket.js +283 -100
- package/lib/Types/Newsletter.js +32 -25
- package/lib/Utils/auth-utils.js +189 -354
- package/lib/Utils/browser-utils.js +43 -0
- package/lib/Utils/chat-utils.js +166 -41
- package/lib/Utils/decode-wa-message.js +77 -35
- package/lib/Utils/event-buffer.js +80 -24
- package/lib/Utils/generics.js +28 -128
- package/lib/Utils/history.js +10 -8
- package/lib/Utils/index.js +1 -1
- package/lib/Utils/link-preview.js +17 -32
- package/lib/Utils/lt-hash.js +28 -22
- package/lib/Utils/make-mutex.js +26 -28
- package/lib/Utils/message-retry-manager.js +51 -3
- package/lib/Utils/messages-media.js +343 -151
- package/lib/Utils/messages.js +806 -792
- package/lib/Utils/noise-handler.js +33 -2
- package/lib/Utils/pre-key-manager.js +126 -0
- package/lib/Utils/process-message.js +115 -55
- package/lib/Utils/signal.js +45 -18
- package/lib/Utils/validate-connection.js +52 -29
- package/lib/WABinary/constants.js +1268 -1268
- package/lib/WABinary/decode.js +58 -4
- package/lib/WABinary/encode.js +54 -7
- package/lib/WABinary/jid-utils.js +58 -11
- package/lib/WAM/constants.js +19064 -11563
- package/lib/WAM/encode.js +57 -8
- package/lib/WAUSync/USyncQuery.js +35 -19
- package/package.json +9 -8
- package/lib/Socket/usync.js +0 -83
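
The bulk of the behavioral change in this release lands in `package/lib/Socket/messages-recv.js`, reconstructed below: `fetchMessageHistory` and `requestPlaceholderResend` move to the top of the socket factory and now await their cache operations, privacy-token notifications get a dedicated `handlePrivacyTokenNotification` handler, LID↔PN mappings are persisted (with session migration) when an alternate JID is seen, and offline-node processing yields to the event loop in batches. As orientation, here is a hedged usage sketch of the on-demand history request: it assumes this fork, like upstream Baileys, returns `fetchMessageHistory` from the socket and delivers results via the `messaging-history.set` event; the surrounding names are illustrative, not taken from this diff.

```js
// Minimal sketch (assumes upstream Baileys' public API is preserved:
// sock.fetchMessageHistory(...) and the 'messaging-history.set' event).
const makeWASocket = require('@hansaka02/baileys').default

async function requestOlderMessages(sock, oldestMsg) {
    // `oldestMsg` is the oldest WAMessage already on hand for the chat
    const requestId = await sock.fetchMessageHistory(
        50,                         // onDemandMsgCount
        oldestMsg.key,              // oldest message key in the chat
        oldestMsg.messageTimestamp  // its timestamp (the request field is oldestMsgTimestampMs)
    )

    // The primary device answers asynchronously; listen for the sync payload
    sock.ev.on('messaging-history.set', ({ messages, syncType }) => {
        console.log(`history sync (${syncType}): ${messages.length} messages`)
    })

    return requestId
}
```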
```diff
--- a/package/lib/Socket/messages-recv.js
+++ b/package/lib/Socket/messages-recv.js
@@ -54,9 +54,10 @@ const {
     getBinaryNodeChildren,
     getBinaryNodeChildString,
     isJidGroup,
+    isJidNewsletter,
     isJidStatusBroadcast,
     isLidUser,
-
+    isPnUser,
     jidDecode,
     jidNormalizedUser,
     S_WHATSAPP_NET
@@ -66,9 +67,40 @@ const { makeMutex } = require("../Utils/make-mutex")
 const { makeMessagesSocket } = require("./messages-send")
 
 const makeMessagesRecvSocket = (config) => {
-    const {
+    const {
+        logger,
+        retryRequestDelayMs,
+        maxMsgRetryCount,
+        getMessage,
+        shouldIgnoreJid,
+        enableAutoSessionRecreation
+    } = config
+
     const suki = makeMessagesSocket(config)
-
+
+    const {
+        ev,
+        authState,
+        ws,
+        messageMutex,
+        notificationMutex,
+        receiptMutex,
+        signalRepository,
+        query,
+        upsertMessage,
+        resyncAppState,
+        onUnexpectedError,
+        assertSessions,
+        sendNode,
+        relayMessage,
+        sendReceipt,
+        uploadPreKeys,
+        groupMetadata,
+        getUSyncDevices,
+        createParticipantNodes,
+        messageRetryManager,
+        sendPeerDataOperationMessage
+    } = suki
 
     /** this mutex ensures that each retryRequest will wait for the previous one to finish */
     const retryMutex = makeMutex()
@@ -88,8 +120,73 @@ const makeMessagesRecvSocket = (config) => {
         useClones: false
     })
 
+    // Debounce identity-change session refreshes per JID to avoid bursts
+    const identityAssertDebounce = new NodeCache({
+        stdTTL: 5,
+        useClones: false
+    })
+
     let sendActiveReceipts = false
 
+    const fetchMessageHistory = async (count, oldestMsgKey, oldestMsgTimestamp) => {
+        if (!authState.creds.me?.id) {
+            throw new Boom('Not authenticated')
+        }
+
+        const pdoMessage = {
+            historySyncOnDemandRequest: {
+                chatJid: oldestMsgKey.remoteJid,
+                oldestMsgFromMe: oldestMsgKey.fromMe,
+                oldestMsgId: oldestMsgKey.id,
+                oldestMsgTimestampMs: oldestMsgTimestamp,
+                onDemandMsgCount: count
+            },
+            peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.HISTORY_SYNC_ON_DEMAND
+        }
+
+        return sendPeerDataOperationMessage(pdoMessage)
+    }
+
+    const requestPlaceholderResend = async (messageKey) => {
+        if (!authState.creds.me?.id) {
+            throw new Boom('Not authenticated')
+        }
+
+        if (await placeholderResendCache.get(messageKey?.id)) {
+            logger.debug({ messageKey }, 'already requested resend')
+            return
+        }
+
+        else {
+            await placeholderResendCache.set(messageKey?.id, true)
+        }
+
+        await delay(5000)
+
+        if (!(await placeholderResendCache.get(messageKey?.id))) {
+            logger.debug({ messageKey }, 'message received while resend requested')
+            return 'RESOLVED'
+        }
+
+        const pdoMessage = {
+            placeholderMessageResendRequest: [
+                {
+                    messageKey
+                }
+            ],
+            peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.PLACEHOLDER_MESSAGE_RESEND
+        }
+
+        setTimeout(async () => {
+            if (await placeholderResendCache.get(messageKey?.id)) {
+                logger.debug({ messageKey }, 'PDO message without response after 15 seconds. Phone possibly offline')
+                await placeholderResendCache.del(messageKey?.id)
+            }
+        }, 15000)
+
+        return sendPeerDataOperationMessage(pdoMessage)
+    }
+
     const sendMessageAck = async ({ tag, attrs, content }, errorCode) => {
         const stanza = {
             tag: 'ack',
@@ -223,20 +320,25 @@ const makeMessagesRecvSocket = (config) => {
 
             // Use the new retry count for the rest of the logic
             const key = `${msgId}:${msgKey?.participant}`
-            msgRetryCache.set(key, retryCount)
+            await msgRetryCache.set(key, retryCount)
         }
+
         else {
             // Fallback to old system
             const key = `${msgId}:${msgKey?.participant}`
+
             let retryCount = (await msgRetryCache.get(key)) || 0
 
             if (retryCount >= maxMsgRetryCount) {
                 logger.debug({ retryCount, msgId }, 'reached retry limit, clearing')
-
+
+                await msgRetryCache.del(key)
+
                 return
             }
 
             retryCount += 1
+
             await msgRetryCache.set(key, retryCount)
         }
 
@@ -260,12 +362,15 @@ const makeMessagesRecvSocket = (config) => {
                 recreateReason = result.reason
 
                 if (shouldRecreateSession) {
-                    logger.
+                    logger.debug({ fromJid, retryCount, reason: recreateReason }, 'recreating session for retry')
+
                     // Delete existing session to force recreation
                     await authState.keys.set({ session: { [sessionId]: null } })
+
                     forceIncludeKeys = true
                 }
             }
+
             catch (error) {
                 logger.warn({ error, fromJid }, 'failed to check session recreation')
             }
@@ -277,22 +382,27 @@ const makeMessagesRecvSocket = (config) => {
             // Schedule phone request with delay (like whatsmeow)
             messageRetryManager.schedulePhoneRequest(msgId, async () => {
                 try {
-                    const
-
+                    const requestId = await requestPlaceholderResend(msgKey)
+
+                    logger.debug(`sendRetryRequest: requested placeholder resend (${requestId}) for message ${msgId} (scheduled)`)
                 }
+
                 catch (error) {
-                    logger.warn({ error, msgId }, 'failed to send scheduled phone request')
+                    logger.warn({ error, msgId }, 'failed to send scheduled phone request')
                 }
             })
         }
+
         else {
             // Fallback to immediate request
             const msgId = await requestPlaceholderResend(msgKey)
+
             logger.debug(`sendRetryRequest: requested placeholder resend for message ${msgId}`)
         }
     }
 
     const deviceIdentity = encodeSignedDeviceIdentity(account, true)
+
     await authState.keys.transaction(async () => {
         const receipt = {
             tag: 'receipt',
@@ -308,7 +418,9 @@ const makeMessagesRecvSocket = (config) => {
                     count: retryCount.toString(),
                     id: node.attrs.id,
                     t: node.attrs.t,
-                    v: '1'
+                    v: '1',
+                    // ADD ERROR FIELD
+                    error: '0'
                 }
             },
             {
@@ -343,20 +455,25 @@ const makeMessagesRecvSocket = (config) => {
                 xmppSignedPreKey(signedPreKey),
                 { tag: 'device-identity', attrs: {}, content: deviceIdentity }
             ]
-            })
+            })
+
             ev.emit('creds.update', update)
         }
+
         await sendNode(receipt)
+
         logger.info({ msgAttrs: node.attrs, retryCount }, 'sent retry receipt')
     }, authState?.creds?.me?.id || 'sendRetryRequest')
 }
 
 const handleEncryptNotification = async (node) => {
     const from = node.attrs.from
+
     if (from === S_WHATSAPP_NET) {
         const countChild = getBinaryNodeChild(node, 'count')
         const count = +countChild.attrs.value
         const shouldUploadMorePreKeys = count < MIN_PREKEY_COUNT
+
         logger.debug({ count, shouldUploadMorePreKeys }, 'recv pre-key count')
 
         if (shouldUploadMorePreKeys) {
@@ -366,10 +483,24 @@ const makeMessagesRecvSocket = (config) => {
 
     else {
         const identityNode = getBinaryNodeChild(node, 'identity')
+
         if (identityNode) {
             logger.info({ jid: from }, 'identity changed')
-
-
+
+            if (identityAssertDebounce.get(from)) {
+                logger.debug({ jid: from }, 'skipping identity assert (debounced)')
+                return
+            }
+
+            identityAssertDebounce.set(from, true)
+
+            try {
+                await assertSessions([from], true)
+            }
+
+            catch (error) {
+                logger.warn({ error, jid: from }, 'failed to assert sessions after identity change')
+            }
         }
 
         else {
@@ -378,29 +509,35 @@ const makeMessagesRecvSocket = (config) => {
         }
     }
 
-    const handleGroupNotification = (
-
-
-
-
+    const handleGroupNotification = (fullNode, child, msg) => {
+        // TODO: Support PN/LID (Here is only LID now)
+        const actingParticipantLid = fullNode.attrs.participant
+        const actingParticipantPn = fullNode.attrs.participant_pn
+        const affectedParticipantLid = getBinaryNodeChild(child, 'participant')?.attrs?.jid || actingParticipantLid
+        const affectedParticipantPn = getBinaryNodeChild(child, 'participant')?.attrs?.phone_number || actingParticipantPn
+
+        switch (child?.tag) {
             case 'create':
                 const metadata = extractGroupMetadata(child)
                 msg.messageStubType = WAMessageStubType.GROUP_CREATE
                 msg.messageStubParameters = [metadata.subject]
-                msg.key = { participant: metadata.owner }
-
+                msg.key = { participant: metadata.owner, participantAlt: metadata.ownerPn }
+
+                ev.emit('chats.upsert', [
+                    {
                         id: metadata.id,
                         name: metadata.subject,
-                        conversationTimestamp: metadata.creation
-                    }
-
+                        conversationTimestamp: metadata.creation
+                    }
+                ])
+
+                ev.emit('groups.upsert', [
+                    {
                         ...metadata,
-                        author:
-
-
-
-                msg.messageStubType = WAMessageStubType.COMMUNITY_PARENT_GROUP_DELETED
-                msg.messageStubParameters = [participantJid, 'delete']
+                        author: actingParticipantLid,
+                        authorPn: actingParticipantPn
+                    }
+                ])
                 break
             case 'ephemeral':
             case 'not_ephemeral':
@@ -412,7 +549,7 @@ const makeMessagesRecvSocket = (config) => {
                 }
                 break
             case 'modify':
-                const oldNumber =
+                const oldNumber = getBinaryNodeChildren(child, 'participant').map(p => p.attrs.jid)
                 msg.messageStubParameters = oldNumber || []
                 msg.messageStubType = WAMessageStubType.GROUP_PARTICIPANT_CHANGE_NUMBER
                 break
@@ -421,24 +558,32 @@ const makeMessagesRecvSocket = (config) => {
             case 'remove':
             case 'add':
             case 'leave':
-
-                if (child.attrs?.reason === 'linked_group_join') {
-                    stubType = GROUP_PARTICIPANT_LINKED_GROUP_JOIN
-                }
+                const stubType = `GROUP_PARTICIPANT_${child.tag.toUpperCase()}`
                 msg.messageStubType = WAMessageStubType[stubType]
-                const participants =
+                const participants = getBinaryNodeChildren(child, 'participant').map(({ attrs }) => {
+                    // TODO: Store LID MAPPINGS
+                    return {
+                        id: attrs.jid,
+                        phoneNumber: isLidUser(attrs.jid) && isPnUser(attrs.phone_number) ? attrs.phone_number : undefined,
+                        lid: isPnUser(attrs.jid) && isLidUser(attrs.lid) ? attrs.lid : undefined,
+                        admin: (attrs.type || null)
+                    }
+                })
+
                 if (participants.length === 1 &&
                     // if recv. "remove" message and sender removed themselves
                     // mark as left
-                    areJidsSameUser(participants[0],
+                    (areJidsSameUser(participants[0].id, actingParticipantLid) ||
+                    areJidsSameUser(participants[0].id, actingParticipantPn)) &&
                     child.tag === 'remove') {
                     msg.messageStubType = WAMessageStubType.GROUP_PARTICIPANT_LEAVE
                 }
-
+
+                msg.messageStubParameters = participants.map(a => JSON.stringify(a))
                 break
             case 'subject':
                 msg.messageStubType = WAMessageStubType.GROUP_CHANGE_SUBJECT
-                msg.messageStubParameters = [
+                msg.messageStubParameters = [child.attrs.subject]
                 break
             case 'description':
                 const description = getBinaryNodeChild(child, 'body')?.content?.toString()
@@ -448,19 +593,19 @@ const makeMessagesRecvSocket = (config) => {
             case 'announcement':
             case 'not_announcement':
                 msg.messageStubType = WAMessageStubType.GROUP_CHANGE_ANNOUNCE
-                msg.messageStubParameters = [
+                msg.messageStubParameters = [child.tag === 'announcement' ? 'on' : 'off']
                 break
             case 'locked':
             case 'unlocked':
                 msg.messageStubType = WAMessageStubType.GROUP_CHANGE_RESTRICT
-                msg.messageStubParameters = [
+                msg.messageStubParameters = [child.tag === 'locked' ? 'on' : 'off']
                 break
             case 'invite':
                 msg.messageStubType = WAMessageStubType.GROUP_CHANGE_INVITE_LINK
                 msg.messageStubParameters = [child.attrs.code]
                 break
             case 'member_add_mode':
-                const addMode = child.content
+                const addMode = child.content;
                 if (addMode) {
                     msg.messageStubType = WAMessageStubType.GROUP_MEMBER_ADD_MODE
                     msg.messageStubParameters = [addMode.toString()]
@@ -474,52 +619,21 @@ const makeMessagesRecvSocket = (config) => {
                 }
                 break
             case 'created_membership_requests':
-                participantJid = mode === 'lid' ? getBinaryNodeChild(child, 'requested_user')?.attrs?.phone_number : getBinaryNodeChild(child, 'requested_user')?.attrs?.jid || participant
-
                 msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD
-                msg.messageStubParameters = [
+                msg.messageStubParameters = [
+                    JSON.stringify({ lid: affectedParticipantLid, pn: affectedParticipantPn }),
+                    'created',
+                    child.attrs.request_method
+                ]
                 break
             case 'revoked_membership_requests':
-
-
-                const isDenied = areJidsSameUser(participantJid, participant)
+                const isDenied = areJidsSameUser(affectedParticipantLid, actingParticipantLid)
+                // TODO: LIDMAPPING SUPPORT
                 msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD
-                msg.messageStubParameters = [
-
-
-
-                const type = child.attrs?.unlink_type || child.attrs?.link_type
-                const stubMap = {
-                    parent_group: WAMessageStubType[`COMMUNITY_${child.tag.toUpperCase()}_PARENT_GROUP`],
-                    sibling_group: WAMessageStubType[`COMMUNITY_${child.tag.toUpperCase()}_SIBLING_GROUP`],
-                    sub_group: WAMessageStubType[`COMMUNITY_${child.tag.toUpperCase()}_SUB_GROUP`]
-                }
-                const groups = getBinaryNodeChildren(child, 'group')
-                    .map(g => g.attrs?.jid || g.attrs?.subject || '')
-                    .filter(x => x)
-                msg.messageStubType = stubMap?.[type] || WAMessageStubType[`COMMUNITY_${child.tag.toUpperCase()}_PARENT_GROUP`]
-                msg.messageStubParameters = [participantJid, child.tag, groups]
-                break
-            case 'linked_group_promote':
-            case 'linked_group_demote':
-                const stubtype = `COMMUNITY_PARTICIPANT_${child.tag.split('_')[2].toUpperCase()}`
-                const participantS = mode === 'lid' ? getBinaryNodeChildren(child, 'participant').map(p => p.attrs.phone_number) : getBinaryNodeChildren(child, 'participant').map(p => p.attrs.jid)
-                msg.messageStubType = WAMessageStubType[stubtype]
-                msg.messageStubParameters = participantS
-                break
-            case 'created_sub_group_suggestion':
-                msg.messageStubType = WAMessageStubType.SUGGESTED_SUBGROUP_ANNOUNCE
-                msg.messageStubParameters = [participantJid, 'add']
-                break
-            case 'revoked_sub_group_suggestions':
-                const res = getBinaryNodeChildren(child, 'sub_group_suggestions')
-                const reason = res.attrs?.reason
-                if (reason === 'approved') msg.messageStubType = WAMessageStubType.GROUP_CREATE
-                else msg.messageStubType = WAMessageStubType.GENERIC_NOTIFICATION
-                msg.messageStubParameters = [participantJid, reason]
-                break
-            default:
-                logger.warn(child.tag, 'Unhandled group node')
+                msg.messageStubParameters = [
+                    JSON.stringify({ lid: affectedParticipantLid, pn: affectedParticipantPn }),
+                    isDenied ? 'revoked' : 'rejected'
+                ]
                 break
         }
     }
@@ -637,22 +751,9 @@ const makeMessagesRecvSocket = (config) => {
         const from = jidNormalizedUser(node.attrs.from)
 
         switch (nodeType) {
-            case 'privacy_token':
-                const tokenList = getBinaryNodeChildren(child, 'token')
-                for (const { attrs, content } of tokenList) {
-                    const jid = attrs.jid
-                    ev.emit('chats.update', [
-                        {
-                            id: jid,
-                            tcToken: content
-                        }
-                    ])
-                    logger.debug({ jid }, 'got privacy token update')
-                }
-                break
             case 'w:gp2':
-
-                handleGroupNotification(
+                // TODO: HANDLE PARTICIPANT_PN
+                handleGroupNotification(node, child, result)
                 break
             case 'newsletter':
                 handleNewsletterNotification(node.attrs.from, child)
@@ -686,20 +787,27 @@ const makeMessagesRecvSocket = (config) => {
             case 'picture':
                 const setPicture = getBinaryNodeChild(node, 'set')
                 const delPicture = getBinaryNodeChild(node, 'delete')
-
+
+                ev.emit('contacts.update', [
+                    {
                         id: jidNormalizedUser(node?.attrs?.from) || (setPicture || delPicture)?.attrs?.hash || '',
                         imgUrl: setPicture ? 'changed' : 'removed'
-                    }
+                    }
+                ])
+
                 if (isJidGroup(from)) {
                     const node = setPicture || delPicture
+
                     result.messageStubType = WAMessageStubType.GROUP_CHANGE_ICON
+
                     if (setPicture) {
                         result.messageStubParameters = [setPicture.attrs.id]
                     }
-
+
+                    result.participant = node?.attrs.author
                     result.key = {
-                        ...result.key || {},
-                        participant: setPicture?.attrs
+                        ...(result.key || {}),
+                        participant: setPicture?.attrs.author
                     }
                 }
                 break
@@ -713,16 +821,17 @@ const makeMessagesRecvSocket = (config) => {
                     ...authState.creds.accountSettings,
                     defaultDisappearingMode: {
                         ephemeralExpiration: newDuration,
-                        ephemeralSettingTimestamp: timestamp
-                    }
+                        ephemeralSettingTimestamp: timestamp
+                    }
                 }
             })
         }
+
         else if (child.tag === 'blocklist') {
             const blocklists = getBinaryNodeChildren(child, 'item')
             for (const { attrs } of blocklists) {
                 const blocklist = [attrs.jid]
-                const type =
+                const type = attrs.action === 'block' ? 'add' : 'remove'
                 ev.emit('blocklist.update', { blocklist, type })
             }
         }
@@ -736,13 +845,15 @@ const makeMessagesRecvSocket = (config) => {
         const companionSharedKey = Curve.sharedKey(authState.creds.pairingEphemeralKeyPair.private, codePairingPublicKey)
         const random = randomBytes(32)
         const linkCodeSalt = randomBytes(32)
-
         const linkCodePairingExpanded = await hkdf(companionSharedKey, 32, {
             salt: linkCodeSalt,
             info: 'link_code_pairing_key_bundle_encryption_key'
         })
-
-
+        const encryptPayload = Buffer.concat([
+            Buffer.from(authState.creds.signedIdentityKey.public),
+            primaryIdentityPublicKey,
+            random
+        ])
         const encryptIv = randomBytes(12)
         const encrypted = aesEncryptGCM(encryptPayload, linkCodePairingExpanded, encryptIv, Buffer.alloc(0))
         const encryptedPayload = Buffer.concat([linkCodeSalt, encryptIv, encrypted])
@@ -764,7 +875,7 @@ const makeMessagesRecvSocket = (config) => {
             tag: 'link_code_companion_reg',
             attrs: {
                 jid: authState.creds.me.id,
-                stage: 'companion_finish'
+                stage: 'companion_finish'
             },
             content: [
                 {
@@ -789,6 +900,10 @@ const makeMessagesRecvSocket = (config) => {
 
                 authState.creds.registered = true
                 ev.emit('creds.update', authState.creds)
+                break
+            case 'privacy_token':
+                await handlePrivacyTokenNotification(node)
+                break
         }
 
         if (Object.keys(result).length) {
@@ -796,6 +911,33 @@ const makeMessagesRecvSocket = (config) => {
         }
     }
 
+    const handlePrivacyTokenNotification = async (node) => {
+        const tokensNode = getBinaryNodeChild(node, 'tokens')
+        const from = jidNormalizedUser(node.attrs.from)
+
+        if (!tokensNode) return
+
+        const tokenNodes = getBinaryNodeChildren(tokensNode, 'token')
+
+        for (const tokenNode of tokenNodes) {
+            const { attrs, content } = tokenNode
+            const type = attrs.type
+            const timestamp = attrs.t
+
+            if (type === 'trusted_contact' && content instanceof Buffer) {
+                logger.debug({
+                    from,
+                    timestamp,
+                    tcToken: content
+                }, 'received trusted contact token')
+
+                await authState.keys.set({
+                    tctoken: { [from]: { token: content, timestamp } }
+                })
+            }
+        }
+    }
+
     async function decipherLinkPublicKey(data) {
         const buffer = toRequiredBuffer(data)
         const salt = buffer.slice(0, 32)
@@ -838,6 +980,7 @@ const makeMessagesRecvSocket = (config) => {
         // Try to get from retry cache first if enabled
         if (messageRetryManager) {
             const cachedMsg = messageRetryManager.getRecentMessage(remoteJid, id)
+
             if (cachedMsg) {
                 msg = cachedMsg.message
                 logger.debug({ jid: remoteJid, id }, 'found message in retry cache')
@@ -850,15 +993,17 @@ const makeMessagesRecvSocket = (config) => {
         // Fallback to getMessage if not found in cache
         if (!msg) {
             msg = await getMessage({ ...key, id })
+
             if (msg) {
                 logger.debug({ jid: remoteJid, id }, 'found message via getMessage')
 
                 // Also mark as successful if found via getMessage
                 if (messageRetryManager) {
-                    messageRetryManager.markRetrySuccess(id)
+                    messageRetryManager.markRetrySuccess(id)
                 }
             }
         }
+
         msgs.push(msg)
     }
 
@@ -881,41 +1026,45 @@ const makeMessagesRecvSocket = (config) => {
                 recreateReason = result.reason
 
                 if (shouldRecreateSession) {
-                    logger.
-                    await authState.keys.set({ session: { [sessionId]: null } })
+                    logger.debug({ participant, retryCount, reason: recreateReason }, 'recreating session for outgoing retry')
+                    await authState.keys.set({ session: { [sessionId]: null } })
                 }
             }
+
             catch (error) {
                 logger.warn({ error, participant }, 'failed to check session recreation for outgoing retry')
            }
        }
 
-        await assertSessions([participant],
+        await assertSessions([participant], true)
 
        if (isJidGroup(remoteJid)) {
-            await authState.keys.set({ 'sender-key-memory': { [remoteJid]: null } })
+            await authState.keys.set({ 'sender-key-memory': { [remoteJid]: null } })
        }
+
        logger.debug({ participant, sendToAll, shouldRecreateSession, recreateReason }, 'forced new session for retry recp')
 
        for (const [i, msg] of msgs.entries()) {
-            if (!ids[i])
-
-
+            if (!ids[i]) continue
+
            if (msg && (await willSendMessageAgain(ids[i], participant))) {
-                updateSendMessageAgainCount(ids[i], participant)
+                await updateSendMessageAgainCount(ids[i], participant)
                const msgRelayOpts = { messageId: ids[i] }
 
                if (sendToAll) {
                    msgRelayOpts.useUserDevicesCache = false
                }
+
                else {
                    msgRelayOpts.participant = {
                        jid: participant,
                        count: +retryNode.attrs.count
                    }
                }
+
                await relayMessage(key.remoteJid, msg, msgRelayOpts)
            }
+
            else {
                logger.debug({ jid: key.remoteJid, id: ids[i] }, 'recv retry request, but message not available')
            }
@@ -928,7 +1077,6 @@ const makeMessagesRecvSocket = (config) => {
        const isNodeFromMe = areJidsSameUser(attrs.participant || attrs.from, isLid ? authState.creds.me?.lid : authState.creds.me?.id)
        const remoteJid = !isNodeFromMe || isJidGroup(attrs.from) ? attrs.from : attrs.recipient
        const fromMe = !attrs.recipient || ((attrs.type === 'retry' || attrs.type === 'sender') && isNodeFromMe)
-
        const key = {
            remoteJid,
            id: '',
@@ -936,13 +1084,14 @@ const makeMessagesRecvSocket = (config) => {
            participant: attrs.participant
        }
 
-        if (shouldIgnoreJid(remoteJid) && remoteJid !==
+        if (shouldIgnoreJid(remoteJid) && remoteJid !== S_WHATSAPP_NET) {
            logger.debug({ remoteJid }, 'ignoring receipt from jid')
            await sendMessageAck(node)
            return
        }
 
        const ids = [attrs.id]
+
        if (Array.isArray(content)) {
            const items = getBinaryNodeChildren(content[0], 'item')
            ids.push(...items.map(i => i.attrs.id))
@@ -950,17 +1099,17 @@ const makeMessagesRecvSocket = (config) => {
 
        try {
            await Promise.all([
-
+                receiptMutex.mutex(async () => {
                    const status = getStatusFromReceiptType(attrs.type)
 
-                    if (typeof status !== 'undefined' &&
+                    if (typeof status !== 'undefined' &&
                        // basically, we only want to know when a message from us has been delivered to/read by the other person
                        // or another device of ours has read some messages
-                        status >= proto.WebMessageInfo.Status.SERVER_ACK ||
-                        !isNodeFromMe)) {
+                        (status >= proto.WebMessageInfo.Status.SERVER_ACK || !isNodeFromMe)) {
                        if (isJidGroup(remoteJid) || isJidStatusBroadcast(remoteJid)) {
                            if (attrs.participant) {
                                const updateKey = status === proto.WebMessageInfo.Status.DELIVERY_ACK ? 'receiptTimestamp' : 'readTimestamp'
+
                                ev.emit('message-receipt.update', ids.map(id => ({
                                    key: { ...key, id },
                                    receipt: {
@@ -982,13 +1131,16 @@ const makeMessagesRecvSocket = (config) => {
                    if (attrs.type === 'retry') {
                        // correctly set who is asking for the retry
                        key.participant = key.participant || attrs.from
+
                        const retryNode = getBinaryNodeChild(node, 'retry')
 
                        if (ids[0] && key.participant && (await willSendMessageAgain(ids[0], key.participant))) {
                            if (key.fromMe) {
                                try {
-                                    updateSendMessageAgainCount(ids[0], key.participant)
+                                    await updateSendMessageAgainCount(ids[0], key.participant)
+
                                    logger.debug({ attrs, key }, 'recv retry request')
+
                                    await sendMessagesAgain(key, ids, retryNode)
                                }
 
@@ -1009,6 +1161,7 @@ const makeMessagesRecvSocket = (config) => {
                })
            ])
        }
+
        finally {
            await sendMessageAck(node)
        }
@@ -1016,8 +1169,7 @@ const makeMessagesRecvSocket = (config) => {
 
    const handleNotification = async (node) => {
        const remoteJid = node.attrs.from
-
-        if (shouldIgnoreJid(remoteJid) && remoteJid !== '@s.whatsapp.net') {
+        if (shouldIgnoreJid(remoteJid) && remoteJid !== S_WHATSAPP_NET) {
            logger.debug({ remoteJid, id: node.attrs.id }, 'ignored notification')
            await sendMessageAck(node)
            return
@@ -1025,40 +1177,46 @@ const makeMessagesRecvSocket = (config) => {
 
        try {
            await Promise.all([
-
+                notificationMutex.mutex(async () => {
                    const msg = await processNotification(node)
 
                    if (msg) {
                        const fromMe = areJidsSameUser(node.attrs.participant || remoteJid, authState.creds.me.id)
+                        const { senderAlt: participantAlt, addressingMode } = extractAddressingContext(node)
+
                        msg.key = {
                            remoteJid,
                            fromMe,
                            participant: node.attrs.participant,
+                            participantAlt,
+                            addressingMode,
                            id: node.attrs.id,
                            ...(msg.key || {})
                        }
-
+
+                        msg.participant ?? (msg.participant = node.attrs.participant)
                        msg.messageTimestamp = +node.attrs.t
+
                        const fullMsg = proto.WebMessageInfo.fromObject(msg)
+
                        await upsertMessage(fullMsg, 'append')
                    }
                })
            ])
        }
+
        finally {
            await sendMessageAck(node)
        }
    }
 
    const handleMessage = async (node) => {
-        if (shouldIgnoreJid(node.attrs.from) && node.attrs.from !==
+        if (shouldIgnoreJid(node.attrs.from) && node.attrs.from !== S_WHATSAPP_NET) {
            logger.debug({ key: node.attrs.key }, 'ignored message')
-            await sendMessageAck(node)
+            await sendMessageAck(node, NACK_REASONS.UnhandledError)
            return
        }
 
-        let response
-
        const encNode = getBinaryNodeChild(node, 'enc')
 
        // TODO: temporary fix for crashes and issues resulting of failed msmsg decryption
@@ -1068,53 +1226,23 @@ const makeMessagesRecvSocket = (config) => {
            return
        }
 
-
-
-            const { key } = decodeMessageNode(node, authState.creds.me.id, authState.creds.me.lid || '').fullMessage
-            response = await requestPlaceholderResend(key);
-            if (response === 'RESOLVED') {
-                return
-            }
-            logger.debug('received unavailable message, acked and requested resend from phone');
-        }
-        else {
-            if (placeholderResendCache.get(node.attrs.id)) {
-                await placeholderResendCache.del(node.attrs.id)
-            }
-        }
-
-        const { fullMessage: msg, category, author, decrypt } = decryptMessageNode(node, authState.creds.me.id, authState.creds.me.lid || '', signalRepository, logger)
-
-        if (response && msg?.messageStubParameters?.[0] === NO_MESSAGE_FOUND_ERROR_TEXT) {
-            msg.messageStubParameters = [NO_MESSAGE_FOUND_ERROR_TEXT, response]
-        }
-
-        if (msg.message?.protocolMessage?.type === proto.Message.ProtocolMessage.Type.SHARE_PHONE_NUMBER &&
-            node.attrs.sender_pn) {
-            const lid = jidNormalizedUser(node.attrs.from), pn = jidNormalizedUser(node.attrs.sender_pn)
-            ev.emit('lid-mapping.update', { lid, pn })
-            await signalRepository.lidMapping.storeLIDPNMappings([{ lid, pn }])
-        }
-
-        const alt = msg.key.participantAlt || msg.key.remoteJidAlt
-
+        const { fullMessage: msg, category, author, decrypt } = decryptMessageNode(node, authState.creds.me.id, authState.creds.me.lid || '', signalRepository, logger);
+        const alt = msg.key.participantAlt || msg.key.remoteJidAlt;
        // store new mappings we didn't have before
        if (!!alt) {
            const altServer = jidDecode(alt)?.server
+            const primaryJid = msg.key.participant || msg.key.remoteJid
 
            if (altServer === 'lid') {
-                if (
-                await signalRepository.lidMapping.storeLIDPNMappings([
-
-                ])
+                if (!(await signalRepository.lidMapping.getPNForLID(alt))) {
+                    await signalRepository.lidMapping.storeLIDPNMappings([{ lid: alt, pn: primaryJid }])
+                    await signalRepository.migrateSession(primaryJid, alt)
                }
            }
+
            else {
-
-
-                    { lid: msg.key.participant || msg.key.remoteJid, pn: alt }
-                ])
-            }
+                await signalRepository.lidMapping.storeLIDPNMappings([{ lid: primaryJid, pn: alt }])
+                await signalRepository.migrateSession(alt, primaryJid)
            }
        }
 
@@ -1127,70 +1255,83 @@ const makeMessagesRecvSocket = (config) => {
        }
 
        try {
-            await
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            }
+            await messageMutex.mutex(async () => {
+                await decrypt()
+
+                // message failed to decrypt
+                if (msg.messageStubType === proto.WebMessageInfo.StubType.CIPHERTEXT && msg.category !== 'peer') {
+                    if (msg?.messageStubParameters?.[0] === MISSING_KEYS_ERROR_TEXT ||
+                        msg.messageStubParameters?.[0] === NO_MESSAGE_FOUND_ERROR_TEXT) {
+                        return sendMessageAck(node)
+                    }
+
+                    const errorMessage = msg?.messageStubParameters?.[0] || ''
+                    const isPreKeyError = errorMessage.includes('PreKey')
+
+                    logger.debug(`[handleMessage] Attempting retry request for failed decryption`)
+
+                    // Handle both pre-key and normal retries in single mutex
+                    await retryMutex.mutex(async () => {
+                        try {
+                            if (!ws.isOpen) {
+                                logger.debug({ node }, 'Connection closed, skipping retry')
+                                return
+                            }
+
+                            // Handle pre-key errors with upload and delay
+                            if (isPreKeyError) {
+                                logger.info({ error: errorMessage }, 'PreKey error detected, uploading and retrying')
 
-
-                logger.debug('
-
+                                try {
+                                    logger.debug('Uploading pre-keys for error recovery')
+                                    await uploadPreKeys(5)
+                                    logger.debug('Waiting for server to process new pre-keys')
+                                    await delay(1000)
                                }
 
-
-
-                logger.info({ error: errorMessage }, 'PreKey error detected, uploading and retrying')
-                try {
-                    logger.debug('Uploading pre-keys for error recovery')
-                    await uploadPreKeys(5)
-                    logger.debug('Waiting for server to process new pre-keys')
-                    await delay(1000)
-                }
-                catch (uploadErr) {
-                    logger.error({ uploadErr }, 'Pre-key upload failed, proceeding with retry anyway')
-                }
+                                catch (uploadErr) {
+                                    logger.error({ uploadErr }, 'Pre-key upload failed, proceeding with retry anyway')
                                }
-
+                            }
+
+                            const encNode = getBinaryNodeChild(node, 'enc')
+
+                            await sendRetryRequest(node, !encNode)
+
+                            if (retryRequestDelayMs) {
+                                await delay(retryRequestDelayMs)
+                            }
+                        }
+
+                        catch (err) {
+                            logger.error({ err, isPreKeyError }, 'Failed to handle retry, attempting basic retry')
+
+                            // Still attempt retry even if pre-key upload failed
+                            try {
                                const encNode = getBinaryNodeChild(node, 'enc')
+
                                await sendRetryRequest(node, !encNode)
-                if (retryRequestDelayMs) {
-                    await delay(retryRequestDelayMs)
-                }
                            }
-
-
-                try {
-                    const encNode = getBinaryNodeChild(node, 'enc')
-                    await sendRetryRequest(node, !encNode)
-                }
-                catch (retryErr) {
-                    logger.error({ retryErr }, 'Failed to send retry after error handling')
-                }
+
+                            catch (retryErr) {
+                                logger.error({ retryErr }, 'Failed to send retry after error handling')
                            }
-                }
+                        }
+
+                        await sendMessageAck(node, NACK_REASONS.UnhandledError)
+                    })
+                }
+
+                else {
+                    if (messageRetryManager && msg.key.id) {
+                        messageRetryManager.cancelPendingPhoneRequest(msg.key.id)
                    }
 
-
+                    const isNewsletter = isJidNewsletter(msg.key.remoteJid)
+
+                    if (!isNewsletter) {
                        // no type in the receipt => message delivered
                        let type = undefined
-
                        let participant = msg.key.participant
 
                        if (category === 'peer') {
@@ -1211,124 +1352,60 @@ const makeMessagesRecvSocket = (config) => {
                        else if (!sendActiveReceipts) {
                            type = 'inactive'
                        }
+
                        await sendReceipt(msg.key.remoteJid, participant, [msg.key.id], type)
 
                        // send ack for history message
                        const isAnyHistoryMsg = getHistoryMsg(msg.message)
+
                        if (isAnyHistoryMsg) {
                            const jid = jidNormalizedUser(msg.key.remoteJid)
-                            await sendReceipt(jid, undefined, [msg.key.id], 'hist_sync')
+                            await sendReceipt(jid, undefined, [msg.key.id], 'hist_sync') // TODO: investigate
                        }
                    }
-
-
-
-
-
+
+                    else {
+                        await sendMessageAck(node)
+                        logger.debug({ key: msg.key }, 'processed newsletter message without receipts')
+                    }
+                }
+
+                cleanMessage(msg, authState.creds.me.id, authState.creds.me.lid)
+
+                await upsertMessage(msg, node.attrs.offline ? 'append' : 'notify')
+            })
        }
 
        catch (error) {
-            logger.error({ error, node }, 'error in handling message')
-        }
-    }
-
-    const fetchMessageHistory = async (count, oldestMsgKey, oldestMsgTimestamp) => {
-        if (!authState.creds.me?.id) {
-            throw new Boom('Not authenticated')
-        }
-
-        const pdoMessage = {
-            historySyncOnDemandRequest: {
-                chatJid: oldestMsgKey.remoteJid,
-                oldestMsgFromMe: oldestMsgKey.fromMe,
-                oldestMsgId: oldestMsgKey.id,
-                oldestMsgTimestampMs: oldestMsgTimestamp,
-                onDemandMsgCount: count
-            },
-            peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.HISTORY_SYNC_ON_DEMAND
+            logger.error({ error, node: binaryNodeToString(node) }, 'error in handling message')
        }
-
-        return sendPeerDataOperationMessage(pdoMessage)
-    }
-
-    const requestPlaceholderResend = async (messageKey) => {
-        if (!authState.creds.me?.id) {
-            throw new Boom('Not authenticated')
-        }
-
-        if (placeholderResendCache.get(messageKey?.id)) {
-            logger.debug({ messageKey }, 'already requested resend')
-            return
-        }
-
-        else {
-            placeholderResendCache.set(messageKey?.id, true)
-        }
-
-        await delay(5000)
-
-        if (!placeholderResendCache.get(messageKey?.id)) {
-            logger.debug({ messageKey }, 'message received while resend requested')
-            return 'RESOLVED'
-        }
-
-        const pdoMessage = {
-            placeholderMessageResendRequest: [{
-                messageKey
-            }],
-            peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.PLACEHOLDER_MESSAGE_RESEND
-        }
-
-        setTimeout(() => {
-            if (placeholderResendCache.get(messageKey?.id)) {
-                logger.debug({ messageKey }, 'PDO message without response after 15 seconds. Phone possibly offline')
-                placeholderResendCache.del(messageKey?.id)
-            }
-        }, 15000)
-
-        return sendPeerDataOperationMessage(pdoMessage)
    }
 
    const handleCall = async (node) => {
-        let status
-
        const { attrs } = node
        const [infoChild] = getAllBinaryNodeChildren(node)
-        const
-
-
-
-        if (isLidUser(from) && infoChild.tag === 'relaylatency') {
-            const verify = await callOfferCache.get(callId)
-
-            if (!verify) {
-                status = 'offer'
-
-                const callLid = {
-                    chatId: attrs.from,
-                    from,
-                    id: callId,
-                    date: new Date(+attrs.t * 1000),
-                    offline: !!attrs.offline,
-                    status
-                }
-                await callOfferCache.set(callId, callLid)
-            }
+        const status = getCallStatusFromNode(infoChild)
+
+        if (!infoChild) {
+            throw new Boom('Missing call info in call node')
        }
 
+        const callId = infoChild.attrs['call-id']
+        const from = infoChild.attrs.from || infoChild.attrs['call-creator']
        const call = {
            chatId: attrs.from,
            from,
            id: callId,
            date: new Date(+attrs.t * 1000),
            offline: !!attrs.offline,
-            status
+            status
        }
 
        if (status === 'offer') {
            call.isVideo = !!getBinaryNodeChild(infoChild, 'video')
            call.isGroup = infoChild.attrs.type === 'group' || !!infoChild.attrs['group-jid']
            call.groupJid = infoChild.attrs['group-jid']
+
            await callOfferCache.set(call.id, call)
        }
 
@@ -1351,7 +1428,8 @@ const makeMessagesRecvSocket = (config) => {
    }
 
    const handleBadAck = async ({ attrs }) => {
-        const key = { remoteJid: attrs.from, fromMe: true, id: attrs.id
+        const key = { remoteJid: attrs.from, fromMe: true, id: attrs.id }
+
        // WARNING: REFRAIN FROM ENABLING THIS FOR NOW. IT WILL CAUSE A LOOP
        // // current hypothesis is that if pash is sent in the ack
        // // it means -- the message hasn't reached all devices yet
@@ -1374,12 +1452,24 @@ const makeMessagesRecvSocket = (config) => {
                key,
                update: {
                    status: WAMessageStatus.ERROR,
-                    messageStubParameters: [
-                        attrs.error
-                    ]
+                    messageStubParameters: [attrs.error]
                }
            }
        ])
+
+        // resend the message with device_fanout=false, use at your own risk
+        // if (attrs.error === '475') {
+        //     const msg = await getMessage(key)
+        //     if (msg) {
+        //         await relayMessage(key.remoteJid!, msg, {
+        //             messageId: key.id!,
+        //             useUserDevicesCache: false,
+        //             additionalAttributes: {
+        //                 device_fanout: 'false'
+        //             }
+        //         })
+        //     }
+        // }
    }
 }
 
@@ -1389,12 +1479,17 @@ const makeMessagesRecvSocket = (config) => {
        ev.buffer()
        await execTask()
        ev.flush()
+
        function execTask() {
-            return exec(node, false)
-                .catch(err => onUnexpectedError(err, identifier))
+            return exec(node, false).catch(err => onUnexpectedError(err, identifier))
        }
    }
 
+    /** Yields control to the event loop to prevent blocking */
+    const yieldToEventLoop = () => {
+        return new Promise(resolve => setImmediate(resolve))
+    }
+
    const makeOfflineNodeProcessor = () => {
        const nodeProcessorMap = new Map([
            ['message', handleMessage],
@@ -1404,8 +1499,12 @@ const makeMessagesRecvSocket = (config) => {
        ])
 
        const nodes = []
+
        let isProcessing = false
 
+        // Number of nodes to process before yielding to event loop
+        const BATCH_SIZE = 10
+
        const enqueue = (type, node) => {
            nodes.push({ type, node })
 
@@ -1416,17 +1515,32 @@ const makeMessagesRecvSocket = (config) => {
            isProcessing = true
 
            const promise = async () => {
+                let processedInBatch = 0
+
                while (nodes.length && ws.isOpen) {
                    const { type, node } = nodes.shift()
                    const nodeProcessor = nodeProcessorMap.get(type)
+
                    if (!nodeProcessor) {
                        onUnexpectedError(new Error(`unknown offline node type: ${type}`), 'processing offline node')
                        continue
                    }
+
                    await nodeProcessor(node)
+
+                    processedInBatch++
+
+                    // Yield to event loop after processing a batch
+                    // This prevents blocking the event loop for too long when there are many offline nodes
+                    if (processedInBatch >= BATCH_SIZE) {
+                        processedInBatch = 0
+                        await yieldToEventLoop()
+                    }
                }
+
                isProcessing = false
            }
+
            promise().catch(error => onUnexpectedError(error, 'processing offline nodes'))
        }
 
@@ -1435,7 +1549,7 @@ const makeMessagesRecvSocket = (config) => {
 
    const offlineNodeProcessor = makeOfflineNodeProcessor()
 
-    const processNode = (type, node, identifier, exec) => {
+    const processNode = async (type, node, identifier, exec) => {
        const isOffline = !!node.attrs.offline
 
        if (isOffline) {
@@ -1443,28 +1557,36 @@ const makeMessagesRecvSocket = (config) => {
        }
 
        else {
-            processNodeWithBuffer(node, identifier, exec)
+            await processNodeWithBuffer(node, identifier, exec)
        }
    }
 
    // recv a message
-    ws.on('CB:message', (node) => {
-        processNode('message', node, 'processing message', handleMessage)
+    ws.on('CB:message', async (node) => {
+        await processNode('message', node, 'processing message', handleMessage)
    })
+
    ws.on('CB:call', async (node) => {
-        processNode('call', node, 'handling call', handleCall)
+        await processNode('call', node, 'handling call', handleCall)
    })
-
-
+
+    ws.on('CB:receipt', async (node) => {
+        await processNode('receipt', node, 'handling receipt', handleReceipt)
    })
+
    ws.on('CB:notification', async (node) => {
-        processNode('notification', node, 'handling notification', handleNotification)
+        await processNode('notification', node, 'handling notification', handleNotification)
    })
+
    ws.on('CB:ack,class:message', (node) => {
-        handleBadAck(node)
-            .catch(error => onUnexpectedError(error, 'handling bad ack'))
+        handleBadAck(node).catch(error => onUnexpectedError(error, 'handling bad ack'))
    })
-
+
+    ev.on('call', async ([call]) => {
+        if (!call) {
+            return;
+        }
+
        // missed call + group call notification message generation
        if (call.status === 'timeout' || (call.status === 'offer' && call.isGroup)) {
            const msg = {
@@ -1473,12 +1595,14 @@ const makeMessagesRecvSocket = (config) => {
                    id: call.id,
                    fromMe: false
                },
-                messageTimestamp: unixTimestampSeconds(call.date)
+                messageTimestamp: unixTimestampSeconds(call.date)
            }
 
            if (call.status === 'timeout') {
                if (call.isGroup) {
-                    msg.messageStubType = call.isVideo
+                    msg.messageStubType = call.isVideo
+                        ? WAMessageStubType.CALL_MISSED_GROUP_VIDEO
+                        : WAMessageStubType.CALL_MISSED_GROUP_VOICE
                }
 
                else {
@@ -1487,11 +1611,12 @@ const makeMessagesRecvSocket = (config) => {
            }
 
            else {
-                msg.message = { call: { callKey: Buffer.from(call.id) } }
+                msg.message = { call: { callKey: Buffer.from(call.id) } };
            }
 
            const protoMsg = proto.WebMessageInfo.fromObject(msg)
-
+
+            await upsertMessage(protoMsg, call.offline ? 'append' : 'notify')
        }
    })
 
```
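
For reference, the batched offline-node drain introduced at the end of this file reduces to the standalone sketch below. It mirrors the diff's `BATCH_SIZE`/`yieldToEventLoop` pattern, but simplifies the queue item shape and omits the `ws.isOpen` and `isProcessing` guards, so treat it as an illustration rather than the shipped code.

```js
// Standalone sketch of the yield-per-batch pattern from makeOfflineNodeProcessor.
// Processing stays strictly sequential, but control returns to the event loop
// every BATCH_SIZE nodes so a large offline backlog cannot starve other I/O
// (keepalive pings, websocket reads, timers).
const BATCH_SIZE = 10

const yieldToEventLoop = () => new Promise(resolve => setImmediate(resolve))

async function drainOfflineNodes(nodes, processors) {
    let processedInBatch = 0

    while (nodes.length) {
        const { type, node } = nodes.shift()
        const processor = processors.get(type)

        if (!processor) continue // unknown node type, skip it

        await processor(node)

        // Yield after each batch instead of after every node: setImmediate
        // costs a macrotask, so batching amortizes that overhead while still
        // bounding how long the loop can monopolize the thread.
        if (++processedInBatch >= BATCH_SIZE) {
            processedInBatch = 0
            await yieldToEventLoop()
        }
    }
}
```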