@kelvdra/baileys 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +1478 -0
- package/WAProto/GenerateStatics.sh +3 -0
- package/WAProto/WAProto.proto +4633 -0
- package/WAProto/fix-imports.js +29 -0
- package/WAProto/index.d.ts +37016 -0
- package/WAProto/index.js +169659 -0
- package/engine-requirements.js +10 -0
- package/lib/Defaults/index.d.ts +62 -0
- package/lib/Defaults/index.d.ts.map +1 -0
- package/lib/Defaults/index.js +115 -0
- package/lib/Defaults/index.js.map +1 -0
- package/lib/Signal/Group/ciphertext-message.d.ts +10 -0
- package/lib/Signal/Group/ciphertext-message.d.ts.map +1 -0
- package/lib/Signal/Group/ciphertext-message.js +12 -0
- package/lib/Signal/Group/ciphertext-message.js.map +1 -0
- package/lib/Signal/Group/group-session-builder.d.ts +15 -0
- package/lib/Signal/Group/group-session-builder.d.ts.map +1 -0
- package/lib/Signal/Group/group-session-builder.js +30 -0
- package/lib/Signal/Group/group-session-builder.js.map +1 -0
- package/lib/Signal/Group/group_cipher.d.ts +17 -0
- package/lib/Signal/Group/group_cipher.d.ts.map +1 -0
- package/lib/Signal/Group/group_cipher.js +82 -0
- package/lib/Signal/Group/group_cipher.js.map +1 -0
- package/lib/Signal/Group/index.d.ts +12 -0
- package/lib/Signal/Group/index.d.ts.map +1 -0
- package/lib/Signal/Group/index.js +12 -0
- package/lib/Signal/Group/index.js.map +1 -0
- package/lib/Signal/Group/keyhelper.d.ts +11 -0
- package/lib/Signal/Group/keyhelper.d.ts.map +1 -0
- package/lib/Signal/Group/keyhelper.js +18 -0
- package/lib/Signal/Group/keyhelper.js.map +1 -0
- package/lib/Signal/Group/sender-chain-key.d.ts +14 -0
- package/lib/Signal/Group/sender-chain-key.d.ts.map +1 -0
- package/lib/Signal/Group/sender-chain-key.js +26 -0
- package/lib/Signal/Group/sender-chain-key.js.map +1 -0
- package/lib/Signal/Group/sender-key-distribution-message.d.ts +17 -0
- package/lib/Signal/Group/sender-key-distribution-message.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-distribution-message.js +63 -0
- package/lib/Signal/Group/sender-key-distribution-message.js.map +1 -0
- package/lib/Signal/Group/sender-key-message.d.ts +19 -0
- package/lib/Signal/Group/sender-key-message.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-message.js +66 -0
- package/lib/Signal/Group/sender-key-message.js.map +1 -0
- package/lib/Signal/Group/sender-key-name.d.ts +18 -0
- package/lib/Signal/Group/sender-key-name.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-name.js +48 -0
- package/lib/Signal/Group/sender-key-name.js.map +1 -0
- package/lib/Signal/Group/sender-key-record.d.ts +31 -0
- package/lib/Signal/Group/sender-key-record.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-record.js +41 -0
- package/lib/Signal/Group/sender-key-record.js.map +1 -0
- package/lib/Signal/Group/sender-key-state.d.ts +39 -0
- package/lib/Signal/Group/sender-key-state.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-state.js +84 -0
- package/lib/Signal/Group/sender-key-state.js.map +1 -0
- package/lib/Signal/Group/sender-message-key.d.ts +12 -0
- package/lib/Signal/Group/sender-message-key.d.ts.map +1 -0
- package/lib/Signal/Group/sender-message-key.js +26 -0
- package/lib/Signal/Group/sender-message-key.js.map +1 -0
- package/lib/Signal/libsignal.d.ts +5 -0
- package/lib/Signal/libsignal.d.ts.map +1 -0
- package/lib/Signal/libsignal.js +342 -0
- package/lib/Signal/libsignal.js.map +1 -0
- package/lib/Signal/lid-mapping.d.ts +23 -0
- package/lib/Signal/lid-mapping.d.ts.map +1 -0
- package/lib/Signal/lid-mapping.js +171 -0
- package/lib/Signal/lid-mapping.js.map +1 -0
- package/lib/Socket/Client/index.d.ts +3 -0
- package/lib/Socket/Client/index.d.ts.map +1 -0
- package/lib/Socket/Client/index.js +3 -0
- package/lib/Socket/Client/index.js.map +1 -0
- package/lib/Socket/Client/types.d.ts +16 -0
- package/lib/Socket/Client/types.d.ts.map +1 -0
- package/lib/Socket/Client/types.js +11 -0
- package/lib/Socket/Client/types.js.map +1 -0
- package/lib/Socket/Client/websocket.d.ts +13 -0
- package/lib/Socket/Client/websocket.d.ts.map +1 -0
- package/lib/Socket/Client/websocket.js +50 -0
- package/lib/Socket/Client/websocket.js.map +1 -0
- package/lib/Socket/business.d.ts +188 -0
- package/lib/Socket/business.d.ts.map +1 -0
- package/lib/Socket/business.js +376 -0
- package/lib/Socket/business.js.map +1 -0
- package/lib/Socket/chats.d.ts +98 -0
- package/lib/Socket/chats.d.ts.map +1 -0
- package/lib/Socket/chats.js +962 -0
- package/lib/Socket/chats.js.map +1 -0
- package/lib/Socket/communities.d.ts +244 -0
- package/lib/Socket/communities.d.ts.map +1 -0
- package/lib/Socket/communities.js +431 -0
- package/lib/Socket/communities.js.map +1 -0
- package/lib/Socket/groups.d.ts +137 -0
- package/lib/Socket/groups.d.ts.map +1 -0
- package/lib/Socket/groups.js +327 -0
- package/lib/Socket/groups.js.map +1 -0
- package/lib/Socket/hydra.d.ts +174 -0
- package/lib/Socket/hydra.js +715 -0
- package/lib/Socket/index.d.ts +231 -0
- package/lib/Socket/index.d.ts.map +1 -0
- package/lib/Socket/index.js +18 -0
- package/lib/Socket/index.js.map +1 -0
- package/lib/Socket/messages-recv.d.ts +173 -0
- package/lib/Socket/messages-recv.d.ts.map +1 -0
- package/lib/Socket/messages-recv.js +1228 -0
- package/lib/Socket/messages-recv.js.map +1 -0
- package/lib/Socket/messages-send.d.ts +169 -0
- package/lib/Socket/messages-send.d.ts.map +1 -0
- package/lib/Socket/messages-send.js +1367 -0
- package/lib/Socket/messages-send.js.map +1 -0
- package/lib/Socket/mex.d.ts +3 -0
- package/lib/Socket/mex.d.ts.map +1 -0
- package/lib/Socket/mex.js +42 -0
- package/lib/Socket/mex.js.map +1 -0
- package/lib/Socket/newsletter.d.ts +147 -0
- package/lib/Socket/newsletter.d.ts.map +1 -0
- package/lib/Socket/newsletter.js +181 -0
- package/lib/Socket/newsletter.js.map +1 -0
- package/lib/Socket/socket.d.ts +51 -0
- package/lib/Socket/socket.d.ts.map +1 -0
- package/lib/Socket/socket.js +841 -0
- package/lib/Socket/socket.js.map +1 -0
- package/lib/Store/index.d.ts +4 -0
- package/lib/Store/index.js +4 -0
- package/lib/Store/make-cache-manager-store.d.ts +14 -0
- package/lib/Store/make-cache-manager-store.js +81 -0
- package/lib/Store/make-in-memory-store.d.ts +123 -0
- package/lib/Store/make-in-memory-store.js +416 -0
- package/lib/Store/make-ordered-dictionary.d.ts +12 -0
- package/lib/Store/make-ordered-dictionary.js +82 -0
- package/lib/Store/object-repository.d.ts +10 -0
- package/lib/Store/object-repository.js +31 -0
- package/lib/Types/Auth.d.ts +111 -0
- package/lib/Types/Auth.d.ts.map +1 -0
- package/lib/Types/Auth.js +2 -0
- package/lib/Types/Auth.js.map +1 -0
- package/lib/Types/Bussines.d.ts +25 -0
- package/lib/Types/Bussines.d.ts.map +1 -0
- package/lib/Types/Bussines.js +2 -0
- package/lib/Types/Bussines.js.map +1 -0
- package/lib/Types/Call.d.ts +14 -0
- package/lib/Types/Call.d.ts.map +1 -0
- package/lib/Types/Call.js +2 -0
- package/lib/Types/Call.js.map +1 -0
- package/lib/Types/Chat.d.ts +123 -0
- package/lib/Types/Chat.d.ts.map +1 -0
- package/lib/Types/Chat.js +8 -0
- package/lib/Types/Chat.js.map +1 -0
- package/lib/Types/Contact.d.ts +24 -0
- package/lib/Types/Contact.d.ts.map +1 -0
- package/lib/Types/Contact.js +2 -0
- package/lib/Types/Contact.js.map +1 -0
- package/lib/Types/Events.d.ts +202 -0
- package/lib/Types/Events.d.ts.map +1 -0
- package/lib/Types/Events.js +2 -0
- package/lib/Types/Events.js.map +1 -0
- package/lib/Types/GroupMetadata.d.ts +67 -0
- package/lib/Types/GroupMetadata.d.ts.map +1 -0
- package/lib/Types/GroupMetadata.js +2 -0
- package/lib/Types/GroupMetadata.js.map +1 -0
- package/lib/Types/Label.d.ts +47 -0
- package/lib/Types/Label.d.ts.map +1 -0
- package/lib/Types/Label.js +25 -0
- package/lib/Types/Label.js.map +1 -0
- package/lib/Types/LabelAssociation.d.ts +30 -0
- package/lib/Types/LabelAssociation.d.ts.map +1 -0
- package/lib/Types/LabelAssociation.js +7 -0
- package/lib/Types/LabelAssociation.js.map +1 -0
- package/lib/Types/Message.d.ts +303 -0
- package/lib/Types/Message.d.ts.map +1 -0
- package/lib/Types/Message.js +11 -0
- package/lib/Types/Message.js.map +1 -0
- package/lib/Types/Newsletter.d.ts +135 -0
- package/lib/Types/Newsletter.d.ts.map +1 -0
- package/lib/Types/Newsletter.js +31 -0
- package/lib/Types/Newsletter.js.map +1 -0
- package/lib/Types/Product.d.ts +79 -0
- package/lib/Types/Product.d.ts.map +1 -0
- package/lib/Types/Product.js +2 -0
- package/lib/Types/Product.js.map +1 -0
- package/lib/Types/Signal.d.ts +76 -0
- package/lib/Types/Signal.d.ts.map +1 -0
- package/lib/Types/Signal.js +2 -0
- package/lib/Types/Signal.js.map +1 -0
- package/lib/Types/Socket.d.ts +133 -0
- package/lib/Types/Socket.d.ts.map +1 -0
- package/lib/Types/Socket.js +3 -0
- package/lib/Types/Socket.js.map +1 -0
- package/lib/Types/State.d.ts +39 -0
- package/lib/Types/State.d.ts.map +1 -0
- package/lib/Types/State.js +13 -0
- package/lib/Types/State.js.map +1 -0
- package/lib/Types/USync.d.ts +26 -0
- package/lib/Types/USync.d.ts.map +1 -0
- package/lib/Types/USync.js +2 -0
- package/lib/Types/USync.js.map +1 -0
- package/lib/Types/index.d.ts +65 -0
- package/lib/Types/index.d.ts.map +1 -0
- package/lib/Types/index.js +26 -0
- package/lib/Types/index.js.map +1 -0
- package/lib/Utils/auth-utils.d.ts +19 -0
- package/lib/Utils/auth-utils.d.ts.map +1 -0
- package/lib/Utils/auth-utils.js +257 -0
- package/lib/Utils/auth-utils.js.map +1 -0
- package/lib/Utils/baileys-event-stream.d.ts +17 -0
- package/lib/Utils/baileys-event-stream.d.ts.map +1 -0
- package/lib/Utils/baileys-event-stream.js +56 -0
- package/lib/Utils/baileys-event-stream.js.map +1 -0
- package/lib/Utils/browser-utils.d.ts +4 -0
- package/lib/Utils/browser-utils.d.ts.map +1 -0
- package/lib/Utils/browser-utils.js +28 -0
- package/lib/Utils/browser-utils.js.map +1 -0
- package/lib/Utils/business.d.ts +23 -0
- package/lib/Utils/business.d.ts.map +1 -0
- package/lib/Utils/business.js +231 -0
- package/lib/Utils/business.js.map +1 -0
- package/lib/Utils/chat-utils.d.ts +70 -0
- package/lib/Utils/chat-utils.d.ts.map +1 -0
- package/lib/Utils/chat-utils.js +763 -0
- package/lib/Utils/chat-utils.js.map +1 -0
- package/lib/Utils/crypto.d.ts +41 -0
- package/lib/Utils/crypto.d.ts.map +1 -0
- package/lib/Utils/crypto.js +142 -0
- package/lib/Utils/crypto.js.map +1 -0
- package/lib/Utils/decode-wa-message.d.ts +48 -0
- package/lib/Utils/decode-wa-message.d.ts.map +1 -0
- package/lib/Utils/decode-wa-message.js +279 -0
- package/lib/Utils/decode-wa-message.js.map +1 -0
- package/lib/Utils/event-buffer.d.ts +34 -0
- package/lib/Utils/event-buffer.d.ts.map +1 -0
- package/lib/Utils/event-buffer.js +548 -0
- package/lib/Utils/event-buffer.js.map +1 -0
- package/lib/Utils/generics.d.ts +90 -0
- package/lib/Utils/generics.d.ts.map +1 -0
- package/lib/Utils/generics.js +381 -0
- package/lib/Utils/generics.js.map +1 -0
- package/lib/Utils/history.d.ts +19 -0
- package/lib/Utils/history.d.ts.map +1 -0
- package/lib/Utils/history.js +84 -0
- package/lib/Utils/history.js.map +1 -0
- package/lib/Utils/index.d.ts +20 -0
- package/lib/Utils/index.d.ts.map +1 -0
- package/lib/Utils/index.js +20 -0
- package/lib/Utils/index.js.map +1 -0
- package/lib/Utils/link-preview.d.ts +21 -0
- package/lib/Utils/link-preview.d.ts.map +1 -0
- package/lib/Utils/link-preview.js +85 -0
- package/lib/Utils/link-preview.js.map +1 -0
- package/lib/Utils/logger.d.ts +12 -0
- package/lib/Utils/logger.d.ts.map +1 -0
- package/lib/Utils/logger.js +3 -0
- package/lib/Utils/logger.js.map +1 -0
- package/lib/Utils/lt-hash.d.ts +13 -0
- package/lib/Utils/lt-hash.d.ts.map +1 -0
- package/lib/Utils/lt-hash.js +48 -0
- package/lib/Utils/lt-hash.js.map +1 -0
- package/lib/Utils/make-mutex.d.ts +8 -0
- package/lib/Utils/make-mutex.d.ts.map +1 -0
- package/lib/Utils/make-mutex.js +40 -0
- package/lib/Utils/make-mutex.js.map +1 -0
- package/lib/Utils/message-retry-manager.d.ts +82 -0
- package/lib/Utils/message-retry-manager.d.ts.map +1 -0
- package/lib/Utils/message-retry-manager.js +149 -0
- package/lib/Utils/message-retry-manager.js.map +1 -0
- package/lib/Utils/messages-media.d.ts +114 -0
- package/lib/Utils/messages-media.d.ts.map +1 -0
- package/lib/Utils/messages-media.js +684 -0
- package/lib/Utils/messages-media.js.map +1 -0
- package/lib/Utils/messages.d.ts +76 -0
- package/lib/Utils/messages.d.ts.map +1 -0
- package/lib/Utils/messages.js +820 -0
- package/lib/Utils/messages.js.map +1 -0
- package/lib/Utils/noise-handler.d.ts +20 -0
- package/lib/Utils/noise-handler.d.ts.map +1 -0
- package/lib/Utils/noise-handler.js +147 -0
- package/lib/Utils/noise-handler.js.map +1 -0
- package/lib/Utils/pre-key-manager.d.ts +28 -0
- package/lib/Utils/pre-key-manager.d.ts.map +1 -0
- package/lib/Utils/pre-key-manager.js +106 -0
- package/lib/Utils/pre-key-manager.js.map +1 -0
- package/lib/Utils/process-message.d.ts +42 -0
- package/lib/Utils/process-message.d.ts.map +1 -0
- package/lib/Utils/process-message.js +413 -0
- package/lib/Utils/process-message.js.map +1 -0
- package/lib/Utils/signal.d.ts +34 -0
- package/lib/Utils/signal.d.ts.map +1 -0
- package/lib/Utils/signal.js +159 -0
- package/lib/Utils/signal.js.map +1 -0
- package/lib/Utils/use-multi-file-auth-state.d.ts +13 -0
- package/lib/Utils/use-multi-file-auth-state.d.ts.map +1 -0
- package/lib/Utils/use-multi-file-auth-state.js +121 -0
- package/lib/Utils/use-multi-file-auth-state.js.map +1 -0
- package/lib/Utils/validate-connection.d.ts +11 -0
- package/lib/Utils/validate-connection.d.ts.map +1 -0
- package/lib/Utils/validate-connection.js +195 -0
- package/lib/Utils/validate-connection.js.map +1 -0
- package/lib/WABinary/constants.d.ts +28 -0
- package/lib/WABinary/constants.d.ts.map +1 -0
- package/lib/WABinary/constants.js +1301 -0
- package/lib/WABinary/constants.js.map +1 -0
- package/lib/WABinary/decode.d.ts +7 -0
- package/lib/WABinary/decode.d.ts.map +1 -0
- package/lib/WABinary/decode.js +238 -0
- package/lib/WABinary/decode.js.map +1 -0
- package/lib/WABinary/encode.d.ts +3 -0
- package/lib/WABinary/encode.d.ts.map +1 -0
- package/lib/WABinary/encode.js +216 -0
- package/lib/WABinary/encode.js.map +1 -0
- package/lib/WABinary/generic-utils.d.ts +15 -0
- package/lib/WABinary/generic-utils.d.ts.map +1 -0
- package/lib/WABinary/generic-utils.js +111 -0
- package/lib/WABinary/generic-utils.js.map +1 -0
- package/lib/WABinary/index.d.ts +6 -0
- package/lib/WABinary/index.d.ts.map +1 -0
- package/lib/WABinary/index.js +6 -0
- package/lib/WABinary/index.js.map +1 -0
- package/lib/WABinary/jid-utils.d.ts +48 -0
- package/lib/WABinary/jid-utils.d.ts.map +1 -0
- package/lib/WABinary/jid-utils.js +96 -0
- package/lib/WABinary/jid-utils.js.map +1 -0
- package/lib/WABinary/types.d.ts +19 -0
- package/lib/WABinary/types.d.ts.map +1 -0
- package/lib/WABinary/types.js +2 -0
- package/lib/WABinary/types.js.map +1 -0
- package/lib/WAM/BinaryInfo.d.ts +9 -0
- package/lib/WAM/BinaryInfo.d.ts.map +1 -0
- package/lib/WAM/BinaryInfo.js +10 -0
- package/lib/WAM/BinaryInfo.js.map +1 -0
- package/lib/WAM/constants.d.ts +40 -0
- package/lib/WAM/constants.d.ts.map +1 -0
- package/lib/WAM/constants.js +22853 -0
- package/lib/WAM/constants.js.map +1 -0
- package/lib/WAM/encode.d.ts +3 -0
- package/lib/WAM/encode.d.ts.map +1 -0
- package/lib/WAM/encode.js +150 -0
- package/lib/WAM/encode.js.map +1 -0
- package/lib/WAM/index.d.ts +4 -0
- package/lib/WAM/index.d.ts.map +1 -0
- package/lib/WAM/index.js +4 -0
- package/lib/WAM/index.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.d.ts +10 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.js +29 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.d.ts +23 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +54 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.d.ts +13 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +27 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.d.ts +13 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js +38 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.d.ts +26 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +51 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.d.ts +10 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +29 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/index.d.ts +5 -0
- package/lib/WAUSync/Protocols/index.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/index.js +5 -0
- package/lib/WAUSync/Protocols/index.js.map +1 -0
- package/lib/WAUSync/USyncQuery.d.ts +29 -0
- package/lib/WAUSync/USyncQuery.d.ts.map +1 -0
- package/lib/WAUSync/USyncQuery.js +94 -0
- package/lib/WAUSync/USyncQuery.js.map +1 -0
- package/lib/WAUSync/USyncUser.d.ts +13 -0
- package/lib/WAUSync/USyncUser.d.ts.map +1 -0
- package/lib/WAUSync/USyncUser.js +23 -0
- package/lib/WAUSync/USyncUser.js.map +1 -0
- package/lib/WAUSync/index.d.ts +4 -0
- package/lib/WAUSync/index.d.ts.map +1 -0
- package/lib/WAUSync/index.js +4 -0
- package/lib/WAUSync/index.js.map +1 -0
- package/lib/index.d.ts +13 -0
- package/lib/index.d.ts.map +1 -0
- package/lib/index.js +12 -0
- package/lib/index.js.map +1 -0
- package/package.json +104 -0
package/lib/Socket/messages-recv.js
@@ -0,0 +1,1228 @@
+import NodeCache from '@cacheable/node-cache';
+import { Boom } from '@hapi/boom';
+import { randomBytes } from 'crypto';
+import Long from 'long';
+import { proto } from '../../WAProto/index.js';
+import { DEFAULT_CACHE_TTLS, KEY_BUNDLE_TYPE, MIN_PREKEY_COUNT } from '../Defaults/index.js';
+import { WAMessageStatus, WAMessageStubType } from '../Types/index.js';
+import { aesDecryptCTR, aesEncryptGCM, cleanMessage, Curve, decodeMediaRetryNode, decodeMessageNode, decryptMessageNode, delay, derivePairingCodeKey, encodeBigEndian, encodeSignedDeviceIdentity, extractAddressingContext, getCallStatusFromNode, getHistoryMsg, getNextPreKeys, getStatusFromReceiptType, hkdf, MISSING_KEYS_ERROR_TEXT, NACK_REASONS, unixTimestampSeconds, xmppPreKey, xmppSignedPreKey } from '../Utils/index.js';
+import { makeMutex } from '../Utils/make-mutex.js';
+import { areJidsSameUser, binaryNodeToString, getAllBinaryNodeChildren, getBinaryNodeChild, getBinaryNodeChildBuffer, getBinaryNodeChildren, getBinaryNodeChildString, isJidGroup, isJidStatusBroadcast, isLidUser, isPnUser, jidDecode, jidNormalizedUser, S_WHATSAPP_NET } from '../WABinary/index.js';
+import { extractGroupMetadata } from './groups.js';
+import { makeMessagesSocket } from './messages-send.js';
+export const makeMessagesRecvSocket = (config) => {
+    const { logger, retryRequestDelayMs, maxMsgRetryCount, getMessage, shouldIgnoreJid, enableAutoSessionRecreation } = config;
+    const sock = makeMessagesSocket(config);
+    const { ev, authState, ws, processingMutex, signalRepository, query, upsertMessage, resyncAppState, onUnexpectedError, assertSessions, sendNode, relayMessage, sendReceipt, uploadPreKeys, sendPeerDataOperationMessage, messageRetryManager } = sock;
+    /** this mutex ensures that each retryRequest will wait for the previous one to finish */
+    const retryMutex = makeMutex();
+    const msgRetryCache = config.msgRetryCounterCache ||
+        new NodeCache({
+            stdTTL: DEFAULT_CACHE_TTLS.MSG_RETRY, // 1 hour
+            useClones: false
+        });
+    const callOfferCache = config.callOfferCache ||
+        new NodeCache({
+            stdTTL: DEFAULT_CACHE_TTLS.CALL_OFFER, // 5 mins
+            useClones: false
+        });
+    const placeholderResendCache = config.placeholderResendCache ||
+        new NodeCache({
+            stdTTL: DEFAULT_CACHE_TTLS.MSG_RETRY, // 1 hour
+            useClones: false
+        });
+    let sendActiveReceipts = false;
+    const fetchMessageHistory = async (count, oldestMsgKey, oldestMsgTimestamp) => {
+        if (!authState.creds.me?.id) {
+            throw new Boom('Not authenticated');
+        }
+        const pdoMessage = {
+            historySyncOnDemandRequest: {
+                chatJid: oldestMsgKey.remoteJid,
+                oldestMsgFromMe: oldestMsgKey.fromMe,
+                oldestMsgId: oldestMsgKey.id,
+                oldestMsgTimestampMs: oldestMsgTimestamp,
+                onDemandMsgCount: count
+            },
+            peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.HISTORY_SYNC_ON_DEMAND
+        };
+        return sendPeerDataOperationMessage(pdoMessage);
+    };
+    const requestPlaceholderResend = async (messageKey) => {
+        if (!authState.creds.me?.id) {
+            throw new Boom('Not authenticated');
+        }
+        if (placeholderResendCache.get(messageKey?.id)) {
+            logger.debug({ messageKey }, 'already requested resend');
+            return;
+        }
+        else {
+            placeholderResendCache.set(messageKey?.id, true);
+        }
+        await delay(5000);
+        if (!placeholderResendCache.get(messageKey?.id)) {
+            logger.debug({ messageKey }, 'message received while resend requested');
+            return 'RESOLVED';
+        }
+        const pdoMessage = {
+            placeholderMessageResendRequest: [
+                {
+                    messageKey
+                }
+            ],
+            peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.PLACEHOLDER_MESSAGE_RESEND
+        };
+        setTimeout(() => {
+            if (placeholderResendCache.get(messageKey?.id)) {
+                logger.debug({ messageKey }, 'PDO message without response after 15 seconds. Phone possibly offline');
+                placeholderResendCache.del(messageKey?.id);
+            }
+        }, 15000);
+        return sendPeerDataOperationMessage(pdoMessage);
+    };
+    // Handles mex newsletter notifications
+    const handleMexNewsletterNotification = async (node) => {
+        const mexNode = getBinaryNodeChild(node, 'mex');
+        if (!mexNode?.content) {
+            logger.warn({ node }, 'Invalid mex newsletter notification');
+            return;
+        }
+        let data;
+        try {
+            data = JSON.parse(mexNode.content.toString());
+        }
+        catch (error) {
+            logger.error({ err: error, node }, 'Failed to parse mex newsletter notification');
+            return;
+        }
+        const operation = data?.operation;
+        const updates = data?.updates;
+        if (!updates || !operation) {
+            logger.warn({ data }, 'Invalid mex newsletter notification content');
+            return;
+        }
+        logger.info({ operation, updates }, 'got mex newsletter notification');
+        switch (operation) {
+            case 'NotificationNewsletterUpdate':
+                for (const update of updates) {
+                    if (update.jid && update.settings && Object.keys(update.settings).length > 0) {
+                        ev.emit('newsletter-settings.update', {
+                            id: update.jid,
+                            update: update.settings
+                        });
+                    }
+                }
+                break;
+            case 'NotificationNewsletterAdminPromote':
+                for (const update of updates) {
+                    if (update.jid && update.user) {
+                        ev.emit('newsletter-participants.update', {
+                            id: update.jid,
+                            author: node.attrs.from,
+                            user: update.user,
+                            new_role: 'ADMIN',
+                            action: 'promote'
+                        });
+                    }
+                }
+                break;
+            default:
+                logger.info({ operation, data }, 'Unhandled mex newsletter notification');
+                break;
+        }
+    };
+    // Handles newsletter notifications
+    const handleNewsletterNotification = async (node) => {
+        const from = node.attrs.from;
+        const child = getAllBinaryNodeChildren(node)[0];
+        const author = node.attrs.participant;
+        logger.info({ from, child }, 'got newsletter notification');
+        switch (child.tag) {
+            case 'reaction':
+                const reactionUpdate = {
+                    id: from,
+                    server_id: child.attrs.message_id,
+                    reaction: {
+                        code: getBinaryNodeChildString(child, 'reaction'),
+                        count: 1
+                    }
+                };
+                ev.emit('newsletter.reaction', reactionUpdate);
+                break;
+            case 'view':
+                const viewUpdate = {
+                    id: from,
+                    server_id: child.attrs.message_id,
+                    count: parseInt(child.content?.toString() || '0', 10)
+                };
+                ev.emit('newsletter.view', viewUpdate);
+                break;
+            case 'participant':
+                const participantUpdate = {
+                    id: from,
+                    author,
+                    user: child.attrs.jid,
+                    action: child.attrs.action,
+                    new_role: child.attrs.role
+                };
+                ev.emit('newsletter-participants.update', participantUpdate);
+                break;
+            case 'update':
+                const settingsNode = getBinaryNodeChild(child, 'settings');
+                if (settingsNode) {
+                    const update = {};
+                    const nameNode = getBinaryNodeChild(settingsNode, 'name');
+                    if (nameNode?.content)
+                        update.name = nameNode.content.toString();
+                    const descriptionNode = getBinaryNodeChild(settingsNode, 'description');
+                    if (descriptionNode?.content)
+                        update.description = descriptionNode.content.toString();
+                    ev.emit('newsletter-settings.update', {
+                        id: from,
+                        update
+                    });
+                }
+                break;
+            case 'message':
+                const plaintextNode = getBinaryNodeChild(child, 'plaintext');
+                if (plaintextNode?.content) {
+                    try {
+                        const contentBuf = typeof plaintextNode.content === 'string'
+                            ? Buffer.from(plaintextNode.content, 'binary')
+                            : Buffer.from(plaintextNode.content);
+                        const messageProto = proto.Message.decode(contentBuf).toJSON();
+                        const fullMessage = proto.WebMessageInfo.fromObject({
+                            key: {
+                                remoteJid: from,
+                                id: child.attrs.message_id || child.attrs.server_id,
+                                fromMe: false // TODO: is this really true though
+                            },
+                            message: messageProto,
+                            messageTimestamp: +child.attrs.t
+                        }).toJSON();
+                        await upsertMessage(fullMessage, 'append');
+                        logger.info('Processed plaintext newsletter message');
+                    }
+                    catch (error) {
+                        logger.error({ error }, 'Failed to decode plaintext newsletter message');
+                    }
+                }
+                break;
+            default:
+                logger.warn({ node }, 'Unknown newsletter notification');
+                break;
+        }
+    };
+    const sendMessageAck = async ({ tag, attrs, content }, errorCode) => {
+        const stanza = {
+            tag: 'ack',
+            attrs: {
+                id: attrs.id,
+                to: attrs.from,
+                class: tag
+            }
+        };
+        if (!!errorCode) {
+            stanza.attrs.error = errorCode.toString();
+        }
+        if (!!attrs.participant) {
+            stanza.attrs.participant = attrs.participant;
+        }
+        if (!!attrs.recipient) {
+            stanza.attrs.recipient = attrs.recipient;
+        }
+        if (!!attrs.type &&
+            (tag !== 'message' || getBinaryNodeChild({ tag, attrs, content }, 'unavailable') || errorCode !== 0)) {
+            stanza.attrs.type = attrs.type;
+        }
+        if (tag === 'message' && getBinaryNodeChild({ tag, attrs, content }, 'unavailable')) {
+            stanza.attrs.from = authState.creds.me.id;
+        }
+        logger.debug({ recv: { tag, attrs }, sent: stanza.attrs }, 'sent ack');
+        await sendNode(stanza);
+    };
+    const rejectCall = async (callId, callFrom) => {
+        const stanza = {
+            tag: 'call',
+            attrs: {
+                from: authState.creds.me.id,
+                to: callFrom
+            },
+            content: [
+                {
+                    tag: 'reject',
+                    attrs: {
+                        'call-id': callId,
+                        'call-creator': callFrom,
+                        count: '0'
+                    },
+                    content: undefined
+                }
+            ]
+        };
+        await query(stanza);
+    };
+    const sendRetryRequest = async (node, forceIncludeKeys = false) => {
+        const { fullMessage } = decodeMessageNode(node, authState.creds.me.id, authState.creds.me.lid || '');
+        const { key: msgKey } = fullMessage;
+        const msgId = msgKey.id;
+        if (messageRetryManager) {
+            // Check if we've exceeded max retries using the new system
+            if (messageRetryManager.hasExceededMaxRetries(msgId)) {
+                logger.debug({ msgId }, 'reached retry limit with new retry manager, clearing');
+                messageRetryManager.markRetryFailed(msgId);
+                return;
+            }
+            // Increment retry count using new system
+            const retryCount = messageRetryManager.incrementRetryCount(msgId);
+            // Use the new retry count for the rest of the logic
+            const key = `${msgId}:${msgKey?.participant}`;
+            msgRetryCache.set(key, retryCount);
+        }
+        else {
+            // Fallback to old system
+            const key = `${msgId}:${msgKey?.participant}`;
+            let retryCount = (await msgRetryCache.get(key)) || 0;
+            if (retryCount >= maxMsgRetryCount) {
+                logger.debug({ retryCount, msgId }, 'reached retry limit, clearing');
+                msgRetryCache.del(key);
+                return;
+            }
+            retryCount += 1;
+            await msgRetryCache.set(key, retryCount);
+        }
+        const key = `${msgId}:${msgKey?.participant}`;
+        const retryCount = (await msgRetryCache.get(key)) || 1;
+        const { account, signedPreKey, signedIdentityKey: identityKey } = authState.creds;
+        const fromJid = node.attrs.from;
+        // Check if we should recreate the session
+        let shouldRecreateSession = false;
+        let recreateReason = '';
+        if (enableAutoSessionRecreation && messageRetryManager) {
+            try {
+                // Check if we have a session with this JID
+                const sessionId = signalRepository.jidToSignalProtocolAddress(fromJid);
+                const hasSession = await signalRepository.validateSession(fromJid);
+                const result = messageRetryManager.shouldRecreateSession(fromJid, retryCount, hasSession.exists);
+                shouldRecreateSession = result.recreate;
+                recreateReason = result.reason;
+                if (shouldRecreateSession) {
+                    logger.debug({ fromJid, retryCount, reason: recreateReason }, 'recreating session for retry');
+                    // Delete existing session to force recreation
+                    await authState.keys.set({ session: { [sessionId]: null } });
+                    forceIncludeKeys = true;
+                }
+            }
+            catch (error) {
+                logger.warn({ error, fromJid }, 'failed to check session recreation');
+            }
+        }
+        if (retryCount <= 2) {
+            // Use new retry manager for phone requests if available
+            if (messageRetryManager) {
+                // Schedule phone request with delay (like whatsmeow)
+                messageRetryManager.schedulePhoneRequest(msgId, async () => {
+                    try {
+                        const requestId = await requestPlaceholderResend(msgKey);
+                        logger.debug(`sendRetryRequest: requested placeholder resend (${requestId}) for message ${msgId} (scheduled)`);
+                    }
+                    catch (error) {
+                        logger.warn({ error, msgId }, 'failed to send scheduled phone request');
+                    }
+                });
+            }
+            else {
+                // Fallback to immediate request
+                const msgId = await requestPlaceholderResend(msgKey);
+                logger.debug(`sendRetryRequest: requested placeholder resend for message ${msgId}`);
+            }
+        }
+        const deviceIdentity = encodeSignedDeviceIdentity(account, true);
+        await authState.keys.transaction(async () => {
+            const receipt = {
+                tag: 'receipt',
+                attrs: {
+                    id: msgId,
+                    type: 'retry',
+                    to: node.attrs.from
+                },
+                content: [
+                    {
+                        tag: 'retry',
+                        attrs: {
+                            count: retryCount.toString(),
+                            id: node.attrs.id,
+                            t: node.attrs.t,
+                            v: '1',
+                            // ADD ERROR FIELD
+                            error: '0'
+                        }
+                    },
+                    {
+                        tag: 'registration',
+                        attrs: {},
+                        content: encodeBigEndian(authState.creds.registrationId)
+                    }
+                ]
+            };
+            if (node.attrs.recipient) {
+                receipt.attrs.recipient = node.attrs.recipient;
+            }
+            if (node.attrs.participant) {
+                receipt.attrs.participant = node.attrs.participant;
+            }
+            if (retryCount > 1 || forceIncludeKeys || shouldRecreateSession) {
+                const { update, preKeys } = await getNextPreKeys(authState, 1);
+                const [keyId] = Object.keys(preKeys);
+                const key = preKeys[+keyId];
+                const content = receipt.content;
+                content.push({
+                    tag: 'keys',
+                    attrs: {},
+                    content: [
+                        { tag: 'type', attrs: {}, content: Buffer.from(KEY_BUNDLE_TYPE) },
+                        { tag: 'identity', attrs: {}, content: identityKey.public },
+                        xmppPreKey(key, +keyId),
+                        xmppSignedPreKey(signedPreKey),
+                        { tag: 'device-identity', attrs: {}, content: deviceIdentity }
+                    ]
+                });
+                ev.emit('creds.update', update);
+            }
+            await sendNode(receipt);
+            logger.info({ msgAttrs: node.attrs, retryCount }, 'sent retry receipt');
+        }, authState?.creds?.me?.id || 'sendRetryRequest');
+    };
+    const handleEncryptNotification = async (node) => {
+        const from = node.attrs.from;
+        if (from === S_WHATSAPP_NET) {
+            const countChild = getBinaryNodeChild(node, 'count');
+            const count = +countChild.attrs.value;
+            const shouldUploadMorePreKeys = count < MIN_PREKEY_COUNT;
+            logger.debug({ count, shouldUploadMorePreKeys }, 'recv pre-key count');
+            if (shouldUploadMorePreKeys) {
+                await uploadPreKeys();
+            }
+        }
+        else {
+            const identityNode = getBinaryNodeChild(node, 'identity');
+            if (identityNode) {
+                logger.info({ jid: from }, 'identity changed');
+                // not handling right now
+                // signal will override new identity anyway
+            }
+            else {
+                logger.info({ node }, 'unknown encrypt notification');
+            }
+        }
+    };
+    const handleGroupNotification = (fullNode, child, msg) => {
+        // TODO: Support PN/LID (Here is only LID now)
+        const actingParticipantLid = fullNode.attrs.participant;
+        const actingParticipantPn = fullNode.attrs.participant_pn;
+        const affectedParticipantLid = getBinaryNodeChild(child, 'participant')?.attrs?.jid || actingParticipantLid;
+        const affectedParticipantPn = getBinaryNodeChild(child, 'participant')?.attrs?.phone_number || actingParticipantPn;
+        switch (child?.tag) {
+            case 'create':
+                const metadata = extractGroupMetadata(child);
+                msg.messageStubType = WAMessageStubType.GROUP_CREATE;
+                msg.messageStubParameters = [metadata.subject];
+                msg.key = { participant: metadata.owner, participantAlt: metadata.ownerPn };
+                ev.emit('chats.upsert', [
+                    {
+                        id: metadata.id,
+                        name: metadata.subject,
+                        conversationTimestamp: metadata.creation
+                    }
+                ]);
+                ev.emit('groups.upsert', [
+                    {
+                        ...metadata,
+                        author: actingParticipantLid,
+                        authorPn: actingParticipantPn
+                    }
+                ]);
+                break;
+            case 'ephemeral':
+            case 'not_ephemeral':
+                msg.message = {
+                    protocolMessage: {
+                        type: proto.Message.ProtocolMessage.Type.EPHEMERAL_SETTING,
+                        ephemeralExpiration: +(child.attrs.expiration || 0)
+                    }
+                };
+                break;
+            case 'modify':
+                const oldNumber = getBinaryNodeChildren(child, 'participant').map(p => p.attrs.jid);
+                msg.messageStubParameters = oldNumber || [];
+                msg.messageStubType = WAMessageStubType.GROUP_PARTICIPANT_CHANGE_NUMBER;
+                break;
+            case 'promote':
+            case 'demote':
+            case 'remove':
+            case 'add':
+            case 'leave':
+                const stubType = `GROUP_PARTICIPANT_${child.tag.toUpperCase()}`;
+                msg.messageStubType = WAMessageStubType[stubType];
+                const participants = getBinaryNodeChildren(child, 'participant').map(({ attrs }) => {
+                    // TODO: Store LID MAPPINGS
+                    return {
+                        id: attrs.jid,
+                        phoneNumber: isLidUser(attrs.jid) && isPnUser(attrs.phone_number) ? attrs.phone_number : undefined,
+                        lid: isPnUser(attrs.jid) && isLidUser(attrs.lid) ? attrs.lid : undefined,
+                        admin: (attrs.type || null)
+                    };
+                });
+                if (participants.length === 1 &&
+                    // if recv. "remove" message and sender removed themselves
+                    // mark as left
+                    (areJidsSameUser(participants[0].id, actingParticipantLid) ||
+                        areJidsSameUser(participants[0].id, actingParticipantPn)) &&
+                    child.tag === 'remove') {
+                    msg.messageStubType = WAMessageStubType.GROUP_PARTICIPANT_LEAVE;
+                }
+                msg.messageStubParameters = participants.map(a => JSON.stringify(a));
+                break;
+            case 'subject':
+                msg.messageStubType = WAMessageStubType.GROUP_CHANGE_SUBJECT;
+                msg.messageStubParameters = [child.attrs.subject];
+                break;
+            case 'description':
+                const description = getBinaryNodeChild(child, 'body')?.content?.toString();
+                msg.messageStubType = WAMessageStubType.GROUP_CHANGE_DESCRIPTION;
+                msg.messageStubParameters = description ? [description] : undefined;
+                break;
+            case 'announcement':
+            case 'not_announcement':
+                msg.messageStubType = WAMessageStubType.GROUP_CHANGE_ANNOUNCE;
+                msg.messageStubParameters = [child.tag === 'announcement' ? 'on' : 'off'];
+                break;
+            case 'locked':
+            case 'unlocked':
+                msg.messageStubType = WAMessageStubType.GROUP_CHANGE_RESTRICT;
+                msg.messageStubParameters = [child.tag === 'locked' ? 'on' : 'off'];
+                break;
+            case 'invite':
+                msg.messageStubType = WAMessageStubType.GROUP_CHANGE_INVITE_LINK;
+                msg.messageStubParameters = [child.attrs.code];
+                break;
+            case 'member_add_mode':
+                const addMode = child.content;
+                if (addMode) {
+                    msg.messageStubType = WAMessageStubType.GROUP_MEMBER_ADD_MODE;
+                    msg.messageStubParameters = [addMode.toString()];
+                }
+                break;
+            case 'membership_approval_mode':
+                const approvalMode = getBinaryNodeChild(child, 'group_join');
+                if (approvalMode) {
+                    msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_MODE;
+                    msg.messageStubParameters = [approvalMode.attrs.state];
+                }
+                break;
+            case 'created_membership_requests':
+                msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD;
+                msg.messageStubParameters = [
+                    JSON.stringify({ lid: affectedParticipantLid, pn: affectedParticipantPn }),
+                    'created',
+                    child.attrs.request_method
+                ];
+                break;
+            case 'revoked_membership_requests':
+                const isDenied = areJidsSameUser(affectedParticipantLid, actingParticipantLid);
+                // TODO: LIDMAPPING SUPPORT
+                msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD;
+                msg.messageStubParameters = [
+                    JSON.stringify({ lid: affectedParticipantLid, pn: affectedParticipantPn }),
+                    isDenied ? 'revoked' : 'rejected'
+                ];
+                break;
+        }
+    };
+    const processNotification = async (node) => {
+        const result = {};
+        const [child] = getAllBinaryNodeChildren(node);
+        const nodeType = node.attrs.type;
+        const from = jidNormalizedUser(node.attrs.from);
+        switch (nodeType) {
+            case 'privacy_token':
+                const tokenList = getBinaryNodeChildren(child, 'token');
+                for (const { attrs, content } of tokenList) {
+                    const jid = attrs.jid;
+                    ev.emit('chats.update', [
+                        {
+                            id: jid,
+                            tcToken: content
+                        }
+                    ]);
+                    logger.debug({ jid }, 'got privacy token update');
+                }
+                break;
+            case 'newsletter':
+                await handleNewsletterNotification(node);
+                break;
+            case 'mex':
+                await handleMexNewsletterNotification(node);
+                break;
+            case 'w:gp2':
+                // TODO: HANDLE PARTICIPANT_PN
+                handleGroupNotification(node, child, result);
+                break;
+            case 'mediaretry':
+                const event = decodeMediaRetryNode(node);
+                ev.emit('messages.media-update', [event]);
+                break;
+            case 'encrypt':
+                await handleEncryptNotification(node);
+                break;
+            case 'devices':
+                const devices = getBinaryNodeChildren(child, 'device');
+                if (areJidsSameUser(child.attrs.jid, authState.creds.me.id) ||
+                    areJidsSameUser(child.attrs.lid, authState.creds.me.lid)) {
+                    const deviceData = devices.map(d => ({ id: d.attrs.jid, lid: d.attrs.lid }));
+                    logger.info({ deviceData }, 'my own devices changed');
+                }
+                //TODO: drop a new event, add hashes
+                break;
+            case 'server_sync':
+                const update = getBinaryNodeChild(node, 'collection');
+                if (update) {
+                    const name = update.attrs.name;
+                    await resyncAppState([name], false);
+                }
+                break;
+            case 'picture':
+                const setPicture = getBinaryNodeChild(node, 'set');
+                const delPicture = getBinaryNodeChild(node, 'delete');
+                ev.emit('contacts.update', [
+                    {
+                        id: jidNormalizedUser(node?.attrs?.from) || (setPicture || delPicture)?.attrs?.hash || '',
+                        imgUrl: setPicture ? 'changed' : 'removed'
+                    }
+                ]);
+                if (isJidGroup(from)) {
+                    const node = setPicture || delPicture;
+                    result.messageStubType = WAMessageStubType.GROUP_CHANGE_ICON;
+                    if (setPicture) {
+                        result.messageStubParameters = [setPicture.attrs.id];
+                    }
+                    result.participant = node?.attrs.author;
+                    result.key = {
+                        ...(result.key || {}),
+                        participant: setPicture?.attrs.author
+                    };
+                }
+                break;
+            case 'account_sync':
+                if (child.tag === 'disappearing_mode') {
+                    const newDuration = +child.attrs.duration;
+                    const timestamp = +child.attrs.t;
+                    logger.info({ newDuration }, 'updated account disappearing mode');
+                    ev.emit('creds.update', {
+                        accountSettings: {
+                            ...authState.creds.accountSettings,
+                            defaultDisappearingMode: {
+                                ephemeralExpiration: newDuration,
+                                ephemeralSettingTimestamp: timestamp
+                            }
+                        }
+                    });
+                }
+                else if (child.tag === 'blocklist') {
+                    const blocklists = getBinaryNodeChildren(child, 'item');
+                    for (const { attrs } of blocklists) {
+                        const blocklist = [attrs.jid];
+                        const type = attrs.action === 'block' ? 'add' : 'remove';
+                        ev.emit('blocklist.update', { blocklist, type });
+                    }
+                }
+                break;
+            case 'link_code_companion_reg':
+                const linkCodeCompanionReg = getBinaryNodeChild(node, 'link_code_companion_reg');
+                const ref = toRequiredBuffer(getBinaryNodeChildBuffer(linkCodeCompanionReg, 'link_code_pairing_ref'));
+                const primaryIdentityPublicKey = toRequiredBuffer(getBinaryNodeChildBuffer(linkCodeCompanionReg, 'primary_identity_pub'));
+                const primaryEphemeralPublicKeyWrapped = toRequiredBuffer(getBinaryNodeChildBuffer(linkCodeCompanionReg, 'link_code_pairing_wrapped_primary_ephemeral_pub'));
+                const codePairingPublicKey = await decipherLinkPublicKey(primaryEphemeralPublicKeyWrapped);
+                const companionSharedKey = Curve.sharedKey(authState.creds.pairingEphemeralKeyPair.private, codePairingPublicKey);
+                const random = randomBytes(32);
+                const linkCodeSalt = randomBytes(32);
+                const linkCodePairingExpanded = await hkdf(companionSharedKey, 32, {
+                    salt: linkCodeSalt,
+                    info: 'link_code_pairing_key_bundle_encryption_key'
+                });
+                const encryptPayload = Buffer.concat([
+                    Buffer.from(authState.creds.signedIdentityKey.public),
+                    primaryIdentityPublicKey,
+                    random
+                ]);
+                const encryptIv = randomBytes(12);
+                const encrypted = aesEncryptGCM(encryptPayload, linkCodePairingExpanded, encryptIv, Buffer.alloc(0));
+                const encryptedPayload = Buffer.concat([linkCodeSalt, encryptIv, encrypted]);
+                const identitySharedKey = Curve.sharedKey(authState.creds.signedIdentityKey.private, primaryIdentityPublicKey);
+                const identityPayload = Buffer.concat([companionSharedKey, identitySharedKey, random]);
+                authState.creds.advSecretKey = (await hkdf(identityPayload, 32, { info: 'adv_secret' })).toString('base64');
+                await query({
+                    tag: 'iq',
+                    attrs: {
+                        to: S_WHATSAPP_NET,
+                        type: 'set',
+                        id: sock.generateMessageTag(),
+                        xmlns: 'md'
+                    },
+                    content: [
+                        {
+                            tag: 'link_code_companion_reg',
+                            attrs: {
+                                jid: authState.creds.me.id,
+                                stage: 'companion_finish'
+                            },
+                            content: [
+                                {
+                                    tag: 'link_code_pairing_wrapped_key_bundle',
+                                    attrs: {},
+                                    content: encryptedPayload
+                                },
+                                {
+                                    tag: 'companion_identity_public',
+                                    attrs: {},
+                                    content: authState.creds.signedIdentityKey.public
+                                },
+                                {
+                                    tag: 'link_code_pairing_ref',
+                                    attrs: {},
+                                    content: ref
+                                }
+                            ]
+                        }
+                    ]
+                });
+                authState.creds.registered = true;
+                ev.emit('creds.update', authState.creds);
+        }
+        if (Object.keys(result).length) {
+            return result;
+        }
+    };
+    async function decipherLinkPublicKey(data) {
+        const buffer = toRequiredBuffer(data);
+        const salt = buffer.slice(0, 32);
+        const secretKey = await derivePairingCodeKey(authState.creds.pairingCode, salt);
+        const iv = buffer.slice(32, 48);
+        const payload = buffer.slice(48, 80);
+        return aesDecryptCTR(payload, secretKey, iv);
+    }
+    function toRequiredBuffer(data) {
+        if (data === undefined) {
+            throw new Boom('Invalid buffer', { statusCode: 400 });
+        }
+        return data instanceof Buffer ? data : Buffer.from(data);
+    }
+    const willSendMessageAgain = async (id, participant) => {
+        const key = `${id}:${participant}`;
+        const retryCount = (await msgRetryCache.get(key)) || 0;
+        return retryCount < maxMsgRetryCount;
+    };
+    const updateSendMessageAgainCount = async (id, participant) => {
+        const key = `${id}:${participant}`;
+        const newValue = ((await msgRetryCache.get(key)) || 0) + 1;
+        await msgRetryCache.set(key, newValue);
+    };
+    const sendMessagesAgain = async (key, ids, retryNode) => {
+        const remoteJid = key.remoteJid;
+        const participant = key.participant || remoteJid;
+        const retryCount = +retryNode.attrs.count || 1;
+        // Try to get messages from cache first, then fallback to getMessage
+        const msgs = [];
+        for (const id of ids) {
+            let msg;
+            // Try to get from retry cache first if enabled
+            if (messageRetryManager) {
+                const cachedMsg = messageRetryManager.getRecentMessage(remoteJid, id);
+                if (cachedMsg) {
+                    msg = cachedMsg.message;
+                    logger.debug({ jid: remoteJid, id }, 'found message in retry cache');
+                    // Mark retry as successful since we found the message
+                    messageRetryManager.markRetrySuccess(id);
+                }
+            }
+            // Fallback to getMessage if not found in cache
+            if (!msg) {
+                msg = await getMessage({ ...key, id });
+                if (msg) {
+                    logger.debug({ jid: remoteJid, id }, 'found message via getMessage');
+                    // Also mark as successful if found via getMessage
+                    if (messageRetryManager) {
+                        messageRetryManager.markRetrySuccess(id);
+                    }
+                }
+            }
+            msgs.push(msg);
+        }
+        // if it's the primary jid sending the request
+        // just re-send the message to everyone
+        // prevents the first message decryption failure
+        const sendToAll = !jidDecode(participant)?.device;
+        // Check if we should recreate session for this retry
+        let shouldRecreateSession = false;
+        let recreateReason = '';
+        if (enableAutoSessionRecreation && messageRetryManager) {
+            try {
+                const sessionId = signalRepository.jidToSignalProtocolAddress(participant);
+                const hasSession = await signalRepository.validateSession(participant);
+                const result = messageRetryManager.shouldRecreateSession(participant, retryCount, hasSession.exists);
+                shouldRecreateSession = result.recreate;
+                recreateReason = result.reason;
+                if (shouldRecreateSession) {
+                    logger.debug({ participant, retryCount, reason: recreateReason }, 'recreating session for outgoing retry');
+                    await authState.keys.set({ session: { [sessionId]: null } });
+                }
+            }
+            catch (error) {
+                logger.warn({ error, participant }, 'failed to check session recreation for outgoing retry');
+            }
+        }
+        await assertSessions([participant]);
+        if (isJidGroup(remoteJid)) {
+            await authState.keys.set({ 'sender-key-memory': { [remoteJid]: null } });
+        }
+        logger.debug({ participant, sendToAll, shouldRecreateSession, recreateReason }, 'forced new session for retry recp');
+        for (const [i, msg] of msgs.entries()) {
+            if (!ids[i])
+                continue;
+            if (msg && (await willSendMessageAgain(ids[i], participant))) {
+                updateSendMessageAgainCount(ids[i], participant);
+                const msgRelayOpts = { messageId: ids[i] };
+                if (sendToAll) {
+                    msgRelayOpts.useUserDevicesCache = false;
+                }
+                else {
+                    msgRelayOpts.participant = {
+                        jid: participant,
+                        count: +retryNode.attrs.count
+                    };
+                }
+                await relayMessage(key.remoteJid, msg, msgRelayOpts);
+            }
+            else {
+                logger.debug({ jid: key.remoteJid, id: ids[i] }, 'recv retry request, but message not available');
+            }
+        }
+    };
+    const handleReceipt = async (node) => {
+        const { attrs, content } = node;
+        const isLid = attrs.from.includes('lid');
+        const isNodeFromMe = areJidsSameUser(attrs.participant || attrs.from, isLid ? authState.creds.me?.lid : authState.creds.me?.id);
+        const remoteJid = !isNodeFromMe || isJidGroup(attrs.from) ? attrs.from : attrs.recipient;
+        const fromMe = !attrs.recipient || ((attrs.type === 'retry' || attrs.type === 'sender') && isNodeFromMe);
+        const key = {
+            remoteJid,
+            id: '',
+            fromMe,
+            participant: attrs.participant
+        };
+        if (shouldIgnoreJid(remoteJid) && remoteJid !== S_WHATSAPP_NET) {
+            logger.debug({ remoteJid }, 'ignoring receipt from jid');
+            await sendMessageAck(node);
+            return;
+        }
+        const ids = [attrs.id];
+        if (Array.isArray(content)) {
+            const items = getBinaryNodeChildren(content[0], 'item');
+            ids.push(...items.map(i => i.attrs.id));
+        }
+        try {
+            await Promise.all([
+                processingMutex.mutex(async () => {
+                    const status = getStatusFromReceiptType(attrs.type);
+                    if (typeof status !== 'undefined' &&
+                        // basically, we only want to know when a message from us has been delivered to/read by the other person
+                        // or another device of ours has read some messages
+                        (status >= proto.WebMessageInfo.Status.SERVER_ACK || !isNodeFromMe)) {
+                        if (isJidGroup(remoteJid) || isJidStatusBroadcast(remoteJid)) {
+                            if (attrs.participant) {
+                                const updateKey = status === proto.WebMessageInfo.Status.DELIVERY_ACK ? 'receiptTimestamp' : 'readTimestamp';
+                                ev.emit('message-receipt.update', ids.map(id => ({
+                                    key: { ...key, id },
+                                    receipt: {
+                                        userJid: jidNormalizedUser(attrs.participant),
+                                        [updateKey]: +attrs.t
+                                    }
+                                })));
+                            }
+                        }
+                        else {
+                            ev.emit('messages.update', ids.map(id => ({
+                                key: { ...key, id },
+                                update: { status }
+                            })));
+                        }
+                    }
+                    if (attrs.type === 'retry') {
+                        // correctly set who is asking for the retry
+                        key.participant = key.participant || attrs.from;
+                        const retryNode = getBinaryNodeChild(node, 'retry');
+                        if (ids[0] && key.participant && (await willSendMessageAgain(ids[0], key.participant))) {
+                            if (key.fromMe) {
+                                try {
+                                    updateSendMessageAgainCount(ids[0], key.participant);
+                                    logger.debug({ attrs, key }, 'recv retry request');
+                                    await sendMessagesAgain(key, ids, retryNode);
+                                }
+                                catch (error) {
+                                    logger.error({ key, ids, trace: error instanceof Error ? error.stack : 'Unknown error' }, 'error in sending message again');
+                                }
+                            }
+                            else {
+                                logger.info({ attrs, key }, 'recv retry for not fromMe message');
+                            }
+                        }
+                        else {
+                            logger.info({ attrs, key }, 'will not send message again, as sent too many times');
+                        }
+                    }
+                })
+            ]);
+        }
+        finally {
+            await sendMessageAck(node);
+        }
+    };
|
|
890
|
+
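handleReceipt turns receipt stanzas into two public events: `message-receipt.update` for per-participant delivery/read timestamps in groups and status broadcasts, and `messages.update` for status changes in direct chats; `retry` receipts additionally trigger the re-send path above. A small consumer-side sketch of listening to both events follows; `sock` stands for the socket returned by this fork's factory and is an assumption of the sketch, not part of this file.

    // Illustrative listeners for the events emitted above. `sock` is assumed to be
    // the socket returned by this fork's makeWASocket-style factory.
    sock.ev.on('message-receipt.update', updates => {
        for (const { key, receipt } of updates) {
            // receipt carries userJid plus receiptTimestamp (delivered) or readTimestamp (read)
            console.log('receipt for', key.id, 'from', receipt.userJid,
                receipt.readTimestamp ?? receipt.receiptTimestamp);
        }
    });
    sock.ev.on('messages.update', updates => {
        for (const { key, update } of updates) {
            if (typeof update.status !== 'undefined') {
                console.log('message', key.id, 'moved to status', update.status);
            }
        }
    });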
 890 +     const handleNotification = async (node) => {
 891 +         const remoteJid = node.attrs.from;
 892 +         if (shouldIgnoreJid(remoteJid) && remoteJid !== S_WHATSAPP_NET) {
 893 +             logger.debug({ remoteJid, id: node.attrs.id }, 'ignored notification');
 894 +             await sendMessageAck(node);
 895 +             return;
 896 +         }
 897 +         try {
 898 +             await Promise.all([
 899 +                 processingMutex.mutex(async () => {
 900 +                     const msg = await processNotification(node);
 901 +                     if (msg) {
 902 +                         const fromMe = areJidsSameUser(node.attrs.participant || remoteJid, authState.creds.me.id);
 903 +                         const { senderAlt: participantAlt, addressingMode } = extractAddressingContext(node);
 904 +                         msg.key = {
 905 +                             remoteJid,
 906 +                             fromMe,
 907 +                             participant: node.attrs.participant,
 908 +                             participantAlt,
 909 +                             addressingMode,
 910 +                             id: node.attrs.id,
 911 +                             ...(msg.key || {})
 912 +                         };
 913 +                         msg.participant ?? (msg.participant = node.attrs.participant);
 914 +                         msg.messageTimestamp = +node.attrs.t;
 915 +                         const fullMsg = proto.WebMessageInfo.fromObject(msg);
 916 +                         await upsertMessage(fullMsg, 'append');
 917 +                     }
 918 +                 })
 919 +             ]);
 920 +         }
 921 +         finally {
 922 +             await sendMessageAck(node);
 923 +         }
 924 +     };
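handleNotification converts server notifications into WebMessageInfo stubs and stores them via upsertMessage with the 'append' flag, so they surface next to regular messages rather than as live 'notify' events. The sketch below shows how a consumer could tell the two apart, assuming this fork keeps upstream Baileys' `messages.upsert` payload shape (`{ messages, type }`).

    // Illustrative sketch, assuming the upstream `messages.upsert` payload shape.
    sock.ev.on('messages.upsert', ({ messages, type }) => {
        for (const msg of messages) {
            if (type === 'append') {
                // notification-derived (and offline/history) entries land here
                console.log('appended entry', msg.key.id, msg.messageStubType);
            } else {
                console.log('live message', msg.key.id);
            }
        }
    });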
 925 +     const handleMessage = async (node) => {
 926 +         if (shouldIgnoreJid(node.attrs.from) && node.attrs.from !== S_WHATSAPP_NET) {
 927 +             logger.debug({ key: node.attrs.key }, 'ignored message');
 928 +             await sendMessageAck(node, NACK_REASONS.UnhandledError);
 929 +             return;
 930 +         }
 931 +         const encNode = getBinaryNodeChild(node, 'enc');
 932 +         // TODO: temporary fix for crashes and issues resulting of failed msmsg decryption
 933 +         if (encNode && encNode.attrs.type === 'msmsg') {
 934 +             logger.debug({ key: node.attrs.key }, 'ignored msmsg');
 935 +             await sendMessageAck(node, NACK_REASONS.MissingMessageSecret);
 936 +             return;
 937 +         }
 938 +         const { fullMessage: msg, category, author, decrypt } = decryptMessageNode(node, authState.creds.me.id, authState.creds.me.lid || '', signalRepository, logger);
 939 +         const alt = msg.key.participantAlt || msg.key.remoteJidAlt;
 940 +         // store new mappings we didn't have before
 941 +         if (!!alt) {
 942 +             const altServer = jidDecode(alt)?.server;
 943 +             const primaryJid = msg.key.participant || msg.key.remoteJid;
 944 +             if (altServer === 'lid') {
 945 +                 if (!(await signalRepository.lidMapping.getPNForLID(alt))) {
 946 +                     await signalRepository.lidMapping.storeLIDPNMappings([{ lid: alt, pn: primaryJid }]);
 947 +                     await signalRepository.migrateSession(primaryJid, alt);
 948 +                 }
 949 +             }
 950 +             else {
 951 +                 await signalRepository.lidMapping.storeLIDPNMappings([{ lid: primaryJid, pn: alt }]);
 952 +                 await signalRepository.migrateSession(alt, primaryJid);
 953 +             }
 954 +         }
 955 +         if (msg.key?.remoteJid && msg.key?.id && messageRetryManager) {
 956 +             messageRetryManager.addRecentMessage(msg.key.remoteJid, msg.key.id, msg.message);
 957 +             logger.debug({
 958 +                 jid: msg.key.remoteJid,
 959 +                 id: msg.key.id
 960 +             }, 'Added message to recent cache for retry receipts');
 961 +         }
 962 +         try {
 963 +             await processingMutex.mutex(async () => {
 964 +                 await decrypt();
 965 +                 // message failed to decrypt
 966 +                 if (msg.messageStubType === proto.WebMessageInfo.StubType.CIPHERTEXT) {
 967 +                     if (msg?.messageStubParameters?.[0] === MISSING_KEYS_ERROR_TEXT) {
 968 +                         return sendMessageAck(node, NACK_REASONS.ParsingError);
 969 +                     }
 970 +                     const errorMessage = msg?.messageStubParameters?.[0] || '';
 971 +                     const isPreKeyError = errorMessage.includes('PreKey');
 972 +                     logger.debug(`[handleMessage] Attempting retry request for failed decryption`);
 973 +                     // Handle both pre-key and normal retries in single mutex
 974 +                     retryMutex.mutex(async () => {
 975 +                         try {
 976 +                             if (!ws.isOpen) {
 977 +                                 logger.debug({ node }, 'Connection closed, skipping retry');
 978 +                                 return;
 979 +                             }
 980 +                             // Handle pre-key errors with upload and delay
 981 +                             if (isPreKeyError) {
 982 +                                 logger.info({ error: errorMessage }, 'PreKey error detected, uploading and retrying');
 983 +                                 try {
 984 +                                     logger.debug('Uploading pre-keys for error recovery');
 985 +                                     await uploadPreKeys(5);
 986 +                                     logger.debug('Waiting for server to process new pre-keys');
 987 +                                     await delay(1000);
 988 +                                 }
 989 +                                 catch (uploadErr) {
 990 +                                     logger.error({ uploadErr }, 'Pre-key upload failed, proceeding with retry anyway');
 991 +                                 }
 992 +                             }
 993 +                             const encNode = getBinaryNodeChild(node, 'enc');
 994 +                             await sendRetryRequest(node, !encNode);
 995 +                             if (retryRequestDelayMs) {
 996 +                                 await delay(retryRequestDelayMs);
 997 +                             }
 998 +                         }
 999 +                         catch (err) {
1000 +                             logger.error({ err, isPreKeyError }, 'Failed to handle retry, attempting basic retry');
1001 +                             // Still attempt retry even if pre-key upload failed
1002 +                             try {
1003 +                                 const encNode = getBinaryNodeChild(node, 'enc');
1004 +                                 await sendRetryRequest(node, !encNode);
1005 +                             }
1006 +                             catch (retryErr) {
1007 +                                 logger.error({ retryErr }, 'Failed to send retry after error handling');
1008 +                             }
1009 +                         }
1010 +                         await sendMessageAck(node, NACK_REASONS.UnhandledError);
1011 +                     });
1012 +                 }
1013 +                 else {
1014 +                     // no type in the receipt => message delivered
1015 +                     let type = undefined;
1016 +                     let participant = msg.key.participant;
1017 +                     if (category === 'peer') {
1018 +                         // special peer message
1019 +                         type = 'peer_msg';
1020 +                     }
1021 +                     else if (msg.key.fromMe) {
1022 +                         // message was sent by us from a different device
1023 +                         type = 'sender';
1024 +                         // need to specially handle this case
1025 +                         if (isLidUser(msg.key.remoteJid) || isLidUser(msg.key.remoteJidAlt)) {
1026 +                             participant = author; // TODO: investigate sending receipts to LIDs and not PNs
1027 +                         }
1028 +                     }
1029 +                     else if (!sendActiveReceipts) {
1030 +                         type = 'inactive';
1031 +                     }
1032 +                     await sendReceipt(msg.key.remoteJid, participant, [msg.key.id], type);
1033 +                     // send ack for history message
1034 +                     const isAnyHistoryMsg = getHistoryMsg(msg.message);
1035 +                     if (isAnyHistoryMsg) {
1036 +                         const jid = jidNormalizedUser(msg.key.remoteJid);
1037 +                         await sendReceipt(jid, undefined, [msg.key.id], 'hist_sync');
1038 +                     }
1039 +                 }
1040 +                 cleanMessage(msg, authState.creds.me.id, authState.creds.me.lid);
1041 +                 await upsertMessage(msg, node.attrs.offline ? 'append' : 'notify');
1042 +             });
1043 +         }
1044 +         catch (error) {
1045 +             logger.error({ error, node: binaryNodeToString(node) }, 'error in handling message');
1046 +         }
1047 +     };
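When decryption fails, handleMessage leaves the message as a CIPHERTEXT stub, uploads fresh pre-keys if the error text mentions 'PreKey', and queues a retry request behind retryMutex; the stub still reaches the consumer through upsertMessage. The sketch below shows how a consumer could detect those undecryptable messages, assuming this fork re-exports `proto` (the compiled WAProto definitions) the way upstream Baileys does.

    // Illustrative sketch: spotting messages that failed to decrypt.
    // Assumes `proto` is re-exported by the package like upstream Baileys.
    const { proto } = require('@kelvdra/baileys');

    sock.ev.on('messages.upsert', ({ messages }) => {
        for (const msg of messages) {
            if (msg.messageStubType === proto.WebMessageInfo.StubType.CIPHERTEXT) {
                // the library has already queued a retry request for this message;
                // messageStubParameters[0] holds the decryption error text
                console.warn('could not decrypt', msg.key.id, msg.messageStubParameters?.[0]);
            }
        }
    });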
1048 +     const handleCall = async (node) => {
1049 +         const { attrs } = node;
1050 +         const [infoChild] = getAllBinaryNodeChildren(node);
1051 +         const status = getCallStatusFromNode(infoChild);
1052 +         if (!infoChild) {
1053 +             throw new Boom('Missing call info in call node');
1054 +         }
1055 +         const callId = infoChild.attrs['call-id'];
1056 +         const from = infoChild.attrs.from || infoChild.attrs['call-creator'];
1057 +         const call = {
1058 +             chatId: attrs.from,
1059 +             from,
1060 +             id: callId,
1061 +             date: new Date(+attrs.t * 1000),
1062 +             offline: !!attrs.offline,
1063 +             status
1064 +         };
1065 +         if (status === 'offer') {
1066 +             call.isVideo = !!getBinaryNodeChild(infoChild, 'video');
1067 +             call.isGroup = infoChild.attrs.type === 'group' || !!infoChild.attrs['group-jid'];
1068 +             call.groupJid = infoChild.attrs['group-jid'];
1069 +             await callOfferCache.set(call.id, call);
1070 +         }
1071 +         const existingCall = await callOfferCache.get(call.id);
1072 +         // use existing call info to populate this event
1073 +         if (existingCall) {
1074 +             call.isVideo = existingCall.isVideo;
1075 +             call.isGroup = existingCall.isGroup;
1076 +         }
1077 +         // delete data once call has ended
1078 +         if (status === 'reject' || status === 'accept' || status === 'timeout' || status === 'terminate') {
1079 +             await callOfferCache.del(call.id);
1080 +         }
1081 +         ev.emit('call', [call]);
1082 +         await sendMessageAck(node);
1083 +     };
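handleCall flattens call stanzas into a single `call` event whose status moves through offer, accept, reject, timeout or terminate; offer metadata (isVideo, isGroup, groupJid) is cached so later events for the same call id can be enriched, then dropped once the call ends. The sketch below declines every incoming offer using the `rejectCall` helper exported by this layer (see the return block further down); its `(callId, callFrom)` argument order is assumed from upstream Baileys.

    // Illustrative sketch: auto-reject incoming call offers.
    // The (callId, callFrom) argument order of rejectCall is an assumption.
    sock.ev.on('call', async calls => {
        for (const call of calls) {
            if (call.status === 'offer') {
                console.log('rejecting', call.isVideo ? 'video' : 'voice', 'call from', call.from);
                await sock.rejectCall(call.id, call.from);
            }
        }
    });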
1084 +     const handleBadAck = async ({ attrs }) => {
1085 +         const key = { remoteJid: attrs.from, fromMe: true, id: attrs.id };
1086 +         // WARNING: REFRAIN FROM ENABLING THIS FOR NOW. IT WILL CAUSE A LOOP
1087 +         // // current hypothesis is that if pash is sent in the ack
1088 +         // // it means -- the message hasn't reached all devices yet
1089 +         // // we'll retry sending the message here
1090 +         // if(attrs.phash) {
1091 +         //     logger.info({ attrs }, 'received phash in ack, resending message...')
1092 +         //     const msg = await getMessage(key)
1093 +         //     if(msg) {
1094 +         //         await relayMessage(key.remoteJid!, msg, { messageId: key.id!, useUserDevicesCache: false })
1095 +         //     } else {
1096 +         //         logger.warn({ attrs }, 'could not send message again, as it was not found')
1097 +         //     }
1098 +         // }
1099 +         // error in acknowledgement,
1100 +         // device could not display the message
1101 +         if (attrs.error) {
1102 +             logger.warn({ attrs }, 'received error in ack');
1103 +             ev.emit('messages.update', [
1104 +                 {
1105 +                     key,
1106 +                     update: {
1107 +                         status: WAMessageStatus.ERROR,
1108 +                         messageStubParameters: [attrs.error]
1109 +                     }
1110 +                 }
1111 +             ]);
1112 +         }
1113 +     };
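A message ack carrying an `error` attribute marks the corresponding outgoing message as failed: handleBadAck emits `messages.update` with status ERROR and puts the raw error code into messageStubParameters. A consumer can watch for that case, as sketched below; it assumes `proto` is re-exported and that `WAMessageStatus` used above aliases `proto.WebMessageInfo.Status` as it does in upstream Baileys.

    // Illustrative sketch: log sends the server refused to deliver.
    const { proto } = require('@kelvdra/baileys');

    sock.ev.on('messages.update', updates => {
        for (const { key, update } of updates) {
            if (update.status === proto.WebMessageInfo.Status.ERROR) {
                console.error('send failed for', key.id, 'error code:', update.messageStubParameters?.[0]);
            }
        }
    });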
1114 +     /// processes a node with the given function
1115 +     /// and adds the task to the existing buffer if we're buffering events
1116 +     const processNodeWithBuffer = async (node, identifier, exec) => {
1117 +         ev.buffer();
1118 +         await execTask();
1119 +         ev.flush();
1120 +         function execTask() {
1121 +             return exec(node, false).catch(err => onUnexpectedError(err, identifier));
1122 +         }
1123 +     };
1124 +     const makeOfflineNodeProcessor = () => {
1125 +         const nodeProcessorMap = new Map([
1126 +             ['message', handleMessage],
1127 +             ['call', handleCall],
1128 +             ['receipt', handleReceipt],
1129 +             ['notification', handleNotification]
1130 +         ]);
1131 +         const nodes = [];
1132 +         let isProcessing = false;
1133 +         const enqueue = (type, node) => {
1134 +             nodes.push({ type, node });
1135 +             if (isProcessing) {
1136 +                 return;
1137 +             }
1138 +             isProcessing = true;
1139 +             const promise = async () => {
1140 +                 while (nodes.length && ws.isOpen) {
1141 +                     const { type, node } = nodes.shift();
1142 +                     const nodeProcessor = nodeProcessorMap.get(type);
1143 +                     if (!nodeProcessor) {
1144 +                         onUnexpectedError(new Error(`unknown offline node type: ${type}`), 'processing offline node');
1145 +                         continue;
1146 +                     }
1147 +                     await nodeProcessor(node);
1148 +                 }
1149 +                 isProcessing = false;
1150 +             };
1151 +             promise().catch(error => onUnexpectedError(error, 'processing offline nodes'));
1152 +         };
1153 +         return { enqueue };
1154 +     };
1155 +     const offlineNodeProcessor = makeOfflineNodeProcessor();
1156 +     const processNode = (type, node, identifier, exec) => {
1157 +         const isOffline = !!node.attrs.offline;
1158 +         if (isOffline) {
1159 +             offlineNodeProcessor.enqueue(type, node);
1160 +         }
1161 +         else {
1162 +             processNodeWithBuffer(node, identifier, exec);
1163 +         }
1164 +     };
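Nodes replayed after a reconnect (marked with `attrs.offline`) are drained strictly in arrival order through a single FIFO, while live nodes go through the event buffer in processNodeWithBuffer. Below is a minimal standalone sketch of that serialized-queue pattern, independent of the library, to make the control flow explicit.

    // Minimal standalone sketch of the serialized FIFO used by makeOfflineNodeProcessor:
    // enqueue never blocks, one drain loop processes items in order, and a new
    // loop is started only when the previous one has gone idle.
    const makeSerialQueue = (handlers) => {
        const items = [];
        let draining = false;
        const enqueue = (type, payload) => {
            items.push({ type, payload });
            if (draining) {
                return;
            }
            draining = true;
            const drain = async () => {
                while (items.length) {
                    const item = items.shift();
                    const handler = handlers[item.type];
                    if (handler) {
                        await handler(item.payload);
                    }
                }
                draining = false;
            };
            drain().catch(err => console.error('error draining queue', err));
        };
        return { enqueue };
    };

    // usage: const q = makeSerialQueue({ message: async n => console.log(n) }); q.enqueue('message', { id: 1 });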
1165 +     // recv a message
1166 +     ws.on('CB:message', (node) => {
1167 +         processNode('message', node, 'processing message', handleMessage);
1168 +     });
1169 +     ws.on('CB:call', async (node) => {
1170 +         processNode('call', node, 'handling call', handleCall);
1171 +     });
1172 +     ws.on('CB:receipt', node => {
1173 +         processNode('receipt', node, 'handling receipt', handleReceipt);
1174 +     });
1175 +     ws.on('CB:notification', async (node) => {
1176 +         processNode('notification', node, 'handling notification', handleNotification);
1177 +     });
1178 +     ws.on('CB:ack,class:message', (node) => {
1179 +         handleBadAck(node).catch(error => onUnexpectedError(error, 'handling bad ack'));
1180 +     });
1181 +     ev.on('call', ([call]) => {
1182 +         if (!call) {
1183 +             return;
1184 +         }
1185 +         // missed call + group call notification message generation
1186 +         if (call.status === 'timeout' || (call.status === 'offer' && call.isGroup)) {
1187 +             const msg = {
1188 +                 key: {
1189 +                     remoteJid: call.chatId,
1190 +                     id: call.id,
1191 +                     fromMe: false
1192 +                 },
1193 +                 messageTimestamp: unixTimestampSeconds(call.date)
1194 +             };
1195 +             if (call.status === 'timeout') {
1196 +                 if (call.isGroup) {
1197 +                     msg.messageStubType = call.isVideo
1198 +                         ? WAMessageStubType.CALL_MISSED_GROUP_VIDEO
1199 +                         : WAMessageStubType.CALL_MISSED_GROUP_VOICE;
1200 +                 }
1201 +                 else {
1202 +                     msg.messageStubType = call.isVideo ? WAMessageStubType.CALL_MISSED_VIDEO : WAMessageStubType.CALL_MISSED_VOICE;
1203 +                 }
1204 +             }
1205 +             else {
1206 +                 msg.message = { call: { callKey: Buffer.from(call.id) } };
1207 +             }
1208 +             const protoMsg = proto.WebMessageInfo.fromObject(msg);
1209 +             upsertMessage(protoMsg, call.offline ? 'append' : 'notify');
1210 +         }
1211 +     });
1212 +     ev.on('connection.update', ({ isOnline }) => {
1213 +         if (typeof isOnline !== 'undefined') {
1214 +             sendActiveReceipts = isOnline;
1215 +             logger.trace(`sendActiveReceipts set to "${sendActiveReceipts}"`);
1216 +         }
1217 +     });
1218 +     return {
1219 +         ...sock,
1220 +         sendMessageAck,
1221 +         sendRetryRequest,
1222 +         rejectCall,
1223 +         fetchMessageHistory,
1224 +         requestPlaceholderResend,
1225 +         messageRetryManager
1226 +     };
1227 + };
1228 + //# sourceMappingURL=messages-recv.js.map
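Taken together, this file wires the raw CB:message, CB:call, CB:receipt, CB:notification and CB:ack frames into the public event emitter and extends the socket with sendMessageAck, sendRetryRequest, rejectCall, fetchMessageHistory, requestPlaceholderResend and messageRetryManager. The closing sketch shows how the listeners from the earlier sketches would be attached in practice; it assumes this fork re-exports upstream Baileys' default makeWASocket factory and the useMultiFileAuthState helper, which should be verified against the package README.

    // Illustrative wiring sketch; entry-point names are assumptions based on upstream Baileys.
    const makeWASocket = require('@kelvdra/baileys').default;
    const { useMultiFileAuthState } = require('@kelvdra/baileys');

    const start = async () => {
        const { state, saveCreds } = await useMultiFileAuthState('./auth');
        const sock = makeWASocket({ auth: state });

        sock.ev.on('creds.update', saveCreds);
        sock.ev.on('connection.update', ({ connection }) => {
            console.log('connection is now', connection);
        });
        sock.ev.on('messages.upsert', ({ messages, type }) => {
            console.log('received', messages.length, 'message(s) via', type);
        });
    };

    start().catch(console.error);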