anya-bail 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +316 -0
- package/WAProto/GenerateStatics.sh +3 -0
- package/WAProto/WAProto/GenerateStatics.sh +3 -0
- package/WAProto/WAProto/WAProto.proto +5479 -0
- package/WAProto/WAProto/fix-imports.js +81 -0
- package/WAProto/WAProto/index.d.ts +14017 -0
- package/WAProto/WAProto/index.js +97687 -0
- package/WAProto/WAProto.proto +5479 -0
- package/WAProto/fix-imports.js +85 -0
- package/WAProto/index.d.ts +14017 -0
- package/WAProto/index.js +97691 -0
- package/engine-requirements.js +10 -0
- package/lib/Defaults/index.d.ts +86 -0
- package/lib/Defaults/index.d.ts.map +1 -0
- package/lib/Defaults/index.js +145 -0
- package/lib/Defaults/index.js.map +1 -0
- package/lib/Signal/Group/ciphertext-message.d.ts +10 -0
- package/lib/Signal/Group/ciphertext-message.d.ts.map +1 -0
- package/lib/Signal/Group/ciphertext-message.js +12 -0
- package/lib/Signal/Group/ciphertext-message.js.map +1 -0
- package/lib/Signal/Group/group-session-builder.d.ts +15 -0
- package/lib/Signal/Group/group-session-builder.d.ts.map +1 -0
- package/lib/Signal/Group/group-session-builder.js +30 -0
- package/lib/Signal/Group/group-session-builder.js.map +1 -0
- package/lib/Signal/Group/group_cipher.d.ts +17 -0
- package/lib/Signal/Group/group_cipher.d.ts.map +1 -0
- package/lib/Signal/Group/group_cipher.js +82 -0
- package/lib/Signal/Group/group_cipher.js.map +1 -0
- package/lib/Signal/Group/index.d.ts +12 -0
- package/lib/Signal/Group/index.d.ts.map +1 -0
- package/lib/Signal/Group/index.js +12 -0
- package/lib/Signal/Group/index.js.map +1 -0
- package/lib/Signal/Group/keyhelper.d.ts +11 -0
- package/lib/Signal/Group/keyhelper.d.ts.map +1 -0
- package/lib/Signal/Group/keyhelper.js +18 -0
- package/lib/Signal/Group/keyhelper.js.map +1 -0
- package/lib/Signal/Group/sender-chain-key.d.ts +14 -0
- package/lib/Signal/Group/sender-chain-key.d.ts.map +1 -0
- package/lib/Signal/Group/sender-chain-key.js +26 -0
- package/lib/Signal/Group/sender-chain-key.js.map +1 -0
- package/lib/Signal/Group/sender-key-distribution-message.d.ts +17 -0
- package/lib/Signal/Group/sender-key-distribution-message.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-distribution-message.js +63 -0
- package/lib/Signal/Group/sender-key-distribution-message.js.map +1 -0
- package/lib/Signal/Group/sender-key-message.d.ts +19 -0
- package/lib/Signal/Group/sender-key-message.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-message.js +66 -0
- package/lib/Signal/Group/sender-key-message.js.map +1 -0
- package/lib/Signal/Group/sender-key-name.d.ts +18 -0
- package/lib/Signal/Group/sender-key-name.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-name.js +48 -0
- package/lib/Signal/Group/sender-key-name.js.map +1 -0
- package/lib/Signal/Group/sender-key-record.d.ts +31 -0
- package/lib/Signal/Group/sender-key-record.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-record.js +41 -0
- package/lib/Signal/Group/sender-key-record.js.map +1 -0
- package/lib/Signal/Group/sender-key-state.d.ts +39 -0
- package/lib/Signal/Group/sender-key-state.d.ts.map +1 -0
- package/lib/Signal/Group/sender-key-state.js +84 -0
- package/lib/Signal/Group/sender-key-state.js.map +1 -0
- package/lib/Signal/Group/sender-message-key.d.ts +12 -0
- package/lib/Signal/Group/sender-message-key.d.ts.map +1 -0
- package/lib/Signal/Group/sender-message-key.js +26 -0
- package/lib/Signal/Group/sender-message-key.js.map +1 -0
- package/lib/Signal/libsignal.d.ts +5 -0
- package/lib/Signal/libsignal.d.ts.map +1 -0
- package/lib/Signal/libsignal.js +431 -0
- package/lib/Signal/libsignal.js.map +1 -0
- package/lib/Signal/lid-mapping.d.ts +23 -0
- package/lib/Signal/lid-mapping.d.ts.map +1 -0
- package/lib/Signal/lid-mapping.js +277 -0
- package/lib/Signal/lid-mapping.js.map +1 -0
- package/lib/Socket/Client/index.d.ts +3 -0
- package/lib/Socket/Client/index.d.ts.map +1 -0
- package/lib/Socket/Client/index.js +3 -0
- package/lib/Socket/Client/index.js.map +1 -0
- package/lib/Socket/Client/types.d.ts +16 -0
- package/lib/Socket/Client/types.d.ts.map +1 -0
- package/lib/Socket/Client/types.js +11 -0
- package/lib/Socket/Client/types.js.map +1 -0
- package/lib/Socket/Client/websocket.d.ts +13 -0
- package/lib/Socket/Client/websocket.d.ts.map +1 -0
- package/lib/Socket/Client/websocket.js +54 -0
- package/lib/Socket/Client/websocket.js.map +1 -0
- package/lib/Socket/business.d.ts +217 -0
- package/lib/Socket/business.d.ts.map +1 -0
- package/lib/Socket/business.js +379 -0
- package/lib/Socket/business.js.map +1 -0
- package/lib/Socket/chats.d.ts +124 -0
- package/lib/Socket/chats.d.ts.map +1 -0
- package/lib/Socket/chats.js +1192 -0
- package/lib/Socket/chats.js.map +1 -0
- package/lib/Socket/communities.d.ts +273 -0
- package/lib/Socket/communities.d.ts.map +1 -0
- package/lib/Socket/communities.js +431 -0
- package/lib/Socket/communities.js.map +1 -0
- package/lib/Socket/groups.d.ts +161 -0
- package/lib/Socket/groups.d.ts.map +1 -0
- package/lib/Socket/groups.js +347 -0
- package/lib/Socket/groups.js.map +1 -0
- package/lib/Socket/index.d.ts +260 -0
- package/lib/Socket/index.d.ts.map +1 -0
- package/lib/Socket/index.js +12 -0
- package/lib/Socket/index.js.map +1 -0
- package/lib/Socket/messages-recv.d.ts +203 -0
- package/lib/Socket/messages-recv.d.ts.map +1 -0
- package/lib/Socket/messages-recv.js +1772 -0
- package/lib/Socket/messages-recv.js.map +1 -0
- package/lib/Socket/messages-send.d.ts +199 -0
- package/lib/Socket/messages-send.d.ts.map +1 -0
- package/lib/Socket/messages-send.js +1157 -0
- package/lib/Socket/messages-send.js.map +1 -0
- package/lib/Socket/mex.d.ts +3 -0
- package/lib/Socket/mex.d.ts.map +1 -0
- package/lib/Socket/mex.js +42 -0
- package/lib/Socket/mex.js.map +1 -0
- package/lib/Socket/newsletter.d.ts +170 -0
- package/lib/Socket/newsletter.d.ts.map +1 -0
- package/lib/Socket/newsletter.js +181 -0
- package/lib/Socket/newsletter.js.map +1 -0
- package/lib/Socket/rich-messages.d.ts +7 -0
- package/lib/Socket/rich-messages.d.ts.map +1 -0
- package/lib/Socket/rich-messages.js +6 -0
- package/lib/Socket/rich-messages.js.map +1 -0
- package/lib/Socket/socket.d.ts +59 -0
- package/lib/Socket/socket.d.ts.map +1 -0
- package/lib/Socket/socket.js +1040 -0
- package/lib/Socket/socket.js.map +1 -0
- package/lib/Socket/usync.d.ts +49 -0
- package/lib/Socket/usync.d.ts.map +1 -0
- package/lib/Socket/usync.js +42 -0
- package/lib/Socket/usync.js.map +1 -0
- package/lib/Types/Auth.d.ts +117 -0
- package/lib/Types/Auth.d.ts.map +1 -0
- package/lib/Types/Auth.js +2 -0
- package/lib/Types/Auth.js.map +1 -0
- package/lib/Types/Bussines.d.ts +25 -0
- package/lib/Types/Bussines.d.ts.map +1 -0
- package/lib/Types/Bussines.js +2 -0
- package/lib/Types/Bussines.js.map +1 -0
- package/lib/Types/Call.d.ts +23 -0
- package/lib/Types/Call.d.ts.map +1 -0
- package/lib/Types/Call.js +2 -0
- package/lib/Types/Call.js.map +1 -0
- package/lib/Types/Chat.d.ts +123 -0
- package/lib/Types/Chat.d.ts.map +1 -0
- package/lib/Types/Chat.js +8 -0
- package/lib/Types/Chat.js.map +1 -0
- package/lib/Types/Contact.d.ts +26 -0
- package/lib/Types/Contact.d.ts.map +1 -0
- package/lib/Types/Contact.js +2 -0
- package/lib/Types/Contact.js.map +1 -0
- package/lib/Types/Events.d.ts +256 -0
- package/lib/Types/Events.d.ts.map +1 -0
- package/lib/Types/Events.js +2 -0
- package/lib/Types/Events.js.map +1 -0
- package/lib/Types/GroupMetadata.d.ts +71 -0
- package/lib/Types/GroupMetadata.d.ts.map +1 -0
- package/lib/Types/GroupMetadata.js +2 -0
- package/lib/Types/GroupMetadata.js.map +1 -0
- package/lib/Types/Label.d.ts +47 -0
- package/lib/Types/Label.d.ts.map +1 -0
- package/lib/Types/Label.js +25 -0
- package/lib/Types/Label.js.map +1 -0
- package/lib/Types/LabelAssociation.d.ts +30 -0
- package/lib/Types/LabelAssociation.d.ts.map +1 -0
- package/lib/Types/LabelAssociation.js +7 -0
- package/lib/Types/LabelAssociation.js.map +1 -0
- package/lib/Types/Message.d.ts +355 -0
- package/lib/Types/Message.d.ts.map +1 -0
- package/lib/Types/Message.js +18 -0
- package/lib/Types/Message.js.map +1 -0
- package/lib/Types/Mex.d.ts +141 -0
- package/lib/Types/Mex.d.ts.map +1 -0
- package/lib/Types/Mex.js +37 -0
- package/lib/Types/Mex.js.map +1 -0
- package/lib/Types/MexUpdates.d.ts +9 -0
- package/lib/Types/MexUpdates.d.ts.map +1 -0
- package/lib/Types/MexUpdates.js +11 -0
- package/lib/Types/MexUpdates.js.map +1 -0
- package/lib/Types/Newsletter.d.ts +135 -0
- package/lib/Types/Newsletter.d.ts.map +1 -0
- package/lib/Types/Newsletter.js +31 -0
- package/lib/Types/Newsletter.js.map +1 -0
- package/lib/Types/Product.d.ts +79 -0
- package/lib/Types/Product.d.ts.map +1 -0
- package/lib/Types/Product.js +2 -0
- package/lib/Types/Product.js.map +1 -0
- package/lib/Types/Signal.d.ts +87 -0
- package/lib/Types/Signal.d.ts.map +1 -0
- package/lib/Types/Signal.js +2 -0
- package/lib/Types/Signal.js.map +1 -0
- package/lib/Types/Socket.d.ts +136 -0
- package/lib/Types/Socket.d.ts.map +1 -0
- package/lib/Types/Socket.js +3 -0
- package/lib/Types/Socket.js.map +1 -0
- package/lib/Types/State.d.ts +97 -0
- package/lib/Types/State.d.ts.map +1 -0
- package/lib/Types/State.js +56 -0
- package/lib/Types/State.js.map +1 -0
- package/lib/Types/USync.d.ts +26 -0
- package/lib/Types/USync.d.ts.map +1 -0
- package/lib/Types/USync.js +2 -0
- package/lib/Types/USync.js.map +1 -0
- package/lib/Types/index.d.ts +67 -0
- package/lib/Types/index.d.ts.map +1 -0
- package/lib/Types/index.js +26 -0
- package/lib/Types/index.js.map +1 -0
- package/lib/Utils/anti-delete.d.ts +3 -0
- package/lib/Utils/anti-delete.d.ts.map +1 -0
- package/lib/Utils/anti-delete.js +3 -0
- package/lib/Utils/anti-delete.js.map +1 -0
- package/lib/Utils/auth-utils.d.ts +24 -0
- package/lib/Utils/auth-utils.d.ts.map +1 -0
- package/lib/Utils/auth-utils.js +302 -0
- package/lib/Utils/auth-utils.js.map +1 -0
- package/lib/Utils/auto-reply.d.ts +50 -0
- package/lib/Utils/auto-reply.d.ts.map +1 -0
- package/lib/Utils/auto-reply.js +118 -0
- package/lib/Utils/auto-reply.js.map +1 -0
- package/lib/Utils/baileys-event-stream.d.ts +3 -0
- package/lib/Utils/baileys-event-stream.d.ts.map +1 -0
- package/lib/Utils/baileys-event-stream.js +3 -0
- package/lib/Utils/baileys-event-stream.js.map +1 -0
- package/lib/Utils/browser-utils.d.ts +11 -0
- package/lib/Utils/browser-utils.d.ts.map +1 -0
- package/lib/Utils/browser-utils.js +52 -0
- package/lib/Utils/browser-utils.js.map +1 -0
- package/lib/Utils/business.d.ts +23 -0
- package/lib/Utils/business.d.ts.map +1 -0
- package/lib/Utils/business.js +231 -0
- package/lib/Utils/business.js.map +1 -0
- package/lib/Utils/chat-control.d.ts +55 -0
- package/lib/Utils/chat-control.d.ts.map +1 -0
- package/lib/Utils/chat-control.js +128 -0
- package/lib/Utils/chat-control.js.map +1 -0
- package/lib/Utils/chat-utils.d.ts +100 -0
- package/lib/Utils/chat-utils.d.ts.map +1 -0
- package/lib/Utils/chat-utils.js +872 -0
- package/lib/Utils/chat-utils.js.map +1 -0
- package/lib/Utils/companion-reg-client-utils.d.ts +17 -0
- package/lib/Utils/companion-reg-client-utils.d.ts.map +1 -0
- package/lib/Utils/companion-reg-client-utils.js +34 -0
- package/lib/Utils/companion-reg-client-utils.js.map +1 -0
- package/lib/Utils/crypto.d.ts +37 -0
- package/lib/Utils/crypto.d.ts.map +1 -0
- package/lib/Utils/crypto.js +118 -0
- package/lib/Utils/crypto.js.map +1 -0
- package/lib/Utils/decode-wa-message.d.ts +66 -0
- package/lib/Utils/decode-wa-message.d.ts.map +1 -0
- package/lib/Utils/decode-wa-message.js +311 -0
- package/lib/Utils/decode-wa-message.js.map +1 -0
- package/lib/Utils/event-buffer.d.ts +36 -0
- package/lib/Utils/event-buffer.d.ts.map +1 -0
- package/lib/Utils/event-buffer.js +622 -0
- package/lib/Utils/event-buffer.js.map +1 -0
- package/lib/Utils/generics.d.ts +98 -0
- package/lib/Utils/generics.d.ts.map +1 -0
- package/lib/Utils/generics.js +410 -0
- package/lib/Utils/generics.js.map +1 -0
- package/lib/Utils/history.d.ts +24 -0
- package/lib/Utils/history.d.ts.map +1 -0
- package/lib/Utils/history.js +134 -0
- package/lib/Utils/history.js.map +1 -0
- package/lib/Utils/identity-change-handler.d.ts +44 -0
- package/lib/Utils/identity-change-handler.d.ts.map +1 -0
- package/lib/Utils/identity-change-handler.js +50 -0
- package/lib/Utils/identity-change-handler.js.map +1 -0
- package/lib/Utils/index.d.ts +23 -0
- package/lib/Utils/index.d.ts.map +1 -0
- package/lib/Utils/index.js +23 -0
- package/lib/Utils/index.js.map +1 -0
- package/lib/Utils/interactive-message.d.ts +201 -0
- package/lib/Utils/interactive-message.d.ts.map +1 -0
- package/lib/Utils/interactive-message.js +256 -0
- package/lib/Utils/interactive-message.js.map +1 -0
- package/lib/Utils/jid-plotting.d.ts +3 -0
- package/lib/Utils/jid-plotting.d.ts.map +1 -0
- package/lib/Utils/jid-plotting.js +3 -0
- package/lib/Utils/jid-plotting.js.map +1 -0
- package/lib/Utils/link-preview.d.ts +21 -0
- package/lib/Utils/link-preview.d.ts.map +1 -0
- package/lib/Utils/link-preview.js +85 -0
- package/lib/Utils/link-preview.js.map +1 -0
- package/lib/Utils/logger.d.ts +13 -0
- package/lib/Utils/logger.d.ts.map +1 -0
- package/lib/Utils/logger.js +3 -0
- package/lib/Utils/logger.js.map +1 -0
- package/lib/Utils/lt-hash.d.ts +8 -0
- package/lib/Utils/lt-hash.d.ts.map +1 -0
- package/lib/Utils/lt-hash.js +8 -0
- package/lib/Utils/lt-hash.js.map +1 -0
- package/lib/Utils/make-mutex.d.ts +9 -0
- package/lib/Utils/make-mutex.d.ts.map +1 -0
- package/lib/Utils/make-mutex.js +33 -0
- package/lib/Utils/make-mutex.js.map +1 -0
- package/lib/Utils/message-composer.d.ts +7 -0
- package/lib/Utils/message-composer.d.ts.map +1 -0
- package/lib/Utils/message-composer.js +6 -0
- package/lib/Utils/message-composer.js.map +1 -0
- package/lib/Utils/message-retry-manager.d.ts +115 -0
- package/lib/Utils/message-retry-manager.d.ts.map +1 -0
- package/lib/Utils/message-retry-manager.js +265 -0
- package/lib/Utils/message-retry-manager.js.map +1 -0
- package/lib/Utils/message-search.d.ts +37 -0
- package/lib/Utils/message-search.d.ts.map +1 -0
- package/lib/Utils/message-search.js +149 -0
- package/lib/Utils/message-search.js.map +1 -0
- package/lib/Utils/messages-media.d.ts +191 -0
- package/lib/Utils/messages-media.d.ts.map +1 -0
- package/lib/Utils/messages-media.js +786 -0
- package/lib/Utils/messages-media.js.map +1 -0
- package/lib/Utils/messages.d.ts +94 -0
- package/lib/Utils/messages.d.ts.map +1 -0
- package/lib/Utils/messages.js +1375 -0
- package/lib/Utils/messages.js.map +1 -0
- package/lib/Utils/noise-handler.d.ts +20 -0
- package/lib/Utils/noise-handler.d.ts.map +1 -0
- package/lib/Utils/noise-handler.js +201 -0
- package/lib/Utils/noise-handler.js.map +1 -0
- package/lib/Utils/offline-node-processor.d.ts +17 -0
- package/lib/Utils/offline-node-processor.d.ts.map +1 -0
- package/lib/Utils/offline-node-processor.js +40 -0
- package/lib/Utils/offline-node-processor.js.map +1 -0
- package/lib/Utils/pre-key-manager.d.ts +28 -0
- package/lib/Utils/pre-key-manager.d.ts.map +1 -0
- package/lib/Utils/pre-key-manager.js +106 -0
- package/lib/Utils/pre-key-manager.js.map +1 -0
- package/lib/Utils/process-message.d.ts +60 -0
- package/lib/Utils/process-message.d.ts.map +1 -0
- package/lib/Utils/process-message.js +597 -0
- package/lib/Utils/process-message.js.map +1 -0
- package/lib/Utils/reporting-utils.d.ts +11 -0
- package/lib/Utils/reporting-utils.d.ts.map +1 -0
- package/lib/Utils/reporting-utils.js +258 -0
- package/lib/Utils/reporting-utils.js.map +1 -0
- package/lib/Utils/scheduling.d.ts +40 -0
- package/lib/Utils/scheduling.d.ts.map +1 -0
- package/lib/Utils/scheduling.js +105 -0
- package/lib/Utils/scheduling.js.map +1 -0
- package/lib/Utils/signal.d.ts +47 -0
- package/lib/Utils/signal.d.ts.map +1 -0
- package/lib/Utils/signal.js +201 -0
- package/lib/Utils/signal.js.map +1 -0
- package/lib/Utils/stanza-ack.d.ts +11 -0
- package/lib/Utils/stanza-ack.d.ts.map +1 -0
- package/lib/Utils/stanza-ack.js +38 -0
- package/lib/Utils/stanza-ack.js.map +1 -0
- package/lib/Utils/status-posting.d.ts +79 -0
- package/lib/Utils/status-posting.d.ts.map +1 -0
- package/lib/Utils/status-posting.js +69 -0
- package/lib/Utils/status-posting.js.map +1 -0
- package/lib/Utils/sync-action-utils.d.ts +19 -0
- package/lib/Utils/sync-action-utils.d.ts.map +1 -0
- package/lib/Utils/sync-action-utils.js +49 -0
- package/lib/Utils/sync-action-utils.js.map +1 -0
- package/lib/Utils/tc-token-utils.d.ts +37 -0
- package/lib/Utils/tc-token-utils.d.ts.map +1 -0
- package/lib/Utils/tc-token-utils.js +163 -0
- package/lib/Utils/tc-token-utils.js.map +1 -0
- package/lib/Utils/templates.d.ts +48 -0
- package/lib/Utils/templates.d.ts.map +1 -0
- package/lib/Utils/templates.js +119 -0
- package/lib/Utils/templates.js.map +1 -0
- package/lib/Utils/use-mongo-file-auth-state.d.ts +16 -0
- package/lib/Utils/use-mongo-file-auth-state.d.ts.map +1 -0
- package/lib/Utils/use-mongo-file-auth-state.js +60 -0
- package/lib/Utils/use-mongo-file-auth-state.js.map +1 -0
- package/lib/Utils/use-multi-file-auth-state.d.ts +13 -0
- package/lib/Utils/use-multi-file-auth-state.d.ts.map +1 -0
- package/lib/Utils/use-multi-file-auth-state.js +121 -0
- package/lib/Utils/use-multi-file-auth-state.js.map +1 -0
- package/lib/Utils/use-single-file-auth-state.d.ts +6 -0
- package/lib/Utils/use-single-file-auth-state.d.ts.map +1 -0
- package/lib/Utils/use-single-file-auth-state.js +88 -0
- package/lib/Utils/use-single-file-auth-state.js.map +1 -0
- package/lib/Utils/validate-connection.d.ts +11 -0
- package/lib/Utils/validate-connection.d.ts.map +1 -0
- package/lib/Utils/validate-connection.js +209 -0
- package/lib/Utils/validate-connection.js.map +1 -0
- package/lib/Utils/vcard.d.ts +53 -0
- package/lib/Utils/vcard.d.ts.map +1 -0
- package/lib/Utils/vcard.js +81 -0
- package/lib/Utils/vcard.js.map +1 -0
- package/lib/WABinary/constants.d.ts +28 -0
- package/lib/WABinary/constants.d.ts.map +1 -0
- package/lib/WABinary/constants.js +1301 -0
- package/lib/WABinary/constants.js.map +1 -0
- package/lib/WABinary/decode.d.ts +7 -0
- package/lib/WABinary/decode.d.ts.map +1 -0
- package/lib/WABinary/decode.js +262 -0
- package/lib/WABinary/decode.js.map +1 -0
- package/lib/WABinary/encode.d.ts +3 -0
- package/lib/WABinary/encode.d.ts.map +1 -0
- package/lib/WABinary/encode.js +220 -0
- package/lib/WABinary/encode.js.map +1 -0
- package/lib/WABinary/generic-utils.d.ts +15 -0
- package/lib/WABinary/generic-utils.d.ts.map +1 -0
- package/lib/WABinary/generic-utils.js +113 -0
- package/lib/WABinary/generic-utils.js.map +1 -0
- package/lib/WABinary/index.d.ts +6 -0
- package/lib/WABinary/index.d.ts.map +1 -0
- package/lib/WABinary/index.js +6 -0
- package/lib/WABinary/index.js.map +1 -0
- package/lib/WABinary/jid-utils.d.ts +48 -0
- package/lib/WABinary/jid-utils.d.ts.map +1 -0
- package/lib/WABinary/jid-utils.js +96 -0
- package/lib/WABinary/jid-utils.js.map +1 -0
- package/lib/WABinary/types.d.ts +19 -0
- package/lib/WABinary/types.d.ts.map +1 -0
- package/lib/WABinary/types.js +2 -0
- package/lib/WABinary/types.js.map +1 -0
- package/lib/WAM/BinaryInfo.d.ts +9 -0
- package/lib/WAM/BinaryInfo.d.ts.map +1 -0
- package/lib/WAM/BinaryInfo.js +10 -0
- package/lib/WAM/BinaryInfo.js.map +1 -0
- package/lib/WAM/constants.d.ts +40 -0
- package/lib/WAM/constants.d.ts.map +1 -0
- package/lib/WAM/constants.js +22853 -0
- package/lib/WAM/constants.js.map +1 -0
- package/lib/WAM/encode.d.ts +3 -0
- package/lib/WAM/encode.d.ts.map +1 -0
- package/lib/WAM/encode.js +150 -0
- package/lib/WAM/encode.js.map +1 -0
- package/lib/WAM/index.d.ts +4 -0
- package/lib/WAM/index.d.ts.map +1 -0
- package/lib/WAM/index.js +4 -0
- package/lib/WAM/index.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.d.ts +10 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.js +52 -0
- package/lib/WAUSync/Protocols/USyncContactProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.d.ts +23 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +54 -0
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.d.ts +13 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +27 -0
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.d.ts +13 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js +38 -0
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/USyncUsernameProtocol.d.ts +10 -0
- package/lib/WAUSync/Protocols/USyncUsernameProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/USyncUsernameProtocol.js +25 -0
- package/lib/WAUSync/Protocols/USyncUsernameProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.d.ts +26 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +51 -0
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.d.ts +10 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +29 -0
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js.map +1 -0
- package/lib/WAUSync/Protocols/index.d.ts +8 -0
- package/lib/WAUSync/Protocols/index.d.ts.map +1 -0
- package/lib/WAUSync/Protocols/index.js +8 -0
- package/lib/WAUSync/Protocols/index.js.map +1 -0
- package/lib/WAUSync/USyncQuery.d.ts +30 -0
- package/lib/WAUSync/USyncQuery.d.ts.map +1 -0
- package/lib/WAUSync/USyncQuery.js +98 -0
- package/lib/WAUSync/USyncQuery.js.map +1 -0
- package/lib/WAUSync/USyncUser.d.ts +17 -0
- package/lib/WAUSync/USyncUser.d.ts.map +1 -0
- package/lib/WAUSync/USyncUser.js +31 -0
- package/lib/WAUSync/USyncUser.js.map +1 -0
- package/lib/WAUSync/index.d.ts +4 -0
- package/lib/WAUSync/index.d.ts.map +1 -0
- package/lib/WAUSync/index.js +4 -0
- package/lib/WAUSync/index.js.map +1 -0
- package/lib/addons/anti-delete.d.ts +62 -0
- package/lib/addons/anti-delete.d.ts.map +1 -0
- package/lib/addons/anti-delete.js +145 -0
- package/lib/addons/anti-delete.js.map +1 -0
- package/lib/addons/auto-reply.d.ts +57 -0
- package/lib/addons/auto-reply.d.ts.map +1 -0
- package/lib/addons/auto-reply.js +150 -0
- package/lib/addons/auto-reply.js.map +1 -0
- package/lib/addons/baileys-event-stream.d.ts +12 -0
- package/lib/addons/baileys-event-stream.d.ts.map +1 -0
- package/lib/addons/baileys-event-stream.js +45 -0
- package/lib/addons/baileys-event-stream.js.map +1 -0
- package/lib/addons/chat-control.d.ts +63 -0
- package/lib/addons/chat-control.d.ts.map +1 -0
- package/lib/addons/chat-control.js +121 -0
- package/lib/addons/chat-control.js.map +1 -0
- package/lib/addons/index.d.ts +40 -0
- package/lib/addons/index.d.ts.map +1 -0
- package/lib/addons/index.js +44 -0
- package/lib/addons/index.js.map +1 -0
- package/lib/addons/interactive-message.d.ts +293 -0
- package/lib/addons/interactive-message.d.ts.map +1 -0
- package/lib/addons/interactive-message.js +72 -0
- package/lib/addons/interactive-message.js.map +1 -0
- package/lib/addons/jid-plotting.d.ts +57 -0
- package/lib/addons/jid-plotting.d.ts.map +1 -0
- package/lib/addons/jid-plotting.js +152 -0
- package/lib/addons/jid-plotting.js.map +1 -0
- package/lib/addons/message-search.d.ts +41 -0
- package/lib/addons/message-search.d.ts.map +1 -0
- package/lib/addons/message-search.js +162 -0
- package/lib/addons/message-search.js.map +1 -0
- package/lib/addons/rich-message-composer.d.ts +94 -0
- package/lib/addons/rich-message-composer.d.ts.map +1 -0
- package/lib/addons/rich-message-composer.js +183 -0
- package/lib/addons/rich-message-composer.js.map +1 -0
- package/lib/addons/rich-message-utils.d.ts +144 -0
- package/lib/addons/rich-message-utils.d.ts.map +1 -0
- package/lib/addons/rich-message-utils.js +323 -0
- package/lib/addons/rich-message-utils.js.map +1 -0
- package/lib/addons/rich-types.d.ts +28 -0
- package/lib/addons/rich-types.d.ts.map +1 -0
- package/lib/addons/rich-types.js +127 -0
- package/lib/addons/rich-types.js.map +1 -0
- package/lib/addons/scheduling.d.ts +47 -0
- package/lib/addons/scheduling.d.ts.map +1 -0
- package/lib/addons/scheduling.js +109 -0
- package/lib/addons/scheduling.js.map +1 -0
- package/lib/addons/status-helpers.d.ts +75 -0
- package/lib/addons/status-helpers.d.ts.map +1 -0
- package/lib/addons/status-helpers.js +100 -0
- package/lib/addons/status-helpers.js.map +1 -0
- package/lib/addons/templates.d.ts +67 -0
- package/lib/addons/templates.d.ts.map +1 -0
- package/lib/addons/templates.js +137 -0
- package/lib/addons/templates.js.map +1 -0
- package/lib/addons/use-cache-manager-auth-state.d.ts +14 -0
- package/lib/addons/use-cache-manager-auth-state.d.ts.map +1 -0
- package/lib/addons/use-cache-manager-auth-state.js +71 -0
- package/lib/addons/use-cache-manager-auth-state.js.map +1 -0
- package/lib/addons/use-mongo-auth-state.d.ts +12 -0
- package/lib/addons/use-mongo-auth-state.d.ts.map +1 -0
- package/lib/addons/use-mongo-auth-state.js +64 -0
- package/lib/addons/use-mongo-auth-state.js.map +1 -0
- package/lib/addons/use-single-file-auth-state.d.ts +6 -0
- package/lib/addons/use-single-file-auth-state.d.ts.map +1 -0
- package/lib/addons/use-single-file-auth-state.js +155 -0
- package/lib/addons/use-single-file-auth-state.js.map +1 -0
- package/lib/addons/vcard.d.ts +61 -0
- package/lib/addons/vcard.d.ts.map +1 -0
- package/lib/addons/vcard.js +103 -0
- package/lib/addons/vcard.js.map +1 -0
- package/lib/index.d.ts +13 -0
- package/lib/index.d.ts.map +1 -0
- package/lib/index.js +14 -0
- package/lib/index.js.map +1 -0
- package/package.json +126 -0
|
@@ -0,0 +1,1772 @@
|
|
|
1
|
+
import NodeCache from '@cacheable/node-cache';
|
|
2
|
+
import { Boom } from '@hapi/boom';
|
|
3
|
+
import { randomBytes } from 'crypto';
|
|
4
|
+
import Long from 'long';
|
|
5
|
+
import { proto } from '../../WAProto/index.js';
|
|
6
|
+
import { DEFAULT_CACHE_TTLS, KEY_BUNDLE_TYPE, MIN_PREKEY_COUNT, PLACEHOLDER_MAX_AGE_SECONDS, STATUS_EXPIRY_SECONDS } from '../Defaults/index.js';
|
|
7
|
+
import { ReachoutTimelockEnforcementType, WAMessageStatus, WAMessageStubType } from '../Types/index.js';
|
|
8
|
+
import { ACCOUNT_RESTRICTED_TEXT, aesDecryptCTR, aesEncryptGCM, cleanMessage, Curve, decodeMediaRetryNode, decodeMessageNode, decryptMessageNode, delay, derivePairingCodeKey, encodeBigEndian, encodeSignedDeviceIdentity, extractAddressingContext, extractE2ESessionFromRetryReceipt, getCallStatusFromNode, getHistoryMsg, getNextPreKeys, getStatusFromReceiptType, handleIdentityChange, hkdf, MISSING_KEYS_ERROR_TEXT, NACK_REASONS, NO_MESSAGE_FOUND_ERROR_TEXT, SERVER_ERROR_CODES, toNumber, unixTimestampSeconds, xmppPreKey, xmppSignedPreKey } from '../Utils/index.js';
|
|
9
|
+
import { makeMutex } from '../Utils/make-mutex.js';
|
|
10
|
+
import { makeOfflineNodeProcessor } from '../Utils/offline-node-processor.js';
|
|
11
|
+
import { buildAckStanza } from '../Utils/stanza-ack.js';
|
|
12
|
+
import { buildMergedTcTokenIndexWrite, isTcTokenExpired, readTcTokenIndex, resolveIssuanceJid, resolveTcTokenJid, storeTcTokensFromIqResult, TC_TOKEN_INDEX_KEY } from '../Utils/tc-token-utils.js';
|
|
13
|
+
import { areJidsSameUser, binaryNodeToString, getAllBinaryNodeChildren, getBinaryNodeChild, getBinaryNodeChildBuffer, getBinaryNodeChildren, getBinaryNodeChildString, getBinaryNodeChildUInt, isJidGroup, isJidNewsletter, isJidStatusBroadcast, isLidUser, isPnUser, jidDecode, jidNormalizedUser, S_WHATSAPP_NET } from '../WABinary/index.js';
|
|
14
|
+
import { extractGroupMetadata } from './groups.js';
|
|
15
|
+
import { makeMessagesSocket } from './messages-send.js';
|
|
16
|
+
// Cached set of every known reachout-timelock enforcement type string.
const ENFORCEMENT_TYPE_VALUES = new Set(Object.values(ReachoutTimelockEnforcementType));
/**
 * Checks whether a value from a notification payload is a recognised
 * reachout-timelock enforcement type.
 * @param {unknown} value - candidate value taken from an incoming payload
 * @returns {boolean} true only for strings present in ENFORCEMENT_TYPE_VALUES
 */
function isValidEnforcementType(value) {
    if (typeof value !== 'string') {
        return false;
    }
    return ENFORCEMENT_TYPE_VALUES.has(value);
}
|
|
20
|
+
export const makeMessagesRecvSocket = (config) => {
|
|
21
|
+
const { logger, retryRequestDelayMs, maxMsgRetryCount, getMessage, shouldIgnoreJid, enableAutoSessionRecreation } = config;
|
|
22
|
+
const sock = makeMessagesSocket(config);
|
|
23
|
+
const { userDevicesCache, devicesMutex, ev, authState, ws, messageMutex, notificationMutex, receiptMutex, signalRepository, query, upsertMessage, resyncAppState, onUnexpectedError, assertSessions, sendNode, relayMessage, sendReceipt, uploadPreKeys, sendPeerDataOperationMessage, messageRetryManager, registerSocketEndHandler, issuePrivacyTokens, fetchAccountReachoutTimelock, placeholderResendCache } = sock;
|
|
24
|
+
const getLIDForPN = signalRepository.lidMapping.getLIDForPN.bind(signalRepository.lidMapping);
|
|
25
|
+
/** this mutex ensures that each retryRequest will wait for the previous one to finish */
|
|
26
|
+
const retryMutex = makeMutex();
|
|
27
|
+
const msgRetryCache = config.msgRetryCounterCache ||
|
|
28
|
+
new NodeCache({
|
|
29
|
+
stdTTL: DEFAULT_CACHE_TTLS.MSG_RETRY, // 1 hour
|
|
30
|
+
useClones: false
|
|
31
|
+
});
|
|
32
|
+
const callOfferCache = config.callOfferCache ||
|
|
33
|
+
new NodeCache({
|
|
34
|
+
stdTTL: DEFAULT_CACHE_TTLS.CALL_OFFER, // 5 mins
|
|
35
|
+
useClones: false
|
|
36
|
+
});
|
|
37
|
+
// Debounce identity-change session refreshes per JID to avoid bursts
|
|
38
|
+
const identityAssertDebounce = new NodeCache({ stdTTL: 5, useClones: false });
|
|
39
|
+
let sendActiveReceipts = false;
|
|
40
|
+
/**
 * Requests an on-demand history sync from the paired primary device via a
 * peer-data-operation message.
 * @param {number} count - number of messages to request
 * @param {object} oldestMsgKey - key (remoteJid/fromMe/id) of the oldest message already held
 * @param {number|Long} oldestMsgTimestamp - timestamp (ms) of that oldest message
 * @throws {Boom} when the socket is not authenticated yet
 * @returns result of sendPeerDataOperationMessage for the request
 */
const fetchMessageHistory = async (count, oldestMsgKey, oldestMsgTimestamp) => {
    // Peer data operations require a registered account.
    if (!authState.creds.me?.id) {
        throw new Boom('Not authenticated');
    }
    const { remoteJid, fromMe, id } = oldestMsgKey;
    const request = {
        historySyncOnDemandRequest: {
            chatJid: remoteJid,
            oldestMsgFromMe: fromMe,
            oldestMsgId: id,
            oldestMsgTimestampMs: oldestMsgTimestamp,
            onDemandMsgCount: count
        },
        peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.HISTORY_SYNC_ON_DEMAND
    };
    return sendPeerDataOperationMessage(request);
};
|
|
56
|
+
/**
 * Asks the primary device to resend a message (placeholder resend) via a
 * peer-data-operation message, deduplicating concurrent requests through
 * placeholderResendCache.
 * @param {object} messageKey - key of the message to have resent
 * @param {object} [msgData] - original message data kept so the PDO response
 *   handler can restore metadata the phone may omit
 * @throws {Boom} when the socket is not authenticated yet
 * @returns 'RESOLVED' if the message arrived during the grace period,
 *   undefined if a resend was already pending, otherwise the result of
 *   sendPeerDataOperationMessage
 */
const requestPlaceholderResend = async (messageKey, msgData) => {
    if (!authState.creds.me?.id) {
        throw new Boom('Not authenticated');
    }
    const cacheKey = messageKey?.id;
    if (await placeholderResendCache.get(cacheKey)) {
        logger.debug({ messageKey }, 'already requested resend');
        return;
    }
    // Store original message data so PDO response handler can preserve
    // metadata (LID details, timestamps, etc.) that the phone may omit
    await placeholderResendCache.set(cacheKey, msgData || true);
    // Short grace period: the missing message may still arrive on its own.
    await delay(2000);
    if (!(await placeholderResendCache.get(cacheKey))) {
        logger.debug({ messageKey }, 'message received while resend requested');
        return 'RESOLVED';
    }
    const resendRequest = {
        placeholderMessageResendRequest: [{ messageKey }],
        peerDataOperationRequestType: proto.Message.PeerDataOperationRequestType.PLACEHOLDER_MESSAGE_RESEND
    };
    // If no response arrives within 8s, drop the pending entry — the phone
    // is probably offline and we don't want the cache entry to linger.
    setTimeout(async () => {
        if (await placeholderResendCache.get(cacheKey)) {
            logger.debug({ messageKey }, 'PDO message without response after 8 seconds. Phone possibly offline');
            await placeholderResendCache.del(cacheKey);
        }
    }, 8000);
    return sendPeerDataOperationMessage(resendRequest);
};
|
|
90
|
+
/**
 * Dispatches a 'mex' (GraphQL-style) notification by its op_name.
 *
 * The modern format carries a JSON body inside an <update> child whose
 * attrs include op_name. Reachout-timelock and message-capping ops are
 * handled here directly; all newsletter ops (and anything without an
 * <update> child or op_name) fall back to the legacy <mex> handler.
 *
 * @param node the incoming binary notification node
 */
const handleMexNotification = async (node) => {
    const updateNode = getBinaryNodeChild(node, 'update');
    if (updateNode) {
        const opName = updateNode.attrs?.op_name;
        if (!opName) {
            logger.warn({ node: binaryNodeToString(node) }, 'mex notification missing op_name, fallback to legacy');
            await handleLegacyMexNewsletterNotification(node);
            return;
        }
        let mexResponse;
        try {
            // content is expected to be a JSON-encoded GQL response body
            mexResponse = JSON.parse(updateNode.content.toString());
        }
        catch (error) {
            logger.error({ err: error, opName }, 'failed to parse mex notification JSON');
            return;
        }
        // GQL-level errors mean the payload is not actionable
        if (mexResponse.errors?.length) {
            logger.warn({ errors: mexResponse.errors, opName }, 'mex notification has GQL errors');
            return;
        }
        const data = mexResponse.data;
        if (!data) {
            logger.warn({ opName }, 'mex notification has null data');
            return;
        }
        logger.debug({ opName }, 'processing mex notification');
        switch (opName) {
            case 'NotificationUserReachoutTimelockUpdate':
                handleReachoutTimelockNotification(data);
                break;
            case 'MessageCappingInfoNotification':
                handleMessageCappingNotification(data);
                break;
            // newsletter ops still use the legacy <mex> child structure
            case 'NotificationNewsletterUpdate':
            case 'NotificationLinkedProfilesUpdates':
            case 'NotificationNewsletterAdminPromote':
            case 'NotificationNewsletterAdminDemote':
            case 'NotificationNewsletterUserSettingChange':
            case 'NotificationNewsletterJoin':
            case 'NotificationNewsletterLeave':
            case 'NotificationNewsletterStateChange':
            case 'NotificationNewsletterAdminMetadataUpdate':
            case 'NotificationNewsletterOwnerUpdate':
            case 'NotificationNewsletterAdminInviteRevoke':
            case 'NotificationNewsletterWamoSubStatusChange':
            case 'NotificationNewsletterBlockUser':
            case 'NotificationNewsletterPaidPartnership':
            case 'NotificationNewsletterMilestone':
            case 'NewsletterResponseStateUpdate':
                await handleLegacyMexNewsletterNotification(node);
                break;
            default:
                logger.debug({ opName }, 'unhandled mex notification');
                break;
        }
        return;
    }
    // no <update> child at all: treat as a legacy-format notification
    await handleLegacyMexNewsletterNotification(node);
};
|
|
151
|
+
/**
 * Processes a reachout-timelock mex payload and broadcasts its state to
 * listeners through a 'connection.update' event.
 */
const handleReachoutTimelockNotification = (data) => {
    const timelock = data.xwa2_notify_account_reachout_timelock;
    if (!timelock) {
        logger.warn('reachout timelock notification missing payload');
        return;
    }
    if (!timelock.is_active) {
        // restriction cleared — announce an inactive lock with default enforcement
        logger.info('reachout timelock restriction lifted');
        ev.emit('connection.update', {
            reachoutTimeLock: {
                isActive: false,
                enforcementType: ReachoutTimelockEnforcementType.DEFAULT
            }
        });
        return;
    }
    // WA Web defaults to now+60s when the server omits the expiry
    let expiresAt;
    if (timelock.time_enforcement_ends) {
        expiresAt = new Date(parseInt(timelock.time_enforcement_ends, 10) * 1000);
    }
    else {
        expiresAt = new Date(Date.now() + 60000);
    }
    let kind = ReachoutTimelockEnforcementType.DEFAULT;
    if (isValidEnforcementType(timelock.enforcement_type)) {
        kind = timelock.enforcement_type;
    }
    logger.info({ enforcementType: kind, timeEnforcementEnds: expiresAt }, 'reachout timelock restriction set');
    ev.emit('connection.update', {
        reachoutTimeLock: {
            isActive: true,
            timeEnforcementEnds: expiresAt,
            enforcementType: kind
        }
    });
};
|
|
183
|
+
/**
 * Forwards a message-capping info update to listeners via the
 * 'message-capping.update' event.
 */
const handleMessageCappingNotification = (data) => {
    const cappingInfo = data.xwa2_notify_new_chat_messages_capping_info_update;
    if (!cappingInfo) {
        logger.warn('message capping notification missing payload');
        return;
    }
    logger.info({ payload: cappingInfo }, 'received message capping update');
    ev.emit('message-capping.update', cappingInfo);
};
|
|
192
|
+
/**
 * Handles legacy-format mex newsletter notifications.
 *
 * The JSON payload may live in a <mex> child, an <update> child, or the
 * first child of the node; the operation name may be in the JSON body or
 * in the payload node's op_name attribute. Emits newsletter settings /
 * participant / lid-mapping events depending on the operation.
 *
 * @param node the incoming binary notification node
 */
const handleLegacyMexNewsletterNotification = async (node) => {
    const mexNode = getBinaryNodeChild(node, 'mex');
    // prefer <mex> content; otherwise fall back to <update> or the first child
    const updateNode = mexNode?.content ? null : getBinaryNodeChild(node, 'update') || getAllBinaryNodeChildren(node)[0];
    const payloadNode = mexNode?.content ? mexNode : updateNode;
    if (!payloadNode?.content) {
        logger.warn({ node: binaryNodeToString(node) }, 'invalid mex newsletter notification');
        return;
    }
    let data;
    try {
        const payloadContent = payloadNode.content;
        // an array here means nested nodes rather than a serialized JSON body
        if (Array.isArray(payloadContent)) {
            logger.warn({ payloadNode }, 'invalid mex newsletter notification payload format');
            return;
        }
        const contentBuf = typeof payloadContent === 'string' ? Buffer.from(payloadContent, 'binary') : Buffer.from(payloadContent);
        data = JSON.parse(contentBuf.toString());
    }
    catch (error) {
        logger.error({ err: error, node: binaryNodeToString(node) }, 'failed to parse mex newsletter notification');
        return;
    }
    const operation = data?.operation ?? payloadNode?.attrs?.op_name;
    let updates = data?.updates;
    if (!updates) {
        // linked-profiles payloads carry a single object instead of an updates list
        const linkedProfiles = data?.data?.xwa2_notify_linked_profiles;
        if (linkedProfiles) {
            updates = [linkedProfiles];
        }
    }
    if (!updates || !operation) {
        logger.warn({ data }, 'invalid mex newsletter notification content');
        return;
    }
    logger.info({ operation, updates }, 'got mex newsletter notification');
    switch (operation) {
        case 'NotificationNewsletterUpdate':
            // only emit when there is at least one changed setting
            for (const update of updates) {
                if (update.jid && update.settings && Object.keys(update.settings).length > 0) {
                    ev.emit('newsletter-settings.update', {
                        id: update.jid,
                        update: update.settings
                    });
                }
            }
            break;
        case 'NotificationNewsletterAdminPromote':
            for (const update of updates) {
                if (update.jid && update.user) {
                    ev.emit('newsletter-participants.update', {
                        id: update.jid,
                        author: node.attrs.from,
                        user: update.user,
                        new_role: 'ADMIN',
                        action: 'promote'
                    });
                }
            }
            break;
        case 'NotificationLinkedProfilesUpdates':
            for (const update of updates) {
                const lid = update?.jid;
                const addedProfiles = Array.isArray(update?.added_profiles) ? update.added_profiles : [];
                const mappings = [];
                for (const profile of addedProfiles) {
                    // a profile entry is either a bare PN string or an object
                    // with a pn/jid field
                    const pn = typeof profile === 'string' ? profile : (profile?.pn ?? profile?.jid ?? null);
                    if (lid && pn) {
                        const mapping = { lid, pn };
                        ev.emit('lid-mapping.update', mapping);
                        mappings.push(mapping);
                    }
                }
                await signalRepository.lidMapping.storeLIDPNMappings(mappings);
            }
            break;
        default:
            logger.info({ operation, data }, 'unhandled mex newsletter notification');
            break;
    }
};
|
|
272
|
+
/**
 * Handles newsletter notifications: iterates the node's children and emits
 * reaction / view / participant / settings events, and upserts plaintext
 * newsletter messages into the store.
 *
 * @param node the incoming binary notification node from a newsletter jid
 */
const handleNewsletterNotification = async (node) => {
    const from = node.attrs.from;
    const children = getAllBinaryNodeChildren(node);
    const author = node.attrs.participant;
    for (const child of children) {
        logger.debug({ from, child }, 'got newsletter notification');
        switch (child.tag) {
            case 'reaction': {
                const reactionUpdate = {
                    id: from,
                    server_id: child.attrs.message_id,
                    reaction: {
                        // NOTE(review): code may be undefined when the <reaction>
                        // child has no content (e.g. a removed reaction) — confirm
                        code: getBinaryNodeChildString(child, 'reaction'),
                        count: 1
                    }
                };
                ev.emit('newsletter.reaction', reactionUpdate);
                break;
            }
            case 'view': {
                const viewUpdate = {
                    id: from,
                    server_id: child.attrs.message_id,
                    count: parseInt(child.content?.toString() || '0', 10)
                };
                ev.emit('newsletter.view', viewUpdate);
                break;
            }
            case 'participant': {
                const participantUpdate = {
                    id: from,
                    author,
                    user: child.attrs.jid,
                    action: child.attrs.action,
                    new_role: child.attrs.role
                };
                ev.emit('newsletter-participants.update', participantUpdate);
                break;
            }
            case 'update': {
                // only name/description changes are extracted here
                const settingsNode = getBinaryNodeChild(child, 'settings');
                if (settingsNode) {
                    const update = {};
                    const nameNode = getBinaryNodeChild(settingsNode, 'name');
                    if (nameNode?.content)
                        update.name = nameNode.content.toString();
                    const descriptionNode = getBinaryNodeChild(settingsNode, 'description');
                    if (descriptionNode?.content)
                        update.description = descriptionNode.content.toString();
                    ev.emit('newsletter-settings.update', {
                        id: from,
                        update
                    });
                }
                break;
            }
            case 'message': {
                // newsletter messages arrive unencrypted in a <plaintext> child
                const plaintextNode = getBinaryNodeChild(child, 'plaintext');
                if (plaintextNode?.content) {
                    try {
                        const contentBuf = typeof plaintextNode.content === 'string'
                            ? Buffer.from(plaintextNode.content, 'binary')
                            : Buffer.from(plaintextNode.content);
                        const messageProto = proto.Message.decode(contentBuf).toJSON();
                        const fullMessage = proto.WebMessageInfo.fromObject({
                            key: {
                                remoteJid: from,
                                id: child.attrs.message_id || child.attrs.server_id,
                                fromMe: false // TODO: is this really true though
                            },
                            message: messageProto,
                            messageTimestamp: +child.attrs.t
                        }).toJSON();
                        await upsertMessage(fullMessage, 'append');
                        logger.debug('Processed plaintext newsletter message');
                    }
                    catch (error) {
                        logger.error({ error }, 'Failed to decode plaintext newsletter message');
                    }
                }
                break;
            }
            default:
                logger.warn({ node, child }, 'Unknown newsletter notification child');
                break;
        }
    }
};
|
|
361
|
+
/**
 * Acknowledges a received stanza by sending the corresponding ack node,
 * optionally tagged with an error code.
 */
const sendMessageAck = async (node, errorCode) => {
    const ack = buildAckStanza(node, errorCode, authState.creds.me.id);
    logger.debug({ recv: { tag: node.tag, attrs: node.attrs }, sent: ack.attrs }, 'sent ack');
    await sendNode(ack);
};
|
|
366
|
+
/**
 * Declines an incoming call by sending a <call><reject/></call> stanza
 * back to the caller.
 */
const rejectCall = async (callId, callFrom) => {
    const rejectChild = {
        tag: 'reject',
        attrs: {
            'call-id': callId,
            'call-creator': callFrom,
            count: '0'
        },
        content: undefined
    };
    const callStanza = {
        tag: 'call',
        attrs: {
            from: authState.creds.me.id,
            to: callFrom
        },
        content: [rejectChild]
    };
    await query(callStanza);
};
|
|
387
|
+
/**
 * Sends a retry receipt for a message that failed to decrypt.
 *
 * Tracks the retry count either via messageRetryManager (new system) or the
 * msgRetryCache (fallback), optionally recreates the Signal session with the
 * sender, asks the phone for a placeholder resend on early retries, and
 * finally sends a <receipt type="retry"> — including our pre-key bundle when
 * the peer likely needs a fresh session.
 *
 * @param node the undecryptable message stanza
 * @param forceIncludeKeys when true, always attach the key bundle to the receipt
 */
const sendRetryRequest = async (node, forceIncludeKeys = false) => {
    const { fullMessage } = decodeMessageNode(node, authState.creds.me.id, authState.creds.me.lid || '');
    const { key: msgKey } = fullMessage;
    const msgId = msgKey.id;
    if (messageRetryManager) {
        // Check if we've exceeded max retries using the new system
        if (messageRetryManager.hasExceededMaxRetries(msgId)) {
            logger.debug({ msgId }, 'reached retry limit with new retry manager, clearing');
            messageRetryManager.markRetryFailed(msgId);
            return;
        }
        // Increment retry count using new system
        const retryCount = messageRetryManager.incrementRetryCount(msgId);
        // Use the new retry count for the rest of the logic
        // (mirrored into msgRetryCache so the shared read below works)
        const key = `${msgId}:${msgKey?.participant}`;
        await msgRetryCache.set(key, retryCount);
    }
    else {
        // Fallback to old system
        const key = `${msgId}:${msgKey?.participant}`;
        let retryCount = (await msgRetryCache.get(key)) || 0;
        if (retryCount >= maxMsgRetryCount) {
            logger.debug({ retryCount, msgId }, 'reached retry limit, clearing');
            await msgRetryCache.del(key);
            return;
        }
        retryCount += 1;
        await msgRetryCache.set(key, retryCount);
    }
    // re-read the count both branches just wrote, so the rest of the
    // function is agnostic of which retry system is active
    const key = `${msgId}:${msgKey?.participant}`;
    const retryCount = (await msgRetryCache.get(key)) || 1;
    const { account, signedPreKey, signedIdentityKey: identityKey } = authState.creds;
    const fromJid = node.attrs.from;
    // Check if we should recreate the session
    let shouldRecreateSession = false;
    let recreateReason = '';
    if (enableAutoSessionRecreation && messageRetryManager && retryCount > 1) {
        try {
            // Check if we have a session with this JID
            const sessionId = signalRepository.jidToSignalProtocolAddress(fromJid);
            const hasSession = await signalRepository.validateSession(fromJid);
            const result = messageRetryManager.shouldRecreateSession(fromJid, hasSession.exists);
            shouldRecreateSession = result.recreate;
            recreateReason = result.reason;
            if (shouldRecreateSession) {
                logger.debug({ fromJid, retryCount, reason: recreateReason }, 'recreating session for retry');
                // Delete existing session to force recreation
                await authState.keys.set({ session: { [sessionId]: null } });
                forceIncludeKeys = true;
            }
        }
        catch (error) {
            // best-effort: failing the recreation check must not block the receipt
            logger.warn({ error, fromJid }, 'failed to check session recreation');
        }
    }
    if (retryCount <= 2) {
        // Use new retry manager for phone requests if available
        if (messageRetryManager) {
            // Schedule phone request with delay (like whatsmeow)
            messageRetryManager.schedulePhoneRequest(msgId, async () => {
                try {
                    const requestId = await requestPlaceholderResend(msgKey);
                    logger.debug(`sendRetryRequest: requested placeholder resend (${requestId}) for message ${msgId} (scheduled)`);
                }
                catch (error) {
                    logger.warn({ error, msgId }, 'failed to send scheduled phone request');
                }
            });
        }
        else {
            // Fallback to immediate request
            // NOTE(review): this `msgId` shadows the outer message id with the
            // requestPlaceholderResend() result, so the log line below prints
            // the request result, not the message id — confirm intent
            const msgId = await requestPlaceholderResend(msgKey);
            logger.debug(`sendRetryRequest: requested placeholder resend for message ${msgId}`);
        }
    }
    const deviceIdentity = encodeSignedDeviceIdentity(account, true);
    // run key consumption + receipt send atomically against the key store
    await authState.keys.transaction(async () => {
        const receipt = {
            tag: 'receipt',
            attrs: {
                id: msgId,
                type: 'retry',
                to: node.attrs.from
            },
            content: [
                {
                    tag: 'retry',
                    attrs: {
                        count: retryCount.toString(),
                        id: node.attrs.id,
                        t: node.attrs.t,
                        v: '1',
                        // ADD ERROR FIELD
                        error: '0'
                    }
                },
                {
                    tag: 'registration',
                    attrs: {},
                    content: encodeBigEndian(authState.creds.registrationId)
                }
            ]
        };
        if (node.attrs.recipient) {
            receipt.attrs.recipient = node.attrs.recipient;
        }
        if (node.attrs.participant) {
            receipt.attrs.participant = node.attrs.participant;
        }
        // after the first retry (or when forced) attach a fresh pre-key bundle
        // so the sender can build a brand-new session with us
        if (retryCount > 1 || forceIncludeKeys || shouldRecreateSession) {
            const { update, preKeys } = await getNextPreKeys(authState, 1);
            const [keyId] = Object.keys(preKeys);
            const key = preKeys[+keyId];
            const content = receipt.content;
            content.push({
                tag: 'keys',
                attrs: {},
                content: [
                    { tag: 'type', attrs: {}, content: Buffer.from(KEY_BUNDLE_TYPE) },
                    { tag: 'identity', attrs: {}, content: identityKey.public },
                    xmppPreKey(key, +keyId),
                    xmppSignedPreKey(signedPreKey),
                    { tag: 'device-identity', attrs: {}, content: deviceIdentity }
                ]
            });
            // persist the consumed pre-key ids
            ev.emit('creds.update', update);
        }
        await sendNode(receipt);
        logger.info({ msgAttrs: node.attrs, retryCount }, 'sent retry receipt');
    }, authState?.creds?.me?.id || 'sendRetryRequest');
};
|
|
518
|
+
// Mirrors WAWeb/Handle/PreKeyLow.js: skip a re-issued notification with the same stanza id.
// Holds stanza ids of pre-key-low notifications whose upload is still running.
const inFlightPreKeyLow = new Set();
|
|
520
|
+
/**
 * Fire-and-forget tctoken re-issuance after a peer's device identity changed.
 * Mirrors WAWebSendTcTokenWhenDeviceIdentityChange — runs in parallel with
 * the session refresh (not after it).
 *
 * @param from jid of the peer whose identity changed; errors are logged at
 *   debug level and never propagate to the caller
 */
const reissueTcTokenAfterIdentityChange = (from) => {
    void (async () => {
        const normalizedJid = jidNormalizedUser(from);
        const tcJid = await resolveTcTokenJid(normalizedJid, getLIDForPN);
        const tcTokenData = await authState.keys.get('tctoken', [tcJid]);
        const senderTs = tcTokenData?.[tcJid]?.senderTimestamp;
        // only re-issue when a current (non-expired) token exists for this jid
        // — presumably expired/absent tokens are issued through another path;
        // TODO confirm
        if (senderTs === null || senderTs === undefined || isTcTokenExpired(senderTs)) {
            return;
        }
        logger.debug({ jid: normalizedJid, senderTimestamp: senderTs }, 'identity changed, re-issuing tctoken');
        // bind so the mapping lookup keeps its repository `this`
        const getPNForLID = signalRepository.lidMapping.getPNForLID.bind(signalRepository.lidMapping);
        const issueJid = await resolveIssuanceJid(normalizedJid, sock.serverProps.lidTrustedTokenIssueToLid, getLIDForPN, getPNForLID);
        const result = await issuePrivacyTokens([issueJid], senderTs);
        await storeTcTokensFromIqResult({
            result,
            fallbackJid: tcJid,
            keys: authState.keys,
            getLIDForPN,
            onNewJidStored: trackTcTokenJid
        });
    })().catch(err => {
        logger.debug({ jid: from, err: err?.message }, 'failed to re-issue tctoken after identity change');
    });
};
|
|
549
|
+
/**
 * Handles an 'encrypt' notification.
 *
 * From the server (S_WHATSAPP_NET) this is a pre-key count report: when the
 * count drops below MIN_PREKEY_COUNT, more pre-keys are uploaded. The
 * inFlightPreKeyLow set de-dupes re-issued notifications by stanza id so a
 * slow upload isn't started twice.
 *
 * From a peer this signals a device-identity change and is delegated to
 * handleIdentityChange, which refreshes sessions and (in parallel)
 * re-issues the tctoken.
 *
 * @param node the incoming binary notification node
 */
const handleEncryptNotification = async (node) => {
    const from = node.attrs.from;
    if (from === S_WHATSAPP_NET) {
        const stanzaId = node.attrs.id;
        if (stanzaId && inFlightPreKeyLow.has(stanzaId)) {
            // same stanza re-delivered while an upload is still running
            return;
        }
        const countChild = getBinaryNodeChild(node, 'count');
        // Guard against a malformed notification: without this check a
        // missing <count> child would throw a TypeError inside the handler.
        if (!countChild?.attrs?.value) {
            logger.warn({ node }, 'encrypt notification missing count, skipping');
            return;
        }
        const count = +countChild.attrs.value;
        const shouldUploadMorePreKeys = count < MIN_PREKEY_COUNT;
        logger.debug({ count, shouldUploadMorePreKeys }, 'recv pre-key count');
        if (shouldUploadMorePreKeys) {
            if (stanzaId)
                inFlightPreKeyLow.add(stanzaId);
            try {
                await uploadPreKeys();
            }
            finally {
                // always clear the in-flight marker, even if the upload failed
                if (stanzaId)
                    inFlightPreKeyLow.delete(stanzaId);
            }
        }
    }
    else {
        const result = await handleIdentityChange(node, {
            meId: authState.creds.me?.id,
            meLid: authState.creds.me?.lid,
            // bind so validateSession keeps its repository `this`, matching
            // how lidMapping methods are bound elsewhere in this file
            validateSession: signalRepository.validateSession.bind(signalRepository),
            assertSessions,
            debounceCache: identityAssertDebounce,
            logger,
            onBeforeSessionRefresh: reissueTcTokenAfterIdentityChange
        });
        if (result.action === 'no_identity_node') {
            logger.info({ node }, 'unknown encrypt notification');
        }
    }
};
|
|
587
|
+
/**
 * Translates a group (w:gp2) notification child into message-stub fields on
 * `msg` (mutated in place) and emits chats/groups upsert events for group
 * creation.
 *
 * @param fullNode the whole notification node (acting participant attrs)
 * @param child the specific change node (create/add/subject/...)
 * @param msg the stub message object to populate (mutated)
 */
const handleGroupNotification = (fullNode, child, msg) => {
    // TODO: Support PN/LID (Here is only LID now)
    const actingParticipantLid = fullNode.attrs.participant;
    const actingParticipantPn = fullNode.attrs.participant_pn;
    const actingParticipantUsername = fullNode.attrs.participant_username;
    // the affected participant falls back to the acting one when the child
    // carries no <participant> of its own
    const affectedParticipantLid = getBinaryNodeChild(child, 'participant')?.attrs?.jid || actingParticipantLid;
    const affectedParticipantPn = getBinaryNodeChild(child, 'participant')?.attrs?.phone_number || actingParticipantPn;
    switch (child?.tag) {
        case 'create':
            const metadata = extractGroupMetadata(child);
            msg.messageStubType = WAMessageStubType.GROUP_CREATE;
            msg.messageStubParameters = [metadata.subject];
            msg.key = { participant: metadata.owner, participantAlt: metadata.ownerPn };
            ev.emit('chats.upsert', [
                {
                    id: metadata.id,
                    name: metadata.subject,
                    conversationTimestamp: metadata.creation
                }
            ]);
            ev.emit('groups.upsert', [
                {
                    ...metadata,
                    author: actingParticipantLid,
                    authorPn: actingParticipantPn,
                    authorUsername: actingParticipantUsername
                }
            ]);
            break;
        case 'ephemeral':
        case 'not_ephemeral':
            // disappearing-messages setting change; 0 expiration disables it
            msg.message = {
                protocolMessage: {
                    type: proto.Message.ProtocolMessage.Type.EPHEMERAL_SETTING,
                    ephemeralExpiration: +(child.attrs.expiration || 0)
                }
            };
            break;
        case 'modify':
            const oldNumber = getBinaryNodeChildren(child, 'participant').map(p => p.attrs.jid);
            msg.messageStubParameters = oldNumber || [];
            msg.messageStubType = WAMessageStubType.GROUP_PARTICIPANT_CHANGE_NUMBER;
            break;
        case 'promote':
        case 'demote':
        case 'remove':
        case 'add':
        case 'leave':
            // tag maps 1:1 onto a GROUP_PARTICIPANT_* stub type
            const stubType = `GROUP_PARTICIPANT_${child.tag.toUpperCase()}`;
            msg.messageStubType = WAMessageStubType[stubType];
            const participants = getBinaryNodeChildren(child, 'participant').map(({ attrs }) => {
                // TODO: Store LID MAPPINGS
                return {
                    id: attrs.jid,
                    phoneNumber: isLidUser(attrs.jid) && isPnUser(attrs.phone_number) ? attrs.phone_number : undefined,
                    lid: isPnUser(attrs.jid) && isLidUser(attrs.lid) ? attrs.lid : undefined,
                    username: attrs.participant_username || attrs.username || undefined,
                    admin: (attrs.type || null)
                };
            });
            if (participants.length === 1 &&
                // if recv. "remove" message and sender removed themselves
                // mark as left
                (areJidsSameUser(participants[0].id, actingParticipantLid) ||
                    areJidsSameUser(participants[0].id, actingParticipantPn)) &&
                child.tag === 'remove') {
                msg.messageStubType = WAMessageStubType.GROUP_PARTICIPANT_LEAVE;
            }
            msg.messageStubParameters = participants.map(a => JSON.stringify(a));
            break;
        case 'subject':
            msg.messageStubType = WAMessageStubType.GROUP_CHANGE_SUBJECT;
            msg.messageStubParameters = [child.attrs.subject];
            break;
        case 'description':
            const description = getBinaryNodeChild(child, 'body')?.content?.toString();
            msg.messageStubType = WAMessageStubType.GROUP_CHANGE_DESCRIPTION;
            msg.messageStubParameters = description ? [description] : undefined;
            break;
        case 'announcement':
        case 'not_announcement':
            msg.messageStubType = WAMessageStubType.GROUP_CHANGE_ANNOUNCE;
            msg.messageStubParameters = [child.tag === 'announcement' ? 'on' : 'off'];
            break;
        case 'locked':
        case 'unlocked':
            msg.messageStubType = WAMessageStubType.GROUP_CHANGE_RESTRICT;
            msg.messageStubParameters = [child.tag === 'locked' ? 'on' : 'off'];
            break;
        case 'invite':
            msg.messageStubType = WAMessageStubType.GROUP_CHANGE_INVITE_LINK;
            msg.messageStubParameters = [child.attrs.code];
            break;
        case 'member_add_mode':
            const addMode = child.content;
            if (addMode) {
                msg.messageStubType = WAMessageStubType.GROUP_MEMBER_ADD_MODE;
                msg.messageStubParameters = [addMode.toString()];
            }
            break;
        case 'membership_approval_mode':
            const approvalMode = getBinaryNodeChild(child, 'group_join');
            if (approvalMode) {
                msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_MODE;
                msg.messageStubParameters = [approvalMode.attrs.state];
            }
            break;
        case 'created_membership_requests':
            msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD;
            msg.messageStubParameters = [
                JSON.stringify({ lid: affectedParticipantLid, pn: affectedParticipantPn }),
                'created',
                child.attrs.request_method
            ];
            break;
        case 'revoked_membership_requests':
            // requester revoking their own request vs an admin rejecting it
            const isDenied = areJidsSameUser(affectedParticipantLid, actingParticipantLid);
            // TODO: LIDMAPPING SUPPORT
            msg.messageStubType = WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD;
            msg.messageStubParameters = [
                JSON.stringify({ lid: affectedParticipantLid, pn: affectedParticipantPn }),
                isDenied ? 'revoked' : 'rejected'
            ];
            break;
    }
};
|
|
713
|
+
/**
 * Handles a 'devices' notification: applies device-list add/remove/update
 * deltas for one user to the local userDevicesCache, and deletes Signal
 * sessions for removed devices. All cache mutation is serialized behind
 * devicesMutex.
 *
 * @param node the incoming binary notification node
 */
const handleDevicesNotification = async (node) => {
    const [child] = getAllBinaryNodeChildren(node);
    const from = jidNormalizedUser(node.attrs.from);
    if (!child) {
        logger.debug({ from }, 'devices notification missing child, skipping');
        return;
    }
    // tag is the delta kind: 'add', 'remove', or 'update'
    const tag = child.tag;
    const deviceHash = child.attrs.device_hash;
    const devices = getBinaryNodeChildren(child, 'device');
    if (areJidsSameUser(from, authState.creds.me.id) || areJidsSameUser(from, authState.creds.me.lid)) {
        const deviceJids = devices.map(d => d.attrs.jid);
        logger.info({ deviceJids }, 'got my own devices');
    }
    if (!devices.length) {
        logger.debug({ from, tag }, 'no devices in notification, skipping');
        return;
    }
    // decode each device jid up front; undecodable entries are dropped
    const decoded = [];
    for (const d of devices) {
        const jid = d.attrs.jid;
        if (!jid)
            continue;
        const parts = jidDecode(jid);
        if (!parts) {
            logger.debug({ jid }, 'failed to decode device jid, skipping');
            continue;
        }
        decoded.push({ jid, user: parts.user, server: parts.server, device: parts.device });
    }
    if (!decoded.length)
        return;
    await devicesMutex.mutex(async () => {
        // group the delta entries per user so each cache entry is touched once
        const byUser = new Map();
        for (const d of decoded) {
            const list = byUser.get(d.user) || [];
            list.push(d);
            byUser.set(d.user, list);
        }
        for (const [user, entries] of byUser) {
            if (tag === 'update') {
                // 'update' carries no usable delta — just invalidate the cache
                logger.debug({ user }, `${user}'s device list updated, dropping cached devices`);
                await userDevicesCache?.del(user);
                continue;
            }
            if (tag === 'remove') {
                // sessions are deleted even when no cached baseline exists below
                await signalRepository.deleteSession(entries.map(e => e.jid));
            }
            const existingCache = (await userDevicesCache?.get(user)) || [];
            if (!existingCache.length) {
                // No baseline yet; skip applying the delta so getUSyncDevices can
                // later fetch the full device list. Caching just the notification
                // entries would make a partial list look authoritative.
                logger.debug({ user, tag }, 'device list not cached, deferring to USync refresh');
                continue;
            }
            const affected = new Set(entries.map(e => e.device));
            let updatedDevices;
            switch (tag) {
                case 'add':
                    logger.info({ deviceHash, count: entries.length }, 'devices added');
                    // replace any stale entries for the affected device ids
                    updatedDevices = [
                        ...existingCache.filter(d => !affected.has(d.device)),
                        ...entries.map(e => ({ user: e.user, server: e.server, device: e.device }))
                    ];
                    break;
                case 'remove':
                    logger.info({ deviceHash, count: entries.length }, 'devices removed');
                    updatedDevices = existingCache.filter(d => !affected.has(d.device));
                    break;
                default:
                    logger.debug({ tag }, 'Unknown device list change tag');
                    continue;
            }
            // empty result means the cached list is useless — drop it entirely
            if (updatedDevices.length === 0) {
                await userDevicesCache?.del(user);
            }
            else {
                await userDevicesCache?.set(user, updatedDevices);
            }
        }
    });
};
|
|
796
|
+
/**
 * Dispatch an incoming `notification` stanza to the handler matching
 * `node.attrs.type`, accumulating chat-visible stub fields (messageStubType,
 * messageStubParameters, participant, key) into `result`.
 *
 * @param node the full notification binary node
 * @returns the populated `result` object when any branch added fields to it
 *          (caller upserts it as a stub message), otherwise `undefined`.
 *
 * NOTE(review): the `const` declarations inside unbraced `case` blocks
 * (`event`, `update`, `setPicture`, …) are scoped to the whole switch; this
 * works here only because no two cases reuse a name — confirm when adding cases.
 */
const processNotification = async (node) => {
    const result = {};
    // most notification types carry exactly one relevant child node
    const [child] = getAllBinaryNodeChildren(node);
    const nodeType = node.attrs.type;
    const from = jidNormalizedUser(node.attrs.from);
    switch (nodeType) {
        case 'newsletter':
            await handleNewsletterNotification(node);
            break;
        case 'mex':
            await handleMexNotification(node);
            break;
        case 'w:gp2':
            // group (w:gp2 namespace) change — may populate `result` with stub info
            // TODO: HANDLE PARTICIPANT_PN
            handleGroupNotification(node, child, result);
            break;
        case 'mediaretry':
            const event = decodeMediaRetryNode(node);
            ev.emit('messages.media-update', [event]);
            break;
        case 'encrypt':
            await handleEncryptNotification(node);
            break;
        case 'devices':
            // device-list changes are best-effort: log and continue on failure
            try {
                await handleDevicesNotification(node);
            }
            catch (error) {
                logger.error({ error, node }, 'failed to handle devices notification');
            }
            break;
        case 'server_sync':
            // server requests a resync of a single app-state collection
            const update = getBinaryNodeChild(node, 'collection');
            if (update) {
                const name = update.attrs.name;
                await resyncAppState([name], false);
            }
            break;
        case 'picture':
            const setPicture = getBinaryNodeChild(node, 'set');
            const delPicture = getBinaryNodeChild(node, 'delete');
            // TODO: WAJIDHASH stuff proper support inhouse
            ev.emit('contacts.update', [
                {
                    id: jidNormalizedUser(node?.attrs?.from) || (setPicture || delPicture)?.attrs?.hash || '',
                    imgUrl: setPicture ? 'changed' : 'removed'
                }
            ]);
            if (isJidGroup(from)) {
                // NOTE(review): shadows the outer `node` parameter within this block
                const node = setPicture || delPicture;
                result.messageStubType = WAMessageStubType.GROUP_CHANGE_ICON;
                if (setPicture) {
                    result.messageStubParameters = [setPicture.attrs.id];
                }
                result.participant = node?.attrs.author;
                result.key = {
                    ...(result.key || {}),
                    participant: setPicture?.attrs.author
                };
            }
            break;
        case 'account_sync':
            if (child.tag === 'disappearing_mode') {
                // account-wide default disappearing-messages duration changed
                const newDuration = +child.attrs.duration;
                const timestamp = +child.attrs.t;
                logger.info({ newDuration }, 'updated account disappearing mode');
                ev.emit('creds.update', {
                    accountSettings: {
                        ...authState.creds.accountSettings,
                        defaultDisappearingMode: {
                            ephemeralExpiration: newDuration,
                            ephemeralSettingTimestamp: timestamp
                        }
                    }
                });
            }
            else if (child.tag === 'blocklist') {
                // emit one blocklist.update per item so action type is preserved
                const blocklists = getBinaryNodeChildren(child, 'item');
                for (const { attrs } of blocklists) {
                    const blocklist = [attrs.jid];
                    const type = attrs.action === 'block' ? 'add' : 'remove';
                    ev.emit('blocklist.update', { blocklist, type });
                }
            }
            break;
        case 'link_code_companion_reg':
            // Link-code pairing finish: derive shared secrets with the primary,
            // encrypt our key bundle, reply with stage 'companion_finish', then
            // mark the credentials as registered.
            const linkCodeCompanionReg = getBinaryNodeChild(node, 'link_code_companion_reg');
            const ref = toRequiredBuffer(getBinaryNodeChildBuffer(linkCodeCompanionReg, 'link_code_pairing_ref'));
            const primaryIdentityPublicKey = toRequiredBuffer(getBinaryNodeChildBuffer(linkCodeCompanionReg, 'primary_identity_pub'));
            const primaryEphemeralPublicKeyWrapped = toRequiredBuffer(getBinaryNodeChildBuffer(linkCodeCompanionReg, 'link_code_pairing_wrapped_primary_ephemeral_pub'));
            // unwrap the primary's ephemeral key using the pairing code
            const codePairingPublicKey = await decipherLinkPublicKey(primaryEphemeralPublicKeyWrapped);
            const companionSharedKey = Curve.sharedKey(authState.creds.pairingEphemeralKeyPair.private, codePairingPublicKey);
            const random = randomBytes(32);
            const linkCodeSalt = randomBytes(32);
            const linkCodePairingExpanded = hkdf(companionSharedKey, 32, {
                salt: linkCodeSalt,
                info: 'link_code_pairing_key_bundle_encryption_key'
            });
            const encryptPayload = Buffer.concat([
                Buffer.from(authState.creds.signedIdentityKey.public),
                primaryIdentityPublicKey,
                random
            ]);
            const encryptIv = randomBytes(12);
            const encrypted = aesEncryptGCM(encryptPayload, linkCodePairingExpanded, encryptIv, Buffer.alloc(0));
            // wire format: salt || iv || ciphertext
            const encryptedPayload = Buffer.concat([linkCodeSalt, encryptIv, encrypted]);
            const identitySharedKey = Curve.sharedKey(authState.creds.signedIdentityKey.private, primaryIdentityPublicKey);
            const identityPayload = Buffer.concat([companionSharedKey, identitySharedKey, random]);
            // derive the adv secret used for future device verification
            authState.creds.advSecretKey = Buffer.from(hkdf(identityPayload, 32, { info: 'adv_secret' })).toString('base64');
            await query({
                tag: 'iq',
                attrs: {
                    to: S_WHATSAPP_NET,
                    type: 'set',
                    id: sock.generateMessageTag(),
                    xmlns: 'md'
                },
                content: [
                    {
                        tag: 'link_code_companion_reg',
                        attrs: {
                            jid: authState.creds.me.id,
                            stage: 'companion_finish'
                        },
                        content: [
                            {
                                tag: 'link_code_pairing_wrapped_key_bundle',
                                attrs: {},
                                content: encryptedPayload
                            },
                            {
                                tag: 'companion_identity_public',
                                attrs: {},
                                content: authState.creds.signedIdentityKey.public
                            },
                            {
                                tag: 'link_code_pairing_ref',
                                attrs: {},
                                content: ref
                            }
                        ]
                    }
                ]
            });
            authState.creds.registered = true;
            ev.emit('creds.update', authState.creds);
            break;
        case 'privacy_token':
            await handlePrivacyTokenNotification(node);
            break;
    }
    // only return a value when some branch produced stub content
    if (Object.keys(result).length) {
        return result;
    }
};
|
|
951
|
+
/**
 * In-memory cache of storage JIDs with stored tctokens, seeded from the persisted index.
 * Used to coalesce writes during a session; pruning always re-reads the persisted index
 * to cover writes made by other layers (e.g. history sync).
 */
const tcTokenKnownJids = new Set();
// Eagerly seed the set from the persisted index. The promise is kept in
// `tcTokenIndexLoaded` — presumably awaited elsewhere before relying on the
// set being populated (TODO confirm). Load failures are logged and tolerated;
// the set then simply starts empty.
const tcTokenIndexLoaded = (async () => {
    try {
        const jids = await readTcTokenIndex(authState.keys);
        for (const jid of jids)
            tcTokenKnownJids.add(jid);
        logger.debug({ count: tcTokenKnownJids.size }, 'loaded tctoken index');
    }
    catch (err) {
        logger.warn({ err: err?.message }, 'failed to load tctoken index');
    }
})();
// Debounce timer handle used by scheduleTcTokenIndexSave(); undefined when no save is pending.
let tcTokenIndexTimer;
|
|
969
|
+
/**
 * Persist the in-memory tctoken JID index right away, cancelling any pending
 * debounced save first. The payload is merged with what is already persisted
 * so writes from other paths (history sync, concurrent sessions on the same
 * store) are not clobbered.
 *
 * @returns the promise of the underlying key-store write
 */
async function flushTcTokenIndex() {
    const pendingTimer = tcTokenIndexTimer;
    if (pendingTimer) {
        clearTimeout(pendingTimer);
        tcTokenIndexTimer = undefined;
    }
    const mergedWrite = await buildMergedTcTokenIndexWrite(authState.keys, tcTokenKnownJids);
    return authState.keys.set({ tctoken: mergedWrite });
}
|
|
979
|
+
/**
 * Debounce persistence of the tctoken index: (re)arm a 5-second timer; when it
 * fires, clear the handle and flush, logging (but swallowing) any failure so a
 * bad write never crashes the socket.
 */
function scheduleTcTokenIndexSave() {
    if (tcTokenIndexTimer !== undefined) {
        clearTimeout(tcTokenIndexTimer);
    }
    const onTimeout = () => {
        tcTokenIndexTimer = undefined;
        flushTcTokenIndex().catch(err => logger.warn({ err: err?.message }, 'failed to save tctoken index'));
    };
    tcTokenIndexTimer = setTimeout(onTimeout, 5000);
}
|
|
990
|
+
/**
 * Record a JID as having a stored tctoken and schedule a debounced index save.
 * Ignores empty JIDs, the reserved index key itself, and JIDs already tracked.
 *
 * @param jid storage JID that just had a tctoken written for it
 */
function trackTcTokenJid(jid) {
    if (!jid || jid === TC_TOKEN_INDEX_KEY || tcTokenKnownJids.has(jid)) {
        return;
    }
    tcTokenKnownJids.add(jid);
    scheduleTcTokenIndexSave();
}
|
|
996
|
+
/**
 * Handle a `privacy_token` notification: when a `tokens` child is present,
 * persist the tctokens it carries. The storage key prefers the sender's LID
 * (from the `sender_lid` attribute) and otherwise resolves one from the
 * normalized `from` JID.
 *
 * @param node the notification binary node
 */
const handlePrivacyTokenNotification = async (node) => {
    const tokensNode = getBinaryNodeChild(node, 'tokens');
    if (!tokensNode)
        return;
    const from = jidNormalizedUser(node.attrs.from);
    // WA Web uses: senderLid ?? toLid(from) for the storage key
    // The sender_lid attribute provides the LID directly when available
    const senderLid = node.attrs.sender_lid && isLidUser(jidNormalizedUser(node.attrs.sender_lid))
        ? jidNormalizedUser(node.attrs.sender_lid)
        : undefined;
    const fallbackJid = senderLid ?? (await resolveTcTokenJid(from, getLIDForPN));
    logger.debug({ from, storageJid: fallbackJid }, 'processing privacy token notification');
    await storeTcTokensFromIqResult({
        result: node,
        fallbackJid,
        keys: authState.keys,
        getLIDForPN,
        // keep the in-memory tctoken index in sync with newly stored JIDs
        onNewJidStored: trackTcTokenJid
    });
};
|
|
1016
|
+
/**
 * Unwrap the primary device's ephemeral public key from a link-code pairing
 * blob. Wire layout: bytes [0,32) salt, [32,48) AES-CTR IV, [48,80) wrapped
 * key. The AES key is derived from the stored pairing code and the salt.
 *
 * @param data raw wrapped blob (Buffer or buffer-like; must be present)
 * @returns the decrypted 32-byte public key
 */
async function decipherLinkPublicKey(data) {
    const wrapped = toRequiredBuffer(data);
    const pairingSalt = wrapped.slice(0, 32);
    const aesKey = await derivePairingCodeKey(authState.creds.pairingCode, pairingSalt);
    const iv = wrapped.slice(32, 48);
    const wrappedKey = wrapped.slice(48, 80);
    return aesDecryptCTR(wrappedKey, aesKey, iv);
}
|
|
1024
|
+
/**
 * Coerce binary-ish data into a Buffer, throwing a 400 Boom when absent.
 * An input that is already a Buffer is returned unchanged (same reference).
 *
 * @param data Buffer, typed array, or other Buffer.from()-compatible value
 * @throws Boom (statusCode 400) when data is undefined
 */
function toRequiredBuffer(data) {
    if (typeof data === 'undefined') {
        throw new Boom('Invalid buffer', { statusCode: 400 });
    }
    if (Buffer.isBuffer(data)) {
        return data;
    }
    return Buffer.from(data);
}
|
|
1030
|
+
/**
 * Whether a retry resend is still permitted for this message/participant pair,
 * i.e. the tracked attempt count is below maxMsgRetryCount.
 *
 * @param id message id
 * @param participant JID of the requesting participant
 * @returns true when another resend is allowed
 */
const willSendMessageAgain = async (id, participant) => {
    const cacheKey = `${id}:${participant}`;
    const attempts = (await msgRetryCache.get(cacheKey)) || 0;
    return attempts < maxMsgRetryCount;
};
|
|
1035
|
+
/**
 * Increment the tracked resend counter for this message/participant pair,
 * starting from 0 when no counter exists yet.
 *
 * @param id message id
 * @param participant JID of the requesting participant
 */
const updateSendMessageAgainCount = async (id, participant) => {
    const cacheKey = `${id}:${participant}`;
    const previous = (await msgRetryCache.get(cacheKey)) || 0;
    await msgRetryCache.set(cacheKey, previous + 1);
};
|
|
1040
|
+
/**
 * Re-send our own messages in response to a retry receipt.
 *
 * Steps: resolve each id to a message (retry cache first, then getMessage);
 * repair the signal session with the requester (inject the key bundle embedded
 * in the receipt, or delete the session on registration-id mismatch / base-key
 * collision, or via the optional auto-recreation policy); clear group
 * sender-key memory; then relay every resolvable message, capped by the
 * per-message retry counter.
 *
 * @param key key of the retried message(s) — remoteJid/participant
 * @param ids message ids requested for resend (ids[0] drives base-key checks)
 * @param retryNode the <retry> child carrying the retry count
 * @param receiptNode full receipt node (may embed a prekey bundle / registration id)
 */
const sendMessagesAgain = async (key, ids, retryNode, receiptNode) => {
    const remoteJid = key.remoteJid;
    const participant = key.participant || remoteJid;
    const retryCount = +retryNode.attrs.count || 1;
    const msgId = ids[0];
    // Try to get messages from cache first, then fallback to getMessage
    const msgs = [];
    for (const id of ids) {
        let msg;
        // Try to get from retry cache first if enabled
        if (messageRetryManager) {
            const cachedMsg = messageRetryManager.getRecentMessage(remoteJid, id);
            if (cachedMsg) {
                msg = cachedMsg.message;
                logger.debug({ jid: remoteJid, id }, 'found message in retry cache');
                // Mark retry as successful since we found the message
                messageRetryManager.markRetrySuccess(id);
            }
        }
        // Fallback to getMessage if not found in cache
        if (!msg) {
            msg = await getMessage({ ...key, id });
            if (msg) {
                logger.debug({ jid: remoteJid, id }, 'found message via getMessage');
                // Also mark as successful if found via getMessage
                if (messageRetryManager) {
                    messageRetryManager.markRetrySuccess(id);
                }
            }
        }
        // msg may be undefined here; handled per-entry in the relay loop below
        msgs.push(msg);
    }
    // if it's the primary jid sending the request
    // just re-send the message to everyone
    // prevents the first message decryption failure
    const sendToAll = !jidDecode(participant)?.device;
    const sessionId = signalRepository.jidToSignalProtocolAddress(participant);
    let injectedFromBundle = false;
    // the receipt may carry a full prekey bundle — prefer building a session from it
    const bundle = extractE2ESessionFromRetryReceipt(receiptNode);
    if (bundle) {
        try {
            await signalRepository.injectE2ESession({ jid: participant, session: bundle });
            injectedFromBundle = true;
            logger.debug({ participant, retryCount }, 'injected session from retry receipt key bundle');
        }
        catch (error) {
            logger.warn({ error, participant }, 'failed to inject session from retry receipt');
        }
    }
    if (!injectedFromBundle) {
        // without a bundle, compare the receipt's registration id with the stored
        // session's — a mismatch means the peer reinstalled, so drop our session
        const receivedRegId = getBinaryNodeChildUInt(receiptNode, 'registration', 4);
        if (typeof receivedRegId === 'number' && Number.isInteger(receivedRegId)) {
            const info = await signalRepository.getSessionInfo(participant);
            if (info && info.registrationId !== 0 && info.registrationId !== receivedRegId) {
                logger.info({ participant, stored: info.registrationId, received: receivedRegId }, 'reg id mismatch on retry without bundle, deleting session');
                await authState.keys.set({ session: { [sessionId]: null } });
            }
        }
    }
    // On retry #2 remember the session base key; on later retries, seeing the
    // same base key again means the rebuilt session still fails — force a fresh one.
    const BASE_KEY_CHECK_RETRY = 2;
    if (msgId && messageRetryManager) {
        const info = await signalRepository.getSessionInfo(participant);
        if (info) {
            if (retryCount === BASE_KEY_CHECK_RETRY) {
                messageRetryManager.saveBaseKey(sessionId, msgId, info.baseKey);
            }
            else if (retryCount > BASE_KEY_CHECK_RETRY) {
                if (messageRetryManager.hasSameBaseKey(sessionId, msgId, info.baseKey)) {
                    logger.warn({ participant, retryCount }, 'base key collision on retry, forcing fresh session');
                    await authState.keys.set({ session: { [sessionId]: null } });
                }
                messageRetryManager.deleteBaseKey(sessionId, msgId);
            }
        }
    }
    // optional policy-driven session recreation for repeated retries without a bundle
    let shouldRecreateSession = false;
    let recreateReason = '';
    if (enableAutoSessionRecreation && messageRetryManager && retryCount > 1 && !injectedFromBundle) {
        try {
            const hasSession = await signalRepository.validateSession(participant);
            const result = messageRetryManager.shouldRecreateSession(participant, hasSession.exists);
            shouldRecreateSession = result.recreate;
            recreateReason = result.reason;
            if (shouldRecreateSession) {
                logger.debug({ participant, retryCount, reason: recreateReason }, 'recreating session for outgoing retry');
                await authState.keys.set({ session: { [sessionId]: null } });
            }
        }
        catch (error) {
            logger.warn({ error, participant }, 'failed to check session recreation for outgoing retry');
        }
    }
    if (!injectedFromBundle) {
        // force-establish a (possibly fresh) session before relaying
        await assertSessions([participant], true);
    }
    if (isJidGroup(remoteJid)) {
        // forget who already has our sender key so it is redistributed
        await authState.keys.set({ 'sender-key-memory': { [remoteJid]: null } });
    }
    logger.debug({ participant, sendToAll, shouldRecreateSession, recreateReason, injectedFromBundle }, 'prepared session for retry resend');
    for (const [i, msg] of msgs.entries()) {
        if (!ids[i])
            continue;
        if (msg && (await willSendMessageAgain(ids[i], participant))) {
            await updateSendMessageAgainCount(ids[i], participant);
            const msgRelayOpts = { messageId: ids[i] };
            if (sendToAll) {
                // primary asked: fan out to all devices, bypassing the device cache
                msgRelayOpts.useUserDevicesCache = false;
            }
            else {
                msgRelayOpts.participant = {
                    jid: participant,
                    count: +retryNode.attrs.count
                };
            }
            await relayMessage(key.remoteJid, msg, msgRelayOpts);
        }
        else {
            logger.debug({ jid: key.remoteJid, id: ids[i] }, 'recv retry request, but message not available');
        }
    }
};
|
|
1161
|
+
/**
 * Process an incoming `receipt` stanza under receiptMutex:
 *  - emit delivery/read status updates — per-participant receipts for
 *    groups/status broadcasts, message-level updates otherwise;
 *  - service `retry` receipts by re-sending our own messages (subject to the
 *    retry-count cap).
 * The stanza is always acked in the `finally`, even when processing throws.
 *
 * @param node the receipt binary node
 */
const handleReceipt = async (node) => {
    const { attrs, content } = node;
    const isLid = attrs.from.includes('lid');
    // whether the receipt originates from one of our own identities (PN or LID)
    const isNodeFromMe = areJidsSameUser(attrs.participant || attrs.from, isLid ? authState.creds.me?.lid : authState.creds.me?.id);
    const remoteJid = !isNodeFromMe || isJidGroup(attrs.from) ? attrs.from : attrs.recipient;
    const fromMe = !attrs.recipient || ((attrs.type === 'retry' || attrs.type === 'sender') && isNodeFromMe);
    const key = {
        remoteJid,
        id: '',
        fromMe,
        participant: attrs.participant
    };
    // a receipt can cover additional message ids via <list><item id=.../></list>
    const ids = [attrs.id];
    if (Array.isArray(content)) {
        const items = getBinaryNodeChildren(content[0], 'item');
        ids.push(...items.map(i => i.attrs.id));
    }
    try {
        await Promise.all([
            receiptMutex.mutex(async () => {
                const status = getStatusFromReceiptType(attrs.type);
                if (typeof status !== 'undefined' &&
                    // basically, we only want to know when a message from us has been delivered to/read by the other person
                    // or another device of ours has read some messages
                    (status >= proto.WebMessageInfo.Status.SERVER_ACK || !isNodeFromMe)) {
                    if (isJidGroup(remoteJid) || isJidStatusBroadcast(remoteJid)) {
                        if (attrs.participant) {
                            const updateKey = status === proto.WebMessageInfo.Status.DELIVERY_ACK ? 'receiptTimestamp' : 'readTimestamp';
                            ev.emit('message-receipt.update', ids.map(id => ({
                                key: { ...key, id },
                                receipt: {
                                    userJid: jidNormalizedUser(attrs.participant),
                                    [updateKey]: +attrs.t
                                }
                            })));
                        }
                    }
                    else {
                        ev.emit('messages.update', ids.map(id => ({
                            key: { ...key, id },
                            update: { status, messageTimestamp: toNumber(+(attrs.t ?? 0)) }
                        })));
                    }
                }
                if (attrs.type === 'retry') {
                    // correctly set who is asking for the retry
                    key.participant = key.participant || attrs.from;
                    const retryNode = getBinaryNodeChild(node, 'retry');
                    if (ids[0] && key.participant && (await willSendMessageAgain(ids[0], key.participant))) {
                        if (key.fromMe) {
                            try {
                                await updateSendMessageAgainCount(ids[0], key.participant);
                                logger.debug({ attrs, key }, 'recv retry request');
                                await sendMessagesAgain(key, ids, retryNode, node);
                            }
                            catch (error) {
                                logger.error({ key, ids, trace: error instanceof Error ? error.stack : 'Unknown error' }, 'error in sending message again');
                            }
                        }
                        else {
                            // retries only make sense for messages we sent
                            logger.info({ attrs, key }, 'recv retry for not fromMe message');
                        }
                    }
                    else {
                        logger.info({ attrs, key }, 'will not send message again, as sent too many times');
                    }
                }
            })
        ]);
    }
    finally {
        // always ack — otherwise the server keeps redelivering the receipt
        await sendMessageAck(node).catch(ackErr => logger.error({ ackErr }, 'failed to ack receipt'));
    }
};
|
|
1235
|
+
/**
 * Serialized entry point for `notification` stanzas: runs processNotification
 * under notificationMutex and, when it yields stub content, fills in the
 * message key (keeping any key fields the handler already set), participant
 * and timestamp, then upserts the stub as an 'append'. Always acks the stanza.
 *
 * @param node the notification binary node
 */
const handleNotification = async (node) => {
    const remoteJid = node.attrs.from;
    try {
        await Promise.all([
            notificationMutex.mutex(async () => {
                const msg = await processNotification(node);
                if (msg) {
                    const fromMe = areJidsSameUser(node.attrs.participant || remoteJid, authState.creds.me.id);
                    const { senderAlt: participantAlt, addressingMode } = extractAddressingContext(node);
                    // spread last so fields set by the notification handler win
                    msg.key = {
                        remoteJid,
                        fromMe,
                        participant: node.attrs.participant,
                        participantAlt,
                        participantUsername: node.attrs.participant_username,
                        addressingMode,
                        id: node.attrs.id,
                        ...(msg.key || {})
                    };
                    msg.participant ?? (msg.participant = node.attrs.participant);
                    msg.messageTimestamp = +node.attrs.t;
                    const fullMsg = proto.WebMessageInfo.fromObject(msg);
                    await upsertMessage(fullMsg, 'append');
                }
            })
        ]);
    }
    finally {
        // always ack — otherwise the server keeps redelivering the notification
        await sendMessageAck(node).catch(ackErr => logger.error({ ackErr }, 'failed to ack notification'));
    }
};
|
|
1266
|
+
/**
 * Top-level handler for an incoming `message` stanza.
 *
 * Flow: short-circuit `msmsg` payloads with a nack; parse the node into a
 * WebMessageInfo shell; persist any new LID<->PN mapping carried by the key;
 * then, under messageMutex, decrypt and either
 *  - handle decryption failure (nack on missing keys, placeholder-resend
 *    request for unavailable content, or a retry receipt), or
 *  - on success, send the appropriate receipt (peer/sender/inactive/delivered,
 *    plus hist_sync for history payloads) or a plain ack for newsletters,
 * and finally clean + upsert the message. `acked` tracks whether an ack/nack
 * was already sent so the catch block never double-acks.
 *
 * @param node the message binary node
 */
const handleMessage = async (node) => {
    const encNode = getBinaryNodeChild(node, 'enc');
    // TODO: temporary fix for crashes and issues resulting of failed msmsg decryption
    if (encNode?.attrs.type === 'msmsg') {
        logger.debug({ key: node.attrs.key }, 'ignored msmsg');
        await sendMessageAck(node, NACK_REASONS.MissingMessageSecret);
        return;
    }
    let acked = false;
    try {
        const { fullMessage: msg, category, author, decrypt } = decryptMessageNode(node, authState.creds.me.id, authState.creds.me.lid || '', signalRepository, logger);
        const alt = msg.key.participantAlt || msg.key.remoteJidAlt;
        // store new mappings we didn't have before
        if (!!alt) {
            const altServer = jidDecode(alt)?.server;
            const primaryJid = msg.key.participant || msg.key.remoteJid;
            if (altServer === 'lid') {
                // only store + migrate when the LID was previously unknown
                if (!(await signalRepository.lidMapping.getPNForLID(alt))) {
                    await signalRepository.lidMapping.storeLIDPNMappings([{ lid: alt, pn: primaryJid }]);
                    await signalRepository.migrateSession(primaryJid, alt);
                }
            }
            else {
                await signalRepository.lidMapping.storeLIDPNMappings([{ lid: primaryJid, pn: alt }]);
                await signalRepository.migrateSession(alt, primaryJid);
            }
        }
        await messageMutex.mutex(async () => {
            await decrypt();
            // keep successfully decrypted messages available for future retry resends
            if (msg.key?.remoteJid && msg.key?.id && msg.message && messageRetryManager) {
                messageRetryManager.addRecentMessage(msg.key.remoteJid, msg.key.id, msg.message);
            }
            // message failed to decrypt
            if (msg.messageStubType === proto.WebMessageInfo.StubType.CIPHERTEXT && msg.category !== 'peer') {
                if (msg?.messageStubParameters?.[0] === MISSING_KEYS_ERROR_TEXT) {
                    acked = true;
                    return sendMessageAck(node, NACK_REASONS.ParsingError);
                }
                if (msg.messageStubParameters?.[0] === NO_MESSAGE_FOUND_ERROR_TEXT) {
                    // Message arrived without encryption (e.g. CTWA ads messages).
                    // Check if this is eligible for placeholder resend (matching WA Web filters).
                    const unavailableNode = getBinaryNodeChild(node, 'unavailable');
                    const unavailableType = unavailableNode?.attrs?.type;
                    if (unavailableType === 'bot_unavailable_fanout' ||
                        unavailableType === 'hosted_unavailable_fanout' ||
                        unavailableType === 'view_once_unavailable_fanout') {
                        logger.debug({ msgId: msg.key.id, unavailableType }, 'skipping placeholder resend for excluded unavailable type');
                        acked = true;
                        return sendMessageAck(node);
                    }
                    const messageAge = unixTimestampSeconds() - toNumber(msg.messageTimestamp);
                    if (messageAge > PLACEHOLDER_MAX_AGE_SECONDS) {
                        logger.debug({ msgId: msg.key.id, messageAge }, 'skipping placeholder resend for old message');
                        acked = true;
                        return sendMessageAck(node);
                    }
                    // Request the real content from the phone via placeholder resend PDO.
                    // Upsert the CIPHERTEXT stub as a placeholder (like WA Web's processPlaceholderMsg),
                    // and store the requestId in stubParameters[1] so users can correlate
                    // with the incoming PDO response event.
                    const cleanKey = {
                        remoteJid: msg.key.remoteJid,
                        fromMe: msg.key.fromMe,
                        id: msg.key.id,
                        participant: msg.key.participant
                    };
                    // Cache the original message metadata so the PDO response handler
                    // can preserve key fields (LID details etc.) that the phone may omit
                    const msgData = {
                        key: msg.key,
                        messageTimestamp: msg.messageTimestamp,
                        pushName: msg.pushName,
                        participant: msg.participant,
                        verifiedBizName: msg.verifiedBizName
                    };
                    // fire-and-forget: the stub is upserted regardless of the request outcome
                    requestPlaceholderResend(cleanKey, msgData)
                        .then(requestId => {
                            if (requestId && requestId !== 'RESOLVED') {
                                logger.debug({ msgId: msg.key.id, requestId }, 'requested placeholder resend for unavailable message');
                                ev.emit('messages.update', [
                                    {
                                        key: msg.key,
                                        update: { messageStubParameters: [NO_MESSAGE_FOUND_ERROR_TEXT, requestId] }
                                    }
                                ]);
                            }
                        })
                        .catch(err => {
                            logger.warn({ err, msgId: msg.key.id }, 'failed to request placeholder resend for unavailable message');
                        });
                    acked = true;
                    await sendMessageAck(node);
                    // Don't return — fall through to upsertMessage so the stub is emitted
                }
                else {
                    // Skip retry for expired status messages (>24h old)
                    if (isJidStatusBroadcast(msg.key.remoteJid)) {
                        const messageAge = unixTimestampSeconds() - toNumber(msg.messageTimestamp);
                        if (messageAge > STATUS_EXPIRY_SECONDS) {
                            logger.debug({ msgId: msg.key.id, messageAge, remoteJid: msg.key.remoteJid }, 'skipping retry for expired status message');
                            acked = true;
                            return sendMessageAck(node);
                        }
                    }
                    logger.debug('[handleMessage] Attempting retry request for failed decryption');
                    // WAWeb only retry-receipts here; server emits PreKeyLow if prekeys run low.
                    await retryMutex.mutex(async () => {
                        try {
                            if (!ws.isOpen) {
                                logger.debug({ node }, 'Connection closed, skipping retry');
                                return;
                            }
                            // NOTE(review): shadows the outer `encNode`; same lookup though
                            const encNode = getBinaryNodeChild(node, 'enc');
                            await sendRetryRequest(node, !encNode);
                            if (retryRequestDelayMs) {
                                await delay(retryRequestDelayMs);
                            }
                        }
                        catch (err) {
                            logger.error({ err }, 'Failed to send retry');
                        }
                        // nack even when the retry request itself failed
                        acked = true;
                        await sendMessageAck(node, NACK_REASONS.UnhandledError);
                    });
                }
            }
            else {
                // decryption succeeded — any pending phone-side resend is obsolete
                if (messageRetryManager && msg.key.id) {
                    messageRetryManager.cancelPendingPhoneRequest(msg.key.id);
                }
                const isNewsletter = isJidNewsletter(msg.key.remoteJid);
                if (!isNewsletter) {
                    // no type in the receipt => message delivered
                    let type = undefined;
                    let participant = msg.key.participant;
                    if (category === 'peer') {
                        // special peer message
                        type = 'peer_msg';
                    }
                    else if (msg.key.fromMe) {
                        // message was sent by us from a different device
                        type = 'sender';
                        // need to specially handle this case
                        if (isLidUser(msg.key.remoteJid) || isLidUser(msg.key.remoteJidAlt)) {
                            participant = author; // TODO: investigate sending receipts to LIDs and not PNs
                        }
                    }
                    else if (!sendActiveReceipts) {
                        type = 'inactive';
                    }
                    acked = true;
                    await sendReceipt(msg.key.remoteJid, participant, [msg.key.id], type);
                    // send ack for history message
                    const isAnyHistoryMsg = getHistoryMsg(msg.message);
                    if (isAnyHistoryMsg) {
                        const jid = jidNormalizedUser(msg.key.remoteJid);
                        await sendReceipt(jid, undefined, [msg.key.id], 'hist_sync'); // TODO: investigate
                    }
                }
                else {
                    acked = true;
                    await sendMessageAck(node);
                    logger.debug({ key: msg.key }, 'processed newsletter message without receipts');
                }
            }
            cleanMessage(msg, authState.creds.me.id, authState.creds.me.lid);
            // offline (queued while disconnected) messages are appended, live ones notify
            await upsertMessage(msg, node.attrs.offline ? 'append' : 'notify');
        });
    }
    catch (error) {
        logger.error({ error, node: binaryNodeToString(node) }, 'error in handling message');
        if (!acked) {
            await sendMessageAck(node, NACK_REASONS.UnhandledError).catch(ackErr => logger.error({ ackErr }, 'failed to ack message after error'));
        }
    }
};
|
|
1442
|
+
/**
 * Handle an incoming `call` stanza: build a call event from the single info
 * child, enrich it from the cached offer (video/group flags, caller PN),
 * cache offers / evict ended calls in callOfferCache, and emit a 'call'
 * event. The stanza is always acked in the `finally`.
 *
 * @param node the call binary node
 */
const handleCall = async (node) => {
    try {
        const { attrs } = node;
        // the call node carries exactly one info child (offer/accept/reject/...)
        const [infoChild] = getAllBinaryNodeChildren(node);
        if (!infoChild) {
            throw new Boom('Missing call info in call node');
        }
        const status = getCallStatusFromNode(infoChild);
        const callId = infoChild.attrs['call-id'];
        const from = infoChild.attrs.from || infoChild.attrs['call-creator'];
        const call = {
            chatId: attrs.from,
            from,
            callerPn: infoChild.attrs['caller_pn'],
            id: callId,
            date: new Date(+attrs.t * 1000),
            offline: !!attrs.offline,
            status
        };
        if (status === 'relaylatency') {
            // latency attribute name varies — accept all three spellings,
            // and only attach it when it parses to a finite number
            const latencyValue = infoChild.attrs.latency || infoChild.attrs['latency_ms'] || infoChild.attrs['latency-ms'];
            const latencyMs = latencyValue ? Number(latencyValue) : undefined;
            if (Number.isFinite(latencyMs)) {
                call.latencyMs = latencyMs;
            }
        }
        if (status === 'offer') {
            // remember offer details so later status updates can be enriched
            call.isVideo = !!getBinaryNodeChild(infoChild, 'video');
            call.isGroup = infoChild.attrs.type === 'group' || !!infoChild.attrs['group-jid'];
            call.groupJid = infoChild.attrs['group-jid'];
            await callOfferCache.set(call.id, call);
        }
        const existingCall = await callOfferCache.get(call.id);
        // use existing call info to populate this event
        if (existingCall) {
            call.isVideo = existingCall.isVideo;
            call.isGroup = existingCall.isGroup;
            call.callerPn = call.callerPn || existingCall.callerPn;
        }
        // delete data once call has ended
        if (status === 'reject' || status === 'accept' || status === 'timeout' || status === 'terminate') {
            await callOfferCache.del(call.id);
        }
        ev.emit('call', [call]);
    }
    catch (error) {
        logger.error({ error, node: binaryNodeToString(node) }, 'error in handling call');
    }
    finally {
        // always ack — otherwise the server keeps redelivering the call node
        await sendMessageAck(node).catch(ackErr => logger.error({ ackErr }, 'failed to ack call'));
    }
};
|
|
1494
|
+
/**
 * Handles an ack stanza for one of our own messages that carries an error.
 * Marks the message as errored via 'messages.update' and runs error-specific
 * recovery (463 tctoken re-issuance, reachout-timelock refresh).
 */
const handleBadAck = async ({ attrs }) => {
    const key = { remoteJid: attrs.from, fromMe: true, id: attrs.id };
    // WARNING: REFRAIN FROM ENABLING THIS FOR NOW. IT WILL CAUSE A LOOP
    // (disabled experiment: when a phash was present in the ack — hypothesised
    // to mean the message had not yet reached every device — the message was
    // looked up via getMessage(key) and re-sent with relayMessage().)
    //
    // An error in the acknowledgement means the device could not display the message.
    if (!attrs.error) {
        return;
    }
    const reachoutTimelocked = attrs.error === String(NACK_REASONS.SenderReachoutTimelocked);
    if (attrs.error === SERVER_ERROR_CODES.MessageAccountRestriction) {
        // 463 = 1:1 message missing privacy token (tctoken). Usually means the
        // account is restricted: WhatsApp blocks starting new chats but preserves
        // existing ones, since established chats already carry a tctoken.
        // WA Web prevents this client-side (disables the compose bar).
        // No retry — retrying counts as another "reach out" and worsens the restriction.
        logger.warn({ msgId: attrs.id, from: attrs.from }, 'error 463: account restricted or missing tctoken for contact');
        const ackFrom = attrs.from;
        if (ackFrom && !inFlight463Recoveries.has(ackFrom)) {
            inFlight463Recoveries.add(ackFrom);
            // fire-and-forget token re-issuance, deduped per JID via inFlight463Recoveries
            void (async () => {
                try {
                    const getPNForLID = signalRepository.lidMapping.getPNForLID.bind(signalRepository.lidMapping);
                    const tcStorageJid = await resolveTcTokenJid(ackFrom, getLIDForPN);
                    const issueJid = await resolveIssuanceJid(ackFrom, sock.serverProps.lidTrustedTokenIssueToLid, getLIDForPN, getPNForLID);
                    const result = await issuePrivacyTokens([issueJid], unixTimestampSeconds());
                    await storeTcTokensFromIqResult({
                        result,
                        fallbackJid: tcStorageJid,
                        keys: authState.keys,
                        getLIDForPN,
                        onNewJidStored: trackTcTokenJid
                    });
                    logger.debug({ from: ackFrom }, 'completed 463 token recovery issuance');
                }
                catch (err) {
                    logger.debug({ from: ackFrom, err: err?.message }, 'failed 463 token recovery issuance');
                }
                finally {
                    inFlight463Recoveries.delete(ackFrom);
                }
            })();
        }
    }
    else if (attrs.error === SERVER_ERROR_CODES.SmaxInvalid) {
        logger.warn({ msgId: attrs.id, from: attrs.from }, 'smax-invalid (479): stanza rejected by server — likely stale device session or malformed addressing');
    }
    else if (reachoutTimelocked) {
        // user is temporarily restricted, fetch current restriction details
        await fetchAccountReachoutTimelock().catch(err => logger.warn({ err }, 'failed to fetch reachout timelock'));
        logger.warn({ attrs }, 'received error in ack');
    }
    else {
        logger.warn({ attrs }, 'received error in ack');
    }
    ev.emit('messages.update', [
        {
            key,
            update: {
                status: WAMessageStatus.ERROR,
                messageStubParameters: reachoutTimelocked ? [attrs.error, ACCOUNT_RESTRICTED_TEXT] : [attrs.error]
            }
        }
    ]);
};
|
|
1569
|
+
/// processes a node with the given function
/// and adds the task to the existing buffer if we're buffering events
const processNodeWithBuffer = async (node, identifier, exec) => {
    ev.buffer();
    try {
        // exec's rejection is routed to onUnexpectedError; this await therefore
        // only throws if the error handler itself throws
        await exec(node, false).catch(err => onUnexpectedError(err, identifier));
    }
    finally {
        // always flush — a throwing error handler must not leave the event buffer open
        ev.flush();
    }
};
|
|
1579
|
+
// handlers for nodes that arrive flagged as offline (queued while we were away)
const offlineHandlerMap = new Map([
    ['message', handleMessage],
    ['call', handleCall],
    ['receipt', handleReceipt],
    ['notification', handleNotification]
]);
const nodeProcessor = makeOfflineNodeProcessor(offlineHandlerMap, {
    isWsOpen: () => ws.isOpen,
    onUnexpectedError,
    yieldToEventLoop: () => new Promise(resolve => {
        setImmediate(resolve);
    })
});
|
|
1589
|
+
/**
 * Common entry point for inbound stanzas: drops ignored JIDs early (with an
 * ack), queues offline-flagged nodes, and buffers/flushes events around live ones.
 */
const processNode = async (type, node, identifier, exec) => {
    // Fast path: ack and drop ignored JIDs before entering the buffer/queue
    const { attrs } = node;
    const resolveIgnoreJid = () => {
        if (type !== 'receipt' || !attrs.from) {
            return attrs.from;
        }
        // for receipts, self-receipts in 1:1 chats are keyed by the recipient instead
        const ownJid = attrs.from.includes('lid') ? authState.creds.me?.lid : authState.creds.me?.id;
        const fromMe = areJidsSameUser(attrs.participant || attrs.from, ownJid);
        return !fromMe || isJidGroup(attrs.from) ? attrs.from : attrs.recipient;
    };
    const ignoreJid = resolveIgnoreJid();
    if (ignoreJid && ignoreJid !== S_WHATSAPP_NET && shouldIgnoreJid(ignoreJid)) {
        await sendMessageAck(node, type === 'message' ? NACK_REASONS.UnhandledError : undefined);
        return;
    }
    if (attrs.offline) {
        // queued by the server while we were offline — hand to the offline node processor
        nodeProcessor.enqueue(type, node);
    }
    else {
        await processNodeWithBuffer(node, identifier, exec);
    }
};
|
|
1611
|
+
// recv a message — wire each inbound stanza class to the shared processing pipeline
const stanzaRoutes = [
    ['CB:message', 'message', 'processing message', handleMessage],
    ['CB:call', 'call', 'handling call', handleCall],
    ['CB:receipt', 'receipt', 'handling receipt', handleReceipt],
    ['CB:notification', 'notification', 'handling notification', handleNotification]
];
for (const [event, type, identifier, handler] of stanzaRoutes) {
    ws.on(event, async (node) => {
        await processNode(type, node, identifier, handler);
    });
}
// message-class acks are handled outside the buffered pipeline
ws.on('CB:ack,class:message', (node) => {
    handleBadAck(node).catch(error => onUnexpectedError(error, 'handling bad ack'));
});
|
|
1627
|
+
// synthesise chat messages for calls: a missed-call stub on timeout, and a
// call-notification message for incoming group call offers
ev.on('call', async ([call]) => {
    if (!call) {
        return;
    }
    // missed call + group call notification message generation
    const isTimeout = call.status === 'timeout';
    const isGroupOffer = call.status === 'offer' && call.isGroup;
    if (!isTimeout && !isGroupOffer) {
        return;
    }
    const msg = {
        key: {
            remoteJid: call.chatId,
            id: call.id,
            fromMe: false
        },
        messageTimestamp: unixTimestampSeconds(call.date)
    };
    if (isTimeout) {
        msg.messageStubType = call.isGroup
            ? (call.isVideo ? WAMessageStubType.CALL_MISSED_GROUP_VIDEO : WAMessageStubType.CALL_MISSED_GROUP_VOICE)
            : (call.isVideo ? WAMessageStubType.CALL_MISSED_VIDEO : WAMessageStubType.CALL_MISSED_VOICE);
    }
    else {
        msg.message = { call: { callKey: Buffer.from(call.id) } };
    }
    // offline calls are appended silently; live ones notify
    await upsertMessage(proto.WebMessageInfo.fromObject(msg), call.offline ? 'append' : 'notify');
});
|
|
1658
|
+
/** timestamp (ms since epoch) of last tctoken prune run — throttles to once per 24h */
let lastTcTokenPruneTs = 0;
/** dedupe in-flight 463 recovery token issuance by target JID (see handleBadAck) */
const inFlight463Recoveries = new Set();
|
|
1662
|
+
// track online state for active receipts, flush the tctoken index on close,
// and kick off the daily expired-tctoken prune when we come online
ev.on('connection.update', ({ isOnline, connection }) => {
    if (isOnline !== undefined) {
        sendActiveReceipts = isOnline;
        logger.trace(`sendActiveReceipts set to "${sendActiveReceipts}"`);
    }
    // Flush pending tctoken index save on disconnect to avoid writing after close
    if (connection === 'close' && tcTokenIndexTimer) {
        clearTimeout(tcTokenIndexTimer);
        tcTokenIndexTimer = undefined;
        // Best-effort flush — may fail if store is already closed
        try {
            void Promise.resolve(flushTcTokenIndex()).catch(() => { });
        }
        catch {
            /* ignore sync errors */
        }
    }
    // Prune expired tctokens when coming online, at most once per 24 hours
    // Matches WA Web's CLEAN_TC_TOKENS task
    // Note: don't gate on tcTokenKnownJids.size — the index may still be loading
    if (!isOnline) {
        return;
    }
    const nowMs = Date.now();
    const ONE_DAY_MS = 24 * 60 * 60 * 1000;
    if (nowMs - lastTcTokenPruneTs >= ONE_DAY_MS) {
        lastTcTokenPruneTs = nowMs;
        void pruneExpiredTcTokens();
    }
});
|
|
1691
|
+
// teardown: release caches we own and stop sending active receipts
registerSocketEndHandler(() => {
    // only close caches created internally; caller-supplied caches are theirs to manage
    if (!config.msgRetryCounterCache) {
        msgRetryCache.close?.();
    }
    if (!config.callOfferCache) {
        callOfferCache.close?.();
    }
    identityAssertDebounce.close();
    sendActiveReceipts = false;
});
|
|
1701
|
+
/**
 * Removes expired tctokens from the key store and rewrites the persisted JID
 * index to list only surviving entries. Best-effort: any failure is logged at
 * warn level and swallowed. Throttled by the connection.update handler to at
 * most once per 24h.
 */
async function pruneExpiredTcTokens() {
    try {
        // wait for the on-disk index to finish loading before touching it
        await tcTokenIndexLoaded;
        // Union with the persisted index picks up JIDs added by other layers
        // (history sync) without needing inter-module wiring.
        const persisted = await readTcTokenIndex(authState.keys);
        const allJids = new Set(tcTokenKnownJids);
        for (const jid of persisted)
            allJids.add(jid);
        if (!allJids.size)
            return;
        const jids = [...allJids];
        const allTokens = await authState.keys.get('tctoken', jids);
        // writes: jid -> replacement entry, or null to delete from the store
        const writes = {};
        // survivors: JIDs that keep at least one live token/timestamp and stay indexed
        const survivors = new Set();
        let mutated = 0;
        for (const jid of jids) {
            const entry = allTokens[jid];
            if (!entry) {
                // Tracked but nothing in store — drop from index.
                mutated++;
                continue;
            }
            // an entry can hold a peer token and/or a sender timestamp; each expires independently
            const hasPeerToken = !!entry.token?.length;
            const peerTokenExpired = hasPeerToken && isTcTokenExpired(entry.timestamp);
            const hasSenderTs = entry.senderTimestamp !== undefined;
            const senderTsExpired = hasSenderTs && isTcTokenExpired(entry.senderTimestamp);
            const keepPeerToken = hasPeerToken && !peerTokenExpired;
            const keepSenderTs = hasSenderTs && !senderTsExpired;
            if (!keepPeerToken && !keepSenderTs) {
                // nothing left alive — delete the whole entry
                writes[jid] = null;
                mutated++;
            }
            else if (peerTokenExpired && keepSenderTs) {
                // peer token expired but sender timestamp lives on: blank the token, keep the ts
                writes[jid] = { token: Buffer.alloc(0), senderTimestamp: entry.senderTimestamp };
                survivors.add(jid);
                mutated++;
            }
            else {
                // fully alive — untouched, just keep it indexed
                survivors.add(jid);
            }
        }
        if (mutated === 0)
            return;
        // single batched write: entry updates/deletions plus the rewritten index
        await authState.keys.set({
            tctoken: {
                ...writes,
                [TC_TOKEN_INDEX_KEY]: {
                    token: Buffer.from(JSON.stringify([...survivors]))
                }
            }
        });
        // sync the in-memory index with what was just persisted
        tcTokenKnownJids.clear();
        for (const jid of survivors)
            tcTokenKnownJids.add(jid);
        logger.debug({ mutated, remaining: survivors.size }, 'pruned expired tctokens');
    }
    catch (err) {
        logger.warn({ err: err?.message }, 'failed to prune expired tctokens');
    }
}
|
|
1762
|
+
return {
|
|
1763
|
+
...sock,
|
|
1764
|
+
sendMessageAck,
|
|
1765
|
+
sendRetryRequest,
|
|
1766
|
+
rejectCall,
|
|
1767
|
+
fetchMessageHistory,
|
|
1768
|
+
requestPlaceholderResend,
|
|
1769
|
+
messageRetryManager
|
|
1770
|
+
};
|
|
1771
|
+
};
|
|
1772
|
+
//# sourceMappingURL=messages-recv.js.map
|