@hbmodsofc/baileys 1.5.0 → 1.7.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/WAProto/index.js +19671 -152026
- package/engine-requirements.js +4 -4
- package/lib/Defaults/index.d.ts +12 -8
- package/lib/Defaults/index.js +90 -124
- package/lib/Signal/Group/group_cipher.d.ts +0 -1
- package/lib/Signal/Group/group_cipher.js +28 -39
- package/lib/Signal/Group/sender-chain-key.d.ts +1 -1
- package/lib/Signal/Group/sender-chain-key.js +9 -2
- package/lib/Signal/Group/sender-key-distribution-message.js +3 -3
- package/lib/Signal/Group/sender-key-message.js +3 -3
- package/lib/Signal/Group/sender-key-state.d.ts +4 -4
- package/lib/Signal/Group/sender-key-state.js +47 -16
- package/lib/Signal/libsignal.d.ts +7 -3
- package/lib/Signal/libsignal.js +224 -39
- package/lib/Signal/lid-mapping.d.ts +26 -0
- package/lib/Signal/lid-mapping.js +146 -0
- package/lib/Socket/Client/index.d.ts +2 -3
- package/lib/Socket/Client/index.js +2 -3
- package/lib/Socket/Client/{abstract-socket-client.d.ts → types.d.ts} +1 -3
- package/lib/Socket/Client/{web-socket-client.d.ts → websocket.d.ts} +1 -1
- package/lib/Socket/Client/{web-socket-client.js → websocket.js} +10 -16
- package/lib/Socket/business.d.ts +94 -78
- package/lib/Socket/business.js +130 -11
- package/lib/Socket/chats.d.ts +63 -233
- package/lib/Socket/chats.js +234 -184
- package/lib/Socket/communities.d.ts +232 -0
- package/lib/Socket/communities.js +402 -0
- package/lib/Socket/groups.d.ts +62 -41
- package/lib/Socket/groups.js +76 -64
- package/lib/Socket/index.d.ts +129 -83
- package/lib/Socket/index.js +13 -6
- package/lib/Socket/messages-recv.d.ts +59 -48
- package/lib/Socket/messages-recv.js +516 -371
- package/lib/Socket/messages-send.d.ts +86 -67
- package/lib/Socket/messages-send.js +1091 -1
- package/lib/Socket/mex.d.ts +2 -0
- package/lib/Socket/mex.js +45 -0
- package/lib/Socket/newsletter.d.ts +76 -64
- package/lib/Socket/newsletter.js +184 -1
- package/lib/Socket/socket.d.ts +19 -13
- package/lib/Socket/socket.js +805 -1
- package/lib/Types/Auth.d.ts +4 -10
- package/lib/Types/Bussines.d.ts +24 -0
- package/lib/Types/Bussines.js +2 -0
- package/lib/Types/Call.d.ts +1 -1
- package/lib/Types/Chat.d.ts +29 -9
- package/lib/Types/Chat.js +7 -1
- package/lib/Types/Contact.d.ts +5 -1
- package/lib/Types/Events.d.ts +55 -14
- package/lib/Types/GroupMetadata.d.ts +15 -5
- package/lib/Types/Label.d.ts +11 -0
- package/lib/Types/Label.js +1 -1
- package/lib/Types/LabelAssociation.js +1 -1
- package/lib/Types/Message.d.ts +75 -49
- package/lib/Types/Message.js +10 -7
- package/lib/Types/Newsletter.d.ts +129 -98
- package/lib/Types/Newsletter.js +33 -38
- package/lib/Types/Product.d.ts +1 -1
- package/lib/Types/Signal.d.ts +29 -1
- package/lib/Types/Socket.d.ts +48 -22
- package/lib/Types/State.d.ts +13 -2
- package/lib/Types/State.js +12 -0
- package/lib/Types/USync.d.ts +1 -1
- package/lib/Types/index.d.ts +10 -3
- package/lib/Types/index.js +2 -2
- package/lib/Utils/auth-utils.d.ts +3 -3
- package/lib/Utils/auth-utils.js +378 -102
- package/lib/Utils/baileys-event-stream.js +1 -1
- package/lib/Utils/business.d.ts +2 -2
- package/lib/Utils/business.js +19 -13
- package/lib/Utils/chat-utils.d.ts +21 -22
- package/lib/Utils/chat-utils.js +201 -154
- package/lib/Utils/crypto.d.ts +18 -19
- package/lib/Utils/crypto.js +78 -37
- package/lib/Utils/decode-wa-message.d.ts +34 -7
- package/lib/Utils/decode-wa-message.js +138 -66
- package/lib/Utils/event-buffer.d.ts +6 -8
- package/lib/Utils/event-buffer.js +81 -43
- package/lib/Utils/generics.d.ts +27 -27
- package/lib/Utils/generics.js +128 -133
- package/lib/Utils/history.d.ts +9 -5
- package/lib/Utils/history.js +17 -23
- package/lib/Utils/index.d.ts +2 -0
- package/lib/Utils/index.js +2 -0
- package/lib/Utils/lidToJid-test.d.ts +11 -0
- package/lib/Utils/lidToJid-test.js +27 -0
- package/lib/Utils/link-preview.d.ts +4 -4
- package/lib/Utils/link-preview.js +40 -12
- package/lib/Utils/logger.d.ts +11 -3
- package/lib/Utils/lt-hash.d.ts +8 -8
- package/lib/Utils/lt-hash.js +23 -24
- package/lib/Utils/make-mutex.d.ts +2 -2
- package/lib/Utils/make-mutex.js +3 -2
- package/lib/Utils/message-retry-manager.d.ts +81 -0
- package/lib/Utils/message-retry-manager.js +152 -0
- package/lib/Utils/messages-media.d.ts +37 -41
- package/lib/Utils/messages-media.js +252 -368
- package/lib/Utils/messages.d.ts +13 -15
- package/lib/Utils/messages.js +274 -261
- package/lib/Utils/noise-handler.d.ts +13 -15
- package/lib/Utils/noise-handler.js +20 -26
- package/lib/Utils/process-message.d.ts +9 -8
- package/lib/Utils/process-message.js +157 -93
- package/lib/Utils/signal.d.ts +6 -5
- package/lib/Utils/signal.js +37 -29
- package/lib/Utils/use-multi-file-auth-state.d.ts +1 -2
- package/lib/Utils/use-multi-file-auth-state.js +12 -7
- package/lib/Utils/validate-connection.d.ts +5 -6
- package/lib/Utils/validate-connection.js +39 -97
- package/lib/WABinary/constants.d.ts +24 -27
- package/lib/WABinary/constants.js +1276 -13
- package/lib/WABinary/decode.d.ts +3 -4
- package/lib/WABinary/decode.js +28 -14
- package/lib/WABinary/encode.d.ts +1 -2
- package/lib/WABinary/encode.js +134 -147
- package/lib/WABinary/generic-utils.d.ts +4 -7
- package/lib/WABinary/generic-utils.js +40 -125
- package/lib/WABinary/jid-utils.d.ts +13 -8
- package/lib/WABinary/jid-utils.js +27 -16
- package/lib/WAM/BinaryInfo.d.ts +2 -11
- package/lib/WAM/constants.d.ts +3 -2
- package/lib/WAM/constants.js +2252 -2359
- package/lib/WAM/encode.d.ts +1 -2
- package/lib/WAM/encode.js +8 -11
- package/lib/WAUSync/Protocols/USyncContactProtocol.d.ts +2 -2
- package/lib/WAUSync/Protocols/USyncContactProtocol.js +3 -4
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.d.ts +2 -2
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +5 -5
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.d.ts +2 -2
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +5 -5
- package/lib/WAUSync/Protocols/USyncStatusProtocol.d.ts +2 -2
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js +5 -6
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.d.ts +2 -2
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +1 -1
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.d.ts +4 -3
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +11 -3
- package/lib/WAUSync/USyncQuery.d.ts +2 -2
- package/lib/WAUSync/USyncQuery.js +19 -15
- package/lib/WAUSync/USyncUser.d.ts +5 -5
- package/lib/WAUSync/index.d.ts +1 -1
- package/lib/WAUSync/index.js +1 -1
- package/package.json +102 -102
- package/lib/Defaults/baileys-version.json +0 -3
- package/lib/Defaults/phonenumber-mcc.json +0 -223
- package/lib/Signal/Group/queue-job.d.ts +0 -1
- package/lib/Signal/Group/queue-job.js +0 -57
- package/lib/Socket/Client/mobile-socket-client.d.ts +0 -13
- package/lib/Socket/Client/mobile-socket-client.js +0 -65
- package/lib/Socket/hbmods.d.ts +0 -253
- package/lib/Socket/hbmods.js +0 -1
- package/lib/Socket/registration.d.ts +0 -267
- package/lib/Socket/registration.js +0 -166
- package/lib/Socket/usync.d.ts +0 -36
- package/lib/Socket/usync.js +0 -70
- /package/lib/Socket/Client/{abstract-socket-client.js → types.js} +0 -0
package/lib/Utils/auth-utils.js
CHANGED
@@ -3,10 +3,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.initAuthCreds = exports.addTransactionCapability = exports.makeCacheableSignalKeyStore = void 0;
+exports.initAuthCreds = exports.addTransactionCapability = void 0;
+exports.makeCacheableSignalKeyStore = makeCacheableSignalKeyStore;
+const node_cache_1 = __importDefault(require("@cacheable/node-cache"));
+const async_mutex_1 = require("async-mutex");
 const crypto_1 = require("crypto");
-const node_cache_1 = __importDefault(require("node-cache"));
-const uuid_1 = require("uuid");
+const lru_cache_1 = require("lru-cache");
 const Defaults_1 = require("../Defaults");
 const crypto_2 = require("./crypto");
 const generics_1 = require("./generics");
@@ -17,59 +19,203 @@ const generics_1 = require("./generics");
  * @param _cache cache store to use
  */
 function makeCacheableSignalKeyStore(store, logger, _cache) {
-    const cache = _cache || new node_cache_1.default({
-        stdTTL: Defaults_1.DEFAULT_CACHE_TTLS.SIGNAL_STORE, // 5 minutes
-        useClones: false,
-        deleteOnExpire: true,
-    });
+    const cache = _cache ||
+        new node_cache_1.default({
+            stdTTL: Defaults_1.DEFAULT_CACHE_TTLS.SIGNAL_STORE, // 5 minutes
+            useClones: false,
+            deleteOnExpire: true
+        });
+    // Mutex for protecting cache operations
+    const cacheMutex = new async_mutex_1.Mutex();
     function getUniqueId(type, id) {
         return `${type}.${id}`;
     }
     return {
         async get(type, ids) {
-            const data = {};
-            const idsToFetch = [];
-            for (const id of ids) {
-                const item = cache.get(getUniqueId(type, id));
-                if (typeof item !== 'undefined') {
-                    data[id] = item;
-                }
-                else {
-                    idsToFetch.push(id);
-                }
-            }
-            if (idsToFetch.length) {
-                logger.trace({ items: idsToFetch.length }, 'loading from store');
-                const fetched = await store.get(type, idsToFetch);
-                for (const id of idsToFetch) {
-                    const item = fetched[id];
-                    if (item) {
+            return cacheMutex.runExclusive(async () => {
+                const data = {};
+                const idsToFetch = [];
+                for (const id of ids) {
+                    const item = cache.get(getUniqueId(type, id));
+                    if (typeof item !== 'undefined') {
                         data[id] = item;
-                        cache.set(getUniqueId(type, id), item);
+                    }
+                    else {
+                        idsToFetch.push(id);
                     }
                 }
-            }
-            return data;
+                if (idsToFetch.length) {
+                    logger?.trace({ items: idsToFetch.length }, 'loading from store');
+                    const fetched = await store.get(type, idsToFetch);
+                    for (const id of idsToFetch) {
+                        const item = fetched[id];
+                        if (item) {
+                            data[id] = item;
+                            cache.set(getUniqueId(type, id), item);
+                        }
+                    }
+                }
+                return data;
+            });
         },
         async set(data) {
-            let keys = 0;
-            for (const type in data) {
-                for (const id in data[type]) {
-                    cache.set(getUniqueId(type, id), data[type][id]);
-                    keys += 1;
+            return cacheMutex.runExclusive(async () => {
+                let keys = 0;
+                for (const type in data) {
+                    for (const id in data[type]) {
+                        await cache.set(getUniqueId(type, id), data[type][id]);
+                        keys += 1;
+                    }
                 }
-            }
-            logger.trace({ keys }, 'updated cache');
-            await store.set(data);
+                logger?.trace({ keys }, 'updated cache');
+                await store.set(data);
+            });
         },
         async clear() {
-            var _a;
-            cache.flushAll();
-            await ((_a = store.clear) === null || _a === void 0 ? void 0 : _a.call(store));
+            await cache.flushAll();
+            await store.clear?.();
         }
     };
 }
-exports.makeCacheableSignalKeyStore = makeCacheableSignalKeyStore;
+// Module-level specialized mutexes for pre-key operations
+const preKeyMutex = new async_mutex_1.Mutex();
+const signedPreKeyMutex = new async_mutex_1.Mutex();
+/**
+ * Get the appropriate mutex for the key type
+ */
+const getPreKeyMutex = (keyType) => {
+    return keyType === 'signed-pre-key' ? signedPreKeyMutex : preKeyMutex;
+};
+/**
+ * Handles pre-key operations with mutex protection
+ */
+async function handlePreKeyOperations(data, keyType, transactionCache, mutations, logger, isInTransaction, state) {
+    const mutex = getPreKeyMutex(keyType);
+    await mutex.runExclusive(async () => {
+        const keyData = data[keyType];
+        if (!keyData)
+            return;
+        // Ensure structures exist
+        transactionCache[keyType] = transactionCache[keyType] || {};
+        mutations[keyType] = mutations[keyType] || {};
+        // Separate deletions from updates for batch processing
+        const deletionKeys = [];
+        const updateKeys = [];
+        for (const keyId in keyData) {
+            if (keyData[keyId] === null) {
+                deletionKeys.push(keyId);
+            }
+            else {
+                updateKeys.push(keyId);
+            }
+        }
+        // Process updates first (no validation needed)
+        for (const keyId of updateKeys) {
+            if (transactionCache[keyType]) {
+                transactionCache[keyType][keyId] = keyData[keyId];
+            }
+            if (mutations[keyType]) {
+                mutations[keyType][keyId] = keyData[keyId];
+            }
+        }
+        // Process deletions with validation
+        if (deletionKeys.length === 0)
+            return;
+        if (isInTransaction) {
+            // In transaction, only allow deletion if key exists in cache
+            for (const keyId of deletionKeys) {
+                if (transactionCache[keyType]) {
+                    transactionCache[keyType][keyId] = null;
+                    if (mutations[keyType]) {
+                        // Mark for deletion in mutations
+                        mutations[keyType][keyId] = null;
+                    }
+                }
+                else {
+                    logger.warn(`Skipping deletion of non-existent ${keyType} in transaction: ${keyId}`);
+                }
+            }
+            return;
+        }
+        // Outside transaction, batch validate all deletions
+        if (!state)
+            return;
+        const existingKeys = await state.get(keyType, deletionKeys);
+        for (const keyId of deletionKeys) {
+            if (existingKeys[keyId]) {
+                if (transactionCache[keyType])
+                    transactionCache[keyType][keyId] = null;
+                if (mutations[keyType])
+                    mutations[keyType][keyId] = null;
+            }
+            else {
+                logger.warn(`Skipping deletion of non-existent ${keyType}: ${keyId}`);
+            }
+        }
+    });
+}
+/**
+ * Handles normal key operations for transactions
+ */
+function handleNormalKeyOperations(data, key, transactionCache, mutations) {
+    Object.assign(transactionCache[key], data[key]);
+    mutations[key] = mutations[key] || {};
+    Object.assign(mutations[key], data[key]);
+}
+/**
+ * Process pre-key deletions with validation
+ */
+async function processPreKeyDeletions(data, keyType, state, logger) {
+    const mutex = getPreKeyMutex(keyType);
+    await mutex.runExclusive(async () => {
+        const keyData = data[keyType];
+        if (!keyData)
+            return;
+        // Validate deletions
+        for (const keyId in keyData) {
+            if (keyData[keyId] === null) {
+                const existingKeys = await state.get(keyType, [keyId]);
+                if (!existingKeys[keyId]) {
+                    logger.warn(`Skipping deletion of non-existent ${keyType}: ${keyId}`);
+                    if (data[keyType])
+                        delete data[keyType][keyId];
+                }
+            }
+        }
+    });
+}
+/**
+ * Executes a function with mutexes acquired for given key types
+ * Uses async-mutex's runExclusive with efficient batching
+ */
+async function withMutexes(keyTypes, getKeyTypeMutex, fn) {
+    if (keyTypes.length === 0) {
+        return fn();
+    }
+    if (keyTypes.length === 1) {
+        return getKeyTypeMutex(keyTypes[0]).runExclusive(fn);
+    }
+    // For multiple mutexes, sort by key type to prevent deadlocks
+    // Then acquire all mutexes in order using Promise.all for better efficiency
+    const sortedKeyTypes = [...keyTypes].sort();
+    const mutexes = sortedKeyTypes.map(getKeyTypeMutex);
+    // Acquire all mutexes in order to prevent deadlocks
+    const releases = [];
+    try {
+        for (const mutex of mutexes) {
+            releases.push(await mutex.acquire());
+        }
+        return await fn();
+    }
+    finally {
+        // Release in reverse order
+        while (releases.length > 0) {
+            const release = releases.pop();
+            if (release)
+                release();
+        }
+    }
+}
 /**
  * Adds DB like transaction capability (https://en.wikipedia.org/wiki/Database_transaction) to the SignalKeyStore,
  * this allows batch read & write operations & improves the performance of the lib
@@ -83,24 +229,116 @@ const addTransactionCapability = (state, logger, { maxCommitRetries, delayBetweenTriesMs }) => {
     let dbQueriesInTransaction = 0;
     let transactionCache = {};
     let mutations = {};
+    // LRU Cache to hold mutexes for different key types
+    const mutexCache = new lru_cache_1.LRUCache({
+        ttl: 60 * 60 * 1000, // 1 hour
+        ttlAutopurge: true,
+        updateAgeOnGet: true
+    });
     let transactionsInProgress = 0;
+    function getKeyTypeMutex(type) {
+        return getMutex(`keytype:${type}`);
+    }
+    function getSenderKeyMutex(senderKeyName) {
+        return getMutex(`senderkey:${senderKeyName}`);
+    }
+    function getTransactionMutex(key) {
+        return getMutex(`transaction:${key}`);
+    }
+    // Get or create a mutex for a specific key name
+    function getMutex(key) {
+        let mutex = mutexCache.get(key);
+        if (!mutex) {
+            mutex = new async_mutex_1.Mutex();
+            mutexCache.set(key, mutex);
+            logger.info({ key }, 'created new mutex');
+        }
+        return mutex;
+    }
+    // Sender key operations with proper mutex sequencing
+    function queueSenderKeyOperation(senderKeyName, operation) {
+        return getSenderKeyMutex(senderKeyName).runExclusive(operation);
+    }
+    // Check if we are currently in a transaction
+    function isInTransaction() {
+        return transactionsInProgress > 0;
+    }
+    // Helper function to handle transaction commit with retries
+    async function commitTransaction() {
+        if (!Object.keys(mutations).length) {
+            logger.trace('no mutations in transaction');
+            return;
+        }
+        logger.trace('committing transaction');
+        let tries = maxCommitRetries;
+        while (tries > 0) {
+            tries -= 1;
+            try {
+                await state.set(mutations);
+                logger.trace({ dbQueriesInTransaction }, 'committed transaction');
+                return;
+            }
+            catch (error) {
+                logger.warn(`failed to commit ${Object.keys(mutations).length} mutations, tries left=${tries}`);
+                if (tries > 0) {
+                    await (0, generics_1.delay)(delayBetweenTriesMs);
+                }
+            }
+        }
+    }
+    // Helper function to clean up transaction state
+    function cleanupTransactionState() {
+        transactionsInProgress -= 1;
+        if (transactionsInProgress === 0) {
+            transactionCache = {};
+            mutations = {};
+            dbQueriesInTransaction = 0;
+        }
+    }
+    // Helper function to execute work within transaction
+    async function executeTransactionWork(work) {
+        const result = await work();
+        // commit if this is the outermost transaction
+        if (transactionsInProgress === 1) {
+            await commitTransaction();
+        }
+        return result;
+    }
     return {
         get: async (type, ids) => {
             if (isInTransaction()) {
                 const dict = transactionCache[type];
-                const idsRequiringFetch = dict
-                    ? ids.filter(item => typeof dict[item] === 'undefined')
-                    : ids;
+                const idsRequiringFetch = dict ? ids.filter(item => typeof dict[item] === 'undefined') : ids;
                 // only fetch if there are any items to fetch
                 if (idsRequiringFetch.length) {
                     dbQueriesInTransaction += 1;
-                    const result = await state.get(type, idsRequiringFetch);
-                    transactionCache[type] || (transactionCache[type] = {});
-                    Object.assign(transactionCache[type], result);
+                    // Use per-sender-key queue for sender-key operations when possible
+                    if (type === 'sender-key') {
+                        logger.info({ idsRequiringFetch }, 'processing sender keys in transaction');
+                        // For sender keys, process each one with queued operations to maintain serialization
+                        for (const senderKeyName of idsRequiringFetch) {
+                            await queueSenderKeyOperation(senderKeyName, async () => {
+                                logger.info({ senderKeyName }, 'fetching sender key in transaction');
+                                const result = await state.get(type, [senderKeyName]);
+                                // Update transaction cache
+                                transactionCache[type] || (transactionCache[type] = {});
+                                Object.assign(transactionCache[type], result);
+                                logger.info({ senderKeyName, hasResult: !!result[senderKeyName] }, 'sender key fetch complete');
+                            });
+                        }
+                    }
+                    else {
+                        // Use runExclusive for cleaner mutex handling
+                        await getKeyTypeMutex(type).runExclusive(async () => {
+                            const result = await state.get(type, idsRequiringFetch);
+                            // Update transaction cache
+                            transactionCache[type] || (transactionCache[type] = {});
+                            Object.assign(transactionCache[type], result);
+                        });
+                    }
                 }
                 return ids.reduce((dict, id) => {
-                    var _a;
-                    const value = (_a = transactionCache[type]) === null || _a === void 0 ? void 0 : _a[id];
+                    const value = transactionCache[type]?.[id];
                     if (value) {
                         dict[id] = value;
                     }
@@ -108,71 +346,115 @@ const addTransactionCapability = (state, logger, { maxCommitRetries, delayBetweenTriesMs }) => {
                 }, {});
             }
             else {
-                return state.get(type, ids);
+                // Not in transaction, fetch directly with queue protection
+                if (type === 'sender-key') {
+                    // For sender keys, use individual queues to maintain per-key serialization
+                    const results = {};
+                    for (const senderKeyName of ids) {
+                        const result = await queueSenderKeyOperation(senderKeyName, async () => await state.get(type, [senderKeyName]));
+                        Object.assign(results, result);
+                    }
+                    return results;
+                }
+                else {
+                    return await getKeyTypeMutex(type).runExclusive(() => state.get(type, ids));
+                }
             }
         },
-        set: data => {
+        set: async (data) => {
            if (isInTransaction()) {
                 logger.trace({ types: Object.keys(data) }, 'caching in transaction');
-                for (const key in data) {
+                for (const key_ in data) {
+                    const key = key_;
                     transactionCache[key] = transactionCache[key] || {};
-                    Object.assign(transactionCache[key], data[key]);
-                    mutations[key] = mutations[key] || {};
-                    Object.assign(mutations[key], data[key]);
+                    // Special handling for pre-keys and signed-pre-keys
+                    if (key === 'pre-key') {
+                        await handlePreKeyOperations(data, key, transactionCache, mutations, logger, true);
+                    }
+                    else {
+                        // Normal handling for other key types
+                        handleNormalKeyOperations(data, key, transactionCache, mutations);
+                    }
                 }
             }
             else {
-                return state.set(data);
+                // Not in transaction, apply directly with mutex protection
+                const hasSenderKeys = 'sender-key' in data;
+                const senderKeyNames = hasSenderKeys ? Object.keys(data['sender-key'] || {}) : [];
+                if (hasSenderKeys) {
+                    logger.info({ senderKeyNames }, 'processing sender key set operations');
+                    // Handle sender key operations with per-key queues
+                    for (const senderKeyName of senderKeyNames) {
+                        await queueSenderKeyOperation(senderKeyName, async () => {
+                            // Create data subset for this specific sender key
+                            const senderKeyData = {
+                                'sender-key': {
+                                    [senderKeyName]: data['sender-key'][senderKeyName]
+                                }
+                            };
+                            logger.trace({ senderKeyName }, 'storing sender key');
+                            // Apply changes to the store
+                            await state.set(senderKeyData);
+                            logger.trace({ senderKeyName }, 'sender key stored');
+                        });
+                    }
+                    // Handle any non-sender-key data with regular mutexes
+                    const nonSenderKeyData = { ...data };
+                    delete nonSenderKeyData['sender-key'];
+                    if (Object.keys(nonSenderKeyData).length > 0) {
+                        await withMutexes(Object.keys(nonSenderKeyData), getKeyTypeMutex, async () => {
+                            // Process pre-keys and signed-pre-keys separately with specialized mutexes
+                            for (const key_ in nonSenderKeyData) {
+                                const keyType = key_;
+                                if (keyType === 'pre-key') {
+                                    await processPreKeyDeletions(nonSenderKeyData, keyType, state, logger);
+                                }
+                            }
+                            // Apply changes to the store
+                            await state.set(nonSenderKeyData);
+                        });
+                    }
+                }
+                else {
+                    // No sender keys - use original logic
+                    await withMutexes(Object.keys(data), getKeyTypeMutex, async () => {
+                        // Process pre-keys and signed-pre-keys separately with specialized mutexes
+                        for (const key_ in data) {
+                            const keyType = key_;
+                            if (keyType === 'pre-key') {
+                                await processPreKeyDeletions(data, keyType, state, logger);
+                            }
+                        }
+                        // Apply changes to the store
+                        await state.set(data);
+                    });
+                }
             }
         },
         isInTransaction,
-        async transaction(work) {
-            let result;
-            transactionsInProgress += 1;
-            if (transactionsInProgress === 1) {
-                logger.trace('entering transaction');
-            }
+        async transaction(work, key) {
+            const releaseTxMutex = await getTransactionMutex(key).acquire();
             try {
-                result = await work();
-                // commit if this is the outermost transaction
+                transactionsInProgress += 1;
                 if (transactionsInProgress === 1) {
-                    if (Object.keys(mutations).length) {
-                        logger.trace('committing transaction');
-                        // retry mechanism to ensure we've some recovery
-                        // in case a transaction fails in the first attempt
-                        let tries = maxCommitRetries;
-                        while (tries) {
-                            tries -= 1;
-                            try {
-                                await state.set(mutations);
-                                logger.trace({ dbQueriesInTransaction }, 'committed transaction');
-                                break;
-                            }
-                            catch (error) {
-                                logger.warn(`failed to commit ${Object.keys(mutations).length} mutations, tries left=${tries}`);
-                                await (0, generics_1.delay)(delayBetweenTriesMs);
-                            }
-                        }
-                    }
-                    else {
-                        logger.trace('no mutations in transaction');
-                    }
+                    logger.trace('entering transaction');
                 }
-            }
-            finally {
-                transactionsInProgress -= 1;
-                if (transactionsInProgress === 0) {
-                    transactionCache = {};
-                    mutations = {};
-                    dbQueriesInTransaction = 0;
+                // Release the transaction mutex now that we've updated the counter
+                // This allows other transactions to start preparing
+                releaseTxMutex();
+                try {
+                    return await executeTransactionWork(work);
+                }
+                finally {
+                    cleanupTransactionState();
                 }
             }
-            return result;
+            catch (error) {
+                releaseTxMutex();
+                throw error;
+            }
         }
     };
-    function isInTransaction() {
-        return transactionsInProgress > 0;
-    }
 };
 exports.addTransactionCapability = addTransactionCapability;
 const initAuthCreds = () => {
@@ -191,16 +473,10 @@ const initAuthCreds = () => {
         accountSettings: {
             unarchiveChats: false
         },
-        // mobile creds
-        deviceId: Buffer.from((0, uuid_1.v4)().replace(/-/g, ''), 'hex').toString('base64url'),
-        phoneId: (0, uuid_1.v4)(),
-        identityId: (0, crypto_1.randomBytes)(20),
         registered: false,
-        backupToken: (0, crypto_1.randomBytes)(20),
-        registration: {},
         pairingCode: undefined,
         lastPropHash: undefined,
-        routingInfo: undefined
+        routingInfo: undefined
     };
 };
 exports.initAuthCreds = initAuthCreds;
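The most visible API change above is that `transaction()` now takes a second `key` argument, which scopes the per-transaction mutex (`getTransactionMutex`), and that both store layers now serialize their work through async-mutex. A minimal sketch of how the two layers compose, assuming the package keeps Baileys' usual exports (`useMultiFileAuthState`, a pino-style logger); the paths, JIDs, and values below are illustrative, not from the diff:

// illustrative wiring of makeCacheableSignalKeyStore + addTransactionCapability;
// `useMultiFileAuthState` and the pino logger are assumed setup, not shown in this diff
import { addTransactionCapability, makeCacheableSignalKeyStore, useMultiFileAuthState } from '@hbmodsofc/baileys'
import pino from 'pino'

const logger = pino({ level: 'trace' })

async function demo() {
    const { state } = await useMultiFileAuthState('./auth-info')
    // layer 1: in-memory cache in front of the store; reads and writes are
    // now serialized through a single Mutex (cacheMutex above)
    const cached = makeCacheableSignalKeyStore(state.keys, logger)
    // layer 2: transactional batching with commit retries
    const keys = addTransactionCapability(cached, logger, {
        maxCommitRetries: 3,
        delayBetweenTriesMs: 200
    })
    // the second argument keys the transaction mutex (`transaction:<key>`);
    // 'sender-key' reads inside the transaction are further serialized per key name
    await keys.transaction(async () => {
        const sessions = await keys.get('session', ['1234567890.0'])
        await keys.set({ session: sessions }) // buffered in transactionCache, committed once on exit
    }, '1234567890@s.whatsapp.net')
}

void demo()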
package/lib/Utils/baileys-event-stream.js
CHANGED
@@ -50,7 +50,7 @@ const readAndEmitEventStream = (filename, delayIntervalMs = 0) => {
             if (line) {
                 const { event, data } = JSON.parse(line);
                 ev.emit(event, data);
-                delayIntervalMs && await (0, generics_1.delay)(delayIntervalMs);
+                delayIntervalMs && (await (0, generics_1.delay)(delayIntervalMs));
             }
         }
         fileStream.close();
package/lib/Utils/business.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { CatalogCollection, OrderDetails, Product, ProductCreate, ProductUpdate, WAMediaUpload, WAMediaUploadFunction } from '../Types';
-import { BinaryNode } from '../WABinary';
+import type { CatalogCollection, OrderDetails, Product, ProductCreate, ProductUpdate, WAMediaUpload, WAMediaUploadFunction } from '../Types';
+import { type BinaryNode } from '../WABinary';
 export declare const parseCatalogNode: (node: BinaryNode) => {
     products: Product[];
     nextPageCursor: string | undefined;
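The only change here is that the imports become type-only. Type-only imports are guaranteed to be erased from the emitted JavaScript, so the declaration file can reference '../Types' and '../WABinary' without implying any runtime require(). For instance:

// both forms are erased at compile time; only value imports survive in the emitted JS
import type { Product } from '../Types'                            // whole statement erased
import { type BinaryNode, getBinaryNodeChild } from '../WABinary'  // only getBinaryNodeChild is kept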
package/lib/Utils/business.js
CHANGED
@@ -1,9 +1,14 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.uploadingNecessaryImages = exports.uploadingNecessaryImagesOfProduct = exports.parseProductNode = exports.toProductNode = exports.parseOrderDetailsNode = exports.parseCollectionsNode = exports.parseCatalogNode = void 0;
+exports.uploadingNecessaryImages = exports.parseProductNode = exports.toProductNode = exports.parseOrderDetailsNode = exports.parseCollectionsNode = exports.parseCatalogNode = void 0;
+exports.uploadingNecessaryImagesOfProduct = uploadingNecessaryImagesOfProduct;
 const boom_1 = require("@hapi/boom");
 const crypto_1 = require("crypto");
+const fs_1 = require("fs");
+const os_1 = require("os");
+const path_1 = require("path");
 const WABinary_1 = require("../WABinary");
+const generics_1 = require("./generics");
 const messages_media_1 = require("./messages-media");
 const parseCatalogNode = (node) => {
     const catalogNode = (0, WABinary_1.getBinaryNodeChild)(node, 'product_catalog');
@@ -11,9 +16,7 @@ const parseCatalogNode = (node) => {
     const paging = (0, WABinary_1.getBinaryNodeChild)(catalogNode, 'paging');
     return {
         products,
-        nextPageCursor: paging
-            ? (0, WABinary_1.getBinaryNodeChildString)(paging, 'after')
-            : undefined
+        nextPageCursor: paging ? (0, WABinary_1.getBinaryNodeChildString)(paging, 'after') : undefined
     };
 };
 exports.parseCatalogNode = parseCatalogNode;
@@ -52,7 +55,7 @@ const parseOrderDetailsNode = (node) => {
     const orderDetails = {
         price: {
             total: +(0, WABinary_1.getBinaryNodeChildString)(priceNode, 'total'),
-            currency: (0, WABinary_1.getBinaryNodeChildString)(priceNode, 'currency')
+            currency: (0, WABinary_1.getBinaryNodeChildString)(priceNode, 'currency')
         },
         products
     };
@@ -164,7 +167,7 @@ const parseProductNode = (productNode) => {
         id,
         imageUrls: parseImageUrls(mediaNode),
         reviewStatus: {
-            whatsapp: (0, WABinary_1.getBinaryNodeChildString)(statusInfoNode, 'status')
+            whatsapp: (0, WABinary_1.getBinaryNodeChildString)(statusInfoNode, 'status')
         },
         availability: 'in stock',
         name: (0, WABinary_1.getBinaryNodeChildString)(productNode, 'name'),
@@ -173,7 +176,7 @@ const parseProductNode = (productNode) => {
         description: (0, WABinary_1.getBinaryNodeChildString)(productNode, 'description'),
         price: +(0, WABinary_1.getBinaryNodeChildString)(productNode, 'price'),
         currency: (0, WABinary_1.getBinaryNodeChildString)(productNode, 'currency'),
-        isHidden
+        isHidden
     };
     return product;
 };
@@ -184,11 +187,12 @@ exports.parseProductNode = parseProductNode;
 async function uploadingNecessaryImagesOfProduct(product, waUploadToServer, timeoutMs = 30000) {
     product = {
         ...product,
-        images: product.images ? await (0, exports.uploadingNecessaryImages)(product.images, waUploadToServer, timeoutMs) : product.images
+        images: product.images
+            ? await (0, exports.uploadingNecessaryImages)(product.images, waUploadToServer, timeoutMs)
+            : product.images
     };
     return product;
 }
-exports.uploadingNecessaryImagesOfProduct = uploadingNecessaryImagesOfProduct;
 /**
  * Uploads images not already uploaded to WA's servers
  */
@@ -202,17 +206,19 @@ const uploadingNecessaryImages = async (images, waUploadToServer, timeoutMs = 30
         }
         const { stream } = await (0, messages_media_1.getStream)(img);
         const hasher = (0, crypto_1.createHash)('sha256');
-        const contentBlocks = [];
+        const filePath = (0, path_1.join)((0, os_1.tmpdir)(), 'img' + (0, generics_1.generateMessageIDV2)());
+        const encFileWriteStream = (0, fs_1.createWriteStream)(filePath);
         for await (const block of stream) {
             hasher.update(block);
-            contentBlocks.push(block);
+            encFileWriteStream.write(block);
         }
         const sha = hasher.digest('base64');
-        const { directPath } = await waUploadToServer((0, messages_media_1.toReadable)(Buffer.concat(contentBlocks)), {
+        const { directPath } = await waUploadToServer(filePath, {
             mediaType: 'product-catalog-image',
             fileEncSha256B64: sha,
             timeoutMs
         });
+        await fs_1.promises.unlink(filePath).catch(err => console.log('Error deleting temp file ', err));
         return { url: (0, messages_media_1.getUrlFromDirectPath)(directPath) };
     }));
     return results;
@@ -229,6 +235,6 @@ const parseStatusInfo = (mediaNode) => {
     const node = (0, WABinary_1.getBinaryNodeChild)(mediaNode, 'status_info');
     return {
         status: (0, WABinary_1.getBinaryNodeChildString)(node, 'status'),
-        canAppeal: (0, WABinary_1.getBinaryNodeChildString)(node, 'can_appeal') === 'true'
+        canAppeal: (0, WABinary_1.getBinaryNodeChildString)(node, 'can_appeal') === 'true'
     };
 };