@nexustechpro/baileys 2.0.2 → 2.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +924 -1299
- package/lib/Defaults/baileys-version.json +6 -2
- package/lib/Defaults/index.js +172 -172
- package/lib/Signal/libsignal.js +380 -292
- package/lib/Signal/lid-mapping.js +264 -171
- package/lib/Socket/Client/index.js +2 -2
- package/lib/Socket/Client/types.js +10 -10
- package/lib/Socket/Client/websocket.js +45 -310
- package/lib/Socket/business.js +375 -375
- package/lib/Socket/chats.js +909 -963
- package/lib/Socket/communities.js +430 -430
- package/lib/Socket/groups.js +342 -342
- package/lib/Socket/index.js +22 -22
- package/lib/Socket/messages-recv.js +777 -743
- package/lib/Socket/messages-send.js +295 -305
- package/lib/Socket/mex.js +50 -50
- package/lib/Socket/newsletter.js +148 -148
- package/lib/Socket/nexus-handler.js +75 -261
- package/lib/Socket/socket.js +709 -1201
- package/lib/Store/index.js +5 -5
- package/lib/Store/make-cache-manager-store.js +81 -81
- package/lib/Store/make-in-memory-store.js +416 -416
- package/lib/Store/make-ordered-dictionary.js +81 -81
- package/lib/Store/object-repository.js +30 -30
- package/lib/Types/Auth.js +1 -1
- package/lib/Types/Bussines.js +1 -1
- package/lib/Types/Call.js +1 -1
- package/lib/Types/Chat.js +7 -7
- package/lib/Types/Contact.js +1 -1
- package/lib/Types/Events.js +1 -1
- package/lib/Types/GroupMetadata.js +1 -1
- package/lib/Types/Label.js +24 -24
- package/lib/Types/LabelAssociation.js +6 -6
- package/lib/Types/Message.js +10 -10
- package/lib/Types/Newsletter.js +28 -28
- package/lib/Types/Product.js +1 -1
- package/lib/Types/Signal.js +1 -1
- package/lib/Types/Socket.js +2 -2
- package/lib/Types/State.js +12 -12
- package/lib/Types/USync.js +1 -1
- package/lib/Types/index.js +25 -25
- package/lib/Utils/auth-utils.js +264 -256
- package/lib/Utils/baileys-event-stream.js +55 -55
- package/lib/Utils/browser-utils.js +27 -27
- package/lib/Utils/business.js +228 -230
- package/lib/Utils/chat-utils.js +694 -764
- package/lib/Utils/crypto.js +109 -135
- package/lib/Utils/decode-wa-message.js +310 -314
- package/lib/Utils/event-buffer.js +547 -547
- package/lib/Utils/generics.js +297 -297
- package/lib/Utils/history.js +91 -83
- package/lib/Utils/index.js +21 -20
- package/lib/Utils/key-store.js +17 -0
- package/lib/Utils/link-preview.js +97 -98
- package/lib/Utils/logger.js +2 -2
- package/lib/Utils/lt-hash.js +47 -47
- package/lib/Utils/make-mutex.js +39 -39
- package/lib/Utils/message-retry-manager.js +148 -148
- package/lib/Utils/messages-media.js +534 -534
- package/lib/Utils/messages.js +705 -705
- package/lib/Utils/noise-handler.js +255 -255
- package/lib/Utils/pre-key-manager.js +105 -105
- package/lib/Utils/process-message.js +412 -412
- package/lib/Utils/signal.js +160 -158
- package/lib/Utils/use-multi-file-auth-state.js +120 -120
- package/lib/Utils/validate-connection.js +194 -194
- package/lib/WABinary/constants.js +1300 -1300
- package/lib/WABinary/decode.js +237 -237
- package/lib/WABinary/encode.js +232 -232
- package/lib/WABinary/generic-utils.js +252 -211
- package/lib/WABinary/index.js +5 -5
- package/lib/WABinary/jid-utils.js +279 -95
- package/lib/WABinary/types.js +1 -1
- package/lib/WAM/BinaryInfo.js +9 -9
- package/lib/WAM/constants.js +22852 -22852
- package/lib/WAM/encode.js +149 -149
- package/lib/WAM/index.js +3 -3
- package/lib/WAUSync/Protocols/USyncContactProtocol.js +28 -28
- package/lib/WAUSync/Protocols/USyncDeviceProtocol.js +53 -53
- package/lib/WAUSync/Protocols/USyncDisappearingModeProtocol.js +26 -26
- package/lib/WAUSync/Protocols/USyncStatusProtocol.js +37 -37
- package/lib/WAUSync/Protocols/UsyncBotProfileProtocol.js +50 -50
- package/lib/WAUSync/Protocols/UsyncLIDProtocol.js +28 -28
- package/lib/WAUSync/Protocols/index.js +4 -4
- package/lib/WAUSync/USyncQuery.js +93 -93
- package/lib/WAUSync/USyncUser.js +22 -22
- package/lib/WAUSync/index.js +3 -3
- package/lib/index.js +66 -66
- package/package.json +171 -144
- package/lib/Signal/Group/ciphertext-message.js +0 -12
- package/lib/Signal/Group/group-session-builder.js +0 -30
- package/lib/Signal/Group/group_cipher.js +0 -100
- package/lib/Signal/Group/index.js +0 -12
- package/lib/Signal/Group/keyhelper.js +0 -18
- package/lib/Signal/Group/sender-chain-key.js +0 -26
- package/lib/Signal/Group/sender-key-distribution-message.js +0 -63
- package/lib/Signal/Group/sender-key-message.js +0 -66
- package/lib/Signal/Group/sender-key-name.js +0 -48
- package/lib/Signal/Group/sender-key-record.js +0 -41
- package/lib/Signal/Group/sender-key-state.js +0 -84
- package/lib/Signal/Group/sender-message-key.js +0 -26
package/lib/Utils/auth-utils.js
CHANGED
|
@@ -1,257 +1,265 @@
|
|
|
1
|
-
import NodeCache from '@cacheable/node-cache';
|
|
2
|
-
import { AsyncLocalStorage } from 'async_hooks';
|
|
3
|
-
import { Mutex } from 'async-mutex';
|
|
4
|
-
import { randomBytes } from 'crypto';
|
|
5
|
-
import PQueue from 'p-queue';
|
|
6
|
-
import {
|
|
7
|
-
import {
|
|
8
|
-
import {
|
|
9
|
-
import {
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
*
|
|
13
|
-
* @param
|
|
14
|
-
* @param
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
const
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
await
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
*
|
|
78
|
-
*
|
|
79
|
-
* @param
|
|
80
|
-
* @
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
const
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
}
|
|
134
|
-
|
|
135
|
-
}
|
|
136
|
-
}
|
|
137
|
-
}
|
|
138
|
-
return {
|
|
139
|
-
get: async (type, ids) => {
|
|
140
|
-
const ctx = txStorage.getStore();
|
|
141
|
-
if (!ctx) {
|
|
142
|
-
// No transaction - direct read without exclusive lock for concurrency
|
|
143
|
-
return state.get(type, ids);
|
|
144
|
-
}
|
|
145
|
-
// In transaction - check cache first
|
|
146
|
-
const cached = ctx.cache[type] || {};
|
|
147
|
-
const missing = ids.filter(id => !(id in cached));
|
|
148
|
-
if (missing.length > 0) {
|
|
149
|
-
ctx.dbQueries++;
|
|
150
|
-
logger.trace({ type, count: missing.length }, 'fetching missing keys in transaction');
|
|
151
|
-
const fetched = await getTxMutex(type).runExclusive(() => state.get(type, missing));
|
|
152
|
-
// Update cache
|
|
153
|
-
ctx.cache[type] = ctx.cache[type] || {};
|
|
154
|
-
Object.assign(ctx.cache[type], fetched);
|
|
155
|
-
}
|
|
156
|
-
// Return requested ids from cache
|
|
157
|
-
const result = {};
|
|
158
|
-
for (const id of ids) {
|
|
159
|
-
const value = ctx.cache[type]?.[id];
|
|
160
|
-
if (value !== undefined && value !== null) {
|
|
161
|
-
result[id] = value;
|
|
162
|
-
}
|
|
163
|
-
}
|
|
164
|
-
return result;
|
|
165
|
-
},
|
|
166
|
-
set: async (data) => {
|
|
167
|
-
const ctx = txStorage.getStore();
|
|
168
|
-
if (!ctx) {
|
|
169
|
-
// No transaction - direct write with queue protection
|
|
170
|
-
const types = Object.keys(data);
|
|
171
|
-
// Process pre-keys with validation
|
|
172
|
-
for (const type_ of types) {
|
|
173
|
-
const type = type_;
|
|
174
|
-
if (type === 'pre-key') {
|
|
175
|
-
await preKeyManager.validateDeletions(data, type);
|
|
176
|
-
}
|
|
177
|
-
}
|
|
178
|
-
// Write all data in parallel
|
|
179
|
-
await Promise.all(types.map(type => getQueue(type).add(async () => {
|
|
180
|
-
const typeData = { [type]: data[type] };
|
|
181
|
-
await state.set(typeData);
|
|
182
|
-
})));
|
|
183
|
-
return;
|
|
184
|
-
}
|
|
185
|
-
// In transaction - update cache and mutations
|
|
186
|
-
logger.trace({ types: Object.keys(data) }, 'caching in transaction');
|
|
187
|
-
for (const key_ in data) {
|
|
188
|
-
const key = key_;
|
|
189
|
-
// Ensure structures exist
|
|
190
|
-
ctx.cache[key] = ctx.cache[key] || {};
|
|
191
|
-
ctx.mutations[key] = ctx.mutations[key] || {};
|
|
192
|
-
// Special handling for pre-keys
|
|
193
|
-
if (key === 'pre-key') {
|
|
194
|
-
await preKeyManager.processOperations(data, key, ctx.cache, ctx.mutations, true);
|
|
195
|
-
}
|
|
196
|
-
else {
|
|
197
|
-
// Normal key types
|
|
198
|
-
Object.assign(ctx.cache[key], data[key]);
|
|
199
|
-
Object.assign(ctx.mutations[key], data[key]);
|
|
200
|
-
}
|
|
201
|
-
}
|
|
202
|
-
},
|
|
203
|
-
isInTransaction,
|
|
204
|
-
transaction: async (work, key) => {
|
|
205
|
-
const existing = txStorage.getStore();
|
|
206
|
-
// Nested transaction - reuse existing context
|
|
207
|
-
if (existing) {
|
|
208
|
-
logger.trace('reusing existing transaction context');
|
|
209
|
-
return work();
|
|
210
|
-
}
|
|
211
|
-
// New transaction - acquire mutex and create context
|
|
212
|
-
return getTxMutex(key).runExclusive(async () => {
|
|
213
|
-
const ctx = {
|
|
214
|
-
cache: {},
|
|
215
|
-
mutations: {},
|
|
216
|
-
dbQueries: 0
|
|
217
|
-
};
|
|
218
|
-
logger.trace('entering transaction');
|
|
219
|
-
try {
|
|
220
|
-
const result = await txStorage.run(ctx, work);
|
|
221
|
-
// Commit mutations
|
|
222
|
-
await commitWithRetry(ctx.mutations);
|
|
223
|
-
logger.trace({ dbQueries: ctx.dbQueries }, 'transaction completed');
|
|
224
|
-
return result;
|
|
225
|
-
}
|
|
226
|
-
catch (error) {
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
}
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
1
|
+
import NodeCache from '@cacheable/node-cache';
|
|
2
|
+
import { AsyncLocalStorage } from 'async_hooks';
|
|
3
|
+
import { Mutex } from 'async-mutex';
|
|
4
|
+
import { randomBytes } from 'crypto';
|
|
5
|
+
import PQueue from 'p-queue';
|
|
6
|
+
import { v4 } from 'uuid';
|
|
7
|
+
import { DEFAULT_CACHE_TTLS } from '../Defaults/index.js';
|
|
8
|
+
import { Curve, signedKeyPair } from './crypto.js';
|
|
9
|
+
import { delay, generateRegistrationId } from './generics.js';
|
|
10
|
+
import { PreKeyManager } from './pre-key-manager.js';
|
|
11
|
+
/**
 * Adds caching capability to a SignalKeyStore.
 *
 * Reads check the cache first and only fall through to the underlying
 * store for ids that are missing; writes go to both the cache and the
 * store. All operations are serialized through a mutex so concurrent
 * get/set calls cannot interleave their cache reads and writes.
 *
 * @param store the store to add caching to
 * @param logger to log trace events (optional)
 * @param _cache cache store to use; defaults to an in-memory NodeCache
 *               with a 5-minute TTL
 */
export function makeCacheableSignalKeyStore(store, logger, _cache) {
    const cache = _cache ||
        new NodeCache({
            stdTTL: DEFAULT_CACHE_TTLS.SIGNAL_STORE, // 5 minutes
            useClones: false,
            deleteOnExpire: true
        });
    // Mutex for protecting cache operations
    const cacheMutex = new Mutex();
    // namespaced cache key, e.g. "session.<id>"
    function getUniqueId(type, id) {
        return `${type}.${id}`;
    }
    return {
        async get(type, ids) {
            return cacheMutex.runExclusive(async () => {
                const data = {};
                const idsToFetch = [];
                for (const id of ids) {
                    const item = await cache.get(getUniqueId(type, id));
                    if (typeof item !== 'undefined') {
                        data[id] = item;
                    }
                    else {
                        idsToFetch.push(id);
                    }
                }
                if (idsToFetch.length) {
                    logger?.trace({ items: idsToFetch.length }, 'loading from store');
                    const fetched = await store.get(type, idsToFetch);
                    for (const id of idsToFetch) {
                        const item = fetched[id];
                        if (item) {
                            data[id] = item;
                            // fix: await the cache write — the cache may be
                            // async-backed (cache.get/cache.set are awaited
                            // everywhere else in this function); previously this
                            // promise was left floating
                            await cache.set(getUniqueId(type, id), item);
                        }
                    }
                }
                return data;
            });
        },
        async set(data) {
            return cacheMutex.runExclusive(async () => {
                let keys = 0;
                for (const type in data) {
                    for (const id in data[type]) {
                        await cache.set(getUniqueId(type, id), data[type][id]);
                        keys += 1;
                    }
                }
                logger?.trace({ keys }, 'updated cache');
                await store.set(data);
            });
        },
        async clear() {
            await cache.flushAll();
            // the underlying store may not implement clear()
            await store.clear?.();
        }
    };
}
|
|
76
|
+
/**
 * Adds DB-like transaction capability to the SignalKeyStore.
 * Uses AsyncLocalStorage for automatic context management: any get/set
 * performed inside a `transaction(work)` call is buffered in the
 * transaction context and committed in one batch when `work` completes.
 *
 * @param state the key store to apply this capability to
 * @param logger logger to log events
 * @param options.maxCommitRetries number of attempts to commit the mutations
 * @param options.delayBetweenTriesMs delay between commit attempts
 * @returns SignalKeyStore with transaction capability
 */
export const addTransactionCapability = (state, logger, { maxCommitRetries, delayBetweenTriesMs }) => {
    const txStorage = new AsyncLocalStorage();
    // Queues for concurrency control
    const keyQueues = new Map();
    const txMutexes = new Map();
    // Pre-key manager for specialized operations
    const preKeyManager = new PreKeyManager(state, logger);
    /**
     * Get or create a serial (concurrency 1) queue for a specific key type
     */
    function getQueue(key) {
        if (!keyQueues.has(key)) {
            keyQueues.set(key, new PQueue({ concurrency: 1 }));
        }
        return keyQueues.get(key);
    }
    /**
     * Get or create a transaction mutex for a specific key
     */
    function getTxMutex(key) {
        if (!txMutexes.has(key)) {
            txMutexes.set(key, new Mutex());
        }
        return txMutexes.get(key);
    }
    /**
     * Check if currently in a transaction
     */
    function isInTransaction() {
        return !!txStorage.getStore();
    }
    /**
     * Commit transaction mutations with retries.
     *
     * Expected signal-protocol decrypt errors are swallowed (the message is
     * dropped silently); any other error is retried up to `maxCommitRetries`
     * times with `delayBetweenTriesMs` between attempts, then rethrown.
     */
    async function commitWithRetry(mutations) {
        if (Object.keys(mutations).length === 0) {
            logger.trace('no mutations in transaction');
            return;
        }
        logger.trace('committing transaction');
        for (let attempt = 0; attempt < maxCommitRetries; attempt++) {
            try {
                await state.set(mutations);
                logger.trace({ mutationCount: Object.keys(mutations).length }, 'committed transaction');
                return;
            }
            catch (error) {
                const msg = error?.message || (typeof error === 'string' ? error : '') || ''
                const isExpected = msg.includes('InvalidPreKeyId') || msg.includes('SessionNotFound') || msg.includes('InvalidMessage') || msg.includes('no sender key state') || msg.includes('old counter') || msg.includes('DuplicatedMessage') || msg.includes('Connection Closed')
                if (isExpected) { logger?.debug?.({ error: msg }, 'transaction skipped — expected decrypt error, message dropped silently'); return undefined } // skip, no retry
                // fix: previously the loop threw on the first unexpected error,
                // so maxCommitRetries/delayBetweenTriesMs were never honoured —
                // now retry transient failures with the configured delay
                const retriesLeft = maxCommitRetries - attempt - 1
                if (retriesLeft > 0) {
                    logger?.warn?.({ error: msg || error, retriesLeft }, 'commit failed, retrying')
                    await delay(delayBetweenTriesMs)
                    continue
                }
                logger?.error?.({ error: msg || error }, 'transaction failed, rolling back')
                throw error
            }
        }
    }
    return {
        get: async (type, ids) => {
            const ctx = txStorage.getStore();
            if (!ctx) {
                // No transaction - direct read without exclusive lock for concurrency
                return state.get(type, ids);
            }
            // In transaction - check cache first
            const cached = ctx.cache[type] || {};
            const missing = ids.filter(id => !(id in cached));
            if (missing.length > 0) {
                ctx.dbQueries++;
                logger.trace({ type, count: missing.length }, 'fetching missing keys in transaction');
                const fetched = await getTxMutex(type).runExclusive(() => state.get(type, missing));
                // Update cache
                ctx.cache[type] = ctx.cache[type] || {};
                Object.assign(ctx.cache[type], fetched);
            }
            // Return requested ids from cache
            const result = {};
            for (const id of ids) {
                const value = ctx.cache[type]?.[id];
                if (value !== undefined && value !== null) {
                    result[id] = value;
                }
            }
            return result;
        },
        set: async (data) => {
            const ctx = txStorage.getStore();
            if (!ctx) {
                // No transaction - direct write with queue protection
                const types = Object.keys(data);
                // Process pre-keys with validation
                for (const type_ of types) {
                    const type = type_;
                    if (type === 'pre-key') {
                        await preKeyManager.validateDeletions(data, type);
                    }
                }
                // Write all data in parallel (one serial queue per key type)
                await Promise.all(types.map(type => getQueue(type).add(async () => {
                    const typeData = { [type]: data[type] };
                    await state.set(typeData);
                })));
                return;
            }
            // In transaction - update cache and mutations
            logger.trace({ types: Object.keys(data) }, 'caching in transaction');
            for (const key_ in data) {
                const key = key_;
                // Ensure structures exist
                ctx.cache[key] = ctx.cache[key] || {};
                ctx.mutations[key] = ctx.mutations[key] || {};
                // Special handling for pre-keys
                if (key === 'pre-key') {
                    await preKeyManager.processOperations(data, key, ctx.cache, ctx.mutations, true);
                }
                else {
                    // Normal key types
                    Object.assign(ctx.cache[key], data[key]);
                    Object.assign(ctx.mutations[key], data[key]);
                }
            }
        },
        isInTransaction,
        transaction: async (work, key) => {
            const existing = txStorage.getStore();
            // Nested transaction - reuse existing context
            if (existing) {
                logger.trace('reusing existing transaction context');
                return work();
            }
            // New transaction - acquire mutex and create context
            return getTxMutex(key).runExclusive(async () => {
                const ctx = {
                    cache: {},
                    mutations: {},
                    dbQueries: 0
                };
                logger.trace('entering transaction');
                try {
                    const result = await txStorage.run(ctx, work);
                    // Commit mutations
                    await commitWithRetry(ctx.mutations);
                    logger.trace({ dbQueries: ctx.dbQueries }, 'transaction completed');
                    return result;
                }
                catch (error) {
                    const msg = error?.message || (typeof error === 'string' ? error : '') || ''
                    const isExpected = msg.includes('InvalidPreKeyId') || msg.includes('SessionNotFound') || msg.includes('InvalidMessage') || msg.includes('no sender key state') || msg.includes('old counter')
                    // expected decrypt errors are only worth a debug line;
                    // anything else is a real rollback and logged as an error
                    if (isExpected) {
                        logger?.debug?.({ error: msg || error }, 'transaction failed, rolling back')
                    }
                    else {
                        logger?.error?.({ error: msg || error }, 'transaction failed, rolling back')
                    }
                    throw error
                }
            });
        }
    };
};
|
|
236
|
+
/**
 * Builds a fresh set of authentication credentials for a new session:
 * key pairs, registration identifiers and mobile-style device metadata.
 * Every call produces new random material.
 */
export const initAuthCreds = () => {
    // the long-term identity pair is created first so the signed pre-key
    // below can be derived from it
    const identity = Curve.generateKeyPair();
    const noiseKey = Curve.generateKeyPair();
    const pairingEphemeralKeyPair = Curve.generateKeyPair();
    return {
        noiseKey,
        pairingEphemeralKeyPair,
        signedIdentityKey: identity,
        signedPreKey: signedKeyPair(identity, 1),
        registrationId: generateRegistrationId(),
        advSecretKey: randomBytes(32).toString('base64'),
        processedHistoryMessages: [],
        nextPreKeyId: 1,
        firstUnuploadedPreKeyId: 1,
        accountSyncCounter: 0,
        accountSettings: {
            unarchiveChats: false
        },
        // mobile creds: a UUID re-encoded as base64url for the device id
        deviceId: Buffer.from(v4().replace(/-/g, ''), 'hex').toString('base64url'),
        phoneId: v4(),
        identityId: randomBytes(20),
        registered: false,
        backupToken: randomBytes(20),
        registration: {},
        pairingCode: undefined,
        lastPropHash: undefined,
        routingInfo: undefined,
        additionalData: undefined
    };
};
|
|
257
265
|
//# sourceMappingURL=auth-utils.js.map
|
|
@@ -1,56 +1,56 @@
|
|
|
1
|
-
import EventEmitter from 'events';
|
|
2
|
-
import { createReadStream } from 'fs';
|
|
3
|
-
import { writeFile } from 'fs/promises';
|
|
4
|
-
import { createInterface } from 'readline';
|
|
5
|
-
import { delay } from './generics.js';
|
|
6
|
-
import { makeMutex } from './make-mutex.js';
|
|
7
|
-
/**
|
|
8
|
-
* Captures events from a baileys event emitter & stores them in a file
|
|
9
|
-
* @param ev The event emitter to read events from
|
|
10
|
-
* @param filename File to save to
|
|
11
|
-
*/
|
|
12
|
-
export const captureEventStream = (ev, filename) => {
|
|
13
|
-
const oldEmit = ev.emit;
|
|
14
|
-
// write mutex so data is appended in order
|
|
15
|
-
const writeMutex = makeMutex();
|
|
16
|
-
// monkey patch eventemitter to capture all events
|
|
17
|
-
ev.emit = function (...args) {
|
|
18
|
-
const content = JSON.stringify({ timestamp: Date.now(), event: args[0], data: args[1] }) + '\n';
|
|
19
|
-
const result = oldEmit.apply(ev, args);
|
|
20
|
-
writeMutex.mutex(async () => {
|
|
21
|
-
await writeFile(filename, content, { flag: 'a' });
|
|
22
|
-
});
|
|
23
|
-
return result;
|
|
24
|
-
};
|
|
25
|
-
};
|
|
26
|
-
/**
|
|
27
|
-
* Read event file and emit events from there
|
|
28
|
-
* @param filename filename containing event data
|
|
29
|
-
* @param delayIntervalMs delay between each event emit
|
|
30
|
-
*/
|
|
31
|
-
export const readAndEmitEventStream = (filename, delayIntervalMs = 0) => {
|
|
32
|
-
const ev = new EventEmitter();
|
|
33
|
-
const fireEvents = async () => {
|
|
34
|
-
// from: https://stackoverflow.com/questions/6156501/read-a-file-one-line-at-a-time-in-node-js
|
|
35
|
-
const fileStream = createReadStream(filename);
|
|
36
|
-
const rl = createInterface({
|
|
37
|
-
input: fileStream,
|
|
38
|
-
crlfDelay: Infinity
|
|
39
|
-
});
|
|
40
|
-
// Note: we use the crlfDelay option to recognize all instances of CR LF
|
|
41
|
-
// ('\r\n') in input.txt as a single line break.
|
|
42
|
-
for await (const line of rl) {
|
|
43
|
-
if (line) {
|
|
44
|
-
const { event, data } = JSON.parse(line);
|
|
45
|
-
ev.emit(event, data);
|
|
46
|
-
delayIntervalMs && (await delay(delayIntervalMs));
|
|
47
|
-
}
|
|
48
|
-
}
|
|
49
|
-
fileStream.close();
|
|
50
|
-
};
|
|
51
|
-
return {
|
|
52
|
-
ev,
|
|
53
|
-
task: fireEvents()
|
|
54
|
-
};
|
|
55
|
-
};
|
|
1
|
+
import EventEmitter from 'events';
|
|
2
|
+
import { createReadStream } from 'fs';
|
|
3
|
+
import { writeFile } from 'fs/promises';
|
|
4
|
+
import { createInterface } from 'readline';
|
|
5
|
+
import { delay } from './generics.js';
|
|
6
|
+
import { makeMutex } from './make-mutex.js';
|
|
7
|
+
/**
 * Captures events from a baileys event emitter & stores them in a file,
 * one JSON line per event.
 * @param ev The event emitter to read events from
 * @param filename File to save to
 */
export const captureEventStream = (ev, filename) => {
    const originalEmit = ev.emit;
    // appends are funnelled through a mutex so lines land in emission order
    const appendLock = makeMutex();
    // monkey-patch the emitter so every event is captured
    ev.emit = function (...args) {
        // serialize before emitting, in case a listener mutates the payload
        const line = JSON.stringify({ timestamp: Date.now(), event: args[0], data: args[1] }) + '\n';
        const emitted = originalEmit.apply(ev, args);
        appendLock.mutex(async () => {
            await writeFile(filename, line, { flag: 'a' });
        });
        return emitted;
    };
};
|
|
26
|
+
/**
 * Read an event file (as written by captureEventStream) and emit the
 * recorded events from the returned emitter.
 * @param filename filename containing event data
 * @param delayIntervalMs delay between each event emit
 * @returns an object with the emitter and the promise of the replay task
 */
export const readAndEmitEventStream = (filename, delayIntervalMs = 0) => {
    const ev = new EventEmitter();
    // replay starts immediately; callers attach listeners synchronously
    // after this returns, before the first async read resolves
    const task = (async () => {
        const stream = createReadStream(filename);
        // crlfDelay: Infinity recognizes all instances of CR LF ('\r\n')
        // in the input as a single line break
        const reader = createInterface({
            input: stream,
            crlfDelay: Infinity
        });
        for await (const line of reader) {
            if (!line) {
                continue; // skip blank lines
            }
            const { event, data } = JSON.parse(line);
            ev.emit(event, data);
            if (delayIntervalMs) {
                await delay(delayIntervalMs);
            }
        }
        stream.close();
    })();
    return { ev, task };
};
|
|
56
56
|
//# sourceMappingURL=baileys-event-stream.js.map
|