dop-wallet-v6 1.2.21 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/services/dop/core/react-native-init.d.ts +14 -5
- package/dist/services/dop/core/react-native-init.js +449 -34
- package/dist/services/dop/core/react-native-init.js.map +1 -1
- package/dist/services/dop/crypto/react-native-crypto-provider.d.ts +1 -1
- package/dist/services/dop/crypto/react-native-crypto-provider.js +20 -2
- package/dist/services/dop/crypto/react-native-crypto-provider.js.map +1 -1
- package/dist/services/dop/dop-txids/graphql/index.js +3 -6
- package/dist/services/dop/dop-txids/graphql/index.js.map +1 -1
- package/dist/services/dop/quick-sync/V2/graphql/index.js +3 -6
- package/dist/services/dop/quick-sync/V2/graphql/index.js.map +1 -1
- package/dist/services/dop/quick-sync/V3/graphql/index.js +3 -6
- package/dist/services/dop/quick-sync/V3/graphql/index.js.map +1 -1
- package/dist/tests/balances.test.js +115 -112
- package/dist/tests/balances.test.js.map +1 -1
- package/dist/tests/setup.test.js +5 -1
- package/dist/tests/setup.test.js.map +1 -1
- package/package.json +2 -1
- package/patches/dop-engine-v3+1.4.9.patch +252 -2
- package/dist/tests/on-chain-event-logger.d.ts +0 -62
- package/dist/tests/on-chain-event-logger.js +0 -217
- package/dist/tests/on-chain-event-logger.js.map +0 -1
package/dist/services/dop/core/react-native-init.d.ts

@@ -7,15 +7,24 @@ export declare class ReactNativeLevelDB {
     private db;
     private storageKey;
     private AsyncStorage;
+    private persistTimeout;
+    private isDirty;
     constructor(name: string);
-    open(callback
+    open(callbackOrOptions?: any, callback?: (error?: Error) => void): Promise<void>;
     close(callback: (error?: Error) => void): void;
-
-
-
-
+    forcePersist(): Promise<void>;
+    clearPersistedData(): Promise<void>;
+    put(key: any, value: any, optionsOrCallback?: any, callback?: (error?: Error) => void): void;
+    get(key: any, optionsOrCallback?: any, callback?: (error?: Error, value?: any) => void): void;
+    del(key: any, optionsOrCallback?: any, callback?: (error?: Error) => void): void;
+    clear(options: any, callback: (error?: Error) => void): void;
+    batch(operationsOrCallback?: any, optionsOrCallback?: any, callback?: (error?: Error) => void): any;
     iterator(options?: any): any;
+    private _schedulePersistence;
     private _persistData;
+    private _cleanupExtraChunks;
+    private _cleanupAllChunks;
+    private _cleanupOldChunks;
 }
 /**
  * Initialize DOP Engine specifically for React Native environments.
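For orientation, a minimal TypeScript sketch of how a consumer might exercise the widened declaration above. The method names and signatures come from the .d.ts diff; the import path, option handling, and function name are assumptions for illustration only.

// Hypothetical usage sketch (not part of the package diff).
// The import path below is assumed from the dist layout listed at the top.
import { ReactNativeLevelDB } from 'dop-wallet-v6/dist/services/dop/core/react-native-init';

async function demo(): Promise<void> {
  const db = new ReactNativeLevelDB('dop-wallet-db');

  // open() now accepts open(callback) or open(options, callback) and returns a Promise.
  await db.open((error?: Error) => {
    if (error) console.error('open failed', error);
  });
  // or: await db.open({ /* options */ }, (error?: Error) => {});

  // New public helpers introduced in 1.3.1:
  await db.forcePersist();        // flush pending writes to AsyncStorage immediately
  await db.clearPersistedData();  // drop everything this database has persisted
}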
package/dist/services/dop/core/react-native-init.js

@@ -19,6 +19,8 @@ class ReactNativeLevelDB {
     db;
     storageKey;
     AsyncStorage;
+    persistTimeout = null;
+    isDirty = false;
     constructor(name) {
         this.storageKey = `leveldb_${name}`;
         this.db = memdown();
@@ -33,18 +35,87 @@ class ReactNativeLevelDB {
         }
     }
     // Implement AbstractLevelDOWN interface
-    async open(callback) {
+    async open(callbackOrOptions, callback) {
+        // Handle both open(callback) and open(options, callback) signatures
+        let cb;
+        if (typeof callbackOrOptions === 'function') {
+            // open(callback) signature
+            cb = callbackOrOptions;
+        }
+        else if (typeof callback === 'function') {
+            // open(options, callback) signature
+            cb = callback;
+        }
+        else {
+            // No callback provided - this shouldn't happen in AbstractLevelDOWN
+            throw new Error('No callback provided to open method');
+        }
         try {
             // Load persisted data from AsyncStorage (only if available)
             if (this.AsyncStorage) {
                 try {
-
+                    let persistedData = null;
+                    // Check if data is stored in chunks
+                    const chunksCount = await this.AsyncStorage.getItem(`${this.storageKey}_chunks`);
+                    if (chunksCount) {
+                        console.log(`📦 Loading database from ${String(chunksCount)} chunks...`);
+                        // Reassemble from chunks
+                        const chunks = [];
+                        const count = parseInt(chunksCount, 10);
+                        if (Number.isNaN(count) || count < 0 || count > 1000) {
+                            throw new Error(`Invalid chunk count: ${String(chunksCount)}`);
+                        }
+                        for (let i = 0; i < count; i += 1) {
+                            const chunk = await this.AsyncStorage.getItem(`${this.storageKey}_chunk_${i}`);
+                            if (chunk) {
+                                chunks.push(chunk);
+                            }
+                            else {
+                                console.warn(`⚠️ Missing chunk ${i} of ${count}, data may be incomplete`);
+                            }
+                        }
+                        if (chunks.length === 0) {
+                            throw new Error('No chunks found, but chunk count was set');
+                        }
+                        persistedData = chunks.join('');
+                        console.log(`📦 Reassembled ${chunks.length}/${count} chunks (${(persistedData.length / 1024 / 1024).toFixed(2)}MB)`);
+                    }
+                    else {
+                        // Try to get data directly
+                        persistedData = await this.AsyncStorage.getItem(this.storageKey);
+                        if (persistedData) {
+                            console.log(`📦 Loading database directly (${(persistedData.length / 1024).toFixed(2)}KB)`);
+                        }
+                    }
                     if (persistedData) {
-
+                        // Validate JSON before parsing
+                        if (!persistedData.trim().startsWith('{') || !persistedData.trim().endsWith('}')) {
+                            throw new Error('Persisted data does not look like valid JSON (missing braces)');
+                        }
+                        let data;
+                        try {
+                            data = JSON.parse(persistedData);
+                        }
+                        catch (jsonError) {
+                            throw new Error(`JSON parse failed: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}`);
+                        }
                         // Restore data to memdown instance
+                        const keys = Object.keys(data);
+                        console.log(`📦 Restoring ${keys.length} entries to database...`);
                         for (const [key, value] of Object.entries(data)) {
                             await new Promise((resolve, reject) => {
-
+                                // Restore Buffer/Uint8Array types from base64 with type marker
+                                let restoredValue = value;
+                                if (value && typeof value === 'object' && '__type' in value) {
+                                    const typedValue = value;
+                                    if (typedValue.__type === 'Buffer' && typeof typedValue.data === 'string') {
+                                        restoredValue = Buffer.from(typedValue.data, 'base64');
+                                    }
+                                    else if (typedValue.__type === 'Uint8Array' && typeof typedValue.data === 'string') {
+                                        restoredValue = new Uint8Array(Buffer.from(typedValue.data, 'base64'));
+                                    }
+                                }
+                                this.db.put(key, restoredValue, (err) => {
                                     if (err)
                                         reject(err);
                                     else
@@ -52,58 +123,253 @@ class ReactNativeLevelDB {
                                 });
                             });
                         }
+                        console.log('✅ Successfully restored database from AsyncStorage');
+                    }
+                    else {
+                        console.log('ℹ️ No persisted data found, starting with empty database');
                     }
                 }
                 catch (asyncStorageError) {
-
+                    console.error('❌ Failed to load from AsyncStorage, clearing corrupted data and starting fresh:', asyncStorageError);
+                    // Clear corrupted data
+                    try {
+                        await this.AsyncStorage.removeItem(this.storageKey);
+                        await this.AsyncStorage.removeItem(`${this.storageKey}_chunks`);
+                        // Try to remove chunks (up to 100 chunks max)
+                        for (let i = 0; i < 100; i += 1) {
+                            const chunkKey = `${this.storageKey}_chunk_${i}`;
+                            const exists = await this.AsyncStorage.getItem(chunkKey);
+                            if (exists) {
+                                await this.AsyncStorage.removeItem(chunkKey);
+                            }
+                            else {
+                                break; // No more chunks
+                            }
+                        }
+                        console.log('🧹 Cleared corrupted AsyncStorage data');
+                    }
+                    catch (clearError) {
+                        console.warn('⚠️ Failed to clear corrupted data:', clearError);
+                    }
                 }
             }
             // Open the memdown database
-            this.db.open(
+            this.db.open(cb);
         }
         catch (error) {
-
+            cb(error);
         }
     }
     close(callback) {
-
+        // Clear pending persistence timeout
+        if (this.persistTimeout) {
+            clearTimeout(this.persistTimeout);
+            this.persistTimeout = null;
+        }
+        // Force immediate persistence before closing if dirty
+        if (this.isDirty && this.AsyncStorage) {
+            console.log('💾 Persisting database before close...');
+            this.isDirty = false;
+            this._persistData()
+                // eslint-disable-next-line @typescript-eslint/no-unsafe-return
+                .then(() => {
+                    console.log('✅ Database persisted, closing...');
+                    this.db.close(callback);
+                })
+                .catch((error) => {
+                    console.warn('⚠️ Failed to persist on close:', error);
+                    this.db.close(callback);
+                });
+        }
+        else {
+            this.db.close(callback);
+        }
+    }
+    // Public method to force immediate persistence (useful after scan completes)
+    async forcePersist() {
+        if (this.persistTimeout) {
+            clearTimeout(this.persistTimeout);
+            this.persistTimeout = null;
+        }
+        if (this.isDirty && this.AsyncStorage) {
+            this.isDirty = false;
+            await this._persistData();
+        }
     }
-
+    // Public method to clear all persisted data (use when recovering from storage full errors)
+    async clearPersistedData() {
+        console.log('🧹 Clearing all persisted database data from AsyncStorage...');
+        await this._cleanupOldChunks();
+        console.log('✅ Cleared all persisted data');
+    }
+    put(key, value, optionsOrCallback, callback) {
+        // Handle both put(key, value, callback) and put(key, value, options, callback) signatures
+        let cb;
+        if (typeof optionsOrCallback === 'function') {
+            // put(key, value, callback) signature
+            cb = optionsOrCallback;
+        }
+        else if (typeof callback === 'function') {
+            // put(key, value, options, callback) signature
+            cb = callback;
+        }
+        else {
+            throw new Error('No callback provided to put method');
+        }
         this.db.put(key, value, (err) => {
             if (!err && this.AsyncStorage) {
-                //
-
+                // Schedule throttled persistence
+                this._schedulePersistence();
             }
-
+            cb(err);
         });
     }
-    get(key, callback) {
-
+    get(key, optionsOrCallback, callback) {
+        // Handle both get(key, callback) and get(key, options, callback) signatures
+        let cb;
+        if (typeof optionsOrCallback === 'function') {
+            // get(key, callback) signature
+            cb = optionsOrCallback;
+        }
+        else if (typeof callback === 'function') {
+            // get(key, options, callback) signature
+            cb = callback;
+        }
+        else {
+            throw new Error('No callback provided to get method');
+        }
+        this.db.get(key, cb);
     }
-    del(key, callback) {
+    del(key, optionsOrCallback, callback) {
+        // Handle both del(key, callback) and del(key, options, callback) signatures
+        let cb;
+        if (typeof optionsOrCallback === 'function') {
+            // del(key, callback) signature
+            cb = optionsOrCallback;
+        }
+        else if (typeof callback === 'function') {
+            // del(key, options, callback) signature
+            cb = callback;
+        }
+        else {
+            throw new Error('No callback provided to del method');
+        }
         this.db.del(key, (err) => {
             if (!err && this.AsyncStorage) {
-                //
-
+                // Schedule throttled persistence
+                this._schedulePersistence();
             }
-
+            cb(err);
         });
     }
-
+    clear(options, callback) {
+        // Handle clear operation for deleting ranges of keys
+        // This is required by dop-engine-v3's clearNamespace function
+        const { gte, lte, gt, lt } = options || {};
+        if (!gte && !gt) {
+            callback(new Error('clear() requires gte or gt option'));
+            return;
+        }
+        // Use iterator to find all keys in the range and delete them
+        const keysToDelete = [];
+        const iterator = this.db.iterator({
+            gte,
+            lte,
+            gt,
+            lt,
+            keys: true,
+            values: false,
+        });
+        const processNext = () => {
+            iterator.next((err, key) => {
+                if (err) {
+                    iterator.end(() => {
+                        callback(err);
+                    });
+                    return;
+                }
+                if (key === undefined) {
+                    // No more keys - now delete all collected keys
+                    iterator.end(() => {
+                        if (keysToDelete.length === 0) {
+                            callback();
+                            return;
+                        }
+                        // Delete keys in batch
+                        const operations = keysToDelete.map(k => ({ type: 'del', key: k }));
+                        this.db.batch(operations, (batchErr) => {
+                            if (!batchErr && this.AsyncStorage) {
+                                this._schedulePersistence();
+                            }
+                            callback(batchErr);
+                        });
+                    });
+                    return;
+                }
+                keysToDelete.push(key);
+                processNext();
+            });
+        };
+        processNext();
+    }
+    batch(operationsOrCallback, optionsOrCallback, callback) {
+        // Handle multiple batch signatures:
+        // batch() - returns chained batch
+        // batch(operations, callback)
+        // batch(operations, options, callback)
+        if (arguments.length === 0) {
+            // batch() - return chained batch (not commonly used in LevelUp)
+            return this.db.batch();
+        }
+        // Handle batch operations with callback
+        const operations = operationsOrCallback;
+        let cb;
+        if (typeof optionsOrCallback === 'function') {
+            // batch(operations, callback) signature
+            cb = optionsOrCallback;
+        }
+        else if (typeof callback === 'function') {
+            // batch(operations, options, callback) signature
+            cb = callback;
+        }
+        else {
+            throw new Error('No callback provided to batch method');
+        }
         this.db.batch(operations, (err) => {
             if (!err && this.AsyncStorage) {
-                //
-
+                // Schedule throttled persistence
+                this._schedulePersistence();
             }
-
+            cb(err);
         });
     }
     iterator(options) {
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-return
         return this.db.iterator(options);
     }
+    _schedulePersistence() {
+        this.isDirty = true;
+        // Clear existing timeout
+        if (this.persistTimeout) {
+            clearTimeout(this.persistTimeout);
+        }
+        // Schedule persistence with longer throttling during heavy writes (every 30 seconds)
+        // This dramatically improves scan performance by reducing serialization overhead
+        this.persistTimeout = setTimeout(() => {
+            if (this.isDirty) {
+                this.isDirty = false;
+                void this._persistData().catch((error) => {
+                    console.error('⚠️ Scheduled persistence failed:', error?.message || error);
+                    // Don't log full stack trace to avoid cluttering console
+                });
+            }
+        }, 30000); // Increased from 5s to 30s
+    }
     async _persistData() {
         if (!this.AsyncStorage)
             return;
+        const startTime = Date.now();
+        let entryCount = 0;
         try {
             const data = {};
             const iterator = this.db.iterator();
@@ -118,16 +384,164 @@ class ReactNativeLevelDB {
                             iterator.end(() => resolve());
                             return;
                         }
-
+                        entryCount += 1;
+                        // Preserve Buffer/Uint8Array types by converting to base64 with type marker
+                        if (Buffer.isBuffer(value)) {
+                            data[key.toString()] = {
+                                __type: 'Buffer',
+                                data: value.toString('base64'),
+                            };
+                        }
+                        else if (value instanceof Uint8Array) {
+                            data[key.toString()] = {
+                                __type: 'Uint8Array',
+                                data: Buffer.from(value).toString('base64'),
+                            };
+                        }
+                        else {
+                            data[key.toString()] = value;
+                        }
                         processNext();
                     });
                 };
                 processNext();
             });
-
+            // Serialize data
+            let jsonData;
+            try {
+                jsonData = JSON.stringify(data);
+            }
+            catch (stringifyError) {
+                throw new Error(`Failed to stringify data: ${stringifyError instanceof Error ? stringifyError.message : String(stringifyError)}`);
+            }
+            const serializeTime = Date.now() - startTime;
+            const dataSizeMB = jsonData.length / 1024 / 1024;
+            // React Native AsyncStorage has a 6MB per-item limit and total quota limits
+            // Use 500KB chunks to stay well within limits and avoid SQLITE_FULL errors
+            const chunkSize = 512 * 1024; // 500KB chunks (conservative for Android)
+            if (jsonData.length > chunkSize) {
+                // Split large data into chunks
+                const chunks = [];
+                for (let i = 0; i < jsonData.length; i += chunkSize) {
+                    chunks.push(jsonData.slice(i, i + chunkSize));
+                }
+                console.log(`💾 Writing ${chunks.length} chunks (${dataSizeMB.toFixed(2)}MB)...`);
+                // Check if data is too large (warn if over 50MB)
+                if (dataSizeMB > 50) {
+                    console.warn(`⚠️ Database is very large (${dataSizeMB.toFixed(2)}MB). Consider clearing old data.`);
+                }
+                // Write chunks sequentially to avoid overwhelming AsyncStorage
+                // Parallel writes can cause SQLITE_FULL errors
+                for (let i = 0; i < chunks.length; i += 1) {
+                    try {
+                        await this.AsyncStorage.setItem(`${this.storageKey}_chunk_${i}`, chunks[i]);
+                    }
+                    catch (chunkError) {
+                        // If we hit storage quota, clean up partial writes and throw
+                        console.error(`❌ Failed to write chunk ${i}/${chunks.length}:`, chunkError);
+                        if (chunkError?.message?.includes('full') || chunkError?.code === 13) {
+                            console.error('💥 Storage quota exceeded! Cleaning up partial writes...');
+                            // Only clean up the chunks we just tried to write
+                            for (let j = 0; j <= i; j += 1) {
+                                await this.AsyncStorage.removeItem(`${this.storageKey}_chunk_${j}`).catch(() => { });
+                            }
+                            throw new Error('AsyncStorage quota exceeded. Please clear app data or restart the app.');
+                        }
+                        throw chunkError;
+                    }
+                }
+                // Clean up any extra chunks from previous saves (if we had more chunks before)
+                await this._cleanupExtraChunks(chunks.length);
+                // Only update chunk count after all chunks are written successfully
+                await this.AsyncStorage.setItem(`${this.storageKey}_chunks`, chunks.length.toString());
+                // Remove direct storage if it exists (migrating from direct to chunked)
+                await this.AsyncStorage.removeItem(this.storageKey).catch(() => { });
+                const totalTime = Date.now() - startTime;
+                console.log(`✅ Persisted ${entryCount} entries (${dataSizeMB.toFixed(2)}MB in ${chunks.length} chunks) in ${totalTime}ms (serialize: ${serializeTime}ms)`);
+            }
+            else {
+                // Small data, store directly
+                await this.AsyncStorage.setItem(this.storageKey, jsonData);
+                // Clean up chunked storage if it exists (migrating from chunked to direct)
+                await this._cleanupAllChunks();
+                const totalTime = Date.now() - startTime;
+                console.log(`✅ Persisted ${entryCount} entries (${(jsonData.length / 1024).toFixed(2)}KB) in ${totalTime}ms (serialize: ${serializeTime}ms)`);
+            }
         }
         catch (error) {
-            console.
+            console.error('❌ Failed to persist data to AsyncStorage:', error);
+            console.error('Error details:', {
+                message: error?.message,
+                code: error?.code,
+                stack: error?.stack?.split('\n')[0]
+            });
+            // If quota exceeded, clear everything to recover
+            if (error?.message?.includes('quota') || error?.message?.includes('full')) {
+                console.error('💥 Storage full! Clearing all AsyncStorage data for recovery...');
+                await this._cleanupAllChunks();
+            }
+            // Don't mark as dirty - if persistence fails, we don't want to retry infinitely
+            // The data is still in memory (memdown) so the app can continue working
+            // this.isDirty = true;
+            // Don't throw - this allows the app to continue even if persistence fails
+            // throw error;
+        }
+    }
+    async _cleanupExtraChunks(keepChunkCount) {
+        if (!this.AsyncStorage)
+            return;
+        try {
+            // Remove chunks beyond the new count (cleanup old data)
+            for (let i = keepChunkCount; i < 200; i += 1) {
+                const chunkKey = `${this.storageKey}_chunk_${i}`;
+                const exists = await this.AsyncStorage.getItem(chunkKey);
+                if (!exists) {
+                    break; // No more chunks to clean
+                }
+                await this.AsyncStorage.removeItem(chunkKey);
+            }
+        }
+        catch (cleanupError) {
+            console.warn('⚠️ Error during extra chunk cleanup:', cleanupError);
+        }
+    }
+    async _cleanupAllChunks() {
+        if (!this.AsyncStorage)
+            return;
+        try {
+            // Remove chunk metadata
+            await this.AsyncStorage.removeItem(`${this.storageKey}_chunks`);
+            // Remove all chunk entries (try up to 200 chunks)
+            for (let i = 0; i < 200; i += 1) {
+                const chunkKey = `${this.storageKey}_chunk_${i}`;
+                const exists = await this.AsyncStorage.getItem(chunkKey);
+                if (!exists) {
+                    break; // No more chunks to clean
+                }
+                await this.AsyncStorage.removeItem(chunkKey);
+            }
+        }
+        catch (cleanupError) {
+            console.warn('⚠️ Error during chunk cleanup:', cleanupError);
+        }
+    }
+    // Keep old method for clearing persisted data (full cleanup)
+    async _cleanupOldChunks() {
+        if (!this.AsyncStorage)
+            return;
+        try {
+            // Remove chunk metadata
+            await this.AsyncStorage.removeItem(`${this.storageKey}_chunks`);
+            // Remove old direct storage (if exists)
+            await this.AsyncStorage.removeItem(this.storageKey);
+            // Remove all chunk entries (try up to 200 chunks)
+            for (let i = 0; i < 200; i += 1) {
+                const chunkKey = `${this.storageKey}_chunk_${i}`;
+                await this.AsyncStorage.removeItem(chunkKey).catch(() => { });
+            }
+        }
+        catch (cleanupError) {
+            console.warn('⚠️ Error during full cleanup:', cleanupError);
         }
     }
 }
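The chunked AsyncStorage layout used by _persistData and open above can be summarized in a small standalone sketch. The key names (`<storageKey>_chunks`, `<storageKey>_chunk_<i>`) and the 512 * 1024 chunk size mirror the diff; the minimal KeyValueStorage type and the function names are illustrative assumptions, not part of the package.

// Standalone TypeScript illustration of the storage layout used above (not from the package).
type KeyValueStorage = {
  getItem(key: string): Promise<string | null>;
  setItem(key: string, value: string): Promise<void>;
};

const CHUNK_SIZE = 512 * 1024; // same per-chunk size the diff uses

// Write: chunks first, then the `<storageKey>_chunks` count, so a partial write is never "committed".
async function writeChunked(storage: KeyValueStorage, storageKey: string, json: string): Promise<void> {
  const chunks: string[] = [];
  for (let i = 0; i < json.length; i += CHUNK_SIZE) {
    chunks.push(json.slice(i, i + CHUNK_SIZE));
  }
  for (let i = 0; i < chunks.length; i += 1) {
    await storage.setItem(`${storageKey}_chunk_${i}`, chunks[i]);
  }
  await storage.setItem(`${storageKey}_chunks`, String(chunks.length));
}

// Read: if a chunk count exists, reassemble; otherwise fall back to the direct (unchunked) key.
async function readChunked(storage: KeyValueStorage, storageKey: string): Promise<string | null> {
  const countRaw = await storage.getItem(`${storageKey}_chunks`);
  if (countRaw === null) {
    return storage.getItem(storageKey);
  }
  const count = parseInt(countRaw, 10);
  const parts: string[] = [];
  for (let i = 0; i < count; i += 1) {
    parts.push((await storage.getItem(`${storageKey}_chunk_${i}`)) ?? '');
  }
  return parts.join('');
}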
@@ -146,16 +560,17 @@ exports.ReactNativeLevelDB = ReactNativeLevelDB;
  * @param databaseName - Name for the database (used as prefix in AsyncStorage)
  */
 const startDopEngineReactNative = async (walletSource, shouldDebug, artifactStore, useNativeArtifacts = true, skipMerkletreeScans = false, verboseScanLogging = false, databaseName = 'dop-wallet-db') => {
-    // Create React Native compatible database instance
-
+    // Create React Native compatible database instance with AsyncStorage persistence
+    const db = new ReactNativeLevelDB(databaseName);
     // Ensure database is opened before proceeding
-
-
-
-
-
-
-
+    await new Promise((resolve, reject) => {
+        db.open((error) => {
+            if (error)
+                reject(error);
+            else
+                resolve();
+        });
+    });
     // Initialize the DOP Engine with the React Native database
     return (0, init_1.startDopEngine)(walletSource, db, // Cast to any since TypeScript doesn't know about our custom implementation
     shouldDebug, artifactStore, useNativeArtifacts, skipMerkletreeScans, verboseScanLogging);