@dynamicu/chromedebug-mcp 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +344 -0
- package/LICENSE +21 -0
- package/README.md +250 -0
- package/chrome-extension/README.md +41 -0
- package/chrome-extension/background.js +3917 -0
- package/chrome-extension/chrome-session-manager.js +706 -0
- package/chrome-extension/content.css +181 -0
- package/chrome-extension/content.js +3022 -0
- package/chrome-extension/data-buffer.js +435 -0
- package/chrome-extension/dom-tracker.js +411 -0
- package/chrome-extension/extension-config.js +78 -0
- package/chrome-extension/firebase-client.js +278 -0
- package/chrome-extension/firebase-config.js +32 -0
- package/chrome-extension/firebase-config.module.js +22 -0
- package/chrome-extension/firebase-config.module.template.js +27 -0
- package/chrome-extension/firebase-config.template.js +36 -0
- package/chrome-extension/frame-capture.js +407 -0
- package/chrome-extension/icon128.png +1 -0
- package/chrome-extension/icon16.png +1 -0
- package/chrome-extension/icon48.png +1 -0
- package/chrome-extension/license-helper.js +181 -0
- package/chrome-extension/logger.js +23 -0
- package/chrome-extension/manifest.json +73 -0
- package/chrome-extension/network-tracker.js +510 -0
- package/chrome-extension/offscreen.html +10 -0
- package/chrome-extension/options.html +203 -0
- package/chrome-extension/options.js +282 -0
- package/chrome-extension/pako.min.js +2 -0
- package/chrome-extension/performance-monitor.js +533 -0
- package/chrome-extension/pii-redactor.js +405 -0
- package/chrome-extension/popup.html +532 -0
- package/chrome-extension/popup.js +2446 -0
- package/chrome-extension/upload-manager.js +323 -0
- package/chrome-extension/web-vitals.iife.js +1 -0
- package/config/api-keys.json +11 -0
- package/config/chrome-pilot-config.json +45 -0
- package/package.json +126 -0
- package/scripts/cleanup-processes.js +109 -0
- package/scripts/config-manager.js +280 -0
- package/scripts/generate-extension-config.js +53 -0
- package/scripts/setup-security.js +64 -0
- package/src/capture/architecture.js +426 -0
- package/src/capture/error-handling-tests.md +38 -0
- package/src/capture/error-handling-types.ts +360 -0
- package/src/capture/index.js +508 -0
- package/src/capture/interfaces.js +625 -0
- package/src/capture/memory-manager.js +713 -0
- package/src/capture/types.js +342 -0
- package/src/chrome-controller.js +2658 -0
- package/src/cli.js +19 -0
- package/src/config-loader.js +303 -0
- package/src/database.js +2178 -0
- package/src/firebase-license-manager.js +462 -0
- package/src/firebase-privacy-guard.js +397 -0
- package/src/http-server.js +1516 -0
- package/src/index-direct.js +157 -0
- package/src/index-modular.js +219 -0
- package/src/index-monolithic-backup.js +2230 -0
- package/src/index.js +305 -0
- package/src/legacy/chrome-controller-old.js +1406 -0
- package/src/legacy/index-express.js +625 -0
- package/src/legacy/index-old.js +977 -0
- package/src/legacy/routes.js +260 -0
- package/src/legacy/shared-storage.js +101 -0
- package/src/logger.js +10 -0
- package/src/mcp/handlers/chrome-tool-handler.js +306 -0
- package/src/mcp/handlers/element-tool-handler.js +51 -0
- package/src/mcp/handlers/frame-tool-handler.js +957 -0
- package/src/mcp/handlers/request-handler.js +104 -0
- package/src/mcp/handlers/workflow-tool-handler.js +636 -0
- package/src/mcp/server.js +68 -0
- package/src/mcp/tools/index.js +701 -0
- package/src/middleware/auth.js +371 -0
- package/src/middleware/security.js +267 -0
- package/src/port-discovery.js +258 -0
- package/src/routes/admin.js +182 -0
- package/src/services/browser-daemon.js +494 -0
- package/src/services/chrome-service.js +375 -0
- package/src/services/failover-manager.js +412 -0
- package/src/services/git-safety-service.js +675 -0
- package/src/services/heartbeat-manager.js +200 -0
- package/src/services/http-client.js +195 -0
- package/src/services/process-manager.js +318 -0
- package/src/services/process-tracker.js +574 -0
- package/src/services/profile-manager.js +449 -0
- package/src/services/project-manager.js +415 -0
- package/src/services/session-manager.js +497 -0
- package/src/services/session-registry.js +491 -0
- package/src/services/unified-session-manager.js +678 -0
- package/src/shared-storage-old.js +267 -0
- package/src/standalone-server.js +53 -0
- package/src/utils/extension-path.js +145 -0
- package/src/utils.js +187 -0
- package/src/validation/log-transformer.js +125 -0
- package/src/validation/schemas.js +391 -0
package/chrome-extension/data-buffer.js
@@ -0,0 +1,435 @@

```js
// IndexedDB Circular Buffer for Chrome Debug Full Data Recording
// Provides resilient client-side buffering with compression support

class DataBuffer {
  constructor() {
    this.dbName = 'ChromePilotDataBuffer';
    this.version = 1;
    this.db = null;
    this.maxBufferSize = 100 * 1024 * 1024; // 100MB default
    this.currentSize = 0;
    this.batchSize = 1000; // Events per batch
    this.uploadQueue = [];
    this.isUploading = false;
  }

  async init() {
    return new Promise((resolve, reject) => {
      const request = indexedDB.open(this.dbName, this.version);

      request.onerror = () => {
        console.error('[DataBuffer] Failed to open IndexedDB:', request.error);
        reject(request.error);
      };

      request.onsuccess = () => {
        this.db = request.result;
        console.log('[DataBuffer] IndexedDB initialized');
        this.calculateCurrentSize();
        resolve();
      };

      request.onupgradeneeded = (event) => {
        const db = event.target.result;

        // Create object stores if they don't exist
        if (!db.objectStoreNames.contains('events')) {
          const eventsStore = db.createObjectStore('events', {
            keyPath: 'id',
            autoIncrement: true
          });
          eventsStore.createIndex('timestamp', 'timestamp', { unique: false });
          eventsStore.createIndex('type', 'type', { unique: false });
          eventsStore.createIndex('recording_id', 'recording_id', { unique: false });
        }

        if (!db.objectStoreNames.contains('batches')) {
          const batchesStore = db.createObjectStore('batches', {
            keyPath: 'id',
            autoIncrement: true
          });
          batchesStore.createIndex('timestamp', 'timestamp', { unique: false });
          batchesStore.createIndex('status', 'status', { unique: false });
        }

        if (!db.objectStoreNames.contains('metadata')) {
          db.createObjectStore('metadata', { keyPath: 'key' });
        }
      };
    });
  }

  async addEvent(event) {
    if (!this.db) await this.init();

    // Add metadata
    event.timestamp = event.timestamp || Date.now();
    event.recording_id = event.recording_id || this.getCurrentRecordingId();

    // CONTRACT B: Fallback logic for oversized logs
    const eventSize = this.estimateSize(event);
    if (eventSize > this.maxBufferSize * 0.5) { // If event exceeds 50% of buffer
      console.warn('[DataBuffer] Oversized event detected, truncating message');
      if (event.message && event.message.length > 10000) {
        const originalLength = event.message.length;
        event.message = event.message.substring(0, 10000) +
          `... [TRUNCATED: original ${originalLength} chars]`;
      }
    }

    const transaction = this.db.transaction(['events'], 'readwrite');
    const store = transaction.objectStore('events');

    return new Promise((resolve, reject) => {
      const request = store.add(event);

      request.onsuccess = () => {
        this.currentSize += this.estimateSize(event);
        this.checkBufferLimits();
        resolve(request.result);
      };

      request.onerror = () => {
        console.error('[DataBuffer] Failed to add event:', request.error);
        reject(request.error);
      };
    });
  }

  async addBatch(events) {
    if (!this.db) await this.init();

    const transaction = this.db.transaction(['events'], 'readwrite');
    const store = transaction.objectStore('events');

    return new Promise((resolve, reject) => {
      let addedCount = 0;

      for (const event of events) {
        event.timestamp = event.timestamp || Date.now();
        event.recording_id = event.recording_id || this.getCurrentRecordingId();

        const request = store.add(event);

        request.onsuccess = () => {
          addedCount++;
          this.currentSize += this.estimateSize(event);

          if (addedCount === events.length) {
            this.checkBufferLimits();
            resolve(addedCount);
          }
        };

        request.onerror = () => {
          console.error('[DataBuffer] Failed to add event in batch:', request.error);
          reject(request.error);
        };
      }
    });
  }

  async createBatch(recordingId) {
    if (!this.db) await this.init();

    const transaction = this.db.transaction(['events', 'batches'], 'readwrite');
    const eventsStore = transaction.objectStore('events');
    const batchesStore = transaction.objectStore('batches');

    // Get events for this recording
    const index = eventsStore.index('recording_id');
    const range = IDBKeyRange.only(recordingId);
    const events = [];

    return new Promise((resolve, reject) => {
      const request = index.openCursor(range);

      request.onsuccess = async (event) => {
        const cursor = event.target.result;

        if (cursor) {
          events.push(cursor.value);

          if (events.length >= this.batchSize) {
            // Create batch with current events
            const batch = await this.saveBatch(events, recordingId);
            await this.deleteEvents(events.map(e => e.id));
            resolve(batch);
          } else {
            cursor.continue();
          }
        } else {
          // No more events, create batch with what we have
          if (events.length > 0) {
            const batch = await this.saveBatch(events, recordingId);
            await this.deleteEvents(events.map(e => e.id));
            resolve(batch);
          } else {
            resolve(null);
          }
        }
      };

      request.onerror = () => {
        reject(request.error);
      };
    });
  }

  async saveBatch(events, recordingId) {
    const batch = {
      timestamp: Date.now(),
      recording_id: recordingId,
      events: events,
      status: 'pending',
      compressed: false,
      size: this.estimateSize(events)
    };

    // Compress if pako is available
    if (typeof pako !== 'undefined') {
      try {
        const jsonString = JSON.stringify(events);
        const compressed = pako.gzip(jsonString);
        batch.events = Array.from(compressed); // Store as array for IndexedDB
        batch.compressed = true;
        batch.size = compressed.length;
      } catch (error) {
        console.error('[DataBuffer] Compression failed:', error);
      }
    }

    const transaction = this.db.transaction(['batches'], 'readwrite');
    const store = transaction.objectStore('batches');

    return new Promise((resolve, reject) => {
      const request = store.add(batch);

      request.onsuccess = () => {
        batch.id = request.result;
        resolve(batch);
      };

      request.onerror = () => {
        reject(request.error);
      };
    });
  }

  async deleteEvents(eventIds) {
    const transaction = this.db.transaction(['events'], 'readwrite');
    const store = transaction.objectStore('events');

    for (const id of eventIds) {
      store.delete(id);
    }

    return new Promise((resolve) => {
      transaction.oncomplete = () => {
        resolve();
      };
    });
  }

  async getPendingBatches() {
    if (!this.db) await this.init();

    const transaction = this.db.transaction(['batches'], 'readonly');
    const store = transaction.objectStore('batches');
    const index = store.index('status');
    const range = IDBKeyRange.only('pending');

    return new Promise((resolve, reject) => {
      const request = index.getAll(range);

      request.onsuccess = () => {
        resolve(request.result);
      };

      request.onerror = () => {
        reject(request.error);
      };
    });
  }

  async markBatchUploaded(batchId) {
    const transaction = this.db.transaction(['batches'], 'readwrite');
    const store = transaction.objectStore('batches');

    return new Promise((resolve, reject) => {
      const getRequest = store.get(batchId);

      getRequest.onsuccess = () => {
        const batch = getRequest.result;
        if (batch) {
          batch.status = 'uploaded';
          const putRequest = store.put(batch);

          putRequest.onsuccess = () => {
            resolve();
          };

          putRequest.onerror = () => {
            reject(putRequest.error);
          };
        } else {
          resolve();
        }
      };

      getRequest.onerror = () => {
        reject(getRequest.error);
      };
    });
  }

  async checkBufferLimits() {
    // Implement FIFO circular buffer strategy
    if (this.currentSize > this.maxBufferSize) {
      console.log('[DataBuffer] Buffer limit exceeded, removing oldest batches');
      await this.removeOldestBatches();
    }
  }

  async removeOldestBatches() {
    const transaction = this.db.transaction(['batches'], 'readwrite');
    const store = transaction.objectStore('batches');
    const index = store.index('timestamp');

    return new Promise((resolve, reject) => {
      const request = index.openCursor();
      let deletedSize = 0;
      const targetSize = this.maxBufferSize * 0.95; // Retain 95% capacity (CONTRACT B fix)

      request.onsuccess = (event) => {
        const cursor = event.target.result;

        if (cursor && this.currentSize - deletedSize > targetSize) {
          const batch = cursor.value;
          deletedSize += batch.size || 0;
          store.delete(cursor.key);
          cursor.continue();
        } else {
          this.currentSize -= deletedSize;
          console.log(`[DataBuffer] Freed ${deletedSize} bytes`);
          resolve();
        }
      };

      request.onerror = () => {
        reject(request.error);
      };
    });
  }

  async calculateCurrentSize() {
    if (!this.db) return;

    const transaction = this.db.transaction(['events', 'batches'], 'readonly');
    const eventsStore = transaction.objectStore('events');
    const batchesStore = transaction.objectStore('batches');

    let totalSize = 0;

    // Calculate events size
    const eventsRequest = eventsStore.getAll();
    eventsRequest.onsuccess = () => {
      const events = eventsRequest.result;
      for (const event of events) {
        totalSize += this.estimateSize(event);
      }
    };

    // Calculate batches size
    const batchesRequest = batchesStore.getAll();
    batchesRequest.onsuccess = () => {
      const batches = batchesRequest.result;
      for (const batch of batches) {
        totalSize += batch.size || this.estimateSize(batch);
      }

      this.currentSize = totalSize;
      console.log(`[DataBuffer] Current buffer size: ${(totalSize / 1024 / 1024).toFixed(2)}MB`);
    };
  }

  estimateSize(obj) {
    // Rough estimation of object size in bytes
    const str = JSON.stringify(obj);
    return str.length * 2; // Assuming 2 bytes per character (UTF-16)
  }

  getCurrentRecordingId() {
    // Get from chrome.storage or use a default
    return chrome.storage?.local ?
      new Promise((resolve) => {
        chrome.storage.local.get(['currentRecordingId'], (result) => {
          resolve(result.currentRecordingId || 'default');
        });
      }) : 'default';
  }

  async clear() {
    if (!this.db) await this.init();

    const transaction = this.db.transaction(['events', 'batches'], 'readwrite');
    const eventsStore = transaction.objectStore('events');
    const batchesStore = transaction.objectStore('batches');

    eventsStore.clear();
    batchesStore.clear();

    return new Promise((resolve) => {
      transaction.oncomplete = () => {
        this.currentSize = 0;
        console.log('[DataBuffer] Buffer cleared');
        resolve();
      };
    });
  }

  async getStats() {
    if (!this.db) await this.init();

    const transaction = this.db.transaction(['events', 'batches'], 'readonly');
    const eventsStore = transaction.objectStore('events');
    const batchesStore = transaction.objectStore('batches');

    return new Promise((resolve) => {
      let stats = {
        eventCount: 0,
        batchCount: 0,
        pendingBatches: 0,
        uploadedBatches: 0,
        totalSize: this.currentSize,
        maxSize: this.maxBufferSize
      };

      eventsStore.count().onsuccess = (e) => {
        stats.eventCount = e.target.result;
      };

      batchesStore.count().onsuccess = (e) => {
        stats.batchCount = e.target.result;
      };

      const pendingIndex = batchesStore.index('status');
      pendingIndex.count(IDBKeyRange.only('pending')).onsuccess = (e) => {
        stats.pendingBatches = e.target.result;
      };

      pendingIndex.count(IDBKeyRange.only('uploaded')).onsuccess = (e) => {
        stats.uploadedBatches = e.target.result;
      };

      transaction.oncomplete = () => {
        resolve(stats);
      };
    });
  }
}

// Export for use in background script
if (typeof module !== 'undefined' && module.exports) {
  module.exports = DataBuffer;
}
```