@pixagram/lacerta-db 0.5.3 → 0.5.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/legacy.js DELETED
@@ -1,1901 +0,0 @@
1
- /**
2
- * LacertaDB V4 - Complete Core Library (Enhanced with Force Delete)
3
- * A powerful browser-based document database with encryption, compression, and OPFS support
4
- * @version 4.0.4 (With Force Delete Support)
5
- * @license MIT
6
- */
7
-
8
- 'use strict';
9
- // Note: These imports are for browser environments using a bundler (e.g., Webpack, Vite).
10
- // For direct browser usage, you would use an ES module import from a URL or local path.
11
- import TurboSerial from "@pixagram/turboserial";
12
- import TurboBase64 from "@pixagram/turbobase64";
13
-
14
- const serializer = new TurboSerial({
15
- compression: true,
16
- deduplication: true,
17
- shareArrayBuffers: true,
18
- simdOptimization: true,
19
- detectCircular: true
20
- });
21
- const base64 = new TurboBase64();
22
-
23
- /**
24
- * Async Mutex for managing concurrent operations
25
- */
26
- class AsyncMutex {
27
- constructor() {
28
- this._queue = [];
29
- this._locked = false;
30
- }
31
-
32
- acquire() {
33
- return new Promise(resolve => {
34
- this._queue.push(resolve);
35
- this._dispatch();
36
- });
37
- }
38
-
39
- release() {
40
- this._locked = false;
41
- this._dispatch();
42
- }
43
-
44
- async runExclusive(callback) {
45
- const release = await this.acquire();
46
- try {
47
- return await callback();
48
- } finally {
49
- release();
50
- }
51
- }
52
-
53
- _dispatch() {
54
- if (this._locked || this._queue.length === 0) {
55
- return;
56
- }
57
- this._locked = true;
58
- const resolve = this._queue.shift();
59
- resolve(() => this.release());
60
- }
61
- }
62
-
63
- /**
64
- * Custom error class for LacertaDB
65
- */
66
- class LacertaDBError extends Error {
67
- constructor(message, code, originalError) {
68
- super(message);
69
- this.name = 'LacertaDBError';
70
- this.code = code;
71
- this.originalError = originalError || null;
72
- this.timestamp = new Date().toISOString();
73
- }
74
- }
75
-
76
- // ========================
77
- // Compression Utility
78
- // ========================
79
-
80
- class BrowserCompressionUtility {
81
- async compress(input) {
82
- if (!(input instanceof Uint8Array)) {
83
- throw new TypeError('Input must be Uint8Array');
84
- }
85
- try {
86
- const stream = new Response(input).body
87
- .pipeThrough(new CompressionStream('deflate'));
88
- const compressed = await new Response(stream).arrayBuffer();
89
- return new Uint8Array(compressed);
90
- } catch (error) {
91
- throw new LacertaDBError('Compression failed', 'COMPRESSION_FAILED', error);
92
- }
93
- }
94
-
95
- async decompress(compressedData) {
96
- if (!(compressedData instanceof Uint8Array)) {
97
- throw new TypeError('Input must be Uint8Array');
98
- }
99
- try {
100
- const stream = new Response(compressedData).body
101
- .pipeThrough(new DecompressionStream('deflate'));
102
- const decompressed = await new Response(stream).arrayBuffer();
103
- return new Uint8Array(decompressed);
104
- } catch (error) {
105
- throw new LacertaDBError('Decompression failed', 'DECOMPRESSION_FAILED', error);
106
- }
107
- }
108
-
109
- // Fallback sync methods are simple pass-throughs
110
- compressSync(input) {
111
- if (!(input instanceof Uint8Array)) {
112
- throw new TypeError('Input must be Uint8Array');
113
- }
114
- return input;
115
- }
116
-
117
- decompressSync(compressedData) {
118
- if (!(compressedData instanceof Uint8Array)) {
119
- throw new TypeError('Input must be Uint8Array');
120
- }
121
- return compressedData;
122
- }
123
- }
124
-
125
- // ========================
126
- // Encryption Utility (FIXED & IMPROVED)
127
- // ========================
128
-
129
- class BrowserEncryptionUtility {
130
- async encrypt(data, password) {
131
- if (!(data instanceof Uint8Array)) {
132
- throw new TypeError('Data must be Uint8Array');
133
- }
134
- try {
135
- const salt = crypto.getRandomValues(new Uint8Array(16));
136
- const iv = crypto.getRandomValues(new Uint8Array(12));
137
-
138
- const keyMaterial = await crypto.subtle.importKey(
139
- 'raw',
140
- new TextEncoder().encode(password),
141
- 'PBKDF2',
142
- false,
143
- ['deriveKey']
144
- );
145
-
146
- const key = await crypto.subtle.deriveKey({
147
- name: 'PBKDF2',
148
- salt,
149
- iterations: 600000,
150
- hash: 'SHA-512'
151
- },
152
- keyMaterial, {
153
- name: 'AES-GCM',
154
- length: 256
155
- },
156
- false,
157
- ['encrypt']
158
- );
159
-
160
- const encrypted = await crypto.subtle.encrypt({
161
- name: 'AES-GCM',
162
- iv
163
- },
164
- key,
165
- data
166
- );
167
-
168
- // The checksum was removed as AES-GCM provides this via an authentication tag.
169
- const result = new Uint8Array(salt.length + iv.length + encrypted.byteLength);
170
- result.set(salt, 0);
171
- result.set(iv, salt.length);
172
- result.set(new Uint8Array(encrypted), salt.length + iv.length);
173
-
174
- return result;
175
- } catch (error) {
176
- throw new LacertaDBError('Encryption failed', 'ENCRYPTION_FAILED', error);
177
- }
178
- }
179
-
180
- async decrypt(wrappedData, password) {
181
- if (!(wrappedData instanceof Uint8Array)) {
182
- throw new TypeError('Data must be Uint8Array');
183
- }
184
- try {
185
- const salt = wrappedData.slice(0, 16);
186
- const iv = wrappedData.slice(16, 28);
187
- const encryptedData = wrappedData.slice(28);
188
-
189
- const keyMaterial = await crypto.subtle.importKey(
190
- 'raw',
191
- new TextEncoder().encode(password),
192
- 'PBKDF2',
193
- false,
194
- ['deriveKey']
195
- );
196
-
197
- const key = await crypto.subtle.deriveKey({
198
- name: 'PBKDF2',
199
- salt,
200
- iterations: 600000,
201
- hash: 'SHA-512'
202
- },
203
- keyMaterial, {
204
- name: 'AES-GCM',
205
- length: 256
206
- },
207
- false,
208
- ['decrypt']
209
- );
210
-
211
- const decrypted = await crypto.subtle.decrypt({
212
- name: 'AES-GCM',
213
- iv
214
- },
215
- key,
216
- encryptedData
217
- );
218
-
219
- // Checksum verification removed. crypto.subtle.decrypt will throw on failure.
220
- return new Uint8Array(decrypted);
221
- } catch (error) {
222
- // Provide a more specific error for failed decryption, which often indicates a wrong password.
223
- throw new LacertaDBError('Decryption failed. This may be due to an incorrect password or corrupted data.', 'DECRYPTION_FAILED', error);
224
- }
225
- }
226
- }
227
-
228
- // ========================
229
- // OPFS (Origin Private File System) Utility
230
- // ========================
231
-
232
- class OPFSUtility {
233
- async saveAttachments(dbName, collectionName, documentId, attachments) {
234
- try {
235
- const attachmentPaths = [];
236
- const root = await navigator.storage.getDirectory();
237
- const dbDir = await root.getDirectoryHandle(dbName, { create: true });
238
- const collDir = await dbDir.getDirectoryHandle(collectionName, { create: true });
239
- const docDir = await collDir.getDirectoryHandle(documentId, { create: true });
240
-
241
- for (const [index, attachment] of attachments.entries()) {
242
- const filename = `${index}_${attachment.name || 'file'}`;
243
- const fileHandle = await docDir.getFileHandle(filename, { create: true });
244
- const writable = await fileHandle.createWritable();
245
-
246
- let dataToWrite;
247
- if (attachment.data instanceof Uint8Array) {
248
- dataToWrite = attachment.data;
249
- } else if (attachment.data instanceof ArrayBuffer) {
250
- dataToWrite = new Uint8Array(attachment.data);
251
- } else if (attachment.data instanceof Blob) {
252
- dataToWrite = new Uint8Array(await attachment.data.arrayBuffer());
253
- } else {
254
- throw new TypeError('Unsupported attachment data type');
255
- }
256
-
257
- const blob = new Blob([dataToWrite], { type: attachment.type || 'application/octet-stream' });
258
- await writable.write(blob);
259
- await writable.close();
260
-
261
- const path = `/${dbName}/${collectionName}/${documentId}/${filename}`;
262
- attachmentPaths.push({
263
- path,
264
- name: attachment.name,
265
- type: attachment.type,
266
- size: dataToWrite.byteLength,
267
- originalName: attachment.originalName || attachment.name
268
- });
269
- }
270
- return attachmentPaths;
271
- } catch (error) {
272
- throw new LacertaDBError('Failed to save attachments', 'ATTACHMENT_SAVE_FAILED', error);
273
- }
274
- }
275
-
276
- async getAttachments(attachmentPaths) {
277
- const attachments = [];
278
- const root = await navigator.storage.getDirectory();
279
-
280
- for (const attachmentInfo of attachmentPaths) {
281
- try {
282
- const pathParts = attachmentInfo.path.split('/').filter(p => p);
283
- let currentDir = root;
284
-
285
- for (let i = 0; i < pathParts.length - 1; i++) {
286
- currentDir = await currentDir.getDirectoryHandle(pathParts[i]);
287
- }
288
-
289
- const fileHandle = await currentDir.getFileHandle(pathParts[pathParts.length - 1]);
290
- const file = await fileHandle.getFile();
291
- const data = await file.arrayBuffer();
292
-
293
- attachments.push({
294
- name: attachmentInfo.originalName || attachmentInfo.name,
295
- type: attachmentInfo.type,
296
- data: new Uint8Array(data),
297
- size: attachmentInfo.size
298
- });
299
- } catch (error) {
300
- console.error(`Failed to get attachment: ${attachmentInfo.path}`, error);
301
- // Optionally, collect errors and return them
302
- }
303
- }
304
- return attachments;
305
- }
306
-
307
- async deleteAttachments(dbName, collectionName, documentId) {
308
- try {
309
- const root = await navigator.storage.getDirectory();
310
- const dbDir = await root.getDirectoryHandle(dbName);
311
- const collDir = await dbDir.getDirectoryHandle(collectionName);
312
- await collDir.removeEntry(documentId, { recursive: true });
313
- } catch (error) {
314
- // Ignore "NotFoundError" as the directory might already be gone
315
- if (error.name !== 'NotFoundError') {
316
- console.error(`Failed to delete attachments for ${documentId}:`, error);
317
- }
318
- }
319
- }
320
-
321
- static async prepareAttachment(file, name) {
322
- let data;
323
- if (file instanceof File || file instanceof Blob) {
324
- const buffer = await file.arrayBuffer();
325
- data = new Uint8Array(buffer);
326
- } else if (file instanceof ArrayBuffer) {
327
- data = new Uint8Array(file);
328
- } else if (file instanceof Uint8Array) {
329
- data = file;
330
- } else {
331
- throw new TypeError('Unsupported file type for attachment');
332
- }
333
-
334
- return {
335
- name: name || file.name || 'unnamed',
336
- type: file.type || 'application/octet-stream',
337
- data,
338
- originalName: file.name || name
339
- };
340
- }
341
- }
342
-
343
- // ========================
344
- // IndexedDB Utility
345
- // ========================
346
-
347
- class IndexedDBUtility {
348
- constructor() {
349
- this.mutex = new AsyncMutex();
350
- }
351
-
352
- openDatabase(dbName, version = 1, upgradeCallback) {
353
- return new Promise((resolve, reject) => {
354
- const request = indexedDB.open(dbName, version);
355
-
356
- request.onerror = () => reject(new LacertaDBError(
357
- 'Failed to open database', 'DATABASE_OPEN_FAILED', request.error
358
- ));
359
- request.onsuccess = () => resolve(request.result);
360
- request.onupgradeneeded = event => {
361
- if (upgradeCallback) {
362
- upgradeCallback(event.target.result, event.oldVersion, event.newVersion);
363
- }
364
- };
365
- });
366
- }
367
-
368
- async performTransaction(db, storeNames, mode, callback, retries = 3) {
369
- return this.mutex.runExclusive(async () => {
370
- let lastError;
371
- for (let i = 0; i < retries; i++) {
372
- try {
373
- return await new Promise((resolve, reject) => {
374
- const transaction = db.transaction(storeNames, mode);
375
- let result;
376
-
377
- transaction.oncomplete = () => resolve(result);
378
- transaction.onerror = () => reject(transaction.error);
379
- transaction.onabort = () => reject(new Error('Transaction aborted'));
380
-
381
- try {
382
- const cbResult = callback(transaction);
383
- if (cbResult instanceof Promise) {
384
- cbResult.then(res => { result = res; }).catch(reject);
385
- } else {
386
- result = cbResult;
387
- }
388
- } catch (error) {
389
- reject(error);
390
- }
391
- });
392
- } catch (error) {
393
- lastError = error;
394
- if (i < retries - 1) {
395
- await new Promise(resolve => setTimeout(resolve, (2 ** i) * 100));
396
- }
397
- }
398
- }
399
- throw new LacertaDBError('Transaction failed after retries', 'TRANSACTION_FAILED', lastError);
400
- });
401
- }
402
-
403
- _promisifyRequest(requestFactory) {
404
- return new Promise((resolve, reject) => {
405
- const request = requestFactory();
406
- request.onsuccess = () => resolve(request.result);
407
- request.onerror = () => reject(request.error);
408
- });
409
- }
410
-
411
- add(db, storeName, value, key) {
412
- return this.performTransaction(db, [storeName], 'readwrite', tx => {
413
- const store = tx.objectStore(storeName);
414
- return this._promisifyRequest(() => key !== undefined ? store.add(value, key) : store.add(value));
415
- });
416
- }
417
-
418
- put(db, storeName, value, key) {
419
- return this.performTransaction(db, [storeName], 'readwrite', tx => {
420
- const store = tx.objectStore(storeName);
421
- return this._promisifyRequest(() => key !== undefined ? store.put(value, key) : store.put(value));
422
- });
423
- }
424
-
425
- get(db, storeName, key) {
426
- return this.performTransaction(db, [storeName], 'readonly', tx => {
427
- return this._promisifyRequest(() => tx.objectStore(storeName).get(key));
428
- });
429
- }
430
-
431
- getAll(db, storeName, query, count) {
432
- return this.performTransaction(db, [storeName], 'readonly', tx => {
433
- return this._promisifyRequest(() => tx.objectStore(storeName).getAll(query, count));
434
- });
435
- }
436
-
437
- delete(db, storeName, key) {
438
- return this.performTransaction(db, [storeName], 'readwrite', tx => {
439
- return this._promisifyRequest(() => tx.objectStore(storeName).delete(key));
440
- });
441
- }
442
-
443
- clear(db, storeName) {
444
- return this.performTransaction(db, [storeName], 'readwrite', tx => {
445
- return this._promisifyRequest(() => tx.objectStore(storeName).clear());
446
- });
447
- }
448
-
449
- count(db, storeName, query) {
450
- return this.performTransaction(db, [storeName], 'readonly', tx => {
451
- return this._promisifyRequest(() => tx.objectStore(storeName).count(query));
452
- });
453
- }
454
-
455
- iterateCursorSafe(db, storeName, callback, direction = 'next', query) {
456
- return this.performTransaction(db, [storeName], 'readonly', tx => {
457
- return new Promise((resolve, reject) => {
458
- const results = [];
459
- const request = tx.objectStore(storeName).openCursor(query, direction);
460
-
461
- request.onsuccess = event => {
462
- const cursor = event.target.result;
463
- if (cursor) {
464
- try {
465
- const result = callback(cursor.value, cursor.key);
466
- if (result !== false) {
467
- results.push(result);
468
- cursor.continue();
469
- } else {
470
- resolve(results);
471
- }
472
- } catch (error) {
473
- reject(error);
474
- }
475
- } else {
476
- resolve(results);
477
- }
478
- };
479
- request.onerror = () => reject(request.error);
480
- });
481
- });
482
- }
483
- }
484
-
485
- // ========================
486
- // Document Class
487
- // ========================
488
-
489
- class Document {
490
- constructor(data = {}, options = {}) {
491
- this._id = data._id || this.generateId();
492
- this._created = data._created || Date.now();
493
- this._modified = data._modified || Date.now();
494
- this._permanent = data._permanent || options.permanent || false;
495
- this._encrypted = data._encrypted || options.encrypted || false;
496
- this._compressed = data._compressed || options.compressed || false;
497
- this._attachments = data._attachments || [];
498
- this.data = data.data || {};
499
- this.packedData = data.packedData || null;
500
-
501
- // Utilities can be passed in or instantiated. For simplicity, we keep instantiation here.
502
- this.compression = new BrowserCompressionUtility();
503
- this.encryption = new BrowserEncryptionUtility();
504
- this.password = options.password || null;
505
- }
506
-
507
- generateId() {
508
- return `doc_${Date.now()}_${Math.random().toString(36).substring(2, 11)}`;
509
- }
510
-
511
- async pack() {
512
- try {
513
- let packed = serializer.serialize(this.data);
514
- if (this._compressed) {
515
- packed = await this.compression.compress(packed);
516
- }
517
- if (this._encrypted && this.password) {
518
- packed = await this.encryption.encrypt(packed, this.password);
519
- }
520
- this.packedData = packed;
521
- return packed;
522
- } catch (error) {
523
- throw new LacertaDBError('Failed to pack document', 'PACK_FAILED', error);
524
- }
525
- }
526
-
527
- async unpack() {
528
- try {
529
- let unpacked = this.packedData;
530
- if (this._encrypted && this.password) {
531
- unpacked = await this.encryption.decrypt(unpacked, this.password);
532
- }
533
- if (this._compressed) {
534
- unpacked = await this.compression.decompress(unpacked);
535
- }
536
- this.data = serializer.deserialize(unpacked);
537
- return this.data;
538
- } catch (error) {
539
- throw new LacertaDBError('Failed to unpack document', 'UNPACK_FAILED', error);
540
- }
541
- }
542
-
543
- packSync() {
544
- let packed = serializer.serialize(this.data);
545
- if (this._compressed) {
546
- packed = this.compression.compressSync(packed);
547
- }
548
- if (this._encrypted) {
549
- throw new LacertaDBError('Synchronous encryption not supported', 'SYNC_ENCRYPT_NOT_SUPPORTED');
550
- }
551
- this.packedData = packed;
552
- return packed;
553
- }
554
-
555
- unpackSync() {
556
- let unpacked = this.packedData;
557
- if (this._encrypted) {
558
- throw new LacertaDBError('Synchronous decryption not supported', 'SYNC_DECRYPT_NOT_SUPPORTED');
559
- }
560
- if (this._compressed) {
561
- unpacked = this.compression.decompressSync(unpacked);
562
- }
563
- this.data = serializer.deserialize(unpacked);
564
- return this.data;
565
- }
566
-
567
- objectOutput(includeAttachments = false) {
568
- const output = {
569
- _id: this._id,
570
- _created: this._created,
571
- _modified: this._modified,
572
- _permanent: this._permanent,
573
- ...this.data
574
- };
575
- if (includeAttachments && this._attachments.length > 0) {
576
- output._attachments = this._attachments;
577
- }
578
- return output;
579
- }
580
-
581
- databaseOutput() {
582
- return {
583
- _id: this._id,
584
- _created: this._created,
585
- _modified: this._modified,
586
- _permanent: this._permanent,
587
- _encrypted: this._encrypted,
588
- _compressed: this._compressed,
589
- _attachments: this._attachments,
590
- packedData: this.packedData
591
- };
592
- }
593
- }
594
-
595
- // ========================
596
- // Metadata Classes
597
- // ========================
598
-
599
- class CollectionMetadata {
600
- constructor(name, data = {}) {
601
- this.name = name;
602
- this.sizeKB = data.sizeKB || 0;
603
- this.length = data.length || 0;
604
- this.createdAt = data.createdAt || Date.now();
605
- this.modifiedAt = data.modifiedAt || Date.now();
606
- this.documentSizes = data.documentSizes || {};
607
- this.documentModifiedAt = data.documentModifiedAt || {};
608
- this.documentPermanent = data.documentPermanent || {};
609
- this.documentAttachments = data.documentAttachments || {};
610
- }
611
-
612
- addDocument(docId, sizeKB, isPermanent, attachmentCount) {
613
- this.documentSizes[docId] = sizeKB;
614
- this.documentModifiedAt[docId] = Date.now();
615
- if (isPermanent) this.documentPermanent[docId] = true;
616
- if (attachmentCount > 0) this.documentAttachments[docId] = attachmentCount;
617
-
618
- this.sizeKB += sizeKB;
619
- this.length++;
620
- this.modifiedAt = Date.now();
621
- }
622
-
623
- updateDocument(docId, newSizeKB, isPermanent, attachmentCount) {
624
- const oldSize = this.documentSizes[docId] || 0;
625
- this.sizeKB = this.sizeKB - oldSize + newSizeKB;
626
- this.documentSizes[docId] = newSizeKB;
627
- this.documentModifiedAt[docId] = Date.now();
628
-
629
- if (isPermanent) {
630
- this.documentPermanent[docId] = true;
631
- } else {
632
- delete this.documentPermanent[docId];
633
- }
634
-
635
- if (attachmentCount > 0) {
636
- this.documentAttachments[docId] = attachmentCount;
637
- } else {
638
- delete this.documentAttachments[docId];
639
- }
640
-
641
- this.modifiedAt = Date.now();
642
- }
643
-
644
- removeDocument(docId) {
645
- const sizeKB = this.documentSizes[docId] || 0;
646
- if (this.documentSizes[docId]) {
647
- this.sizeKB -= sizeKB;
648
- this.length--;
649
- }
650
- delete this.documentSizes[docId];
651
- delete this.documentModifiedAt[docId];
652
- delete this.documentPermanent[docId];
653
- delete this.documentAttachments[docId];
654
- this.modifiedAt = Date.now();
655
- }
656
-
657
- getOldestNonPermanentDocuments(count) {
658
- return Object.entries(this.documentModifiedAt)
659
- .filter(([docId]) => !this.documentPermanent[docId])
660
- .sort(([, timeA], [, timeB]) => timeA - timeB)
661
- .slice(0, count)
662
- .map(([docId]) => docId);
663
- }
664
- }
665
-
666
- class DatabaseMetadata {
667
- constructor(name, data = {}) {
668
- this.name = name;
669
- this.collections = data.collections || {};
670
- this.totalSizeKB = data.totalSizeKB || 0;
671
- this.totalLength = data.totalLength || 0;
672
- this.modifiedAt = data.modifiedAt || Date.now();
673
- }
674
-
675
- static load(dbName) {
676
- const key = `lacertadb_${dbName}_metadata`;
677
- const stored = localStorage.getItem(key);
678
- if (stored) {
679
- try {
680
- const decoded = base64.decode(stored);
681
- const data = serializer.deserialize(decoded);
682
- return new DatabaseMetadata(dbName, data);
683
- } catch (e) {
684
- console.error('Failed to load metadata:', e);
685
- }
686
- }
687
- return new DatabaseMetadata(dbName);
688
- }
689
-
690
- save() {
691
- const key = `lacertadb_${this.name}_metadata`;
692
- try {
693
- const dataToStore = {
694
- collections: this.collections,
695
- totalSizeKB: this.totalSizeKB,
696
- totalLength: this.totalLength,
697
- modifiedAt: this.modifiedAt
698
- };
699
- const serializedData = serializer.serialize(dataToStore);
700
- const encodedData = base64.encode(serializedData);
701
- localStorage.setItem(key, encodedData);
702
- } catch (e) {
703
- if (e.name === 'QuotaExceededError') {
704
- throw new LacertaDBError('Storage quota exceeded for metadata', 'QUOTA_EXCEEDED', e);
705
- }
706
- throw new LacertaDBError('Failed to save metadata', 'METADATA_SAVE_FAILED', e);
707
- }
708
- }
709
-
710
- setCollection(collectionMetadata) {
711
- this.collections[collectionMetadata.name] = {
712
- sizeKB: collectionMetadata.sizeKB,
713
- length: collectionMetadata.length,
714
- createdAt: collectionMetadata.createdAt,
715
- modifiedAt: collectionMetadata.modifiedAt,
716
- documentSizes: collectionMetadata.documentSizes,
717
- documentModifiedAt: collectionMetadata.documentModifiedAt,
718
- documentPermanent: collectionMetadata.documentPermanent,
719
- documentAttachments: collectionMetadata.documentAttachments
720
- };
721
- this.recalculate();
722
- this.save();
723
- }
724
-
725
- removeCollection(collectionName) {
726
- delete this.collections[collectionName];
727
- this.recalculate();
728
- this.save();
729
- }
730
-
731
- recalculate() {
732
- this.totalSizeKB = 0;
733
- this.totalLength = 0;
734
- for (const collName in this.collections) {
735
- const coll = this.collections[collName];
736
- this.totalSizeKB += coll.sizeKB;
737
- this.totalLength += coll.length;
738
- }
739
- this.modifiedAt = Date.now();
740
- }
741
- }
742
-
743
- class Settings {
744
- constructor(dbName, data = {}) {
745
- this.dbName = dbName;
746
- // Replaced `??` with ternary operator for compatibility
747
- this.sizeLimitKB = data.sizeLimitKB != null ? data.sizeLimitKB : Infinity;
748
- const defaultBuffer = this.sizeLimitKB === Infinity ? 0 : this.sizeLimitKB * 0.8;
749
- this.bufferLimitKB = data.bufferLimitKB != null ? data.bufferLimitKB : defaultBuffer;
750
- this.freeSpaceEvery = data.freeSpaceEvery || 10000;
751
- }
752
-
753
- static load(dbName) {
754
- const key = `lacertadb_${dbName}_settings`;
755
- const stored = localStorage.getItem(key);
756
- if (stored) {
757
- try {
758
- const decoded = base64.decode(stored);
759
- const data = serializer.deserialize(decoded);
760
- return new Settings(dbName, data);
761
- } catch (e) {
762
- console.error('Failed to load settings:', e);
763
- }
764
- }
765
- return new Settings(dbName);
766
- }
767
-
768
- save() {
769
- const key = `lacertadb_${this.dbName}_settings`;
770
- const dataToStore = {
771
- sizeLimitKB: this.sizeLimitKB,
772
- bufferLimitKB: this.bufferLimitKB,
773
- freeSpaceEvery: this.freeSpaceEvery
774
- };
775
- const serializedData = serializer.serialize(dataToStore);
776
- const encodedData = base64.encode(serializedData);
777
- localStorage.setItem(key, encodedData);
778
- }
779
-
780
- updateSettings(newSettings) {
781
- Object.assign(this, newSettings);
782
- this.save();
783
- }
784
- }
785
-
786
- // ========================
787
- // Quick Store (localStorage based)
788
- // ========================
789
-
790
- class QuickStore {
791
- constructor(dbName) {
792
- this.dbName = dbName;
793
- this.keyPrefix = `lacertadb_${dbName}_quickstore_`;
794
- this.indexKey = `${this.keyPrefix}index`;
795
- }
796
-
797
- _readIndex() {
798
- const indexStr = localStorage.getItem(this.indexKey);
799
- if (!indexStr) return [];
800
- try {
801
- const decoded = base64.decode(indexStr);
802
- return serializer.deserialize(decoded);
803
- } catch {
804
- return [];
805
- }
806
- }
807
-
808
- _writeIndex(index) {
809
- const serializedIndex = serializer.serialize(index);
810
- const encodedIndex = base64.encode(serializedIndex);
811
- localStorage.setItem(this.indexKey, encodedIndex);
812
- }
813
-
814
- add(docId, data) {
815
- const key = `${this.keyPrefix}data_${docId}`;
816
- try {
817
- const serializedData = serializer.serialize(data);
818
- const encodedData = base64.encode(serializedData);
819
- localStorage.setItem(key, encodedData);
820
-
821
- const index = this._readIndex();
822
- if (!index.includes(docId)) {
823
- index.push(docId);
824
- this._writeIndex(index);
825
- }
826
- return true;
827
- } catch (e) {
828
- if (e.name === 'QuotaExceededError') {
829
- throw new LacertaDBError('QuickStore quota exceeded', 'QUOTA_EXCEEDED', e);
830
- }
831
- return false;
832
- }
833
- }
834
-
835
- get(docId) {
836
- const key = `${this.keyPrefix}data_${docId}`;
837
- const stored = localStorage.getItem(key);
838
- if (stored) {
839
- try {
840
- const decoded = base64.decode(stored);
841
- return serializer.deserialize(decoded);
842
- } catch (e) {
843
- console.error('Failed to parse QuickStore data:', e);
844
- }
845
- }
846
- return null;
847
- }
848
-
849
- update(docId, data) {
850
- return this.add(docId, data);
851
- }
852
-
853
- delete(docId) {
854
- const key = `${this.keyPrefix}data_${docId}`;
855
- localStorage.removeItem(key);
856
-
857
- let index = this._readIndex();
858
- const initialLength = index.length;
859
- index = index.filter(id => id !== docId);
860
- if (index.length < initialLength) {
861
- this._writeIndex(index);
862
- }
863
- }
864
-
865
- getAll() {
866
- const index = this._readIndex();
867
- return index.map(docId => {
868
- const doc = this.get(docId);
869
- return doc ? { _id: docId, ...doc } : null;
870
- }).filter(Boolean);
871
- }
872
-
873
- clear() {
874
- const index = this._readIndex();
875
- for (const docId of index) {
876
- localStorage.removeItem(`${this.keyPrefix}data_${docId}`);
877
- }
878
- localStorage.removeItem(this.indexKey);
879
- }
880
- }
881
-
882
- // ========================
883
- // Query Engine
884
- // ========================
885
-
886
- class QueryEngine {
887
- constructor() {
888
- this.operators = {
889
- // Comparison
890
- '$eq': (a, b) => a === b,
891
- '$ne': (a, b) => a !== b,
892
- '$gt': (a, b) => a > b,
893
- '$gte': (a, b) => a >= b,
894
- '$lt': (a, b) => a < b,
895
- '$lte': (a, b) => a <= b,
896
- '$in': (a, b) => Array.isArray(b) && b.includes(a),
897
- '$nin': (a, b) => Array.isArray(b) && !b.includes(a),
898
-
899
- // Logical
900
- '$and': (doc, conditions) => conditions.every(cond => this.evaluate(doc, cond)),
901
- '$or': (doc, conditions) => conditions.some(cond => this.evaluate(doc, cond)),
902
- '$not': (doc, condition) => !this.evaluate(doc, condition),
903
- '$nor': (doc, conditions) => !conditions.some(cond => this.evaluate(doc, cond)),
904
-
905
- // Element
906
- '$exists': (value, exists) => (value !== undefined) === exists,
907
- '$type': (value, type) => typeof value === type,
908
-
909
- // Array
910
- '$all': (arr, values) => Array.isArray(arr) && values.every(v => arr.includes(v)),
911
- '$elemMatch': (arr, condition) => Array.isArray(arr) && arr.some(elem => this.evaluate({ value: elem }, { value: condition })),
912
- '$size': (arr, size) => Array.isArray(arr) && arr.length === size,
913
-
914
- // String
915
- '$regex': (str, pattern) => {
916
- if (typeof str !== 'string') return false;
917
- try {
918
- const regex = new RegExp(pattern);
919
- return regex.test(str);
920
- } catch {
921
- return false;
922
- }
923
- },
924
- '$text': (str, search) => typeof str === 'string' && str.toLowerCase().includes(search.toLowerCase())
925
- };
926
- }
927
-
928
- evaluate(doc, query) {
929
- for (const key in query) {
930
- const value = query[key];
931
- if (key.startsWith('$')) {
932
- // Logical operator at root level
933
- const operator = this.operators[key];
934
- if (!operator || !operator(doc, value)) return false;
935
- } else {
936
- // Field-level query
937
- const fieldValue = this.getFieldValue(doc, key);
938
- if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
939
- // Operator-based comparison
940
- for (const op in value) {
941
- if (op.startsWith('$')) {
942
- const operatorFn = this.operators[op];
943
- if (!operatorFn || !operatorFn(fieldValue, value[op])) {
944
- return false;
945
- }
946
- }
947
- }
948
- } else {
949
- // Direct equality comparison
950
- if (fieldValue !== value) return false;
951
- }
952
- }
953
- }
954
- return true;
955
- }
956
-
957
- getFieldValue(doc, path) {
958
- // Replaced optional chaining with a loop for compatibility
959
- let current = doc;
960
- for (const part of path.split('.')) {
961
- if (current === null || current === undefined) {
962
- return undefined;
963
- }
964
- current = current[part];
965
- }
966
- return current;
967
- }
968
- }
969
- const queryEngine = new QueryEngine();
970
-
971
-
972
- // ========================
973
- // Aggregation Pipeline
974
- // ========================
975
-
976
/**
 * Minimal MongoDB-style aggregation engine. Stage handlers are pure
 * (docs, spec) -> docs functions, except '$lookup' which is async and also
 * receives the owning Database so it can resolve the foreign collection.
 */
class AggregationPipeline {
    constructor() {
        // Strips a single LEADING '$' from a field reference. The previous
        // `.replace('$', '')` removed the first '$' anywhere in the string,
        // corrupting field names that legitimately contain '$'.
        const fieldRef = (ref) => {
            const s = String(ref);
            return s.startsWith('$') ? s.slice(1) : s;
        };

        this.stages = {
            '$match': (docs, condition) => docs.filter(doc => queryEngine.evaluate(doc, condition)),

            '$project': (docs, projection) => docs.map(doc => {
                const projected = {};
                for (const key in projection) {
                    const value = projection[key];
                    if (value === 1 || value === true) {
                        projected[key] = queryEngine.getFieldValue(doc, key);
                    } else if (typeof value === 'object') {
                        // Handle computed fields if necessary (not implemented)
                    } else if (typeof value === 'string' && value.startsWith('$')) {
                        // Field rename: { alias: '$source.path' }
                        projected[key] = queryEngine.getFieldValue(doc, value.substring(1));
                    }
                }
                // Exclusion projection: when any key maps to 0/false, return the
                // document minus those keys (inclusion keys above are ignored —
                // this mirrors the original behavior).
                if (Object.values(projection).some(v => v === 0 || v === false)) {
                    const exclusions = Object.keys(projection).filter(k => projection[k] === 0 || projection[k] === false);
                    const included = { ...doc };
                    exclusions.forEach(key => delete included[key]);
                    return included;
                }
                return projected;
            }),

            // Sorts a copy of the input; per-field order: 1 = asc, -1 = desc.
            '$sort': (docs, sortSpec) => [...docs].sort((a, b) => {
                for (const key in sortSpec) {
                    const order = sortSpec[key];
                    const aVal = queryEngine.getFieldValue(a, key);
                    const bVal = queryEngine.getFieldValue(b, key);
                    if (aVal < bVal) return -order;
                    if (aVal > bVal) return order;
                }
                return 0;
            }),

            '$limit': (docs, limit) => docs.slice(0, limit),

            '$skip': (docs, skip) => docs.slice(skip),

            // Groups by _id (a '$field' reference or a constant) and applies
            // $sum / $avg / $count / $max / $min accumulators.
            '$group': (docs, groupSpec) => {
                const groups = new Map();
                const idField = groupSpec._id;

                for (const doc of docs) {
                    const groupKey = typeof idField === 'string' ?
                        queryEngine.getFieldValue(doc, fieldRef(idField)) :
                        JSON.stringify(idField); // Fallback for complex IDs

                    if (!groups.has(groupKey)) {
                        groups.set(groupKey, { _id: groupKey, docs: [] });
                    }
                    groups.get(groupKey).docs.push(doc);
                }

                const results = [];
                for (const group of groups.values()) {
                    const result = { _id: group._id };
                    for (const fieldKey in groupSpec) {
                        if (fieldKey === '_id') continue;
                        const accumulator = groupSpec[fieldKey];
                        const op = Object.keys(accumulator)[0];
                        const field = fieldRef(accumulator[op]);

                        switch (op) {
                            case '$sum':
                                result[fieldKey] = group.docs.reduce((sum, d) => sum + (queryEngine.getFieldValue(d, field) || 0), 0);
                                break;
                            case '$avg': {
                                // Braced so the `const` stays scoped to this case
                                // (previously it leaked into the whole switch block).
                                const total = group.docs.reduce((s, d) => s + (queryEngine.getFieldValue(d, field) || 0), 0);
                                result[fieldKey] = total / group.docs.length;
                                break;
                            }
                            case '$count':
                                result[fieldKey] = group.docs.length;
                                break;
                            case '$max':
                                result[fieldKey] = Math.max(...group.docs.map(d => queryEngine.getFieldValue(d, field)));
                                break;
                            case '$min':
                                result[fieldKey] = Math.min(...group.docs.map(d => queryEngine.getFieldValue(d, field)));
                                break;
                        }
                    }
                    results.push(result);
                }
                return results;
            },

            // Left-outer-join against another collection of the same database.
            '$lookup': async (docs, lookupSpec, db) => {
                const foreignCollection = await db.getCollection(lookupSpec.from);
                const foreignDocs = await foreignCollection.getAll();
                const foreignMap = new Map();
                foreignDocs.forEach(doc => {
                    const key = queryEngine.getFieldValue(doc, lookupSpec.foreignField);
                    if (!foreignMap.has(key)) foreignMap.set(key, []);
                    foreignMap.get(key).push(doc);
                });

                return docs.map(doc => {
                    const localValue = queryEngine.getFieldValue(doc, lookupSpec.localField);
                    return {
                        ...doc,
                        [lookupSpec.as]: foreignMap.get(localValue) || []
                    };
                });
            }
        };
    }

    /**
     * Runs the stages in order over `docs`.
     * @param {Object[]} docs - input documents
     * @param {Object[]} pipeline - e.g. [{ $match: {...} }, { $limit: 5 }]
     * @param {Database} db - required only when the pipeline uses $lookup
     * @returns {Promise<Object[]>} transformed documents
     * @throws {Error} on an unrecognized stage name
     */
    async execute(docs, pipeline, db) {
        let result = docs;
        for (const stage of pipeline) {
            const stageName = Object.keys(stage)[0];
            const stageSpec = stage[stageName];
            const stageFunction = this.stages[stageName];

            if (!stageFunction) {
                throw new Error(`Unknown aggregation stage: ${stageName}`);
            }

            if (stageName === '$lookup') {
                result = await stageFunction(result, stageSpec, db);
            } else {
                result = stageFunction(result, stageSpec);
            }
        }
        return result;
    }
}
1107
// Shared singleton: used by Collection.query() (sort/skip/limit/projection)
// and Collection.aggregate().
const aggregationPipeline = new AggregationPipeline();
1108
-
1109
- // ========================
1110
- // Migration Manager
1111
- // ========================
1112
-
1113
/**
 * Applies versioned document migrations across every collection of a
 * database. The current schema version is persisted in localStorage under
 * `lacertadb_<db>_version`.
 */
class MigrationManager {
    constructor(database) {
        this.database = database;
        this.migrations = [];                       // { name, version, up, down? }
        this.currentVersion = this.loadVersion();   // last applied version string
    }

    // Reads the persisted version, defaulting to '1.0.0' when absent.
    loadVersion() {
        return localStorage.getItem(`lacertadb_${this.database.name}_version`) || '1.0.0';
    }

    // Persists and caches the new current version.
    saveVersion(version) {
        localStorage.setItem(`lacertadb_${this.database.name}_version`, version);
        this.currentVersion = version;
    }

    // Registers a migration; ordering is resolved at run time.
    addMigration(migration) {
        this.migrations.push(migration);
    }

    /**
     * Semver-style comparison of dotted version strings; missing
     * components are treated as 0 (so '2.0' === '2.0.0').
     * @returns {number} 1 when a > b, -1 when a < b, 0 when equal.
     */
    compareVersions(a, b) {
        const left = a.split('.').map(Number);
        const right = b.split('.').map(Number);
        const width = Math.max(left.length, right.length);

        let i = 0;
        while (i < width) {
            const x = left[i] || 0;
            const y = right[i] || 0;
            if (x !== y) {
                return x > y ? 1 : -1;
            }
            i += 1;
        }
        return 0;
    }

    // Applies, in ascending order, every migration newer than the current
    // version and not newer than targetVersion, persisting after each step.
    async runMigrations(targetVersion) {
        const pending = this.migrations.filter(m =>
            this.compareVersions(m.version, this.currentVersion) > 0 &&
            this.compareVersions(m.version, targetVersion) <= 0);
        pending.sort((m1, m2) => this.compareVersions(m1.version, m2.version));

        for (const step of pending) {
            await this._applyMigration(step, 'up');
            this.saveVersion(step.version);
        }
    }

    // Reverts, in descending order, migrations that provide a `down` handler
    // and are newer than targetVersion, then persists targetVersion.
    async rollback(targetVersion) {
        const pending = this.migrations.filter(m =>
            m.down &&
            this.compareVersions(m.version, targetVersion) > 0 &&
            this.compareVersions(m.version, this.currentVersion) <= 0);
        pending.sort((m1, m2) => this.compareVersions(m2.version, m1.version));

        for (const step of pending) {
            await this._applyMigration(step, 'down');
        }
        this.saveVersion(targetVersion);
    }

    // Runs one migration's up/down transform over every document of every
    // collection; a truthy return value replaces the stored document.
    async _applyMigration(migration, direction) {
        console.log(`${direction === 'up' ? 'Running' : 'Rolling back'} migration: ${migration.name} (v${migration.version})`);
        const collectionNames = await this.database.listCollections();
        for (const collectionName of collectionNames) {
            const coll = await this.database.getCollection(collectionName);
            const allDocs = await coll.getAll();
            for (const record of allDocs) {
                const transformed = await migration[direction](record);
                if (transformed) {
                    await coll.update(record._id, transformed);
                }
            }
        }
    }
}
1187
-
1188
- // ========================
1189
- // Performance Monitor
1190
- // ========================
1191
-
1192
/**
 * Lightweight in-memory metrics collector for database operations.
 * Keeps rolling windows (last 100 operations/latencies, last 60 memory
 * samples) and derives ops/sec, average latency, cache hit rate and heap use.
 */
class PerformanceMonitor {
    constructor() {
        this.metrics = {
            operations: [],   // { type, duration, timestamp } — window of 100
            latencies: [],    // durations in ms — window of 100
            cacheHits: 0,
            cacheMisses: 0,
            memoryUsage: []   // heap snapshots — window of 60
        };
        this.monitoring = false;
        this.monitoringInterval = null;
    }

    // Begins sampling memory once per second. Idempotent.
    startMonitoring() {
        if (this.monitoring) return;
        this.monitoring = true;
        this.monitoringInterval = setInterval(() => this.collectMetrics(), 1000);
    }

    // Stops the sampling timer. Idempotent.
    stopMonitoring() {
        if (!this.monitoring) return;
        this.monitoring = false;
        clearInterval(this.monitoringInterval);
        this.monitoringInterval = null;
    }

    // Records one timed operation; no-op while monitoring is off.
    recordOperation(type, duration) {
        if (!this.monitoring) return;
        this.metrics.operations.push({ type, duration, timestamp: Date.now() });
        this.metrics.latencies.push(duration);
        if (this.metrics.operations.length > 100) this.metrics.operations.shift();
        if (this.metrics.latencies.length > 100) this.metrics.latencies.shift();
    }

    recordCacheHit() { this.metrics.cacheHits++; }
    recordCacheMiss() { this.metrics.cacheMisses++; }

    // Samples JS heap usage. `performance.memory` is non-standard
    // (Chromium-only), and `performance` itself may be absent in some
    // runtimes — the previous `performance && performance.memory` check threw
    // a ReferenceError there, so guard with `typeof` instead.
    collectMetrics() {
        if (typeof performance !== 'undefined' && performance.memory) {
            this.metrics.memoryUsage.push({
                used: performance.memory.usedJSHeapSize,
                total: performance.memory.totalJSHeapSize,
                limit: performance.memory.jsHeapSizeLimit,
                timestamp: Date.now()
            });
            if (this.metrics.memoryUsage.length > 60) this.metrics.memoryUsage.shift();
        }
    }

    /**
     * @returns {{opsPerSec: number, avgLatency: string, cacheHitRate: string,
     *            memoryUsageMB: string}} snapshot; the string fields are
     *           fixed-point formatted for display.
     */
    getStats() {
        const opsPerSec = this.metrics.operations.filter(op => Date.now() - op.timestamp < 1000).length;
        const totalLatency = this.metrics.latencies.reduce((a, b) => a + b, 0);
        const avgLatency = this.metrics.latencies.length > 0 ? totalLatency / this.metrics.latencies.length : 0;
        const totalCacheOps = this.metrics.cacheHits + this.metrics.cacheMisses;
        const cacheHitRate = totalCacheOps > 0 ? (this.metrics.cacheHits / totalCacheOps) * 100 : 0;

        const latestMemory = this.metrics.memoryUsage.length > 0 ? this.metrics.memoryUsage[this.metrics.memoryUsage.length - 1] : null;
        const memoryUsageMB = latestMemory ? latestMemory.used / (1024 * 1024) : 0;

        return {
            opsPerSec,
            avgLatency: avgLatency.toFixed(2),
            cacheHitRate: cacheHitRate.toFixed(1),
            memoryUsageMB: memoryUsageMB.toFixed(2)
        };
    }

    // Heuristic tuning advice derived from the current stats snapshot.
    getOptimizationTips() {
        const tips = [];
        const stats = this.getStats();

        // getStats() returns fixed-point strings; compare as numbers
        // explicitly instead of relying on implicit coercion.
        if (Number(stats.avgLatency) > 100) {
            tips.push('High average latency detected. Consider enabling compression and indexing frequently queried fields.');
        }
        if (Number(stats.cacheHitRate) < 50 && (this.metrics.cacheHits + this.metrics.cacheMisses) > 20) {
            tips.push('Low cache hit rate. Consider increasing cache size or optimizing query patterns.');
        }
        if (this.metrics.memoryUsage.length > 10) {
            const recent = this.metrics.memoryUsage.slice(-10);
            const trend = recent[recent.length - 1].used - recent[0].used;
            if (trend > 10 * 1024 * 1024) { // > 10MB increase
                tips.push('Memory usage is increasing rapidly. Check for memory leaks or consider batch processing.');
            }
        }
        return tips.length > 0 ? tips : ['Performance is optimal. No issues detected.'];
    }
}
1281
-
1282
- // ========================
1283
- // Collection Class
1284
- // ========================
1285
-
1286
/**
 * A named collection of documents backed by its own IndexedDB database
 * (named `<database>_<collection>`), with attachments stored via OPFS,
 * lifecycle events, a TTL-based query cache and optional periodic space
 * reclamation. Not constructed directly — obtained via Database.
 */
class Collection {
    constructor(name, database) {
        this.name = name;
        this.database = database;
        this.db = null;                     // IDBDatabase handle, set by init()
        this.metadata = null;               // CollectionMetadata, set by init()
        this.settings = database.settings;
        this.indexedDB = new IndexedDBUtility();
        this.opfs = new OPFSUtility();
        this.cleanupInterval = null;        // timer id for periodic freeSpace()
        this.events = new Map();            // event name -> array of callbacks
        this.queryCache = new Map();        // serialized query -> { data, timestamp }
        this.cacheTimeout = 60000;          // query-cache TTL in ms
        this.performanceMonitor = database.performanceMonitor;
    }

    // Opens the backing IndexedDB database, restores this collection's
    // metadata and starts the cleanup timer when settings.freeSpaceEvery > 0.
    async init() {
        const dbName = `${this.database.name}_${this.name}`;
        this.db = await this.indexedDB.openDatabase(dbName, 1, (db, oldVersion) => {
            if (oldVersion < 1 && !db.objectStoreNames.contains('documents')) {
                const store = db.createObjectStore('documents', { keyPath: '_id' });
                store.createIndex('modified', '_modified', { unique: false });
            }
            // Future index creation logic would go here during version bumps
        });

        const metadataData = this.database.metadata.collections[this.name];
        this.metadata = new CollectionMetadata(this.name, metadataData);

        if (this.settings.freeSpaceEvery > 0) {
            this.cleanupInterval = setInterval(() => this.freeSpace(), this.settings.freeSpaceEvery);
        }
        return this;
    }

    // Packs the data into a Document (compression on by default, encryption
    // opt-in), persists File/Blob attachments to OPFS, stores the packed
    // record, updates metadata and invalidates the query cache.
    // Returns the new document's _id.
    async add(documentData, options = {}) {
        await this.trigger('beforeAdd', documentData);

        const doc = new Document({ data: documentData, _id: options.id }, {
            encrypted: options.encrypted || false,
            compressed: options.compressed !== false,
            permanent: options.permanent || false,
            password: options.password
        });

        const attachments = options.attachments;
        if (attachments && attachments.length > 0) {
            const preparedAttachments = await Promise.all(
                attachments.map(att => (att instanceof File || att instanceof Blob) ?
                    OPFSUtility.prepareAttachment(att, att.name) :
                    Promise.resolve(att))
            );
            doc._attachments = await this.opfs.saveAttachments(this.database.name, this.name, doc._id, preparedAttachments);
        }

        await doc.pack();
        const dbOutput = doc.databaseOutput();
        await this.indexedDB.add(this.db, 'documents', dbOutput);

        const sizeKB = dbOutput.packedData.byteLength / 1024;
        this.metadata.addDocument(doc._id, sizeKB, doc._permanent, doc._attachments.length);
        this.database.metadata.setCollection(this.metadata);

        await this.checkSpaceLimit();
        await this.trigger('afterAdd', doc);
        this.queryCache.clear();
        return doc._id;
    }

    // Loads and unpacks a single document. Pass options.password for
    // encrypted documents and options.includeAttachments to hydrate
    // attachments from OPFS.
    // Throws LacertaDBError('DOCUMENT_NOT_FOUND') when the id is unknown.
    async get(docId, options = {}) {
        await this.trigger('beforeGet', docId);

        const stored = await this.indexedDB.get(this.db, 'documents', docId);
        if (!stored) {
            throw new LacertaDBError(`Document with id '${docId}' not found.`, 'DOCUMENT_NOT_FOUND');
        }

        const doc = new Document(stored, {
            password: options.password,
            encrypted: stored._encrypted,
            compressed: stored._compressed
        });

        if (stored.packedData) {
            await doc.unpack();
        }

        if (options.includeAttachments && doc._attachments.length > 0) {
            doc.data._attachments = await this.opfs.getAttachments(doc._attachments);
        }

        await this.trigger('afterGet', doc);
        return doc.objectOutput(options.includeAttachments);
    }

    // Loads every stored document (optionally limited). Documents that fail
    // to unpack (e.g. wrong password) are logged and dropped from the result
    // rather than failing the whole read.
    async getAll(options = {}) {
        const stored = await this.indexedDB.getAll(this.db, 'documents', undefined, options.limit);
        return Promise.all(stored.map(async docData => {
            try {
                const doc = new Document(docData, {
                    password: options.password,
                    encrypted: docData._encrypted,
                    compressed: docData._compressed
                });
                if (docData.packedData) {
                    await doc.unpack();
                }
                return doc.objectOutput();
            } catch (error) {
                console.error(`Failed to unpack document ${docData._id}:`, error);
                return null;
            }
        })).then(docs => docs.filter(Boolean));
    }

    // Shallow-merges `updates` into the existing document's data, re-packs it
    // and overwrites the stored record. Options not supplied fall back to the
    // stored flags. Supplying options.attachments REPLACES all attachments.
    // Throws LacertaDBError('DOCUMENT_NOT_FOUND') when the id is unknown.
    async update(docId, updates, options = {}) {
        await this.trigger('beforeUpdate', { docId, updates });

        const stored = await this.indexedDB.get(this.db, 'documents', docId);
        if (!stored) {
            throw new LacertaDBError(`Document with id '${docId}' not found for update.`, 'DOCUMENT_NOT_FOUND');
        }

        const existingDoc = new Document(stored, { password: options.password });
        if (stored.packedData) await existingDoc.unpack();

        const updatedData = { ...existingDoc.data, ...updates };

        // Replaced `??` with ternary operator for compatibility
        const doc = new Document({
            _id: docId,
            _created: stored._created,
            data: updatedData
        }, {
            encrypted: options.encrypted !== undefined ? options.encrypted : stored._encrypted,
            compressed: options.compressed !== undefined ? options.compressed : stored._compressed,
            permanent: options.permanent !== undefined ? options.permanent : stored._permanent,
            password: options.password
        });
        doc._modified = Date.now();

        const attachments = options.attachments;
        if (attachments && attachments.length > 0) {
            // New attachments replace the old set entirely.
            await this.opfs.deleteAttachments(this.database.name, this.name, docId);
            const preparedAttachments = await Promise.all(
                attachments.map(att => (att instanceof File || att instanceof Blob) ?
                    OPFSUtility.prepareAttachment(att, att.name) :
                    Promise.resolve(att))
            );
            doc._attachments = await this.opfs.saveAttachments(this.database.name, this.name, doc._id, preparedAttachments);
        } else {
            doc._attachments = stored._attachments;
        }

        await doc.pack();
        const dbOutput = doc.databaseOutput();
        await this.indexedDB.put(this.db, 'documents', dbOutput);

        const sizeKB = dbOutput.packedData.byteLength / 1024;
        this.metadata.updateDocument(doc._id, sizeKB, doc._permanent, doc._attachments.length);
        this.database.metadata.setCollection(this.metadata);

        await this.trigger('afterUpdate', doc);
        this.queryCache.clear();
        return doc._id;
    }

    /**
     * Delete a document from the collection, together with its OPFS
     * attachments. Permanent documents are protected unless forced.
     * @param {string} docId - The document ID to delete
     * @param {Object} options - Delete options
     * @param {boolean} options.force - Force delete even if document is permanent (default: false)
     * @returns {Promise<void>}
     * @throws {LacertaDBError} DOCUMENT_NOT_FOUND or PERMANENT_DOCUMENT_PROTECTION
     */
    async delete(docId, options = {}) {
        await this.trigger('beforeDelete', docId);

        const doc = await this.indexedDB.get(this.db, 'documents', docId);
        if (!doc) {
            throw new LacertaDBError('Document not found for deletion', 'DOCUMENT_NOT_FOUND');
        }

        // Check if document is permanent and force flag is not set
        if (doc._permanent && !options.force) {
            throw new LacertaDBError(
                'Cannot delete a permanent document. Use options.force = true to force deletion.',
                'PERMANENT_DOCUMENT_PROTECTION'
            );
        }

        // Log warning if force deleting a permanent document
        if (doc._permanent && options.force) {
            console.warn(`Force deleting permanent document: ${docId}`);
        }

        await this.indexedDB.delete(this.db, 'documents', docId);
        const attachments = doc._attachments;
        if (attachments && attachments.length > 0) {
            await this.opfs.deleteAttachments(this.database.name, this.name, docId);
        }

        this.metadata.removeDocument(docId);
        this.database.metadata.setCollection(this.metadata);

        await this.trigger('afterDelete', docId);
        this.queryCache.clear();
    }

    // Filtered read with a TTL memo cache keyed by the serialized
    // { filter, options }. Supports sort/skip/limit/projection via the
    // aggregation stages. Every write operation clears the cache.
    async query(filter = {}, options = {}) {
        const startTime = performance.now();
        const cacheKey = base64.encode(serializer.serialize({ filter, options }));
        const cached = this.queryCache.get(cacheKey);

        if (cached && Date.now() - cached.timestamp < this.cacheTimeout) {
            if (this.performanceMonitor) this.performanceMonitor.recordCacheHit();
            return cached.data;
        }
        if (this.performanceMonitor) this.performanceMonitor.recordCacheMiss();

        let results = await this.getAll(options);
        if (Object.keys(filter).length > 0) {
            results = results.filter(doc => queryEngine.evaluate(doc, filter));
        }

        if (options.sort) results = aggregationPipeline.stages.$sort(results, options.sort);
        if (options.skip) results = aggregationPipeline.stages.$skip(results, options.skip);
        if (options.limit) results = aggregationPipeline.stages.$limit(results, options.limit);
        if (options.projection) results = aggregationPipeline.stages.$project(results, options.projection);

        if (this.performanceMonitor) this.performanceMonitor.recordOperation('query', performance.now() - startTime);

        this.queryCache.set(cacheKey, { data: results, timestamp: Date.now() });
        if (this.queryCache.size > 100) {
            // Evict the oldest insertion (Maps iterate in insertion order).
            this.queryCache.delete(this.queryCache.keys().next().value);
        }
        return results;
    }

    // Runs an aggregation pipeline over the full (unpacked) document set.
    async aggregate(pipeline) {
        const startTime = performance.now();
        const docs = await this.getAll();
        const result = await aggregationPipeline.execute(docs, pipeline, this.database);
        if (this.performanceMonitor) this.performanceMonitor.recordOperation('aggregate', performance.now() - startTime);
        return result;
    }

    // Adds many documents concurrently; per-document failures are reported
    // in the result array instead of rejecting the whole batch.
    async batchAdd(documents, options) {
        const startTime = performance.now();
        const results = await Promise.all(documents.map(doc =>
            this.add(doc, options)
                .then(id => ({ success: true, id }))
                .catch(error => ({ success: false, error: error.message }))
        ));
        if (this.performanceMonitor) this.performanceMonitor.recordOperation('batchAdd', performance.now() - startTime);
        return results;
    }

    // Updates many documents concurrently; same per-item result shape as
    // batchAdd. `updates` items are { id, data }.
    batchUpdate(updates, options) {
        return Promise.all(updates.map(update =>
            this.update(update.id, update.data, options)
                .then(id => ({ success: true, id }))
                .catch(error => ({ success: false, id: update.id, error: error.message }))
        ));
    }

    /**
     * Batch delete documents
     * @param {Array<string|Object>} items - Array of document IDs or objects with id and options
     * @returns {Promise<Array>} Results of batch deletion
     */
    async batchDelete(items) {
        // Handle both simple ID array and array of objects with options
        const normalizedItems = items.map(item => {
            if (typeof item === 'string') {
                return { id: item, options: {} };
            }
            return { id: item.id, options: item.options || {} };
        });

        return Promise.all(normalizedItems.map(({ id, options }) =>
            this.delete(id, options)
                .then(() => ({ success: true, id }))
                .catch(error => ({ success: false, id, error: error.message }))
        ));
    }

    /**
     * Clear all documents from the collection
     * @param {Object} options - Clear options
     * @param {boolean} options.force - Force clear even permanent documents (default: false)
     * @returns {Promise<void>}
     */
    async clear(options = {}) {
        if (options.force) {
            // Force clear all documents including permanent ones
            await this.indexedDB.clear(this.db, 'documents');
            this.metadata = new CollectionMetadata(this.name);
            this.database.metadata.setCollection(this.metadata);
            this.queryCache.clear();
        } else {
            // Clear only non-permanent documents
            // NOTE(review): relies on objectOutput() exposing _permanent —
            // verify against the Document class.
            const allDocs = await this.getAll();
            const nonPermanentDocs = allDocs.filter(doc => !doc._permanent);
            await this.batchDelete(nonPermanentDocs.map(doc => doc._id));
        }
    }

    // Triggers eviction once the collection grows past the buffer limit
    // (only when a finite size limit is configured).
    async checkSpaceLimit() {
        if (this.settings.sizeLimitKB !== Infinity && this.metadata.sizeKB > this.settings.bufferLimitKB) {
            await this.freeSpace();
        }
    }

    // Evicts oldest non-permanent documents, 10 at a time, until the
    // collection shrinks below 80% of the buffer limit (or nothing
    // evictable remains).
    async freeSpace() {
        const targetSize = this.settings.bufferLimitKB * 0.8;
        while (this.metadata.sizeKB > targetSize) {
            const oldestDocs = this.metadata.getOldestNonPermanentDocuments(10);
            if (oldestDocs.length === 0) break;
            await this.batchDelete(oldestDocs);
        }
    }

    // Subscribes a callback to a lifecycle event (e.g. 'beforeAdd').
    on(event, callback) {
        if (!this.events.has(event)) this.events.set(event, []);
        this.events.get(event).push(callback);
    }

    // Removes a previously registered callback.
    off(event, callback) {
        if (!this.events.has(event)) return;
        const listeners = this.events.get(event).filter(cb => cb !== callback);
        this.events.set(event, listeners);
    }

    // Invokes all listeners for `event` sequentially, awaiting each one.
    async trigger(event, data) {
        if (!this.events.has(event)) return;
        for (const callback of this.events.get(event)) {
            await callback(data);
        }
    }

    // Drops all memoized query results.
    clearCache() { this.queryCache.clear(); }

    // Stops the cleanup timer and closes the IndexedDB handle.
    destroy() {
        clearInterval(this.cleanupInterval);
        if (this.db) {
            this.db.close();
        }
    }
}
1635
-
1636
- // ========================
1637
- // Database Class
1638
- // ========================
1639
-
1640
/**
 * A logical LacertaDB database: a named set of Collections plus persisted
 * metadata, settings and a QuickStore. Obtain instances via
 * LacertaDB.getDatabase(); init() must complete before use.
 */
class Database {
    constructor(name, performanceMonitor) {
        this.name = name;
        this.collections = new Map();     // loaded Collection instances by name
        this.metadata = null;             // DatabaseMetadata, set by init()
        this.settings = null;             // Settings, set by init()
        this.quickStore = null;           // QuickStore, set by init()
        this.performanceMonitor = performanceMonitor;
    }

    // Loads persisted metadata and settings for this database name.
    async init() {
        this.metadata = DatabaseMetadata.load(this.name);
        this.settings = Settings.load(this.name);
        this.quickStore = new QuickStore(this.name);
        return this;
    }

    /**
     * Creates and initializes a new collection.
     * @throws {LacertaDBError} COLLECTION_EXISTS when already loaded.
     */
    async createCollection(name, options) {
        if (this.collections.has(name)) {
            throw new LacertaDBError(`Collection '${name}' already exists.`, 'COLLECTION_EXISTS');
        }

        const collection = new Collection(name, this);
        await collection.init();
        this.collections.set(name, collection);

        if (!this.metadata.collections[name]) {
            this.metadata.setCollection(new CollectionMetadata(name));
        }
        return collection;
    }

    /**
     * Returns a loaded collection, lazily initializing it from metadata.
     * @throws {LacertaDBError} COLLECTION_NOT_FOUND when unknown.
     */
    async getCollection(name) {
        if (this.collections.has(name)) {
            return this.collections.get(name);
        }
        if (this.metadata.collections[name]) {
            const collection = new Collection(name, this);
            await collection.init();
            this.collections.set(name, collection);
            return collection;
        }
        throw new LacertaDBError(`Collection '${name}' not found.`, 'COLLECTION_NOT_FOUND');
    }

    // Force-clears and destroys a collection (when loaded), removes its
    // metadata and deletes the backing IndexedDB database. Safe to call for
    // collections that were never loaded in this session.
    async dropCollection(name) {
        if (this.collections.has(name)) {
            const collection = this.collections.get(name);
            await collection.clear({ force: true }); // Force clear to remove permanent documents
            collection.destroy();
            this.collections.delete(name);
        }

        this.metadata.removeCollection(name);

        const dbName = `${this.name}_${name}`;
        await new Promise((resolve, reject) => {
            const deleteReq = indexedDB.deleteDatabase(dbName);
            deleteReq.onsuccess = resolve;
            deleteReq.onerror = reject;
            deleteReq.onblocked = () => console.warn(`Deletion of '${dbName}' is blocked.`);
        });
    }

    // All collection names known to metadata (loaded or not).
    listCollections() {
        return Object.keys(this.metadata.collections);
    }

    // Per-collection size/count/timestamp summary derived from metadata.
    getStats() {
        return {
            name: this.name,
            totalSizeKB: this.metadata.totalSizeKB,
            totalDocuments: this.metadata.totalLength,
            collections: Object.entries(this.metadata.collections).map(([name, data]) => ({
                name,
                sizeKB: data.sizeKB,
                documents: data.length,
                createdAt: new Date(data.createdAt).toISOString(),
                modifiedAt: new Date(data.modifiedAt).toISOString()
            }))
        };
    }

    updateSettings(newSettings) { this.settings.updateSettings(newSettings); }

    /**
     * Serializes every collection of this database.
     * 'json' -> base64-encoded TurboSerial bytes; 'encrypted' (requires a
     * password) -> base64-encoded encrypted bytes.
     * @throws {LacertaDBError} INVALID_FORMAT on unsupported combinations.
     */
    async export(format = 'json', password = null) {
        const data = {
            version: '4.0.4',
            database: this.name,
            timestamp: Date.now(),
            collections: {}
        };

        for (const collName of this.listCollections()) {
            const collection = await this.getCollection(collName);
            data.collections[collName] = await collection.getAll({ password });
        }

        if (format === 'json') {
            const serialized = serializer.serialize(data);
            return base64.encode(serialized);
        }
        if (format === 'encrypted' && password) {
            const encryption = new BrowserEncryptionUtility();
            const serializedData = serializer.serialize(data);
            const encrypted = await encryption.encrypt(serializedData, password);
            return base64.encode(encrypted);
        }
        throw new LacertaDBError(`Unsupported export format: ${format}`, 'INVALID_FORMAT');
    }

    /**
     * Inverse of export(): creates collections as needed and bulk-inserts
     * their documents.
     * @returns {{collections: number, documents: number}} import counts
     * @throws {LacertaDBError} IMPORT_PARSE_FAILED on undecodable payloads.
     */
    async import(data, format = 'json', password = null) {
        let parsed;
        try {
            const decoded = base64.decode(data);
            if (format === 'encrypted' && password) {
                const encryption = new BrowserEncryptionUtility();
                const decrypted = await encryption.decrypt(decoded, password);
                parsed = serializer.deserialize(decrypted);
            } else {
                parsed = serializer.deserialize(decoded);
            }
        } catch (e) {
            throw new LacertaDBError('Failed to parse import data', 'IMPORT_PARSE_FAILED', e);
        }

        for (const collName in parsed.collections) {
            const docs = parsed.collections[collName];
            const collection = await this.createCollection(collName).catch(() => this.getCollection(collName));
            await collection.batchAdd(docs);
        }

        const docCount = Object.values(parsed.collections).reduce((sum, docs) => sum + docs.length, 0);
        return {
            collections: Object.keys(parsed.collections).length,
            documents: docCount
        };
    }

    // Drops every collection, then resets metadata and the quick store.
    async clearAll() {
        // Bug fix: iterate every collection recorded in metadata, not just the
        // ones currently loaded into `this.collections` — previously the
        // IndexedDB stores of never-loaded collections survived clearAll().
        await Promise.all(this.listCollections().map(name => this.dropCollection(name)));
        this.collections.clear();
        this.metadata = new DatabaseMetadata(this.name);
        this.metadata.save();
        this.quickStore.clear();
    }

    // Releases timers and IndexedDB handles of all loaded collections.
    destroy() {
        this.collections.forEach(collection => collection.destroy());
        this.collections.clear();
    }
}
1792
-
1793
- // ========================
1794
- // Main LacertaDB Class
1795
- // ========================
1796
-
1797
/**
 * Top-level entry point: manages named Database instances, discovers
 * databases via localStorage bookkeeping keys, and provides whole-store
 * backup/restore.
 */
export class LacertaDB {
    constructor() {
        this.databases = new Map();   // name -> initialized Database
        this.performanceMonitor = new PerformanceMonitor();
    }

    // Lazily creates and initializes a Database, cached by name.
    async getDatabase(name) {
        if (!this.databases.has(name)) {
            const db = new Database(name, this.performanceMonitor);
            await db.init();
            this.databases.set(name, db);
        }
        return this.databases.get(name);
    }

    // Drops a database's contents and removes every localStorage key it owns.
    async dropDatabase(name) {
        if (this.databases.has(name)) {
            const db = this.databases.get(name);
            await db.clearAll();
            db.destroy();
            this.databases.delete(name);
        }

        ['metadata', 'settings', 'version'].forEach(suffix => {
            localStorage.removeItem(`lacertadb_${name}_${suffix}`);
        });
        const quickStore = new QuickStore(name);
        quickStore.clear();
    }

    /**
     * Discovers database names from localStorage bookkeeping keys.
     * Bug fix: names containing underscores (e.g. 'my_app') were truncated by
     * the old `key.split('_')[1]`; match the known key shapes instead.
     * @returns {string[]} unique database names
     */
    listDatabases() {
        const dbNames = new Set();
        // Keys written by this library have the shape
        // `lacertadb_<name>_<suffix>` with the suffixes removed by
        // dropDatabase(). NOTE(review): if QuickStore persists under another
        // suffix, add it to this pattern — verify against QuickStore.
        const keyPattern = /^lacertadb_(.+)_(metadata|settings|version)$/;
        for (let i = 0; i < localStorage.length; i++) {
            const key = localStorage.key(i);
            const match = key && key.match(keyPattern);
            if (match) {
                dbNames.add(match[1]);
            }
        }
        return [...dbNames];
    }

    // Serializes every database into one payload; optionally encrypted.
    async createBackup(password = null) {
        const backup = {
            version: '4.0.4',
            timestamp: Date.now(),
            databases: {}
        };

        for (const dbName of this.listDatabases()) {
            const db = await this.getDatabase(dbName);
            const exported = await db.export('json');
            const decoded = base64.decode(exported);
            backup.databases[dbName] = serializer.deserialize(decoded);
        }

        const serializedBackup = serializer.serialize(backup);
        if (password) {
            const encryption = new BrowserEncryptionUtility();
            const encrypted = await encryption.encrypt(serializedBackup, password);
            return base64.encode(encrypted);
        }
        return base64.encode(serializedBackup);
    }

    /**
     * Restores a backup produced by createBackup().
     * @returns {{databases: number, collections: number, documents: number}}
     * @throws {LacertaDBError} BACKUP_PARSE_FAILED on undecodable payloads.
     */
    async restoreBackup(backupData, password = null) {
        let backup;
        try {
            let decodedData = base64.decode(backupData);
            if (password) {
                const encryption = new BrowserEncryptionUtility();
                const decrypted = await encryption.decrypt(decodedData, password);
                backup = serializer.deserialize(decrypted);
            } else {
                backup = serializer.deserialize(decodedData);
            }
        } catch (e) {
            throw new LacertaDBError('Failed to parse backup data', 'BACKUP_PARSE_FAILED', e);
        }

        const results = { databases: 0, collections: 0, documents: 0 };
        for (const [dbName, dbData] of Object.entries(backup.databases)) {
            const db = await this.getDatabase(dbName);
            const encodedDbData = base64.encode(serializer.serialize(dbData));
            const importResult = await db.import(encodedDbData);

            results.databases++;
            results.collections += importResult.collections;
            results.documents += importResult.documents;
        }
        return results;
    }
}
1891
-
1892
// Export all major components for advanced usage.
// (LacertaDB itself is exported at its declaration; QueryEngine,
// AggregationPipeline and the metadata/settings helpers stay internal.)
export {
    Database,
    Collection,
    Document,
    MigrationManager,
    PerformanceMonitor,
    LacertaDBError,
    OPFSUtility
};