verso-db 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68) hide show
  1. package/CHANGELOG.md +46 -0
  2. package/LICENSE +21 -0
  3. package/README.md +252 -0
  4. package/dist/BinaryHeap.d.ts +25 -0
  5. package/dist/BinaryHeap.d.ts.map +1 -0
  6. package/dist/Collection.d.ts +156 -0
  7. package/dist/Collection.d.ts.map +1 -0
  8. package/dist/HNSWIndex.d.ts +357 -0
  9. package/dist/HNSWIndex.d.ts.map +1 -0
  10. package/dist/MaxBinaryHeap.d.ts +63 -0
  11. package/dist/MaxBinaryHeap.d.ts.map +1 -0
  12. package/dist/Storage.d.ts +54 -0
  13. package/dist/Storage.d.ts.map +1 -0
  14. package/dist/VectorDB.d.ts +44 -0
  15. package/dist/VectorDB.d.ts.map +1 -0
  16. package/dist/backends/DistanceBackend.d.ts +5 -0
  17. package/dist/backends/DistanceBackend.d.ts.map +1 -0
  18. package/dist/backends/JsDistanceBackend.d.ts +37 -0
  19. package/dist/backends/JsDistanceBackend.d.ts.map +1 -0
  20. package/dist/encoding/DeltaEncoder.d.ts +61 -0
  21. package/dist/encoding/DeltaEncoder.d.ts.map +1 -0
  22. package/dist/errors.d.ts +58 -0
  23. package/dist/errors.d.ts.map +1 -0
  24. package/dist/index.d.ts +64 -0
  25. package/dist/index.d.ts.map +1 -0
  26. package/dist/index.js +3732 -0
  27. package/dist/presets.d.ts +91 -0
  28. package/dist/presets.d.ts.map +1 -0
  29. package/dist/quantization/ScalarQuantizer.d.ts +114 -0
  30. package/dist/quantization/ScalarQuantizer.d.ts.map +1 -0
  31. package/dist/storage/BatchWriter.d.ts +104 -0
  32. package/dist/storage/BatchWriter.d.ts.map +1 -0
  33. package/dist/storage/BunStorageBackend.d.ts +58 -0
  34. package/dist/storage/BunStorageBackend.d.ts.map +1 -0
  35. package/dist/storage/MemoryBackend.d.ts +44 -0
  36. package/dist/storage/MemoryBackend.d.ts.map +1 -0
  37. package/dist/storage/OPFSBackend.d.ts +59 -0
  38. package/dist/storage/OPFSBackend.d.ts.map +1 -0
  39. package/dist/storage/StorageBackend.d.ts +66 -0
  40. package/dist/storage/StorageBackend.d.ts.map +1 -0
  41. package/dist/storage/WriteAheadLog.d.ts +111 -0
  42. package/dist/storage/WriteAheadLog.d.ts.map +1 -0
  43. package/dist/storage/createStorageBackend.d.ts +40 -0
  44. package/dist/storage/createStorageBackend.d.ts.map +1 -0
  45. package/dist/storage/index.d.ts +30 -0
  46. package/dist/storage/index.d.ts.map +1 -0
  47. package/package.json +98 -0
  48. package/src/BinaryHeap.ts +131 -0
  49. package/src/Collection.ts +695 -0
  50. package/src/HNSWIndex.ts +1839 -0
  51. package/src/MaxBinaryHeap.ts +175 -0
  52. package/src/Storage.ts +435 -0
  53. package/src/VectorDB.ts +109 -0
  54. package/src/backends/DistanceBackend.ts +17 -0
  55. package/src/backends/JsDistanceBackend.ts +227 -0
  56. package/src/encoding/DeltaEncoder.ts +217 -0
  57. package/src/errors.ts +110 -0
  58. package/src/index.ts +138 -0
  59. package/src/presets.ts +229 -0
  60. package/src/quantization/ScalarQuantizer.ts +383 -0
  61. package/src/storage/BatchWriter.ts +336 -0
  62. package/src/storage/BunStorageBackend.ts +161 -0
  63. package/src/storage/MemoryBackend.ts +120 -0
  64. package/src/storage/OPFSBackend.ts +250 -0
  65. package/src/storage/StorageBackend.ts +74 -0
  66. package/src/storage/WriteAheadLog.ts +326 -0
  67. package/src/storage/createStorageBackend.ts +137 -0
  68. package/src/storage/index.ts +53 -0
@@ -0,0 +1,336 @@
1
+ /**
2
+ * Batch Write Coalescing for Storage Backends
3
+ *
4
+ * Buffers writes in memory and flushes them in batches to reduce I/O operations.
5
+ * This is especially beneficial for:
6
+ * - OPFS where each write has overhead
7
+ * - IndexedDB where transactions have cost
8
+ * - Network storage where round-trips are expensive
9
+ *
10
+ * Features:
11
+ * - Configurable flush thresholds (count and size)
12
+ * - Automatic flushing when thresholds are reached
13
+ * - Explicit flush for durability guarantees
14
+ * - Coalesces multiple writes to the same key
15
+ */
16
+
17
+ import type { StorageBackend } from './StorageBackend';
18
+
19
/**
 * Tuning knobs for BatchWriter's buffering and auto-flush behavior.
 * All fields are optional; defaults are noted per field.
 */
export interface BatchWriterOptions {
  /** Maximum number of pending writes before auto-flush (default: 100) */
  maxPendingWrites?: number;
  /** Maximum total size of pending data in bytes before auto-flush (default: 1MB) */
  maxPendingBytes?: number;
  /** Auto-flush interval in milliseconds (0 = disabled, default: 0) */
  autoFlushInterval?: number;
}
27
+
28
/**
 * A buffered full-value write awaiting flush (internal to BatchWriter).
 */
interface PendingWrite {
  // Storage key the data belongs to.
  key: string;
  // Bytes to write to the backend when flushed.
  data: Uint8Array;
  // Always false in practice: write() sets it to false and append()
  // merges into an existing write rather than creating one.
  append: boolean;
}
33
+
34
+ /**
35
+ * BatchWriter - Coalesces multiple writes into batched flushes
36
+ */
37
+ export class BatchWriter {
38
+ private backend: StorageBackend;
39
+ private pendingWrites: Map<string, PendingWrite> = new Map();
40
+ private pendingAppends: Map<string, Uint8Array[]> = new Map();
41
+ private pendingBytes: number = 0;
42
+ private maxPendingWrites: number;
43
+ private maxPendingBytes: number;
44
+ private autoFlushInterval: number;
45
+ private flushTimer: ReturnType<typeof setTimeout> | null = null;
46
+ private isFlushing: boolean = false;
47
+ private flushPromise: Promise<void> | null = null;
48
+
49
+ constructor(backend: StorageBackend, options: BatchWriterOptions = {}) {
50
+ this.backend = backend;
51
+ this.maxPendingWrites = options.maxPendingWrites ?? 100;
52
+ this.maxPendingBytes = options.maxPendingBytes ?? 1024 * 1024; // 1MB
53
+ this.autoFlushInterval = options.autoFlushInterval ?? 0;
54
+
55
+ if (this.autoFlushInterval > 0) {
56
+ this.startAutoFlush();
57
+ }
58
+ }
59
+
60
+ /**
61
+ * Write data to a key (buffered)
62
+ * Multiple writes to the same key will coalesce to the last value
63
+ */
64
+ async write(key: string, data: ArrayBuffer | Uint8Array): Promise<void> {
65
+ const bytes = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
66
+
67
+ // If there's already a pending write, subtract its size
68
+ const existing = this.pendingWrites.get(key);
69
+ if (existing) {
70
+ this.pendingBytes -= existing.data.length;
71
+ }
72
+
73
+ // Clear any pending appends for this key (write overwrites)
74
+ const existingAppends = this.pendingAppends.get(key);
75
+ if (existingAppends) {
76
+ for (const append of existingAppends) {
77
+ this.pendingBytes -= append.length;
78
+ }
79
+ this.pendingAppends.delete(key);
80
+ }
81
+
82
+ this.pendingWrites.set(key, {
83
+ key,
84
+ data: bytes,
85
+ append: false
86
+ });
87
+ this.pendingBytes += bytes.length;
88
+
89
+ await this.checkThresholds();
90
+ }
91
+
92
+ /**
93
+ * Append data to a key (buffered)
94
+ * Multiple appends to the same key will be concatenated
95
+ */
96
+ async append(key: string, data: ArrayBuffer | Uint8Array): Promise<void> {
97
+ const bytes = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
98
+
99
+ // If there's a pending write, append to it instead
100
+ const existingWrite = this.pendingWrites.get(key);
101
+ if (existingWrite) {
102
+ // Concatenate with existing write
103
+ const newData = new Uint8Array(existingWrite.data.length + bytes.length);
104
+ newData.set(existingWrite.data);
105
+ newData.set(bytes, existingWrite.data.length);
106
+ this.pendingBytes -= existingWrite.data.length;
107
+ this.pendingBytes += newData.length;
108
+ existingWrite.data = newData;
109
+ } else {
110
+ // Add to pending appends
111
+ let appends = this.pendingAppends.get(key);
112
+ if (!appends) {
113
+ appends = [];
114
+ this.pendingAppends.set(key, appends);
115
+ }
116
+ appends.push(bytes);
117
+ this.pendingBytes += bytes.length;
118
+ }
119
+
120
+ await this.checkThresholds();
121
+ }
122
+
123
+ /**
124
+ * Delete a key (buffered)
125
+ * Clears any pending writes/appends for this key
126
+ */
127
+ async delete(key: string): Promise<void> {
128
+ // Clear pending operations
129
+ const existing = this.pendingWrites.get(key);
130
+ if (existing) {
131
+ this.pendingBytes -= existing.data.length;
132
+ this.pendingWrites.delete(key);
133
+ }
134
+
135
+ const existingAppends = this.pendingAppends.get(key);
136
+ if (existingAppends) {
137
+ for (const append of existingAppends) {
138
+ this.pendingBytes -= append.length;
139
+ }
140
+ this.pendingAppends.delete(key);
141
+ }
142
+
143
+ // Perform delete immediately (can't batch deletes effectively)
144
+ await this.backend.delete(key);
145
+ }
146
+
147
+ /**
148
+ * Read data from a key
149
+ * Returns pending data if available, otherwise reads from backend
150
+ */
151
+ async read(key: string): Promise<ArrayBuffer | null> {
152
+ // Check for pending write
153
+ const pending = this.pendingWrites.get(key);
154
+ if (pending) {
155
+ const buffer = pending.data.buffer.slice(
156
+ pending.data.byteOffset,
157
+ pending.data.byteOffset + pending.data.length
158
+ );
159
+ return buffer as ArrayBuffer;
160
+ }
161
+
162
+ // Check for pending appends - need to combine with existing data
163
+ const appends = this.pendingAppends.get(key);
164
+ if (appends && appends.length > 0) {
165
+ const existing = await this.backend.read(key);
166
+ const existingBytes = existing ? new Uint8Array(existing) : new Uint8Array(0);
167
+
168
+ // Calculate total size
169
+ let totalSize = existingBytes.length;
170
+ for (const append of appends) {
171
+ totalSize += append.length;
172
+ }
173
+
174
+ // Combine
175
+ const result = new Uint8Array(totalSize);
176
+ result.set(existingBytes);
177
+ let offset = existingBytes.length;
178
+ for (const append of appends) {
179
+ result.set(append, offset);
180
+ offset += append.length;
181
+ }
182
+
183
+ return result.buffer;
184
+ }
185
+
186
+ // No pending data, read from backend
187
+ return this.backend.read(key);
188
+ }
189
+
190
+ /**
191
+ * Check if thresholds are exceeded and flush if needed
192
+ */
193
+ private async checkThresholds(): Promise<void> {
194
+ const totalWrites = this.pendingWrites.size + this.pendingAppends.size;
195
+
196
+ if (totalWrites >= this.maxPendingWrites || this.pendingBytes >= this.maxPendingBytes) {
197
+ await this.flush();
198
+ }
199
+ }
200
+
201
+ /**
202
+ * Flush all pending writes to the backend
203
+ */
204
+ async flush(): Promise<void> {
205
+ // If already flushing, wait for it to complete
206
+ if (this.isFlushing && this.flushPromise) {
207
+ await this.flushPromise;
208
+ return;
209
+ }
210
+
211
+ if (this.pendingWrites.size === 0 && this.pendingAppends.size === 0) {
212
+ return;
213
+ }
214
+
215
+ this.isFlushing = true;
216
+ this.flushPromise = this.doFlush();
217
+
218
+ try {
219
+ await this.flushPromise;
220
+ } finally {
221
+ this.isFlushing = false;
222
+ this.flushPromise = null;
223
+ }
224
+ }
225
+
226
+ private async doFlush(): Promise<void> {
227
+ // Capture current pending writes and clear
228
+ const writes = Array.from(this.pendingWrites.values());
229
+ const appends = Array.from(this.pendingAppends.entries());
230
+
231
+ this.pendingWrites.clear();
232
+ this.pendingAppends.clear();
233
+ this.pendingBytes = 0;
234
+
235
+ // Execute all writes
236
+ const promises: Promise<void>[] = [];
237
+
238
+ for (const write of writes) {
239
+ promises.push(this.backend.write(write.key, write.data));
240
+ }
241
+
242
+ for (const [key, chunks] of appends) {
243
+ // Concatenate all appends for this key
244
+ let totalSize = 0;
245
+ for (const chunk of chunks) {
246
+ totalSize += chunk.length;
247
+ }
248
+
249
+ const combined = new Uint8Array(totalSize);
250
+ let offset = 0;
251
+ for (const chunk of chunks) {
252
+ combined.set(chunk, offset);
253
+ offset += chunk.length;
254
+ }
255
+
256
+ promises.push(this.backend.append(key, combined));
257
+ }
258
+
259
+ // Wait for all operations to complete
260
+ await Promise.all(promises);
261
+ }
262
+
263
+ /**
264
+ * Get statistics about pending writes
265
+ */
266
+ getStats(): {
267
+ pendingWrites: number;
268
+ pendingAppends: number;
269
+ pendingBytes: number;
270
+ maxPendingWrites: number;
271
+ maxPendingBytes: number;
272
+ } {
273
+ return {
274
+ pendingWrites: this.pendingWrites.size,
275
+ pendingAppends: this.pendingAppends.size,
276
+ pendingBytes: this.pendingBytes,
277
+ maxPendingWrites: this.maxPendingWrites,
278
+ maxPendingBytes: this.maxPendingBytes
279
+ };
280
+ }
281
+
282
+ /**
283
+ * Check if there are pending writes
284
+ */
285
+ hasPendingWrites(): boolean {
286
+ return this.pendingWrites.size > 0 || this.pendingAppends.size > 0;
287
+ }
288
+
289
+ /**
290
+ * Start auto-flush timer
291
+ */
292
+ private startAutoFlush(): void {
293
+ if (this.flushTimer) return;
294
+
295
+ this.flushTimer = setInterval(async () => {
296
+ if (this.hasPendingWrites()) {
297
+ await this.flush();
298
+ }
299
+ }, this.autoFlushInterval);
300
+ }
301
+
302
+ /**
303
+ * Stop auto-flush timer
304
+ */
305
+ stopAutoFlush(): void {
306
+ if (this.flushTimer) {
307
+ clearInterval(this.flushTimer);
308
+ this.flushTimer = null;
309
+ }
310
+ }
311
+
312
+ /**
313
+ * Close the batch writer, flushing any pending writes
314
+ */
315
+ async close(): Promise<void> {
316
+ this.stopAutoFlush();
317
+ await this.flush();
318
+ }
319
+
320
+ /**
321
+ * Get the underlying storage backend
322
+ */
323
+ getBackend(): StorageBackend {
324
+ return this.backend;
325
+ }
326
+ }
327
+
328
+ /**
329
+ * Create a batch writer that wraps an existing storage backend
330
+ */
331
+ export function createBatchWriter(
332
+ backend: StorageBackend,
333
+ options?: BatchWriterOptions
334
+ ): BatchWriter {
335
+ return new BatchWriter(backend, options);
336
+ }
@@ -0,0 +1,161 @@
1
+ /**
2
+ * Bun Storage Backend
3
+ *
4
+ * High-performance file system storage using Bun's native APIs.
5
+ * Features:
6
+ * - Auto-initializing (no manual init() required)
7
+ * - Automatic memory mapping for large files
8
+ * - Efficient async I/O with O(1) append
9
+ * - Native TypedArray support
10
+ */
11
+
12
import { appendFile, mkdir, readdir, rm, stat, unlink } from 'fs/promises';
import * as path from 'path';
import type { StorageBackend } from './StorageBackend';
15
+
16
+ export class BunStorageBackend implements StorageBackend {
17
+ readonly type = 'bun';
18
+ private basePath: string;
19
+ // Cache of directories we've already ensured exist - avoids redundant mkdir calls
20
+ // Profiling showed 5-10% write overhead from mkdir on every write
21
+ private dirCache: Set<string> = new Set();
22
+
23
+ /**
24
+ * Create a new Bun storage backend
25
+ * @param basePath Base directory for all storage operations
26
+ */
27
+ constructor(basePath: string = './vectordb_data') {
28
+ this.basePath = basePath;
29
+ }
30
+
31
+ /**
32
+ * Ensure directory exists, using cache to avoid redundant mkdir calls
33
+ */
34
+ private async ensureDir(dir: string): Promise<void> {
35
+ if (this.dirCache.has(dir)) return;
36
+ await mkdir(dir, { recursive: true }).catch(() => {});
37
+ this.dirCache.add(dir);
38
+ }
39
+
40
+ /**
41
+ * Get the full path for a key
42
+ */
43
+ private getFullPath(key: string): string {
44
+ return path.join(this.basePath, key);
45
+ }
46
+
47
+ /**
48
+ * Ensure base directory exists (optional - operations auto-initialize)
49
+ * @deprecated No longer required - write/append create directories automatically
50
+ */
51
+ async init(): Promise<void> {
52
+ await mkdir(this.basePath, { recursive: true }).catch(() => {});
53
+ }
54
+
55
+ async read(key: string): Promise<ArrayBuffer | null> {
56
+ const fullPath = this.getFullPath(key);
57
+ const file = Bun.file(fullPath);
58
+
59
+ if (!(await file.exists())) {
60
+ return null;
61
+ }
62
+
63
+ return file.arrayBuffer();
64
+ }
65
+
66
+ async write(key: string, data: ArrayBuffer | Uint8Array): Promise<void> {
67
+ const fullPath = this.getFullPath(key);
68
+
69
+ // Ensure parent directory exists (cached to avoid redundant syscalls)
70
+ await this.ensureDir(path.dirname(fullPath));
71
+
72
+ await Bun.write(fullPath, data);
73
+ }
74
+
75
+ async append(key: string, data: ArrayBuffer | Uint8Array): Promise<void> {
76
+ const fullPath = this.getFullPath(key);
77
+
78
+ // Ensure parent directory exists (cached to avoid redundant syscalls)
79
+ await this.ensureDir(path.dirname(fullPath));
80
+
81
+ // Use true O(1) append instead of O(n) read-modify-write
82
+ const appendData = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
83
+ await appendFile(fullPath, appendData);
84
+ }
85
+
86
+ async delete(key: string): Promise<void> {
87
+ const fullPath = this.getFullPath(key);
88
+ try {
89
+ await unlink(fullPath);
90
+ } catch {
91
+ // File may not exist, which is fine
92
+ }
93
+ }
94
+
95
+ async exists(key: string): Promise<boolean> {
96
+ const fullPath = this.getFullPath(key);
97
+ const file = Bun.file(fullPath);
98
+ return file.exists();
99
+ }
100
+
101
+ async list(prefix?: string): Promise<string[]> {
102
+ const searchPath = prefix ? this.getFullPath(prefix) : this.basePath;
103
+
104
+ try {
105
+ const entries = await readdir(searchPath, { recursive: true });
106
+ return entries.map(entry => {
107
+ const fullPath = path.join(searchPath, entry);
108
+ // Return path relative to basePath
109
+ return path.relative(this.basePath, fullPath);
110
+ });
111
+ } catch {
112
+ return [];
113
+ }
114
+ }
115
+
116
+ async mkdir(dirPath: string): Promise<void> {
117
+ const fullPath = this.getFullPath(dirPath);
118
+ await mkdir(fullPath, { recursive: true }).catch(() => {});
119
+ }
120
+
121
+ /**
122
+ * Delete all data in the storage directory
123
+ */
124
+ async clear(): Promise<void> {
125
+ await rm(this.basePath, { recursive: true, force: true }).catch(() => {});
126
+ await mkdir(this.basePath, { recursive: true }).catch(() => {});
127
+ // Clear directory cache since directories were deleted
128
+ this.dirCache.clear();
129
+ this.dirCache.add(this.basePath);
130
+ }
131
+
132
+ /**
133
+ * Get file size without reading the entire file
134
+ */
135
+ async size(key: string): Promise<number> {
136
+ const fullPath = this.getFullPath(key);
137
+ const file = Bun.file(fullPath);
138
+
139
+ if (!(await file.exists())) {
140
+ return 0;
141
+ }
142
+
143
+ return file.size;
144
+ }
145
+
146
+ /**
147
+ * Read a file as a stream (for large files)
148
+ */
149
+ stream(key: string): ReadableStream<Uint8Array> | null {
150
+ const fullPath = this.getFullPath(key);
151
+ const file = Bun.file(fullPath);
152
+ return file.stream();
153
+ }
154
+
155
+ /**
156
+ * Get the base path
157
+ */
158
+ getBasePath(): string {
159
+ return this.basePath;
160
+ }
161
+ }
@@ -0,0 +1,120 @@
1
+ /**
2
+ * In-Memory Storage Backend
3
+ *
4
+ * Stores all data in memory using a Map. Useful for:
5
+ * - Testing without file system
6
+ * - Small datasets that fit in memory
7
+ * - Browser environments without OPFS/IndexedDB
8
+ * - Temporary caches
9
+ */
10
+
11
+ import type { StorageBackend } from './StorageBackend';
12
+
13
+ export class MemoryBackend implements StorageBackend {
14
+ readonly type = 'memory';
15
+ private storage: Map<string, ArrayBuffer> = new Map();
16
+ private directories: Set<string> = new Set();
17
+
18
+ constructor() {
19
+ this.directories.add(''); // Root directory always exists
20
+ }
21
+
22
+ async read(key: string): Promise<ArrayBuffer | null> {
23
+ const data = this.storage.get(key);
24
+ if (!data) return null;
25
+ // Return a copy to prevent mutation
26
+ return data.slice(0);
27
+ }
28
+
29
+ async write(key: string, data: ArrayBuffer | Uint8Array): Promise<void> {
30
+ // Store a copy to prevent external mutation
31
+ const buffer = data instanceof ArrayBuffer ? data : data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength) as ArrayBuffer;
32
+ this.storage.set(key, buffer.slice(0));
33
+ }
34
+
35
+ async append(key: string, data: ArrayBuffer | Uint8Array): Promise<void> {
36
+ const appendData = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
37
+
38
+ const existing = this.storage.get(key);
39
+ if (existing) {
40
+ const existingArray = new Uint8Array(existing);
41
+ const combined = new Uint8Array(existingArray.length + appendData.length);
42
+ combined.set(existingArray, 0);
43
+ combined.set(appendData, existingArray.length);
44
+ this.storage.set(key, combined.buffer.slice(0) as ArrayBuffer);
45
+ } else {
46
+ this.storage.set(key, appendData.buffer.slice(appendData.byteOffset, appendData.byteOffset + appendData.byteLength) as ArrayBuffer);
47
+ }
48
+ }
49
+
50
+ async delete(key: string): Promise<void> {
51
+ this.storage.delete(key);
52
+ }
53
+
54
+ async exists(key: string): Promise<boolean> {
55
+ return this.storage.has(key) || this.directories.has(key);
56
+ }
57
+
58
+ async list(prefix?: string): Promise<string[]> {
59
+ const keys: string[] = [];
60
+ for (const key of this.storage.keys()) {
61
+ if (!prefix || key.startsWith(prefix)) {
62
+ keys.push(key);
63
+ }
64
+ }
65
+ return keys;
66
+ }
67
+
68
+ async mkdir(path: string): Promise<void> {
69
+ this.directories.add(path);
70
+ // Also add parent directories
71
+ const parts = path.split('/');
72
+ for (let i = 1; i <= parts.length; i++) {
73
+ this.directories.add(parts.slice(0, i).join('/'));
74
+ }
75
+ }
76
+
77
+ /**
78
+ * Clear all data
79
+ */
80
+ clear(): void {
81
+ this.storage.clear();
82
+ this.directories.clear();
83
+ this.directories.add('');
84
+ }
85
+
86
+ /**
87
+ * Get memory usage statistics
88
+ */
89
+ getStats(): { keyCount: number; totalBytes: number } {
90
+ let totalBytes = 0;
91
+ for (const value of this.storage.values()) {
92
+ totalBytes += value.byteLength;
93
+ }
94
+ return {
95
+ keyCount: this.storage.size,
96
+ totalBytes,
97
+ };
98
+ }
99
+
100
+ /**
101
+ * Export all data as a serializable object
102
+ * Useful for debugging or saving state
103
+ */
104
+ export(): Record<string, ArrayBuffer> {
105
+ const result: Record<string, ArrayBuffer> = {};
106
+ for (const [key, value] of this.storage.entries()) {
107
+ result[key] = value.slice(0);
108
+ }
109
+ return result;
110
+ }
111
+
112
+ /**
113
+ * Import data from a serializable object
114
+ */
115
+ import(data: Record<string, ArrayBuffer>): void {
116
+ for (const [key, value] of Object.entries(data)) {
117
+ this.storage.set(key, value.slice(0));
118
+ }
119
+ }
120
+ }