@a0n/aeon 5.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/LICENSE +15 -0
  2. package/README.md +199 -0
  3. package/dist/CryptoProvider-SLWjqByk.d.cts +407 -0
  4. package/dist/CryptoProvider-SLWjqByk.d.ts +407 -0
  5. package/dist/compression/index.cjs +1445 -0
  6. package/dist/compression/index.cjs.map +1 -0
  7. package/dist/compression/index.d.cts +451 -0
  8. package/dist/compression/index.d.ts +451 -0
  9. package/dist/compression/index.js +1426 -0
  10. package/dist/compression/index.js.map +1 -0
  11. package/dist/core/index.cjs +4 -0
  12. package/dist/core/index.cjs.map +1 -0
  13. package/dist/core/index.d.cts +212 -0
  14. package/dist/core/index.d.ts +212 -0
  15. package/dist/core/index.js +3 -0
  16. package/dist/core/index.js.map +1 -0
  17. package/dist/crypto/index.cjs +130 -0
  18. package/dist/crypto/index.cjs.map +1 -0
  19. package/dist/crypto/index.d.cts +56 -0
  20. package/dist/crypto/index.d.ts +56 -0
  21. package/dist/crypto/index.js +124 -0
  22. package/dist/crypto/index.js.map +1 -0
  23. package/dist/distributed/index.cjs +2586 -0
  24. package/dist/distributed/index.cjs.map +1 -0
  25. package/dist/distributed/index.d.cts +1005 -0
  26. package/dist/distributed/index.d.ts +1005 -0
  27. package/dist/distributed/index.js +2580 -0
  28. package/dist/distributed/index.js.map +1 -0
  29. package/dist/index.cjs +10953 -0
  30. package/dist/index.cjs.map +1 -0
  31. package/dist/index.d.cts +1953 -0
  32. package/dist/index.d.ts +1953 -0
  33. package/dist/index.js +10828 -0
  34. package/dist/index.js.map +1 -0
  35. package/dist/offline/index.cjs +419 -0
  36. package/dist/offline/index.cjs.map +1 -0
  37. package/dist/offline/index.d.cts +148 -0
  38. package/dist/offline/index.d.ts +148 -0
  39. package/dist/offline/index.js +415 -0
  40. package/dist/offline/index.js.map +1 -0
  41. package/dist/optimization/index.cjs +800 -0
  42. package/dist/optimization/index.cjs.map +1 -0
  43. package/dist/optimization/index.d.cts +347 -0
  44. package/dist/optimization/index.d.ts +347 -0
  45. package/dist/optimization/index.js +790 -0
  46. package/dist/optimization/index.js.map +1 -0
  47. package/dist/persistence/index.cjs +207 -0
  48. package/dist/persistence/index.cjs.map +1 -0
  49. package/dist/persistence/index.d.cts +95 -0
  50. package/dist/persistence/index.d.ts +95 -0
  51. package/dist/persistence/index.js +204 -0
  52. package/dist/persistence/index.js.map +1 -0
  53. package/dist/presence/index.cjs +489 -0
  54. package/dist/presence/index.cjs.map +1 -0
  55. package/dist/presence/index.d.cts +283 -0
  56. package/dist/presence/index.d.ts +283 -0
  57. package/dist/presence/index.js +485 -0
  58. package/dist/presence/index.js.map +1 -0
  59. package/dist/types-CMxO7QF0.d.cts +33 -0
  60. package/dist/types-CMxO7QF0.d.ts +33 -0
  61. package/dist/utils/index.cjs +64 -0
  62. package/dist/utils/index.cjs.map +1 -0
  63. package/dist/utils/index.d.cts +38 -0
  64. package/dist/utils/index.d.ts +38 -0
  65. package/dist/utils/index.js +57 -0
  66. package/dist/utils/index.js.map +1 -0
  67. package/dist/versioning/index.cjs +1164 -0
  68. package/dist/versioning/index.cjs.map +1 -0
  69. package/dist/versioning/index.d.cts +537 -0
  70. package/dist/versioning/index.d.ts +537 -0
  71. package/dist/versioning/index.js +1159 -0
  72. package/dist/versioning/index.js.map +1 -0
  73. package/package.json +194 -0
@@ -0,0 +1,1426 @@
1
// Bundler-generated shim for dynamic require:
// - when a real CommonJS `require` exists it is used directly;
// - otherwise a Proxy wraps the fallback so property reads re-check
//   for a late-appearing `require`;
// - calling the fallback without `require` available throws.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
7
+
8
// src/utils/logger.ts
// Console-backed default logger; every entry is tagged with an
// [AEON:<LEVEL>] prefix so library output is easy to filter.
var consoleLogger = {
  debug(...rest) {
    console.debug("[AEON:DEBUG]", ...rest);
  },
  info(...rest) {
    console.info("[AEON:INFO]", ...rest);
  },
  warn(...rest) {
    console.warn("[AEON:WARN]", ...rest);
  },
  error(...rest) {
    console.error("[AEON:ERROR]", ...rest);
  }
};
// Active logger; defaults to the console-backed implementation above.
var currentLogger = consoleLogger;
/**
 * Return the logger currently in use by the library.
 * @returns {{debug: Function, info: Function, warn: Function, error: Function}}
 */
function getLogger() {
  return currentLogger;
}
27
+
28
// src/compression/CompressionEngine.ts
var logger = getLogger();
/**
 * Compresses/decompresses byte payloads with the platform's native
 * CompressionStream/DecompressionStream, falling back to a "none"
 * pass-through when native streams are unavailable. Also provides
 * chunking helpers for transmission and running statistics.
 */
var CompressionEngine = class {
  // Cumulative counters reported by getStats().
  stats = {
    totalCompressed: 0,
    totalDecompressed: 0,
    totalOriginalBytes: 0,
    totalCompressedBytes: 0,
    averageCompressionRatio: 0,
    compressionTimeMs: 0,
    decompressionTimeMs: 0
  };
  preferredAlgorithm = "gzip";
  /**
   * @param preferredAlgorithm Name passed to CompressionStream
   *   (e.g. "gzip"). Defaults to "gzip".
   */
  constructor(preferredAlgorithm = "gzip") {
    this.preferredAlgorithm = preferredAlgorithm;
    logger.debug("[CompressionEngine] Initialized", {
      algorithm: preferredAlgorithm,
      supportsNative: this.supportsNativeCompression()
    });
  }
  /**
   * Check if native compression is available
   */
  supportsNativeCompression() {
    return typeof CompressionStream !== "undefined" && typeof DecompressionStream !== "undefined";
  }
  /**
   * Compress data (string or Uint8Array) into a batch record.
   * Falls back to storing the raw bytes with algorithm "none" when
   * native compression is unavailable or fails.
   */
  async compress(data) {
    const startTime = performance.now();
    const inputData = typeof data === "string" ? new TextEncoder().encode(data) : data;
    const originalSize = inputData.byteLength;
    let compressed;
    let algorithm = this.preferredAlgorithm;
    if (this.supportsNativeCompression()) {
      try {
        compressed = await this.compressNative(
          inputData,
          this.preferredAlgorithm
        );
      } catch (error) {
        logger.warn(
          "[CompressionEngine] Native compression failed, using fallback",
          error
        );
        compressed = inputData;
        algorithm = "none";
      }
    } else {
      compressed = inputData;
      algorithm = "none";
    }
    // Ratio is the fraction of bytes saved (0 for an empty input).
    const compressionRatio = originalSize > 0 ? 1 - compressed.byteLength / originalSize : 0;
    const batch = {
      id: `batch-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      compressed,
      originalSize,
      compressedSize: compressed.byteLength,
      compressionRatio,
      algorithm,
      timestamp: Date.now()
    };
    const elapsed = performance.now() - startTime;
    this.stats.totalCompressed++;
    this.stats.totalOriginalBytes += originalSize;
    this.stats.totalCompressedBytes += compressed.byteLength;
    this.stats.compressionTimeMs += elapsed;
    this.updateAverageRatio();
    logger.debug("[CompressionEngine] Compressed", {
      original: originalSize,
      compressed: compressed.byteLength,
      ratio: (compressionRatio * 100).toFixed(1) + "%",
      algorithm,
      timeMs: elapsed.toFixed(2)
    });
    return batch;
  }
  /**
   * Decompress a batch produced by compress().
   * @throws when the batch needs native decompression and it is
   *   unavailable, or when native decompression fails.
   */
  async decompress(batch) {
    const startTime = performance.now();
    let decompressed;
    if (batch.algorithm === "none") {
      decompressed = batch.compressed;
    } else if (this.supportsNativeCompression()) {
      try {
        decompressed = await this.decompressNative(
          batch.compressed,
          batch.algorithm
        );
      } catch (error) {
        logger.warn("[CompressionEngine] Native decompression failed", error);
        throw error;
      }
    } else {
      throw new Error("Native decompression not available");
    }
    const elapsed = performance.now() - startTime;
    this.stats.totalDecompressed++;
    this.stats.decompressionTimeMs += elapsed;
    logger.debug("[CompressionEngine] Decompressed", {
      compressed: batch.compressedSize,
      decompressed: decompressed.byteLength,
      algorithm: batch.algorithm,
      timeMs: elapsed.toFixed(2)
    });
    return decompressed;
  }
  /**
   * Pipe `data` through a (de)compression transform stream and collect
   * the transformed bytes. Shared by compressNative/decompressNative
   * (previously duplicated inline in both).
   */
  async pipeThroughStream(stream, data) {
    const writer = stream.writable.getWriter();
    const reader = stream.readable.getReader();
    // Start the write+close without awaiting yet: awaiting before
    // draining the readable side can stall once internal buffers fill
    // on large inputs.
    const writeDone = writer.write(
      new Uint8Array(
        data.buffer,
        data.byteOffset,
        data.byteLength
      )
    ).then(() => writer.close());
    const chunks = [];
    let done = false;
    while (!done) {
      const result = await reader.read();
      done = result.done;
      if (result.value) {
        chunks.push(result.value);
      }
    }
    // Surface write/close failures; previously these promises were
    // left floating and surfaced as unhandled rejections.
    await writeDone;
    const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
    const combined = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of chunks) {
      combined.set(chunk, offset);
      offset += chunk.length;
    }
    return combined;
  }
  /**
   * Compress using native CompressionStream
   */
  async compressNative(data, algorithm) {
    return this.pipeThroughStream(new CompressionStream(algorithm), data);
  }
  /**
   * Decompress using native DecompressionStream
   */
  async decompressNative(data, algorithm) {
    return this.pipeThroughStream(new DecompressionStream(algorithm), data);
  }
  /**
   * Split compressed batch into chunks for transmission
   */
  splitIntoChunks(batch, chunkSize = 64 * 1024) {
    const chunks = [];
    const data = batch.compressed;
    const total = Math.ceil(data.byteLength / chunkSize);
    for (let i = 0; i < total; i++) {
      const start = i * chunkSize;
      const end = Math.min(start + chunkSize, data.byteLength);
      const chunkData = data.slice(start, end);
      chunks.push({
        chunkId: `${batch.id}-chunk-${i}`,
        batchId: batch.id,
        data: chunkData,
        index: i,
        total,
        checksum: this.simpleChecksum(chunkData)
      });
    }
    return chunks;
  }
  /**
   * Reassemble chunks into the original compressed byte stream.
   * @throws when no chunks are given or some are missing.
   */
  reassembleChunks(chunks) {
    const sorted = [...chunks].sort((a, b) => a.index - b.index);
    if (sorted.length === 0) {
      throw new Error("Cannot reassemble: no chunks provided");
    }
    const total = sorted[0].total;
    if (sorted.length !== total) {
      throw new Error(
        `Missing chunks: got ${sorted.length}, expected ${total}`
      );
    }
    const totalLength = sorted.reduce(
      (sum, chunk) => sum + chunk.data.length,
      0
    );
    const combined = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of sorted) {
      combined.set(chunk.data, offset);
      offset += chunk.data.length;
    }
    return combined;
  }
  /**
   * Simple checksum for chunk verification: 31-multiplier rolling
   * 32-bit hash, rendered as lowercase hex.
   */
  simpleChecksum(data) {
    let hash = 0;
    for (let i = 0; i < data.length; i++) {
      hash = (hash << 5) - hash + data[i] | 0;
    }
    return (hash >>> 0).toString(16);
  }
  /**
   * Update average compression ratio
   */
  updateAverageRatio() {
    if (this.stats.totalOriginalBytes > 0) {
      this.stats.averageCompressionRatio = 1 - this.stats.totalCompressedBytes / this.stats.totalOriginalBytes;
    }
  }
  /**
   * Get statistics (shallow copy; safe for callers to mutate)
   */
  getStats() {
    return { ...this.stats };
  }
  /**
   * Reset statistics
   */
  resetStats() {
    this.stats = {
      totalCompressed: 0,
      totalDecompressed: 0,
      totalOriginalBytes: 0,
      totalCompressedBytes: 0,
      averageCompressionRatio: 0,
      compressionTimeMs: 0,
      decompressionTimeMs: 0
    };
  }
};
// Lazily-created module-level singleton.
var compressionEngineInstance = null;
/** Get (and create on first use) the shared CompressionEngine. */
function getCompressionEngine() {
  if (!compressionEngineInstance) {
    compressionEngineInstance = new CompressionEngine();
  }
  return compressionEngineInstance;
}
/** Drop the shared instance so the next getCompressionEngine() rebuilds it. */
function resetCompressionEngine() {
  compressionEngineInstance = null;
}
301
+
302
// src/compression/DeltaSyncOptimizer.ts
var logger2 = getLogger();
/**
 * Shrinks sync payloads by sending only the fields of an operation
 * that changed since the last version seen (a "delta"), falling back
 * to a "full" record for first-seen operations or when the delta
 * would exceed a configurable size threshold.
 */
var DeltaSyncOptimizer = class _DeltaSyncOptimizer {
  // Upper bound on remembered operations; oldest entries are evicted
  // in insertion order once exceeded.
  static MAX_HISTORY_SIZE = 1e4;
  // Last-known full operation per operation id (insertion-ordered Map).
  operationHistory = /* @__PURE__ */ new Map();
  stats = {
    totalOperations: 0,
    totalFull: 0,
    totalDelta: 0,
    totalOriginalSize: 0,
    totalDeltaSize: 0,
    averageReductionPercent: 0,
    lastSyncTime: 0,
    fullOperationThreshold: 1e3
    // Force full if delta > 1KB
  };
  /**
   * @param fullOperationThreshold Delta size (bytes) above which a
   *   full record is sent instead. Defaults to 1000.
   */
  constructor(fullOperationThreshold = 1e3) {
    this.stats.fullOperationThreshold = fullOperationThreshold;
    logger2.debug("[DeltaSyncOptimizer] Initialized", {
      threshold: fullOperationThreshold
    });
  }
  /**
   * Remember `operation` as the latest version of its id and evict
   * the oldest history entries while MAX_HISTORY_SIZE is exceeded.
   * (This set+evict sequence was previously duplicated inline in
   * three places.)
   */
  rememberOperation(operation) {
    this.operationHistory.set(operation.id, operation);
    while (this.operationHistory.size > _DeltaSyncOptimizer.MAX_HISTORY_SIZE) {
      const firstKey = this.operationHistory.keys().next().value;
      if (firstKey === void 0) break;
      this.operationHistory.delete(firstKey);
    }
  }
  /** Byte length of `value` serialized as UTF-8 JSON. */
  jsonByteLength(value) {
    return new TextEncoder().encode(JSON.stringify(value)).byteLength;
  }
  /**
   * Compute delta for single operation
   */
  computeDelta(operation) {
    const originalSize = this.jsonByteLength(operation);
    const previous = this.operationHistory.get(operation.id);
    if (!previous) {
      // First time this id is seen: nothing to diff against, so emit
      // a full record.
      const delta = {
        id: `delta-${Date.now()}-${Math.random().toString(36).slice(2)}`,
        type: "full",
        operationId: operation.id,
        operationType: operation.type,
        sessionId: operation.sessionId,
        timestamp: Date.now(),
        fullData: operation.data,
        priority: operation.priority
      };
      this.stats.totalOperations++;
      this.stats.totalFull++;
      this.stats.totalOriginalSize += originalSize;
      this.stats.totalDeltaSize += this.jsonByteLength(delta);
      this.rememberOperation(operation);
      return delta;
    }
    // Diff new data against the previous version: changed/added keys
    // carry their new value; removed keys are encoded as null with a
    // "<key>:deleted" marker in the change mask.
    const changes = {};
    const changeMask = [];
    let hasMeaningfulChanges = false;
    for (const [key, value] of Object.entries(operation.data)) {
      const oldValue = previous.data[key];
      if (!this.deepEqual(value, oldValue)) {
        changes[key] = value;
        changeMask.push(key);
        hasMeaningfulChanges = true;
      }
    }
    for (const key of Object.keys(previous.data)) {
      if (!(key in operation.data)) {
        changes[key] = null;
        changeMask.push(`${key}:deleted`);
        hasMeaningfulChanges = true;
      }
    }
    const deltaData = {
      id: `delta-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      type: "delta",
      operationId: operation.id,
      operationType: operation.type,
      sessionId: operation.sessionId,
      timestamp: Date.now(),
      changes: hasMeaningfulChanges ? changes : void 0,
      changeMask: hasMeaningfulChanges ? changeMask : void 0,
      priority: operation.priority
    };
    const deltaSize = this.jsonByteLength(deltaData);
    // A delta that outgrew the threshold is cheaper to send in full.
    const finalDelta = deltaSize > this.stats.fullOperationThreshold ? {
      ...deltaData,
      type: "full",
      fullData: operation.data,
      changes: void 0,
      changeMask: void 0
    } : deltaData;
    this.stats.totalOperations++;
    if (finalDelta.type === "full") {
      this.stats.totalFull++;
    } else {
      this.stats.totalDelta++;
    }
    this.stats.totalOriginalSize += originalSize;
    this.stats.totalDeltaSize += deltaSize;
    this.rememberOperation(operation);
    return finalDelta;
  }
  /**
   * Compute deltas for batch of operations
   */
  computeBatchDeltas(operations) {
    const deltas = operations.map((op) => this.computeDelta(op));
    const totalOriginalSize = operations.reduce(
      (sum, op) => sum + this.jsonByteLength(op),
      0
    );
    const totalDeltaSize = deltas.reduce(
      (sum, delta) => sum + this.jsonByteLength(delta),
      0
    );
    const reductionPercent = totalOriginalSize > 0 ? Math.round(
      (totalOriginalSize - totalDeltaSize) / totalOriginalSize * 100
    ) : 0;
    const batch = {
      batchId: `batch-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      operations: deltas,
      timestamp: Date.now(),
      totalOriginalSize,
      totalDeltaSize,
      reductionPercent
    };
    logger2.debug("[DeltaSyncOptimizer] Batch computed", {
      operations: operations.length,
      reduction: reductionPercent,
      size: totalDeltaSize
    });
    return batch;
  }
  /**
   * Decompress delta operation back to full operation.
   * When no history exists for a delta-typed record the changes are
   * treated as the whole payload (best effort, logged as a warning).
   */
  decompressDelta(delta) {
    if (delta.type === "full") {
      return {
        id: delta.operationId,
        type: delta.operationType,
        sessionId: delta.sessionId,
        data: delta.fullData || {},
        status: "pending",
        createdAt: delta.timestamp
      };
    }
    const previous = this.operationHistory.get(delta.operationId);
    if (!previous) {
      logger2.warn("[DeltaSyncOptimizer] Cannot decompress - no history", {
        operationId: delta.operationId
      });
      return {
        id: delta.operationId,
        type: delta.operationType,
        sessionId: delta.sessionId,
        data: delta.changes || {},
        status: "pending",
        createdAt: delta.timestamp
      };
    }
    // Overlay the changed fields on the previous version, then strip
    // keys the delta marked as deleted (encoded as null).
    const reconstructed = {
      ...previous,
      data: {
        ...previous.data,
        ...delta.changes || {}
      }
    };
    if (delta.changes) {
      for (const [key, value] of Object.entries(delta.changes)) {
        if (value === null) {
          delete reconstructed.data[key];
        }
      }
    }
    return reconstructed;
  }
  /**
   * Update history after successful sync
   */
  updateHistory(operations) {
    for (const op of operations) {
      this.rememberOperation(op);
    }
    logger2.debug("[DeltaSyncOptimizer] History updated", {
      count: operations.length,
      totalHistorySize: this.operationHistory.size
    });
  }
  /**
   * Clear history for specific operations
   */
  clearHistory(operationIds) {
    for (const id of operationIds) {
      this.operationHistory.delete(id);
    }
    logger2.debug("[DeltaSyncOptimizer] History cleared", {
      cleared: operationIds.length,
      remaining: this.operationHistory.size
    });
  }
  /**
   * Get current performance statistics (refreshes the running
   * average before copying).
   */
  getStats() {
    if (this.stats.totalOperations > 0) {
      this.stats.averageReductionPercent = Math.round(
        (this.stats.totalOriginalSize - this.stats.totalDeltaSize) / this.stats.totalOriginalSize * 100
      );
    }
    return { ...this.stats };
  }
  /**
   * Reset statistics (the configured threshold is preserved)
   */
  resetStats() {
    this.stats = {
      totalOperations: 0,
      totalFull: 0,
      totalDelta: 0,
      totalOriginalSize: 0,
      totalDeltaSize: 0,
      averageReductionPercent: 0,
      lastSyncTime: 0,
      fullOperationThreshold: this.stats.fullOperationThreshold
    };
    logger2.debug("[DeltaSyncOptimizer] Stats reset");
  }
  /**
   * Set the full operation threshold
   */
  setFullOperationThreshold(bytes) {
    this.stats.fullOperationThreshold = bytes;
    logger2.debug("[DeltaSyncOptimizer] Threshold updated", { bytes });
  }
  /**
   * Get history size for memory monitoring
   */
  getHistorySize() {
    return this.operationHistory.size;
  }
  /**
   * Get memory footprint estimate (total JSON bytes of the history)
   */
  getMemoryEstimate() {
    let totalBytes = 0;
    for (const op of this.operationHistory.values()) {
      totalBytes += this.jsonByteLength(op);
    }
    return totalBytes;
  }
  /**
   * Deep equality check for nested objects. Note: compares by own
   * enumerable keys only, so it does not distinguish an array from a
   * plain object with the same index keys.
   */
  deepEqual(a, b) {
    if (a === b) return true;
    if (a == null || b == null) return false;
    if (typeof a !== "object" || typeof b !== "object") return false;
    const aObj = a;
    const bObj = b;
    const aKeys = Object.keys(aObj);
    const bKeys = Object.keys(bObj);
    if (aKeys.length !== bKeys.length) return false;
    for (const key of aKeys) {
      if (!this.deepEqual(aObj[key], bObj[key])) {
        return false;
      }
    }
    return true;
  }
};
// Lazily-created module-level singleton.
var deltaSyncInstance = null;
/**
 * Get (and create on first use) the shared DeltaSyncOptimizer.
 * `threshold` only applies on the call that creates the instance.
 */
function getDeltaSyncOptimizer(threshold) {
  if (!deltaSyncInstance) {
    deltaSyncInstance = new DeltaSyncOptimizer(threshold);
  }
  return deltaSyncInstance;
}
/** Drop the shared instance so the next get call rebuilds it. */
function resetDeltaSyncOptimizer() {
  deltaSyncInstance = null;
}
594
+
595
// src/compression/codecs.ts
/**
 * Identity codec (id 0): passes bytes through untouched. Used when
 * no transform is wanted.
 */
var RawCodec = class {
  id = 0;
  name = "raw";
  /** Return the input buffer unchanged. */
  encode(data) {
    return data;
  }
  /** Return the input buffer unchanged. */
  decode(data) {
    return data;
  }
};
606
/**
 * Byte-oriented run-length codec (id 1). Each run is emitted as
 * three bytes: the value followed by a 16-bit big-endian run length
 * (runs are capped at 65535).
 */
var RLECodec = class {
  id = 1;
  name = "rle";
  /** Encode `data`; worst case grows to 3 bytes per input byte. */
  encode(data) {
    if (data.length === 0) return new Uint8Array(0);
    const out = new Uint8Array(data.length * 3);
    let w = 0;
    let pos = 0;
    while (pos < data.length) {
      const value = data[pos];
      let run = 1;
      while (pos + run < data.length && data[pos + run] === value && run < 65535) {
        run += 1;
      }
      out[w] = value;
      out[w + 1] = (run >>> 8) & 255;
      out[w + 2] = run & 255;
      w += 3;
      pos += run;
    }
    return out.subarray(0, w);
  }
  /** Expand the runs back into exactly `originalSize` bytes. */
  decode(data, originalSize) {
    const out = new Uint8Array(originalSize);
    let r = 0;
    let w = 0;
    while (r < data.length && w < originalSize) {
      const value = data[r];
      const run = (data[r + 1] << 8) | data[r + 2];
      r += 3;
      const stop = Math.min(w + run, originalSize);
      out.fill(value, w, stop);
      w = stop;
    }
    return out;
  }
};
641
/**
 * Delta codec (id 2): stores each byte as the difference (mod 256)
 * from its predecessor. Useful ahead of other codecs on smoothly
 * varying data.
 */
var DeltaCodec = class {
  id = 2;
  name = "delta";
  /** First byte is kept verbatim; the rest become successive differences. */
  encode(data) {
    if (data.length === 0) return new Uint8Array(0);
    const out = new Uint8Array(data.length);
    let prev = data[0];
    out[0] = prev;
    for (let k = 1; k < data.length; k++) {
      out[k] = (data[k] - prev) & 255;
      prev = data[k];
    }
    return out;
  }
  /** Rebuild the original bytes by running-summing the differences. */
  decode(data, originalSize) {
    const out = new Uint8Array(originalSize);
    if (data.length === 0) return out;
    let acc = data[0];
    out[0] = acc;
    for (let k = 1; k < data.length && k < originalSize; k++) {
      acc = (acc + data[k]) & 255;
      out[k] = acc;
    }
    return out;
  }
};
663
/**
 * LZ77 codec (id 3) with a 4 KiB sliding window. Output is grouped
 * into 8-item blocks, each preceded by a control byte whose bits flag
 * "back-reference" (1) vs "literal" (0). A back-reference is two
 * bytes: 4-bit length code (length - MIN_MATCH) in the high nibble of
 * byte 1, and a 12-bit offset split across byte 1's low nibble and
 * byte 2.
 */
var LZ77Codec = class _LZ77Codec {
  id = 3;
  name = "lz77";
  static WINDOW_SIZE = 4096;
  static MIN_MATCH = 3;
  static MAX_MATCH = 18;
  encode(data) {
    if (data.length === 0) return new Uint8Array(0);
    // Worst case: all literals plus one control byte per 8 items.
    const output = new Uint8Array(data.length + Math.ceil(data.length / 8) + 16);
    let writePos = 0;
    let readPos = 0;
    while (readPos < data.length) {
      const controlPos = writePos++;
      let controlByte = 0;
      for (let bit = 0; bit < 8 && readPos < data.length; bit++) {
        // BUGFIX: only 12 bits are available for the offset, so the
        // farthest representable distance is WINDOW_SIZE - 1 (4095).
        // The previous window start of readPos - WINDOW_SIZE allowed
        // an offset of exactly 4096, which encoded as 0 and corrupted
        // the stream on decode.
        const windowStart = Math.max(0, readPos - (_LZ77Codec.WINDOW_SIZE - 1));
        let bestOffset = 0;
        let bestLength = 0;
        // Greedy search for the longest match in the window.
        for (let j = windowStart; j < readPos; j++) {
          let matchLen = 0;
          while (matchLen < _LZ77Codec.MAX_MATCH && readPos + matchLen < data.length && data[j + matchLen] === data[readPos + matchLen]) {
            matchLen++;
          }
          if (matchLen >= _LZ77Codec.MIN_MATCH && matchLen > bestLength) {
            bestOffset = readPos - j;
            bestLength = matchLen;
          }
        }
        if (bestLength >= _LZ77Codec.MIN_MATCH) {
          controlByte |= 1 << bit;
          const lengthCode = bestLength - _LZ77Codec.MIN_MATCH;
          output[writePos++] = bestOffset >>> 8 & 15 | lengthCode << 4;
          output[writePos++] = bestOffset & 255;
          readPos += bestLength;
        } else {
          output[writePos++] = data[readPos++];
        }
      }
      output[controlPos] = controlByte;
    }
    return output.subarray(0, writePos);
  }
  /** Expand an encoded stream back into `originalSize` bytes. */
  decode(data, originalSize) {
    const output = new Uint8Array(originalSize);
    let readPos = 0;
    let writePos = 0;
    while (readPos < data.length && writePos < originalSize) {
      const controlByte = data[readPos++];
      for (let bit = 0; bit < 8 && readPos < data.length && writePos < originalSize; bit++) {
        if (controlByte & 1 << bit) {
          // Two-byte back-reference (see class doc for the layout).
          const byte1 = data[readPos++];
          const byte2 = data[readPos++];
          const offset = (byte1 & 15) << 8 | byte2;
          const length = (byte1 >>> 4) + _LZ77Codec.MIN_MATCH;
          const srcStart = writePos - offset;
          // Byte-by-byte copy so overlapping runs replicate correctly.
          for (let k = 0; k < length && writePos < originalSize; k++) {
            output[writePos++] = output[srcStart + k];
          }
        } else {
          output[writePos++] = data[readPos++];
        }
      }
    }
    return output;
  }
};
729
/**
 * Brotli codec (id 4) backed by Node's zlib module, loaded lazily
 * through the bundler's dynamic-require shim.
 */
var BrotliCodec = class {
  id = 4;
  name = "brotli";
  quality;
  /** @param quality Brotli quality setting (default 4). */
  constructor(quality = 4) {
    this.quality = quality;
  }
  /**
   * Compress with brotli. If zlib cannot be loaded (e.g. a non-Node
   * runtime) the input is returned untouched as a best-effort fallback.
   */
  encode(data) {
    try {
      const zlib = __require("zlib");
      const options = {
        params: {
          [zlib.constants.BROTLI_PARAM_QUALITY]: this.quality
        }
      };
      return new Uint8Array(zlib.brotliCompressSync(Buffer.from(data), options));
    } catch {
      return data;
    }
  }
  /** Decompress; throws when zlib is unavailable or data is not brotli. */
  decode(data) {
    const zlib = __require("zlib");
    return new Uint8Array(zlib.brotliDecompressSync(Buffer.from(data)));
  }
};
753
/**
 * Gzip codec (id 5) backed by Node's zlib module, loaded lazily
 * through the bundler's dynamic-require shim.
 */
var GzipCodec = class {
  id = 5;
  name = "gzip";
  level;
  /** @param level zlib compression level (default 6). */
  constructor(level = 6) {
    this.level = level;
  }
  /**
   * Compress with gzip. If zlib cannot be loaded (e.g. a non-Node
   * runtime) the input is returned untouched as a best-effort fallback.
   */
  encode(data) {
    try {
      const zlib = __require("zlib");
      const options = {
        level: this.level
      };
      return new Uint8Array(zlib.gzipSync(Buffer.from(data), options));
    } catch {
      return data;
    }
  }
  /** Decompress; throws when zlib is unavailable or data is not gzip. */
  decode(data) {
    const zlib = __require("zlib");
    return new Uint8Array(zlib.gunzipSync(Buffer.from(data)));
  }
};
775
/**
 * Canonical Huffman codec (id 6). The encoded layout is:
 *   bytes 0-255  : code length (in bits) per byte symbol
 *   bytes 256-259: total payload bit count (big-endian uint32)
 *   bytes 260+   : MSB-first packed code bits
 * encode() returns the input unchanged when compression is skipped
 * (input < 32 bytes, <= 1 distinct symbol, or a code would exceed
 * 15 bits); the caller is expected to detect pass-through externally
 * (e.g. by comparing sizes), since decode() only special-cases
 * inputs shorter than the 260-byte header.
 */
var HuffmanCodec = class {
  id = 6;
  name = "huffman";
  encode(data) {
    // Tiny inputs cannot amortize the 260-byte header.
    if (data.length < 32) return data;
    // Histogram of byte values.
    const freq = new Uint32Array(256);
    for (let i = 0; i < data.length; i++) freq[data[i]]++;
    const symbols = [];
    for (let i = 0; i < 256; i++) {
      if (freq[i] > 0) symbols.push({ sym: i, freq: freq[i] });
    }
    // A single-symbol alphabet has no meaningful code tree.
    if (symbols.length <= 1) return data;
    // Build the Huffman tree: leaves first, then repeatedly merge the
    // two lowest-frequency entries. `heap` is kept sorted by frequency
    // via shift/splice (O(n^2) overall, acceptable for 256 symbols).
    const nodes = symbols.map((s) => ({
      freq: s.freq,
      sym: s.sym,
      left: -1,
      right: -1
    }));
    const heap = [...nodes];
    heap.sort((a, b) => a.freq - b.freq);
    while (heap.length > 1) {
      const left = heap.shift();
      const right = heap.shift();
      const leftIdx = nodes.indexOf(left);
      const rightIdx = nodes.indexOf(right);
      const parent = {
        freq: left.freq + right.freq,
        sym: -1,
        left: leftIdx,
        right: rightIdx
      };
      nodes.push(parent);
      // Insert the merged node back in frequency order.
      let idx = 0;
      while (idx < heap.length && heap[idx].freq <= parent.freq) idx++;
      heap.splice(idx, 0, parent);
    }
    // Walk the tree to collect the bit length of each symbol's code.
    const codeLengths = new Uint8Array(256);
    const root = nodes.length - 1;
    const dfs = (nodeIdx, depth) => {
      const node = nodes[nodeIdx];
      if (node.left === -1 && node.right === -1) {
        // `depth || 1` guards the degenerate root-is-leaf case.
        codeLengths[node.sym] = depth || 1;
        return;
      }
      if (node.left >= 0) dfs(node.left, depth + 1);
      if (node.right >= 0) dfs(node.right, depth + 1);
    };
    dfs(root, 0);
    // Codes longer than 15 bits don't fit this format; pass through.
    for (let i = 0; i < 256; i++) {
      if (codeLengths[i] > 15) return data;
    }
    // Assign canonical codes: symbols ordered by (length, value), each
    // next code = (previous + 1) shifted up by the length difference.
    const sorted = [];
    for (let i = 0; i < 256; i++) {
      if (codeLengths[i] > 0) sorted.push({ sym: i, len: codeLengths[i] });
    }
    sorted.sort((a, b) => a.len - b.len || a.sym - b.sym);
    const codes = new Uint32Array(256);
    let code = 0;
    let prevLen = sorted[0].len;
    codes[sorted[0].sym] = 0;
    for (let i = 1; i < sorted.length; i++) {
      code = code + 1 << sorted[i].len - prevLen;
      codes[sorted[i].sym] = code;
      prevLen = sorted[i].len;
    }
    // Size the output: header (256 lengths + 4-byte bit count) plus
    // the packed payload bits.
    let totalBits = 0;
    for (let i = 0; i < data.length; i++) totalBits += codeLengths[data[i]];
    const totalBytes = Math.ceil(totalBits / 8);
    const headerSize = 260;
    const output = new Uint8Array(headerSize + totalBytes);
    output.set(codeLengths, 0);
    new DataView(output.buffer).setUint32(256, totalBits);
    // Pack each symbol's code MSB-first into the bit stream.
    let bitPos = 0;
    for (let i = 0; i < data.length; i++) {
      const sym = data[i];
      const codeVal = codes[sym];
      const codeLen = codeLengths[sym];
      for (let b = codeLen - 1; b >= 0; b--) {
        if (codeVal >>> b & 1) {
          const byteIdx = headerSize + (bitPos >>> 3);
          output[byteIdx] |= 1 << 7 - (bitPos & 7);
        }
        bitPos++;
      }
    }
    return output;
  }
  decode(data, originalSize) {
    // Anything shorter than the header was stored as pass-through.
    if (data.length < 260) return data.subarray(0, originalSize);
    const codeLengths = data.subarray(0, 256);
    const totalBits = new DataView(
      data.buffer,
      data.byteOffset + 256,
      4
    ).getUint32(0);
    // Rebuild the same canonical code assignment as encode().
    const sorted = [];
    for (let i = 0; i < 256; i++) {
      if (codeLengths[i] > 0) sorted.push({ sym: i, len: codeLengths[i] });
    }
    sorted.sort((a, b) => a.len - b.len || a.sym - b.sym);
    // Decoding trie: each node is [zero-child, one-child, symbol],
    // with -1 meaning "absent" / "not a leaf".
    const tree = [[-1, -1, -1]];
    const insertCode = (codeVal, len, sym) => {
      let node = 0;
      for (let b = len - 1; b >= 0; b--) {
        const bit = codeVal >>> b & 1;
        if (tree[node][bit] === -1) {
          tree[node][bit] = tree.length;
          tree.push([-1, -1, -1]);
        }
        node = tree[node][bit];
      }
      tree[node][2] = sym;
    };
    let code = 0;
    let prevLen = sorted[0].len;
    insertCode(0, sorted[0].len, sorted[0].sym);
    for (let i = 1; i < sorted.length; i++) {
      code = code + 1 << sorted[i].len - prevLen;
      insertCode(code, sorted[i].len, sorted[i].sym);
      prevLen = sorted[i].len;
    }
    // Walk the trie bit by bit, emitting a symbol at each leaf.
    const output = new Uint8Array(originalSize);
    let bitPos = 0;
    let outPos = 0;
    const bitsStart = 260;
    while (outPos < originalSize && bitPos < totalBits) {
      let node = 0;
      while (tree[node][2] === -1 && bitPos < totalBits) {
        const byteIdx = bitsStart + (bitPos >>> 3);
        const bit = data[byteIdx] >>> 7 - (bitPos & 7) & 1;
        node = tree[node][bit];
        bitPos++;
      }
      if (tree[node][2] !== -1) {
        output[outPos++] = tree[node][2];
      }
    }
    return output;
  }
};
915
// Substitution dictionary for DictionaryCodec, ordered roughly longest-first
// so the greedy matcher in encode() takes the biggest savings available.
//
// NOTE(review): entry ORDER is part of the wire format. encode() emits a
// match as the 2-byte escape [0x00, index + 1] and decode() maps the byte
// back via DICTIONARY[index - 1], so reordering, inserting, or removing
// entries breaks decoding of previously compressed data. Append only.
var DICTIONARY_STRINGS = [
  // Long patterns first (most savings per match)
  "addEventListener",   // 16 bytes → 2 = saves 14
  "querySelector",      // 13 → 2 = saves 11
  "createElement",      // 13 → 2 = saves 11
  "justify-content",    // 15 → 2 = saves 13
  "align-items:center", // 19 → 2 = saves 17
  "textContent",        // 11 → 2 = saves 9
  "display:flex",       // 12 → 2 = saves 10
  "display:grid",       // 12 → 2 = saves 10
  "display:none",       // 12 → 2 = saves 10
  "background:",        // 11 → 2 = saves 9
  "font-weight:",       // 12 → 2 = saves 10
  "font-size:",         // 10 → 2 = saves 8
  "className",          // 9 → 2 = saves 7
  "undefined",          // 9 → 2 = saves 7
  "container",          // 9 → 2 = saves 7
  "transform:",         // 10 → 2 = saves 8
  "overflow:",          // 9 → 2 = saves 7
  "position:",          // 9 → 2 = saves 7
  "function ",          // 9 → 2 = saves 7
  "children",           // 8 → 2 = saves 6
  "document",           // 8 → 2 = saves 6
  "display:",           // 8 → 2 = saves 6
  "padding:",           // 8 → 2 = saves 6
  "onClick",            // 7 → 2 = saves 5
  "useState",           // 8 → 2 = saves 6
  "https://",           // 8 → 2 = saves 6
  "default",            // 7 → 2 = saves 5
  "extends",            // 7 → 2 = saves 5
  "return ",            // 7 → 2 = saves 5
  "export ",            // 7 → 2 = saves 5
  "import ",            // 7 → 2 = saves 5
  "margin:",            // 7 → 2 = saves 5
  "border:",            // 7 → 2 = saves 5
  "cursor:",            // 7 → 2 = saves 5
  "height:",            // 7 → 2 = saves 5
  "</span>",            // 7 → 2 = saves 5
  "color:",             // 6 → 2 = saves 4
  "width:",             // 6 → 2 = saves 4
  "const ",             // 6 → 2 = saves 4
  "class ",             // 6 → 2 = saves 4
  "</div>",             // 6 → 2 = saves 4
  "<span ",             // 6 → 2 = saves 4
  "<div ",              // 5 → 2 = saves 3
  "async",              // 5 → 2 = saves 3
  "await",              // 5 → 2 = saves 3
  "false",              // 5 → 2 = saves 3
  "this.",              // 5 → 2 = saves 3
  "props",              // 5 → 2 = saves 3
  "state",              // 5 → 2 = saves 3
  "</p>",               // 4 → 2 = saves 2
  "null",               // 4 → 2 = saves 2
  "true",               // 4 → 2 = saves 2
  "flex",               // 4 → 2 = saves 2
  "grid",               // 4 → 2 = saves 2
  "none",               // 4 → 2 = saves 2
  "auto",               // 4 → 2 = saves 2
  "self",               // 4 → 2 = saves 2
  ".css",               // 4 → 2 = saves 2
  ".com",               // 4 → 2 = saves 2
  "var(",               // 4 → 2 = saves 2
  "<p>",                // 3 → 2 = saves 1
  ".js",                // 3 → 2 = saves 1
  "px;",                // 3 → 2 = saves 1
  "rem"                 // 3 → 2 = saves 1
];
1046
// Pre-encoded (UTF-8) dictionary entries. A single shared TextEncoder is
// reused across the whole table instead of allocating one per entry inside
// the map() callback, which the original version did.
var DICTIONARY = (() => {
  const encoder = new TextEncoder();
  return DICTIONARY_STRINGS.map((s) => encoder.encode(s));
})();
1049
// Byte-oriented dictionary substitution codec.
//
// Wire format: a literal byte is emitted as-is, except 0x00 which is escaped
// as [0x00, 0x00]. A dictionary match is emitted as [0x00, index + 1].
var DictionaryCodec = class {
  id = 7;
  name = "dictionary";
  /**
   * Replace known byte sequences with 2-byte escape codes.
   *
   * @param data input bytes
   * @returns encoded bytes; inputs shorter than 4 bytes are returned as-is
   *          (too small for any substitution to pay off)
   */
  encode(data) {
    if (data.length < 4) return data;
    const output = [];
    let pos = 0;
    while (pos < data.length) {
      let matched = false;
      // Greedy first-match scan; DICTIONARY is ordered so longer entries
      // are tried before shorter ones.
      for (let idx = 0; idx < DICTIONARY.length; idx++) {
        const entry = DICTIONARY[idx];
        if (pos + entry.length > data.length) continue;
        let match = true;
        for (let j = 0; j < entry.length; j++) {
          if (data[pos + j] !== entry[j]) {
            match = false;
            break;
          }
        }
        if (match) {
          // Escape 0x00 followed by 1-based dictionary index.
          output.push(0, idx + 1);
          pos += entry.length;
          matched = true;
          break;
        }
      }
      if (!matched) {
        if (data[pos] === 0) {
          // Literal zero byte must itself be escaped.
          output.push(0, 0);
        } else {
          output.push(data[pos]);
        }
        pos++;
      }
    }
    return new Uint8Array(output);
  }
  /**
   * Reverse encode().
   *
   * @param data         encoded bytes
   * @param originalSize expected decoded length (output is capped to it)
   * @returns decoded bytes
   * @throws {Error} on a truncated escape sequence or an index that is not
   *         in DICTIONARY (previously these crashed with an opaque TypeError)
   */
  decode(data, originalSize) {
    const output = [];
    let pos = 0;
    while (pos < data.length && output.length < originalSize) {
      if (data[pos] === 0) {
        pos++;
        if (pos >= data.length) {
          throw new Error("Truncated dictionary escape sequence");
        }
        if (data[pos] === 0) {
          output.push(0);
        } else {
          const entry = DICTIONARY[data[pos] - 1];
          if (!entry) {
            throw new Error(`Invalid dictionary index: ${data[pos]}`);
          }
          // Cap at originalSize so a corrupt size field cannot make the
          // output longer than requested.
          for (let j = 0; j < entry.length && output.length < originalSize; j++) {
            output.push(entry[j]);
          }
        }
        pos++;
      } else {
        output.push(data[pos]);
        pos++;
      }
    }
    return new Uint8Array(output);
  }
};
1107
// Codecs implemented entirely in JS (no platform compression APIs).
var PURE_JS_CODECS = [
  new RawCodec(),
  new RLECodec(),
  new DeltaCodec(),
  new LZ77Codec(),
  new HuffmanCodec(),
  new DictionaryCodec()
];
// Full default codec set: pure-JS codecs plus the platform-backed ones.
var BUILTIN_CODECS = [
  ...PURE_JS_CODECS,
  new BrotliCodec(),
  new GzipCodec()
];
// Lookup table keyed by each codec's wire-format id, used when decompressing.
var CODEC_MAP = new Map(
  BUILTIN_CODECS.map((c) => [c.id, c])
);
1123
/**
 * Resolve a built-in codec from its wire-format ID.
 *
 * @param id numeric codec identifier stored in chunk/stream headers
 * @returns the registered codec instance
 * @throws {Error} when no built-in codec carries that ID
 */
function getCodecById(id) {
  const found = CODEC_MAP.get(id);
  if (found) return found;
  throw new Error(`Unknown codec ID: ${id}`);
}
1130
+
1131
+ // src/compression/TopologicalCompressor.ts
1132
// Per-chunk frame header: [codecId u8][originalSize u32 BE][compressedSize u32 BE]
var CHUNK_HEADER_SIZE = 9;
/**
 * Serialize a chunk header.
 *
 * @param codecId        wire-format ID of the codec that won this chunk
 * @param originalSize   uncompressed chunk length in bytes
 * @param compressedSize encoded chunk length in bytes
 * @returns a 9-byte Uint8Array in the layout above (big-endian u32s)
 */
function encodeChunkHeader(codecId, originalSize, compressedSize) {
  const buffer = new ArrayBuffer(CHUNK_HEADER_SIZE);
  const view = new DataView(buffer);
  view.setUint8(0, codecId);
  view.setUint32(1, originalSize);
  view.setUint32(5, compressedSize);
  return new Uint8Array(buffer);
}
1141
/**
 * Parse a 9-byte chunk header located at `offset` inside `data`.
 *
 * @param data   buffer containing one or more chunk frames
 * @param offset byte position where the header starts
 * @returns { codecId, originalSize, compressedSize } (u32s read big-endian)
 */
function decodeChunkHeader(data, offset) {
  // byteOffset must be honored: `data` is often a subarray of a larger buffer.
  const view = new DataView(data.buffer, data.byteOffset + offset + 1, 8);
  return {
    codecId: data[offset],
    originalSize: view.getUint32(0),
    compressedSize: view.getUint32(4)
  };
}
1148
// Stream header: [strategy u8][originalSize u32 BE]
var STREAM_HEADER_SIZE = 5;
/**
 * Serialize the two-level stream header.
 *
 * @param strategy     0 = per-chunk topological payload, otherwise the
 *                     winning global codec's wire-format ID
 * @param originalSize total uncompressed stream length in bytes
 * @returns a 5-byte Uint8Array in the layout above
 */
function encodeStreamHeader(strategy, originalSize) {
  const out = new Uint8Array(STREAM_HEADER_SIZE);
  const view = new DataView(out.buffer);
  view.setUint8(0, strategy);
  view.setUint32(1, originalSize);
  return out;
}
1155
/**
 * Parse the 5-byte stream header produced by encodeStreamHeader().
 *
 * @param data buffer whose first 5 bytes are the stream header
 * @returns { strategy, originalSize } with originalSize read big-endian
 */
function decodeStreamHeader(data) {
  // Respect byteOffset so subarray views decode correctly.
  const view = new DataView(data.buffer, data.byteOffset + 1, 4);
  return {
    strategy: data[0],
    originalSize: view.getUint32(0)
  };
}
1164
// Adaptive compressor: every chunk (and optionally the whole stream) is
// encoded with every configured codec, and the smallest result wins.
var TopologicalCompressor = class {
  // Resolved configuration: { chunkSize, codecs, streamRace }.
  config;
  /**
   * @param config optional overrides:
   *   chunkSize  — bytes per chunk (default 4096)
   *   codecs     — codec instances to race (default BUILTIN_CODECS)
   *   streamRace — also race whole-stream codecs against per-chunk (default false)
   */
  constructor(config) {
    this.config = {
      chunkSize: config?.chunkSize ?? 4096,
      codecs: config?.codecs ?? BUILTIN_CODECS,
      streamRace: config?.streamRace ?? false
    };
  }
  /**
   * Compress data using fork/race/fold.
   *
   * When streamRace=false (default): per-chunk race only.
   * When streamRace=true: two-level race — global codecs vs per-chunk topo.
   */
  compress(data) {
    // Empty input short-circuits to an empty result with zeroed metrics.
    if (data.length === 0) {
      return {
        data: new Uint8Array(0),
        chunks: [],
        originalSize: 0,
        compressedSize: 0,
        ratio: 0,
        codecsUsed: 0,
        bettiNumber: 0,
        timeMs: 0
      };
    }
    if (!this.config.streamRace) {
      return this.compressChunked(data);
    }
    return this.compressTwoLevel(data);
  }
  /**
   * Decompress data produced by compress().
   * NOTE(review): must be called with the same streamRace setting used to
   * compress — the two modes produce different framing.
   */
  decompress(compressed) {
    if (compressed.length === 0) return new Uint8Array(0);
    if (!this.config.streamRace) {
      return this.decompressChunked(compressed);
    }
    return this.decompressTwoLevel(compressed);
  }
  // ════════════════════════════════════════════════════════════════════════
  // Level 2: Per-Chunk Topological Compression
  // ════════════════════════════════════════════════════════════════════════
  /**
   * Split data into fixed-size chunks; for each chunk, race every codec and
   * keep the smallest output. Output is a sequence of
   * [9-byte chunk header][compressed chunk] frames.
   */
  compressChunked(data) {
    const startTime = performance.now();
    const { chunkSize, codecs } = this.config;
    const numChunks = Math.ceil(data.length / chunkSize);
    const compressedChunks = [];
    const chunkResults = [];
    // Distinct codec IDs that won at least one chunk.
    const codecWins = /* @__PURE__ */ new Set();
    const bettiNumber = Math.max(0, codecs.length - 1);
    for (let i = 0; i < numChunks; i++) {
      const chunkStart = i * chunkSize;
      const chunkEnd = Math.min(chunkStart + chunkSize, data.length);
      const chunk = data.subarray(chunkStart, chunkEnd);
      // Default winner is codec 0 (raw passthrough) with the chunk itself.
      let bestCodecId = 0;
      let bestCompressed = chunk;
      let ventCount = 0;
      for (const codec of codecs) {
        const compressed = codec.encode(chunk);
        // A non-raw codec that fails to shrink the chunk is "vented"
        // (discarded) rather than considered.
        if (compressed.length >= chunk.length && codec.id !== 0) {
          ventCount++;
          continue;
        }
        if (compressed.length < bestCompressed.length) {
          bestCodecId = codec.id;
          bestCompressed = compressed;
        }
      }
      const header = encodeChunkHeader(
        bestCodecId,
        chunk.length,
        bestCompressed.length
      );
      const frame = new Uint8Array(CHUNK_HEADER_SIZE + bestCompressed.length);
      frame.set(header, 0);
      frame.set(bestCompressed, CHUNK_HEADER_SIZE);
      compressedChunks.push(frame);
      codecWins.add(bestCodecId);
      const codecName = codecs.find((c) => c.id === bestCodecId)?.name ?? "unknown";
      chunkResults.push({
        chunkIndex: i,
        codecId: bestCodecId,
        codecName,
        originalSize: chunk.length,
        // Per-chunk ratio includes the 9-byte frame header overhead.
        compressedSize: frame.length,
        ratio: chunk.length > 0 ? 1 - frame.length / chunk.length : 0,
        vented: ventCount
      });
    }
    // Concatenate all frames into one output buffer.
    const totalCompressedSize = compressedChunks.reduce(
      (sum, c) => sum + c.length,
      0
    );
    const output = new Uint8Array(totalCompressedSize);
    let offset = 0;
    for (const c of compressedChunks) {
      output.set(c, offset);
      offset += c.length;
    }
    return {
      data: output,
      chunks: chunkResults,
      originalSize: data.length,
      compressedSize: totalCompressedSize,
      ratio: data.length > 0 ? 1 - totalCompressedSize / data.length : 0,
      codecsUsed: codecWins.size,
      bettiNumber,
      timeMs: performance.now() - startTime
    };
  }
  /**
   * Inverse of compressChunked(): walk the frame sequence, validate sizes,
   * then decode each chunk with the codec named in its header.
   */
  decompressChunked(compressed) {
    const chunks = [];
    let totalOriginalSize = 0;
    let readPos = 0;
    // First pass: parse and validate all frame headers before decoding.
    while (readPos < compressed.length) {
      if (readPos + CHUNK_HEADER_SIZE > compressed.length) {
        throw new Error(`Truncated chunk header at offset ${readPos}`);
      }
      const { codecId, originalSize, compressedSize } = decodeChunkHeader(
        compressed,
        readPos
      );
      readPos += CHUNK_HEADER_SIZE;
      if (readPos + compressedSize > compressed.length) {
        throw new Error(
          `Truncated chunk data at offset ${readPos}: need ${compressedSize}, have ${compressed.length - readPos}`
        );
      }
      chunks.push({
        codecId,
        originalSize,
        compressedData: compressed.subarray(readPos, readPos + compressedSize)
      });
      readPos += compressedSize;
      totalOriginalSize += originalSize;
    }
    // Second pass: decode each chunk into its slot of the output buffer.
    const output = new Uint8Array(totalOriginalSize);
    let writePos = 0;
    for (const chunk of chunks) {
      const codec = getCodecById(chunk.codecId);
      const decompressed = codec.decode(
        chunk.compressedData,
        chunk.originalSize
      );
      output.set(decompressed, writePos);
      writePos += chunk.originalSize;
    }
    return output;
  }
  // ════════════════════════════════════════════════════════════════════════
  // Level 1: Stream-Level Two-Level Race
  // ════════════════════════════════════════════════════════════════════════
  /**
   * Two-level fork/race/fold:
   *
   * FORK (stream level):
   *   ├─ Path 0: Per-chunk topological (Level 2)
   *   ├─ Path 1: Global codec A on entire stream
   *   ├─ Path 2: Global codec B on entire stream
   *   └─ ...
   * RACE: Smallest total output wins
   * FOLD: 5-byte strategy header + compressed data
   *
   * On homogeneous text, global brotli wins (cross-chunk dictionary).
   * On mixed content, per-chunk topo wins (adapts per region).
   * The topology decides — not the programmer.
   */
  compressTwoLevel(data) {
    const startTime = performance.now();
    const { codecs } = this.config;
    const chunkedResult = this.compressChunked(data);
    const chunkedTotal = STREAM_HEADER_SIZE + chunkedResult.compressedSize;
    const globalCandidates = [];
    for (const codec of codecs) {
      // Raw (id 0) is pointless as a whole-stream strategy; skip it.
      if (codec.id === 0) continue;
      try {
        const compressed = codec.encode(data);
        const totalSize = STREAM_HEADER_SIZE + compressed.length;
        // Only keep candidates that actually shrink the stream.
        if (compressed.length < data.length) {
          globalCandidates.push({
            codecId: codec.id,
            codecName: codec.name,
            compressed,
            totalSize
          });
        }
      } catch {
        // A failing global codec simply drops out of the race.
      }
    }
    // Strategy 0 = chunked; any other value is the winning codec's ID.
    let bestStrategy = 0;
    let bestSize = chunkedTotal;
    let bestGlobal = null;
    for (const candidate of globalCandidates) {
      if (candidate.totalSize < bestSize) {
        bestStrategy = candidate.codecId;
        bestSize = candidate.totalSize;
        bestGlobal = candidate;
      }
    }
    const streamHeader = encodeStreamHeader(bestStrategy, data.length);
    // Betti-style metric: independent paths raced at both levels.
    const outerPaths = globalCandidates.length + 1;
    const innerBeta = Math.max(0, codecs.length - 1);
    const totalBeta = outerPaths - 1 + innerBeta;
    if (bestStrategy === 0) {
      const output = new Uint8Array(STREAM_HEADER_SIZE + chunkedResult.data.length);
      output.set(streamHeader, 0);
      output.set(chunkedResult.data, STREAM_HEADER_SIZE);
      return {
        ...chunkedResult,
        data: output,
        compressedSize: output.length,
        ratio: data.length > 0 ? 1 - output.length / data.length : 0,
        bettiNumber: totalBeta,
        strategy: "chunked",
        timeMs: performance.now() - startTime
      };
    } else {
      const output = new Uint8Array(STREAM_HEADER_SIZE + bestGlobal.compressed.length);
      output.set(streamHeader, 0);
      output.set(bestGlobal.compressed, STREAM_HEADER_SIZE);
      return {
        data: output,
        chunks: [],
        originalSize: data.length,
        compressedSize: output.length,
        ratio: data.length > 0 ? 1 - output.length / data.length : 0,
        codecsUsed: 1,
        bettiNumber: totalBeta,
        strategy: `global:${bestGlobal.codecName}`,
        timeMs: performance.now() - startTime
      };
    }
  }
  /**
   * Inverse of compressTwoLevel(): read the strategy byte, then dispatch to
   * chunked decompression or a single global codec.
   */
  decompressTwoLevel(compressed) {
    if (compressed.length < STREAM_HEADER_SIZE) {
      throw new Error("Truncated stream header");
    }
    const { strategy, originalSize } = decodeStreamHeader(compressed);
    const payload = compressed.subarray(STREAM_HEADER_SIZE);
    if (strategy === 0) {
      return this.decompressChunked(payload);
    } else {
      const codec = getCodecById(strategy);
      return codec.decode(payload, originalSize);
    }
  }
  /** Get the codecs available for racing. */
  getCodecs() {
    return this.config.codecs;
  }
  /** Get the chunk size. */
  getChunkSize() {
    return this.config.chunkSize;
  }
};
1423
+
1424
+ export { BUILTIN_CODECS, BrotliCodec, CompressionEngine, DeltaCodec, DeltaSyncOptimizer, DictionaryCodec, GzipCodec, HuffmanCodec, LZ77Codec, PURE_JS_CODECS, RLECodec, RawCodec, TopologicalCompressor, getCodecById, getCompressionEngine, getDeltaSyncOptimizer, resetCompressionEngine, resetDeltaSyncOptimizer };
1425
+ //# sourceMappingURL=index.js.map