@a0n/aeon 5.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. package/LICENSE +15 -0
  2. package/README.md +199 -0
  3. package/dist/CryptoProvider-SLWjqByk.d.cts +407 -0
  4. package/dist/CryptoProvider-SLWjqByk.d.ts +407 -0
  5. package/dist/compression/index.cjs +1445 -0
  6. package/dist/compression/index.cjs.map +1 -0
  7. package/dist/compression/index.d.cts +451 -0
  8. package/dist/compression/index.d.ts +451 -0
  9. package/dist/compression/index.js +1426 -0
  10. package/dist/compression/index.js.map +1 -0
  11. package/dist/core/index.cjs +4 -0
  12. package/dist/core/index.cjs.map +1 -0
  13. package/dist/core/index.d.cts +212 -0
  14. package/dist/core/index.d.ts +212 -0
  15. package/dist/core/index.js +3 -0
  16. package/dist/core/index.js.map +1 -0
  17. package/dist/crypto/index.cjs +130 -0
  18. package/dist/crypto/index.cjs.map +1 -0
  19. package/dist/crypto/index.d.cts +56 -0
  20. package/dist/crypto/index.d.ts +56 -0
  21. package/dist/crypto/index.js +124 -0
  22. package/dist/crypto/index.js.map +1 -0
  23. package/dist/distributed/index.cjs +2586 -0
  24. package/dist/distributed/index.cjs.map +1 -0
  25. package/dist/distributed/index.d.cts +1005 -0
  26. package/dist/distributed/index.d.ts +1005 -0
  27. package/dist/distributed/index.js +2580 -0
  28. package/dist/distributed/index.js.map +1 -0
  29. package/dist/index.cjs +10953 -0
  30. package/dist/index.cjs.map +1 -0
  31. package/dist/index.d.cts +1953 -0
  32. package/dist/index.d.ts +1953 -0
  33. package/dist/index.js +10828 -0
  34. package/dist/index.js.map +1 -0
  35. package/dist/offline/index.cjs +419 -0
  36. package/dist/offline/index.cjs.map +1 -0
  37. package/dist/offline/index.d.cts +148 -0
  38. package/dist/offline/index.d.ts +148 -0
  39. package/dist/offline/index.js +415 -0
  40. package/dist/offline/index.js.map +1 -0
  41. package/dist/optimization/index.cjs +800 -0
  42. package/dist/optimization/index.cjs.map +1 -0
  43. package/dist/optimization/index.d.cts +347 -0
  44. package/dist/optimization/index.d.ts +347 -0
  45. package/dist/optimization/index.js +790 -0
  46. package/dist/optimization/index.js.map +1 -0
  47. package/dist/persistence/index.cjs +207 -0
  48. package/dist/persistence/index.cjs.map +1 -0
  49. package/dist/persistence/index.d.cts +95 -0
  50. package/dist/persistence/index.d.ts +95 -0
  51. package/dist/persistence/index.js +204 -0
  52. package/dist/persistence/index.js.map +1 -0
  53. package/dist/presence/index.cjs +489 -0
  54. package/dist/presence/index.cjs.map +1 -0
  55. package/dist/presence/index.d.cts +283 -0
  56. package/dist/presence/index.d.ts +283 -0
  57. package/dist/presence/index.js +485 -0
  58. package/dist/presence/index.js.map +1 -0
  59. package/dist/types-CMxO7QF0.d.cts +33 -0
  60. package/dist/types-CMxO7QF0.d.ts +33 -0
  61. package/dist/utils/index.cjs +64 -0
  62. package/dist/utils/index.cjs.map +1 -0
  63. package/dist/utils/index.d.cts +38 -0
  64. package/dist/utils/index.d.ts +38 -0
  65. package/dist/utils/index.js +57 -0
  66. package/dist/utils/index.js.map +1 -0
  67. package/dist/versioning/index.cjs +1164 -0
  68. package/dist/versioning/index.cjs.map +1 -0
  69. package/dist/versioning/index.d.cts +537 -0
  70. package/dist/versioning/index.d.ts +537 -0
  71. package/dist/versioning/index.js +1159 -0
  72. package/dist/versioning/index.js.map +1 -0
  73. package/package.json +194 -0
@@ -0,0 +1,1445 @@
1
+ 'use strict';
2
+
3
+ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
4
+ get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
5
+ }) : x)(function(x) {
6
+ if (typeof require !== "undefined") return require.apply(this, arguments);
7
+ throw Error('Dynamic require of "' + x + '" is not supported');
8
+ });
9
+
10
// src/utils/logger.ts

// Default logger: forwards each level to the matching console method,
// tagging every line with an [AEON:<LEVEL>] prefix.
var consoleLogger = {
  debug(...args) {
    console.debug("[AEON:DEBUG]", ...args);
  },
  info(...args) {
    console.info("[AEON:INFO]", ...args);
  },
  warn(...args) {
    console.warn("[AEON:WARN]", ...args);
  },
  error(...args) {
    console.error("[AEON:ERROR]", ...args);
  }
};

// The active logger; consumers obtain it through getLogger().
var currentLogger = consoleLogger;

/** Return the currently active logger instance. */
function getLogger() {
  return currentLogger;
}
29
+
30
// src/compression/CompressionEngine.ts
var logger = getLogger();

/**
 * Compresses and decompresses byte payloads, preferring the platform's
 * native CompressionStream/DecompressionStream APIs. When native support
 * is missing or throws, compression degrades to a raw passthrough tagged
 * `algorithm: "none"`. Also provides chunking helpers for transmission
 * and keeps running statistics for observability.
 */
var CompressionEngine = class {
  // Running counters; read via getStats(), cleared via resetStats().
  stats = {
    totalCompressed: 0,
    totalDecompressed: 0,
    totalOriginalBytes: 0,
    totalCompressedBytes: 0,
    averageCompressionRatio: 0,
    compressionTimeMs: 0,
    decompressionTimeMs: 0
  };
  preferredAlgorithm = "gzip";
  /**
   * @param preferredAlgorithm stream algorithm name (e.g. "gzip") passed
   *   straight to CompressionStream/DecompressionStream.
   */
  constructor(preferredAlgorithm = "gzip") {
    this.preferredAlgorithm = preferredAlgorithm;
    logger.debug("[CompressionEngine] Initialized", {
      algorithm: preferredAlgorithm,
      supportsNative: this.supportsNativeCompression()
    });
  }
  /**
   * Check if native compression is available
   */
  supportsNativeCompression() {
    return typeof CompressionStream !== "undefined" && typeof DecompressionStream !== "undefined";
  }
  /**
   * Compress `data` (string or bytes) into a batch record that carries
   * the compressed payload plus size/ratio metadata. Falls back to a raw
   * passthrough ("none") when native compression is unavailable or fails.
   */
  async compress(data) {
    const t0 = performance.now();
    const bytes = typeof data === "string" ? new TextEncoder().encode(data) : data;
    const originalSize = bytes.byteLength;
    let payload;
    let algorithm = this.preferredAlgorithm;
    if (!this.supportsNativeCompression()) {
      payload = bytes;
      algorithm = "none";
    } else {
      try {
        payload = await this.compressNative(bytes, this.preferredAlgorithm);
      } catch (error) {
        logger.warn(
          "[CompressionEngine] Native compression failed, using fallback",
          error
        );
        payload = bytes;
        algorithm = "none";
      }
    }
    // Ratio is the fraction of bytes saved (0 when input was empty).
    const compressionRatio = originalSize > 0 ? 1 - payload.byteLength / originalSize : 0;
    const batch = {
      id: `batch-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      compressed: payload,
      originalSize,
      compressedSize: payload.byteLength,
      compressionRatio,
      algorithm,
      timestamp: Date.now()
    };
    const elapsed = performance.now() - t0;
    this.stats.totalCompressed++;
    this.stats.totalOriginalBytes += originalSize;
    this.stats.totalCompressedBytes += payload.byteLength;
    this.stats.compressionTimeMs += elapsed;
    this.updateAverageRatio();
    logger.debug("[CompressionEngine] Compressed", {
      original: originalSize,
      compressed: payload.byteLength,
      ratio: (compressionRatio * 100).toFixed(1) + "%",
      algorithm,
      timeMs: elapsed.toFixed(2)
    });
    return batch;
  }
  /**
   * Decompress a batch produced by compress(). "none" batches are passed
   * through unchanged; anything else requires native support, and errors
   * are logged then rethrown.
   */
  async decompress(batch) {
    const t0 = performance.now();
    let decompressed;
    if (batch.algorithm === "none") {
      decompressed = batch.compressed;
    } else if (this.supportsNativeCompression()) {
      try {
        decompressed = await this.decompressNative(batch.compressed, batch.algorithm);
      } catch (error) {
        logger.warn("[CompressionEngine] Native decompression failed", error);
        throw error;
      }
    } else {
      throw new Error("Native decompression not available");
    }
    const elapsed = performance.now() - t0;
    this.stats.totalDecompressed++;
    this.stats.decompressionTimeMs += elapsed;
    logger.debug("[CompressionEngine] Decompressed", {
      compressed: batch.compressedSize,
      decompressed: decompressed.byteLength,
      algorithm: batch.algorithm,
      timeMs: elapsed.toFixed(2)
    });
    return decompressed;
  }
  /**
   * Compress using native CompressionStream
   */
  async compressNative(data, algorithm) {
    return this.#pump(new CompressionStream(algorithm), data);
  }
  /**
   * Decompress using native DecompressionStream
   */
  async decompressNative(data, algorithm) {
    return this.#pump(new DecompressionStream(algorithm), data);
  }
  /**
   * Push `data` through a (De)CompressionStream and collect the output
   * into a single Uint8Array. The write/close promises are intentionally
   * not awaited before reading, so the read loop can drain the stream.
   */
  async #pump(stream, data) {
    const writer = stream.writable.getWriter();
    const reader = stream.readable.getReader();
    writer.write(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
    writer.close();
    const chunks = [];
    for (;;) {
      const { done, value } = await reader.read();
      if (value) chunks.push(value);
      if (done) break;
    }
    const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
    const combined = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of chunks) {
      combined.set(chunk, offset);
      offset += chunk.length;
    }
    return combined;
  }
  /**
   * Split compressed batch into chunks for transmission
   */
  splitIntoChunks(batch, chunkSize = 64 * 1024) {
    const data = batch.compressed;
    const total = Math.ceil(data.byteLength / chunkSize);
    const chunks = [];
    for (let i = 0; i < total; i++) {
      const start = i * chunkSize;
      const end = Math.min(start + chunkSize, data.byteLength);
      const slice = data.slice(start, end);
      chunks.push({
        chunkId: `${batch.id}-chunk-${i}`,
        batchId: batch.id,
        data: slice,
        index: i,
        total,
        checksum: this.simpleChecksum(slice)
      });
    }
    return chunks;
  }
  /**
   * Reassemble chunks into compressed batch
   */
  reassembleChunks(chunks) {
    const ordered = [...chunks].sort((a, b) => a.index - b.index);
    if (ordered.length === 0) {
      throw new Error("Cannot reassemble: no chunks provided");
    }
    const total = ordered[0].total;
    if (ordered.length !== total) {
      throw new Error(
        `Missing chunks: got ${ordered.length}, expected ${total}`
      );
    }
    const combinedLength = ordered.reduce(
      (sum, chunk) => sum + chunk.data.length,
      0
    );
    const combined = new Uint8Array(combinedLength);
    let offset = 0;
    for (const chunk of ordered) {
      combined.set(chunk.data, offset);
      offset += chunk.data.length;
    }
    return combined;
  }
  /**
   * Simple checksum for chunk verification (djb2-style 32-bit hash,
   * rendered as lowercase hex).
   */
  simpleChecksum(data) {
    let hash = 0;
    for (let i = 0; i < data.length; i++) {
      hash = (hash << 5) - hash + data[i] | 0;
    }
    return (hash >>> 0).toString(16);
  }
  /**
   * Update average compression ratio from the running byte totals.
   */
  updateAverageRatio() {
    if (this.stats.totalOriginalBytes > 0) {
      this.stats.averageCompressionRatio = 1 - this.stats.totalCompressedBytes / this.stats.totalOriginalBytes;
    }
  }
  /**
   * Get statistics (shallow copy; safe for callers to mutate)
   */
  getStats() {
    return { ...this.stats };
  }
  /**
   * Reset statistics
   */
  resetStats() {
    this.stats = {
      totalCompressed: 0,
      totalDecompressed: 0,
      totalOriginalBytes: 0,
      totalCompressedBytes: 0,
      averageCompressionRatio: 0,
      compressionTimeMs: 0,
      decompressionTimeMs: 0
    };
  }
};
293
// Lazily-created module-level singleton engine.
var compressionEngineInstance = null;

/** Get the shared CompressionEngine, creating it on first use. */
function getCompressionEngine() {
  compressionEngineInstance ??= new CompressionEngine();
  return compressionEngineInstance;
}

/** Drop the shared engine; the next getCompressionEngine() builds a fresh one. */
function resetCompressionEngine() {
  compressionEngineInstance = null;
}
303
+
304
// src/compression/DeltaSyncOptimizer.ts
var logger2 = getLogger();

/**
 * Shrinks sync traffic by sending only the changed fields ("deltas") of
 * operations that were seen before. Falls back to a full snapshot when an
 * operation has no history entry or when the serialized delta would exceed
 * the configured size threshold. Keeps a bounded, insertion-ordered history
 * of past operations keyed by operation id.
 */
var DeltaSyncOptimizer = class _DeltaSyncOptimizer {
  static MAX_HISTORY_SIZE = 1e4;
  // Last-seen operation per id; the diff baseline for computeDelta().
  operationHistory = /* @__PURE__ */ new Map();
  stats = {
    totalOperations: 0,
    totalFull: 0,
    totalDelta: 0,
    totalOriginalSize: 0,
    totalDeltaSize: 0,
    averageReductionPercent: 0,
    lastSyncTime: 0,
    fullOperationThreshold: 1e3
    // Force full if delta > 1KB
  };
  constructor(fullOperationThreshold = 1e3) {
    this.stats.fullOperationThreshold = fullOperationThreshold;
    logger2.debug("[DeltaSyncOptimizer] Initialized", {
      threshold: fullOperationThreshold
    });
  }
  /** UTF-8 byte length of a value's JSON serialization. */
  #byteSize(value) {
    return new TextEncoder().encode(JSON.stringify(value)).byteLength;
  }
  /** Record an operation in history, evicting the oldest entry past the cap. */
  #remember(operation) {
    this.operationHistory.set(operation.id, operation);
    if (this.operationHistory.size > _DeltaSyncOptimizer.MAX_HISTORY_SIZE) {
      const oldestKey = this.operationHistory.keys().next().value;
      if (oldestKey !== void 0) this.operationHistory.delete(oldestKey);
    }
  }
  /**
   * Compute delta for single operation.
   *
   * Returns a "full" record when there is no baseline, or when the
   * serialized delta would exceed `fullOperationThreshold`; otherwise a
   * "delta" record carrying only the changed keys (keys removed from the
   * new data are signalled with null values in `changes`).
   */
  computeDelta(operation) {
    const originalSize = this.#byteSize(operation);
    const previous = this.operationHistory.get(operation.id);
    if (!previous) {
      // First sighting: nothing to diff against — send everything.
      const delta = {
        id: `delta-${Date.now()}-${Math.random().toString(36).slice(2)}`,
        type: "full",
        operationId: operation.id,
        operationType: operation.type,
        sessionId: operation.sessionId,
        timestamp: Date.now(),
        fullData: operation.data,
        priority: operation.priority
      };
      this.stats.totalOperations++;
      this.stats.totalFull++;
      this.stats.totalOriginalSize += originalSize;
      this.stats.totalDeltaSize += this.#byteSize(delta);
      this.#remember(operation);
      return delta;
    }
    // Diff against the baseline: changed/added keys keep their new value;
    // keys missing from the new data are marked null and flagged in the mask.
    const changes = {};
    const changeMask = [];
    let dirty = false;
    for (const [key, value] of Object.entries(operation.data)) {
      if (!this.deepEqual(value, previous.data[key])) {
        changes[key] = value;
        changeMask.push(key);
        dirty = true;
      }
    }
    for (const key of Object.keys(previous.data)) {
      if (!(key in operation.data)) {
        changes[key] = null;
        changeMask.push(`${key}:deleted`);
        dirty = true;
      }
    }
    const deltaData = {
      id: `delta-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      type: "delta",
      operationId: operation.id,
      operationType: operation.type,
      sessionId: operation.sessionId,
      timestamp: Date.now(),
      changes: dirty ? changes : void 0,
      changeMask: dirty ? changeMask : void 0,
      priority: operation.priority
    };
    const deltaSize = this.#byteSize(deltaData);
    // An oversized delta costs more than a snapshot — demote it to full.
    const finalDelta = deltaSize > this.stats.fullOperationThreshold ? {
      ...deltaData,
      type: "full",
      fullData: operation.data,
      changes: void 0,
      changeMask: void 0
    } : deltaData;
    this.stats.totalOperations++;
    if (finalDelta.type === "full") {
      this.stats.totalFull++;
    } else {
      this.stats.totalDelta++;
    }
    this.stats.totalOriginalSize += originalSize;
    this.stats.totalDeltaSize += deltaSize;
    this.#remember(operation);
    return finalDelta;
  }
  /**
   * Compute deltas for batch of operations
   */
  computeBatchDeltas(operations) {
    const deltas = operations.map((op) => this.computeDelta(op));
    const totalOriginalSize = operations.reduce(
      (sum, op) => sum + this.#byteSize(op),
      0
    );
    const totalDeltaSize = deltas.reduce(
      (sum, delta) => sum + this.#byteSize(delta),
      0
    );
    const reductionPercent = totalOriginalSize > 0 ? Math.round(
      (totalOriginalSize - totalDeltaSize) / totalOriginalSize * 100
    ) : 0;
    const batch = {
      batchId: `batch-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      operations: deltas,
      timestamp: Date.now(),
      totalOriginalSize,
      totalDeltaSize,
      reductionPercent
    };
    logger2.debug("[DeltaSyncOptimizer] Batch computed", {
      operations: operations.length,
      reduction: reductionPercent,
      size: totalDeltaSize
    });
    return batch;
  }
  /**
   * Decompress delta operation back to full operation
   */
  decompressDelta(delta) {
    if (delta.type === "full") {
      return {
        id: delta.operationId,
        type: delta.operationType,
        sessionId: delta.sessionId,
        data: delta.fullData || {},
        status: "pending",
        createdAt: delta.timestamp
      };
    }
    const previous = this.operationHistory.get(delta.operationId);
    if (!previous) {
      // No baseline — best effort: treat the change set itself as the data.
      logger2.warn("[DeltaSyncOptimizer] Cannot decompress - no history", {
        operationId: delta.operationId
      });
      return {
        id: delta.operationId,
        type: delta.operationType,
        sessionId: delta.sessionId,
        data: delta.changes || {},
        status: "pending",
        createdAt: delta.timestamp
      };
    }
    // Overlay the changes on the baseline, then strip null-marked deletions.
    const reconstructed = {
      ...previous,
      data: {
        ...previous.data,
        ...delta.changes || {}
      }
    };
    if (delta.changes) {
      for (const [key, value] of Object.entries(delta.changes)) {
        if (value === null) {
          delete reconstructed.data[key];
        }
      }
    }
    return reconstructed;
  }
  /**
   * Update history after successful sync
   */
  updateHistory(operations) {
    for (const op of operations) {
      this.operationHistory.set(op.id, op);
    }
    // Trim oldest entries until back under the cap.
    while (this.operationHistory.size > _DeltaSyncOptimizer.MAX_HISTORY_SIZE) {
      const oldestKey = this.operationHistory.keys().next().value;
      if (oldestKey === void 0) break;
      this.operationHistory.delete(oldestKey);
    }
    logger2.debug("[DeltaSyncOptimizer] History updated", {
      count: operations.length,
      totalHistorySize: this.operationHistory.size
    });
  }
  /**
   * Clear history for specific operations
   */
  clearHistory(operationIds) {
    for (const id of operationIds) {
      this.operationHistory.delete(id);
    }
    logger2.debug("[DeltaSyncOptimizer] History cleared", {
      cleared: operationIds.length,
      remaining: this.operationHistory.size
    });
  }
  /**
   * Get current performance statistics (recomputes the average reduction
   * lazily, then returns a shallow copy).
   */
  getStats() {
    if (this.stats.totalOperations > 0) {
      this.stats.averageReductionPercent = Math.round(
        (this.stats.totalOriginalSize - this.stats.totalDeltaSize) / this.stats.totalOriginalSize * 100
      );
    }
    return { ...this.stats };
  }
  /**
   * Reset statistics (keeps the configured threshold)
   */
  resetStats() {
    this.stats = {
      totalOperations: 0,
      totalFull: 0,
      totalDelta: 0,
      totalOriginalSize: 0,
      totalDeltaSize: 0,
      averageReductionPercent: 0,
      lastSyncTime: 0,
      fullOperationThreshold: this.stats.fullOperationThreshold
    };
    logger2.debug("[DeltaSyncOptimizer] Stats reset");
  }
  /**
   * Set the full operation threshold
   */
  setFullOperationThreshold(bytes) {
    this.stats.fullOperationThreshold = bytes;
    logger2.debug("[DeltaSyncOptimizer] Threshold updated", { bytes });
  }
  /**
   * Get history size for memory monitoring
   */
  getHistorySize() {
    return this.operationHistory.size;
  }
  /**
   * Get memory footprint estimate (sum of serialized history entries)
   */
  getMemoryEstimate() {
    let totalBytes = 0;
    for (const op of this.operationHistory.values()) {
      totalBytes += this.#byteSize(op);
    }
    return totalBytes;
  }
  /**
   * Deep equality check for nested objects (structural; arrays compare by
   * their index keys, primitives by strict equality).
   */
  deepEqual(a, b) {
    if (a === b) return true;
    if (a == null || b == null) return false;
    if (typeof a !== "object" || typeof b !== "object") return false;
    const keysA = Object.keys(a);
    const keysB = Object.keys(b);
    if (keysA.length !== keysB.length) return false;
    return keysA.every((key) => this.deepEqual(a[key], b[key]));
  }
};
586
// Lazily-created module-level singleton optimizer.
var deltaSyncInstance = null;

/**
 * Get the shared DeltaSyncOptimizer, creating it on first call.
 * NOTE(review): `threshold` only takes effect on the creating call; later
 * calls ignore it — use setFullOperationThreshold() to change it afterwards.
 */
function getDeltaSyncOptimizer(threshold) {
  if (deltaSyncInstance === null) {
    deltaSyncInstance = new DeltaSyncOptimizer(threshold);
  }
  return deltaSyncInstance;
}

/** Discard the shared optimizer instance. */
function resetDeltaSyncOptimizer() {
  deltaSyncInstance = null;
}
596
+
597
// src/compression/codecs.ts

/** Identity codec (id 0): returns its input untouched in both directions. */
var RawCodec = class {
  id = 0;
  name = "raw";
  /** @returns the input, unchanged */
  encode(data) {
    return data;
  }
  /** @returns the input, unchanged */
  decode(data) {
    return data;
  }
};
608
/**
 * Run-length codec (id 1). Each run is encoded as three bytes:
 * [value, lengthHi, lengthLo]; run lengths are capped at 65535.
 */
var RLECodec = class {
  id = 1;
  name = "rle";
  encode(data) {
    if (data.length === 0) return new Uint8Array(0);
    // Worst case (no runs at all) triples the size: 3 output bytes per input byte.
    const out = new Uint8Array(data.length * 3);
    let w = 0;
    let r = 0;
    while (r < data.length) {
      const value = data[r];
      let run = 1;
      while (r + run < data.length && data[r + run] === value && run < 65535) {
        run++;
      }
      out[w++] = value;
      out[w++] = run >>> 8 & 255;
      out[w++] = run & 255;
      r += run;
    }
    return out.subarray(0, w);
  }
  decode(data, originalSize) {
    const out = new Uint8Array(originalSize);
    let r = 0;
    let w = 0;
    while (r < data.length && w < originalSize) {
      const value = data[r++];
      const run = data[r++] << 8 | data[r++];
      const stop = Math.min(w + run, originalSize);
      out.fill(value, w, stop);
      w = stop;
    }
    return out;
  }
};
643
/**
 * Byte-delta codec (id 2): stores the first byte verbatim and every
 * subsequent byte as the difference from its predecessor, modulo 256.
 */
var DeltaCodec = class {
  id = 2;
  name = "delta";
  encode(data) {
    if (data.length === 0) return new Uint8Array(0);
    const out = new Uint8Array(data.length);
    out[0] = data[0];
    for (let i = 1; i < data.length; i++) {
      out[i] = data[i] - data[i - 1] & 255;
    }
    return out;
  }
  decode(data, originalSize) {
    const out = new Uint8Array(originalSize);
    if (data.length === 0) return out;
    out[0] = data[0];
    // Prefix-sum the stored differences back into absolute values.
    for (let i = 1; i < data.length && i < originalSize; i++) {
      out[i] = out[i - 1] + data[i] & 255;
    }
    return out;
  }
};
665
/**
 * Minimal LZ77 codec (id 3). Output is groups of a control byte followed
 * by 8 items; each control bit selects either a literal byte (bit 0) or a
 * 2-byte back-reference (bit 1) packing a 12-bit offset and a 4-bit
 * length code (length = code + MIN_MATCH, so 3..18).
 */
var LZ77Codec = class _LZ77Codec {
  id = 3;
  name = "lz77";
  static WINDOW_SIZE = 4096;
  static MIN_MATCH = 3;
  static MAX_MATCH = 18;
  encode(data) {
    if (data.length === 0) return new Uint8Array(0);
    // Worst case: all literals + one control byte per 8 items.
    const output = new Uint8Array(data.length + Math.ceil(data.length / 8) + 16);
    let writePos = 0;
    let readPos = 0;
    while (readPos < data.length) {
      const controlPos = writePos++;
      let controlByte = 0;
      for (let bit = 0; bit < 8 && readPos < data.length; bit++) {
        // BUGFIX: the offset field is 12 bits (max 4095), but a window of
        // `readPos - WINDOW_SIZE` allowed a match at distance exactly 4096,
        // whose offset wrapped to 0 in `bestOffset >>> 8 & 15` and made
        // decode copy uninitialized output. Restrict the window so every
        // representable offset fits in 12 bits.
        const windowStart = Math.max(0, readPos - (_LZ77Codec.WINDOW_SIZE - 1));
        let bestOffset = 0;
        let bestLength = 0;
        for (let j = windowStart; j < readPos; j++) {
          let matchLen = 0;
          while (matchLen < _LZ77Codec.MAX_MATCH && readPos + matchLen < data.length && data[j + matchLen] === data[readPos + matchLen]) {
            matchLen++;
          }
          if (matchLen >= _LZ77Codec.MIN_MATCH && matchLen > bestLength) {
            bestOffset = readPos - j;
            bestLength = matchLen;
          }
        }
        if (bestLength >= _LZ77Codec.MIN_MATCH) {
          controlByte |= 1 << bit;
          const lengthCode = bestLength - _LZ77Codec.MIN_MATCH;
          output[writePos++] = bestOffset >>> 8 & 15 | lengthCode << 4;
          output[writePos++] = bestOffset & 255;
          readPos += bestLength;
        } else {
          output[writePos++] = data[readPos++];
        }
      }
      output[controlPos] = controlByte;
    }
    return output.subarray(0, writePos);
  }
  decode(data, originalSize) {
    const output = new Uint8Array(originalSize);
    let readPos = 0;
    let writePos = 0;
    while (readPos < data.length && writePos < originalSize) {
      const controlByte = data[readPos++];
      for (let bit = 0; bit < 8 && readPos < data.length && writePos < originalSize; bit++) {
        if (controlByte & 1 << bit) {
          // Back-reference: copy byte-by-byte so overlapping copies
          // (offset < length) repeat recently written data correctly.
          const byte1 = data[readPos++];
          const byte2 = data[readPos++];
          const offset = (byte1 & 15) << 8 | byte2;
          const length = (byte1 >>> 4) + _LZ77Codec.MIN_MATCH;
          const srcStart = writePos - offset;
          for (let k = 0; k < length && writePos < originalSize; k++) {
            output[writePos++] = output[srcStart + k];
          }
        } else {
          output[writePos++] = data[readPos++];
        }
      }
    }
    return output;
  }
};
731
/**
 * Brotli codec (id 4), backed by Node's zlib. `encode` is best-effort:
 * when zlib cannot be required (e.g. a browser bundle) it silently returns
 * the input unchanged. NOTE(review): `decode` has no such fallback and
 * will throw in the same environment — confirm callers track whether a
 * buffer was actually compressed.
 */
var BrotliCodec = class {
  id = 4;
  name = "brotli";
  quality;
  /** @param quality brotli quality level (0-11); default 4 trades speed for ratio. */
  constructor(quality = 4) {
    this.quality = quality;
  }
  encode(data) {
    try {
      const zlib = __require("zlib");
      const options = {
        params: {
          [zlib.constants.BROTLI_PARAM_QUALITY]: this.quality
        }
      };
      return new Uint8Array(zlib.brotliCompressSync(Buffer.from(data), options));
    } catch {
      // Best-effort passthrough when zlib is unavailable.
      return data;
    }
  }
  decode(data) {
    const zlib = __require("zlib");
    return new Uint8Array(zlib.brotliDecompressSync(Buffer.from(data)));
  }
};
755
/**
 * Gzip codec (id 5), backed by Node's zlib. Like BrotliCodec, `encode`
 * silently falls back to returning the raw input when zlib cannot be
 * required, while `decode` does not — it will throw in that environment.
 */
var GzipCodec = class {
  id = 5;
  name = "gzip";
  level;
  /** @param level zlib compression level (0-9); default 6. */
  constructor(level = 6) {
    this.level = level;
  }
  encode(data) {
    try {
      const zlib = __require("zlib");
      return new Uint8Array(zlib.gzipSync(Buffer.from(data), { level: this.level }));
    } catch {
      // Best-effort passthrough when zlib is unavailable.
      return data;
    }
  }
  decode(data) {
    const zlib = __require("zlib");
    return new Uint8Array(zlib.gunzipSync(Buffer.from(data)));
  }
};
777
/**
 * Canonical Huffman codec (id 6). Encoded layout:
 *   bytes 0..255   per-symbol code lengths (0 = symbol unused)
 *   bytes 256..259 big-endian uint32: number of payload bits
 *   bytes 260..    MSB-first packed code bits
 * Inputs under 32 bytes, single-symbol inputs, or inputs requiring code
 * lengths over 15 bits are returned unchanged. NOTE(review): decode's
 * `< 260` guard only covers short raw returns — callers appear expected
 * to know whether a given buffer was actually encoded.
 */
var HuffmanCodec = class {
  id = 6;
  name = "huffman";
  encode(data) {
    if (data.length < 32) return data;
    // Symbol histogram.
    const histogram = new Uint32Array(256);
    for (let i = 0; i < data.length; i++) histogram[data[i]]++;
    const alphabet = [];
    for (let s = 0; s < 256; s++) {
      if (histogram[s] > 0) alphabet.push({ sym: s, freq: histogram[s] });
    }
    if (alphabet.length <= 1) return data;
    // Build the Huffman tree by repeatedly merging the two lightest nodes.
    const nodes = alphabet.map((entry) => ({
      freq: entry.freq,
      sym: entry.sym,
      left: -1,
      right: -1
    }));
    const queue = [...nodes];
    queue.sort((a, b) => a.freq - b.freq);
    while (queue.length > 1) {
      const lo = queue.shift();
      const hi = queue.shift();
      const parent = {
        freq: lo.freq + hi.freq,
        sym: -1,
        left: nodes.indexOf(lo),
        right: nodes.indexOf(hi)
      };
      nodes.push(parent);
      // Re-insert, keeping the queue sorted by frequency.
      let pos = 0;
      while (pos < queue.length && queue[pos].freq <= parent.freq) pos++;
      queue.splice(pos, 0, parent);
    }
    // Leaf depth becomes the code length (minimum 1 as a root-leaf guard).
    const codeLengths = new Uint8Array(256);
    const walk = (idx, depth) => {
      const node = nodes[idx];
      if (node.left === -1 && node.right === -1) {
        codeLengths[node.sym] = depth || 1;
        return;
      }
      if (node.left >= 0) walk(node.left, depth + 1);
      if (node.right >= 0) walk(node.right, depth + 1);
    };
    walk(nodes.length - 1, 0);
    // Refuse pathological distributions needing more than 15-bit codes.
    for (let s = 0; s < 256; s++) {
      if (codeLengths[s] > 15) return data;
    }
    // Canonical code assignment: symbols ordered by (length, symbol value).
    const ordered = [];
    for (let s = 0; s < 256; s++) {
      if (codeLengths[s] > 0) ordered.push({ sym: s, len: codeLengths[s] });
    }
    ordered.sort((a, b) => a.len - b.len || a.sym - b.sym);
    const codes = new Uint32Array(256);
    let nextCode = 0;
    let prevLen = ordered[0].len;
    codes[ordered[0].sym] = 0;
    for (let i = 1; i < ordered.length; i++) {
      nextCode = nextCode + 1 << ordered[i].len - prevLen;
      codes[ordered[i].sym] = nextCode;
      prevLen = ordered[i].len;
    }
    // Size the output: fixed 260-byte header plus the packed bitstream.
    let totalBits = 0;
    for (let i = 0; i < data.length; i++) totalBits += codeLengths[data[i]];
    const headerSize = 260;
    const output = new Uint8Array(headerSize + Math.ceil(totalBits / 8));
    output.set(codeLengths, 0);
    new DataView(output.buffer).setUint32(256, totalBits);
    // Emit each symbol's code MSB-first.
    let bitPos = 0;
    for (let i = 0; i < data.length; i++) {
      const sym = data[i];
      const codeVal = codes[sym];
      for (let b = codeLengths[sym] - 1; b >= 0; b--) {
        if (codeVal >>> b & 1) {
          output[headerSize + (bitPos >>> 3)] |= 1 << 7 - (bitPos & 7);
        }
        bitPos++;
      }
    }
    return output;
  }
  decode(data, originalSize) {
    // Anything shorter than the header cannot be an encoded buffer.
    if (data.length < 260) return data.subarray(0, originalSize);
    const codeLengths = data.subarray(0, 256);
    const totalBits = new DataView(
      data.buffer,
      data.byteOffset + 256,
      4
    ).getUint32(0);
    // Rebuild the canonical ordering used by encode().
    const ordered = [];
    for (let s = 0; s < 256; s++) {
      if (codeLengths[s] > 0) ordered.push({ sym: s, len: codeLengths[s] });
    }
    ordered.sort((a, b) => a.len - b.len || a.sym - b.sym);
    // Decode trie stored as flat triples [zeroChild, oneChild, symbol].
    const tree = [[-1, -1, -1]];
    const addCode = (codeVal, len, sym) => {
      let node = 0;
      for (let b = len - 1; b >= 0; b--) {
        const bit = codeVal >>> b & 1;
        if (tree[node][bit] === -1) {
          tree[node][bit] = tree.length;
          tree.push([-1, -1, -1]);
        }
        node = tree[node][bit];
      }
      tree[node][2] = sym;
    };
    let nextCode = 0;
    let prevLen = ordered[0].len;
    addCode(0, ordered[0].len, ordered[0].sym);
    for (let i = 1; i < ordered.length; i++) {
      nextCode = nextCode + 1 << ordered[i].len - prevLen;
      addCode(nextCode, ordered[i].len, ordered[i].sym);
      prevLen = ordered[i].len;
    }
    // Walk the bitstream, emitting a symbol at each leaf.
    const output = new Uint8Array(originalSize);
    let bitPos = 0;
    let outPos = 0;
    const bitsStart = 260;
    while (outPos < originalSize && bitPos < totalBits) {
      let node = 0;
      while (tree[node][2] === -1 && bitPos < totalBits) {
        const bit = data[bitsStart + (bitPos >>> 3)] >>> 7 - (bitPos & 7) & 1;
        node = tree[node][bit];
        bitPos++;
      }
      if (tree[node][2] !== -1) {
        output[outPos++] = tree[node][2];
      }
    }
    return output;
  }
};
917
// Static dictionary of byte patterns common in web source text (JS/CSS/HTML).
// Ordering matters: encode() is greedy first-match, so no shorter entry that
// is a prefix of a longer one may precede it.
var DICTIONARY_STRINGS = [
  // Long patterns first (most savings per match)
  "addEventListener",
  "querySelector",
  "createElement",
  "justify-content",
  "align-items:center",
  "textContent",
  "display:flex",
  "display:grid",
  "display:none",
  "background:",
  "font-weight:",
  "font-size:",
  "className",
  "undefined",
  "container",
  "transform:",
  "overflow:",
  "position:",
  "function ",
  "children",
  "document",
  "display:",
  "padding:",
  "onClick",
  "useState",
  "https://",
  "default",
  "extends",
  "return ",
  "export ",
  "import ",
  "margin:",
  "border:",
  "cursor:",
  "height:",
  "</span>",
  "color:",
  "width:",
  "const ",
  "class ",
  "</div>",
  "<span ",
  "<div ",
  "async",
  "await",
  "false",
  "this.",
  "props",
  "state",
  "</p>",
  "null",
  "true",
  "flex",
  "grid",
  "none",
  "auto",
  "self",
  ".css",
  ".com",
  "var(",
  "<p>",
  ".js",
  "px;",
  "rem"
];
// Pre-encoded dictionary entries. Fix: reuse a single TextEncoder instance
// instead of constructing one per entry (the original allocated an encoder
// inside the map callback).
var DICTIONARY = (() => {
  const encoder = new TextEncoder();
  return DICTIONARY_STRINGS.map((s) => encoder.encode(s));
})();
/**
 * Byte-level static-dictionary codec (wire codec ID 7).
 *
 * Wire format emitted by encode():
 *   0x00 0x00       -> literal zero byte (escaped)
 *   0x00 <idx + 1>  -> DICTIONARY[idx] expansion (1-based so it never
 *                      collides with the 0x00 0x00 literal-zero escape)
 *   other byte      -> literal byte, copied through
 */
var DictionaryCodec = class {
  id = 7;
  name = "dictionary";
  /**
   * Replace greedy first-match dictionary hits with 2-byte escapes.
   * @param {Uint8Array} data - raw input bytes
   * @returns {Uint8Array} encoded bytes; inputs shorter than 4 bytes are
   *   returned unchanged (the compressor vents non-shrinking outputs, so
   *   the pass-through is never fed back into decode()).
   */
  encode(data) {
    if (data.length < 4) return data;
    const output = [];
    let pos = 0;
    while (pos < data.length) {
      let matched = false;
      for (let idx = 0; idx < DICTIONARY.length; idx++) {
        const entry = DICTIONARY[idx];
        if (pos + entry.length > data.length) continue;
        let match = true;
        for (let j = 0; j < entry.length; j++) {
          if (data[pos + j] !== entry[j]) {
            match = false;
            break;
          }
        }
        if (match) {
          output.push(0, idx + 1); // escape marker + 1-based entry index
          pos += entry.length;
          matched = true;
          break;
        }
      }
      if (!matched) {
        if (data[pos] === 0) {
          output.push(0, 0); // escaped literal zero
        } else {
          output.push(data[pos]);
        }
        pos++;
      }
    }
    return new Uint8Array(output);
  }
  /**
   * Inverse of encode(): expand escapes back into bytes.
   * @param {Uint8Array} data - encoded bytes
   * @param {number} originalSize - decoded length cap
   * @returns {Uint8Array} decoded bytes (at most originalSize of them)
   */
  decode(data, originalSize) {
    const output = [];
    let pos = 0;
    while (pos < data.length && output.length < originalSize) {
      if (data[pos] === 0) {
        pos++;
        if (data[pos] === 0) {
          output.push(0);
        } else {
          const entry = DICTIONARY[data[pos] - 1];
          for (let j = 0; j < entry.length; j++) output.push(entry[j]);
        }
        pos++;
      } else {
        output.push(data[pos]);
        pos++;
      }
    }
    return new Uint8Array(output);
  }
};
1109
// Codecs implemented entirely in JS — no platform compression APIs needed.
var PURE_JS_CODECS = [
  new RawCodec(),
  new RLECodec(),
  new DeltaCodec(),
  new LZ77Codec(),
  new HuffmanCodec(),
  new DictionaryCodec()
];
// Full roster raced by the compressor: pure-JS codecs plus the
// platform-backed brotli/gzip wrappers.
var BUILTIN_CODECS = [
  ...PURE_JS_CODECS,
  new BrotliCodec(),
  new GzipCodec()
];
// Lookup keyed by wire-format codec ID (byte 0 of each chunk header).
var CODEC_MAP = new Map(
  BUILTIN_CODECS.map((c) => [c.id, c])
);
1125
/**
 * Resolve a registered codec by its wire-format ID.
 * @throws {Error} when no codec with that ID exists in CODEC_MAP.
 */
function getCodecById(id) {
  const found = CODEC_MAP.get(id);
  if (found) return found;
  throw new Error(`Unknown codec ID: ${id}`);
}
1132
+
1133
+ // src/compression/TopologicalCompressor.ts
1134
// Per-chunk frame header: 1 byte codec ID + u32 original size + u32
// compressed size (big-endian, DataView default).
var CHUNK_HEADER_SIZE = 9;
/**
 * Serialize a 9-byte chunk header.
 * @param {number} codecId - wire codec ID (0-255)
 * @param {number} originalSize - uncompressed chunk length
 * @param {number} compressedSize - payload length that follows the header
 * @returns {Uint8Array} the encoded header
 */
function encodeChunkHeader(codecId, originalSize, compressedSize) {
  const buf = new Uint8Array(CHUNK_HEADER_SIZE);
  buf[0] = codecId;
  const dv = new DataView(buf.buffer);
  dv.setUint32(1, originalSize);
  dv.setUint32(5, compressedSize);
  return buf;
}
1143
/**
 * Parse a 9-byte chunk header at the given offset.
 * @param {Uint8Array} data - buffer containing the header
 * @param {number} offset - byte offset of the header within data
 * @returns {{codecId: number, originalSize: number, compressedSize: number}}
 */
function decodeChunkHeader(data, offset) {
  const dv = new DataView(data.buffer, data.byteOffset + offset + 1, 8);
  return {
    codecId: data[offset],
    originalSize: dv.getUint32(0),
    compressedSize: dv.getUint32(4)
  };
}
1150
// Stream-level header: 1 byte strategy + u32 original size (big-endian).
var STREAM_HEADER_SIZE = 5;
/**
 * Serialize the 5-byte stream header.
 * @param {number} strategy - 0 = per-chunk, otherwise a global codec ID
 * @param {number} originalSize - total uncompressed stream length
 * @returns {Uint8Array} the encoded header
 */
function encodeStreamHeader(strategy, originalSize) {
  const buf = new Uint8Array(STREAM_HEADER_SIZE);
  buf[0] = strategy;
  new DataView(buf.buffer).setUint32(1, originalSize);
  return buf;
}
1157
/**
 * Parse the 5-byte stream header at the start of data.
 * @param {Uint8Array} data - buffer beginning with the header
 * @returns {{strategy: number, originalSize: number}}
 */
function decodeStreamHeader(data) {
  const dv = new DataView(data.buffer, data.byteOffset + 1, 4);
  return {
    strategy: data[0],
    originalSize: dv.getUint32(0)
  };
}
1166
// Races multiple codecs over the input ("fork/race/fold") and keeps the
// smallest output. Two modes: per-chunk racing (default) and an optional
// stream-level race between per-chunk output and whole-stream codecs.
var TopologicalCompressor = class {
  config;
  constructor(config) {
    // Normalize config with defaults; codecs fall back to the full roster.
    this.config = {
      chunkSize: config?.chunkSize ?? 4096,
      codecs: config?.codecs ?? BUILTIN_CODECS,
      streamRace: config?.streamRace ?? false
    };
  }
  /**
   * Compress data using fork/race/fold.
   *
   * When streamRace=false (default): per-chunk race only.
   * When streamRace=true: two-level race — global codecs vs per-chunk topo.
   */
  compress(data) {
    if (data.length === 0) {
      // Empty input: empty result object with zeroed stats.
      return {
        data: new Uint8Array(0),
        chunks: [],
        originalSize: 0,
        compressedSize: 0,
        ratio: 0,
        codecsUsed: 0,
        bettiNumber: 0,
        timeMs: 0
      };
    }
    if (!this.config.streamRace) {
      return this.compressChunked(data);
    }
    return this.compressTwoLevel(data);
  }
  /**
   * Decompress data produced by compress().
   * NOTE(review): must be called with the same streamRace setting that
   * produced the data — the two formats are not self-describing relative
   * to each other.
   */
  decompress(compressed) {
    if (compressed.length === 0) return new Uint8Array(0);
    if (!this.config.streamRace) {
      return this.decompressChunked(compressed);
    }
    return this.decompressTwoLevel(compressed);
  }
  // ════════════════════════════════════════════════════════════════════════
  // Level 2: Per-Chunk Topological Compression
  // ════════════════════════════════════════════════════════════════════════
  // Splits data into fixed-size chunks and, per chunk, keeps whichever
  // codec produced the smallest output; falls back to raw (codec 0) when
  // nothing shrinks the chunk.
  compressChunked(data) {
    const startTime = performance.now();
    const { chunkSize, codecs } = this.config;
    const numChunks = Math.ceil(data.length / chunkSize);
    const compressedChunks = [];
    const chunkResults = [];
    const codecWins = /* @__PURE__ */ new Set();
    // One independent "path" per extra codec beyond the first.
    const bettiNumber = Math.max(0, codecs.length - 1);
    for (let i = 0; i < numChunks; i++) {
      const chunkStart = i * chunkSize;
      const chunkEnd = Math.min(chunkStart + chunkSize, data.length);
      const chunk = data.subarray(chunkStart, chunkEnd);
      // Default winner is raw pass-through (codec ID 0).
      let bestCodecId = 0;
      let bestCompressed = chunk;
      let ventCount = 0;
      for (const codec of codecs) {
        const compressed = codec.encode(chunk);
        // "Vent" codecs that failed to shrink the chunk (raw is exempt).
        if (compressed.length >= chunk.length && codec.id !== 0) {
          ventCount++;
          continue;
        }
        if (compressed.length < bestCompressed.length) {
          bestCodecId = codec.id;
          bestCompressed = compressed;
        }
      }
      const header = encodeChunkHeader(
        bestCodecId,
        chunk.length,
        bestCompressed.length
      );
      const frame = new Uint8Array(CHUNK_HEADER_SIZE + bestCompressed.length);
      frame.set(header, 0);
      frame.set(bestCompressed, CHUNK_HEADER_SIZE);
      compressedChunks.push(frame);
      codecWins.add(bestCodecId);
      const codecName = codecs.find((c) => c.id === bestCodecId)?.name ?? "unknown";
      // Per-chunk diagnostics; ratio includes the 9-byte frame header.
      chunkResults.push({
        chunkIndex: i,
        codecId: bestCodecId,
        codecName,
        originalSize: chunk.length,
        compressedSize: frame.length,
        ratio: chunk.length > 0 ? 1 - frame.length / chunk.length : 0,
        vented: ventCount
      });
    }
    // Fold: concatenate all frames into one contiguous buffer.
    const totalCompressedSize = compressedChunks.reduce(
      (sum, c) => sum + c.length,
      0
    );
    const output = new Uint8Array(totalCompressedSize);
    let offset = 0;
    for (const c of compressedChunks) {
      output.set(c, offset);
      offset += c.length;
    }
    return {
      data: output,
      chunks: chunkResults,
      originalSize: data.length,
      compressedSize: totalCompressedSize,
      ratio: data.length > 0 ? 1 - totalCompressedSize / data.length : 0,
      codecsUsed: codecWins.size,
      bettiNumber,
      timeMs: performance.now() - startTime
    };
  }
  // Inverse of compressChunked: walk the frame stream, validate lengths,
  // then decode each chunk with the codec named in its header.
  decompressChunked(compressed) {
    const chunks = [];
    let totalOriginalSize = 0;
    let readPos = 0;
    // Pass 1: parse and validate every frame header before decoding.
    while (readPos < compressed.length) {
      if (readPos + CHUNK_HEADER_SIZE > compressed.length) {
        throw new Error(`Truncated chunk header at offset ${readPos}`);
      }
      const { codecId, originalSize, compressedSize } = decodeChunkHeader(
        compressed,
        readPos
      );
      readPos += CHUNK_HEADER_SIZE;
      if (readPos + compressedSize > compressed.length) {
        throw new Error(
          `Truncated chunk data at offset ${readPos}: need ${compressedSize}, have ${compressed.length - readPos}`
        );
      }
      chunks.push({
        codecId,
        originalSize,
        compressedData: compressed.subarray(readPos, readPos + compressedSize)
      });
      readPos += compressedSize;
      totalOriginalSize += originalSize;
    }
    // Pass 2: decode chunks in order into a single preallocated buffer.
    const output = new Uint8Array(totalOriginalSize);
    let writePos = 0;
    for (const chunk of chunks) {
      const codec = getCodecById(chunk.codecId);
      const decompressed = codec.decode(
        chunk.compressedData,
        chunk.originalSize
      );
      output.set(decompressed, writePos);
      writePos += chunk.originalSize;
    }
    return output;
  }
  // ════════════════════════════════════════════════════════════════════════
  // Level 1: Stream-Level Two-Level Race
  // ════════════════════════════════════════════════════════════════════════
  /**
   * Two-level fork/race/fold:
   *
   * FORK (stream level):
   *   ├─ Path 0: Per-chunk topological (Level 2)
   *   ├─ Path 1: Global codec A on entire stream
   *   ├─ Path 2: Global codec B on entire stream
   *   └─ ...
   * RACE: Smallest total output wins
   * FOLD: 5-byte strategy header + compressed data
   *
   * On homogeneous text, global brotli wins (cross-chunk dictionary).
   * On mixed content, per-chunk topo wins (adapts per region).
   * The topology decides — not the programmer.
   */
  compressTwoLevel(data) {
    const startTime = performance.now();
    const { codecs } = this.config;
    // Path 0: the per-chunk result, plus the stream header it would need.
    const chunkedResult = this.compressChunked(data);
    const chunkedTotal = STREAM_HEADER_SIZE + chunkedResult.compressedSize;
    // Paths 1..n: each non-raw codec applied to the whole stream.
    const globalCandidates = [];
    for (const codec of codecs) {
      if (codec.id === 0) continue;
      try {
        const compressed = codec.encode(data);
        const totalSize = STREAM_HEADER_SIZE + compressed.length;
        // Only keep candidates that actually shrank the stream.
        if (compressed.length < data.length) {
          globalCandidates.push({
            codecId: codec.id,
            codecName: codec.name,
            compressed,
            totalSize
          });
        }
      } catch {
        // Best-effort race: a codec that throws simply drops out.
      }
    }
    // RACE: strategy 0 (chunked) is the baseline; any smaller global wins.
    let bestStrategy = 0;
    let bestSize = chunkedTotal;
    let bestGlobal = null;
    for (const candidate of globalCandidates) {
      if (candidate.totalSize < bestSize) {
        bestStrategy = candidate.codecId;
        bestSize = candidate.totalSize;
        bestGlobal = candidate;
      }
    }
    const streamHeader = encodeStreamHeader(bestStrategy, data.length);
    // Combined path count across both race levels.
    const outerPaths = globalCandidates.length + 1;
    const innerBeta = Math.max(0, codecs.length - 1);
    const totalBeta = outerPaths - 1 + innerBeta;
    if (bestStrategy === 0) {
      // FOLD: stream header + per-chunk frames.
      const output = new Uint8Array(STREAM_HEADER_SIZE + chunkedResult.data.length);
      output.set(streamHeader, 0);
      output.set(chunkedResult.data, STREAM_HEADER_SIZE);
      return {
        ...chunkedResult,
        data: output,
        compressedSize: output.length,
        ratio: data.length > 0 ? 1 - output.length / data.length : 0,
        bettiNumber: totalBeta,
        strategy: "chunked",
        timeMs: performance.now() - startTime
      };
    } else {
      // FOLD: stream header + single global codec payload.
      // bestGlobal is non-null here: bestStrategy !== 0 only when a
      // global candidate won the race above.
      const output = new Uint8Array(STREAM_HEADER_SIZE + bestGlobal.compressed.length);
      output.set(streamHeader, 0);
      output.set(bestGlobal.compressed, STREAM_HEADER_SIZE);
      return {
        data: output,
        chunks: [],
        originalSize: data.length,
        compressedSize: output.length,
        ratio: data.length > 0 ? 1 - output.length / data.length : 0,
        codecsUsed: 1,
        bettiNumber: totalBeta,
        strategy: `global:${bestGlobal.codecName}`,
        timeMs: performance.now() - startTime
      };
    }
  }
  // Inverse of compressTwoLevel: dispatch on the strategy byte.
  decompressTwoLevel(compressed) {
    if (compressed.length < STREAM_HEADER_SIZE) {
      throw new Error("Truncated stream header");
    }
    const { strategy, originalSize } = decodeStreamHeader(compressed);
    const payload = compressed.subarray(STREAM_HEADER_SIZE);
    if (strategy === 0) {
      return this.decompressChunked(payload);
    } else {
      const codec = getCodecById(strategy);
      return codec.decode(payload, originalSize);
    }
  }
  /** Get the codecs available for racing. */
  getCodecs() {
    return this.config.codecs;
  }
  /** Get the chunk size. */
  getChunkSize() {
    return this.config.chunkSize;
  }
};
1425
+
1426
// Public CommonJS surface: codec classes, codec rosters, the topological
// compressor, codec lookup, and singleton accessors/reset helpers for the
// compression engine and delta-sync optimizer.
exports.BUILTIN_CODECS = BUILTIN_CODECS;
exports.BrotliCodec = BrotliCodec;
exports.CompressionEngine = CompressionEngine;
exports.DeltaCodec = DeltaCodec;
exports.DeltaSyncOptimizer = DeltaSyncOptimizer;
exports.DictionaryCodec = DictionaryCodec;
exports.GzipCodec = GzipCodec;
exports.HuffmanCodec = HuffmanCodec;
exports.LZ77Codec = LZ77Codec;
exports.PURE_JS_CODECS = PURE_JS_CODECS;
exports.RLECodec = RLECodec;
exports.RawCodec = RawCodec;
exports.TopologicalCompressor = TopologicalCompressor;
exports.getCodecById = getCodecById;
exports.getCompressionEngine = getCompressionEngine;
exports.getDeltaSyncOptimizer = getDeltaSyncOptimizer;
exports.resetCompressionEngine = resetCompressionEngine;
exports.resetDeltaSyncOptimizer = resetDeltaSyncOptimizer;
1444
//# sourceMappingURL=index.cjs.map