@lov3kaizen/agentsea-cache 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,794 @@
1
+ import { nanoid } from 'nanoid';
2
+ import EventEmitter from 'eventemitter3';
3
+ import murmurhash from 'murmurhash';
4
+
5
+ // src/streaming/ChunkBuffer.ts
6
+ var DEFAULT_CONFIG = {
7
+ maxChunks: 100,
8
+ maxBytes: 64 * 1024,
9
+ // 64KB
10
+ flushIntervalMs: 1e3,
11
+ preserveOrder: true
12
+ };
13
+ var ChunkBuffer = class {
14
+ chunks = [];
15
+ currentBytes = 0;
16
+ config;
17
+ flushTimer = null;
18
+ onFlush;
19
+ constructor(config, onFlush) {
20
+ this.config = { ...DEFAULT_CONFIG, ...config };
21
+ this.onFlush = onFlush;
22
+ if (this.config.flushIntervalMs > 0) {
23
+ this.startFlushTimer();
24
+ }
25
+ }
26
+ /**
27
+ * Add a chunk to the buffer
28
+ */
29
+ add(chunk) {
30
+ if (this.config.preserveOrder) {
31
+ if (chunk.index === void 0) {
32
+ chunk.index = this.chunks.length;
33
+ }
34
+ }
35
+ this.chunks.push(chunk);
36
+ this.currentBytes += this.estimateChunkSize(chunk);
37
+ if (this.shouldFlush()) {
38
+ this.flush();
39
+ }
40
+ }
41
+ /**
42
+ * Add multiple chunks
43
+ */
44
+ addAll(chunks) {
45
+ for (const chunk of chunks) {
46
+ this.add(chunk);
47
+ }
48
+ }
49
+ /**
50
+ * Flush all buffered chunks
51
+ */
52
+ flush() {
53
+ const flushed = this.chunks;
54
+ if (this.config.preserveOrder) {
55
+ flushed.sort((a, b) => a.index - b.index);
56
+ }
57
+ this.chunks = [];
58
+ this.currentBytes = 0;
59
+ if (this.onFlush && flushed.length > 0) {
60
+ this.onFlush(flushed);
61
+ }
62
+ return flushed;
63
+ }
64
+ /**
65
+ * Get current buffer size
66
+ */
67
+ size() {
68
+ return this.chunks.length;
69
+ }
70
+ /**
71
+ * Get current buffer bytes
72
+ */
73
+ bytes() {
74
+ return this.currentBytes;
75
+ }
76
+ /**
77
+ * Check if buffer is empty
78
+ */
79
+ isEmpty() {
80
+ return this.chunks.length === 0;
81
+ }
82
+ /**
83
+ * Peek at buffered chunks without flushing
84
+ */
85
+ peek() {
86
+ return this.chunks;
87
+ }
88
+ /**
89
+ * Clear the buffer without flushing
90
+ */
91
+ clear() {
92
+ this.chunks = [];
93
+ this.currentBytes = 0;
94
+ }
95
+ /**
96
+ * Stop the flush timer
97
+ */
98
+ stop() {
99
+ if (this.flushTimer) {
100
+ clearInterval(this.flushTimer);
101
+ this.flushTimer = null;
102
+ }
103
+ }
104
+ /**
105
+ * Destroy the buffer
106
+ */
107
+ destroy() {
108
+ this.stop();
109
+ this.clear();
110
+ }
111
+ shouldFlush() {
112
+ return this.chunks.length >= this.config.maxChunks || this.currentBytes >= this.config.maxBytes;
113
+ }
114
+ startFlushTimer() {
115
+ this.flushTimer = setInterval(() => {
116
+ if (!this.isEmpty()) {
117
+ this.flush();
118
+ }
119
+ }, this.config.flushIntervalMs);
120
+ }
121
+ estimateChunkSize(chunk) {
122
+ let size = 0;
123
+ if (chunk.content) {
124
+ size += chunk.content.length * 2;
125
+ }
126
+ if (chunk.toolCall) {
127
+ size += JSON.stringify(chunk.toolCall).length;
128
+ }
129
+ if (chunk.toolResult) {
130
+ size += JSON.stringify(chunk.toolResult).length;
131
+ }
132
+ if (chunk.metadata) {
133
+ size += JSON.stringify(chunk.metadata).length;
134
+ }
135
+ return size + 50;
136
+ }
137
+ };
138
/**
 * Factory helper mirroring `new ChunkBuffer(config, onFlush)`.
 */
function createChunkBuffer(config, onFlush) {
  const buffer = new ChunkBuffer(config, onFlush);
  return buffer;
}
141
/**
 * Generate a 16-character nanoid, optionally namespaced as
 * `${prefix}_${id}`.
 */
function generateId(prefix) {
  const suffix = nanoid(16);
  if (prefix) {
    return `${prefix}_${suffix}`;
  }
  return suffix;
}
145
/**
 * Current wall-clock time in milliseconds since the Unix epoch.
 */
function now() {
  const ts = Date.now();
  return ts;
}
148
+
149
// src/streaming/StreamRecorder.ts
// Recorder defaults. The nested buffer config disables timed auto-flush
// so chunks accumulate until complete()/abort() drains them explicitly.
var DEFAULT_CONFIG2 = {
  buffer: {
    maxChunks: 100,
    maxBytes: 64 * 1024,
    flushIntervalMs: 0,
    // No auto-flush during recording
    preserveOrder: true
  },
  // record tool_call / tool_result chunks
  captureToolCalls: true,
  // attach per-chunk metadata when provided
  captureMetadata: true,
  maxDurationMs: 3e5,
  // 5 minutes
  // hard cap on chunk indices before recordChunk() throws
  maxChunks: 1e4
};
164
var StreamRecorder = class {
  // Effective config: DEFAULT_CONFIG2 merged with constructor overrides
  // (one-level deep merge for the nested `buffer` object).
  config;
  // Chunk accumulator; with flushIntervalMs=0 (the default here) nothing
  // leaves the buffer until complete()/abort() calls flush().
  buffer;
  recording = false;
  startTime = 0;
  model = "";
  messages = [];
  key = "";
  // running character count of recorded text content
  totalChars = 0;
  // monotonically increasing index stamped onto each chunk
  chunkIndex = 0;
  // watchdog timer id for maxDurationMs; null when inactive
  timeoutId = null;
  constructor(config) {
    this.config = {
      ...DEFAULT_CONFIG2,
      ...config,
      buffer: { ...DEFAULT_CONFIG2.buffer, ...config?.buffer }
    };
    this.buffer = new ChunkBuffer(this.config.buffer);
  }
  /**
   * Start recording a new stream
   *
   * Resets all per-recording state and arms a watchdog that aborts the
   * recording once maxDurationMs elapses. A missing key is replaced by
   * a freshly generated id.
   * @throws Error if a recording is already in progress
   */
  start(model, messages, key) {
    if (this.recording) {
      throw new Error("Recording already in progress");
    }
    this.recording = true;
    this.startTime = now();
    this.model = model;
    this.messages = messages;
    this.key = key ?? generateId();
    this.totalChars = 0;
    this.chunkIndex = 0;
    this.buffer.clear();
    if (this.config.maxDurationMs > 0) {
      this.timeoutId = setTimeout(() => {
        if (this.recording) {
          this.abort("Recording exceeded maximum duration");
        }
      }, this.config.maxDurationMs);
    }
  }
  /**
   * Record a text chunk
   *
   * totalChars is only incremented after recordChunk() succeeds, so a
   * throw (not recording / max chunks) leaves the counter untouched.
   */
  recordText(content, metadata) {
    this.recordChunk({
      type: "text",
      content,
      metadata: this.config.captureMetadata ? metadata : void 0,
      timestamp: now(),
      index: this.chunkIndex++
    });
    this.totalChars += content.length;
  }
  /**
   * Record a tool call chunk
   *
   * No-op when captureToolCalls is disabled.
   */
  recordToolCall(id, name, args, metadata) {
    if (!this.config.captureToolCalls) return;
    this.recordChunk({
      type: "tool_call",
      toolCall: { id, name, arguments: args },
      metadata: this.config.captureMetadata ? metadata : void 0,
      timestamp: now(),
      index: this.chunkIndex++
    });
  }
  /**
   * Record a tool result chunk
   *
   * No-op when captureToolCalls is disabled.
   */
  recordToolResult(callId, content, metadata) {
    if (!this.config.captureToolCalls) return;
    this.recordChunk({
      type: "tool_result",
      toolResult: { callId, content },
      metadata: this.config.captureMetadata ? metadata : void 0,
      timestamp: now(),
      index: this.chunkIndex++
    });
  }
  /**
   * Record metadata
   *
   * No-op when captureMetadata is disabled.
   */
  recordMetadata(metadata) {
    if (!this.config.captureMetadata) return;
    this.recordChunk({
      type: "metadata",
      metadata,
      timestamp: now(),
      index: this.chunkIndex++
    });
  }
  /**
   * Record a generic chunk
   *
   * @throws Error when not recording, or when the chunk's index reaches
   *         the configured maxChunks cap
   */
  recordChunk(chunk) {
    if (!this.recording) {
      throw new Error("Not currently recording");
    }
    if (chunk.index >= this.config.maxChunks) {
      throw new Error("Maximum chunks exceeded");
    }
    this.buffer.add(chunk);
  }
  /**
   * Complete the recording and return the recorded stream
   *
   * Drains the buffer into a `complete: true` stream snapshot, then
   * resets all recorder state for reuse.
   * @throws Error if not currently recording
   */
  complete(tokenUsage) {
    if (!this.recording) {
      throw new Error("Not currently recording");
    }
    this.clearTimeout();
    const endTime = now();
    const chunks = this.buffer.flush();
    const stream = {
      id: generateId(),
      key: this.key,
      chunks,
      model: this.model,
      messages: this.messages,
      startTime: this.startTime,
      endTime,
      durationMs: endTime - this.startTime,
      totalChars: this.totalChars,
      tokenUsage,
      complete: true
    };
    this.reset();
    return stream;
  }
  /**
   * Abort the recording
   *
   * Like complete(), but marks the stream `complete: false` and records
   * the abort reason instead of token usage.
   * @throws Error if not currently recording
   */
  abort(reason) {
    if (!this.recording) {
      throw new Error("Not currently recording");
    }
    this.clearTimeout();
    const endTime = now();
    const chunks = this.buffer.flush();
    const stream = {
      id: generateId(),
      key: this.key,
      chunks,
      model: this.model,
      messages: this.messages,
      startTime: this.startTime,
      endTime,
      durationMs: endTime - this.startTime,
      totalChars: this.totalChars,
      complete: false,
      error: reason ?? "Recording aborted"
    };
    this.reset();
    return stream;
  }
  /**
   * Check if currently recording
   */
  isRecording() {
    return this.recording;
  }
  /**
   * Get current chunk count
   */
  getChunkCount() {
    return this.chunkIndex;
  }
  /**
   * Get current recording duration in ms
   *
   * Returns 0 when idle.
   */
  getDuration() {
    if (!this.recording) return 0;
    return now() - this.startTime;
  }
  /**
   * Destroy the recorder
   *
   * Cancels the watchdog, tears down the buffer, and resets state.
   */
  destroy() {
    this.clearTimeout();
    this.buffer.destroy();
    this.reset();
  }
  // Restore all per-recording fields to their idle values.
  reset() {
    this.recording = false;
    this.startTime = 0;
    this.model = "";
    this.messages = [];
    this.key = "";
    this.totalChars = 0;
    this.chunkIndex = 0;
  }
  // Cancel the maxDurationMs watchdog. The bare clearTimeout() call below
  // resolves to the global, not this method (it is not `this.clearTimeout`).
  clearTimeout() {
    if (this.timeoutId) {
      clearTimeout(this.timeoutId);
      this.timeoutId = null;
    }
  }
};
364
/**
 * Factory helper mirroring `new StreamRecorder(config)`.
 */
function createStreamRecorder(config) {
  const recorder = new StreamRecorder(config);
  return recorder;
}
367
+
368
+ // src/streaming/StreamReplayer.ts
369
+ var DEFAULT_CONFIG3 = {
370
+ speedMultiplier: 1,
371
+ minDelayMs: 0,
372
+ maxDelayMs: 100,
373
+ simulateTiming: false,
374
+ onChunk: () => {
375
+ },
376
+ onComplete: () => {
377
+ },
378
+ onError: () => {
379
+ }
380
+ };
381
+ var StreamReplayer = class {
382
+ config;
383
+ abortController = null;
384
+ constructor(config) {
385
+ this.config = { ...DEFAULT_CONFIG3, ...config };
386
+ }
387
+ /**
388
+ * Replay a recorded stream as an async iterable
389
+ */
390
+ async *replay(stream) {
391
+ this.abortController = new AbortController();
392
+ const signal = this.abortController.signal;
393
+ try {
394
+ const chunks = [...stream.chunks].sort((a, b) => a.index - b.index);
395
+ let lastTimestamp = chunks[0]?.timestamp ?? 0;
396
+ for (let i = 0; i < chunks.length; i++) {
397
+ if (signal.aborted) {
398
+ break;
399
+ }
400
+ const chunk = chunks[i];
401
+ if (this.config.simulateTiming && i > 0) {
402
+ const timeDiff = chunk.timestamp - lastTimestamp;
403
+ const delay = Math.min(
404
+ Math.max(
405
+ timeDiff / this.config.speedMultiplier,
406
+ this.config.minDelayMs
407
+ ),
408
+ this.config.maxDelayMs
409
+ );
410
+ if (delay > 0) {
411
+ await this.delay(delay, signal);
412
+ }
413
+ }
414
+ lastTimestamp = chunk.timestamp;
415
+ this.config.onChunk(chunk);
416
+ yield chunk;
417
+ }
418
+ this.config.onComplete(stream);
419
+ } catch (error) {
420
+ if (error.name !== "AbortError") {
421
+ this.config.onError(error);
422
+ throw error;
423
+ }
424
+ } finally {
425
+ this.abortController = null;
426
+ }
427
+ }
428
+ /**
429
+ * Replay as a full async iterable of text content only
430
+ */
431
+ async *replayText(stream) {
432
+ for await (const chunk of this.replay(stream)) {
433
+ if (chunk.type === "text" && chunk.content) {
434
+ yield chunk.content;
435
+ }
436
+ }
437
+ }
438
+ /**
439
+ * Replay synchronously (no timing simulation)
440
+ */
441
+ *replaySync(stream) {
442
+ const chunks = [...stream.chunks].sort((a, b) => a.index - b.index);
443
+ for (const chunk of chunks) {
444
+ this.config.onChunk(chunk);
445
+ yield chunk;
446
+ }
447
+ this.config.onComplete(stream);
448
+ }
449
+ /**
450
+ * Get all chunks at once
451
+ */
452
+ getAllChunks(stream) {
453
+ return [...stream.chunks].sort((a, b) => a.index - b.index);
454
+ }
455
+ /**
456
+ * Get full text content from stream
457
+ */
458
+ getFullText(stream) {
459
+ return stream.chunks.filter((c) => c.type === "text" && c.content).sort((a, b) => a.index - b.index).map((c) => c.content).join("");
460
+ }
461
+ /**
462
+ * Get tool calls from stream
463
+ */
464
+ getToolCalls(stream) {
465
+ return stream.chunks.filter((c) => c.type === "tool_call" && c.toolCall).sort((a, b) => a.index - b.index).map((c) => c.toolCall);
466
+ }
467
+ /**
468
+ * Stop current replay
469
+ */
470
+ stop() {
471
+ if (this.abortController) {
472
+ this.abortController.abort();
473
+ }
474
+ }
475
+ /**
476
+ * Update configuration
477
+ */
478
+ configure(config) {
479
+ this.config = { ...this.config, ...config };
480
+ }
481
+ delay(ms, signal) {
482
+ return new Promise((resolve, reject) => {
483
+ const timeoutId = setTimeout(resolve, ms);
484
+ signal.addEventListener("abort", () => {
485
+ clearTimeout(timeoutId);
486
+ reject(new DOMException("Aborted", "AbortError"));
487
+ });
488
+ });
489
+ }
490
+ };
491
/**
 * Factory helper mirroring `new StreamReplayer(config)`.
 */
function createStreamReplayer(config) {
  const replayer = new StreamReplayer(config);
  return replayer;
}
494
// Cache-key normalization defaults.
// NOTE(review): includeTemperature and includeTools are declared here but
// are not consulted anywhere in the visible key-generation code — confirm
// whether they are dead options or handled elsewhere.
var DEFAULT_KEY_OPTIONS = {
  includeTemperature: false,
  includeTools: false,
  // collapse whitespace in message content before hashing
  normalizeWhitespace: true,
  // hash only the latest user message instead of the full message list
  extractUserMessage: false
};
500
/**
 * Build a deterministic cache key of the form `cache:<model>:<hash>`,
 * where the hash is murmurhash v3 (hex) of the JSON-encoded normalized
 * request.
 */
function generateCacheKey(model, messages, options = {}) {
  const merged = { ...DEFAULT_KEY_OPTIONS, ...options };
  const payload = JSON.stringify(normalizeRequest(model, messages, merged));
  return `cache:${model}:${murmurhash.v3(payload).toString(16)}`;
}
506
/**
 * Normalize a model+messages pair for hashing. Optionally collapses
 * whitespace in each message's content.
 * NOTE(review): when options.extractUserMessage is set, `messages` in
 * the returned object is the latest user message's content (a string),
 * not a message array — confirm callers expect this shape.
 */
function normalizeRequest(model, messages, options = {}) {
  const normalizedMessages = messages.map((m) => {
    const content = options.normalizeWhitespace ? normalizeWhitespace(m.content) : m.content;
    return { role: m.role, content };
  });
  const finalMessages = options.extractUserMessage ? extractUserMessage(normalizedMessages) : normalizedMessages;
  return { model, messages: finalMessages };
}
516
/**
 * Trim the ends and collapse every whitespace run (spaces, tabs,
 * newlines) to a single space. The CRLF normalization step is subsumed
 * by the \s+ collapse but kept for parity with the original pipeline.
 */
function normalizeWhitespace(text) {
  const trimmed = text.trim();
  const unixNewlines = trimmed.replace(/\r\n/g, "\n");
  return unixNewlines.replace(/\s+/g, " ");
}
519
/**
 * Return the content of the most recent 'user' message, or "" when the
 * list contains none.
 */
function extractUserMessage(messages) {
  const lastUser = [...messages].reverse().find((m) => m.role === "user");
  return lastUser ? lastUser.content : "";
}
527
+
528
// src/streaming/StreamCache.ts
// Stream-cache defaults: aborted/incomplete streams and streams shorter
// than 10 characters are not cached; entries live for one hour.
var DEFAULT_CONFIG4 = {
  // passed through to StreamRecorder
  recorder: {},
  // passed through to StreamReplayer
  replayer: {},
  cacheIncomplete: false,
  minLengthToCache: 10,
  streamTtl: 3600
  // 1 hour
};
537
var StreamCache = class extends EventEmitter {
  // backing store with get/set/query/clear
  store;
  // optional embedding provider enabling semantic lookups
  similarity;
  config;
  recorder;
  replayer;
  // Aggregate counters exposed via getStats().
  stats = {
    totalLookups: 0,
    totalHits: 0,
    totalMisses: 0,
    hitRate: 0,
    avgReplayLatencyMs: 0,
    totalStreamsCached: 0,
    totalBytesCached: 0,
    avgStreamDurationMs: 0
  };
  // Raw samples backing the avg* stats.
  replayLatencies = [];
  streamDurations = [];
  constructor(store, config, similarity) {
    super();
    this.store = store;
    this.similarity = similarity;
    this.config = { ...DEFAULT_CONFIG4, ...config };
    this.recorder = new StreamRecorder(this.config.recorder);
    this.replayer = new StreamReplayer(this.config.replayer);
  }
  /**
   * Look up a cached stream: exact key match first, then (when a
   * similarity provider is configured) a semantic search over a user
   * message with a 0.92 minimum score. Emits "hit"/"miss"/"error";
   * never throws — store errors resolve to a miss result.
   */
  async lookup(model, messages) {
    const startTime = performance.now();
    this.stats.totalLookups++;
    const key = this.generateStreamKey(model, messages);
    try {
      const entry = await this.store.get(key);
      if (entry) {
        const stream = this.deserializeStream(entry.response.content);
        if (stream) {
          return this.recordHit(stream, 1, "exact", startTime);
        }
      }
      if (this.similarity) {
        // NOTE(review): this embeds the FIRST user message, while the
        // key helpers elsewhere extract the LAST — confirm intended.
        const userMessage = messages.find((m) => m.role === "user")?.content;
        if (userMessage) {
          const embedding = await this.similarity.embed(userMessage);
          const results = await this.store.query(embedding, {
            topK: 1,
            minSimilarity: 0.92
          });
          if (results.entries.length > 0) {
            const entry2 = results.entries[0];
            const stream = this.deserializeStream(entry2.response.content);
            if (stream && entry2.score >= 0.92) {
              return this.recordHit(stream, entry2.score, "semantic", startTime);
            }
          }
        }
      }
      this.stats.totalMisses++;
      this.updateHitRate();
      const latencyMs = performance.now() - startTime;
      this.emit("miss", key);
      return {
        hit: false,
        source: "miss",
        latencyMs
      };
    } catch (error) {
      this.emit("error", error);
      return {
        hit: false,
        source: "miss",
        latencyMs: performance.now() - startTime
      };
    }
  }
  /**
   * Cache a recorded stream (skipped for incomplete streams unless
   * cacheIncomplete is set, and for streams shorter than
   * minLengthToCache characters). Emits "record" on success, "error"
   * on store failure.
   */
  async cache(stream, embedding) {
    if (!stream.complete && !this.config.cacheIncomplete) {
      return;
    }
    if (stream.totalChars < this.config.minLengthToCache) {
      return;
    }
    const key = this.generateStreamKey(stream.model, stream.messages);
    try {
      const cacheMessages = stream.messages.map((m) => ({
        role: m.role,
        content: m.content
      }));
      await this.store.set(key, {
        id: generateId(),
        key,
        request: {
          messages: cacheMessages,
          model: stream.model
        },
        response: {
          // the full stream (chunks + timing) is stored as JSON text
          content: this.serializeStream(stream),
          model: stream.model,
          finishReason: stream.complete ? "stop" : "error",
          usage: {
            promptTokens: stream.tokenUsage?.prompt ?? 0,
            completionTokens: stream.tokenUsage?.completion ?? 0,
            totalTokens: stream.tokenUsage?.total ?? 0
          }
        },
        embedding,
        metadata: {
          createdAt: now(),
          accessedAt: now(),
          accessCount: 1,
          hitCount: 0,
          ttl: this.config.streamTtl
        }
      });
      this.stats.totalStreamsCached++;
      this.stats.totalBytesCached += this.estimateStreamSize(stream);
      this.streamDurations.push(stream.durationMs);
      this.updateAvgStreamDuration();
      this.emit("record", stream);
    } catch (error) {
      this.emit("error", error);
    }
  }
  /**
   * Wrap a streaming function with caching: on a hit, replay the cached
   * chunks (text content only is yielded as { content }); on a miss,
   * record the live stream while passing chunks through, then cache it.
   */
  async *wrapStream(model, messages, streamFn, options) {
    const lookupResult = await this.lookup(model, messages);
    if (lookupResult.hit && lookupResult.stream) {
      for await (const chunk of this.replayer.replay(lookupResult.stream)) {
        yield { content: chunk.content };
      }
      return;
    }
    const key = this.generateStreamKey(model, messages);
    this.recorder.start(model, messages, key);
    try {
      for await (const chunk of streamFn()) {
        if (chunk.content) {
          this.recorder.recordText(chunk.content);
        }
        yield chunk;
      }
      const stream = this.recorder.complete();
      await this.cache(stream, options?.embedding);
    } catch (error) {
      if (this.recorder.isRecording()) {
        this.recorder.abort(error.message);
      }
      throw error;
    }
  }
  /**
   * Replay a cached stream
   */
  async *replay(stream) {
    for await (const chunk of this.replayer.replay(stream)) {
      yield chunk;
    }
  }
  /**
   * Get a snapshot copy of the cache statistics
   */
  getStats() {
    return { ...this.stats };
  }
  /**
   * Clear the stream cache and reset all statistics
   */
  async clear() {
    await this.store.clear();
    this.stats = {
      totalLookups: 0,
      totalHits: 0,
      totalMisses: 0,
      hitRate: 0,
      avgReplayLatencyMs: 0,
      totalStreamsCached: 0,
      totalBytesCached: 0,
      avgStreamDurationMs: 0
    };
    this.replayLatencies = [];
    this.streamDurations = [];
  }
  /**
   * Destroy the stream cache (recorder, replayer, and listeners)
   */
  destroy() {
    this.recorder.destroy();
    this.replayer.stop();
    this.removeAllListeners();
  }
  // Shared bookkeeping for both exact and semantic hits.
  // FIX: the original pushed replay latencies into replayLatencies but
  // never computed stats.avgReplayLatencyMs (it stayed 0 forever);
  // updateAvgReplayLatency() is now called on every hit.
  recordHit(stream, similarity, source, startTime) {
    this.stats.totalHits++;
    this.updateHitRate();
    const latencyMs = performance.now() - startTime;
    this.replayLatencies.push(latencyMs);
    this.updateAvgReplayLatency();
    const result = {
      hit: true,
      stream,
      similarity,
      source,
      latencyMs
    };
    this.emit("hit", result);
    return result;
  }
  // Derive the exact-match store key from model + role/content pairs.
  generateStreamKey(model, messages) {
    const cacheMessages = messages.map((m) => ({
      role: m.role,
      content: m.content
    }));
    return generateCacheKey(model, cacheMessages);
  }
  serializeStream(stream) {
    return JSON.stringify(stream);
  }
  // Returns null (treated as a miss) on malformed JSON.
  deserializeStream(content) {
    try {
      return JSON.parse(content);
    } catch {
      return null;
    }
  }
  estimateStreamSize(stream) {
    return JSON.stringify(stream).length;
  }
  updateHitRate() {
    if (this.stats.totalLookups > 0) {
      this.stats.hitRate = this.stats.totalHits / this.stats.totalLookups * 100;
    }
  }
  updateAvgReplayLatency() {
    if (this.replayLatencies.length > 0) {
      this.stats.avgReplayLatencyMs = this.replayLatencies.reduce((a, b) => a + b, 0) / this.replayLatencies.length;
    }
  }
  updateAvgStreamDuration() {
    if (this.streamDurations.length > 0) {
      this.stats.avgStreamDurationMs = this.streamDurations.reduce((a, b) => a + b, 0) / this.streamDurations.length;
    }
  }
};
788
/**
 * Factory helper mirroring `new StreamCache(store, config, similarity)`.
 */
function createStreamCache(store, config, similarity) {
  const cache = new StreamCache(store, config, similarity);
  return cache;
}
791
+
792
+ export { ChunkBuffer, StreamCache, StreamRecorder, StreamReplayer, createChunkBuffer, createStreamCache, createStreamRecorder, createStreamReplayer };
793
+ //# sourceMappingURL=index.js.map
794
+ //# sourceMappingURL=index.js.map