@object-ui/data-objectstack 0.3.1 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@object-ui/data-objectstack",
-   "version": "0.3.1",
+   "version": "0.5.0",
    "description": "ObjectStack Data Adapter for Object UI",
    "license": "MIT",
    "type": "module",
@@ -20,9 +20,9 @@
      "README.md"
    ],
    "dependencies": {
-     "@objectstack/client": "^0.3.3",
-     "@object-ui/types": "0.3.1",
-     "@object-ui/core": "0.3.1"
+     "@objectstack/client": "^0.9.1",
+     "@object-ui/core": "0.5.0",
+     "@object-ui/types": "0.5.0"
    },
    "devDependencies": {
      "tsup": "^8.0.1",
@@ -0,0 +1,426 @@
+ /**
+  * ObjectUI
+  * Copyright (c) 2024-present ObjectStack Inc.
+  *
+  * This source code is licensed under the MIT license found in the
+  * LICENSE file in the root directory of this source tree.
+  */
+
+ import { describe, it, expect, beforeEach, vi } from 'vitest';
+ import { MetadataCache } from './MetadataCache';
+
+ describe('MetadataCache', () => {
+   let cache: MetadataCache;
+
+   beforeEach(() => {
+     cache = new MetadataCache({ maxSize: 3, ttl: 1000 }); // Small size and TTL for testing
+   });
+
+   describe('Cache Hit/Miss Scenarios', () => {
+     it('should return cached value on cache hit', async () => {
+       const fetcher = vi.fn(async () => ({ name: 'users', fields: [] }));
+
+       // First call - cache miss
+       const result1 = await cache.get('users', fetcher);
+       expect(result1).toEqual({ name: 'users', fields: [] });
+       expect(fetcher).toHaveBeenCalledTimes(1);
+
+       // Second call - cache hit
+       const result2 = await cache.get('users', fetcher);
+       expect(result2).toEqual({ name: 'users', fields: [] });
+       expect(fetcher).toHaveBeenCalledTimes(1); // Not called again
+
+       const stats = cache.getStats();
+       expect(stats.hits).toBe(1);
+       expect(stats.misses).toBe(1);
+       expect(stats.hitRate).toBe(0.5);
+     });
+
+     it('should call fetcher on cache miss', async () => {
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       const result = await cache.get('test-key', fetcher);
+
+       expect(result).toEqual({ data: 'test' });
+       expect(fetcher).toHaveBeenCalledTimes(1);
+
+       const stats = cache.getStats();
+       expect(stats.misses).toBe(1);
+       expect(stats.hits).toBe(0);
+     });
+
+     it('should handle multiple different keys', async () => {
+       const fetcher1 = vi.fn(async () => ({ type: 'users' }));
+       const fetcher2 = vi.fn(async () => ({ type: 'posts' }));
+
+       const result1 = await cache.get('users', fetcher1);
+       const result2 = await cache.get('posts', fetcher2);
+
+       expect(result1).toEqual({ type: 'users' });
+       expect(result2).toEqual({ type: 'posts' });
+       expect(fetcher1).toHaveBeenCalledTimes(1);
+       expect(fetcher2).toHaveBeenCalledTimes(1);
+
+       // Get again - both should be cached
+       await cache.get('users', fetcher1);
+       await cache.get('posts', fetcher2);
+
+       expect(fetcher1).toHaveBeenCalledTimes(1);
+       expect(fetcher2).toHaveBeenCalledTimes(1);
+     });
+   });
+
+   describe('TTL Expiration', () => {
+     it('should expire entries after TTL', async () => {
+       const cache = new MetadataCache({ maxSize: 10, ttl: 100 }); // 100ms TTL
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       // First fetch
+       await cache.get('test', fetcher);
+       expect(fetcher).toHaveBeenCalledTimes(1);
+
+       // Immediate second fetch - should be cached
+       await cache.get('test', fetcher);
+       expect(fetcher).toHaveBeenCalledTimes(1);
+
+       // Wait for TTL to expire
+       await new Promise(resolve => setTimeout(resolve, 150));
+
+       // Should fetch again after expiration
+       await cache.get('test', fetcher);
+       expect(fetcher).toHaveBeenCalledTimes(2);
+     });
+
+     it('should update timestamp on cache hit', async () => {
+       const cache = new MetadataCache({ maxSize: 10, ttl: 200 });
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       await cache.get('test', fetcher);
+
+       // Access again after 100ms
+       await new Promise(resolve => setTimeout(resolve, 100));
+       await cache.get('test', fetcher);
+
+       // Access again after another 110ms (total 210ms from first, ensuring TTL has passed)
+       await new Promise(resolve => setTimeout(resolve, 110));
+
+       // Expiration is based on the original creation timestamp, not lastAccessed,
+       // so after ~210ms total (> 200ms TTL) the entry has expired even though it
+       // was hit at the 100ms mark
+       await cache.get('test', fetcher);
+
+       // Should have been called twice - initial + after expiration
+       expect(fetcher).toHaveBeenCalledTimes(2);
+     });
+
+     it('should not return expired entries via has()', async () => {
+       const cache = new MetadataCache({ maxSize: 10, ttl: 100 });
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       await cache.get('test', fetcher);
+
+       expect(cache.has('test')).toBe(true);
+
+       // Wait for expiration
+       await new Promise(resolve => setTimeout(resolve, 150));
+
+       expect(cache.has('test')).toBe(false);
+     });
+   });
+
+   describe('LRU Eviction', () => {
+     it('should evict least recently used entry when maxSize is reached', async () => {
+       // Cache size is 3
+       const fetcher1 = vi.fn(async () => ({ id: 1 }));
+       const fetcher2 = vi.fn(async () => ({ id: 2 }));
+       const fetcher3 = vi.fn(async () => ({ id: 3 }));
+       const fetcher4 = vi.fn(async () => ({ id: 4 }));
+
+       // Fill cache
+       await cache.get('key1', fetcher1);
+       await cache.get('key2', fetcher2);
+       await cache.get('key3', fetcher3);
+
+       expect(cache.getStats().size).toBe(3);
+
+       // Add fourth item - should evict key1 (least recently used)
+       await cache.get('key4', fetcher4);
+
+       expect(cache.getStats().size).toBe(3);
+       expect(cache.getStats().evictions).toBe(1);
+
+       // key1 should not be in cache anymore
+       await cache.get('key1', fetcher1);
+       expect(fetcher1).toHaveBeenCalledTimes(2); // Called again
+
+       // After re-adding key1, key2 should have been evicted
+       // So cache now has: key3, key4, key1
+
+       // key3, key4 should still be cached
+       await cache.get('key3', fetcher3);
+       await cache.get('key4', fetcher4);
+       expect(fetcher3).toHaveBeenCalledTimes(1);
+       expect(fetcher4).toHaveBeenCalledTimes(1);
+
+       // key2 should have been evicted when key1 was re-added
+       await cache.get('key2', fetcher2);
+       expect(fetcher2).toHaveBeenCalledTimes(2);
+     });
+
+     it('should update LRU order on access', async () => {
+       const fetcher1 = vi.fn(async () => ({ id: 1 }));
+       const fetcher2 = vi.fn(async () => ({ id: 2 }));
+       const fetcher3 = vi.fn(async () => ({ id: 3 }));
+       const fetcher4 = vi.fn(async () => ({ id: 4 }));
+
+       // Fill cache: key1, key2, key3
+       await cache.get('key1', fetcher1);
+       await cache.get('key2', fetcher2);
+       await cache.get('key3', fetcher3);
+
+       // Access key1 again - should move it to the end (most recently used)
+       // Cache order: key2, key3, key1
+       await cache.get('key1', fetcher1);
+
+       // Add key4 - should evict key2 (now the LRU)
+       // Cache order: key3, key1, key4
+       await cache.get('key4', fetcher4);
+
+       // Verify key2 was evicted
+       await cache.get('key2', fetcher2);
+       expect(fetcher2).toHaveBeenCalledTimes(2);
+
+       // After re-adding key2, key3 should have been evicted
+       // Cache order: key1, key4, key2
+
+       // key1, key4 should still be cached
+       await cache.get('key1', fetcher1);
+       await cache.get('key4', fetcher4);
+       expect(fetcher1).toHaveBeenCalledTimes(1); // Only called once initially (re-access was a cache hit)
+       expect(fetcher4).toHaveBeenCalledTimes(1);
+
+       // key3 should have been evicted when key2 was re-added
+       await cache.get('key3', fetcher3);
+       expect(fetcher3).toHaveBeenCalledTimes(2);
+     });
+   });
+
+   describe('Concurrent Access', () => {
+     it('should handle concurrent requests for the same key', async () => {
+       let fetchCount = 0;
+       const fetcher = vi.fn(async () => {
+         fetchCount++;
+         await new Promise(resolve => setTimeout(resolve, 50));
+         return { data: 'test', fetchCount };
+       });
+
+       // Make multiple concurrent requests
+       const results = await Promise.all([
+         cache.get('test', fetcher),
+         cache.get('test', fetcher),
+         cache.get('test', fetcher),
+       ]);
+
+       // All should return the same data
+       // Note: Due to async nature, the first call will fetch and others might also fetch
+       // if they check before the first one completes. This is acceptable behavior.
+       // But at least one should be cached if they complete after the first one.
+       expect(results[0]).toBeDefined();
+       expect(results[1]).toBeDefined();
+       expect(results[2]).toBeDefined();
+     });
+
+     it('should handle concurrent requests for different keys', async () => {
+       const fetcher1 = vi.fn(async () => {
+         await new Promise(resolve => setTimeout(resolve, 30));
+         return { id: 1 };
+       });
+       const fetcher2 = vi.fn(async () => {
+         await new Promise(resolve => setTimeout(resolve, 30));
+         return { id: 2 };
+       });
+       const fetcher3 = vi.fn(async () => {
+         await new Promise(resolve => setTimeout(resolve, 30));
+         return { id: 3 };
+       });
+
+       const results = await Promise.all([
+         cache.get('key1', fetcher1),
+         cache.get('key2', fetcher2),
+         cache.get('key3', fetcher3),
+       ]);
+
+       expect(results[0]).toEqual({ id: 1 });
+       expect(results[1]).toEqual({ id: 2 });
+       expect(results[2]).toEqual({ id: 3 });
+       expect(fetcher1).toHaveBeenCalledTimes(1);
+       expect(fetcher2).toHaveBeenCalledTimes(1);
+       expect(fetcher3).toHaveBeenCalledTimes(1);
+     });
+   });
+
+   describe('Cache Management', () => {
+     it('should invalidate specific key', async () => {
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       await cache.get('test', fetcher);
+       expect(fetcher).toHaveBeenCalledTimes(1);
+
+       cache.invalidate('test');
+
+       await cache.get('test', fetcher);
+       expect(fetcher).toHaveBeenCalledTimes(2);
+     });
+
+     it('should invalidate all keys', async () => {
+       const fetcher1 = vi.fn(async () => ({ id: 1 }));
+       const fetcher2 = vi.fn(async () => ({ id: 2 }));
+
+       await cache.get('key1', fetcher1);
+       await cache.get('key2', fetcher2);
+
+       cache.invalidate(); // No key = invalidate all
+
+       await cache.get('key1', fetcher1);
+       await cache.get('key2', fetcher2);
+
+       expect(fetcher1).toHaveBeenCalledTimes(2);
+       expect(fetcher2).toHaveBeenCalledTimes(2);
+     });
+
+     it('should clear cache and reset stats', async () => {
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       await cache.get('key1', fetcher);
+       await cache.get('key2', fetcher);
+       await cache.get('key1', fetcher); // Hit
+
+       const statsBefore = cache.getStats();
+       expect(statsBefore.size).toBe(2);
+       expect(statsBefore.hits).toBe(1);
+       expect(statsBefore.misses).toBe(2);
+
+       cache.clear();
+
+       const statsAfter = cache.getStats();
+       expect(statsAfter.size).toBe(0);
+       expect(statsAfter.hits).toBe(0);
+       expect(statsAfter.misses).toBe(0);
+       expect(statsAfter.evictions).toBe(0);
+     });
+   });
+
+   describe('Statistics', () => {
+     it('should track cache statistics correctly', async () => {
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       // Initial stats
+       let stats = cache.getStats();
+       expect(stats.size).toBe(0);
+       expect(stats.maxSize).toBe(3);
+       expect(stats.hits).toBe(0);
+       expect(stats.misses).toBe(0);
+       expect(stats.evictions).toBe(0);
+       expect(stats.hitRate).toBe(0);
+
+       // First access - miss
+       await cache.get('key1', fetcher);
+       stats = cache.getStats();
+       expect(stats.size).toBe(1);
+       expect(stats.misses).toBe(1);
+       expect(stats.hitRate).toBe(0);
+
+       // Second access - hit
+       await cache.get('key1', fetcher);
+       stats = cache.getStats();
+       expect(stats.hits).toBe(1);
+       expect(stats.hitRate).toBe(0.5);
+
+       // Third access - hit
+       await cache.get('key1', fetcher);
+       stats = cache.getStats();
+       expect(stats.hits).toBe(2);
+       expect(stats.hitRate).toBeCloseTo(0.667, 2);
+     });
+
+     it('should track evictions', async () => {
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       // Fill cache to max
+       await cache.get('key1', fetcher);
+       await cache.get('key2', fetcher);
+       await cache.get('key3', fetcher);
+
+       let stats = cache.getStats();
+       expect(stats.evictions).toBe(0);
+
+       // Trigger eviction
+       await cache.get('key4', fetcher);
+
+       stats = cache.getStats();
+       expect(stats.evictions).toBe(1);
+
+       // Trigger more evictions
+       await cache.get('key5', fetcher);
+       await cache.get('key6', fetcher);
+
+       stats = cache.getStats();
+       expect(stats.evictions).toBe(3);
+     });
+   });
+
+   describe('Edge Cases', () => {
+     it('should handle fetcher that throws error', async () => {
+       const fetcher = vi.fn(async () => {
+         throw new Error('Fetch failed');
+       });
+
+       await expect(cache.get('test', fetcher)).rejects.toThrow('Fetch failed');
+
+       // Should not cache the error
+       const stats = cache.getStats();
+       expect(stats.size).toBe(0);
+     });
+
+     it('should handle null/undefined values', async () => {
+       const fetcher1 = vi.fn(async () => null);
+       const fetcher2 = vi.fn(async () => undefined);
+
+       const result1 = await cache.get('null-key', fetcher1);
+       const result2 = await cache.get('undefined-key', fetcher2);
+
+       expect(result1).toBeNull();
+       expect(result2).toBeUndefined();
+
+       // Should still cache these values
+       await cache.get('null-key', fetcher1);
+       await cache.get('undefined-key', fetcher2);
+
+       expect(fetcher1).toHaveBeenCalledTimes(1);
+       expect(fetcher2).toHaveBeenCalledTimes(1);
+     });
+
+     it('should handle empty string key', async () => {
+       const fetcher = vi.fn(async () => ({ data: 'test' }));
+
+       const result = await cache.get('', fetcher);
+       expect(result).toEqual({ data: 'test' });
+
+       await cache.get('', fetcher);
+       expect(fetcher).toHaveBeenCalledTimes(1);
+     });
+
+     it('should handle very large cache', async () => {
+       const largeCache = new MetadataCache({ maxSize: 10000, ttl: 60000 });
+
+       // Add many entries
+       for (let i = 0; i < 1000; i++) {
+         await largeCache.get(`key-${i}`, async () => ({ id: i }));
+       }
+
+       const stats = largeCache.getStats();
+       expect(stats.size).toBe(1000);
+       expect(stats.evictions).toBe(0);
+     });
+   });
+ });
@@ -0,0 +1,229 @@
+ /**
+  * ObjectUI
+  * Copyright (c) 2024-present ObjectStack Inc.
+  *
+  * This source code is licensed under the MIT license found in the
+  * LICENSE file in the root directory of this source tree.
+  */
+
+ /**
+  * Represents a cached schema entry with metadata
+  */
+ interface CachedSchema {
+   data: unknown;
+   timestamp: number;
+   accessCount: number;
+   lastAccessed: number;
+ }
+
+ /**
+  * Cache statistics for monitoring
+  */
+ export interface CacheStats {
+   size: number;
+   maxSize: number;
+   hits: number;
+   misses: number;
+   evictions: number;
+   hitRate: number;
+ }
+
+ /**
+  * MetadataCache - LRU cache with TTL expiration for schema metadata
+  *
+  * Features:
+  * - LRU (Least Recently Used) eviction policy
+  * - TTL (Time To Live) based expiration (fixed from creation, not sliding)
+  * - Memory limit controls
+  * - Async-safe operations
+  * - Performance statistics tracking
+  *
+  * Note: Concurrent requests for the same uncached key may result in multiple
+  * fetcher calls. For production use cases requiring request deduplication,
+  * consider wrapping the cache with a promise-based deduplication layer.
+  *
+  * @example
+  * ```typescript
+  * const cache = new MetadataCache({ maxSize: 100, ttl: 300000 });
+  *
+  * const schema = await cache.get('users', async () => {
+  *   return await fetchSchemaFromServer('users');
+  * });
+  *
+  * console.log(cache.getStats());
+  * ```
+  */
+ export class MetadataCache {
+   private cache: Map<string, CachedSchema>;
+   private maxSize: number;
+   private ttl: number;
+   private stats: {
+     hits: number;
+     misses: number;
+     evictions: number;
+   };
+
+   /**
+    * Create a new MetadataCache instance
+    *
+    * @param options - Configuration options
+    * @param options.maxSize - Maximum number of entries (default: 100)
+    * @param options.ttl - Time to live in milliseconds (default: 5 minutes)
+    */
+   constructor(options: { maxSize?: number; ttl?: number } = {}) {
+     this.cache = new Map();
+     this.maxSize = options.maxSize || 100;
+     this.ttl = options.ttl || 5 * 60 * 1000; // 5 minutes default
+     this.stats = {
+       hits: 0,
+       misses: 0,
+       evictions: 0,
+     };
+   }
+
+   /**
+    * Get a value from cache or fetch it using the provided fetcher function
+    *
+    * @param key - Cache key
+    * @param fetcher - Async function to fetch data if not in cache
+    * @returns Promise resolving to the cached or fetched data
+    */
+   async get<T = unknown>(key: string, fetcher: () => Promise<T>): Promise<T> {
+     const now = Date.now();
+     const cached = this.cache.get(key);
+
+     // Check if cache entry exists and is not expired
+     if (cached) {
+       const age = now - cached.timestamp;
+
+       if (age < this.ttl) {
+         // Cache hit - update access metadata
+         cached.accessCount++;
+         cached.lastAccessed = now;
+         this.stats.hits++;
+
+         // Move to end (most recently used) by re-inserting
+         this.cache.delete(key);
+         this.cache.set(key, cached);
+
+         return cached.data as T;
+       } else {
+         // Expired entry - remove it
+         this.cache.delete(key);
+       }
+     }
+
+     // Cache miss - fetch the data
+     this.stats.misses++;
+     const data = await fetcher();
+
+     // Store in cache
+     this.set(key, data);
+
+     return data;
+   }
+
+   /**
+    * Set a value in the cache
+    *
+    * @param key - Cache key
+    * @param data - Data to cache
+    */
+   private set(key: string, data: unknown): void {
+     const now = Date.now();
+
+     // Check if we need to evict entries
+     if (this.cache.size >= this.maxSize && !this.cache.has(key)) {
+       this.evictLRU();
+     }
+
+     // Add or update the entry
+     this.cache.set(key, {
+       data,
+       timestamp: now,
+       accessCount: 1,
+       lastAccessed: now,
+     });
+   }
+
+   /**
+    * Evict the least recently used entry
+    */
+   private evictLRU(): void {
+     // The first entry in the Map is the least recently used
+     // (since we move accessed items to the end)
+     const firstKey = this.cache.keys().next().value;
+
+     if (firstKey !== undefined) {
+       this.cache.delete(firstKey);
+       this.stats.evictions++;
+     }
+   }
+
+   /**
+    * Invalidate a specific cache entry or all entries
+    *
+    * @param key - Optional key to invalidate. If omitted, invalidates all entries
+    */
+   invalidate(key?: string): void {
+     if (key) {
+       this.cache.delete(key);
+     } else {
+       this.cache.clear();
+     }
+   }
+
+   /**
+    * Clear all cache entries and reset statistics
+    */
+   clear(): void {
+     this.cache.clear();
+     this.stats = {
+       hits: 0,
+       misses: 0,
+       evictions: 0,
+     };
+   }
+
+   /**
+    * Get cache statistics
+    *
+    * @returns Cache statistics including hit rate
+    */
+   getStats(): CacheStats {
+     const total = this.stats.hits + this.stats.misses;
+     const hitRate = total > 0 ? this.stats.hits / total : 0;
+
+     return {
+       size: this.cache.size,
+       maxSize: this.maxSize,
+       hits: this.stats.hits,
+       misses: this.stats.misses,
+       evictions: this.stats.evictions,
+       hitRate: hitRate,
+     };
+   }
+
+   /**
+    * Check if a key exists in the cache (and is not expired)
+    *
+    * @param key - Cache key to check
+    * @returns true if the key exists and is not expired
+    */
+   has(key: string): boolean {
+     const cached = this.cache.get(key);
+
+     if (!cached) {
+       return false;
+     }
+
+     const age = Date.now() - cached.timestamp;
+
+     if (age >= this.ttl) {
+       this.cache.delete(key);
+       return false;
+     }
+
+     return true;
+   }
+ }
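
The class comment above notes that concurrent requests for the same uncached key may each invoke the fetcher, and suggests wrapping the cache with a promise-based deduplication layer. Below is a minimal sketch of such a wrapper; it is not part of the package, and the `DedupedMetadataCache` name and the relative import path are illustrative assumptions.

```typescript
// Illustrative sketch, not shipped in @object-ui/data-objectstack.
// Assumes MetadataCache is importable; adjust the path to the real export.
import { MetadataCache } from './MetadataCache';

export class DedupedMetadataCache {
  // One in-flight promise per key so concurrent callers share a single fetch.
  private inflight = new Map<string, Promise<unknown>>();

  constructor(private cache: MetadataCache) {}

  async get<T = unknown>(key: string, fetcher: () => Promise<T>): Promise<T> {
    const pending = this.inflight.get(key);
    if (pending) {
      // Another caller is already resolving this key; reuse its promise.
      return pending as Promise<T>;
    }

    const request = this.cache.get(key, fetcher).finally(() => {
      // Drop the in-flight marker once the underlying get() settles,
      // so later calls hit the cache (or refetch after TTL) normally.
      this.inflight.delete(key);
    });

    this.inflight.set(key, request);
    return request;
  }
}

// Usage: three concurrent reads of an uncached key invoke the fetcher once.
// const deduped = new DedupedMetadataCache(new MetadataCache({ maxSize: 100, ttl: 300000 }));
// await Promise.all([
//   deduped.get('users', fetchUsersSchema),
//   deduped.get('users', fetchUsersSchema),
//   deduped.get('users', fetchUsersSchema),
// ]);
```

The wrapper keeps one pending promise per key and clears it as soon as the underlying `get()` settles, so subsequent calls fall through to MetadataCache and its TTL/LRU behavior unchanged.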