tick-cache 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +422 -0
- package/dist/index.cjs +718 -0
- package/dist/index.d.cts +64 -0
- package/dist/index.d.ts +64 -0
- package/dist/index.js +691 -0
- package/package.json +70 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2026 m-thenot

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,422 @@
# tick-cache

A high-performance, memory-efficient TTL cache with LRU eviction, implemented using a timer wheel algorithm.

**Why a timer wheel?** Traditional approaches like min-heaps (O(log n)) or per-entry timers (memory overhead) become inefficient with many entries and diverse TTLs. Timer wheels provide **consistent O(1) performance** for scheduling and expiration, making them ideal for caches with thousands of entries and varying expiration times, without the overhead of managing individual JavaScript timers.

## Features

- ⚡ **O(1) amortized complexity** for get/set/delete operations
- 🎯 **Precise TTL expiration** using a timer wheel
- 💾 **Memory efficient** with Structure-of-Arrays (SoA) layout
- 🔄 **LRU eviction** when cache capacity is reached
- 🎛️ **Flexible expiration modes**: active (on-access) or passive (background timer)
- 📊 **Optional sliding expiration** with `updateTTLOnGet`
- 🔧 **TypeScript native** with full type safety
- 🪝 **Disposal callbacks** for cleanup and tracking
- 🚀 **Zero dependencies** for production use

## Installation

```bash
npm install tick-cache
```

## Quick Start

```typescript
import { TtlWheelCache } from 'tick-cache';

// Create a cache with a 1000-entry limit
const cache = new TtlWheelCache<string, any>({
  maxEntries: 1000,
});

// Store entries with a TTL (in milliseconds)
cache.set("user:123", { name: "Alice" }, 5000); // expires in 5 seconds
cache.set("session:abc", { token: "xyz" }, 60000); // expires in 1 minute

// Retrieve entries
const user = cache.get("user:123"); // { name: "Alice" } or undefined if expired

// Check existence
if (cache.has("session:abc")) {
  console.log("Session is active");
}

// Manual deletion
cache.delete("user:123");

// Always close the cache to stop background timers
cache.close();
```

## API Reference

### Constructor Options

```typescript
interface Options<K, V> {
  maxEntries: number;       // Maximum number of entries (required)
  tickMs?: number;          // Tick interval in ms (default: 50)
  wheelSize?: number;       // Timer wheel size, power of 2 (default: 4096)
  budgetPerTick?: number;   // Max operations per tick (default: 200_000)
  updateTTLOnGet?: boolean; // Reset TTL on access (default: false)
  ttlAutopurge?: boolean;   // Background cleanup (default: true)
  onDispose?: (key: K, value: V, reason: DisposeReason) => void;
}

type DisposeReason = "ttl" | "lru" | "delete" | "clear" | "set";
```

### Methods

#### `set(key: K, value: V, ttlMs: number): void`

Store a key-value pair with a time-to-live in milliseconds.

```typescript
cache.set("key", "value", 5000); // expires in 5 seconds
```

#### `get(key: K): V | undefined`

Retrieve a value by key. Returns `undefined` if the key doesn't exist or has expired.

```typescript
const value = cache.get("key");
```

#### `has(key: K): boolean`

Check if a key exists and is not expired.

```typescript
if (cache.has("key")) {
  // Key exists and is valid
}
```

#### `delete(key: K): boolean`

Manually delete an entry. Returns `true` if the entry existed.

```typescript
cache.delete("key");
```

#### `clear(): void`

Remove all entries from the cache.

```typescript
cache.clear();
```

#### `size(): number`

Get the current number of entries in the cache.

```typescript
console.log(cache.size()); // 42
```

#### `stats(): Stats`

Get cache statistics.

```typescript
const stats = cache.stats();
console.log(stats.size); // Current size
```

#### `close(): void`

Stop the background cleanup timer. Call this when you're done using the cache to prevent memory leaks.

```typescript
cache.close();
```

## Advanced Usage

### TypeScript with Generics

```typescript
interface User {
  id: string;
  name: string;
  email: string;
}

const userCache = new TtlWheelCache<string, User>({
  maxEntries: 10000,
});

userCache.set("user:123", {
  id: "123",
  name: "Alice",
  email: "alice@example.com"
}, 300000); // 5 minutes

// TypeScript knows this is User | undefined
const user = userCache.get("user:123");
```

### Disposal Callback

Track when entries are removed from the cache:

```typescript
const cache = new TtlWheelCache({
  maxEntries: 100,
  onDispose: (key, value, reason) => {
    console.log(`Entry ${key} removed: ${reason}`);
    // reason: "ttl" | "lru" | "delete" | "clear" | "set"

    // Cleanup logic (e.g., close connections, free resources)
    if (value.connection) {
      value.connection.close();
    }
  },
});
```

### Sliding Expiration

Reset TTL on each access:

```typescript
const cache = new TtlWheelCache({
  maxEntries: 1000,
  updateTTLOnGet: true, // Reset TTL on every get()
});

cache.set("session", { user: "alice" }, 30000); // 30 seconds

// Each get() resets the TTL
cache.get("session"); // TTL reset to 30 seconds
setTimeout(() => cache.get("session"), 20000); // TTL reset again
// Entry stays alive as long as it's accessed within 30 seconds
```

### Active Expiration (No Background Timer)

For applications that frequently access the cache, disable background cleanup:

```typescript
const cache = new TtlWheelCache({
  maxEntries: 1000,
  ttlAutopurge: false, // No background timer
});

// Expirations are processed during get/set/has/delete operations
cache.get("key"); // Triggers expiration check
```

This saves CPU cycles by avoiding background timers while still maintaining correctness.

### Custom Tick Configuration

Fine-tune the timer wheel for your use case:

```typescript
const cache = new TtlWheelCache({
  maxEntries: 10000,
  tickMs: 100,        // Check every 100ms (less frequent = lower CPU)
  wheelSize: 8192,    // Larger wheel = more precise, more memory
  budgetPerTick: 500, // Limit operations per tick (prevent CPU spikes)
});
```

## Performance Characteristics

### Time Complexity

| Operation  | Average | Worst Case |
|------------|---------|------------|
| `get()`    | O(1)    | O(1)       |
| `set()`    | O(1)    | O(1)*      |
| `delete()` | O(1)    | O(1)       |
| `has()`    | O(1)    | O(1)       |

\* May trigger LRU eviction when at capacity

### Space Complexity

Memory usage per entry:
- **Metadata**: ~68 bytes (pointers, TTL, timestamps, indexes)
- **Key + Value**: Size of your JavaScript objects
- **Total**: ~68 bytes + sizeof(key) + sizeof(value)

For 10,000 entries with small keys/values: ~1.2 MB

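As a rough cross-check on that estimate (illustrative arithmetic only, not a measurement of the actual implementation):

```typescript
// Back-of-the-envelope estimate for 10,000 entries (illustration only).
const entries = 10_000;
const metadataBytesPerEntry = 68; // figure quoted above

const metadataBytes = entries * metadataBytesPerEntry; // 680,000 bytes ≈ 0.68 MB
console.log((metadataBytes / 1e6).toFixed(2), "MB of metadata");

// The remaining ~0.5 MB of the ~1.2 MB figure is the keys and values themselves,
// assuming short strings and small objects.
```
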
## Timer Wheel Algorithm

This cache uses a **timer wheel with overflow** for efficient TTL management. The timer wheel is a circular buffer that divides time into fixed-size buckets, providing O(1) operations for scheduling and expiration.

### How It Works

**Conceptual Model:**
```
Time advances →

Bucket: [0] [1] [2] [3] [4] ... [4095]
         ↑
   Current tick

Each bucket contains a doubly-linked list of entries expiring in that time slot.
```

**Scheduling an Entry (O(1)):**

When you call `cache.set(key, value, 5000)`:

1. **Calculate target tick**: `targetTick = currentTick + (5000ms / tickMs)`
   - Example: If `tickMs=50`, then `targetTick = currentTick + 100`

2. **Find bucket**: `bucketIndex = targetTick % wheelSize`
   - With `wheelSize=4096`, this wraps around circularly

3. **Link entry**: Add the entry to the doubly-linked list in that bucket
   - O(1) operation: just update `wheelNext` and `wheelPrev` pointers

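Putting the three steps together, here is a minimal sketch of the index math and list insertion. The names (`bucketHead`, `wheelNext`, `wheelPrev`, `expiresTick`, `currentTick`) are hypothetical, modeled on the arrays listed under Architecture below; treat it as an illustration of the technique, not the package's source.

```typescript
// Illustrative sketch only — hypothetical names, not the actual implementation.
const tickMs = 50;
const wheelSize = 4096;
let currentTick = 0;

const capacity = 1000;
const bucketHead = new Int32Array(wheelSize).fill(-1); // -1 = empty bucket
const wheelNext = new Int32Array(capacity).fill(-1);
const wheelPrev = new Int32Array(capacity).fill(-1);
const expiresTick = new Uint32Array(capacity);

// Schedule slot `i` to expire in `ttlMs` milliseconds.
function schedule(i: number, ttlMs: number): void {
  const targetTick = currentTick + Math.max(1, Math.ceil(ttlMs / tickMs)); // step 1
  const bucket = targetTick % wheelSize;                                   // step 2
  expiresTick[i] = targetTick;

  // Step 3: O(1) push onto the bucket's intrusive doubly-linked list.
  const head = bucketHead[bucket];
  wheelNext[i] = head;
  wheelPrev[i] = -1;
  if (head !== -1) wheelPrev[head] = i;
  bucketHead[bucket] = i;
}

schedule(0, 5000); // with tickMs=50: 100 ticks ahead → bucket (currentTick + 100) % 4096
```
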
**Processing Expirations (O(1) amortized):**

Every `tickMs` milliseconds (or on cache access if `ttlAutopurge=false`):

1. **Advance to current bucket**: Calculate which bucket corresponds to `now`

2. **Process bucket**: Walk the linked list of entries in that bucket
   - Each entry with `expireTick <= currentTick` is expired
   - Call `onDispose(key, value, "ttl")`
   - Remove from cache structures

3. **Budget limit**: Stop after `budgetPerTick` operations to prevent CPU spikes
   - Remaining entries are processed in the next tick
   - This prevents one huge expiration wave from blocking the event loop

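The budget in step 3 can be pictured as a counter threaded through the drain loop. The sketch below continues the hypothetical arrays from the scheduling sketch above and is, again, an illustration rather than the library's actual code:

```typescript
// Illustrative sketch: drain due buckets, but never perform more than
// `budgetPerTick` expirations per call. Continues the arrays defined above.
const budgetPerTick = 200_000;

function advance(nowTick: number): void {
  let budget = budgetPerTick;

  while (currentTick <= nowTick) {
    const bucket = currentTick % wheelSize;
    let i = bucketHead[bucket];

    while (i !== -1) {
      if (budget === 0) return;   // budget exhausted; resume here on the next tick
      const next = wheelNext[i];

      if (expiresTick[i] <= currentTick) {
        // Unlink from the bucket list; the real cache would also update the
        // LRU list and key lookup, and call onDispose(key, value, "ttl").
        const prev = wheelPrev[i];
        if (prev !== -1) wheelNext[prev] = next; else bucketHead[bucket] = next;
        if (next !== -1) wheelPrev[next] = prev;
        wheelNext[i] = wheelPrev[i] = -1;
        budget--;
      }
      i = next;
    }
    currentTick++; // bucket drained; move on to the next tick
  }
}
```
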
**Overflow Handling:**

When a TTL exceeds the wheel's time horizon (`wheelSize × tickMs`):

```
Horizon = 4096 buckets × 50ms ≈ 204 seconds

If TTL = 5 minutes (300,000ms):
  → Entry goes to the overflow list (special bucket -2)
  → Entries are periodically rescheduled from overflow onto the wheel as time passes
```

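A sketch of that routing decision, using the default `tickMs`/`wheelSize` values and hypothetical names (not the package's source):

```typescript
// Illustrative sketch: decide whether a new entry fits on the wheel or must
// be parked on the overflow list.
const tickMs = 50;
const wheelSize = 4096;
const horizonTicks = wheelSize;       // the wheel addresses wheelSize ticks ahead
const horizonMs = wheelSize * tickMs; // 4096 × 50ms = 204,800ms ≈ 204s

function placeEntry(ttlMs: number): "wheel" | "overflow" {
  const ticks = Math.ceil(ttlMs / tickMs);
  // Beyond the horizon, `targetTick % wheelSize` would collide with nearer
  // ticks, so the entry waits on an overflow list and is rescheduled later.
  return ticks < horizonTicks ? "wheel" : "overflow";
}

placeEntry(5_000);   // "wheel"    (100 ticks ahead)
placeEntry(300_000); // "overflow" (6,000 ticks ahead, beyond the ~204s horizon)
```
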
### Key Properties

- **O(1) scheduling**: Adding an entry with TTL
- **O(1) amortized expiration**: Processing expired entries per tick
- **Bounded work per tick**: Configurable `budgetPerTick` prevents CPU spikes
- **Predictable latency**: No sudden pauses from processing thousands of expirations
- **Memory efficient**: Reuses bucket slots as the wheel rotates

### Example Timeline

```typescript
const cache = new TtlWheelCache({
  maxEntries: 1000,
  tickMs: 50,      // Process every 50ms
  wheelSize: 4096, // 4096 buckets
});

// t=0: Set entries
cache.set("a", 1, 100); // Expires at t=100ms → bucket 2
cache.set("b", 2, 250); // Expires at t=250ms → bucket 5
cache.set("c", 3, 150); // Expires at t=150ms → bucket 3

// t=50ms:  Tick 1 - Process bucket 1 (empty)
// t=100ms: Tick 2 - Process bucket 2 (expire "a")
// t=150ms: Tick 3 - Process bucket 3 (expire "c")
// t=250ms: Tick 5 - Process bucket 5 (expire "b")
```

## Architecture

### Structure-of-Arrays (SoA)

Data is stored in separate typed arrays for optimal CPU cache usage:

```typescript
// Instead of Array<{key, value, ttl, next, prev}>
// We use:
keyRef: Array<K>
valRef: Array<V>
expiresTick: Uint32Array
wheelNext: Int32Array
wheelPrev: Int32Array
lruNext: Int32Array
lruPrev: Int32Array
```

This layout improves:
- **Cache locality**: Related data is contiguous in memory
- **Memory efficiency**: Typed arrays are compact
- **Iteration speed**: SIMD-friendly for modern CPUs

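For intuition, a stripped-down version of how such a slot layout might be allocated looks like this (field names taken from the list above; the real class carries additional bookkeeping, so treat this purely as an illustration):

```typescript
// Minimal sketch of a Structure-of-Arrays slot layout (illustration only).
// Each entry occupies one index `i` across all of the parallel arrays.
class SoASlots<K, V> {
  keyRef: (K | undefined)[];
  valRef: (V | undefined)[];
  expiresTick: Uint32Array;
  wheelNext: Int32Array;
  wheelPrev: Int32Array;
  lruNext: Int32Array;
  lruPrev: Int32Array;

  constructor(capacity: number) {
    this.keyRef = new Array(capacity);
    this.valRef = new Array(capacity);
    this.expiresTick = new Uint32Array(capacity);
    this.wheelNext = new Int32Array(capacity).fill(-1);
    this.wheelPrev = new Int32Array(capacity).fill(-1);
    this.lruNext = new Int32Array(capacity).fill(-1);
    this.lruPrev = new Int32Array(capacity).fill(-1);
  }
}

// The five typed-array fields cost 20 bytes per slot; the key/value references
// and other per-entry bookkeeping make up the rest of the ~68-byte metadata
// estimate quoted earlier.
const slots = new SoASlots<string, object>(10_000);
```
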
## FAQ

### Is this a hierarchical timer wheel?

No, this implementation uses a **single-level timer wheel with overflow**. Hierarchical timer wheels (with multiple levels like seconds/minutes/hours) could handle very long TTLs more efficiently but add complexity.

The current design:
- Single wheel with configurable size (default 4096 buckets)
- Overflow list for TTLs beyond the wheel horizon
- Works well for TTLs up to ~49 days

A hierarchical implementation could be added in future development if there's demand for more efficient handling of very long TTLs (months/years).

### When should I use `ttlAutopurge: false`?

When your application frequently accesses the cache (e.g., every few milliseconds), active expiration is more efficient than background timers. Expirations will be processed during cache operations.

### How does LRU eviction work with TTL?

When the cache reaches `maxEntries`, the least recently used entry is evicted to make room for new entries. TTL and LRU work together: entries can be removed either by expiring (TTL) or by being evicted (LRU).

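For example, eviction can be observed through `onDispose`; which entry is evicted assumes, as an LRU policy implies, that `get()` refreshes an entry's recency:

```typescript
import { TtlWheelCache } from 'tick-cache';

// Tiny cache so the eviction is easy to observe.
const cache = new TtlWheelCache<string, number>({
  maxEntries: 2,
  onDispose: (key, _value, reason) => console.log(`${key} removed (${reason})`),
});

cache.set("a", 1, 60_000);
cache.set("b", 2, 60_000);

cache.get("a");            // touch "a" so "b" becomes the least recently used
cache.set("c", 3, 60_000); // at capacity → expected to log: b removed (lru)

cache.close();
```
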
### What's the maximum TTL?

The practical maximum is ~2^32 milliseconds (~49 days) due to the Uint32Array used for tick storage. For longer TTLs, consider using a different caching strategy.

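The 49-day figure is just a unit conversion:

```typescript
// 2^32 milliseconds expressed in days (illustrative arithmetic only).
const maxMs = 2 ** 32;                      // 4,294,967,296 ms
const days = maxMs / (1000 * 60 * 60 * 24); // ms per day
console.log(days.toFixed(1));               // ≈ 49.7 days
```
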
### How do I monitor cache performance?

Use the `onDispose` callback to track evictions:

```typescript
// Cover every DisposeReason so metrics[reason]++ always has a counter to bump.
const metrics = { ttl: 0, lru: 0, delete: 0, clear: 0, set: 0 };

const cache = new TtlWheelCache({
  maxEntries: 1000,
  onDispose: (key, value, reason) => {
    metrics[reason]++;
  },
});
```

## Development

```bash
# Install dependencies
npm install

# Run tests
npm test

# Run benchmarks
npm run bench

# Build
npm run build
```

## License

MIT