@boredland/node-ts-cache 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,27 @@
1
+ name: test and release
2
+
3
+ on:
4
+ push:
5
+ branches: [main]
6
+ pull_request:
7
+ branches: [main]
8
+
9
+ jobs:
10
+ test:
11
+ runs-on: ubuntu-latest
12
+ strategy:
13
+ matrix:
14
+ node-version: [22.x, 24.x]
15
+ steps:
16
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # tag=v5.5.0
17
+ with:
18
+ token: ${{ github.token }}
19
+ fetch-depth: 0
20
+ - name: setup node ${{ matrix.node-version }}
21
+ uses: actions/setup-node@dda4788290998366da86b6a4f497909644397bb2 # tag=v6.6.0
22
+ with:
23
+ node-version: ${{ matrix.node-version }}
24
+ cache: "npm"
25
+ - run: npm i --prefer-offline
26
+ - run: npm run build
27
+ - run: npm run test
package/LICENSE.md ADDED
@@ -0,0 +1,23 @@
1
+ # MIT License
2
+
3
+ Copyright (c) 2022 ioki GmbH
4
+ Copyright (c) 2022 Jonas Strassel
5
+ Copyright (c) 2017 Himmet Avsar
6
+
7
+ Permission is hereby granted, free of charge, to any person obtaining a copy
8
+ of this software and associated documentation files (the "Software"), to deal
9
+ in the Software without restriction, including without limitation the rights
10
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11
+ copies of the Software, and to permit persons to whom the Software is
12
+ furnished to do so, subject to the following conditions:
13
+
14
+ The above copyright notice and this permission notice shall be included in all
15
+ copies or substantial portions of the Software.
16
+
17
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,184 @@
1
+ # @boredland/node-ts-cache
2
+
3
+ [![CI](https://github.com/boredland/node-ts-cache/actions/workflows/ci.yml/badge.svg)](https://github.com/boredland/node-ts-cache/actions/workflows/ci.yml)
4
+ [![The MIT License](https://img.shields.io/npm/l/node-ts-cache.svg)](http://opensource.org/licenses/MIT)
5
+
6
+ Simple and extensible caching module supporting function wrappers and multiple caching strategies.
7
+
8
+ ## Install
9
+
10
+ ```bash
11
+ npm i @boredland/node-ts-cache
12
+ ```
13
+
14
+ ## Usage
15
+
16
+ ### Wrap your function calls with `withCacheFactory`
17
+
18
+ `withCacheFactory` returns a caching wrapper for arbitrary async functions. By default, the cache key is derived from the wrapped function's name, an optional prefix, and a hash of the call parameters.
19
+
20
+ ```ts
21
+ import {
22
+ withCacheFactory,
23
+ CacheContainer,
24
+ LRUStorage,
25
+ } from "@boredland/node-ts-cache";
26
+
27
+ const doThingsCache = new CacheContainer(new LRUStorage());
28
+
29
+ const someFn = (input: { a: string; b: number }) => Promise.resolve("result");
30
+
31
+ const wrappedFn = withCacheFactory(doThingsCache)(someFn, {
32
+ prefix: "my-function",
33
+ strategy: "eager", // or "lazy" or "swr"
34
+ });
35
+
36
+ const result = await wrappedFn({ a: "lala", b: 123 });
37
+ ```
38
+
39
+ ### Caching Strategies
40
+
41
+ The `withCache` wrapper supports three different caching strategies:
42
+
43
+ #### Eager (Default)
44
+
45
+ ```ts
46
+ const wrappedFn = withCacheFactory(cacheContainer)(someFn, {
47
+ strategy: "eager",
48
+ });
49
+ ```
50
+
51
+ - Cache is populated before returning the result
52
+ - Expired items are removed and the function is called again
53
+
54
+ #### Lazy
55
+
56
+ ```ts
57
+ const wrappedFn = withCacheFactory(cacheContainer)(someFn, {
58
+ strategy: "lazy",
59
+ });
60
+ ```
61
+
62
+ - Cache is populated in the background after returning the result
63
+ - Expired items are invalidated on touch (when accessed)
64
+
65
+ #### Stale-While-Revalidate (SWR)
66
+
67
+ ```ts
68
+ const wrappedFn = withCacheFactory(cacheContainer)(someFn, {
69
+ strategy: "swr",
70
+ });
71
+ ```
72
+
73
+ - Returns expired cache immediately while revalidating in the background
74
+ - Revalidation is queued with configurable concurrency
75
+ - Perfect for scenarios where stale data is acceptable
76
+ - Only one concurrent revalidation is enqueued per cache-key
77
+
78
+ ### Advanced Options
79
+
80
+ ```ts
81
+ const wrappedFn = withCacheFactory(cacheContainer)(someFn, {
82
+ prefix: "my-function", // Cache key prefix
83
+ strategy: "swr", // Caching strategy
84
+ ttl: 60000, // Time-to-live in milliseconds (null = forever)
85
+ revalidationConcurrency: 5, // Max concurrent background revalidations (default: 1)
86
+ calculateKey: (params) => {
87
+ // Custom key calculation
88
+ return `${params[0]}-${params[1]}`;
89
+ },
90
+ shouldStore: (result) => {
91
+ // Conditional caching
92
+ return Boolean(result && result.success);
93
+ },
94
+ });
95
+ ```
96
+
97
+ ### Using `getItem` and `setItem` directly
98
+
99
+ ```ts
100
+ import { CacheContainer, LRUStorage } from "@boredland/node-ts-cache";
101
+
102
+ const myCache = new CacheContainer(new LRUStorage({ max: 1000 }));
103
+
104
+ class MyService {
105
+ public async getUsers(): Promise<string[]> {
106
+ const cachedUsers = await myCache.getItem<string[]>("users");
107
+
108
+ if (cachedUsers) {
109
+ return cachedUsers.content;
110
+ }
111
+
112
+ const newUsers = ["Max", "User"];
113
+
114
+ await myCache.setItem("users", newUsers, { ttl: 60000 });
115
+
116
+ return newUsers;
117
+ }
118
+ }
119
+ ```
120
+
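Note that `setItem` defaults to `isLazy: true`, so `getItem` still returns an expired entry once before dropping it. If stale data is not acceptable when using the container directly, either pass `{ isLazy: false }` to `setItem` or check the `meta.expired` flag yourself, as in this sketch (reusing the `myCache` container from above):

```ts
const cachedUsers = await myCache.getItem<string[]>("users");

// Treat expired entries as a miss instead of serving them once.
const freshUsers =
  cachedUsers && !cachedUsers.meta.expired ? cachedUsers.content : undefined;
```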
121
+ ### LRUStorage
122
+
123
+ The `LRUStorage` adapter uses an in-memory LRU (Least Recently Used) cache with configurable capacity:
124
+
125
+ ```ts
126
+ import { CacheContainer, LRUStorage } from "@boredland/node-ts-cache";
127
+
128
+ // Create an LRU cache with max 10,000 items
129
+ const storage = new LRUStorage({ max: 10000 });
130
+
131
+ const container = new CacheContainer(storage);
132
+ ```
133
+
134
+ **Features:**
135
+
136
+ - In-memory caching with automatic eviction
137
+ - LRU eviction policy when capacity is reached
138
+ - Configurable maximum size
139
+ - Perfect for testing and single-process applications
140
+
141
+ ## Logging
142
+
143
+ This project uses Node's built-in `util.debuglog` to log useful information.
144
+ Set the environment variable **NODE_DEBUG=node-ts-cache** to enable logging.
145
+
146
+ ### Running Tests
147
+
148
+ ```bash
149
+ npm test
150
+ ```
151
+
152
+ ### Example Test Usage
153
+
154
+ For a complete example of how to test with `LRUStorage`, see the [comprehensive test suite](./src/lruStorage.test.ts).
155
+
156
+ ## LICENSE
157
+
158
+ Distributed under the MIT License. See LICENSE.md for more information.
159
+
160
+ ## Development & Testing
161
+
162
+ ### Setup
163
+
164
+ ```bash
165
+ cd node-ts-cache
166
+ npm i
167
+ npm run build
168
+ npm test
169
+ npm run lint
170
+ ```
171
+
172
+ ### Commands
173
+
174
+ - `npm test` - Run test suite with Vitest
175
+ - `npm run lint` - Run TypeScript and Biome linting
176
+ - `npm run build` - Build the project
177
+
178
+ ## Credits
179
+
180
+ As this is a fork of the original [node-ts-cache](https://github.com/havsar/node-ts-cache), all credit goes to the upstream project by [havsar](https://github.com/havsar).
181
+
182
+ Structural changes have been made by [boredland](https://github.com/boredland) to better fit their use case.
183
+
184
+ Project Link: [https://github.com/boredland/node-ts-cache](https://github.com/boredland/node-ts-cache)
package/biome.json ADDED
@@ -0,0 +1,50 @@
1
+ {
2
+ "$schema": "https://biomejs.dev/schemas/2.3.4/schema.json",
3
+ "vcs": {
4
+ "enabled": true,
5
+ "clientKind": "git",
6
+ "useIgnoreFile": true
7
+ },
8
+ "files": {
9
+ "ignoreUnknown": false
10
+ },
11
+ "formatter": {
12
+ "enabled": true,
13
+ "indentStyle": "tab"
14
+ },
15
+ "linter": {
16
+ "enabled": true,
17
+ "rules": {
18
+ "recommended": true,
19
+ "correctness": {
20
+ "useImportExtensions": "error",
21
+ "noUnsafeOptionalChaining": "error"
22
+ },
23
+ "suspicious": {
24
+ "noConsole": "error"
25
+ },
26
+ "nursery": {
27
+ "noFloatingPromises": "error",
28
+ "noMisusedPromises": "error"
29
+ },
30
+ "style": {
31
+ "noUnusedTemplateLiteral": "error",
32
+ "noProcessEnv": "error",
33
+ "useThrowNewError": "error"
34
+ }
35
+ }
36
+ },
37
+ "javascript": {
38
+ "formatter": {
39
+ "quoteStyle": "double"
40
+ }
41
+ },
42
+ "assist": {
43
+ "enabled": true,
44
+ "actions": {
45
+ "source": {
46
+ "organizeImports": "on"
47
+ }
48
+ }
49
+ }
50
+ }
package/package.json ADDED
@@ -0,0 +1,40 @@
1
+ {
2
+ "name": "@boredland/node-ts-cache",
3
+ "version": "1.0.0",
4
+ "repository": {
5
+ "type": "git",
6
+ "url": "git+https://github.com/boredland/node-ts-cache.git"
7
+ },
8
+ "license": "ISC",
9
+ "private": false,
10
+ "type": "module",
11
+ "scripts": {
12
+ "build": "tsdown",
13
+ "test": "vitest",
14
+ "lint": "tsc && biome check --fix --no-errors-on-unmatched",
15
+ "prepublishOnly": "npm run build && npm run test -- --run"
16
+ },
17
+ "devDependencies": {
18
+ "lru-cache": "^11.2.2",
19
+ "node-object-hash": "^3.1.1",
20
+ "p-queue": "^9.0.0",
21
+ "vitest": "^4.0.8",
22
+ "@biomejs/biome": "2.3.4",
23
+ "@types/node": "^24.10.0",
24
+ "tsdown": "^0.16.0",
25
+ "typescript": "^5.9.3"
26
+ },
27
+ "main": "./dist/index.mjs",
28
+ "types": "./dist/index.d.mts",
29
+ "module": "./dist/index.mjs",
30
+ "publishConfig": {
31
+ "access": "public"
32
+ },
33
+ "exports": {
34
+ ".": "./dist/index.mjs",
35
+ "./*": "./*"
36
+ },
37
+ "engines": {
38
+ "node": ">=22.0.0"
39
+ }
40
+ }
package/renovate.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "automerge": true,
3
+ "enabled": true,
4
+ "extends": [
5
+ ":pinDependencies",
6
+ ":dependencyDashboard",
7
+ ":semanticPrefixFixDepsChoreOthers",
8
+ ":semanticCommits",
9
+ "group:monorepos",
10
+ "group:recommended",
11
+ "workarounds:all",
12
+ "helpers:pinGitHubActionDigests"
13
+ ],
14
+ "labels": ["dependency"],
15
+ "lockFileMaintenance": {
16
+ "automerge": true,
17
+ "enabled": true
18
+ },
19
+ "major": {
20
+ "dependencyDashboardApproval": true
21
+ },
22
+ "npm": {
23
+ "enabled": true
24
+ },
25
+ "github-actions": {
26
+ "enabled": true
27
+ },
28
+ "packageRules": [
29
+ {
30
+ "automerge": true,
31
+ "automergeStrategy": "squash",
32
+ "matchUpdateTypes": ["digest", "minor", "patch", "pin"]
33
+ }
34
+ ],
35
+ "prConcurrentLimit": 1,
36
+ "semanticCommits": "enabled"
37
+ }
package/src/cacheContainer.ts ADDED
@@ -0,0 +1,90 @@
1
+ import { debug } from "./debug.ts";
2
+ import type { Storage } from "./storage.ts";
3
+
4
+ export type CachedItem<T = unknown> = {
5
+ content: T;
6
+ meta: {
7
+ createdAt: number;
8
+ ttl: number | null;
9
+ isLazy: boolean;
10
+ };
11
+ };
12
+
13
+ export type CachingOptions = {
14
+ /** Number of milliseconds to expire the cachte item - defaults to forever */
15
+ ttl: number | null;
16
+ /** (Default: true) If true, expired cache entries will be deleted on touch and returned anyway. If false, entries will be deleted after the given ttl. */
17
+ isLazy: boolean;
18
+ /** (Default: JSON.stringify combination of className, methodName and call args) */
19
+ calculateKey: (data: {
20
+ /** The class name for the method being decorated */
21
+ className: string;
22
+ /** The method name being decorated */
23
+ methodName: string;
24
+ /** The arguments passed to the method when called */
25
+ args: unknown[];
26
+ }) => string;
27
+ };
28
+
29
+ export class CacheContainer {
30
+ constructor(private storage: Storage) {}
31
+
32
+ public async getItem<T>(
33
+ key: string,
34
+ ): Promise<
35
+ { content: T; meta: { expired: boolean; createdAt: number } } | undefined
36
+ > {
37
+ const item = await this.storage.getItem(key);
38
+
39
+ if (!item) return;
40
+
41
+ const result = {
42
+ content: item.content as T,
43
+ meta: {
44
+ createdAt: item.meta.createdAt,
45
+ expired: this.isItemExpired(item),
46
+ },
47
+ };
48
+
49
+ if (result.meta.expired) await this.unsetKey(key);
50
+
51
+ if (result.meta.expired && !item.meta.isLazy) return undefined;
52
+
53
+ return result;
54
+ }
55
+
56
+ public async setItem(
57
+ key: string,
58
+ content: unknown,
59
+ options?: Partial<CachingOptions>,
60
+ ): Promise<void> {
61
+ const finalOptions = {
62
+ ttl: null,
63
+ isLazy: true,
64
+ ...options,
65
+ };
66
+
67
+ const meta: CachedItem<typeof content>["meta"] = {
68
+ createdAt: Date.now(),
69
+ isLazy: finalOptions.isLazy,
70
+ ttl: finalOptions.ttl,
71
+ };
72
+
73
+ await this.storage.setItem(key, { meta, content });
74
+ }
75
+
76
+ public async clear(): Promise<void> {
77
+ await this.storage.clear();
78
+
79
+ debug("Cleared cache");
80
+ }
81
+
82
+ private isItemExpired(item: CachedItem): boolean {
83
+ if (item.meta.ttl === null) return false;
84
+ return Date.now() > item.meta.createdAt + item.meta.ttl;
85
+ }
86
+
87
+ public async unsetKey(key: string): Promise<void> {
88
+ await this.storage.removeItem(key);
89
+ }
90
+ }
package/src/debug.ts ADDED
@@ -0,0 +1,3 @@
1
+ import { debug as _debug } from "node:util";
2
+
3
+ export const debug = _debug("node-ts-cache");
package/src/hash.ts ADDED
@@ -0,0 +1,15 @@
1
+ import { hasher } from "node-object-hash";
2
+
3
+ /**
4
+ * Hash function for creating consistent, deterministic hashes from JavaScript objects.
5
+ * Used primarily for generating cache keys and ensuring object equality checks.
6
+ *
7
+ * @param obj - Any JavaScript object to hash
8
+ * @returns String hash of the object
9
+ */
10
+ const { hash } = hasher({
11
+ sort: true, // Ensures consistent order for object properties
12
+ coerce: true, // Converts values to a consistent type (e.g., numbers to strings)
13
+ });
14
+
15
+ export default hash;
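As a quick illustration (this snippet is not shipped with the package), `sort: true` makes the hash independent of property order, so call sites that build parameter objects in different orders still produce the same cache key:

```ts
import hash from "./hash.ts";

// Both calls hash to the same value because properties are sorted before hashing.
const sameKey = hash({ a: 1, b: "two" }) === hash({ b: "two", a: 1 }); // true
```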
package/src/index.ts ADDED
@@ -0,0 +1,4 @@
1
+ export * from "./cacheContainer.ts";
2
+ export * from "./lruStorage.ts";
3
+ export * from "./storage.ts";
4
+ export * from "./withCache.ts";
package/src/lruStorage.test.ts ADDED
@@ -0,0 +1,613 @@
1
+ import { beforeEach, describe, expect, it, vi } from "vitest";
2
+ import { CacheContainer } from "./cacheContainer.ts";
3
+ import { LRUStorage } from "./lruStorage.ts";
4
+ import { withCacheFactory } from "./withCache.ts";
5
+
6
+ describe("LRUStorage with withCache", () => {
7
+ let storage: LRUStorage;
8
+ let container: CacheContainer;
9
+ let withCache: ReturnType<typeof withCacheFactory>;
10
+
11
+ beforeEach(() => {
12
+ storage = new LRUStorage({ max: 10 });
13
+ container = new CacheContainer(storage);
14
+ withCache = withCacheFactory(container);
15
+ });
16
+
17
+ describe("Basic caching functionality", () => {
18
+ it("should cache function results", async () => {
19
+ const mockFn = vi.fn(async (x: number) => x * 2);
20
+ const cachedFn = withCache(mockFn);
21
+
22
+ const result1 = await cachedFn(5);
23
+ const result2 = await cachedFn(5);
24
+
25
+ expect(result1).toBe(10);
26
+ expect(result2).toBe(10);
27
+ expect(mockFn).toHaveResolvedTimes(1);
28
+ });
29
+
30
+ it("should differentiate between different parameters", async () => {
31
+ const mockFn = vi.fn(async (x: number) => x * 2);
32
+ const cachedFn = withCache(mockFn);
33
+
34
+ const result1 = await cachedFn(5);
35
+ const result2 = await cachedFn(10);
36
+
37
+ expect(result1).toBe(10);
38
+ expect(result2).toBe(20);
39
+ expect(mockFn).toHaveResolvedTimes(2);
40
+ });
41
+
42
+ it("should use custom calculateKey function", async () => {
43
+ const mockFn = vi.fn(async (x: number, y: number) => x + y);
44
+ const cachedFn = withCache(mockFn, {
45
+ calculateKey: ([x, y]) => `${x}-${y}`,
46
+ });
47
+
48
+ const result1 = await cachedFn(1, 2);
49
+ const result2 = await cachedFn(1, 2);
50
+
51
+ expect(result1).toBe(3);
52
+ expect(result2).toBe(3);
53
+ expect(mockFn).toHaveResolvedTimes(1);
54
+ });
55
+
56
+ it("should support prefix option", async () => {
57
+ const mockFn = vi.fn(async (x: number) => x * 2);
58
+ const cachedFn1 = withCache(mockFn, { prefix: "version1" });
59
+ const cachedFn2 = withCache(mockFn, { prefix: "version2" });
60
+
61
+ const result1 = await cachedFn1(5);
62
+ const result2 = await cachedFn2(5);
63
+
64
+ expect(result1).toBe(10);
65
+ expect(result2).toBe(10);
66
+ expect(mockFn).toHaveResolvedTimes(2);
67
+ });
68
+
69
+ it("should work with multiple parameters", async () => {
70
+ const mockFn = vi.fn(async (a: number, b: string) => `${a}-${b}`);
71
+ const cachedFn = withCache(mockFn);
72
+
73
+ const result1 = await cachedFn(1, "a");
74
+ const result2 = await cachedFn(1, "a");
75
+
76
+ expect(result1).toBe("1-a");
77
+ expect(result2).toBe("1-a");
78
+ expect(mockFn).toHaveResolvedTimes(1);
79
+ });
80
+ });
81
+
82
+ describe("TTL and expiration", () => {
83
+ it("should cache items with TTL in eager mode", async () => {
84
+ const mockFn = vi.fn(async (x: number) => x * 2);
85
+ const cachedFn = withCache(mockFn, { ttl: 100, strategy: "eager" });
86
+
87
+ const result1 = await cachedFn(5);
88
+ expect(result1).toBe(10);
89
+ expect(mockFn).toHaveResolvedTimes(1);
90
+
91
+ // Item should still be cached before expiration
92
+ const result2 = await cachedFn(5);
93
+ expect(result2).toBe(10);
94
+ expect(mockFn).toHaveResolvedTimes(1);
95
+
96
+ // Wait for expiration
97
+ await new Promise((resolve) => setTimeout(resolve, 150));
98
+
99
+ // After expiration, item is removed and function is called again
100
+ const result3 = await cachedFn(5);
101
+ expect(result3).toBe(10);
102
+ expect(mockFn).toHaveResolvedTimes(2);
103
+ });
104
+
105
+ it("should use lazy strategy to invalidate cache on touch", async () => {
106
+ const mockFn = vi.fn(async (x: number) => x * 2);
107
+ const cachedFn = withCache(mockFn, { ttl: 100, strategy: "lazy" });
108
+
109
+ const result1 = await cachedFn(5);
110
+ expect(result1).toBe(10);
111
+ expect(mockFn).toHaveResolvedTimes(1);
112
+
113
+ // Item should be cached for one subsequent call
114
+ const result2 = await cachedFn(5);
115
+ expect(result2).toBe(10);
116
+ expect(mockFn).toHaveResolvedTimes(1);
117
+
118
+ // Wait for expiration
119
+ await new Promise((resolve) => setTimeout(resolve, 150));
120
+
121
+ // After expiration, the cached item is stale, but should be returned
122
+ const result3 = await cachedFn(5);
123
+ expect(result3).toBe(10);
124
+ expect(mockFn).toHaveResolvedTimes(1);
125
+
126
+ // Next call should have invalidated the cache and call the function again
127
+ const result4 = await cachedFn(5);
128
+ expect(result4).toBe(10);
129
+ expect(mockFn).toHaveResolvedTimes(2);
130
+ });
131
+
132
+ it("should use swr strategy to return stale cache and revalidate in background", async () => {
133
+ const mockFn = vi.fn(async (x: number) => {
134
+ await new Promise((resolve) => setTimeout(resolve, 50));
135
+ return x * 2;
136
+ });
137
+ const cachedFn = withCache(mockFn, { ttl: 100, strategy: "swr" });
138
+
139
+ const result1 = await cachedFn(5);
140
+ expect(result1).toBe(10);
141
+ expect(mockFn).toHaveResolvedTimes(1);
142
+
143
+ // Item should be cached
144
+ const result2 = await cachedFn(5);
145
+ expect(result2).toBe(10);
146
+ expect(mockFn).toHaveResolvedTimes(1);
147
+
148
+ // Wait for expiration
149
+ await new Promise((resolve) => setTimeout(resolve, 150));
150
+
151
+ // With swr strategy, expired items are returned immediately
152
+ const result3 = await cachedFn(5);
153
+ expect(result3).toBe(10);
154
+ expect(mockFn).toHaveResolvedTimes(1);
155
+ // The stale cache is returned, but revalidation is queued in background
156
+ // Wait a bit for background revalidation to complete
157
+ await new Promise((resolve) => setTimeout(resolve, 50));
158
+ expect(mockFn).toHaveResolvedTimes(2);
159
+ });
160
+ });
161
+
162
+ describe("shouldStore option", () => {
163
+ it("should not cache when shouldStore returns false", async () => {
164
+ const mockFn = vi.fn(async (x: number) => x * 2);
165
+ const cachedFn = withCache(mockFn, {
166
+ shouldStore: (result: unknown) => (result as number) > 15,
167
+ });
168
+
169
+ const result1 = await cachedFn(5);
170
+ expect(result1).toBe(10);
171
+
172
+ const result2 = await cachedFn(5);
173
+ expect(result2).toBe(10);
174
+ expect(mockFn).toHaveResolvedTimes(2);
175
+ });
176
+
177
+ it("should cache when shouldStore returns true", async () => {
178
+ const mockFn = vi.fn(async (x: number) => x * 2);
179
+ const cachedFn = withCache(mockFn, {
180
+ shouldStore: (result: unknown) => (result as number) > 5,
181
+ });
182
+
183
+ const result1 = await cachedFn(5);
184
+ expect(result1).toBe(10);
185
+
186
+ const result2 = await cachedFn(5);
187
+ expect(result2).toBe(10);
188
+ expect(mockFn).toHaveResolvedTimes(1);
189
+ });
190
+
191
+ it("should evaluate shouldStore on complex results", async () => {
192
+ const mockFn = vi.fn(async (x: number) => ({
193
+ value: x * 2,
194
+ success: x > 0,
195
+ }));
196
+ const cachedFn = withCache(mockFn, {
197
+ shouldStore: (result: unknown) =>
198
+ (result as { success: boolean }).success,
199
+ });
200
+
201
+ const result1 = await cachedFn(5);
202
+ expect(result1).toEqual({ value: 10, success: true });
203
+
204
+ const result2 = await cachedFn(5);
205
+ expect(result2).toEqual({ value: 10, success: true });
206
+ expect(mockFn).toHaveResolvedTimes(1);
207
+ });
208
+ });
209
+
210
+ describe("LRU eviction", () => {
211
+ it("should evict least recently used items when max capacity is reached", async () => {
212
+ // Create storage with small capacity
213
+ const smallStorage = new LRUStorage({ max: 3 });
214
+ const smallContainer = new CacheContainer(smallStorage);
215
+ const smallWithCache = withCacheFactory(smallContainer);
216
+
217
+ const mockFn = vi.fn(async (x: number) => x * 2);
218
+ const cachedFn = smallWithCache(mockFn);
219
+
220
+ // Fill cache to capacity
221
+ await cachedFn(1); // key1
222
+ await cachedFn(2); // key2
223
+ await cachedFn(3); // key3
224
+
225
+ expect(mockFn).toHaveResolvedTimes(3);
226
+
227
+ // Access all three to verify they're cached
228
+ await cachedFn(1);
229
+ await cachedFn(2);
230
+ await cachedFn(3);
231
+ expect(mockFn).toHaveResolvedTimes(3);
232
+
233
+ // Add a new item, which should evict the least recently used
234
+ await cachedFn(4); // This should evict key1 (least recently used)
235
+
236
+ expect(mockFn).toHaveResolvedTimes(4);
237
+
238
+ // key1 should be evicted and function should be called again
239
+ await cachedFn(1);
240
+ expect(mockFn).toHaveResolvedTimes(5);
241
+ });
242
+
243
+ it("should keep recently accessed items in cache", async () => {
244
+ const smallStorage = new LRUStorage({ max: 2 });
245
+ const smallContainer = new CacheContainer(smallStorage);
246
+ const smallWithCache = withCacheFactory(smallContainer);
247
+
248
+ const mockFn = vi.fn(async (x: number) => x * 2);
249
+ const cachedFn = smallWithCache(mockFn);
250
+
251
+ await cachedFn(1);
252
+ await cachedFn(2);
253
+ expect(mockFn).toHaveResolvedTimes(2);
254
+
255
+ // Access 1 again to make it recently used
256
+ await cachedFn(1);
257
+ expect(mockFn).toHaveResolvedTimes(2);
258
+
259
+ // Add 3, should evict 2 (not 1)
260
+ await cachedFn(3);
261
+ expect(mockFn).toHaveResolvedTimes(3);
262
+
263
+ // 1 should still be cached
264
+ await cachedFn(1);
265
+ expect(mockFn).toHaveResolvedTimes(3);
266
+
267
+ // 2 should have been evicted
268
+ await cachedFn(2);
269
+ expect(mockFn).toHaveResolvedTimes(4);
270
+ });
271
+ });
272
+
273
+ describe("Clear and removal", () => {
274
+ it("should clear all cache entries", async () => {
275
+ const mockFn = vi.fn(async (x: number) => x * 2);
276
+ const cachedFn = withCache(mockFn);
277
+
278
+ await cachedFn(1);
279
+ await cachedFn(2);
280
+ expect(mockFn).toHaveResolvedTimes(2);
281
+
282
+ // Clear cache
283
+ await container.clear();
284
+
285
+ // Should call function again
286
+ await cachedFn(1);
287
+ await cachedFn(2);
288
+ expect(mockFn).toHaveResolvedTimes(4);
289
+ });
290
+
291
+ it("should remove individual cache entries", async () => {
292
+ const mockFn = vi.fn(async (x: number) => x * 2);
293
+ // Create a named function so we know the function name
294
+ const namedAsyncFn = Object.defineProperty(mockFn, "name", {
295
+ value: "testFn",
296
+ }) as unknown as (x: number) => Promise<number>;
297
+
298
+ const cachedFn = withCache(namedAsyncFn, {
299
+ calculateKey: ([x]) => `custom-key-${x}`,
300
+ });
301
+
302
+ await cachedFn(5);
303
+ expect(mockFn).toHaveResolvedTimes(1);
304
+
305
+ // Manually remove the key - must include the function name and prefix
306
+ // The key format is: ${operation.name}:${prefix}:${calculateKeyResult}
307
+ const fullKey = "testFn:default:custom-key-5";
308
+ await container.unsetKey(fullKey);
309
+
310
+ // Should call function again
311
+ await cachedFn(5);
312
+ expect(mockFn).toHaveResolvedTimes(2);
313
+ });
314
+ });
315
+
316
+ describe("Complex scenarios", () => {
317
+ it("should handle multiple concurrent calls", async () => {
318
+ const mockFn = vi.fn(async (x: number) => {
319
+ await new Promise((resolve) => setTimeout(resolve, 50));
320
+ return x * 2;
321
+ });
322
+ const cachedFn = withCache(mockFn);
323
+
324
+ const results = await Promise.all([
325
+ cachedFn(5),
326
+ cachedFn(5),
327
+ cachedFn(5),
328
+ ]);
329
+
330
+ expect(results).toEqual([10, 10, 10]);
331
+ expect(mockFn).toHaveResolvedTimes(3);
332
+ });
333
+
334
+ it("should handle different data types", async () => {
335
+ const mockFn = vi.fn(async (data: Record<string, string>) => ({
336
+ received: data,
337
+ timestamp: Date.now(),
338
+ }));
339
+ const cachedFn = withCache(mockFn);
340
+
341
+ const result1 = await cachedFn({ name: "test" });
342
+ const result2 = await cachedFn({ name: "test" });
343
+
344
+ // Verify both results are the same object (cached)
345
+ expect(result1).toBe(result2);
346
+ expect(mockFn).toHaveResolvedTimes(1);
347
+ });
348
+
349
+ it("should combine multiple cache options", async () => {
350
+ const mockFn = vi.fn(async (x: number) => x * 2);
351
+ const cachedFn = withCache(mockFn, {
352
+ prefix: "combined",
353
+ ttl: 100,
354
+ strategy: "lazy",
355
+ shouldStore: (result: unknown) => (result as number) > 5,
356
+ });
357
+
358
+ const result1 = await cachedFn(5);
359
+ expect(result1).toBe(10);
360
+ expect(mockFn).toHaveResolvedTimes(1);
361
+
362
+ const result2 = await cachedFn(5);
363
+ expect(result2).toBe(10);
364
+ expect(mockFn).toHaveResolvedTimes(1);
365
+
366
+ await new Promise((resolve) => setTimeout(resolve, 150));
367
+
368
+ const result3 = await cachedFn(5);
369
+ expect(result3).toBe(10);
370
+ expect(mockFn).toHaveResolvedTimes(1);
371
+ });
372
+ });
373
+
374
+ describe("Storage operations", () => {
375
+ it("should correctly store and retrieve items", async () => {
376
+ const key = "test-key";
377
+ const content = { value: "test", number: 123 };
378
+
379
+ await storage.setItem(key, {
380
+ content,
381
+ meta: {
382
+ createdAt: Date.now(),
383
+ ttl: null,
384
+ isLazy: true,
385
+ },
386
+ });
387
+
388
+ const retrieved = await storage.getItem(key);
389
+ expect(retrieved?.content).toEqual(content);
390
+ });
391
+
392
+ it("should remove items from storage", async () => {
393
+ const key = "test-key";
394
+ const content = { value: "test" };
395
+
396
+ await storage.setItem(key, {
397
+ content,
398
+ meta: {
399
+ createdAt: Date.now(),
400
+ ttl: null,
401
+ isLazy: true,
402
+ },
403
+ });
404
+
405
+ await storage.removeItem(key);
406
+
407
+ const retrieved = await storage.getItem(key);
408
+ expect(retrieved).toBeUndefined();
409
+ });
410
+
411
+ it("should clear all storage items", async () => {
412
+ const key1 = "key1";
413
+ const key2 = "key2";
414
+
415
+ await storage.setItem(key1, {
416
+ content: "value1",
417
+ meta: {
418
+ createdAt: Date.now(),
419
+ ttl: null,
420
+ isLazy: true,
421
+ },
422
+ });
423
+
424
+ await storage.setItem(key2, {
425
+ content: "value2",
426
+ meta: {
427
+ createdAt: Date.now(),
428
+ ttl: null,
429
+ isLazy: true,
430
+ },
431
+ });
432
+
433
+ await storage.clear();
434
+
435
+ const result1 = await storage.getItem(key1);
436
+ const result2 = await storage.getItem(key2);
437
+
438
+ expect(result1).toBeUndefined();
439
+ expect(result2).toBeUndefined();
440
+ });
441
+ });
442
+
443
+ describe("Error handling", () => {
444
+ it("should not cache errors", async () => {
445
+ const mockFn = vi.fn(async (x: number) => {
446
+ if (x < 0) throw new Error("Negative number");
447
+ return x * 2;
448
+ });
449
+ const cachedFn = withCache(mockFn);
450
+
451
+ const result = await cachedFn(5);
452
+ expect(result).toBe(10);
453
+
454
+ await expect(cachedFn(-5)).rejects.toThrow("Negative number");
455
+ expect(mockFn).toHaveBeenCalledTimes(2);
456
+ });
457
+ });
458
+
459
+ describe("Concurrency limiting in revalidation queue", () => {
460
+ it("should limit concurrent revalidations to revalidationConcurrency in swr mode", async () => {
461
+ const mockFn = vi.fn(async (x: number) => {
462
+ await new Promise((resolve) => setTimeout(resolve, 50));
463
+ return x * 2;
464
+ });
465
+
466
+ const cachedFn = withCache(mockFn, {
467
+ ttl: 100,
468
+ strategy: "swr",
469
+ revalidationConcurrency: 1,
470
+ });
471
+
472
+ // Prime the cache
473
+ await cachedFn(5);
474
+ expect(mockFn).toHaveResolvedTimes(1);
475
+
476
+ // Wait for expiration so cache is stale
477
+ await new Promise((resolve) => setTimeout(resolve, 150));
478
+
479
+ // Track concurrent execution count
480
+ let maxConcurrent = 0;
481
+ let currentConcurrent = 0;
482
+ const originalImpl = mockFn.getMockImplementation();
483
+
484
+ mockFn.mockImplementation(async (x: number) => {
485
+ currentConcurrent++;
486
+ maxConcurrent = Math.max(maxConcurrent, currentConcurrent);
487
+ // biome-ignore lint/style/noNonNullAssertion: we're sure it is here
488
+ const result = await originalImpl!(x);
489
+ currentConcurrent--;
490
+ return result;
491
+ });
492
+
493
+ // Trigger multiple revalidations
494
+ const results = await Promise.all([
495
+ cachedFn(5),
496
+ cachedFn(5),
497
+ cachedFn(5),
498
+ ]);
499
+
500
+ // wait until all would have been called
501
+ await new Promise((resolve) => setTimeout(resolve, 450));
502
+
503
+ expect(results).toEqual([10, 10, 10]);
504
+ // With concurrency: 1, should never have more than 1 concurrent revalidation
505
+ expect(maxConcurrent).toBe(1);
506
+ expect(mockFn).toHaveBeenCalledTimes(2);
507
+ });
508
+
509
+ it("should only queue revalidations in swr strategy when cache is expired", async () => {
510
+ const mockFn = vi.fn(async (x: number) => {
511
+ await new Promise((resolve) => setTimeout(resolve, 50));
512
+ return x * 2;
513
+ });
514
+
515
+ const cachedFn = withCache(mockFn, {
516
+ ttl: 100,
517
+ strategy: "swr",
518
+ revalidationConcurrency: 1,
519
+ });
520
+
521
+ // Prime the cache
522
+ await cachedFn(5);
523
+ expect(mockFn).toHaveResolvedTimes(1);
524
+
525
+ // Track concurrent execution count
526
+ let maxConcurrent = 0;
527
+ let currentConcurrent = 0;
528
+ const originalImpl = mockFn.getMockImplementation();
529
+
530
+ mockFn.mockImplementation(async (x: number) => {
531
+ currentConcurrent++;
532
+ maxConcurrent = Math.max(maxConcurrent, currentConcurrent);
533
+ // biome-ignore lint/style/noNonNullAssertion: we're sure it is here
534
+ const result = await originalImpl!(x);
535
+ currentConcurrent--;
536
+ return result;
537
+ });
538
+
539
+ // Call while cache is still valid - should NOT queue revalidation
540
+ const results = await Promise.all([
541
+ cachedFn(5),
542
+ cachedFn(5),
543
+ cachedFn(5),
544
+ ]);
545
+
546
+ expect(results).toEqual([10, 10, 10]);
547
+ // Since cache is not expired, no revalidation should be queued
548
+ // Function should only be called once (initial call)
549
+ expect(mockFn).toHaveResolvedTimes(1);
550
+ });
551
+
552
+ it("should use concurrency limit per unique cache key", async () => {
553
+ const mockFn1 = vi.fn(async (x: number) => {
554
+ await new Promise((resolve) => setTimeout(resolve, 50));
555
+ return x * 2;
556
+ });
557
+
558
+ const mockFn2 = vi.fn(async (x: number) => {
559
+ await new Promise((resolve) => setTimeout(resolve, 50));
560
+ return x * 3;
561
+ });
562
+
563
+ const cachedFn1 = withCache(mockFn1, {
564
+ ttl: 100,
565
+ strategy: "swr",
566
+ revalidationConcurrency: 1,
567
+ prefix: "fn1",
568
+ });
569
+
570
+ const cachedFn2 = withCache(mockFn2, {
571
+ ttl: 100,
572
+ strategy: "swr",
573
+ revalidationConcurrency: 1,
574
+ prefix: "fn2",
575
+ });
576
+
577
+ // Prime both caches
578
+ await cachedFn1(5);
579
+ await cachedFn2(5);
580
+ expect(mockFn1).toHaveResolvedTimes(1);
581
+ expect(mockFn2).toHaveResolvedTimes(1);
582
+
583
+ // Wait for expiration
584
+ await new Promise((resolve) => setTimeout(resolve, 150));
585
+
586
+ // Track concurrent execution for each
587
+ let maxConcurrent1 = 0;
588
+ let currentConcurrent1 = 0;
589
+ const originalImpl1 = mockFn1.getMockImplementation();
590
+
591
+ mockFn1.mockImplementation(async (x: number) => {
592
+ currentConcurrent1++;
593
+ maxConcurrent1 = Math.max(maxConcurrent1, currentConcurrent1);
594
+ // biome-ignore lint/style/noNonNullAssertion: we're sure it is here
595
+ const result = await originalImpl1!(x);
596
+ currentConcurrent1--;
597
+ return result;
598
+ });
599
+
600
+ // Trigger revalidations for both functions concurrently
601
+ const results = await Promise.all([
602
+ cachedFn1(5),
603
+ cachedFn1(5),
604
+ cachedFn2(5),
605
+ cachedFn2(5),
606
+ ]);
607
+
608
+ expect(results).toEqual([10, 10, 15, 15]);
609
+ // Each function's queue should limit its own concurrency
610
+ expect(maxConcurrent1).toBe(1);
611
+ });
612
+ });
613
+ });
package/src/lruStorage.ts ADDED
@@ -0,0 +1,32 @@
1
+ import { LRUCache } from "lru-cache";
2
+ import type { CachedItem } from "./cacheContainer.ts";
3
+ import type { Storage } from "./storage.ts";
4
+
5
+ export class LRUStorage implements Storage {
6
+ private cache: LRUCache<string, CachedItem, unknown>;
7
+
8
+ constructor({
9
+ max = 10_000,
10
+ }: Partial<LRUCache<string, CachedItem, unknown>> = {}) {
11
+ this.cache = new LRUCache<string, CachedItem, unknown>({
12
+ max,
13
+ });
14
+ }
15
+
16
+ async clear(): Promise<void> {
17
+ this.cache.clear();
18
+ }
19
+
20
+ async getItem(key: string) {
21
+ const item = this.cache.get(key);
22
+ return item;
23
+ }
24
+
25
+ async setItem(key: string, content: CachedItem) {
26
+ this.cache.set(key, content);
27
+ }
28
+
29
+ async removeItem(key: string) {
30
+ this.cache.delete(key);
31
+ }
32
+ }
package/src/storage.ts ADDED
@@ -0,0 +1,27 @@
1
+ import type { CachedItem } from "./cacheContainer.ts";
2
+
3
+ export interface Storage {
4
+ /**
5
+ * returns a cached item from the storage layer
6
+ * @param key - key to look up
7
+ */
8
+ getItem(key: string): Promise<CachedItem | undefined>;
9
+
10
+ /**
11
+ * sets a cached item on the storage layer
12
+ * @param key - key to store
13
+ * @param content - content to store, including some meta data
14
+ */
15
+ setItem(key: string, content: CachedItem): Promise<void>;
16
+
17
+ /**
18
+ * removes item from the storage layer
19
+ * @param key - key to remove
20
+ */
21
+ removeItem(key: string): Promise<void>;
22
+
23
+ /**
24
+ * remove all keys from the storage layer
25
+ */
26
+ clear(): Promise<void>;
27
+ }
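For illustration only (this class is not part of the package), a minimal custom backend implementing `Storage` with a plain, unbounded `Map` could look like this sketch:

```ts
import type { CachedItem } from "./cacheContainer.ts";
import type { Storage } from "./storage.ts";

/** Sketch of a custom storage backend: an unbounded in-memory Map. */
export class MapStorage implements Storage {
  private items = new Map<string, CachedItem>();

  async getItem(key: string): Promise<CachedItem | undefined> {
    return this.items.get(key);
  }

  async setItem(key: string, content: CachedItem): Promise<void> {
    this.items.set(key, content);
  }

  async removeItem(key: string): Promise<void> {
    this.items.delete(key);
  }

  async clear(): Promise<void> {
    this.items.clear();
  }
}
```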
package/src/withCache.ts ADDED
@@ -0,0 +1,110 @@
1
+ import PQueue from "p-queue";
2
+ import type { CacheContainer, CachingOptions } from "./cacheContainer.ts";
3
+ import hash from "./hash.ts";
4
+
5
+ const revalidationQueues: Record<string, PQueue> = {};
6
+
7
+ type WithCacheOptions<Parameters, Result> = Partial<
8
+ Omit<CachingOptions, "calculateKey" | "isLazy">
9
+ > & {
10
+ /** an optional prefix to prepend to the key */
11
+ prefix?: string;
12
+ /** an optional function to calculate a key based on the parameters of the wrapped function */
13
+ calculateKey?: (input: Parameters) => string;
14
+ /** an optional predicate that receives the result and decides whether it should be stored */
15
+ shouldStore?: (result: Awaited<Result>) => boolean;
16
+ /**
17
+ * caching strategy to use
18
+ * - "lazy": cache is populated in the background after returning the result
19
+ * - "swr": stale-while-revalidate, cache is returned if present and updated in the background
20
+ * - "eager": cache is populated before returning the result
21
+ * @default "eager"
22
+ */
23
+ strategy?: "lazy" | "swr" | "eager";
24
+ /**
25
+ * Concurrency for revalidation queue
26
+ * @default 1
27
+ */
28
+ revalidationConcurrency?: number;
29
+ };
30
+
31
+ /**
32
+ * wrapped function factory
33
+ * @param container - cache container to create the fn for
34
+ * @returns wrapping function
35
+ */
36
+ export const withCacheFactory = (container: CacheContainer) => {
37
+ /**
38
+ * function wrapper
39
+ * @param operation - the function to be wrapped
40
+ * @param options - caching options
41
+ * @returns wrapped operation
42
+ */
43
+ const withCache = <
44
+ Parameters extends Array<unknown>,
45
+ Result extends Promise<unknown>,
46
+ >(
47
+ operation: (...parameters: Parameters) => Result,
48
+ options: WithCacheOptions<Parameters, Result> = {},
49
+ ) => {
50
+ return async (...parameters: Parameters): Promise<Result> => {
51
+ const {
52
+ calculateKey,
53
+ strategy = "eager",
54
+ revalidationConcurrency: concurrency = 1,
55
+ ...rest
56
+ } = options;
57
+ const prefix = options.prefix ?? "default";
58
+ const key = `${operation.name}:${prefix}:${
59
+ calculateKey ? calculateKey(parameters) : hash(parameters)
60
+ }` as const;
61
+
62
+ const queueName = `${operation.name}:${prefix}` as const;
63
+
64
+ revalidationQueues[queueName] =
65
+ revalidationQueues[queueName] ??
66
+ new PQueue({
67
+ concurrency,
68
+ });
69
+ revalidationQueues[queueName].concurrency = concurrency;
70
+
71
+ const cachedResponse = await container.getItem<Awaited<Result>>(key);
72
+
73
+ const refreshedItem = async () => {
74
+ const result = await operation(...parameters);
75
+ if (!options.shouldStore || options.shouldStore(result)) {
76
+ await container.setItem(key, result, {
77
+ ...rest,
78
+ isLazy: strategy === "lazy" || strategy === "swr",
79
+ });
80
+ }
81
+ return result;
82
+ };
83
+
84
+ /**
85
+ * Stale-While-Revalidate strategy
86
+ * If the cached response is expired, we return it immediately and
87
+ * revalidate in the background
88
+ */
89
+ if (strategy === "swr" && cachedResponse?.meta.expired) {
90
+ if (
91
+ !revalidationQueues[queueName].runningTasks.some(
92
+ (t) => t.id === key && t.startTime,
93
+ )
94
+ ) {
95
+ revalidationQueues[queueName].add(refreshedItem, {
96
+ id: key,
97
+ });
98
+ }
99
+ }
100
+
101
+ if (cachedResponse) {
102
+ return cachedResponse.content;
103
+ }
104
+
105
+ const result = await refreshedItem();
106
+ return result;
107
+ };
108
+ };
109
+ return withCache;
110
+ };
package/tsconfig.json ADDED
@@ -0,0 +1,48 @@
1
+ {
2
+ // Visit https://aka.ms/tsconfig to read more about this file
3
+ "compilerOptions": {
4
+ "noEmit": true,
5
+ // File Layout
6
+ // "rootDir": "./src",
7
+ // "outDir": "./dist",
8
+
9
+ // Environment Settings
10
+ // See also https://aka.ms/tsconfig/module
11
+ "module": "ES2022",
12
+ "moduleResolution": "bundler",
13
+ "target": "ES2022",
14
+ "types": [],
15
+ // For nodejs:
16
+ // "lib": ["esnext"],
17
+ // "types": ["node"],
18
+ // and npm install -D @types/node
19
+
20
+ // Other Outputs
21
+ "sourceMap": true,
22
+ "declaration": true,
23
+ "declarationMap": true,
24
+
25
+ // Stricter Typechecking Options
26
+ "noUncheckedIndexedAccess": true,
27
+ "exactOptionalPropertyTypes": true,
28
+
29
+ // Style Options
30
+ "noImplicitReturns": true,
31
+ "noImplicitOverride": true,
32
+ "noUnusedLocals": true,
33
+ "noUnusedParameters": true,
34
+ "noFallthroughCasesInSwitch": true,
35
+ "noPropertyAccessFromIndexSignature": true,
36
+
37
+ // Recommended Options
38
+ "strict": true,
39
+ "jsx": "react-jsx",
40
+ "verbatimModuleSyntax": true,
41
+ "isolatedModules": true,
42
+ "noUncheckedSideEffectImports": true,
43
+ "moduleDetection": "force",
44
+ "skipLibCheck": true,
45
+
46
+ "allowImportingTsExtensions": true
47
+ }
48
+ }
package/tsdown.config.ts ADDED
@@ -0,0 +1,9 @@
1
+ import { defineConfig } from "tsdown";
2
+
3
+ export default defineConfig({
4
+ exports: {
5
+ all: true,
6
+ },
7
+ minify: true,
8
+ platform: "node",
9
+ });