@veloxts/cache 0.6.87 → 0.6.89

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
1
1
  # @veloxts/cache
2
2
 
3
+ ## 0.6.89
4
+
5
+ ### Patch Changes
6
+
7
+ - expand preset system with server config, auth presets, and security validation
8
+ - Updated dependencies
9
+ - @veloxts/core@0.6.89
10
+
11
+ ## 0.6.88
12
+
13
+ ### Patch Changes
14
+
15
+ - add ecosystem presets for environment-aware configuration
16
+ - Updated dependencies
17
+ - @veloxts/core@0.6.88
18
+
3
19
  ## 0.6.87
4
20
 
5
21
  ### Patch Changes
package/GUIDE.md CHANGED
@@ -1,41 +1,63 @@
1
1
  # @veloxts/cache Guide
2
2
 
3
- Multi-driver caching layer for VeloxTS framework with support for memory (LRU), Redis, cache tags, and distributed locks.
3
+ Multi-driver caching for VeloxTS applications with support for memory (LRU) and Redis drivers, cache tags, and distributed locks.
4
4
 
5
5
  ## Installation
6
6
 
7
7
  ```bash
8
8
  pnpm add @veloxts/cache
9
9
 
10
- # For Redis support (optional)
10
+ # For Redis (production)
11
11
  pnpm add ioredis
12
12
  ```
13
13
 
14
- ## Plugin Registration
14
+ ## Quick Start
15
+
16
+ ### Development (Memory)
15
17
 
16
18
  ```typescript
17
19
  import { veloxApp } from '@veloxts/core';
18
20
  import { cachePlugin } from '@veloxts/cache';
19
21
 
20
- const app = await veloxApp();
22
+ const app = veloxApp();
21
23
 
22
- // Memory cache (development)
23
- app.use(cachePlugin({
24
+ app.register(cachePlugin({
24
25
  driver: 'memory',
25
26
  config: { maxSize: 1000 },
26
27
  }));
27
28
 
28
- // Redis cache (production)
29
- app.use(cachePlugin({
29
+ await app.start();
30
+ ```
31
+
32
+ ### Production (Redis)
33
+
34
+ ```typescript
35
+ import { veloxApp } from '@veloxts/core';
36
+ import { cachePlugin } from '@veloxts/cache';
37
+
38
+ const app = veloxApp();
39
+
40
+ app.register(cachePlugin({
30
41
  driver: 'redis',
31
- config: { url: process.env.REDIS_URL },
42
+ config: {
43
+ url: process.env.REDIS_URL,
44
+ },
32
45
  }));
46
+
47
+ await app.start();
48
+ ```
49
+
50
+ **Environment Variables:**
51
+
52
+ ```bash
53
+ # .env
54
+ REDIS_URL=redis://user:password@your-redis-host:6379
33
55
  ```
34
56
 
35
57
  ## Basic Usage
36
58
 
37
59
  ```typescript
38
- // Store a value
60
+ // Store a value with TTL
39
61
  await ctx.cache.put('user:123', { name: 'John' }, '30m');
40
62
 
41
63
  // Get a value
@@ -46,6 +68,9 @@ if (await ctx.cache.has('user:123')) { ... }
46
68
 
47
69
  // Delete a value
48
70
  await ctx.cache.forget('user:123');
71
+
72
+ // Delete multiple values
73
+ await ctx.cache.forgetMany(['user:123', 'user:456']);
49
74
  ```
50
75
 
51
76
  ## Remember Pattern
@@ -66,7 +91,7 @@ await ctx.cache.put('key', value, '5m'); // 5 minutes
66
91
  await ctx.cache.put('key', value, '1h'); // 1 hour
67
92
  await ctx.cache.put('key', value, '1d'); // 1 day
68
93
  await ctx.cache.put('key', value, '1w'); // 1 week
69
- await ctx.cache.put('key', value, 3600); // 3600 seconds
94
+ await ctx.cache.put('key', value, 3600); // 3600 seconds (number)
70
95
  ```
71
96
 
72
97
  ## Cache Tags
@@ -84,7 +109,7 @@ await ctx.cache.tags(['users']).flush();
84
109
 
85
110
  ## Distributed Locks
86
111
 
87
- Prevent concurrent execution across instances:
112
+ Prevent concurrent execution across instances (Redis only):
88
113
 
89
114
  ```typescript
90
115
  await ctx.cache.lockAndRun('payment:process', '30s', async () => {
@@ -97,6 +122,7 @@ await ctx.cache.lockAndRun('payment:process', '30s', async () => {
97
122
 
98
123
  ```typescript
99
124
  await ctx.cache.increment('views:post:123');
125
+ await ctx.cache.increment('views:post:123', 5); // Increment by 5
100
126
  await ctx.cache.decrement('stock:item:456');
101
127
  ```
102
128
 
@@ -106,3 +132,61 @@ await ctx.cache.decrement('stock:item:456');
106
132
  |--------|---------|----------|
107
133
  | `memory` | lru-cache | Development, single instance |
108
134
  | `redis` | ioredis | Production, multi-instance |
135
+
136
+ ### Memory Driver Options
137
+
138
+ ```typescript
139
+ app.register(cachePlugin({
140
+ driver: 'memory',
141
+ config: {
142
+ maxSize: 1000, // Max entries
143
+ defaultTtl: '1h', // Default TTL
144
+ },
145
+ }));
146
+ ```
147
+
148
+ ### Redis Driver Options
149
+
150
+ ```typescript
151
+ app.register(cachePlugin({
152
+ driver: 'redis',
153
+ config: {
154
+ url: process.env.REDIS_URL,
155
+ keyPrefix: 'myapp:cache:', // Optional prefix
156
+ defaultTtl: '1h', // Default TTL
157
+ },
158
+ }));
159
+ ```
160
+
161
+ ## Production Deployment
162
+
163
+ **Why Redis for production:**
164
+ - Shared cache across multiple server instances
165
+ - Cache persists across deployments
166
+ - Distributed locks work across instances
167
+ - Better memory management than in-process cache
168
+
169
+ **Recommended Redis providers:**
170
+ - [Upstash](https://upstash.com) - Serverless, pay-per-request
171
+ - [Redis Cloud](https://redis.com/cloud) - Managed Redis
172
+ - [Railway](https://railway.app) - Simple Redis add-on
173
+
174
+ ## Standalone Usage
175
+
176
+ Use the cache outside of a Fastify request context (CLI commands, background jobs):
177
+
178
+ ```typescript
179
+ import { getCache, closeCache } from '@veloxts/cache';
180
+
181
+ // Get standalone cache instance
182
+ const cache = await getCache({
183
+ driver: 'redis',
184
+ config: { url: process.env.REDIS_URL },
185
+ });
186
+
187
+ await cache.put('key', 'value', '1h');
188
+ const value = await cache.get('key');
189
+
190
+ // Clean up when done
191
+ await closeCache();
192
+ ```
package/dist/manager.js CHANGED
@@ -132,8 +132,15 @@ export async function createCacheManager(options = {}) {
132
132
  return cached;
133
133
  }
134
134
  const value = await callback();
135
- // Use a very long TTL (10 years)
136
- await store.put(key, value, '3650d');
135
+ // Use a very long TTL (10 years) with atomic add to prevent race conditions
136
+ const added = await store.add(key, value, '3650d');
137
+ if (!added) {
138
+ // Another process set the value - return that instead
139
+ const existing = await store.get(key);
140
+ if (existing !== null) {
141
+ return existing;
142
+ }
143
+ }
137
144
  return value;
138
145
  },
139
146
  async pull(key) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@veloxts/cache",
3
- "version": "0.6.87",
3
+ "version": "0.6.89",
4
4
  "description": "Multi-driver caching layer for VeloxTS framework",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -35,7 +35,7 @@
35
35
  "fastify-plugin": "5.1.0",
36
36
  "lru-cache": "11.1.0",
37
37
  "superjson": "2.2.2",
38
- "@veloxts/core": "0.6.87"
38
+ "@veloxts/core": "0.6.89"
39
39
  },
40
40
  "peerDependencies": {
41
41
  "ioredis": ">=5.0.0"
@@ -51,7 +51,7 @@
51
51
  "ioredis": "5.6.1",
52
52
  "typescript": "5.9.3",
53
53
  "vitest": "4.0.16",
54
- "@veloxts/testing": "0.6.87"
54
+ "@veloxts/testing": "0.6.89"
55
55
  },
56
56
  "keywords": [
57
57
  "velox",