@ooneex/cache 0.0.1 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1 +1,403 @@
  # @ooneex/cache
+
+ A flexible caching library for TypeScript applications with support for filesystem and Redis backends. This package provides a consistent interface for caching data with TTL support, automatic serialization, and seamless integration with the Ooneex dependency injection container.
+
+ ![Bun](https://img.shields.io/badge/Bun-Compatible-orange?style=flat-square&logo=bun)
+ ![Deno](https://img.shields.io/badge/Deno-Compatible-blue?style=flat-square&logo=deno)
+ ![Node.js](https://img.shields.io/badge/Node.js-Compatible-green?style=flat-square&logo=node.js)
+ ![TypeScript](https://img.shields.io/badge/TypeScript-Ready-blue?style=flat-square&logo=typescript)
+ ![MIT License](https://img.shields.io/badge/License-MIT-yellow?style=flat-square)
+
+ ## Features
+
+ ✅ **Multiple Backends** - Support for filesystem and Redis caching
+
+ ✅ **TTL Support** - Automatic expiration with configurable time-to-live
+
+ ✅ **Type-Safe** - Full TypeScript support with generic cache values
+
+ ✅ **Auto Serialization** - Automatic JSON serialization for complex objects
+
+ ✅ **Container Integration** - Works seamlessly with dependency injection
+
+ ✅ **File Size Limits** - Configurable maximum file size for filesystem cache
+
+ ✅ **Auto Reconnection** - Redis client with automatic reconnection support
+
+ ## Installation
+
+ ### Bun
+ ```bash
+ bun add @ooneex/cache
+ ```
+
+ ### pnpm
+ ```bash
+ pnpm add @ooneex/cache
+ ```
+
+ ### Yarn
+ ```bash
+ yarn add @ooneex/cache
+ ```
+
+ ### npm
+ ```bash
+ npm install @ooneex/cache
+ ```
+
+ ## Usage
+
+ ### Filesystem Cache
+
+ ```typescript
+ import { FilesystemCache } from '@ooneex/cache';
+
+ const cache = new FilesystemCache({
+   cacheDir: './.cache',
+   maxFileSize: 10 * 1024 * 1024 // 10MB
+ });
+
+ // Set a value
+ await cache.set('user:123', { name: 'John', age: 30 });
+
+ // Get a value
+ const user = await cache.get<{ name: string; age: number }>('user:123');
+ console.log(user); // { name: 'John', age: 30 }
+
+ // Check if key exists
+ const exists = await cache.has('user:123');
+ console.log(exists); // true
+
+ // Delete a value
+ await cache.delete('user:123');
+ ```
+
+ ### Redis Cache
+
+ ```typescript
+ import { RedisCache } from '@ooneex/cache';
+
+ const cache = new RedisCache({
+   connectionString: 'redis://localhost:6379'
+ });
+
+ // Set with TTL (5 minutes)
+ await cache.set('session:abc', { token: 'xyz' }, 300);
+
+ // Get the value
+ const session = await cache.get<{ token: string }>('session:abc');
+
+ // Check existence
+ if (await cache.has('session:abc')) {
+   console.log('Session is valid');
+ }
+ ```
+
+ ### With Environment Variables
+
+ ```typescript
+ import { RedisCache } from '@ooneex/cache';
+
+ // Automatically uses CACHE_REDIS_URL environment variable
+ const cache = new RedisCache();
+
+ await cache.set('key', 'value');
+ ```
+
+ **Environment Variables:**
+ - `CACHE_REDIS_URL` - Redis connection string for RedisCache
+
+ ### With TTL (Time-To-Live)
+
+ ```typescript
+ import { FilesystemCache } from '@ooneex/cache';
+
+ const cache = new FilesystemCache();
+
+ // Cache for 60 seconds
+ await cache.set('temporary', { data: 'expires soon' }, 60);
+
+ // Cache indefinitely (no TTL)
+ await cache.set('permanent', { data: 'stays forever' }, 0);
+ ```
+
+ ## API Reference
+
+ ### Classes
+
+ #### `FilesystemCache`
+
+ File-based caching implementation that stores data as JSON files.
+
+ **Constructor:**
+ ```typescript
+ new FilesystemCache(options?: FilesystemCacheOptionsType)
+ ```
+
+ **Parameters:**
+ - `options.cacheDir` - Directory for cache files (default: `{cwd}/.cache`)
+ - `options.maxFileSize` - Maximum file size in bytes (default: 10MB)
+
+ **Methods:**
+
+ ##### `get<T>(key: string): Promise<T | undefined>`
+
+ Retrieves a cached value by key.
+
+ **Parameters:**
+ - `key` - The cache key
+
+ **Returns:** The cached value or `undefined` if not found or expired
+
+ ##### `set<T>(key: string, value: T, ttl?: number): Promise<void>`
+
+ Stores a value in the cache.
+
+ **Parameters:**
+ - `key` - The cache key
+ - `value` - The value to cache
+ - `ttl` - Time-to-live in seconds (optional, 0 for no expiration)
+
+ ##### `delete(key: string): Promise<boolean>`
+
+ Removes a value from the cache.
+
+ **Parameters:**
+ - `key` - The cache key
+
+ **Returns:** `true` if the key was deleted, `false` if it didn't exist
+
+ ##### `has(key: string): Promise<boolean>`
+
+ Checks if a key exists in the cache.
+
+ **Parameters:**
+ - `key` - The cache key
+
+ **Returns:** `true` if the key exists and hasn't expired
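+
+ To make the expiry semantics concrete, here is a small sketch (the timings are illustrative) showing how `get` and `has` behave once a TTL has elapsed:
+
+ ```typescript
+ import { FilesystemCache } from '@ooneex/cache';
+
+ const cache = new FilesystemCache();
+
+ // Entry expires after 1 second
+ await cache.set('otp:42', '123456', 1);
+ console.log(await cache.has('otp:42')); // true
+
+ // Once the TTL has elapsed, the entry is treated as expired
+ await new Promise((resolve) => setTimeout(resolve, 1100));
+ console.log(await cache.has('otp:42')); // false
+ console.log(await cache.get('otp:42')); // undefined
+ ```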
+
+ ---
+
+ #### `RedisCache`
+
+ Redis-based caching implementation for distributed caching.
+
+ **Constructor:**
+ ```typescript
+ new RedisCache(options?: RedisCacheOptionsType)
+ ```
+
+ **Parameters:**
+ - `options.connectionString` - Redis URL (default: `CACHE_REDIS_URL` env var)
+ - `options.connectionTimeout` - Connection timeout in ms (default: 10000)
+ - `options.idleTimeout` - Idle timeout in ms (default: 30000)
+ - `options.autoReconnect` - Enable auto reconnection (default: true)
+ - `options.maxRetries` - Maximum retry attempts (default: 3)
+ - `options.enableOfflineQueue` - Queue commands when offline (default: true)
+ - `options.enableAutoPipelining` - Enable auto pipelining (default: true)
+ - `options.tls` - TLS configuration (optional)
+
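+ For reference, a constructor call that spells out these options might look like the following sketch; the values shown are simply the defaults listed above:
+
+ ```typescript
+ import { RedisCache } from '@ooneex/cache';
+
+ const cache = new RedisCache({
+   connectionString: 'redis://localhost:6379',
+   connectionTimeout: 10000, // ms
+   idleTimeout: 30000,       // ms
+   autoReconnect: true,
+   maxRetries: 3,
+   enableOfflineQueue: true,
+   enableAutoPipelining: true
+ });
+ ```
+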
+ **Methods:**
+
+ `RedisCache` implements the same `ICache` interface as `FilesystemCache`, so it exposes the same `get`, `set`, `delete`, and `has` methods.
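+
+ Because both classes implement `ICache`, they can be swapped behind a common type. A minimal sketch (the environment check is illustrative):
+
+ ```typescript
+ import type { ICache } from '@ooneex/cache';
+ import { FilesystemCache, RedisCache } from '@ooneex/cache';
+
+ // Use Redis when a connection string is configured, otherwise fall back to disk
+ const cache: ICache = process.env.CACHE_REDIS_URL
+   ? new RedisCache()
+   : new FilesystemCache();
+
+ await cache.set('greeting', 'hello', 60);
+ ```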
+
+ ### Interfaces
+
+ #### `ICache`
+
+ ```typescript
+ interface ICache {
+   get: <T = unknown>(key: string) => Promise<T | undefined>;
+   set: <T = unknown>(key: string, value: T, ttl?: number) => Promise<void>;
+   delete: (key: string) => Promise<boolean>;
+   has: (key: string) => Promise<boolean>;
+ }
+ ```
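+
+ Any backend that satisfies this interface can stand in for the built-in classes. A minimal in-memory sketch (hypothetical, not shipped with this package):
+
+ ```typescript
+ import type { ICache } from '@ooneex/cache';
+
+ class MemoryCache implements ICache {
+   private store = new Map<string, { value: unknown; expiresAt?: number }>();
+
+   public get = async <T = unknown>(key: string): Promise<T | undefined> => {
+     const entry = this.store.get(key);
+     if (!entry) return undefined;
+     if (entry.expiresAt && entry.expiresAt < Date.now()) {
+       this.store.delete(key);
+       return undefined;
+     }
+     return entry.value as T;
+   };
+
+   public set = async <T = unknown>(key: string, value: T, ttl?: number): Promise<void> => {
+     // A ttl of 0 or undefined means the entry never expires, mirroring the built-in backends
+     this.store.set(key, { value, ...(ttl ? { expiresAt: Date.now() + ttl * 1000 } : {}) });
+   };
+
+   public delete = async (key: string): Promise<boolean> => this.store.delete(key);
+
+   public has = async (key: string): Promise<boolean> => (await this.get(key)) !== undefined;
+ }
+ ```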
+
+ ### Types
+
+ #### `FilesystemCacheOptionsType`
+
+ ```typescript
+ type FilesystemCacheOptionsType = {
+   cacheDir?: string;
+   maxFileSize?: number;
+   cleanupInterval?: number;
+   enableCleanup?: boolean;
+ };
+ ```
+
+ #### `RedisCacheOptionsType`
+
+ ```typescript
+ type RedisCacheOptionsType = {
+   connectionString?: string;
+   connectionTimeout?: number;
+   idleTimeout?: number;
+   autoReconnect?: boolean;
+   maxRetries?: number;
+   enableOfflineQueue?: boolean;
+   enableAutoPipelining?: boolean;
+   tls?: boolean | object;
+ };
+ ```
+
+ #### `CacheClassType`
+
+ ```typescript
+ type CacheClassType = new (...args: any[]) => ICache;
+ ```
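+
+ `CacheClassType` describes any constructor that produces an `ICache`. A small sketch of how such a class reference might be consumed (the `createCache` helper is illustrative, not part of the package):
+
+ ```typescript
+ import type { CacheClassType, ICache } from '@ooneex/cache';
+ import { FilesystemCache, RedisCache } from '@ooneex/cache';
+
+ // Pick a backend class at runtime and instantiate it
+ const createCache = (backend: CacheClassType, ...args: unknown[]): ICache => {
+   return new backend(...args);
+ };
+
+ const cache = createCache(process.env.CACHE_REDIS_URL ? RedisCache : FilesystemCache);
+ ```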
+
+ ## Advanced Usage
+
+ ### Integration with Ooneex App
+
+ ```typescript
+ import { App } from '@ooneex/app';
+ import { RedisCache } from '@ooneex/cache';
+
+ const app = new App({
+   cache: RedisCache,
+   // ... other config
+ });
+
+ await app.run();
+ ```
+
+ ### Using in Controllers
+
+ ```typescript
+ import { Route } from '@ooneex/routing';
+ import type { IController, ContextType } from '@ooneex/controller';
+
+ @Route.http({
+   name: 'api.users.show',
+   path: '/api/users/:id',
+   method: 'GET',
+   description: 'Get user by ID'
+ })
+ class UserShowController implements IController {
+   public async index(context: ContextType): Promise<IResponse> {
+     const { id } = context.params;
+     const { cache } = context;
+
+     // Try to get from cache first
+     const cacheKey = `user:${id}`;
+     let user = await cache?.get<User>(cacheKey);
+
+     if (!user) {
+       // Fetch from database
+       user = await this.userRepository.findById(id);
+
+       // Cache for 5 minutes
+       await cache?.set(cacheKey, user, 300);
+     }
+
+     return context.response.json({ user });
+   }
+ }
+ ```
+
+ ### Cache Decorator Pattern
+
+ ```typescript
+ import { container } from '@ooneex/container';
+ import { RedisCache, decorator } from '@ooneex/cache';
+
+ // Register with container using decorator
+ @decorator.cache()
+ class MyCacheService extends RedisCache {
+   // Custom caching logic
+ }
+
+ // Resolve from container
+ const cache = container.get(MyCacheService);
+ ```
+
+ ### Error Handling
+
+ ```typescript
+ import { RedisCache, CacheException } from '@ooneex/cache';
+
+ try {
+   const cache = new RedisCache();
+   await cache.set('key', 'value');
+ } catch (error) {
+   if (error instanceof CacheException) {
+     console.error('Cache Error:', error.message);
+     // Handle cache-specific error
+   }
+ }
+ ```
+
+ ### Caching Complex Objects
+
+ ```typescript
+ import { FilesystemCache } from '@ooneex/cache';
+
+ interface Product {
+   id: string;
+   name: string;
+   price: number;
+   categories: string[];
+ }
+
+ const cache = new FilesystemCache();
+
+ const product: Product = {
+   id: 'prod-123',
+   name: 'Widget',
+   price: 29.99,
+   categories: ['electronics', 'gadgets']
+ };
+
+ // Objects are automatically serialized to JSON
+ await cache.set('product:prod-123', product);
+
+ // And deserialized when retrieved
+ const cached = await cache.get<Product>('product:prod-123');
+ console.log(cached?.name); // "Widget"
+ ```
+
+ ### Cache Key Patterns
+
+ ```typescript
+ import { RedisCache } from '@ooneex/cache';
+
+ const cache = new RedisCache();
+
+ // Use namespaced keys for organization
+ await cache.set('users:123:profile', userData);
+ await cache.set('users:123:settings', settingsData);
+ await cache.set('products:456:details', productData);
+
+ // Session-based caching
+ await cache.set(`session:${sessionId}`, sessionData, 3600); // 1 hour TTL
+ ```
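+
+ To keep key construction consistent across a codebase, a tiny helper (purely illustrative, not part of the package) can centralize the namespace convention:
+
+ ```typescript
+ // Illustrative helper for building namespaced cache keys,
+ // reusing the cache and data variables from the snippet above
+ const cacheKey = (...parts: (string | number)[]): string => parts.join(':');
+
+ await cache.set(cacheKey('users', 123, 'profile'), userData);
+ const profile = await cache.get(cacheKey('users', 123, 'profile'));
+ ```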
+
+ ## License
+
+ This project is licensed under the MIT License - see the [LICENSE](./LICENSE) file for details.
+
+ ## Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request. For major changes, please open an issue first to discuss what you would like to change.
+
+ ### Development Setup
+
+ 1. Clone the repository
+ 2. Install dependencies: `bun install`
+ 3. Run tests: `bun run test`
+ 4. Build the project: `bun run build`
+
+ ### Guidelines
+
+ - Write tests for new features
+ - Follow the existing code style
+ - Update documentation for API changes
+ - Ensure all tests pass before submitting a PR
+
+ ---
+
+ Made with ❤️ by the Ooneex team
package/dist/index.d.ts CHANGED
@@ -2,8 +2,9 @@ import { Exception } from "@ooneex/exception";
  declare class CacheException extends Exception {
    constructor(message: string, data?: Record<string, unknown>);
  }
+ import { EContainerScope } from "@ooneex/container";
  type CacheClassType = new (...args: any[]) => ICache;
- type RedisCacheAdapterType = {
+ type RedisCacheOptionsType = {
    connectionString?: string;
    connectionTimeout?: number;
    idleTimeout?: number;
@@ -13,7 +14,7 @@ type RedisCacheAdapterType = {
    enableAutoPipelining?: boolean;
    tls?: boolean | object;
  };
- type FilesystemCacheAdapterType = {
+ type FilesystemCacheOptionsType = {
    cacheDir?: string;
    maxFileSize?: number;
    cleanupInterval?: number;
@@ -24,75 +25,31 @@ interface ICache {
    set: <T = unknown>(key: string, value: T, ttl?: number) => Promise<void>;
    delete: (key: string) => Promise<boolean>;
    has: (key: string) => Promise<boolean>;
-   mget: <T = unknown>(keys: string[]) => Promise<(T | undefined)[]>;
-   mset: <T = unknown>(entries: {
-     key: string;
-     value: T;
-     ttl?: number;
-   }[]) => Promise<void>;
-   ttl: (key: string) => Promise<number | null>;
-   expire: (key: string, ttl: number) => Promise<boolean>;
-   incr: (key: string, delta?: number) => Promise<number>;
-   decr: (key: string, delta?: number) => Promise<number>;
-   keys: (pattern?: string) => Promise<string[]>;
-   flush: () => Promise<void>;
  }
+ declare const decorator: {
+   cache: (scope?: EContainerScope) => (target: CacheClassType) => void;
+ };
  declare class FilesystemCache implements ICache {
    private cacheDir;
    private maxFileSize;
-   private cleanupInterval;
-   private enableCleanup;
-   private cleanupTimer;
-   constructor(options?: FilesystemCacheAdapterType);
-   connect(): Promise<void>;
-   close(): Promise<void>;
-   get: <T = unknown>(key: string) => Promise<T | undefined>;
-   set: <T = unknown>(key: string, value: T, ttl?: number) => Promise<void>;
-   delete: (key: string) => Promise<boolean>;
-   has: (key: string) => Promise<boolean>;
-   mget: <T = unknown>(keys: string[]) => Promise<(T | undefined)[]>;
-   mset: <T = unknown>(entries: {
-     key: string;
-     value: T;
-     ttl?: number;
-   }[]) => Promise<void>;
-   ttl: (key: string) => Promise<number | null>;
-   expire: (key: string, ttl: number) => Promise<boolean>;
-   incr: (key: string, delta?: number) => Promise<number>;
-   decr: (key: string, delta?: number) => Promise<number>;
-   keys: (pattern?: string) => Promise<string[]>;
-   flush: () => Promise<void>;
-   private startCleanup;
-   private cleanupExpired;
+   constructor(options?: FilesystemCacheOptionsType);
+   private connect;
+   get<T = unknown>(key: string): Promise<T | undefined>;
+   set<T = unknown>(key: string, value: T, ttl?: number): Promise<void>;
+   delete(key: string): Promise<boolean>;
+   has(key: string): Promise<boolean>;
    private getFilePath;
    private isExpired;
    private readCacheEntry;
    private writeCacheEntry;
  }
- declare class RedisCacheAdapter implements ICache {
+ declare class RedisCache implements ICache {
    private client;
-   constructor(options?: RedisCacheAdapterType);
-   connect(): Promise<void>;
-   close(): void;
-   /**
-    * Get connection status
-    */
-   get connected(): boolean;
+   constructor(options?: RedisCacheOptionsType);
+   private connect;
    get<T = unknown>(key: string): Promise<T | undefined>;
    set<T = unknown>(key: string, value: T, ttl?: number): Promise<void>;
    delete(key: string): Promise<boolean>;
    has(key: string): Promise<boolean>;
-   mget<T = unknown>(keys: string[]): Promise<(T | undefined)[]>;
-   mset<T = unknown>(entries: {
-     key: string;
-     value: T;
-     ttl?: number;
-   }[]): Promise<void>;
-   ttl(key: string): Promise<number | null>;
-   expire(key: string, ttl: number): Promise<boolean>;
-   incr(key: string, delta?: number): Promise<number>;
-   decr(key: string, delta?: number): Promise<number>;
-   keys(pattern?: string): Promise<string[]>;
-   flush(): Promise<void>;
  }
- export { RedisCacheAdapterType, RedisCacheAdapter, ICache, FilesystemCacheAdapterType, FilesystemCache, CacheException, CacheClassType };
+ export { decorator, RedisCacheOptionsType, RedisCache, ICache, FilesystemCacheOptionsType, FilesystemCache, CacheException, CacheClassType };
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
  // @bun
- var Z=Object.create;var{getPrototypeOf:_,defineProperty:U,getOwnPropertyNames:$}=Object;var L=Object.prototype.hasOwnProperty;var W=(H,I,j)=>{j=H!=null?Z(_(H)):{};let q=I||!H||!H.__esModule?U(j,"default",{value:H,enumerable:!0}):j;for(let B of $(H))if(!L.call(q,B))U(q,B,{get:()=>H[B],enumerable:!0});return q};var X=import.meta.require;import{Exception as M}from"@ooneex/exception";import{HttpStatus as O}from"@ooneex/http-status";class G extends M{constructor(H,I={}){super(H,{status:O.Code.InternalServerError,data:I});this.name="CacheException"}}import{mkdir as b,readdir as D,stat as A}from"fs/promises";class F{cacheDir;maxFileSize;cleanupInterval;enableCleanup;cleanupTimer;constructor(H={}){this.cacheDir=H.cacheDir||`${process.cwd()}/.cache`,this.maxFileSize=H.maxFileSize||10485760,this.cleanupInterval=H.cleanupInterval||300000,this.enableCleanup=H.enableCleanup??!0}async connect(){try{if(await b(this.cacheDir,{recursive:!0}),!(await A(this.cacheDir)).isDirectory())throw new G("Failed to create cache directory");if(this.enableCleanup&&!this.cleanupTimer)this.startCleanup()}catch(H){throw new G(`Failed to initialize filesystem cache: ${H}`)}}async close(){if(this.cleanupTimer)clearInterval(this.cleanupTimer),this.cleanupTimer=void 0}get=async(H)=>{try{return(await this.readCacheEntry(H))?.value}catch(I){throw new G(`Failed to get key "${H}": ${I}`)}};set=async(H,I,j)=>{try{let q={value:I,createdAt:Date.now(),originalKey:H,...j!==void 0&&{ttl:j}};await this.writeCacheEntry(H,q)}catch(q){throw new G(`Failed to set key "${H}": ${q}`)}};delete=async(H)=>{try{let I=Bun.file(this.getFilePath(H));if(!await I.exists())return!1;return await I.delete(),!0}catch(I){throw new G(`Failed to delete key "${H}": ${I}`)}};has=async(H)=>{try{return await this.readCacheEntry(H)!==void 0}catch(I){throw new G(`Failed to check if key "${H}" exists: ${I}`)}};mget=async(H)=>{if(H.length===0)return[];try{let I=[],j=await Promise.allSettled(H.map((q)=>this.readCacheEntry(q)));for(let q of j)if(q.status==="fulfilled")I.push(q.value?.value);else I.push(void 0);return I}catch(I){throw new G(`Failed to get multiple keys: ${I}`)}};mset=async(H)=>{if(H.length===0)return;try{let I=Date.now(),j=H.map(async({key:q,value:B,ttl:J})=>{let P={value:B,createdAt:I,originalKey:q,...J!==void 0&&{ttl:J}};return this.writeCacheEntry(q,P)});await Promise.all(j)}catch(I){throw new G(`Failed to set multiple keys: ${I}`)}};ttl=async(H)=>{try{let I=await this.readCacheEntry(H);if(!I)return-1;if(I.ttl===0)return 0;if(!I.ttl)return null;let j=Math.floor((I.createdAt+I.ttl*1000-Date.now())/1000);return Math.max(0,j)}catch(I){throw new G(`Failed to get TTL for key "${H}": ${I}`)}};expire=async(H,I)=>{try{let j=await this.readCacheEntry(H);if(!j)return!1;if(j.ttl=I,I!==0)j.createdAt=Date.now();return j.originalKey=H,await this.writeCacheEntry(H,j),!0}catch(j){throw new G(`Failed to set TTL for key "${H}": ${j}`)}};incr=async(H,I=1)=>{try{let j=await this.readCacheEntry(H),B=(typeof j?.value==="number"?j.value:0)+I,J={value:B,createdAt:j?.createdAt??Date.now(),originalKey:H,...j?.ttl!==void 0&&{ttl:j.ttl}};return await this.writeCacheEntry(H,J),B}catch(j){throw new G(`Failed to increment key "${H}" by ${I}: ${j}`)}};decr=async(H,I=1)=>{try{let j=await this.readCacheEntry(H),B=(typeof j?.value==="number"?j.value:0)-I,J={value:B,createdAt:j?.createdAt||Date.now(),originalKey:H,...j?.ttl!==void 0&&{ttl:j.ttl}};return await this.writeCacheEntry(H,J),B}catch(j){throw new G(`Failed to decrement key "${H}" by ${I}: ${j}`)}};keys=async(H="*")=>{try{let 
j=(await D(this.cacheDir)).filter((Q)=>Q.endsWith(".cache")),q=[];for(let Q of j)try{let R=Bun.file(`${this.cacheDir}/${Q}`);if(await R.exists()){let Y=await R.text(),S=JSON.parse(Y);if(S.originalKey)q.push(S.originalKey);else q.push(Q.replace(".cache",""))}}catch{q.push(Q.replace(".cache",""))}let B=q;if(H!=="*"){let Q=new RegExp(H.replace(/\*/g,".*").replace(/\?/g,"."));B=q.filter((R)=>Q.test(R))}let J=await Promise.allSettled(B.map(async(Q)=>{let R=await this.readCacheEntry(Q);return{key:Q,valid:R!==void 0}})),P=[];for(let Q of J)if(Q.status==="fulfilled"&&Q.value.valid)P.push(Q.value.key);return P.sort()}catch(I){throw new G(`Failed to get keys with pattern "${H}": ${I}`)}};flush=async()=>{try{let{readdir:H}=await import("fs/promises"),q=(await H(this.cacheDir)).filter((B)=>B.endsWith(".cache")).map((B)=>{return Bun.file(`${this.cacheDir}/${B}`).delete()});await Promise.all(q)}catch(H){throw new G(`Failed to flush cache: ${H}`)}};startCleanup(){this.cleanupTimer=setInterval(async()=>{try{await this.cleanupExpired()}catch{}},this.cleanupInterval)}async cleanupExpired(){try{let{readdir:H}=await import("fs/promises"),I=await H(this.cacheDir),j=Date.now();for(let q of I){if(!q.endsWith(".cache"))continue;let B=Bun.file(`${this.cacheDir}/${q}`);try{if(await B.exists()){let J=await B.text(),P=JSON.parse(J);if(P.ttl&&P.createdAt+P.ttl*1000<j)await B.delete()}}catch{try{await B.delete()}catch{}}}}catch{}}getFilePath(H){if(H.length>200){let j=Bun.hash(H);return`${this.cacheDir}/${j.toString(36)}.cache`}let I=H.replace(/[<>:"/\\|?*\x00-\x1f]/g,"_");return`${this.cacheDir}/${I}.cache`}async isExpired(H){if(!H.ttl)return!1;if(H.ttl===0)return!1;return H.createdAt+H.ttl*1000<Date.now()}async readCacheEntry(H){try{let I=Bun.file(this.getFilePath(H));if(!await I.exists())return;let j=await I.text(),q=JSON.parse(j);if(await this.isExpired(q)){await I.delete().catch(()=>{});return}return q}catch{return}}async writeCacheEntry(H,I){let j=JSON.stringify(I);if(Buffer.byteLength(j,"utf-8")>this.maxFileSize)throw new G(`Cache entry exceeds maximum file size of ${this.maxFileSize} bytes`);await Bun.write(this.getFilePath(H),j)}}class N{client;constructor(H={}){let I=H.connectionString||Bun.env.CACHE_REDIS_URL;if(!I)throw new G("Redis connection string is required. 
Please provide a connection string either through the constructor options or set the CACHE_REDIS_URL environment variable.");let{connectionString:j,...q}=H;this.client=new Bun.RedisClient(I,q)}async connect(){await this.client.connect()}close(){this.client.close()}get connected(){return this.client.connected}async get(H){try{let I=await this.client.get(H);if(I===null)return;try{return JSON.parse(I)}catch{return I}}catch(I){throw new G(`Failed to get key "${H}": ${I}`)}}async set(H,I,j){try{let q=I===void 0?null:I,B=typeof q==="string"?q:JSON.stringify(q);if(j)await this.client.send("SETEX",[H,j.toString(),B]);else await this.client.set(H,B)}catch(q){throw new G(`Failed to set key "${H}": ${q}`)}}async delete(H){try{return await this.client.del(H)>0}catch(I){throw new G(`Failed to delete key "${H}": ${I}`)}}async has(H){try{return await this.client.exists(H)}catch(I){throw new G(`Failed to check if key "${H}" exists: ${I}`)}}async mget(H){if(H.length===0)return[];try{return(await this.client.send("MGET",H)).map((j)=>{if(j===null)return;try{return JSON.parse(j)}catch{return j}})}catch(I){throw new G(`Failed to get multiple keys: ${I}`)}}async mset(H){if(H.length===0)return;try{let I=[],j=[];if(H.forEach((q)=>{if(q.ttl&&q.ttl>0)j.push(q);else I.push({key:q.key,value:q.value})}),I.length>0){let q=[];I.forEach(({key:B,value:J})=>{q.push(B);let P=J===void 0?null:J;q.push(typeof P==="string"?P:JSON.stringify(P))}),await this.client.send("MSET",q)}for(let{key:q,value:B,ttl:J}of j){let P=B===void 0?null:B,Q=typeof P==="string"?P:JSON.stringify(P);await this.client.send("SETEX",[q,J.toString(),Q])}}catch(I){throw new G(`Failed to set multiple keys: ${I}`)}}async ttl(H){try{let I=await this.client.ttl(H);if(I===-2)return-1;if(I===-1)return null;return I}catch(I){throw new G(`Failed to get TTL for key "${H}": ${I}`)}}async expire(H,I){try{return await this.client.expire(H,I)===1}catch(j){throw new G(`Failed to set TTL for key "${H}": ${j}`)}}async incr(H,I=1){try{if(I===1)return await this.client.incr(H);let j=Math.floor(I);return await this.client.send("INCRBY",[H,j.toString()])}catch(j){throw new G(`Failed to increment key "${H}" by ${I}: ${j}`)}}async decr(H,I=1){try{if(I===1)return await this.client.decr(H);let j=Math.floor(I);return await this.client.send("DECRBY",[H,j.toString()])}catch(j){throw new G(`Failed to decrement key "${H}" by ${I}: ${j}`)}}async keys(H="*"){try{return await this.client.send("KEYS",[H])}catch(I){throw new G(`Failed to get keys with pattern "${H}": ${I}`)}}async flush(){try{await this.client.send("FLUSHDB",[])}catch(H){throw new G(`Failed to flush database: ${H}`)}}}export{N as RedisCacheAdapter,F as FilesystemCache,G as CacheException};
+ var h=Object.create;var{getPrototypeOf:u,defineProperty:o,getOwnPropertyNames:p}=Object;var y=Object.prototype.hasOwnProperty;var m=(e,t,i)=>{i=e!=null?h(u(e)):{};let n=t||!e||!e.__esModule?o(i,"default",{value:e,enumerable:!0}):i;for(let a of p(e))if(!y.call(n,a))o(n,a,{get:()=>e[a],enumerable:!0});return n};var d=import.meta.require;import{Exception as w}from"@ooneex/exception";import{HttpStatus as f}from"@ooneex/http-status";class r extends w{constructor(e,t={}){super(e,{status:f.Code.InternalServerError,data:t});this.name="CacheException"}}import{container as g,EContainerScope as C}from"@ooneex/container";var x={cache:(e=C.Singleton)=>{return(t)=>{g.add(t,e)}}};class c{cacheDir;maxFileSize;constructor(e={}){this.cacheDir=e.cacheDir||`${process.cwd()}/.cache`,this.maxFileSize=e.maxFileSize||10485760}async connect(){try{let{mkdir:e,stat:t}=await import("fs/promises");if(await e(this.cacheDir,{recursive:!0}),!(await t(this.cacheDir)).isDirectory())throw new r("Failed to create cache directory")}catch(e){throw new r(`Failed to initialize filesystem cache: ${e}`)}}async get(e){try{return await this.connect(),(await this.readCacheEntry(e))?.value}catch(t){throw new r(`Failed to get key "${e}": ${t}`)}}async set(e,t,i){try{await this.connect();let n={value:t,createdAt:Date.now(),originalKey:e,...i!==void 0&&{ttl:i}};await this.writeCacheEntry(e,n)}catch(n){throw new r(`Failed to set key "${e}": ${n}`)}}async delete(e){try{await this.connect();let t=Bun.file(this.getFilePath(e));if(!await t.exists())return!1;return await t.delete(),!0}catch(t){throw new r(`Failed to delete key "${e}": ${t}`)}}async has(e){try{return await this.connect(),await this.readCacheEntry(e)!==void 0}catch(t){throw new r(`Failed to check if key "${e}" exists: ${t}`)}}getFilePath(e){if(e.length>200){let i=Bun.hash(e);return`${this.cacheDir}/${i.toString(36)}.cache`}let t=e.replace(/[<>:"/\\|?*\x00-\x1f]/g,"_");return`${this.cacheDir}/${t}.cache`}isExpired(e){if(!e.ttl)return!1;if(e.ttl===0)return!1;return e.createdAt+e.ttl*1000<Date.now()}async readCacheEntry(e){try{let t=Bun.file(this.getFilePath(e));if(!await t.exists())return;let i=await t.text(),n=JSON.parse(i);if(this.isExpired(n)){await t.delete().catch(()=>{});return}return n}catch{return}}async writeCacheEntry(e,t){let i=JSON.stringify(t);if(Buffer.byteLength(i,"utf-8")>this.maxFileSize)throw new r(`Cache entry exceeds maximum file size of ${this.maxFileSize} bytes`);await Bun.write(this.getFilePath(e),i)}}class s{client;constructor(e={}){let t=e.connectionString||Bun.env.CACHE_REDIS_URL;if(!t)throw new r("Redis connection string is required. 
Please provide a connection string either through the constructor options or set the CACHE_REDIS_URL environment variable.");let{connectionString:i,...n}=e,l={...{connectionTimeout:1e4,idleTimeout:30000,autoReconnect:!0,maxRetries:3,enableOfflineQueue:!0,enableAutoPipelining:!0},...n};this.client=new Bun.RedisClient(t,l)}async connect(){if(!this.client.connected)await this.client.connect()}async get(e){try{await this.connect();let t=await this.client.get(e);if(t===null)return;try{return JSON.parse(t)}catch{return t}}catch(t){throw new r(`Failed to get key "${e}": ${t}`)}}async set(e,t,i){try{await this.connect();let n=t===void 0?null:t,a=typeof n==="string"?n:JSON.stringify(n);if(await this.client.set(e,a),i&&i>0)await this.client.expire(e,i)}catch(n){throw new r(`Failed to set key "${e}": ${n}`)}}async delete(e){try{return await this.connect(),await this.client.del(e)>0}catch(t){throw new r(`Failed to delete key "${e}": ${t}`)}}async has(e){try{return await this.connect(),await this.client.exists(e)}catch(t){throw new r(`Failed to check if key "${e}" exists: ${t}`)}}}export{x as decorator,s as RedisCache,c as FilesystemCache,r as CacheException};
 
- //# debugId=3AA2F052E73C867064756E2164756E21
+ //# debugId=2319999CAED1C92C64756E2164756E21
package/dist/index.js.map CHANGED
@@ -1,12 +1,13 @@
  {
  "version": 3,
- "sources": ["src/CacheException.ts", "src/FilesystemCache.ts", "src/RedisCacheAdapter.ts"],
+ "sources": ["src/CacheException.ts", "src/decorators.ts", "src/FilesystemCache.ts", "src/RedisCache.ts"],
  "sourcesContent": [
  "import { Exception } from \"@ooneex/exception\";\nimport { HttpStatus } from \"@ooneex/http-status\";\n\nexport class CacheException extends Exception {\n constructor(message: string, data: Record<string, unknown> = {}) {\n super(message, {\n status: HttpStatus.Code.InternalServerError,\n data,\n });\n this.name = \"CacheException\";\n }\n}\n",
- "import { mkdir, readdir, stat } from \"node:fs/promises\";\nimport { CacheException } from \"./CacheException\";\nimport type { FilesystemCacheAdapterType, ICache } from \"./types\";\n\ntype CacheEntryType<T = unknown> = {\n value: T;\n ttl?: number;\n createdAt: number;\n originalKey: string;\n};\n\nexport class FilesystemCache implements ICache {\n private cacheDir: string;\n private maxFileSize: number;\n private cleanupInterval: number;\n private enableCleanup: boolean;\n private cleanupTimer: Timer | undefined;\n\n constructor(options: FilesystemCacheAdapterType = {}) {\n this.cacheDir = options.cacheDir || `${process.cwd()}/.cache`;\n this.maxFileSize = options.maxFileSize || 10 * 1024 * 1024; // 10MB default\n this.cleanupInterval = options.cleanupInterval || 5 * 60 * 1000; // 5 minutes default\n this.enableCleanup = options.enableCleanup ?? true;\n }\n\n public async connect(): Promise<void> {\n try {\n await mkdir(this.cacheDir, { recursive: true });\n\n // Verify directory was created\n const stats = await stat(this.cacheDir);\n if (!stats.isDirectory()) {\n throw new CacheException(\"Failed to create cache directory\");\n }\n\n if (this.enableCleanup && !this.cleanupTimer) {\n this.startCleanup();\n }\n } catch (error) {\n throw new CacheException(`Failed to initialize filesystem cache: ${error}`);\n }\n }\n\n public async close(): Promise<void> {\n if (this.cleanupTimer) {\n clearInterval(this.cleanupTimer);\n this.cleanupTimer = undefined as Timer | undefined;\n }\n }\n\n // Basic operations\n public get = async <T = unknown>(key: string): Promise<T | undefined> => {\n try {\n const entry = await this.readCacheEntry<T>(key);\n return entry?.value;\n } catch (error) {\n throw new CacheException(`Failed to get key \"${key}\": ${error}`);\n }\n };\n\n public set = async <T = unknown>(key: string, value: T, ttl?: number): Promise<void> => {\n try {\n const entry: CacheEntryType<T> = {\n value,\n createdAt: Date.now(),\n originalKey: key,\n ...(ttl !== undefined && { ttl }),\n };\n\n await this.writeCacheEntry(key, entry);\n } catch (error) {\n throw new CacheException(`Failed to set key \"${key}\": ${error}`);\n }\n };\n\n public delete = async (key: string): Promise<boolean> => {\n try {\n const file = Bun.file(this.getFilePath(key));\n\n if (!(await file.exists())) {\n return false;\n }\n\n await file.delete();\n return true;\n } catch (error) {\n throw new CacheException(`Failed to delete key \"${key}\": ${error}`);\n }\n };\n\n public has = async (key: string): Promise<boolean> => {\n try {\n const entry = await this.readCacheEntry(key);\n return entry !== undefined;\n } catch (error) {\n throw new CacheException(`Failed to check if key \"${key}\" exists: ${error}`);\n }\n };\n\n // Bulk operations\n public mget = async <T = unknown>(keys: string[]): Promise<(T | undefined)[]> => {\n if (keys.length === 0) {\n return [];\n }\n\n try {\n const results: (T | undefined)[] = [];\n\n // Use Promise.all for concurrent reads\n const entries = await Promise.allSettled(keys.map((key) => this.readCacheEntry<T>(key)));\n\n for (const result of entries) {\n if (result.status === \"fulfilled\") {\n results.push(result.value?.value);\n } else {\n results.push(undefined);\n }\n }\n\n return results;\n } catch (error) {\n throw new CacheException(`Failed to get multiple keys: ${error}`);\n }\n };\n\n public mset = async <T = unknown>(entries: { key: string; value: T; ttl?: number }[]): Promise<void> => {\n if (entries.length === 0) {\n return;\n }\n\n try {\n const now = Date.now();\n\n // 
Use Promise.all for concurrent writes\n const promises = entries.map(async ({ key, value, ttl }) => {\n const entry: CacheEntryType<T> = {\n value,\n createdAt: now,\n originalKey: key,\n ...(ttl !== undefined && { ttl }),\n };\n\n return this.writeCacheEntry(key, entry);\n });\n\n await Promise.all(promises);\n } catch (error) {\n throw new CacheException(`Failed to set multiple keys: ${error}`);\n }\n };\n\n // TTL / metadata\n public ttl = async (key: string): Promise<number | null> => {\n try {\n const entry = await this.readCacheEntry(key);\n\n if (!entry) {\n return -1; // Key not found\n }\n\n // Special handling for zero TTL - should return 0 if it was set with 0 TTL\n if (entry.ttl === 0) {\n return 0;\n }\n\n if (!entry.ttl) {\n return null; // No TTL\n }\n\n const remaining = Math.floor((entry.createdAt + entry.ttl * 1000 - Date.now()) / 1000);\n return Math.max(0, remaining);\n } catch (error) {\n throw new CacheException(`Failed to get TTL for key \"${key}\": ${error}`);\n }\n };\n\n public expire = async (key: string, ttl: number): Promise<boolean> => {\n try {\n const entry = await this.readCacheEntry(key);\n\n if (!entry) {\n return false;\n }\n\n // For zero TTL, mark it but don't update createdAt to maintain the \"zero\" state\n entry.ttl = ttl;\n if (ttl !== 0) {\n entry.createdAt = Date.now();\n }\n entry.originalKey = key; // Ensure original key is preserved\n\n await this.writeCacheEntry(key, entry);\n return true;\n } catch (error) {\n throw new CacheException(`Failed to set TTL for key \"${key}\": ${error}`);\n }\n };\n\n // Counters (atomic operations with file-based locking)\n public incr = async (key: string, delta = 1): Promise<number> => {\n try {\n const entry = await this.readCacheEntry<number>(key);\n const currentValue = typeof entry?.value === \"number\" ? entry.value : 0;\n const newValue = currentValue + delta;\n\n const newEntry: CacheEntryType<number> = {\n value: newValue,\n createdAt: entry?.createdAt ?? Date.now(),\n originalKey: key,\n ...(entry?.ttl !== undefined && { ttl: entry.ttl }),\n };\n\n await this.writeCacheEntry(key, newEntry);\n return newValue;\n } catch (error) {\n throw new CacheException(`Failed to increment key \"${key}\" by ${delta}: ${error}`);\n }\n };\n\n public decr = async (key: string, delta = 1): Promise<number> => {\n try {\n const entry = await this.readCacheEntry<number>(key);\n const currentValue = typeof entry?.value === \"number\" ? 
entry.value : 0;\n const newValue = currentValue - delta;\n\n const newEntry: CacheEntryType<number> = {\n value: newValue,\n createdAt: entry?.createdAt || Date.now(),\n originalKey: key,\n ...(entry?.ttl !== undefined && { ttl: entry.ttl }),\n };\n\n await this.writeCacheEntry(key, newEntry);\n return newValue;\n } catch (error) {\n throw new CacheException(`Failed to decrement key \"${key}\" by ${delta}: ${error}`);\n }\n };\n\n // Maintenance\n public keys = async (pattern = \"*\"): Promise<string[]> => {\n try {\n const files = await readdir(this.cacheDir);\n const cacheFiles = files.filter((file) => file.endsWith(\".cache\"));\n\n // Read cache files to get original keys\n const keys: string[] = [];\n\n for (const file of cacheFiles) {\n try {\n const cacheFile = Bun.file(`${this.cacheDir}/${file}`);\n if (await cacheFile.exists()) {\n const content = await cacheFile.text();\n const entry = JSON.parse(content);\n\n // Use the original key stored in the entry\n if (entry.originalKey) {\n keys.push(entry.originalKey);\n } else {\n // Fallback to filename without extension\n keys.push(file.replace(\".cache\", \"\"));\n }\n }\n } catch {\n // If we can't read the file, use the filename as key\n keys.push(file.replace(\".cache\", \"\"));\n }\n }\n\n // Simple pattern matching\n let filteredKeys = keys;\n if (pattern !== \"*\") {\n const regex = new RegExp(pattern.replace(/\\*/g, \".*\").replace(/\\?/g, \".\"));\n filteredKeys = keys.filter((key) => regex.test(key));\n }\n\n // Filter out expired keys using concurrent checks\n const validityChecks = await Promise.allSettled(\n filteredKeys.map(async (key) => {\n const entry = await this.readCacheEntry(key);\n return { key, valid: entry !== undefined };\n }),\n );\n\n const validKeys: string[] = [];\n for (const result of validityChecks) {\n if (result.status === \"fulfilled\" && result.value.valid) {\n validKeys.push(result.value.key);\n }\n }\n\n return validKeys.sort();\n } catch (error) {\n throw new CacheException(`Failed to get keys with pattern \"${pattern}\": ${error}`);\n }\n };\n\n public flush = async (): Promise<void> => {\n try {\n const { readdir } = await import(\"node:fs/promises\");\n const files = await readdir(this.cacheDir);\n const cacheFiles = files.filter((file) => file.endsWith(\".cache\"));\n\n // Use Promise.all for concurrent deletions\n const promises = cacheFiles.map((fileName) => {\n const file = Bun.file(`${this.cacheDir}/${fileName}`);\n return file.delete();\n });\n\n await Promise.all(promises);\n } catch (error) {\n throw new CacheException(`Failed to flush cache: ${error}`);\n }\n };\n\n private startCleanup(): void {\n this.cleanupTimer = setInterval(async () => {\n try {\n await this.cleanupExpired();\n } catch {\n // Silent fail for cleanup\n }\n }, this.cleanupInterval);\n }\n\n private async cleanupExpired(): Promise<void> {\n try {\n const { readdir } = await import(\"node:fs/promises\");\n const files = await readdir(this.cacheDir);\n const now = Date.now();\n\n for (const fileName of files) {\n if (!fileName.endsWith(\".cache\")) continue;\n\n const file = Bun.file(`${this.cacheDir}/${fileName}`);\n try {\n if (await file.exists()) {\n const content = await file.text();\n const entry: CacheEntryType = JSON.parse(content);\n\n if (entry.ttl && entry.createdAt + entry.ttl * 1000 < now) {\n await file.delete();\n }\n }\n } catch {\n // If we can't read/parse a file, it might be corrupted, so delete it\n try {\n await file.delete();\n } catch {\n // Silent fail\n }\n }\n }\n } catch {\n // Silent fail 
for cleanup\n }\n }\n\n private getFilePath(key: string): string {\n // Handle very long keys by using hash instead of truncation\n if (key.length > 200) {\n const hash = Bun.hash(key);\n return `${this.cacheDir}/${hash.toString(36)}.cache`;\n }\n\n // Sanitize key for filesystem - only replace characters that are problematic for file names\n const sanitizedKey = key.replace(/[<>:\"/\\\\|?*\\x00-\\x1f]/g, \"_\");\n return `${this.cacheDir}/${sanitizedKey}.cache`;\n }\n\n private async isExpired(entry: CacheEntryType): Promise<boolean> {\n if (!entry.ttl) return false;\n\n // Special case for zero TTL - should not be considered expired for immediate reads\n // Zero TTL means immediate expiration but should still be readable momentarily\n if (entry.ttl === 0) {\n return false; // Don't auto-expire zero TTL entries in readCacheEntry\n }\n\n return entry.createdAt + entry.ttl * 1000 < Date.now();\n }\n\n private async readCacheEntry<T>(key: string): Promise<CacheEntryType<T> | undefined> {\n try {\n const file = Bun.file(this.getFilePath(key));\n\n if (!(await file.exists())) {\n return;\n }\n\n const content = await file.text();\n const entry: CacheEntryType<T> = JSON.parse(content);\n\n if (await this.isExpired(entry)) {\n // Clean up expired entry\n await file.delete().catch(() => {});\n return;\n }\n\n return entry;\n } catch {\n // For any read errors, assume file doesn't exist or is corrupted\n return;\n }\n }\n\n private async writeCacheEntry<T>(key: string, entry: CacheEntryType<T>): Promise<void> {\n const content = JSON.stringify(entry);\n\n if (Buffer.byteLength(content, \"utf-8\") > this.maxFileSize) {\n throw new CacheException(`Cache entry exceeds maximum file size of ${this.maxFileSize} bytes`);\n }\n\n await Bun.write(this.getFilePath(key), content);\n }\n}\n",
- "import { CacheException } from \"./CacheException\";\nimport type { ICache, RedisCacheAdapterType } from \"./types\";\n\nexport class RedisCacheAdapter implements ICache {\n private client: Bun.RedisClient;\n\n constructor(options: RedisCacheAdapterType = {}) {\n const connectionString = options.connectionString || Bun.env.CACHE_REDIS_URL;\n\n if (!connectionString) {\n throw new CacheException(\n \"Redis connection string is required. Please provide a connection string either through the constructor options or set the CACHE_REDIS_URL environment variable.\",\n );\n }\n\n const { connectionString: _, ...clientOptions } = options;\n\n this.client = new Bun.RedisClient(connectionString, clientOptions);\n }\n\n public async connect(): Promise<void> {\n await this.client.connect();\n }\n\n public close(): void {\n this.client.close();\n }\n\n /**\n * Get connection status\n */\n public get connected(): boolean {\n return this.client.connected;\n }\n\n // Basic operations\n public async get<T = unknown>(key: string): Promise<T | undefined> {\n try {\n const value = await this.client.get(key);\n if (value === null) {\n return;\n }\n\n // Try to parse as JSON, fallback to raw value\n try {\n return JSON.parse(value);\n } catch {\n return value as T;\n }\n } catch (error) {\n throw new CacheException(`Failed to get key \"${key}\": ${error}`);\n }\n }\n\n public async set<T = unknown>(key: string, value: T, ttl?: number): Promise<void> {\n try {\n // Convert undefined to null for Redis compatibility\n const normalizedValue = value === undefined ? null : value;\n const serializedValue = typeof normalizedValue === \"string\" ? normalizedValue : JSON.stringify(normalizedValue);\n\n if (ttl) {\n await this.client.send(\"SETEX\", [key, ttl.toString(), serializedValue]);\n } else {\n await this.client.set(key, serializedValue);\n }\n } catch (error) {\n throw new CacheException(`Failed to set key \"${key}\": ${error}`);\n }\n }\n\n public async delete(key: string): Promise<boolean> {\n try {\n const result = await this.client.del(key);\n return result > 0;\n } catch (error) {\n throw new CacheException(`Failed to delete key \"${key}\": ${error}`);\n }\n }\n\n public async has(key: string): Promise<boolean> {\n try {\n return await this.client.exists(key);\n } catch (error) {\n throw new CacheException(`Failed to check if key \"${key}\" exists: ${error}`);\n }\n }\n\n // Bulk operations\n public async mget<T = unknown>(keys: string[]): Promise<(T | undefined)[]> {\n if (keys.length === 0) {\n return [];\n }\n\n try {\n const values = await this.client.send(\"MGET\", keys);\n return values.map((value: string | null) => {\n if (value === null) {\n return undefined;\n }\n\n // Try to parse as JSON, fallback to raw value\n try {\n return JSON.parse(value);\n } catch {\n return value as T;\n }\n });\n } catch (error) {\n throw new CacheException(`Failed to get multiple keys: ${error}`);\n }\n }\n\n public async mset<T = unknown>(entries: { key: string; value: T; ttl?: number }[]): Promise<void> {\n if (entries.length === 0) {\n return;\n }\n\n try {\n // Group entries by TTL\n const withoutTtl: { key: string; value: T }[] = [];\n const withTtl: { key: string; value: T; ttl: number }[] = [];\n\n entries.forEach((entry) => {\n if (entry.ttl && entry.ttl > 0) {\n withTtl.push(entry as { key: string; value: T; ttl: number });\n } else {\n withoutTtl.push({ key: entry.key, value: entry.value });\n }\n });\n\n // Handle entries without TTL using MSET\n if (withoutTtl.length > 0) {\n const msetArgs: string[] = 
[];\n withoutTtl.forEach(({ key, value }) => {\n msetArgs.push(key);\n // Convert undefined to null for Redis compatibility\n const normalizedValue = value === undefined ? null : value;\n msetArgs.push(typeof normalizedValue === \"string\" ? normalizedValue : JSON.stringify(normalizedValue));\n });\n await this.client.send(\"MSET\", msetArgs);\n }\n\n // Handle entries with TTL individually using SETEX\n for (const { key, value, ttl } of withTtl) {\n // Convert undefined to null for Redis compatibility\n const normalizedValue = value === undefined ? null : value;\n const serializedValue = typeof normalizedValue === \"string\" ? normalizedValue : JSON.stringify(normalizedValue);\n await this.client.send(\"SETEX\", [key, ttl.toString(), serializedValue]);\n }\n } catch (error) {\n throw new CacheException(`Failed to set multiple keys: ${error}`);\n }\n }\n\n // TTL / metadata\n public async ttl(key: string): Promise<number | null> {\n try {\n const result = await this.client.ttl(key);\n\n // Redis TTL returns:\n // -2 if key does not exist\n // -1 if key exists but has no TTL\n // positive number for remaining seconds\n if (result === -2) {\n return -1; // Key not found (following interface spec)\n }\n if (result === -1) {\n return null; // No TTL\n }\n return result; // Remaining seconds\n } catch (error) {\n throw new CacheException(`Failed to get TTL for key \"${key}\": ${error}`);\n }\n }\n\n public async expire(key: string, ttl: number): Promise<boolean> {\n try {\n const result = await this.client.expire(key, ttl);\n return result === 1;\n } catch (error) {\n throw new CacheException(`Failed to set TTL for key \"${key}\": ${error}`);\n }\n }\n\n // Counters (atomic operations)\n public async incr(key: string, delta = 1): Promise<number> {\n try {\n if (delta === 1) {\n return await this.client.incr(key);\n }\n const integerDelta = Math.floor(delta);\n return await this.client.send(\"INCRBY\", [key, integerDelta.toString()]);\n } catch (error) {\n throw new CacheException(`Failed to increment key \"${key}\" by ${delta}: ${error}`);\n }\n }\n\n public async decr(key: string, delta = 1): Promise<number> {\n try {\n if (delta === 1) {\n return await this.client.decr(key);\n }\n const integerDelta = Math.floor(delta);\n return await this.client.send(\"DECRBY\", [key, integerDelta.toString()]);\n } catch (error) {\n throw new CacheException(`Failed to decrement key \"${key}\" by ${delta}: ${error}`);\n }\n }\n\n // Maintenance\n public async keys(pattern = \"*\"): Promise<string[]> {\n try {\n return await this.client.send(\"KEYS\", [pattern]);\n } catch (error) {\n throw new CacheException(`Failed to get keys with pattern \"${pattern}\": ${error}`);\n }\n }\n\n public async flush(): Promise<void> {\n try {\n await this.client.send(\"FLUSHDB\", []);\n } catch (error) {\n throw new CacheException(`Failed to flush database: ${error}`);\n }\n }\n}\n"
+ "import { container, EContainerScope } from \"@ooneex/container\";\nimport type { CacheClassType } from \"./types\";\n\nexport const decorator = {\n cache: (scope: EContainerScope = EContainerScope.Singleton) => {\n return (target: CacheClassType): void => {\n container.add(target, scope);\n };\n },\n};\n",
+ "import { CacheException } from \"./CacheException\";\nimport type { FilesystemCacheOptionsType, ICache } from \"./types\";\n\ntype CacheEntryType<T = unknown> = {\n value: T;\n ttl?: number;\n createdAt: number;\n originalKey: string;\n};\n\nexport class FilesystemCache implements ICache {\n private cacheDir: string;\n private maxFileSize: number;\n\n constructor(options: FilesystemCacheOptionsType = {}) {\n this.cacheDir = options.cacheDir || `${process.cwd()}/.cache`;\n this.maxFileSize = options.maxFileSize || 10 * 1024 * 1024; // 10MB default\n }\n\n private async connect(): Promise<void> {\n try {\n const { mkdir, stat } = await import(\"node:fs/promises\");\n await mkdir(this.cacheDir, { recursive: true });\n\n const stats = await stat(this.cacheDir);\n if (!stats.isDirectory()) {\n throw new CacheException(\"Failed to create cache directory\");\n }\n } catch (error) {\n throw new CacheException(`Failed to initialize filesystem cache: ${error}`);\n }\n }\n\n public async get<T = unknown>(key: string): Promise<T | undefined> {\n try {\n await this.connect();\n const entry = await this.readCacheEntry<T>(key);\n\n return entry?.value;\n } catch (error) {\n throw new CacheException(`Failed to get key \"${key}\": ${error}`);\n }\n }\n\n public async set<T = unknown>(key: string, value: T, ttl?: number): Promise<void> {\n try {\n await this.connect();\n\n const entry: CacheEntryType<T> = {\n value,\n createdAt: Date.now(),\n originalKey: key,\n ...(ttl !== undefined && { ttl }),\n };\n\n await this.writeCacheEntry(key, entry);\n } catch (error) {\n throw new CacheException(`Failed to set key \"${key}\": ${error}`);\n }\n }\n\n public async delete(key: string): Promise<boolean> {\n try {\n await this.connect();\n\n const file = Bun.file(this.getFilePath(key));\n\n if (!(await file.exists())) {\n return false;\n }\n\n await file.delete();\n\n return true;\n } catch (error) {\n throw new CacheException(`Failed to delete key \"${key}\": ${error}`);\n }\n }\n\n public async has(key: string): Promise<boolean> {\n try {\n await this.connect();\n const entry = await this.readCacheEntry(key);\n\n return entry !== undefined;\n } catch (error) {\n throw new CacheException(`Failed to check if key \"${key}\" exists: ${error}`);\n }\n }\n\n private getFilePath(key: string): string {\n if (key.length > 200) {\n const hash = Bun.hash(key);\n return `${this.cacheDir}/${hash.toString(36)}.cache`;\n }\n\n const sanitizedKey = key.replace(/[<>:\"/\\\\|?*\\x00-\\x1f]/g, \"_\");\n\n return `${this.cacheDir}/${sanitizedKey}.cache`;\n }\n\n private isExpired(entry: CacheEntryType): boolean {\n if (!entry.ttl) return false;\n\n if (entry.ttl === 0) {\n return false;\n }\n\n return entry.createdAt + entry.ttl * 1000 < Date.now();\n }\n\n private async readCacheEntry<T>(key: string): Promise<CacheEntryType<T> | undefined> {\n try {\n const file = Bun.file(this.getFilePath(key));\n\n if (!(await file.exists())) {\n return;\n }\n\n const content = await file.text();\n const entry: CacheEntryType<T> = JSON.parse(content);\n\n if (this.isExpired(entry)) {\n await file.delete().catch(() => {});\n return;\n }\n\n return entry;\n } catch {\n return;\n }\n }\n\n private async writeCacheEntry<T>(key: string, entry: CacheEntryType<T>): Promise<void> {\n const content = JSON.stringify(entry);\n\n if (Buffer.byteLength(content, \"utf-8\") > this.maxFileSize) {\n throw new CacheException(`Cache entry exceeds maximum file size of ${this.maxFileSize} bytes`);\n }\n\n await Bun.write(this.getFilePath(key), content);\n }\n}\n",
+ "import { CacheException } from \"./CacheException\";\nimport type { ICache, RedisCacheOptionsType } from \"./types\";\n\nexport class RedisCache implements ICache {\n private client: Bun.RedisClient;\n\n constructor(options: RedisCacheOptionsType = {}) {\n const connectionString = options.connectionString || Bun.env.CACHE_REDIS_URL;\n\n if (!connectionString) {\n throw new CacheException(\n \"Redis connection string is required. Please provide a connection string either through the constructor options or set the CACHE_REDIS_URL environment variable.\",\n );\n }\n\n const { connectionString: _, ...userOptions } = options;\n\n const defaultOptions = {\n connectionTimeout: 10_000,\n idleTimeout: 30_000,\n autoReconnect: true,\n maxRetries: 3,\n enableOfflineQueue: true,\n enableAutoPipelining: true,\n };\n\n const clientOptions = { ...defaultOptions, ...userOptions };\n\n this.client = new Bun.RedisClient(connectionString, clientOptions);\n }\n\n private async connect(): Promise<void> {\n if (!this.client.connected) {\n await this.client.connect();\n }\n }\n\n public async get<T = unknown>(key: string): Promise<T | undefined> {\n try {\n await this.connect();\n const value = await this.client.get(key);\n\n if (value === null) {\n return;\n }\n\n try {\n return JSON.parse(value);\n } catch {\n return value as T;\n }\n } catch (error) {\n throw new CacheException(`Failed to get key \"${key}\": ${error}`);\n }\n }\n\n public async set<T = unknown>(key: string, value: T, ttl?: number): Promise<void> {\n try {\n await this.connect();\n\n const normalizedValue = value === undefined ? null : value;\n const serializedValue = typeof normalizedValue === \"string\" ? normalizedValue : JSON.stringify(normalizedValue);\n\n await this.client.set(key, serializedValue);\n\n if (ttl && ttl > 0) {\n await this.client.expire(key, ttl);\n }\n } catch (error) {\n throw new CacheException(`Failed to set key \"${key}\": ${error}`);\n }\n }\n\n public async delete(key: string): Promise<boolean> {\n try {\n await this.connect();\n const result = await this.client.del(key);\n\n return result > 0;\n } catch (error) {\n throw new CacheException(`Failed to delete key \"${key}\": ${error}`);\n }\n }\n\n public async has(key: string): Promise<boolean> {\n try {\n await this.connect();\n const result = await this.client.exists(key);\n\n return result;\n } catch (error) {\n throw new CacheException(`Failed to check if key \"${key}\" exists: ${error}`);\n }\n }\n}\n"
  ],
- "mappings": ";iVAAA,oBAAS,0BACT,qBAAS,4BAEF,MAAM,UAAuB,CAAU,CAC5C,WAAW,CAAC,EAAiB,EAAgC,CAAC,EAAG,CAC/D,MAAM,EAAS,CACb,OAAQ,EAAW,KAAK,oBACxB,MACF,CAAC,EACD,KAAK,KAAO,iBAEhB,CCXA,gBAAS,aAAO,UAAS,oBAWlB,MAAM,CAAkC,CACrC,SACA,YACA,gBACA,cACA,aAER,WAAW,CAAC,EAAsC,CAAC,EAAG,CACpD,KAAK,SAAW,EAAQ,UAAY,GAAG,QAAQ,IAAI,WACnD,KAAK,YAAc,EAAQ,aAAe,SAC1C,KAAK,gBAAkB,EAAQ,iBAAmB,OAClD,KAAK,cAAgB,EAAQ,eAAiB,QAGnC,QAAO,EAAkB,CACpC,GAAI,CAKF,GAJA,MAAM,EAAM,KAAK,SAAU,CAAE,UAAW,EAAK,CAAC,EAI1C,EADU,MAAM,EAAK,KAAK,QAAQ,GAC3B,YAAY,EACrB,MAAM,IAAI,EAAe,kCAAkC,EAG7D,GAAI,KAAK,eAAiB,CAAC,KAAK,aAC9B,KAAK,aAAa,EAEpB,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,0CAA0C,GAAO,QAIjE,MAAK,EAAkB,CAClC,GAAI,KAAK,aACP,cAAc,KAAK,YAAY,EAC/B,KAAK,aAAe,OAKjB,IAAM,MAAoB,IAAwC,CACvE,GAAI,CAEF,OADc,MAAM,KAAK,eAAkB,CAAG,IAChC,MACd,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,IAI5D,IAAM,MAAoB,EAAa,EAAU,IAAgC,CACtF,GAAI,CACF,IAAM,EAA2B,CAC/B,QACA,UAAW,KAAK,IAAI,EACpB,YAAa,KACT,IAAQ,QAAa,CAAE,KAAI,CACjC,EAEA,MAAM,KAAK,gBAAgB,EAAK,CAAK,EACrC,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,IAI5D,OAAS,MAAO,IAAkC,CACvD,GAAI,CACF,IAAM,EAAO,IAAI,KAAK,KAAK,YAAY,CAAG,CAAC,EAE3C,GAAI,CAAE,MAAM,EAAK,OAAO,EACtB,MAAO,GAIT,OADA,MAAM,EAAK,OAAO,EACX,GACP,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,yBAAyB,OAAS,GAAO,IAI/D,IAAM,MAAO,IAAkC,CACpD,GAAI,CAEF,OADc,MAAM,KAAK,eAAe,CAAG,IAC1B,OACjB,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,2BAA2B,cAAgB,GAAO,IAKxE,KAAO,MAAoB,IAA+C,CAC/E,GAAI,EAAK,SAAW,EAClB,MAAO,CAAC,EAGV,GAAI,CACF,IAAM,EAA6B,CAAC,EAG9B,EAAU,MAAM,QAAQ,WAAW,EAAK,IAAI,CAAC,IAAQ,KAAK,eAAkB,CAAG,CAAC,CAAC,EAEvF,QAAW,KAAU,EACnB,GAAI,EAAO,SAAW,YACpB,EAAQ,KAAK,EAAO,OAAO,KAAK,EAEhC,OAAQ,KAAK,MAAS,EAI1B,OAAO,EACP,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,gCAAgC,GAAO,IAI7D,KAAO,MAAoB,IAAsE,CACtG,GAAI,EAAQ,SAAW,EACrB,OAGF,GAAI,CACF,IAAM,EAAM,KAAK,IAAI,EAGf,EAAW,EAAQ,IAAI,OAAS,MAAK,QAAO,SAAU,CAC1D,IAAM,EAA2B,CAC/B,QACA,UAAW,EACX,YAAa,KACT,IAAQ,QAAa,CAAE,KAAI,CACjC,EAEA,OAAO,KAAK,gBAAgB,EAAK,CAAK,EACvC,EAED,MAAM,QAAQ,IAAI,CAAQ,EAC1B,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,gCAAgC,GAAO,IAK7D,IAAM,MAAO,IAAwC,CAC1D,GAAI,CACF,IAAM,EAAQ,MAAM,KAAK,eAAe,CAAG,EAE3C,GAAI,CAAC,EACH,MAAO,GAIT,GAAI,EAAM,MAAQ,EAChB,MAAO,GAGT,GAAI,CAAC,EAAM,IACT,OAAO,KAGT,IAAM,EAAY,KAAK,OAAO,EAAM,UAAY,EAAM,IAAM,KAAO,KAAK,IAAI,GAAK,IAAI,EACrF,OAAO,KAAK,IAAI,EAAG,CAAS,EAC5B,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,8BAA8B,OAAS,GAAO,IAIpE,OAAS,MAAO,EAAa,IAAkC,CACpE,GAAI,CACF,IAAM,EAAQ,MAAM,KAAK,eAAe,CAAG,EAE3C,GAAI,CAAC,EACH,MAAO,GAKT,GADA,EAAM,IAAM,EACR,IAAQ,EACV,EAAM,UAAY,KAAK,IAAI,EAK7B,OAHA,EAAM,YAAc,EAEpB,MAAM,KAAK,gBAAgB,EAAK,CAAK,EAC9B,GACP,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,8BAA8B,OAAS,GAAO,IAKpE,KAAO,MAAO,EAAa,EAAQ,IAAuB,CAC/D,GAAI,CACF,IAAM,EAAQ,MAAM,KAAK,eAAuB,CAAG,EAE7C,GADe,OAAO,GAAO,QAAU,SAAW,EAAM,MAAQ,GACtC,EAE1B,EAAmC,CACvC,MAAO,EACP,UAAW,GAAO,WAAa,KAAK,IAAI,EACxC,YAAa,KACT,GAAO,MAAQ,QAAa,CAAE,IAAK,EAAM,GAAI,CACnD,EAGA,OADA,MAAM,KAAK,gBAAgB,EAAK,CAAQ,EACjC,EACP,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,4BAA4B,SAAW,MAAU,GAAO,IAI9E,KAAO,MAAO,EAAa,EAAQ,IAAuB,CAC/D,GAAI,CACF,IAAM,EAAQ,MAAM,KAAK,eAAuB,CAAG,EAE7C,GADe,OAAO,GAAO,QAAU,SAAW,EAAM,MAAQ,GACtC,EAE1B,EAAmC,CACvC,MAAO,EACP,UAAW,GAAO,WAAa,KAAK,IAAI,EACxC,YAAa,KACT,GAAO,MAAQ,QAAa,CAAE,IAAK,EAAM,GAAI,CACnD,EAGA,OADA,MAAM,KAAK,gBAAgB,EAAK,CAAQ,EACjC,EACP,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,4BAA4B,SAAW,MAAU,GAAO,IAK9E,KAAO,MAAO,EAAU,MAA2B,CACxD,GAAI,CAEF,IAAM,GADQ,MAAM,EAAQ,KAAK,QAAQ,GAChB,OAAO,CAAC,IAAS,EAAK,SAAS,QAAQ,CAAC,EAG3D,EAAiB,CAAC,EAExB,QAAW,KAAQ,EACjB,GAAI,CACF,IAAM,EAAY,IAAI,KAAK,GAAG,KAAK,YAAY,GAAM,EACrD,GAAI,MAAM,EAAU,OAAO,EAAG,CAC5B,IAAM,EAAU,MAAM,EAAU,KAAK,EAC/B,EAAQ,KAAK,MAAM,CAAO,EAGhC,GAAI,EAAM,YACR,EAAK,
KAAK,EAAM,WAAW,EAG3B,OAAK,KAAK,EAAK,QAAQ,SAAU,EAAE,CAAC,GAGxC,KAAM,CAEN,EAAK,KAAK,EAAK,QAAQ,SAAU,EAAE,CAAC,EAKxC,IAAI,EAAe,EACnB,GAAI,IAAY,IAAK,CACnB,IAAM,EAAQ,IAAI,OAAO,EAAQ,QAAQ,MAAO,IAAI,EAAE,QAAQ,MAAO,GAAG,CAAC,EACzE,EAAe,EAAK,OAAO,CAAC,IAAQ,EAAM,KAAK,CAAG,CAAC,EAIrD,IAAM,EAAiB,MAAM,QAAQ,WACnC,EAAa,IAAI,MAAO,IAAQ,CAC9B,IAAM,EAAQ,MAAM,KAAK,eAAe,CAAG,EAC3C,MAAO,CAAE,MAAK,MAAO,IAAU,MAAU,EAC1C,CACH,EAEM,EAAsB,CAAC,EAC7B,QAAW,KAAU,EACnB,GAAI,EAAO,SAAW,aAAe,EAAO,MAAM,MAChD,EAAU,KAAK,EAAO,MAAM,GAAG,EAInC,OAAO,EAAU,KAAK,EACtB,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,oCAAoC,OAAa,GAAO,IAI9E,MAAQ,SAA2B,CACxC,GAAI,CACF,IAAQ,WAAY,KAAa,uBAK3B,GAJQ,MAAM,EAAQ,KAAK,QAAQ,GAChB,OAAO,CAAC,IAAS,EAAK,SAAS,QAAQ,CAAC,EAGrC,IAAI,CAAC,IAAa,CAE5C,OADa,IAAI,KAAK,GAAG,KAAK,YAAY,GAAU,EACxC,OAAO,EACpB,EAED,MAAM,QAAQ,IAAI,CAAQ,EAC1B,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,0BAA0B,GAAO,IAItD,YAAY,EAAS,CAC3B,KAAK,aAAe,YAAY,SAAY,CAC1C,GAAI,CACF,MAAM,KAAK,eAAe,EAC1B,KAAM,IAGP,KAAK,eAAe,OAGX,eAAc,EAAkB,CAC5C,GAAI,CACF,IAAQ,WAAY,KAAa,uBAC3B,EAAQ,MAAM,EAAQ,KAAK,QAAQ,EACnC,EAAM,KAAK,IAAI,EAErB,QAAW,KAAY,EAAO,CAC5B,GAAI,CAAC,EAAS,SAAS,QAAQ,EAAG,SAElC,IAAM,EAAO,IAAI,KAAK,GAAG,KAAK,YAAY,GAAU,EACpD,GAAI,CACF,GAAI,MAAM,EAAK,OAAO,EAAG,CACvB,IAAM,EAAU,MAAM,EAAK,KAAK,EAC1B,EAAwB,KAAK,MAAM,CAAO,EAEhD,GAAI,EAAM,KAAO,EAAM,UAAY,EAAM,IAAM,KAAO,EACpD,MAAM,EAAK,OAAO,GAGtB,KAAM,CAEN,GAAI,CACF,MAAM,EAAK,OAAO,EAClB,KAAM,KAKZ,KAAM,GAKF,WAAW,CAAC,EAAqB,CAEvC,GAAI,EAAI,OAAS,IAAK,CACpB,IAAM,EAAO,IAAI,KAAK,CAAG,EACzB,MAAO,GAAG,KAAK,YAAY,EAAK,SAAS,EAAE,UAI7C,IAAM,EAAe,EAAI,QAAQ,yBAA0B,GAAG,EAC9D,MAAO,GAAG,KAAK,YAAY,eAGf,UAAS,CAAC,EAAyC,CAC/D,GAAI,CAAC,EAAM,IAAK,MAAO,GAIvB,GAAI,EAAM,MAAQ,EAChB,MAAO,GAGT,OAAO,EAAM,UAAY,EAAM,IAAM,KAAO,KAAK,IAAI,OAGzC,eAAiB,CAAC,EAAqD,CACnF,GAAI,CACF,IAAM,EAAO,IAAI,KAAK,KAAK,YAAY,CAAG,CAAC,EAE3C,GAAI,CAAE,MAAM,EAAK,OAAO,EACtB,OAGF,IAAM,EAAU,MAAM,EAAK,KAAK,EAC1B,EAA2B,KAAK,MAAM,CAAO,EAEnD,GAAI,MAAM,KAAK,UAAU,CAAK,EAAG,CAE/B,MAAM,EAAK,OAAO,EAAE,MAAM,IAAM,EAAE,EAClC,OAGF,OAAO,EACP,KAAM,CAEN,aAIU,gBAAkB,CAAC,EAAa,EAAyC,CACrF,IAAM,EAAU,KAAK,UAAU,CAAK,EAEpC,GAAI,OAAO,WAAW,EAAS,OAAO,EAAI,KAAK,YAC7C,MAAM,IAAI,EAAe,4CAA4C,KAAK,mBAAmB,EAG/F,MAAM,IAAI,MAAM,KAAK,YAAY,CAAG,EAAG,CAAO,EAElD,CC5ZO,MAAM,CAAoC,CACvC,OAER,WAAW,CAAC,EAAiC,CAAC,EAAG,CAC/C,IAAM,EAAmB,EAAQ,kBAAoB,IAAI,IAAI,gBAE7D,GAAI,CAAC,EACH,MAAM,IAAI,EACR,iKACF,EAGF,IAAQ,iBAAkB,KAAM,GAAkB,EAElD,KAAK,OAAS,IAAI,IAAI,YAAY,EAAkB,CAAa,OAGtD,QAAO,EAAkB,CACpC,MAAM,KAAK,OAAO,QAAQ,EAGrB,KAAK,EAAS,CACnB,KAAK,OAAO,MAAM,KAMT,UAAS,EAAY,CAC9B,OAAO,KAAK,OAAO,eAIR,IAAgB,CAAC,EAAqC,CACjE,GAAI,CACF,IAAM,EAAQ,MAAM,KAAK,OAAO,IAAI,CAAG,EACvC,GAAI,IAAU,KACZ,OAIF,GAAI,CACF,OAAO,KAAK,MAAM,CAAK,EACvB,KAAM,CACN,OAAO,GAET,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,QAItD,IAAgB,CAAC,EAAa,EAAU,EAA6B,CAChF,GAAI,CAEF,IAAM,EAAkB,IAAU,OAAY,KAAO,EAC/C,EAAkB,OAAO,IAAoB,SAAW,EAAkB,KAAK,UAAU,CAAe,EAE9G,GAAI,EACF,MAAM,KAAK,OAAO,KAAK,QAAS,CAAC,EAAK,EAAI,SAAS,EAAG,CAAe,CAAC,EAEtE,WAAM,KAAK,OAAO,IAAI,EAAK,CAAe,EAE5C,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,QAItD,OAAM,CAAC,EAA+B,CACjD,GAAI,CAEF,OADe,MAAM,KAAK,OAAO,IAAI,CAAG,EACxB,EAChB,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,yBAAyB,OAAS,GAAO,QAIzD,IAAG,CAAC,EAA+B,CAC9C,GAAI,CACF,OAAO,MAAM,KAAK,OAAO,OAAO,CAAG,EACnC,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,2BAA2B,cAAgB,GAAO,QAKlE,KAAiB,CAAC,EAA4C,CACzE,GAAI,EAAK,SAAW,EAClB,MAAO,CAAC,EAGV,GAAI,CAEF,OADe,MAAM,KAAK,OAAO,KAAK,OAAQ,CAAI,GACpC,IAAI,CAAC,IAAyB,CAC1C,GAAI,IAAU,KACZ,OAIF,GAAI,CACF,OAAO,KAAK,MAAM,CAAK,EACvB,KAAM,CACN,OAAO,GAEV,EACD,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,gCAAgC,GAAO,QAIvD,KAAiB,CAAC,EAAmE,CAChG,GAAI,EAAQ,SAAW,EACrB,OAGF,GAAI,CAEF,IAAM
,EAA0C,CAAC,EAC3C,EAAoD,CAAC,EAW3D,GATA,EAAQ,QAAQ,CAAC,IAAU,CACzB,GAAI,EAAM,KAAO,EAAM,IAAM,EAC3B,EAAQ,KAAK,CAA+C,EAE5D,OAAW,KAAK,CAAE,IAAK,EAAM,IAAK,MAAO,EAAM,KAAM,CAAC,EAEzD,EAGG,EAAW,OAAS,EAAG,CACzB,IAAM,EAAqB,CAAC,EAC5B,EAAW,QAAQ,EAAG,MAAK,WAAY,CACrC,EAAS,KAAK,CAAG,EAEjB,IAAM,EAAkB,IAAU,OAAY,KAAO,EACrD,EAAS,KAAK,OAAO,IAAoB,SAAW,EAAkB,KAAK,UAAU,CAAe,CAAC,EACtG,EACD,MAAM,KAAK,OAAO,KAAK,OAAQ,CAAQ,EAIzC,QAAa,MAAK,QAAO,SAAS,EAAS,CAEzC,IAAM,EAAkB,IAAU,OAAY,KAAO,EAC/C,EAAkB,OAAO,IAAoB,SAAW,EAAkB,KAAK,UAAU,CAAe,EAC9G,MAAM,KAAK,OAAO,KAAK,QAAS,CAAC,EAAK,EAAI,SAAS,EAAG,CAAe,CAAC,GAExE,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,gCAAgC,GAAO,QAKvD,IAAG,CAAC,EAAqC,CACpD,GAAI,CACF,IAAM,EAAS,MAAM,KAAK,OAAO,IAAI,CAAG,EAMxC,GAAI,IAAW,GACb,MAAO,GAET,GAAI,IAAW,GACb,OAAO,KAET,OAAO,EACP,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,8BAA8B,OAAS,GAAO,QAI9D,OAAM,CAAC,EAAa,EAA+B,CAC9D,GAAI,CAEF,OADe,MAAM,KAAK,OAAO,OAAO,EAAK,CAAG,IAC9B,EAClB,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,8BAA8B,OAAS,GAAO,QAK9D,KAAI,CAAC,EAAa,EAAQ,EAAoB,CACzD,GAAI,CACF,GAAI,IAAU,EACZ,OAAO,MAAM,KAAK,OAAO,KAAK,CAAG,EAEnC,IAAM,EAAe,KAAK,MAAM,CAAK,EACrC,OAAO,MAAM,KAAK,OAAO,KAAK,SAAU,CAAC,EAAK,EAAa,SAAS,CAAC,CAAC,EACtE,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,4BAA4B,SAAW,MAAU,GAAO,QAIxE,KAAI,CAAC,EAAa,EAAQ,EAAoB,CACzD,GAAI,CACF,GAAI,IAAU,EACZ,OAAO,MAAM,KAAK,OAAO,KAAK,CAAG,EAEnC,IAAM,EAAe,KAAK,MAAM,CAAK,EACrC,OAAO,MAAM,KAAK,OAAO,KAAK,SAAU,CAAC,EAAK,EAAa,SAAS,CAAC,CAAC,EACtE,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,4BAA4B,SAAW,MAAU,GAAO,QAKxE,KAAI,CAAC,EAAU,IAAwB,CAClD,GAAI,CACF,OAAO,MAAM,KAAK,OAAO,KAAK,OAAQ,CAAC,CAAO,CAAC,EAC/C,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,oCAAoC,OAAa,GAAO,QAIxE,MAAK,EAAkB,CAClC,GAAI,CACF,MAAM,KAAK,OAAO,KAAK,UAAW,CAAC,CAAC,EACpC,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,6BAA6B,GAAO,GAGnE",
10
- "debugId": "3AA2F052E73C867064756E2164756E21",
10
+ "mappings": ";iVAAA,oBAAS,0BACT,qBAAS,4BAEF,MAAM,UAAuB,CAAU,CAC5C,WAAW,CAAC,EAAiB,EAAgC,CAAC,EAAG,CAC/D,MAAM,EAAS,CACb,OAAQ,EAAW,KAAK,oBACxB,MACF,CAAC,EACD,KAAK,KAAO,iBAEhB,CCXA,oBAAS,qBAAW,0BAGb,IAAM,EAAY,CACvB,MAAO,CAAC,EAAyB,EAAgB,YAAc,CAC7D,MAAO,CAAC,IAAiC,CACvC,EAAU,IAAI,EAAQ,CAAK,GAGjC,ECCO,MAAM,CAAkC,CACrC,SACA,YAER,WAAW,CAAC,EAAsC,CAAC,EAAG,CACpD,KAAK,SAAW,EAAQ,UAAY,GAAG,QAAQ,IAAI,WACnD,KAAK,YAAc,EAAQ,aAAe,cAG9B,QAAO,EAAkB,CACrC,GAAI,CACF,IAAQ,QAAO,QAAS,KAAa,uBAIrC,GAHA,MAAM,EAAM,KAAK,SAAU,CAAE,UAAW,EAAK,CAAC,EAG1C,EADU,MAAM,EAAK,KAAK,QAAQ,GAC3B,YAAY,EACrB,MAAM,IAAI,EAAe,kCAAkC,EAE7D,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,0CAA0C,GAAO,QAIjE,IAAgB,CAAC,EAAqC,CACjE,GAAI,CAIF,OAHA,MAAM,KAAK,QAAQ,GACL,MAAM,KAAK,eAAkB,CAAG,IAEhC,MACd,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,QAItD,IAAgB,CAAC,EAAa,EAAU,EAA6B,CAChF,GAAI,CACF,MAAM,KAAK,QAAQ,EAEnB,IAAM,EAA2B,CAC/B,QACA,UAAW,KAAK,IAAI,EACpB,YAAa,KACT,IAAQ,QAAa,CAAE,KAAI,CACjC,EAEA,MAAM,KAAK,gBAAgB,EAAK,CAAK,EACrC,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,QAItD,OAAM,CAAC,EAA+B,CACjD,GAAI,CACF,MAAM,KAAK,QAAQ,EAEnB,IAAM,EAAO,IAAI,KAAK,KAAK,YAAY,CAAG,CAAC,EAE3C,GAAI,CAAE,MAAM,EAAK,OAAO,EACtB,MAAO,GAKT,OAFA,MAAM,EAAK,OAAO,EAEX,GACP,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,yBAAyB,OAAS,GAAO,QAIzD,IAAG,CAAC,EAA+B,CAC9C,GAAI,CAIF,OAHA,MAAM,KAAK,QAAQ,EACL,MAAM,KAAK,eAAe,CAAG,IAE1B,OACjB,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,2BAA2B,cAAgB,GAAO,GAIvE,WAAW,CAAC,EAAqB,CACvC,GAAI,EAAI,OAAS,IAAK,CACpB,IAAM,EAAO,IAAI,KAAK,CAAG,EACzB,MAAO,GAAG,KAAK,YAAY,EAAK,SAAS,EAAE,UAG7C,IAAM,EAAe,EAAI,QAAQ,yBAA0B,GAAG,EAE9D,MAAO,GAAG,KAAK,YAAY,UAGrB,SAAS,CAAC,EAAgC,CAChD,GAAI,CAAC,EAAM,IAAK,MAAO,GAEvB,GAAI,EAAM,MAAQ,EAChB,MAAO,GAGT,OAAO,EAAM,UAAY,EAAM,IAAM,KAAO,KAAK,IAAI,OAGzC,eAAiB,CAAC,EAAqD,CACnF,GAAI,CACF,IAAM,EAAO,IAAI,KAAK,KAAK,YAAY,CAAG,CAAC,EAE3C,GAAI,CAAE,MAAM,EAAK,OAAO,EACtB,OAGF,IAAM,EAAU,MAAM,EAAK,KAAK,EAC1B,EAA2B,KAAK,MAAM,CAAO,EAEnD,GAAI,KAAK,UAAU,CAAK,EAAG,CACzB,MAAM,EAAK,OAAO,EAAE,MAAM,IAAM,EAAE,EAClC,OAGF,OAAO,EACP,KAAM,CACN,aAIU,gBAAkB,CAAC,EAAa,EAAyC,CACrF,IAAM,EAAU,KAAK,UAAU,CAAK,EAEpC,GAAI,OAAO,WAAW,EAAS,OAAO,EAAI,KAAK,YAC7C,MAAM,IAAI,EAAe,4CAA4C,KAAK,mBAAmB,EAG/F,MAAM,IAAI,MAAM,KAAK,YAAY,CAAG,EAAG,CAAO,EAElD,CC3IO,MAAM,CAA6B,CAChC,OAER,WAAW,CAAC,EAAiC,CAAC,EAAG,CAC/C,IAAM,EAAmB,EAAQ,kBAAoB,IAAI,IAAI,gBAE7D,GAAI,CAAC,EACH,MAAM,IAAI,EACR,iKACF,EAGF,IAAQ,iBAAkB,KAAM,GAAgB,EAW1C,EAAgB,IATC,CACrB,kBAAmB,IACnB,YAAa,MACb,cAAe,GACf,WAAY,EACZ,mBAAoB,GACpB,qBAAsB,EACxB,KAE8C,CAAY,EAE1D,KAAK,OAAS,IAAI,IAAI,YAAY,EAAkB,CAAa,OAGrD,QAAO,EAAkB,CACrC,GAAI,CAAC,KAAK,OAAO,UACf,MAAM,KAAK,OAAO,QAAQ,OAIjB,IAAgB,CAAC,EAAqC,CACjE,GAAI,CACF,MAAM,KAAK,QAAQ,EACnB,IAAM,EAAQ,MAAM,KAAK,OAAO,IAAI,CAAG,EAEvC,GAAI,IAAU,KACZ,OAGF,GAAI,CACF,OAAO,KAAK,MAAM,CAAK,EACvB,KAAM,CACN,OAAO,GAET,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,QAItD,IAAgB,CAAC,EAAa,EAAU,EAA6B,CAChF,GAAI,CACF,MAAM,KAAK,QAAQ,EAEnB,IAAM,EAAkB,IAAU,OAAY,KAAO,EAC/C,EAAkB,OAAO,IAAoB,SAAW,EAAkB,KAAK,UAAU,CAAe,EAI9G,GAFA,MAAM,KAAK,OAAO,IAAI,EAAK,CAAe,EAEtC,GAAO,EAAM,EACf,MAAM,KAAK,OAAO,OAAO,EAAK,CAAG,EAEnC,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,sBAAsB,OAAS,GAAO,QAItD,OAAM,CAAC,EAA+B,CACjD,GAAI,CAIF,OAHA,MAAM,KAAK,QAAQ,EACJ,MAAM,KAAK,OAAO,IAAI,CAAG,EAExB,EAChB,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,yBAAyB,OAAS,GAAO,QAIzD,IAAG,CAAC,EAA+B,CAC9C,GAAI,CAIF,OAHA,MAAM,KAAK,QAAQ,EACJ,MAAM,KAAK,OAAO,OAAO,CAAG,EAG3C,MAAO,EAAO,CACd,MAAM,IAAI,EAAe,2BAA2B,cAAgB,GAAO,GAGjF",
11
+ "debugId": "2319999CAED1C92C64756E2164756E21",
11
12
  "names": []
12
13
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@ooneex/cache",
3
- "description": "",
4
- "version": "0.0.1",
3
+ "description": "A flexible caching library with support for filesystem and Redis backends, featuring decorators for easy service integration",
4
+ "version": "0.0.5",
5
5
  "type": "module",
6
6
  "files": [
7
7
  "dist",
@@ -25,14 +25,22 @@
25
25
  "test": "bun test tests",
26
26
  "build": "bunup",
27
27
  "lint": "tsgo --noEmit && bunx biome lint",
28
- "publish:prod": "bun publish --tolerate-republish --access public",
29
- "publish:pack": "bun pm pack --destination ./dist",
30
- "publish:dry": "bun publish --dry-run"
28
+ "npm:publish": "bun publish --tolerate-republish --access public"
31
29
  },
32
30
  "dependencies": {
31
+ "@ooneex/container": "0.0.2",
33
32
  "@ooneex/exception": "0.0.1",
34
33
  "@ooneex/http-status": "0.0.1"
35
34
  },
36
35
  "devDependencies": {},
37
- "peerDependencies": {}
36
+ "keywords": [
37
+ "bun",
38
+ "cache",
39
+ "caching",
40
+ "memory",
41
+ "ooneex",
42
+ "redis",
43
+ "storage",
44
+ "typescript"
45
+ ]
38
46
  }
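The updated package description and the new `@ooneex/container` dependency point to decorator-based service integration in 0.0.5, but the decorator API itself is not visible in this diff. The sketch below is therefore only an illustration: `Cached` is a hypothetical user-land method decorator (assuming legacy `experimentalDecorators`), and the only package calls it relies on are the documented `get`/`set` methods; the decorator name, its signature, and the TTL pass-through are assumptions, not part of the package.

```typescript
import { FilesystemCache } from '@ooneex/cache';

// Module-level cache instance used by the decorator below.
const cache = new FilesystemCache({ cacheDir: './.cache' });

// Hypothetical user-land decorator -- NOT an API exported by @ooneex/cache.
// It memoizes an async method's result via the documented get/set methods.
function Cached(keyPrefix: string, ttlSeconds?: number) {
  return function <T extends (...args: any[]) => Promise<any>>(
    _target: object,
    _propertyKey: string | symbol,
    descriptor: TypedPropertyDescriptor<T>,
  ) {
    const original = descriptor.value;
    if (!original) return descriptor;

    descriptor.value = async function (this: unknown, ...args: any[]) {
      // Derive a cache key from the decorator prefix and the call arguments.
      const key = `${keyPrefix}:${JSON.stringify(args)}`;

      const hit = await cache.get(key);
      if (hit !== undefined && hit !== null) return hit;

      // Miss: run the original method, then store the result (optionally with a TTL).
      const result = await original.apply(this, args);
      await cache.set(key, result, ttlSeconds);
      return result;
    } as T;

    return descriptor;
  };
}

class UserService {
  @Cached('user', 300) // cache lookups for 5 minutes
  async findUser(id: string) {
    // ...expensive lookup (database, HTTP call, etc.)
    return { id, name: 'John' };
  }
}
```

Keying on the serialized arguments keeps the sketch self-contained; in a real setup the cache instance would more likely be resolved through `@ooneex/container` than stored in a module-level constant.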
Binary file