alepha 0.9.4 → 0.9.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bucket.d.ts CHANGED
@@ -52,17 +52,332 @@ declare class MemoryFileStorageProvider implements FileStorageProvider {
  //#endregion
  //#region src/descriptors/$bucket.d.ts
  /**
- * Create a container for storing files.
+ * Creates a bucket descriptor for file storage and management with configurable validation.
+ *
+ * This descriptor provides a comprehensive file storage system that handles file uploads,
+ * downloads, validation, and management across multiple storage backends. It supports
+ * MIME type validation, size limits, and integrates seamlessly with various storage
+ * providers for scalable file management in applications.
+ *
+ * **Key Features**
+ *
+ * - **Multi-Provider Support**: Works with filesystem, cloud storage (S3, Azure), and in-memory providers
+ * - **File Validation**: Automatic MIME type checking and file size validation
+ * - **Type Safety**: Full TypeScript support with FileLike interface compatibility
+ * - **Event Integration**: Emits events for file operations (upload, delete) for monitoring
+ * - **Flexible Configuration**: Per-bucket and per-operation configuration options
+ * - **Automatic Detection**: Smart file type and size detection with fallback mechanisms
+ * - **Error Handling**: Comprehensive error handling with descriptive error messages
+ *
+ * **Use Cases**
+ *
+ * Perfect for handling file storage requirements across applications:
+ * - User profile picture and document uploads
+ * - Product image and media management
+ * - Document storage and retrieval systems
+ * - Temporary file handling and processing
+ * - Content delivery and asset management
+ * - Backup and archival storage
+ * - File-based data import/export workflows
  *
  * @example
+ * **Basic file upload bucket:**
  * ```ts
  * import { $bucket } from "alepha/bucket";
  *
- * class App {
- * images = $bucket();
+ * class MediaService {
+ * images = $bucket({
+ * name: "user-images",
+ * description: "User uploaded profile images and photos",
+ * mimeTypes: ["image/jpeg", "image/png", "image/gif", "image/webp"],
+ * maxSize: 5 // 5MB limit
+ * });
+ *
+ * async uploadProfileImage(file: FileLike, userId: string): Promise<string> {
+ * // File is automatically validated against MIME types and size
+ * const fileId = await this.images.upload(file);
+ *
+ * // Update user profile with new image
+ * await this.userService.updateProfileImage(userId, fileId);
+ *
+ * return fileId;
+ * }
+ *
+ * async getUserProfileImage(userId: string): Promise<FileLike> {
+ * const user = await this.userService.getUser(userId);
+ * if (!user.profileImageId) {
+ * throw new Error('User has no profile image');
+ * }
+ *
+ * return await this.images.download(user.profileImageId);
+ * }
+ * }
+ * ```
+ *
+ * @example
+ * **Document storage with multiple file types:**
+ * ```ts
+ * class DocumentManager {
+ * documents = $bucket({
+ * name: "company-documents",
+ * description: "Legal documents, contracts, and reports",
+ * mimeTypes: [
+ * "application/pdf",
+ * "application/msword",
+ * "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
+ * "text/plain",
+ * "text/csv"
+ * ],
+ * maxSize: 50 // 50MB for large documents
+ * });
+ *
+ * async uploadDocument(file: FileLike, metadata: { title: string; category: string; userId: string }): Promise<string> {
+ * try {
+ * const fileId = await this.documents.upload(file);
+ *
+ * // Store document metadata in database
+ * await this.database.documents.create({
+ * id: fileId,
+ * title: metadata.title,
+ * category: metadata.category,
+ * uploadedBy: metadata.userId,
+ * fileName: file.name,
+ * fileSize: file.size,
+ * mimeType: file.type,
+ * uploadedAt: new Date()
+ * });
+ *
+ * console.log(`Document uploaded successfully: ${metadata.title} (${fileId})`);
+ * return fileId;
+ *
+ * } catch (error) {
+ * console.error(`Failed to upload document: ${metadata.title}`, error);
+ * throw error;
+ * }
+ * }
+ *
+ * async downloadDocument(documentId: string, userId: string): Promise<FileLike> {
+ * // Check permissions
+ * const document = await this.database.documents.findById(documentId);
+ * if (!document) {
+ * throw new Error('Document not found');
+ * }
+ *
+ * const hasAccess = await this.permissionService.canAccessDocument(userId, documentId);
+ * if (!hasAccess) {
+ * throw new Error('Insufficient permissions to access document');
+ * }
+ *
+ * // Download and return file
+ * return await this.documents.download(documentId);
+ * }
+ *
+ * async deleteDocument(documentId: string, userId: string): Promise<void> {
+ * // Verify ownership or admin privileges
+ * const document = await this.database.documents.findById(documentId);
+ * if (document.uploadedBy !== userId && !await this.userService.isAdmin(userId)) {
+ * throw new Error('Cannot delete document: insufficient permissions');
+ * }
+ *
+ * // Delete from storage and database
+ * await this.documents.delete(documentId);
+ * await this.database.documents.delete(documentId);
  *
- * uploadImage(file: FileLike): Promise<string> {
- * return this.images.upload(file);
+ * console.log(`Document deleted: ${document.title} (${documentId})`);
+ * }
+ * }
+ * ```
+ *
+ * @example
+ * **Cloud storage integration with custom provider:**
+ * ```ts
+ * class ProductImageService {
+ * productImages = $bucket({
+ * name: "product-images",
+ * provider: S3FileStorageProvider, // Use AWS S3 for production storage
+ * description: "Product catalog images and thumbnails",
+ * mimeTypes: ["image/jpeg", "image/png", "image/webp"],
+ * maxSize: 10 // 10MB for high-quality product images
+ * });
+ *
+ * thumbnails = $bucket({
+ * name: "product-thumbnails",
+ * provider: S3FileStorageProvider,
+ * description: "Generated product thumbnail images",
+ * mimeTypes: ["image/jpeg", "image/webp"],
+ * maxSize: 1 // 1MB for thumbnails
+ * });
+ *
+ * async uploadProductImage(productId: string, file: FileLike): Promise<{ imageId: string; thumbnailId: string }> {
+ * try {
+ * // Upload original image
+ * const imageId = await this.productImages.upload(file);
+ *
+ * // Generate and upload thumbnail
+ * const thumbnailFile = await this.imageProcessor.generateThumbnail(file, {
+ * width: 300,
+ * height: 300,
+ * format: 'webp',
+ * quality: 80
+ * });
+ *
+ * const thumbnailId = await this.thumbnails.upload(thumbnailFile);
+ *
+ * // Update product in database
+ * await this.database.products.update(productId, {
+ * imageId,
+ * thumbnailId,
+ * imageUpdatedAt: new Date()
+ * });
+ *
+ * console.log(`Product images uploaded for ${productId}: image=${imageId}, thumbnail=${thumbnailId}`);
+ *
+ * return { imageId, thumbnailId };
+ *
+ * } catch (error) {
+ * console.error(`Failed to upload product image for ${productId}`, error);
+ * throw error;
+ * }
+ * }
+ *
+ * async getProductImage(productId: string, thumbnail: boolean = false): Promise<FileLike> {
+ * const product = await this.database.products.findById(productId);
+ * if (!product) {
+ * throw new Error(`Product ${productId} not found`);
+ * }
+ *
+ * const imageId = thumbnail ? product.thumbnailId : product.imageId;
+ * if (!imageId) {
+ * throw new Error(`Product ${productId} has no ${thumbnail ? 'thumbnail' : 'image'}`);
+ * }
+ *
+ * const bucket = thumbnail ? this.thumbnails : this.productImages;
+ * return await bucket.download(imageId);
+ * }
+ * }
+ * ```
+ *
+ * @example
+ * **Temporary file processing with memory storage:**
+ * ```ts
+ * class FileProcessingService {
+ * tempFiles = $bucket({
+ * name: "temp-processing",
+ * provider: "memory", // Use in-memory storage for temporary files
+ * description: "Temporary files during processing workflows",
+ * maxSize: 100 // Large limit for processing workflows
+ * });
+ *
+ * async processDataFile(inputFile: FileLike, transformations: string[]): Promise<FileLike> {
+ * let currentFile = inputFile;
+ * const intermediateFiles: string[] = [];
+ *
+ * try {
+ * // Upload initial file to temp storage
+ * let currentFileId = await this.tempFiles.upload(currentFile);
+ * intermediateFiles.push(currentFileId);
+ *
+ * // Apply each transformation
+ * for (const transformation of transformations) {
+ * console.log(`Applying transformation: ${transformation}`);
+ *
+ * // Download current file
+ * currentFile = await this.tempFiles.download(currentFileId);
+ *
+ * // Apply transformation
+ * const transformedFile = await this.applyTransformation(currentFile, transformation);
+ *
+ * // Upload transformed file
+ * currentFileId = await this.tempFiles.upload(transformedFile);
+ * intermediateFiles.push(currentFileId);
+ * }
+ *
+ * // Download final result
+ * const finalFile = await this.tempFiles.download(currentFileId);
+ *
+ * console.log(`File processing completed with ${transformations.length} transformations`);
+ * return finalFile;
+ *
+ * } finally {
+ * // Clean up all intermediate files
+ * for (const fileId of intermediateFiles) {
+ * try {
+ * await this.tempFiles.delete(fileId);
+ * } catch (error) {
+ * console.warn(`Failed to clean up temp file ${fileId}:`, error.message);
+ * }
+ * }
+ * console.log(`Cleaned up ${intermediateFiles.length} temporary files`);
+ * }
+ * }
+ * }
+ * ```
+ *
+ * @example
+ * **File validation with dynamic configuration:**
+ * ```ts
+ * class UserContentService {
+ * userContent = $bucket({
+ * name: "user-content",
+ * description: "User-generated content with flexible validation"
+ * // Base configuration - can be overridden per operation
+ * });
+ *
+ * async uploadUserFile(file: FileLike, userId: string, contentType: 'avatar' | 'document' | 'media'): Promise<string> {
+ * // Dynamic validation based on content type
+ * const validationConfig = this.getValidationConfig(contentType, userId);
+ *
+ * try {
+ * // Upload with specific validation rules
+ * const fileId = await this.userContent.upload(file, validationConfig);
+ *
+ * // Log upload for audit trail
+ * await this.auditLogger.log({
+ * action: 'file_upload',
+ * userId,
+ * fileId,
+ * contentType,
+ * fileName: file.name,
+ * fileSize: file.size,
+ * mimeType: file.type
+ * });
+ *
+ * return fileId;
+ *
+ * } catch (error) {
+ * console.error(`File upload failed for user ${userId}`, {
+ * contentType,
+ * fileName: file.name,
+ * error: error.message
+ * });
+ * throw error;
+ * }
+ * }
+ *
+ * private getValidationConfig(contentType: string, userId: string) {
+ * const baseConfig = {
+ * avatar: {
+ * mimeTypes: ['image/jpeg', 'image/png'],
+ * maxSize: 2 // 2MB for avatars
+ * },
+ * document: {
+ * mimeTypes: ['application/pdf', 'text/plain'],
+ * maxSize: 10 // 10MB for documents
+ * },
+ * media: {
+ * mimeTypes: ['image/jpeg', 'image/png', 'video/mp4'],
+ * maxSize: 50 // 50MB for media files
+ * }
+ * };
+ *
+ * const config = baseConfig[contentType];
+ *
+ * // Premium users get higher limits
+ * if (this.userService.isPremium(userId)) {
+ * config.maxSize *= 2;
+ * }
+ *
+ * return config;
  * }
  * }
  * ```
@@ -73,16 +388,138 @@ declare const $bucket: {
  };
  interface BucketDescriptorOptions extends BucketFileOptions {
  /**
- * File storage provider. If not provided, the default provider will be used.
+ * File storage provider configuration for the bucket.
+ *
+ * Options:
+ * - **"memory"**: In-memory storage (default for development, lost on restart)
+ * - **Service<FileStorageProvider>**: Custom provider class (e.g., S3FileStorageProvider, AzureBlobProvider)
+ * - **undefined**: Uses the default file storage provider from dependency injection
+ *
+ * **Provider Selection Guidelines**:
+ * - **Development**: Use "memory" for fast, simple testing without external dependencies
+ * - **Production**: Use cloud providers (S3, Azure Blob, Google Cloud Storage) for scalability
+ * - **Local deployment**: Use filesystem providers for on-premise installations
+ * - **Hybrid**: Use different providers for different bucket types (temp files vs permanent storage)
+ *
+ * **Provider Capabilities**:
+ * - File persistence and durability guarantees
+ * - Scalability and performance characteristics
+ * - Geographic distribution and CDN integration
+ * - Cost implications for storage and bandwidth
+ * - Backup and disaster recovery features
+ *
+ * @default Uses injected FileStorageProvider
+ * @example "memory"
+ * @example S3FileStorageProvider
+ * @example AzureBlobStorageProvider
  */
  provider?: Service<FileStorageProvider> | "memory";
  /**
- * Optional name of the bucket. If not provided, the key of the descriptor will be used.
+ * Unique name identifier for the bucket.
+ *
+ * This name is used for:
+ * - Storage backend organization and partitioning
+ * - File path generation and URL construction
+ * - Logging, monitoring, and debugging
+ * - Access control and permissions management
+ * - Backup and replication configuration
+ *
+ * **Naming Conventions**:
+ * - Use lowercase with hyphens for consistency
+ * - Include purpose or content type in the name
+ * - Avoid spaces and special characters
+ * - Consider environment prefixes for deployment isolation
+ *
+ * If not provided, defaults to the property key where the bucket is declared.
+ *
+ * @example "user-avatars"
+ * @example "product-images"
+ * @example "legal-documents"
+ * @example "temp-processing-files"
  */
  name?: string;
  }
+ interface BucketFileOptions {
+ /**
+ * Human-readable description of the bucket's purpose and contents.
+ *
+ * Used for:
+ * - Documentation generation and API references
+ * - Developer onboarding and system understanding
+ * - Monitoring dashboards and admin interfaces
+ * - Compliance and audit documentation
+ *
+ * **Description Best Practices**:
+ * - Explain what types of files this bucket stores
+ * - Mention any special handling or processing requirements
+ * - Include information about retention policies if applicable
+ * - Note any compliance or security considerations
+ *
+ * @example "User profile pictures and avatar images"
+ * @example "Product catalog images with automated thumbnail generation"
+ * @example "Legal documents requiring long-term retention"
+ * @example "Temporary files for data processing workflows"
+ */
+ description?: string;
+ /**
+ * Array of allowed MIME types for files uploaded to this bucket.
+ *
+ * When specified, only files with these exact MIME types will be accepted.
+ * Files with disallowed MIME types will be rejected with an InvalidFileError.
+ *
+ * **MIME Type Categories**:
+ * - Images: "image/jpeg", "image/png", "image/gif", "image/webp", "image/svg+xml"
+ * - Documents: "application/pdf", "text/plain", "text/csv"
+ * - Office: "application/msword", "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+ * - Archives: "application/zip", "application/x-tar", "application/gzip"
+ * - Media: "video/mp4", "audio/mpeg", "audio/wav"
+ *
+ * **Security Considerations**:
+ * - Always validate MIME types for user uploads
+ * - Be cautious with executable file types
+ * - Consider using allow-lists rather than deny-lists
+ * - Remember that MIME types can be spoofed by malicious users
+ *
+ * If not specified, all MIME types are allowed (not recommended for user uploads).
+ *
+ * @example ["image/jpeg", "image/png"] // Only JPEG and PNG images
+ * @example ["application/pdf", "text/plain"] // Documents only
+ * @example ["video/mp4", "video/webm"] // Video files
+ */
+ mimeTypes?: string[];
+ /**
+ * Maximum file size allowed in megabytes (MB).
+ *
+ * Files larger than this limit will be rejected with an InvalidFileError.
+ * This helps prevent:
+ * - Storage quota exhaustion
+ * - Memory issues during file processing
+ * - Long upload times and timeouts
+ * - Abuse of storage resources
+ *
+ * **Size Guidelines by File Type**:
+ * - Profile images: 1-5 MB
+ * - Product photos: 5-10 MB
+ * - Documents: 10-50 MB
+ * - Video files: 50-500 MB
+ * - Data files: 100-1000 MB
+ *
+ * **Considerations**:
+ * - Consider your storage costs and limits
+ * - Factor in network upload speeds for users
+ * - Account for processing requirements (thumbnails, compression)
+ * - Set reasonable limits based on actual use cases
+ *
+ * @default 10 MB
+ *
+ * @example 1 // 1MB for small images
+ * @example 25 // 25MB for documents
+ * @example 100 // 100MB for media files
+ */
+ maxSize?: number;
+ }
  declare class BucketDescriptor extends Descriptor<BucketDescriptorOptions> {
- readonly provider: FileStorageProvider | MemoryFileStorageProvider;
+ readonly provider: MemoryFileStorageProvider | FileStorageProvider;
  get name(): string;
  /**
  * Uploads a file to the bucket.
@@ -100,7 +537,7 @@ declare class BucketDescriptor extends Descriptor<BucketDescriptorOptions> {
  * Downloads a file from the bucket.
  */
  download(fileId: string): Promise<FileLike>;
- protected $provider(): FileStorageProvider | MemoryFileStorageProvider;
+ protected $provider(): MemoryFileStorageProvider | FileStorageProvider;
  }
  interface BucketFileOptions {
  /**
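Putting the new bucket options together: the fields documented above (`name`, `description`, `mimeTypes`, `maxSize`, `provider`) are the whole configuration surface of `$bucket` in this release, and `upload`/`download`/`delete` are the only operations its examples rely on. The sketch below is illustrative only and sticks to those documented calls; the `ReportStorage` class, the bucket name, and the `FileLike` import path are assumptions, not part of this diff.

```ts
import { $bucket } from "alepha/bucket";
// FileLike is the file interface referenced throughout the docs above;
// its exact export location is assumed here.
import type { FileLike } from "alepha/bucket";

class ReportStorage {
	// Hypothetical bucket using only options documented in this diff.
	reports = $bucket({
		name: "monthly-reports",
		description: "Generated monthly PDF and CSV reports",
		mimeTypes: ["application/pdf", "text/csv"], // anything else is rejected
		maxSize: 25, // megabytes, per the documented unit
		provider: "memory", // in-memory while prototyping; swap in a cloud provider class for production
	});

	// Upload a report and hand back its id for later retrieval.
	async archive(report: FileLike): Promise<string> {
		return await this.reports.upload(report); // MIME type and size validation happen here
	}

	// Fetch a previously stored report, or remove one that is no longer needed.
	async read(fileId: string): Promise<FileLike> {
		return await this.reports.download(fileId);
	}

	async discard(fileId: string): Promise<void> {
		await this.reports.delete(fileId);
	}
}
```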
package/cache.d.ts CHANGED
@@ -44,7 +44,138 @@ declare abstract class CacheProvider {
  //#endregion
  //#region src/descriptors/$cache.d.ts
  /**
- * Creates a cache storage or a cache function.
+ * Creates a cache descriptor for high-performance data caching with automatic cache management.
+ *
+ * This descriptor provides a powerful caching layer that can significantly improve application performance
+ * by storing frequently accessed data in memory or external cache stores like Redis. It supports both
+ * function result caching and manual cache operations with intelligent serialization and TTL management.
+ *
+ * **Key Features**
+ *
+ * - **Function Result Caching**: Automatically cache function results based on input parameters
+ * - **Multiple Storage Backends**: Support for in-memory, Redis, and custom cache providers
+ * - **Intelligent Serialization**: Automatic handling of JSON, strings, and binary data
+ * - **TTL Management**: Configurable time-to-live with automatic expiration
+ * - **Cache Invalidation**: Pattern-based cache invalidation with wildcard support
+ * - **Environment Controls**: Enable/disable caching via environment variables
+ * - **Type Safety**: Full TypeScript support with generic type parameters
+ *
+ * ## Cache Strategies
+ *
+ * ### 1. Function Result Caching (Memoization)
+ * Automatically cache the results of expensive operations based on input parameters.
+ *
+ * ### 2. Manual Cache Operations
+ * Direct cache operations for custom caching logic and data storage.
+ *
+ * ## Storage Backends
+ *
+ * - **Memory**: Fast in-memory cache (default for development)
+ * - **Redis**: Distributed cache for production environments
+ * - **Custom Providers**: Implement your own cache storage backend
+ *
+ * @example
+ * **Basic function result caching:**
+ * ```ts
+ * import { $cache } from "alepha/cache";
+ *
+ * class DataService {
+ * // Cache expensive database queries
+ * getUserData = $cache({
+ * name: "user-data",
+ * ttl: [10, "minutes"],
+ * handler: async (userId: string) => {
+ * // Expensive database operation
+ * return await database.users.findById(userId);
+ * }
+ * });
+ *
+ * async getUser(id: string) {
+ * // This will hit cache on subsequent calls with same ID
+ * return await this.getUserData(id);
+ * }
+ * }
+ * ```
+ *
+ * @example
+ * **API response caching with custom key generation:**
+ * ```ts
+ * class ApiService {
+ * fetchUserPosts = $cache({
+ * name: "user-posts",
+ * ttl: [5, "minutes"],
+ * key: (userId: string, page: number) => `${userId}:page:${page}`,
+ * handler: async (userId: string, page: number = 1) => {
+ * const response = await fetch(`/api/users/${userId}/posts?page=${page}`);
+ * return await response.json();
+ * }
+ * });
+ * }
+ * ```
+ *
+ * @example
+ * **Manual cache operations for custom logic:**
+ * ```ts
+ * class SessionService {
+ * sessionCache = $cache<UserSession>({
+ * name: "user-sessions",
+ * ttl: [1, "hour"],
+ * provider: "memory" // Use memory cache for sessions
+ * });
+ *
+ * async storeSession(sessionId: string, session: UserSession) {
+ * await this.sessionCache.set(sessionId, session);
+ * }
+ *
+ * async getSession(sessionId: string): Promise<UserSession | undefined> {
+ * return await this.sessionCache.get(sessionId);
+ * }
+ *
+ * async invalidateUserSessions(userId: string) {
+ * // Invalidate all sessions for a user using wildcards
+ * await this.sessionCache.invalidate(`user:${userId}:*`);
+ * }
+ * }
+ * ```
+ *
+ * @example
+ * **Redis-backed caching for production:**
+ * ```ts
+ * class ProductService {
+ * productCache = $cache({
+ * name: "products",
+ * ttl: [1, "hour"],
+ * provider: RedisCacheProvider, // Use Redis for distributed caching
+ * handler: async (productId: string) => {
+ * return await this.database.products.findById(productId);
+ * }
+ * });
+ *
+ * async invalidateProduct(productId: string) {
+ * await this.productCache.invalidate(productId);
+ * }
+ *
+ * async invalidateAllProducts() {
+ * await this.productCache.invalidate("*");
+ * }
+ * }
+ * ```
+ *
+ * @example
+ * **Conditional caching with environment controls:**
+ * ```ts
+ * class ExpensiveService {
+ * computation = $cache({
+ * name: "heavy-computation",
+ * ttl: [1, "day"],
+ * disabled: process.env.NODE_ENV === "development", // Disable in dev
+ * handler: async (input: ComplexInput) => {
+ * // Very expensive computation that should be cached in production
+ * return await performHeavyComputation(input);
+ * }
+ * });
+ * }
+ * ```
  */
  declare const $cache: {
  <TReturn = string, TParameter extends any[] = any[]>(options?: CacheDescriptorOptions<TReturn, TParameter>): CacheDescriptorFn<TReturn, TParameter>;
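The `$cache` documentation above describes two usage styles, memoized handlers and manual `get`/`set`/`invalidate` calls, plus array-based TTLs and wildcard invalidation. The sketch below combines them using only options and methods shown in this diff; the weather service, its URL, and the assumption that keyed invalidation accepts the same string produced by `key` are illustrative, not confirmed by this release.

```ts
import { $cache } from "alepha/cache";

class WeatherService {
	// Memoized handler: results are cached per city for 15 minutes.
	forecast = $cache({
		name: "weather-forecast", // hypothetical cache name
		ttl: [15, "minutes"],
		key: (city: string) => `city:${city}`, // custom cache key, as in the docs above
		handler: async (city: string) => {
			// Placeholder endpoint for illustration only.
			const response = await fetch(`https://example.com/forecast?city=${encodeURIComponent(city)}`);
			return await response.json();
		},
	});

	async getForecast(city: string) {
		// Subsequent calls with the same city are served from cache until the TTL expires.
		return await this.forecast(city);
	}

	async refresh(city: string) {
		// Assumes invalidate() accepts the key string produced by the key() option.
		await this.forecast.invalidate(`city:${city}`);
		return await this.forecast(city);
	}

	async clearAll() {
		// Wildcard invalidation, per the pattern-based invalidation documented above.
		await this.forecast.invalidate("*");
	}
}
```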
package/command.d.ts CHANGED
@@ -170,6 +170,8 @@ declare module "@sinclair/typebox" {
  aliases?: string[];
  }
  }
+ //# sourceMappingURL=index.d.ts.map
+
  //#endregion
  export { $command, AlephaCommand, CliProvider, CommandDescriptor, CommandDescriptorOptions, CommandError, CommandHandlerArgs, RunOptions, Runner, RunnerMethod, Task };
  //# sourceMappingURL=index.d.ts.map