alepha 0.13.2 → 0.13.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. package/README.md +7 -7
  2. package/dist/api-files/index.browser.js +80 -0
  3. package/dist/api-files/index.browser.js.map +1 -0
  4. package/dist/api-files/index.d.ts +175 -175
  5. package/dist/api-jobs/index.browser.js +56 -0
  6. package/dist/api-jobs/index.browser.js.map +1 -0
  7. package/dist/api-jobs/index.d.ts +156 -156
  8. package/dist/api-notifications/index.browser.js +382 -0
  9. package/dist/api-notifications/index.browser.js.map +1 -0
  10. package/dist/api-notifications/index.d.ts +221 -166
  11. package/dist/api-notifications/index.js +107 -55
  12. package/dist/api-notifications/index.js.map +1 -1
  13. package/dist/api-parameters/index.browser.js +29 -0
  14. package/dist/api-parameters/index.browser.js.map +1 -0
  15. package/dist/api-users/index.d.ts +16 -3
  16. package/dist/api-users/index.js +75 -28
  17. package/dist/api-users/index.js.map +1 -1
  18. package/dist/api-verifications/index.browser.js +52 -0
  19. package/dist/api-verifications/index.browser.js.map +1 -0
  20. package/dist/api-verifications/index.d.ts +120 -98
  21. package/dist/api-verifications/index.js +1 -1
  22. package/dist/api-verifications/index.js.map +1 -1
  23. package/dist/batch/index.js +0 -5
  24. package/dist/batch/index.js.map +1 -1
  25. package/dist/bucket/index.js +7 -5
  26. package/dist/bucket/index.js.map +1 -1
  27. package/dist/cli/{dist-Dl9Vl7Ur.js → dist-lGnqsKpu.js} +11 -15
  28. package/dist/cli/dist-lGnqsKpu.js.map +1 -0
  29. package/dist/cli/index.d.ts +26 -45
  30. package/dist/cli/index.js +40 -58
  31. package/dist/cli/index.js.map +1 -1
  32. package/dist/command/index.d.ts +1 -0
  33. package/dist/command/index.js +9 -0
  34. package/dist/command/index.js.map +1 -1
  35. package/dist/core/index.browser.js +5 -1
  36. package/dist/core/index.browser.js.map +1 -1
  37. package/dist/core/index.d.ts +221 -219
  38. package/dist/core/index.js +5 -1
  39. package/dist/core/index.js.map +1 -1
  40. package/dist/core/index.native.js +5 -1
  41. package/dist/core/index.native.js.map +1 -1
  42. package/dist/email/index.d.ts +4 -4
  43. package/dist/email/index.js +5 -0
  44. package/dist/email/index.js.map +1 -1
  45. package/dist/orm/index.d.ts +19 -19
  46. package/dist/orm/index.js +3 -3
  47. package/dist/orm/index.js.map +1 -1
  48. package/dist/redis/index.d.ts +10 -10
  49. package/dist/security/index.d.ts +28 -28
  50. package/dist/security/index.js +3 -3
  51. package/dist/security/index.js.map +1 -1
  52. package/dist/server/index.d.ts +9 -9
  53. package/dist/server-auth/index.d.ts +152 -152
  54. package/dist/server-cookies/index.js +2 -2
  55. package/dist/server-cookies/index.js.map +1 -1
  56. package/dist/server-links/index.d.ts +33 -33
  57. package/dist/server-security/index.d.ts +9 -9
  58. package/dist/server-static/index.js +18 -2
  59. package/dist/server-static/index.js.map +1 -1
  60. package/package.json +16 -6
  61. package/src/api-files/index.browser.ts +17 -0
  62. package/src/api-jobs/index.browser.ts +15 -0
  63. package/src/api-notifications/controllers/NotificationController.ts +26 -1
  64. package/src/api-notifications/index.browser.ts +17 -0
  65. package/src/api-notifications/index.ts +1 -0
  66. package/src/api-notifications/schemas/notificationQuerySchema.ts +13 -0
  67. package/src/api-notifications/services/NotificationService.ts +45 -2
  68. package/src/api-parameters/index.browser.ts +12 -0
  69. package/src/api-users/atoms/realmAuthSettingsAtom.ts +3 -1
  70. package/src/api-users/controllers/UserController.ts +21 -1
  71. package/src/api-users/primitives/$userRealm.ts +33 -10
  72. package/src/api-users/providers/UserRealmProvider.ts +1 -0
  73. package/src/api-users/services/SessionService.ts +2 -0
  74. package/src/api-users/services/UserService.ts +56 -16
  75. package/src/api-verifications/index.browser.ts +15 -0
  76. package/src/api-verifications/index.ts +1 -0
  77. package/src/batch/providers/BatchProvider.ts +0 -7
  78. package/src/bucket/index.ts +7 -5
  79. package/src/cli/apps/AlephaCli.ts +27 -1
  80. package/src/cli/apps/AlephaPackageBuilderCli.ts +3 -0
  81. package/src/cli/commands/CoreCommands.ts +6 -2
  82. package/src/cli/commands/ViteCommands.ts +2 -1
  83. package/src/cli/services/ProjectUtils.ts +40 -75
  84. package/src/command/helpers/Asker.ts +10 -0
  85. package/src/core/Alepha.ts +14 -0
  86. package/src/core/primitives/$module.ts +1 -1
  87. package/src/email/index.ts +13 -5
  88. package/src/orm/providers/drivers/NodeSqliteProvider.ts +3 -3
  89. package/src/server-cookies/providers/ServerCookiesProvider.ts +2 -1
  90. package/src/server-static/providers/ServerStaticProvider.ts +18 -3
  91. package/dist/cli/dist-Dl9Vl7Ur.js.map +0 -1
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":["partitionKey: string","itemState: BatchItemState<TItem, TResponse>","promises: Promise<void>[]","itemsToProcess: TItem[]","result: any"],"sources":["../../src/batch/providers/BatchProvider.ts","../../src/batch/primitives/$batch.ts","../../src/batch/index.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport { $inject, type Alepha } from \"alepha\";\nimport { DateTimeProvider, type DurationLike } from \"alepha/datetime\";\nimport { $logger } from \"alepha/logger\";\nimport { type RetryBackoffOptions, RetryProvider } from \"alepha/retry\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BatchOptions<TItem, TResponse = any> {\n /**\n * The batch processing handler function that processes arrays of validated items.\n */\n handler: (items: TItem[]) => TResponse;\n\n /**\n * Maximum number of items to collect before automatically flushing the batch.\n *\n * @default 10\n */\n maxSize?: number;\n\n /**\n * Maximum number of items that can be queued in a single partition.\n * If exceeded, push() will throw an error.\n */\n maxQueueSize?: number;\n\n /**\n * Maximum time to wait before flushing a batch, even if it hasn't reached maxSize.\n *\n * @default [1, \"second\"]\n */\n maxDuration?: DurationLike;\n\n /**\n * Function to determine partition keys for grouping items into separate batches.\n */\n partitionBy?: (item: TItem) => string;\n\n /**\n * Maximum number of batch handlers that can execute simultaneously.\n *\n * @default 1\n */\n concurrency?: number;\n\n /**\n * Retry configuration for failed batch processing operations.\n */\n retry?: {\n /**\n * The maximum number of attempts.\n *\n * @default 3\n */\n max?: number;\n\n /**\n * The backoff strategy for delays between retries.\n * Can be a fixed number (in ms) or a configuration object for exponential backoff.\n *\n * @default { initial: 200, factor: 2, 
jitter: true }\n */\n backoff?: number | RetryBackoffOptions;\n\n /**\n * An overall time limit for all retry attempts combined.\n *\n * e.g., `[5, 'seconds']`\n */\n maxDuration?: DurationLike;\n\n /**\n * A function that determines if a retry should be attempted based on the error.\n *\n * @default (error) => true (retries on any error)\n */\n when?: (error: Error) => boolean;\n\n /**\n * A custom callback for when a retry attempt fails.\n * This is called before the delay.\n */\n onError?: (error: Error, attempt: number) => void;\n };\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport type BatchItemStatus = \"pending\" | \"processing\" | \"completed\" | \"failed\";\n\nexport interface BatchItemState<TItem, TResponse> {\n id: string;\n item: TItem;\n partitionKey: string;\n status: BatchItemStatus;\n result?: TResponse;\n error?: Error;\n promise?: Promise<TResponse>;\n resolve?: (value: TResponse) => void;\n reject?: (error: Error) => void;\n}\n\nexport interface PartitionState {\n itemIds: string[];\n timeout?: { clear: () => void };\n flushing: boolean;\n}\n\n/**\n * Context object that holds all state for a batch processor instance.\n */\nexport interface BatchContext<TItem, TResponse> {\n options: BatchOptions<TItem, TResponse>;\n itemStates: Map<string, BatchItemState<TItem, TResponse>>;\n partitions: Map<string, PartitionState>;\n activeHandlers: PromiseWithResolvers<void>[];\n isShuttingDown: boolean;\n isReady: boolean;\n alepha: Alepha;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Service for batch processing operations.\n * Provides methods to manage batches of items with automatic flushing based on size or time.\n */\nexport class BatchProvider {\n protected readonly log = $logger();\n protected readonly dateTime = $inject(DateTimeProvider);\n protected readonly retryProvider 
= $inject(RetryProvider);\n\n /**\n * Creates a new batch context with the given options.\n */\n createContext<TItem, TResponse>(\n alepha: Alepha,\n options: BatchOptions<TItem, TResponse>,\n ): BatchContext<TItem, TResponse> {\n return {\n options,\n itemStates: new Map(),\n partitions: new Map(),\n activeHandlers: [],\n isShuttingDown: false,\n isReady: false,\n alepha,\n };\n }\n\n /**\n * Get the effective maxSize for a context.\n */\n protected getMaxSize<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): number {\n return context.options.maxSize ?? 10;\n }\n\n /**\n * Get the effective concurrency for a context.\n */\n protected getConcurrency<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): number {\n return context.options.concurrency ?? 1;\n }\n\n /**\n * Get the effective maxDuration for a context.\n */\n protected getMaxDuration<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): DurationLike {\n return context.options.maxDuration ?? [1, \"second\"];\n }\n\n /**\n * Pushes an item into the batch and returns immediately with a unique ID.\n * The item will be processed asynchronously with other items when the batch is flushed.\n * Use wait(id) to get the processing result.\n *\n * @throws Error if maxQueueSize is exceeded\n */\n push<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n item: TItem,\n ): string {\n // 1. Generate unique ID\n const id = randomUUID();\n\n // 2. Determine the partition key (with error handling)\n let partitionKey: string;\n try {\n partitionKey = context.options.partitionBy\n ? context.options.partitionBy(item)\n : \"default\";\n } catch (error) {\n this.log.warn(\n \"partitionBy function threw an error, using 'default' partition\",\n { error },\n );\n partitionKey = \"default\";\n }\n\n // 3. 
Create item state\n const itemState: BatchItemState<TItem, TResponse> = {\n id,\n item,\n partitionKey,\n status: \"pending\",\n };\n\n // CAUTION: Do not log.debug/info here as it may cause infinite loops if logging is batched\n // log.trace is safe\n\n this.log.trace(\"Pushing item to batch\", {\n id,\n partitionKey,\n item,\n });\n\n context.itemStates.set(id, itemState);\n\n // 4. Get or create the partition state\n if (!context.partitions.has(partitionKey)) {\n context.partitions.set(partitionKey, {\n itemIds: [],\n flushing: false,\n });\n }\n const partition = context.partitions.get(partitionKey)!;\n\n // 5. Check maxQueueSize before adding\n if (\n context.options.maxQueueSize !== undefined &&\n partition.itemIds.length >= context.options.maxQueueSize\n ) {\n throw new Error(\n `Batch queue size exceeded for partition '${partitionKey}' (max: ${context.options.maxQueueSize})`,\n );\n }\n\n // 6. Add item ID to partition\n partition.itemIds.push(id);\n\n const maxSize = this.getMaxSize(context);\n const maxDuration = this.getMaxDuration(context);\n\n // 7. Only start processing if the app is ready (after \"ready\" hook)\n // During startup, items are just buffered in memory\n if (context.isReady) {\n // Check if the batch is full\n if (partition.itemIds.length >= maxSize) {\n this.log.trace(\n `Batch partition '${partitionKey}' is full, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush batch partition '${partitionKey}' on max size`,\n error,\n ),\n );\n } else if (!partition.timeout && !partition.flushing) {\n // 8. 
Start the timeout if it's not already running for this partition and not currently flushing\n partition.timeout = this.dateTime.createTimeout(() => {\n this.log.trace(\n `Batch partition '${partitionKey}' timed out, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush batch partition '${partitionKey}' on timeout`,\n error,\n ),\n );\n }, maxDuration);\n }\n } else {\n // Not ready yet - just buffer items, no size checks or timeouts\n this.log.trace(\n `Buffering item in partition '${partitionKey}' (app not ready yet, ${partition.itemIds.length} items buffered)`,\n );\n }\n\n // 9. Return ID immediately\n return id;\n }\n\n /**\n * Wait for a specific item to be processed and get its result.\n * @param id The item ID returned from push()\n * @returns The processing result\n * @throws If the item doesn't exist or processing failed\n */\n async wait<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n id: string,\n ): Promise<TResponse> {\n const itemState = context.itemStates.get(id);\n if (!itemState) {\n throw new Error(`Item with id '${id}' not found`);\n }\n\n // If already completed or failed, return immediately\n if (itemState.status === \"completed\") {\n return itemState.result!;\n }\n if (itemState.status === \"failed\") {\n throw itemState.error!;\n }\n\n // Create promise on-demand if not already created\n if (!itemState.promise) {\n itemState.promise = new Promise<TResponse>((resolve, reject) => {\n itemState.resolve = resolve;\n itemState.reject = reject;\n });\n }\n\n return itemState.promise;\n }\n\n /**\n * Get the current status of an item.\n * @param id The item ID returned from push()\n * @returns Status information or undefined if item doesn't exist\n */\n status<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n id: string,\n ):\n | { status: \"pending\" | \"processing\" }\n | { status: \"completed\"; result: TResponse }\n | { status: \"failed\"; error: Error 
}\n | undefined {\n const itemState = context.itemStates.get(id);\n if (!itemState) {\n return undefined;\n }\n\n if (itemState.status === \"completed\") {\n return { status: \"completed\", result: itemState.result! };\n }\n if (itemState.status === \"failed\") {\n return { status: \"failed\", error: itemState.error! };\n }\n return { status: itemState.status };\n }\n\n /**\n * Clears completed and failed items from the context to free memory.\n * Returns the number of items cleared.\n *\n * @param context The batch context\n * @param status Optional: only clear items with this specific status ('completed' or 'failed')\n * @returns The number of items cleared\n */\n clearCompleted<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n status?: \"completed\" | \"failed\",\n ): number {\n let count = 0;\n for (const [id, state] of context.itemStates) {\n if (status) {\n if (state.status === status) {\n context.itemStates.delete(id);\n count++;\n }\n } else if (state.status === \"completed\" || state.status === \"failed\") {\n context.itemStates.delete(id);\n count++;\n }\n }\n return count;\n }\n\n /**\n * Flush all partitions or a specific partition.\n */\n async flush<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n partitionKey?: string,\n ): Promise<void> {\n const promises: Promise<void>[] = [];\n if (partitionKey) {\n if (context.partitions.has(partitionKey)) {\n promises.push(this.flushPartition(context, partitionKey));\n }\n } else {\n for (const key of context.partitions.keys()) {\n promises.push(this.flushPartition(context, key));\n }\n }\n await Promise.all(promises);\n }\n\n /**\n * Flush a specific partition.\n */\n protected async flushPartition<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n partitionKey: string,\n limit?: number,\n ): Promise<void> {\n const partition = context.partitions.get(partitionKey);\n if (!partition || partition.itemIds.length === 0) {\n context.partitions.delete(partitionKey);\n 
return;\n }\n\n // Clear the timeout and grab the item IDs (up to limit if specified)\n partition.timeout?.clear();\n partition.timeout = undefined;\n const itemsToTake =\n limit !== undefined\n ? Math.min(limit, partition.itemIds.length)\n : partition.itemIds.length;\n const itemIdsToProcess = partition.itemIds.splice(0, itemsToTake);\n\n // Mark partition as flushing to prevent race conditions\n partition.flushing = true;\n\n // Get the items and mark them as processing\n const itemsToProcess: TItem[] = [];\n for (const id of itemIdsToProcess) {\n const itemState = context.itemStates.get(id);\n if (itemState) {\n itemState.status = \"processing\";\n itemsToProcess.push(itemState.item);\n }\n }\n\n const concurrency = this.getConcurrency(context);\n const maxDuration = this.getMaxDuration(context);\n\n // Wait until there's a free slot (if at concurrency limit)\n while (context.activeHandlers.length >= concurrency) {\n this.log.trace(\n `Batch handler is at concurrency limit, waiting for a slot...`,\n );\n // Wait for any single handler to complete, not all of them\n await Promise.race(context.activeHandlers.map((it) => it.promise));\n }\n\n const promise = Promise.withResolvers<void>();\n context.activeHandlers.push(promise);\n let result: any;\n try {\n result = await context.alepha.context.run(() =>\n // during shutdown, call handler directly to avoid retry cancellation\n context.isShuttingDown\n ? 
context.options.handler(itemsToProcess)\n : this.retryProvider.retry(\n {\n ...context.options.retry,\n handler: context.options.handler,\n },\n itemsToProcess,\n ),\n );\n\n // Mark all items as completed and resolve their promises\n for (const id of itemIdsToProcess) {\n const itemState = context.itemStates.get(id);\n if (itemState) {\n itemState.status = \"completed\";\n itemState.result = result;\n // Only resolve if someone is waiting\n itemState.resolve?.(result);\n }\n }\n } catch (error) {\n this.log.error(`Batch handler failed`, error);\n\n // Mark all items as failed and reject their promises\n for (const id of itemIdsToProcess) {\n const itemState = context.itemStates.get(id);\n if (itemState) {\n itemState.status = \"failed\";\n itemState.error = error as Error;\n // Only reject if someone is waiting (promise was created)\n itemState.reject?.(error as Error);\n }\n }\n } finally {\n promise.resolve();\n context.activeHandlers = context.activeHandlers.filter(\n (it) => it !== promise,\n );\n\n // Only delete partition if no new items arrived during processing\n const currentPartition = context.partitions.get(partitionKey);\n if (currentPartition?.flushing && currentPartition.itemIds.length === 0) {\n context.partitions.delete(partitionKey);\n } else if (currentPartition) {\n // Reset flushing flag if partition still exists with items\n currentPartition.flushing = false;\n\n // Restart timeout for items that arrived during flush\n if (currentPartition.itemIds.length > 0 && !currentPartition.timeout) {\n currentPartition.timeout = this.dateTime.createTimeout(() => {\n this.log.trace(\n `Batch partition '${partitionKey}' timed out, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush batch partition '${partitionKey}' on timeout`,\n error,\n ),\n );\n }, maxDuration);\n }\n }\n }\n }\n\n /**\n * Mark the context as ready and start processing buffered items.\n * Called after the \"ready\" 
hook.\n */\n async markReady<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): Promise<void> {\n this.log.debug(\n \"Batch processor is now ready, starting to process buffered items...\",\n );\n context.isReady = true;\n await this.startProcessing(context);\n }\n\n /**\n * Mark the context as shutting down and flush all remaining items.\n */\n async shutdown<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): Promise<void> {\n this.log.debug(\"Flushing all remaining batch partitions on shutdown...\");\n context.isShuttingDown = true;\n await this.flush(context);\n this.log.debug(\"All batch partitions flushed\");\n }\n\n /**\n * Called after the \"ready\" hook to start processing buffered items that were\n * pushed during startup. This checks all partitions and starts timeouts/flushes\n * for items that were accumulated before the app was ready.\n */\n protected async startProcessing<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): Promise<void> {\n const maxSize = this.getMaxSize(context);\n const maxDuration = this.getMaxDuration(context);\n\n for (const [partitionKey, partition] of context.partitions.entries()) {\n if (partition.itemIds.length === 0) {\n continue;\n }\n\n this.log.trace(\n `Starting processing for partition '${partitionKey}' with ${partition.itemIds.length} buffered items`,\n );\n\n // Flush batches of maxSize while we have items >= maxSize\n while (partition.itemIds.length >= maxSize) {\n this.log.trace(\n `Partition '${partitionKey}' has ${partition.itemIds.length} items, flushing batch of ${maxSize}...`,\n );\n await this.flushPartition(context, partitionKey, maxSize);\n }\n\n // After flushing full batches, start timeout for any remaining items\n if (\n partition.itemIds.length > 0 &&\n !partition.timeout &&\n !partition.flushing\n ) {\n this.log.trace(\n `Starting timeout for partition '${partitionKey}' with ${partition.itemIds.length} remaining items`,\n );\n partition.timeout = 
this.dateTime.createTimeout(() => {\n this.log.trace(\n `Batch partition '${partitionKey}' timed out, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush partition '${partitionKey}' on timeout after startup`,\n error,\n ),\n );\n }, maxDuration);\n }\n }\n }\n}\n","import {\n $hook,\n $inject,\n createPrimitive,\n KIND,\n Primitive,\n type Static,\n type TSchema,\n} from \"alepha\";\nimport type { DurationLike } from \"alepha/datetime\";\nimport type { RetryPrimitiveOptions } from \"alepha/retry\";\nimport {\n type BatchContext,\n type BatchItemState,\n type BatchItemStatus,\n BatchProvider,\n} from \"../providers/BatchProvider.ts\";\n\n/**\n * Creates a batch processing primitive for efficient grouping and processing of multiple operations.\n */\nexport const $batch = <TItem extends TSchema, TResponse>(\n options: BatchPrimitiveOptions<TItem, TResponse>,\n): BatchPrimitive<TItem, TResponse> =>\n createPrimitive(BatchPrimitive<TItem, TResponse>, options);\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BatchPrimitiveOptions<TItem extends TSchema, TResponse = any> {\n /**\n * TypeBox schema for validating each item added to the batch.\n */\n schema: TItem;\n\n /**\n * The batch processing handler function that processes arrays of validated items.\n */\n handler: (items: Static<TItem>[]) => TResponse;\n\n /**\n * Maximum number of items to collect before automatically flushing the batch.\n */\n maxSize?: number;\n\n /**\n * Maximum number of items that can be queued in a single partition.\n * If exceeded, push() will throw an error.\n */\n maxQueueSize?: number;\n\n /**\n * Maximum time to wait before flushing a batch, even if it hasn't reached maxSize.\n */\n maxDuration?: DurationLike;\n\n /**\n * Function to determine partition keys for grouping items into separate batches.\n */\n partitionBy?: (item: 
Static<TItem>) => string;\n\n /**\n * Maximum number of batch handlers that can execute simultaneously.\n */\n concurrency?: number;\n\n /**\n * Retry configuration for failed batch processing operations.\n */\n retry?: Omit<RetryPrimitiveOptions<() => Array<Static<TItem>>>, \"handler\">;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport type { BatchItemState, BatchItemStatus };\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport class BatchPrimitive<\n TItem extends TSchema,\n TResponse = any,\n> extends Primitive<BatchPrimitiveOptions<TItem, TResponse>> {\n protected readonly batchProvider = $inject(BatchProvider);\n protected readonly context: BatchContext<Static<TItem>, TResponse>;\n\n constructor(\n ...args: ConstructorParameters<\n typeof Primitive<BatchPrimitiveOptions<TItem, TResponse>>\n >\n ) {\n super(...args);\n this.context = this.batchProvider.createContext(this.alepha, {\n handler: this.options.handler,\n maxSize: this.options.maxSize,\n maxQueueSize: this.options.maxQueueSize,\n maxDuration: this.options.maxDuration,\n partitionBy: this.options.partitionBy,\n concurrency: this.options.concurrency,\n retry: this.options.retry,\n });\n }\n\n /**\n * Pushes an item into the batch and returns immediately with a unique ID.\n * The item will be processed asynchronously with other items when the batch is flushed.\n * Use wait(id) to get the processing result.\n */\n public async push(item: Static<TItem>): Promise<string> {\n // Validate the item against the schema\n const validatedItem = this.alepha.codec.validate(this.options.schema, item);\n return this.batchProvider.push(this.context, validatedItem);\n }\n\n /**\n * Wait for a specific item to be processed and get its result.\n * @param id The item ID returned from push()\n * @returns The processing result\n * @throws If the item doesn't 
exist or processing failed\n */\n public async wait(id: string): Promise<TResponse> {\n return this.batchProvider.wait(this.context, id);\n }\n\n /**\n * Get the current status of an item.\n * @param id The item ID returned from push()\n * @returns Status information or undefined if item doesn't exist\n */\n public status(\n id: string,\n ):\n | { status: \"pending\" | \"processing\" }\n | { status: \"completed\"; result: TResponse }\n | { status: \"failed\"; error: Error }\n | undefined {\n return this.batchProvider.status(this.context, id);\n }\n\n /**\n * Flush all partitions or a specific partition.\n */\n public async flush(partitionKey?: string): Promise<void> {\n return this.batchProvider.flush(this.context, partitionKey);\n }\n\n /**\n * Clears completed and failed items from memory.\n * Call this periodically in long-running applications to prevent memory leaks.\n *\n * @param status Optional: only clear items with this specific status ('completed' or 'failed')\n * @returns The number of items cleared\n */\n public clearCompleted(status?: \"completed\" | \"failed\"): number {\n return this.batchProvider.clearCompleted(this.context, status);\n }\n\n protected readonly onReady = $hook({\n on: \"ready\",\n handler: async () => {\n await this.batchProvider.markReady(this.context);\n },\n });\n\n protected readonly dispose = $hook({\n on: \"stop\",\n priority: \"first\",\n handler: async () => {\n await this.batchProvider.shutdown(this.context);\n },\n });\n}\n\n$batch[KIND] = BatchPrimitive;\n","import { $module } from \"alepha\";\nimport { $batch } from \"./primitives/$batch.ts\";\nimport { BatchProvider } from \"./providers/BatchProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport * from \"./primitives/$batch.ts\";\nexport * from \"./providers/BatchProvider.ts\";\n\n// 
---------------------------------------------------------------------------------------------------------------------\n\n/**\n * This module allows you to group multiple asynchronous operations into a single \"batch,\" which is then processed together.\n * This is an essential pattern for improving performance, reducing I/O, and interacting efficiently with rate-limited APIs or databases.\n *\n * ```ts\n * import { Alepha, $hook, run, t } from \"alepha\";\n * import { $batch } from \"alepha/batch\";\n *\n * class LoggingService {\n * // define the batch processor\n * logBatch = $batch({\n * schema: t.text(),\n * maxSize: 10,\n * maxDuration: [5, \"seconds\"],\n * handler: async (items) => {\n * console.log(`[BATCH LOG] Processing ${items.length} events:`, items);\n * },\n * });\n *\n * // example of how to use it\n * onReady = $hook({\n * on: \"ready\",\n * handler: async () => {\n * // push() returns an ID immediately\n * const id1 = await this.logBatch.push(\"Application started.\");\n * const id2 = await this.logBatch.push(\"User authenticated.\");\n *\n * // optionally wait for processing to complete\n * await this.logBatch.wait(id1);\n *\n * // or check the status\n * const status = this.logBatch.status(id2);\n * console.log(status?.status); // \"pending\" | \"processing\" | \"completed\" | \"failed\"\n * },\n * });\n * }\n * ```\n *\n * @see {@link $batch}\n * @see {@link BatchProvider}\n * @module alepha.batch\n */\nexport const AlephaBatch = $module({\n name: \"alepha.batch\",\n primitives: [$batch],\n services: 
[BatchProvider],\n});\n"],"mappings":";;;;;;;;;;;AAgIA,IAAa,gBAAb,MAA2B;CACzB,AAAmB,MAAM,SAAS;CAClC,AAAmB,WAAW,QAAQ,iBAAiB;CACvD,AAAmB,gBAAgB,QAAQ,cAAc;;;;CAKzD,cACE,QACA,SACgC;AAChC,SAAO;GACL;GACA,4BAAY,IAAI,KAAK;GACrB,4BAAY,IAAI,KAAK;GACrB,gBAAgB,EAAE;GAClB,gBAAgB;GAChB,SAAS;GACT;GACD;;;;;CAMH,AAAU,WACR,SACQ;AACR,SAAO,QAAQ,QAAQ,WAAW;;;;;CAMpC,AAAU,eACR,SACQ;AACR,SAAO,QAAQ,QAAQ,eAAe;;;;;CAMxC,AAAU,eACR,SACc;AACd,SAAO,QAAQ,QAAQ,eAAe,CAAC,GAAG,SAAS;;;;;;;;;CAUrD,KACE,SACA,MACQ;EAER,MAAM,KAAK,YAAY;EAGvB,IAAIA;AACJ,MAAI;AACF,kBAAe,QAAQ,QAAQ,cAC3B,QAAQ,QAAQ,YAAY,KAAK,GACjC;WACG,OAAO;AACd,QAAK,IAAI,KACP,kEACA,EAAE,OAAO,CACV;AACD,kBAAe;;EAIjB,MAAMC,YAA8C;GAClD;GACA;GACA;GACA,QAAQ;GACT;AAKD,OAAK,IAAI,MAAM,yBAAyB;GACtC;GACA;GACA;GACD,CAAC;AAEF,UAAQ,WAAW,IAAI,IAAI,UAAU;AAGrC,MAAI,CAAC,QAAQ,WAAW,IAAI,aAAa,CACvC,SAAQ,WAAW,IAAI,cAAc;GACnC,SAAS,EAAE;GACX,UAAU;GACX,CAAC;EAEJ,MAAM,YAAY,QAAQ,WAAW,IAAI,aAAa;AAGtD,MACE,QAAQ,QAAQ,iBAAiB,UACjC,UAAU,QAAQ,UAAU,QAAQ,QAAQ,aAE5C,OAAM,IAAI,MACR,4CAA4C,aAAa,UAAU,QAAQ,QAAQ,aAAa,GACjG;AAIH,YAAU,QAAQ,KAAK,GAAG;EAE1B,MAAM,UAAU,KAAK,WAAW,QAAQ;EACxC,MAAM,cAAc,KAAK,eAAe,QAAQ;AAIhD,MAAI,QAAQ,SAEV;OAAI,UAAU,QAAQ,UAAU,SAAS;AACvC,SAAK,IAAI,MACP,oBAAoB,aAAa,wBAClC;AACD,SAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,oCAAoC,aAAa,gBACjD,MACD,CACF;cACQ,CAAC,UAAU,WAAW,CAAC,UAAU,SAE1C,WAAU,UAAU,KAAK,SAAS,oBAAoB;AACpD,SAAK,IAAI,MACP,oBAAoB,aAAa,0BAClC;AACD,SAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,oCAAoC,aAAa,eACjD,MACD,CACF;MACA,YAAY;QAIjB,MAAK,IAAI,MACP,gCAAgC,aAAa,wBAAwB,UAAU,QAAQ,OAAO,kBAC/F;AAIH,SAAO;;;;;;;;CAST,MAAM,KACJ,SACA,IACoB;EACpB,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,MAAI,CAAC,UACH,OAAM,IAAI,MAAM,iBAAiB,GAAG,aAAa;AAInD,MAAI,UAAU,WAAW,YACvB,QAAO,UAAU;AAEnB,MAAI,UAAU,WAAW,SACvB,OAAM,UAAU;AAIlB,MAAI,CAAC,UAAU,QACb,WAAU,UAAU,IAAI,SAAoB,SAAS,WAAW;AAC9D,aAAU,UAAU;AACpB,aAAU,SAAS;IACnB;AAGJ,SAAO,UAAU;;;;;;;CAQnB,OACE,SACA,IAKY;EACZ,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,MAAI,CAAC,UACH;AAGF,MAAI,UAAU,WAAW,YACvB,QAAO;GAAE,QAAQ;GAAa,QAAQ,UAAU;GAAS;A
AE3D,MAAI,UAAU,WAAW,SACvB,QAAO;GAAE,QAAQ;GAAU,OAAO,UAAU;GAAQ;AAEtD,SAAO,EAAE,QAAQ,UAAU,QAAQ;;;;;;;;;;CAWrC,eACE,SACA,QACQ;EACR,IAAI,QAAQ;AACZ,OAAK,MAAM,CAAC,IAAI,UAAU,QAAQ,WAChC,KAAI,QACF;OAAI,MAAM,WAAW,QAAQ;AAC3B,YAAQ,WAAW,OAAO,GAAG;AAC7B;;aAEO,MAAM,WAAW,eAAe,MAAM,WAAW,UAAU;AACpE,WAAQ,WAAW,OAAO,GAAG;AAC7B;;AAGJ,SAAO;;;;;CAMT,MAAM,MACJ,SACA,cACe;EACf,MAAMC,WAA4B,EAAE;AACpC,MAAI,cACF;OAAI,QAAQ,WAAW,IAAI,aAAa,CACtC,UAAS,KAAK,KAAK,eAAe,SAAS,aAAa,CAAC;QAG3D,MAAK,MAAM,OAAO,QAAQ,WAAW,MAAM,CACzC,UAAS,KAAK,KAAK,eAAe,SAAS,IAAI,CAAC;AAGpD,QAAM,QAAQ,IAAI,SAAS;;;;;CAM7B,MAAgB,eACd,SACA,cACA,OACe;EACf,MAAM,YAAY,QAAQ,WAAW,IAAI,aAAa;AACtD,MAAI,CAAC,aAAa,UAAU,QAAQ,WAAW,GAAG;AAChD,WAAQ,WAAW,OAAO,aAAa;AACvC;;AAIF,YAAU,SAAS,OAAO;AAC1B,YAAU,UAAU;EACpB,MAAM,cACJ,UAAU,SACN,KAAK,IAAI,OAAO,UAAU,QAAQ,OAAO,GACzC,UAAU,QAAQ;EACxB,MAAM,mBAAmB,UAAU,QAAQ,OAAO,GAAG,YAAY;AAGjE,YAAU,WAAW;EAGrB,MAAMC,iBAA0B,EAAE;AAClC,OAAK,MAAM,MAAM,kBAAkB;GACjC,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,OAAI,WAAW;AACb,cAAU,SAAS;AACnB,mBAAe,KAAK,UAAU,KAAK;;;EAIvC,MAAM,cAAc,KAAK,eAAe,QAAQ;EAChD,MAAM,cAAc,KAAK,eAAe,QAAQ;AAGhD,SAAO,QAAQ,eAAe,UAAU,aAAa;AACnD,QAAK,IAAI,MACP,+DACD;AAED,SAAM,QAAQ,KAAK,QAAQ,eAAe,KAAK,OAAO,GAAG,QAAQ,CAAC;;EAGpE,MAAM,UAAU,QAAQ,eAAqB;AAC7C,UAAQ,eAAe,KAAK,QAAQ;EACpC,IAAIC;AACJ,MAAI;AACF,YAAS,MAAM,QAAQ,OAAO,QAAQ,UAEpC,QAAQ,iBACJ,QAAQ,QAAQ,QAAQ,eAAe,GACvC,KAAK,cAAc,MACjB;IACE,GAAG,QAAQ,QAAQ;IACnB,SAAS,QAAQ,QAAQ;IAC1B,EACD,eACD,CACN;AAGD,QAAK,MAAM,MAAM,kBAAkB;IACjC,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,QAAI,WAAW;AACb,eAAU,SAAS;AACnB,eAAU,SAAS;AAEnB,eAAU,UAAU,OAAO;;;WAGxB,OAAO;AACd,QAAK,IAAI,MAAM,wBAAwB,MAAM;AAG7C,QAAK,MAAM,MAAM,kBAAkB;IACjC,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,QAAI,WAAW;AACb,eAAU,SAAS;AACnB,eAAU,QAAQ;AAElB,eAAU,SAAS,MAAe;;;YAG9B;AACR,WAAQ,SAAS;AACjB,WAAQ,iBAAiB,QAAQ,eAAe,QAC7C,OAAO,OAAO,QAChB;GAGD,MAAM,mBAAmB,QAAQ,WAAW,IAAI,aAAa;AAC7D,OAAI,kBAAkB,YAAY,iBAAiB,QAAQ,WAAW,EACpE,SAAQ,WAAW,OAAO,aAAa;YAC9B,kBAAkB;AAE3B,qBAAiB,WAAW;AAG5B,QAAI,iBAAiB,QAAQ,SAAS,KAAK,CAAC,iBAAiB,QAC3D,kBAAiB,UAAU,KAAK,
SAAS,oBAAoB;AAC3D,UAAK,IAAI,MACP,oBAAoB,aAAa,0BAClC;AACD,UAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,oCAAoC,aAAa,eACjD,MACD,CACF;OACA,YAAY;;;;;;;;CAUvB,MAAM,UACJ,SACe;AACf,OAAK,IAAI,MACP,sEACD;AACD,UAAQ,UAAU;AAClB,QAAM,KAAK,gBAAgB,QAAQ;;;;;CAMrC,MAAM,SACJ,SACe;AACf,OAAK,IAAI,MAAM,yDAAyD;AACxE,UAAQ,iBAAiB;AACzB,QAAM,KAAK,MAAM,QAAQ;AACzB,OAAK,IAAI,MAAM,+BAA+B;;;;;;;CAQhD,MAAgB,gBACd,SACe;EACf,MAAM,UAAU,KAAK,WAAW,QAAQ;EACxC,MAAM,cAAc,KAAK,eAAe,QAAQ;AAEhD,OAAK,MAAM,CAAC,cAAc,cAAc,QAAQ,WAAW,SAAS,EAAE;AACpE,OAAI,UAAU,QAAQ,WAAW,EAC/B;AAGF,QAAK,IAAI,MACP,sCAAsC,aAAa,SAAS,UAAU,QAAQ,OAAO,iBACtF;AAGD,UAAO,UAAU,QAAQ,UAAU,SAAS;AAC1C,SAAK,IAAI,MACP,cAAc,aAAa,QAAQ,UAAU,QAAQ,OAAO,4BAA4B,QAAQ,KACjG;AACD,UAAM,KAAK,eAAe,SAAS,cAAc,QAAQ;;AAI3D,OACE,UAAU,QAAQ,SAAS,KAC3B,CAAC,UAAU,WACX,CAAC,UAAU,UACX;AACA,SAAK,IAAI,MACP,mCAAmC,aAAa,SAAS,UAAU,QAAQ,OAAO,kBACnF;AACD,cAAU,UAAU,KAAK,SAAS,oBAAoB;AACpD,UAAK,IAAI,MACP,oBAAoB,aAAa,0BAClC;AACD,UAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,8BAA8B,aAAa,6BAC3C,MACD,CACF;OACA,YAAY;;;;;;;;;;;ACzjBvB,MAAa,UACX,YAEA,gBAAgB,gBAAkC,QAAQ;AAqD5D,IAAa,iBAAb,cAGU,UAAmD;CAC3D,AAAmB,gBAAgB,QAAQ,cAAc;CACzD,AAAmB;CAEnB,YACE,GAAG,MAGH;AACA,QAAM,GAAG,KAAK;AACd,OAAK,UAAU,KAAK,cAAc,cAAc,KAAK,QAAQ;GAC3D,SAAS,KAAK,QAAQ;GACtB,SAAS,KAAK,QAAQ;GACtB,cAAc,KAAK,QAAQ;GAC3B,aAAa,KAAK,QAAQ;GAC1B,aAAa,KAAK,QAAQ;GAC1B,aAAa,KAAK,QAAQ;GAC1B,OAAO,KAAK,QAAQ;GACrB,CAAC;;;;;;;CAQJ,MAAa,KAAK,MAAsC;EAEtD,MAAM,gBAAgB,KAAK,OAAO,MAAM,SAAS,KAAK,QAAQ,QAAQ,KAAK;AAC3E,SAAO,KAAK,cAAc,KAAK,KAAK,SAAS,cAAc;;;;;;;;CAS7D,MAAa,KAAK,IAAgC;AAChD,SAAO,KAAK,cAAc,KAAK,KAAK,SAAS,GAAG;;;;;;;CAQlD,AAAO,OACL,IAKY;AACZ,SAAO,KAAK,cAAc,OAAO,KAAK,SAAS,GAAG;;;;;CAMpD,MAAa,MAAM,cAAsC;AACvD,SAAO,KAAK,cAAc,MAAM,KAAK,SAAS,aAAa;;;;;;;;;CAU7D,AAAO,eAAe,QAAyC;AAC7D,SAAO,KAAK,cAAc,eAAe,KAAK,SAAS,OAAO;;CAGhE,AAAmB,UAAU,MAAM;EACjC,IAAI;EACJ,SAAS,YAAY;AACnB,SAAM,KAAK,cAAc,UAAU,KAAK,QAAQ;;EAEnD,CAAC;CAEF,AAAmB,UAAU,MAAM;EACjC,IAAI;EACJ,UAAU;EACV,SAAS,YAAY;AACnB,SAAM,KAAK,cAAc,SAAS,KAAK,QAAQ;;EAElD,CAAC;;AAGJ,OAAO,QAA
Q;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtHf,MAAa,cAAc,QAAQ;CACjC,MAAM;CACN,YAAY,CAAC,OAAO;CACpB,UAAU,CAAC,cAAc;CAC1B,CAAC"}
1
+ {"version":3,"file":"index.js","names":["partitionKey: string","itemState: BatchItemState<TItem, TResponse>","promises: Promise<void>[]","itemsToProcess: TItem[]","result: any"],"sources":["../../src/batch/providers/BatchProvider.ts","../../src/batch/primitives/$batch.ts","../../src/batch/index.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport { $inject, type Alepha } from \"alepha\";\nimport { DateTimeProvider, type DurationLike } from \"alepha/datetime\";\nimport { $logger } from \"alepha/logger\";\nimport { type RetryBackoffOptions, RetryProvider } from \"alepha/retry\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BatchOptions<TItem, TResponse = any> {\n /**\n * The batch processing handler function that processes arrays of validated items.\n */\n handler: (items: TItem[]) => TResponse;\n\n /**\n * Maximum number of items to collect before automatically flushing the batch.\n *\n * @default 10\n */\n maxSize?: number;\n\n /**\n * Maximum number of items that can be queued in a single partition.\n * If exceeded, push() will throw an error.\n */\n maxQueueSize?: number;\n\n /**\n * Maximum time to wait before flushing a batch, even if it hasn't reached maxSize.\n *\n * @default [1, \"second\"]\n */\n maxDuration?: DurationLike;\n\n /**\n * Function to determine partition keys for grouping items into separate batches.\n */\n partitionBy?: (item: TItem) => string;\n\n /**\n * Maximum number of batch handlers that can execute simultaneously.\n *\n * @default 1\n */\n concurrency?: number;\n\n /**\n * Retry configuration for failed batch processing operations.\n */\n retry?: {\n /**\n * The maximum number of attempts.\n *\n * @default 3\n */\n max?: number;\n\n /**\n * The backoff strategy for delays between retries.\n * Can be a fixed number (in ms) or a configuration object for exponential backoff.\n *\n * @default { initial: 200, factor: 2, 
jitter: true }\n */\n backoff?: number | RetryBackoffOptions;\n\n /**\n * An overall time limit for all retry attempts combined.\n *\n * e.g., `[5, 'seconds']`\n */\n maxDuration?: DurationLike;\n\n /**\n * A function that determines if a retry should be attempted based on the error.\n *\n * @default (error) => true (retries on any error)\n */\n when?: (error: Error) => boolean;\n\n /**\n * A custom callback for when a retry attempt fails.\n * This is called before the delay.\n */\n onError?: (error: Error, attempt: number) => void;\n };\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport type BatchItemStatus = \"pending\" | \"processing\" | \"completed\" | \"failed\";\n\nexport interface BatchItemState<TItem, TResponse> {\n id: string;\n item: TItem;\n partitionKey: string;\n status: BatchItemStatus;\n result?: TResponse;\n error?: Error;\n promise?: Promise<TResponse>;\n resolve?: (value: TResponse) => void;\n reject?: (error: Error) => void;\n}\n\nexport interface PartitionState {\n itemIds: string[];\n timeout?: { clear: () => void };\n flushing: boolean;\n}\n\n/**\n * Context object that holds all state for a batch processor instance.\n */\nexport interface BatchContext<TItem, TResponse> {\n options: BatchOptions<TItem, TResponse>;\n itemStates: Map<string, BatchItemState<TItem, TResponse>>;\n partitions: Map<string, PartitionState>;\n activeHandlers: PromiseWithResolvers<void>[];\n isShuttingDown: boolean;\n isReady: boolean;\n alepha: Alepha;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Service for batch processing operations.\n * Provides methods to manage batches of items with automatic flushing based on size or time.\n */\nexport class BatchProvider {\n protected readonly log = $logger();\n protected readonly dateTime = $inject(DateTimeProvider);\n protected readonly retryProvider 
= $inject(RetryProvider);\n\n /**\n * Creates a new batch context with the given options.\n */\n createContext<TItem, TResponse>(\n alepha: Alepha,\n options: BatchOptions<TItem, TResponse>,\n ): BatchContext<TItem, TResponse> {\n return {\n options,\n itemStates: new Map(),\n partitions: new Map(),\n activeHandlers: [],\n isShuttingDown: false,\n isReady: false,\n alepha,\n };\n }\n\n /**\n * Get the effective maxSize for a context.\n */\n protected getMaxSize<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): number {\n return context.options.maxSize ?? 10;\n }\n\n /**\n * Get the effective concurrency for a context.\n */\n protected getConcurrency<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): number {\n return context.options.concurrency ?? 1;\n }\n\n /**\n * Get the effective maxDuration for a context.\n */\n protected getMaxDuration<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): DurationLike {\n return context.options.maxDuration ?? [1, \"second\"];\n }\n\n /**\n * Pushes an item into the batch and returns immediately with a unique ID.\n * The item will be processed asynchronously with other items when the batch is flushed.\n * Use wait(id) to get the processing result.\n *\n * @throws Error if maxQueueSize is exceeded\n */\n push<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n item: TItem,\n ): string {\n // 1. Generate unique ID\n const id = randomUUID();\n\n // 2. Determine the partition key (with error handling)\n let partitionKey: string;\n try {\n partitionKey = context.options.partitionBy\n ? context.options.partitionBy(item)\n : \"default\";\n } catch (error) {\n this.log.warn(\n \"partitionBy function threw an error, using 'default' partition\",\n { error },\n );\n partitionKey = \"default\";\n }\n\n // 3. 
Create item state\n const itemState: BatchItemState<TItem, TResponse> = {\n id,\n item,\n partitionKey,\n status: \"pending\",\n };\n\n // CAUTION: Do not log.debug/info here as it may cause infinite loops if logging is batched\n\n context.itemStates.set(id, itemState);\n\n // 4. Get or create the partition state\n if (!context.partitions.has(partitionKey)) {\n context.partitions.set(partitionKey, {\n itemIds: [],\n flushing: false,\n });\n }\n const partition = context.partitions.get(partitionKey)!;\n\n // 5. Check maxQueueSize before adding\n if (\n context.options.maxQueueSize !== undefined &&\n partition.itemIds.length >= context.options.maxQueueSize\n ) {\n throw new Error(\n `Batch queue size exceeded for partition '${partitionKey}' (max: ${context.options.maxQueueSize})`,\n );\n }\n\n // 6. Add item ID to partition\n partition.itemIds.push(id);\n\n const maxSize = this.getMaxSize(context);\n const maxDuration = this.getMaxDuration(context);\n\n // 7. Only start processing if the app is ready (after \"ready\" hook)\n // During startup, items are just buffered in memory\n if (context.isReady) {\n // Check if the batch is full\n if (partition.itemIds.length >= maxSize) {\n this.log.trace(\n `Batch partition '${partitionKey}' is full, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush batch partition '${partitionKey}' on max size`,\n error,\n ),\n );\n } else if (!partition.timeout && !partition.flushing) {\n // 8. 
Start the timeout if it's not already running for this partition and not currently flushing\n partition.timeout = this.dateTime.createTimeout(() => {\n this.log.trace(\n `Batch partition '${partitionKey}' timed out, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush batch partition '${partitionKey}' on timeout`,\n error,\n ),\n );\n }, maxDuration);\n }\n } else {\n // Not ready yet - just buffer items, no size checks or timeouts\n this.log.trace(\n `Buffering item in partition '${partitionKey}' (app not ready yet, ${partition.itemIds.length} items buffered)`,\n );\n }\n\n // 9. Return ID immediately\n return id;\n }\n\n /**\n * Wait for a specific item to be processed and get its result.\n * @param id The item ID returned from push()\n * @returns The processing result\n * @throws If the item doesn't exist or processing failed\n */\n async wait<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n id: string,\n ): Promise<TResponse> {\n const itemState = context.itemStates.get(id);\n if (!itemState) {\n throw new Error(`Item with id '${id}' not found`);\n }\n\n // If already completed or failed, return immediately\n if (itemState.status === \"completed\") {\n return itemState.result!;\n }\n if (itemState.status === \"failed\") {\n throw itemState.error!;\n }\n\n // Create promise on-demand if not already created\n if (!itemState.promise) {\n itemState.promise = new Promise<TResponse>((resolve, reject) => {\n itemState.resolve = resolve;\n itemState.reject = reject;\n });\n }\n\n return itemState.promise;\n }\n\n /**\n * Get the current status of an item.\n * @param id The item ID returned from push()\n * @returns Status information or undefined if item doesn't exist\n */\n status<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n id: string,\n ):\n | { status: \"pending\" | \"processing\" }\n | { status: \"completed\"; result: TResponse }\n | { status: \"failed\"; error: Error 
}\n | undefined {\n const itemState = context.itemStates.get(id);\n if (!itemState) {\n return undefined;\n }\n\n if (itemState.status === \"completed\") {\n return { status: \"completed\", result: itemState.result! };\n }\n if (itemState.status === \"failed\") {\n return { status: \"failed\", error: itemState.error! };\n }\n return { status: itemState.status };\n }\n\n /**\n * Clears completed and failed items from the context to free memory.\n * Returns the number of items cleared.\n *\n * @param context The batch context\n * @param status Optional: only clear items with this specific status ('completed' or 'failed')\n * @returns The number of items cleared\n */\n clearCompleted<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n status?: \"completed\" | \"failed\",\n ): number {\n let count = 0;\n for (const [id, state] of context.itemStates) {\n if (status) {\n if (state.status === status) {\n context.itemStates.delete(id);\n count++;\n }\n } else if (state.status === \"completed\" || state.status === \"failed\") {\n context.itemStates.delete(id);\n count++;\n }\n }\n return count;\n }\n\n /**\n * Flush all partitions or a specific partition.\n */\n async flush<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n partitionKey?: string,\n ): Promise<void> {\n const promises: Promise<void>[] = [];\n if (partitionKey) {\n if (context.partitions.has(partitionKey)) {\n promises.push(this.flushPartition(context, partitionKey));\n }\n } else {\n for (const key of context.partitions.keys()) {\n promises.push(this.flushPartition(context, key));\n }\n }\n await Promise.all(promises);\n }\n\n /**\n * Flush a specific partition.\n */\n protected async flushPartition<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n partitionKey: string,\n limit?: number,\n ): Promise<void> {\n const partition = context.partitions.get(partitionKey);\n if (!partition || partition.itemIds.length === 0) {\n context.partitions.delete(partitionKey);\n 
return;\n }\n\n // Clear the timeout and grab the item IDs (up to limit if specified)\n partition.timeout?.clear();\n partition.timeout = undefined;\n const itemsToTake =\n limit !== undefined\n ? Math.min(limit, partition.itemIds.length)\n : partition.itemIds.length;\n const itemIdsToProcess = partition.itemIds.splice(0, itemsToTake);\n\n // Mark partition as flushing to prevent race conditions\n partition.flushing = true;\n\n // Get the items and mark them as processing\n const itemsToProcess: TItem[] = [];\n for (const id of itemIdsToProcess) {\n const itemState = context.itemStates.get(id);\n if (itemState) {\n itemState.status = \"processing\";\n itemsToProcess.push(itemState.item);\n }\n }\n\n const concurrency = this.getConcurrency(context);\n const maxDuration = this.getMaxDuration(context);\n\n // Wait until there's a free slot (if at concurrency limit)\n while (context.activeHandlers.length >= concurrency) {\n this.log.trace(\n `Batch handler is at concurrency limit, waiting for a slot...`,\n );\n // Wait for any single handler to complete, not all of them\n await Promise.race(context.activeHandlers.map((it) => it.promise));\n }\n\n const promise = Promise.withResolvers<void>();\n context.activeHandlers.push(promise);\n let result: any;\n try {\n result = await context.alepha.context.run(() =>\n // during shutdown, call handler directly to avoid retry cancellation\n context.isShuttingDown\n ? 
context.options.handler(itemsToProcess)\n : this.retryProvider.retry(\n {\n ...context.options.retry,\n handler: context.options.handler,\n },\n itemsToProcess,\n ),\n );\n\n // Mark all items as completed and resolve their promises\n for (const id of itemIdsToProcess) {\n const itemState = context.itemStates.get(id);\n if (itemState) {\n itemState.status = \"completed\";\n itemState.result = result;\n // Only resolve if someone is waiting\n itemState.resolve?.(result);\n }\n }\n } catch (error) {\n this.log.error(`Batch handler failed`, error);\n\n // Mark all items as failed and reject their promises\n for (const id of itemIdsToProcess) {\n const itemState = context.itemStates.get(id);\n if (itemState) {\n itemState.status = \"failed\";\n itemState.error = error as Error;\n // Only reject if someone is waiting (promise was created)\n itemState.reject?.(error as Error);\n }\n }\n } finally {\n promise.resolve();\n context.activeHandlers = context.activeHandlers.filter(\n (it) => it !== promise,\n );\n\n // Only delete partition if no new items arrived during processing\n const currentPartition = context.partitions.get(partitionKey);\n if (currentPartition?.flushing && currentPartition.itemIds.length === 0) {\n context.partitions.delete(partitionKey);\n } else if (currentPartition) {\n // Reset flushing flag if partition still exists with items\n currentPartition.flushing = false;\n\n // Restart timeout for items that arrived during flush\n if (currentPartition.itemIds.length > 0 && !currentPartition.timeout) {\n currentPartition.timeout = this.dateTime.createTimeout(() => {\n this.log.trace(\n `Batch partition '${partitionKey}' timed out, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush batch partition '${partitionKey}' on timeout`,\n error,\n ),\n );\n }, maxDuration);\n }\n }\n }\n }\n\n /**\n * Mark the context as ready and start processing buffered items.\n * Called after the \"ready\" 
hook.\n */\n async markReady<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): Promise<void> {\n this.log.debug(\n \"Batch processor is now ready, starting to process buffered items...\",\n );\n context.isReady = true;\n await this.startProcessing(context);\n }\n\n /**\n * Mark the context as shutting down and flush all remaining items.\n */\n async shutdown<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): Promise<void> {\n this.log.debug(\"Flushing all remaining batch partitions on shutdown...\");\n context.isShuttingDown = true;\n await this.flush(context);\n this.log.debug(\"All batch partitions flushed\");\n }\n\n /**\n * Called after the \"ready\" hook to start processing buffered items that were\n * pushed during startup. This checks all partitions and starts timeouts/flushes\n * for items that were accumulated before the app was ready.\n */\n protected async startProcessing<TItem, TResponse>(\n context: BatchContext<TItem, TResponse>,\n ): Promise<void> {\n const maxSize = this.getMaxSize(context);\n const maxDuration = this.getMaxDuration(context);\n\n for (const [partitionKey, partition] of context.partitions.entries()) {\n if (partition.itemIds.length === 0) {\n continue;\n }\n\n this.log.trace(\n `Starting processing for partition '${partitionKey}' with ${partition.itemIds.length} buffered items`,\n );\n\n // Flush batches of maxSize while we have items >= maxSize\n while (partition.itemIds.length >= maxSize) {\n this.log.trace(\n `Partition '${partitionKey}' has ${partition.itemIds.length} items, flushing batch of ${maxSize}...`,\n );\n await this.flushPartition(context, partitionKey, maxSize);\n }\n\n // After flushing full batches, start timeout for any remaining items\n if (\n partition.itemIds.length > 0 &&\n !partition.timeout &&\n !partition.flushing\n ) {\n this.log.trace(\n `Starting timeout for partition '${partitionKey}' with ${partition.itemIds.length} remaining items`,\n );\n partition.timeout = 
this.dateTime.createTimeout(() => {\n this.log.trace(\n `Batch partition '${partitionKey}' timed out, flushing...`,\n );\n this.flushPartition(context, partitionKey).catch((error) =>\n this.log.error(\n `Failed to flush partition '${partitionKey}' on timeout after startup`,\n error,\n ),\n );\n }, maxDuration);\n }\n }\n }\n}\n","import {\n $hook,\n $inject,\n createPrimitive,\n KIND,\n Primitive,\n type Static,\n type TSchema,\n} from \"alepha\";\nimport type { DurationLike } from \"alepha/datetime\";\nimport type { RetryPrimitiveOptions } from \"alepha/retry\";\nimport {\n type BatchContext,\n type BatchItemState,\n type BatchItemStatus,\n BatchProvider,\n} from \"../providers/BatchProvider.ts\";\n\n/**\n * Creates a batch processing primitive for efficient grouping and processing of multiple operations.\n */\nexport const $batch = <TItem extends TSchema, TResponse>(\n options: BatchPrimitiveOptions<TItem, TResponse>,\n): BatchPrimitive<TItem, TResponse> =>\n createPrimitive(BatchPrimitive<TItem, TResponse>, options);\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BatchPrimitiveOptions<TItem extends TSchema, TResponse = any> {\n /**\n * TypeBox schema for validating each item added to the batch.\n */\n schema: TItem;\n\n /**\n * The batch processing handler function that processes arrays of validated items.\n */\n handler: (items: Static<TItem>[]) => TResponse;\n\n /**\n * Maximum number of items to collect before automatically flushing the batch.\n */\n maxSize?: number;\n\n /**\n * Maximum number of items that can be queued in a single partition.\n * If exceeded, push() will throw an error.\n */\n maxQueueSize?: number;\n\n /**\n * Maximum time to wait before flushing a batch, even if it hasn't reached maxSize.\n */\n maxDuration?: DurationLike;\n\n /**\n * Function to determine partition keys for grouping items into separate batches.\n */\n partitionBy?: (item: 
Static<TItem>) => string;\n\n /**\n * Maximum number of batch handlers that can execute simultaneously.\n */\n concurrency?: number;\n\n /**\n * Retry configuration for failed batch processing operations.\n */\n retry?: Omit<RetryPrimitiveOptions<() => Array<Static<TItem>>>, \"handler\">;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport type { BatchItemState, BatchItemStatus };\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport class BatchPrimitive<\n TItem extends TSchema,\n TResponse = any,\n> extends Primitive<BatchPrimitiveOptions<TItem, TResponse>> {\n protected readonly batchProvider = $inject(BatchProvider);\n protected readonly context: BatchContext<Static<TItem>, TResponse>;\n\n constructor(\n ...args: ConstructorParameters<\n typeof Primitive<BatchPrimitiveOptions<TItem, TResponse>>\n >\n ) {\n super(...args);\n this.context = this.batchProvider.createContext(this.alepha, {\n handler: this.options.handler,\n maxSize: this.options.maxSize,\n maxQueueSize: this.options.maxQueueSize,\n maxDuration: this.options.maxDuration,\n partitionBy: this.options.partitionBy,\n concurrency: this.options.concurrency,\n retry: this.options.retry,\n });\n }\n\n /**\n * Pushes an item into the batch and returns immediately with a unique ID.\n * The item will be processed asynchronously with other items when the batch is flushed.\n * Use wait(id) to get the processing result.\n */\n public async push(item: Static<TItem>): Promise<string> {\n // Validate the item against the schema\n const validatedItem = this.alepha.codec.validate(this.options.schema, item);\n return this.batchProvider.push(this.context, validatedItem);\n }\n\n /**\n * Wait for a specific item to be processed and get its result.\n * @param id The item ID returned from push()\n * @returns The processing result\n * @throws If the item doesn't 
exist or processing failed\n */\n public async wait(id: string): Promise<TResponse> {\n return this.batchProvider.wait(this.context, id);\n }\n\n /**\n * Get the current status of an item.\n * @param id The item ID returned from push()\n * @returns Status information or undefined if item doesn't exist\n */\n public status(\n id: string,\n ):\n | { status: \"pending\" | \"processing\" }\n | { status: \"completed\"; result: TResponse }\n | { status: \"failed\"; error: Error }\n | undefined {\n return this.batchProvider.status(this.context, id);\n }\n\n /**\n * Flush all partitions or a specific partition.\n */\n public async flush(partitionKey?: string): Promise<void> {\n return this.batchProvider.flush(this.context, partitionKey);\n }\n\n /**\n * Clears completed and failed items from memory.\n * Call this periodically in long-running applications to prevent memory leaks.\n *\n * @param status Optional: only clear items with this specific status ('completed' or 'failed')\n * @returns The number of items cleared\n */\n public clearCompleted(status?: \"completed\" | \"failed\"): number {\n return this.batchProvider.clearCompleted(this.context, status);\n }\n\n protected readonly onReady = $hook({\n on: \"ready\",\n handler: async () => {\n await this.batchProvider.markReady(this.context);\n },\n });\n\n protected readonly dispose = $hook({\n on: \"stop\",\n priority: \"first\",\n handler: async () => {\n await this.batchProvider.shutdown(this.context);\n },\n });\n}\n\n$batch[KIND] = BatchPrimitive;\n","import { $module } from \"alepha\";\nimport { $batch } from \"./primitives/$batch.ts\";\nimport { BatchProvider } from \"./providers/BatchProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport * from \"./primitives/$batch.ts\";\nexport * from \"./providers/BatchProvider.ts\";\n\n// 
---------------------------------------------------------------------------------------------------------------------\n\n/**\n * This module allows you to group multiple asynchronous operations into a single \"batch,\" which is then processed together.\n * This is an essential pattern for improving performance, reducing I/O, and interacting efficiently with rate-limited APIs or databases.\n *\n * ```ts\n * import { Alepha, $hook, run, t } from \"alepha\";\n * import { $batch } from \"alepha/batch\";\n *\n * class LoggingService {\n * // define the batch processor\n * logBatch = $batch({\n * schema: t.text(),\n * maxSize: 10,\n * maxDuration: [5, \"seconds\"],\n * handler: async (items) => {\n * console.log(`[BATCH LOG] Processing ${items.length} events:`, items);\n * },\n * });\n *\n * // example of how to use it\n * onReady = $hook({\n * on: \"ready\",\n * handler: async () => {\n * // push() returns an ID immediately\n * const id1 = await this.logBatch.push(\"Application started.\");\n * const id2 = await this.logBatch.push(\"User authenticated.\");\n *\n * // optionally wait for processing to complete\n * await this.logBatch.wait(id1);\n *\n * // or check the status\n * const status = this.logBatch.status(id2);\n * console.log(status?.status); // \"pending\" | \"processing\" | \"completed\" | \"failed\"\n * },\n * });\n * }\n * ```\n *\n * @see {@link $batch}\n * @see {@link BatchProvider}\n * @module alepha.batch\n */\nexport const AlephaBatch = $module({\n name: \"alepha.batch\",\n primitives: [$batch],\n services: 
[BatchProvider],\n});\n"],"mappings":";;;;;;;;;;;AAgIA,IAAa,gBAAb,MAA2B;CACzB,AAAmB,MAAM,SAAS;CAClC,AAAmB,WAAW,QAAQ,iBAAiB;CACvD,AAAmB,gBAAgB,QAAQ,cAAc;;;;CAKzD,cACE,QACA,SACgC;AAChC,SAAO;GACL;GACA,4BAAY,IAAI,KAAK;GACrB,4BAAY,IAAI,KAAK;GACrB,gBAAgB,EAAE;GAClB,gBAAgB;GAChB,SAAS;GACT;GACD;;;;;CAMH,AAAU,WACR,SACQ;AACR,SAAO,QAAQ,QAAQ,WAAW;;;;;CAMpC,AAAU,eACR,SACQ;AACR,SAAO,QAAQ,QAAQ,eAAe;;;;;CAMxC,AAAU,eACR,SACc;AACd,SAAO,QAAQ,QAAQ,eAAe,CAAC,GAAG,SAAS;;;;;;;;;CAUrD,KACE,SACA,MACQ;EAER,MAAM,KAAK,YAAY;EAGvB,IAAIA;AACJ,MAAI;AACF,kBAAe,QAAQ,QAAQ,cAC3B,QAAQ,QAAQ,YAAY,KAAK,GACjC;WACG,OAAO;AACd,QAAK,IAAI,KACP,kEACA,EAAE,OAAO,CACV;AACD,kBAAe;;EAIjB,MAAMC,YAA8C;GAClD;GACA;GACA;GACA,QAAQ;GACT;AAID,UAAQ,WAAW,IAAI,IAAI,UAAU;AAGrC,MAAI,CAAC,QAAQ,WAAW,IAAI,aAAa,CACvC,SAAQ,WAAW,IAAI,cAAc;GACnC,SAAS,EAAE;GACX,UAAU;GACX,CAAC;EAEJ,MAAM,YAAY,QAAQ,WAAW,IAAI,aAAa;AAGtD,MACE,QAAQ,QAAQ,iBAAiB,UACjC,UAAU,QAAQ,UAAU,QAAQ,QAAQ,aAE5C,OAAM,IAAI,MACR,4CAA4C,aAAa,UAAU,QAAQ,QAAQ,aAAa,GACjG;AAIH,YAAU,QAAQ,KAAK,GAAG;EAE1B,MAAM,UAAU,KAAK,WAAW,QAAQ;EACxC,MAAM,cAAc,KAAK,eAAe,QAAQ;AAIhD,MAAI,QAAQ,SAEV;OAAI,UAAU,QAAQ,UAAU,SAAS;AACvC,SAAK,IAAI,MACP,oBAAoB,aAAa,wBAClC;AACD,SAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,oCAAoC,aAAa,gBACjD,MACD,CACF;cACQ,CAAC,UAAU,WAAW,CAAC,UAAU,SAE1C,WAAU,UAAU,KAAK,SAAS,oBAAoB;AACpD,SAAK,IAAI,MACP,oBAAoB,aAAa,0BAClC;AACD,SAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,oCAAoC,aAAa,eACjD,MACD,CACF;MACA,YAAY;QAIjB,MAAK,IAAI,MACP,gCAAgC,aAAa,wBAAwB,UAAU,QAAQ,OAAO,kBAC/F;AAIH,SAAO;;;;;;;;CAST,MAAM,KACJ,SACA,IACoB;EACpB,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,MAAI,CAAC,UACH,OAAM,IAAI,MAAM,iBAAiB,GAAG,aAAa;AAInD,MAAI,UAAU,WAAW,YACvB,QAAO,UAAU;AAEnB,MAAI,UAAU,WAAW,SACvB,OAAM,UAAU;AAIlB,MAAI,CAAC,UAAU,QACb,WAAU,UAAU,IAAI,SAAoB,SAAS,WAAW;AAC9D,aAAU,UAAU;AACpB,aAAU,SAAS;IACnB;AAGJ,SAAO,UAAU;;;;;;;CAQnB,OACE,SACA,IAKY;EACZ,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,MAAI,CAAC,UACH;AAGF,MAAI,UAAU,WAAW,YACvB,QAAO;GAAE,QAAQ;GAAa,QAAQ,UAAU;GAAS;AAE3D,MAAI,UAAU,WAAW,SACvB,QAAO;GAAE,QAAQ;GAAU,OAAO,UA
AU;GAAQ;AAEtD,SAAO,EAAE,QAAQ,UAAU,QAAQ;;;;;;;;;;CAWrC,eACE,SACA,QACQ;EACR,IAAI,QAAQ;AACZ,OAAK,MAAM,CAAC,IAAI,UAAU,QAAQ,WAChC,KAAI,QACF;OAAI,MAAM,WAAW,QAAQ;AAC3B,YAAQ,WAAW,OAAO,GAAG;AAC7B;;aAEO,MAAM,WAAW,eAAe,MAAM,WAAW,UAAU;AACpE,WAAQ,WAAW,OAAO,GAAG;AAC7B;;AAGJ,SAAO;;;;;CAMT,MAAM,MACJ,SACA,cACe;EACf,MAAMC,WAA4B,EAAE;AACpC,MAAI,cACF;OAAI,QAAQ,WAAW,IAAI,aAAa,CACtC,UAAS,KAAK,KAAK,eAAe,SAAS,aAAa,CAAC;QAG3D,MAAK,MAAM,OAAO,QAAQ,WAAW,MAAM,CACzC,UAAS,KAAK,KAAK,eAAe,SAAS,IAAI,CAAC;AAGpD,QAAM,QAAQ,IAAI,SAAS;;;;;CAM7B,MAAgB,eACd,SACA,cACA,OACe;EACf,MAAM,YAAY,QAAQ,WAAW,IAAI,aAAa;AACtD,MAAI,CAAC,aAAa,UAAU,QAAQ,WAAW,GAAG;AAChD,WAAQ,WAAW,OAAO,aAAa;AACvC;;AAIF,YAAU,SAAS,OAAO;AAC1B,YAAU,UAAU;EACpB,MAAM,cACJ,UAAU,SACN,KAAK,IAAI,OAAO,UAAU,QAAQ,OAAO,GACzC,UAAU,QAAQ;EACxB,MAAM,mBAAmB,UAAU,QAAQ,OAAO,GAAG,YAAY;AAGjE,YAAU,WAAW;EAGrB,MAAMC,iBAA0B,EAAE;AAClC,OAAK,MAAM,MAAM,kBAAkB;GACjC,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,OAAI,WAAW;AACb,cAAU,SAAS;AACnB,mBAAe,KAAK,UAAU,KAAK;;;EAIvC,MAAM,cAAc,KAAK,eAAe,QAAQ;EAChD,MAAM,cAAc,KAAK,eAAe,QAAQ;AAGhD,SAAO,QAAQ,eAAe,UAAU,aAAa;AACnD,QAAK,IAAI,MACP,+DACD;AAED,SAAM,QAAQ,KAAK,QAAQ,eAAe,KAAK,OAAO,GAAG,QAAQ,CAAC;;EAGpE,MAAM,UAAU,QAAQ,eAAqB;AAC7C,UAAQ,eAAe,KAAK,QAAQ;EACpC,IAAIC;AACJ,MAAI;AACF,YAAS,MAAM,QAAQ,OAAO,QAAQ,UAEpC,QAAQ,iBACJ,QAAQ,QAAQ,QAAQ,eAAe,GACvC,KAAK,cAAc,MACjB;IACE,GAAG,QAAQ,QAAQ;IACnB,SAAS,QAAQ,QAAQ;IAC1B,EACD,eACD,CACN;AAGD,QAAK,MAAM,MAAM,kBAAkB;IACjC,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,QAAI,WAAW;AACb,eAAU,SAAS;AACnB,eAAU,SAAS;AAEnB,eAAU,UAAU,OAAO;;;WAGxB,OAAO;AACd,QAAK,IAAI,MAAM,wBAAwB,MAAM;AAG7C,QAAK,MAAM,MAAM,kBAAkB;IACjC,MAAM,YAAY,QAAQ,WAAW,IAAI,GAAG;AAC5C,QAAI,WAAW;AACb,eAAU,SAAS;AACnB,eAAU,QAAQ;AAElB,eAAU,SAAS,MAAe;;;YAG9B;AACR,WAAQ,SAAS;AACjB,WAAQ,iBAAiB,QAAQ,eAAe,QAC7C,OAAO,OAAO,QAChB;GAGD,MAAM,mBAAmB,QAAQ,WAAW,IAAI,aAAa;AAC7D,OAAI,kBAAkB,YAAY,iBAAiB,QAAQ,WAAW,EACpE,SAAQ,WAAW,OAAO,aAAa;YAC9B,kBAAkB;AAE3B,qBAAiB,WAAW;AAG5B,QAAI,iBAAiB,QAAQ,SAAS,KAAK,CAAC,iBAAiB,QAC3D,kBAAiB,UAAU,KAAK,SAAS,oBAAoB;AAC3D,UAAK,IAAI,MACP,oBAAoB,aAAa,0BAClC;A
ACD,UAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,oCAAoC,aAAa,eACjD,MACD,CACF;OACA,YAAY;;;;;;;;CAUvB,MAAM,UACJ,SACe;AACf,OAAK,IAAI,MACP,sEACD;AACD,UAAQ,UAAU;AAClB,QAAM,KAAK,gBAAgB,QAAQ;;;;;CAMrC,MAAM,SACJ,SACe;AACf,OAAK,IAAI,MAAM,yDAAyD;AACxE,UAAQ,iBAAiB;AACzB,QAAM,KAAK,MAAM,QAAQ;AACzB,OAAK,IAAI,MAAM,+BAA+B;;;;;;;CAQhD,MAAgB,gBACd,SACe;EACf,MAAM,UAAU,KAAK,WAAW,QAAQ;EACxC,MAAM,cAAc,KAAK,eAAe,QAAQ;AAEhD,OAAK,MAAM,CAAC,cAAc,cAAc,QAAQ,WAAW,SAAS,EAAE;AACpE,OAAI,UAAU,QAAQ,WAAW,EAC/B;AAGF,QAAK,IAAI,MACP,sCAAsC,aAAa,SAAS,UAAU,QAAQ,OAAO,iBACtF;AAGD,UAAO,UAAU,QAAQ,UAAU,SAAS;AAC1C,SAAK,IAAI,MACP,cAAc,aAAa,QAAQ,UAAU,QAAQ,OAAO,4BAA4B,QAAQ,KACjG;AACD,UAAM,KAAK,eAAe,SAAS,cAAc,QAAQ;;AAI3D,OACE,UAAU,QAAQ,SAAS,KAC3B,CAAC,UAAU,WACX,CAAC,UAAU,UACX;AACA,SAAK,IAAI,MACP,mCAAmC,aAAa,SAAS,UAAU,QAAQ,OAAO,kBACnF;AACD,cAAU,UAAU,KAAK,SAAS,oBAAoB;AACpD,UAAK,IAAI,MACP,oBAAoB,aAAa,0BAClC;AACD,UAAK,eAAe,SAAS,aAAa,CAAC,OAAO,UAChD,KAAK,IAAI,MACP,8BAA8B,aAAa,6BAC3C,MACD,CACF;OACA,YAAY;;;;;;;;;;;ACljBvB,MAAa,UACX,YAEA,gBAAgB,gBAAkC,QAAQ;AAqD5D,IAAa,iBAAb,cAGU,UAAmD;CAC3D,AAAmB,gBAAgB,QAAQ,cAAc;CACzD,AAAmB;CAEnB,YACE,GAAG,MAGH;AACA,QAAM,GAAG,KAAK;AACd,OAAK,UAAU,KAAK,cAAc,cAAc,KAAK,QAAQ;GAC3D,SAAS,KAAK,QAAQ;GACtB,SAAS,KAAK,QAAQ;GACtB,cAAc,KAAK,QAAQ;GAC3B,aAAa,KAAK,QAAQ;GAC1B,aAAa,KAAK,QAAQ;GAC1B,aAAa,KAAK,QAAQ;GAC1B,OAAO,KAAK,QAAQ;GACrB,CAAC;;;;;;;CAQJ,MAAa,KAAK,MAAsC;EAEtD,MAAM,gBAAgB,KAAK,OAAO,MAAM,SAAS,KAAK,QAAQ,QAAQ,KAAK;AAC3E,SAAO,KAAK,cAAc,KAAK,KAAK,SAAS,cAAc;;;;;;;;CAS7D,MAAa,KAAK,IAAgC;AAChD,SAAO,KAAK,cAAc,KAAK,KAAK,SAAS,GAAG;;;;;;;CAQlD,AAAO,OACL,IAKY;AACZ,SAAO,KAAK,cAAc,OAAO,KAAK,SAAS,GAAG;;;;;CAMpD,MAAa,MAAM,cAAsC;AACvD,SAAO,KAAK,cAAc,MAAM,KAAK,SAAS,aAAa;;;;;;;;;CAU7D,AAAO,eAAe,QAAyC;AAC7D,SAAO,KAAK,cAAc,eAAe,KAAK,SAAS,OAAO;;CAGhE,AAAmB,UAAU,MAAM;EACjC,IAAI;EACJ,SAAS,YAAY;AACnB,SAAM,KAAK,cAAc,UAAU,KAAK,QAAQ;;EAEnD,CAAC;CAEF,AAAmB,UAAU,MAAM;EACjC,IAAI;EACJ,UAAU;EACV,SAAS,YAAY;AACnB,SAAM,KAAK,cAAc,SAAS,KAAK,QAAQ;;EAElD,CAAC;;AAGJ,OAAO,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtHf,
MAAa,cAAc,QAAQ;CACjC,MAAM;CACN,YAAY,CAAC,OAAO;CACpB,UAAU,CAAC,cAAc;CAC1B,CAAC"}
@@ -284,11 +284,13 @@ const AlephaBucket = $module({
284
284
  MemoryFileStorageProvider,
285
285
  LocalFileStorageProvider
286
286
  ],
287
- register: (alepha) => alepha.with({
288
- optional: true,
289
- provide: FileStorageProvider,
290
- use: alepha.isTest() ? MemoryFileStorageProvider : LocalFileStorageProvider
291
- })
287
+ register: (alepha) => {
288
+ alepha.with({
289
+ optional: true,
290
+ provide: FileStorageProvider,
291
+ use: alepha.isTest() || alepha.isServerless() ? MemoryFileStorageProvider : LocalFileStorageProvider
292
+ });
293
+ }
292
294
  });
293
295
 
294
296
  //#endregion
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":[],"sources":["../../src/bucket/errors/InvalidFileError.ts","../../src/bucket/providers/FileStorageProvider.ts","../../src/bucket/errors/FileNotFoundError.ts","../../src/bucket/providers/MemoryFileStorageProvider.ts","../../src/bucket/primitives/$bucket.ts","../../src/bucket/providers/LocalFileStorageProvider.ts","../../src/bucket/index.ts"],"sourcesContent":["export class InvalidFileError extends Error {\n public readonly status = 400;\n}\n","import type { FileLike } from \"alepha\";\n\nexport abstract class FileStorageProvider {\n /**\n * Uploads a file to the storage.\n *\n * @param bucketName - Container name\n * @param file - File to upload\n * @param fileId - Optional file identifier. If not provided, a unique ID will be generated.\n * @return The identifier of the uploaded file.\n */\n abstract upload(\n bucketName: string,\n file: FileLike,\n fileId?: string,\n ): Promise<string>;\n\n /**\n * Downloads a file from the storage.\n *\n * @param bucketName - Container name\n * @param fileId - Identifier of the file to download\n * @return The downloaded file as a FileLike object.\n */\n abstract download(bucketName: string, fileId: string): Promise<FileLike>;\n\n /**\n * Check if fileId exists in the storage bucket.\n *\n * @param bucketName - Container name\n * @param fileId - Identifier of the file to stream\n * @return True is the file exists, false otherwise.\n */\n abstract exists(bucketName: string, fileId: string): Promise<boolean>;\n\n /**\n * Delete permanently a file from the storage.\n *\n * @param bucketName - Container name\n * @param fileId - Identifier of the file to delete\n */\n abstract delete(bucketName: string, fileId: string): Promise<void>;\n}\n","import { AlephaError } from \"alepha\";\n\nexport class FileNotFoundError extends AlephaError {\n public readonly status = 404;\n}\n","import { randomUUID } from \"node:crypto\";\nimport { $inject, type FileLike } from \"alepha\";\nimport { FileDetector, 
FileSystemProvider } from \"alepha/file\";\nimport { FileNotFoundError } from \"../errors/FileNotFoundError.ts\";\nimport type { FileStorageProvider } from \"./FileStorageProvider.ts\";\n\nexport class MemoryFileStorageProvider implements FileStorageProvider {\n public readonly files: Record<string, FileLike> = {};\n protected readonly fileSystem = $inject(FileSystemProvider);\n protected readonly fileDetector = $inject(FileDetector);\n\n public async upload(\n bucketName: string,\n file: FileLike,\n fileId?: string,\n ): Promise<string> {\n fileId ??= this.createId();\n\n this.files[`${bucketName}/${fileId}`] = this.fileSystem.createFile({\n stream: file.stream(),\n name: file.name,\n type: file.type,\n size: file.size,\n });\n\n return fileId;\n }\n\n public async download(bucketName: string, fileId: string): Promise<FileLike> {\n const fileKey = `${bucketName}/${fileId}`;\n const file = this.files[fileKey];\n\n if (!file) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n\n return file;\n }\n\n public async exists(bucketName: string, fileId: string): Promise<boolean> {\n return `${bucketName}/${fileId}` in this.files;\n }\n\n public async delete(bucketName: string, fileId: string): Promise<void> {\n const fileKey = `${bucketName}/${fileId}`;\n if (!(fileKey in this.files)) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n\n delete this.files[fileKey];\n }\n\n protected createId(): string {\n return randomUUID();\n }\n}\n","import {\n $inject,\n createPrimitive,\n type FileLike,\n KIND,\n Primitive,\n type Service,\n} from \"alepha\";\nimport { FileSystemProvider } from \"alepha/file\";\nimport { InvalidFileError } from \"../errors/InvalidFileError.ts\";\nimport { FileStorageProvider } from \"../providers/FileStorageProvider.ts\";\nimport { MemoryFileStorageProvider } from \"../providers/MemoryFileStorageProvider.ts\";\n\n/**\n * Creates a bucket primitive for file storage and management with configurable 
validation.\n *\n * Provides a comprehensive file storage system that handles uploads, downloads, validation,\n * and management across multiple storage backends with MIME type and size limit controls.\n *\n * **Key Features**\n * - Multi-provider support (filesystem, cloud storage, in-memory)\n * - Automatic MIME type and file size validation\n * - Event integration for file operations monitoring\n * - Flexible per-bucket and per-operation configuration\n * - Smart file type and size detection\n *\n * **Common Use Cases**\n * - User profile pictures and document uploads\n * - Product images and media management\n * - Document storage and retrieval systems\n *\n * @example\n * ```ts\n * class MediaService {\n * images = $bucket({\n * name: \"user-images\",\n * mimeTypes: [\"image/jpeg\", \"image/png\", \"image/gif\"],\n * maxSize: 5 // 5MB limit\n * });\n *\n * documents = $bucket({\n * name: \"documents\",\n * mimeTypes: [\"application/pdf\", \"text/plain\"],\n * maxSize: 50 // 50MB limit\n * });\n *\n * async uploadProfileImage(file: FileLike, userId: string): Promise<string> {\n * const fileId = await this.images.upload(file);\n * await this.userService.updateProfileImage(userId, fileId);\n * return fileId;\n * }\n *\n * async downloadDocument(documentId: string): Promise<FileLike> {\n * return await this.documents.download(documentId);\n * }\n *\n * async deleteDocument(documentId: string): Promise<void> {\n * await this.documents.delete(documentId);\n * }\n * }\n * ```\n */\nexport const $bucket = (options: BucketPrimitiveOptions) =>\n createPrimitive(BucketPrimitive, options);\n\nexport interface BucketPrimitiveOptions extends BucketFileOptions {\n /**\n * File storage provider configuration for the bucket.\n *\n * Options:\n * - **\"memory\"**: In-memory storage (default for development, lost on restart)\n * - **Service<FileStorageProvider>**: Custom provider class (e.g., S3FileStorageProvider, AzureBlobProvider)\n * - **undefined**: Uses the default file 
storage provider from dependency injection\n *\n * **Provider Selection Guidelines**:\n * - **Development**: Use \"memory\" for fast, simple testing without external dependencies\n * - **Production**: Use cloud providers (S3, Azure Blob, Google Cloud Storage) for scalability\n * - **Local deployment**: Use filesystem providers for on-premise installations\n * - **Hybrid**: Use different providers for different bucket types (temp files vs permanent storage)\n *\n * **Provider Capabilities**:\n * - File persistence and durability guarantees\n * - Scalability and performance characteristics\n * - Geographic distribution and CDN integration\n * - Cost implications for storage and bandwidth\n * - Backup and disaster recovery features\n *\n * @default Uses injected FileStorageProvider\n * @example \"memory\"\n * @example S3FileStorageProvider\n * @example AzureBlobStorageProvider\n */\n provider?: Service<FileStorageProvider> | \"memory\";\n\n /**\n * Unique name identifier for the bucket.\n *\n * This name is used for:\n * - Storage backend organization and partitioning\n * - File path generation and URL construction\n * - Logging, monitoring, and debugging\n * - Access control and permissions management\n * - Backup and replication configuration\n *\n * **Naming Conventions**:\n * - Use lowercase with hyphens for consistency\n * - Include purpose or content type in the name\n * - Avoid spaces and special characters\n * - Consider environment prefixes for deployment isolation\n *\n * If not provided, defaults to the property key where the bucket is declared.\n *\n * @example \"user-avatars\"\n * @example \"product-images\"\n * @example \"legal-documents\"\n * @example \"temp-processing-files\"\n */\n name?: string;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BucketFileOptions {\n /**\n * Human-readable description of the bucket's purpose and contents.\n *\n * Used 
for:\n * - Documentation generation and API references\n * - Developer onboarding and system understanding\n * - Monitoring dashboards and admin interfaces\n * - Compliance and audit documentation\n *\n * **Description Best Practices**:\n * - Explain what types of files this bucket stores\n * - Mention any special handling or processing requirements\n * - Include information about retention policies if applicable\n * - Note any compliance or security considerations\n *\n * @example \"User profile pictures and avatar images\"\n * @example \"Product catalog images with automated thumbnail generation\"\n * @example \"Legal documents requiring long-term retention\"\n * @example \"Temporary files for data processing workflows\"\n */\n description?: string;\n\n /**\n * Array of allowed MIME types for files uploaded to this bucket.\n *\n * When specified, only files with these exact MIME types will be accepted.\n * Files with disallowed MIME types will be rejected with an InvalidFileError.\n *\n * **MIME Type Categories**:\n * - Images: \"image/jpeg\", \"image/png\", \"image/gif\", \"image/webp\", \"image/svg+xml\"\n * - Documents: \"application/pdf\", \"text/plain\", \"text/csv\"\n * - Office: \"application/msword\", \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\"\n * - Archives: \"application/zip\", \"application/x-tar\", \"application/gzip\"\n * - Media: \"video/mp4\", \"audio/mpeg\", \"audio/wav\"\n *\n * **Security Considerations**:\n * - Always validate MIME types for user uploads\n * - Be cautious with executable file types\n * - Consider using allow-lists rather than deny-lists\n * - Remember that MIME types can be spoofed by malicious users\n *\n * If not specified, all MIME types are allowed (not recommended for user uploads).\n *\n * @example [\"image/jpeg\", \"image/png\"] // Only JPEG and PNG images\n * @example [\"application/pdf\", \"text/plain\"] // Documents only\n * @example [\"video/mp4\", \"video/webm\"] // Video files\n 
*/\n mimeTypes?: string[];\n\n /**\n * Maximum file size allowed in megabytes (MB).\n *\n * Files larger than this limit will be rejected with an InvalidFileError.\n * This helps prevent:\n * - Storage quota exhaustion\n * - Memory issues during file processing\n * - Long upload times and timeouts\n * - Abuse of storage resources\n *\n * **Size Guidelines by File Type**:\n * - Profile images: 1-5 MB\n * - Product photos: 5-10 MB\n * - Documents: 10-50 MB\n * - Video files: 50-500 MB\n * - Data files: 100-1000 MB\n *\n * **Considerations**:\n * - Consider your storage costs and limits\n * - Factor in network upload speeds for users\n * - Account for processing requirements (thumbnails, compression)\n * - Set reasonable limits based on actual use cases\n *\n * @default 10 MB\n *\n * @example 1 // 1MB for small images\n * @example 25 // 25MB for documents\n * @example 100 // 100MB for media files\n */\n maxSize?: number;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport class BucketPrimitive extends Primitive<BucketPrimitiveOptions> {\n public readonly provider = this.$provider();\n private readonly fileSystem = $inject(FileSystemProvider);\n\n public get name() {\n return this.options.name ?? `${this.config.propertyKey}`;\n }\n\n /**\n * Uploads a file to the bucket.\n */\n public async upload(\n file: FileLike,\n options?: BucketFileOptions,\n ): Promise<string> {\n if (file instanceof File) {\n // our createFile is smarter than the browser's File constructor\n // by doing this, we can guess the MIME type and size!\n file = this.fileSystem.createFile({ file });\n }\n\n options = {\n ...this.options,\n ...options,\n };\n\n const mimeTypes = options.mimeTypes ?? undefined;\n const maxSize = options.maxSize ?? 
10; // Default to 10 MB if not specified\n\n if (mimeTypes) {\n const mimeType = file.type || \"application/octet-stream\";\n if (!mimeTypes.includes(mimeType)) {\n throw new InvalidFileError(\n `MIME type ${mimeType} is not allowed in bucket ${this.name}`,\n );\n }\n }\n\n // check size in bytes, convert MB to bytes\n if (file.size > maxSize * 1024 * 1024) {\n throw new InvalidFileError(\n `File size ${file.size} exceeds the maximum size of ${this.options.maxSize} MB in bucket ${this.name}`,\n );\n }\n\n const id = await this.provider.upload(this.name, file);\n\n await this.alepha.events.emit(\"bucket:file:uploaded\", {\n id,\n bucket: this,\n file,\n options,\n });\n\n return id;\n }\n\n /**\n * Delete permanently a file from the bucket.\n */\n public async delete(fileId: string, skipHook = false): Promise<void> {\n await this.provider.delete(this.name, fileId);\n\n if (skipHook) {\n return;\n }\n\n await this.alepha.events.emit(\"bucket:file:deleted\", {\n id: fileId,\n bucket: this,\n });\n }\n\n /**\n * Checks if a file exists in the bucket.\n */\n public async exists(fileId: string): Promise<boolean> {\n return this.provider.exists(this.name, fileId);\n }\n\n /**\n * Downloads a file from the bucket.\n */\n public async download(fileId: string): Promise<FileLike> {\n return this.provider.download(this.name, fileId);\n }\n\n protected $provider() {\n if (!this.options.provider) {\n return this.alepha.inject(FileStorageProvider);\n }\n if (this.options.provider === \"memory\") {\n return this.alepha.inject(MemoryFileStorageProvider);\n }\n return this.alepha.inject(this.options.provider);\n }\n}\n\n$bucket[KIND] = BucketPrimitive;\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BucketFileOptions {\n /**\n * Optional description of the bucket.\n */\n description?: string;\n\n /**\n * Allowed MIME types.\n */\n mimeTypes?: string[];\n\n /**\n * Maximum size of the 
files in the bucket.\n *\n * @default 10\n */\n maxSize?: number;\n}\n","import { randomUUID } from \"node:crypto\";\nimport type * as fs from \"node:fs\";\nimport { createReadStream } from \"node:fs\";\nimport { mkdir, stat, unlink } from \"node:fs/promises\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport {\n $atom,\n $hook,\n $inject,\n $use,\n Alepha,\n AlephaError,\n type FileLike,\n type Static,\n t,\n} from \"alepha\";\nimport { FileDetector, FileSystemProvider } from \"alepha/file\";\nimport { $logger } from \"alepha/logger\";\nimport { FileNotFoundError } from \"../errors/FileNotFoundError.ts\";\nimport { $bucket } from \"../primitives/$bucket.ts\";\nimport type { FileStorageProvider } from \"./FileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Local file storage configuration atom\n */\nexport const localFileStorageOptions = $atom({\n name: \"alepha.bucket.local.options\",\n schema: t.object({\n storagePath: t.string({\n description: \"Directory path where files will be stored\",\n }),\n }),\n default: {\n storagePath: \"node_modules/.alepha/buckets\",\n },\n});\n\nexport type LocalFileStorageProviderOptions = Static<\n typeof localFileStorageOptions.schema\n>;\n\ndeclare module \"alepha\" {\n interface State {\n [localFileStorageOptions.key]: LocalFileStorageProviderOptions;\n }\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport class LocalFileStorageProvider implements FileStorageProvider {\n protected readonly alepha = $inject(Alepha);\n protected readonly log = $logger();\n protected readonly fileDetector = $inject(FileDetector);\n protected readonly fileSystemProvider = $inject(FileSystemProvider);\n protected readonly options = $use(localFileStorageOptions);\n\n protected get storagePath(): string {\n return 
this.options.storagePath;\n }\n\n protected readonly onConfigure = $hook({\n on: \"configure\",\n handler: async () => {\n if (\n this.alepha.isTest() &&\n this.storagePath === localFileStorageOptions.options.default.storagePath\n ) {\n this.alepha.store.set(localFileStorageOptions, {\n storagePath: join(tmpdir(), `alepha-test-${Date.now()}`),\n });\n }\n },\n });\n\n protected readonly onStart = $hook({\n on: \"start\",\n handler: async () => {\n try {\n await mkdir(this.storagePath, { recursive: true });\n } catch {}\n\n for (const bucket of this.alepha.primitives($bucket)) {\n if (bucket.provider !== this) {\n continue;\n }\n\n await mkdir(join(this.storagePath, bucket.name), {\n recursive: true,\n });\n\n this.log.debug(`Bucket '${bucket.name}' at ${this.storagePath} OK`);\n }\n },\n });\n\n public async upload(\n bucketName: string,\n file: FileLike,\n fileId?: string,\n ): Promise<string> {\n fileId ??= this.createId(file.type);\n\n this.log.trace(`Uploading file to ${bucketName}`);\n\n await this.fileSystemProvider.writeFile(\n this.path(bucketName, fileId),\n file,\n );\n\n return fileId;\n }\n\n public async download(bucketName: string, fileId: string): Promise<FileLike> {\n const filePath = this.path(bucketName, fileId);\n\n try {\n const stats = await stat(filePath);\n const mimeType = this.fileDetector.getContentType(fileId);\n\n return this.fileSystemProvider.createFile({\n stream: createReadStream(filePath),\n name: fileId,\n type: mimeType,\n size: stats.size,\n });\n } catch (error) {\n if (this.isErrorNoEntry(error)) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n throw new AlephaError(\"Invalid file operation\", { cause: error });\n }\n }\n\n public async exists(bucketName: string, fileId: string): Promise<boolean> {\n try {\n await stat(this.path(bucketName, fileId));\n return true;\n } catch (error) {\n if (this.isErrorNoEntry(error)) {\n return false;\n }\n throw new AlephaError(\"Error checking file existence\", { 
cause: error });\n }\n }\n\n public async delete(bucketName: string, fileId: string): Promise<void> {\n try {\n return await unlink(this.path(bucketName, fileId));\n } catch (error) {\n if (this.isErrorNoEntry(error)) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n throw new AlephaError(\"Error deleting file\", { cause: error });\n }\n }\n\n protected stat(bucket: string, fileId: string): Promise<fs.Stats> {\n return stat(this.path(bucket, fileId));\n }\n\n protected createId(mimeType: string): string {\n const ext = this.fileDetector.getExtensionFromMimeType(mimeType);\n return `${randomUUID()}.${ext}`;\n }\n\n protected path(bucket: string, fileId = \"\"): string {\n return join(this.storagePath, bucket, fileId);\n }\n\n protected isErrorNoEntry(error: unknown): boolean {\n return error instanceof Error && \"code\" in error && error.code === \"ENOENT\";\n }\n}\n","import { $module, type FileLike } from \"alepha\";\nimport {\n $bucket,\n type BucketFileOptions,\n type BucketPrimitive,\n} from \"./primitives/$bucket.ts\";\nimport { FileStorageProvider } from \"./providers/FileStorageProvider.ts\";\nimport { LocalFileStorageProvider } from \"./providers/LocalFileStorageProvider.ts\";\nimport { MemoryFileStorageProvider } from \"./providers/MemoryFileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport * from \"./errors/FileNotFoundError.ts\";\nexport * from \"./primitives/$bucket.ts\";\nexport * from \"./providers/FileStorageProvider.ts\";\nexport * from \"./providers/LocalFileStorageProvider.ts\";\nexport * from \"./providers/MemoryFileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\ndeclare module \"alepha\" {\n interface Hooks {\n /**\n * Triggered when a file is uploaded to a bucket.\n * Can be used to perform actions after a file is 
uploaded, like creating a database record!\n */\n \"bucket:file:uploaded\": {\n id: string;\n file: FileLike;\n bucket: BucketPrimitive;\n options: BucketFileOptions;\n };\n /**\n * Triggered when a file is deleted from a bucket.\n */\n \"bucket:file:deleted\": {\n id: string;\n bucket: BucketPrimitive;\n };\n }\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Provides file storage capabilities through declarative bucket primitives with support for multiple storage backends.\n *\n * The bucket module enables unified file operations across different storage systems using the `$bucket` primitive\n * on class properties. It abstracts storage provider differences, offering consistent APIs for local filesystem,\n * cloud storage, or in-memory storage for testing environments.\n *\n * @see {@link $bucket}\n * @see {@link FileStorageProvider}\n * @module alepha.bucket\n */\nexport const AlephaBucket = $module({\n name: \"alepha.bucket\",\n primitives: [$bucket],\n services: [\n FileStorageProvider,\n MemoryFileStorageProvider,\n LocalFileStorageProvider,\n ],\n register: (alepha) =>\n alepha.with({\n optional: true,\n provide: FileStorageProvider,\n use: alepha.isTest()\n ? 
MemoryFileStorageProvider\n : LocalFileStorageProvider,\n }),\n});\n"],"mappings":";;;;;;;;;;AAAA,IAAa,mBAAb,cAAsC,MAAM;CAC1C,AAAgB,SAAS;;;;;ACC3B,IAAsB,sBAAtB,MAA0C;;;;ACA1C,IAAa,oBAAb,cAAuC,YAAY;CACjD,AAAgB,SAAS;;;;;ACG3B,IAAa,4BAAb,MAAsE;CACpE,AAAgB,QAAkC,EAAE;CACpD,AAAmB,aAAa,QAAQ,mBAAmB;CAC3D,AAAmB,eAAe,QAAQ,aAAa;CAEvD,MAAa,OACX,YACA,MACA,QACiB;AACjB,aAAW,KAAK,UAAU;AAE1B,OAAK,MAAM,GAAG,WAAW,GAAG,YAAY,KAAK,WAAW,WAAW;GACjE,QAAQ,KAAK,QAAQ;GACrB,MAAM,KAAK;GACX,MAAM,KAAK;GACX,MAAM,KAAK;GACZ,CAAC;AAEF,SAAO;;CAGT,MAAa,SAAS,YAAoB,QAAmC;EAC3E,MAAM,UAAU,GAAG,WAAW,GAAG;EACjC,MAAM,OAAO,KAAK,MAAM;AAExB,MAAI,CAAC,KACH,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAGlE,SAAO;;CAGT,MAAa,OAAO,YAAoB,QAAkC;AACxE,SAAO,GAAG,WAAW,GAAG,YAAY,KAAK;;CAG3C,MAAa,OAAO,YAAoB,QAA+B;EACrE,MAAM,UAAU,GAAG,WAAW,GAAG;AACjC,MAAI,EAAE,WAAW,KAAK,OACpB,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAGlE,SAAO,KAAK,MAAM;;CAGpB,AAAU,WAAmB;AAC3B,SAAO,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACSvB,MAAa,WAAW,YACtB,gBAAgB,iBAAiB,QAAQ;AA+I3C,IAAa,kBAAb,cAAqC,UAAkC;CACrE,AAAgB,WAAW,KAAK,WAAW;CAC3C,AAAiB,aAAa,QAAQ,mBAAmB;CAEzD,IAAW,OAAO;AAChB,SAAO,KAAK,QAAQ,QAAQ,GAAG,KAAK,OAAO;;;;;CAM7C,MAAa,OACX,MACA,SACiB;AACjB,MAAI,gBAAgB,KAGlB,QAAO,KAAK,WAAW,WAAW,EAAE,MAAM,CAAC;AAG7C,YAAU;GACR,GAAG,KAAK;GACR,GAAG;GACJ;EAED,MAAM,YAAY,QAAQ,aAAa;EACvC,MAAM,UAAU,QAAQ,WAAW;AAEnC,MAAI,WAAW;GACb,MAAM,WAAW,KAAK,QAAQ;AAC9B,OAAI,CAAC,UAAU,SAAS,SAAS,CAC/B,OAAM,IAAI,iBACR,aAAa,SAAS,4BAA4B,KAAK,OACxD;;AAKL,MAAI,KAAK,OAAO,UAAU,OAAO,KAC/B,OAAM,IAAI,iBACR,aAAa,KAAK,KAAK,+BAA+B,KAAK,QAAQ,QAAQ,gBAAgB,KAAK,OACjG;EAGH,MAAM,KAAK,MAAM,KAAK,SAAS,OAAO,KAAK,MAAM,KAAK;AAEtD,QAAM,KAAK,OAAO,OAAO,KAAK,wBAAwB;GACpD;GACA,QAAQ;GACR;GACA;GACD,CAAC;AAEF,SAAO;;;;;CAMT,MAAa,OAAO,QAAgB,WAAW,OAAsB;AACnE,QAAM,KAAK,SAAS,OAAO,KAAK,MAAM,OAAO;AAE7C,MAAI,SACF;AAGF,QAAM,KAAK,OAAO,OAAO,KAAK,uBAAuB;GACnD,IAAI;GACJ,QAAQ;GACT,CAAC;;;;;CAMJ,MAAa,OAAO,QAAkC;AACpD,SAAO,KAAK,SAAS,OAAO,KAAK,MAAM,OAAO;;;;;CAMhD,MAAa,SAAS,QAAmC;AACvD,SAAO,KAAK,SAAS,SAAS,KAAK,MAAM,OAAO;;CAGlD,AAAU,YAA
Y;AACpB,MAAI,CAAC,KAAK,QAAQ,SAChB,QAAO,KAAK,OAAO,OAAO,oBAAoB;AAEhD,MAAI,KAAK,QAAQ,aAAa,SAC5B,QAAO,KAAK,OAAO,OAAO,0BAA0B;AAEtD,SAAO,KAAK,OAAO,OAAO,KAAK,QAAQ,SAAS;;;AAIpD,QAAQ,QAAQ;;;;;;;ACpRhB,MAAa,0BAA0B,MAAM;CAC3C,MAAM;CACN,QAAQ,EAAE,OAAO,EACf,aAAa,EAAE,OAAO,EACpB,aAAa,6CACd,CAAC,EACH,CAAC;CACF,SAAS,EACP,aAAa,gCACd;CACF,CAAC;AAcF,IAAa,2BAAb,MAAqE;CACnE,AAAmB,SAAS,QAAQ,OAAO;CAC3C,AAAmB,MAAM,SAAS;CAClC,AAAmB,eAAe,QAAQ,aAAa;CACvD,AAAmB,qBAAqB,QAAQ,mBAAmB;CACnE,AAAmB,UAAU,KAAK,wBAAwB;CAE1D,IAAc,cAAsB;AAClC,SAAO,KAAK,QAAQ;;CAGtB,AAAmB,cAAc,MAAM;EACrC,IAAI;EACJ,SAAS,YAAY;AACnB,OACE,KAAK,OAAO,QAAQ,IACpB,KAAK,gBAAgB,wBAAwB,QAAQ,QAAQ,YAE7D,MAAK,OAAO,MAAM,IAAI,yBAAyB,EAC7C,aAAa,KAAK,QAAQ,EAAE,eAAe,KAAK,KAAK,GAAG,EACzD,CAAC;;EAGP,CAAC;CAEF,AAAmB,UAAU,MAAM;EACjC,IAAI;EACJ,SAAS,YAAY;AACnB,OAAI;AACF,UAAM,MAAM,KAAK,aAAa,EAAE,WAAW,MAAM,CAAC;WAC5C;AAER,QAAK,MAAM,UAAU,KAAK,OAAO,WAAW,QAAQ,EAAE;AACpD,QAAI,OAAO,aAAa,KACtB;AAGF,UAAM,MAAM,KAAK,KAAK,aAAa,OAAO,KAAK,EAAE,EAC/C,WAAW,MACZ,CAAC;AAEF,SAAK,IAAI,MAAM,WAAW,OAAO,KAAK,OAAO,KAAK,YAAY,KAAK;;;EAGxE,CAAC;CAEF,MAAa,OACX,YACA,MACA,QACiB;AACjB,aAAW,KAAK,SAAS,KAAK,KAAK;AAEnC,OAAK,IAAI,MAAM,qBAAqB,aAAa;AAEjD,QAAM,KAAK,mBAAmB,UAC5B,KAAK,KAAK,YAAY,OAAO,EAC7B,KACD;AAED,SAAO;;CAGT,MAAa,SAAS,YAAoB,QAAmC;EAC3E,MAAM,WAAW,KAAK,KAAK,YAAY,OAAO;AAE9C,MAAI;GACF,MAAM,QAAQ,MAAM,KAAK,SAAS;GAClC,MAAM,WAAW,KAAK,aAAa,eAAe,OAAO;AAEzD,UAAO,KAAK,mBAAmB,WAAW;IACxC,QAAQ,iBAAiB,SAAS;IAClC,MAAM;IACN,MAAM;IACN,MAAM,MAAM;IACb,CAAC;WACK,OAAO;AACd,OAAI,KAAK,eAAe,MAAM,CAC5B,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAElE,SAAM,IAAI,YAAY,0BAA0B,EAAE,OAAO,OAAO,CAAC;;;CAIrE,MAAa,OAAO,YAAoB,QAAkC;AACxE,MAAI;AACF,SAAM,KAAK,KAAK,KAAK,YAAY,OAAO,CAAC;AACzC,UAAO;WACA,OAAO;AACd,OAAI,KAAK,eAAe,MAAM,CAC5B,QAAO;AAET,SAAM,IAAI,YAAY,iCAAiC,EAAE,OAAO,OAAO,CAAC;;;CAI5E,MAAa,OAAO,YAAoB,QAA+B;AACrE,MAAI;AACF,UAAO,MAAM,OAAO,KAAK,KAAK,YAAY,OAAO,CAAC;WAC3C,OAAO;AACd,OAAI,KAAK,eAAe,MAAM,CAC5B,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAElE,SAAM,IAAI,YAAY,uBAAuB,EAAE,OAAO,OAAO,CAAC;;;CAIlE,AAAU,KAAK,QAAgB,QAAmC
;AAChE,SAAO,KAAK,KAAK,KAAK,QAAQ,OAAO,CAAC;;CAGxC,AAAU,SAAS,UAA0B;EAC3C,MAAM,MAAM,KAAK,aAAa,yBAAyB,SAAS;AAChE,SAAO,GAAG,YAAY,CAAC,GAAG;;CAG5B,AAAU,KAAK,QAAgB,SAAS,IAAY;AAClD,SAAO,KAAK,KAAK,aAAa,QAAQ,OAAO;;CAG/C,AAAU,eAAe,OAAyB;AAChD,SAAO,iBAAiB,SAAS,UAAU,SAAS,MAAM,SAAS;;;;;;;;;;;;;;;;;ACtHvE,MAAa,eAAe,QAAQ;CAClC,MAAM;CACN,YAAY,CAAC,QAAQ;CACrB,UAAU;EACR;EACA;EACA;EACD;CACD,WAAW,WACT,OAAO,KAAK;EACV,UAAU;EACV,SAAS;EACT,KAAK,OAAO,QAAQ,GAChB,4BACA;EACL,CAAC;CACL,CAAC"}
1
+ {"version":3,"file":"index.js","names":[],"sources":["../../src/bucket/errors/InvalidFileError.ts","../../src/bucket/providers/FileStorageProvider.ts","../../src/bucket/errors/FileNotFoundError.ts","../../src/bucket/providers/MemoryFileStorageProvider.ts","../../src/bucket/primitives/$bucket.ts","../../src/bucket/providers/LocalFileStorageProvider.ts","../../src/bucket/index.ts"],"sourcesContent":["export class InvalidFileError extends Error {\n public readonly status = 400;\n}\n","import type { FileLike } from \"alepha\";\n\nexport abstract class FileStorageProvider {\n /**\n * Uploads a file to the storage.\n *\n * @param bucketName - Container name\n * @param file - File to upload\n * @param fileId - Optional file identifier. If not provided, a unique ID will be generated.\n * @return The identifier of the uploaded file.\n */\n abstract upload(\n bucketName: string,\n file: FileLike,\n fileId?: string,\n ): Promise<string>;\n\n /**\n * Downloads a file from the storage.\n *\n * @param bucketName - Container name\n * @param fileId - Identifier of the file to download\n * @return The downloaded file as a FileLike object.\n */\n abstract download(bucketName: string, fileId: string): Promise<FileLike>;\n\n /**\n * Check if fileId exists in the storage bucket.\n *\n * @param bucketName - Container name\n * @param fileId - Identifier of the file to stream\n * @return True is the file exists, false otherwise.\n */\n abstract exists(bucketName: string, fileId: string): Promise<boolean>;\n\n /**\n * Delete permanently a file from the storage.\n *\n * @param bucketName - Container name\n * @param fileId - Identifier of the file to delete\n */\n abstract delete(bucketName: string, fileId: string): Promise<void>;\n}\n","import { AlephaError } from \"alepha\";\n\nexport class FileNotFoundError extends AlephaError {\n public readonly status = 404;\n}\n","import { randomUUID } from \"node:crypto\";\nimport { $inject, type FileLike } from \"alepha\";\nimport { FileDetector, 
FileSystemProvider } from \"alepha/file\";\nimport { FileNotFoundError } from \"../errors/FileNotFoundError.ts\";\nimport type { FileStorageProvider } from \"./FileStorageProvider.ts\";\n\nexport class MemoryFileStorageProvider implements FileStorageProvider {\n public readonly files: Record<string, FileLike> = {};\n protected readonly fileSystem = $inject(FileSystemProvider);\n protected readonly fileDetector = $inject(FileDetector);\n\n public async upload(\n bucketName: string,\n file: FileLike,\n fileId?: string,\n ): Promise<string> {\n fileId ??= this.createId();\n\n this.files[`${bucketName}/${fileId}`] = this.fileSystem.createFile({\n stream: file.stream(),\n name: file.name,\n type: file.type,\n size: file.size,\n });\n\n return fileId;\n }\n\n public async download(bucketName: string, fileId: string): Promise<FileLike> {\n const fileKey = `${bucketName}/${fileId}`;\n const file = this.files[fileKey];\n\n if (!file) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n\n return file;\n }\n\n public async exists(bucketName: string, fileId: string): Promise<boolean> {\n return `${bucketName}/${fileId}` in this.files;\n }\n\n public async delete(bucketName: string, fileId: string): Promise<void> {\n const fileKey = `${bucketName}/${fileId}`;\n if (!(fileKey in this.files)) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n\n delete this.files[fileKey];\n }\n\n protected createId(): string {\n return randomUUID();\n }\n}\n","import {\n $inject,\n createPrimitive,\n type FileLike,\n KIND,\n Primitive,\n type Service,\n} from \"alepha\";\nimport { FileSystemProvider } from \"alepha/file\";\nimport { InvalidFileError } from \"../errors/InvalidFileError.ts\";\nimport { FileStorageProvider } from \"../providers/FileStorageProvider.ts\";\nimport { MemoryFileStorageProvider } from \"../providers/MemoryFileStorageProvider.ts\";\n\n/**\n * Creates a bucket primitive for file storage and management with configurable 
validation.\n *\n * Provides a comprehensive file storage system that handles uploads, downloads, validation,\n * and management across multiple storage backends with MIME type and size limit controls.\n *\n * **Key Features**\n * - Multi-provider support (filesystem, cloud storage, in-memory)\n * - Automatic MIME type and file size validation\n * - Event integration for file operations monitoring\n * - Flexible per-bucket and per-operation configuration\n * - Smart file type and size detection\n *\n * **Common Use Cases**\n * - User profile pictures and document uploads\n * - Product images and media management\n * - Document storage and retrieval systems\n *\n * @example\n * ```ts\n * class MediaService {\n * images = $bucket({\n * name: \"user-images\",\n * mimeTypes: [\"image/jpeg\", \"image/png\", \"image/gif\"],\n * maxSize: 5 // 5MB limit\n * });\n *\n * documents = $bucket({\n * name: \"documents\",\n * mimeTypes: [\"application/pdf\", \"text/plain\"],\n * maxSize: 50 // 50MB limit\n * });\n *\n * async uploadProfileImage(file: FileLike, userId: string): Promise<string> {\n * const fileId = await this.images.upload(file);\n * await this.userService.updateProfileImage(userId, fileId);\n * return fileId;\n * }\n *\n * async downloadDocument(documentId: string): Promise<FileLike> {\n * return await this.documents.download(documentId);\n * }\n *\n * async deleteDocument(documentId: string): Promise<void> {\n * await this.documents.delete(documentId);\n * }\n * }\n * ```\n */\nexport const $bucket = (options: BucketPrimitiveOptions) =>\n createPrimitive(BucketPrimitive, options);\n\nexport interface BucketPrimitiveOptions extends BucketFileOptions {\n /**\n * File storage provider configuration for the bucket.\n *\n * Options:\n * - **\"memory\"**: In-memory storage (default for development, lost on restart)\n * - **Service<FileStorageProvider>**: Custom provider class (e.g., S3FileStorageProvider, AzureBlobProvider)\n * - **undefined**: Uses the default file 
storage provider from dependency injection\n *\n * **Provider Selection Guidelines**:\n * - **Development**: Use \"memory\" for fast, simple testing without external dependencies\n * - **Production**: Use cloud providers (S3, Azure Blob, Google Cloud Storage) for scalability\n * - **Local deployment**: Use filesystem providers for on-premise installations\n * - **Hybrid**: Use different providers for different bucket types (temp files vs permanent storage)\n *\n * **Provider Capabilities**:\n * - File persistence and durability guarantees\n * - Scalability and performance characteristics\n * - Geographic distribution and CDN integration\n * - Cost implications for storage and bandwidth\n * - Backup and disaster recovery features\n *\n * @default Uses injected FileStorageProvider\n * @example \"memory\"\n * @example S3FileStorageProvider\n * @example AzureBlobStorageProvider\n */\n provider?: Service<FileStorageProvider> | \"memory\";\n\n /**\n * Unique name identifier for the bucket.\n *\n * This name is used for:\n * - Storage backend organization and partitioning\n * - File path generation and URL construction\n * - Logging, monitoring, and debugging\n * - Access control and permissions management\n * - Backup and replication configuration\n *\n * **Naming Conventions**:\n * - Use lowercase with hyphens for consistency\n * - Include purpose or content type in the name\n * - Avoid spaces and special characters\n * - Consider environment prefixes for deployment isolation\n *\n * If not provided, defaults to the property key where the bucket is declared.\n *\n * @example \"user-avatars\"\n * @example \"product-images\"\n * @example \"legal-documents\"\n * @example \"temp-processing-files\"\n */\n name?: string;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BucketFileOptions {\n /**\n * Human-readable description of the bucket's purpose and contents.\n *\n * Used 
for:\n * - Documentation generation and API references\n * - Developer onboarding and system understanding\n * - Monitoring dashboards and admin interfaces\n * - Compliance and audit documentation\n *\n * **Description Best Practices**:\n * - Explain what types of files this bucket stores\n * - Mention any special handling or processing requirements\n * - Include information about retention policies if applicable\n * - Note any compliance or security considerations\n *\n * @example \"User profile pictures and avatar images\"\n * @example \"Product catalog images with automated thumbnail generation\"\n * @example \"Legal documents requiring long-term retention\"\n * @example \"Temporary files for data processing workflows\"\n */\n description?: string;\n\n /**\n * Array of allowed MIME types for files uploaded to this bucket.\n *\n * When specified, only files with these exact MIME types will be accepted.\n * Files with disallowed MIME types will be rejected with an InvalidFileError.\n *\n * **MIME Type Categories**:\n * - Images: \"image/jpeg\", \"image/png\", \"image/gif\", \"image/webp\", \"image/svg+xml\"\n * - Documents: \"application/pdf\", \"text/plain\", \"text/csv\"\n * - Office: \"application/msword\", \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\"\n * - Archives: \"application/zip\", \"application/x-tar\", \"application/gzip\"\n * - Media: \"video/mp4\", \"audio/mpeg\", \"audio/wav\"\n *\n * **Security Considerations**:\n * - Always validate MIME types for user uploads\n * - Be cautious with executable file types\n * - Consider using allow-lists rather than deny-lists\n * - Remember that MIME types can be spoofed by malicious users\n *\n * If not specified, all MIME types are allowed (not recommended for user uploads).\n *\n * @example [\"image/jpeg\", \"image/png\"] // Only JPEG and PNG images\n * @example [\"application/pdf\", \"text/plain\"] // Documents only\n * @example [\"video/mp4\", \"video/webm\"] // Video files\n 
*/\n mimeTypes?: string[];\n\n /**\n * Maximum file size allowed in megabytes (MB).\n *\n * Files larger than this limit will be rejected with an InvalidFileError.\n * This helps prevent:\n * - Storage quota exhaustion\n * - Memory issues during file processing\n * - Long upload times and timeouts\n * - Abuse of storage resources\n *\n * **Size Guidelines by File Type**:\n * - Profile images: 1-5 MB\n * - Product photos: 5-10 MB\n * - Documents: 10-50 MB\n * - Video files: 50-500 MB\n * - Data files: 100-1000 MB\n *\n * **Considerations**:\n * - Consider your storage costs and limits\n * - Factor in network upload speeds for users\n * - Account for processing requirements (thumbnails, compression)\n * - Set reasonable limits based on actual use cases\n *\n * @default 10 MB\n *\n * @example 1 // 1MB for small images\n * @example 25 // 25MB for documents\n * @example 100 // 100MB for media files\n */\n maxSize?: number;\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport class BucketPrimitive extends Primitive<BucketPrimitiveOptions> {\n public readonly provider = this.$provider();\n private readonly fileSystem = $inject(FileSystemProvider);\n\n public get name() {\n return this.options.name ?? `${this.config.propertyKey}`;\n }\n\n /**\n * Uploads a file to the bucket.\n */\n public async upload(\n file: FileLike,\n options?: BucketFileOptions,\n ): Promise<string> {\n if (file instanceof File) {\n // our createFile is smarter than the browser's File constructor\n // by doing this, we can guess the MIME type and size!\n file = this.fileSystem.createFile({ file });\n }\n\n options = {\n ...this.options,\n ...options,\n };\n\n const mimeTypes = options.mimeTypes ?? undefined;\n const maxSize = options.maxSize ?? 
10; // Default to 10 MB if not specified\n\n if (mimeTypes) {\n const mimeType = file.type || \"application/octet-stream\";\n if (!mimeTypes.includes(mimeType)) {\n throw new InvalidFileError(\n `MIME type ${mimeType} is not allowed in bucket ${this.name}`,\n );\n }\n }\n\n // check size in bytes, convert MB to bytes\n if (file.size > maxSize * 1024 * 1024) {\n throw new InvalidFileError(\n `File size ${file.size} exceeds the maximum size of ${this.options.maxSize} MB in bucket ${this.name}`,\n );\n }\n\n const id = await this.provider.upload(this.name, file);\n\n await this.alepha.events.emit(\"bucket:file:uploaded\", {\n id,\n bucket: this,\n file,\n options,\n });\n\n return id;\n }\n\n /**\n * Delete permanently a file from the bucket.\n */\n public async delete(fileId: string, skipHook = false): Promise<void> {\n await this.provider.delete(this.name, fileId);\n\n if (skipHook) {\n return;\n }\n\n await this.alepha.events.emit(\"bucket:file:deleted\", {\n id: fileId,\n bucket: this,\n });\n }\n\n /**\n * Checks if a file exists in the bucket.\n */\n public async exists(fileId: string): Promise<boolean> {\n return this.provider.exists(this.name, fileId);\n }\n\n /**\n * Downloads a file from the bucket.\n */\n public async download(fileId: string): Promise<FileLike> {\n return this.provider.download(this.name, fileId);\n }\n\n protected $provider() {\n if (!this.options.provider) {\n return this.alepha.inject(FileStorageProvider);\n }\n if (this.options.provider === \"memory\") {\n return this.alepha.inject(MemoryFileStorageProvider);\n }\n return this.alepha.inject(this.options.provider);\n }\n}\n\n$bucket[KIND] = BucketPrimitive;\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport interface BucketFileOptions {\n /**\n * Optional description of the bucket.\n */\n description?: string;\n\n /**\n * Allowed MIME types.\n */\n mimeTypes?: string[];\n\n /**\n * Maximum size of the 
files in the bucket.\n *\n * @default 10\n */\n maxSize?: number;\n}\n","import { randomUUID } from \"node:crypto\";\nimport type * as fs from \"node:fs\";\nimport { createReadStream } from \"node:fs\";\nimport { mkdir, stat, unlink } from \"node:fs/promises\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport {\n $atom,\n $hook,\n $inject,\n $use,\n Alepha,\n AlephaError,\n type FileLike,\n type Static,\n t,\n} from \"alepha\";\nimport { FileDetector, FileSystemProvider } from \"alepha/file\";\nimport { $logger } from \"alepha/logger\";\nimport { FileNotFoundError } from \"../errors/FileNotFoundError.ts\";\nimport { $bucket } from \"../primitives/$bucket.ts\";\nimport type { FileStorageProvider } from \"./FileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Local file storage configuration atom\n */\nexport const localFileStorageOptions = $atom({\n name: \"alepha.bucket.local.options\",\n schema: t.object({\n storagePath: t.string({\n description: \"Directory path where files will be stored\",\n }),\n }),\n default: {\n storagePath: \"node_modules/.alepha/buckets\",\n },\n});\n\nexport type LocalFileStorageProviderOptions = Static<\n typeof localFileStorageOptions.schema\n>;\n\ndeclare module \"alepha\" {\n interface State {\n [localFileStorageOptions.key]: LocalFileStorageProviderOptions;\n }\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport class LocalFileStorageProvider implements FileStorageProvider {\n protected readonly alepha = $inject(Alepha);\n protected readonly log = $logger();\n protected readonly fileDetector = $inject(FileDetector);\n protected readonly fileSystemProvider = $inject(FileSystemProvider);\n protected readonly options = $use(localFileStorageOptions);\n\n protected get storagePath(): string {\n return 
this.options.storagePath;\n }\n\n protected readonly onConfigure = $hook({\n on: \"configure\",\n handler: async () => {\n if (\n this.alepha.isTest() &&\n this.storagePath === localFileStorageOptions.options.default.storagePath\n ) {\n this.alepha.store.set(localFileStorageOptions, {\n storagePath: join(tmpdir(), `alepha-test-${Date.now()}`),\n });\n }\n },\n });\n\n protected readonly onStart = $hook({\n on: \"start\",\n handler: async () => {\n try {\n await mkdir(this.storagePath, { recursive: true });\n } catch {}\n\n for (const bucket of this.alepha.primitives($bucket)) {\n if (bucket.provider !== this) {\n continue;\n }\n\n await mkdir(join(this.storagePath, bucket.name), {\n recursive: true,\n });\n\n this.log.debug(`Bucket '${bucket.name}' at ${this.storagePath} OK`);\n }\n },\n });\n\n public async upload(\n bucketName: string,\n file: FileLike,\n fileId?: string,\n ): Promise<string> {\n fileId ??= this.createId(file.type);\n\n this.log.trace(`Uploading file to ${bucketName}`);\n\n await this.fileSystemProvider.writeFile(\n this.path(bucketName, fileId),\n file,\n );\n\n return fileId;\n }\n\n public async download(bucketName: string, fileId: string): Promise<FileLike> {\n const filePath = this.path(bucketName, fileId);\n\n try {\n const stats = await stat(filePath);\n const mimeType = this.fileDetector.getContentType(fileId);\n\n return this.fileSystemProvider.createFile({\n stream: createReadStream(filePath),\n name: fileId,\n type: mimeType,\n size: stats.size,\n });\n } catch (error) {\n if (this.isErrorNoEntry(error)) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n throw new AlephaError(\"Invalid file operation\", { cause: error });\n }\n }\n\n public async exists(bucketName: string, fileId: string): Promise<boolean> {\n try {\n await stat(this.path(bucketName, fileId));\n return true;\n } catch (error) {\n if (this.isErrorNoEntry(error)) {\n return false;\n }\n throw new AlephaError(\"Error checking file existence\", { 
cause: error });\n }\n }\n\n public async delete(bucketName: string, fileId: string): Promise<void> {\n try {\n return await unlink(this.path(bucketName, fileId));\n } catch (error) {\n if (this.isErrorNoEntry(error)) {\n throw new FileNotFoundError(`File with ID ${fileId} not found.`);\n }\n throw new AlephaError(\"Error deleting file\", { cause: error });\n }\n }\n\n protected stat(bucket: string, fileId: string): Promise<fs.Stats> {\n return stat(this.path(bucket, fileId));\n }\n\n protected createId(mimeType: string): string {\n const ext = this.fileDetector.getExtensionFromMimeType(mimeType);\n return `${randomUUID()}.${ext}`;\n }\n\n protected path(bucket: string, fileId = \"\"): string {\n return join(this.storagePath, bucket, fileId);\n }\n\n protected isErrorNoEntry(error: unknown): boolean {\n return error instanceof Error && \"code\" in error && error.code === \"ENOENT\";\n }\n}\n","import { $module, type FileLike } from \"alepha\";\nimport {\n $bucket,\n type BucketFileOptions,\n type BucketPrimitive,\n} from \"./primitives/$bucket.ts\";\nimport { FileStorageProvider } from \"./providers/FileStorageProvider.ts\";\nimport { LocalFileStorageProvider } from \"./providers/LocalFileStorageProvider.ts\";\nimport { MemoryFileStorageProvider } from \"./providers/MemoryFileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\nexport * from \"./errors/FileNotFoundError.ts\";\nexport * from \"./primitives/$bucket.ts\";\nexport * from \"./providers/FileStorageProvider.ts\";\nexport * from \"./providers/LocalFileStorageProvider.ts\";\nexport * from \"./providers/MemoryFileStorageProvider.ts\";\n\n// ---------------------------------------------------------------------------------------------------------------------\n\ndeclare module \"alepha\" {\n interface Hooks {\n /**\n * Triggered when a file is uploaded to a bucket.\n * Can be used to perform actions after a file is 
uploaded, like creating a database record!\n */\n \"bucket:file:uploaded\": {\n id: string;\n file: FileLike;\n bucket: BucketPrimitive;\n options: BucketFileOptions;\n };\n /**\n * Triggered when a file is deleted from a bucket.\n */\n \"bucket:file:deleted\": {\n id: string;\n bucket: BucketPrimitive;\n };\n }\n}\n\n// ---------------------------------------------------------------------------------------------------------------------\n\n/**\n * Provides file storage capabilities through declarative bucket primitives with support for multiple storage backends.\n *\n * The bucket module enables unified file operations across different storage systems using the `$bucket` primitive\n * on class properties. It abstracts storage provider differences, offering consistent APIs for local filesystem,\n * cloud storage, or in-memory storage for testing environments.\n *\n * @see {@link $bucket}\n * @see {@link FileStorageProvider}\n * @module alepha.bucket\n */\nexport const AlephaBucket = $module({\n name: \"alepha.bucket\",\n primitives: [$bucket],\n services: [\n FileStorageProvider,\n MemoryFileStorageProvider,\n LocalFileStorageProvider,\n ],\n register: (alepha) => {\n alepha.with({\n optional: true,\n provide: FileStorageProvider,\n use:\n alepha.isTest() || alepha.isServerless()\n ? 
MemoryFileStorageProvider\n : LocalFileStorageProvider,\n });\n },\n});\n"],"mappings":";;;;;;;;;;AAAA,IAAa,mBAAb,cAAsC,MAAM;CAC1C,AAAgB,SAAS;;;;;ACC3B,IAAsB,sBAAtB,MAA0C;;;;ACA1C,IAAa,oBAAb,cAAuC,YAAY;CACjD,AAAgB,SAAS;;;;;ACG3B,IAAa,4BAAb,MAAsE;CACpE,AAAgB,QAAkC,EAAE;CACpD,AAAmB,aAAa,QAAQ,mBAAmB;CAC3D,AAAmB,eAAe,QAAQ,aAAa;CAEvD,MAAa,OACX,YACA,MACA,QACiB;AACjB,aAAW,KAAK,UAAU;AAE1B,OAAK,MAAM,GAAG,WAAW,GAAG,YAAY,KAAK,WAAW,WAAW;GACjE,QAAQ,KAAK,QAAQ;GACrB,MAAM,KAAK;GACX,MAAM,KAAK;GACX,MAAM,KAAK;GACZ,CAAC;AAEF,SAAO;;CAGT,MAAa,SAAS,YAAoB,QAAmC;EAC3E,MAAM,UAAU,GAAG,WAAW,GAAG;EACjC,MAAM,OAAO,KAAK,MAAM;AAExB,MAAI,CAAC,KACH,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAGlE,SAAO;;CAGT,MAAa,OAAO,YAAoB,QAAkC;AACxE,SAAO,GAAG,WAAW,GAAG,YAAY,KAAK;;CAG3C,MAAa,OAAO,YAAoB,QAA+B;EACrE,MAAM,UAAU,GAAG,WAAW,GAAG;AACjC,MAAI,EAAE,WAAW,KAAK,OACpB,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAGlE,SAAO,KAAK,MAAM;;CAGpB,AAAU,WAAmB;AAC3B,SAAO,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACSvB,MAAa,WAAW,YACtB,gBAAgB,iBAAiB,QAAQ;AA+I3C,IAAa,kBAAb,cAAqC,UAAkC;CACrE,AAAgB,WAAW,KAAK,WAAW;CAC3C,AAAiB,aAAa,QAAQ,mBAAmB;CAEzD,IAAW,OAAO;AAChB,SAAO,KAAK,QAAQ,QAAQ,GAAG,KAAK,OAAO;;;;;CAM7C,MAAa,OACX,MACA,SACiB;AACjB,MAAI,gBAAgB,KAGlB,QAAO,KAAK,WAAW,WAAW,EAAE,MAAM,CAAC;AAG7C,YAAU;GACR,GAAG,KAAK;GACR,GAAG;GACJ;EAED,MAAM,YAAY,QAAQ,aAAa;EACvC,MAAM,UAAU,QAAQ,WAAW;AAEnC,MAAI,WAAW;GACb,MAAM,WAAW,KAAK,QAAQ;AAC9B,OAAI,CAAC,UAAU,SAAS,SAAS,CAC/B,OAAM,IAAI,iBACR,aAAa,SAAS,4BAA4B,KAAK,OACxD;;AAKL,MAAI,KAAK,OAAO,UAAU,OAAO,KAC/B,OAAM,IAAI,iBACR,aAAa,KAAK,KAAK,+BAA+B,KAAK,QAAQ,QAAQ,gBAAgB,KAAK,OACjG;EAGH,MAAM,KAAK,MAAM,KAAK,SAAS,OAAO,KAAK,MAAM,KAAK;AAEtD,QAAM,KAAK,OAAO,OAAO,KAAK,wBAAwB;GACpD;GACA,QAAQ;GACR;GACA;GACD,CAAC;AAEF,SAAO;;;;;CAMT,MAAa,OAAO,QAAgB,WAAW,OAAsB;AACnE,QAAM,KAAK,SAAS,OAAO,KAAK,MAAM,OAAO;AAE7C,MAAI,SACF;AAGF,QAAM,KAAK,OAAO,OAAO,KAAK,uBAAuB;GACnD,IAAI;GACJ,QAAQ;GACT,CAAC;;;;;CAMJ,MAAa,OAAO,QAAkC;AACpD,SAAO,KAAK,SAAS,OAAO,KAAK,MAAM,OAAO;;;;;CAMhD,MAAa,SAAS,QAAmC;AACvD,SAAO,KAAK,SAAS,SAAS,KAAK,MAAM,OAAO;;CAGlD,AAA
U,YAAY;AACpB,MAAI,CAAC,KAAK,QAAQ,SAChB,QAAO,KAAK,OAAO,OAAO,oBAAoB;AAEhD,MAAI,KAAK,QAAQ,aAAa,SAC5B,QAAO,KAAK,OAAO,OAAO,0BAA0B;AAEtD,SAAO,KAAK,OAAO,OAAO,KAAK,QAAQ,SAAS;;;AAIpD,QAAQ,QAAQ;;;;;;;ACpRhB,MAAa,0BAA0B,MAAM;CAC3C,MAAM;CACN,QAAQ,EAAE,OAAO,EACf,aAAa,EAAE,OAAO,EACpB,aAAa,6CACd,CAAC,EACH,CAAC;CACF,SAAS,EACP,aAAa,gCACd;CACF,CAAC;AAcF,IAAa,2BAAb,MAAqE;CACnE,AAAmB,SAAS,QAAQ,OAAO;CAC3C,AAAmB,MAAM,SAAS;CAClC,AAAmB,eAAe,QAAQ,aAAa;CACvD,AAAmB,qBAAqB,QAAQ,mBAAmB;CACnE,AAAmB,UAAU,KAAK,wBAAwB;CAE1D,IAAc,cAAsB;AAClC,SAAO,KAAK,QAAQ;;CAGtB,AAAmB,cAAc,MAAM;EACrC,IAAI;EACJ,SAAS,YAAY;AACnB,OACE,KAAK,OAAO,QAAQ,IACpB,KAAK,gBAAgB,wBAAwB,QAAQ,QAAQ,YAE7D,MAAK,OAAO,MAAM,IAAI,yBAAyB,EAC7C,aAAa,KAAK,QAAQ,EAAE,eAAe,KAAK,KAAK,GAAG,EACzD,CAAC;;EAGP,CAAC;CAEF,AAAmB,UAAU,MAAM;EACjC,IAAI;EACJ,SAAS,YAAY;AACnB,OAAI;AACF,UAAM,MAAM,KAAK,aAAa,EAAE,WAAW,MAAM,CAAC;WAC5C;AAER,QAAK,MAAM,UAAU,KAAK,OAAO,WAAW,QAAQ,EAAE;AACpD,QAAI,OAAO,aAAa,KACtB;AAGF,UAAM,MAAM,KAAK,KAAK,aAAa,OAAO,KAAK,EAAE,EAC/C,WAAW,MACZ,CAAC;AAEF,SAAK,IAAI,MAAM,WAAW,OAAO,KAAK,OAAO,KAAK,YAAY,KAAK;;;EAGxE,CAAC;CAEF,MAAa,OACX,YACA,MACA,QACiB;AACjB,aAAW,KAAK,SAAS,KAAK,KAAK;AAEnC,OAAK,IAAI,MAAM,qBAAqB,aAAa;AAEjD,QAAM,KAAK,mBAAmB,UAC5B,KAAK,KAAK,YAAY,OAAO,EAC7B,KACD;AAED,SAAO;;CAGT,MAAa,SAAS,YAAoB,QAAmC;EAC3E,MAAM,WAAW,KAAK,KAAK,YAAY,OAAO;AAE9C,MAAI;GACF,MAAM,QAAQ,MAAM,KAAK,SAAS;GAClC,MAAM,WAAW,KAAK,aAAa,eAAe,OAAO;AAEzD,UAAO,KAAK,mBAAmB,WAAW;IACxC,QAAQ,iBAAiB,SAAS;IAClC,MAAM;IACN,MAAM;IACN,MAAM,MAAM;IACb,CAAC;WACK,OAAO;AACd,OAAI,KAAK,eAAe,MAAM,CAC5B,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAElE,SAAM,IAAI,YAAY,0BAA0B,EAAE,OAAO,OAAO,CAAC;;;CAIrE,MAAa,OAAO,YAAoB,QAAkC;AACxE,MAAI;AACF,SAAM,KAAK,KAAK,KAAK,YAAY,OAAO,CAAC;AACzC,UAAO;WACA,OAAO;AACd,OAAI,KAAK,eAAe,MAAM,CAC5B,QAAO;AAET,SAAM,IAAI,YAAY,iCAAiC,EAAE,OAAO,OAAO,CAAC;;;CAI5E,MAAa,OAAO,YAAoB,QAA+B;AACrE,MAAI;AACF,UAAO,MAAM,OAAO,KAAK,KAAK,YAAY,OAAO,CAAC;WAC3C,OAAO;AACd,OAAI,KAAK,eAAe,MAAM,CAC5B,OAAM,IAAI,kBAAkB,gBAAgB,OAAO,aAAa;AAElE,SAAM,IAAI,YAAY,uBAAuB,EAAE,OAAO,OAAO,CAAC;;;CAIlE,AAAU,KAAK,QAAgB,
QAAmC;AAChE,SAAO,KAAK,KAAK,KAAK,QAAQ,OAAO,CAAC;;CAGxC,AAAU,SAAS,UAA0B;EAC3C,MAAM,MAAM,KAAK,aAAa,yBAAyB,SAAS;AAChE,SAAO,GAAG,YAAY,CAAC,GAAG;;CAG5B,AAAU,KAAK,QAAgB,SAAS,IAAY;AAClD,SAAO,KAAK,KAAK,aAAa,QAAQ,OAAO;;CAG/C,AAAU,eAAe,OAAyB;AAChD,SAAO,iBAAiB,SAAS,UAAU,SAAS,MAAM,SAAS;;;;;;;;;;;;;;;;;ACtHvE,MAAa,eAAe,QAAQ;CAClC,MAAM;CACN,YAAY,CAAC,QAAQ;CACrB,UAAU;EACR;EACA;EACA;EACD;CACD,WAAW,WAAW;AACpB,SAAO,KAAK;GACV,UAAU;GACV,SAAS;GACT,KACE,OAAO,QAAQ,IAAI,OAAO,cAAc,GACpC,4BACA;GACP,CAAC;;CAEL,CAAC"}
@@ -2011,7 +2011,7 @@ function createSuiteCollector(name, factory = () => {}, mode, each, suiteOptions
2011
2011
  let suite$1;
2012
2012
  initSuite(true);
2013
2013
  const task = function(name$1 = "", options = {}) {
2014
- var _collectorContext$cur, _collectorContext$cur2;
2014
+ var _collectorContext$cur, _collectorContext$cur2, _collectorContext$cur3;
2015
2015
  const timeout = (options === null || options === void 0 ? void 0 : options.timeout) ?? runner.config.testTimeout;
2016
2016
  const currentSuite = (_collectorContext$cur = collectorContext.currentSuite) === null || _collectorContext$cur === void 0 ? void 0 : _collectorContext$cur.suite;
2017
2017
  const task$1 = {
@@ -2024,7 +2024,7 @@ function createSuiteCollector(name, factory = () => {}, mode, each, suiteOptions
2024
2024
  fails: options.fails,
2025
2025
  context: void 0,
2026
2026
  type: "test",
2027
- file: void 0,
2027
+ file: (currentSuite === null || currentSuite === void 0 ? void 0 : currentSuite.file) ?? ((_collectorContext$cur3 = collectorContext.currentSuite) === null || _collectorContext$cur3 === void 0 ? void 0 : _collectorContext$cur3.file),
2028
2028
  timeout,
2029
2029
  retry: options.retry ?? runner.config.retry,
2030
2030
  repeats: options.repeats,
@@ -2096,19 +2096,19 @@ function createSuiteCollector(name, factory = () => {}, mode, each, suiteOptions
2096
2096
  getHooks(suite$1)[name$1].push(...fn$1);
2097
2097
  }
2098
2098
  function initSuite(includeLocation) {
2099
- var _collectorContext$cur3, _collectorContext$cur4;
2099
+ var _collectorContext$cur4, _collectorContext$cur5, _collectorContext$cur6;
2100
2100
  if (typeof suiteOptions === "number") suiteOptions = { timeout: suiteOptions };
2101
- const currentSuite = (_collectorContext$cur3 = collectorContext.currentSuite) === null || _collectorContext$cur3 === void 0 ? void 0 : _collectorContext$cur3.suite;
2101
+ const currentSuite = (_collectorContext$cur4 = collectorContext.currentSuite) === null || _collectorContext$cur4 === void 0 ? void 0 : _collectorContext$cur4.suite;
2102
2102
  suite$1 = {
2103
2103
  id: "",
2104
2104
  type: "suite",
2105
2105
  name,
2106
- fullName: createTaskName([(currentSuite === null || currentSuite === void 0 ? void 0 : currentSuite.fullName) ?? ((_collectorContext$cur4 = collectorContext.currentSuite) === null || _collectorContext$cur4 === void 0 || (_collectorContext$cur4 = _collectorContext$cur4.file) === null || _collectorContext$cur4 === void 0 ? void 0 : _collectorContext$cur4.fullName), name]),
2106
+ fullName: createTaskName([(currentSuite === null || currentSuite === void 0 ? void 0 : currentSuite.fullName) ?? ((_collectorContext$cur5 = collectorContext.currentSuite) === null || _collectorContext$cur5 === void 0 || (_collectorContext$cur5 = _collectorContext$cur5.file) === null || _collectorContext$cur5 === void 0 ? void 0 : _collectorContext$cur5.fullName), name]),
2107
2107
  fullTestName: createTaskName([currentSuite === null || currentSuite === void 0 ? void 0 : currentSuite.fullTestName, name]),
2108
2108
  suite: currentSuite,
2109
2109
  mode,
2110
2110
  each,
2111
- file: void 0,
2111
+ file: (currentSuite === null || currentSuite === void 0 ? void 0 : currentSuite.file) ?? ((_collectorContext$cur6 = collectorContext.currentSuite) === null || _collectorContext$cur6 === void 0 ? void 0 : _collectorContext$cur6.file),
2112
2112
  shuffle: suiteOptions === null || suiteOptions === void 0 ? void 0 : suiteOptions.shuffle,
2113
2113
  tasks: [],
2114
2114
  meta: Object.create(null),
@@ -2136,11 +2136,7 @@ function createSuiteCollector(name, factory = () => {}, mode, each, suiteOptions
2136
2136
  if (factory) await runWithSuite(collector, () => factory(test$7));
2137
2137
  const allChildren = [];
2138
2138
  for (const i of tasks) allChildren.push(i.type === "collector" ? await i.collect(file) : i);
2139
- suite$1.file = file;
2140
2139
  suite$1.tasks = allChildren;
2141
- allChildren.forEach((task$1) => {
2142
- task$1.file = file;
2143
- });
2144
2140
  return suite$1;
2145
2141
  }
2146
2142
  collectTask(collector);
@@ -2361,7 +2357,7 @@ async function runWithSuite(suite$1, fn$1) {
2361
2357
  }
2362
2358
  function withTimeout(fn$1, timeout, isHook = false, stackTraceError, onTimeout) {
2363
2359
  if (timeout <= 0 || timeout === Number.POSITIVE_INFINITY) return fn$1;
2364
- const { setTimeout: setTimeout$1, clearTimeout: clearTimeout$1 } = getSafeTimers();
2360
+ const { setTimeout: setTimeout$2, clearTimeout: clearTimeout$1 } = getSafeTimers();
2365
2361
  return (function runWithTimeout(...args) {
2366
2362
  const startTime = now$2();
2367
2363
  const runner$1 = getRunner();
@@ -2369,7 +2365,7 @@ function withTimeout(fn$1, timeout, isHook = false, stackTraceError, onTimeout)
2369
2365
  runner$1._currentTaskTimeout = timeout;
2370
2366
  return new Promise((resolve_, reject_) => {
2371
2367
  var _timer$unref;
2372
- const timer = setTimeout$1(() => {
2368
+ const timer = setTimeout$2(() => {
2373
2369
  clearTimeout$1(timer);
2374
2370
  rejectTimeoutError();
2375
2371
  }, timeout);
@@ -2478,7 +2474,7 @@ function getFileContext(file) {
2478
2474
  const now$1 = globalThis.performance ? globalThis.performance.now.bind(globalThis.performance) : Date.now;
2479
2475
  const now = globalThis.performance ? globalThis.performance.now.bind(globalThis.performance) : Date.now;
2480
2476
  const unixNow = Date.now;
2481
- const { clearTimeout, setTimeout } = getSafeTimers();
2477
+ const { clearTimeout, setTimeout: setTimeout$1 } = getSafeTimers();
2482
2478
  const packs = /* @__PURE__ */ new Map();
2483
2479
  const eventsPacks = [];
2484
2480
  const pendingTasksUpdates = [];
@@ -2516,7 +2512,7 @@ function throttle(fn$1, ms) {
2516
2512
  pendingCall = void 0;
2517
2513
  return fn$1.apply(this, args);
2518
2514
  }
2519
- pendingCall ?? (pendingCall = setTimeout(() => call.bind(this)(...args), ms));
2515
+ pendingCall ?? (pendingCall = setTimeout$1(() => call.bind(this)(...args), ms));
2520
2516
  };
2521
2517
  }
2522
2518
  const sendTasksUpdateThrottled = throttle(sendTasksUpdate, 100);
@@ -2771,4 +2767,4 @@ var import_dist = require_dist();
2771
2767
 
2772
2768
  //#endregion
2773
2769
  export { };
2774
- //# sourceMappingURL=dist-Dl9Vl7Ur.js.map
2770
+ //# sourceMappingURL=dist-lGnqsKpu.js.map