alepha 0.13.0 → 0.13.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/api-files/index.d.ts +28 -91
- package/dist/api-files/index.js +10 -755
- package/dist/api-files/index.js.map +1 -1
- package/dist/api-jobs/index.d.ts +67 -67
- package/dist/api-jobs/index.js +13 -13
- package/dist/api-jobs/index.js.map +1 -1
- package/dist/api-notifications/index.d.ts +129 -146
- package/dist/api-notifications/index.js +17 -39
- package/dist/api-notifications/index.js.map +1 -1
- package/dist/api-parameters/index.d.ts +21 -22
- package/dist/api-parameters/index.js +22 -22
- package/dist/api-parameters/index.js.map +1 -1
- package/dist/api-users/index.d.ts +224 -2001
- package/dist/api-users/index.js +914 -4787
- package/dist/api-users/index.js.map +1 -1
- package/dist/api-verifications/index.d.ts +96 -96
- package/dist/batch/index.d.ts +13 -13
- package/dist/batch/index.js +8 -8
- package/dist/batch/index.js.map +1 -1
- package/dist/bucket/index.d.ts +14 -14
- package/dist/bucket/index.js +12 -12
- package/dist/bucket/index.js.map +1 -1
- package/dist/cache/index.d.ts +11 -11
- package/dist/cache/index.js +9 -9
- package/dist/cache/index.js.map +1 -1
- package/dist/cli/{dist-Sz2EXvQX.cjs → dist-Dl9Vl7Ur.js} +17 -13
- package/dist/cli/{dist-BBPjuQ56.js.map → dist-Dl9Vl7Ur.js.map} +1 -1
- package/dist/cli/index.d.ts +31 -37
- package/dist/cli/index.js +152 -83
- package/dist/cli/index.js.map +1 -1
- package/dist/command/index.d.ts +19 -19
- package/dist/command/index.js +25 -25
- package/dist/command/index.js.map +1 -1
- package/dist/core/index.browser.js +218 -218
- package/dist/core/index.browser.js.map +1 -1
- package/dist/core/index.d.ts +232 -232
- package/dist/core/index.js +218 -218
- package/dist/core/index.js.map +1 -1
- package/dist/core/{index.cjs → index.native.js} +304 -455
- package/dist/core/index.native.js.map +1 -0
- package/dist/datetime/index.d.ts +9 -9
- package/dist/datetime/index.js +7 -7
- package/dist/datetime/index.js.map +1 -1
- package/dist/email/index.d.ts +16 -16
- package/dist/email/index.js +80 -82
- package/dist/email/index.js.map +1 -1
- package/dist/file/index.js +1 -1
- package/dist/file/index.js.map +1 -1
- package/dist/lock/index.d.ts +9 -9
- package/dist/lock/index.js +8 -8
- package/dist/lock/index.js.map +1 -1
- package/dist/lock-redis/index.js +3 -66
- package/dist/lock-redis/index.js.map +1 -1
- package/dist/logger/index.d.ts +5 -5
- package/dist/logger/index.js +8 -8
- package/dist/logger/index.js.map +1 -1
- package/dist/orm/index.browser.js +114 -114
- package/dist/orm/index.browser.js.map +1 -1
- package/dist/orm/index.d.ts +219 -219
- package/dist/orm/index.js +46 -46
- package/dist/orm/index.js.map +1 -1
- package/dist/queue/index.d.ts +25 -25
- package/dist/queue/index.js +20 -20
- package/dist/queue/index.js.map +1 -1
- package/dist/queue-redis/index.d.ts +2 -2
- package/dist/redis/index.d.ts +10 -10
- package/dist/retry/index.d.ts +20 -20
- package/dist/retry/index.js +9 -9
- package/dist/retry/index.js.map +1 -1
- package/dist/scheduler/index.d.ts +12 -12
- package/dist/scheduler/index.js +9 -9
- package/dist/scheduler/index.js.map +1 -1
- package/dist/security/index.d.ts +53 -53
- package/dist/security/index.js +32 -32
- package/dist/security/index.js.map +1 -1
- package/dist/server/index.browser.js +1 -1
- package/dist/server/index.browser.js.map +1 -1
- package/dist/server/index.d.ts +101 -101
- package/dist/server/index.js +17 -17
- package/dist/server/index.js.map +1 -1
- package/dist/server-auth/index.browser.js +4 -982
- package/dist/server-auth/index.browser.js.map +1 -1
- package/dist/server-auth/index.d.ts +204 -785
- package/dist/server-auth/index.js +47 -1239
- package/dist/server-auth/index.js.map +1 -1
- package/dist/server-cache/index.d.ts +10 -10
- package/dist/server-cache/index.js +2 -2
- package/dist/server-cache/index.js.map +1 -1
- package/dist/server-compress/index.d.ts +4 -4
- package/dist/server-compress/index.js +1 -1
- package/dist/server-compress/index.js.map +1 -1
- package/dist/server-cookies/index.browser.js +8 -8
- package/dist/server-cookies/index.browser.js.map +1 -1
- package/dist/server-cookies/index.d.ts +17 -17
- package/dist/server-cookies/index.js +10 -10
- package/dist/server-cookies/index.js.map +1 -1
- package/dist/server-cors/index.d.ts +17 -17
- package/dist/server-cors/index.js +9 -9
- package/dist/server-cors/index.js.map +1 -1
- package/dist/server-health/index.d.ts +2 -2
- package/dist/server-helmet/index.d.ts +1 -1
- package/dist/server-links/index.browser.js +12 -12
- package/dist/server-links/index.browser.js.map +1 -1
- package/dist/server-links/index.d.ts +59 -251
- package/dist/server-links/index.js +23 -502
- package/dist/server-links/index.js.map +1 -1
- package/dist/server-metrics/index.d.ts +4 -4
- package/dist/server-metrics/index.js +170 -174
- package/dist/server-metrics/index.js.map +1 -1
- package/dist/server-multipart/index.d.ts +2 -2
- package/dist/server-proxy/index.d.ts +12 -12
- package/dist/server-proxy/index.js +10 -10
- package/dist/server-proxy/index.js.map +1 -1
- package/dist/server-rate-limit/index.d.ts +22 -22
- package/dist/server-rate-limit/index.js +12 -12
- package/dist/server-rate-limit/index.js.map +1 -1
- package/dist/server-security/index.d.ts +24 -24
- package/dist/server-security/index.js +15 -15
- package/dist/server-security/index.js.map +1 -1
- package/dist/server-static/index.d.ts +14 -14
- package/dist/server-static/index.js +8 -8
- package/dist/server-static/index.js.map +1 -1
- package/dist/server-swagger/index.d.ts +25 -184
- package/dist/server-swagger/index.js +21 -724
- package/dist/server-swagger/index.js.map +1 -1
- package/dist/sms/index.d.ts +14 -14
- package/dist/sms/index.js +9 -9
- package/dist/sms/index.js.map +1 -1
- package/dist/thread/index.d.ts +11 -11
- package/dist/thread/index.js +17 -17
- package/dist/thread/index.js.map +1 -1
- package/dist/topic/index.d.ts +26 -26
- package/dist/topic/index.js +16 -16
- package/dist/topic/index.js.map +1 -1
- package/dist/topic-redis/index.d.ts +1 -1
- package/dist/vite/index.d.ts +3 -3
- package/dist/vite/index.js +12 -13
- package/dist/vite/index.js.map +1 -1
- package/dist/websocket/index.browser.js +11 -11
- package/dist/websocket/index.browser.js.map +1 -1
- package/dist/websocket/index.d.ts +51 -51
- package/dist/websocket/index.js +13 -13
- package/dist/websocket/index.js.map +1 -1
- package/package.json +62 -52
- package/src/api-files/services/FileService.ts +5 -7
- package/src/api-jobs/index.ts +1 -1
- package/src/api-jobs/{descriptors → primitives}/$job.ts +8 -8
- package/src/api-jobs/providers/JobProvider.ts +9 -9
- package/src/api-jobs/services/JobService.ts +5 -5
- package/src/api-notifications/index.ts +5 -15
- package/src/api-notifications/{descriptors → primitives}/$notification.ts +10 -10
- package/src/api-notifications/services/NotificationSenderService.ts +3 -3
- package/src/api-parameters/index.ts +1 -1
- package/src/api-parameters/{descriptors → primitives}/$config.ts +7 -12
- package/src/api-users/index.ts +1 -1
- package/src/api-users/{descriptors → primitives}/$userRealm.ts +8 -8
- package/src/api-users/providers/UserRealmProvider.ts +1 -1
- package/src/batch/index.ts +3 -3
- package/src/batch/{descriptors → primitives}/$batch.ts +13 -16
- package/src/bucket/index.ts +8 -8
- package/src/bucket/{descriptors → primitives}/$bucket.ts +8 -8
- package/src/bucket/providers/LocalFileStorageProvider.ts +3 -3
- package/src/cache/index.ts +4 -4
- package/src/cache/{descriptors → primitives}/$cache.ts +15 -15
- package/src/cli/apps/AlephaPackageBuilderCli.ts +30 -3
- package/src/cli/assets/appRouterTs.ts +9 -0
- package/src/cli/assets/indexHtml.ts +2 -1
- package/src/cli/assets/mainBrowserTs.ts +10 -0
- package/src/cli/commands/CoreCommands.ts +6 -5
- package/src/cli/commands/DrizzleCommands.ts +69 -61
- package/src/cli/commands/VerifyCommands.ts +2 -2
- package/src/cli/commands/ViteCommands.ts +6 -1
- package/src/cli/services/ProjectUtils.ts +78 -41
- package/src/command/index.ts +5 -5
- package/src/command/{descriptors → primitives}/$command.ts +9 -12
- package/src/command/providers/CliProvider.ts +10 -10
- package/src/core/Alepha.ts +30 -33
- package/src/core/constants/KIND.ts +1 -1
- package/src/core/constants/OPTIONS.ts +1 -1
- package/src/core/helpers/{descriptor.ts → primitive.ts} +18 -18
- package/src/core/helpers/ref.ts +1 -1
- package/src/core/index.shared.ts +8 -8
- package/src/core/{descriptors → primitives}/$context.ts +5 -5
- package/src/core/{descriptors → primitives}/$hook.ts +4 -4
- package/src/core/{descriptors → primitives}/$inject.ts +2 -2
- package/src/core/{descriptors → primitives}/$module.ts +9 -9
- package/src/core/{descriptors → primitives}/$use.ts +2 -2
- package/src/core/providers/CodecManager.ts +1 -1
- package/src/core/providers/JsonSchemaCodec.ts +1 -1
- package/src/core/providers/StateManager.ts +2 -2
- package/src/datetime/index.ts +3 -3
- package/src/datetime/{descriptors → primitives}/$interval.ts +6 -6
- package/src/email/index.ts +4 -4
- package/src/email/{descriptors → primitives}/$email.ts +8 -8
- package/src/file/index.ts +1 -1
- package/src/lock/index.ts +3 -3
- package/src/lock/{descriptors → primitives}/$lock.ts +10 -10
- package/src/logger/index.ts +8 -8
- package/src/logger/{descriptors → primitives}/$logger.ts +2 -2
- package/src/logger/services/Logger.ts +1 -1
- package/src/orm/constants/PG_SYMBOLS.ts +2 -2
- package/src/orm/index.browser.ts +2 -2
- package/src/orm/index.ts +8 -8
- package/src/orm/{descriptors → primitives}/$entity.ts +11 -11
- package/src/orm/{descriptors → primitives}/$repository.ts +2 -2
- package/src/orm/{descriptors → primitives}/$sequence.ts +8 -8
- package/src/orm/{descriptors → primitives}/$transaction.ts +4 -4
- package/src/orm/providers/DrizzleKitProvider.ts +1 -1
- package/src/orm/providers/PostgresTypeProvider.ts +3 -3
- package/src/orm/providers/RepositoryProvider.ts +4 -4
- package/src/orm/providers/drivers/DatabaseProvider.ts +7 -7
- package/src/orm/services/ModelBuilder.ts +9 -9
- package/src/orm/services/PgRelationManager.ts +2 -2
- package/src/orm/services/PostgresModelBuilder.ts +5 -5
- package/src/orm/services/Repository.ts +7 -7
- package/src/orm/services/SqliteModelBuilder.ts +5 -5
- package/src/queue/index.ts +7 -7
- package/src/queue/{descriptors → primitives}/$consumer.ts +15 -15
- package/src/queue/{descriptors → primitives}/$queue.ts +12 -12
- package/src/queue/providers/WorkerProvider.ts +7 -7
- package/src/retry/index.ts +3 -3
- package/src/retry/{descriptors → primitives}/$retry.ts +19 -17
- package/src/scheduler/index.ts +3 -3
- package/src/scheduler/{descriptors → primitives}/$scheduler.ts +9 -9
- package/src/scheduler/providers/CronProvider.ts +1 -1
- package/src/security/index.ts +9 -9
- package/src/security/{descriptors → primitives}/$permission.ts +7 -7
- package/src/security/{descriptors → primitives}/$realm.ts +6 -12
- package/src/security/{descriptors → primitives}/$role.ts +12 -12
- package/src/security/{descriptors → primitives}/$serviceAccount.ts +8 -8
- package/src/server/index.browser.ts +1 -1
- package/src/server/index.ts +14 -14
- package/src/server/{descriptors → primitives}/$action.ts +13 -13
- package/src/server/{descriptors → primitives}/$route.ts +9 -9
- package/src/server/providers/NodeHttpServerProvider.ts +2 -2
- package/src/server/services/HttpClient.ts +1 -1
- package/src/server-auth/index.browser.ts +1 -1
- package/src/server-auth/index.ts +6 -6
- package/src/server-auth/{descriptors → primitives}/$auth.ts +10 -10
- package/src/server-auth/{descriptors → primitives}/$authCredentials.ts +4 -4
- package/src/server-auth/{descriptors → primitives}/$authGithub.ts +4 -4
- package/src/server-auth/{descriptors → primitives}/$authGoogle.ts +4 -4
- package/src/server-auth/providers/ServerAuthProvider.ts +4 -4
- package/src/server-cache/providers/ServerCacheProvider.ts +7 -7
- package/src/server-compress/providers/ServerCompressProvider.ts +3 -3
- package/src/server-cookies/index.browser.ts +2 -2
- package/src/server-cookies/index.ts +5 -5
- package/src/server-cookies/{descriptors → primitives}/$cookie.browser.ts +12 -12
- package/src/server-cookies/{descriptors → primitives}/$cookie.ts +13 -13
- package/src/server-cookies/providers/ServerCookiesProvider.ts +4 -4
- package/src/server-cookies/services/CookieParser.ts +1 -1
- package/src/server-cors/index.ts +3 -3
- package/src/server-cors/{descriptors → primitives}/$cors.ts +11 -13
- package/src/server-cors/providers/ServerCorsProvider.ts +5 -5
- package/src/server-links/index.browser.ts +5 -5
- package/src/server-links/index.ts +9 -9
- package/src/server-links/{descriptors → primitives}/$remote.ts +11 -11
- package/src/server-links/providers/LinkProvider.ts +7 -7
- package/src/server-links/providers/{RemoteDescriptorProvider.ts → RemotePrimitiveProvider.ts} +6 -6
- package/src/server-links/providers/ServerLinksProvider.ts +3 -3
- package/src/server-proxy/index.ts +3 -3
- package/src/server-proxy/{descriptors → primitives}/$proxy.ts +8 -8
- package/src/server-proxy/providers/ServerProxyProvider.ts +4 -4
- package/src/server-rate-limit/index.ts +6 -6
- package/src/server-rate-limit/{descriptors → primitives}/$rateLimit.ts +13 -13
- package/src/server-rate-limit/providers/ServerRateLimitProvider.ts +5 -5
- package/src/server-security/index.ts +3 -3
- package/src/server-security/{descriptors → primitives}/$basicAuth.ts +13 -13
- package/src/server-security/providers/ServerBasicAuthProvider.ts +5 -5
- package/src/server-security/providers/ServerSecurityProvider.ts +4 -4
- package/src/server-static/index.ts +3 -3
- package/src/server-static/{descriptors → primitives}/$serve.ts +8 -10
- package/src/server-static/providers/ServerStaticProvider.ts +6 -6
- package/src/server-swagger/index.ts +5 -5
- package/src/server-swagger/{descriptors → primitives}/$swagger.ts +9 -9
- package/src/server-swagger/providers/ServerSwaggerProvider.ts +11 -10
- package/src/sms/index.ts +4 -4
- package/src/sms/{descriptors → primitives}/$sms.ts +8 -8
- package/src/thread/index.ts +3 -3
- package/src/thread/{descriptors → primitives}/$thread.ts +13 -13
- package/src/thread/providers/ThreadProvider.ts +7 -9
- package/src/topic/index.ts +5 -5
- package/src/topic/{descriptors → primitives}/$subscriber.ts +14 -14
- package/src/topic/{descriptors → primitives}/$topic.ts +10 -10
- package/src/topic/providers/TopicProvider.ts +4 -4
- package/src/vite/helpers/boot.ts +3 -3
- package/src/vite/tasks/copyAssets.ts +1 -1
- package/src/vite/tasks/generateSitemap.ts +3 -3
- package/src/vite/tasks/prerenderPages.ts +2 -2
- package/src/vite/tasks/runAlepha.ts +2 -2
- package/src/websocket/index.browser.ts +3 -3
- package/src/websocket/index.shared.ts +2 -2
- package/src/websocket/index.ts +4 -4
- package/src/websocket/interfaces/WebSocketInterfaces.ts +3 -3
- package/src/websocket/{descriptors → primitives}/$channel.ts +10 -10
- package/src/websocket/{descriptors → primitives}/$websocket.ts +8 -8
- package/src/websocket/providers/NodeWebSocketServerProvider.ts +7 -7
- package/src/websocket/providers/WebSocketServerProvider.ts +3 -3
- package/src/websocket/services/WebSocketClient.ts +5 -5
- package/dist/api-files/index.cjs +0 -1293
- package/dist/api-files/index.cjs.map +0 -1
- package/dist/api-files/index.d.cts +0 -829
- package/dist/api-jobs/index.cjs +0 -274
- package/dist/api-jobs/index.cjs.map +0 -1
- package/dist/api-jobs/index.d.cts +0 -654
- package/dist/api-notifications/index.cjs +0 -380
- package/dist/api-notifications/index.cjs.map +0 -1
- package/dist/api-notifications/index.d.cts +0 -289
- package/dist/api-parameters/index.cjs +0 -66
- package/dist/api-parameters/index.cjs.map +0 -1
- package/dist/api-parameters/index.d.cts +0 -84
- package/dist/api-users/index.cjs +0 -6009
- package/dist/api-users/index.cjs.map +0 -1
- package/dist/api-users/index.d.cts +0 -4740
- package/dist/api-verifications/index.cjs +0 -407
- package/dist/api-verifications/index.cjs.map +0 -1
- package/dist/api-verifications/index.d.cts +0 -207
- package/dist/batch/index.cjs +0 -408
- package/dist/batch/index.cjs.map +0 -1
- package/dist/batch/index.d.cts +0 -330
- package/dist/bin/index.cjs +0 -17
- package/dist/bin/index.cjs.map +0 -1
- package/dist/bin/index.d.cts +0 -1
- package/dist/bucket/index.cjs +0 -303
- package/dist/bucket/index.cjs.map +0 -1
- package/dist/bucket/index.d.cts +0 -355
- package/dist/cache/index.cjs +0 -241
- package/dist/cache/index.cjs.map +0 -1
- package/dist/cache/index.d.cts +0 -202
- package/dist/cache-redis/index.cjs +0 -84
- package/dist/cache-redis/index.cjs.map +0 -1
- package/dist/cache-redis/index.d.cts +0 -40
- package/dist/cli/chunk-DSlc6foC.cjs +0 -43
- package/dist/cli/dist-BBPjuQ56.js +0 -2778
- package/dist/cli/dist-Sz2EXvQX.cjs.map +0 -1
- package/dist/cli/index.cjs +0 -1241
- package/dist/cli/index.cjs.map +0 -1
- package/dist/cli/index.d.cts +0 -422
- package/dist/command/index.cjs +0 -693
- package/dist/command/index.cjs.map +0 -1
- package/dist/command/index.d.cts +0 -340
- package/dist/core/index.cjs.map +0 -1
- package/dist/core/index.d.cts +0 -1927
- package/dist/datetime/index.cjs +0 -318
- package/dist/datetime/index.cjs.map +0 -1
- package/dist/datetime/index.d.cts +0 -145
- package/dist/email/index.cjs +0 -10874
- package/dist/email/index.cjs.map +0 -1
- package/dist/email/index.d.cts +0 -186
- package/dist/fake/index.cjs +0 -34641
- package/dist/fake/index.cjs.map +0 -1
- package/dist/fake/index.d.cts +0 -74
- package/dist/file/index.cjs +0 -1212
- package/dist/file/index.cjs.map +0 -1
- package/dist/file/index.d.cts +0 -698
- package/dist/lock/index.cjs +0 -226
- package/dist/lock/index.cjs.map +0 -1
- package/dist/lock/index.d.cts +0 -361
- package/dist/lock-redis/index.cjs +0 -113
- package/dist/lock-redis/index.cjs.map +0 -1
- package/dist/lock-redis/index.d.cts +0 -24
- package/dist/logger/index.cjs +0 -521
- package/dist/logger/index.cjs.map +0 -1
- package/dist/logger/index.d.cts +0 -281
- package/dist/orm/index.cjs +0 -2986
- package/dist/orm/index.cjs.map +0 -1
- package/dist/orm/index.d.cts +0 -2213
- package/dist/queue/index.cjs +0 -1044
- package/dist/queue/index.cjs.map +0 -1
- package/dist/queue/index.d.cts +0 -1265
- package/dist/queue-redis/index.cjs +0 -873
- package/dist/queue-redis/index.cjs.map +0 -1
- package/dist/queue-redis/index.d.cts +0 -82
- package/dist/redis/index.cjs +0 -153
- package/dist/redis/index.cjs.map +0 -1
- package/dist/redis/index.d.cts +0 -82
- package/dist/retry/index.cjs +0 -146
- package/dist/retry/index.cjs.map +0 -1
- package/dist/retry/index.d.cts +0 -172
- package/dist/router/index.cjs +0 -111
- package/dist/router/index.cjs.map +0 -1
- package/dist/router/index.d.cts +0 -46
- package/dist/scheduler/index.cjs +0 -576
- package/dist/scheduler/index.cjs.map +0 -1
- package/dist/scheduler/index.d.cts +0 -145
- package/dist/security/index.cjs +0 -2402
- package/dist/security/index.cjs.map +0 -1
- package/dist/security/index.d.cts +0 -598
- package/dist/server/index.cjs +0 -1680
- package/dist/server/index.cjs.map +0 -1
- package/dist/server/index.d.cts +0 -810
- package/dist/server-auth/index.cjs +0 -3146
- package/dist/server-auth/index.cjs.map +0 -1
- package/dist/server-auth/index.d.cts +0 -1164
- package/dist/server-cache/index.cjs +0 -252
- package/dist/server-cache/index.cjs.map +0 -1
- package/dist/server-cache/index.d.cts +0 -164
- package/dist/server-compress/index.cjs +0 -141
- package/dist/server-compress/index.cjs.map +0 -1
- package/dist/server-compress/index.d.cts +0 -38
- package/dist/server-cookies/index.cjs +0 -234
- package/dist/server-cookies/index.cjs.map +0 -1
- package/dist/server-cookies/index.d.cts +0 -144
- package/dist/server-cors/index.cjs +0 -201
- package/dist/server-cors/index.cjs.map +0 -1
- package/dist/server-cors/index.d.cts +0 -140
- package/dist/server-health/index.cjs +0 -62
- package/dist/server-health/index.cjs.map +0 -1
- package/dist/server-health/index.d.cts +0 -58
- package/dist/server-helmet/index.cjs +0 -131
- package/dist/server-helmet/index.cjs.map +0 -1
- package/dist/server-helmet/index.d.cts +0 -97
- package/dist/server-links/index.cjs +0 -992
- package/dist/server-links/index.cjs.map +0 -1
- package/dist/server-links/index.d.cts +0 -513
- package/dist/server-metrics/index.cjs +0 -4535
- package/dist/server-metrics/index.cjs.map +0 -1
- package/dist/server-metrics/index.d.cts +0 -35
- package/dist/server-multipart/index.cjs +0 -237
- package/dist/server-multipart/index.cjs.map +0 -1
- package/dist/server-multipart/index.d.cts +0 -50
- package/dist/server-proxy/index.cjs +0 -186
- package/dist/server-proxy/index.cjs.map +0 -1
- package/dist/server-proxy/index.d.cts +0 -234
- package/dist/server-rate-limit/index.cjs +0 -241
- package/dist/server-rate-limit/index.cjs.map +0 -1
- package/dist/server-rate-limit/index.d.cts +0 -183
- package/dist/server-security/index.cjs +0 -316
- package/dist/server-security/index.cjs.map +0 -1
- package/dist/server-security/index.d.cts +0 -173
- package/dist/server-static/index.cjs +0 -170
- package/dist/server-static/index.cjs.map +0 -1
- package/dist/server-static/index.d.cts +0 -121
- package/dist/server-swagger/index.cjs +0 -1021
- package/dist/server-swagger/index.cjs.map +0 -1
- package/dist/server-swagger/index.d.cts +0 -382
- package/dist/sms/index.cjs +0 -221
- package/dist/sms/index.cjs.map +0 -1
- package/dist/sms/index.d.cts +0 -130
- package/dist/thread/index.cjs +0 -350
- package/dist/thread/index.cjs.map +0 -1
- package/dist/thread/index.d.cts +0 -260
- package/dist/topic/index.cjs +0 -282
- package/dist/topic/index.cjs.map +0 -1
- package/dist/topic/index.d.cts +0 -523
- package/dist/topic-redis/index.cjs +0 -71
- package/dist/topic-redis/index.cjs.map +0 -1
- package/dist/topic-redis/index.d.cts +0 -42
- package/dist/vite/index.cjs +0 -1077
- package/dist/vite/index.cjs.map +0 -1
- package/dist/vite/index.d.cts +0 -542
- package/dist/websocket/index.cjs +0 -1117
- package/dist/websocket/index.cjs.map +0 -1
- package/dist/websocket/index.d.cts +0 -861
- package/src/api-notifications/providers/MemorySmsProvider.ts +0 -20
- package/src/api-notifications/providers/SmsProvider.ts +0 -8
- /package/src/core/{descriptors → primitives}/$atom.ts +0 -0
- /package/src/core/{descriptors → primitives}/$env.ts +0 -0
- /package/src/server-auth/{descriptors → primitives}/$authApple.ts +0 -0
- /package/src/server-links/{descriptors → primitives}/$client.ts +0 -0
package/dist/queue-redis/index.cjs
@@ -1,873 +0,0 @@
-let alepha = require("alepha");
-let alepha_queue = require("alepha/queue");
-let alepha_logger = require("alepha/logger");
-let alepha_redis = require("alepha/redis");
-
-//#region src/queue-redis/providers/RedisQueueProvider.ts
-const DEFAULT_MAX_ATTEMPTS = 1;
-const DEFAULT_LOCK_DURATION = 3e4;
-const DEFAULT_BACKOFF_DELAY = 1e3;
-const DEFAULT_BACKOFF_MAX_DELAY = 3e4;
-const envSchema = alepha.t.object({ REDIS_QUEUE_PREFIX: alepha.t.text({ default: "queue" }) });
-const ACQUIRE_JOB_SCRIPT = `
-local waitingKey = KEYS[1]
-local activeKey = KEYS[2]
-local jobKeyPrefix = KEYS[3]
-local workerId = ARGV[1]
-local now = tonumber(ARGV[2])
-local lockDuration = tonumber(ARGV[3])
-
--- Get highest priority job (lowest score)
-local jobs = redis.call('ZRANGE', waitingKey, 0, 0)
-if #jobs == 0 then
-return nil
-end
-
-local jobId = jobs[1]
-local jobKey = jobKeyPrefix .. ':' .. jobId
-
--- Remove from waiting (atomic check)
-local removed = redis.call('ZREM', waitingKey, jobId)
-if removed == 0 then
-return nil
-end
-
--- Get current job data
-local jobData = redis.call('HGETALL', jobKey)
-if #jobData == 0 then
-return nil
-end
-
--- Parse job data into table
-local job = {}
-for i = 1, #jobData, 2 do
-job[jobData[i]] = jobData[i + 1]
-end
-
--- Parse current state
-local state = cjson.decode(job['state'])
-local options = cjson.decode(job['options'])
-
--- Update state
-state['status'] = 'active'
-state['attempts'] = state['attempts'] + 1
-state['lockedBy'] = workerId
-state['lockedUntil'] = now + (options['lockDuration'] or lockDuration)
-state['processedAt'] = now
-
--- Save updated state
-redis.call('HSET', jobKey, 'state', cjson.encode(state))
-
--- Add to active set
-redis.call('SADD', activeKey, jobId)
-
--- Return job data
-return cjson.encode({
-id = job['id'],
-queue = job['queue'],
-payload = cjson.decode(job['payload']),
-options = options,
-state = state
-})
-`;
-const COMPLETE_JOB_SCRIPT = `
-local jobKey = KEYS[1]
-local activeKey = KEYS[2]
-local completedKey = KEYS[3]
-local jobId = ARGV[1]
-local now = tonumber(ARGV[2])
-local result = ARGV[3]
-
--- Get job data
-local jobData = redis.call('HGETALL', jobKey)
-if #jobData == 0 then
-return nil
-end
-
--- Parse job data
-local job = {}
-for i = 1, #jobData, 2 do
-job[jobData[i]] = jobData[i + 1]
-end
-
-local state = cjson.decode(job['state'])
-local options = cjson.decode(job['options'])
-local processedAt = state['processedAt'] or now
-
--- Remove from active
-redis.call('SREM', activeKey, jobId)
-
--- Update state
-state['status'] = 'completed'
-state['completedAt'] = now
-state['result'] = result ~= '' and cjson.decode(result) or nil
-state['lockedBy'] = nil
-state['lockedUntil'] = nil
-
-local removeOnComplete = options['removeOnComplete']
-
-if removeOnComplete == true then
--- Remove job immediately
-redis.call('DEL', jobKey)
-return cjson.encode({ removed = true, duration = now - processedAt })
-else
--- Update job state
-redis.call('HSET', jobKey, 'state', cjson.encode(state))
-
--- Add to completed list (newest first)
-redis.call('LPUSH', completedKey, jobId)
-
--- If removeOnComplete is a number, trim the list (0 means keep none)
-if type(removeOnComplete) == 'number' and removeOnComplete >= 0 then
--- Get jobs to remove
-local toRemove = redis.call('LRANGE', completedKey, removeOnComplete, -1)
-for _, oldJobId in ipairs(toRemove) do
-redis.call('DEL', jobKey:gsub(jobId, oldJobId))
-end
-redis.call('LTRIM', completedKey, 0, removeOnComplete - 1)
-end
-
-return cjson.encode({ removed = false, duration = now - processedAt })
-end
-`;
-const FAIL_JOB_SCRIPT = `
-local jobKey = KEYS[1]
-local activeKey = KEYS[2]
-local delayedKey = KEYS[3]
-local failedKey = KEYS[4]
-local jobId = ARGV[1]
-local now = tonumber(ARGV[2])
-local errorMsg = ARGV[3]
-local stackTrace = ARGV[4]
-local backoffDelay = tonumber(ARGV[5])
-
--- Get job data
-local jobData = redis.call('HGETALL', jobKey)
-if #jobData == 0 then
-return nil
-end
-
--- Parse job data
-local job = {}
-for i = 1, #jobData, 2 do
-job[jobData[i]] = jobData[i + 1]
-end
-
-local state = cjson.decode(job['state'])
-local options = cjson.decode(job['options'])
-
--- Remove from active
-redis.call('SREM', activeKey, jobId)
-
-local maxAttempts = options['maxAttempts'] or 1
-local hasMoreAttempts = state['attempts'] < maxAttempts
-
-if hasMoreAttempts then
--- Schedule for retry
-state['status'] = 'delayed'
-state['availableAt'] = now + backoffDelay
-state['error'] = errorMsg
-state['stackTrace'] = stackTrace ~= '' and stackTrace or nil
-state['lockedBy'] = nil
-state['lockedUntil'] = nil
-
-redis.call('HSET', jobKey, 'state', cjson.encode(state))
-redis.call('ZADD', delayedKey, now + backoffDelay, jobId)
-
-return cjson.encode({ status = 'retrying', delay = backoffDelay, attempt = state['attempts'] + 1 })
-else
--- Permanently failed
-state['status'] = 'failed'
-state['failedAt'] = now
-state['error'] = errorMsg
-state['stackTrace'] = stackTrace ~= '' and stackTrace or nil
-state['lockedBy'] = nil
-state['lockedUntil'] = nil
-
-local removeOnFail = options['removeOnFail']
-
-if removeOnFail == true then
-redis.call('DEL', jobKey)
-return cjson.encode({ status = 'failed', removed = true, attempts = state['attempts'] })
-else
-redis.call('HSET', jobKey, 'state', cjson.encode(state))
-redis.call('LPUSH', failedKey, jobId)
-
-if type(removeOnFail) == 'number' and removeOnFail >= 0 then
-local toRemove = redis.call('LRANGE', failedKey, removeOnFail, -1)
-for _, oldJobId in ipairs(toRemove) do
-redis.call('DEL', jobKey:gsub(jobId, oldJobId))
-end
-redis.call('LTRIM', failedKey, 0, removeOnFail - 1)
-end
-
-return cjson.encode({ status = 'failed', removed = false, attempts = state['attempts'] })
-end
-end
-`;
-/**
-* Redis-based queue provider with full job support.
-*
-* Features:
-* - Atomic job acquisition using Lua scripts
-* - Blocking wait using Redis BZPOPMIN (no polling)
-* - Event emission for job lifecycle
-* - removeOnComplete/removeOnFail support
-*
-* Uses the following Redis data structures:
-* - HASH `{prefix}:job:{queue}:{id}` - Job data
-* - ZSET `{prefix}:waiting:{queue}` - Waiting jobs (score = priority)
-* - ZSET `{prefix}:delayed:{queue}` - Delayed jobs (score = availableAt timestamp)
-* - SET `{prefix}:active:{queue}` - Active jobs
-* - LIST `{prefix}:completed:{queue}` - Completed jobs (newest first)
-* - LIST `{prefix}:failed:{queue}` - Failed jobs (newest first)
-* - LIST `{prefix}:messages:{queue}` - Simple message queue (backward compat)
-* - LIST `{prefix}:notify:{queue}` - Notification list for blocking wait
-*/
-var RedisQueueProvider = class extends alepha_queue.QueueProvider {
-log = (0, alepha_logger.$logger)();
-env = (0, alepha.$env)(envSchema);
-redisProvider = (0, alepha.$inject)(alepha_redis.RedisProvider);
-blockingClient;
-shouldStop = false;
-acquireJobSha;
-completeJobSha;
-failJobSha;
-start = (0, alepha.$hook)({
-on: "start",
-handler: async () => {
-this.shouldStop = false;
-this.blockingClient = this.redisProvider.duplicate();
-await this.blockingClient.connect();
-const redis = this.redisProvider.publisher;
-const acquireSha = await redis.scriptLoad(ACQUIRE_JOB_SCRIPT);
-const completeSha = await redis.scriptLoad(COMPLETE_JOB_SCRIPT);
-const failSha = await redis.scriptLoad(FAIL_JOB_SCRIPT);
-this.acquireJobSha = acquireSha.toString();
-this.completeJobSha = completeSha.toString();
-this.failJobSha = failSha.toString();
-}
-});
-stop = (0, alepha.$hook)({
-on: "stop",
-handler: async () => {
-this.shouldStop = true;
-if (this.blockingClient?.isOpen) await this.blockingClient.close();
-}
-});
-key(type, queue, id) {
-const base = `${this.env.REDIS_QUEUE_PREFIX}:${type}:${queue}`;
-return id ? `${base}:${id}` : base;
-}
-messageKey(queue) {
-return `${this.env.REDIS_QUEUE_PREFIX}:${queue}`;
-}
-notifyKey(queue) {
-return `${this.env.REDIS_QUEUE_PREFIX}:notify:${queue}`;
-}
-async push(queue, message) {
-await this.redisProvider.publisher.LPUSH(this.messageKey(queue), message);
-}
-async pop(queue) {
-const value = await this.redisProvider.publisher.RPOP(this.messageKey(queue));
-if (value == null) return void 0;
-return String(value);
-}
-async popBlocking(queues, timeoutSeconds) {
-if (queues.length === 0 || !this.blockingClient) return;
-const prefixedQueues = queues.map((q) => this.messageKey(q));
-const result = await this.blockingClient.BRPOP(prefixedQueues, timeoutSeconds);
-if (result == null) return void 0;
-const key = result.key.toString();
-const prefixLength = this.env.REDIS_QUEUE_PREFIX.length + 1;
-return {
-queue: key.substring(prefixLength),
-message: result.element.toString()
-};
-}
-async generateJobId() {
-return `job_${await this.redisProvider.publisher.INCR(`${this.env.REDIS_QUEUE_PREFIX}:job_counter`)}_${Date.now()}`;
-}
-serializeJob(job) {
-return {
-id: job.id,
-queue: job.queue,
-payload: JSON.stringify(job.payload),
-options: JSON.stringify(job.options),
-state: JSON.stringify(job.state)
-};
-}
-deserializeJob(data) {
-if (!data.id) return void 0;
-return {
-id: data.id,
-queue: data.queue,
-payload: JSON.parse(data.payload),
-options: JSON.parse(data.options),
-state: JSON.parse(data.state)
-};
-}
-async addJob(queue, payload, options) {
-const redis = this.redisProvider.publisher;
-const now = Date.now();
-const delay = options?.delay ?? 0;
-const isDelayed = delay > 0;
-const job = {
-id: await this.generateJobId(),
-queue,
-payload,
-options: {
-priority: options?.priority ?? 0,
-delay: options?.delay ?? 0,
-maxAttempts: options?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS,
-backoff: options?.backoff,
-lockDuration: options?.lockDuration ?? DEFAULT_LOCK_DURATION,
-removeOnComplete: options?.removeOnComplete,
-removeOnFail: options?.removeOnFail
-},
-state: {
-status: isDelayed ? "delayed" : "waiting",
-attempts: 0,
-createdAt: now,
-availableAt: isDelayed ? now + delay : now
-}
-};
-await redis.HSET(this.key("job", queue, job.id), this.serializeJob(job));
-if (isDelayed) await redis.ZADD(this.key("delayed", queue), {
-score: job.state.availableAt,
-value: job.id
-});
-else {
-await redis.ZADD(this.key("waiting", queue), {
-score: job.options.priority ?? 0,
-value: job.id
-});
-await redis.LPUSH(this.notifyKey(queue), job.id);
-}
-this.log.debug(`Added job ${job.id} to queue ${queue}`, {
-status: job.state.status,
-priority: job.options.priority
-});
-if (!isDelayed) await this.emit({
-type: "waiting",
-queue,
-jobId: job.id,
-timestamp: now,
-job
-});
-return job;
-}
-async acquireJob(queues, workerId, timeoutSeconds) {
-if (!this.blockingClient || this.shouldStop) return;
-const redis = this.redisProvider.publisher;
-const endTime = Date.now() + timeoutSeconds * 1e3;
-while (Date.now() < endTime && !this.shouldStop) {
-for (const queue of queues) try {
-const result = await redis.evalSha(this.acquireJobSha, {
-keys: [
-this.key("waiting", queue),
-this.key("active", queue),
-this.key("job", queue)
-],
-arguments: [
-workerId,
-String(Date.now()),
-String(DEFAULT_LOCK_DURATION)
-]
-});
-if (result) {
-const job = JSON.parse(result);
-this.log.debug(`Worker ${workerId} acquired job ${job.id}`, {
-queue,
-attempt: job.state.attempts
-});
-await this.emit({
-type: "active",
-queue,
-jobId: job.id,
-timestamp: Date.now(),
-workerId,
-attempt: job.state.attempts
-});
-return {
-queue,
-job
-};
-}
-} catch (error) {
-this.log.warn(`Failed to acquire job from ${queue}`, error);
-}
-const notifyKeys = queues.map((q) => this.notifyKey(q));
-const remainingTimeout = Math.max(1, Math.ceil((endTime - Date.now()) / 1e3));
-try {
-if (await this.blockingClient.BRPOP(notifyKeys, Math.min(remainingTimeout, 5))) {}
-} catch {
-if (this.shouldStop) return;
-}
-}
-}
-bufferRecordToString(record) {
-const result = {};
-for (const [key, value] of Object.entries(record)) result[key] = value?.toString() ?? "";
-return result;
-}
-async completeJob(queue, jobId, result) {
-const redis = this.redisProvider.publisher;
-const now = Date.now();
-try {
-const luaResult = await redis.evalSha(this.completeJobSha, {
-keys: [
-this.key("job", queue, jobId),
-this.key("active", queue),
-this.key("completed", queue)
-],
-arguments: [
-jobId,
-String(now),
-result !== void 0 ? JSON.stringify(result) : ""
-]
-});
-if (!luaResult) {
-this.log.warn(`Attempted to complete unknown job ${jobId}`);
-return;
-}
-const { removed, duration } = JSON.parse(luaResult);
-this.log.debug(`Job ${jobId} completed${removed ? " and removed" : ""}`, {
-queue,
-result
-});
-await this.emit({
-type: "completed",
-queue,
-jobId,
-timestamp: now,
-result,
-duration
-});
-} catch (error) {
-this.log.warn(`Lua completeJob failed, using fallback`, error);
-await this.completeJobFallback(queue, jobId, result);
-}
-}
-async completeJobFallback(queue, jobId, result) {
-const redis = this.redisProvider.publisher;
-const now = Date.now();
-const jobData = await redis.HGETALL(this.key("job", queue, jobId));
-const job = this.deserializeJob(this.bufferRecordToString(jobData));
-if (!job) {
-this.log.warn(`Attempted to complete unknown job ${jobId}`);
-return;
-}
-const duration = now - (job.state.processedAt ?? now);
-await redis.SREM(this.key("active", queue), jobId);
-job.state.status = "completed";
-job.state.completedAt = now;
-job.state.result = result;
-job.state.lockedBy = void 0;
-job.state.lockedUntil = void 0;
-const removeOnComplete = job.options.removeOnComplete;
-if (removeOnComplete === true) await redis.DEL(this.key("job", queue, jobId));
-else {
-await redis.HSET(this.key("job", queue, jobId), { state: JSON.stringify(job.state) });
-await redis.LPUSH(this.key("completed", queue), jobId);
-if (typeof removeOnComplete === "number" && removeOnComplete >= 0) await this.cleanJobs(queue, "completed", { maxCount: removeOnComplete });
-}
-this.log.debug(`Job ${jobId} completed`, { queue });
-await this.emit({
-type: "completed",
-queue,
-jobId,
-timestamp: now,
-result,
-duration
-});
-}
-async failJob(queue, jobId, error, stackTrace) {
-const redis = this.redisProvider.publisher;
-const now = Date.now();
-const jobData = await redis.HGETALL(this.key("job", queue, jobId));
-const job = this.deserializeJob(this.bufferRecordToString(jobData));
-if (!job) {
-this.log.warn(`Attempted to fail unknown job ${jobId}`);
-return;
-}
-const backoffDelay = this.calculateBackoff(job);
-try {
-const luaResult = await redis.evalSha(this.failJobSha, {
-keys: [
-this.key("job", queue, jobId),
-this.key("active", queue),
-this.key("delayed", queue),
-this.key("failed", queue)
-],
-arguments: [
-jobId,
-String(now),
-error,
-stackTrace ?? "",
-String(backoffDelay)
-]
-});
-if (!luaResult) {
-this.log.warn(`Attempted to fail unknown job ${jobId}`);
-return;
-}
-const result = JSON.parse(luaResult);
-if (result.status === "retrying") {
-this.log.debug(`Job ${jobId} failed, will retry in ${result.delay}ms`, {
-queue,
-attempt: job.state.attempts,
-error
-});
-await this.emit({
-type: "retrying",
-queue,
-jobId,
-timestamp: now,
-error,
-attempt: result.attempt,
-delay: result.delay
-});
-} else {
-this.log.debug(`Job ${jobId} permanently failed${result.removed ? " and removed" : ""}`, {
-queue,
-error
-});
-await this.emit({
-type: "failed",
-queue,
-jobId,
-timestamp: now,
-error,
-stackTrace,
-attempts: result.attempts
-});
-}
-} catch (luaError) {
-this.log.warn(`Lua failJob failed, using fallback`, luaError);
-await this.failJobFallback(queue, jobId, error, stackTrace);
-}
-}
-async failJobFallback(queue, jobId, error, stackTrace) {
-const redis = this.redisProvider.publisher;
-const now = Date.now();
-const jobData = await redis.HGETALL(this.key("job", queue, jobId));
-const job = this.deserializeJob(this.bufferRecordToString(jobData));
-if (!job) {
-this.log.warn(`Attempted to fail unknown job ${jobId}`);
-return;
-}
-await redis.SREM(this.key("active", queue), jobId);
-const maxAttempts = job.options.maxAttempts ?? DEFAULT_MAX_ATTEMPTS;
-if (job.state.attempts < maxAttempts) {
-const backoffDelay = this.calculateBackoff(job);
-job.state.status = "delayed";
-job.state.availableAt = now + backoffDelay;
-job.state.error = error;
-job.state.stackTrace = stackTrace;
-job.state.lockedBy = void 0;
-job.state.lockedUntil = void 0;
-await redis.HSET(this.key("job", queue, jobId), { state: JSON.stringify(job.state) });
-await redis.ZADD(this.key("delayed", queue), {
-score: job.state.availableAt,
-value: jobId
-});
-this.log.debug(`Job ${jobId} failed, will retry in ${backoffDelay}ms`, {
-queue,
-attempt: job.state.attempts,
-maxAttempts
-});
-await this.emit({
-type: "retrying",
-queue,
-jobId,
-timestamp: now,
-error,
-attempt: job.state.attempts + 1,
-delay: backoffDelay
-});
-} else {
-job.state.status = "failed";
-job.state.failedAt = now;
-job.state.error = error;
-job.state.stackTrace = stackTrace;
-job.state.lockedBy = void 0;
-job.state.lockedUntil = void 0;
-const removeOnFail = job.options.removeOnFail;
-if (removeOnFail === true) await redis.DEL(this.key("job", queue, jobId));
-else {
-await redis.HSET(this.key("job", queue, jobId), { state: JSON.stringify(job.state) });
-await redis.LPUSH(this.key("failed", queue), jobId);
-if (typeof removeOnFail === "number" && removeOnFail >= 0) await this.cleanJobs(queue, "failed", { maxCount: removeOnFail });
-}
-this.log.debug(`Job ${jobId} permanently failed`, { queue });
-await this.emit({
-type: "failed",
-queue,
-jobId,
-timestamp: now,
-error,
-stackTrace,
-attempts: job.state.attempts
-});
-}
-}
-calculateBackoff(job) {
-const backoff = job.options.backoff;
-const attempt = job.state.attempts;
-if (!backoff) return DEFAULT_BACKOFF_DELAY;
-const baseDelay = backoff.delay ?? DEFAULT_BACKOFF_DELAY;
-const maxDelay = backoff.maxDelay ?? DEFAULT_BACKOFF_MAX_DELAY;
-if (backoff.type === "fixed") return baseDelay;
-const exponentialDelay = baseDelay * 2 ** (attempt - 1);
-return Math.min(exponentialDelay, maxDelay);
-}
-async renewJobLock(queue, jobId, workerId) {
-const redis = this.redisProvider.publisher;
-const jobData = await redis.HGETALL(this.key("job", queue, jobId));
-const job = this.deserializeJob(this.bufferRecordToString(jobData));
-if (!job || job.state.lockedBy !== workerId) return false;
-job.state.lockedUntil = Date.now() + (job.options.lockDuration ?? DEFAULT_LOCK_DURATION);
-await redis.HSET(this.key("job", queue, jobId), { state: JSON.stringify(job.state) });
-return true;
-}
-async getJob(queue, jobId) {
-const jobData = await this.redisProvider.publisher.HGETALL(this.key("job", queue, jobId));
-return this.deserializeJob(this.bufferRecordToString(jobData));
-}
-async getJobs(queue, status, options) {
-const redis = this.redisProvider.publisher;
-const limit = options?.limit ?? 100;
-const offset = options?.offset ?? 0;
-let jobIds;
-switch (status) {
-case "waiting":
-jobIds = (await redis.ZRANGE(this.key("waiting", queue), offset, offset + limit - 1)).map((r) => r.toString());
-break;
-case "delayed":
-jobIds = (await redis.ZRANGE(this.key("delayed", queue), offset, offset + limit - 1)).map((r) => r.toString());
-break;
-case "active":
-jobIds = (await redis.SMEMBERS(this.key("active", queue))).map((r) => r.toString()).slice(offset, offset + limit);
-break;
-case "completed":
-jobIds = (await redis.LRANGE(this.key("completed", queue), offset, offset + limit - 1)).map((r) => r.toString());
-break;
-case "failed":
-jobIds = (await redis.LRANGE(this.key("failed", queue), offset, offset + limit - 1)).map((r) => r.toString());
-break;
-default: jobIds = [];
-}
-const jobs = [];
-for (const jobId of jobIds) {
-const job = await this.getJob(queue, jobId);
-if (job) jobs.push(job);
-}
-return jobs;
-}
-async getJobCounts(queue) {
-const redis = this.redisProvider.publisher;
-const [waiting, delayed, active, completed, failed] = await Promise.all([
-redis.ZCARD(this.key("waiting", queue)),
-redis.ZCARD(this.key("delayed", queue)),
-redis.SCARD(this.key("active", queue)),
-redis.LLEN(this.key("completed", queue)),
-redis.LLEN(this.key("failed", queue))
-]);
-return {
-waiting,
-delayed,
-active,
-completed,
-failed
-};
-}
-async promoteDelayedJobs(queue) {
-const redis = this.redisProvider.publisher;
-const now = Date.now();
-const results = await redis.ZRANGEBYSCORE(this.key("delayed", queue), "-inf", now);
-let promoted = 0;
-for (const result of results) {
-const jobId = result.toString();
-if (await redis.ZREM(this.key("delayed", queue), jobId) === 0) continue;
-const job = await this.getJob(queue, jobId);
-if (!job) continue;
-job.state.status = "waiting";
-await redis.HSET(this.key("job", queue, jobId), { state: JSON.stringify(job.state) });
-await redis.ZADD(this.key("waiting", queue), {
-score: job.options.priority ?? 0,
-value: jobId
-});
-await redis.LPUSH(this.notifyKey(queue), jobId);
-promoted++;
-this.log.debug(`Promoted delayed job ${jobId}`, { queue });
-await this.emit({
-type: "waiting",
-queue,
-jobId,
-timestamp: now,
-job
-});
-}
-return promoted;
-}
-async recoverStalledJobs(queue, stalledThresholdMs) {
-const redis = this.redisProvider.publisher;
-const now = Date.now();
-const activeJobIds = await redis.SMEMBERS(this.key("active", queue));
-const stalledJobIds = [];
-for (const result of activeJobIds) {
-const jobId = result.toString();
-const job = await this.getJob(queue, jobId);
-if (!job) continue;
-if (!((job.state.lockedUntil ?? 0) + stalledThresholdMs < now)) continue;
-stalledJobIds.push(jobId);
-const workerId = job.state.lockedBy;
-await redis.SREM(this.key("active", queue), jobId);
-const maxAttempts = job.options.maxAttempts ?? DEFAULT_MAX_ATTEMPTS;
-const hasMoreAttempts = job.state.attempts < maxAttempts;
-await this.emit({
-type: "stalled",
-queue,
-jobId,
-timestamp: now,
-workerId,
-willRetry: hasMoreAttempts
-});
-if (hasMoreAttempts) {
-job.state.status = "waiting";
-job.state.lockedBy = void 0;
-job.state.lockedUntil = void 0;
-job.state.error = "Job stalled (worker timeout)";
-await redis.HSET(this.key("job", queue, jobId), { state: JSON.stringify(job.state) });
-await redis.ZADD(this.key("waiting", queue), {
-score: job.options.priority ?? 0,
-value: jobId
-});
-await redis.LPUSH(this.notifyKey(queue), jobId);
-this.log.warn(`Recovered stalled job ${jobId}`, { queue });
-await this.emit({
-type: "waiting",
-queue,
-jobId,
-timestamp: now,
-job
-});
-} else {
-job.state.status = "failed";
-job.state.failedAt = now;
-job.state.lockedBy = void 0;
-job.state.lockedUntil = void 0;
-job.state.error = "Job stalled (worker timeout) - max attempts exceeded";
-const removeOnFail = job.options.removeOnFail;
-if (removeOnFail === true) await redis.DEL(this.key("job", queue, jobId));
-else {
-await redis.HSET(this.key("job", queue, jobId), { state: JSON.stringify(job.state) });
-await redis.LPUSH(this.key("failed", queue), jobId);
-if (typeof removeOnFail === "number" && removeOnFail >= 0) await this.cleanJobs(queue, "failed", { maxCount: removeOnFail });
-}
-this.log.warn(`Stalled job ${jobId} permanently failed`, { queue });
-await this.emit({
-type: "failed",
-queue,
-jobId,
-timestamp: now,
-error: job.state.error,
-attempts: job.state.attempts
-});
-}
-}
-return stalledJobIds;
-}
-async cleanJobs(queue, status, options) {
-const redis = this.redisProvider.publisher;
-const listKey = this.key(status, queue);
-const maxAge = options?.maxAge;
-const maxCount = options?.maxCount;
-let removed = 0;
-if (maxAge !== void 0) {
-const cutoff = Date.now() - maxAge;
-const jobIds = await redis.LRANGE(listKey, 0, -1);
-for (const result of jobIds) {
-const jobId = result.toString();
-const job = await this.getJob(queue, jobId);
-if (!job) continue;
-const timestamp = status === "completed" ? job.state.completedAt : job.state.failedAt;
-if (timestamp && timestamp < cutoff) {
-await redis.LREM(listKey, 1, jobId);
-await redis.DEL(this.key("job", queue, jobId));
-removed++;
-}
-}
-}
-if (maxCount !== void 0) {
-if (await redis.LLEN(listKey) > maxCount) {
-const toRemove = await redis.LRANGE(listKey, maxCount, -1);
-for (const result of toRemove) {
-const jobId = result.toString();
-await redis.DEL(this.key("job", queue, jobId));
-removed++;
-}
-await redis.LTRIM(listKey, 0, maxCount - 1);
-}
-}
-return removed;
-}
-async removeJob(queue, jobId) {
-const redis = this.redisProvider.publisher;
-const job = await this.getJob(queue, jobId);
-if (!job) return;
-const previousStatus = job.state.status;
-switch (job.state.status) {
-case "waiting":
-await redis.ZREM(this.key("waiting", queue), jobId);
-break;
-case "delayed":
-await redis.ZREM(this.key("delayed", queue), jobId);
-break;
-case "active":
-await redis.SREM(this.key("active", queue), jobId);
-break;
-case "completed":
-await redis.LREM(this.key("completed", queue), 1, jobId);
-break;
-case "failed":
-await redis.LREM(this.key("failed", queue), 1, jobId);
-break;
-}
-await redis.DEL(this.key("job", queue, jobId));
-await this.emit({
-type: "removed",
-queue,
-jobId,
-timestamp: Date.now(),
-previousStatus
-});
-}
-cancelWaiters() {
-this.shouldStop = true;
-}
-};
-
-//#endregion
-//#region src/queue-redis/index.ts
-/**
-* Plugin for Alepha Queue that provides Redis queue capabilities.
-*
-* @see {@link RedisQueueProvider}
-* @module alepha.queue.redis
-*/
-const AlephaQueueRedis = (0, alepha.$module)({
-name: "alepha.queue.redis",
-services: [RedisQueueProvider],
-register: (alepha$1) => alepha$1.with({
-optional: true,
-provide: alepha_queue.QueueProvider,
-use: RedisQueueProvider
-}).with(alepha_queue.AlephaQueue)
-});
-
-//#endregion
-exports.AlephaQueueRedis = AlephaQueueRedis;
-exports.RedisQueueProvider = RedisQueueProvider;
-//# sourceMappingURL=index.cjs.map
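The removed bundle loads its three Lua scripts once in the `start` hook via `scriptLoad` and then invokes them by SHA with `evalSha`, which is what keeps job acquisition, completion, and failure handling atomic. Below is a minimal standalone sketch of that load-once/eval-by-SHA pattern, assuming a local Redis and the node-redis v4 client; the `queue:waiting:emails` key is illustrative only (the provider itself derives key names from `REDIS_QUEUE_PREFIX`).

```ts
// Sketch only: mirrors the scriptLoad/evalSha flow of the removed
// RedisQueueProvider, not the provider's actual API surface.
import { createClient } from "redis";

// Trivial script: atomically pop the lowest-score member of a sorted set,
// the same ZRANGE + ZREM shape used by ACQUIRE_JOB_SCRIPT above.
const POP_MIN_SCRIPT = `
local ids = redis.call('ZRANGE', KEYS[1], 0, 0)
if #ids == 0 then
  return nil
end
redis.call('ZREM', KEYS[1], ids[1])
return ids[1]
`;

async function main() {
  const client = createClient();
  await client.connect();

  // Load once (the provider does this in its start hook), then reuse the SHA.
  const sha = await client.scriptLoad(POP_MIN_SCRIPT);

  await client.zAdd("queue:waiting:emails", { score: 0, value: "job_1" });
  const jobId = await client.evalSha(sha, {
    keys: ["queue:waiting:emails"],
    arguments: [],
  });
  console.log("acquired:", jobId); // "job_1", or null when the set is empty

  await client.quit();
}

main().catch(console.error);
```

The real provider also keeps plain-command fallbacks (`completeJobFallback`, `failJobFallback`) for the case where `evalSha` fails, for example after the script cache is flushed.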