@hotmeshio/hotmesh 0.6.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +179 -142
- package/build/index.d.ts +3 -1
- package/build/index.js +5 -1
- package/build/modules/enums.d.ts +18 -0
- package/build/modules/enums.js +27 -1
- package/build/modules/utils.d.ts +27 -0
- package/build/modules/utils.js +79 -1
- package/build/package.json +24 -10
- package/build/services/connector/factory.d.ts +1 -1
- package/build/services/connector/factory.js +15 -1
- package/build/services/connector/providers/ioredis.d.ts +9 -0
- package/build/services/connector/providers/ioredis.js +26 -0
- package/build/services/connector/providers/postgres.js +3 -0
- package/build/services/connector/providers/redis.d.ts +9 -0
- package/build/services/connector/providers/redis.js +38 -0
- package/build/services/hotmesh/index.d.ts +66 -15
- package/build/services/hotmesh/index.js +84 -15
- package/build/services/memflow/index.d.ts +100 -14
- package/build/services/memflow/index.js +100 -14
- package/build/services/memflow/worker.d.ts +97 -0
- package/build/services/memflow/worker.js +217 -0
- package/build/services/memflow/workflow/proxyActivities.d.ts +74 -3
- package/build/services/memflow/workflow/proxyActivities.js +81 -4
- package/build/services/router/consumption/index.d.ts +2 -1
- package/build/services/router/consumption/index.js +38 -2
- package/build/services/router/error-handling/index.d.ts +3 -3
- package/build/services/router/error-handling/index.js +48 -13
- package/build/services/router/index.d.ts +1 -0
- package/build/services/router/index.js +2 -1
- package/build/services/search/factory.js +8 -0
- package/build/services/search/providers/redis/ioredis.d.ts +23 -0
- package/build/services/search/providers/redis/ioredis.js +189 -0
- package/build/services/search/providers/redis/redis.d.ts +23 -0
- package/build/services/search/providers/redis/redis.js +202 -0
- package/build/services/store/factory.js +9 -1
- package/build/services/store/index.d.ts +3 -2
- package/build/services/store/providers/postgres/kvtypes/hash/basic.js +36 -6
- package/build/services/store/providers/postgres/kvtypes/hash/expire.js +12 -2
- package/build/services/store/providers/postgres/kvtypes/hash/scan.js +30 -10
- package/build/services/store/providers/postgres/kvtypes/list.js +68 -10
- package/build/services/store/providers/postgres/kvtypes/string.js +60 -10
- package/build/services/store/providers/postgres/kvtypes/zset.js +92 -22
- package/build/services/store/providers/postgres/postgres.d.ts +3 -3
- package/build/services/store/providers/redis/_base.d.ts +137 -0
- package/build/services/store/providers/redis/_base.js +980 -0
- package/build/services/store/providers/redis/ioredis.d.ts +20 -0
- package/build/services/store/providers/redis/ioredis.js +190 -0
- package/build/services/store/providers/redis/redis.d.ts +18 -0
- package/build/services/store/providers/redis/redis.js +199 -0
- package/build/services/stream/factory.js +17 -1
- package/build/services/stream/providers/postgres/kvtables.js +76 -23
- package/build/services/stream/providers/postgres/lifecycle.d.ts +19 -0
- package/build/services/stream/providers/postgres/lifecycle.js +54 -0
- package/build/services/stream/providers/postgres/messages.d.ts +56 -0
- package/build/services/stream/providers/postgres/messages.js +253 -0
- package/build/services/stream/providers/postgres/notifications.d.ts +59 -0
- package/build/services/stream/providers/postgres/notifications.js +357 -0
- package/build/services/stream/providers/postgres/postgres.d.ts +110 -11
- package/build/services/stream/providers/postgres/postgres.js +196 -488
- package/build/services/stream/providers/postgres/scout.d.ts +68 -0
- package/build/services/stream/providers/postgres/scout.js +233 -0
- package/build/services/stream/providers/postgres/stats.d.ts +49 -0
- package/build/services/stream/providers/postgres/stats.js +113 -0
- package/build/services/stream/providers/redis/ioredis.d.ts +61 -0
- package/build/services/stream/providers/redis/ioredis.js +272 -0
- package/build/services/stream/providers/redis/redis.d.ts +61 -0
- package/build/services/stream/providers/redis/redis.js +305 -0
- package/build/services/sub/factory.js +8 -0
- package/build/services/sub/providers/postgres/postgres.js +37 -5
- package/build/services/sub/providers/redis/ioredis.d.ts +20 -0
- package/build/services/sub/providers/redis/ioredis.js +161 -0
- package/build/services/sub/providers/redis/redis.d.ts +18 -0
- package/build/services/sub/providers/redis/redis.js +148 -0
- package/build/services/worker/index.d.ts +1 -0
- package/build/services/worker/index.js +2 -0
- package/build/types/hotmesh.d.ts +42 -2
- package/build/types/index.d.ts +4 -3
- package/build/types/index.js +4 -1
- package/build/types/memflow.d.ts +32 -0
- package/build/types/provider.d.ts +17 -1
- package/build/types/redis.d.ts +258 -0
- package/build/types/redis.js +11 -0
- package/build/types/stream.d.ts +92 -1
- package/index.ts +4 -0
- package/package.json +24 -10
|
@@ -11,10 +11,81 @@ declare function getProxyInterruptPayload(context: ReturnType<typeof getContext>
|
|
|
11
11
|
*/
|
|
12
12
|
declare function wrapActivity<T>(activityName: string, options?: ActivityConfig): T;
|
|
13
13
|
/**
|
|
14
|
-
*
|
|
14
|
+
* Create proxies for activity functions with automatic retry and deterministic replay.
|
|
15
|
+
* Activities execute via message queue, so they can run on different servers.
|
|
16
|
+
*
|
|
17
|
+
* Without `taskQueue`, activities use the workflow's task queue (e.g., `my-workflow-activity`).
|
|
18
|
+
* With `taskQueue`, activities use the specified queue (e.g., `payment-activity`).
|
|
19
|
+
*
|
|
20
|
+
* The `activities` parameter is optional. If activities are already registered via
|
|
21
|
+
* `registerActivityWorker()`, you can reference them by providing just the `taskQueue`
|
|
22
|
+
* and a TypeScript interface.
|
|
23
|
+
*
|
|
15
24
|
* @template ACT
|
|
16
|
-
* @param {ActivityConfig} [options] -
|
|
17
|
-
* @
|
|
25
|
+
* @param {ActivityConfig} [options] - Activity configuration
|
|
26
|
+
* @param {any} [options.activities] - (Optional) Activity functions to register inline
|
|
27
|
+
* @param {string} [options.taskQueue] - (Optional) Task queue name (without `-activity` suffix)
|
|
28
|
+
* @param {object} [options.retryPolicy] - Retry configuration
|
|
29
|
+
* @returns {ProxyType<ACT>} Proxy for calling activities with durability and retry
|
|
30
|
+
*
|
|
31
|
+
* @example
|
|
32
|
+
* ```typescript
|
|
33
|
+
* // Inline registration (activities in same codebase)
|
|
34
|
+
* const activities = MemFlow.workflow.proxyActivities<typeof activities>({
|
|
35
|
+
* activities: { processData, validateData },
|
|
36
|
+
* retryPolicy: { maximumAttempts: 3 }
|
|
37
|
+
* });
|
|
38
|
+
*
|
|
39
|
+
* await activities.processData('input');
|
|
40
|
+
* ```
|
|
41
|
+
*
|
|
42
|
+
* @example
|
|
43
|
+
* ```typescript
|
|
44
|
+
* // Reference pre-registered activities (can be on different server)
|
|
45
|
+
* interface PaymentActivities {
|
|
46
|
+
* processPayment: (amount: number) => Promise<string>;
|
|
47
|
+
* sendEmail: (to: string, subject: string) => Promise<void>;
|
|
48
|
+
* }
|
|
49
|
+
*
|
|
50
|
+
* const { processPayment, sendEmail } =
|
|
51
|
+
* MemFlow.workflow.proxyActivities<PaymentActivities>({
|
|
52
|
+
* taskQueue: 'payment',
|
|
53
|
+
* retryPolicy: { maximumAttempts: 3 }
|
|
54
|
+
* });
|
|
55
|
+
*
|
|
56
|
+
* const result = await processPayment(100.00);
|
|
57
|
+
* await sendEmail('user@example.com', 'Payment processed');
|
|
58
|
+
* ```
|
|
59
|
+
*
|
|
60
|
+
* @example
|
|
61
|
+
* ```typescript
|
|
62
|
+
* // Shared activities in interceptor
|
|
63
|
+
* const interceptor: WorkflowInterceptor = {
|
|
64
|
+
* async execute(ctx, next) {
|
|
65
|
+
* const { auditLog } = MemFlow.workflow.proxyActivities<{
|
|
66
|
+
* auditLog: (id: string, action: string) => Promise<void>;
|
|
67
|
+
* }>({
|
|
68
|
+
* taskQueue: 'shared',
|
|
69
|
+
* retryPolicy: { maximumAttempts: 3 }
|
|
70
|
+
* });
|
|
71
|
+
*
|
|
72
|
+
* await auditLog(ctx.get('workflowId'), 'started');
|
|
73
|
+
* const result = await next();
|
|
74
|
+
* await auditLog(ctx.get('workflowId'), 'completed');
|
|
75
|
+
* return result;
|
|
76
|
+
* }
|
|
77
|
+
* };
|
|
78
|
+
* ```
|
|
79
|
+
*
|
|
80
|
+
* @example
|
|
81
|
+
* ```typescript
|
|
82
|
+
* // Custom task queue for specific activities
|
|
83
|
+
* const highPriority = MemFlow.workflow.proxyActivities<typeof activities>({
|
|
84
|
+
* activities: { criticalProcess },
|
|
85
|
+
* taskQueue: 'high-priority',
|
|
86
|
+
* retryPolicy: { maximumAttempts: 5 }
|
|
87
|
+
* });
|
|
88
|
+
* ```
|
|
18
89
|
*/
|
|
19
90
|
export declare function proxyActivities<ACT>(options?: ActivityConfig): ProxyType<ACT>;
|
|
20
91
|
export { wrapActivity, getProxyInterruptPayload };
|
|
@@ -10,7 +10,11 @@ const didRun_1 = require("./didRun");
|
|
|
10
10
|
*/
|
|
11
11
|
function getProxyInterruptPayload(context, activityName, execIndex, args, options) {
|
|
12
12
|
const { workflowDimension, workflowId, originJobId, workflowTopic, expire } = context;
|
|
13
|
-
|
|
13
|
+
// Use explicitly provided taskQueue, otherwise derive from workflow (original behavior)
|
|
14
|
+
// This keeps backward compatibility while allowing explicit global/custom queues
|
|
15
|
+
const activityTopic = options?.taskQueue
|
|
16
|
+
? `${options.taskQueue}-activity`
|
|
17
|
+
: `${workflowTopic}-activity`;
|
|
14
18
|
const activityJobId = `-${workflowId}-$${activityName}${workflowDimension}-${execIndex}`;
|
|
15
19
|
let maximumInterval;
|
|
16
20
|
if (options?.retryPolicy?.maximumInterval) {
|
|
@@ -77,15 +81,88 @@ function wrapActivity(activityName, options) {
|
|
|
77
81
|
}
|
|
78
82
|
exports.wrapActivity = wrapActivity;
|
|
79
83
|
/**
|
|
80
|
-
*
|
|
84
|
+
* Create proxies for activity functions with automatic retry and deterministic replay.
|
|
85
|
+
* Activities execute via message queue, so they can run on different servers.
|
|
86
|
+
*
|
|
87
|
+
* Without `taskQueue`, activities use the workflow's task queue (e.g., `my-workflow-activity`).
|
|
88
|
+
* With `taskQueue`, activities use the specified queue (e.g., `payment-activity`).
|
|
89
|
+
*
|
|
90
|
+
* The `activities` parameter is optional. If activities are already registered via
|
|
91
|
+
* `registerActivityWorker()`, you can reference them by providing just the `taskQueue`
|
|
92
|
+
* and a TypeScript interface.
|
|
93
|
+
*
|
|
81
94
|
* @template ACT
|
|
82
|
-
* @param {ActivityConfig} [options] -
|
|
83
|
-
* @
|
|
95
|
+
* @param {ActivityConfig} [options] - Activity configuration
|
|
96
|
+
* @param {any} [options.activities] - (Optional) Activity functions to register inline
|
|
97
|
+
* @param {string} [options.taskQueue] - (Optional) Task queue name (without `-activity` suffix)
|
|
98
|
+
* @param {object} [options.retryPolicy] - Retry configuration
|
|
99
|
+
* @returns {ProxyType<ACT>} Proxy for calling activities with durability and retry
|
|
100
|
+
*
|
|
101
|
+
* @example
|
|
102
|
+
* ```typescript
|
|
103
|
+
* // Inline registration (activities in same codebase)
|
|
104
|
+
* const activities = MemFlow.workflow.proxyActivities<typeof activities>({
|
|
105
|
+
* activities: { processData, validateData },
|
|
106
|
+
* retryPolicy: { maximumAttempts: 3 }
|
|
107
|
+
* });
|
|
108
|
+
*
|
|
109
|
+
* await activities.processData('input');
|
|
110
|
+
* ```
|
|
111
|
+
*
|
|
112
|
+
* @example
|
|
113
|
+
* ```typescript
|
|
114
|
+
* // Reference pre-registered activities (can be on different server)
|
|
115
|
+
* interface PaymentActivities {
|
|
116
|
+
* processPayment: (amount: number) => Promise<string>;
|
|
117
|
+
* sendEmail: (to: string, subject: string) => Promise<void>;
|
|
118
|
+
* }
|
|
119
|
+
*
|
|
120
|
+
* const { processPayment, sendEmail } =
|
|
121
|
+
* MemFlow.workflow.proxyActivities<PaymentActivities>({
|
|
122
|
+
* taskQueue: 'payment',
|
|
123
|
+
* retryPolicy: { maximumAttempts: 3 }
|
|
124
|
+
* });
|
|
125
|
+
*
|
|
126
|
+
* const result = await processPayment(100.00);
|
|
127
|
+
* await sendEmail('user@example.com', 'Payment processed');
|
|
128
|
+
* ```
|
|
129
|
+
*
|
|
130
|
+
* @example
|
|
131
|
+
* ```typescript
|
|
132
|
+
* // Shared activities in interceptor
|
|
133
|
+
* const interceptor: WorkflowInterceptor = {
|
|
134
|
+
* async execute(ctx, next) {
|
|
135
|
+
* const { auditLog } = MemFlow.workflow.proxyActivities<{
|
|
136
|
+
* auditLog: (id: string, action: string) => Promise<void>;
|
|
137
|
+
* }>({
|
|
138
|
+
* taskQueue: 'shared',
|
|
139
|
+
* retryPolicy: { maximumAttempts: 3 }
|
|
140
|
+
* });
|
|
141
|
+
*
|
|
142
|
+
* await auditLog(ctx.get('workflowId'), 'started');
|
|
143
|
+
* const result = await next();
|
|
144
|
+
* await auditLog(ctx.get('workflowId'), 'completed');
|
|
145
|
+
* return result;
|
|
146
|
+
* }
|
|
147
|
+
* };
|
|
148
|
+
* ```
|
|
149
|
+
*
|
|
150
|
+
* @example
|
|
151
|
+
* ```typescript
|
|
152
|
+
* // Custom task queue for specific activities
|
|
153
|
+
* const highPriority = MemFlow.workflow.proxyActivities<typeof activities>({
|
|
154
|
+
* activities: { criticalProcess },
|
|
155
|
+
* taskQueue: 'high-priority',
|
|
156
|
+
* retryPolicy: { maximumAttempts: 5 }
|
|
157
|
+
* });
|
|
158
|
+
* ```
|
|
84
159
|
*/
|
|
85
160
|
function proxyActivities(options) {
|
|
161
|
+
// Register activities if provided (optional - may already be registered remotely)
|
|
86
162
|
if (options?.activities) {
|
|
87
163
|
common_1.WorkerService.registerActivities(options.activities);
|
|
88
164
|
}
|
|
165
|
+
// Create proxy for all registered activities
|
|
89
166
|
const proxy = {};
|
|
90
167
|
const keys = Object.keys(common_1.WorkerService.activityRegistry);
|
|
91
168
|
if (keys.length) {
|
|
@@ -19,7 +19,8 @@ export declare class ConsumptionManager<S extends StreamService<ProviderClient,
|
|
|
19
19
|
private counts;
|
|
20
20
|
private hasReachedMaxBackoff;
|
|
21
21
|
private router;
|
|
22
|
-
|
|
22
|
+
private retryPolicy;
|
|
23
|
+
constructor(stream: S, logger: ILogger, throttleManager: ThrottleManager, errorHandler: ErrorHandler, lifecycleManager: LifecycleManager<S>, reclaimDelay: number, reclaimCount: number, appId: string, role: any, router: any, retryPolicy?: import('../../../types/stream').RetryPolicy);
|
|
23
24
|
createGroup(stream: string, group: string): Promise<void>;
|
|
24
25
|
publishMessage(topic: string, streamData: StreamData | StreamDataResponse, transaction?: ProviderTransaction): Promise<string | ProviderTransaction>;
|
|
25
26
|
consumeMessages(stream: string, group: string, consumer: string, callback: (streamData: StreamData) => Promise<StreamDataResponse | void>): Promise<void>;
|
|
@@ -7,7 +7,7 @@ const config_1 = require("../config");
|
|
|
7
7
|
const stream_1 = require("../../../types/stream");
|
|
8
8
|
const key_1 = require("../../../modules/key");
|
|
9
9
|
class ConsumptionManager {
|
|
10
|
-
constructor(stream, logger, throttleManager, errorHandler, lifecycleManager, reclaimDelay, reclaimCount, appId, role, router) {
|
|
10
|
+
constructor(stream, logger, throttleManager, errorHandler, lifecycleManager, reclaimDelay, reclaimCount, appId, role, router, retryPolicy) {
|
|
11
11
|
this.errorCount = 0;
|
|
12
12
|
this.counts = {};
|
|
13
13
|
this.stream = stream;
|
|
@@ -20,6 +20,7 @@ class ConsumptionManager {
|
|
|
20
20
|
this.appId = appId;
|
|
21
21
|
this.role = role;
|
|
22
22
|
this.router = router;
|
|
23
|
+
this.retryPolicy = retryPolicy;
|
|
23
24
|
}
|
|
24
25
|
async createGroup(stream, group) {
|
|
25
26
|
try {
|
|
@@ -32,6 +33,31 @@ class ConsumptionManager {
|
|
|
32
33
|
async publishMessage(topic, streamData, transaction) {
|
|
33
34
|
const code = streamData?.code || '200';
|
|
34
35
|
this.counts[code] = (this.counts[code] || 0) + 1;
|
|
36
|
+
// Extract retry policy from child workflow (590) and activity (591) message data
|
|
37
|
+
// ONLY if values differ from YAML defaults (10, 3/5, 120)
|
|
38
|
+
// If they're defaults, let old retry mechanism (policies.retry) handle it
|
|
39
|
+
const codeNum = typeof code === 'number' ? code : parseInt(code, 10);
|
|
40
|
+
if ((codeNum === 590 || codeNum === 591) && streamData.data) {
|
|
41
|
+
const data = streamData.data;
|
|
42
|
+
const backoff = data.backoffCoefficient;
|
|
43
|
+
const attempts = data.maximumAttempts;
|
|
44
|
+
const maxInterval = typeof data.maximumInterval === 'string'
|
|
45
|
+
? parseInt(data.maximumInterval)
|
|
46
|
+
: data.maximumInterval;
|
|
47
|
+
// Only extract if values are NOT the YAML defaults
|
|
48
|
+
// YAML defaults: backoffCoefficient=10, maximumAttempts=3 or 5, maximumInterval=120
|
|
49
|
+
const hasNonDefaultBackoff = backoff != null && backoff !== 10;
|
|
50
|
+
const hasNonDefaultAttempts = attempts != null && attempts !== 3 && attempts !== 5;
|
|
51
|
+
const hasNonDefaultInterval = maxInterval != null && maxInterval !== 120;
|
|
52
|
+
if (hasNonDefaultBackoff || hasNonDefaultAttempts || hasNonDefaultInterval) {
|
|
53
|
+
// Has custom values from config - add _streamRetryConfig
|
|
54
|
+
streamData._streamRetryConfig = {
|
|
55
|
+
max_retry_attempts: attempts,
|
|
56
|
+
backoff_coefficient: backoff,
|
|
57
|
+
maximum_interval_seconds: maxInterval,
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
}
|
|
35
61
|
const stream = this.stream.mintKey(key_1.KeyType.STREAMS, { topic });
|
|
36
62
|
const responses = await this.stream.publishMessages(stream, [JSON.stringify(streamData)], { transaction });
|
|
37
63
|
return responses[0];
|
|
@@ -390,7 +416,17 @@ class ConsumptionManager {
|
|
|
390
416
|
async publishResponse(input, output) {
|
|
391
417
|
if (output && typeof output === 'object') {
|
|
392
418
|
if (output.status === 'error') {
|
|
393
|
-
|
|
419
|
+
// Extract retry policy with priority:
|
|
420
|
+
// 1. Use message-level _streamRetryConfig (from database columns or previous retry)
|
|
421
|
+
// 2. Fall back to router-level retryPolicy (from worker config)
|
|
422
|
+
const retryPolicy = input._streamRetryConfig
|
|
423
|
+
? {
|
|
424
|
+
maximumAttempts: input._streamRetryConfig.max_retry_attempts,
|
|
425
|
+
backoffCoefficient: input._streamRetryConfig.backoff_coefficient,
|
|
426
|
+
maximumInterval: input._streamRetryConfig.maximum_interval_seconds,
|
|
427
|
+
}
|
|
428
|
+
: this.retryPolicy;
|
|
429
|
+
return await this.errorHandler.handleRetry(input, output, this.publishMessage.bind(this), retryPolicy);
|
|
394
430
|
}
|
|
395
431
|
else if (typeof output.metadata !== 'object') {
|
|
396
432
|
output.metadata = { ...input.metadata, guid: (0, utils_1.guid)() };
|
|
@@ -1,8 +1,8 @@
|
|
|
1
|
-
import { StreamData, StreamDataResponse } from '../../../types/stream';
|
|
1
|
+
import { StreamData, StreamDataResponse, RetryPolicy } from '../../../types/stream';
|
|
2
2
|
export declare class ErrorHandler {
|
|
3
|
-
shouldRetry(input: StreamData, output: StreamDataResponse): [boolean, number];
|
|
3
|
+
shouldRetry(input: StreamData, output: StreamDataResponse, retryPolicy?: RetryPolicy): [boolean, number];
|
|
4
4
|
structureUnhandledError(input: StreamData, err: Error): StreamDataResponse;
|
|
5
5
|
structureUnacknowledgedError(input: StreamData): StreamDataResponse;
|
|
6
6
|
structureError(input: StreamData, output: StreamDataResponse): StreamDataResponse;
|
|
7
|
-
handleRetry(input: StreamData, output: StreamDataResponse, publishMessage: (topic: string, streamData: StreamData | StreamDataResponse) => Promise<string
|
|
7
|
+
handleRetry(input: StreamData, output: StreamDataResponse, publishMessage: (topic: string, streamData: StreamData | StreamDataResponse) => Promise<string>, retryPolicy?: RetryPolicy): Promise<string>;
|
|
8
8
|
}
|
|
@@ -5,18 +5,37 @@ const utils_1 = require("../../../modules/utils");
|
|
|
5
5
|
const config_1 = require("../config");
|
|
6
6
|
const stream_1 = require("../../../types/stream");
|
|
7
7
|
class ErrorHandler {
|
|
8
|
-
shouldRetry(input, output) {
|
|
9
|
-
|
|
8
|
+
shouldRetry(input, output, retryPolicy) {
|
|
9
|
+
const tryCount = input.metadata.try || 0;
|
|
10
|
+
// Priority 1: Use structured retry policy (from stream columns or config)
|
|
11
|
+
if (retryPolicy) {
|
|
12
|
+
const maxAttempts = retryPolicy.maximumAttempts || 3;
|
|
13
|
+
const backoffCoeff = retryPolicy.backoffCoefficient || 10;
|
|
14
|
+
const maxInterval = typeof retryPolicy.maximumInterval === 'string'
|
|
15
|
+
? parseInt(retryPolicy.maximumInterval)
|
|
16
|
+
: (retryPolicy.maximumInterval || 120);
|
|
17
|
+
// Check if we can retry (next attempt would be attempt #tryCount+2, must be <= maxAttempts)
|
|
18
|
+
// tryCount=0 is 1st attempt, tryCount=1 is 2nd attempt, etc.
|
|
19
|
+
// So after tryCount, we've made (tryCount + 1) attempts
|
|
20
|
+
// We can retry if (tryCount + 1) < maxAttempts
|
|
21
|
+
if ((tryCount + 1) < maxAttempts) {
|
|
22
|
+
// Exponential backoff: min(coefficient^(try+1), maxInterval)
|
|
23
|
+
// First retry (after try=0): coefficient^1
|
|
24
|
+
// Second retry (after try=1): coefficient^2, etc.
|
|
25
|
+
const backoffSeconds = Math.min(Math.pow(backoffCoeff, tryCount + 1), maxInterval);
|
|
26
|
+
return [true, backoffSeconds * 1000]; // Convert to milliseconds
|
|
27
|
+
}
|
|
28
|
+
return [false, 0];
|
|
29
|
+
}
|
|
30
|
+
// Priority 2: Use message-level policies (existing behavior)
|
|
10
31
|
const policies = input.policies?.retry;
|
|
11
32
|
const errorCode = output.code.toString();
|
|
12
33
|
const policy = policies?.[errorCode];
|
|
13
34
|
const maxRetries = policy?.[0];
|
|
14
|
-
const
|
|
15
|
-
|
|
16
|
-
//only possible values for tryCount are 0, 1, 2
|
|
17
|
-
if (maxRetries > tryCount) {
|
|
35
|
+
const cappedTryCount = Math.min(tryCount, config_1.HMSH_MAX_RETRIES);
|
|
36
|
+
if (maxRetries > cappedTryCount) {
|
|
18
37
|
// 10ms, 100ms, or 1000ms delays between system retries
|
|
19
|
-
return [true, Math.pow(10,
|
|
38
|
+
return [true, Math.pow(10, cappedTryCount + 1)];
|
|
20
39
|
}
|
|
21
40
|
return [false, 0];
|
|
22
41
|
}
|
|
@@ -78,16 +97,32 @@ class ErrorHandler {
|
|
|
78
97
|
data,
|
|
79
98
|
};
|
|
80
99
|
}
|
|
81
|
-
async handleRetry(input, output, publishMessage) {
|
|
82
|
-
const [shouldRetry, timeout] = this.shouldRetry(input, output);
|
|
100
|
+
async handleRetry(input, output, publishMessage, retryPolicy) {
|
|
101
|
+
const [shouldRetry, timeout] = this.shouldRetry(input, output, retryPolicy);
|
|
83
102
|
if (shouldRetry) {
|
|
84
|
-
|
|
85
|
-
|
|
103
|
+
// Only sleep if no retryPolicy (legacy behavior for backward compatibility)
|
|
104
|
+
// With retryPolicy, use visibility timeout instead of in-memory sleep
|
|
105
|
+
if (!retryPolicy) {
|
|
106
|
+
await (0, utils_1.sleepFor)(timeout);
|
|
107
|
+
}
|
|
108
|
+
// Create new message with incremented try count
|
|
109
|
+
const newMessage = {
|
|
86
110
|
data: input.data,
|
|
87
|
-
//note: retain guid (this is a retry attempt)
|
|
88
111
|
metadata: { ...input.metadata, try: (input.metadata.try || 0) + 1 },
|
|
89
112
|
policies: input.policies,
|
|
90
|
-
}
|
|
113
|
+
};
|
|
114
|
+
// Propagate retry config to new message (for immutable pattern)
|
|
115
|
+
if (input._streamRetryConfig) {
|
|
116
|
+
newMessage._streamRetryConfig = input._streamRetryConfig;
|
|
117
|
+
}
|
|
118
|
+
// Add visibility delay for production-ready retry with retryPolicy
|
|
119
|
+
if (retryPolicy && timeout > 0) {
|
|
120
|
+
newMessage._visibilityDelayMs = timeout;
|
|
121
|
+
}
|
|
122
|
+
// Track retry attempt count in database
|
|
123
|
+
const currentAttempt = input._retryAttempt || 0;
|
|
124
|
+
newMessage._retryAttempt = currentAttempt + 1;
|
|
125
|
+
return (await publishMessage(input.metadata.topic, newMessage));
|
|
91
126
|
}
|
|
92
127
|
else {
|
|
93
128
|
const structuredError = this.structureError(input, output);
|
|
@@ -13,6 +13,7 @@ declare class Router<S extends StreamService<ProviderClient, ProviderTransaction
|
|
|
13
13
|
reclaimCount: number;
|
|
14
14
|
logger: ILogger;
|
|
15
15
|
readonly: boolean;
|
|
16
|
+
retryPolicy: import('../../types/stream').RetryPolicy | undefined;
|
|
16
17
|
errorCount: number;
|
|
17
18
|
counts: {
|
|
18
19
|
[key: string]: number;
|
|
@@ -29,11 +29,12 @@ class Router {
|
|
|
29
29
|
this.reclaimCount = enhancedConfig.reclaimCount;
|
|
30
30
|
this.logger = logger;
|
|
31
31
|
this.readonly = enhancedConfig.readonly;
|
|
32
|
+
this.retryPolicy = enhancedConfig.retryPolicy;
|
|
32
33
|
// Initialize submodule managers
|
|
33
34
|
this.throttleManager = new throttling_1.ThrottleManager(enhancedConfig.throttle);
|
|
34
35
|
this.errorHandler = new error_handling_1.ErrorHandler();
|
|
35
36
|
this.lifecycleManager = new lifecycle_1.LifecycleManager(this.readonly, this.topic, this.logger, this.stream);
|
|
36
|
-
this.consumptionManager = new consumption_1.ConsumptionManager(this.stream, this.logger, this.throttleManager, this.errorHandler, this.lifecycleManager, this.reclaimDelay, this.reclaimCount, this.appId, this.role, this);
|
|
37
|
+
this.consumptionManager = new consumption_1.ConsumptionManager(this.stream, this.logger, this.throttleManager, this.errorHandler, this.lifecycleManager, this.reclaimDelay, this.reclaimCount, this.appId, this.role, this, this.retryPolicy);
|
|
37
38
|
this.resetThrottleState();
|
|
38
39
|
}
|
|
39
40
|
// Legacy compatibility methods
|
|
@@ -3,12 +3,20 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.SearchServiceFactory = void 0;
|
|
4
4
|
const utils_1 = require("../../modules/utils");
|
|
5
5
|
const postgres_1 = require("./providers/postgres/postgres");
|
|
6
|
+
const ioredis_1 = require("./providers/redis/ioredis");
|
|
7
|
+
const redis_1 = require("./providers/redis/redis");
|
|
6
8
|
class SearchServiceFactory {
|
|
7
9
|
static async init(providerClient, storeProviderClient, namespace, appId, logger) {
|
|
8
10
|
let service;
|
|
9
11
|
if ((0, utils_1.identifyProvider)(providerClient) === 'postgres') {
|
|
10
12
|
service = new postgres_1.PostgresSearchService(providerClient, storeProviderClient);
|
|
11
13
|
}
|
|
14
|
+
else if ((0, utils_1.identifyProvider)(providerClient) === 'redis') {
|
|
15
|
+
service = new redis_1.RedisSearchService(providerClient, storeProviderClient);
|
|
16
|
+
}
|
|
17
|
+
else {
|
|
18
|
+
service = new ioredis_1.IORedisSearchService(providerClient, storeProviderClient);
|
|
19
|
+
}
|
|
12
20
|
await service.init(namespace, appId, logger);
|
|
13
21
|
return service;
|
|
14
22
|
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { SearchService } from '../../index';
|
|
2
|
+
import { ILogger } from '../../../logger';
|
|
3
|
+
import { IORedisClientType } from '../../../../types/redis';
|
|
4
|
+
declare class IORedisSearchService extends SearchService<IORedisClientType> {
|
|
5
|
+
constructor(searchClient: IORedisClientType, storeClient?: IORedisClientType);
|
|
6
|
+
init(namespace: string, appId: string, logger: ILogger): Promise<void>;
|
|
7
|
+
createSearchIndex(indexName: string, prefixes: string[], schema: string[]): Promise<void>;
|
|
8
|
+
listSearchIndexes(): Promise<string[]>;
|
|
9
|
+
updateContext(key: string, fields: Record<string, string>): Promise<any>;
|
|
10
|
+
setFields(key: string, fields: Record<string, string>): Promise<number>;
|
|
11
|
+
getField(key: string, field: string): Promise<string>;
|
|
12
|
+
getFields(key: string, fields: string[]): Promise<string[]>;
|
|
13
|
+
getAllFields(key: string): Promise<Record<string, string>>;
|
|
14
|
+
deleteFields(key: string, fields: string[]): Promise<number>;
|
|
15
|
+
incrementFieldByFloat(key: string, field: string, increment: number): Promise<number>;
|
|
16
|
+
sendQuery(...query: [string, ...string[]]): Promise<any>;
|
|
17
|
+
sendIndexedQuery(index: string, query: string[]): Promise<string[]>;
|
|
18
|
+
findEntities(): Promise<any[]>;
|
|
19
|
+
findEntityById(): Promise<any>;
|
|
20
|
+
findEntitiesByCondition(): Promise<any[]>;
|
|
21
|
+
createEntityIndex(): Promise<void>;
|
|
22
|
+
}
|
|
23
|
+
export { IORedisSearchService };
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.IORedisSearchService = void 0;
|
|
4
|
+
const index_1 = require("../../index");
|
|
5
|
+
class IORedisSearchService extends index_1.SearchService {
|
|
6
|
+
constructor(searchClient, storeClient) {
|
|
7
|
+
super(searchClient, storeClient);
|
|
8
|
+
}
|
|
9
|
+
async init(namespace, appId, logger) {
|
|
10
|
+
this.namespace = namespace;
|
|
11
|
+
this.appId = appId;
|
|
12
|
+
this.logger = logger;
|
|
13
|
+
}
|
|
14
|
+
async createSearchIndex(indexName, prefixes, schema) {
|
|
15
|
+
try {
|
|
16
|
+
await this.searchClient.call('FT.CREATE', indexName, 'ON', 'HASH', 'PREFIX', prefixes.length.toString(), ...prefixes, 'SCHEMA', ...schema);
|
|
17
|
+
}
|
|
18
|
+
catch (error) {
|
|
19
|
+
this.logger.info('Error creating search index', { error });
|
|
20
|
+
throw error;
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
async listSearchIndexes() {
|
|
24
|
+
try {
|
|
25
|
+
const indexes = await this.searchClient.call('FT._LIST');
|
|
26
|
+
return indexes;
|
|
27
|
+
}
|
|
28
|
+
catch (error) {
|
|
29
|
+
this.logger.info('Error listing search indexes', { error });
|
|
30
|
+
throw error;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
async updateContext(key, fields) {
|
|
34
|
+
// Find replay ID if present (field with hyphen, not the @udata field)
|
|
35
|
+
const replayId = Object.keys(fields).find((k) => k.includes('-') && !k.startsWith('@'));
|
|
36
|
+
// Route based on @udata operation
|
|
37
|
+
if ('@udata:set' in fields) {
|
|
38
|
+
const udata = JSON.parse(fields['@udata:set']);
|
|
39
|
+
const fieldsToSet = Array.isArray(udata)
|
|
40
|
+
? Object.fromEntries(Array.from({ length: udata.length / 2 }, (_, i) => [
|
|
41
|
+
udata[i * 2],
|
|
42
|
+
udata[i * 2 + 1],
|
|
43
|
+
]))
|
|
44
|
+
: udata;
|
|
45
|
+
const result = await this.setFields(key, fieldsToSet);
|
|
46
|
+
if (replayId)
|
|
47
|
+
await this.searchClient.hset(key, { [replayId]: String(result) });
|
|
48
|
+
return result;
|
|
49
|
+
}
|
|
50
|
+
if ('@udata:get' in fields) {
|
|
51
|
+
const result = await this.getField(key, fields['@udata:get']);
|
|
52
|
+
if (replayId)
|
|
53
|
+
await this.searchClient.hset(key, { [replayId]: result });
|
|
54
|
+
return result;
|
|
55
|
+
}
|
|
56
|
+
if ('@udata:mget' in fields) {
|
|
57
|
+
const result = await this.getFields(key, JSON.parse(fields['@udata:mget']));
|
|
58
|
+
if (replayId)
|
|
59
|
+
await this.searchClient.hset(key, { [replayId]: result.join('|||') });
|
|
60
|
+
return result;
|
|
61
|
+
}
|
|
62
|
+
if ('@udata:delete' in fields) {
|
|
63
|
+
const result = await this.deleteFields(key, JSON.parse(fields['@udata:delete']));
|
|
64
|
+
if (replayId)
|
|
65
|
+
await this.searchClient.hset(key, { [replayId]: String(result) });
|
|
66
|
+
return result;
|
|
67
|
+
}
|
|
68
|
+
if ('@udata:increment' in fields) {
|
|
69
|
+
const { field, value } = JSON.parse(fields['@udata:increment']);
|
|
70
|
+
const result = await this.incrementFieldByFloat(key, field, value);
|
|
71
|
+
if (replayId)
|
|
72
|
+
await this.searchClient.hset(key, { [replayId]: String(result) });
|
|
73
|
+
return result;
|
|
74
|
+
}
|
|
75
|
+
if ('@udata:multiply' in fields) {
|
|
76
|
+
const { field, value } = JSON.parse(fields['@udata:multiply']);
|
|
77
|
+
const result = await this.incrementFieldByFloat(key, field, Math.log(value));
|
|
78
|
+
if (replayId)
|
|
79
|
+
await this.searchClient.hset(key, { [replayId]: String(result) });
|
|
80
|
+
return result;
|
|
81
|
+
}
|
|
82
|
+
if ('@udata:all' in fields) {
|
|
83
|
+
const all = await this.getAllFields(key);
|
|
84
|
+
const result = Object.fromEntries(Object.entries(all).filter(([k]) => k.startsWith('_')));
|
|
85
|
+
if (replayId)
|
|
86
|
+
await this.searchClient.hset(key, { [replayId]: JSON.stringify(result) });
|
|
87
|
+
return result;
|
|
88
|
+
}
|
|
89
|
+
// Default: call setFields
|
|
90
|
+
return await this.setFields(key, fields);
|
|
91
|
+
}
|
|
92
|
+
async setFields(key, fields) {
|
|
93
|
+
try {
|
|
94
|
+
const result = await this.searchClient.hset(key, fields);
|
|
95
|
+
return Number(result);
|
|
96
|
+
}
|
|
97
|
+
catch (error) {
|
|
98
|
+
this.logger.error(`Error setting fields for key: ${key}`, { error });
|
|
99
|
+
throw error;
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
async getField(key, field) {
|
|
103
|
+
try {
|
|
104
|
+
return await this.searchClient.hget(key, field);
|
|
105
|
+
}
|
|
106
|
+
catch (error) {
|
|
107
|
+
this.logger.error(`Error getting field ${field} for key: ${key}`, {
|
|
108
|
+
error,
|
|
109
|
+
});
|
|
110
|
+
throw error;
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
async getFields(key, fields) {
|
|
114
|
+
try {
|
|
115
|
+
return await this.searchClient.hmget(key, [...fields]);
|
|
116
|
+
}
|
|
117
|
+
catch (error) {
|
|
118
|
+
this.logger.error(`Error getting fields for key: ${key}`, { error });
|
|
119
|
+
throw error;
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
async getAllFields(key) {
|
|
123
|
+
try {
|
|
124
|
+
return await this.searchClient.hgetall(key);
|
|
125
|
+
}
|
|
126
|
+
catch (error) {
|
|
127
|
+
this.logger.error(`Error getting fields for key: ${key}`, { error });
|
|
128
|
+
throw error;
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
async deleteFields(key, fields) {
|
|
132
|
+
try {
|
|
133
|
+
const result = await this.searchClient.hdel(key, ...fields);
|
|
134
|
+
return Number(result);
|
|
135
|
+
}
|
|
136
|
+
catch (error) {
|
|
137
|
+
this.logger.error(`Error deleting fields for key: ${key}`, { error });
|
|
138
|
+
throw error;
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
async incrementFieldByFloat(key, field, increment) {
|
|
142
|
+
try {
|
|
143
|
+
const result = await this.searchClient.hincrbyfloat(key, field, increment);
|
|
144
|
+
return Number(result);
|
|
145
|
+
}
|
|
146
|
+
catch (error) {
|
|
147
|
+
this.logger.error(`Error incrementing field ${field} for key: ${key}`, {
|
|
148
|
+
error,
|
|
149
|
+
});
|
|
150
|
+
throw error;
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
async sendQuery(...query) {
|
|
154
|
+
try {
|
|
155
|
+
return await this.searchClient.call(...query);
|
|
156
|
+
}
|
|
157
|
+
catch (error) {
|
|
158
|
+
this.logger.error('Error executing query', { error });
|
|
159
|
+
throw error;
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
async sendIndexedQuery(index, query) {
|
|
163
|
+
try {
|
|
164
|
+
if (query[0]?.startsWith('FT.')) {
|
|
165
|
+
const [cmd, ...rest] = query;
|
|
166
|
+
return (await this.searchClient.call(cmd, ...rest));
|
|
167
|
+
}
|
|
168
|
+
return (await this.searchClient.call('FT.SEARCH', index, ...query));
|
|
169
|
+
}
|
|
170
|
+
catch (error) {
|
|
171
|
+
this.logger.error('Error executing query', { error });
|
|
172
|
+
throw error;
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
// Entity methods - not implemented for Redis (postgres-specific JSONB operations)
// Unsupported stub: always throws so callers fail fast instead of silently no-oping.
async findEntities() {
    throw new Error('Entity findEntities not supported in Redis - use PostgreSQL');
}
|
|
179
|
+
// Unsupported stub: entity lookups require PostgreSQL JSONB; always throws.
async findEntityById() {
    throw new Error('Entity findEntityById not supported in Redis - use PostgreSQL');
}
|
|
182
|
+
// Unsupported stub: conditional entity queries require PostgreSQL JSONB; always throws.
async findEntitiesByCondition() {
    throw new Error('Entity findEntitiesByCondition not supported in Redis - use PostgreSQL');
}
|
|
185
|
+
// Unsupported stub: entity index creation requires PostgreSQL; always throws.
async createEntityIndex() {
    throw new Error('Entity createEntityIndex not supported in Redis - use PostgreSQL');
}
|
|
188
|
+
}
|
|
189
|
+
exports.IORedisSearchService = IORedisSearchService;
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { SearchService } from '../../index';
import { ILogger } from '../../../logger';
import { RedisRedisClientType } from '../../../../types/redis';
/**
 * Type declarations for the node-redis-backed search service.
 * Wraps hash (HSET/HGET/HMGET/HGETALL/HDEL/HINCRBYFLOAT) and
 * RediSearch (FT.*) operations; entity methods are declared but
 * unsupported on Redis (they are PostgreSQL JSONB features).
 */
declare class RedisSearchService extends SearchService<RedisRedisClientType> {
    constructor(searchClient: RedisRedisClientType, storeClient?: RedisRedisClientType);
    /** Binds namespace/app identifiers and the logger before use. */
    init(namespace: string, appId: string, logger: ILogger): Promise<void>;
    /** Creates a RediSearch index over the given key prefixes and schema. */
    createSearchIndex(indexName: string, prefixes: string[], schema: string[]): Promise<void>;
    /** Lists the names of existing RediSearch indexes. */
    listSearchIndexes(): Promise<string[]>;
    /**
     * Dispatches `@udata:*` control fields (set/get/mget/delete/increment/
     * multiply/all) or falls through to a plain field write.
     * NOTE(review): return shape varies by operation — verify against callers.
     */
    updateContext(key: string, fields: Record<string, string>): Promise<any>;
    /** HSET wrapper; resolves to the count of newly created fields. */
    setFields(key: string, fields: Record<string, string>): Promise<number>;
    /** HGET wrapper for a single field. */
    getField(key: string, field: string): Promise<string>;
    /** HMGET wrapper; results are positional with the requested fields. */
    getFields(key: string, fields: string[]): Promise<string[]>;
    /** HGETALL wrapper returning every field/value pair of the hash. */
    getAllFields(key: string): Promise<Record<string, string>>;
    /** HDEL wrapper; resolves to the number of fields removed. */
    deleteFields(key: string, fields: string[]): Promise<number>;
    /** HINCRBYFLOAT wrapper; resolves to the new numeric value. */
    incrementFieldByFloat(key: string, field: string, increment: number): Promise<number>;
    /** Sends an arbitrary command (name + args) to the client. */
    sendQuery(...query: any[]): Promise<any>;
    /** Runs an FT.* command verbatim, or FT.SEARCH against `index`. */
    sendIndexedQuery(index: string, query: string[]): Promise<string[]>;
    /** Unsupported on Redis — always rejects; use PostgreSQL. */
    findEntities(): Promise<any[]>;
    /** Unsupported on Redis — always rejects; use PostgreSQL. */
    findEntityById(): Promise<any>;
    /** Unsupported on Redis — always rejects; use PostgreSQL. */
    findEntitiesByCondition(): Promise<any[]>;
    /** Unsupported on Redis — always rejects; use PostgreSQL. */
    createEntityIndex(): Promise<void>;
}
export { RedisSearchService };
|