@hotmeshio/hotmesh 0.3.32 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (207)
  1. package/README.md +128 -823
  2. package/build/index.d.ts +9 -9
  3. package/build/index.js +10 -10
  4. package/build/modules/enums.d.ts +23 -23
  5. package/build/modules/enums.js +26 -26
  6. package/build/modules/errors.d.ts +16 -16
  7. package/build/modules/errors.js +28 -28
  8. package/build/modules/key.js +190 -1
  9. package/build/modules/utils.js +374 -1
  10. package/build/package.json +22 -21
  11. package/build/services/activities/activity.js +549 -1
  12. package/build/services/activities/await.js +114 -1
  13. package/build/services/activities/cycle.js +112 -1
  14. package/build/services/activities/hook.js +168 -1
  15. package/build/services/activities/index.js +20 -1
  16. package/build/services/activities/interrupt.js +158 -1
  17. package/build/services/activities/signal.js +134 -1
  18. package/build/services/activities/trigger.js +246 -1
  19. package/build/services/activities/worker.js +106 -1
  20. package/build/services/collator/index.js +293 -1
  21. package/build/services/compiler/deployer.js +488 -1
  22. package/build/services/compiler/index.js +112 -1
  23. package/build/services/compiler/validator.js +147 -1
  24. package/build/services/engine/index.js +761 -1
  25. package/build/services/exporter/index.js +126 -1
  26. package/build/services/hotmesh/index.d.ts +160 -17
  27. package/build/services/hotmesh/index.js +160 -17
  28. package/build/services/mapper/index.js +81 -1
  29. package/build/services/{meshflow → memflow}/client.d.ts +3 -3
  30. package/build/services/{meshflow → memflow}/client.js +17 -16
  31. package/build/services/{meshflow → memflow}/connection.d.ts +2 -2
  32. package/build/services/{meshflow → memflow}/connection.js +1 -1
  33. package/build/services/memflow/context.d.ts +143 -0
  34. package/build/services/memflow/context.js +299 -0
  35. package/build/services/{meshflow → memflow}/exporter.d.ts +6 -6
  36. package/build/services/memflow/exporter.js +215 -0
  37. package/build/services/{meshflow → memflow}/handle.d.ts +4 -4
  38. package/build/services/{meshflow → memflow}/handle.js +2 -2
  39. package/build/services/{meshflow → memflow}/index.d.ts +18 -13
  40. package/build/services/{meshflow → memflow}/index.js +26 -21
  41. package/build/services/{meshflow → memflow}/schemas/factory.d.ts +4 -4
  42. package/build/services/{meshflow → memflow}/schemas/factory.js +5 -5
  43. package/build/services/{meshflow → memflow}/search.d.ts +1 -1
  44. package/build/services/{meshflow → memflow}/search.js +4 -4
  45. package/build/services/{meshflow → memflow}/worker.d.ts +5 -5
  46. package/build/services/{meshflow → memflow}/worker.js +24 -24
  47. package/build/services/memflow/workflow/common.d.ts +20 -0
  48. package/build/services/memflow/workflow/common.js +47 -0
  49. package/build/services/memflow/workflow/contextMethods.d.ts +14 -0
  50. package/build/services/memflow/workflow/contextMethods.js +33 -0
  51. package/build/services/{meshflow → memflow}/workflow/execChild.js +12 -12
  52. package/build/services/memflow/workflow/execHook.d.ts +65 -0
  53. package/build/services/memflow/workflow/execHook.js +73 -0
  54. package/build/services/{meshflow → memflow}/workflow/hook.js +19 -3
  55. package/build/services/{meshflow → memflow}/workflow/index.d.ts +7 -3
  56. package/build/services/{meshflow → memflow}/workflow/index.js +7 -3
  57. package/build/services/{meshflow → memflow}/workflow/proxyActivities.d.ts +2 -2
  58. package/build/services/{meshflow → memflow}/workflow/proxyActivities.js +8 -8
  59. package/build/services/{meshflow → memflow}/workflow/sleepFor.js +2 -2
  60. package/build/services/{meshflow → memflow}/workflow/waitFor.js +2 -2
  61. package/build/services/meshdata/index.d.ts +24 -24
  62. package/build/services/meshdata/index.js +40 -40
  63. package/build/services/pipe/functions/array.js +74 -1
  64. package/build/services/pipe/functions/bitwise.js +24 -1
  65. package/build/services/pipe/functions/conditional.js +36 -1
  66. package/build/services/pipe/functions/cron.js +40 -1
  67. package/build/services/pipe/functions/date.js +171 -1
  68. package/build/services/pipe/functions/index.js +30 -1
  69. package/build/services/pipe/functions/json.js +12 -1
  70. package/build/services/pipe/functions/logical.js +12 -1
  71. package/build/services/pipe/functions/math.js +184 -1
  72. package/build/services/pipe/functions/number.js +60 -1
  73. package/build/services/pipe/functions/object.js +81 -1
  74. package/build/services/pipe/functions/string.js +69 -1
  75. package/build/services/pipe/functions/symbol.js +33 -1
  76. package/build/services/pipe/functions/unary.js +18 -1
  77. package/build/services/pipe/index.js +242 -1
  78. package/build/services/quorum/index.js +263 -1
  79. package/build/services/reporter/index.js +348 -1
  80. package/build/services/router/config/index.d.ts +11 -0
  81. package/build/services/router/config/index.js +36 -0
  82. package/build/services/router/consumption/index.d.ts +34 -0
  83. package/build/services/router/consumption/index.js +395 -0
  84. package/build/services/router/error-handling/index.d.ts +8 -0
  85. package/build/services/router/error-handling/index.js +98 -0
  86. package/build/services/router/index.d.ts +13 -16
  87. package/build/services/router/index.js +121 -1
  88. package/build/services/router/lifecycle/index.d.ts +27 -0
  89. package/build/services/router/lifecycle/index.js +80 -0
  90. package/build/services/router/telemetry/index.d.ts +11 -0
  91. package/build/services/router/telemetry/index.js +32 -0
  92. package/build/services/router/throttling/index.d.ts +23 -0
  93. package/build/services/router/throttling/index.js +76 -0
  94. package/build/services/search/index.d.ts +2 -1
  95. package/build/services/search/providers/postgres/postgres.d.ts +2 -1
  96. package/build/services/search/providers/postgres/postgres.js +149 -1
  97. package/build/services/search/providers/redis/ioredis.d.ts +1 -0
  98. package/build/services/search/providers/redis/ioredis.js +121 -1
  99. package/build/services/search/providers/redis/redis.d.ts +1 -0
  100. package/build/services/search/providers/redis/redis.js +134 -1
  101. package/build/services/serializer/index.js +282 -1
  102. package/build/services/store/providers/postgres/kvsql.d.ts +1 -1
  103. package/build/services/store/providers/postgres/kvsql.js +198 -1
  104. package/build/services/store/providers/postgres/kvtables.js +441 -1
  105. package/build/services/store/providers/postgres/kvtransaction.js +248 -1
  106. package/build/services/store/providers/postgres/kvtypes/hash.d.ts +1 -1
  107. package/build/services/store/providers/postgres/kvtypes/hash.js +1287 -1
  108. package/build/services/store/providers/postgres/kvtypes/list.js +194 -1
  109. package/build/services/store/providers/postgres/kvtypes/string.js +115 -1
  110. package/build/services/store/providers/postgres/kvtypes/zset.js +214 -1
  111. package/build/services/store/providers/postgres/postgres.js +1036 -1
  112. package/build/services/store/providers/redis/_base.js +980 -1
  113. package/build/services/store/providers/redis/ioredis.js +180 -1
  114. package/build/services/store/providers/redis/redis.js +199 -1
  115. package/build/services/store/providers/store-initializable.js +2 -1
  116. package/build/services/stream/index.d.ts +5 -0
  117. package/build/services/stream/providers/nats/nats.d.ts +1 -0
  118. package/build/services/stream/providers/nats/nats.js +225 -1
  119. package/build/services/stream/providers/postgres/kvtables.d.ts +1 -0
  120. package/build/services/stream/providers/postgres/kvtables.js +146 -1
  121. package/build/services/stream/providers/postgres/postgres.d.ts +19 -0
  122. package/build/services/stream/providers/postgres/postgres.js +519 -1
  123. package/build/services/stream/providers/redis/ioredis.d.ts +1 -0
  124. package/build/services/stream/providers/redis/ioredis.js +272 -1
  125. package/build/services/stream/providers/redis/redis.d.ts +1 -0
  126. package/build/services/stream/providers/redis/redis.js +305 -1
  127. package/build/services/stream/providers/stream-initializable.js +2 -1
  128. package/build/services/sub/providers/nats/nats.js +105 -1
  129. package/build/services/sub/providers/postgres/postgres.js +92 -1
  130. package/build/services/sub/providers/redis/ioredis.js +81 -1
  131. package/build/services/sub/providers/redis/redis.js +72 -1
  132. package/build/services/task/index.js +206 -1
  133. package/build/services/telemetry/index.js +306 -1
  134. package/build/services/worker/index.js +197 -1
  135. package/build/types/error.d.ts +5 -5
  136. package/build/types/exporter.d.ts +1 -1
  137. package/build/types/index.d.ts +3 -3
  138. package/build/types/manifest.d.ts +2 -2
  139. package/build/types/{meshflow.d.ts → memflow.d.ts} +15 -15
  140. package/build/types/meshdata.d.ts +3 -3
  141. package/build/types/postgres.d.ts +7 -0
  142. package/build/types/stream.d.ts +3 -0
  143. package/index.ts +11 -11
  144. package/package.json +22 -21
  145. package/.github/ISSUE_TEMPLATE/bug_report.md +0 -38
  146. package/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
  147. package/build/services/meshflow/exporter.js +0 -1
  148. package/build/services/meshflow/workflow/common.d.ts +0 -18
  149. package/build/services/meshflow/workflow/common.js +0 -45
  150. package/typedoc.json +0 -46
  151. package/types/activity.ts +0 -268
  152. package/types/app.ts +0 -20
  153. package/types/async.ts +0 -6
  154. package/types/cache.ts +0 -1
  155. package/types/collator.ts +0 -9
  156. package/types/error.ts +0 -56
  157. package/types/exporter.ts +0 -102
  158. package/types/hook.ts +0 -44
  159. package/types/hotmesh.ts +0 -314
  160. package/types/index.ts +0 -306
  161. package/types/job.ts +0 -233
  162. package/types/logger.ts +0 -8
  163. package/types/manifest.ts +0 -70
  164. package/types/map.ts +0 -5
  165. package/types/meshcall.ts +0 -235
  166. package/types/meshdata.ts +0 -278
  167. package/types/meshflow.ts +0 -645
  168. package/types/ms.d.ts +0 -7
  169. package/types/nats.ts +0 -270
  170. package/types/pipe.ts +0 -90
  171. package/types/postgres.ts +0 -105
  172. package/types/provider.ts +0 -161
  173. package/types/quorum.ts +0 -167
  174. package/types/redis.ts +0 -404
  175. package/types/serializer.ts +0 -40
  176. package/types/stats.ts +0 -117
  177. package/types/stream.ts +0 -227
  178. package/types/task.ts +0 -7
  179. package/types/telemetry.ts +0 -16
  180. package/types/transition.ts +0 -20
  181. /package/build/services/{meshflow → memflow}/workflow/all.d.ts +0 -0
  182. /package/build/services/{meshflow → memflow}/workflow/all.js +0 -0
  183. /package/build/services/{meshflow → memflow}/workflow/context.d.ts +0 -0
  184. /package/build/services/{meshflow → memflow}/workflow/context.js +0 -0
  185. /package/build/services/{meshflow → memflow}/workflow/didRun.d.ts +0 -0
  186. /package/build/services/{meshflow → memflow}/workflow/didRun.js +0 -0
  187. /package/build/services/{meshflow → memflow}/workflow/emit.d.ts +0 -0
  188. /package/build/services/{meshflow → memflow}/workflow/emit.js +0 -0
  189. /package/build/services/{meshflow → memflow}/workflow/enrich.d.ts +0 -0
  190. /package/build/services/{meshflow → memflow}/workflow/enrich.js +0 -0
  191. /package/build/services/{meshflow → memflow}/workflow/execChild.d.ts +0 -0
  192. /package/build/services/{meshflow → memflow}/workflow/hook.d.ts +0 -0
  193. /package/build/services/{meshflow → memflow}/workflow/interrupt.d.ts +0 -0
  194. /package/build/services/{meshflow → memflow}/workflow/interrupt.js +0 -0
  195. /package/build/services/{meshflow → memflow}/workflow/isSideEffectAllowed.d.ts +0 -0
  196. /package/build/services/{meshflow → memflow}/workflow/isSideEffectAllowed.js +0 -0
  197. /package/build/services/{meshflow → memflow}/workflow/random.d.ts +0 -0
  198. /package/build/services/{meshflow → memflow}/workflow/random.js +0 -0
  199. /package/build/services/{meshflow → memflow}/workflow/searchMethods.d.ts +0 -0
  200. /package/build/services/{meshflow → memflow}/workflow/searchMethods.js +0 -0
  201. /package/build/services/{meshflow → memflow}/workflow/signal.d.ts +0 -0
  202. /package/build/services/{meshflow → memflow}/workflow/signal.js +0 -0
  203. /package/build/services/{meshflow → memflow}/workflow/sleepFor.d.ts +0 -0
  204. /package/build/services/{meshflow → memflow}/workflow/trace.d.ts +0 -0
  205. /package/build/services/{meshflow → memflow}/workflow/trace.js +0 -0
  206. /package/build/services/{meshflow → memflow}/workflow/waitFor.d.ts +0 -0
  207. /package/build/types/{meshflow.js → memflow.js} +0 -0
@@ -0,0 +1,395 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ConsumptionManager = void 0;
4
+ const utils_1 = require("../../../modules/utils");
5
+ const telemetry_1 = require("../telemetry");
6
+ const config_1 = require("../config");
7
+ const stream_1 = require("../../../types/stream");
8
+ const key_1 = require("../../../modules/key");
9
+ class ConsumptionManager {
10
+ constructor(stream, logger, throttleManager, errorHandler, lifecycleManager, reclaimDelay, reclaimCount, appId, role, router) {
11
+ this.errorCount = 0;
12
+ this.counts = {};
13
+ this.stream = stream;
14
+ this.logger = logger;
15
+ this.throttleManager = throttleManager;
16
+ this.errorHandler = errorHandler;
17
+ this.lifecycleManager = lifecycleManager;
18
+ this.reclaimDelay = reclaimDelay;
19
+ this.reclaimCount = reclaimCount;
20
+ this.appId = appId;
21
+ this.role = role;
22
+ this.router = router;
23
+ }
24
+ async createGroup(stream, group) {
25
+ try {
26
+ await this.stream.createConsumerGroup(stream, group);
27
+ }
28
+ catch (err) {
29
+ this.logger.debug('router-stream-group-exists', { stream, group });
30
+ }
31
+ }
32
+ async publishMessage(topic, streamData, transaction) {
33
+ const code = streamData?.code || '200';
34
+ this.counts[code] = (this.counts[code] || 0) + 1;
35
+ const stream = this.stream.mintKey(key_1.KeyType.STREAMS, { topic });
36
+ const responses = await this.stream.publishMessages(stream, [JSON.stringify(streamData)], { transaction });
37
+ return responses[0];
38
+ }
39
+ async consumeMessages(stream, group, consumer, callback) {
40
+ // exit early if readonly
41
+ if (this.lifecycleManager.isReadonly()) {
42
+ this.logger.info(`router-stream-readonly`, { group, consumer, stream });
43
+ return;
44
+ }
45
+ this.logger.info(`router-stream-starting`, { group, consumer, stream });
46
+ await this.lifecycleManager.startConsuming(this.router);
47
+ await this.createGroup(stream, group);
48
+ // Check if notifications are supported
49
+ const features = this.stream.getProviderSpecificFeatures();
50
+ const supportsNotifications = features.supportsNotifications;
51
+ if (supportsNotifications) {
52
+ this.logger.info(`router-stream-using-notifications`, { group, consumer, stream });
53
+ this.lifecycleManager.setIsUsingNotifications(true);
54
+ return this.consumeWithNotifications(stream, group, consumer, callback);
55
+ }
56
+ else {
57
+ this.logger.info(`router-stream-using-polling`, { group, consumer, stream });
58
+ this.lifecycleManager.setIsUsingNotifications(false);
59
+ return this.consumeWithPolling(stream, group, consumer, callback);
60
+ }
61
+ }
62
+ async consumeWithNotifications(stream, group, consumer, callback) {
63
+ let lastCheckedPendingMessagesAt = Date.now();
64
+ // Set up notification-based consumption
65
+ const notificationCallback = async (messages) => {
66
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
67
+ return;
68
+ }
69
+ await this.throttleManager.customSleep(); // respect throttle
70
+ if (this.lifecycleManager.isStopped(group, consumer, stream) || this.throttleManager.isPaused()) {
71
+ return;
72
+ }
73
+ // Process messages - use parallel processing for PostgreSQL
74
+ const features = this.stream.getProviderSpecificFeatures();
75
+ const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications currently
76
+ if (isPostgres && messages.length > 1) {
77
+ // Parallel processing for PostgreSQL batches
78
+ this.logger.debug('postgres-stream-parallel-processing', {
79
+ streamName: stream,
80
+ groupName: group,
81
+ messageCount: messages.length
82
+ });
83
+ const processingStart = Date.now();
84
+ const processingPromises = messages.map(async (message) => {
85
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
86
+ return;
87
+ }
88
+ return this.consumeOne(stream, group, message.id, message.data, callback);
89
+ });
90
+ // Process all messages in parallel
91
+ await Promise.allSettled(processingPromises);
92
+ this.logger.debug('postgres-stream-parallel-processing-complete', {
93
+ streamName: stream,
94
+ groupName: group,
95
+ messageCount: messages.length,
96
+ processingDuration: Date.now() - processingStart
97
+ });
98
+ }
99
+ else {
100
+ // Sequential processing for other providers or single messages
101
+ for (const message of messages) {
102
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
103
+ return;
104
+ }
105
+ await this.consumeOne(stream, group, message.id, message.data, callback);
106
+ }
107
+ }
108
+ // Check for pending messages if supported and enough time has passed
109
+ const now = Date.now();
110
+ if (this.stream.getProviderSpecificFeatures().supportsRetry &&
111
+ now - lastCheckedPendingMessagesAt > this.reclaimDelay) {
112
+ lastCheckedPendingMessagesAt = now;
113
+ const pendingMessages = await this.stream.retryMessages(stream, group, {
114
+ consumerName: consumer,
115
+ minIdleTime: this.reclaimDelay,
116
+ limit: config_1.HMSH_XPENDING_COUNT,
117
+ });
118
+ // Process reclaimed messages (also parallel for PostgreSQL)
119
+ if (isPostgres && pendingMessages.length > 1) {
120
+ const reclaimPromises = pendingMessages.map(async (message) => {
121
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
122
+ return;
123
+ }
124
+ return this.consumeOne(stream, group, message.id, message.data, callback);
125
+ });
126
+ await Promise.allSettled(reclaimPromises);
127
+ }
128
+ else {
129
+ for (const message of pendingMessages) {
130
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
131
+ return;
132
+ }
133
+ await this.consumeOne(stream, group, message.id, message.data, callback);
134
+ }
135
+ }
136
+ }
137
+ };
138
+ try {
139
+ // Start notification-based consumption - this should return immediately after setup
140
+ await this.stream.consumeMessages(stream, group, consumer, {
141
+ enableNotifications: true,
142
+ notificationCallback,
143
+ blockTimeout: config_1.HMSH_BLOCK_TIME_MS,
144
+ });
145
+ // Don't block here - let the worker initialization complete
146
+ // The notification system will handle message processing asynchronously
147
+ }
148
+ catch (error) {
149
+ this.logger.error(`router-stream-notification-error`, {
150
+ error,
151
+ stream,
152
+ group,
153
+ consumer,
154
+ });
155
+ // Fall back to polling if notifications fail
156
+ this.logger.info(`router-stream-fallback-to-polling`, { group, consumer, stream });
157
+ this.lifecycleManager.setIsUsingNotifications(false);
158
+ return this.consumeWithPolling(stream, group, consumer, callback);
159
+ }
160
+ }
161
+ async consumeWithPolling(stream, group, consumer, callback) {
162
+ let lastCheckedPendingMessagesAt = Date.now();
163
+ // Track if we've hit maximum backoff. Initially false.
164
+ // When true, we use the fallback mode: single attempt, no backoff, then sleep 3s if empty.
165
+ if (typeof this.hasReachedMaxBackoff === 'undefined') {
166
+ this.hasReachedMaxBackoff = false;
167
+ }
168
+ const sleepFor = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
169
+ const consume = async () => {
170
+ await this.throttleManager.customSleep(); // always respect the global throttle
171
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
172
+ return;
173
+ }
174
+ else if (this.throttleManager.isPaused()) {
175
+ setImmediate(consume.bind(this));
176
+ return;
177
+ }
178
+ // Randomizer that asymptotes at 150% of `HMSH_BLOCK_TIME_MS`
179
+ const streamDuration = config_1.HMSH_BLOCK_TIME_MS + Math.round(config_1.HMSH_BLOCK_TIME_MS * Math.random());
180
+ try {
181
+ let messages = [];
182
+ if (!this.hasReachedMaxBackoff) {
183
+ // Normal mode: try with backoff and finite retries
184
+ const features = this.stream.getProviderSpecificFeatures();
185
+ const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications
186
+ const batchSize = isPostgres ? 10 : 1; // Use batch size of 10 for PostgreSQL, 1 for others
187
+ messages = await this.stream.consumeMessages(stream, group, consumer, {
188
+ blockTimeout: streamDuration,
189
+ batchSize,
190
+ enableBackoff: true,
191
+ initialBackoff: config_1.INITIAL_STREAM_BACKOFF,
192
+ maxBackoff: config_1.MAX_STREAM_BACKOFF,
193
+ maxRetries: config_1.MAX_STREAM_RETRIES,
194
+ });
195
+ }
196
+ else {
197
+ // Fallback mode: just try once, no backoff
198
+ const features = this.stream.getProviderSpecificFeatures();
199
+ const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications
200
+ const batchSize = isPostgres ? 10 : 1; // Use batch size of 10 for PostgreSQL, 1 for others
201
+ messages = await this.stream.consumeMessages(stream, group, consumer, {
202
+ blockTimeout: streamDuration,
203
+ batchSize,
204
+ enableBackoff: false,
205
+ maxRetries: 1,
206
+ });
207
+ }
208
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
209
+ return;
210
+ }
211
+ else if (this.throttleManager.isPaused()) {
212
+ setImmediate(consume.bind(this));
213
+ return;
214
+ }
215
+ if (messages.length > 0) {
216
+ // Reset if we were in fallback mode
217
+ this.hasReachedMaxBackoff = false;
218
+ // Process messages - use parallel processing for PostgreSQL
219
+ const features = this.stream.getProviderSpecificFeatures();
220
+ const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications currently
221
+ if (isPostgres && messages.length > 1) {
222
+ // Parallel processing for PostgreSQL batches
223
+ this.logger.debug('postgres-stream-parallel-processing-polling', {
224
+ streamName: stream,
225
+ groupName: group,
226
+ messageCount: messages.length
227
+ });
228
+ const processingStart = Date.now();
229
+ const processingPromises = messages.map(async (message) => {
230
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
231
+ return;
232
+ }
233
+ return this.consumeOne(stream, group, message.id, message.data, callback);
234
+ });
235
+ // Process all messages in parallel
236
+ await Promise.allSettled(processingPromises);
237
+ this.logger.debug('postgres-stream-parallel-processing-polling-complete', {
238
+ streamName: stream,
239
+ groupName: group,
240
+ messageCount: messages.length,
241
+ processingDuration: Date.now() - processingStart
242
+ });
243
+ }
244
+ else {
245
+ // Sequential processing for other providers or single messages
246
+ for (const message of messages) {
247
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
248
+ return;
249
+ }
250
+ await this.consumeOne(stream, group, message.id, message.data, callback);
251
+ }
252
+ }
253
+ // Check for pending messages if supported and enough time has passed
254
+ const now = Date.now();
255
+ if (this.stream.getProviderSpecificFeatures().supportsRetry &&
256
+ now - lastCheckedPendingMessagesAt > this.reclaimDelay) {
257
+ lastCheckedPendingMessagesAt = now;
258
+ const pendingMessages = await this.stream.retryMessages(stream, group, {
259
+ consumerName: consumer,
260
+ minIdleTime: this.reclaimDelay,
261
+ limit: config_1.HMSH_XPENDING_COUNT,
262
+ });
263
+ // Process reclaimed messages (also parallel for PostgreSQL)
264
+ if (isPostgres && pendingMessages.length > 1) {
265
+ const reclaimPromises = pendingMessages.map(async (message) => {
266
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
267
+ return;
268
+ }
269
+ return this.consumeOne(stream, group, message.id, message.data, callback);
270
+ });
271
+ await Promise.allSettled(reclaimPromises);
272
+ }
273
+ else {
274
+ for (const message of pendingMessages) {
275
+ if (this.lifecycleManager.isStopped(group, consumer, stream)) {
276
+ return;
277
+ }
278
+ await this.consumeOne(stream, group, message.id, message.data, callback);
279
+ }
280
+ }
281
+ }
282
+ // If we got messages, just continue as normal
283
+ setImmediate(consume.bind(this));
284
+ }
285
+ else {
286
+ // No messages found
287
+ if (!this.hasReachedMaxBackoff) {
288
+ // We were in normal mode and got no messages after maxRetries
289
+ // Switch to fallback mode
290
+ this.hasReachedMaxBackoff = true;
291
+ }
292
+ else {
293
+ // We are already in fallback mode, still no messages
294
+ // Sleep for MAX_STREAM_BACKOFF ms before trying again
295
+ await sleepFor(config_1.MAX_STREAM_BACKOFF);
296
+ }
297
+ // Try again after sleeping
298
+ setImmediate(consume.bind(this));
299
+ }
300
+ }
301
+ catch (error) {
302
+ if (this.lifecycleManager.getShouldConsume() && process.env.NODE_ENV !== 'test') {
303
+ this.logger.error(`router-stream-error`, {
304
+ error,
305
+ stream,
306
+ group,
307
+ consumer,
308
+ });
309
+ this.errorCount++;
310
+ const timeout = Math.min(config_1.HMSH_GRADUATED_INTERVAL_MS * 2 ** this.errorCount, config_1.HMSH_MAX_TIMEOUT_MS);
311
+ setTimeout(consume.bind(this), timeout);
312
+ }
313
+ }
314
+ };
315
+ consume.call(this);
316
+ }
317
+ async consumeOne(stream, group, id, input, callback) {
318
+ this.logger.debug(`stream-read-one`, { group, stream, id });
319
+ let output;
320
+ const telemetry = new telemetry_1.RouterTelemetry(this.appId);
321
+ try {
322
+ telemetry.startStreamSpan(input, this.role);
323
+ output = await this.execStreamLeg(input, stream, id, callback.bind(this));
324
+ telemetry.setStreamErrorFromOutput(output);
325
+ this.errorCount = 0;
326
+ }
327
+ catch (err) {
328
+ this.logger.error(`stream-read-one-error`, { group, stream, id, err });
329
+ telemetry.setStreamErrorFromException(err);
330
+ }
331
+ const messageId = await this.publishResponse(input, output);
332
+ telemetry.setStreamAttributes({ 'app.worker.mid': messageId });
333
+ await this.ackAndDelete(stream, group, id);
334
+ telemetry.endStreamSpan();
335
+ this.logger.debug(`stream-read-one-end`, { group, stream, id });
336
+ }
337
+ async execStreamLeg(input, stream, id, callback) {
338
+ let output;
339
+ try {
340
+ output = await callback(input);
341
+ }
342
+ catch (error) {
343
+ this.logger.error(`stream-call-function-error`, {
344
+ error,
345
+ input: input,
346
+ stack: error.stack,
347
+ message: error.message,
348
+ name: error.name,
349
+ stream,
350
+ id,
351
+ });
352
+ output = this.errorHandler.structureUnhandledError(input, error);
353
+ }
354
+ return output;
355
+ }
356
+ async ackAndDelete(stream, group, id) {
357
+ await this.stream.ackAndDelete(stream, group, [id]);
358
+ }
359
+ // Add batch acknowledgment method for PostgreSQL optimization
360
+ async ackAndDeleteBatch(stream, group, ids) {
361
+ if (ids.length === 0)
362
+ return;
363
+ const features = this.stream.getProviderSpecificFeatures();
364
+ const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications
365
+ if (isPostgres && ids.length > 1) {
366
+ // Batch acknowledgment for PostgreSQL
367
+ await this.stream.ackAndDelete(stream, group, ids);
368
+ }
369
+ else {
370
+ // Individual acknowledgments for other providers
371
+ for (const id of ids) {
372
+ await this.stream.ackAndDelete(stream, group, [id]);
373
+ }
374
+ }
375
+ }
376
+ async publishResponse(input, output) {
377
+ if (output && typeof output === 'object') {
378
+ if (output.status === 'error') {
379
+ return await this.errorHandler.handleRetry(input, output, this.publishMessage.bind(this));
380
+ }
381
+ else if (typeof output.metadata !== 'object') {
382
+ output.metadata = { ...input.metadata, guid: (0, utils_1.guid)() };
383
+ }
384
+ else {
385
+ output.metadata.guid = (0, utils_1.guid)();
386
+ }
387
+ output.type = stream_1.StreamDataType.RESPONSE;
388
+ return (await this.publishMessage(null, output));
389
+ }
390
+ }
391
+ isStreamMessage(result) {
392
+ return Array.isArray(result) && Array.isArray(result[0]);
393
+ }
394
+ }
395
+ exports.ConsumptionManager = ConsumptionManager;
@@ -0,0 +1,8 @@
1
+ import { StreamData, StreamDataResponse } from '../../../types/stream';
2
+ export declare class ErrorHandler {
3
+ shouldRetry(input: StreamData, output: StreamDataResponse): [boolean, number];
4
+ structureUnhandledError(input: StreamData, err: Error): StreamDataResponse;
5
+ structureUnacknowledgedError(input: StreamData): StreamDataResponse;
6
+ structureError(input: StreamData, output: StreamDataResponse): StreamDataResponse;
7
+ handleRetry(input: StreamData, output: StreamDataResponse, publishMessage: (topic: string, streamData: StreamData | StreamDataResponse) => Promise<string>): Promise<string>;
8
+ }
@@ -0,0 +1,98 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ErrorHandler = void 0;
4
+ const utils_1 = require("../../../modules/utils");
5
+ const config_1 = require("../config");
6
+ const stream_1 = require("../../../types/stream");
7
+ class ErrorHandler {
8
+ shouldRetry(input, output) {
9
+ //const isUnhandledEngineError = output.code === 500;
10
+ const policies = input.policies?.retry;
11
+ const errorCode = output.code.toString();
12
+ const policy = policies?.[errorCode];
13
+ const maxRetries = policy?.[0];
14
+ const tryCount = Math.min(input.metadata.try || 0, config_1.HMSH_MAX_RETRIES);
15
+ //only possible values for maxRetries are 1, 2, 3
16
+ //only possible values for tryCount are 0, 1, 2
17
+ if (maxRetries > tryCount) {
18
+ // 10ms, 100ms, or 1000ms delays between system retries
19
+ return [true, Math.pow(10, tryCount + 1)];
20
+ }
21
+ return [false, 0];
22
+ }
23
+ structureUnhandledError(input, err) {
24
+ const error = {};
25
+ if (typeof err.message === 'string') {
26
+ error.message = err.message;
27
+ }
28
+ else {
29
+ error.message = config_1.HMSH_STATUS_UNKNOWN;
30
+ }
31
+ if (typeof err.stack === 'string') {
32
+ error.stack = err.stack;
33
+ }
34
+ if (typeof err.name === 'string') {
35
+ error.name = err.name;
36
+ }
37
+ return {
38
+ status: 'error',
39
+ code: config_1.HMSH_CODE_UNKNOWN,
40
+ metadata: { ...input.metadata, guid: (0, utils_1.guid)() },
41
+ data: error,
42
+ };
43
+ }
44
+ structureUnacknowledgedError(input) {
45
+ const message = 'stream message max delivery count exceeded';
46
+ const code = config_1.HMSH_CODE_UNACKED;
47
+ const data = { message, code };
48
+ const output = {
49
+ metadata: { ...input.metadata, guid: (0, utils_1.guid)() },
50
+ status: stream_1.StreamStatus.ERROR,
51
+ code,
52
+ data,
53
+ };
54
+ //send unacknowleded errors to the engine (it has no topic)
55
+ delete output.metadata.topic;
56
+ return output;
57
+ }
58
+ structureError(input, output) {
59
+ const message = output.data?.message
60
+ ? output.data?.message.toString()
61
+ : config_1.HMSH_STATUS_UNKNOWN;
62
+ const statusCode = output.code || output.data?.code;
63
+ const code = isNaN(statusCode)
64
+ ? config_1.HMSH_CODE_UNKNOWN
65
+ : parseInt(statusCode.toString());
66
+ const stack = output.data?.stack
67
+ ? output.data?.stack.toString()
68
+ : undefined;
69
+ const data = { message, code, stack };
70
+ if (typeof output.data?.error === 'object') {
71
+ data.error = { ...output.data.error };
72
+ }
73
+ return {
74
+ status: stream_1.StreamStatus.ERROR,
75
+ code,
76
+ stack,
77
+ metadata: { ...input.metadata, guid: (0, utils_1.guid)() },
78
+ data,
79
+ };
80
+ }
81
+ async handleRetry(input, output, publishMessage) {
82
+ const [shouldRetry, timeout] = this.shouldRetry(input, output);
83
+ if (shouldRetry) {
84
+ await (0, utils_1.sleepFor)(timeout);
85
+ return await publishMessage(input.metadata.topic, {
86
+ data: input.data,
87
+ //note: retain guid (this is a retry attempt)
88
+ metadata: { ...input.metadata, try: (input.metadata.try || 0) + 1 },
89
+ policies: input.policies,
90
+ });
91
+ }
92
+ else {
93
+ const structuredError = this.structureError(input, output);
94
+ return await publishMessage(null, structuredError);
95
+ }
96
+ }
97
+ }
98
+ exports.ErrorHandler = ErrorHandler;
@@ -4,57 +4,54 @@ import { StreamService } from '../stream';
4
4
  import { RouterConfig, StreamData, StreamDataResponse, StreamRole } from '../../types/stream';
5
5
  import { ProviderClient, ProviderTransaction } from '../../types/provider';
6
6
  declare class Router<S extends StreamService<ProviderClient, ProviderTransaction>> {
7
- static instances: Set<Router<any>>;
8
7
  appId: string;
9
8
  guid: string;
10
- hasReachedMaxBackoff: boolean | undefined;
11
9
  role: StreamRole;
12
10
  topic: string | undefined;
13
11
  stream: S;
14
12
  reclaimDelay: number;
15
13
  reclaimCount: number;
16
14
  logger: ILogger;
17
- throttle: number;
15
+ readonly: boolean;
18
16
  errorCount: number;
19
17
  counts: {
20
18
  [key: string]: number;
21
19
  };
20
+ hasReachedMaxBackoff: boolean | undefined;
22
21
  currentTimerId: NodeJS.Timeout | null;
23
- shouldConsume: boolean;
24
22
  sleepPromiseResolve: (() => void) | null;
25
23
  innerPromiseResolve: (() => void) | null;
26
24
  isSleeping: boolean;
27
25
  sleepTimout: NodeJS.Timeout | null;
28
- readonly: boolean;
26
+ private isUsingNotifications;
27
+ private throttleManager;
28
+ private errorHandler;
29
+ private lifecycleManager;
30
+ private consumptionManager;
29
31
  constructor(config: RouterConfig, stream: S, logger: ILogger);
32
+ get throttle(): number;
33
+ get shouldConsume(): boolean;
34
+ set shouldConsume(value: boolean);
30
35
  private resetThrottleState;
31
36
  createGroup(stream: string, group: string): Promise<void>;
32
37
  publishMessage(topic: string, streamData: StreamData | StreamDataResponse, transaction?: ProviderTransaction): Promise<string | ProviderTransaction>;
33
- /**
34
- * An adjustable throttle that will interrupt a sleeping
35
- * router if the throttle is reduced and the sleep time
36
- * has elapsed. If the throttle is increased, or if
37
- * the sleep time has not elapsed, the router will continue
38
- * to sleep until the new termination point. This
39
- * allows for dynamic, elastic throttling with smooth
40
- * acceleration and deceleration.
41
- */
42
38
  customSleep(): Promise<void>;
43
39
  consumeMessages(stream: string, group: string, consumer: string, callback: (streamData: StreamData) => Promise<StreamDataResponse | void>): Promise<void>;
44
40
  isStreamMessage(result: any): boolean;
45
41
  isPaused(): boolean;
46
42
  isStopped(group: string, consumer: string, stream: string): boolean;
47
43
  consumeOne(stream: string, group: string, id: string, input: StreamData, callback: (streamData: StreamData) => Promise<StreamDataResponse | void>): Promise<void>;
48
- execStreamLeg(input: StreamData, stream: string, id: string, callback: (streamData: StreamData) => Promise<StreamDataResponse | void>): Promise<StreamData>;
44
+ execStreamLeg(input: StreamData, stream: string, id: string, callback: (streamData: StreamData) => Promise<StreamDataResponse | void>): Promise<StreamDataResponse>;
49
45
  ackAndDelete(stream: string, group: string, id: string): Promise<void>;
50
46
  publishResponse(input: StreamData, output: StreamDataResponse | void): Promise<string>;
51
47
  shouldRetry(input: StreamData, output: StreamDataResponse): [boolean, number];
52
48
  structureUnhandledError(input: StreamData, err: Error): StreamDataResponse;
53
- structureUnacknowledgedError(input: StreamData): StreamData;
49
+ structureUnacknowledgedError(input: StreamData): StreamDataResponse;
54
50
  structureError(input: StreamData, output: StreamDataResponse): StreamDataResponse;
55
51
  static stopConsuming(): Promise<void>;
56
52
  stopConsuming(): Promise<void>;
57
53
  cancelThrottle(): void;
58
54
  setThrottle(delayInMillis: number): void;
55
+ static get instances(): Set<Router<any>>;
59
56
  }
60
57
  export { Router };