@hotmeshio/hotmesh 0.13.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (194)
  1. package/README.md +18 -22
  2. package/build/modules/enums.d.ts +60 -5
  3. package/build/modules/enums.js +62 -7
  4. package/build/modules/errors.d.ts +15 -3
  5. package/build/modules/errors.js +17 -2
  6. package/build/package.json +6 -1
  7. package/build/services/activities/activity/context.d.ts +22 -0
  8. package/build/services/activities/activity/context.js +76 -0
  9. package/build/services/activities/activity/index.d.ts +116 -0
  10. package/build/services/activities/activity/index.js +299 -0
  11. package/build/services/activities/activity/mapping.d.ts +12 -0
  12. package/build/services/activities/activity/mapping.js +63 -0
  13. package/build/services/activities/activity/process.d.ts +28 -0
  14. package/build/services/activities/activity/process.js +100 -0
  15. package/build/services/activities/activity/protocol.d.ts +39 -0
  16. package/build/services/activities/activity/protocol.js +151 -0
  17. package/build/services/activities/activity/state.d.ts +40 -0
  18. package/build/services/activities/activity/state.js +143 -0
  19. package/build/services/activities/activity/transition.d.ts +23 -0
  20. package/build/services/activities/activity/transition.js +71 -0
  21. package/build/services/activities/activity/verify.d.ts +22 -0
  22. package/build/services/activities/activity/verify.js +85 -0
  23. package/build/services/activities/await.d.ts +1 -4
  24. package/build/services/activities/await.js +2 -36
  25. package/build/services/activities/cycle.d.ts +1 -11
  26. package/build/services/activities/cycle.js +3 -46
  27. package/build/services/activities/hook.d.ts +2 -11
  28. package/build/services/activities/hook.js +30 -50
  29. package/build/services/activities/interrupt.d.ts +2 -4
  30. package/build/services/activities/interrupt.js +4 -38
  31. package/build/services/activities/signal.d.ts +1 -11
  32. package/build/services/activities/signal.js +3 -48
  33. package/build/services/activities/trigger.d.ts +1 -3
  34. package/build/services/activities/trigger.js +0 -3
  35. package/build/services/activities/worker.d.ts +3 -6
  36. package/build/services/activities/worker.js +4 -40
  37. package/build/services/connector/factory.d.ts +6 -0
  38. package/build/services/connector/factory.js +24 -0
  39. package/build/services/durable/activity.d.ts +1 -1
  40. package/build/services/durable/activity.js +2 -2
  41. package/build/services/durable/client.d.ts +24 -29
  42. package/build/services/durable/client.js +24 -29
  43. package/build/services/durable/connection.d.ts +13 -7
  44. package/build/services/durable/connection.js +13 -7
  45. package/build/services/durable/handle.d.ts +58 -40
  46. package/build/services/durable/handle.js +60 -40
  47. package/build/services/durable/index.d.ts +148 -286
  48. package/build/services/durable/index.js +157 -292
  49. package/build/services/durable/interceptor.d.ts +43 -33
  50. package/build/services/durable/interceptor.js +59 -39
  51. package/build/services/durable/schemas/factory.d.ts +1 -1
  52. package/build/services/durable/schemas/factory.js +168 -38
  53. package/build/services/durable/telemetry.d.ts +80 -0
  54. package/build/services/durable/telemetry.js +137 -0
  55. package/build/services/durable/worker.d.ts +100 -21
  56. package/build/services/durable/worker.js +304 -63
  57. package/build/services/durable/workflow/all.d.ts +1 -1
  58. package/build/services/durable/workflow/all.js +1 -1
  59. package/build/services/durable/workflow/cancellationScope.d.ts +104 -0
  60. package/build/services/durable/workflow/cancellationScope.js +139 -0
  61. package/build/services/durable/workflow/common.d.ts +5 -4
  62. package/build/services/durable/workflow/common.js +6 -1
  63. package/build/services/durable/workflow/{waitFor.d.ts → condition.d.ts} +9 -8
  64. package/build/services/durable/workflow/{waitFor.js → condition.js} +44 -11
  65. package/build/services/durable/workflow/continueAsNew.d.ts +65 -0
  66. package/build/services/durable/workflow/continueAsNew.js +92 -0
  67. package/build/services/durable/workflow/didRun.d.ts +1 -1
  68. package/build/services/durable/workflow/didRun.js +3 -3
  69. package/build/services/durable/workflow/enrich.d.ts +5 -0
  70. package/build/services/durable/workflow/enrich.js +5 -0
  71. package/build/services/durable/workflow/entityMethods.d.ts +7 -0
  72. package/build/services/durable/workflow/entityMethods.js +7 -0
  73. package/build/services/durable/workflow/execHook.js +3 -3
  74. package/build/services/durable/workflow/execHookBatch.js +2 -2
  75. package/build/services/durable/workflow/{execChild.d.ts → executeChild.d.ts} +4 -40
  76. package/build/services/durable/workflow/{execChild.js → executeChild.js} +36 -45
  77. package/build/services/durable/workflow/hook.d.ts +1 -1
  78. package/build/services/durable/workflow/hook.js +4 -3
  79. package/build/services/durable/workflow/index.d.ts +45 -50
  80. package/build/services/durable/workflow/index.js +46 -51
  81. package/build/services/durable/workflow/interruption.d.ts +7 -6
  82. package/build/services/durable/workflow/interruption.js +11 -7
  83. package/build/services/durable/workflow/patched.d.ts +72 -0
  84. package/build/services/durable/workflow/patched.js +110 -0
  85. package/build/services/durable/workflow/proxyActivities.d.ts +7 -7
  86. package/build/services/durable/workflow/proxyActivities.js +50 -15
  87. package/build/services/durable/workflow/searchMethods.d.ts +7 -0
  88. package/build/services/durable/workflow/searchMethods.js +7 -0
  89. package/build/services/durable/workflow/signal.d.ts +4 -4
  90. package/build/services/durable/workflow/signal.js +4 -4
  91. package/build/services/durable/workflow/{sleepFor.d.ts → sleep.d.ts} +7 -7
  92. package/build/services/durable/workflow/{sleepFor.js → sleep.js} +39 -10
  93. package/build/services/durable/workflow/terminate.d.ts +55 -0
  94. package/build/services/durable/workflow/{interrupt.js → terminate.js} +21 -21
  95. package/build/services/durable/workflow/trace.js +2 -2
  96. package/build/services/durable/workflow/uuid4.d.ts +14 -0
  97. package/build/services/durable/workflow/uuid4.js +39 -0
  98. package/build/services/durable/workflow/{context.d.ts → workflowInfo.d.ts} +5 -5
  99. package/build/services/durable/workflow/{context.js → workflowInfo.js} +7 -7
  100. package/build/services/engine/compiler.d.ts +19 -0
  101. package/build/services/engine/compiler.js +20 -0
  102. package/build/services/engine/completion.d.ts +46 -0
  103. package/build/services/engine/completion.js +145 -0
  104. package/build/services/engine/dispatch.d.ts +24 -0
  105. package/build/services/engine/dispatch.js +98 -0
  106. package/build/services/engine/index.d.ts +49 -81
  107. package/build/services/engine/index.js +175 -573
  108. package/build/services/engine/init.d.ts +42 -0
  109. package/build/services/engine/init.js +74 -0
  110. package/build/services/engine/pubsub.d.ts +50 -0
  111. package/build/services/engine/pubsub.js +118 -0
  112. package/build/services/engine/reporting.d.ts +20 -0
  113. package/build/services/engine/reporting.js +38 -0
  114. package/build/services/engine/schema.d.ts +23 -0
  115. package/build/services/engine/schema.js +62 -0
  116. package/build/services/engine/signal.d.ts +57 -0
  117. package/build/services/engine/signal.js +117 -0
  118. package/build/services/engine/state.d.ts +35 -0
  119. package/build/services/engine/state.js +61 -0
  120. package/build/services/engine/version.d.ts +31 -0
  121. package/build/services/engine/version.js +73 -0
  122. package/build/services/hotmesh/deployment.d.ts +21 -0
  123. package/build/services/hotmesh/deployment.js +25 -0
  124. package/build/services/hotmesh/index.d.ts +141 -532
  125. package/build/services/hotmesh/index.js +222 -673
  126. package/build/services/hotmesh/init.d.ts +42 -0
  127. package/build/services/hotmesh/init.js +93 -0
  128. package/build/services/hotmesh/jobs.d.ts +67 -0
  129. package/build/services/hotmesh/jobs.js +99 -0
  130. package/build/services/hotmesh/pubsub.d.ts +38 -0
  131. package/build/services/hotmesh/pubsub.js +54 -0
  132. package/build/services/hotmesh/quorum.d.ts +30 -0
  133. package/build/services/hotmesh/quorum.js +62 -0
  134. package/build/services/hotmesh/validation.d.ts +6 -0
  135. package/build/services/hotmesh/validation.js +28 -0
  136. package/build/services/quorum/index.js +1 -0
  137. package/build/services/router/consumption/index.d.ts +11 -5
  138. package/build/services/router/consumption/index.js +24 -17
  139. package/build/services/router/error-handling/index.d.ts +2 -2
  140. package/build/services/router/error-handling/index.js +14 -14
  141. package/build/services/router/index.d.ts +1 -1
  142. package/build/services/router/index.js +2 -2
  143. package/build/services/serializer/index.d.ts +22 -0
  144. package/build/services/serializer/index.js +39 -1
  145. package/build/services/store/index.d.ts +1 -0
  146. package/build/services/store/providers/postgres/exporter-sql.d.ts +2 -2
  147. package/build/services/store/providers/postgres/exporter-sql.js +4 -4
  148. package/build/services/store/providers/postgres/kvtables.js +7 -6
  149. package/build/services/store/providers/postgres/kvtypes/hash/basic.js +67 -52
  150. package/build/services/store/providers/postgres/kvtypes/hash/jsonb.js +87 -72
  151. package/build/services/store/providers/postgres/kvtypes/hash/udata.js +106 -79
  152. package/build/services/store/providers/postgres/kvtypes/hash/utils.d.ts +16 -0
  153. package/build/services/store/providers/postgres/kvtypes/hash/utils.js +29 -16
  154. package/build/services/store/providers/postgres/postgres.d.ts +1 -0
  155. package/build/services/store/providers/postgres/postgres.js +14 -4
  156. package/build/services/stream/factory.d.ts +3 -1
  157. package/build/services/stream/factory.js +2 -2
  158. package/build/services/stream/index.d.ts +1 -0
  159. package/build/services/stream/providers/nats/nats.d.ts +1 -0
  160. package/build/services/stream/providers/nats/nats.js +1 -0
  161. package/build/services/stream/providers/postgres/credentials.d.ts +56 -0
  162. package/build/services/stream/providers/postgres/credentials.js +129 -0
  163. package/build/services/stream/providers/postgres/kvtables.js +18 -0
  164. package/build/services/stream/providers/postgres/messages.js +7 -7
  165. package/build/services/stream/providers/postgres/notifications.js +16 -2
  166. package/build/services/stream/providers/postgres/postgres.d.ts +7 -0
  167. package/build/services/stream/providers/postgres/postgres.js +35 -4
  168. package/build/services/stream/providers/postgres/procedures.d.ts +21 -0
  169. package/build/services/stream/providers/postgres/procedures.js +213 -0
  170. package/build/services/stream/providers/postgres/secured.d.ts +34 -0
  171. package/build/services/stream/providers/postgres/secured.js +146 -0
  172. package/build/services/stream/providers/postgres/stats.d.ts +1 -0
  173. package/build/services/stream/providers/postgres/stats.js +1 -0
  174. package/build/services/stream/registry.d.ts +1 -1
  175. package/build/services/stream/registry.js +5 -2
  176. package/build/services/telemetry/index.d.ts +10 -1
  177. package/build/services/telemetry/index.js +40 -7
  178. package/build/services/worker/credentials.d.ts +51 -0
  179. package/build/services/worker/credentials.js +87 -0
  180. package/build/services/worker/index.d.ts +2 -2
  181. package/build/services/worker/index.js +7 -6
  182. package/build/types/codec.d.ts +84 -0
  183. package/build/types/codec.js +2 -0
  184. package/build/types/durable.d.ts +104 -28
  185. package/build/types/error.d.ts +10 -1
  186. package/build/types/hotmesh.d.ts +67 -4
  187. package/build/types/index.d.ts +2 -1
  188. package/build/types/provider.d.ts +2 -2
  189. package/build/types/quorum.d.ts +35 -1
  190. package/build/types/stream.d.ts +12 -6
  191. package/package.json +6 -1
  192. package/build/services/activities/activity.d.ts +0 -192
  193. package/build/services/activities/activity.js +0 -786
  194. package/build/services/durable/workflow/interrupt.d.ts +0 -55
@@ -7,9 +7,16 @@ const config_1 = require("../config");
7
7
  const stream_1 = require("../../../types/stream");
8
8
  const key_1 = require("../../../modules/key");
9
9
  class ConsumptionManager {
10
- constructor(stream, logger, throttleManager, errorHandler, lifecycleManager, reclaimDelay, reclaimCount, appId, role, router, retryPolicy) {
11
- this.errorCount = 0;
12
- this.counts = {};
10
+ /**
11
+ * Consumption stats are written directly to the parent Router so
12
+ * they are visible in quorum rollcall profiles.
13
+ */
14
+ get errorCount() { return this.router.errorCount; }
15
+ set errorCount(v) { this.router.errorCount = v; }
16
+ get counts() { return this.router.counts; }
17
+ get hasReachedMaxBackoff() { return this.router.hasReachedMaxBackoff; }
18
+ set hasReachedMaxBackoff(v) { this.router.hasReachedMaxBackoff = v; }
19
+ constructor(stream, logger, throttleManager, errorHandler, lifecycleManager, reclaimDelay, reclaimCount, appId, role, router, retry) {
13
20
  this.stream = stream;
14
21
  this.logger = logger;
15
22
  this.throttleManager = throttleManager;
@@ -20,7 +27,7 @@ class ConsumptionManager {
20
27
  this.appId = appId;
21
28
  this.role = role;
22
29
  this.router = router;
23
- this.retryPolicy = retryPolicy;
30
+ this.retry = retry;
24
31
  }
25
32
  async createGroup(stream, group) {
26
33
  try {
@@ -107,7 +114,7 @@ class ConsumptionManager {
107
114
  }
108
115
  // Process messages - use parallel processing for PostgreSQL
109
116
  const features = this.stream.getProviderSpecificFeatures();
110
- const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications currently
117
+ const isPostgres = features.supportsParallelProcessing;
111
118
  if (isPostgres && messages.length > 1) {
112
119
  // Parallel processing for PostgreSQL batches
113
120
  this.logger.debug('postgres-stream-parallel-processing', {
@@ -221,7 +228,7 @@ class ConsumptionManager {
221
228
  if (!this.hasReachedMaxBackoff) {
222
229
  // Normal mode: try with backoff and finite retries
223
230
  const features = this.stream.getProviderSpecificFeatures();
224
- const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications
231
+ const isPostgres = features.supportsParallelProcessing;
225
232
  const batchSize = isPostgres ? 10 : 1; // Use batch size of 10 for PostgreSQL, 1 for others
226
233
  messages = await this.stream.consumeMessages(stream, group, consumer, {
227
234
  blockTimeout: streamDuration,
@@ -235,7 +242,7 @@ class ConsumptionManager {
235
242
  else {
236
243
  // Fallback mode: just try once, no backoff
237
244
  const features = this.stream.getProviderSpecificFeatures();
238
- const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications
245
+ const isPostgres = features.supportsParallelProcessing;
239
246
  const batchSize = isPostgres ? 10 : 1; // Use batch size of 10 for PostgreSQL, 1 for others
240
247
  messages = await this.stream.consumeMessages(stream, group, consumer, {
241
248
  blockTimeout: streamDuration,
@@ -256,7 +263,7 @@ class ConsumptionManager {
256
263
  this.hasReachedMaxBackoff = false;
257
264
  // Process messages - use parallel processing for PostgreSQL
258
265
  const features = this.stream.getProviderSpecificFeatures();
259
- const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications currently
266
+ const isPostgres = features.supportsParallelProcessing;
260
267
  if (isPostgres && messages.length > 1) {
261
268
  // Parallel processing for PostgreSQL batches
262
269
  this.logger.debug('postgres-stream-parallel-processing-polling', {
@@ -360,13 +367,13 @@ class ConsumptionManager {
360
367
  // the normal retry mechanism (ErrorHandler.handleRetry / shouldRetry).
361
368
  //
362
369
  // Normal retry flow: handleRetry() checks metadata.try against the
363
- // configured retryPolicy.maximumAttempts (or _streamRetryConfig) and
370
+ // configured retry.maximumAttempts (or _streamRetryConfig) and
364
371
  // applies exponential backoff + visibility delays. That mechanism is
365
372
  // the primary retry budget and is what developers configure via
366
- // HotMesh.init({ workers: [{ retryPolicy: { maximumAttempts, ... } }] }).
373
+ // HotMesh.init({ workers: [{ retry: { maximumAttempts, ... } }] }).
367
374
  //
368
375
  // This check catches messages that have somehow exceeded the normal
369
- // budget — e.g., when no retryPolicy is configured, when the retry
376
+ // budget — e.g., when no retry is configured, when the retry
370
377
  // logic is bypassed by an infrastructure error, or when a message
371
378
  // re-enters the stream through a path that doesn't increment
372
379
  // metadata.try. The threshold is the HIGHER of the configured retry
@@ -374,7 +381,7 @@ class ConsumptionManager {
374
381
  // never interferes with legitimate developer-configured retries.
375
382
  const retryAttempt = input._retryAttempt || 0;
376
383
  const configuredMax = input._streamRetryConfig?.max_retry_attempts
377
- ?? this.retryPolicy?.maximumAttempts
384
+ ?? this.retry?.maximumAttempts
378
385
  ?? 0;
379
386
  const poisonThreshold = Math.max(configuredMax, config_1.HMSH_POISON_MESSAGE_THRESHOLD);
380
387
  if (retryAttempt >= poisonThreshold) {
@@ -476,7 +483,7 @@ class ConsumptionManager {
476
483
  if (ids.length === 0)
477
484
  return;
478
485
  const features = this.stream.getProviderSpecificFeatures();
479
- const isPostgres = features.supportsNotifications; // Only PostgreSQL supports notifications
486
+ const isPostgres = features.supportsParallelProcessing;
480
487
  if (isPostgres && ids.length > 1) {
481
488
  // Batch acknowledgment for PostgreSQL
482
489
  await this.stream.ackAndDelete(stream, group, ids);
@@ -493,15 +500,15 @@ class ConsumptionManager {
493
500
  if (output.status === 'error') {
494
501
  // Extract retry policy with priority:
495
502
  // 1. Use message-level _streamRetryConfig (from database columns or previous retry)
496
- // 2. Fall back to router-level retryPolicy (from worker config)
497
- const retryPolicy = input._streamRetryConfig
503
+ // 2. Fall back to router-level retry (from worker config)
504
+ const retry = input._streamRetryConfig
498
505
  ? {
499
506
  maximumAttempts: input._streamRetryConfig.max_retry_attempts,
500
507
  backoffCoefficient: input._streamRetryConfig.backoff_coefficient,
501
508
  maximumInterval: input._streamRetryConfig.maximum_interval_seconds,
502
509
  }
503
- : this.retryPolicy;
504
- return await this.errorHandler.handleRetry(input, output, this.publishMessage.bind(this), retryPolicy);
510
+ : this.retry;
511
+ return await this.errorHandler.handleRetry(input, output, this.publishMessage.bind(this), retry);
505
512
  }
506
513
  else if (typeof output.metadata !== 'object') {
507
514
  output.metadata = { ...input.metadata, guid: (0, utils_1.guid)() };
@@ -1,8 +1,8 @@
1
1
  import { StreamData, StreamDataResponse, RetryPolicy } from '../../../types/stream';
2
2
  export declare class ErrorHandler {
3
- shouldRetry(input: StreamData, output: StreamDataResponse, retryPolicy?: RetryPolicy): [boolean, number];
3
+ shouldRetry(input: StreamData, output: StreamDataResponse, retry?: RetryPolicy): [boolean, number];
4
4
  structureUnhandledError(input: StreamData, err: Error): StreamDataResponse;
5
5
  structureUnacknowledgedError(input: StreamData): StreamDataResponse;
6
6
  structureError(input: StreamData, output: StreamDataResponse): StreamDataResponse;
7
- handleRetry(input: StreamData, output: StreamDataResponse, publishMessage: (topic: string, streamData: StreamData | StreamDataResponse) => Promise<string>, retryPolicy?: RetryPolicy): Promise<string>;
7
+ handleRetry(input: StreamData, output: StreamDataResponse, publishMessage: (topic: string, streamData: StreamData | StreamDataResponse) => Promise<string>, retry?: RetryPolicy): Promise<string>;
8
8
  }
@@ -5,15 +5,15 @@ const utils_1 = require("../../../modules/utils");
5
5
  const config_1 = require("../config");
6
6
  const stream_1 = require("../../../types/stream");
7
7
  class ErrorHandler {
8
- shouldRetry(input, output, retryPolicy) {
8
+ shouldRetry(input, output, retry) {
9
9
  const tryCount = input.metadata.try || 0;
10
10
  // Priority 1: Use structured retry policy (from stream columns or config)
11
- if (retryPolicy) {
12
- const maxAttempts = retryPolicy.maximumAttempts || 3;
13
- const backoffCoeff = retryPolicy.backoffCoefficient || 10;
14
- const maxInterval = typeof retryPolicy.maximumInterval === 'string'
15
- ? parseInt(retryPolicy.maximumInterval)
16
- : (retryPolicy.maximumInterval || 120);
11
+ if (retry) {
12
+ const maxAttempts = retry.maximumAttempts || 3;
13
+ const backoffCoeff = retry.backoffCoefficient || 10;
14
+ const maxInterval = typeof retry.maximumInterval === 'string'
15
+ ? parseInt(retry.maximumInterval)
16
+ : (retry.maximumInterval || 120);
17
17
  // Check if we can retry (next attempt would be attempt #tryCount+2, must be <= maxAttempts)
18
18
  // tryCount=0 is 1st attempt, tryCount=1 is 2nd attempt, etc.
19
19
  // So after tryCount, we've made (tryCount + 1) attempts
@@ -97,12 +97,12 @@ class ErrorHandler {
97
97
  data,
98
98
  };
99
99
  }
100
- async handleRetry(input, output, publishMessage, retryPolicy) {
101
- const [shouldRetry, timeout] = this.shouldRetry(input, output, retryPolicy);
100
+ async handleRetry(input, output, publishMessage, retry) {
101
+ const [shouldRetry, timeout] = this.shouldRetry(input, output, retry);
102
102
  if (shouldRetry) {
103
- // Only sleep if no retryPolicy (legacy behavior for backward compatibility)
104
- // With retryPolicy, use visibility timeout instead of in-memory sleep
105
- if (!retryPolicy) {
103
+ // Only sleep if no retry (legacy behavior for backward compatibility)
104
+ // With retry, use visibility timeout instead of in-memory sleep
105
+ if (!retry) {
106
106
  await (0, utils_1.sleepFor)(timeout);
107
107
  }
108
108
  // Create new message with incremented try count
@@ -115,8 +115,8 @@ class ErrorHandler {
115
115
  if (input._streamRetryConfig) {
116
116
  newMessage._streamRetryConfig = input._streamRetryConfig;
117
117
  }
118
- // Add visibility delay for production-ready retry with retryPolicy
119
- if (retryPolicy && timeout > 0) {
118
+ // Add visibility delay for production-ready retry with retry
119
+ if (retry && timeout > 0) {
120
120
  newMessage._visibilityDelayMs = timeout;
121
121
  }
122
122
  // Track retry attempt count in database
@@ -13,7 +13,7 @@ declare class Router<S extends StreamService<ProviderClient, ProviderTransaction
13
13
  reclaimCount: number;
14
14
  logger: ILogger;
15
15
  readonly: boolean;
16
- retryPolicy: import('../../types/stream').RetryPolicy | undefined;
16
+ retry: import('../../types/stream').RetryPolicy | undefined;
17
17
  errorCount: number;
18
18
  counts: {
19
19
  [key: string]: number;
@@ -29,12 +29,12 @@ class Router {
29
29
  this.reclaimCount = enhancedConfig.reclaimCount;
30
30
  this.logger = logger;
31
31
  this.readonly = enhancedConfig.readonly;
32
- this.retryPolicy = enhancedConfig.retryPolicy;
32
+ this.retry = enhancedConfig.retry;
33
33
  // Initialize submodule managers
34
34
  this.throttleManager = new throttling_1.ThrottleManager(enhancedConfig.throttle);
35
35
  this.errorHandler = new error_handling_1.ErrorHandler();
36
36
  this.lifecycleManager = new lifecycle_1.LifecycleManager(this.readonly, this.topic, this.logger, this.stream);
37
- this.consumptionManager = new consumption_1.ConsumptionManager(this.stream, this.logger, this.throttleManager, this.errorHandler, this.lifecycleManager, this.reclaimDelay, this.reclaimCount, this.appId, this.role, this, this.retryPolicy);
37
+ this.consumptionManager = new consumption_1.ConsumptionManager(this.stream, this.logger, this.throttleManager, this.errorHandler, this.lifecycleManager, this.reclaimDelay, this.reclaimCount, this.appId, this.role, this, this.retry);
38
38
  this.resetThrottleState();
39
39
  }
40
40
  // Legacy compatibility methods
@@ -1,4 +1,5 @@
1
1
  import { Consumes } from '../../types/activity';
2
+ import { PayloadCodec } from '../../types/codec';
2
3
  import { StringStringType, StringAnyType, SymbolMap, SymbolMaps, SymbolSets, Symbols } from '../../types/serializer';
3
4
  export declare const MDATA_SYMBOLS: {
4
5
  SLOTS: number;
@@ -21,6 +22,27 @@ export declare class SerializerService {
21
22
  symReverseKeys: SymbolMaps;
22
23
  symValMaps: SymbolMap;
23
24
  symValReverseMaps: SymbolMap;
25
+ private static codec;
26
+ /**
27
+ * Register a global payload codec for encoding/decoding serialized
28
+ * object data. Once registered, all object values are stored as
29
+ * `/b{encoded}` instead of `/s{json}`.
30
+ *
31
+ * Pass `null` to remove a previously registered codec.
32
+ *
33
+ * @example
34
+ * ```typescript
35
+ * SerializerService.registerCodec({
36
+ * encode(json) { return Buffer.from(json).toString('base64'); },
37
+ * decode(encoded) { return Buffer.from(encoded, 'base64').toString('utf8'); },
38
+ * });
39
+ * ```
40
+ */
41
+ static registerCodec(codec: PayloadCodec | null): void;
42
+ /**
43
+ * Returns the currently registered codec, or `null` if none.
44
+ */
45
+ static getCodec(): PayloadCodec | null;
24
46
  constructor();
25
47
  abbreviate(consumes: Consumes, symbolNames: string[], fields?: string[]): string[];
26
48
  resolveDimensionalIndex(path: string): string;
@@ -39,6 +39,30 @@ exports.MDATA_SYMBOLS = {
39
39
  },
40
40
  };
41
41
  class SerializerService {
42
+ /**
43
+ * Register a global payload codec for encoding/decoding serialized
44
+ * object data. Once registered, all object values are stored as
45
+ * `/b{encoded}` instead of `/s{json}`.
46
+ *
47
+ * Pass `null` to remove a previously registered codec.
48
+ *
49
+ * @example
50
+ * ```typescript
51
+ * SerializerService.registerCodec({
52
+ * encode(json) { return Buffer.from(json).toString('base64'); },
53
+ * decode(encoded) { return Buffer.from(encoded, 'base64').toString('utf8'); },
54
+ * });
55
+ * ```
56
+ */
57
+ static registerCodec(codec) {
58
+ SerializerService.codec = codec;
59
+ }
60
+ /**
61
+ * Returns the currently registered codec, or `null` if none.
62
+ */
63
+ static getCodec() {
64
+ return SerializerService.codec;
65
+ }
42
66
  constructor() {
43
67
  this.resetSymbols({}, {}, {});
44
68
  }
@@ -231,7 +255,13 @@ class SerializerService {
231
255
  value = '/n';
232
256
  }
233
257
  else {
234
- value = '/s' + JSON.stringify(value);
258
+ const json = JSON.stringify(value);
259
+ if (SerializerService.codec) {
260
+ value = '/b' + SerializerService.codec.encode(json);
261
+ }
262
+ else {
263
+ value = '/s' + json;
264
+ }
235
265
  }
236
266
  break;
237
267
  }
@@ -251,6 +281,13 @@ class SerializerService {
251
281
  return Number(rest);
252
282
  case '/n': // null
253
283
  return null;
284
+ case '/b': { // encoded object (codec)
285
+ const json = SerializerService.codec.decode(rest);
286
+ if (dateReg.exec(json)) {
287
+ return new Date(JSON.parse(json));
288
+ }
289
+ return JSON.parse(json);
290
+ }
254
291
  case '/s': // object (JSON string)
255
292
  if (dateReg.exec(rest)) {
256
293
  return new Date(JSON.parse(rest));
@@ -279,4 +316,5 @@ class SerializerService {
279
316
  return obj;
280
317
  }
281
318
  }
319
+ SerializerService.codec = null;
282
320
  exports.SerializerService = SerializerService;
@@ -88,6 +88,7 @@ declare abstract class StoreService<Provider extends ProviderClient, Transaction
88
88
  abstract scrub(jobId: string): Promise<void>;
89
89
  abstract findJobs(queryString?: string, limit?: number, batchSize?: number, cursor?: string): Promise<[string, string[]]>;
90
90
  abstract findJobFields(jobId: string, fieldMatchPattern?: string, limit?: number, batchSize?: number, cursor?: string): Promise<[string, StringStringType]>;
91
+ abstract setCancel(jobId: string, appId: string): Promise<void>;
91
92
  abstract setThrottleRate(options: ThrottleOptions): Promise<void>;
92
93
  abstract getThrottleRates(): Promise<StringStringType>;
93
94
  abstract getThrottleRate(topic: string): Promise<number>;
@@ -9,12 +9,12 @@ export declare const GET_JOB_BY_KEY = "\n SELECT id, key, status, created_at, u
9
9
  /**
10
10
  * Fetch all attributes for a job.
11
11
  */
12
- export declare const GET_JOB_ATTRIBUTES = "\n SELECT field, value\n FROM {schema}.jobs_attributes\n WHERE job_id = $1\n ORDER BY field\n";
12
+ export declare const GET_JOB_ATTRIBUTES = "\n SELECT symbol || dimension AS field, value\n FROM {schema}.jobs_attributes\n WHERE job_id = $1\n ORDER BY symbol, dimension\n";
13
13
  /**
14
14
  * Fetch activity inputs for a workflow.
15
15
  * Matches all activity jobs for the given workflow and extracts their input arguments.
16
16
  */
17
- export declare const GET_ACTIVITY_INPUTS = "\n SELECT j.key, ja.value\n FROM {schema}.jobs j\n JOIN {schema}.jobs_attributes ja ON ja.job_id = j.id\n WHERE j.key LIKE $1\n AND ja.field = $2\n";
17
+ export declare const GET_ACTIVITY_INPUTS = "\n SELECT j.key, ja.value\n FROM {schema}.jobs j\n JOIN {schema}.jobs_attributes ja ON ja.job_id = j.id\n WHERE j.key LIKE $1\n AND ja.symbol = $2 AND ja.dimension = $3\n";
18
18
  /**
19
19
  * Fetch all worker stream messages for a job AND its child activities.
20
20
  * Child activity jobs use the pattern: -{parentJobId}-$activityName-N
@@ -18,10 +18,10 @@ exports.GET_JOB_BY_KEY = `
18
18
  * Fetch all attributes for a job.
19
19
  */
20
20
  exports.GET_JOB_ATTRIBUTES = `
21
- SELECT field, value
21
+ SELECT symbol || dimension AS field, value
22
22
  FROM {schema}.jobs_attributes
23
23
  WHERE job_id = $1
24
- ORDER BY field
24
+ ORDER BY symbol, dimension
25
25
  `;
26
26
  /**
27
27
  * Fetch activity inputs for a workflow.
@@ -32,7 +32,7 @@ exports.GET_ACTIVITY_INPUTS = `
32
32
  FROM {schema}.jobs j
33
33
  JOIN {schema}.jobs_attributes ja ON ja.job_id = j.id
34
34
  WHERE j.key LIKE $1
35
- AND ja.field = $2
35
+ AND ja.symbol = $2 AND ja.dimension = $3
36
36
  `;
37
37
  /**
38
38
  * Fetch all worker stream messages for a job AND its child activities.
@@ -86,7 +86,7 @@ function buildChildWorkflowInputsQuery(childCount, schema) {
86
86
  FROM ${schema}.jobs j
87
87
  JOIN ${schema}.jobs_attributes ja ON ja.job_id = j.id
88
88
  WHERE j.key IN (${placeholders})
89
- AND ja.field = $${childCount + 1}
89
+ AND ja.symbol = $${childCount + 1} AND ja.dimension = $${childCount + 2}
90
90
  `;
91
91
  }
92
92
  exports.buildChildWorkflowInputsQuery = buildChildWorkflowInputsQuery;
@@ -275,12 +275,13 @@ const KVTables = (context) => ({
275
275
  await client.query(`
276
276
  CREATE TABLE IF NOT EXISTS ${attributesTableName} (
277
277
  job_id UUID NOT NULL,
278
- field TEXT NOT NULL,
278
+ symbol TEXT NOT NULL,
279
+ dimension TEXT NOT NULL DEFAULT '',
279
280
  value TEXT,
280
281
  type ${schemaName}.type_enum NOT NULL,
281
282
  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
282
283
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
283
- PRIMARY KEY (job_id, field),
284
+ PRIMARY KEY (job_id, symbol, dimension),
284
285
  FOREIGN KEY (job_id) REFERENCES ${fullTableName} (id) ON DELETE CASCADE
285
286
  ) PARTITION BY HASH (job_id);
286
287
  `);
@@ -320,12 +321,12 @@ const KVTables = (context) => ({
320
321
  `);
321
322
  // Create indexes for attributes table
322
323
  await client.query(`
323
- CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_attributes_type_field
324
- ON ${attributesTableName} (type, field);
324
+ CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_attributes_type_symbol
325
+ ON ${attributesTableName} (type, symbol);
325
326
  `);
326
327
  await client.query(`
327
- CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_attributes_field
328
- ON ${attributesTableName} (field);
328
+ CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_attributes_symbol
329
+ ON ${attributesTableName} (symbol);
329
330
  `);
330
331
  break;
331
332
  case 'list':