@hotmeshio/hotmesh 0.10.2 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +1 -1
  2. package/build/modules/enums.d.ts +1 -0
  3. package/build/modules/enums.js +3 -1
  4. package/build/modules/errors.d.ts +2 -0
  5. package/build/modules/errors.js +2 -0
  6. package/build/modules/key.js +3 -2
  7. package/build/package.json +2 -2
  8. package/build/services/activities/worker.js +10 -0
  9. package/build/services/dba/index.d.ts +2 -1
  10. package/build/services/dba/index.js +11 -2
  11. package/build/services/durable/client.js +6 -1
  12. package/build/services/durable/exporter.d.ts +15 -0
  13. package/build/services/durable/exporter.js +384 -5
  14. package/build/services/durable/schemas/factory.d.ts +1 -1
  15. package/build/services/durable/schemas/factory.js +27 -4
  16. package/build/services/durable/worker.d.ts +2 -2
  17. package/build/services/durable/worker.js +15 -9
  18. package/build/services/durable/workflow/context.js +2 -0
  19. package/build/services/durable/workflow/execChild.js +5 -2
  20. package/build/services/durable/workflow/hook.js +6 -0
  21. package/build/services/durable/workflow/proxyActivities.js +3 -4
  22. package/build/services/engine/index.d.ts +2 -2
  23. package/build/services/engine/index.js +10 -5
  24. package/build/services/exporter/index.d.ts +16 -2
  25. package/build/services/exporter/index.js +76 -0
  26. package/build/services/hotmesh/index.d.ts +2 -2
  27. package/build/services/hotmesh/index.js +2 -2
  28. package/build/services/router/config/index.d.ts +2 -2
  29. package/build/services/router/config/index.js +2 -1
  30. package/build/services/router/consumption/index.js +80 -5
  31. package/build/services/store/index.d.ts +52 -0
  32. package/build/services/store/providers/postgres/exporter-sql.d.ts +40 -0
  33. package/build/services/store/providers/postgres/exporter-sql.js +92 -0
  34. package/build/services/store/providers/postgres/kvtables.js +6 -0
  35. package/build/services/store/providers/postgres/postgres.d.ts +42 -0
  36. package/build/services/store/providers/postgres/postgres.js +151 -0
  37. package/build/services/stream/index.d.ts +1 -0
  38. package/build/services/stream/providers/postgres/kvtables.d.ts +1 -1
  39. package/build/services/stream/providers/postgres/kvtables.js +235 -82
  40. package/build/services/stream/providers/postgres/lifecycle.d.ts +4 -3
  41. package/build/services/stream/providers/postgres/lifecycle.js +6 -5
  42. package/build/services/stream/providers/postgres/messages.d.ts +14 -6
  43. package/build/services/stream/providers/postgres/messages.js +153 -76
  44. package/build/services/stream/providers/postgres/notifications.d.ts +5 -2
  45. package/build/services/stream/providers/postgres/notifications.js +39 -35
  46. package/build/services/stream/providers/postgres/postgres.d.ts +21 -118
  47. package/build/services/stream/providers/postgres/postgres.js +87 -140
  48. package/build/services/stream/providers/postgres/scout.js +2 -2
  49. package/build/services/stream/providers/postgres/stats.js +3 -2
  50. package/build/services/stream/registry.d.ts +62 -0
  51. package/build/services/stream/registry.js +198 -0
  52. package/build/services/worker/index.js +20 -6
  53. package/build/types/durable.d.ts +6 -1
  54. package/build/types/error.d.ts +2 -0
  55. package/build/types/exporter.d.ts +84 -0
  56. package/build/types/hotmesh.d.ts +7 -1
  57. package/build/types/index.d.ts +1 -1
  58. package/build/types/stream.d.ts +2 -0
  59. package/package.json +2 -2
@@ -20,7 +20,7 @@
20
20
  */
21
21
  Object.defineProperty(exports, "__esModule", { value: true });
22
22
  exports.APP_ID = exports.APP_VERSION = exports.getWorkflowYAML = void 0;
23
- const APP_VERSION = '5';
23
+ const APP_VERSION = '8';
24
24
  exports.APP_VERSION = APP_VERSION;
25
25
  const APP_ID = 'durable';
26
26
  exports.APP_ID = APP_ID;
@@ -66,7 +66,13 @@ const getWorkflowYAML = (app, version) => {
66
66
  description: the arguments to pass to the flow
67
67
  type: array
68
68
  workflowTopic:
69
- description: the stream topic the worker is listening on
69
+ description: concatenated taskQueue-workflowName for engine-internal routing
70
+ type: string
71
+ taskQueue:
72
+ description: the task queue name (stream_name in worker_streams)
73
+ type: string
74
+ workflowName:
75
+ description: the workflow function name (workflow_name in worker_streams)
70
76
  type: string
71
77
  backoffCoefficient:
72
78
  description: the time multiple in seconds to backoff before retrying
@@ -155,7 +161,7 @@ const getWorkflowYAML = (app, version) => {
155
161
  worker:
156
162
  title: Main Worker - Calls linked Workflow functions
157
163
  type: worker
158
- topic: '{trigger.output.data.workflowTopic}'
164
+ topic: '{trigger.output.data.taskQueue}'
159
165
  emit: '{$job.data.done}'
160
166
  input:
161
167
  schema:
@@ -169,6 +175,8 @@ const getWorkflowYAML = (app, version) => {
169
175
  type: array
170
176
  workflowTopic:
171
177
  type: string
178
+ workflowName:
179
+ type: string
172
180
  canRetry:
173
181
  type: boolean
174
182
  expire:
@@ -178,6 +186,7 @@ const getWorkflowYAML = (app, version) => {
178
186
  workflowId: '{trigger.output.data.workflowId}'
179
187
  arguments: '{trigger.output.data.arguments}'
180
188
  workflowTopic: '{trigger.output.data.workflowTopic}'
189
+ workflowName: '{trigger.output.data.workflowName}'
181
190
  expire: '{trigger.output.data.expire}'
182
191
  canRetry:
183
192
  '@pipe':
@@ -348,6 +357,10 @@ const getWorkflowYAML = (app, version) => {
348
357
  properties:
349
358
  workflowTopic:
350
359
  type: string
360
+ taskQueue:
361
+ type: string
362
+ workflowName:
363
+ type: string
351
364
  backoffCoefficient:
352
365
  type: number
353
366
  maximumAttempts:
@@ -387,6 +400,7 @@ const getWorkflowYAML = (app, version) => {
387
400
  persistent: '{worker.output.data.persistent}'
388
401
  signalIn: '{worker.output.data.signalIn}'
389
402
  workflowId: '{worker.output.data.workflowId}'
403
+ taskQueue: '{worker.output.data.taskQueue}'
390
404
  workflowName: '{worker.output.data.workflowName}'
391
405
  workflowTopic: '{worker.output.data.workflowTopic}'
392
406
  entity: '{worker.output.data.entity}'
@@ -893,7 +907,7 @@ const getWorkflowYAML = (app, version) => {
893
907
  signaler_worker:
894
908
  title: Signal In - Worker
895
909
  type: worker
896
- topic: '{signaler.hook.data.workflowTopic}'
910
+ topic: '{signaler.hook.data.taskQueue}'
897
911
  input:
898
912
  schema:
899
913
  type: object
@@ -906,6 +920,8 @@ const getWorkflowYAML = (app, version) => {
906
920
  type: string
907
921
  arguments:
908
922
  type: array
923
+ workflowName:
924
+ type: string
909
925
  canRetry:
910
926
  type: boolean
911
927
  expire:
@@ -915,6 +931,7 @@ const getWorkflowYAML = (app, version) => {
915
931
  originJobId: '{trigger.output.data.originJobId}'
916
932
  workflowDimension: '{signaler.output.metadata.dad}'
917
933
  arguments: '{signaler.hook.data.arguments}'
934
+ workflowName: '{signaler.hook.data.workflowName}'
918
935
  expire: '{trigger.output.data.expire}'
919
936
  canRetry:
920
937
  '@pipe':
@@ -1136,6 +1153,7 @@ const getWorkflowYAML = (app, version) => {
1136
1153
  persistent: '{signaler_worker.output.data.persistent}'
1137
1154
  signalIn: '{signaler_worker.output.data.signalIn}'
1138
1155
  workflowId: '{signaler_worker.output.data.workflowId}'
1156
+ taskQueue: '{signaler_worker.output.data.taskQueue}'
1139
1157
  workflowName: '{signaler_worker.output.data.workflowName}'
1140
1158
  workflowTopic: '{signaler_worker.output.data.workflowTopic}'
1141
1159
  entity: '{signaler_worker.output.data.entity}'
@@ -1942,6 +1960,11 @@ const getWorkflowYAML = (app, version) => {
1942
1960
  - ['{collator_trigger.output.data.items}', '{collator_cycle_hook.output.data.cur_index}']
1943
1961
  - ['{@array.get}', workflowId]
1944
1962
  - ['{@object.get}']
1963
+ taskQueue:
1964
+ '@pipe':
1965
+ - ['{collator_trigger.output.data.items}', '{collator_cycle_hook.output.data.cur_index}']
1966
+ - ['{@array.get}', taskQueue]
1967
+ - ['{@object.get}']
1945
1968
  workflowName:
1946
1969
  '@pipe':
1947
1970
  - ['{collator_trigger.output.data.items}', '{collator_cycle_hook.output.data.cur_index}']
@@ -200,7 +200,7 @@ export declare class WorkerService {
200
200
  /**
201
201
  * @private
202
202
  */
203
- initWorkflowWorker(config: WorkerConfig, workflowTopic: string, workflowFunction: Function): Promise<HotMesh>;
203
+ initWorkflowWorker(config: WorkerConfig, taskQueue: string, workflowFunctionName: string, workflowTopic: string, workflowFunction: Function): Promise<HotMesh>;
204
204
  /**
205
205
  * @private
206
206
  */
@@ -213,7 +213,7 @@ export declare class WorkerService {
213
213
  /**
214
214
  * @private
215
215
  */
216
- wrapWorkflowFunction(workflowFunction: Function, workflowTopic: string, config: WorkerConfig): Function;
216
+ wrapWorkflowFunction(workflowFunction: Function, workflowTopic: string, workflowFunctionName: string, config: WorkerConfig): Function;
217
217
  /**
218
218
  * @private
219
219
  */
@@ -345,13 +345,15 @@ class WorkerService {
345
345
  static async create(config) {
346
346
  const workflow = config.workflow;
347
347
  const [workflowFunctionName, workflowFunction] = WorkerService.resolveWorkflowTarget(workflow);
348
- const baseTopic = `${config.taskQueue}-${workflowFunctionName}`;
349
- const activityTopic = `${baseTopic}-activity`;
350
- const workflowTopic = `${baseTopic}`;
348
+ // Separate taskQueue from workflowName - no concatenation for stream_name
349
+ const taskQueue = config.taskQueue;
350
+ const activityTopic = `${taskQueue}-activity`;
351
+ // workflowTopic remains concatenated for engine-internal routing (graph.subscribes)
352
+ const workflowTopic = `${taskQueue}-${workflowFunctionName}`;
351
353
  //initialize supporting workflows
352
354
  const worker = new WorkerService();
353
355
  worker.activityRunner = await worker.initActivityWorker(config, activityTopic);
354
- worker.workflowRunner = await worker.initWorkflowWorker(config, workflowTopic, workflowFunction);
356
+ worker.workflowRunner = await worker.initWorkflowWorker(config, taskQueue, workflowFunctionName, workflowTopic, workflowFunction);
355
357
  search_1.Search.configureSearchIndex(worker.workflowRunner, config.search);
356
358
  await WorkerService.activateWorkflow(worker.workflowRunner);
357
359
  return worker;
@@ -470,7 +472,7 @@ class WorkerService {
470
472
  /**
471
473
  * @private
472
474
  */
473
- async initWorkflowWorker(config, workflowTopic, workflowFunction) {
475
+ async initWorkflowWorker(config, taskQueue, workflowFunctionName, workflowTopic, workflowFunction) {
474
476
  const providerConfig = config.connection;
475
477
  const targetNamespace = config?.namespace ?? factory_1.APP_ID;
476
478
  const optionsHash = WorkerService.hashOptions(config?.connection);
@@ -483,9 +485,10 @@ class WorkerService {
483
485
  engine: { connection: providerConfig },
484
486
  workers: [
485
487
  {
486
- topic: workflowTopic,
488
+ topic: taskQueue,
489
+ workflowName: workflowFunctionName,
487
490
  connection: providerConfig,
488
- callback: this.wrapWorkflowFunction(workflowFunction, workflowTopic, config).bind(this),
491
+ callback: this.wrapWorkflowFunction(workflowFunction, workflowTopic, workflowFunctionName, config).bind(this),
489
492
  },
490
493
  ],
491
494
  });
@@ -495,7 +498,7 @@ class WorkerService {
495
498
  /**
496
499
  * @private
497
500
  */
498
- wrapWorkflowFunction(workflowFunction, workflowTopic, config) {
501
+ wrapWorkflowFunction(workflowFunction, workflowTopic, workflowFunctionName, config) {
499
502
  return async (data) => {
500
503
  const counter = { counter: 0 };
501
504
  const interruptionRegistry = [];
@@ -532,7 +535,8 @@ class WorkerService {
532
535
  replayQuery = '-*[ehklptydr]-*';
533
536
  }
534
537
  context.set('workflowTopic', workflowTopic);
535
- context.set('workflowName', workflowTopic.split('-').pop());
538
+ context.set('workflowName', workflowFunctionName);
539
+ context.set('taskQueue', config.taskQueue);
536
540
  context.set('workflowTrace', data.metadata.trc);
537
541
  context.set('workflowSpan', data.metadata.spn);
538
542
  const store = this.workflowRunner.engine.store;
@@ -693,6 +697,8 @@ class WorkerService {
693
697
  workflowDimension: err.workflowDimension,
694
698
  workflowId: err.workflowId,
695
699
  workflowTopic: err.workflowTopic,
700
+ taskQueue: err.taskQueue,
701
+ workflowName: err.workflowName,
696
702
  },
697
703
  };
698
704
  }
@@ -57,6 +57,7 @@ function getContext() {
57
57
  const interruptionRegistry = store.get('interruptionRegistry');
58
58
  const workflowDimension = store.get('workflowDimension') ?? '';
59
59
  const workflowTopic = store.get('workflowTopic');
60
+ const taskQueue = store.get('taskQueue');
60
61
  const connection = store.get('connection');
61
62
  const namespace = store.get('namespace');
62
63
  const originJobId = store.get('originJobId');
@@ -78,6 +79,7 @@ function getContext() {
78
79
  originJobId,
79
80
  raw,
80
81
  replay,
82
+ taskQueue,
81
83
  workflowId,
82
84
  workflowDimension,
83
85
  workflowTopic,
@@ -9,7 +9,7 @@ const didRun_1 = require("./didRun");
9
9
  * @private
10
10
  */
11
11
  function getChildInterruptPayload(context, options, execIndex) {
12
- const { workflowId, originJobId, workflowDimension, expire } = context;
12
+ const { workflowId, originJobId, workflowDimension, expire, taskQueue: parentTaskQueue } = context;
13
13
  let childJobId;
14
14
  if (options.workflowId) {
15
15
  childJobId = options.workflowId;
@@ -21,7 +21,8 @@ function getChildInterruptPayload(context, options, execIndex) {
21
21
  childJobId = `-${options.workflowName}-${(0, common_1.guid)()}-${workflowDimension}-${execIndex}`;
22
22
  }
23
23
  const parentWorkflowId = workflowId;
24
- const taskQueueName = options.taskQueue ?? options.entity;
24
+ // Use explicit taskQueue, or parent's taskQueue, or entity as fallback
25
+ const taskQueueName = options.taskQueue ?? parentTaskQueue ?? options.entity;
25
26
  const workflowName = options.taskQueue
26
27
  ? options.workflowName
27
28
  : options.entity ?? options.workflowName;
@@ -42,6 +43,8 @@ function getChildInterruptPayload(context, options, execIndex) {
42
43
  workflowDimension: workflowDimension,
43
44
  workflowId: childJobId,
44
45
  workflowTopic,
46
+ taskQueue: taskQueueName,
47
+ workflowName: workflowName,
45
48
  };
46
49
  }
47
50
  /**
@@ -109,10 +109,16 @@ async function hook(options) {
109
109
  entity: options.entity,
110
110
  }, null, 2)}`);
111
111
  }
112
+ // Derive taskQueue and workflowName from options/entity, falling back to parsing targetTopic
113
+ const hookTaskQueue = options.taskQueue ?? options.entity ?? targetTopic.split('-')[0];
114
+ const hookWorkflowName = options.workflowName ?? options.entity ??
115
+ (targetTopic.startsWith(`${hookTaskQueue}-`) ? targetTopic.substring(hookTaskQueue.length + 1) : targetTopic);
112
116
  const payload = {
113
117
  arguments: [...options.args],
114
118
  id: targetWorkflowId,
115
119
  workflowTopic: targetTopic,
120
+ taskQueue: hookTaskQueue,
121
+ workflowName: hookWorkflowName,
116
122
  backoffCoefficient: options.config?.backoffCoefficient || common_1.HMSH_DURABLE_EXP_BACKOFF,
117
123
  maximumAttempts: options.config?.maximumAttempts || common_1.HMSH_DURABLE_MAX_ATTEMPTS,
118
124
  maximumInterval: (0, common_1.s)(options?.config?.maximumInterval ?? common_1.HMSH_DURABLE_MAX_INTERVAL),
@@ -9,12 +9,11 @@ const didRun_1 = require("./didRun");
9
9
  * @private
10
10
  */
11
11
  function getProxyInterruptPayload(context, activityName, execIndex, args, options) {
12
- const { workflowDimension, workflowId, originJobId, workflowTopic, expire } = context;
13
- // Use explicitly provided taskQueue, otherwise derive from workflow (original behavior)
14
- // This keeps backward compatibility while allowing explicit global/custom queues
12
+ const { workflowDimension, workflowId, originJobId, taskQueue, expire } = context;
13
+ // Activity topic uses the task queue directly (no workflow name concatenation)
15
14
  const activityTopic = options?.taskQueue
16
15
  ? `${options.taskQueue}-activity`
17
- : `${workflowTopic}-activity`;
16
+ : `${taskQueue}-activity`;
18
17
  const activityJobId = `-${workflowId}-$${activityName}${workflowDimension}-${execIndex}`;
19
18
  let maximumInterval;
20
19
  if (options?.retryPolicy?.maximumInterval) {
@@ -16,7 +16,7 @@ import { TaskService } from '../task';
16
16
  import { AppVID } from '../../types/app';
17
17
  import { ActivityType } from '../../types/activity';
18
18
  import { CacheMode } from '../../types/cache';
19
- import { JobExport } from '../../types/exporter';
19
+ import { ExportOptions, JobExport } from '../../types/exporter';
20
20
  import { JobState, JobData, JobMetadata, JobOutput, JobStatus, JobInterruptOptions, JobCompletionOptions, ExtensionType } from '../../types/job';
21
21
  import { HotMeshApps, HotMeshConfig, HotMeshManifest, HotMeshSettings } from '../../types/hotmesh';
22
22
  import { ProviderClient, ProviderTransaction } from '../../types/provider';
@@ -254,7 +254,7 @@ declare class EngineService {
254
254
  /**
255
255
  * @private
256
256
  */
257
- export(jobId: string): Promise<JobExport>;
257
+ export(jobId: string, options?: ExportOptions): Promise<JobExport>;
258
258
  /**
259
259
  * @private
260
260
  */
@@ -5,6 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
6
  exports.EngineService = void 0;
7
7
  const key_1 = require("../../modules/key");
8
+ const registry_1 = require("../stream/registry");
8
9
  const enums_1 = require("../../modules/enums");
9
10
  const utils_1 = require("../../modules/utils");
10
11
  const activities_1 = __importDefault(require("../activities"));
@@ -47,10 +48,11 @@ class EngineService {
47
48
  await instance.initSubChannel(config.engine.sub, config.engine.pub ?? config.engine.store);
48
49
  await instance.initStreamChannel(config.engine.stream, config.engine.store);
49
50
  instance.router = await instance.initRouter(config);
50
- const streamName = instance.store.mintKey(key_1.KeyType.STREAMS, {
51
- appId: instance.appId,
51
+ // Use singleton consumer via registry for engine stream
52
+ await registry_1.StreamConsumerRegistry.registerEngine(namespace, appId, guid, instance.processStreamMessage.bind(instance), instance.stream, instance.store, logger, {
53
+ reclaimDelay: config.engine.reclaimDelay,
54
+ reclaimCount: config.engine.reclaimCount,
52
55
  });
53
- instance.router.consumeMessages(streamName, 'ENGINE', instance.guid, instance.processStreamMessage.bind(instance));
54
56
  instance.taskService = new task_1.TaskService(instance.store, logger);
55
57
  instance.exporter = new exporter_1.ExporterService(instance.appId, instance.store, logger);
56
58
  instance.inited = (0, utils_1.formatISODate)(new Date());
@@ -208,6 +210,9 @@ class EngineService {
208
210
  */
209
211
  async initActivity(topic, data = {}, context) {
210
212
  const [activityId, schema] = await this.getSchema(topic);
213
+ if (!schema) {
214
+ throw new Error(`Activity schema not found for "${activityId}" (topic: ${topic}) in app ${this.appId}`);
215
+ }
211
216
  const ActivityHandler = activities_1.default[schema.type];
212
217
  if (ActivityHandler) {
213
218
  const utc = (0, utils_1.formatISODate)(new Date());
@@ -744,8 +749,8 @@ class EngineService {
744
749
  /**
745
750
  * @private
746
751
  */
747
- async export(jobId) {
748
- return await this.exporter.export(jobId);
752
+ async export(jobId, options = {}) {
753
+ return await this.exporter.export(jobId, options);
749
754
  }
750
755
  /**
751
756
  * @private
@@ -1,8 +1,8 @@
1
1
  import { ILogger } from '../logger';
2
2
  import { StoreService } from '../store';
3
- import { DependencyExport, ExportOptions, JobActionExport, JobExport } from '../../types/exporter';
3
+ import { ActivityDetail, DependencyExport, ExportOptions, JobActionExport, JobExport, StreamHistoryEntry } from '../../types/exporter';
4
4
  import { ProviderClient, ProviderTransaction } from '../../types/provider';
5
- import { StringStringType, Symbols } from '../../types/serializer';
5
+ import { StringAnyType, StringStringType, Symbols } from '../../types/serializer';
6
6
  /**
7
7
  * Downloads job data and expands process data and
8
8
  * includes dependency list
@@ -34,6 +34,20 @@ declare class ExporterService {
34
34
  * @returns - the inflated job data
35
35
  */
36
36
  inflate(jobHash: StringStringType, dependencyList: string[]): JobExport;
37
+ /**
38
+ * Build structured activity details by correlating stream messages
39
+ * (inputs, timing, retries) with the process hierarchy (outputs).
40
+ *
41
+ * Stream messages carry the raw data that flowed through each activity:
42
+ * - `data` contains the activity input arguments
43
+ * - `dad` (dimensional address) reveals cycle iterations (e.g., ,0,1,0 = 2nd cycle)
44
+ * - `created_at` / `expired_at` give precise timing
45
+ * - `retry_attempt` tracks retries
46
+ *
47
+ * The process hierarchy carries activity outputs organized by dimension.
48
+ * This method merges both into a flat, dashboard-friendly list.
49
+ */
50
+ buildActivities(process: StringAnyType, streamHistory: StreamHistoryEntry[]): ActivityDetail[];
37
51
  /**
38
52
  * Inflates the dependency data into a JobExport object by
39
53
  * organizing the dimensional isolate in such a way as to interleave
@@ -28,6 +28,10 @@ class ExporterService {
28
28
  const depData = []; // await this.store.getDependencies(jobId);
29
29
  const jobData = await this.store.getRaw(jobId);
30
30
  const jobExport = this.inflate(jobData, depData);
31
+ if (options.enrich_inputs && this.store.getStreamHistory) {
32
+ const streamHistory = await this.store.getStreamHistory(jobId);
33
+ jobExport.activities = this.buildActivities(jobExport.process, streamHistory);
34
+ }
31
35
  return jobExport;
32
36
  }
33
37
  /**
@@ -77,6 +81,78 @@ class ExporterService {
77
81
  status: jobHash[':'],
78
82
  };
79
83
  }
84
+ /**
85
+ * Build structured activity details by correlating stream messages
86
+ * (inputs, timing, retries) with the process hierarchy (outputs).
87
+ *
88
+ * Stream messages carry the raw data that flowed through each activity:
89
+ * - `data` contains the activity input arguments
90
+ * - `dad` (dimensional address) reveals cycle iterations (e.g., ,0,1,0 = 2nd cycle)
91
+ * - `created_at` / `expired_at` give precise timing
92
+ * - `retry_attempt` tracks retries
93
+ *
94
+ * The process hierarchy carries activity outputs organized by dimension.
95
+ * This method merges both into a flat, dashboard-friendly list.
96
+ */
97
+ buildActivities(process, streamHistory) {
98
+ const activities = [];
99
+ for (const entry of streamHistory) {
100
+ // Parse dimensional address: ",0,1,0,0" → ["0","1","0","0"]
101
+ const dimParts = (entry.dad || '').split(',').filter(Boolean);
102
+ const dimension = dimParts.join('/');
103
+ // Detect cycle iteration from dimensional address
104
+ // In a cycling workflow, the 2nd dimension component increments per cycle
105
+ const cycleIteration = dimParts.length > 1 ? parseInt(dimParts[1]) || 0 : 0;
106
+ // Look up the corresponding output from the process hierarchy
107
+ // Process keys are like: process[dimension][activityName].output.data
108
+ let output;
109
+ let activityName = entry.aid;
110
+ // Walk the process hierarchy using the dimension path
111
+ let node = process;
112
+ for (const part of dimParts) {
113
+ if (node && typeof node === 'object' && node[part]) {
114
+ node = node[part];
115
+ }
116
+ else {
117
+ node = undefined;
118
+ break;
119
+ }
120
+ }
121
+ if (node && typeof node === 'object') {
122
+ // node is now at the dimensional level, look for the activity
123
+ if (node[activityName]?.output?.data) {
124
+ output = node[activityName].output.data;
125
+ }
126
+ }
127
+ // Compute timing
128
+ const startedAt = entry.created_at;
129
+ const completedAt = entry.expired_at;
130
+ let durationMs;
131
+ if (startedAt && completedAt) {
132
+ durationMs = new Date(completedAt).getTime() - new Date(startedAt).getTime();
133
+ }
134
+ activities.push({
135
+ name: activityName,
136
+ type: entry.aid,
137
+ dimension,
138
+ input: entry.data,
139
+ output,
140
+ started_at: startedAt,
141
+ completed_at: completedAt,
142
+ duration_ms: durationMs,
143
+ retry_attempt: entry.code === undefined ? 0 : undefined,
144
+ cycle_iteration: cycleIteration > 0 ? cycleIteration : undefined,
145
+ error: null,
146
+ });
147
+ }
148
+ // Sort by time, then by dimension for cycle ordering
149
+ activities.sort((a, b) => {
150
+ const timeA = a.started_at || '';
151
+ const timeB = b.started_at || '';
152
+ return timeA.localeCompare(timeB);
153
+ });
154
+ return activities;
155
+ }
80
156
  /**
81
157
  * Inflates the dependency data into a JobExport object by
82
158
  * organizing the dimensional isolate in such a way as to interleave
@@ -4,7 +4,7 @@ import { QuorumService } from '../quorum';
4
4
  import { WorkerService } from '../worker';
5
5
  import { JobState, JobData, JobOutput, JobStatus, JobInterruptOptions, ExtensionType } from '../../types/job';
6
6
  import { HotMeshConfig, HotMeshManifest } from '../../types/hotmesh';
7
- import { JobExport } from '../../types/exporter';
7
+ import { ExportOptions, JobExport } from '../../types/exporter';
8
8
  import { JobMessageCallback, QuorumMessage, QuorumMessageCallback, QuorumProfile, ThrottleOptions } from '../../types/quorum';
9
9
  import { StringAnyType, StringStringType } from '../../types/serializer';
10
10
  import { JobStatsInput, GetStatsOptions, IdsResponse, StatsResponse } from '../../types/stats';
@@ -632,7 +632,7 @@ declare class HotMesh {
632
632
  * activity data, transitions, and dependency chains. Useful for
633
633
  * debugging, auditing, and visualizing workflow execution.
634
634
  */
635
- export(jobId: string): Promise<JobExport>;
635
+ export(jobId: string, options?: ExportOptions): Promise<JobExport>;
636
636
  /**
637
637
  * Returns all raw key-value pairs for a job's HASH record. This is
638
638
  * the lowest-level read — it returns internal engine fields alongside
@@ -769,8 +769,8 @@ class HotMesh {
769
769
  * activity data, transitions, and dependency chains. Useful for
770
770
  * debugging, auditing, and visualizing workflow execution.
771
771
  */
772
- async export(jobId) {
773
- return await this.engine?.export(jobId);
772
+ async export(jobId, options = {}) {
773
+ return await this.engine?.export(jobId, options);
774
774
  }
775
775
  /**
776
776
  * Returns all raw key-value pairs for a job's HASH record. This is
@@ -1,4 +1,4 @@
1
- import { HMSH_BLOCK_TIME_MS, HMSH_MAX_RETRIES, HMSH_MAX_TIMEOUT_MS, HMSH_GRADUATED_INTERVAL_MS, HMSH_CODE_UNACKED, HMSH_CODE_UNKNOWN, HMSH_STATUS_UNKNOWN, HMSH_XCLAIM_COUNT, HMSH_XCLAIM_DELAY_MS, HMSH_XPENDING_COUNT, MAX_DELAY, MAX_STREAM_BACKOFF, INITIAL_STREAM_BACKOFF, MAX_STREAM_RETRIES } from '../../../modules/enums';
1
+ import { HMSH_BLOCK_TIME_MS, HMSH_MAX_RETRIES, HMSH_MAX_TIMEOUT_MS, HMSH_GRADUATED_INTERVAL_MS, HMSH_CODE_UNACKED, HMSH_CODE_UNKNOWN, HMSH_STATUS_UNKNOWN, HMSH_XCLAIM_COUNT, HMSH_XCLAIM_DELAY_MS, HMSH_XPENDING_COUNT, MAX_DELAY, MAX_STREAM_BACKOFF, INITIAL_STREAM_BACKOFF, MAX_STREAM_RETRIES, HMSH_POISON_MESSAGE_THRESHOLD } from '../../../modules/enums';
2
2
  import { RouterConfig } from '../../../types/stream';
3
3
  export declare class RouterConfigManager {
4
4
  static validateThrottle(delayInMillis: number): void;
@@ -8,4 +8,4 @@ export declare class RouterConfigManager {
8
8
  readonly: boolean;
9
9
  };
10
10
  }
11
- export { HMSH_BLOCK_TIME_MS, HMSH_MAX_RETRIES, HMSH_MAX_TIMEOUT_MS, HMSH_GRADUATED_INTERVAL_MS, HMSH_CODE_UNACKED, HMSH_CODE_UNKNOWN, HMSH_STATUS_UNKNOWN, HMSH_XCLAIM_COUNT, HMSH_XCLAIM_DELAY_MS, HMSH_XPENDING_COUNT, MAX_DELAY, MAX_STREAM_BACKOFF, INITIAL_STREAM_BACKOFF, MAX_STREAM_RETRIES, };
11
+ export { HMSH_BLOCK_TIME_MS, HMSH_MAX_RETRIES, HMSH_MAX_TIMEOUT_MS, HMSH_GRADUATED_INTERVAL_MS, HMSH_CODE_UNACKED, HMSH_CODE_UNKNOWN, HMSH_STATUS_UNKNOWN, HMSH_XCLAIM_COUNT, HMSH_XCLAIM_DELAY_MS, HMSH_XPENDING_COUNT, MAX_DELAY, MAX_STREAM_BACKOFF, INITIAL_STREAM_BACKOFF, MAX_STREAM_RETRIES, HMSH_POISON_MESSAGE_THRESHOLD, };
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.MAX_STREAM_RETRIES = exports.INITIAL_STREAM_BACKOFF = exports.MAX_STREAM_BACKOFF = exports.MAX_DELAY = exports.HMSH_XPENDING_COUNT = exports.HMSH_XCLAIM_DELAY_MS = exports.HMSH_XCLAIM_COUNT = exports.HMSH_STATUS_UNKNOWN = exports.HMSH_CODE_UNKNOWN = exports.HMSH_CODE_UNACKED = exports.HMSH_GRADUATED_INTERVAL_MS = exports.HMSH_MAX_TIMEOUT_MS = exports.HMSH_MAX_RETRIES = exports.HMSH_BLOCK_TIME_MS = exports.RouterConfigManager = void 0;
3
+ exports.HMSH_POISON_MESSAGE_THRESHOLD = exports.MAX_STREAM_RETRIES = exports.INITIAL_STREAM_BACKOFF = exports.MAX_STREAM_BACKOFF = exports.MAX_DELAY = exports.HMSH_XPENDING_COUNT = exports.HMSH_XCLAIM_DELAY_MS = exports.HMSH_XCLAIM_COUNT = exports.HMSH_STATUS_UNKNOWN = exports.HMSH_CODE_UNKNOWN = exports.HMSH_CODE_UNACKED = exports.HMSH_GRADUATED_INTERVAL_MS = exports.HMSH_MAX_TIMEOUT_MS = exports.HMSH_MAX_RETRIES = exports.HMSH_BLOCK_TIME_MS = exports.RouterConfigManager = void 0;
4
4
  const enums_1 = require("../../../modules/enums");
5
5
  Object.defineProperty(exports, "HMSH_BLOCK_TIME_MS", { enumerable: true, get: function () { return enums_1.HMSH_BLOCK_TIME_MS; } });
6
6
  Object.defineProperty(exports, "HMSH_MAX_RETRIES", { enumerable: true, get: function () { return enums_1.HMSH_MAX_RETRIES; } });
@@ -16,6 +16,7 @@ Object.defineProperty(exports, "MAX_DELAY", { enumerable: true, get: function ()
16
16
  Object.defineProperty(exports, "MAX_STREAM_BACKOFF", { enumerable: true, get: function () { return enums_1.MAX_STREAM_BACKOFF; } });
17
17
  Object.defineProperty(exports, "INITIAL_STREAM_BACKOFF", { enumerable: true, get: function () { return enums_1.INITIAL_STREAM_BACKOFF; } });
18
18
  Object.defineProperty(exports, "MAX_STREAM_RETRIES", { enumerable: true, get: function () { return enums_1.MAX_STREAM_RETRIES; } });
19
+ Object.defineProperty(exports, "HMSH_POISON_MESSAGE_THRESHOLD", { enumerable: true, get: function () { return enums_1.HMSH_POISON_MESSAGE_THRESHOLD; } });
19
20
  class RouterConfigManager {
20
21
  static validateThrottle(delayInMillis) {
21
22
  if (!Number.isInteger(delayInMillis) ||
@@ -356,6 +356,68 @@ class ConsumptionManager {
356
356
  }
357
357
  async consumeOne(stream, group, id, input, callback) {
358
358
  this.logger.debug(`stream-read-one`, { group, stream, id });
359
+ // Poison message circuit breaker. This is a SAFETY NET that sits above
360
+ // the normal retry mechanism (ErrorHandler.handleRetry / shouldRetry).
361
+ //
362
+ // Normal retry flow: handleRetry() checks metadata.try against the
363
+ // configured retryPolicy.maximumAttempts (or _streamRetryConfig) and
364
+ // applies exponential backoff + visibility delays. That mechanism is
365
+ // the primary retry budget and is what developers configure via
366
+ // HotMesh.init({ workers: [{ retryPolicy: { maximumAttempts, ... } }] }).
367
+ //
368
+ // This check catches messages that have somehow exceeded the normal
369
+ // budget — e.g., when no retryPolicy is configured, when the retry
370
+ // logic is bypassed by an infrastructure error, or when a message
371
+ // re-enters the stream through a path that doesn't increment
372
+ // metadata.try. The threshold is the HIGHER of the configured retry
373
+ // budget and the system-wide HMSH_POISON_MESSAGE_THRESHOLD, so it
374
+ // never interferes with legitimate developer-configured retries.
375
+ const retryAttempt = input._retryAttempt || 0;
376
+ const configuredMax = input._streamRetryConfig?.max_retry_attempts
377
+ ?? this.retryPolicy?.maximumAttempts
378
+ ?? 0;
379
+ const poisonThreshold = Math.max(configuredMax, config_1.HMSH_POISON_MESSAGE_THRESHOLD);
380
+ if (retryAttempt >= poisonThreshold) {
381
+ this.logger.error(`stream-poison-message-detected`, {
382
+ group,
383
+ stream,
384
+ id,
385
+ retryAttempt,
386
+ poisonThreshold,
387
+ configuredMaxAttempts: configuredMax,
388
+ systemThreshold: config_1.HMSH_POISON_MESSAGE_THRESHOLD,
389
+ topic: input.metadata?.topic,
390
+ activityId: input.metadata?.aid,
391
+ jobId: input.metadata?.jid,
392
+ metadata: input.metadata,
393
+ });
394
+ const errorOutput = this.errorHandler.structureUnhandledError(input, new Error(`Poison message detected: retry attempt ${retryAttempt} reached ` +
395
+ `threshold ${poisonThreshold} (configured: ${configuredMax}, ` +
396
+ `system: ${config_1.HMSH_POISON_MESSAGE_THRESHOLD}). Discarding message ` +
397
+ `for activity "${input.metadata?.aid || 'unknown'}" ` +
398
+ `(topic: ${input.metadata?.topic || 'unknown'}, ` +
399
+ `job: ${input.metadata?.jid || 'unknown'}).`));
400
+ try {
401
+ await this.publishMessage(null, errorOutput);
402
+ }
403
+ catch (publishErr) {
404
+ this.logger.error(`stream-poison-message-publish-error`, {
405
+ error: publishErr,
406
+ stream,
407
+ id,
408
+ retryAttempt,
409
+ poisonThreshold,
410
+ });
411
+ }
412
+ // Mark as dead-lettered if the provider supports it; otherwise just ack
413
+ if (this.stream.deadLetterMessages) {
414
+ await this.stream.deadLetterMessages(stream, group, [id]);
415
+ }
416
+ else {
417
+ await this.ackAndDelete(stream, group, id);
418
+ }
419
+ return;
420
+ }
359
421
  let output;
360
422
  const telemetry = new telemetry_1.RouterTelemetry(this.appId);
361
423
  try {
@@ -367,12 +429,25 @@ class ConsumptionManager {
367
429
  catch (err) {
368
430
  this.logger.error(`stream-read-one-error`, { group, stream, id, err });
369
431
  telemetry.setStreamErrorFromException(err);
432
+ output = this.errorHandler.structureUnhandledError(input, err instanceof Error ? err : new Error(String(err)));
433
+ }
434
+ try {
435
+ const messageId = await this.publishResponse(input, output);
436
+ telemetry.setStreamAttributes({ 'app.worker.mid': messageId });
437
+ }
438
+ catch (publishErr) {
439
+ // If publishResponse fails, still ack the message to prevent
440
+ // infinite reprocessing. Log the error for debugging.
441
+ this.logger.error(`stream-publish-response-error`, {
442
+ group, stream, id, error: publishErr,
443
+ });
444
+ this.errorCount++;
445
+ }
446
+ finally {
447
+ await this.ackAndDelete(stream, group, id);
448
+ telemetry.endStreamSpan();
449
+ this.logger.debug(`stream-read-one-end`, { group, stream, id });
370
450
  }
371
- const messageId = await this.publishResponse(input, output);
372
- telemetry.setStreamAttributes({ 'app.worker.mid': messageId });
373
- await this.ackAndDelete(stream, group, id);
374
- telemetry.endStreamSpan();
375
- this.logger.debug(`stream-read-one-end`, { group, stream, id });
376
451
  }
377
452
  async execStreamLeg(input, stream, id, callback) {
378
453
  let output;