@hotmeshio/hotmesh 0.10.1 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/README.md +1 -1
  2. package/build/modules/errors.d.ts +2 -0
  3. package/build/modules/errors.js +2 -0
  4. package/build/modules/key.js +3 -2
  5. package/build/package.json +3 -1
  6. package/build/services/activities/worker.js +10 -0
  7. package/build/services/dba/index.d.ts +54 -19
  8. package/build/services/dba/index.js +129 -31
  9. package/build/services/durable/client.js +6 -1
  10. package/build/services/durable/exporter.d.ts +75 -3
  11. package/build/services/durable/exporter.js +768 -2
  12. package/build/services/durable/handle.d.ts +12 -1
  13. package/build/services/durable/handle.js +13 -0
  14. package/build/services/durable/schemas/factory.d.ts +1 -1
  15. package/build/services/durable/schemas/factory.js +27 -4
  16. package/build/services/durable/worker.d.ts +2 -2
  17. package/build/services/durable/worker.js +15 -9
  18. package/build/services/durable/workflow/context.js +2 -0
  19. package/build/services/durable/workflow/execChild.js +5 -2
  20. package/build/services/durable/workflow/hook.js +6 -0
  21. package/build/services/durable/workflow/proxyActivities.js +3 -4
  22. package/build/services/engine/index.js +5 -3
  23. package/build/services/store/index.d.ts +40 -0
  24. package/build/services/store/providers/postgres/exporter-sql.d.ts +23 -0
  25. package/build/services/store/providers/postgres/exporter-sql.js +52 -0
  26. package/build/services/store/providers/postgres/kvtables.js +12 -1
  27. package/build/services/store/providers/postgres/postgres.d.ts +34 -0
  28. package/build/services/store/providers/postgres/postgres.js +99 -0
  29. package/build/services/stream/providers/postgres/kvtables.d.ts +1 -1
  30. package/build/services/stream/providers/postgres/kvtables.js +175 -82
  31. package/build/services/stream/providers/postgres/lifecycle.d.ts +4 -3
  32. package/build/services/stream/providers/postgres/lifecycle.js +6 -5
  33. package/build/services/stream/providers/postgres/messages.d.ts +9 -6
  34. package/build/services/stream/providers/postgres/messages.js +121 -75
  35. package/build/services/stream/providers/postgres/notifications.d.ts +5 -2
  36. package/build/services/stream/providers/postgres/notifications.js +39 -35
  37. package/build/services/stream/providers/postgres/postgres.d.ts +20 -118
  38. package/build/services/stream/providers/postgres/postgres.js +83 -140
  39. package/build/services/stream/registry.d.ts +62 -0
  40. package/build/services/stream/registry.js +198 -0
  41. package/build/services/worker/index.js +20 -6
  42. package/build/types/dba.d.ts +31 -5
  43. package/build/types/durable.d.ts +6 -1
  44. package/build/types/error.d.ts +2 -0
  45. package/build/types/exporter.d.ts +166 -0
  46. package/build/types/hotmesh.d.ts +7 -1
  47. package/build/types/index.d.ts +1 -1
  48. package/build/types/stream.d.ts +2 -0
  49. package/package.json +3 -1
@@ -1,5 +1,5 @@
1
1
  import { HotMesh } from '../hotmesh';
2
- import { DurableJobExport, ExportOptions } from '../../types/exporter';
2
+ import { DurableJobExport, ExportOptions, ExecutionExportOptions, WorkflowExecution } from '../../types/exporter';
3
3
  import { JobInterruptOptions } from '../../types/job';
4
4
  import { StreamError } from '../../types/stream';
5
5
  import { ExporterService } from './exporter';
@@ -44,6 +44,17 @@ export declare class WorkflowHandleService {
44
44
  * Exports the workflow state to a JSON object.
45
45
  */
46
46
  export(options?: ExportOptions): Promise<DurableJobExport>;
47
+ /**
48
+ * Exports the workflow as a Temporal-like execution event history.
49
+ *
50
+ * **Sparse mode** (default): transforms the main workflow's timeline
51
+ * into a flat event list with workflow lifecycle, activity, child workflow,
52
+ * timer, and signal events.
53
+ *
54
+ * **Verbose mode**: recursively fetches child workflow jobs and attaches
55
+ * their full execution histories as nested `children`.
56
+ */
57
+ exportExecution(options?: ExecutionExportOptions): Promise<WorkflowExecution>;
47
58
  /**
48
59
  * Sends a signal to the workflow. This is a way to send
49
60
  * a message to a workflow that is paused due to having
@@ -43,6 +43,19 @@ class WorkflowHandleService {
43
43
  async export(options) {
44
44
  return this.exporter.export(this.workflowId, options);
45
45
  }
46
+ /**
47
+ * Exports the workflow as a Temporal-like execution event history.
48
+ *
49
+ * **Sparse mode** (default): transforms the main workflow's timeline
50
+ * into a flat event list with workflow lifecycle, activity, child workflow,
51
+ * timer, and signal events.
52
+ *
53
+ * **Verbose mode**: recursively fetches child workflow jobs and attaches
54
+ * their full execution histories as nested `children`.
55
+ */
56
+ async exportExecution(options) {
57
+ return this.exporter.exportExecution(this.workflowId, this.workflowTopic, options);
58
+ }
46
59
  /**
47
60
  * Sends a signal to the workflow. This is a way to send
48
61
  * a message to a workflow that is paused due to having
@@ -17,7 +17,7 @@
17
17
  * * Service Meshes
18
18
  * * Master Data Management systems
19
19
  */
20
- declare const APP_VERSION = "5";
20
+ declare const APP_VERSION = "8";
21
21
  declare const APP_ID = "durable";
22
22
  /**
23
23
  * returns a new durable workflow schema
@@ -20,7 +20,7 @@
20
20
  */
21
21
  Object.defineProperty(exports, "__esModule", { value: true });
22
22
  exports.APP_ID = exports.APP_VERSION = exports.getWorkflowYAML = void 0;
23
- const APP_VERSION = '5';
23
+ const APP_VERSION = '8';
24
24
  exports.APP_VERSION = APP_VERSION;
25
25
  const APP_ID = 'durable';
26
26
  exports.APP_ID = APP_ID;
@@ -66,7 +66,13 @@ const getWorkflowYAML = (app, version) => {
66
66
  description: the arguments to pass to the flow
67
67
  type: array
68
68
  workflowTopic:
69
- description: the stream topic the worker is listening on
69
+ description: concatenated taskQueue-workflowName for engine-internal routing
70
+ type: string
71
+ taskQueue:
72
+ description: the task queue name (stream_name in worker_streams)
73
+ type: string
74
+ workflowName:
75
+ description: the workflow function name (workflow_name in worker_streams)
70
76
  type: string
71
77
  backoffCoefficient:
72
78
  description: the time multiple in seconds to backoff before retrying
@@ -155,7 +161,7 @@ const getWorkflowYAML = (app, version) => {
155
161
  worker:
156
162
  title: Main Worker - Calls linked Workflow functions
157
163
  type: worker
158
- topic: '{trigger.output.data.workflowTopic}'
164
+ topic: '{trigger.output.data.taskQueue}'
159
165
  emit: '{$job.data.done}'
160
166
  input:
161
167
  schema:
@@ -169,6 +175,8 @@ const getWorkflowYAML = (app, version) => {
169
175
  type: array
170
176
  workflowTopic:
171
177
  type: string
178
+ workflowName:
179
+ type: string
172
180
  canRetry:
173
181
  type: boolean
174
182
  expire:
@@ -178,6 +186,7 @@ const getWorkflowYAML = (app, version) => {
178
186
  workflowId: '{trigger.output.data.workflowId}'
179
187
  arguments: '{trigger.output.data.arguments}'
180
188
  workflowTopic: '{trigger.output.data.workflowTopic}'
189
+ workflowName: '{trigger.output.data.workflowName}'
181
190
  expire: '{trigger.output.data.expire}'
182
191
  canRetry:
183
192
  '@pipe':
@@ -348,6 +357,10 @@ const getWorkflowYAML = (app, version) => {
348
357
  properties:
349
358
  workflowTopic:
350
359
  type: string
360
+ taskQueue:
361
+ type: string
362
+ workflowName:
363
+ type: string
351
364
  backoffCoefficient:
352
365
  type: number
353
366
  maximumAttempts:
@@ -387,6 +400,7 @@ const getWorkflowYAML = (app, version) => {
387
400
  persistent: '{worker.output.data.persistent}'
388
401
  signalIn: '{worker.output.data.signalIn}'
389
402
  workflowId: '{worker.output.data.workflowId}'
403
+ taskQueue: '{worker.output.data.taskQueue}'
390
404
  workflowName: '{worker.output.data.workflowName}'
391
405
  workflowTopic: '{worker.output.data.workflowTopic}'
392
406
  entity: '{worker.output.data.entity}'
@@ -893,7 +907,7 @@ const getWorkflowYAML = (app, version) => {
893
907
  signaler_worker:
894
908
  title: Signal In - Worker
895
909
  type: worker
896
- topic: '{signaler.hook.data.workflowTopic}'
910
+ topic: '{signaler.hook.data.taskQueue}'
897
911
  input:
898
912
  schema:
899
913
  type: object
@@ -906,6 +920,8 @@ const getWorkflowYAML = (app, version) => {
906
920
  type: string
907
921
  arguments:
908
922
  type: array
923
+ workflowName:
924
+ type: string
909
925
  canRetry:
910
926
  type: boolean
911
927
  expire:
@@ -915,6 +931,7 @@ const getWorkflowYAML = (app, version) => {
915
931
  originJobId: '{trigger.output.data.originJobId}'
916
932
  workflowDimension: '{signaler.output.metadata.dad}'
917
933
  arguments: '{signaler.hook.data.arguments}'
934
+ workflowName: '{signaler.hook.data.workflowName}'
918
935
  expire: '{trigger.output.data.expire}'
919
936
  canRetry:
920
937
  '@pipe':
@@ -1136,6 +1153,7 @@ const getWorkflowYAML = (app, version) => {
1136
1153
  persistent: '{signaler_worker.output.data.persistent}'
1137
1154
  signalIn: '{signaler_worker.output.data.signalIn}'
1138
1155
  workflowId: '{signaler_worker.output.data.workflowId}'
1156
+ taskQueue: '{signaler_worker.output.data.taskQueue}'
1139
1157
  workflowName: '{signaler_worker.output.data.workflowName}'
1140
1158
  workflowTopic: '{signaler_worker.output.data.workflowTopic}'
1141
1159
  entity: '{signaler_worker.output.data.entity}'
@@ -1942,6 +1960,11 @@ const getWorkflowYAML = (app, version) => {
1942
1960
  - ['{collator_trigger.output.data.items}', '{collator_cycle_hook.output.data.cur_index}']
1943
1961
  - ['{@array.get}', workflowId]
1944
1962
  - ['{@object.get}']
1963
+ taskQueue:
1964
+ '@pipe':
1965
+ - ['{collator_trigger.output.data.items}', '{collator_cycle_hook.output.data.cur_index}']
1966
+ - ['{@array.get}', taskQueue]
1967
+ - ['{@object.get}']
1945
1968
  workflowName:
1946
1969
  '@pipe':
1947
1970
  - ['{collator_trigger.output.data.items}', '{collator_cycle_hook.output.data.cur_index}']
@@ -200,7 +200,7 @@ export declare class WorkerService {
200
200
  /**
201
201
  * @private
202
202
  */
203
- initWorkflowWorker(config: WorkerConfig, workflowTopic: string, workflowFunction: Function): Promise<HotMesh>;
203
+ initWorkflowWorker(config: WorkerConfig, taskQueue: string, workflowFunctionName: string, workflowTopic: string, workflowFunction: Function): Promise<HotMesh>;
204
204
  /**
205
205
  * @private
206
206
  */
@@ -213,7 +213,7 @@ export declare class WorkerService {
213
213
  /**
214
214
  * @private
215
215
  */
216
- wrapWorkflowFunction(workflowFunction: Function, workflowTopic: string, config: WorkerConfig): Function;
216
+ wrapWorkflowFunction(workflowFunction: Function, workflowTopic: string, workflowFunctionName: string, config: WorkerConfig): Function;
217
217
  /**
218
218
  * @private
219
219
  */
@@ -345,13 +345,15 @@ class WorkerService {
345
345
  static async create(config) {
346
346
  const workflow = config.workflow;
347
347
  const [workflowFunctionName, workflowFunction] = WorkerService.resolveWorkflowTarget(workflow);
348
- const baseTopic = `${config.taskQueue}-${workflowFunctionName}`;
349
- const activityTopic = `${baseTopic}-activity`;
350
- const workflowTopic = `${baseTopic}`;
348
+ // Separate taskQueue from workflowName - no concatenation for stream_name
349
+ const taskQueue = config.taskQueue;
350
+ const activityTopic = `${taskQueue}-activity`;
351
+ // workflowTopic remains concatenated for engine-internal routing (graph.subscribes)
352
+ const workflowTopic = `${taskQueue}-${workflowFunctionName}`;
351
353
  //initialize supporting workflows
352
354
  const worker = new WorkerService();
353
355
  worker.activityRunner = await worker.initActivityWorker(config, activityTopic);
354
- worker.workflowRunner = await worker.initWorkflowWorker(config, workflowTopic, workflowFunction);
356
+ worker.workflowRunner = await worker.initWorkflowWorker(config, taskQueue, workflowFunctionName, workflowTopic, workflowFunction);
355
357
  search_1.Search.configureSearchIndex(worker.workflowRunner, config.search);
356
358
  await WorkerService.activateWorkflow(worker.workflowRunner);
357
359
  return worker;
@@ -470,7 +472,7 @@ class WorkerService {
470
472
  /**
471
473
  * @private
472
474
  */
473
- async initWorkflowWorker(config, workflowTopic, workflowFunction) {
475
+ async initWorkflowWorker(config, taskQueue, workflowFunctionName, workflowTopic, workflowFunction) {
474
476
  const providerConfig = config.connection;
475
477
  const targetNamespace = config?.namespace ?? factory_1.APP_ID;
476
478
  const optionsHash = WorkerService.hashOptions(config?.connection);
@@ -483,9 +485,10 @@ class WorkerService {
483
485
  engine: { connection: providerConfig },
484
486
  workers: [
485
487
  {
486
- topic: workflowTopic,
488
+ topic: taskQueue,
489
+ workflowName: workflowFunctionName,
487
490
  connection: providerConfig,
488
- callback: this.wrapWorkflowFunction(workflowFunction, workflowTopic, config).bind(this),
491
+ callback: this.wrapWorkflowFunction(workflowFunction, workflowTopic, workflowFunctionName, config).bind(this),
489
492
  },
490
493
  ],
491
494
  });
@@ -495,7 +498,7 @@ class WorkerService {
495
498
  /**
496
499
  * @private
497
500
  */
498
- wrapWorkflowFunction(workflowFunction, workflowTopic, config) {
501
+ wrapWorkflowFunction(workflowFunction, workflowTopic, workflowFunctionName, config) {
499
502
  return async (data) => {
500
503
  const counter = { counter: 0 };
501
504
  const interruptionRegistry = [];
@@ -532,7 +535,8 @@ class WorkerService {
532
535
  replayQuery = '-*[ehklptydr]-*';
533
536
  }
534
537
  context.set('workflowTopic', workflowTopic);
535
- context.set('workflowName', workflowTopic.split('-').pop());
538
+ context.set('workflowName', workflowFunctionName);
539
+ context.set('taskQueue', config.taskQueue);
536
540
  context.set('workflowTrace', data.metadata.trc);
537
541
  context.set('workflowSpan', data.metadata.spn);
538
542
  const store = this.workflowRunner.engine.store;
@@ -693,6 +697,8 @@ class WorkerService {
693
697
  workflowDimension: err.workflowDimension,
694
698
  workflowId: err.workflowId,
695
699
  workflowTopic: err.workflowTopic,
700
+ taskQueue: err.taskQueue,
701
+ workflowName: err.workflowName,
696
702
  },
697
703
  };
698
704
  }
@@ -57,6 +57,7 @@ function getContext() {
57
57
  const interruptionRegistry = store.get('interruptionRegistry');
58
58
  const workflowDimension = store.get('workflowDimension') ?? '';
59
59
  const workflowTopic = store.get('workflowTopic');
60
+ const taskQueue = store.get('taskQueue');
60
61
  const connection = store.get('connection');
61
62
  const namespace = store.get('namespace');
62
63
  const originJobId = store.get('originJobId');
@@ -78,6 +79,7 @@ function getContext() {
78
79
  originJobId,
79
80
  raw,
80
81
  replay,
82
+ taskQueue,
81
83
  workflowId,
82
84
  workflowDimension,
83
85
  workflowTopic,
@@ -9,7 +9,7 @@ const didRun_1 = require("./didRun");
9
9
  * @private
10
10
  */
11
11
  function getChildInterruptPayload(context, options, execIndex) {
12
- const { workflowId, originJobId, workflowDimension, expire } = context;
12
+ const { workflowId, originJobId, workflowDimension, expire, taskQueue: parentTaskQueue } = context;
13
13
  let childJobId;
14
14
  if (options.workflowId) {
15
15
  childJobId = options.workflowId;
@@ -21,7 +21,8 @@ function getChildInterruptPayload(context, options, execIndex) {
21
21
  childJobId = `-${options.workflowName}-${(0, common_1.guid)()}-${workflowDimension}-${execIndex}`;
22
22
  }
23
23
  const parentWorkflowId = workflowId;
24
- const taskQueueName = options.taskQueue ?? options.entity;
24
+ // Use explicit taskQueue, or parent's taskQueue, or entity as fallback
25
+ const taskQueueName = options.taskQueue ?? parentTaskQueue ?? options.entity;
25
26
  const workflowName = options.taskQueue
26
27
  ? options.workflowName
27
28
  : options.entity ?? options.workflowName;
@@ -42,6 +43,8 @@ function getChildInterruptPayload(context, options, execIndex) {
42
43
  workflowDimension: workflowDimension,
43
44
  workflowId: childJobId,
44
45
  workflowTopic,
46
+ taskQueue: taskQueueName,
47
+ workflowName: workflowName,
45
48
  };
46
49
  }
47
50
  /**
@@ -109,10 +109,16 @@ async function hook(options) {
109
109
  entity: options.entity,
110
110
  }, null, 2)}`);
111
111
  }
112
+ // Extract taskQueue and workflowName from targetTopic
113
+ const hookTaskQueue = options.taskQueue ?? options.entity ?? targetTopic.split('-')[0];
114
+ const hookWorkflowName = options.workflowName ?? options.entity ??
115
+ (targetTopic.startsWith(`${hookTaskQueue}-`) ? targetTopic.substring(hookTaskQueue.length + 1) : targetTopic);
112
116
  const payload = {
113
117
  arguments: [...options.args],
114
118
  id: targetWorkflowId,
115
119
  workflowTopic: targetTopic,
120
+ taskQueue: hookTaskQueue,
121
+ workflowName: hookWorkflowName,
116
122
  backoffCoefficient: options.config?.backoffCoefficient || common_1.HMSH_DURABLE_EXP_BACKOFF,
117
123
  maximumAttempts: options.config?.maximumAttempts || common_1.HMSH_DURABLE_MAX_ATTEMPTS,
118
124
  maximumInterval: (0, common_1.s)(options?.config?.maximumInterval ?? common_1.HMSH_DURABLE_MAX_INTERVAL),
@@ -9,12 +9,11 @@ const didRun_1 = require("./didRun");
9
9
  * @private
10
10
  */
11
11
  function getProxyInterruptPayload(context, activityName, execIndex, args, options) {
12
- const { workflowDimension, workflowId, originJobId, workflowTopic, expire } = context;
13
- // Use explicitly provided taskQueue, otherwise derive from workflow (original behavior)
14
- // This keeps backward compatibility while allowing explicit global/custom queues
12
+ const { workflowDimension, workflowId, originJobId, taskQueue, expire } = context;
13
+ // Activity topic uses the task queue directly (no workflow name concatenation)
15
14
  const activityTopic = options?.taskQueue
16
15
  ? `${options.taskQueue}-activity`
17
- : `${workflowTopic}-activity`;
16
+ : `${taskQueue}-activity`;
18
17
  const activityJobId = `-${workflowId}-$${activityName}${workflowDimension}-${execIndex}`;
19
18
  let maximumInterval;
20
19
  if (options?.retryPolicy?.maximumInterval) {
@@ -5,6 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
6
  exports.EngineService = void 0;
7
7
  const key_1 = require("../../modules/key");
8
+ const registry_1 = require("../stream/registry");
8
9
  const enums_1 = require("../../modules/enums");
9
10
  const utils_1 = require("../../modules/utils");
10
11
  const activities_1 = __importDefault(require("../activities"));
@@ -47,10 +48,11 @@ class EngineService {
47
48
  await instance.initSubChannel(config.engine.sub, config.engine.pub ?? config.engine.store);
48
49
  await instance.initStreamChannel(config.engine.stream, config.engine.store);
49
50
  instance.router = await instance.initRouter(config);
50
- const streamName = instance.store.mintKey(key_1.KeyType.STREAMS, {
51
- appId: instance.appId,
51
+ // Use singleton consumer via registry for engine stream
52
+ await registry_1.StreamConsumerRegistry.registerEngine(namespace, appId, guid, instance.processStreamMessage.bind(instance), instance.stream, instance.store, logger, {
53
+ reclaimDelay: config.engine.reclaimDelay,
54
+ reclaimCount: config.engine.reclaimCount,
52
55
  });
53
- instance.router.consumeMessages(streamName, 'ENGINE', instance.guid, instance.processStreamMessage.bind(instance));
54
56
  instance.taskService = new task_1.TaskService(instance.store, logger);
55
57
  instance.exporter = new exporter_1.ExporterService(instance.appId, instance.store, logger);
56
58
  instance.inited = (0, utils_1.formatISODate)(new Date());
@@ -91,5 +91,45 @@ declare abstract class StoreService<Provider extends ProviderClient, Transaction
91
91
  abstract setThrottleRate(options: ThrottleOptions): Promise<void>;
92
92
  abstract getThrottleRates(): Promise<StringStringType>;
93
93
  abstract getThrottleRate(topic: string): Promise<number>;
94
+ /**
95
+ * Fetch activity inputs for a workflow. Used by the exporter to enrich
96
+ * timeline events with activity arguments.
97
+ *
98
+ * @param workflowId - The workflow ID
99
+ * @param symbolField - The compressed symbol field for activity arguments
100
+ * @returns Map of job_id -> parsed input arguments and activityName:index -> parsed inputs
101
+ */
102
+ getActivityInputs?(workflowId: string, symbolField: string): Promise<{
103
+ byJobId: Map<string, any>;
104
+ byNameIndex: Map<string, any>;
105
+ }>;
106
+ /**
107
+ * Fetch child workflow inputs in batch. Used by the exporter to enrich
108
+ * child workflow events with their arguments.
109
+ *
110
+ * @param childJobKeys - Array of child job keys to fetch
111
+ * @param symbolField - The compressed symbol field for workflow arguments
112
+ * @returns Map of child_workflow_id -> parsed input arguments
113
+ */
114
+ getChildWorkflowInputs?(childJobKeys: string[], symbolField: string): Promise<Map<string, any>>;
115
+ /**
116
+ * Fetch job record and attributes by key. Used by the exporter to
117
+ * reconstruct execution history for expired jobs.
118
+ *
119
+ * @param jobKey - The job key (e.g., "hmsh:durable:j:workflowId")
120
+ * @returns Job row and all attributes
121
+ */
122
+ getJobByKeyDirect?(jobKey: string): Promise<{
123
+ job: {
124
+ id: string;
125
+ key: string;
126
+ status: number;
127
+ created_at: Date;
128
+ updated_at: Date;
129
+ expired_at?: Date;
130
+ is_live: boolean;
131
+ };
132
+ attributes: Record<string, string>;
133
+ }>;
94
134
  }
95
135
  export { StoreService };
@@ -0,0 +1,23 @@
1
+ /**
2
+ * SQL queries for exporter enrichment (Postgres provider).
3
+ * These queries support the exporter's input enrichment and direct query features.
4
+ */
5
+ /**
6
+ * Fetch job record by key.
7
+ */
8
+ export declare const GET_JOB_BY_KEY = "\n SELECT id, key, status, created_at, updated_at, expired_at, is_live\n FROM {schema}.jobs\n WHERE key = $1\n LIMIT 1\n";
9
+ /**
10
+ * Fetch all attributes for a job.
11
+ */
12
+ export declare const GET_JOB_ATTRIBUTES = "\n SELECT field, value\n FROM {schema}.jobs_attributes\n WHERE job_id = $1\n ORDER BY field\n";
13
+ /**
14
+ * Fetch activity inputs for a workflow.
15
+ * Matches all activity jobs for the given workflow and extracts their input arguments.
16
+ */
17
+ export declare const GET_ACTIVITY_INPUTS = "\n SELECT j.key, ja.value\n FROM {schema}.jobs j\n JOIN {schema}.jobs_attributes ja ON ja.job_id = j.id\n WHERE j.key LIKE $1\n AND ja.field = $2\n";
18
+ /**
19
+ * Fetch child workflow inputs in batch.
20
+ * Uses parameterized IN clause for exact-match efficiency.
21
+ * Note: This query template must be built dynamically with the correct number of placeholders.
22
+ */
23
+ export declare function buildChildWorkflowInputsQuery(childCount: number, schema: string): string;
@@ -0,0 +1,52 @@
1
+ "use strict";
2
+ /**
3
+ * SQL queries for exporter enrichment (Postgres provider).
4
+ * These queries support the exporter's input enrichment and direct query features.
5
+ */
6
+ Object.defineProperty(exports, "__esModule", { value: true });
7
+ exports.buildChildWorkflowInputsQuery = exports.GET_ACTIVITY_INPUTS = exports.GET_JOB_ATTRIBUTES = exports.GET_JOB_BY_KEY = void 0;
8
+ /**
9
+ * Fetch job record by key.
10
+ */
11
+ exports.GET_JOB_BY_KEY = `
12
+ SELECT id, key, status, created_at, updated_at, expired_at, is_live
13
+ FROM {schema}.jobs
14
+ WHERE key = $1
15
+ LIMIT 1
16
+ `;
17
+ /**
18
+ * Fetch all attributes for a job.
19
+ */
20
+ exports.GET_JOB_ATTRIBUTES = `
21
+ SELECT field, value
22
+ FROM {schema}.jobs_attributes
23
+ WHERE job_id = $1
24
+ ORDER BY field
25
+ `;
26
+ /**
27
+ * Fetch activity inputs for a workflow.
28
+ * Matches all activity jobs for the given workflow and extracts their input arguments.
29
+ */
30
+ exports.GET_ACTIVITY_INPUTS = `
31
+ SELECT j.key, ja.value
32
+ FROM {schema}.jobs j
33
+ JOIN {schema}.jobs_attributes ja ON ja.job_id = j.id
34
+ WHERE j.key LIKE $1
35
+ AND ja.field = $2
36
+ `;
37
+ /**
38
+ * Fetch child workflow inputs in batch.
39
+ * Uses parameterized IN clause for exact-match efficiency.
40
+ * Note: This query template must be built dynamically with the correct number of placeholders.
41
+ */
42
+ function buildChildWorkflowInputsQuery(childCount, schema) {
43
+ const placeholders = Array.from({ length: childCount }, (_, i) => `$${i + 1}`).join(',');
44
+ return `
45
+ SELECT j.key, ja.value
46
+ FROM ${schema}.jobs j
47
+ JOIN ${schema}.jobs_attributes ja ON ja.job_id = j.id
48
+ WHERE j.key IN (${placeholders})
49
+ AND ja.field = $${childCount + 1}
50
+ `;
51
+ }
52
+ exports.buildChildWorkflowInputsQuery = buildChildWorkflowInputsQuery;
@@ -182,6 +182,7 @@ const KVTables = (context) => ({
182
182
  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
183
183
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
184
184
  expired_at TIMESTAMP WITH TIME ZONE,
185
+ pruned_at TIMESTAMP WITH TIME ZONE,
185
186
  is_live BOOLEAN DEFAULT TRUE,
186
187
  PRIMARY KEY (id)
187
188
  ) PARTITION BY HASH (id);
@@ -214,8 +215,18 @@ const KVTables = (context) => ({
214
215
  ON ${fullTableName} (entity, status);
215
216
  `);
216
217
  await client.query(`
217
- CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_expired_at
218
+ CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_expired_at
218
219
  ON ${fullTableName} (expired_at);
220
+ `);
221
+ await client.query(`
222
+ CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_pruned_at
223
+ ON ${fullTableName} (pruned_at) WHERE pruned_at IS NULL;
224
+ `);
225
+ // Index for paginated entity listing with sort (dashboard entity queries)
226
+ await client.query(`
227
+ CREATE INDEX IF NOT EXISTS idx_${tableDef.name}_entity_created
228
+ ON ${fullTableName} (entity, created_at DESC)
229
+ WHERE entity IS NOT NULL;
219
230
  `);
220
231
  // Create function to update is_live flag in the schema
221
232
  await client.query(`
@@ -194,5 +194,39 @@ declare class PostgresStoreService extends StoreService<ProviderClient, Provider
194
194
  * Enhanced shouldScout that can handle notifications
195
195
  */
196
196
  shouldScout(): Promise<boolean>;
197
+ /**
198
+ * Fetch activity inputs for a workflow. Used by the exporter to enrich
199
+ * timeline events with activity arguments.
200
+ */
201
+ getActivityInputs(workflowId: string, symbolField: string): Promise<{
202
+ byJobId: Map<string, any>;
203
+ byNameIndex: Map<string, any>;
204
+ }>;
205
+ /**
206
+ * Fetch child workflow inputs in batch. Used by the exporter to enrich
207
+ * child workflow events with their arguments.
208
+ */
209
+ getChildWorkflowInputs(childJobKeys: string[], symbolField: string): Promise<Map<string, any>>;
210
+ /**
211
+ * Fetch job record and attributes by key. Used by the exporter to
212
+ * reconstruct execution history for expired jobs.
213
+ */
214
+ getJobByKeyDirect(jobKey: string): Promise<{
215
+ job: {
216
+ id: string;
217
+ key: string;
218
+ status: number;
219
+ created_at: Date;
220
+ updated_at: Date;
221
+ expired_at?: Date;
222
+ is_live: boolean;
223
+ };
224
+ attributes: Record<string, string>;
225
+ }>;
226
+ /**
227
+ * Parse a HotMesh-encoded value string.
228
+ * Values may be prefixed with `/s` (JSON), `/d` (number), `/t` or `/f` (boolean), `/n` (null).
229
+ */
230
+ private parseHmshValue;
197
231
  }
198
232
  export { PostgresStoreService };
@@ -1284,5 +1284,104 @@ class PostgresStoreService extends __1.StoreService {
1284
1284
  }
1285
1285
  return false;
1286
1286
  }
1287
+ // ── Exporter queries ───────────────────────────────────────────────────────
1288
+ /**
1289
+ * Fetch activity inputs for a workflow. Used by the exporter to enrich
1290
+ * timeline events with activity arguments.
1291
+ */
1292
+ async getActivityInputs(workflowId, symbolField) {
1293
+ const { GET_ACTIVITY_INPUTS } = await Promise.resolve().then(() => __importStar(require('./exporter-sql')));
1294
+ const schemaName = this.kvsql().safeName(this.appId);
1295
+ const sql = GET_ACTIVITY_INPUTS.replace(/{schema}/g, schemaName);
1296
+ const jobKeyPattern = `hmsh:${this.appId}:j:-${workflowId}-%`;
1297
+ const result = await this.pgClient.query(sql, [jobKeyPattern, symbolField]);
1298
+ const byJobId = new Map();
1299
+ const byNameIndex = new Map();
1300
+ for (const row of result.rows) {
1301
+ const jobKey = row.key;
1302
+ const jobId = jobKey.replace(`hmsh:${this.appId}:j:`, '');
1303
+ try {
1304
+ const parsed = this.parseHmshValue(row.value);
1305
+ byJobId.set(jobId, parsed);
1306
+ // Extract activityName and executionIndex from job_id
1307
+ // Format: -workflowId-$activityName-executionIndex
1308
+ const match = jobId.match(/\$([^-]+)-(\d+)$/);
1309
+ if (match) {
1310
+ const activityName = match[1];
1311
+ const executionIndex = match[2];
1312
+ byNameIndex.set(`${activityName}:${executionIndex}`, parsed);
1313
+ }
1314
+ }
1315
+ catch {
1316
+ // Skip unparseable values
1317
+ }
1318
+ }
1319
+ return { byJobId, byNameIndex };
1320
+ }
1321
+ /**
1322
+ * Fetch child workflow inputs in batch. Used by the exporter to enrich
1323
+ * child workflow events with their arguments.
1324
+ */
1325
+ async getChildWorkflowInputs(childJobKeys, symbolField) {
1326
+ if (childJobKeys.length === 0) {
1327
+ return new Map();
1328
+ }
1329
+ const { buildChildWorkflowInputsQuery } = await Promise.resolve().then(() => __importStar(require('./exporter-sql')));
1330
+ const schemaName = this.kvsql().safeName(this.appId);
1331
+ const sql = buildChildWorkflowInputsQuery(childJobKeys.length, schemaName);
1332
+ const result = await this.pgClient.query(sql, [...childJobKeys, symbolField]);
1333
+ const childInputMap = new Map();
1334
+ for (const row of result.rows) {
1335
+ const jobKey = row.key;
1336
+ const childId = jobKey.replace(`hmsh:${this.appId}:j:`, '');
1337
+ try {
1338
+ const parsed = this.parseHmshValue(row.value);
1339
+ childInputMap.set(childId, parsed);
1340
+ }
1341
+ catch {
1342
+ // Skip unparseable values
1343
+ }
1344
+ }
1345
+ return childInputMap;
1346
+ }
1347
+ /**
1348
+ * Fetch job record and attributes by key. Used by the exporter to
1349
+ * reconstruct execution history for expired jobs.
1350
+ */
1351
+ async getJobByKeyDirect(jobKey) {
1352
+ const { GET_JOB_BY_KEY, GET_JOB_ATTRIBUTES } = await Promise.resolve().then(() => __importStar(require('./exporter-sql')));
1353
+ const schemaName = this.kvsql().safeName(this.appId);
1354
+ const jobSql = GET_JOB_BY_KEY.replace(/{schema}/g, schemaName);
1355
+ const jobResult = await this.pgClient.query(jobSql, [jobKey]);
1356
+ if (jobResult.rows.length === 0) {
1357
+ throw new Error(`No job found for key "${jobKey}"`);
1358
+ }
1359
+ const job = jobResult.rows[0];
1360
+ const attrSql = GET_JOB_ATTRIBUTES.replace(/{schema}/g, schemaName);
1361
+ const attrResult = await this.pgClient.query(attrSql, [job.id]);
1362
+ const attributes = {};
1363
+ for (const row of attrResult.rows) {
1364
+ attributes[row.field] = row.value;
1365
+ }
1366
+ return { job, attributes };
1367
+ }
1368
+ /**
1369
+ * Parse a HotMesh-encoded value string.
1370
+ * Values may be prefixed with `/s` (JSON), `/d` (number), `/t` or `/f` (boolean), `/n` (null).
1371
+ */
1372
+ parseHmshValue(raw) {
1373
+ if (typeof raw !== 'string')
1374
+ return undefined;
1375
+ const prefix = raw.slice(0, 2);
1376
+ const rest = raw.slice(2);
1377
+ switch (prefix) {
1378
+ case '/t': return true;
1379
+ case '/f': return false;
1380
+ case '/d': return Number(rest);
1381
+ case '/n': return null;
1382
+ case '/s': return JSON.parse(rest);
1383
+ default: return raw;
1384
+ }
1385
+ }
1287
1386
  }
1288
1387
  exports.PostgresStoreService = PostgresStoreService;