@hotmeshio/hotmesh 0.10.2 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. package/README.md +1 -1
  2. package/build/modules/enums.d.ts +1 -0
  3. package/build/modules/enums.js +3 -1
  4. package/build/modules/errors.d.ts +2 -0
  5. package/build/modules/errors.js +2 -0
  6. package/build/modules/key.js +3 -2
  7. package/build/package.json +2 -2
  8. package/build/services/activities/worker.js +10 -0
  9. package/build/services/dba/index.d.ts +2 -1
  10. package/build/services/dba/index.js +11 -2
  11. package/build/services/durable/client.js +6 -1
  12. package/build/services/durable/exporter.d.ts +15 -0
  13. package/build/services/durable/exporter.js +384 -5
  14. package/build/services/durable/schemas/factory.d.ts +1 -1
  15. package/build/services/durable/schemas/factory.js +27 -4
  16. package/build/services/durable/worker.d.ts +2 -2
  17. package/build/services/durable/worker.js +15 -9
  18. package/build/services/durable/workflow/context.js +2 -0
  19. package/build/services/durable/workflow/execChild.js +5 -2
  20. package/build/services/durable/workflow/hook.js +6 -0
  21. package/build/services/durable/workflow/proxyActivities.js +3 -4
  22. package/build/services/engine/index.d.ts +2 -2
  23. package/build/services/engine/index.js +10 -5
  24. package/build/services/exporter/index.d.ts +16 -2
  25. package/build/services/exporter/index.js +76 -0
  26. package/build/services/hotmesh/index.d.ts +2 -2
  27. package/build/services/hotmesh/index.js +2 -2
  28. package/build/services/router/config/index.d.ts +2 -2
  29. package/build/services/router/config/index.js +2 -1
  30. package/build/services/router/consumption/index.js +80 -5
  31. package/build/services/store/index.d.ts +52 -0
  32. package/build/services/store/providers/postgres/exporter-sql.d.ts +40 -0
  33. package/build/services/store/providers/postgres/exporter-sql.js +92 -0
  34. package/build/services/store/providers/postgres/kvtables.js +6 -0
  35. package/build/services/store/providers/postgres/postgres.d.ts +42 -0
  36. package/build/services/store/providers/postgres/postgres.js +151 -0
  37. package/build/services/stream/index.d.ts +1 -0
  38. package/build/services/stream/providers/postgres/kvtables.d.ts +1 -1
  39. package/build/services/stream/providers/postgres/kvtables.js +235 -82
  40. package/build/services/stream/providers/postgres/lifecycle.d.ts +4 -3
  41. package/build/services/stream/providers/postgres/lifecycle.js +6 -5
  42. package/build/services/stream/providers/postgres/messages.d.ts +14 -6
  43. package/build/services/stream/providers/postgres/messages.js +153 -76
  44. package/build/services/stream/providers/postgres/notifications.d.ts +5 -2
  45. package/build/services/stream/providers/postgres/notifications.js +39 -35
  46. package/build/services/stream/providers/postgres/postgres.d.ts +21 -118
  47. package/build/services/stream/providers/postgres/postgres.js +87 -140
  48. package/build/services/stream/providers/postgres/scout.js +2 -2
  49. package/build/services/stream/providers/postgres/stats.js +3 -2
  50. package/build/services/stream/registry.d.ts +62 -0
  51. package/build/services/stream/registry.js +198 -0
  52. package/build/services/worker/index.js +20 -6
  53. package/build/types/durable.d.ts +6 -1
  54. package/build/types/error.d.ts +2 -0
  55. package/build/types/exporter.d.ts +84 -0
  56. package/build/types/hotmesh.d.ts +7 -1
  57. package/build/types/index.d.ts +1 -1
  58. package/build/types/stream.d.ts +2 -0
  59. package/package.json +2 -2
@@ -0,0 +1,198 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.StreamConsumerRegistry = void 0;
4
+ const router_1 = require("../router");
5
+ const stream_1 = require("../../types/stream");
6
+ const key_1 = require("../../modules/key");
7
+ /**
8
+ * Process-wide singleton registry that manages one consumer per task queue (workers)
9
+ * and one per appId (engines). Instead of N consumers each polling independently,
10
+ * one consumer fetches batches from the stream and dispatches to registered callbacks
11
+ * based on the `workflow_name` column (workers) or round-robin (engines).
12
+ */
13
+ class StreamConsumerRegistry {
14
+ /**
15
+ * Register a worker callback for a (taskQueue, workflowName) pair.
16
+ * If no consumer exists for this taskQueue, a singleton Router is created.
17
+ */
18
+ static async registerWorker(namespace, appId, guid, taskQueue, workflowName, callback, stream, store, logger, config) {
19
+ const key = `${namespace}:${appId}:worker:${taskQueue}`;
20
+ let entry = StreamConsumerRegistry.workerConsumers.get(key);
21
+ if (!entry) {
22
+ // Create the singleton consumer for this task queue
23
+ const throttle = await store.getThrottleRate(taskQueue);
24
+ const router = new router_1.Router({
25
+ namespace,
26
+ appId,
27
+ guid,
28
+ role: stream_1.StreamRole.WORKER,
29
+ topic: taskQueue,
30
+ reclaimDelay: config?.reclaimDelay,
31
+ reclaimCount: config?.reclaimCount,
32
+ throttle,
33
+ retryPolicy: config?.retryPolicy,
34
+ }, stream, logger);
35
+ entry = {
36
+ router,
37
+ callbacks: new Map(),
38
+ stream,
39
+ logger,
40
+ };
41
+ StreamConsumerRegistry.workerConsumers.set(key, entry);
42
+ // Create the dispatch callback that routes by workflow_name
43
+ const dispatchCallback = StreamConsumerRegistry.createWorkerDispatcher(key);
44
+ // Start consuming from the task queue stream
45
+ const streamKey = stream.mintKey(key_1.KeyType.STREAMS, {
46
+ appId,
47
+ topic: taskQueue,
48
+ });
49
+ router.consumeMessages(streamKey, 'WORKER', guid, dispatchCallback);
50
+ }
51
+ // Register the callback for this workflow name
52
+ entry.callbacks.set(workflowName, callback);
53
+ logger.info('stream-consumer-registry-worker-registered', {
54
+ taskQueue,
55
+ workflowName,
56
+ totalCallbacks: entry.callbacks.size,
57
+ });
58
+ }
59
+ /**
60
+ * Register an engine callback for an appId.
61
+ * If no consumer exists for this appId, a singleton Router is created.
62
+ */
63
+ static async registerEngine(namespace, appId, guid, callback, stream, store, logger, config) {
64
+ const key = `${namespace}:${appId}:engine`;
65
+ let entry = StreamConsumerRegistry.engineConsumers.get(key);
66
+ if (!entry) {
67
+ const throttle = await store.getThrottleRate();
68
+ const router = new router_1.Router({
69
+ namespace,
70
+ appId,
71
+ guid,
72
+ role: stream_1.StreamRole.ENGINE,
73
+ reclaimDelay: config?.reclaimDelay,
74
+ reclaimCount: config?.reclaimCount,
75
+ throttle,
76
+ }, stream, logger);
77
+ entry = {
78
+ router,
79
+ callbacks: [],
80
+ stream,
81
+ logger,
82
+ };
83
+ StreamConsumerRegistry.engineConsumers.set(key, entry);
84
+ // Create the dispatch callback
85
+ const dispatchCallback = StreamConsumerRegistry.createEngineDispatcher(key);
86
+ // Start consuming from the engine stream
87
+ const streamKey = stream.mintKey(key_1.KeyType.STREAMS, { appId });
88
+ router.consumeMessages(streamKey, 'ENGINE', guid, dispatchCallback);
89
+ }
90
+ entry.callbacks.push(callback);
91
+ logger.info('stream-consumer-registry-engine-registered', {
92
+ appId,
93
+ totalCallbacks: entry.callbacks.length,
94
+ });
95
+ }
96
+ /**
97
+ * Creates a dispatch callback for worker consumers.
98
+ * Routes messages to the registered callback based on metadata.wfn (workflow_name).
99
+ */
100
+ static createWorkerDispatcher(key) {
101
+ return async (data) => {
102
+ const entry = StreamConsumerRegistry.workerConsumers.get(key);
103
+ if (!entry)
104
+ return;
105
+ const wfn = data.metadata?.wfn;
106
+ if (!wfn) {
107
+ entry.logger.warn('stream-consumer-registry-no-wfn', {
108
+ key,
109
+ metadata: data.metadata,
110
+ });
111
+ // Fall back to first registered callback if only one exists
112
+ if (entry.callbacks.size === 1) {
113
+ const [, callback] = entry.callbacks.entries().next().value;
114
+ return callback(data);
115
+ }
116
+ return;
117
+ }
118
+ const callback = entry.callbacks.get(wfn);
119
+ if (!callback) {
120
+ entry.logger.debug('stream-consumer-registry-no-callback', {
121
+ key,
122
+ wfn,
123
+ registered: [...entry.callbacks.keys()],
124
+ });
125
+ // Worker not registered yet. Re-publish with short visibility delay
126
+ // so it retries after the worker has time to register.
127
+ // This avoids consuming the error handler's retry budget.
128
+ const replayData = { ...data };
129
+ replayData._visibilityDelayMs = 500;
130
+ const streamKey = entry.stream.mintKey(key_1.KeyType.STREAMS, {
131
+ topic: data.metadata?.topic,
132
+ });
133
+ await entry.stream.publishMessages(streamKey, [JSON.stringify(replayData)]);
134
+ // Return void — the original message will be ack'd,
135
+ // but a new copy is queued with a delay.
136
+ return;
137
+ }
138
+ return callback(data);
139
+ };
140
+ }
141
+ /**
142
+ * Creates a dispatch callback for engine consumers.
143
+ * Engines are generic processors — the first registered callback handles the message.
144
+ */
145
+ static createEngineDispatcher(key) {
146
+ return async (data) => {
147
+ const entry = StreamConsumerRegistry.engineConsumers.get(key);
148
+ if (!entry || entry.callbacks.length === 0)
149
+ return;
150
+ // Engine callbacks are all equivalent — use the first one
151
+ return entry.callbacks[0](data);
152
+ };
153
+ }
154
+ /**
155
+ * Unregister a worker callback.
156
+ */
157
+ static async unregisterWorker(namespace, appId, taskQueue, workflowName) {
158
+ const key = `${namespace}:${appId}:worker:${taskQueue}`;
159
+ const entry = StreamConsumerRegistry.workerConsumers.get(key);
160
+ if (!entry)
161
+ return;
162
+ entry.callbacks.delete(workflowName);
163
+ if (entry.callbacks.size === 0) {
164
+ await entry.router.stopConsuming();
165
+ StreamConsumerRegistry.workerConsumers.delete(key);
166
+ }
167
+ }
168
+ /**
169
+ * Unregister an engine callback.
170
+ */
171
+ static async unregisterEngine(namespace, appId, callback) {
172
+ const key = `${namespace}:${appId}:engine`;
173
+ const entry = StreamConsumerRegistry.engineConsumers.get(key);
174
+ if (!entry)
175
+ return;
176
+ entry.callbacks = entry.callbacks.filter(cb => cb !== callback);
177
+ if (entry.callbacks.length === 0) {
178
+ await entry.router.stopConsuming();
179
+ StreamConsumerRegistry.engineConsumers.delete(key);
180
+ }
181
+ }
182
+ /**
183
+ * Stop all consumers and clear the registry.
184
+ */
185
+ static async shutdown() {
186
+ for (const [, entry] of StreamConsumerRegistry.workerConsumers) {
187
+ await entry.router.stopConsuming();
188
+ }
189
+ for (const [, entry] of StreamConsumerRegistry.engineConsumers) {
190
+ await entry.router.stopConsuming();
191
+ }
192
+ StreamConsumerRegistry.workerConsumers.clear();
193
+ StreamConsumerRegistry.engineConsumers.clear();
194
+ }
195
+ }
196
+ exports.StreamConsumerRegistry = StreamConsumerRegistry;
197
+ StreamConsumerRegistry.workerConsumers = new Map();
198
+ StreamConsumerRegistry.engineConsumers = new Map();
@@ -2,6 +2,7 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.WorkerService = void 0;
4
4
  const key_1 = require("../../modules/key");
5
+ const registry_1 = require("../stream/registry");
5
6
  const utils_1 = require("../../modules/utils");
6
7
  const factory_1 = require("../connector/factory");
7
8
  const router_1 = require("../router");
@@ -45,12 +46,25 @@ class WorkerService {
45
46
  await service.subscribe.subscribe(key_1.KeyType.QUORUM, service.subscriptionHandler(), appId, service.topic);
46
47
  await service.subscribe.subscribe(key_1.KeyType.QUORUM, service.subscriptionHandler(), appId, service.guid);
47
48
  await service.initStreamChannel(service, worker.stream, worker.store);
48
- service.router = await service.initRouter(worker, logger);
49
- const key = service.store.mintKey(key_1.KeyType.STREAMS, {
50
- appId: service.appId,
51
- topic: worker.topic,
52
- });
53
- await service.router.consumeMessages(key, 'WORKER', service.guid, worker.callback);
49
+ if (worker.workflowName) {
50
+ // Use singleton consumer via registry (batched fetch + dispatch by workflow_name)
51
+ await registry_1.StreamConsumerRegistry.registerWorker(namespace, appId, guid, worker.topic, worker.workflowName, worker.callback, service.stream, service.store, logger, {
52
+ reclaimDelay: worker.reclaimDelay,
53
+ reclaimCount: worker.reclaimCount,
54
+ retryPolicy: worker.retryPolicy,
55
+ });
56
+ // Still need a router for publishing responses back to engine
57
+ service.router = await service.initRouter(worker, logger);
58
+ }
59
+ else {
60
+ // Legacy: own Router per worker (no workflowName = no singleton dispatch)
61
+ service.router = await service.initRouter(worker, logger);
62
+ const key = service.store.mintKey(key_1.KeyType.STREAMS, {
63
+ appId: service.appId,
64
+ topic: worker.topic,
65
+ });
66
+ await service.router.consumeMessages(key, 'WORKER', service.guid, worker.callback);
67
+ }
54
68
  service.inited = (0, utils_1.formatISODate)(new Date());
55
69
  services.push(service);
56
70
  }
@@ -75,7 +75,12 @@ type WorkflowContext = {
75
75
  */
76
76
  workflowDimension: string;
77
77
  /**
78
- * a concatenation of the task queue and workflow name (e.g., `${taskQueueName}-${workflowName}`)
78
+ * the task queue name (stream_name in worker_streams)
79
+ */
80
+ taskQueue: string;
81
+ /**
82
+ * a concatenation of the task queue and workflow name (e.g., `${taskQueueName}-${workflowName}`);
83
+ * used for engine-internal routing (graph.subscribes)
79
84
  */
80
85
  workflowTopic: string;
81
86
  /**
@@ -14,6 +14,8 @@ export type DurableChildErrorType = {
14
14
  workflowDimension: string;
15
15
  workflowId: string;
16
16
  workflowTopic: string;
17
+ taskQueue?: string;
18
+ workflowName?: string;
17
19
  };
18
20
  export type DurableWaitForAllErrorType = {
19
21
  items: string[];
@@ -18,6 +18,13 @@ export interface ExportOptions {
18
18
  * @default true
19
19
  */
20
20
  values?: boolean;
21
+ /**
22
+ * When true, fetches stream message history and produces a structured
23
+ * `activities` array with input/output per activity, timing, dimensional
24
+ * cycle info, and retry attempts. This is the dashboard-friendly format.
25
+ * @default false
26
+ */
27
+ enrich_inputs?: boolean;
21
28
  }
22
29
  export type JobAction = {
23
30
  cursor: number;
@@ -74,10 +81,24 @@ export interface DurableJobExport {
74
81
  timeline?: TimelineType[];
75
82
  transitions?: TransitionType[];
76
83
  }
84
+ export interface ActivityDetail {
85
+ name: string;
86
+ type: string;
87
+ dimension: string;
88
+ input?: Record<string, any>;
89
+ output?: Record<string, any>;
90
+ started_at?: string;
91
+ completed_at?: string;
92
+ duration_ms?: number;
93
+ retry_attempt?: number;
94
+ cycle_iteration?: number;
95
+ error?: string | null;
96
+ }
77
97
  export interface JobExport {
78
98
  dependencies: DependencyExport[];
79
99
  process: StringAnyType;
80
100
  status: string;
101
+ activities?: ActivityDetail[];
81
102
  }
82
103
  export type ExportMode = 'sparse' | 'verbose';
83
104
  export type WorkflowEventType = 'workflow_execution_started' | 'workflow_execution_completed' | 'workflow_execution_failed' | 'activity_task_scheduled' | 'activity_task_completed' | 'activity_task_failed' | 'child_workflow_execution_started' | 'child_workflow_execution_completed' | 'child_workflow_execution_failed' | 'timer_started' | 'timer_fired' | 'workflow_execution_signaled';
@@ -101,6 +122,7 @@ export interface ActivityTaskScheduledAttributes {
101
122
  activity_type: string;
102
123
  timeline_key: string;
103
124
  execution_index: number;
125
+ input?: any;
104
126
  }
105
127
  export interface ActivityTaskCompletedAttributes {
106
128
  kind: 'activity_task_completed';
@@ -109,6 +131,7 @@ export interface ActivityTaskCompletedAttributes {
109
131
  scheduled_event_id?: number;
110
132
  timeline_key: string;
111
133
  execution_index: number;
134
+ input?: any;
112
135
  }
113
136
  export interface ActivityTaskFailedAttributes {
114
137
  kind: 'activity_task_failed';
@@ -117,6 +140,7 @@ export interface ActivityTaskFailedAttributes {
117
140
  scheduled_event_id?: number;
118
141
  timeline_key: string;
119
142
  execution_index: number;
143
+ input?: any;
120
144
  }
121
145
  export interface ChildWorkflowExecutionStartedAttributes {
122
146
  kind: 'child_workflow_execution_started';
@@ -124,6 +148,7 @@ export interface ChildWorkflowExecutionStartedAttributes {
124
148
  awaited: boolean;
125
149
  timeline_key: string;
126
150
  execution_index: number;
151
+ input?: any;
127
152
  }
128
153
  export interface ChildWorkflowExecutionCompletedAttributes {
129
154
  kind: 'child_workflow_execution_completed';
@@ -199,10 +224,69 @@ export interface WorkflowExecution {
199
224
  events: WorkflowExecutionEvent[];
200
225
  summary: WorkflowExecutionSummary;
201
226
  children?: WorkflowExecution[];
227
+ stream_history?: StreamHistoryEntry[];
202
228
  }
203
229
  export interface ExecutionExportOptions {
204
230
  mode?: ExportMode;
205
231
  exclude_system?: boolean;
206
232
  omit_results?: boolean;
207
233
  max_depth?: number;
234
+ /**
235
+ * When true, enriches activity and child workflow events with their inputs
236
+ * by querying the underlying job attributes. This enables full visibility
237
+ * into activity arguments without requiring separate callback queries.
238
+ *
239
+ * @default false (late-binding: only return timeline_key references)
240
+ */
241
+ enrich_inputs?: boolean;
242
+ /**
243
+ * When true, allows fallback to direct database queries for expired jobs
244
+ * whose in-memory handles have been pruned. Only supported with providers
245
+ * that implement the extended exporter query interface (e.g., Postgres).
246
+ *
247
+ * @default false
248
+ */
249
+ allow_direct_query?: boolean;
250
+ /**
251
+ * When true, fetches the full stream message history for this workflow
252
+ * from the worker_streams table and attaches it as `stream_history`.
253
+ * This provides raw activity input/output data from the original stream
254
+ * messages, enabling Temporal-grade export fidelity.
255
+ *
256
+ * @default false
257
+ */
258
+ include_stream_history?: boolean;
259
+ }
260
+ export interface StreamHistoryEntry {
261
+ id: number;
262
+ jid: string;
263
+ aid: string;
264
+ dad: string;
265
+ msg_type: string;
266
+ topic: string;
267
+ workflow_name: string;
268
+ data: Record<string, any>;
269
+ status?: string;
270
+ code?: number;
271
+ created_at: string;
272
+ expired_at?: string;
273
+ }
274
+ export interface JobAttributesRow {
275
+ field: string;
276
+ value: string;
277
+ }
278
+ export interface JobRow {
279
+ id: string;
280
+ key: string;
281
+ status: number;
282
+ created_at: Date;
283
+ updated_at: Date;
284
+ expired_at?: Date;
285
+ is_live: boolean;
286
+ }
287
+ export interface ActivityInputMap {
288
+ /** Maps activity job_id to parsed input arguments */
289
+ byJobId: Map<string, any>;
290
+ /** Maps "activityName:executionIndex" to parsed input arguments */
291
+ byNameIndex: Map<string, any>;
208
292
  }
@@ -134,9 +134,15 @@ type HotMeshEngine = {
134
134
  };
135
135
  type HotMeshWorker = {
136
136
  /**
137
- * the topic that the worker subscribes to
137
+ * the topic/task queue that the worker subscribes to (stream_name)
138
138
  */
139
139
  topic: string;
140
+ /**
141
+ * the workflow function name for dispatch routing (workflow_name column).
142
+ * When set, workers sharing the same topic use a singleton consumer
143
+ * that fetches batches and dispatches by workflowName.
144
+ */
145
+ workflowName?: string;
140
146
  /**
141
147
  * set by hotmesh once the connector service instantiates the provider
142
148
  * AND if the provider requires a separate channel for publishing
@@ -6,7 +6,7 @@ export { CollationFaultType, CollationStage } from './collator';
6
6
  export { ActivityConfig, ActivityInterceptor, ActivityInterceptorContext, ActivityWorkflowDataType, ChildResponseType, ClientConfig, ClientWorkflow, ContextType, Connection, ProxyResponseType, ProxyType, Registry, SignalOptions, FindJobsOptions, FindOptions, FindWhereOptions, FindWhereQuery, HookOptions, SearchResults, WorkflowConfig, WorkerConfig, WorkerOptions, WorkflowContext, WorkflowSearchOptions, WorkflowSearchSchema, WorkflowDataType, WorkflowOptions, WorkflowInterceptor, InterceptorRegistry, } from './durable';
7
7
  export { PruneOptions, PruneResult, } from './dba';
8
8
  export { DurableChildErrorType, DurableProxyErrorType, DurableSleepErrorType, DurableWaitForAllErrorType, DurableWaitForErrorType, } from './error';
9
- export { ActivityAction, DependencyExport, DurableJobExport, ExecutionExportOptions, ExportCycles, ExportItem, ExportMode, ExportOptions, ExportTransitions, JobAction, JobExport, JobActionExport, JobTimeline, WorkflowEventAttributes, WorkflowEventCategory, WorkflowEventType, WorkflowExecution, WorkflowExecutionEvent, WorkflowExecutionStatus, WorkflowExecutionSummary, } from './exporter';
9
+ export { ActivityAction, ActivityDetail, ActivityInputMap, ActivityTaskCompletedAttributes, ActivityTaskFailedAttributes, ActivityTaskScheduledAttributes, ChildWorkflowExecutionCompletedAttributes, ChildWorkflowExecutionFailedAttributes, ChildWorkflowExecutionStartedAttributes, DependencyExport, DurableJobExport, ExecutionExportOptions, ExportCycles, ExportFields, ExportItem, ExportMode, ExportOptions, ExportTransitions, JobAction, JobActionExport, JobAttributesRow, JobExport, JobRow, JobTimeline, StreamHistoryEntry, TimelineType, TimerFiredAttributes, TimerStartedAttributes, TransitionType, WorkflowEventAttributes, WorkflowEventCategory, WorkflowEventType, WorkflowExecution, WorkflowExecutionCompletedAttributes, WorkflowExecutionEvent, WorkflowExecutionFailedAttributes, WorkflowExecutionSignaledAttributes, WorkflowExecutionStartedAttributes, WorkflowExecutionStatus, WorkflowExecutionSummary, } from './exporter';
10
10
  export { HookCondition, HookConditions, HookGate, HookInterface, HookRule, HookRules, HookSignal, } from './hook';
11
11
  export { HotMesh, HotMeshEngine, HotMeshWorker, HotMeshSettings, HotMeshApp, HotMeshApps, HotMeshConfig, HotMeshManifest, HotMeshGraph, KeyType, KeyStoreParams, ScoutType, } from './hotmesh';
12
12
  export { ILogger, LogLevel } from './logger';
@@ -105,6 +105,8 @@ export interface StreamData {
105
105
  spn?: string;
106
106
  /** Current try count, used for retry logic */
107
107
  try?: number;
108
+ /** Workflow name (graph subscribes topic) for worker stream routing */
109
+ wfn?: string;
108
110
  /**
109
111
  * Indicates if the message should wait for a response.
110
112
  * If explicitly false, the connection is severed immediately
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hotmeshio/hotmesh",
3
- "version": "0.10.2",
3
+ "version": "0.12.0",
4
4
  "description": "Permanent-Memory Workflows & AI Agents",
5
5
  "main": "./build/index.js",
6
6
  "types": "./build/index.d.ts",
@@ -46,7 +46,7 @@
46
46
  "test:durable:sleep": "vitest run tests/durable/sleep/postgres.test.ts",
47
47
  "test:durable:signal": "vitest run tests/durable/signal/postgres.test.ts",
48
48
  "test:durable:unknown": "vitest run tests/durable/unknown/postgres.test.ts",
49
- "test:durable:exporter": "vitest run tests/durable/exporter/exporter.test.ts",
49
+ "test:durable:exporter": "HMSH_LOGLEVEL=info vitest run tests/durable/exporter",
50
50
  "test:durable:exporter:debug": "EXPORT_DEBUG=1 HMSH_LOGLEVEL=error vitest run tests/durable/basic/postgres.test.ts",
51
51
  "test:dba": "vitest run tests/dba",
52
52
  "test:cycle": "vitest run tests/functional/cycle",