@hotmeshio/hotmesh 0.10.0 → 0.10.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/build/package.json +5 -4
- package/build/services/dba/index.d.ts +52 -18
- package/build/services/dba/index.js +118 -29
- package/build/services/durable/exporter.d.ts +60 -3
- package/build/services/durable/exporter.js +430 -2
- package/build/services/durable/handle.d.ts +12 -1
- package/build/services/durable/handle.js +13 -0
- package/build/services/durable/index.d.ts +31 -2
- package/build/services/durable/index.js +31 -2
- package/build/services/durable/interceptor.d.ts +73 -0
- package/build/services/durable/interceptor.js +73 -0
- package/build/services/durable/workflow/proxyActivities.js +33 -29
- package/build/services/store/providers/postgres/kvtables.js +6 -1
- package/build/types/dba.d.ts +31 -5
- package/build/types/durable.d.ts +30 -12
- package/build/types/exporter.d.ts +127 -0
- package/build/types/index.d.ts +1 -1
- package/package.json +5 -4
- /package/{vitest.config.ts → vitest.config.mts} +0 -0
|
@@ -1,8 +1,158 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.ExporterService = void 0;
|
|
3
|
+
exports.mapStatus = exports.isSystemActivity = exports.extractActivityName = exports.extractOperation = exports.computeDuration = exports.parseTimestamp = exports.ExporterService = void 0;
|
|
4
4
|
const utils_1 = require("../../modules/utils");
|
|
5
5
|
const serializer_1 = require("../serializer");
|
|
6
|
+
// ── Timestamp helpers ────────────────────────────────────────────────────────
|
|
7
|
+
/**
|
|
8
|
+
* Parse a HotMesh compact timestamp (YYYYMMDDHHmmss.mmm) into ISO 8601.
|
|
9
|
+
* Also accepts ISO 8601 strings directly.
|
|
10
|
+
*/
|
|
11
|
+
/**
 * Parse a HotMesh compact timestamp (YYYYMMDDHHmmss[.mmm]) into an
 * ISO 8601 UTC string. ISO 8601 input is normalized and passed through.
 *
 * @param raw - compact or ISO timestamp string
 * @returns the ISO 8601 string, or null when unparseable / not a string
 */
function parseTimestamp(raw) {
    if (typeof raw !== 'string' || raw === '') {
        return null;
    }
    // Compact form: exactly 14 digits plus an optional fractional suffix.
    // The dot is optional — trailing digits alone count as the fraction.
    const compact = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(?:\.?(\d+))?$/.exec(raw);
    if (compact) {
        const [, year, month, day, hour, minute, second, fraction] = compact;
        // Normalize the fraction to exactly three digits (pad/truncate).
        const millis = (fraction ?? '').padEnd(3, '0').slice(0, 3);
        return `${year}-${month}-${day}T${hour}:${minute}:${second}.${millis}Z`;
    }
    // ISO-ish input: round-trip through Date to normalize the format.
    if (raw.includes('T') || raw.includes('-')) {
        const parsed = new Date(raw);
        if (!Number.isNaN(parsed.getTime())) {
            return parsed.toISOString();
        }
    }
    return null;
}
|
|
28
|
+
exports.parseTimestamp = parseTimestamp;
|
|
29
|
+
/**
|
|
30
|
+
* Compute duration in milliseconds between two HotMesh timestamps.
|
|
31
|
+
*/
|
|
32
|
+
/**
 * Compute the duration in milliseconds between two HotMesh timestamps.
 *
 * @param ac - activity-created timestamp (compact or ISO)
 * @param au - activity-updated timestamp (compact or ISO)
 * @returns elapsed milliseconds, or null when either endpoint is unparseable
 */
function computeDuration(ac, au) {
    const startIso = parseTimestamp(ac);
    const endIso = parseTimestamp(au);
    // Both endpoints must parse; otherwise the duration is unknowable.
    if (startIso === null || endIso === null) {
        return null;
    }
    return Date.parse(endIso) - Date.parse(startIso);
}
|
|
39
|
+
exports.computeDuration = computeDuration;
|
|
40
|
+
// ── Timeline key parsing ─────────────────────────────────────────────────────
|
|
41
|
+
/**
|
|
42
|
+
* Extract the operation type (proxy, child, start, wait, sleep, hook)
|
|
43
|
+
* from a timeline key like `-proxy,0,0-1-`.
|
|
44
|
+
*/
|
|
45
|
+
/**
 * Extract the operation type (proxy, child, start, wait, sleep, hook)
 * from a timeline key such as `-proxy,0,0-1-`.
 *
 * The first non-empty dash-delimited segment carries
 * `<operation>[,<dimension>...]`; everything after the first comma is
 * dimensional data and is discarded.
 */
function extractOperation(key) {
    const firstSegment = key.split('-').find((segment) => segment.length > 0);
    if (!firstSegment) {
        return 'unknown';
    }
    return firstSegment.split(',')[0] || 'unknown';
}
|
|
49
|
+
exports.extractOperation = extractOperation;
|
|
50
|
+
// ── Name extraction ──────────────────────────────────────────────────────────
|
|
51
|
+
/**
|
|
52
|
+
* Extract the activity name from a timeline entry value's job_id.
|
|
53
|
+
*
|
|
54
|
+
* Job ID format: `-{workflowId}-$${activityName}{dimension}-{execIndex}`
|
|
55
|
+
* Examples:
|
|
56
|
+
* `-wfId-$analyzeContent-5` → `'analyzeContent'`
|
|
57
|
+
* `-wfId-$processOrder,0,0-3` → `'processOrder'`
|
|
58
|
+
*/
|
|
59
|
+
/**
 * Extract the activity name from a timeline entry value's `job_id`.
 *
 * Job ID format: `-{workflowId}-$${activityName}{dimension}-{execIndex}`
 * Examples:
 *   `-wfId-$analyzeContent-5`     → 'analyzeContent'
 *   `-wfId-$processOrder,0,0-3`   → 'processOrder'
 *
 * Falls back to the whole job_id when no `$` marker is present, and to
 * 'unknown' when the value carries no usable job_id at all.
 */
function extractActivityName(value) {
    const jobId = value?.job_id;
    if (typeof jobId !== 'string' || jobId.length === 0) {
        return 'unknown';
    }
    // The `$` introduces the activity name; without it, return the id as-is.
    const dollar = jobId.lastIndexOf('$');
    if (dollar < 0) {
        return jobId;
    }
    let name = jobId.slice(dollar + 1);
    // Drop the trailing `-{execIndex}` segment, if present.
    const dash = name.lastIndexOf('-');
    if (dash > 0) {
        name = name.slice(0, dash);
    }
    // Strip any `,N,N...` dimension suffix.
    const comma = name.indexOf(',');
    if (comma > 0) {
        name = name.slice(0, comma);
    }
    return name || 'unknown';
}
|
|
73
|
+
exports.extractActivityName = extractActivityName;
|
|
74
|
+
/**
|
|
75
|
+
* Check if an activity name is a system (interceptor) operation.
|
|
76
|
+
*/
|
|
77
|
+
/**
 * Check whether an activity name denotes a system (interceptor) operation.
 *
 * NOTE(review): this is a bare prefix check — a user activity named
 * `ltSomething` would also be flagged; confirm the `lt` naming convention
 * upstream before relying on it for filtering.
 */
function isSystemActivity(name) {
    return name.slice(0, 2) === 'lt';
}
|
|
80
|
+
exports.isSystemActivity = isSystemActivity;
|
|
81
|
+
/**
|
|
82
|
+
* Extract a child workflow ID from a child/start timeline value.
|
|
83
|
+
*/
|
|
84
|
+
/**
 * Extract a child workflow ID from a child/start timeline value.
 * Returns 'unknown' for missing values or falsy job_ids (e.g. '').
 */
function extractChildWorkflowId(value) {
    const id = value?.job_id;
    return id ? id : 'unknown';
}
|
|
87
|
+
// ── Status mapping ───────────────────────────────────────────────────────────
|
|
88
|
+
/**
|
|
89
|
+
* Map HotMesh job state to a human-readable execution status.
|
|
90
|
+
*
|
|
91
|
+
* HotMesh semaphore: `0` = idle, `> 0` = pending activities,
|
|
92
|
+
* `< 0` = failed / interrupted.
|
|
93
|
+
*
|
|
94
|
+
* A workflow can be "done" (`state.data.done === true`) while the
|
|
95
|
+
* semaphore is still > 0 (cleanup activities pending). We check
|
|
96
|
+
* both the `done` flag and the semaphore to determine status.
|
|
97
|
+
*/
|
|
98
|
+
/**
 * Map HotMesh job state to a human-readable execution status.
 *
 * HotMesh semaphore: `0` = idle, `> 0` = pending activities,
 * `< 0` = failed / interrupted.
 *
 * A workflow can be "done" (`state.data.done === true`) while the
 * semaphore is still > 0 (cleanup activities pending), so the `done`
 * flag is honored independently of the semaphore.
 *
 * @param rawStatus - job semaphore value (may be undefined or non-numeric)
 * @param isDone - the `state.data.done` flag
 * @param hasError - whether the job state carries `$error`
 * @returns 'failed' | 'completed' | 'running'
 */
function mapStatus(rawStatus, isDone, hasError) {
    // Failure wins: an explicit error, or a negative semaphore.
    if (hasError || (rawStatus !== undefined && !isNaN(rawStatus) && rawStatus < 0)) {
        return 'failed';
    }
    if (isDone || rawStatus === 0) {
        return 'completed';
    }
    // Positive semaphore, missing, or non-numeric status: still running.
    // (The original had two separate trailing branches both returning
    // 'running'; they are collapsed here — behavior is unchanged.)
    return 'running';
}
|
|
108
|
+
exports.mapStatus = mapStatus;
|
|
109
|
+
// ── Event construction ───────────────────────────────────────────────────────
|
|
110
|
+
/**
 * Assemble a normalized execution-history event record from its parts.
 * Field names are snake_case to match the exported wire format.
 */
function makeEvent(event_id, event_type, category, event_time, duration_ms, is_system, attributes) {
    const evt = {
        event_id,
        event_type,
        category,
        event_time,
        duration_ms,
        is_system,
        attributes,
    };
    return evt;
}
|
|
113
|
+
/**
 * Tally an event list into per-category counts (activities split into
 * system vs user at schedule time, child workflows, timers, signals).
 * Event types not listed below are counted only in `total_events`.
 */
function computeSummary(events) {
    const summary = {
        total_events: events.length,
        activities: { total: 0, completed: 0, failed: 0, system: 0, user: 0 },
        child_workflows: { total: 0, completed: 0, failed: 0 },
        timers: 0,
        signals: 0,
    };
    for (const evt of events) {
        const type = evt.event_type;
        if (type === 'activity_task_scheduled') {
            summary.activities.total += 1;
            // system/user split is decided when the task is scheduled
            if (evt.is_system) {
                summary.activities.system += 1;
            } else {
                summary.activities.user += 1;
            }
        } else if (type === 'activity_task_completed') {
            summary.activities.completed += 1;
        } else if (type === 'activity_task_failed') {
            summary.activities.failed += 1;
        } else if (type === 'child_workflow_execution_started') {
            summary.child_workflows.total += 1;
        } else if (type === 'child_workflow_execution_completed') {
            summary.child_workflows.completed += 1;
        } else if (type === 'child_workflow_execution_failed') {
            summary.child_workflows.failed += 1;
        } else if (type === 'timer_started') {
            summary.timers += 1;
        } else if (type === 'workflow_execution_signaled') {
            summary.signals += 1;
        }
    }
    return summary;
}
|
|
155
|
+
// ── Exporter Service ─────────────────────────────────────────────────────────
|
|
6
156
|
class ExporterService {
|
|
7
157
|
constructor(appId, store, logger) {
|
|
8
158
|
this.appId = appId;
|
|
@@ -10,7 +160,7 @@ class ExporterService {
|
|
|
10
160
|
this.store = store;
|
|
11
161
|
}
|
|
12
162
|
/**
|
|
13
|
-
* Convert the job hash from its
|
|
163
|
+
* Convert the job hash from its compiled format into a DurableJobExport object with
|
|
14
164
|
* facets that describe the workflow in terms relevant to narrative storytelling.
|
|
15
165
|
*/
|
|
16
166
|
async export(jobId, options = {}) {
|
|
@@ -22,6 +172,284 @@ class ExporterService {
|
|
|
22
172
|
const jobExport = this.inflate(jobData, options);
|
|
23
173
|
return jobExport;
|
|
24
174
|
}
|
|
175
|
+
/**
|
|
176
|
+
* Export a workflow execution as a Temporal-compatible event history.
|
|
177
|
+
*
|
|
178
|
+
* **Sparse mode** (default): transforms the main workflow's timeline
|
|
179
|
+
* into a flat event list. No additional I/O beyond the initial export.
|
|
180
|
+
*
|
|
181
|
+
* **Verbose mode**: recursively fetches child workflow jobs and attaches
|
|
182
|
+
* their executions as nested `children`.
|
|
183
|
+
*/
|
|
184
|
+
async exportExecution(jobId, workflowTopic, options = {}) {
|
|
185
|
+
const raw = await this.export(jobId);
|
|
186
|
+
const execution = this.transformToExecution(raw, jobId, workflowTopic, options);
|
|
187
|
+
if (options.mode === 'verbose') {
|
|
188
|
+
const maxDepth = options.max_depth ?? 5;
|
|
189
|
+
execution.children = await this.fetchChildren(raw, workflowTopic, options, 1, maxDepth);
|
|
190
|
+
}
|
|
191
|
+
return execution;
|
|
192
|
+
}
|
|
193
|
+
/**
|
|
194
|
+
* Pure transformation: convert a raw DurableJobExport into a
|
|
195
|
+
* Temporal-compatible WorkflowExecution event history.
|
|
196
|
+
*/
|
|
197
|
+
transformToExecution(raw, workflowId, workflowTopic, options) {
|
|
198
|
+
const events = [];
|
|
199
|
+
let nextId = 1;
|
|
200
|
+
// ── Extract timing from state metadata ─────────────────────────
|
|
201
|
+
const state = raw.state;
|
|
202
|
+
const metadata = state?.output?.metadata ?? state?.metadata;
|
|
203
|
+
const stateData = state?.output?.data ?? state?.data;
|
|
204
|
+
const jobCreated = metadata?.jc ?? stateData?.jc ?? metadata?.ac;
|
|
205
|
+
const jobUpdated = metadata?.ju ?? stateData?.ju ?? metadata?.au;
|
|
206
|
+
const startTime = parseTimestamp(jobCreated);
|
|
207
|
+
const closeTime = parseTimestamp(jobUpdated);
|
|
208
|
+
// ── Synthetic workflow_execution_started ─────────────────────────
|
|
209
|
+
if (startTime) {
|
|
210
|
+
events.push(makeEvent(nextId++, 'workflow_execution_started', 'workflow', startTime, null, false, {
|
|
211
|
+
kind: 'workflow_execution_started',
|
|
212
|
+
workflow_type: workflowTopic,
|
|
213
|
+
task_queue: workflowTopic,
|
|
214
|
+
input: options.omit_results ? undefined : raw.data,
|
|
215
|
+
}));
|
|
216
|
+
}
|
|
217
|
+
// ── Transform timeline entries ───────────────────────────────
|
|
218
|
+
const timeline = raw.timeline || [];
|
|
219
|
+
for (const entry of timeline) {
|
|
220
|
+
const operation = extractOperation(entry.key);
|
|
221
|
+
const val = (typeof entry.value === 'object' && entry.value !== null)
|
|
222
|
+
? entry.value
|
|
223
|
+
: null;
|
|
224
|
+
const ac = val?.ac;
|
|
225
|
+
const au = val?.au;
|
|
226
|
+
const acIso = parseTimestamp(ac);
|
|
227
|
+
const auIso = parseTimestamp(au);
|
|
228
|
+
const dur = computeDuration(ac, au);
|
|
229
|
+
const hasError = val != null && '$error' in val;
|
|
230
|
+
switch (operation) {
|
|
231
|
+
case 'proxy': {
|
|
232
|
+
const name = extractActivityName(val);
|
|
233
|
+
const isSys = isSystemActivity(name);
|
|
234
|
+
if (options.exclude_system && isSys)
|
|
235
|
+
break;
|
|
236
|
+
if (acIso) {
|
|
237
|
+
events.push(makeEvent(nextId++, 'activity_task_scheduled', 'activity', acIso, null, isSys, {
|
|
238
|
+
kind: 'activity_task_scheduled',
|
|
239
|
+
activity_type: name,
|
|
240
|
+
timeline_key: entry.key,
|
|
241
|
+
execution_index: entry.index,
|
|
242
|
+
}));
|
|
243
|
+
}
|
|
244
|
+
if (auIso) {
|
|
245
|
+
if (hasError) {
|
|
246
|
+
events.push(makeEvent(nextId++, 'activity_task_failed', 'activity', auIso, dur, isSys, {
|
|
247
|
+
kind: 'activity_task_failed',
|
|
248
|
+
activity_type: name,
|
|
249
|
+
failure: val?.$error,
|
|
250
|
+
timeline_key: entry.key,
|
|
251
|
+
execution_index: entry.index,
|
|
252
|
+
}));
|
|
253
|
+
}
|
|
254
|
+
else {
|
|
255
|
+
events.push(makeEvent(nextId++, 'activity_task_completed', 'activity', auIso, dur, isSys, {
|
|
256
|
+
kind: 'activity_task_completed',
|
|
257
|
+
activity_type: name,
|
|
258
|
+
result: options.omit_results ? undefined : val?.data,
|
|
259
|
+
timeline_key: entry.key,
|
|
260
|
+
execution_index: entry.index,
|
|
261
|
+
}));
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
break;
|
|
265
|
+
}
|
|
266
|
+
case 'child': {
|
|
267
|
+
const childId = extractChildWorkflowId(val);
|
|
268
|
+
if (acIso) {
|
|
269
|
+
events.push(makeEvent(nextId++, 'child_workflow_execution_started', 'child_workflow', acIso, null, false, {
|
|
270
|
+
kind: 'child_workflow_execution_started',
|
|
271
|
+
child_workflow_id: childId,
|
|
272
|
+
awaited: true,
|
|
273
|
+
timeline_key: entry.key,
|
|
274
|
+
execution_index: entry.index,
|
|
275
|
+
}));
|
|
276
|
+
}
|
|
277
|
+
if (auIso) {
|
|
278
|
+
if (hasError) {
|
|
279
|
+
events.push(makeEvent(nextId++, 'child_workflow_execution_failed', 'child_workflow', auIso, dur, false, {
|
|
280
|
+
kind: 'child_workflow_execution_failed',
|
|
281
|
+
child_workflow_id: childId,
|
|
282
|
+
failure: val?.$error,
|
|
283
|
+
timeline_key: entry.key,
|
|
284
|
+
execution_index: entry.index,
|
|
285
|
+
}));
|
|
286
|
+
}
|
|
287
|
+
else {
|
|
288
|
+
events.push(makeEvent(nextId++, 'child_workflow_execution_completed', 'child_workflow', auIso, dur, false, {
|
|
289
|
+
kind: 'child_workflow_execution_completed',
|
|
290
|
+
child_workflow_id: childId,
|
|
291
|
+
result: options.omit_results ? undefined : val?.data,
|
|
292
|
+
timeline_key: entry.key,
|
|
293
|
+
execution_index: entry.index,
|
|
294
|
+
}));
|
|
295
|
+
}
|
|
296
|
+
}
|
|
297
|
+
break;
|
|
298
|
+
}
|
|
299
|
+
case 'start': {
|
|
300
|
+
const childId = extractChildWorkflowId(val);
|
|
301
|
+
const ts = acIso || auIso;
|
|
302
|
+
if (ts) {
|
|
303
|
+
events.push(makeEvent(nextId++, 'child_workflow_execution_started', 'child_workflow', ts, null, false, {
|
|
304
|
+
kind: 'child_workflow_execution_started',
|
|
305
|
+
child_workflow_id: childId,
|
|
306
|
+
awaited: false,
|
|
307
|
+
timeline_key: entry.key,
|
|
308
|
+
execution_index: entry.index,
|
|
309
|
+
}));
|
|
310
|
+
}
|
|
311
|
+
break;
|
|
312
|
+
}
|
|
313
|
+
case 'wait': {
|
|
314
|
+
const signalName = val?.id
|
|
315
|
+
|| val?.data?.id
|
|
316
|
+
|| val?.data?.data?.id
|
|
317
|
+
|| `signal-${entry.index}`;
|
|
318
|
+
const ts = auIso || acIso;
|
|
319
|
+
if (ts) {
|
|
320
|
+
events.push(makeEvent(nextId++, 'workflow_execution_signaled', 'signal', ts, dur, false, {
|
|
321
|
+
kind: 'workflow_execution_signaled',
|
|
322
|
+
signal_name: signalName,
|
|
323
|
+
input: options.omit_results ? undefined : val?.data?.data,
|
|
324
|
+
timeline_key: entry.key,
|
|
325
|
+
execution_index: entry.index,
|
|
326
|
+
}));
|
|
327
|
+
}
|
|
328
|
+
break;
|
|
329
|
+
}
|
|
330
|
+
case 'sleep': {
|
|
331
|
+
if (acIso) {
|
|
332
|
+
events.push(makeEvent(nextId++, 'timer_started', 'timer', acIso, null, false, {
|
|
333
|
+
kind: 'timer_started',
|
|
334
|
+
duration_ms: dur ?? undefined,
|
|
335
|
+
timeline_key: entry.key,
|
|
336
|
+
execution_index: entry.index,
|
|
337
|
+
}));
|
|
338
|
+
}
|
|
339
|
+
if (auIso) {
|
|
340
|
+
events.push(makeEvent(nextId++, 'timer_fired', 'timer', auIso, dur, false, {
|
|
341
|
+
kind: 'timer_fired',
|
|
342
|
+
timeline_key: entry.key,
|
|
343
|
+
execution_index: entry.index,
|
|
344
|
+
}));
|
|
345
|
+
}
|
|
346
|
+
break;
|
|
347
|
+
}
|
|
348
|
+
// Unknown operation types are silently skipped (forward-compatible)
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
// ── Determine status ─────────────────────────────────────────
|
|
352
|
+
const isDone = stateData?.done === true;
|
|
353
|
+
const hasError = !!stateData?.$error;
|
|
354
|
+
const status = mapStatus(raw.status, isDone, hasError);
|
|
355
|
+
// ── Extract workflow result ──────────────────────────────────
|
|
356
|
+
const result = stateData?.response ?? (raw.data && Object.keys(raw.data).length > 0 ? raw.data : null);
|
|
357
|
+
// ── Synthetic workflow_execution_completed / failed ───────────
|
|
358
|
+
if (status === 'completed' && closeTime) {
|
|
359
|
+
const totalDur = startTime
|
|
360
|
+
? new Date(closeTime).getTime() - new Date(startTime).getTime()
|
|
361
|
+
: null;
|
|
362
|
+
events.push(makeEvent(nextId++, 'workflow_execution_completed', 'workflow', closeTime, totalDur, false, {
|
|
363
|
+
kind: 'workflow_execution_completed',
|
|
364
|
+
result: options.omit_results ? undefined : result,
|
|
365
|
+
}));
|
|
366
|
+
}
|
|
367
|
+
else if (status === 'failed' && closeTime) {
|
|
368
|
+
const totalDur = startTime
|
|
369
|
+
? new Date(closeTime).getTime() - new Date(startTime).getTime()
|
|
370
|
+
: null;
|
|
371
|
+
events.push(makeEvent(nextId++, 'workflow_execution_failed', 'workflow', closeTime, totalDur, false, {
|
|
372
|
+
kind: 'workflow_execution_failed',
|
|
373
|
+
failure: stateData?.err,
|
|
374
|
+
}));
|
|
375
|
+
}
|
|
376
|
+
// ── Sort chronologically ─────────────────────────────────────
|
|
377
|
+
events.sort((a, b) => {
|
|
378
|
+
const cmp = a.event_time.localeCompare(b.event_time);
|
|
379
|
+
return cmp !== 0 ? cmp : a.event_id - b.event_id;
|
|
380
|
+
});
|
|
381
|
+
// ── Re-number event IDs after sort ───────────────────────────
|
|
382
|
+
for (let i = 0; i < events.length; i++) {
|
|
383
|
+
events[i].event_id = i + 1;
|
|
384
|
+
}
|
|
385
|
+
// ── Back-references (Temporal-compatible) ────────────────────
|
|
386
|
+
const scheduledMap = new Map();
|
|
387
|
+
const initiatedMap = new Map();
|
|
388
|
+
for (const e of events) {
|
|
389
|
+
const attrs = e.attributes;
|
|
390
|
+
if (e.event_type === 'activity_task_scheduled' && attrs.timeline_key) {
|
|
391
|
+
scheduledMap.set(attrs.timeline_key, e.event_id);
|
|
392
|
+
}
|
|
393
|
+
if (e.event_type === 'child_workflow_execution_started' && attrs.timeline_key) {
|
|
394
|
+
initiatedMap.set(attrs.timeline_key, e.event_id);
|
|
395
|
+
}
|
|
396
|
+
if ((e.event_type === 'activity_task_completed' || e.event_type === 'activity_task_failed') && attrs.timeline_key) {
|
|
397
|
+
attrs.scheduled_event_id = scheduledMap.get(attrs.timeline_key) ?? null;
|
|
398
|
+
}
|
|
399
|
+
if ((e.event_type === 'child_workflow_execution_completed' || e.event_type === 'child_workflow_execution_failed') && attrs.timeline_key) {
|
|
400
|
+
attrs.initiated_event_id = initiatedMap.get(attrs.timeline_key) ?? null;
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
// ── Compute total duration ───────────────────────────────────
|
|
404
|
+
const totalDuration = (startTime && closeTime)
|
|
405
|
+
? new Date(closeTime).getTime() - new Date(startTime).getTime()
|
|
406
|
+
: null;
|
|
407
|
+
return {
|
|
408
|
+
workflow_id: workflowId,
|
|
409
|
+
workflow_type: workflowTopic,
|
|
410
|
+
task_queue: workflowTopic,
|
|
411
|
+
status,
|
|
412
|
+
start_time: startTime,
|
|
413
|
+
close_time: (status !== 'running') ? closeTime : null,
|
|
414
|
+
duration_ms: totalDuration,
|
|
415
|
+
result,
|
|
416
|
+
events,
|
|
417
|
+
summary: computeSummary(events),
|
|
418
|
+
};
|
|
419
|
+
}
|
|
420
|
+
/**
|
|
421
|
+
* Recursively fetch child workflow executions for verbose mode.
|
|
422
|
+
*/
|
|
423
|
+
async fetchChildren(raw, workflowTopic, options, depth, maxDepth) {
|
|
424
|
+
if (depth >= maxDepth)
|
|
425
|
+
return [];
|
|
426
|
+
const children = [];
|
|
427
|
+
const timeline = raw.timeline || [];
|
|
428
|
+
for (const entry of timeline) {
|
|
429
|
+
const operation = extractOperation(entry.key);
|
|
430
|
+
if (operation !== 'child' && operation !== 'start')
|
|
431
|
+
continue;
|
|
432
|
+
const val = (typeof entry.value === 'object' && entry.value !== null)
|
|
433
|
+
? entry.value
|
|
434
|
+
: null;
|
|
435
|
+
const childJobId = val?.job_id;
|
|
436
|
+
if (!childJobId || typeof childJobId !== 'string')
|
|
437
|
+
continue;
|
|
438
|
+
try {
|
|
439
|
+
const childRaw = await this.export(childJobId);
|
|
440
|
+
const childTopic = childRaw.data?.workflowTopic ?? workflowTopic;
|
|
441
|
+
const childExecution = this.transformToExecution(childRaw, childJobId, childTopic, options);
|
|
442
|
+
if (options.mode === 'verbose') {
|
|
443
|
+
childExecution.children = await this.fetchChildren(childRaw, childTopic, options, depth + 1, maxDepth);
|
|
444
|
+
}
|
|
445
|
+
children.push(childExecution);
|
|
446
|
+
}
|
|
447
|
+
catch {
|
|
448
|
+
// Child job may have expired or been cleaned up
|
|
449
|
+
}
|
|
450
|
+
}
|
|
451
|
+
return children;
|
|
452
|
+
}
|
|
25
453
|
/**
|
|
26
454
|
* Inflates the job data into a DurableJobExport object
|
|
27
455
|
* @param jobHash - the job data
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { HotMesh } from '../hotmesh';
|
|
2
|
-
import { DurableJobExport, ExportOptions } from '../../types/exporter';
|
|
2
|
+
import { DurableJobExport, ExportOptions, ExecutionExportOptions, WorkflowExecution } from '../../types/exporter';
|
|
3
3
|
import { JobInterruptOptions } from '../../types/job';
|
|
4
4
|
import { StreamError } from '../../types/stream';
|
|
5
5
|
import { ExporterService } from './exporter';
|
|
@@ -44,6 +44,17 @@ export declare class WorkflowHandleService {
|
|
|
44
44
|
* Exports the workflow state to a JSON object.
|
|
45
45
|
*/
|
|
46
46
|
export(options?: ExportOptions): Promise<DurableJobExport>;
|
|
47
|
+
/**
|
|
48
|
+
* Exports the workflow as a Temporal-like execution event history.
|
|
49
|
+
*
|
|
50
|
+
* **Sparse mode** (default): transforms the main workflow's timeline
|
|
51
|
+
* into a flat event list with workflow lifecycle, activity, child workflow,
|
|
52
|
+
* timer, and signal events.
|
|
53
|
+
*
|
|
54
|
+
* **Verbose mode**: recursively fetches child workflow jobs and attaches
|
|
55
|
+
* their full execution histories as nested `children`.
|
|
56
|
+
*/
|
|
57
|
+
exportExecution(options?: ExecutionExportOptions): Promise<WorkflowExecution>;
|
|
47
58
|
/**
|
|
48
59
|
* Sends a signal to the workflow. This is a way to send
|
|
49
60
|
* a message to a workflow that is paused due to having
|
|
@@ -43,6 +43,19 @@ class WorkflowHandleService {
|
|
|
43
43
|
async export(options) {
|
|
44
44
|
return this.exporter.export(this.workflowId, options);
|
|
45
45
|
}
|
|
46
|
+
/**
|
|
47
|
+
* Exports the workflow as a Temporal-like execution event history.
|
|
48
|
+
*
|
|
49
|
+
* **Sparse mode** (default): transforms the main workflow's timeline
|
|
50
|
+
* into a flat event list with workflow lifecycle, activity, child workflow,
|
|
51
|
+
* timer, and signal events.
|
|
52
|
+
*
|
|
53
|
+
* **Verbose mode**: recursively fetches child workflow jobs and attaches
|
|
54
|
+
* their full execution histories as nested `children`.
|
|
55
|
+
*/
|
|
56
|
+
async exportExecution(options) {
|
|
57
|
+
return this.exporter.exportExecution(this.workflowId, this.workflowTopic, options);
|
|
58
|
+
}
|
|
46
59
|
/**
|
|
47
60
|
* Sends a signal to the workflow. This is a way to send
|
|
48
61
|
* a message to a workflow that is paused due to having
|
|
@@ -364,12 +364,41 @@ declare class DurableClass {
|
|
|
364
364
|
static didInterrupt: typeof didInterrupt;
|
|
365
365
|
private static interceptorService;
|
|
366
366
|
/**
|
|
367
|
-
* Register a workflow interceptor
|
|
367
|
+
* Register a workflow interceptor that wraps the entire workflow execution
|
|
368
|
+
* in an onion-like pattern. Interceptors execute in registration order
|
|
369
|
+
* (first registered is outermost) and can perform actions before and after
|
|
370
|
+
* workflow execution, handle errors, and add cross-cutting concerns like
|
|
371
|
+
* logging, metrics, or tracing.
|
|
372
|
+
*
|
|
373
|
+
* Workflow interceptors run inside the workflow's async local storage context,
|
|
374
|
+
* so all Durable workflow methods (`proxyActivities`, `sleepFor`, `waitFor`,
|
|
375
|
+
* `execChild`, etc.) are available. When using Durable functions, always check
|
|
376
|
+
* for interruptions with `Durable.didInterrupt(err)` and rethrow them.
|
|
377
|
+
*
|
|
368
378
|
* @param interceptor The interceptor to register
|
|
379
|
+
*
|
|
380
|
+
* @example
|
|
381
|
+
* ```typescript
|
|
382
|
+
* // Logging interceptor
|
|
383
|
+
* Durable.registerInterceptor({
|
|
384
|
+
* async execute(ctx, next) {
|
|
385
|
+
* console.log(`Workflow ${ctx.get('workflowName')} starting`);
|
|
386
|
+
* try {
|
|
387
|
+
* const result = await next();
|
|
388
|
+
* console.log(`Workflow ${ctx.get('workflowName')} completed`);
|
|
389
|
+
* return result;
|
|
390
|
+
* } catch (err) {
|
|
391
|
+
* if (Durable.didInterrupt(err)) throw err;
|
|
392
|
+
* console.error(`Workflow ${ctx.get('workflowName')} failed`);
|
|
393
|
+
* throw err;
|
|
394
|
+
* }
|
|
395
|
+
* }
|
|
396
|
+
* });
|
|
397
|
+
* ```
|
|
369
398
|
*/
|
|
370
399
|
static registerInterceptor(interceptor: WorkflowInterceptor): void;
|
|
371
400
|
/**
|
|
372
|
-
* Clear all registered interceptors (both workflow and activity)
|
|
401
|
+
* Clear all registered interceptors (both workflow and activity).
|
|
373
402
|
*/
|
|
374
403
|
static clearInterceptors(): void;
|
|
375
404
|
/**
|
|
@@ -280,14 +280,43 @@ class DurableClass {
|
|
|
280
280
|
*/
|
|
281
281
|
constructor() { }
|
|
282
282
|
/**
|
|
283
|
-
* Register a workflow interceptor
|
|
283
|
+
* Register a workflow interceptor that wraps the entire workflow execution
|
|
284
|
+
* in an onion-like pattern. Interceptors execute in registration order
|
|
285
|
+
* (first registered is outermost) and can perform actions before and after
|
|
286
|
+
* workflow execution, handle errors, and add cross-cutting concerns like
|
|
287
|
+
* logging, metrics, or tracing.
|
|
288
|
+
*
|
|
289
|
+
* Workflow interceptors run inside the workflow's async local storage context,
|
|
290
|
+
* so all Durable workflow methods (`proxyActivities`, `sleepFor`, `waitFor`,
|
|
291
|
+
* `execChild`, etc.) are available. When using Durable functions, always check
|
|
292
|
+
* for interruptions with `Durable.didInterrupt(err)` and rethrow them.
|
|
293
|
+
*
|
|
284
294
|
* @param interceptor The interceptor to register
|
|
295
|
+
*
|
|
296
|
+
* @example
|
|
297
|
+
* ```typescript
|
|
298
|
+
* // Logging interceptor
|
|
299
|
+
* Durable.registerInterceptor({
|
|
300
|
+
* async execute(ctx, next) {
|
|
301
|
+
* console.log(`Workflow ${ctx.get('workflowName')} starting`);
|
|
302
|
+
* try {
|
|
303
|
+
* const result = await next();
|
|
304
|
+
* console.log(`Workflow ${ctx.get('workflowName')} completed`);
|
|
305
|
+
* return result;
|
|
306
|
+
* } catch (err) {
|
|
307
|
+
* if (Durable.didInterrupt(err)) throw err;
|
|
308
|
+
* console.error(`Workflow ${ctx.get('workflowName')} failed`);
|
|
309
|
+
* throw err;
|
|
310
|
+
* }
|
|
311
|
+
* }
|
|
312
|
+
* });
|
|
313
|
+
* ```
|
|
285
314
|
*/
|
|
286
315
|
static registerInterceptor(interceptor) {
|
|
287
316
|
DurableClass.interceptorService.register(interceptor);
|
|
288
317
|
}
|
|
289
318
|
/**
|
|
290
|
-
* Clear all registered interceptors (both workflow and activity)
|
|
319
|
+
* Clear all registered interceptors (both workflow and activity).
|
|
291
320
|
*/
|
|
292
321
|
static clearInterceptors() {
|
|
293
322
|
DurableClass.interceptorService.clear();
|
|
@@ -181,6 +181,79 @@ import { WorkflowInterceptor, InterceptorRegistry, ActivityInterceptor, Activity
|
|
|
181
181
|
* }
|
|
182
182
|
* };
|
|
183
183
|
* ```
|
|
184
|
+
*
|
|
185
|
+
* ## Activity Interceptors
|
|
186
|
+
*
|
|
187
|
+
* Activity interceptors wrap individual proxied activity calls, supporting
|
|
188
|
+
* both **before** and **after** phases. The before phase receives the activity
|
|
189
|
+
* input (and can modify `activityCtx.args`). The after phase receives the
|
|
190
|
+
* activity output as the return value of `next()`.
|
|
191
|
+
*
|
|
192
|
+
* This enables patterns like publishing activity results to an external
|
|
193
|
+
* system (e.g., SNS, audit log) without modifying the workflow itself.
|
|
194
|
+
*
|
|
195
|
+
* **Important:** The after-phase proxy activity calls go through the same
|
|
196
|
+
* interceptor chain. Guard against recursion by checking `activityCtx.activityName`
|
|
197
|
+
* to skip the interceptor's own calls.
|
|
198
|
+
*
|
|
199
|
+
* @example
|
|
200
|
+
* ```typescript
|
|
201
|
+
* import { Durable } from '@hotmeshio/hotmesh';
|
|
202
|
+
* import type { ActivityInterceptor } from '@hotmeshio/hotmesh/types/durable';
|
|
203
|
+
* import * as activities from './activities';
|
|
204
|
+
*
|
|
205
|
+
* // Activity interceptor that publishes results via a proxy activity
|
|
206
|
+
* const publishResultInterceptor: ActivityInterceptor = {
|
|
207
|
+
* async execute(activityCtx, workflowCtx, next) {
|
|
208
|
+
* try {
|
|
209
|
+
* // BEFORE: inspect or modify the activity input
|
|
210
|
+
* console.log(`Calling ${activityCtx.activityName}`, activityCtx.args);
|
|
211
|
+
*
|
|
212
|
+
* // Execute the activity (returns stored result on replay)
|
|
213
|
+
* const result = await next();
|
|
214
|
+
*
|
|
215
|
+
* // AFTER: use the activity output (only runs on replay,
|
|
216
|
+
* // once the result is available)
|
|
217
|
+
*
|
|
218
|
+
* // Guard: skip for the interceptor's own proxy calls
|
|
219
|
+
* if (activityCtx.activityName !== 'publishToSNS') {
|
|
220
|
+
* const { publishToSNS } = Durable.workflow.proxyActivities<{
|
|
221
|
+
* publishToSNS: (topic: string, payload: any) => Promise<void>;
|
|
222
|
+
* }>({
|
|
223
|
+
* taskQueue: 'shared-notifications',
|
|
224
|
+
* retryPolicy: { maximumAttempts: 3, throwOnError: true },
|
|
225
|
+
* });
|
|
226
|
+
*
|
|
227
|
+
* await publishToSNS('activity-results', {
|
|
228
|
+
* workflowId: workflowCtx.get('workflowId'),
|
|
229
|
+
* activityName: activityCtx.activityName,
|
|
230
|
+
* input: activityCtx.args,
|
|
231
|
+
* output: result,
|
|
232
|
+
* });
|
|
233
|
+
* }
|
|
234
|
+
*
|
|
235
|
+
* return result;
|
|
236
|
+
* } catch (err) {
|
|
237
|
+
* if (Durable.didInterrupt(err)) throw err;
|
|
238
|
+
* throw err;
|
|
239
|
+
* }
|
|
240
|
+
* },
|
|
241
|
+
* };
|
|
242
|
+
*
|
|
243
|
+
* Durable.registerActivityInterceptor(publishResultInterceptor);
|
|
244
|
+
* ```
|
|
245
|
+
*
|
|
246
|
+
* ## Activity Interceptor Replay Pattern
|
|
247
|
+
*
|
|
248
|
+
* Activity interceptors participate in the interruption/replay cycle:
|
|
249
|
+
*
|
|
250
|
+
* 1. **First execution**: Before-phase runs → `next()` registers the activity
|
|
251
|
+
* interruption and throws `DurableProxyError` → workflow pauses
|
|
252
|
+
* 2. **Second execution**: Before-phase replays → `next()` returns the stored
|
|
253
|
+
* activity result → after-phase runs → after-phase proxy call (e.g.,
|
|
254
|
+
* `publishToSNS`) registers its own interruption → workflow pauses
|
|
255
|
+
* 3. **Third execution**: Everything replays → after-phase proxy call returns
|
|
256
|
+
* its stored result → interceptor returns → workflow continues
|
|
184
257
|
*/
|
|
185
258
|
export declare class InterceptorService implements InterceptorRegistry {
|
|
186
259
|
interceptors: WorkflowInterceptor[];
|