@output.ai/core 0.0.15 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/worker.sh +1 -1
- package/package.json +17 -10
- package/src/configs.js +1 -6
- package/src/configs.spec.js +2 -50
- package/src/consts.js +6 -8
- package/src/index.d.ts +169 -7
- package/src/index.js +18 -1
- package/src/interface/evaluator.js +146 -0
- package/src/interface/step.js +4 -9
- package/src/interface/{schema_utils.js → validations/runtime.js} +0 -14
- package/src/interface/validations/runtime.spec.js +29 -0
- package/src/interface/validations/schema_utils.js +8 -0
- package/src/interface/validations/static.js +13 -1
- package/src/interface/validations/static.spec.js +29 -1
- package/src/interface/webhook.js +16 -4
- package/src/interface/workflow.js +32 -54
- package/src/internal_activities/index.js +16 -12
- package/src/tracing/index.d.ts +47 -0
- package/src/tracing/index.js +154 -0
- package/src/tracing/index.private.spec.js +84 -0
- package/src/tracing/index.public.spec.js +86 -0
- package/src/tracing/tracer_tree.js +83 -0
- package/src/tracing/tracer_tree.spec.js +115 -0
- package/src/tracing/utils.js +21 -0
- package/src/tracing/utils.spec.js +14 -0
- package/src/worker/catalog_workflow/catalog.js +19 -10
- package/src/worker/index.js +1 -5
- package/src/worker/interceptors/activity.js +28 -10
- package/src/worker/interceptors/workflow.js +19 -1
- package/src/worker/loader.js +6 -6
- package/src/worker/loader.spec.js +6 -9
- package/src/worker/sinks.js +56 -10
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.js +35 -4
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.spec.js +12 -4
- package/src/worker/webpack_loaders/workflow_rewriter/index.mjs +5 -4
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.js +13 -4
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.spec.js +16 -2
- package/src/worker/webpack_loaders/workflow_rewriter/tools.js +46 -13
- package/src/worker/webpack_loaders/workflow_rewriter/tools.spec.js +20 -2
- package/src/worker/tracer/index.js +0 -75
- package/src/worker/tracer/index.test.js +0 -103
- package/src/worker/tracer/tracer_tree.js +0 -84
- package/src/worker/tracer/tracer_tree.test.js +0 -115
- /package/src/{worker/async_storage.js → async_storage.js} +0 -0
- /package/src/interface/{schema_utils.spec.js → validations/schema_utils.spec.js} +0 -0
|
@@ -0,0 +1,83 @@
|
|
|
import { readFileSync, writeFileSync } from 'node:fs';

/**
 * @typedef {object} NodeEntry
 * @property {string} id
 * @property {string} kind
 * @property {string} name
 * @property {number} startedAt
 * @property {number} endedAt
 * @property {object} [input]
 * @property {object} [output]
 * @property {object} [error]
 * @property {NodeEntry[]} children
 */

/**
 * Create a node entry for the tree.
 *
 * Properties are sorted the way they should be in the final file, as this makes it easier to read.
 *
 * @param {string} id - Node id
 * @returns {NodeEntry} The entry without any values
 */
const createEntry = id => ( {
    id,
    kind: '',
    name: '',
    startedAt: 0,
    endedAt: 0,
    input: undefined,
    output: undefined,
    error: undefined,
    children: []
} );

/**
 * Build a tree of nodes from the raw trace file.
 *
 * Each node will have: id, name, kind, children, input, output or error, startedAt, endedAt.
 *
 * Entries with same id will be combined according to their phase (start, end OR error).
 * - The details of the start phase becomes input, timestamp becomes startedAt;
 * - The details of the end phase become output, timestamp becomes endedAt;
 * - The details of the error phase become error, timestamp becomes endedAt;
 * - Only start phase's kind and name are used;
 *
 * Children are added according to the parentId of each entry, sorted by startedAt.
 * The result tree has a single root: the only node without parentId, normally the workflow itself.
 *
 * The tree is written next to the source file, with the ".raw" suffix replaced by ".json".
 *
 * @param {string} src - Full path to the raw log file (NDJSON)
 * @returns {void}
 * @throws {Error} When the file contains no root entry (an entry without parentId)
 */
export const buildLogTree = src => {
    const content = readFileSync( src, 'utf-8' );
    // Split on any newline convention: the raw file may have been produced on a different
    // platform than the one reading it, so os.EOL is not a reliable separator here.
    const lines = content.split( /\r?\n/ ).filter( l => l.trim().length > 0 );
    const entries = lines.map( l => JSON.parse( l ) );

    const nodes = new Map();
    const ensureNode = id => nodes.get( id ) ?? nodes.set( id, createEntry( id ) ).get( id );

    for ( const entry of entries ) {
        const { kind, id, name, parentId, details, phase, timestamp } = entry;
        const node = ensureNode( id );

        if ( phase === 'start' ) {
            Object.assign( node, { input: details, startedAt: timestamp, kind, name } );
        } else if ( phase === 'end' ) {
            Object.assign( node, { output: details, endedAt: timestamp } );
        } else if ( phase === 'error' ) {
            Object.assign( node, { error: details, endedAt: timestamp } );
        }

        // Only the start phase attaches the node to its parent, so each node is added once.
        if ( parentId && phase === 'start' ) {
            ensureNode( parentId ).children.push( node );
        }
    }

    // Sort every node's children once, instead of re-sorting on each insertion (O(n log n) total).
    for ( const node of nodes.values() ) {
        node.children.sort( ( a, b ) => a.startedAt - b.startedAt );
    }

    const rootEntry = entries.find( e => !e.parentId );
    if ( !rootEntry ) {
        throw new Error( `No root entry (entry without parentId) found in trace file: ${src}` );
    }

    const root = nodes.get( rootEntry.id );
    writeFileSync( src.replace( /\.raw$/, '.json' ), JSON.stringify( root, undefined, 2 ), 'utf-8' );
};
|
|
@@ -0,0 +1,115 @@
|
|
|
// Spec for buildLogTree: feeds a hand-written NDJSON trace through the builder and
// compares the generated .json file byte-for-byte against the expected tree.
import { describe, it, expect } from 'vitest';
import { writeFileSync, readFileSync, rmSync } from 'node:fs';
import { mkdtempSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'path';
import { EOL } from 'os';
import { buildLogTree } from './tracer_tree.js';

// Each test gets its own scratch directory so parallel runs cannot collide.
const createTempDir = () => mkdtempSync( join( tmpdir(), 'output-sdk-trace-tree-' ) );

describe( 'tracer/tracer_tree', () => {
    it( 'builds a tree from workflow/step/IO entries with grouping and sorting', () => {
        const tmp = createTempDir();
        const rawPath = join( tmp, 'run-123.raw' );

        // Entries deliberately interleave start/end phases across nodes: the builder must
        // pair them by id and nest children via parentId, regardless of arrival order.
        const entries = [
            // workflow start
            { kind: 'workflow', phase: 'start', name: 'wf', id: 'wf', parentId: undefined, details: { a: 1 }, timestamp: 1000 },
            // evaluator start/stop
            { kind: 'evaluator', phase: 'start', name: 'eval', id: 'eval', parentId: 'wf', details: { z: 0 }, timestamp: 1500 },
            { kind: 'evaluator', phase: 'end', name: 'eval', id: 'eval', parentId: 'wf', details: { z: 1 }, timestamp: 1600 },
            // step1 start
            { kind: 'step', phase: 'start', name: 'step-1', id: 's1', parentId: 'wf', details: { x: 1 }, timestamp: 2000 },
            // IO under step1
            { kind: 'IO', phase: 'start', name: 'test-1', id: 'io1', parentId: 's1', details: { y: 2 }, timestamp: 2300 },
            // step2 start
            { kind: 'step', phase: 'start', name: 'step-2', id: 's2', parentId: 'wf', details: { x: 2 }, timestamp: 2400 },
            // IO under step2
            { kind: 'IO', phase: 'start', name: 'test-2', id: 'io2', parentId: 's2', details: { y: 3 }, timestamp: 2500 },
            { kind: 'IO', phase: 'end', name: 'test-2', id: 'io2', parentId: 's2', details: { y: 4 }, timestamp: 2600 },
            // IO under step1 ends
            { kind: 'IO', phase: 'end', name: 'test-1', id: 'io1', parentId: 's1', details: { y: 5 }, timestamp: 2700 },
            // step1 end
            { kind: 'step', phase: 'end', name: 'step-1', id: 's1', parentId: 'wf', details: { done: true }, timestamp: 2800 },
            // step2 end
            { kind: 'step', phase: 'end', name: 'step-2', id: 's2', parentId: 'wf', details: { done: true }, timestamp: 2900 },
            // workflow end
            { kind: 'workflow', phase: 'end', name: 'wf', id: 'wf', parentId: undefined, details: { ok: true }, timestamp: 3000 }
        ];

        // One JSON object per line (NDJSON), matching the raw trace file format.
        writeFileSync( rawPath, entries.map( e => JSON.stringify( e ) ).join( EOL ) + EOL, 'utf-8' );

        buildLogTree( rawPath );

        // NOTE(review): the "." in /.raw$/ is an unescaped any-char; it works for this
        // filename, but /\.raw$/ would be the strict form — confirm intent.
        const jsonText = readFileSync( rawPath.replace( /.raw$/, '.json' ), 'utf-8' );

        // Single root (the workflow); children ordered by startedAt at every level.
        const expected = {
            id: 'wf',
            kind: 'workflow',
            name: 'wf',
            startedAt: 1000,
            endedAt: 3000,
            input: { a: 1 },
            output: { ok: true },
            children: [
                {
                    id: 'eval',
                    kind: 'evaluator',
                    name: 'eval',
                    startedAt: 1500,
                    endedAt: 1600,
                    input: { z: 0 },
                    output: { z: 1 },
                    children: []
                },
                {
                    id: 's1',
                    kind: 'step',
                    name: 'step-1',
                    startedAt: 2000,
                    endedAt: 2800,
                    input: { x: 1 },
                    output: { done: true },
                    children: [
                        {
                            id: 'io1',
                            kind: 'IO',
                            name: 'test-1',
                            startedAt: 2300,
                            endedAt: 2700,
                            input: { y: 2 },
                            output: { y: 5 },
                            children: []
                        }
                    ]
                },
                {
                    id: 's2',
                    kind: 'step',
                    name: 'step-2',
                    startedAt: 2400,
                    endedAt: 2900,
                    input: { x: 2 },
                    output: { done: true },
                    children: [
                        {
                            id: 'io2',
                            kind: 'IO',
                            name: 'test-2',
                            startedAt: 2500,
                            endedAt: 2600,
                            input: { y: 3 },
                            output: { y: 4 },
                            children: []
                        }
                    ]
                }
            ]
        };

        // Byte-for-byte comparison: also pins key order and the 2-space indentation.
        expect( jsonText ).toBe( JSON.stringify( expected, undefined, 2 ) );

        rmSync( tmp, { recursive: true, force: true } );
    } );
} );
|
|
@@ -0,0 +1,21 @@
|
|
|
/**
 * @typedef {object} SerializedError
 * @property {string} name - The error constructor name
 * @property {string} message - The error message
 * @property {string} stack - The error stack trace
 */

/**
 * Serialize an error object.
 *
 * If it has ".cause", recursively serialize its cause, until an error without one is found,
 * so the entry describes the root cause of the failure.
 *
 * Guards against two hazards of blindly following ".cause":
 * - a non-Error cause (e.g. a string) stops the unwrapping, serializing the current Error
 *   instead of producing a meaningless entry with an undefined message/stack;
 * - a circular cause chain is detected, so serialization always terminates.
 *
 * @param {Error} error - The error to serialize
 * @returns {SerializedError} Name, message and stack of the root cause
 */
export const serializeError = error => {
    const seen = new Set();
    let current = error;

    // Walk down the cause chain while the next cause is a genuine, not-yet-visited Error.
    while ( current.cause instanceof Error && !seen.has( current.cause ) ) {
        seen.add( current );
        current = current.cause;
    }

    return {
        name: current.constructor.name,
        message: current.message,
        stack: current.stack
    };
};
|
|
@@ -0,0 +1,14 @@
|
|
|
// Spec for the error-serialization helper: a wrapped error must be reported
// through its innermost (cause-less) error.
import { describe, it, expect } from 'vitest';
import { serializeError } from './utils.js';

describe( 'tracing/utils', () => {
    it( 'serializeError unwraps causes and keeps message/stack', () => {
        const rootError = new Error( 'inner' );
        const wrappedError = new Error( 'outer', { cause: rootError } );

        const serialized = serializeError( wrappedError );

        // The root cause wins: its message is reported, not the wrapper's.
        expect( serialized.message ).toBe( 'inner' );
        expect( serialized.name ).toBe( 'Error' );
        expect( typeof serialized.stack ).toBe( 'string' );
    } );
} );
|
|
@@ -73,6 +73,18 @@ class CatalogEntry {
|
|
|
73
73
|
};
|
|
74
74
|
}
|
|
75
75
|
|
|
76
|
+
/**
|
|
77
|
+
* Describes a single activity within a workflow.
|
|
78
|
+
*
|
|
79
|
+
* @class
|
|
80
|
+
* @extends CatalogEntry
|
|
81
|
+
*/
|
|
82
|
+
export class CatalogActivity extends CatalogEntry {}
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* @param {CatalogWorkflowOptions} options - The catalog workflow options.
|
|
86
|
+
*/
|
|
87
|
+
|
|
76
88
|
/**
|
|
77
89
|
* Describes a single workflow within the catalog.
|
|
78
90
|
*
|
|
@@ -87,19 +99,16 @@ export class CatalogWorkflow extends CatalogEntry {
|
|
|
87
99
|
activities;
|
|
88
100
|
|
|
89
101
|
/**
|
|
90
|
-
* @
|
|
91
|
-
* @param {
|
|
102
|
+
* @param {Object} params - Entry parameters.
|
|
103
|
+
* @param {string} params.name - Name of the entry.
|
|
104
|
+
* @param {string} [params.description] - Optional description.
|
|
105
|
+
* @param {object} [params.inputSchema] - JSON schema describing the expected input.
|
|
106
|
+
* @param {object} [params.outputSchema] - JSON schema describing the produced output.
|
|
107
|
+
* @param {string} params.path - Absolute path of the entity in the file system.
|
|
108
|
+
* @param {Array<CatalogActivity>} params.activities - Each activity of this workflow
|
|
92
109
|
*/
|
|
93
110
|
constructor( { activities, ...args } ) {
|
|
94
111
|
super( args );
|
|
95
112
|
this.activities = activities;
|
|
96
113
|
};
|
|
97
114
|
};
|
|
98
|
-
|
|
99
|
-
/**
|
|
100
|
-
* Describes a single activity within a workflow.
|
|
101
|
-
*
|
|
102
|
-
* @class
|
|
103
|
-
* @extends CatalogEntry
|
|
104
|
-
*/
|
|
105
|
-
export class CatalogActivity extends CatalogEntry {}
|
package/src/worker/index.js
CHANGED
|
@@ -6,15 +6,11 @@ import { fileURLToPath } from 'node:url';
|
|
|
6
6
|
import { worker as workerConfig } from '#configs';
|
|
7
7
|
import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
|
|
8
8
|
import { ActivityExecutionInterceptor } from './interceptors/activity.js';
|
|
9
|
-
import { setupGlobalTracer } from './tracer/index.js';
|
|
10
9
|
import { sinks } from './sinks.js';
|
|
11
10
|
import { createCatalog } from './catalog_workflow/index.js';
|
|
12
11
|
|
|
13
12
|
const __dirname = dirname( fileURLToPath( import.meta.url ) );
|
|
14
13
|
|
|
15
|
-
// expose the coreTracker so other parts of the SDK can use it
|
|
16
|
-
setupGlobalTracer();
|
|
17
|
-
|
|
18
14
|
const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } = workerConfig;
|
|
19
15
|
|
|
20
16
|
// Get caller directory from command line arguments
|
|
@@ -47,7 +43,7 @@ const callerDir = process.argv[2];
|
|
|
47
43
|
sinks,
|
|
48
44
|
interceptors: {
|
|
49
45
|
workflowModules: [ join( __dirname, './interceptors/workflow.js' ) ],
|
|
50
|
-
activityInbound: [ () => new ActivityExecutionInterceptor() ]
|
|
46
|
+
activityInbound: [ () => new ActivityExecutionInterceptor( activities ) ]
|
|
51
47
|
},
|
|
52
48
|
maxConcurrentWorkflowTaskExecutions: maxWorkflows,
|
|
53
49
|
maxConcurrentActivityTaskExecutions: maxActivities,
|
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
import { Context } from '@temporalio/activity';
|
|
2
|
-
import { Storage } from '
|
|
3
|
-
import {
|
|
2
|
+
import { Storage } from '#async_storage';
|
|
3
|
+
import { addEventStart, addEventEnd, addEventError } from '#tracing';
|
|
4
4
|
import { headersToObject } from '../sandboxed_utils.js';
|
|
5
|
-
import {
|
|
5
|
+
import { METADATA_ACCESS_SYMBOL } from '#consts';
|
|
6
6
|
|
|
7
7
|
/*
|
|
8
8
|
This interceptor is called for every activity execution
|
|
@@ -14,15 +14,33 @@ import { THIS_LIB_NAME, TraceEvent } from '#consts';
|
|
|
14
14
|
Some information it needs for its context comes from Temporal's Activity Context others are injected in the headers
|
|
15
15
|
*/
|
|
16
16
|
export class ActivityExecutionInterceptor {
|
|
17
|
+
constructor( activities ) {
|
|
18
|
+
this.activities = activities;
|
|
19
|
+
};
|
|
20
|
+
|
|
17
21
|
async execute( input, next ) {
|
|
18
|
-
const { workflowExecution: { workflowId }, activityId, activityType
|
|
19
|
-
const
|
|
22
|
+
const { workflowExecution: { workflowId }, activityId, activityType } = Context.current().info;
|
|
23
|
+
const { traceId, traceHelm } = headersToObject( input.headers );
|
|
24
|
+
const { type: kind, skipTrace } = this.activities?.[activityType]?.[METADATA_ACCESS_SYMBOL];
|
|
25
|
+
|
|
26
|
+
const traceContext = { kind, id: activityId, parentId: workflowId, name: activityType, traceId, traceHelm };
|
|
27
|
+
|
|
28
|
+
if ( !skipTrace ) {
|
|
29
|
+
addEventStart( { details: input.args[0], ...traceContext } );
|
|
30
|
+
}
|
|
20
31
|
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
const output = await next( input );
|
|
24
|
-
|
|
32
|
+
// creates a context for the nested tracing
|
|
33
|
+
try {
|
|
34
|
+
const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId, traceId, traceHelm } );
|
|
35
|
+
if ( !skipTrace ) {
|
|
36
|
+
addEventEnd( { details: output, ...traceContext } );
|
|
37
|
+
}
|
|
25
38
|
return output;
|
|
26
|
-
}
|
|
39
|
+
} catch ( error ) {
|
|
40
|
+
if ( !skipTrace ) {
|
|
41
|
+
addEventError( { details: error, ...traceContext } );
|
|
42
|
+
}
|
|
43
|
+
throw error;
|
|
44
|
+
}
|
|
27
45
|
}
|
|
28
46
|
};
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
// THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
|
|
2
|
-
import { workflowInfo } from '@temporalio/workflow';
|
|
2
|
+
import { workflowInfo, proxySinks, ApplicationFailure } from '@temporalio/workflow';
|
|
3
3
|
import { memoToHeaders } from '../sandboxed_utils.js';
|
|
4
4
|
|
|
5
5
|
/*
|
|
@@ -18,6 +18,24 @@ class HeadersInjectionInterceptor {
|
|
|
18
18
|
}
|
|
19
19
|
};
|
|
20
20
|
|
|
21
|
+
const sinks = proxySinks();
|
|
22
|
+
|
|
23
|
+
class WorkflowExecutionInterceptor {
|
|
24
|
+
async execute( input, next ) {
|
|
25
|
+
sinks.trace.addWorkflowEventStart( input.args[0] );
|
|
26
|
+
try {
|
|
27
|
+
const output = await next( input );
|
|
28
|
+
// if the workflow is root, the result wraps "output" and "trace" onto an object, this will hide the trace
|
|
29
|
+
sinks.trace.addWorkflowEventEnd( !workflowInfo().memo.parentId ? output.output : output );
|
|
30
|
+
return output;
|
|
31
|
+
} catch ( error ) {
|
|
32
|
+
sinks.trace.addWorkflowEventError( error );
|
|
33
|
+
throw new ApplicationFailure( error.message, error.constructor.name );
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
};
|
|
37
|
+
|
|
21
38
|
export const interceptors = () => ( {
|
|
39
|
+
inbound: [ new WorkflowExecutionInterceptor() ],
|
|
22
40
|
outbound: [ new HeadersInjectionInterceptor( workflowInfo().workflowType ) ]
|
|
23
41
|
} );
|
package/src/worker/loader.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { dirname, join } from 'path';
|
|
2
2
|
import { fileURLToPath } from 'url';
|
|
3
|
-
import {
|
|
4
|
-
import {
|
|
3
|
+
import { sendWebhook, readTraceFile } from '#internal_activities';
|
|
4
|
+
import { ACTIVITY_SEND_WEBHOOK, ACTIVITY_READ_TRACE_FILE, WORKFLOWS_INDEX_FILENAME } from '#consts';
|
|
5
5
|
import {
|
|
6
6
|
iteratorOverImportedComponents,
|
|
7
7
|
recursiveNavigateWhileCollecting,
|
|
@@ -12,16 +12,16 @@ const __dirname = dirname( fileURLToPath( import.meta.url ) );
|
|
|
12
12
|
|
|
13
13
|
// returns a map of activities, where the key is they path + name and the value is the function with metadata
|
|
14
14
|
export async function loadActivities( path ) {
|
|
15
|
-
const activityPaths = recursiveNavigateWhileCollecting( path, [ 'steps.js' ] );
|
|
15
|
+
const activityPaths = recursiveNavigateWhileCollecting( path, [ 'steps.js', 'evaluators.js' ] );
|
|
16
16
|
const activities = [];
|
|
17
17
|
for await ( const { component, metadata, pathname, path } of iteratorOverImportedComponents( activityPaths ) ) {
|
|
18
|
-
console.log( '[Core.Scanner]', '
|
|
18
|
+
console.log( '[Core.Scanner]', 'Component loaded:', metadata.type, metadata.name, 'at', pathname );
|
|
19
19
|
activities[`${path}#${metadata.name}`] = component;
|
|
20
20
|
}
|
|
21
21
|
|
|
22
22
|
// system activities
|
|
23
|
-
activities[
|
|
24
|
-
activities[
|
|
23
|
+
activities[ACTIVITY_SEND_WEBHOOK] = sendWebhook;
|
|
24
|
+
activities[ACTIVITY_READ_TRACE_FILE] = readTraceFile;
|
|
25
25
|
return activities;
|
|
26
26
|
};
|
|
27
27
|
|
|
@@ -1,18 +1,15 @@
|
|
|
1
1
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
2
2
|
|
|
3
|
-
const METADATA_ACCESS_SYMBOL = Symbol( '__metadata' );
|
|
4
|
-
|
|
5
3
|
vi.mock( '#consts', () => ( {
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js'
|
|
9
|
-
METADATA_ACCESS_SYMBOL
|
|
4
|
+
ACTIVITY_SEND_WEBHOOK: '__internal#sendWebhook',
|
|
5
|
+
ACTIVITY_READ_TRACE_FILE: '__internal#readTraceFile',
|
|
6
|
+
WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js'
|
|
10
7
|
} ) );
|
|
11
8
|
|
|
12
|
-
const
|
|
9
|
+
const sendWebhookMock = vi.fn();
|
|
13
10
|
const readTraceFileMock = vi.fn();
|
|
14
11
|
vi.mock( '#internal_activities', () => ( {
|
|
15
|
-
|
|
12
|
+
sendWebhook: sendWebhookMock,
|
|
16
13
|
readTraceFile: readTraceFileMock
|
|
17
14
|
} ) );
|
|
18
15
|
|
|
@@ -41,7 +38,7 @@ describe( 'worker/loader', () => {
|
|
|
41
38
|
|
|
42
39
|
const activities = await loadActivities( '/root' );
|
|
43
40
|
expect( activities['/a#Act1'] ).toBeTypeOf( 'function' );
|
|
44
|
-
expect( activities['__internal#
|
|
41
|
+
expect( activities['__internal#sendWebhook'] ).toBe( sendWebhookMock );
|
|
45
42
|
expect( activities['__internal#readTraceFile'] ).toBe( readTraceFileMock );
|
|
46
43
|
} );
|
|
47
44
|
|
package/src/worker/sinks.js
CHANGED
|
@@ -1,16 +1,62 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { trace } from './tracer/index.js';
|
|
3
|
-
import { THIS_LIB_NAME } from '#consts';
|
|
1
|
+
import { addEventStart, addEventEnd, addEventError } from '#tracing';
|
|
4
2
|
|
|
3
|
+
/**
|
|
4
|
+
* Start a workflow trace event
|
|
5
|
+
*
|
|
6
|
+
* @param {function} method - Trace function to call
|
|
7
|
+
* @param {object} workflowInfo - Temporal workflowInfo object
|
|
8
|
+
* @param {object} details - The details to attach to the event
|
|
9
|
+
*/
|
|
10
|
+
const addWorkflowEvent = ( method, workflowInfo, details ) => {
|
|
11
|
+
const { workflowId: id, workflowType: name, memo: { parentId, traceId, traceHelm } } = workflowInfo;
|
|
12
|
+
method( { id, kind: 'workflow', name, details, parentId, traceId, traceHelm } );
|
|
13
|
+
};
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Start a trace event with given configuration
|
|
17
|
+
*
|
|
18
|
+
* @param {function} method - Trace function to call
|
|
19
|
+
* @param {object} workflowInfo - Temporal workflowInfo object
|
|
20
|
+
* @param {object} options - Trace options, like id, kind, name and details
|
|
21
|
+
*/
|
|
22
|
+
const addEvent = ( method, workflowInfo, options ) => {
|
|
23
|
+
const { id, name, kind, details } = options;
|
|
24
|
+
const { workflowId, memo: { traceId, traceHelm } } = workflowInfo;
|
|
25
|
+
method( { id, kind, name, details, parentId: workflowId, traceId, traceHelm } );
|
|
26
|
+
};
|
|
27
|
+
|
|
28
|
+
// This sink allow for sandbox Temporal environment to send trace logs back to the main thread.
|
|
5
29
|
export const sinks = {
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
30
|
+
trace: {
|
|
31
|
+
addWorkflowEventStart: {
|
|
32
|
+
fn: ( ...args ) => addWorkflowEvent( addEventStart, ...args ),
|
|
33
|
+
callDuringReplay: false
|
|
34
|
+
},
|
|
35
|
+
|
|
36
|
+
addWorkflowEventEnd: {
|
|
37
|
+
fn: ( ...args ) => addWorkflowEvent( addEventEnd, ...args ),
|
|
38
|
+
callDuringReplay: false
|
|
39
|
+
},
|
|
40
|
+
|
|
41
|
+
addWorkflowEventError: {
|
|
42
|
+
fn: ( ...args ) => addWorkflowEvent( addEventError, ...args ),
|
|
13
43
|
callDuringReplay: false
|
|
44
|
+
},
|
|
45
|
+
|
|
46
|
+
addEventStart: {
|
|
47
|
+
fn: ( ...args ) => addEvent( addEventStart, ...args ),
|
|
48
|
+
callDuringReplay: false
|
|
49
|
+
},
|
|
50
|
+
|
|
51
|
+
addEventEnd: {
|
|
52
|
+
fn: ( ...args ) => addEvent( addEventEnd, ...args ),
|
|
53
|
+
callDuringReplay: false
|
|
54
|
+
},
|
|
55
|
+
|
|
56
|
+
addEventError: {
|
|
57
|
+
fn: ( ...args ) => addEvent( addEventError, ...args ),
|
|
58
|
+
callDuringReplay: false
|
|
59
|
+
|
|
14
60
|
}
|
|
15
61
|
}
|
|
16
62
|
};
|
|
@@ -2,9 +2,11 @@ import traverseModule from '@babel/traverse';
|
|
|
2
2
|
import {
|
|
3
3
|
buildWorkflowNameMap,
|
|
4
4
|
getLocalNameFromDestructuredProperty,
|
|
5
|
+
isEvaluatorsPath,
|
|
5
6
|
isStepsPath,
|
|
6
7
|
isWorkflowPath,
|
|
7
8
|
buildStepsNameMap,
|
|
9
|
+
buildEvaluatorsNameMap,
|
|
8
10
|
toAbsolutePath
|
|
9
11
|
} from './tools.js';
|
|
10
12
|
import {
|
|
@@ -34,15 +36,16 @@ const traverse = traverseModule.default ?? traverseModule;
|
|
|
34
36
|
* @returns {{ stepImports: Array<{localName:string,stepName:string}>,
|
|
35
37
|
* flowImports: Array<{localName:string,workflowName:string}> }} Collected info mappings.
|
|
36
38
|
*/
|
|
37
|
-
export default function collectTargetImports( ast, fileDir, { stepsNameCache, workflowNameCache } ) {
|
|
39
|
+
export default function collectTargetImports( ast, fileDir, { stepsNameCache, workflowNameCache, evaluatorsNameCache } ) {
|
|
38
40
|
const stepImports = [];
|
|
39
41
|
const flowImports = [];
|
|
42
|
+
const evaluatorImports = [];
|
|
40
43
|
|
|
41
44
|
traverse( ast, {
|
|
42
45
|
ImportDeclaration: path => {
|
|
43
46
|
const src = path.node.source.value;
|
|
44
47
|
// Ignore other imports
|
|
45
|
-
if ( !isStepsPath( src ) && !isWorkflowPath( src ) ) {
|
|
48
|
+
if ( !isStepsPath( src ) && !isWorkflowPath( src ) && !isEvaluatorsPath( src ) ) {
|
|
46
49
|
return;
|
|
47
50
|
}
|
|
48
51
|
|
|
@@ -58,6 +61,17 @@ export default function collectTargetImports( ast, fileDir, { stepsNameCache, wo
|
|
|
58
61
|
}
|
|
59
62
|
}
|
|
60
63
|
}
|
|
64
|
+
if ( isEvaluatorsPath( src ) ) {
|
|
65
|
+
const nameMap = buildEvaluatorsNameMap( absolutePath, evaluatorsNameCache );
|
|
66
|
+
for ( const s of path.node.specifiers.filter( s => isImportSpecifier( s ) ) ) {
|
|
67
|
+
const importedName = s.imported.name;
|
|
68
|
+
const localName = s.local.name;
|
|
69
|
+
const evaluatorName = nameMap.get( importedName );
|
|
70
|
+
if ( evaluatorName ) {
|
|
71
|
+
evaluatorImports.push( { localName, evaluatorName } );
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
}
|
|
61
75
|
if ( isWorkflowPath( src ) ) {
|
|
62
76
|
const { named, default: defName } = buildWorkflowNameMap( absolutePath, workflowNameCache );
|
|
63
77
|
for ( const s of path.node.specifiers ) {
|
|
@@ -94,7 +108,7 @@ export default function collectTargetImports( ast, fileDir, { stepsNameCache, wo
|
|
|
94
108
|
|
|
95
109
|
const req = firstArgument.value;
|
|
96
110
|
// Must be steps/workflows module
|
|
97
|
-
if ( !isStepsPath( req ) && !isWorkflowPath( req ) ) {
|
|
111
|
+
if ( !isStepsPath( req ) && !isWorkflowPath( req ) && !isEvaluatorsPath( req ) ) {
|
|
98
112
|
return;
|
|
99
113
|
}
|
|
100
114
|
|
|
@@ -116,6 +130,23 @@ export default function collectTargetImports( ast, fileDir, { stepsNameCache, wo
|
|
|
116
130
|
} else {
|
|
117
131
|
path.remove();
|
|
118
132
|
}
|
|
133
|
+
} else if ( isEvaluatorsPath( req ) && isObjectPattern( path.node.id ) ) {
|
|
134
|
+
const nameMap = buildEvaluatorsNameMap( absolutePath, evaluatorsNameCache );
|
|
135
|
+
for ( const prop of path.node.id.properties.filter( prop => isObjectProperty( prop ) && isIdentifier( prop.key ) ) ) {
|
|
136
|
+
const importedName = prop.key.name;
|
|
137
|
+
const localName = getLocalNameFromDestructuredProperty( prop );
|
|
138
|
+
if ( localName ) {
|
|
139
|
+
const evaluatorName = nameMap.get( importedName );
|
|
140
|
+
if ( evaluatorName ) {
|
|
141
|
+
evaluatorImports.push( { localName, evaluatorName } );
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
if ( isVariableDeclaration( path.parent ) && path.parent.declarations.length === 1 ) {
|
|
146
|
+
path.parentPath.remove();
|
|
147
|
+
} else {
|
|
148
|
+
path.remove();
|
|
149
|
+
}
|
|
119
150
|
} else if ( isWorkflowPath( req ) && isIdentifier( path.node.id ) ) {
|
|
120
151
|
const { default: defName } = buildWorkflowNameMap( absolutePath, workflowNameCache );
|
|
121
152
|
const localName = path.node.id.name;
|
|
@@ -129,5 +160,5 @@ export default function collectTargetImports( ast, fileDir, { stepsNameCache, wo
|
|
|
129
160
|
}
|
|
130
161
|
} );
|
|
131
162
|
|
|
132
|
-
return { stepImports, flowImports };
|
|
163
|
+
return { stepImports, evaluatorImports, flowImports };
|
|
133
164
|
};
|