@output.ai/core 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +114 -30
- package/package.json +9 -6
- package/src/consts.js +3 -1
- package/src/index.d.ts +36 -4
- package/src/interface/evaluator.js +12 -8
- package/src/interface/step.js +4 -4
- package/src/interface/validations/static.js +16 -2
- package/src/interface/validations/static.spec.js +20 -0
- package/src/interface/workflow.js +28 -25
- package/src/interface/zod_integration.spec.js +6 -6
- package/src/internal_activities/index.js +1 -33
- package/src/tracing/index.d.ts +4 -4
- package/src/tracing/index.js +12 -121
- package/src/tracing/internal_interface.js +66 -0
- package/src/tracing/processors/local/index.js +50 -0
- package/src/tracing/processors/local/index.spec.js +67 -0
- package/src/tracing/processors/s3/index.js +51 -0
- package/src/tracing/processors/s3/index.spec.js +64 -0
- package/src/tracing/processors/s3/redis_client.js +19 -0
- package/src/tracing/processors/s3/redis_client.spec.js +50 -0
- package/src/tracing/processors/s3/s3_client.js +33 -0
- package/src/tracing/processors/s3/s3_client.spec.js +67 -0
- package/src/tracing/{tracer_tree.js → tools/build_trace_tree.js} +4 -11
- package/src/tracing/{tracer_tree.spec.js → tools/build_trace_tree.spec.js} +4 -20
- package/src/tracing/{utils.js → tools/utils.js} +7 -0
- package/src/tracing/trace_engine.js +63 -0
- package/src/tracing/trace_engine.spec.js +91 -0
- package/src/utils.js +37 -0
- package/src/utils.spec.js +60 -0
- package/src/worker/catalog_workflow/index.js +2 -1
- package/src/worker/catalog_workflow/index.spec.js +6 -10
- package/src/worker/configs.js +24 -0
- package/src/worker/index.js +7 -4
- package/src/worker/interceptors/activity.js +7 -14
- package/src/worker/interceptors/workflow.js +11 -3
- package/src/worker/loader.js +65 -29
- package/src/worker/loader.spec.js +32 -25
- package/src/worker/loader_tools.js +63 -0
- package/src/worker/loader_tools.spec.js +85 -0
- package/src/worker/sinks.js +8 -4
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.js +38 -20
- package/src/worker/webpack_loaders/workflow_rewriter/index.mjs +5 -4
- package/src/worker/webpack_loaders/workflow_rewriter/index.spec.js +48 -0
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.js +16 -20
- package/src/worker/webpack_loaders/workflow_rewriter/tools.js +23 -0
- package/src/configs.js +0 -31
- package/src/configs.spec.js +0 -331
- package/src/interface/metadata.js +0 -4
- package/src/tracing/index.private.spec.js +0 -84
- package/src/tracing/index.public.spec.js +0 -86
- package/src/worker/internal_utils.js +0 -60
- package/src/worker/internal_utils.spec.js +0 -134
- package/src/tracing/{utils.spec.js → tools/utils.spec.js} +0 -0
package/src/tracing/processors/s3/s3_client.spec.js
ADDED

@@ -0,0 +1,67 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+vi.mock( '#utils', () => ( {
+  throws: e => {
+    throw e;
+  }
+} ) );
+
+const sendMock = vi.fn();
+const ctorState = { args: null };
+class S3ClientMock {
+  constructor( args ) {
+    ctorState.args = args;
+  } send = sendMock;
+}
+class PutObjectCommandMock {
+  constructor( input ) {
+    this.input = input;
+  }
+}
+
+vi.mock( '@aws-sdk/client-s3', () => ( {
+  S3Client: S3ClientMock,
+  PutObjectCommand: PutObjectCommandMock
+} ) );
+
+async function loadModule() {
+  vi.resetModules();
+  return import( './s3_client.js' );
+}
+
+describe( 'tracing/processors/s3/s3_client', () => {
+  beforeEach( () => {
+    vi.clearAllMocks();
+    delete process.env.AWS_REGION;
+    delete process.env.AWS_SECRET_ACCESS_KEY;
+    delete process.env.AWS_ACCESS_KEY_ID;
+    delete process.env.TRACE_REMOTE_S3_BUCKET;
+  } );
+
+  it( 'fails fast when required env vars are missing for client creation', async () => {
+    const { upload } = await loadModule();
+    expect( () => upload( { key: 'k', content: 'c' } ) ).toThrow();
+  } );
+
+  it( 'creates client once with env and uploads with bucket/key/content', async () => {
+    process.env.AWS_REGION = 'us-east-1';
+    process.env.AWS_SECRET_ACCESS_KEY = 'sek';
+    process.env.AWS_ACCESS_KEY_ID = 'id';
+    process.env.TRACE_REMOTE_S3_BUCKET = 'bucket';
+
+    const { upload } = await loadModule();
+
+    await upload( { key: 'wf/key.json', content: '{"a":1}' } );
+
+    expect( ctorState.args ).toEqual( { region: 'us-east-1', secretAccessKey: 'sek', accessKeyId: 'id' } );
+    expect( sendMock ).toHaveBeenCalledTimes( 1 );
+    const cmd = sendMock.mock.calls[0][0];
+    expect( cmd ).toBeInstanceOf( PutObjectCommandMock );
+    expect( cmd.input ).toEqual( { Bucket: 'bucket', Key: 'wf/key.json', Body: '{"a":1}' } );
+
+    // subsequent upload uses cached client
+    await upload( { key: 'wf/key2.json', content: '{}' } );
+    expect( sendMock ).toHaveBeenCalledTimes( 2 );
+  } );
+} );
+
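The spec above pins down the contract of the new s3_client.js (+33 lines, listed above but not shown in this section): a module-cached S3Client built lazily from env vars, and an upload() that issues a PutObjectCommand. Below is a minimal sketch consistent with those tests; the getClient helper and the guard message are assumptions, not the published source:

import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { throws } from '#utils';

let client;

const getClient = () => {
  const { AWS_REGION, AWS_SECRET_ACCESS_KEY, AWS_ACCESS_KEY_ID, TRACE_REMOTE_S3_BUCKET } = process.env;
  if ( !AWS_REGION || !AWS_SECRET_ACCESS_KEY || !AWS_ACCESS_KEY_ID || !TRACE_REMOTE_S3_BUCKET ) {
    throws( new Error( 'Missing AWS env vars for S3 tracing' ) ); // hypothetical message
  }
  // cached after first creation, matching the "creates client once" expectation
  client ??= new S3Client( { region: AWS_REGION, secretAccessKey: AWS_SECRET_ACCESS_KEY, accessKeyId: AWS_ACCESS_KEY_ID } );
  return client;
};

export const upload = ( { key, content } ) =>
  getClient().send( new PutObjectCommand( { Bucket: process.env.TRACE_REMOTE_S3_BUCKET, Key: key, Body: content } ) );

Note that the spec asserts flat region/accessKeyId/secretAccessKey constructor args; the AWS SDK normally expects the key pair nested under a credentials option, so the sketch simply mirrors the tested contract.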
package/src/tracing/{tracer_tree.js → tools/build_trace_tree.js}
RENAMED

@@ -1,6 +1,3 @@
-import { readFileSync, writeFileSync } from 'node:fs';
-import { EOL } from 'os';
-
 /**
  * @typedef {object} NodeEntry
  * @property {string} id

@@ -34,7 +31,7 @@ const createEntry = id => ( {
 } );
 
 /**
- * Build a tree of nodes from
+ * Build a tree of nodes from a list of entries
  *
  * Each node will have: id, name, kind, children, input, output or error, startedAt, endedAt.
  *

@@ -48,14 +45,10 @@ const createEntry = id => ( {
  * Children are added according to the parentId of each entry.
  * The result tree has a single root: the only node without parentId, normally the workflow itself.
  *
- * @param {
+ * @param {object[]} entries - The list of entries
  * @returns {void}
  */
-export
-  const content = readFileSync( src, 'utf-8' );
-  const lines = content.split( EOL ).filter( l => l.trim().length > 0 );
-  const entries = lines.map( l => JSON.parse( l ) );
-
+export default entries => {
   const nodes = new Map();
   const ensureNode = id => nodes.get( id ) ?? nodes.set( id, createEntry( id ) ).get( id );
 

@@ -79,5 +72,5 @@ export const buildLogTree = src => {
   }
 
   const root = nodes.get( entries.find( e => !e.parentId ).id );
-
+  return root;
 };
package/src/tracing/{tracer_tree.spec.js → tools/build_trace_tree.spec.js}
RENAMED

@@ -1,18 +1,8 @@
 import { describe, it, expect } from 'vitest';
-import
-import { mkdtempSync } from 'node:fs';
-import { tmpdir } from 'node:os';
-import { join } from 'path';
-import { EOL } from 'os';
-import { buildLogTree } from './tracer_tree.js';
+import buildLogTree from './build_trace_tree.js';
 
-
-
-describe( 'tracer/tracer_tree', () => {
+describe( 'build_trace_tree', () => {
   it( 'builds a tree from workflow/step/IO entries with grouping and sorting', () => {
-    const tmp = createTempDir();
-    const rawPath = join( tmp, 'run-123.raw' );
-
     const entries = [
       // workflow start
       { kind: 'workflow', phase: 'start', name: 'wf', id: 'wf', parentId: undefined, details: { a: 1 }, timestamp: 1000 },

@@ -38,11 +28,7 @@ describe( 'tracer/tracer_tree', () => {
       { kind: 'workflow', phase: 'end', name: 'wf', id: 'wf', parentId: undefined, details: { ok: true }, timestamp: 3000 }
     ];
 
-
-
-    buildLogTree( rawPath );
-
-    const jsonText = readFileSync( rawPath.replace( /.raw$/, '.json' ), 'utf-8' );
+    const result = buildLogTree( entries );
 
     const expected = {
       id: 'wf',

@@ -108,8 +94,6 @@ describe( 'tracer/tracer_tree', () => {
       ]
     };
 
-    expect(
-
-    rmSync( tmp, { recursive: true, force: true } );
+    expect( result ).toMatchObject( expected );
   } );
 } );
package/src/tracing/{utils.js → tools/utils.js}
RENAMED

@@ -19,3 +19,10 @@ export const serializeError = error =>
   message: error.message,
   stack: error.stack
 };
+
+/**
+ * Returns true if string value is stringbool and true
+ * @param {string} v
+ * @returns
+ */
+export const isStringboolTrue = v => [ '1', 'true', 'on' ].includes( v );
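For reference, the flag check is an exact, case-sensitive membership test, so only the three listed spellings enable a processor:

isStringboolTrue( '1' );         // true
isStringboolTrue( 'on' );        // true
isStringboolTrue( 'TRUE' );      // false: matching is case-sensitive and exact
isStringboolTrue( undefined );   // false: unset env vars are treated as off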
package/src/tracing/trace_engine.js
ADDED

@@ -0,0 +1,63 @@
+import { Storage } from '#async_storage';
+import { EventEmitter } from 'node:events';
+import { serializeError, isStringboolTrue } from './tools/utils.js';
+import * as localProcessor from './processors/local/index.js';
+import * as s3Processor from './processors/s3/index.js';
+
+const traceBus = new EventEmitter();
+const processors = [
+  {
+    isOn: isStringboolTrue( process.env.TRACE_LOCAL_ON ),
+    init: localProcessor.init,
+    exec: localProcessor.exec
+  },
+  {
+    isOn: isStringboolTrue( process.env.TRACE_REMOTE_ON ),
+    init: s3Processor.init,
+    exec: s3Processor.exec
+  }
+];
+
+/**
+ * Starts processors based on env vars and attach them to the main bus to listen trace events
+ */
+export const init = async () => {
+  for ( const p of processors.filter( p => p.isOn ) ) {
+    await p.init();
+    traceBus.addListener( 'entry', p.exec );
+  }
+};
+
+/**
+ * Serialize details of an event
+ */
+const serializeDetails = details => details instanceof Error ? serializeError( details ) : details;
+
+/**
+ * Creates a new trace event phase and sens to be written
+ *
+ * @param {string} phase - The phase
+ * @param {object} fields - All the trace fields
+ * @returns {void}
+ */
+export const addEventPhase = ( phase, { kind, name, id, parentId, details, executionContext } ) =>
+  traceBus.emit( 'entry', {
+    executionContext,
+    entry: { kind, phase, name, id, parentId, phase, timestamp: Date.now(), details: serializeDetails( details ) }
+  } );
+
+/**
+ * Adds an Event Phase, complementing the options with parentId and executionContext from the async storage.
+ *
+ * This function will have no effect if called from outside an Temporal Workflow/Activity environment,
+ * so it is safe to be used on unit tests or any dependencies that might be used elsewhere
+ *
+ * @param {object} options - The common trace configurations
+ */
+export function addEventPhaseWithContext( phase, options ) {
+  const storeContent = Storage.load();
+  if ( storeContent ) { // If there is no storageContext this was not called from an Temporal Environment
+    const { parentId, executionContext } = storeContent;
+    addEventPhase( phase, { ...options, parentId, executionContext } );
+  }
+};
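A sketch of how a caller is expected to drive this engine; the step names and ids are illustrative, and note that the isOn flags are computed at import time, so the env vars must be set before the module loads (the spec below uses vi.resetModules() for the same reason):

process.env.TRACE_LOCAL_ON = '1'; // must be set before the module is loaded
const { init, addEventPhase } = await import( './trace_engine.js' );

await init(); // attaches each enabled processor's exec() to the 'entry' bus

// each call fans out synchronously to every attached processor
addEventPhase( 'start', { kind: 'step', name: 'fetch_user', id: 's1', parentId: 'wf1', details: { userId: 42 } } );
addEventPhase( 'end', { kind: 'step', name: 'fetch_user', id: 's1', parentId: 'wf1', details: { ok: true } } );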
package/src/tracing/trace_engine.spec.js
ADDED

@@ -0,0 +1,91 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+const storageLoadMock = vi.fn();
+vi.mock( '#async_storage', () => ( {
+  Storage: { load: storageLoadMock }
+} ) );
+
+const localInitMock = vi.fn( async () => {} );
+const localExecMock = vi.fn();
+vi.mock( './processors/local/index.js', () => ( {
+  init: localInitMock,
+  exec: localExecMock
+} ) );
+
+const s3InitMock = vi.fn( async () => {} );
+const s3ExecMock = vi.fn();
+vi.mock( './processors/s3/index.js', () => ( {
+  init: s3InitMock,
+  exec: s3ExecMock
+} ) );
+
+async function loadTraceEngine() {
+  vi.resetModules();
+  return import( './trace_engine.js' );
+}
+
+describe( 'tracing/trace_engine', () => {
+  beforeEach( () => {
+    vi.clearAllMocks();
+    delete process.env.TRACE_LOCAL_ON;
+    delete process.env.TRACE_REMOTE_ON;
+    storageLoadMock.mockReset();
+  } );
+
+  it( 'init() starts only enabled processors and attaches listeners', async () => {
+    process.env.TRACE_LOCAL_ON = '1';
+    process.env.TRACE_REMOTE_ON = '0';
+    const { init, addEventPhase } = await loadTraceEngine();
+
+    await init();
+
+    expect( localInitMock ).toHaveBeenCalledTimes( 1 );
+    expect( s3InitMock ).not.toHaveBeenCalled();
+
+    addEventPhase( 'start', { kind: 'step', name: 'N', id: '1', parentId: 'p', details: { ok: true } } );
+    expect( localExecMock ).toHaveBeenCalledTimes( 1 );
+    const payload = localExecMock.mock.calls[0][0];
+    expect( payload.entry.name ).toBe( 'N' );
+    expect( payload.entry.kind ).toBe( 'step' );
+    expect( payload.entry.phase ).toBe( 'start' );
+    expect( payload.entry.details ).toEqual( { ok: true } );
+  } );
+
+  it( 'addEventPhase() emits an entry consumed by processors', async () => {
+    process.env.TRACE_LOCAL_ON = 'on';
+    const { init, addEventPhase } = await loadTraceEngine();
+    await init();
+
+    addEventPhase( 'end', { kind: 'workflow', name: 'W', id: '2', parentId: 'p2', details: 'done' } );
+    expect( localExecMock ).toHaveBeenCalledTimes( 1 );
+    const payload = localExecMock.mock.calls[0][0];
+    expect( payload.entry.name ).toBe( 'W' );
+    expect( payload.entry.phase ).toBe( 'end' );
+    expect( payload.entry.details ).toBe( 'done' );
+  } );
+
+  it( 'addEventPhaseWithContext() uses storage when available', async () => {
+    process.env.TRACE_LOCAL_ON = 'true';
+    storageLoadMock.mockReturnValue( { parentId: 'ctx-p', executionContext: { runId: 'r1' } } );
+    const { init, addEventPhaseWithContext } = await loadTraceEngine();
+    await init();
+
+    addEventPhaseWithContext( 'tick', { kind: 'step', name: 'S', id: '3', details: 1 } );
+    expect( localExecMock ).toHaveBeenCalledTimes( 1 );
+    const payload = localExecMock.mock.calls[0][0];
+    expect( payload.executionContext ).toEqual( { runId: 'r1' } );
+    expect( payload.entry.parentId ).toBe( 'ctx-p' );
+    expect( payload.entry.name ).toBe( 'S' );
+    expect( payload.entry.phase ).toBe( 'tick' );
+  } );
+
+  it( 'addEventPhaseWithContext() is a no-op when storage is absent', async () => {
+    process.env.TRACE_LOCAL_ON = '1';
+    storageLoadMock.mockReturnValue( undefined );
+    const { init, addEventPhaseWithContext } = await loadTraceEngine();
+    await init();
+
+    addEventPhaseWithContext( 'noop', { kind: 'step', name: 'X', id: '4', details: null } );
+    expect( localExecMock ).not.toHaveBeenCalled();
+  } );
+} );
package/src/utils.js
ADDED

@@ -0,0 +1,37 @@
+import { METADATA_ACCESS_SYMBOL } from '#consts';
+
+/**
+ * Node safe clone implementation that doesn't use global structuredClone()
+ * @param {object} v
+ * @returns {object}
+ */
+export const clone = v => JSON.parse( JSON.stringify( v ) );
+
+/**
+ * Throw given error
+ * @param {Error} e
+ * @throws {e}
+ */
+export const throws = e => {
+  throw e;
+};
+
+/**
+ * Add metadata "values" property to a given object
+ * @param {object} target
+ * @param {object} values
+ * @returns
+ */
+export const setMetadata = ( target, values ) =>
+  Object.defineProperty( target, METADATA_ACCESS_SYMBOL, { value: values, writable: false, enumerable: false, configurable: false } );
+
+/**
+ * Merge two temporal activity options
+ * @param {import('@temporalio/workflow').ActivityOptions} base
+ * @param {import('@temporalio/workflow').ActivityOptions} ext
+ * @returns {import('@temporalio/workflow').ActivityOptions}
+ */
+export const mergeActivityOptions = ( base = {}, ext = {} ) =>
+  Object.entries( ext ).reduce( ( options, [ k, v ] ) =>
+    Object.assign( options, { [k]: typeof v === 'object' ? mergeActivityOptions( options[k], v ) : v } )
+  , clone( base ) );
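Worth noting for callers of clone(): a JSON round-trip only preserves JSON-serializable data. An illustrative caveat:

clone( { n: 1, when: new Date( 0 ), fn: () => 1, u: undefined } );
// → { n: 1, when: '1970-01-01T00:00:00.000Z' }
// Dates are coerced to ISO strings; functions and undefined properties are dropped.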
package/src/utils.spec.js
ADDED

@@ -0,0 +1,60 @@
+import { describe, it, expect } from 'vitest';
+import { clone, mergeActivityOptions } from './utils.js';
+
+describe( 'clone', () => {
+  it( 'produces a deep copy without shared references', () => {
+    const original = { a: 1, nested: { b: 2 } };
+    const copied = clone( original );
+
+    copied.nested.b = 3;
+
+    expect( original.nested.b ).toBe( 2 );
+    expect( copied.nested.b ).toBe( 3 );
+    expect( copied ).not.toBe( original );
+  } );
+} );
+
+describe( 'mergeActivityOptions', () => {
+  it( 'recursively merges nested objects', () => {
+    const base = {
+      taskQueue: 'q1',
+      retry: { maximumAttempts: 3, backoffCoefficient: 2 }
+    };
+    const ext = {
+      retry: { maximumAttempts: 5, initialInterval: '1s' }
+    };
+
+    const result = mergeActivityOptions( base, ext );
+
+    expect( result ).toEqual( {
+      taskQueue: 'q1',
+      retry: { maximumAttempts: 5, backoffCoefficient: 2, initialInterval: '1s' }
+    } );
+  } );
+
+  it( 'omitted properties in second do not overwrite first', () => {
+    const base = {
+      taskQueue: 'q2',
+      retry: { initialInterval: '2s', backoffCoefficient: 2 }
+    };
+    const ext = {
+      retry: { backoffCoefficient: 3 }
+    };
+
+    const result = mergeActivityOptions( base, ext );
+
+    expect( result.retry.initialInterval ).toBe( '2s' );
+    expect( result.retry.backoffCoefficient ).toBe( 3 );
+    expect( result.taskQueue ).toBe( 'q2' );
+  } );
+
+  it( 'handles omitted second argument by returning a clone', () => {
+    const base = { taskQueue: 'q3', retry: { maximumAttempts: 2 } };
+
+    const result = mergeActivityOptions( base );
+
+    expect( result ).toEqual( base );
+    expect( result ).not.toBe( base );
+  } );
+} );
+
package/src/worker/catalog_workflow/index.js
CHANGED

@@ -1,4 +1,5 @@
 import { z } from 'zod';
+import { dirname } from 'node:path';
 import { METADATA_ACCESS_SYMBOL } from '#consts';
 import { Catalog, CatalogActivity, CatalogWorkflow } from './catalog.js';
 

@@ -37,7 +38,7 @@ export const createCatalog = ( { workflows, activities } ) =>
       inputSchema: convertToJsonSchema( workflow.inputSchema ),
       outputSchema: convertToJsonSchema( workflow.outputSchema ),
       activities: Object.entries( activities )
-        .filter( ( [ k ] ) => k.startsWith( `${workflow.path}#` ) )
+        .filter( ( [ k ] ) => k.startsWith( `${dirname( workflow.path )}#` ) )
        .map( ( [ _, v ] ) => {
           const metadata = v[METADATA_ACCESS_SYMBOL];
           return new CatalogActivity( {
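The filter change follows from workflow.path now pointing at the workflow file itself rather than its directory, as the spec below shows. Illustratively (the activity key is hypothetical; the `${directory}#${activityName}` format is implied by the filter):

import { dirname } from 'node:path';

const key = '/flows/flow1#myActivity';
key.startsWith( `${dirname( '/flows/flow1/workflow.js' )}#` ); // true: dirname() recovers the directory-level prefix the keys use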
package/src/worker/catalog_workflow/index.spec.js
CHANGED

@@ -17,16 +17,14 @@ describe( 'createCatalog', () => {
   const workflows = [
     {
       name: 'flow1',
-      path: '/flows/flow1',
-      pathname: '/flows/flow1/workflow.js',
+      path: '/flows/flow1/workflow.js',
       description: 'desc-flow1',
       inputSchema: z.object( { in: z.literal( 'f1' ) } ),
       outputSchema: z.object( { out: z.literal( 'f1' ) } )
     },
     {
       name: 'flow2',
-      path: '/flows/flow2',
-      pathname: '/flows/flow2/workflow.js',
+      path: '/flows/flow2/workflow.js',
       description: 'desc-flow2',
       inputSchema: z.object( { in: z.literal( 'f2' ) } ),
       outputSchema: z.object( { out: z.literal( 'f2' ) } )

@@ -95,7 +93,7 @@ describe( 'createCatalog', () => {
     expect( mapped ).toEqual( [
       {
         name: 'flow1',
-        path: '/flows/flow1',
+        path: '/flows/flow1/workflow.js',
         description: 'desc-flow1',
         inputSchema: {
           $schema: 'https://json-schema.org/draft/2020-12/schema',

@@ -152,7 +150,7 @@ describe( 'createCatalog', () => {
       },
       {
         name: 'flow2',
-        path: '/flows/flow2',
+        path: '/flows/flow2/workflow.js',
         description: 'desc-flow2',
         inputSchema: {
           $schema: 'https://json-schema.org/draft/2020-12/schema',

@@ -192,9 +190,7 @@ describe( 'createCatalog', () => {
     ] );
 
     // Original inputs are not mutated
-    expect( workflows[0].path ).toBe( '/flows/flow1' );
-    expect( workflows[
-    expect( workflows[1].path ).toBe( '/flows/flow2' );
-    expect( workflows[1].pathname ).toBe( '/flows/flow2/workflow.js' );
+    expect( workflows[0].path ).toBe( '/flows/flow1/workflow.js' );
+    expect( workflows[1].path ).toBe( '/flows/flow2/workflow.js' );
   } );
 } );
package/src/worker/configs.js
ADDED

@@ -0,0 +1,24 @@
+import * as z from 'zod';
+
+class InvalidEnvVarsErrors extends Error { }
+
+const envVarSchema = z.object( {
+  CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
+  TEMPORAL_ADDRESS: z.string().default( 'localhost:7233' ),
+  TEMPORAL_API_KEY: z.string().optional(),
+  TEMPORAL_NAMESPACE: z.string().optional().default( 'default' )
+} );
+
+const { data: envVars, error } = envVarSchema.safeParse( process.env );
+if ( error ) {
+  throw new InvalidEnvVarsErrors( z.prettifyError( error ) );
+}
+
+export const address = envVars.TEMPORAL_ADDRESS;
+export const apiKey = envVars.TEMPORAL_API_KEY;
+export const executionTimeout = '1m';
+export const maxActivities = 100;
+export const maxWorkflows = 100;
+export const namespace = envVars.TEMPORAL_NAMESPACE;
+export const taskQueue = envVars.CATALOG_ID;
+export const catalogId = envVars.CATALOG_ID;
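Because validation runs at module load, a misconfigured environment fails the worker at import time rather than mid-run. A standalone sketch of the same check (sample values are hypothetical):

import * as z from 'zod';

const schema = z.object( { CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ) } );

schema.safeParse( { CATALOG_ID: 'my-catalog@1.0' } ).success; // true
const { error } = schema.safeParse( { CATALOG_ID: 'has spaces' } );
console.log( z.prettifyError( error ) ); // human-readable report, as thrown via InvalidEnvVarsErrors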
package/src/worker/index.js
CHANGED

@@ -3,16 +3,16 @@ import { Client } from '@temporalio/client';
 import { WorkflowIdConflictPolicy } from '@temporalio/common';
 import { dirname, join } from 'path';
 import { fileURLToPath } from 'node:url';
-import {
+import { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } from './configs.js';
 import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
 import { ActivityExecutionInterceptor } from './interceptors/activity.js';
 import { sinks } from './sinks.js';
 import { createCatalog } from './catalog_workflow/index.js';
+import { init as initTracing } from '#tracing';
+import { WORKFLOW_CATALOG } from '#consts';
 
 const __dirname = dirname( fileURLToPath( import.meta.url ) );
 
-const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } = workerConfig;
-
 // Get caller directory from command line arguments
 const callerDir = process.argv[2];
 

@@ -26,6 +26,9 @@ const callerDir = process.argv[2];
 console.log( '[Core]', 'Creating worker entry point...' );
 const workflowsPath = createWorkflowsEntryPoint( workflows );
 
+console.log( '[Core]', 'Initializing tracing...' );
+await initTracing();
+
 console.log( '[Core]', 'Creating workflows catalog...' );
 const catalog = createCatalog( { workflows, activities } );
 

@@ -69,7 +72,7 @@ const callerDir = process.argv[2];
 } );
 
 console.log( '[Core]', 'Starting catalog workflow...' );
-await new Client( { connection, namespace } ).workflow.start(
+await new Client( { connection, namespace } ).workflow.start( WORKFLOW_CATALOG, {
   taskQueue,
   workflowId: catalogId, // use the name of the task queue as the catalog name, ensuring uniqueness
   workflowIdConflictPolicy: WorkflowIdConflictPolicy.TERMINATE_EXISTING,
package/src/worker/interceptors/activity.js
CHANGED

@@ -20,26 +20,19 @@ export class ActivityExecutionInterceptor {
 
   async execute( input, next ) {
     const { workflowExecution: { workflowId }, activityId, activityType } = Context.current().info;
-    const {
-    const { type: kind
+    const { executionContext } = headersToObject( input.headers );
+    const { type: kind } = this.activities?.[activityType]?.[METADATA_ACCESS_SYMBOL];
 
-    const
-
-    if ( !skipTrace ) {
-      addEventStart( { details: input.args[0], ...traceContext } );
-    }
+    const traceArguments = { kind, id: activityId, parentId: workflowId, name: activityType, executionContext };
+    addEventStart( { details: input.args[0], ...traceArguments } );
 
     // creates a context for the nested tracing
     try {
-      const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId,
-
-      addEventEnd( { details: output, ...traceContext } );
-      }
+      const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId, executionContext } );
+      addEventEnd( { details: output, ...traceArguments } );
       return output;
     } catch ( error ) {
-
-      addEventError( { details: error, ...traceContext } );
-      }
+      addEventError( { details: error, ...traceArguments } );
       throw error;
     }
   }
package/src/worker/interceptors/workflow.js
CHANGED

@@ -1,6 +1,9 @@
 // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
 import { workflowInfo, proxySinks, ApplicationFailure } from '@temporalio/workflow';
 import { memoToHeaders } from '../sandboxed_utils.js';
+import { mergeActivityOptions } from '#utils';
+// this is a dynamic generated file with activity configs overwrites
+import stepOptions from '../temp/__activity_options.js';
 
 /*
 This is not an AI comment!

@@ -13,7 +16,13 @@ import { memoToHeaders } from '../sandboxed_utils.js';
 */
 class HeadersInjectionInterceptor {
   async scheduleActivity( input, next ) {
-
+    const memo = workflowInfo().memo ?? {};
+    Object.assign( input.headers, memoToHeaders( memo ) );
+    // apply per-invocation options passed as second argument by rewritten calls
+    const options = stepOptions[input.activityType];
+    if ( options ) {
+      input.options = mergeActivityOptions( memo.activityOptions, options );
+    }
     return next( input );
   }
 };

@@ -25,8 +34,7 @@ class WorkflowExecutionInterceptor {
     sinks.trace.addWorkflowEventStart( input.args[0] );
     try {
       const output = await next( input );
-
-      sinks.trace.addWorkflowEventEnd( !workflowInfo().memo.parentId ? output.output : output );
+      sinks.trace.addWorkflowEventEnd( output );
       return output;
     } catch ( error ) {
       sinks.trace.addWorkflowEventError( error );
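Taken together with mergeActivityOptions from package/src/utils.js, the scheduleActivity hook lets a generated __activity_options.js entry override the workflow-level defaults carried in the memo. An illustrative merge (the option values and activity name are hypothetical):

import { mergeActivityOptions } from '#utils';

mergeActivityOptions(
  { startToCloseTimeout: '1m', retry: { maximumAttempts: 3, backoffCoefficient: 2 } }, // memo.activityOptions
  { retry: { maximumAttempts: 1 } }                                                    // stepOptions[input.activityType]
);
// → { startToCloseTimeout: '1m', retry: { maximumAttempts: 1, backoffCoefficient: 2 } }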