@output.ai/core 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +16 -22
  2. package/package.json +8 -6
  3. package/src/consts.js +1 -1
  4. package/src/interface/evaluator.js +8 -4
  5. package/src/interface/workflow.js +11 -14
  6. package/src/internal_activities/index.js +0 -32
  7. package/src/tracing/index.d.ts +4 -4
  8. package/src/tracing/index.js +12 -121
  9. package/src/tracing/internal_interface.js +66 -0
  10. package/src/tracing/processors/local/index.js +50 -0
  11. package/src/tracing/processors/local/index.spec.js +67 -0
  12. package/src/tracing/processors/s3/index.js +51 -0
  13. package/src/tracing/processors/s3/index.spec.js +64 -0
  14. package/src/tracing/processors/s3/redis_client.js +19 -0
  15. package/src/tracing/processors/s3/redis_client.spec.js +50 -0
  16. package/src/tracing/processors/s3/s3_client.js +33 -0
  17. package/src/tracing/processors/s3/s3_client.spec.js +67 -0
  18. package/src/tracing/{tracer_tree.js → tools/build_trace_tree.js} +4 -11
  19. package/src/tracing/{tracer_tree.spec.js → tools/build_trace_tree.spec.js} +4 -20
  20. package/src/tracing/{utils.js → tools/utils.js} +7 -0
  21. package/src/tracing/trace_engine.js +63 -0
  22. package/src/tracing/trace_engine.spec.js +91 -0
  23. package/src/utils.js +8 -0
  24. package/src/worker/catalog_workflow/index.js +2 -1
  25. package/src/worker/catalog_workflow/index.spec.js +6 -10
  26. package/src/worker/configs.js +24 -0
  27. package/src/worker/index.js +7 -4
  28. package/src/worker/interceptors/activity.js +7 -14
  29. package/src/worker/interceptors/workflow.js +1 -2
  30. package/src/worker/loader.js +39 -30
  31. package/src/worker/loader.spec.js +20 -24
  32. package/src/worker/loader_tools.js +63 -0
  33. package/src/worker/loader_tools.spec.js +85 -0
  34. package/src/worker/sinks.js +8 -4
  35. package/src/configs.js +0 -31
  36. package/src/configs.spec.js +0 -331
  37. package/src/tracing/index.private.spec.js +0 -84
  38. package/src/tracing/index.public.spec.js +0 -86
  39. package/src/worker/internal_utils.js +0 -60
  40. package/src/worker/internal_utils.spec.js +0 -134
  41. package/src/tracing/{utils.spec.js → tools/utils.spec.js} +0 -0
package/src/tracing/processors/s3/index.spec.js ADDED
@@ -0,0 +1,64 @@
+ import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+ const redisMulti = {
+   zAdd: vi.fn().mockReturnThis(),
+   expire: vi.fn().mockReturnThis(),
+   zRange: vi.fn().mockReturnThis(),
+   exec: vi.fn()
+ };
+ const getRedisClientMock = vi.fn( async () => ( { multi: () => redisMulti } ) );
+ vi.mock( './redis_client.js', () => ( { getRedisClient: getRedisClientMock } ) );
+
+ const uploadMock = vi.fn();
+ vi.mock( './s3_client.js', () => ( { upload: uploadMock } ) );
+
+ const buildTraceTreeMock = vi.fn( entries => ( { count: entries.length } ) );
+ vi.mock( '../../tools/build_trace_tree.js', () => ( { default: buildTraceTreeMock } ) );
+
+ describe( 'tracing/processors/s3', () => {
+   beforeEach( () => {
+     vi.clearAllMocks();
+     process.env.TRACE_REMOTE_S3_BUCKET = 'bkt';
+   } );
+
+   it( 'init(): ensures redis client is created', async () => {
+     const { init } = await import( './index.js' );
+     await init();
+     expect( getRedisClientMock ).toHaveBeenCalledTimes( 1 );
+   } );
+
+   it( 'exec(): accumulates via redis, uploads only on root workflow end', async () => {
+     const { exec } = await import( './index.js' );
+     const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
+     const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };
+
+     // Redis will return progressively larger sorted sets
+     redisMulti.exec
+       .mockResolvedValueOnce( [ , , [ JSON.stringify( { name: 'A', phase: 'start', timestamp: startTime } ) ] ] )
+       .mockResolvedValueOnce( [ , , [
+         JSON.stringify( { name: 'A', phase: 'start', timestamp: startTime } ),
+         JSON.stringify( { name: 'A', phase: 'tick', timestamp: startTime + 1 } )
+       ] ] )
+       .mockResolvedValueOnce( [ , , [
+         JSON.stringify( { name: 'A', phase: 'start', timestamp: startTime } ),
+         JSON.stringify( { name: 'A', phase: 'tick', timestamp: startTime + 1 } ),
+         JSON.stringify( { name: 'A', phase: 'end', timestamp: startTime + 2 } )
+       ] ] );
+
+     await exec( { ...ctx, entry: { name: 'A', phase: 'start', timestamp: startTime, parentId: 'root' } } );
+     await exec( { ...ctx, entry: { name: 'A', phase: 'tick', timestamp: startTime + 1, parentId: 'root' } } );
+     // Root end: no parentId and not start
+     await exec( { ...ctx, entry: { name: 'A', phase: 'end', timestamp: startTime + 2 } } );
+
+     // Accumulation happened 3 times
+     expect( redisMulti.zAdd ).toHaveBeenCalledTimes( 3 );
+     expect( buildTraceTreeMock ).toHaveBeenCalledTimes( 3 );
+
+     // Only last call triggers upload
+     expect( uploadMock ).toHaveBeenCalledTimes( 1 );
+     const { key, content } = uploadMock.mock.calls[0][0];
+     expect( key ).toMatch( /^WF\/2020\/01\/02\// );
+     expect( JSON.parse( content.trim() ).count ).toBe( 3 );
+   } );
+ } );
+
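The matching implementation hunk for `package/src/tracing/processors/s3/index.js` (+51) is not reproduced in this section, so the following is a rough reconstruction of the `exec` contract implied by the spec above. All names, the TTL, and the key suffix are assumptions; only the zAdd/zRange accumulation, the root-end trigger, and the `WF/YYYY/MM/DD/` key prefix are pinned down by the assertions.

```js
// Hypothetical sketch inferred from the spec above; not the shipped code.
import { getRedisClient } from './redis_client.js';
import { upload } from './s3_client.js';
import buildTraceTree from '../../tools/build_trace_tree.js';

export async function exec( { executionContext, entry } ) {
  const { workflowId, workflowName, startTime } = executionContext;
  const redis = await getRedisClient();

  // Accumulate each entry in a per-run sorted set scored by timestamp,
  // then read the whole set back in order (third result of the multi).
  const [ , , raw ] = await redis.multi()
    .zAdd( workflowId, { score: entry.timestamp, value: JSON.stringify( entry ) } )
    .expire( workflowId, 3600 ) // TTL value is a guess
    .zRange( workflowId, 0, -1 )
    .exec();

  const tree = buildTraceTree( raw.map( r => JSON.parse( r ) ) );

  // Root workflow end: an entry with no parentId that is not a 'start'.
  if ( !entry.parentId && entry.phase !== 'start' ) {
    const day = new Date( startTime ).toISOString().slice( 0, 10 ).replaceAll( '-', '/' );
    await upload( {
      key: `${workflowName}/${day}/${workflowId}.json`, // suffix is assumed
      content: JSON.stringify( tree )
    } );
  }
}
```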
package/src/tracing/processors/s3/redis_client.js ADDED
@@ -0,0 +1,19 @@
+ import { createClient } from 'redis';
+ import { throws } from '#utils';
+
+ const state = { client: null };
+
+ /**
+  * Return a connected Redis instance
+  * @returns {Promise<import('redis').RedisClientType>}
+  */
+ export async function getRedisClient() {
+   const url = process.env.REDIS_URL ?? throws( new Error( 'Missing REDIS_URL environment variable' ) );
+   if ( await state.client?.ping().catch( _ => 0 ) === 'PONG' ) {
+     return state.client;
+   }
+
+   const client = createClient( { url, socket: { keepAlive: 15000 } } );
+   await client.connect();
+   return state.client = client;
+ }
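A minimal usage sketch of the memoized client above (the URL is an assumption for illustration). The cached connection is health-checked with `ping()` before being reused, so a dropped connection transparently triggers a reconnect:

```js
// Hypothetical usage; assumes a reachable local Redis.
process.env.REDIS_URL ??= 'redis://localhost:6379';

const client = await getRedisClient();  // creates and connects on first call
const again  = await getRedisClient();  // same instance while ping() === 'PONG'
console.log( client === again );        // true while the connection is healthy
```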
package/src/tracing/processors/s3/redis_client.spec.js ADDED
@@ -0,0 +1,50 @@
+ import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+ vi.mock( '#utils', () => ( {
+   throws: e => {
+     throw e;
+   }
+ } ) );
+
+ const createClientImpl = vi.fn();
+ vi.mock( 'redis', () => ( { createClient: opts => createClientImpl( opts ) } ) );
+
+ async function loadModule() {
+   vi.resetModules();
+   return import( './redis_client.js' );
+ }
+
+ describe( 'tracing/processors/s3/redis_client', () => {
+   beforeEach( () => {
+     vi.clearAllMocks();
+     delete process.env.REDIS_URL;
+   } );
+
+   it( 'throws if REDIS_URL is missing', async () => {
+     const { getRedisClient } = await loadModule();
+     await expect( getRedisClient() ).rejects.toThrow( 'Missing REDIS_URL' );
+   } );
+
+   it( 'creates client with url, connects once, then reuses cached when ping is PONG', async () => {
+     process.env.REDIS_URL = 'redis://localhost:6379';
+
+     const pingMock = vi.fn().mockResolvedValue( 'PONG' );
+     const connectMock = vi.fn().mockResolvedValue();
+     const created = [];
+     createClientImpl.mockImplementation( opts => {
+       created.push( opts );
+       return { connect: connectMock, ping: pingMock };
+     } );
+
+     const { getRedisClient } = await loadModule();
+
+     const c1 = await getRedisClient();
+     const c2 = await getRedisClient();
+
+     expect( created ).toHaveLength( 1 );
+     expect( connectMock ).toHaveBeenCalledTimes( 1 );
+     expect( pingMock ).toHaveBeenCalledTimes( 1 );
+     expect( c1 ).toBe( c2 );
+     expect( created[0] ).toMatchObject( { url: 'redis://localhost:6379', socket: { keepAlive: 15000 } } );
+   } );
+ } );
package/src/tracing/processors/s3/s3_client.js ADDED
@@ -0,0 +1,33 @@
+ import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
+ import { throws } from '#utils';
+
+ const state = { s3Client: null };
+
+ /**
+  * Return an S3 client instance
+  * @returns {S3Client}
+  */
+ const getS3Client = () => {
+   if ( state.s3Client ) {
+     return state.s3Client;
+   }
+
+   const region = process.env.AWS_REGION ?? throws( new Error( 'Missing AWS_REGION env var' ) );
+   const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY ?? throws( new Error( 'Missing AWS_SECRET_ACCESS_KEY env var' ) );
+   const accessKeyId = process.env.AWS_ACCESS_KEY_ID ?? throws( new Error( 'Missing AWS_ACCESS_KEY_ID env var' ) );
+
+   return state.s3Client = new S3Client( { region, secretAccessKey, accessKeyId } );
+ };
+
+ /**
+  * Upload the given file to S3
+  * @param {object} args
+  * @param {string} args.key - S3 file key
+  * @param {string} args.content - File content
+  */
+ export const upload = ( { key, content } ) =>
+   getS3Client().send( new PutObjectCommand( {
+     Bucket: process.env.TRACE_REMOTE_S3_BUCKET ?? throws( new Error( 'Missing TRACE_REMOTE_S3_BUCKET env var' ) ),
+     Key: key,
+     Body: content
+   } ) );
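A short, hedged usage sketch of `upload`; all env values are placeholders. Note the client is created lazily on first call and cached, so the env vars must be set before the first upload:

```js
// Hypothetical usage; env values are placeholders, not real credentials.
process.env.AWS_REGION = 'us-east-1';
process.env.AWS_ACCESS_KEY_ID = 'example-key-id';
process.env.AWS_SECRET_ACCESS_KEY = 'example-secret';
process.env.TRACE_REMOTE_S3_BUCKET = 'example-trace-bucket';

// Writes the serialized trace tree under the given key in the configured bucket.
await upload( { key: 'WF/2020/01/02/run-123.json', content: JSON.stringify( { ok: true } ) } );
```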
package/src/tracing/processors/s3/s3_client.spec.js ADDED
@@ -0,0 +1,67 @@
+ import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+ vi.mock( '#utils', () => ( {
+   throws: e => {
+     throw e;
+   }
+ } ) );
+
+ const sendMock = vi.fn();
+ const ctorState = { args: null };
+ class S3ClientMock {
+   constructor( args ) {
+     ctorState.args = args;
+   }
+   send = sendMock;
+ }
+ class PutObjectCommandMock {
+   constructor( input ) {
+     this.input = input;
+   }
+ }
+
+ vi.mock( '@aws-sdk/client-s3', () => ( {
+   S3Client: S3ClientMock,
+   PutObjectCommand: PutObjectCommandMock
+ } ) );
+
+ async function loadModule() {
+   vi.resetModules();
+   return import( './s3_client.js' );
+ }
+
+ describe( 'tracing/processors/s3/s3_client', () => {
+   beforeEach( () => {
+     vi.clearAllMocks();
+     delete process.env.AWS_REGION;
+     delete process.env.AWS_SECRET_ACCESS_KEY;
+     delete process.env.AWS_ACCESS_KEY_ID;
+     delete process.env.TRACE_REMOTE_S3_BUCKET;
+   } );
+
+   it( 'fails fast when required env vars are missing for client creation', async () => {
+     const { upload } = await loadModule();
+     expect( () => upload( { key: 'k', content: 'c' } ) ).toThrow();
+   } );
+
+   it( 'creates client once with env and uploads with bucket/key/content', async () => {
+     process.env.AWS_REGION = 'us-east-1';
+     process.env.AWS_SECRET_ACCESS_KEY = 'sek';
+     process.env.AWS_ACCESS_KEY_ID = 'id';
+     process.env.TRACE_REMOTE_S3_BUCKET = 'bucket';
+
+     const { upload } = await loadModule();
+
+     await upload( { key: 'wf/key.json', content: '{"a":1}' } );
+
+     expect( ctorState.args ).toEqual( { region: 'us-east-1', secretAccessKey: 'sek', accessKeyId: 'id' } );
+     expect( sendMock ).toHaveBeenCalledTimes( 1 );
+     const cmd = sendMock.mock.calls[0][0];
+     expect( cmd ).toBeInstanceOf( PutObjectCommandMock );
+     expect( cmd.input ).toEqual( { Bucket: 'bucket', Key: 'wf/key.json', Body: '{"a":1}' } );
+
+     // subsequent upload uses cached client
+     await upload( { key: 'wf/key2.json', content: '{}' } );
+     expect( sendMock ).toHaveBeenCalledTimes( 2 );
+   } );
+ } );
+
package/src/tracing/{tracer_tree.js → tools/build_trace_tree.js} RENAMED
@@ -1,6 +1,3 @@
- import { readFileSync, writeFileSync } from 'node:fs';
- import { EOL } from 'os';
-
  /**
   * @typedef {object} NodeEntry
   * @property {string} id
@@ -34,7 +31,7 @@ const createEntry = id => ( {
  } );

  /**
-  * Build a tree of nodes from the raw trace file.
+  * Build a tree of nodes from a list of entries
   *
   * Each node will have: id, name, kind, children, input, output or error, startedAt, endedAt.
   *
@@ -48,14 +45,10 @@ const createEntry = id => ( {
   * Children are added according to the parentId of each entry.
   * The result tree has a single root: the only node without parentId, normally the workflow itself.
   *
-  * @param {string} src - Full path to the raw log file (NDJSON)
-  * @returns {void}
+  * @param {object[]} entries - The list of entries
+  * @returns {NodeEntry} The root node
   */
- export const buildLogTree = src => {
-   const content = readFileSync( src, 'utf-8' );
-   const lines = content.split( EOL ).filter( l => l.trim().length > 0 );
-   const entries = lines.map( l => JSON.parse( l ) );
-
+ export default entries => {
    const nodes = new Map();
    const ensureNode = id => nodes.get( id ) ?? nodes.set( id, createEntry( id ) ).get( id );

@@ -79,5 +72,5 @@ export const buildLogTree = src => {
  }

  const root = nodes.get( entries.find( e => !e.parentId ).id );
- writeFileSync( src.replace( /\.raw$/, '.json' ), JSON.stringify( root, undefined, 2 ), 'utf-8' );
+ return root;
  };
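Since the module now exports a pure function over in-memory entries rather than reading an NDJSON file, it can be exercised directly. A minimal sketch, assuming the node shape documented in the JSDoc above:

```js
import buildTraceTree from './tools/build_trace_tree.js';

// Hypothetical minimal input: one step nested under the workflow root.
const root = buildTraceTree( [
  { kind: 'workflow', phase: 'start', name: 'wf', id: 'wf', parentId: undefined, details: {}, timestamp: 1000 },
  { kind: 'step', phase: 'start', name: 's1', id: 's1', parentId: 'wf', details: {}, timestamp: 1100 },
  { kind: 'step', phase: 'end', name: 's1', id: 's1', parentId: 'wf', details: {}, timestamp: 1200 },
  { kind: 'workflow', phase: 'end', name: 'wf', id: 'wf', parentId: undefined, details: {}, timestamp: 2000 }
] );

console.log( root.id );                        // 'wf': the only entry without parentId
console.log( root.children.map( c => c.id ) ); // [ 's1' ]
```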
package/src/tracing/{tracer_tree.spec.js → tools/build_trace_tree.spec.js} RENAMED
@@ -1,18 +1,8 @@
  import { describe, it, expect } from 'vitest';
- import { writeFileSync, readFileSync, rmSync } from 'node:fs';
- import { mkdtempSync } from 'node:fs';
- import { tmpdir } from 'node:os';
- import { join } from 'path';
- import { EOL } from 'os';
- import { buildLogTree } from './tracer_tree.js';
+ import buildLogTree from './build_trace_tree.js';

- const createTempDir = () => mkdtempSync( join( tmpdir(), 'output-sdk-trace-tree-' ) );
-
- describe( 'tracer/tracer_tree', () => {
+ describe( 'build_trace_tree', () => {
    it( 'builds a tree from workflow/step/IO entries with grouping and sorting', () => {
-     const tmp = createTempDir();
-     const rawPath = join( tmp, 'run-123.raw' );
-
      const entries = [
        // workflow start
        { kind: 'workflow', phase: 'start', name: 'wf', id: 'wf', parentId: undefined, details: { a: 1 }, timestamp: 1000 },
@@ -38,11 +28,7 @@ describe( 'tracer/tracer_tree', () => {
        { kind: 'workflow', phase: 'end', name: 'wf', id: 'wf', parentId: undefined, details: { ok: true }, timestamp: 3000 }
      ];

-     writeFileSync( rawPath, entries.map( e => JSON.stringify( e ) ).join( EOL ) + EOL, 'utf-8' );
-
-     buildLogTree( rawPath );
-
-     const jsonText = readFileSync( rawPath.replace( /.raw$/, '.json' ), 'utf-8' );
+     const result = buildLogTree( entries );

      const expected = {
        id: 'wf',
@@ -108,8 +94,6 @@ describe( 'tracer/tracer_tree', () => {
        ]
      };

-     expect( jsonText ).toBe( JSON.stringify( expected, undefined, 2 ) );
-
-     rmSync( tmp, { recursive: true, force: true } );
+     expect( result ).toMatchObject( expected );
    } );
  } );
package/src/tracing/{utils.js → tools/utils.js} RENAMED
@@ -19,3 +19,10 @@ export const serializeError = error =>
    message: error.message,
    stack: error.stack
  };
+
+ /**
+  * Returns true if the string value is a truthy string-bool ('1', 'true', or 'on')
+  * @param {string} v
+  * @returns {boolean}
+  */
+ export const isStringboolTrue = v => [ '1', 'true', 'on' ].includes( v );
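Only the three exact strings count as true; anything else, including `undefined`, is false:

```js
isStringboolTrue( '1' );       // true
isStringboolTrue( 'on' );      // true
isStringboolTrue( 'true' );    // true
isStringboolTrue( 'TRUE' );    // false (the comparison is case-sensitive)
isStringboolTrue( undefined ); // false (unset env vars disable the feature)
```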
package/src/tracing/trace_engine.js ADDED
@@ -0,0 +1,63 @@
+ import { Storage } from '#async_storage';
+ import { EventEmitter } from 'node:events';
+ import { serializeError, isStringboolTrue } from './tools/utils.js';
+ import * as localProcessor from './processors/local/index.js';
+ import * as s3Processor from './processors/s3/index.js';
+
+ const traceBus = new EventEmitter();
+ const processors = [
+   {
+     isOn: isStringboolTrue( process.env.TRACE_LOCAL_ON ),
+     init: localProcessor.init,
+     exec: localProcessor.exec
+   },
+   {
+     isOn: isStringboolTrue( process.env.TRACE_REMOTE_ON ),
+     init: s3Processor.init,
+     exec: s3Processor.exec
+   }
+ ];
+
+ /**
+  * Starts processors based on env vars and attaches them to the main bus to listen for trace events
+  */
+ export const init = async () => {
+   for ( const p of processors.filter( p => p.isOn ) ) {
+     await p.init();
+     traceBus.addListener( 'entry', p.exec );
+   }
+ };
+
+ /**
+  * Serialize details of an event
+  */
+ const serializeDetails = details => details instanceof Error ? serializeError( details ) : details;
+
+ /**
+  * Creates a new trace event phase and sends it to be written
+  *
+  * @param {string} phase - The phase
+  * @param {object} fields - All the trace fields
+  * @returns {void}
+  */
+ export const addEventPhase = ( phase, { kind, name, id, parentId, details, executionContext } ) =>
+   traceBus.emit( 'entry', {
+     executionContext,
+     entry: { kind, phase, name, id, parentId, timestamp: Date.now(), details: serializeDetails( details ) }
+   } );
+
+ /**
+  * Adds an event phase, complementing the options with parentId and executionContext from the async storage.
+  *
+  * This function has no effect when called from outside a Temporal Workflow/Activity environment,
+  * so it is safe to use in unit tests or in any dependencies that might be used elsewhere.
+  *
+  * @param {string} phase - The phase
+  * @param {object} options - The common trace configurations
+  */
+ export function addEventPhaseWithContext( phase, options ) {
+   const storeContent = Storage.load();
+   if ( storeContent ) { // If there is no storage context this was not called from a Temporal environment
+     const { parentId, executionContext } = storeContent;
+     addEventPhase( phase, { ...options, parentId, executionContext } );
+   }
+ }
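One subtlety worth noting: the `isOn` flags are evaluated when the module is first imported, so the env vars must be set beforehand (the spec below calls `vi.resetModules()` for exactly this reason). A hedged wiring sketch:

```js
// Hypothetical wiring; flags must be set before the module is imported,
// because the processors array is built at module load time.
process.env.TRACE_LOCAL_ON = '1';
process.env.TRACE_REMOTE_ON = '0';
const { init, addEventPhase } = await import( './trace_engine.js' );

await init(); // attaches each enabled processor's exec() to the 'entry' bus

// Every emit fans out to all attached processors:
addEventPhase( 'start', {
  kind: 'step',
  name: 'fetch-user', // example values
  id: 'a1',
  parentId: 'wf-1',
  details: { userId: 42 },
  executionContext: { workflowId: 'wf-1', workflowName: 'WF', startTime: Date.now() }
} );
```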
package/src/tracing/trace_engine.spec.js ADDED
@@ -0,0 +1,91 @@
+ import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+ const storageLoadMock = vi.fn();
+ vi.mock( '#async_storage', () => ( {
+   Storage: { load: storageLoadMock }
+ } ) );
+
+ const localInitMock = vi.fn( async () => {} );
+ const localExecMock = vi.fn();
+ vi.mock( './processors/local/index.js', () => ( {
+   init: localInitMock,
+   exec: localExecMock
+ } ) );
+
+ const s3InitMock = vi.fn( async () => {} );
+ const s3ExecMock = vi.fn();
+ vi.mock( './processors/s3/index.js', () => ( {
+   init: s3InitMock,
+   exec: s3ExecMock
+ } ) );
+
+ async function loadTraceEngine() {
+   vi.resetModules();
+   return import( './trace_engine.js' );
+ }
+
+ describe( 'tracing/trace_engine', () => {
+   beforeEach( () => {
+     vi.clearAllMocks();
+     delete process.env.TRACE_LOCAL_ON;
+     delete process.env.TRACE_REMOTE_ON;
+     storageLoadMock.mockReset();
+   } );
+
+   it( 'init() starts only enabled processors and attaches listeners', async () => {
+     process.env.TRACE_LOCAL_ON = '1';
+     process.env.TRACE_REMOTE_ON = '0';
+     const { init, addEventPhase } = await loadTraceEngine();
+
+     await init();
+
+     expect( localInitMock ).toHaveBeenCalledTimes( 1 );
+     expect( s3InitMock ).not.toHaveBeenCalled();
+
+     addEventPhase( 'start', { kind: 'step', name: 'N', id: '1', parentId: 'p', details: { ok: true } } );
+     expect( localExecMock ).toHaveBeenCalledTimes( 1 );
+     const payload = localExecMock.mock.calls[0][0];
+     expect( payload.entry.name ).toBe( 'N' );
+     expect( payload.entry.kind ).toBe( 'step' );
+     expect( payload.entry.phase ).toBe( 'start' );
+     expect( payload.entry.details ).toEqual( { ok: true } );
+   } );
+
+   it( 'addEventPhase() emits an entry consumed by processors', async () => {
+     process.env.TRACE_LOCAL_ON = 'on';
+     const { init, addEventPhase } = await loadTraceEngine();
+     await init();
+
+     addEventPhase( 'end', { kind: 'workflow', name: 'W', id: '2', parentId: 'p2', details: 'done' } );
+     expect( localExecMock ).toHaveBeenCalledTimes( 1 );
+     const payload = localExecMock.mock.calls[0][0];
+     expect( payload.entry.name ).toBe( 'W' );
+     expect( payload.entry.phase ).toBe( 'end' );
+     expect( payload.entry.details ).toBe( 'done' );
+   } );
+
+   it( 'addEventPhaseWithContext() uses storage when available', async () => {
+     process.env.TRACE_LOCAL_ON = 'true';
+     storageLoadMock.mockReturnValue( { parentId: 'ctx-p', executionContext: { runId: 'r1' } } );
+     const { init, addEventPhaseWithContext } = await loadTraceEngine();
+     await init();
+
+     addEventPhaseWithContext( 'tick', { kind: 'step', name: 'S', id: '3', details: 1 } );
+     expect( localExecMock ).toHaveBeenCalledTimes( 1 );
+     const payload = localExecMock.mock.calls[0][0];
+     expect( payload.executionContext ).toEqual( { runId: 'r1' } );
+     expect( payload.entry.parentId ).toBe( 'ctx-p' );
+     expect( payload.entry.name ).toBe( 'S' );
+     expect( payload.entry.phase ).toBe( 'tick' );
+   } );
+
+   it( 'addEventPhaseWithContext() is a no-op when storage is absent', async () => {
+     process.env.TRACE_LOCAL_ON = '1';
+     storageLoadMock.mockReturnValue( undefined );
+     const { init, addEventPhaseWithContext } = await loadTraceEngine();
+     await init();
+
+     addEventPhaseWithContext( 'noop', { kind: 'step', name: 'X', id: '4', details: null } );
+     expect( localExecMock ).not.toHaveBeenCalled();
+   } );
+ } );
package/src/utils.js ADDED
@@ -0,0 +1,8 @@
+ /**
+  * Throw the given error
+  * @param {Error} e
+  * @throws {Error}
+  */
+ export const throws = e => {
+   throw e;
+ };
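The point of `throws` is to make `throw` usable in expression position, which pairs with `??` for required env vars, exactly as the new Redis and S3 clients use it:

```js
// Taken from redis_client.js above:
const url = process.env.REDIS_URL ?? throws( new Error( 'Missing REDIS_URL environment variable' ) );
```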
package/src/worker/catalog_workflow/index.js
@@ -1,4 +1,5 @@
  import { z } from 'zod';
+ import { dirname } from 'node:path';
  import { METADATA_ACCESS_SYMBOL } from '#consts';
  import { Catalog, CatalogActivity, CatalogWorkflow } from './catalog.js';

@@ -37,7 +38,7 @@ export const createCatalog = ( { workflows, activities } ) =>
    inputSchema: convertToJsonSchema( workflow.inputSchema ),
    outputSchema: convertToJsonSchema( workflow.outputSchema ),
    activities: Object.entries( activities )
-     .filter( ( [ k ] ) => k.startsWith( `${workflow.path}#` ) )
+     .filter( ( [ k ] ) => k.startsWith( `${dirname( workflow.path )}#` ) )
      .map( ( [ _, v ] ) => {
        const metadata = v[METADATA_ACCESS_SYMBOL];
        return new CatalogActivity( {
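The filter change follows from `workflow.path` now pointing at the workflow file itself rather than its directory (see the spec changes below), so the activity-key prefix is derived from the containing directory:

```js
import { dirname } from 'node:path';

// workflow.path now includes the filename:
dirname( '/flows/flow1/workflow.js' ); // '/flows/flow1'
// so an activity key like '/flows/flow1#myActivity' (hypothetical) still matches
// the `${dirname( workflow.path )}#` prefix.
```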
package/src/worker/catalog_workflow/index.spec.js
@@ -17,16 +17,14 @@ describe( 'createCatalog', () => {
    const workflows = [
      {
        name: 'flow1',
-       path: '/flows/flow1',
-       pathname: '/flows/flow1/workflow.js',
+       path: '/flows/flow1/workflow.js',
        description: 'desc-flow1',
        inputSchema: z.object( { in: z.literal( 'f1' ) } ),
        outputSchema: z.object( { out: z.literal( 'f1' ) } )
      },
      {
        name: 'flow2',
-       path: '/flows/flow2',
-       pathname: '/flows/flow2/workflow.js',
+       path: '/flows/flow2/workflow.js',
        description: 'desc-flow2',
        inputSchema: z.object( { in: z.literal( 'f2' ) } ),
        outputSchema: z.object( { out: z.literal( 'f2' ) } )
@@ -95,7 +93,7 @@ describe( 'createCatalog', () => {
    expect( mapped ).toEqual( [
      {
        name: 'flow1',
-       path: '/flows/flow1',
+       path: '/flows/flow1/workflow.js',
        description: 'desc-flow1',
        inputSchema: {
          $schema: 'https://json-schema.org/draft/2020-12/schema',
@@ -152,7 +150,7 @@ describe( 'createCatalog', () => {
      },
      {
        name: 'flow2',
-       path: '/flows/flow2',
+       path: '/flows/flow2/workflow.js',
        description: 'desc-flow2',
        inputSchema: {
          $schema: 'https://json-schema.org/draft/2020-12/schema',
@@ -192,9 +190,7 @@ describe( 'createCatalog', () => {
    ] );

    // Original inputs are not mutated
-   expect( workflows[0].path ).toBe( '/flows/flow1' );
-   expect( workflows[0].pathname ).toBe( '/flows/flow1/workflow.js' );
-   expect( workflows[1].path ).toBe( '/flows/flow2' );
-   expect( workflows[1].pathname ).toBe( '/flows/flow2/workflow.js' );
+   expect( workflows[0].path ).toBe( '/flows/flow1/workflow.js' );
+   expect( workflows[1].path ).toBe( '/flows/flow2/workflow.js' );
  } );
} );
package/src/worker/configs.js ADDED
@@ -0,0 +1,24 @@
+ import * as z from 'zod';
+
+ class InvalidEnvVarsErrors extends Error { }
+
+ const envVarSchema = z.object( {
+   CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
+   TEMPORAL_ADDRESS: z.string().default( 'localhost:7233' ),
+   TEMPORAL_API_KEY: z.string().optional(),
+   TEMPORAL_NAMESPACE: z.string().optional().default( 'default' )
+ } );
+
+ const { data: envVars, error } = envVarSchema.safeParse( process.env );
+ if ( error ) {
+   throw new InvalidEnvVarsErrors( z.prettifyError( error ) );
+ }
+
+ export const address = envVars.TEMPORAL_ADDRESS;
+ export const apiKey = envVars.TEMPORAL_API_KEY;
+ export const executionTimeout = '1m';
+ export const maxActivities = 100;
+ export const maxWorkflows = 100;
+ export const namespace = envVars.TEMPORAL_NAMESPACE;
+ export const taskQueue = envVars.CATALOG_ID;
+ export const catalogId = envVars.CATALOG_ID;
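The schema is validated at import time, so a missing or malformed `CATALOG_ID` aborts the worker before any Temporal connection is attempted. A minimal sketch (values are placeholders):

```js
// Hypothetical usage; validated on first import of the module.
process.env.CATALOG_ID = 'my_catalog@0.1'; // must match /^[a-z0-9_.@-]+$/i
const configs = await import( './configs.js' );

console.log( configs.address );                         // 'localhost:7233' (default)
console.log( configs.taskQueue === configs.catalogId ); // true: both come from CATALOG_ID
```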
package/src/worker/index.js
@@ -3,16 +3,16 @@ import { Client } from '@temporalio/client';
  import { WorkflowIdConflictPolicy } from '@temporalio/common';
  import { dirname, join } from 'path';
  import { fileURLToPath } from 'node:url';
- import { worker as workerConfig } from '#configs';
+ import { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } from './configs.js';
  import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
  import { ActivityExecutionInterceptor } from './interceptors/activity.js';
  import { sinks } from './sinks.js';
  import { createCatalog } from './catalog_workflow/index.js';
+ import { init as initTracing } from '#tracing';
+ import { WORKFLOW_CATALOG } from '#consts';

  const __dirname = dirname( fileURLToPath( import.meta.url ) );

- const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } = workerConfig;
-
  // Get caller directory from command line arguments
  const callerDir = process.argv[2];

@@ -26,6 +26,9 @@ const callerDir = process.argv[2];
  console.log( '[Core]', 'Creating worker entry point...' );
  const workflowsPath = createWorkflowsEntryPoint( workflows );

+ console.log( '[Core]', 'Initializing tracing...' );
+ await initTracing();
+
  console.log( '[Core]', 'Creating workflows catalog...' );
  const catalog = createCatalog( { workflows, activities } );

@@ -69,7 +72,7 @@ const callerDir = process.argv[2];
  } );

  console.log( '[Core]', 'Starting catalog workflow...' );
- await new Client( { connection, namespace } ).workflow.start( 'catalog', {
+ await new Client( { connection, namespace } ).workflow.start( WORKFLOW_CATALOG, {
    taskQueue,
    workflowId: catalogId, // use the name of the task queue as the catalog name, ensuring uniqueness
    workflowIdConflictPolicy: WorkflowIdConflictPolicy.TERMINATE_EXISTING,
package/src/worker/interceptors/activity.js
@@ -20,26 +20,19 @@ export class ActivityExecutionInterceptor {

  async execute( input, next ) {
    const { workflowExecution: { workflowId }, activityId, activityType } = Context.current().info;
-   const { traceId, traceHelm } = headersToObject( input.headers );
-   const { type: kind, skipTrace } = this.activities?.[activityType]?.[METADATA_ACCESS_SYMBOL];
+   const { executionContext } = headersToObject( input.headers );
+   const { type: kind } = this.activities?.[activityType]?.[METADATA_ACCESS_SYMBOL];

-   const traceContext = { kind, id: activityId, parentId: workflowId, name: activityType, traceId, traceHelm };
-
-   if ( !skipTrace ) {
-     addEventStart( { details: input.args[0], ...traceContext } );
-   }
+   const traceArguments = { kind, id: activityId, parentId: workflowId, name: activityType, executionContext };
+   addEventStart( { details: input.args[0], ...traceArguments } );

    // creates a context for the nested tracing
    try {
-     const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId, traceId, traceHelm } );
-     if ( !skipTrace ) {
-       addEventEnd( { details: output, ...traceContext } );
-     }
+     const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId, executionContext } );
+     addEventEnd( { details: output, ...traceArguments } );
      return output;
    } catch ( error ) {
-     if ( !skipTrace ) {
-       addEventError( { details: error, ...traceContext } );
-     }
+     addEventError( { details: error, ...traceArguments } );
      throw error;
    }
  }
package/src/worker/interceptors/workflow.js
@@ -25,8 +25,7 @@ class WorkflowExecutionInterceptor {
    sinks.trace.addWorkflowEventStart( input.args[0] );
    try {
      const output = await next( input );
-     // if the workflow is root, the result wraps "output" and "trace" onto an object, this will hide the trace
-     sinks.trace.addWorkflowEventEnd( !workflowInfo().memo.parentId ? output.output : output );
+     sinks.trace.addWorkflowEventEnd( output );
      return output;
    } catch ( error ) {
      sinks.trace.addWorkflowEventError( error );