@output.ai/core 0.0.16 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -22
- package/package.json +19 -7
- package/src/consts.js +2 -11
- package/src/interface/evaluator.js +8 -4
- package/src/interface/step.js +1 -1
- package/src/interface/webhook.js +16 -4
- package/src/interface/workflow.js +28 -48
- package/src/internal_activities/index.js +3 -37
- package/src/tracing/index.d.ts +47 -0
- package/src/tracing/index.js +45 -0
- package/src/tracing/internal_interface.js +66 -0
- package/src/tracing/processors/local/index.js +50 -0
- package/src/tracing/processors/local/index.spec.js +67 -0
- package/src/tracing/processors/s3/index.js +51 -0
- package/src/tracing/processors/s3/index.spec.js +64 -0
- package/src/tracing/processors/s3/redis_client.js +19 -0
- package/src/tracing/processors/s3/redis_client.spec.js +50 -0
- package/src/tracing/processors/s3/s3_client.js +33 -0
- package/src/tracing/processors/s3/s3_client.spec.js +67 -0
- package/src/tracing/tools/build_trace_tree.js +76 -0
- package/src/tracing/tools/build_trace_tree.spec.js +99 -0
- package/src/tracing/tools/utils.js +28 -0
- package/src/tracing/tools/utils.spec.js +14 -0
- package/src/tracing/trace_engine.js +63 -0
- package/src/tracing/trace_engine.spec.js +91 -0
- package/src/utils.js +8 -0
- package/src/worker/catalog_workflow/index.js +2 -1
- package/src/worker/catalog_workflow/index.spec.js +6 -10
- package/src/worker/configs.js +24 -0
- package/src/worker/index.js +7 -8
- package/src/worker/interceptors/activity.js +15 -17
- package/src/worker/interceptors/workflow.js +18 -1
- package/src/worker/loader.js +40 -31
- package/src/worker/loader.spec.js +22 -29
- package/src/worker/loader_tools.js +63 -0
- package/src/worker/loader_tools.spec.js +85 -0
- package/src/worker/sinks.js +60 -10
- package/src/configs.js +0 -36
- package/src/configs.spec.js +0 -379
- package/src/worker/internal_utils.js +0 -60
- package/src/worker/internal_utils.spec.js +0 -134
- package/src/worker/tracer/index.js +0 -75
- package/src/worker/tracer/index.test.js +0 -102
- package/src/worker/tracer/tracer_tree.js +0 -85
- package/src/worker/tracer/tracer_tree.test.js +0 -115
- /package/src/{worker/async_storage.js → async_storage.js} +0 -0
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { upload } from './s3_client.js';
import { getRedisClient } from './redis_client.js';
import buildTraceTree from '../../tools/build_trace_tree.js';
import { EOL } from 'node:os';

// Accumulated trace entries live in Redis for 30 days before expiring.
const oneMonthInSeconds = 30 * 24 * 60 * 60;

/**
 * Append the entry to the per-execution Redis sorted set (scored by timestamp)
 * and return every entry accumulated so far, in timestamp order.
 *
 * @param {object} args
 * @param {object} args.entry - Trace event phase to store
 * @param {object} args.executionContext - Carries workflowName and workflowId used to build the key
 * @returns {Promise<object[]>} All accumulated entries, parsed back from JSON
 */
const accumulate = async ( { entry, executionContext: { workflowName, workflowId } } ) => {
  const setKey = `traces/${workflowName}/${workflowId}`;
  const redis = await getRedisClient();

  // Single round-trip: add (NX keeps first write for a given value), refresh
  // the TTL only when it grows (GT), then read the whole set back.
  const results = await redis.multi()
    .zAdd( setKey, [ { score: entry.timestamp, value: JSON.stringify( entry ) } ], { NX: true } )
    .expire( setKey, oneMonthInSeconds, 'GT' )
    .zRange( setKey, 0, -1 )
    .exec();

  const serializedEntries = results[ 2 ];
  return serializedEntries.map( item => JSON.parse( item ) );
};

/**
 * Build the S3 object key: <workflow>/<year>/<month>/<day>/<flat-iso>_<workflowId>.json
 *
 * @param {object} args - startTime (epoch ms), workflowId and workflowName
 * @returns {string} The S3 key
 */
const getS3Key = ( { startTime, workflowId, workflowName } ) => {
  const isoDate = new Date( startTime ).toISOString();
  const [ year, month, day ] = isoDate.split( /\D/, 3 );
  const flatTimestamp = isoDate.replace( /[:T.]/g, '-' );
  return `${workflowName}/${year}/${month}/${day}/${flatTimestamp}_${workflowId}.json`;
};

/**
 * Init this processor
 */
export const init = async () => {
  await getRedisClient();
};

/**
 * Execute this processor: send a complete trace tree file to S3 when the workflow finishes
 *
 * @param {object} args
 * @param {object} args.entry - Trace event phase
 * @param {object} args.executionContext - Execution info: workflowId, workflowName, startTime
 */
export const exec = async ( { entry, executionContext } ) => {
  const { workflowName, workflowId, startTime } = executionContext;

  // Always accumulate first so every phase is persisted, even mid-run.
  const traceTree = buildTraceTree( await accumulate( { entry, executionContext } ) );

  // The root workflow has no parentId; any non-start phase on it means the run is over.
  const isRootWorkflowEnd = !entry.parentId && entry.phase !== 'start';
  if ( !isRootWorkflowEnd ) {
    return 0;
  }

  return upload( {
    key: getS3Key( { workflowId, workflowName, startTime } ),
    content: JSON.stringify( traceTree, undefined, 2 ) + EOL
  } );
};
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// Unit tests for the S3 trace processor: verifies Redis accumulation happens on
// every phase and the S3 upload fires only when the root workflow ends.
import { describe, it, expect, vi, beforeEach } from 'vitest';

// Chainable stub for redis multi(); each builder method returns the stub itself,
// and exec() resolution is configured per test.
const redisMulti = {
  zAdd: vi.fn().mockReturnThis(),
  expire: vi.fn().mockReturnThis(),
  zRange: vi.fn().mockReturnThis(),
  exec: vi.fn()
};
const getRedisClientMock = vi.fn( async () => ( { multi: () => redisMulti } ) );
vi.mock( './redis_client.js', () => ( { getRedisClient: getRedisClientMock } ) );

const uploadMock = vi.fn();
vi.mock( './s3_client.js', () => ( { upload: uploadMock } ) );

// Stand-in tree builder: returns just the entry count so assertions stay simple.
const buildTraceTreeMock = vi.fn( entries => ( { count: entries.length } ) );
vi.mock( '../../tools/build_trace_tree.js', () => ( { default: buildTraceTreeMock } ) );

describe( 'tracing/processors/s3', () => {
  beforeEach( () => {
    vi.clearAllMocks();
    process.env.TRACE_REMOTE_S3_BUCKET = 'bkt';
  } );

  it( 'init(): ensures redis client is created', async () => {
    const { init } = await import( './index.js' );
    await init();
    expect( getRedisClientMock ).toHaveBeenCalledTimes( 1 );
  } );

  it( 'exec(): accumulates via redis, uploads only on root workflow end', async () => {
    const { exec } = await import( './index.js' );
    const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
    const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };

    // Redis will return progressively larger sorted sets
    // (exec() resolves [zAddResult, expireResult, zRangeResult]; only index 2 is read)
    redisMulti.exec
      .mockResolvedValueOnce( [ , , [ JSON.stringify( { name: 'A', phase: 'start', timestamp: startTime } ) ] ] )
      .mockResolvedValueOnce( [ , , [
        JSON.stringify( { name: 'A', phase: 'start', timestamp: startTime } ),
        JSON.stringify( { name: 'A', phase: 'tick', timestamp: startTime + 1 } )
      ] ] )
      .mockResolvedValueOnce( [ , , [
        JSON.stringify( { name: 'A', phase: 'start', timestamp: startTime } ),
        JSON.stringify( { name: 'A', phase: 'tick', timestamp: startTime + 1 } ),
        JSON.stringify( { name: 'A', phase: 'end', timestamp: startTime + 2 } )
      ] ] );

    await exec( { ...ctx, entry: { name: 'A', phase: 'start', timestamp: startTime, parentId: 'root' } } );
    await exec( { ...ctx, entry: { name: 'A', phase: 'tick', timestamp: startTime + 1, parentId: 'root' } } );
    // Root end: no parentId and not start
    await exec( { ...ctx, entry: { name: 'A', phase: 'end', timestamp: startTime + 2 } } );

    // Accumulation happened 3 times
    expect( redisMulti.zAdd ).toHaveBeenCalledTimes( 3 );
    expect( buildTraceTreeMock ).toHaveBeenCalledTimes( 3 );

    // Only last call triggers upload
    expect( uploadMock ).toHaveBeenCalledTimes( 1 );
    const { key, content } = uploadMock.mock.calls[0][0];
    // Key layout: <workflowName>/<year>/<month>/<day>/...
    expect( key ).toMatch( /^WF\/2020\/01\/02\// );
    expect( JSON.parse( content.trim() ).count ).toBe( 3 );
  } );
} );
|
|
64
|
+
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { createClient } from 'redis';
import { throws } from '#utils';

// Module-level cache: one shared Redis client per process.
const state = { client: null };

/**
 * Return a connected Redis client, reusing the cached one while it still
 * answers PING; otherwise create and connect a fresh client.
 *
 * @throws {Error} When the REDIS_URL environment variable is not set.
 * @returns {Promise<import('redis').RedisClientType>} A connected client
 */
export async function getRedisClient() {
  const url = process.env.REDIS_URL ?? throws( new Error( 'Missing REDIS_URL environment variable' ) );

  // Health-check the cached client; a failed PING resolves to 0 instead of throwing,
  // which falls through to creating a replacement client below.
  const pingReply = await state.client?.ping().catch( () => 0 );
  if ( pingReply === 'PONG' ) {
    return state.client;
  }

  const freshClient = createClient( { url, socket: { keepAlive: 15000 } } );
  await freshClient.connect();
  state.client = freshClient;
  return state.client;
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
// Unit tests for the cached Redis client factory.
import { describe, it, expect, vi, beforeEach } from 'vitest';

// Replace the project's throws() helper with a plain re-throw so the
// missing-env-var error surfaces directly.
vi.mock( '#utils', () => ( {
  throws: e => {
    throw e;
  }
} ) );

const createClientImpl = vi.fn();
vi.mock( 'redis', () => ( { createClient: opts => createClientImpl( opts ) } ) );

// Re-import per test: the module caches its client in module-level state,
// so a fresh module instance is needed for isolation.
async function loadModule() {
  vi.resetModules();
  return import( './redis_client.js' );
}

describe( 'tracing/processors/s3/redis_client', () => {
  beforeEach( () => {
    vi.clearAllMocks();
    delete process.env.REDIS_URL;
  } );

  it( 'throws if REDIS_URL is missing', async () => {
    const { getRedisClient } = await loadModule();
    await expect( getRedisClient() ).rejects.toThrow( 'Missing REDIS_URL' );
  } );

  it( 'creates client with url, connects once, then reuses cached when ping is PONG', async () => {
    process.env.REDIS_URL = 'redis://localhost:6379';

    const pingMock = vi.fn().mockResolvedValue( 'PONG' );
    const connectMock = vi.fn().mockResolvedValue();
    const created = [];
    createClientImpl.mockImplementation( opts => {
      created.push( opts );
      return { connect: connectMock, ping: pingMock };
    } );

    const { getRedisClient } = await loadModule();

    const c1 = await getRedisClient();
    const c2 = await getRedisClient();

    // First call creates+connects; second call only pings the cached client.
    expect( created ).toHaveLength( 1 );
    expect( connectMock ).toHaveBeenCalledTimes( 1 );
    expect( pingMock ).toHaveBeenCalledTimes( 1 );
    expect( c1 ).toBe( c2 );
    expect( created[0] ).toMatchObject( { url: 'redis://localhost:6379', socket: { keepAlive: 15000 } } );
  } );
} );
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { throws } from '#utils';

// Module-level cache: one shared S3 client per process.
const state = { s3Client: null };

/**
 * Return a (cached) S3 Client instance built from environment configuration.
 *
 * @throws {Error} When AWS_REGION, AWS_SECRET_ACCESS_KEY or AWS_ACCESS_KEY_ID is missing.
 * @returns {S3Client}
 */
const getS3Client = () => {
  if ( state.s3Client ) {
    return state.s3Client;
  }

  const region = process.env.AWS_REGION ?? throws( new Error( 'Missing AWS_REGION env var' ) );
  const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY ?? throws( new Error( 'Missing AWS_SECRET_ACCESS_KEY env var' ) );
  const accessKeyId = process.env.AWS_ACCESS_KEY_ID ?? throws( new Error( 'Missing AWS_ACCESS_KEY_ID env var' ) );

  // AWS SDK v3 reads static credentials from the `credentials` option; the v2-style
  // top-level accessKeyId/secretAccessKey keys are silently ignored by S3Client.
  return state.s3Client = new S3Client( { region, credentials: { accessKeyId, secretAccessKey } } );
};

/**
 * Upload given file to S3
 * @param {object} args
 * @param {string} args.key - S3 file key
 * @param {string} args.content - File content
 * @throws {Error} When the TRACE_REMOTE_S3_BUCKET env var is missing.
 * @returns {Promise<object>} The PutObject response
 */
export const upload = ( { key, content } ) =>
  getS3Client().send( new PutObjectCommand( {
    Bucket: process.env.TRACE_REMOTE_S3_BUCKET ?? throws( new Error( 'Missing TRACE_REMOTE_S3_BUCKET env var' ) ),
    Key: key,
    Body: content
  } ) );
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
// Unit tests for the cached S3 client and upload helper.
import { describe, it, expect, vi, beforeEach } from 'vitest';

// Replace the project's throws() helper with a plain re-throw so the
// missing-env-var error surfaces synchronously.
vi.mock( '#utils', () => ( {
  throws: e => {
    throw e;
  }
} ) );

const sendMock = vi.fn();
// Captures the last S3Client constructor arguments for assertions.
const ctorState = { args: null };
class S3ClientMock {
  constructor( args ) {
    ctorState.args = args;
  }
  send = sendMock;
}
class PutObjectCommandMock {
  constructor( input ) {
    this.input = input;
  }
}

vi.mock( '@aws-sdk/client-s3', () => ( {
  S3Client: S3ClientMock,
  PutObjectCommand: PutObjectCommandMock
} ) );

// Re-import per test: the module caches its client in module-level state.
async function loadModule() {
  vi.resetModules();
  return import( './s3_client.js' );
}

describe( 'tracing/processors/s3/s3_client', () => {
  beforeEach( () => {
    vi.clearAllMocks();
    delete process.env.AWS_REGION;
    delete process.env.AWS_SECRET_ACCESS_KEY;
    delete process.env.AWS_ACCESS_KEY_ID;
    delete process.env.TRACE_REMOTE_S3_BUCKET;
  } );

  it( 'fails fast when required env vars are missing for client creation', async () => {
    const { upload } = await loadModule();
    expect( () => upload( { key: 'k', content: 'c' } ) ).toThrow();
  } );

  it( 'creates client once with env and uploads with bucket/key/content', async () => {
    process.env.AWS_REGION = 'us-east-1';
    process.env.AWS_SECRET_ACCESS_KEY = 'sek';
    process.env.AWS_ACCESS_KEY_ID = 'id';
    process.env.TRACE_REMOTE_S3_BUCKET = 'bucket';

    const { upload } = await loadModule();

    await upload( { key: 'wf/key.json', content: '{"a":1}' } );

    // AWS SDK v3 expects static credentials nested under `credentials`.
    expect( ctorState.args ).toEqual( {
      region: 'us-east-1',
      credentials: { accessKeyId: 'id', secretAccessKey: 'sek' }
    } );
    expect( sendMock ).toHaveBeenCalledTimes( 1 );
    const cmd = sendMock.mock.calls[0][0];
    expect( cmd ).toBeInstanceOf( PutObjectCommandMock );
    expect( cmd.input ).toEqual( { Bucket: 'bucket', Key: 'wf/key.json', Body: '{"a":1}' } );

    // subsequent upload uses cached client
    await upload( { key: 'wf/key2.json', content: '{}' } );
    expect( sendMock ).toHaveBeenCalledTimes( 2 );
  } );
} );
|
|
67
|
+
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
/**
 * @typedef {object} NodeEntry
 * @property {string} id
 * @property {string} kind
 * @property {string} name
 * @property {number} startedAt
 * @property {number} endedAt
 * @property {object} [input]
 * @property {object} [output]
 * @property {object} [error]
 * @property {NodeEntry[]} children
 */

/**
 * Create a node entry for the tree.
 *
 * Properties are sorted the way they should be in the final file, as this makes it easier to read.
 *
 * @param {string} id - Node id
 * @returns {NodeEntry} The entry without any values
 */
const createEntry = id => ( {
  id,
  kind: '',
  name: '',
  startedAt: 0,
  endedAt: 0,
  input: undefined,
  output: undefined,
  error: undefined,
  children: []
} );

/**
 * Build a tree of nodes from a list of entries
 *
 * Each node will have: id, name, kind, children, input, output or error, startedAt, endedAt.
 *
 * Entries with same id will be combined according to their phase (start, end OR error).
 * - The details of the start phase becomes input, timestamp becomes startedAt;
 * - The details of the end phase become output, timestamp becomes endedAt;
 * - The details of the error phase become error, timestamp becomes endedAt;
 * - Only start phase's kind and name are used;
 *
 * Children are added according to the parentId of each entry.
 * The result tree has a single root: the only node without parentId, normally the workflow itself.
 *
 * @param {object[]} entries - The list of entries
 * @returns {NodeEntry|undefined} The root node, or undefined when no root entry exists
 */
const buildTraceTree = entries => {
  const nodes = new Map();
  // Get the node for an id, creating an empty placeholder on first sight.
  const ensureNode = id => nodes.get( id ) ?? nodes.set( id, createEntry( id ) ).get( id );

  for ( const entry of entries ) {
    const { kind, id, name, parentId, details, phase, timestamp } = entry;
    const node = ensureNode( id );

    if ( phase === 'start' ) {
      Object.assign( node, { input: details, startedAt: timestamp, kind, name } );
    } else if ( phase === 'end' ) {
      Object.assign( node, { output: details, endedAt: timestamp } );
    } else if ( phase === 'error' ) {
      Object.assign( node, { error: details, endedAt: timestamp } );
    }

    // Link to the parent only once, on the start phase (end/error reuse the same node).
    if ( parentId && phase === 'start' ) {
      ensureNode( parentId ).children.push( node );
    }
  }

  // Sort each parent's children once, instead of re-sorting after every push
  // (sort is stable, so the final order is identical to the incremental version).
  for ( const node of nodes.values() ) {
    node.children.sort( ( a, b ) => a.startedAt - b.startedAt );
  }

  const rootEntry = entries.find( e => !e.parentId );
  return rootEntry && nodes.get( rootEntry.id );
};

export default buildTraceTree;
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
// End-to-end fixture test: feeds a realistic interleaved entry stream
// (workflow > evaluator/steps > IO) and checks the full assembled tree.
import { describe, it, expect } from 'vitest';
import buildLogTree from './build_trace_tree.js';

describe( 'build_trace_tree', () => {
  it( 'builds a tree from workflow/step/IO entries with grouping and sorting', () => {
    const entries = [
      // workflow start
      { kind: 'workflow', phase: 'start', name: 'wf', id: 'wf', parentId: undefined, details: { a: 1 }, timestamp: 1000 },
      // evaluator start/stop
      { kind: 'evaluator', phase: 'start', name: 'eval', id: 'eval', parentId: 'wf', details: { z: 0 }, timestamp: 1500 },
      { kind: 'evaluator', phase: 'end', name: 'eval', id: 'eval', parentId: 'wf', details: { z: 1 }, timestamp: 1600 },
      // step1 start
      { kind: 'step', phase: 'start', name: 'step-1', id: 's1', parentId: 'wf', details: { x: 1 }, timestamp: 2000 },
      // IO under step1
      { kind: 'IO', phase: 'start', name: 'test-1', id: 'io1', parentId: 's1', details: { y: 2 }, timestamp: 2300 },
      // step2 start
      { kind: 'step', phase: 'start', name: 'step-2', id: 's2', parentId: 'wf', details: { x: 2 }, timestamp: 2400 },
      // IO under step2
      { kind: 'IO', phase: 'start', name: 'test-2', id: 'io2', parentId: 's2', details: { y: 3 }, timestamp: 2500 },
      { kind: 'IO', phase: 'end', name: 'test-2', id: 'io2', parentId: 's2', details: { y: 4 }, timestamp: 2600 },
      // IO under step1 ends
      { kind: 'IO', phase: 'end', name: 'test-1', id: 'io1', parentId: 's1', details: { y: 5 }, timestamp: 2700 },
      // step1 end
      { kind: 'step', phase: 'end', name: 'step-1', id: 's1', parentId: 'wf', details: { done: true }, timestamp: 2800 },
      // step2 end
      { kind: 'step', phase: 'end', name: 'step-2', id: 's2', parentId: 'wf', details: { done: true }, timestamp: 2900 },
      // workflow end
      { kind: 'workflow', phase: 'end', name: 'wf', id: 'wf', parentId: undefined, details: { ok: true }, timestamp: 3000 }
    ];

    const result = buildLogTree( entries );

    // Expected tree: start details become input, end details become output,
    // children are ordered by startedAt under each parent.
    const expected = {
      id: 'wf',
      kind: 'workflow',
      name: 'wf',
      startedAt: 1000,
      endedAt: 3000,
      input: { a: 1 },
      output: { ok: true },
      children: [
        {
          id: 'eval',
          kind: 'evaluator',
          name: 'eval',
          startedAt: 1500,
          endedAt: 1600,
          input: { z: 0 },
          output: { z: 1 },
          children: []
        },
        {
          id: 's1',
          kind: 'step',
          name: 'step-1',
          startedAt: 2000,
          endedAt: 2800,
          input: { x: 1 },
          output: { done: true },
          children: [
            {
              id: 'io1',
              kind: 'IO',
              name: 'test-1',
              startedAt: 2300,
              endedAt: 2700,
              input: { y: 2 },
              output: { y: 5 },
              children: []
            }
          ]
        },
        {
          id: 's2',
          kind: 'step',
          name: 'step-2',
          startedAt: 2400,
          endedAt: 2900,
          input: { x: 2 },
          output: { done: true },
          children: [
            {
              id: 'io2',
              kind: 'IO',
              name: 'test-2',
              startedAt: 2500,
              endedAt: 2600,
              input: { y: 3 },
              output: { y: 4 },
              children: []
            }
          ]
        }
      ]
    };

    expect( result ).toMatchObject( expected );
  } );
} );
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
/**
 * @typedef {object} SerializedError
 * @property {string} name - The error constructor name
 * @property {string} message - The error message
 * @property {string} stack - The error stack trace
 */

/**
 * Serialize an error into a plain object.
 *
 * Walks the ".cause" chain down to the innermost error (one with no cause)
 * and serializes that one.
 *
 * @param {Error} error
 * @returns {SerializedError}
 */
export const serializeError = error => {
  let innermost = error;
  while ( innermost.cause ) {
    innermost = innermost.cause;
  }
  return {
    name: innermost.constructor.name,
    message: innermost.message,
    stack: innermost.stack
  };
};

/**
 * Returns true if string value is a truthy string-bool: '1', 'true' or 'on'
 *
 * @param {string} v - The raw flag value (usually an env var)
 * @returns {boolean} Whether v is one of the accepted truthy spellings
 */
export const isStringboolTrue = v => [ '1', 'true', 'on' ].includes( v );
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
// Unit tests for tracing/tools/utils.
import { describe, it, expect } from 'vitest';
import { serializeError } from './utils.js';

describe( 'tracing/utils', () => {
  it( 'serializeError unwraps causes and keeps message/stack', () => {
    const inner = new Error( 'inner' );
    const outer = new Error( 'outer', { cause: inner } );

    // The innermost cause wins: outer's own message is intentionally discarded.
    const out = serializeError( outer );
    expect( out.name ).toBe( 'Error' );
    expect( out.message ).toBe( 'inner' );
    expect( typeof out.stack ).toBe( 'string' );
  } );
} );
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import { Storage } from '#async_storage';
import { EventEmitter } from 'node:events';
import { serializeError, isStringboolTrue } from './tools/utils.js';
import * as localProcessor from './processors/local/index.js';
import * as s3Processor from './processors/s3/index.js';

// Single shared bus: enabled processors subscribe once on init() and every
// trace entry is fanned out to all of them.
const traceBus = new EventEmitter();
const processors = [
  {
    isOn: isStringboolTrue( process.env.TRACE_LOCAL_ON ),
    init: localProcessor.init,
    exec: localProcessor.exec
  },
  {
    isOn: isStringboolTrue( process.env.TRACE_REMOTE_ON ),
    init: s3Processor.init,
    exec: s3Processor.exec
  }
];

/**
 * Starts processors based on env vars and attach them to the main bus to listen trace events
 */
export const init = async () => {
  for ( const p of processors.filter( p => p.isOn ) ) {
    await p.init();
    traceBus.addListener( 'entry', p.exec );
  }
};

/**
 * Serialize details of an event; Error instances become plain serializable objects.
 */
const serializeDetails = details => details instanceof Error ? serializeError( details ) : details;

/**
 * Creates a new trace event phase and sends it to be written
 *
 * @param {string} phase - The phase
 * @param {object} fields - All the trace fields
 * @returns {boolean} True when at least one processor was listening
 */
export const addEventPhase = ( phase, { kind, name, id, parentId, details, executionContext } ) =>
  traceBus.emit( 'entry', {
    executionContext,
    // Note: the original literal duplicated the `phase` key; listed once here.
    entry: { kind, phase, name, id, parentId, timestamp: Date.now(), details: serializeDetails( details ) }
  } );

/**
 * Adds an Event Phase, complementing the options with parentId and executionContext from the async storage.
 *
 * This function will have no effect if called from outside a Temporal Workflow/Activity environment,
 * so it is safe to be used on unit tests or any dependencies that might be used elsewhere
 *
 * @param {string} phase - The phase
 * @param {object} options - The common trace configurations
 */
export function addEventPhaseWithContext( phase, options ) {
  const storeContent = Storage.load();
  if ( storeContent ) { // If there is no storageContext this was not called from a Temporal Environment
    const { parentId, executionContext } = storeContent;
    addEventPhase( phase, { ...options, parentId, executionContext } );
  }
}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
// Unit tests for the trace engine: processor enablement via env vars,
// event emission, and context enrichment from async storage.
import { describe, it, expect, vi, beforeEach } from 'vitest';

const storageLoadMock = vi.fn();
vi.mock( '#async_storage', () => ( {
  Storage: { load: storageLoadMock }
} ) );

const localInitMock = vi.fn( async () => {} );
const localExecMock = vi.fn();
vi.mock( './processors/local/index.js', () => ( {
  init: localInitMock,
  exec: localExecMock
} ) );

const s3InitMock = vi.fn( async () => {} );
const s3ExecMock = vi.fn();
vi.mock( './processors/s3/index.js', () => ( {
  init: s3InitMock,
  exec: s3ExecMock
} ) );

// Re-import per test: processor enablement is computed at module load time
// from the env vars, so a fresh module instance is required.
async function loadTraceEngine() {
  vi.resetModules();
  return import( './trace_engine.js' );
}

describe( 'tracing/trace_engine', () => {
  beforeEach( () => {
    vi.clearAllMocks();
    delete process.env.TRACE_LOCAL_ON;
    delete process.env.TRACE_REMOTE_ON;
    storageLoadMock.mockReset();
  } );

  it( 'init() starts only enabled processors and attaches listeners', async () => {
    process.env.TRACE_LOCAL_ON = '1';
    process.env.TRACE_REMOTE_ON = '0';
    const { init, addEventPhase } = await loadTraceEngine();

    await init();

    expect( localInitMock ).toHaveBeenCalledTimes( 1 );
    expect( s3InitMock ).not.toHaveBeenCalled();

    addEventPhase( 'start', { kind: 'step', name: 'N', id: '1', parentId: 'p', details: { ok: true } } );
    expect( localExecMock ).toHaveBeenCalledTimes( 1 );
    const payload = localExecMock.mock.calls[0][0];
    expect( payload.entry.name ).toBe( 'N' );
    expect( payload.entry.kind ).toBe( 'step' );
    expect( payload.entry.phase ).toBe( 'start' );
    expect( payload.entry.details ).toEqual( { ok: true } );
  } );

  it( 'addEventPhase() emits an entry consumed by processors', async () => {
    // 'on' is one of the accepted truthy spellings (see isStringboolTrue).
    process.env.TRACE_LOCAL_ON = 'on';
    const { init, addEventPhase } = await loadTraceEngine();
    await init();

    addEventPhase( 'end', { kind: 'workflow', name: 'W', id: '2', parentId: 'p2', details: 'done' } );
    expect( localExecMock ).toHaveBeenCalledTimes( 1 );
    const payload = localExecMock.mock.calls[0][0];
    expect( payload.entry.name ).toBe( 'W' );
    expect( payload.entry.phase ).toBe( 'end' );
    expect( payload.entry.details ).toBe( 'done' );
  } );

  it( 'addEventPhaseWithContext() uses storage when available', async () => {
    process.env.TRACE_LOCAL_ON = 'true';
    storageLoadMock.mockReturnValue( { parentId: 'ctx-p', executionContext: { runId: 'r1' } } );
    const { init, addEventPhaseWithContext } = await loadTraceEngine();
    await init();

    addEventPhaseWithContext( 'tick', { kind: 'step', name: 'S', id: '3', details: 1 } );
    expect( localExecMock ).toHaveBeenCalledTimes( 1 );
    const payload = localExecMock.mock.calls[0][0];
    expect( payload.executionContext ).toEqual( { runId: 'r1' } );
    expect( payload.entry.parentId ).toBe( 'ctx-p' );
    expect( payload.entry.name ).toBe( 'S' );
    expect( payload.entry.phase ).toBe( 'tick' );
  } );

  it( 'addEventPhaseWithContext() is a no-op when storage is absent', async () => {
    process.env.TRACE_LOCAL_ON = '1';
    // No async-storage content means we are outside a Temporal environment.
    storageLoadMock.mockReturnValue( undefined );
    const { init, addEventPhaseWithContext } = await loadTraceEngine();
    await init();

    addEventPhaseWithContext( 'noop', { kind: 'step', name: 'X', id: '4', details: null } );
    expect( localExecMock ).not.toHaveBeenCalled();
  } );
} );
|