@output.ai/core 0.0.16 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,86 @@
+ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+ import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
+ import { tmpdir, EOL } from 'node:os';
+ import { join } from 'path';
+
+ const createTempDir = () => mkdtempSync( join( tmpdir(), 'output-sdk-trace-' ) );
+
+ // Async storage mock to drive parent ids
+
+ const mockStorageData = { parentId: undefined, traceId: 'trace-1', traceHelm: 'tests' };
+ vi.mock( '#async_storage', () => ( { Storage: { load: () => mockStorageData } } ) );
+
+ vi.mock( './tracer_tree.js', () => ( { buildLogTree: vi.fn() } ) );
+
+ describe( 'Tracing (public namespace)', () => {
+ beforeEach( () => {
+ vi.resetModules();
+ vi.clearAllMocks();
+ vi.useFakeTimers();
+ vi.setSystemTime( new Date( '2020-01-01T00:00:00.000Z' ) );
+ process.env.TRACING_ENABLED = '1';
+ } );
+
+ afterEach( () => {
+ vi.useRealTimers();
+ delete process.env.TRACING_ENABLED;
+ } );
+
+ it( 'addEventStart writes a start entry', async () => {
+ const originalArgv2 = process.argv[2];
+ const tmp = createTempDir();
+ process.argv[2] = tmp;
+
+ const { Tracing } = await import( './index.js' );
+ Tracing.addEventStart( { id: '1', kind: 'evaluator', name: 'start', details: { a: 1 } } );
+
+ const { buildLogTree } = await import( './tracer_tree.js' );
+ const logPath = buildLogTree.mock.calls[0][0];
+ const raw = readFileSync( logPath, 'utf-8' );
+ const firstLine = raw.split( EOL )[0];
+ const entry = JSON.parse( firstLine );
+ expect( entry ).toMatchObject( { phase: 'start', kind: 'evaluator', name: 'start', id: '1', details: { a: 1 } } );
+
+ rmSync( tmp, { recursive: true, force: true } );
+ process.argv[2] = originalArgv2;
+ } );
+
+ it( 'addEventEnd writes an end entry', async () => {
+ const originalArgv2 = process.argv[2];
+ const tmp = createTempDir();
+ process.argv[2] = tmp;
+
+ const { Tracing } = await import( './index.js' );
+ Tracing.addEventEnd( { id: '1', details: { ok: true } } );
+
+ const { buildLogTree } = await import( './tracer_tree.js' );
+ const logPath = buildLogTree.mock.calls[0][0];
+ const raw = readFileSync( logPath, 'utf-8' );
+ const firstLine = raw.split( EOL )[0];
+ const entry = JSON.parse( firstLine );
+ expect( entry ).toMatchObject( { phase: 'end', id: '1', details: { ok: true } } );
+
+ rmSync( tmp, { recursive: true, force: true } );
+ process.argv[2] = originalArgv2;
+ } );
+
+ it( 'addEventError writes an error entry', async () => {
+ const originalArgv2 = process.argv[2];
+ const tmp = createTempDir();
+ process.argv[2] = tmp;
+
+ const { Tracing } = await import( './index.js' );
+ const error = new Error( 'boom' );
+ Tracing.addEventError( { id: '1', details: error } );
+
+ const { buildLogTree } = await import( './tracer_tree.js' );
+ const logPath = buildLogTree.mock.calls[0][0];
+ const raw = readFileSync( logPath, 'utf-8' );
+ const firstLine = raw.split( EOL )[0];
+ const entry = JSON.parse( firstLine );
+ expect( entry.phase ).toBe( 'error' );
+
+ rmSync( tmp, { recursive: true, force: true } );
+ process.argv[2] = originalArgv2;
+ } );
+ } );
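
Note: each call above appends one NDJSON line to the trace file. Judging from the fields the assertions check and the mocked storage (traceId 'trace-1', fake timers pinned to 2020-01-01), a start entry plausibly serializes like this (illustrative, field order may differ):

{"id":"1","kind":"evaluator","name":"start","phase":"start","details":{"a":1},"timestamp":1577836800000,"traceId":"trace-1","traceHelm":"tests"}
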
@@ -0,0 +1,83 @@
+ import { readFileSync, writeFileSync } from 'node:fs';
+ import { EOL } from 'os';
+
+ /**
+ * @typedef {object} NodeEntry
+ * @property {string} id
+ * @property {string} kind
+ * @property {string} name
+ * @property {number} startedAt
+ * @property {number} endedAt
+ * @property {object} [input]
+ * @property {object} [output]
+ * @property {object} [error]
+ * @property {NodeEntry[]} children
+ */
+ /**
+ * Create a node entry for the tree.
+ *
+ * Properties are listed in the order they should appear in the final file, as this makes it easier to read.
+ *
+ * @param {string} id - Node id
+ * @returns {NodeEntry} The entry with placeholder values
+ */
+ const createEntry = id => ( {
+ id,
+ kind: '',
+ name: '',
+ startedAt: 0,
+ endedAt: 0,
+ input: undefined,
+ output: undefined,
+ error: undefined,
+ children: []
+ } );
+
+ /**
+ * Build a tree of nodes from the raw trace file.
+ *
+ * Each node will have: id, name, kind, children, input, output or error, startedAt, endedAt.
+ *
+ * Entries with the same id are combined according to their phase (start, end or error):
+ * - The details of the start phase become input, timestamp becomes startedAt;
+ * - The details of the end phase become output, timestamp becomes endedAt;
+ * - The details of the error phase become error, timestamp becomes endedAt;
+ * - Only the start phase's kind and name are used.
+ *
+ *
+ * Children are added according to the parentId of each entry.
+ * The resulting tree has a single root: the only node without a parentId, normally the workflow itself.
+ *
+ * @param {string} src - Full path to the raw log file (NDJSON)
+ * @returns {void}
+ */
+ export const buildLogTree = src => {
+ const content = readFileSync( src, 'utf-8' );
+ const lines = content.split( EOL ).filter( l => l.trim().length > 0 );
+ const entries = lines.map( l => JSON.parse( l ) );
+
+ const nodes = new Map();
+ const ensureNode = id => nodes.get( id ) ?? nodes.set( id, createEntry( id ) ).get( id );
+
+ for ( const entry of entries ) {
+ const { kind, id, name, parentId, details, phase, timestamp } = entry;
+ const node = ensureNode( id );
+
+ if ( phase === 'start' ) {
+ Object.assign( node, { input: details, startedAt: timestamp, kind, name } );
+ } else if ( phase === 'end' ) {
+ Object.assign( node, { output: details, endedAt: timestamp } );
+ } else if ( phase === 'error' ) {
+ Object.assign( node, { error: details, endedAt: timestamp } );
+ }
+
+ if ( parentId && phase === 'start' ) {
+ const parent = ensureNode( parentId );
+ parent.children.push( node );
+ parent.children.sort( ( a, b ) => a.startedAt - b.startedAt );
+ }
+ }
+
+ const root = nodes.get( entries.find( e => !e.parentId ).id );
+ writeFileSync( src.replace( /\.raw$/, '.json' ), JSON.stringify( root, undefined, 2 ), 'utf-8' );
+ };
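
To make the phase merging concrete, a minimal sketch with invented data: two raw entries sharing an id collapse into one node, and since neither has a parentId that node becomes the root written to the .json file.

// run-1.raw (NDJSON input)
{"id":"s1","kind":"step","name":"step-1","phase":"start","details":{"x":1},"timestamp":100}
{"id":"s1","phase":"end","details":{"done":true},"timestamp":200}

// run-1.json (output; undefined fields such as error are dropped by JSON.stringify)
{ "id": "s1", "kind": "step", "name": "step-1", "startedAt": 100, "endedAt": 200, "input": { "x": 1 }, "output": { "done": true }, "children": [] }
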
@@ -0,0 +1,115 @@
+ import { describe, it, expect } from 'vitest';
+ import { writeFileSync, readFileSync, rmSync } from 'node:fs';
+ import { mkdtempSync } from 'node:fs';
+ import { tmpdir } from 'node:os';
+ import { join } from 'path';
+ import { EOL } from 'os';
+ import { buildLogTree } from './tracer_tree.js';
+
+ const createTempDir = () => mkdtempSync( join( tmpdir(), 'output-sdk-trace-tree-' ) );
+
+ describe( 'tracer/tracer_tree', () => {
+ it( 'builds a tree from workflow/step/IO entries with grouping and sorting', () => {
+ const tmp = createTempDir();
+ const rawPath = join( tmp, 'run-123.raw' );
+
+ const entries = [
+ // workflow start
+ { kind: 'workflow', phase: 'start', name: 'wf', id: 'wf', parentId: undefined, details: { a: 1 }, timestamp: 1000 },
+ // evaluator start/end
+ { kind: 'evaluator', phase: 'start', name: 'eval', id: 'eval', parentId: 'wf', details: { z: 0 }, timestamp: 1500 },
+ { kind: 'evaluator', phase: 'end', name: 'eval', id: 'eval', parentId: 'wf', details: { z: 1 }, timestamp: 1600 },
+ // step1 start
+ { kind: 'step', phase: 'start', name: 'step-1', id: 's1', parentId: 'wf', details: { x: 1 }, timestamp: 2000 },
+ // IO under step1
+ { kind: 'IO', phase: 'start', name: 'test-1', id: 'io1', parentId: 's1', details: { y: 2 }, timestamp: 2300 },
+ // step2 start
+ { kind: 'step', phase: 'start', name: 'step-2', id: 's2', parentId: 'wf', details: { x: 2 }, timestamp: 2400 },
+ // IO under step2
+ { kind: 'IO', phase: 'start', name: 'test-2', id: 'io2', parentId: 's2', details: { y: 3 }, timestamp: 2500 },
+ { kind: 'IO', phase: 'end', name: 'test-2', id: 'io2', parentId: 's2', details: { y: 4 }, timestamp: 2600 },
+ // IO under step1 ends
+ { kind: 'IO', phase: 'end', name: 'test-1', id: 'io1', parentId: 's1', details: { y: 5 }, timestamp: 2700 },
+ // step1 end
+ { kind: 'step', phase: 'end', name: 'step-1', id: 's1', parentId: 'wf', details: { done: true }, timestamp: 2800 },
+ // step2 end
+ { kind: 'step', phase: 'end', name: 'step-2', id: 's2', parentId: 'wf', details: { done: true }, timestamp: 2900 },
+ // workflow end
+ { kind: 'workflow', phase: 'end', name: 'wf', id: 'wf', parentId: undefined, details: { ok: true }, timestamp: 3000 }
+ ];
+
+ writeFileSync( rawPath, entries.map( e => JSON.stringify( e ) ).join( EOL ) + EOL, 'utf-8' );
+
+ buildLogTree( rawPath );
+
+ const jsonText = readFileSync( rawPath.replace( /\.raw$/, '.json' ), 'utf-8' );
+
+ const expected = {
+ id: 'wf',
+ kind: 'workflow',
+ name: 'wf',
+ startedAt: 1000,
+ endedAt: 3000,
+ input: { a: 1 },
+ output: { ok: true },
+ children: [
+ {
+ id: 'eval',
+ kind: 'evaluator',
+ name: 'eval',
+ startedAt: 1500,
+ endedAt: 1600,
+ input: { z: 0 },
+ output: { z: 1 },
+ children: []
+ },
+ {
+ id: 's1',
+ kind: 'step',
+ name: 'step-1',
+ startedAt: 2000,
+ endedAt: 2800,
+ input: { x: 1 },
+ output: { done: true },
+ children: [
+ {
+ id: 'io1',
+ kind: 'IO',
+ name: 'test-1',
+ startedAt: 2300,
+ endedAt: 2700,
+ input: { y: 2 },
+ output: { y: 5 },
+ children: []
+ }
+ ]
+ },
+ {
+ id: 's2',
+ kind: 'step',
+ name: 'step-2',
+ startedAt: 2400,
+ endedAt: 2900,
+ input: { x: 2 },
+ output: { done: true },
+ children: [
+ {
+ id: 'io2',
+ kind: 'IO',
+ name: 'test-2',
+ startedAt: 2500,
+ endedAt: 2600,
+ input: { y: 3 },
+ output: { y: 4 },
+ children: []
+ }
+ ]
+ }
+ ]
+ };
+
+ expect( jsonText ).toBe( JSON.stringify( expected, undefined, 2 ) );
+
+ rmSync( tmp, { recursive: true, force: true } );
+ } );
+ } );
@@ -0,0 +1,21 @@
+ /**
+ * @typedef {object} SerializedError
+ * @property {string} name - The error constructor name
+ * @property {string} message - The error message
+ * @property {string} stack - The error stack trace
+ */
+
+ /**
+ * Serialize an error object.
+ *
+ * If it has ".cause", recursively serialize the cause chain until an error without one is found.
+ *
+ * @param {Error} error
+ * @returns {SerializedError}
+ */
+ export const serializeError = error =>
+ error.cause ? serializeError( error.cause ) : {
+ name: error.constructor.name,
+ message: error.message,
+ stack: error.stack
+ };
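
One consequence of the recursion worth noting: only the innermost error of a cause chain is serialized, and the outer messages are discarded. A quick illustrative sketch:

const root = new TypeError( 'db offline' );
const mid = new Error( 'query failed', { cause: root } );
const top = new Error( 'request failed', { cause: mid } );

serializeError( top );
// => { name: 'TypeError', message: 'db offline', stack: '...' }
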
@@ -0,0 +1,14 @@
+ import { describe, it, expect } from 'vitest';
+ import { serializeError } from './utils.js';
+
+ describe( 'tracing/utils', () => {
+ it( 'serializeError unwraps causes and keeps message/stack', () => {
+ const inner = new Error( 'inner' );
+ const outer = new Error( 'outer', { cause: inner } );
+
+ const out = serializeError( outer );
+ expect( out.name ).toBe( 'Error' );
+ expect( out.message ).toBe( 'inner' );
+ expect( typeof out.stack ).toBe( 'string' );
+ } );
+ } );
@@ -6,15 +6,11 @@ import { fileURLToPath } from 'node:url';
  import { worker as workerConfig } from '#configs';
  import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
  import { ActivityExecutionInterceptor } from './interceptors/activity.js';
- import { setupGlobalTracer } from './tracer/index.js';
  import { sinks } from './sinks.js';
  import { createCatalog } from './catalog_workflow/index.js';
 
  const __dirname = dirname( fileURLToPath( import.meta.url ) );
 
- // expose the coreTracker so other parts of the SDK can use it
- setupGlobalTracer();
-
  const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } = workerConfig;
 
  // Get caller directory from command line arguments
@@ -1,8 +1,7 @@
  import { Context } from '@temporalio/activity';
- import { Storage } from '../async_storage.js';
- import { trace } from '../tracer/index.js';
+ import { Storage } from '#async_storage';
+ import { addEventStart, addEventEnd, addEventError } from '#tracing';
  import { headersToObject } from '../sandboxed_utils.js';
- import { THIS_LIB_NAME } from '#consts';
  import { METADATA_ACCESS_SYMBOL } from '#consts';
 
  /*
@@ -20,22 +19,28 @@ export class ActivityExecutionInterceptor {
  };
 
  async execute( input, next ) {
- const { workflowExecution: { workflowId }, activityId, activityType, workflowType } = Context.current().info;
+ const { workflowExecution: { workflowId }, activityId, activityType } = Context.current().info;
+ const { traceId, traceHelm } = headersToObject( input.headers );
+ const { type: kind, skipTrace } = this.activities?.[activityType]?.[METADATA_ACCESS_SYMBOL];
 
- const activityFn = this.activities?.[activityType];
- const { type: componentType, skipTrace } = activityFn?.[METADATA_ACCESS_SYMBOL];
+ const traceContext = { kind, id: activityId, parentId: workflowId, name: activityType, traceId, traceHelm };
 
- const context = { workflowId, workflowType, activityId, activityType, ...headersToObject( input.headers ) };
+ if ( !skipTrace ) {
+ addEventStart( { details: input.args[0], ...traceContext } );
+ }
 
- return Storage.runWithContext( async _ => {
+ // run within a nested storage context so child events use this activity as their parent
+ try {
+ const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId, traceId, traceHelm } );
  if ( !skipTrace ) {
- trace( { lib: THIS_LIB_NAME, event: `${componentType}_start`, input: input.args } );
+ addEventEnd( { details: output, ...traceContext } );
  }
- const output = await next( input );
+ return output;
+ } catch ( error ) {
  if ( !skipTrace ) {
- trace( { lib: THIS_LIB_NAME, event: `${componentType}_end`, output } );
+ addEventError( { details: error, ...traceContext } );
  }
- return output;
- }, context );
+ throw error;
+ }
  }
  };
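
The net effect of this interceptor, assuming tracing is enabled and skipTrace is unset: a successful activity contributes a start and an end entry sharing the same traceContext, while a failure contributes a start and an error entry and rethrows. Sketched with hypothetical values (timestamp and trace fields omitted):

// success
{ phase: 'start', kind: 'step', id: 'act-1', parentId: 'wf-1', name: 'myActivity', details: { x: 1 } } // details = input.args[0]
{ phase: 'end', kind: 'step', id: 'act-1', parentId: 'wf-1', name: 'myActivity', details: { ok: true } } // details = output

// failure: the second line becomes
{ phase: 'error', kind: 'step', id: 'act-1', parentId: 'wf-1', name: 'myActivity', details: error }
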
@@ -1,5 +1,5 @@
  // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
- import { workflowInfo } from '@temporalio/workflow';
+ import { workflowInfo, proxySinks, ApplicationFailure } from '@temporalio/workflow';
  import { memoToHeaders } from '../sandboxed_utils.js';
 
  /*
@@ -18,6 +18,24 @@ class HeadersInjectionInterceptor {
  }
  };
 
+ const sinks = proxySinks();
+
+ class WorkflowExecutionInterceptor {
+ async execute( input, next ) {
+ sinks.trace.addWorkflowEventStart( input.args[0] );
+ try {
+ const output = await next( input );
+ // if the workflow is root, the result wraps "output" and "trace" in an object; log only the output so the trace stays hidden
+ sinks.trace.addWorkflowEventEnd( !workflowInfo().memo.parentId ? output.output : output );
+ return output;
+ } catch ( error ) {
+ sinks.trace.addWorkflowEventError( error );
+ throw new ApplicationFailure( error.message, error.constructor.name );
+ }
+ }
+ };
+
  export const interceptors = () => ( {
+ inbound: [ new WorkflowExecutionInterceptor() ],
  outbound: [ new HeadersInjectionInterceptor( workflowInfo().workflowType ) ]
  } );
@@ -1,7 +1,7 @@
  import { dirname, join } from 'path';
  import { fileURLToPath } from 'url';
- import { sendWebhookPost, readTraceFile } from '#internal_activities';
- import { SEND_WEBHOOK_ACTIVITY_NAME, WORKFLOWS_INDEX_FILENAME, READ_TRACE_FILE } from '#consts';
+ import { sendWebhook, readTraceFile } from '#internal_activities';
+ import { ACTIVITY_SEND_WEBHOOK, ACTIVITY_READ_TRACE_FILE, WORKFLOWS_INDEX_FILENAME } from '#consts';
  import {
  iteratorOverImportedComponents,
  recursiveNavigateWhileCollecting,
@@ -20,8 +20,8 @@ export async function loadActivities( path ) {
  }
 
  // system activities
- activities[SEND_WEBHOOK_ACTIVITY_NAME] = sendWebhookPost;
- activities[READ_TRACE_FILE] = readTraceFile;
+ activities[ACTIVITY_SEND_WEBHOOK] = sendWebhook;
+ activities[ACTIVITY_READ_TRACE_FILE] = readTraceFile;
  return activities;
  };
 
@@ -1,18 +1,15 @@
  import { describe, it, expect, vi, beforeEach } from 'vitest';
 
- const METADATA_ACCESS_SYMBOL = Symbol( '__metadata' );
-
  vi.mock( '#consts', () => ( {
- SEND_WEBHOOK_ACTIVITY_NAME: '__internal#sendWebhookPost',
- READ_TRACE_FILE: '__internal#readTraceFile',
- WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js',
- METADATA_ACCESS_SYMBOL
+ ACTIVITY_SEND_WEBHOOK: '__internal#sendWebhook',
+ ACTIVITY_READ_TRACE_FILE: '__internal#readTraceFile',
+ WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js'
  } ) );
 
- const sendWebhookPostMock = vi.fn();
+ const sendWebhookMock = vi.fn();
  const readTraceFileMock = vi.fn();
  vi.mock( '#internal_activities', () => ( {
- sendWebhookPost: sendWebhookPostMock,
+ sendWebhook: sendWebhookMock,
  readTraceFile: readTraceFileMock
  } ) );
 
@@ -41,7 +38,7 @@ describe( 'worker/loader', () => {
 
  const activities = await loadActivities( '/root' );
  expect( activities['/a#Act1'] ).toBeTypeOf( 'function' );
- expect( activities['__internal#sendWebhookPost'] ).toBe( sendWebhookPostMock );
+ expect( activities['__internal#sendWebhook'] ).toBe( sendWebhookMock );
  expect( activities['__internal#readTraceFile'] ).toBe( readTraceFileMock );
  } );
 
@@ -1,16 +1,62 @@
- import { Storage } from './async_storage.js';
- import { trace } from './tracer/index.js';
- import { THIS_LIB_NAME } from '#consts';
+ import { addEventStart, addEventEnd, addEventError } from '#tracing';
 
+ /**
+ * Emit a workflow trace event
+ *
+ * @param {function} method - Trace function to call
+ * @param {object} workflowInfo - Temporal workflowInfo object
+ * @param {object} details - The details to attach to the event
+ */
+ const addWorkflowEvent = ( method, workflowInfo, details ) => {
+ const { workflowId: id, workflowType: name, memo: { parentId, traceId, traceHelm } } = workflowInfo;
+ method( { id, kind: 'workflow', name, details, parentId, traceId, traceHelm } );
+ };
+
+ /**
+ * Emit a trace event with the given configuration
+ *
+ * @param {function} method - Trace function to call
+ * @param {object} workflowInfo - Temporal workflowInfo object
+ * @param {object} options - Trace options, such as id, kind, name and details
+ */
+ const addEvent = ( method, workflowInfo, options ) => {
+ const { id, name, kind, details } = options;
+ const { workflowId, memo: { traceId, traceHelm } } = workflowInfo;
+ method( { id, kind, name, details, parentId: workflowId, traceId, traceHelm } );
+ };
+
+ // This sink allows the sandboxed Temporal environment to send trace logs back to the main thread.
  export const sinks = {
- // This sink allow for sandbox Temporal environment to send trace logs back to the main thread.
- log: {
- trace: {
- fn( workflowInfo, args ) {
- const { workflowId, workflowType, memo } = workflowInfo;
- Storage.runWithContext( _ => trace( { lib: THIS_LIB_NAME, ...args } ), { workflowId, workflowType, ...memo } );
- },
+ trace: {
+ addWorkflowEventStart: {
+ fn: ( ...args ) => addWorkflowEvent( addEventStart, ...args ),
+ callDuringReplay: false
+ },
+
+ addWorkflowEventEnd: {
+ fn: ( ...args ) => addWorkflowEvent( addEventEnd, ...args ),
+ callDuringReplay: false
+ },
+
+ addWorkflowEventError: {
+ fn: ( ...args ) => addWorkflowEvent( addEventError, ...args ),
  callDuringReplay: false
+ },
+
+ addEventStart: {
+ fn: ( ...args ) => addEvent( addEventStart, ...args ),
+ callDuringReplay: false
+ },
+
+ addEventEnd: {
+ fn: ( ...args ) => addEvent( addEventEnd, ...args ),
+ callDuringReplay: false
+ },
+
+ addEventError: {
+ fn: ( ...args ) => addEvent( addEventError, ...args ),
+ callDuringReplay: false
+
  }
  }
  };
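
For these handlers to fire, the worker process has to register the sinks when it is created; worker/index.js imports them (see the earlier hunk), presumably passing them to Temporal's Worker.create along these lines (a sketch with placeholder options, not the package's actual bootstrap code):

import { Worker } from '@temporalio/worker';
import { sinks } from './sinks.js';

const worker = await Worker.create( {
  taskQueue: 'example-queue', // placeholder
  workflowsPath: './workflows.js', // placeholder
  sinks // makes sinks.trace.* calls from the sandbox run in the main thread
} );
await worker.run();
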
@@ -1,75 +0,0 @@
- import { Storage } from '../async_storage.js';
- import { mkdirSync, existsSync, readdirSync, appendFileSync } from 'node:fs';
- import { join } from 'path';
- import { EOL } from 'os';
- import { buildLogTree } from './tracer_tree.js';
- import { tracing as tracingConfig } from '#configs';
-
- const callerDir = process.argv[2];
-
- /**
- * Appends new information to a file
- *
- * Information has to be a JSON
- *
- * File is encoded in utf-8
- *
- * @param {string} path - The full filename
- * @param {object} json - The content
- */
- const flushEntry = ( path, json ) => appendFileSync( path, JSON.stringify( json ) + EOL, 'utf-8' );
-
- /**
- * Add an event to the execution trace file.
- *
- * Events normally are the result of an operation, either a function call or an IO.
- *
- * @param {object} options
- * @param {string} options.lib - The macro part of the platform that triggered the event
- * @param {string} options.event - The name of the event
- * @param {any} [options.input] - The input of the operation
- * @param {any} [options.output] - The output of the operation
- */
- export function trace( { lib, event, input = undefined, output = undefined } ) {
- const now = Date.now();
-
- if ( !tracingConfig.enabled ) {
- return;
- }
-
- const {
- activityId: stepId,
- activityType: stepName,
- workflowId,
- workflowType,
- workflowPath,
- parentWorkflowId,
- rootWorkflowId,
- rootWorkflowType
- } = Storage.load();
-
- const entry = { event, input, lib, output, parentWorkflowId, stepId, stepName, timestamp: now, workflowId, workflowPath, workflowType };
-
- // test for rootWorkflow to append to the same file as the parent/grandparent
- const outputDir = join( callerDir, 'logs', 'runs', rootWorkflowType ?? workflowType );
- if ( !existsSync( outputDir ) ) {
- mkdirSync( outputDir, { recursive: true } );
- }
-
- const suffix = `-${rootWorkflowId ?? workflowId}.raw`;
- const logFile = readdirSync( outputDir ).find( f => f.endsWith( suffix ) ) ?? `${new Date( now ).toISOString()}-${suffix}`;
- const logPath = join( outputDir, logFile );
-
- flushEntry( logPath, entry );
- buildLogTree( logPath );
- };
-
- /**
- * Setup the global tracer function, so it is available to be used by other libraries
- *
- * It will be situated in the global object, under Symbol.for('__trace')
- *
- * @returns {object} The assigned globalThis
- */
- export const setupGlobalTracer = () =>
- Object.defineProperty( globalThis, Symbol.for( '__trace' ), { value: trace, writable: false, enumerable: false, configurable: false } );