@output.ai/core 0.0.8 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +96 -59
- package/package.json +10 -7
- package/src/configs.js +28 -6
- package/src/consts.js +1 -0
- package/src/errors.js +11 -0
- package/src/interface/step.js +16 -2
- package/src/interface/utils.js +41 -4
- package/src/interface/utils.spec.js +71 -0
- package/src/interface/validations/ajv_provider.js +3 -0
- package/src/interface/validations/runtime.js +69 -0
- package/src/interface/validations/runtime.spec.js +50 -0
- package/src/interface/validations/static.js +67 -0
- package/src/interface/validations/static.spec.js +101 -0
- package/src/interface/webhook.js +2 -0
- package/src/interface/workflow.js +42 -17
- package/src/worker/index.js +8 -4
- package/src/worker/interceptors/activity.js +3 -2
- package/src/worker/internal_utils.js +9 -3
- package/src/worker/sinks.js +2 -1
- package/src/worker/tracer/index.js +35 -3
- package/src/worker/tracer/index.test.js +103 -0
- package/src/worker/tracer/tracer_tree.js +29 -5
- package/src/worker/tracer/tracer_tree.test.js +116 -0
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.js +24 -8
- package/src/worker/webpack_loaders/workflow_rewriter/index.mjs +3 -1
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.js +9 -3
- package/src/worker/webpack_loaders/workflow_rewriter/tools.js +30 -10
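The changes in this release center on three things: definition-time (static) and run-time validation for steps, workflows, and webhooks; JSDoc and unit tests for the tracer; and guard-clause cleanups in the worker internals. For orientation, here is a hypothetical usage sketch of the validated workflow() API; the '@output.ai/core' import specifier and all field values are illustrative assumptions, not taken from the package.

// Hypothetical consumer code, not part of the diff below.
import { workflow } from '@output.ai/core'; // assumed entry point

export const greetUser = workflow( {
  name: 'greet_user', // static validation requires /^[a-z_][a-z0-9_]*$/i
  description: 'Greets a user by name',
  inputSchema: { type: 'object', properties: { name: { type: 'string' } } },
  outputSchema: { type: 'object', properties: { message: { type: 'string' } } },
  fn: async input => ( { message: `Hello, ${input.name}` } )
} );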
package/src/interface/validations/static.spec.js
ADDED
@@ -0,0 +1,101 @@
+import { describe, it, expect } from 'vitest';
+import { validateStep, validateWorkflow, validateCreateWebhook, StaticValidationError } from './static.js';
+
+const validArgs = Object.freeze( {
+  name: 'valid_name',
+  description: 'desc',
+  inputSchema: { type: 'object' },
+  outputSchema: { type: 'object' },
+  fn: () => {}
+} );
+
+describe( 'interface/validator', () => {
+  describe( 'validateStep', () => {
+    it( 'passes for valid args', () => {
+      expect( () => validateStep( { ...validArgs } ) ).not.toThrow();
+    } );
+
+    it( 'rejects missing name', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected string, received undefined\n → at name' );
+      expect( () => validateStep( { ...validArgs, name: undefined } ) ).toThrow( error );
+    } );
+
+    it( 'rejects non-string name', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected string, received number\n → at name' );
+      expect( () => validateStep( { ...validArgs, name: 123 } ) ).toThrow( error );
+    } );
+
+    it( 'rejects invalid name pattern', () => {
+      const error = new StaticValidationError( '✖ Invalid string: must match pattern /^[a-z_][a-z0-9_]*$/i\n → at name' );
+      expect( () => validateStep( { ...validArgs, name: '-bad' } ) ).toThrow( error );
+    } );
+
+    it( 'rejects non-string description', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected string, received number\n → at description' );
+      expect( () => validateStep( { ...validArgs, description: 10 } ) ).toThrow( error );
+    } );
+
+    it( 'rejects non-object inputSchema', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected object, received string\n → at inputSchema' );
+      expect( () => validateStep( { ...validArgs, inputSchema: 'not-an-object' } ) ).toThrow( error );
+    } );
+
+    it( 'rejects invalid inputSchema structure', () => {
+      const error = new StaticValidationError( '✖ data/type must be equal to one of the allowed values, \
+data/type must be array, data/type must match a schema in anyOf\n → at inputSchema' );
+      expect( () => validateStep( { ...validArgs, inputSchema: { type: 1 } } ) ).toThrow( error );
+    } );
+
+    it( 'rejects non-object outputSchema', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected object, received number\n → at outputSchema' );
+      expect( () => validateStep( { ...validArgs, outputSchema: 10 } ) ).toThrow( error );
+    } );
+
+    it( 'rejects invalid outputSchema structure', () => {
+      const error = new StaticValidationError( '✖ data/type must be equal to one of the allowed values, \
+data/type must be array, data/type must match a schema in anyOf\n → at outputSchema' );
+      expect( () => validateStep( { ...validArgs, outputSchema: { type: 1 } } ) ).toThrow( error );
+    } );
+
+    it( 'rejects missing fn', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected function, received undefined\n → at fn' );
+      expect( () => validateStep( { ...validArgs, fn: undefined } ) ).toThrow( error );
+    } );
+
+    it( 'rejects non-function fn', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected function, received string\n → at fn' );
+      expect( () => validateStep( { ...validArgs, fn: 'not-fn' } ) ).toThrow( error );
+    } );
+  } );
+
+  describe( 'validateWorkflow', () => {
+    it( 'passes for valid args', () => {
+      expect( () => validateWorkflow( { ...validArgs } ) ).not.toThrow();
+    } );
+  } );
+
+  describe( 'validate webhook', () => {
+    it( 'passes with valid http url', () => {
+      expect( () => validateCreateWebhook( { url: 'http://example.com' } ) ).not.toThrow();
+    } );
+
+    it( 'passes with valid https url', () => {
+      expect( () => validateCreateWebhook( { url: 'https://example.com/path?q=1' } ) ).not.toThrow();
+    } );
+
+    it( 'rejects missing url', () => {
+      const error = new StaticValidationError( '✖ Invalid input: expected string, received undefined\n → at url' );
+      expect( () => validateCreateWebhook( { } ) ).toThrow( error );
+    } );
+
+    it( 'rejects invalid scheme', () => {
+      const error = new StaticValidationError( '✖ Invalid URL\n → at url' );
+      expect( () => validateCreateWebhook( { url: 'ftp://example.com' } ) ).toThrow( error );
+    } );
+
+    it( 'rejects malformed url', () => {
+      const error = new StaticValidationError( '✖ Invalid URL\n → at url' );
+      expect( () => validateCreateWebhook( { url: 'http:////' } ) ).toThrow( error );
+    } );
+  } );
+} );
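The assertions above pin exact error strings: field-type failures use the '✖ Invalid input … → at <field>' format, while schema-structure failures surface Ajv-style messages ('data/type must be …'), which suggests static.js layers a field validator over the ajv_provider for the JSON-Schema arguments. static.js itself is not shown in this diff, so the following is only a sketch of the contract these tests pin down for the webhook case, not the package's implementation.

// Hypothetical sketch: throw a StaticValidationError that names the offending field.
export class StaticValidationError extends Error {}

export function validateCreateWebhook( { url } = {} ) {
  if ( typeof url !== 'string' ) {
    throw new StaticValidationError( `✖ Invalid input: expected string, received ${typeof url}\n → at url` );
  }
  let parsed;
  try {
    parsed = new URL( url ); // throws on malformed input such as 'http:////'
  } catch {
    throw new StaticValidationError( '✖ Invalid URL\n → at url' );
  }
  if ( ![ 'http:', 'https:' ].includes( parsed.protocol ) ) {
    throw new StaticValidationError( '✖ Invalid URL\n → at url' ); // e.g. ftp://
  }
}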
package/src/interface/webhook.js
CHANGED
@@ -1,8 +1,10 @@
 // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
 import { defineSignal, setHandler, proxyActivities, workflowInfo } from '@temporalio/workflow';
 import { SEND_WEBHOOK_ACTIVITY_NAME } from '#consts';
+import { validateCreateWebhook } from './validations/static.js';

 export async function createWebhook( { url, payload } ) {
+  validateCreateWebhook( { url, payload } );
   const workflowId = workflowInfo();

   await proxyActivities( temporalActivityConfigs )[SEND_WEBHOOK_ACTIVITY_NAME]( { url, workflowId, payload } );
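createWebhook() now fails fast on an invalid URL before proxying the send-webhook activity. A hypothetical call site inside a workflow body (the import path is an assumption):

// Hypothetical usage, assuming createWebhook is re-exported from the package root.
import { createWebhook } from '@output.ai/core';

export async function notifyOnCompletion( input ) {
  // validateCreateWebhook() throws before the SEND_WEBHOOK_ACTIVITY_NAME activity is scheduled
  await createWebhook( { url: 'https://example.com/hooks/run-finished', payload: { runId: input.runId } } );
}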
package/src/interface/workflow.js
CHANGED
@@ -1,8 +1,10 @@
 // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
-import { proxyActivities, inWorkflowContext, executeChild, workflowInfo } from '@temporalio/workflow';
-import { getInvocationDir } from './utils.js';
+import { proxyActivities, inWorkflowContext, executeChild, workflowInfo, ApplicationFailure } from '@temporalio/workflow';
+import { getInvocationDir, invokeFnAndValidateOutputPreservingExecutionModel } from './utils.js';
 import { setMetadata } from './metadata.js';
 import { FatalError, ValidationError } from '../errors.js';
+import { validateWorkflow } from './validations/static.js';
+import { validateWorkflowInput, validateWorkflowOutput } from './validations/runtime.js';

 const temporalActivityConfigs = {
   startToCloseTimeout: '20 minute',
@@ -16,32 +18,55 @@ const temporalActivityConfigs = {
 };

 export function workflow( { name, description, inputSchema, outputSchema, fn } ) {
+  validateWorkflow( { name, description, inputSchema, outputSchema, fn } );
   const workflowPath = getInvocationDir();

   const steps = proxyActivities( temporalActivityConfigs );

   const wrapper = async input => {
-
-
+    try {
+      if ( inputSchema ) {
+        validateWorkflowInput( name, inputSchema, input );
+      }
+
+      // this returns a plain function, for example, in unit tests
+      if ( !inWorkflowContext() ) {
+        if ( outputSchema ) {
+          return invokeFnAndValidateOutputPreservingExecutionModel( fn, input, validateWorkflowOutput.bind( null, name, outputSchema ) );
+        }
+        return fn( input );
+      }
+
+      Object.assign( workflowInfo().memo, { workflowPath } );

-
+      // binds the methods called in the code that Webpack loader will add, they will exposed via "this"
+      const boundFn = fn.bind( {
+        invokeStep: async ( stepName, input ) => steps[`${workflowPath}#${stepName}`]( input ),

-
-
-      invokeStep: async ( stepName, input ) => steps[`${workflowPath}#${stepName}`]( input ),
+        startWorkflow: async ( name, input ) => {
+          const { memo, workflowId, workflowType } = workflowInfo();

-
-
+          // Checks if current memo has rootWorkflowId, which means current execution is already a child
+          // Then it sets the memory for the child execution passing along who's the original workflow is and its type
+          const workflowMemory = memo.rootWorkflowId ?
+            { parentWorkflowId: workflowId, rootWorkflowType: memo.rootWorkflowType, rootWorkflowId: memo.rootWorkflowId } :
+            { parentWorkflowId: workflowId, rootWorkflowId: workflowId, rootWorkflowType: workflowType };

-
-
-
-            { parentWorkflowId: workflowId, rootWorkflowType: memo.rootWorkflowType, rootWorkflowId: memo.rootWorkflowId } :
-            { parentWorkflowId: workflowId, rootWorkflowId: workflowId, rootWorkflowType: workflowType };
+          return executeChild( name, { args: input ? [ input ] : [], memo: workflowMemory } );
+        }
+      } );

-
+      if ( outputSchema ) {
+        return invokeFnAndValidateOutputPreservingExecutionModel( boundFn, input, validateWorkflowOutput.bind( null, name, outputSchema ) );
       }
-
+      return boundFn( input );
+    } catch ( error ) {
+      /*
+       * Any errors in the workflow will interrupt its execution since the workflow is designed to orchestrate and
+       * IOs should be made in steps
+       */
+      throw new ApplicationFailure( error.message, error.constructor.name );
+    }
   };

   setMetadata( wrapper, { name, description, inputSchema, outputSchema } );
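Both the in-sandbox and plain-function paths above now funnel output checking through invokeFnAndValidateOutputPreservingExecutionModel from utils.js. That helper's body is not part of this hunk, so the following is only a sketch of the contract its name and call sites imply: run fn, validate its output, and keep a synchronous fn synchronous instead of forcing everything through a promise.

// Hypothetical sketch, not the code in src/interface/utils.js.
function invokeFnAndValidateOutputPreservingExecutionModel( fn, input, validateOutput ) {
  const result = fn( input );
  if ( result && typeof result.then === 'function' ) {
    // async execution model: validate once the promise resolves, still return a promise
    return result.then( output => {
      validateOutput( output );
      return output;
    } );
  }
  // sync execution model: validate and hand the value straight back
  validateOutput( result );
  return result;
}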
package/src/worker/index.js
CHANGED
@@ -15,7 +15,7 @@ const __dirname = dirname( fileURLToPath( import.meta.url ) );
 // expose the coreTracker so other parts of the SDK can use it
 setupGlobalTracer();

-const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue } = workerConfig;
+const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } = workerConfig;

 // Get caller directory from command line arguments
 const callerDir = process.argv[2];
@@ -53,8 +53,12 @@ const callerDir = process.argv[2];
   maxConcurrentActivityTaskExecutions: maxActivities,
   bundlerOptions: {
     webpackConfigHook: config => {
-      if ( !config.module ) {
-
+      if ( !config.module ) {
+        config.module = { };
+      }
+      if ( !config.module.rules ) {
+        config.module.rules = [];
+      }
       // Use AST-based loader for rewriting steps/workflows
       config.module.rules.push( {
         test: /\.js$/,
@@ -71,7 +75,7 @@ const callerDir = process.argv[2];
 console.log( '[Core]', 'Starting catalog workflow...' );
 await new Client( { connection, namespace } ).workflow.start( 'catalog', {
   taskQueue,
-  workflowId:
+  workflowId: catalogId, // use the name of the task queue as the catalog name, ensuring uniqueness
   workflowIdConflictPolicy: WorkflowIdConflictPolicy.TERMINATE_EXISTING,
   args: [ catalog ]
 } );
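The worker now reads catalogId from its configuration and uses it as the catalog workflow's id; combined with the existing TERMINATE_EXISTING conflict policy, restarting a worker replaces the previous catalog run rather than colliding with it. A hypothetical configuration shape, with field names taken from the destructuring above and all values illustrative:

// Illustrative only; how workerConfig is actually loaded is not shown in this diff.
const workerConfig = {
  address: 'temporal.example.com:7233',
  apiKey: process.env.TEMPORAL_API_KEY,
  namespace: 'default',
  taskQueue: 'my-app',
  catalogId: 'my-app', // the hunk's comment suggests reusing the task queue name
  maxWorkflows: 10,
  maxActivities: 10
};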
package/src/worker/interceptors/activity.js
CHANGED
@@ -3,6 +3,7 @@ import { Storage } from '../async_storage.js';
 import { trace } from '../tracer/index.js';
 import { TraceEvent } from '../tracer/types.js';
 import { headersToObject } from '../sandboxed_utils.js';
+import { THIS_LIB_NAME } from '#consts';

 /*
   This interceptor is called for every activity execution
@@ -19,9 +20,9 @@ export class ActivityExecutionInterceptor {
     const context = { workflowId, workflowType, activityId, activityType, ...headersToObject( input.headers ) };

     return Storage.runWithContext( async _ => {
-      trace( { lib:
+      trace( { lib: THIS_LIB_NAME, event: TraceEvent.STEP_START, input: input.args } );
       const output = await next( input );
-      trace( { lib:
+      trace( { lib: THIS_LIB_NAME, event: TraceEvent.STEP_END, output } );
       return output;
     }, context );
   }
package/src/worker/internal_utils.js
CHANGED
@@ -16,7 +16,9 @@ import { writeFileSync, existsSync, readdirSync, mkdirSync } from 'fs';
 * */
 export function recursiveNavigateWhileCollecting( path, filenames, collection = [], ignoreDirNames = [ 'vendor', 'node_modules' ] ) {
   for ( const entry of readdirSync( path, { withFileTypes: true } ) ) {
-    if ( ignoreDirNames.includes( entry.name ) ) {
+    if ( ignoreDirNames.includes( entry.name ) ) {
+      continue;
+    }

     const pathname = resolve( path, entry.name );
     if ( entry.isDirectory() ) {
@@ -38,7 +40,9 @@ export async function *iteratorOverImportedComponents( paths ) {
     const imported = await import( url );
     for ( const component of Object.values( imported ) ) {
       const metadata = component[METADATA_ACCESS_SYMBOL];
-      if ( !metadata ) {
+      if ( !metadata ) {
+        continue;
+      }
       yield { component, metadata, path, pathname };
     }
   }
@@ -49,6 +53,8 @@ export async function *iteratorOverImportedComponents( paths ) {
  */
 export function writeFileOnLocationSync( path, content ) {
   const targetDir = path.split( '/' ).slice( 0, -1 ).join( '/' );
-  if ( targetDir && !existsSync( targetDir ) ) {
+  if ( targetDir && !existsSync( targetDir ) ) {
+    mkdirSync( targetDir, { recursive: true } );
+  }
   writeFileSync( path, content, 'utf-8' );
 };
package/src/worker/sinks.js
CHANGED
@@ -1,5 +1,6 @@
 import { Storage } from './async_storage.js';
 import { trace } from './tracer/index.js';
+import { THIS_LIB_NAME } from '#consts';

 export const sinks = {
   // This sink allow for sandbox Temporal environment to send trace logs back to the main thread.
@@ -7,7 +8,7 @@ export const sinks = {
   trace: {
     fn( workflowInfo, args ) {
       const { workflowId, workflowType, memo } = workflowInfo;
-      Storage.runWithContext( _ => trace( { lib:
+      Storage.runWithContext( _ => trace( { lib: THIS_LIB_NAME, ...args } ), { workflowId, workflowType, ...memo } );
     },
     callDuringReplay: false
   }
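This sink is the bridge that lets code inside the Temporal sandbox emit trace events to the main thread, where trace() appends them under the workflow's storage context. A hypothetical workflow-side call; the name under which this sink object is registered on the Worker is not shown in the diff, so 'core' below is an assumption:

import { proxySinks } from '@temporalio/workflow';

const { core } = proxySinks(); // assumed sink name
// lib defaults to THIS_LIB_NAME unless the args object overrides it
core.trace( { event: 'prompt_rendered', input: { template: 'greeting' } } );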
package/src/worker/tracer/index.js
CHANGED
@@ -7,12 +7,35 @@ import { tracing as tracingConfig } from '#configs';

 const callerDir = process.argv[2];

+/**
+ * Appends new information to a file
+ *
+ * Information has to be a JSON
+ *
+ * File is encoded in utf-8
+ *
+ * @param {string} path - The full filename
+ * @param {object} json - The content
+ */
 const flushEntry = ( path, json ) => appendFileSync( path, JSON.stringify( json ) + EOL, 'utf-8' );

-
+/**
+ * Add an event to the execution trace file.
+ *
+ * Events normally are the result of an operation, either a function call or an IO.
+ *
+ * @param {object} options
+ * @param {string} options.lib - The macro part of the platform that triggered the event
+ * @param {string} options.event - The name of the event
+ * @param {any} [options.input] - The input of the operation
+ * @param {any} [options.output] - The output of the operation
+ */
+export function trace( { lib, event, input = undefined, output = undefined } ) {
   const now = Date.now();

-  if ( !tracingConfig.enabled ) {
+  if ( !tracingConfig.enabled ) {
+    return;
+  }

   const {
     activityId: stepId,
@@ -29,7 +52,9 @@ export function trace( { lib, event, input, output } ) {

   // test for rootWorkflow to append to the same file as the parent/grandparent
   const outputDir = join( callerDir, 'logs', 'runs', rootWorkflowType ?? workflowType );
-  if ( !existsSync( outputDir ) ) {
+  if ( !existsSync( outputDir ) ) {
+    mkdirSync( outputDir, { recursive: true } );
+  }

   const suffix = `-${rootWorkflowId ?? workflowId}.raw`;
   const logFile = readdirSync( outputDir ).find( f => f.endsWith( suffix ) ) ?? `${new Date( now ).toISOString()}-${suffix}`;
@@ -39,5 +64,12 @@ export function trace( { lib, event, input, output } ) {
   buildLogTree( logPath );
 };

+/**
+ * Setup the global tracer function, so it is available to be used by other libraries
+ *
+ * It will be situated in the global object, under Symbol.for('__trace')
+ *
+ * @returns {object} The assigned globalThis
+ */
 export const setupGlobalTracer = () =>
   Object.defineProperty( globalThis, Symbol.for( '__trace' ), { value: trace, writable: false, enumerable: false, configurable: false } );
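Other libraries running on the main thread can emit into the same raw trace file through the non-writable global installed by setupGlobalTracer(); entries whose lib differs from THIS_LIB_NAME are later nested under the enclosing step by tracer_tree.js. An illustrative emitter (the lib value, event name, and payload are made up):

const emitTrace = globalThis[ Symbol.for( '__trace' ) ];
emitTrace( { lib: 'llm', event: 'completion', input: { prompt: 'hi' }, output: { text: 'hello' } } );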
package/src/worker/tracer/index.test.js
ADDED
@@ -0,0 +1,103 @@
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
+import { tmpdir, EOL } from 'node:os';
+import { join } from 'path';
+import { THIS_LIB_NAME } from '#consts';
+
+const createTempDir = () => mkdtempSync( join( tmpdir(), 'flow-sdk-trace-' ) );
+
+// Single mocks (configured per-test by mutating the backing objects)
+const mockConfig = { tracing: { enabled: false } };
+vi.mock( '#configs', () => mockConfig );
+
+const mockStorageData = {
+  activityId: 's1',
+  activityType: 'Step 1',
+  workflowId: 'wf1',
+  workflowType: 'prompt',
+  workflowPath: '/workflows/prompt.js',
+  parentWorkflowId: undefined,
+  rootWorkflowId: undefined,
+  rootWorkflowType: undefined
+};
+vi.mock( '../async_storage.js', () => ( {
+  Storage: { load: () => mockStorageData }
+} ) );
+
+vi.mock( './tracer_tree.js', () => ( { buildLogTree: vi.fn() } ) );
+
+describe( 'tracer/index', () => {
+  beforeEach( () => {
+    vi.resetModules();
+    vi.clearAllMocks();
+    vi.useFakeTimers();
+    vi.setSystemTime( new Date( '2020-01-01T00:00:00.000Z' ) );
+
+  } );
+
+  afterEach( () => {
+    vi.useRealTimers();
+  } );
+
+  it( 'writes a raw log entry and calls buildLogTree (mocked)', async () => {
+    const originalArgv2 = process.argv[2];
+    const tmp = createTempDir();
+    process.argv[2] = tmp;
+
+    mockConfig.tracing.enabled = true;
+
+    const { buildLogTree } = await import( './tracer_tree.js' );
+    const { trace } = await import( './index.js' );
+
+    const input = { foo: 1 };
+    trace( { lib: THIS_LIB_NAME, event: 'workflow_start', input, output: null } );
+
+    expect( buildLogTree ).toHaveBeenCalledTimes( 1 );
+    const logPath = buildLogTree.mock.calls[0][0];
+
+    const raw = readFileSync( logPath, 'utf-8' );
+    const [ firstLine ] = raw.split( EOL );
+    const entry = JSON.parse( firstLine );
+
+    expect( entry ).toMatchObject( {
+      lib: THIS_LIB_NAME,
+      event: 'workflow_start',
+      input,
+      output: null,
+      stepId: 's1',
+      stepName: 'Step 1',
+      workflowId: 'wf1',
+      workflowType: 'prompt',
+      workflowPath: '/workflows/prompt.js'
+    } );
+    expect( typeof entry.timestamp ).toBe( 'number' );
+
+    rmSync( tmp, { recursive: true, force: true } );
+    process.argv[2] = originalArgv2;
+  } );
+
+  it( 'does nothing when tracing is disabled', async () => {
+    const originalArgv2 = process.argv[2];
+    const tmp = createTempDir();
+    process.argv[2] = tmp;
+
+    mockConfig.tracing.enabled = false;
+    const { buildLogTree } = await import( './tracer_tree.js' );
+    const { trace } = await import( './index.js' );
+
+    trace( { lib: THIS_LIB_NAME, event: 'workflow_start', input: {}, output: null } );
+
+    expect( buildLogTree ).not.toHaveBeenCalled();
+
+    rmSync( tmp, { recursive: true, force: true } );
+    process.argv[2] = originalArgv2;
+  } );
+
+  it( 'setupGlobalTracer installs global symbol', async () => {
+    mockConfig.tracing.enabled = false;
+    const { setupGlobalTracer } = await import( './index.js' );
+    setupGlobalTracer();
+    const sym = Symbol.for( '__trace' );
+    expect( typeof globalThis[sym] ).toBe( 'function' );
+  } );
+} );
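Read together with the tracer hunks above, the mocked storage and fake clock in this test imply a raw file of roughly the following shape under the caller directory; the exact assembly of logPath from outputDir and logFile is not shown in the hunks, so the full path is an inference:

// <callerDir>/logs/runs/<rootWorkflowType ?? workflowType>/<ISO timestamp>-<suffix>
// e.g. /tmp/flow-sdk-trace-XXXXXX/logs/runs/prompt/2020-01-01T00:00:00.000Z--wf1.raw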
package/src/worker/tracer/tracer_tree.js
CHANGED
@@ -1,16 +1,40 @@
 import { readFileSync, writeFileSync } from 'node:fs';
 import { EOL } from 'os';
 import { TraceEvent } from './types.js';
+import { THIS_LIB_NAME } from '#consts';

-
+/**
+ * Sorting function that compares two objects and ASC sort them by either .startedAt or, if not present, .timestamp
+ *
+ * @param {object} a
+ * @param {object} b
+ * @returns {number} The sorting result [1,-1]
+ */
+const timestampAscSort = ( a, b ) => {
+  if ( a.startedAt ) {
+    return a.startedAt > b.startedAt ? 1 : 1;
+  }
+  return a.timestamp > b.timestamp ? 1 : -1;
+};

+/**
+ * Add a member to an array an sort it. It is a mutating method.
+ *
+ * @param {array} arr - The arr to be changed
+ * @param {any} entry - The entry to be added
+ * @param {Function} sorter - The sort function to be used (within .filter)
+ */
 const pushSort = ( arr, entry, sorter ) => {
   arr.push( entry );
   arr.sort( sorter );
 };

-
-
+/**
+ * Transform the trace file into a tree of events, where nested events are represented as children of parent events.
+ * And the events STEP_START/STEP_END and WORKFLOW_START/WORKFLOW_END are combined into single events with start and end timestamps.
+ *
+ * @param {string} src - The trace src filename
+ */
 export const buildLogTree = src => {
   const content = readFileSync( src, 'utf-8' );
   const entries = content.split( EOL ).slice( 0, -1 ).map( c => JSON.parse( c ) );
@@ -19,7 +43,7 @@ export const buildLogTree = src => {
   const workflowsMap = new Map();

   // close steps/workflows
-  for ( const entry of entries.filter( e => e.lib ===
+  for ( const entry of entries.filter( e => e.lib === THIS_LIB_NAME ) ) {
     const { event, workflowId, workflowType, workflowPath, parentWorkflowId, stepId, stepName, input, output, timestamp } = entry;

     const baseEntry = { children: [], startedAt: timestamp, workflowId };
@@ -41,7 +65,7 @@ export const buildLogTree = src => {
   }

   // insert operations inside steps
-  for ( const entry of entries.filter( e => e.lib !==
+  for ( const entry of entries.filter( e => e.lib !== THIS_LIB_NAME ) ) {
     pushSort( stepsMap.get( `${entry.workflowId}:${entry.stepId}` ).children, entry, timestampAscSort );
   }

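buildLogTree() pairs the *_START/*_END events into single nodes carrying startedAt/endedAt and nests non-core entries under their step, as the test that follows exercises. An illustrative rendering of the resulting .json for a single-step run, abridged to the fields that test asserts (real nodes likely carry more, such as input and workflowPath):

const exampleTree = {
  event: 'workflow',
  workflowId: 'wf1',
  workflowType: 'prompt',
  startedAt: 1000,
  endedAt: 5000,
  output: { ok: true },
  children: [ {
    event: 'step',
    stepId: 's1',
    startedAt: 2000,
    endedAt: 4000,
    output: { done: true },
    children: [ { lib: 'tool', event: 'call', timestamp: 3000 } ]
  } ]
};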
package/src/worker/tracer/tracer_tree.test.js
ADDED
@@ -0,0 +1,116 @@
+import { describe, it, expect } from 'vitest';
+import { writeFileSync, readFileSync, rmSync } from 'node:fs';
+import { mkdtempSync } from 'node:fs';
+import { tmpdir } from 'node:os';
+import { join } from 'path';
+import { EOL } from 'os';
+import { buildLogTree } from './tracer_tree.js';
+import { TraceEvent } from './types.js';
+import { THIS_LIB_NAME } from '#consts';
+
+const createTempDir = () => mkdtempSync( join( tmpdir(), 'flow-sdk-trace-tree-' ) );
+
+describe( 'tracer/tracer_tree', () => {
+  it( 'builds a tree JSON from a raw log file', () => {
+    const tmp = createTempDir();
+    const rawPath = join( tmp, 'run-123.raw' );
+
+    const entries = [
+      // root workflow start
+      {
+        lib: THIS_LIB_NAME,
+        event: TraceEvent.WORKFLOW_START,
+        input: { a: 1 },
+        output: null,
+        timestamp: 1000,
+        stepId: undefined,
+        stepName: undefined,
+        workflowId: 'wf1',
+        workflowType: 'prompt',
+        workflowPath: '/workflows/prompt.js',
+        parentWorkflowId: undefined
+      },
+      // step start
+      {
+        lib: THIS_LIB_NAME,
+        event: TraceEvent.STEP_START,
+        input: { x: 1 },
+        output: null,
+        timestamp: 2000,
+        stepId: 's1',
+        stepName: 'Step 1',
+        workflowId: 'wf1',
+        workflowType: 'prompt',
+        workflowPath: '/workflows/prompt.js',
+        parentWorkflowId: undefined
+      },
+      // non-core operation within step
+      {
+        lib: 'tool',
+        event: 'call',
+        input: { y: 2 },
+        output: { y: 3 },
+        timestamp: 3000,
+        stepId: 's1',
+        stepName: 'Step 1',
+        workflowId: 'wf1'
+      },
+      // step end
+      {
+        lib: THIS_LIB_NAME,
+        event: TraceEvent.STEP_END,
+        input: null,
+        output: { done: true },
+        timestamp: 4000,
+        stepId: 's1',
+        stepName: 'Step 1',
+        workflowId: 'wf1',
+        workflowType: 'prompt',
+        workflowPath: '/workflows/prompt.js',
+        parentWorkflowId: undefined
+      },
+      // workflow end
+      {
+        lib: THIS_LIB_NAME,
+        event: TraceEvent.WORKFLOW_END,
+        input: null,
+        output: { ok: true },
+        timestamp: 5000,
+        stepId: undefined,
+        stepName: undefined,
+        workflowId: 'wf1',
+        workflowType: 'prompt',
+        workflowPath: '/workflows/prompt.js',
+        parentWorkflowId: undefined
+      }
+    ];
+
+    writeFileSync( rawPath, entries.map( e => JSON.stringify( e ) ).join( EOL ) + EOL, 'utf-8' );
+
+    buildLogTree( rawPath );
+
+    const tree = JSON.parse( readFileSync( rawPath.replace( /.raw$/, '.json' ), 'utf-8' ) );
+
+    expect( tree.event ).toBe( 'workflow' );
+    expect( tree.workflowId ).toBe( 'wf1' );
+    expect( tree.workflowType ).toBe( 'prompt' );
+    expect( tree.startedAt ).toBe( 1000 );
+    expect( tree.endedAt ).toBe( 5000 );
+    expect( tree.output ).toEqual( { ok: true } );
+    expect( Array.isArray( tree.children ) ).toBe( true );
+    expect( tree.children.length ).toBe( 1 );
+
+    const step = tree.children[0];
+    expect( step.event ).toBe( 'step' );
+    expect( step.stepId ).toBe( 's1' );
+    expect( step.startedAt ).toBe( 2000 );
+    expect( step.endedAt ).toBe( 4000 );
+    expect( step.output ).toEqual( { done: true } );
+    expect( step.children.length ).toBe( 1 );
+    expect( step.children[0].lib ).toBe( 'tool' );
+    expect( step.children[0].timestamp ).toBe( 3000 );
+
+    rmSync( tmp, { recursive: true, force: true } );
+  } );
+} );
+