@output.ai/core 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,104 @@
1
+ # Core
2
+
3
+ Provides tools to run a workflow, which is a well defined logical unit of work.
4
+
5
+ ## step()
6
+
7
+ Defines a step, which is an enclosed unit of work that can import external dependencies.
8
+
9
+ ```ts
10
+ import { step } from 'flow-core';
11
+
12
+ export const aSingleStep = step( {
13
+ name: 'aSingleStep',
14
+ fn: async (): Promise<string> => {
15
+ // do stuff
16
+ }
17
+ } );
18
+ ```
19
+
20
+ Every step needs a name, and the name will be used in the context of the workflow (see below).
21
+
22
+ ## workflow()
23
+
24
+ Defines a logical structure of step invocations. No external dependencies can be imported in the file containing the workflow.
25
+
26
+ ```ts
27
+ import { workflow } from 'flow-core';
28
+ import type { WorkflowContext } from 'flow-core';
29
+ import type { PromptWorkflowInput, PromptWorkflowOutput } from './types';
30
+
31
+ export default workflow( {
32
+ name: 'prompt',
33
+ description: 'A workflow to demonstrate the prompt feature',
34
+ fn: async ( input: PromptWorkflowInput, context: WorkflowContext ): Promise<PromptWorkflowOutput> => {
35
+ const result = await context.steps.aSingleStep();
36
+
37
+ return { result };
38
+ }
39
+ } );
40
+ ```
41
+
42
+ The workflow Context has the following properties:
43
+
44
+ ### .steps.*()
45
+
46
+ Each step defined in the steps file is available here as a JS function under the "name" it was defined with. The input arguments are the input arguments of its `fn` function.
47
+
48
+ ### .tools.webhook()
49
+
50
+ Provides a webhook feature where a POST is made to a given url and the workflow is paused until another POST is made back to the workflows API:
51
+
52
+ ```js
53
+ const result = await context.tools.webhook( {
54
+ name: 'needFeedback',
55
+ description: 'Get a feedback',
56
+ url: 'http://xxx.xxx/feedback',
57
+ payload: { /* a given payload */ }
58
+ } );
59
+ ```
60
+
61
+ The url will receive the following payload:
62
+
63
+ ```js
64
+ {
65
+ workflowId: '', // alphanumerical id of the workflow execution,
66
+ payload: { }, // the payload sent using tools.webhook()
67
+ }
68
+ ```
69
+
70
+ To resume the workflow, a POST has to be made to a given url, with a response payload and the workflowId.
71
+
72
+ The host is localhost:3001 when running locally; there is no remote host yet.
73
+
74
+ The format is:
75
+ ```
76
+ POST https://localhost:3001/workflow/feedback
77
+ {
78
+ workflowId,
79
+ payload: {}
80
+ }
81
+ ```
82
+
83
+ ## Folder structure:
84
+
85
+ ```
86
+ └ workflows
87
+ └ my workflow
88
+ ├ index.ts // contains the workflow()
89
+ ├ types.ts // contains input, output types, and other misc types
90
+ └ steps.ts // contains all steps
91
+ ```
92
+
93
+ ## Booting
94
+
95
+ The project with workflows will be a "worker", meaning its workflows can be invoked individually by their names. To do so it has to be started using an npx command.
96
+
97
+ _package.json_
98
+ ```js
99
+ ...
100
+ "scripts": {
101
+ "start": "npx flow-worker"
102
+ },
103
+ ...
104
+ ```
package/bin/worker.sh ADDED
@@ -0,0 +1,26 @@
1
+ #!/bin/bash
2
+
3
+ set -e
4
+
5
+ invocation_dir="$(pwd)"
6
+
7
+ # Follow symlinks to get the real script path
8
+ if [ -L "${BASH_SOURCE[0]}" ]; then
9
+ real_script="$(readlink "${BASH_SOURCE[0]}")"
10
+ # If relative path, resolve from symlink directory
11
+ if [[ "$real_script" != /* ]]; then
12
+ real_script="$(dirname "${BASH_SOURCE[0]}")/$real_script"
13
+ fi
14
+ else
15
+ real_script="${BASH_SOURCE[0]}"
16
+ fi
17
+
18
+ # Get the real script directory (should be node_modules/<pkg>/bin)
19
+ script_dir="$(cd "$(dirname "$real_script")" && pwd)"
20
+
21
+ # SDK dir is the parent (node_modules/flow-core)
22
+ sdk_dir="$(dirname "$script_dir")"
23
+
24
+ cd ${sdk_dir}
25
+
26
+ exec npm run worker -- ${invocation_dir} "${@:2}" # Pass remaining args
package/package.json ADDED
@@ -0,0 +1,32 @@
1
+ {
2
+ "name": "@output.ai/core",
3
+ "version": "0.0.7",
4
+ "description": "The core module of the output framework",
5
+ "type": "module",
6
+ "main": "src/index.js",
7
+ "types": "src/index.d.ts",
8
+ "files": [
9
+ "./src",
10
+ "./bin"
11
+ ],
12
+ "scripts": {
13
+ "worker": "node ./src/worker/index.js"
14
+ },
15
+ "bin": {
16
+ "flow-worker": "./bin/worker.sh"
17
+ },
18
+ "repository": {
19
+ "type": "git",
20
+ "url": "git+https://github.com/growthxai/flow-sdk"
21
+ },
22
+ "dependencies": {
23
+ "@temporalio/worker": "1.13.0",
24
+ "@temporalio/workflow": "1.13.0",
25
+ "undici": "7.15.0"
26
+ },
27
+ "imports": {
28
+ "#consts": "./src/consts.js",
29
+ "#configs": "./src/configs.js",
30
+ "#internal_activities": "./src/internal_activities/index.js"
31
+ }
32
+ }
package/src/configs.js ADDED
@@ -0,0 +1,17 @@
1
+ export const worker = {
2
+ address: process.env.TEMPORAL_ADDRESS,
3
+ apiKey: process.env.TEMPORAL_API_KEY,
4
+ executionTimeout: '1m',
5
+ maxActivities: 100,
6
+ maxWorkflows: 100,
7
+ namespace: process.env.TEMPORAL_NAMESPACE ?? 'default',
8
+ taskQueue: process.env.TEMPORAL_TASK_QUEUE ?? 'main'
9
+ };
10
+
11
+ export const api = {
12
+ authKey: process.env.API_AUTH_KEY
13
+ };
14
+
15
+ export const tracing = {
16
+ enabled: [ '1', 'true' ].includes( process.env.TRACING_ENABLED )
17
+ };
package/src/consts.js ADDED
@@ -0,0 +1,3 @@
1
+ export const nameSymbol = Symbol( '__name' );
2
+ export const sendWebhookPostName = '__internal#sendWebhookPost';
3
+ export const workflowsIndexFileName = '__workflows_entrypoint.js';
@@ -0,0 +1,3 @@
1
+ export declare class FatalError extends Error { }
2
+
3
+ export declare class ValidationError extends Error { }
package/src/errors.js ADDED
@@ -0,0 +1,3 @@
1
+ export class FatalError extends Error { }
2
+
3
+ export class ValidationError extends Error { }
package/src/index.d.ts ADDED
@@ -0,0 +1,38 @@
1
+ export interface StepDefinition<TInput = unknown, TOutput = unknown> {
2
+ name: string;
3
+ description?: string;
4
+ fn: ( input: TInput ) => Promise<TOutput>;
5
+ }
6
+
7
+ export interface WorkflowContext {
8
+ steps: {
9
+ [stepName: string]: ( input?: unknown ) => Promise<unknown>;
10
+ };
11
+ workflowId: string | null;
12
+ tools: {
13
+ webhook: ( options: {
14
+ name: string;
15
+ description?: string;
16
+ url: string;
17
+ payload: object;
18
+ } ) => Promise<unknown>;
19
+ };
20
+ }
21
+
22
+ export interface WorkflowDefinition<TInput = unknown, TOutput = unknown> {
23
+ name: string;
24
+ description?: string;
25
+ fn: ( input: TInput, context: WorkflowContext ) => Promise<TOutput>;
26
+ }
27
+
28
+ export function step<TInput = unknown, TOutput = unknown>(
29
+ definition: StepDefinition<TInput, TOutput>
30
+ ): ( input: TInput ) => Promise<TOutput>;
31
+
32
+ export function workflow<TInput = unknown, TOutput = unknown>(
33
+ definition: WorkflowDefinition<TInput, TOutput>
34
+ ): ( input: TInput ) => Promise<TOutput>;
35
+
36
+ export function startWorkflow<TOutput = unknown>( name: string, options?: { input?: TInput } ): Promise<TOutput>;
37
+
38
+ export { FatalError, ValidationError } from './errors.js';
package/src/index.js ADDED
@@ -0,0 +1,5 @@
1
+ import { step } from './interface/step.js';
2
+ import { startWorkflow, workflow } from './interface/workflow.js';
3
+ import { FatalError, ValidationError } from './errors.js';
4
+
5
+ export { step, startWorkflow, workflow, FatalError, ValidationError };
@@ -0,0 +1,4 @@
1
+ import { nameSymbol } from '#consts';
2
+
3
+ export const setName = ( target, name ) =>
4
+ Object.defineProperty( target, nameSymbol, { value: name, writable: false, enumerable: false, configurable: false } );
@@ -0,0 +1,6 @@
1
+ import { setName } from './metadata.js';
2
+
3
+ export function step( { name, description: _description, fn } ) {
4
+ setName( fn, name );
5
+ return fn;
6
+ };
@@ -0,0 +1,9 @@
1
+ // This is rigged to return the folder containing the source of the call for both interface methods (step and workflow)
2
+ // Important, if to refactor, pay attention to the depth in the stack trace to extract the info
3
+ // index [3] is used because the stack lines are: [0] the error name, [1] this function, [2] step/workflow, [3] the caller
4
+ export const getInvocationDir = _ => new Error()
5
+ .stack.split( '\n' )[3]
6
+ .split( ' ' )
7
+ .at( -1 )
8
+ .replace( /\((.+):\d+:\d+\)/, '$1' )
9
+ .split( '/' ).slice( 0, -1 ).join( '/' );
@@ -0,0 +1,18 @@
1
+ import { defineSignal, setHandler } from '@temporalio/workflow';
2
+
3
+ export function createWebhook( workflowId, requestDispatcher ) {
4
+ return async function webhook( { name: _name, description: _description, url, payload } ) {
5
+ const { error } = await requestDispatcher( { url, workflowId, payload } );
6
+ if ( error ) {
7
+ throw new Error( 'Webhook call failed' );
8
+ }
9
+
10
+ const resumeSignal = defineSignal( 'resume' );
11
+
12
+ return new Promise( resolve => {
13
+ setHandler( resumeSignal, responsePayload => {
14
+ resolve( responsePayload );
15
+ } );
16
+ } );
17
+ };
18
+ };
@@ -0,0 +1,69 @@
1
+ // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
2
+ import { proxyActivities, IllegalStateError, executeChild, workflowInfo } from '@temporalio/workflow';
3
+ import { getInvocationDir } from './utils.js';
4
+ import { createWebhook } from './webhook.js';
5
+ import { setName } from './metadata.js';
6
+ import { sendWebhookPostName } from '#consts';
7
+ import { FatalError, ValidationError } from '../errors.js';
8
+
9
+ const temporalActivityConfigs = {
10
+ startToCloseTimeout: '20 minute',
11
+ retry: {
12
+ initialInterval: '10s',
13
+ backoffCoefficient: 2.0,
14
+ maximumInterval: '2 minutes',
15
+ maximumAttempts: 3,
16
+ nonRetryableErrorTypes: [ ValidationError.name, FatalError.name ]
17
+ }
18
+ };
19
+
20
+ export function workflow( { name, description: _description, fn } ) {
21
+ const workflowPath = getInvocationDir();
22
+
23
+ // This wraps the actual function call so that, before it is invoked, the activity proxies are created and the context is exposed
24
+ const wrapper = async input => {
25
+ try {
26
+ const { workflowId } = workflowInfo();
27
+
28
+ // enrich context information needed for tracking
29
+ Object.assign( workflowInfo().memo, { workflowPath } );
30
+
31
+ const proxies = proxyActivities( temporalActivityConfigs );
32
+ const context = {
33
+ steps: new Proxy( {}, {
34
+ get( _target, name ) {
35
+ return proxies[`${workflowPath}#${name}`];
36
+ }
37
+ } ),
38
+ tools: {
39
+ webhook: createWebhook( workflowId, proxies[sendWebhookPostName] )
40
+ }
41
+ };
42
+
43
+ return fn( input, context );
44
+
45
+ } catch ( error ) {
46
+ // IllegalStateError is thrown when temporal is not running and one try to access workflowInfo()
47
+ if ( error instanceof IllegalStateError ) {
48
+ return fn( input, { steps: null, workflowId: null } );
49
+ }
50
+
51
+ throw error;
52
+ }
53
+ };
54
+
55
+ setName( wrapper, name );
56
+ return wrapper;
57
+ };
58
+
59
+ export async function startWorkflow( name, { input } = {} ) {
60
+ const { memo, workflowId, workflowType } = workflowInfo();
61
+
62
+ // Checks if current memo has rootWorkflowId, which means current execution is already a child
63
+ // Then it sets the memory for the child execution passing along who's the original workflow is and its type
64
+ const workflowMemory = memo.rootWorkflowId ?
65
+ { parentWorkflowId: workflowId, rootWorkflowType: memo.rootWorkflowType, rootWorkflowId: memo.rootWorkflowId } :
66
+ { parentWorkflowId: workflowId, rootWorkflowId: workflowId, rootWorkflowType: workflowType };
67
+
68
+ return executeChild( name, { args: input ? [ input ] : [], memo: workflowMemory } );
69
+ }
@@ -0,0 +1,17 @@
1
+ import { fetch } from 'undici';
2
+ import { api as apiConfig } from '#configs';
3
+
4
+ export async function sendWebhookPost( { url, workflowId, payload } ) {
5
+ const res = await fetch( url, {
6
+ method: 'POST',
7
+ headers: {
8
+ 'Content-Type': 'application/json',
9
+ Authentication: `Basic ${apiConfig.authKey}`
10
+ },
11
+ body: JSON.stringify( { workflowId, payload } )
12
+ } );
13
+
14
+ console.log( '[Core.SendWebhookPost]', res.status, res.statusText );
15
+
16
+ return { error: !res.ok };
17
+ };
@@ -0,0 +1,19 @@
1
+ import { AsyncLocalStorage } from 'node:async_hooks';
2
+
3
+ export const store = new AsyncLocalStorage();
4
+
5
+ export const Storage = {
6
+ /**
7
+ * Execute a piece of code wrapped in a function, binding a given arbitrary object as context around it so it can be retrieved later
8
+ * @param {Function|AsyncFunction} fn The code to execute wrapped around a function without arguments
9
+ * @param {Object} context The context to bind
10
+ * @returns {any} The result of the `fn` execution
11
+ */
12
+ runWithContext: ( fn, context ) => store.run( context, fn ),
13
+
14
+ /**
15
+ * Load the context stored upstream in the chain of calls that lead to this point with `runWithContext`.
16
+ * @returns {any} Stored context
17
+ */
18
+ load: () => store.getStore()
19
+ };
@@ -0,0 +1,49 @@
1
+ import { Worker, NativeConnection } from '@temporalio/worker';
2
+ import { dirname, join } from 'path';
3
+ import { fileURLToPath } from 'node:url';
4
+ import { worker as workerConfig } from '#configs';
5
+ import { loadActivities, loadWorkflows } from './loader.js';
6
+ import { ActivityExecutionInterceptor } from './interceptors/activity.js';
7
+ import { setupGlobalTracer } from './tracer/index.js';
8
+ import { sinks } from './sinks.js';
9
+
10
+ const __dirname = dirname( fileURLToPath( import.meta.url ) );
11
+
12
+ // expose the coreTracker so other parts of the SDK can use it
13
+ setupGlobalTracer();
14
+
15
+ const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue } = workerConfig;
16
+
17
+ // Get caller directory from command line arguments
18
+ const callerDir = process.argv[2];
19
+
20
+ ( async () => {
21
+ console.log( '[Core]', 'Loading workflows...', { callerDir } );
22
+ const workflowsPath = await loadWorkflows( callerDir );
23
+
24
+ console.log( '[Core]', 'Loading activities...', { callerDir } );
25
+ const activities = await loadActivities( callerDir );
26
+
27
+ console.log( '[Core]', 'Connecting Temporal...' );
28
+ // enable TLS only when connecting remotely (api key is present)
29
+ const connection = await NativeConnection.connect( { address, tls: Boolean( apiKey ), apiKey } );
30
+
31
+ console.log( '[Core]', 'Creating Worker...' );
32
+ const worker = await Worker.create( {
33
+ connection,
34
+ namespace,
35
+ taskQueue,
36
+ workflowsPath,
37
+ activities,
38
+ sinks,
39
+ interceptors: {
40
+ workflowModules: [ join( __dirname, './interceptors/workflow.js' ) ],
41
+ activityInbound: [ () => new ActivityExecutionInterceptor() ]
42
+ },
43
+ maxConcurrentWorkflowTaskExecutions: maxWorkflows,
44
+ maxConcurrentActivityTaskExecutions: maxActivities
45
+ } );
46
+
47
+ console.log( '[Core] Starting...' );
48
+ worker.run();
49
+ } )();
@@ -0,0 +1,28 @@
1
+ import { Context } from '@temporalio/activity';
2
+ import { Storage } from '../async_storage.js';
3
+ import { trace } from '../tracer/index.js';
4
+ import { TraceEvent } from '../tracer/types.js';
5
+ import { headersToObject } from '../sandboxed_utils.js';
6
+
7
+ /*
8
+ This interceptor is called for every activity execution
9
+
10
+ It will have access to the activity scope.
11
+
12
+ What it does is to wrap the execution using Node's AsyncLocalStorage to save context info for tracing.
13
+
14
+ Some information it needs for its context comes from Temporal's Activity Context others are injected in the headers
15
+ */
16
+ export class ActivityExecutionInterceptor {
17
+ async execute( input, next ) {
18
+ const { workflowExecution: { workflowId }, activityId, activityType, workflowType } = Context.current().info;
19
+ const context = { workflowId, workflowType, activityId, activityType, ...headersToObject( input.headers ) };
20
+
21
+ return Storage.runWithContext( async _ => {
22
+ trace( { lib: 'core', event: TraceEvent.STEP_START, input: input.args } );
23
+ const output = await next( input );
24
+ trace( { lib: 'core', event: TraceEvent.STEP_END, output } );
25
+ return output;
26
+ }, context );
27
+ }
28
+ };
@@ -0,0 +1,42 @@
1
+ // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
2
+ import { proxySinks, workflowInfo } from '@temporalio/workflow';
3
+ import { TraceEvent } from '../tracer/types.js';
4
+ import { memoToHeaders } from '../sandboxed_utils.js';
5
+
6
+ /*
7
+ This is not an AI comment!
8
+
9
+ This interceptor adds values from workflowInfo().memo as Activity invocation headers.
10
+
11
+ This is a strategy to share values between the workflow context and activity context.
12
+
13
+ We also want to preserve existing headers that might have been injected somewhere else.
14
+ */
15
+ class HeadersInjectionInterceptor {
16
+ async scheduleActivity( input, next ) {
17
+ Object.assign( input.headers, memoToHeaders( workflowInfo().memo ) );
18
+ return next( input );
19
+ }
20
+ };
21
+
22
+ /*
23
+ This interceptor captures the workflow execution start and stop to log these event for the internal tracing
24
+ This is not an AI comment!
25
+
26
+ It uses sinks to share them.
27
+ - https://docs.temporal.io/develop/typescript/observability
28
+ */
29
+ class WorkflowExecutionInterceptor {
30
+ async execute( input, next ) {
31
+ const sinks = proxySinks();
32
+ sinks.log.trace( { event: TraceEvent.WORKFLOW_START, input: input.args } );
33
+ const output = await next( input );
34
+ sinks.log.trace( { event: TraceEvent.WORKFLOW_END, output } );
35
+ return output;
36
+ }
37
+ };
38
+
39
+ export const interceptors = () => ( {
40
+ outbound: [ new HeadersInjectionInterceptor( workflowInfo().workflowType ) ],
41
+ inbound: [ new WorkflowExecutionInterceptor( workflowInfo().workflowType ) ]
42
+ } );
@@ -0,0 +1,64 @@
1
+ import { readdirSync, writeFileSync, existsSync, mkdirSync } from 'fs';
2
+ import { dirname, join, resolve } from 'path';
3
+ import { pathToFileURL, fileURLToPath } from 'url';
4
+ import { nameSymbol, sendWebhookPostName, workflowsIndexFileName } from '#consts';
5
+ import { sendWebhookPost } from '#internal_activities';
6
+
7
+ const ignoreDirNames = [ 'vendor', 'node_modules' ];
8
+
9
+ const __dirname = dirname( fileURLToPath( import.meta.url ) );
10
+
11
+ // recursive run directories and when find files with given name, save their path, pathname and URI
12
+ const recursiveNavigateWhileCollecting = ( path, filenames, collection = [] ) => {
13
+ for ( const entry of readdirSync( path, { withFileTypes: true } ) ) {
14
+ if ( ignoreDirNames.includes( entry.name ) ) { continue; }
15
+
16
+ const pathname = resolve( path, entry.name );
17
+ if ( entry.isDirectory() ) {
18
+ recursiveNavigateWhileCollecting( pathname, filenames, collection );
19
+ } else if ( filenames.includes( entry.name ) ) {
20
+ collection.push( { pathname, path, url: pathToFileURL( pathname ).href } );
21
+ }
22
+ }
23
+
24
+ return collection;
25
+ };
26
+
27
+ export async function loadActivities( path ) {
28
+ const activityPaths = recursiveNavigateWhileCollecting( path, [ 'steps.js' ] );
29
+ const activities = [];
30
+ for ( const { path, pathname, url } of activityPaths ) {
31
+ const exported = await import( url );
32
+ for ( const [ name, fn ] of Object.values( exported ).map( v => [ v[nameSymbol], v ] ).filter( v => v[0] ) ) {
33
+ console.log( '[Core.Scanner]', 'Activity loaded', name, pathname );
34
+ activities[`${path}#${name}`] = fn;
35
+ }
36
+ }
37
+
38
+ // system activities
39
+ activities[sendWebhookPostName] = sendWebhookPost;
40
+ return activities;
41
+ };
42
+
43
+ export async function loadWorkflows( path ) {
44
+ const workflowPaths = recursiveNavigateWhileCollecting( path, [ 'index.js' ] );
45
+ const workflows = [];
46
+ for ( const { pathname, url } of workflowPaths ) {
47
+ const exported = await import( url );
48
+
49
+ for ( const name of Object.values( exported ).map( v => v[nameSymbol] ).filter( n => n ) ) {
50
+ console.log( '[Core.Scanner]', 'Workflow loaded', name, pathname );
51
+ workflows.push( { name, pathname } );
52
+ }
53
+ }
54
+
55
+ const tempFolder = join( __dirname, 'temp' );
56
+ const entryPoint = join( tempFolder, workflowsIndexFileName );
57
+ const workflowsIndex = workflows.map( ( { name, pathname } ) => `export { default as ${name} } from '${pathname}';` ).join( '\n' );
58
+
59
+ if ( !existsSync( tempFolder ) ) {
60
+ mkdirSync( tempFolder );
61
+ }
62
+ writeFileSync( entryPoint, workflowsIndex, 'utf-8' );
63
+ return entryPoint;
64
+ };
@@ -0,0 +1,18 @@
1
+ // THIS IS SAFE TO RUN AT TEMPORAL'S SANDBOX ENVIRONMENT
2
+ import { defaultPayloadConverter } from '@temporalio/common';
3
+
4
+ /*
5
+ @important: The plain JS values need to be converted to "payload":
6
+ - https://typescript.temporal.io/api/namespaces/common/#headers
7
+ - https://community.temporal.io/t/specify-temporal-headers-when-starting-workflow/6712
8
+ */
9
+ export const memoToHeaders = memo =>
10
+ Object.fromEntries(
11
+ Object.entries( memo ?? {} ).map( ( [ k, v ] ) => [ k, defaultPayloadConverter.toPayload( v ) ] )
12
+ );
13
+
14
+ // And the opposite of the function above
15
+ export const headersToObject = headers =>
16
+ Object.fromEntries(
17
+ Object.entries( headers ?? {} ).map( ( [ k, v ] ) => [ k, defaultPayloadConverter.fromPayload( v ) ] )
18
+ );
@@ -0,0 +1,15 @@
1
+ import { Storage } from './async_storage.js';
2
+ import { trace } from './tracer/index.js';
3
+
4
+ export const sinks = {
5
+ // This sink allow for sandbox Temporal environment to send trace logs back to the main thread.
6
+ log: {
7
+ trace: {
8
+ fn( workflowInfo, args ) {
9
+ const { workflowId, workflowType, memo } = workflowInfo;
10
+ Storage.runWithContext( _ => trace( { lib: 'core', ...args } ), { workflowId, workflowType, ...memo } );
11
+ },
12
+ callDuringReplay: false
13
+ }
14
+ }
15
+ };
@@ -0,0 +1,6 @@
1
+ export { default as articleDraftAgentic } from '/app/test-workflows/dist/article-draft-agentic/index.js';
2
+ export { default as httpClientDemo } from '/app/test-workflows/dist/http/index.js';
3
+ export { default as nested } from '/app/test-workflows/dist/nested/index.js';
4
+ export { default as local_prompt } from '/app/test-workflows/dist/prompt/index.js';
5
+ export { default as simple } from '/app/test-workflows/dist/simple/index.js';
6
+ export { default as webhook } from '/app/test-workflows/dist/webhook/index.js';
@@ -0,0 +1,43 @@
1
+ import { Storage } from '../async_storage.js';
2
+ import { mkdirSync, existsSync, readdirSync, appendFileSync } from 'node:fs';
3
+ import { join } from 'path';
4
+ import { EOL } from 'os';
5
+ import { buildLogTree } from './tracer_tree.js';
6
+ import { tracing as tracingConfig } from '#configs';
7
+
8
+ const callerDir = process.argv[2];
9
+
10
+ const flushEntry = ( path, json ) => appendFileSync( path, JSON.stringify( json ) + EOL, 'utf-8' );
11
+
12
+ export function trace( { lib, event, input, output } ) {
13
+ const now = Date.now();
14
+
15
+ if ( !tracingConfig.enabled ) { return; }
16
+
17
+ const {
18
+ activityId: stepId,
19
+ activityType: stepName,
20
+ workflowId,
21
+ workflowType,
22
+ workflowPath,
23
+ parentWorkflowId,
24
+ rootWorkflowId,
25
+ rootWorkflowType
26
+ } = Storage.load();
27
+
28
+ const entry = { lib, event, input, output, timestamp: now, stepId, stepName, workflowId, workflowType, workflowPath, parentWorkflowId };
29
+
30
+ // use the root workflow (when present) so child workflows append to the same log file as their parent/grandparent
31
+ const outputDir = join( callerDir, 'logs', 'runs', rootWorkflowType ?? workflowType );
32
+ if ( !existsSync( outputDir ) ) { mkdirSync( outputDir, { recursive: true } ); }
33
+
34
+ const suffix = `-${rootWorkflowId ?? workflowId}.raw`;
35
+ const logFile = readdirSync( outputDir ).find( f => f.endsWith( suffix ) ) ?? `${new Date( now ).toISOString()}-${suffix}`;
36
+ const logPath = join( outputDir, logFile );
37
+
38
+ flushEntry( logPath, entry );
39
+ buildLogTree( logPath );
40
+ };
41
+
42
+ export const setupGlobalTracer = () =>
43
+ Object.defineProperty( globalThis, Symbol.for( '__trace' ), { value: trace, writable: false, enumerable: false, configurable: false } );
@@ -0,0 +1,61 @@
1
+ import { readFileSync, writeFileSync } from 'node:fs';
2
+ import { EOL } from 'os';
3
+ import { TraceEvent } from './types.js';
4
+
5
+ const timestampAscSort = ( a, b ) => a.timestamp > b.timestamp ? -1 : 1;
6
+
7
+ const pushSort = ( arr, entry, sorter ) => {
8
+ arr.push( entry );
9
+ arr.sort( sorter );
10
+ };
11
+
12
+ const coreLib = 'core';
13
+
14
+ export const buildLogTree = src => {
15
+ const content = readFileSync( src, 'utf-8' );
16
+ const entries = content.split( EOL ).slice( 0, -1 ).map( c => JSON.parse( c ) );
17
+
18
+ const stepsMap = new Map();
19
+ const workflowsMap = new Map();
20
+
21
+ // close steps/workflows
22
+ for ( const entry of entries.filter( e => e.lib === coreLib ) ) {
23
+ const { event, workflowId, workflowType, workflowPath, parentWorkflowId, stepId, stepName, input, output, timestamp } = entry;
24
+
25
+ const baseEntry = { children: [], startedAt: timestamp, workflowId };
26
+ if ( event === TraceEvent.STEP_START ) {
27
+ stepsMap.set( `${workflowId}:${stepId}`, { event: 'step', input, stepId, stepName, ...baseEntry } );
28
+ }
29
+
30
+ if ( event === TraceEvent.STEP_END ) {
31
+ Object.assign( stepsMap.get( `${workflowId}:${stepId}` ) ?? {}, { output, endedAt: timestamp } );
32
+ }
33
+
34
+ if ( event === TraceEvent.WORKFLOW_START ) {
35
+ workflowsMap.set( workflowId, { event: 'workflow', input, parentWorkflowId, workflowPath, workflowType, ...baseEntry } );
36
+ }
37
+
38
+ if ( event === TraceEvent.WORKFLOW_END ) {
39
+ Object.assign( workflowsMap.get( workflowId ) ?? {}, { output, endedAt: timestamp } );
40
+ }
41
+ }
42
+
43
+ // insert operations inside steps
44
+ for ( const entry of entries.filter( e => e.lib !== coreLib ) ) {
45
+ pushSort( stepsMap.get( `${entry.workflowId}:${entry.stepId}` ).children, entry, timestampAscSort );
46
+ }
47
+
48
+ // insert steps into workflows
49
+ for ( const step of stepsMap.values() ) {
50
+ pushSort( workflowsMap.get( step.workflowId ).children, step, timestampAscSort );
51
+ }
52
+
53
+ // insert children workflows
54
+ for ( const workflow of [ ...workflowsMap.values() ].filter( w => w.parentWorkflowId ) ) {
55
+ pushSort( workflowsMap.get( workflow.parentWorkflowId ).children, workflow, timestampAscSort );
56
+ }
57
+
58
+ const rootWorkflow = [ ...workflowsMap.values() ].find( w => !w.parentWorkflowId );
59
+
60
+ writeFileSync( src.replace( /\.raw$/, '.json' ), JSON.stringify( rootWorkflow, undefined, 2 ), 'utf-8' );
61
+ };
@@ -0,0 +1,6 @@
1
+ export const TraceEvent = {
2
+ WORKFLOW_START: 'workflow_start',
3
+ WORKFLOW_END: 'workflow_end',
4
+ STEP_START: 'step_start',
5
+ STEP_END: 'step_end'
6
+ };