@output.ai/core 0.0.16 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. package/README.md +16 -22
  2. package/package.json +19 -7
  3. package/src/consts.js +2 -11
  4. package/src/interface/evaluator.js +8 -4
  5. package/src/interface/step.js +1 -1
  6. package/src/interface/webhook.js +16 -4
  7. package/src/interface/workflow.js +28 -48
  8. package/src/internal_activities/index.js +3 -37
  9. package/src/tracing/index.d.ts +47 -0
  10. package/src/tracing/index.js +45 -0
  11. package/src/tracing/internal_interface.js +66 -0
  12. package/src/tracing/processors/local/index.js +50 -0
  13. package/src/tracing/processors/local/index.spec.js +67 -0
  14. package/src/tracing/processors/s3/index.js +51 -0
  15. package/src/tracing/processors/s3/index.spec.js +64 -0
  16. package/src/tracing/processors/s3/redis_client.js +19 -0
  17. package/src/tracing/processors/s3/redis_client.spec.js +50 -0
  18. package/src/tracing/processors/s3/s3_client.js +33 -0
  19. package/src/tracing/processors/s3/s3_client.spec.js +67 -0
  20. package/src/tracing/tools/build_trace_tree.js +76 -0
  21. package/src/tracing/tools/build_trace_tree.spec.js +99 -0
  22. package/src/tracing/tools/utils.js +28 -0
  23. package/src/tracing/tools/utils.spec.js +14 -0
  24. package/src/tracing/trace_engine.js +63 -0
  25. package/src/tracing/trace_engine.spec.js +91 -0
  26. package/src/utils.js +8 -0
  27. package/src/worker/catalog_workflow/index.js +2 -1
  28. package/src/worker/catalog_workflow/index.spec.js +6 -10
  29. package/src/worker/configs.js +24 -0
  30. package/src/worker/index.js +7 -8
  31. package/src/worker/interceptors/activity.js +15 -17
  32. package/src/worker/interceptors/workflow.js +18 -1
  33. package/src/worker/loader.js +40 -31
  34. package/src/worker/loader.spec.js +22 -29
  35. package/src/worker/loader_tools.js +63 -0
  36. package/src/worker/loader_tools.spec.js +85 -0
  37. package/src/worker/sinks.js +60 -10
  38. package/src/configs.js +0 -36
  39. package/src/configs.spec.js +0 -379
  40. package/src/worker/internal_utils.js +0 -60
  41. package/src/worker/internal_utils.spec.js +0 -134
  42. package/src/worker/tracer/index.js +0 -75
  43. package/src/worker/tracer/index.test.js +0 -102
  44. package/src/worker/tracer/tracer_tree.js +0 -85
  45. package/src/worker/tracer/tracer_tree.test.js +0 -115
  46. /package/src/{worker/async_storage.js → async_storage.js} +0 -0
@@ -1,4 +1,5 @@
1
1
  import { z } from 'zod';
2
+ import { dirname } from 'node:path';
2
3
  import { METADATA_ACCESS_SYMBOL } from '#consts';
3
4
  import { Catalog, CatalogActivity, CatalogWorkflow } from './catalog.js';
4
5
 
@@ -37,7 +38,7 @@ export const createCatalog = ( { workflows, activities } ) =>
37
38
  inputSchema: convertToJsonSchema( workflow.inputSchema ),
38
39
  outputSchema: convertToJsonSchema( workflow.outputSchema ),
39
40
  activities: Object.entries( activities )
40
- .filter( ( [ k ] ) => k.startsWith( `${workflow.path}#` ) )
41
+ .filter( ( [ k ] ) => k.startsWith( `${dirname( workflow.path )}#` ) )
41
42
  .map( ( [ _, v ] ) => {
42
43
  const metadata = v[METADATA_ACCESS_SYMBOL];
43
44
  return new CatalogActivity( {
@@ -17,16 +17,14 @@ describe( 'createCatalog', () => {
17
17
  const workflows = [
18
18
  {
19
19
  name: 'flow1',
20
- path: '/flows/flow1',
21
- pathname: '/flows/flow1/workflow.js',
20
+ path: '/flows/flow1/workflow.js',
22
21
  description: 'desc-flow1',
23
22
  inputSchema: z.object( { in: z.literal( 'f1' ) } ),
24
23
  outputSchema: z.object( { out: z.literal( 'f1' ) } )
25
24
  },
26
25
  {
27
26
  name: 'flow2',
28
- path: '/flows/flow2',
29
- pathname: '/flows/flow2/workflow.js',
27
+ path: '/flows/flow2/workflow.js',
30
28
  description: 'desc-flow2',
31
29
  inputSchema: z.object( { in: z.literal( 'f2' ) } ),
32
30
  outputSchema: z.object( { out: z.literal( 'f2' ) } )
@@ -95,7 +93,7 @@ describe( 'createCatalog', () => {
95
93
  expect( mapped ).toEqual( [
96
94
  {
97
95
  name: 'flow1',
98
- path: '/flows/flow1',
96
+ path: '/flows/flow1/workflow.js',
99
97
  description: 'desc-flow1',
100
98
  inputSchema: {
101
99
  $schema: 'https://json-schema.org/draft/2020-12/schema',
@@ -152,7 +150,7 @@ describe( 'createCatalog', () => {
152
150
  },
153
151
  {
154
152
  name: 'flow2',
155
- path: '/flows/flow2',
153
+ path: '/flows/flow2/workflow.js',
156
154
  description: 'desc-flow2',
157
155
  inputSchema: {
158
156
  $schema: 'https://json-schema.org/draft/2020-12/schema',
@@ -192,9 +190,7 @@ describe( 'createCatalog', () => {
192
190
  ] );
193
191
 
194
192
  // Original inputs are not mutated
195
- expect( workflows[0].path ).toBe( '/flows/flow1' );
196
- expect( workflows[0].pathname ).toBe( '/flows/flow1/workflow.js' );
197
- expect( workflows[1].path ).toBe( '/flows/flow2' );
198
- expect( workflows[1].pathname ).toBe( '/flows/flow2/workflow.js' );
193
+ expect( workflows[0].path ).toBe( '/flows/flow1/workflow.js' );
194
+ expect( workflows[1].path ).toBe( '/flows/flow2/workflow.js' );
199
195
  } );
200
196
  } );
@@ -0,0 +1,24 @@
1
+ import * as z from 'zod';
2
+
3
+ class InvalidEnvVarsErrors extends Error { }
4
+
5
+ const envVarSchema = z.object( {
6
+ CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
7
+ TEMPORAL_ADDRESS: z.string().default( 'localhost:7233' ),
8
+ TEMPORAL_API_KEY: z.string().optional(),
9
+ TEMPORAL_NAMESPACE: z.string().optional().default( 'default' )
10
+ } );
11
+
12
+ const { data: envVars, error } = envVarSchema.safeParse( process.env );
13
+ if ( error ) {
14
+ throw new InvalidEnvVarsErrors( z.prettifyError( error ) );
15
+ }
16
+
17
+ export const address = envVars.TEMPORAL_ADDRESS;
18
+ export const apiKey = envVars.TEMPORAL_API_KEY;
19
+ export const executionTimeout = '1m';
20
+ export const maxActivities = 100;
21
+ export const maxWorkflows = 100;
22
+ export const namespace = envVars.TEMPORAL_NAMESPACE;
23
+ export const taskQueue = envVars.CATALOG_ID;
24
+ export const catalogId = envVars.CATALOG_ID;
@@ -3,20 +3,16 @@ import { Client } from '@temporalio/client';
3
3
  import { WorkflowIdConflictPolicy } from '@temporalio/common';
4
4
  import { dirname, join } from 'path';
5
5
  import { fileURLToPath } from 'node:url';
6
- import { worker as workerConfig } from '#configs';
6
+ import { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } from './configs.js';
7
7
  import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
8
8
  import { ActivityExecutionInterceptor } from './interceptors/activity.js';
9
- import { setupGlobalTracer } from './tracer/index.js';
10
9
  import { sinks } from './sinks.js';
11
10
  import { createCatalog } from './catalog_workflow/index.js';
11
+ import { init as initTracing } from '#tracing';
12
+ import { WORKFLOW_CATALOG } from '#consts';
12
13
 
13
14
  const __dirname = dirname( fileURLToPath( import.meta.url ) );
14
15
 
15
- // expose the coreTracker so other parts of the SDK can use it
16
- setupGlobalTracer();
17
-
18
- const { address, apiKey, maxActivities, maxWorkflows, namespace, taskQueue, catalogId } = workerConfig;
19
-
20
16
  // Get caller directory from command line arguments
21
17
  const callerDir = process.argv[2];
22
18
 
@@ -30,6 +26,9 @@ const callerDir = process.argv[2];
30
26
  console.log( '[Core]', 'Creating worker entry point...' );
31
27
  const workflowsPath = createWorkflowsEntryPoint( workflows );
32
28
 
29
+ console.log( '[Core]', 'Initializing tracing...' );
30
+ await initTracing();
31
+
33
32
  console.log( '[Core]', 'Creating workflows catalog...' );
34
33
  const catalog = createCatalog( { workflows, activities } );
35
34
 
@@ -73,7 +72,7 @@ const callerDir = process.argv[2];
73
72
  } );
74
73
 
75
74
  console.log( '[Core]', 'Starting catalog workflow...' );
76
- await new Client( { connection, namespace } ).workflow.start( 'catalog', {
75
+ await new Client( { connection, namespace } ).workflow.start( WORKFLOW_CATALOG, {
77
76
  taskQueue,
78
77
  workflowId: catalogId, // use the name of the task queue as the catalog name, ensuring uniqueness
79
78
  workflowIdConflictPolicy: WorkflowIdConflictPolicy.TERMINATE_EXISTING,
@@ -1,8 +1,7 @@
1
1
  import { Context } from '@temporalio/activity';
2
- import { Storage } from '../async_storage.js';
3
- import { trace } from '../tracer/index.js';
2
+ import { Storage } from '#async_storage';
3
+ import { addEventStart, addEventEnd, addEventError } from '#tracing';
4
4
  import { headersToObject } from '../sandboxed_utils.js';
5
- import { THIS_LIB_NAME } from '#consts';
6
5
  import { METADATA_ACCESS_SYMBOL } from '#consts';
7
6
 
8
7
  /*
@@ -20,22 +19,21 @@ export class ActivityExecutionInterceptor {
20
19
  };
21
20
 
22
21
  async execute( input, next ) {
23
- const { workflowExecution: { workflowId }, activityId, activityType, workflowType } = Context.current().info;
22
+ const { workflowExecution: { workflowId }, activityId, activityType } = Context.current().info;
23
+ const { executionContext } = headersToObject( input.headers );
24
+ const { type: kind } = this.activities?.[activityType]?.[METADATA_ACCESS_SYMBOL];
24
25
 
25
- const activityFn = this.activities?.[activityType];
26
- const { type: componentType, skipTrace } = activityFn?.[METADATA_ACCESS_SYMBOL];
26
+ const traceArguments = { kind, id: activityId, parentId: workflowId, name: activityType, executionContext };
27
+ addEventStart( { details: input.args[0], ...traceArguments } );
27
28
 
28
- const context = { workflowId, workflowType, activityId, activityType, ...headersToObject( input.headers ) };
29
-
30
- return Storage.runWithContext( async _ => {
31
- if ( !skipTrace ) {
32
- trace( { lib: THIS_LIB_NAME, event: `${componentType}_start`, input: input.args } );
33
- }
34
- const output = await next( input );
35
- if ( !skipTrace ) {
36
- trace( { lib: THIS_LIB_NAME, event: `${componentType}_end`, output } );
37
- }
29
+ // creates a context for the nested tracing
30
+ try {
31
+ const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId, executionContext } );
32
+ addEventEnd( { details: output, ...traceArguments } );
38
33
  return output;
39
- }, context );
34
+ } catch ( error ) {
35
+ addEventError( { details: error, ...traceArguments } );
36
+ throw error;
37
+ }
40
38
  }
41
39
  };
@@ -1,5 +1,5 @@
1
1
  // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
2
- import { workflowInfo } from '@temporalio/workflow';
2
+ import { workflowInfo, proxySinks, ApplicationFailure } from '@temporalio/workflow';
3
3
  import { memoToHeaders } from '../sandboxed_utils.js';
4
4
 
5
5
  /*
@@ -18,6 +18,23 @@ class HeadersInjectionInterceptor {
18
18
  }
19
19
  };
20
20
 
21
+ const sinks = proxySinks();
22
+
23
+ class WorkflowExecutionInterceptor {
24
+ async execute( input, next ) {
25
+ sinks.trace.addWorkflowEventStart( input.args[0] );
26
+ try {
27
+ const output = await next( input );
28
+ sinks.trace.addWorkflowEventEnd( output );
29
+ return output;
30
+ } catch ( error ) {
31
+ sinks.trace.addWorkflowEventError( error );
32
+ throw new ApplicationFailure( error.message, error.constructor.name );
33
+ }
34
+ }
35
+ };
36
+
21
37
  export const interceptors = () => ( {
38
+ inbound: [ new WorkflowExecutionInterceptor() ],
22
39
  outbound: [ new HeadersInjectionInterceptor( workflowInfo().workflowType ) ]
23
40
  } );
@@ -1,51 +1,60 @@
1
- import { dirname, join } from 'path';
1
+ import { dirname, join } from 'node:path';
2
+ import { mkdirSync, writeFileSync } from 'node:fs';
3
+ import { EOL } from 'node:os';
2
4
  import { fileURLToPath } from 'url';
3
- import { sendWebhookPost, readTraceFile } from '#internal_activities';
4
- import { SEND_WEBHOOK_ACTIVITY_NAME, WORKFLOWS_INDEX_FILENAME, READ_TRACE_FILE } from '#consts';
5
- import {
6
- iteratorOverImportedComponents,
7
- recursiveNavigateWhileCollecting,
8
- writeFileOnLocationSync
9
- } from './internal_utils.js';
5
+ import { sendWebhook } from '#internal_activities';
6
+ import { ACTIVITY_SEND_WEBHOOK, WORKFLOWS_INDEX_FILENAME, WORKFLOW_CATALOG } from '#consts';
7
+ import { importComponents } from './loader_tools.js';
10
8
 
11
9
  const __dirname = dirname( fileURLToPath( import.meta.url ) );
12
10
 
13
- // returns a map of activities, where the key is they path + name and the value is the function with metadata
14
- export async function loadActivities( path ) {
15
- const activityPaths = recursiveNavigateWhileCollecting( path, [ 'steps.js', 'evaluators.js' ] );
16
- const activities = [];
17
- for await ( const { component, metadata, pathname, path } of iteratorOverImportedComponents( activityPaths ) ) {
18
- console.log( '[Core.Scanner]', 'Component loaded:', metadata.type, metadata.name, 'at', pathname );
19
- activities[`${path}#${metadata.name}`] = component;
11
+ /**
12
+ * Builds a map of activities, where the key is their path and name and the value is the function
13
+ *
14
+ * @param {string} target
15
+ * @returns {object}
16
+ */
17
+ export async function loadActivities( target ) {
18
+ const activities = {};
19
+ for await ( const { fn, metadata, path } of importComponents( target, [ 'steps.js', 'evaluators.js' ] ) ) {
20
+ console.log( '[Core.Scanner]', 'Component loaded:', metadata.type, metadata.name, 'at', path );
21
+ activities[`${dirname( path )}#${metadata.name}`] = fn;
20
22
  }
21
23
 
22
24
  // system activities
23
- activities[SEND_WEBHOOK_ACTIVITY_NAME] = sendWebhookPost;
24
- activities[READ_TRACE_FILE] = readTraceFile;
25
+ activities[ACTIVITY_SEND_WEBHOOK] = sendWebhook;
25
26
  return activities;
26
27
  };
27
28
 
28
- // Returns array of workflow object, which are function with metadata attached
29
- export async function loadWorkflows( path ) {
30
- const workflowPaths = recursiveNavigateWhileCollecting( path, [ 'workflow.js' ] );
29
+ /**
30
+ * Builds an array of workflow objects
31
+ *
32
+ * @param {string} target
33
+ * @returns {object[]}
34
+ */
35
+ export async function loadWorkflows( target ) {
31
36
  const workflows = [];
32
- for await ( const { metadata, pathname, path } of iteratorOverImportedComponents( workflowPaths ) ) {
33
- workflows.push( { ...metadata, pathname, path } );
34
- console.log( '[Core.Scanner]', 'Workflow loaded:', metadata.name, 'at', pathname );
37
+ for await ( const { metadata, path } of importComponents( target, [ 'workflow.js' ] ) ) {
38
+ console.log( '[Core.Scanner]', 'Workflow loaded:', metadata.name, 'at', path );
39
+ workflows.push( { ...metadata, path } );
35
40
  }
36
41
  return workflows;
37
42
  };
38
43
 
39
- // Creates a temporary index file importing all workflows
44
+ /**
45
+ * Creates a temporary index file importing all workflows
46
+ *
47
+ * @param {object[]} workflows
48
+ * @returns {string} The absolute path of the generated entry point file
49
+ */
40
50
  export function createWorkflowsEntryPoint( workflows ) {
41
- const entryPoint = join( __dirname, 'temp', WORKFLOWS_INDEX_FILENAME );
51
+ const path = join( __dirname, 'temp', WORKFLOWS_INDEX_FILENAME );
42
52
 
43
53
  // default system catalog workflow
44
- const catalogWorkflow = { name: 'catalog', pathname: join( __dirname, './catalog_workflow/workflow.js' ) };
45
- const workflowsIndex = [ ... workflows, catalogWorkflow ]
46
- .map( ( { name, pathname } ) => `export { default as ${name} } from '${pathname}';` )
47
- .join( '\n' );
54
+ const catalog = { name: WORKFLOW_CATALOG, path: join( __dirname, './catalog_workflow/workflow.js' ) };
55
+ const content = [ ... workflows, catalog ].map( ( { name, path } ) => `export { default as ${name} } from '${path}';` ).join( EOL );
48
56
 
49
- writeFileOnLocationSync( entryPoint, workflowsIndex );
50
- return entryPoint;
57
+ mkdirSync( dirname( path ), { recursive: true } );
58
+ writeFileSync( path, content, 'utf-8' );
59
+ return path;
51
60
  };
@@ -1,29 +1,24 @@
1
1
  import { describe, it, expect, vi, beforeEach } from 'vitest';
2
2
 
3
- const METADATA_ACCESS_SYMBOL = Symbol( '__metadata' );
4
-
5
3
  vi.mock( '#consts', () => ( {
6
- SEND_WEBHOOK_ACTIVITY_NAME: '__internal#sendWebhookPost',
7
- READ_TRACE_FILE: '__internal#readTraceFile',
4
+ ACTIVITY_SEND_WEBHOOK: '__internal#sendWebhook',
8
5
  WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js',
9
- METADATA_ACCESS_SYMBOL
6
+ WORKFLOW_CATALOG: 'catalog'
10
7
  } ) );
11
8
 
12
- const sendWebhookPostMock = vi.fn();
13
- const readTraceFileMock = vi.fn();
9
+ const sendWebhookMock = vi.fn();
14
10
  vi.mock( '#internal_activities', () => ( {
15
- sendWebhookPost: sendWebhookPostMock,
16
- readTraceFile: readTraceFileMock
11
+ sendWebhook: sendWebhookMock
17
12
  } ) );
18
13
 
19
- // Mock internal_utils to control filesystem-independent behavior
20
- const iteratorMock = vi.fn();
21
- const recursiveMock = vi.fn();
22
- const writeFileMock = vi.fn();
23
- vi.mock( './internal_utils.js', () => ( {
24
- iteratorOverImportedComponents: iteratorMock,
25
- recursiveNavigateWhileCollecting: recursiveMock,
26
- writeFileOnLocationSync: writeFileMock
14
+ const importComponentsMock = vi.fn();
15
+ vi.mock( './loader_tools.js', () => ( { importComponents: importComponentsMock } ) );
16
+
17
+ const mkdirSyncMock = vi.fn();
18
+ const writeFileSyncMock = vi.fn();
19
+ vi.mock( 'node:fs', () => ( {
20
+ mkdirSync: mkdirSyncMock,
21
+ writeFileSync: writeFileSyncMock
27
22
  } ) );
28
23
 
29
24
  describe( 'worker/loader', () => {
@@ -34,39 +29,37 @@ describe( 'worker/loader', () => {
34
29
  it( 'loadActivities returns map including system activity', async () => {
35
30
  const { loadActivities } = await import( './loader.js' );
36
31
 
37
- recursiveMock.mockReturnValue( [ { pathname: '/a/steps.js', path: '/a', url: 'file:///a/steps.js' } ] );
38
- iteratorMock.mockImplementation( async function *() {
39
- yield { component: () => {}, metadata: { name: 'Act1' }, pathname: '/a/steps.js', path: '/a' };
32
+ importComponentsMock.mockImplementationOnce( async function *() {
33
+ yield { fn: () => {}, metadata: { name: 'Act1' }, path: '/a/steps.js' };
40
34
  } );
41
35
 
42
36
  const activities = await loadActivities( '/root' );
43
37
  expect( activities['/a#Act1'] ).toBeTypeOf( 'function' );
44
- expect( activities['__internal#sendWebhookPost'] ).toBe( sendWebhookPostMock );
45
- expect( activities['__internal#readTraceFile'] ).toBe( readTraceFileMock );
38
+ expect( activities['__internal#sendWebhook'] ).toBe( sendWebhookMock );
46
39
  } );
47
40
 
48
41
  it( 'loadWorkflows returns array of workflows with metadata', async () => {
49
42
  const { loadWorkflows } = await import( './loader.js' );
50
43
 
51
- recursiveMock.mockReturnValue( [ { pathname: '/b/workflow.js', path: '/b', url: 'file:///b/workflow.js' } ] );
52
- iteratorMock.mockImplementation( async function *() {
53
- yield { metadata: { name: 'Flow1', description: 'd' }, pathname: '/b/workflow.js', path: '/b' };
44
+ importComponentsMock.mockImplementationOnce( async function *() {
45
+ yield { metadata: { name: 'Flow1', description: 'd' }, path: '/b/workflow.js' };
54
46
  } );
55
47
 
56
48
  const workflows = await loadWorkflows( '/root' );
57
- expect( workflows ).toEqual( [ { name: 'Flow1', description: 'd', pathname: '/b/workflow.js', path: '/b' } ] );
49
+ expect( workflows ).toEqual( [ { name: 'Flow1', description: 'd', path: '/b/workflow.js' } ] );
58
50
  } );
59
51
 
60
52
  it( 'createWorkflowsEntryPoint writes index and returns its path', async () => {
61
53
  const { createWorkflowsEntryPoint } = await import( './loader.js' );
62
54
 
63
- const workflows = [ { name: 'W', pathname: '/abs/wf.js' } ];
55
+ const workflows = [ { name: 'W', path: '/abs/wf.js' } ];
64
56
  const entry = createWorkflowsEntryPoint( workflows );
65
57
 
66
- expect( writeFileMock ).toHaveBeenCalledTimes( 1 );
67
- const [ writtenPath, contents ] = writeFileMock.mock.calls[0];
58
+ expect( writeFileSyncMock ).toHaveBeenCalledTimes( 1 );
59
+ const [ writtenPath, contents ] = writeFileSyncMock.mock.calls[0];
68
60
  expect( entry ).toBe( writtenPath );
69
61
  expect( contents ).toContain( 'export { default as W } from \'/abs/wf.js\';' );
70
62
  expect( contents ).toContain( 'export { default as catalog }' );
63
+ expect( mkdirSyncMock ).toHaveBeenCalledTimes( 1 );
71
64
  } );
72
65
  } );
@@ -0,0 +1,63 @@
1
+ import { resolve } from 'path';
2
+ import { pathToFileURL } from 'url';
3
+ import { METADATA_ACCESS_SYMBOL } from '#consts';
4
+ import { readdirSync } from 'fs';
5
+
6
+ /**
7
+ * @typedef {object} CollectedFile
8
+ * @property {string} path - The file path
9
+ * @property {string} url - The resolved url of the file, ready to be imported
10
+ */
11
+ /**
12
+ * @typedef {object} Component
13
+ * @property {Function} fn - The loaded component function
14
+ * @property {object} metadata - Associated metadata with the component
15
+ * @property {string} path - The file path the component was loaded from
16
+ */
17
+
18
+ /**
19
+ * Recursively traverses directories looking for files with the given names.
20
+ *
21
+ * @param {string} parentPath - The path to scan
22
+ * @param {string[]} filenames - The filenames to look for
23
+ * @returns {CollectedFile[]} An array containing the collected files
24
+ * */
25
+ const findByNameRecursively = ( parentPath, filenames, collection = [], ignoreDirNames = [ 'vendor', 'node_modules' ] ) => {
26
+ for ( const entry of readdirSync( parentPath, { withFileTypes: true } ) ) {
27
+ if ( ignoreDirNames.includes( entry.name ) ) {
28
+ continue;
29
+ }
30
+
31
+ const path = resolve( parentPath, entry.name );
32
+ if ( entry.isDirectory() ) {
33
+ findByNameRecursively( path, filenames, collection );
34
+ } else if ( filenames.includes( entry.name ) ) {
35
+ collection.push( { path, url: pathToFileURL( path ).href } );
36
+ }
37
+ }
38
+
39
+ return collection;
40
+ };
41
+
42
+ /**
43
+ * For each path, dynamic import it, and for each exported component with metadata (step, workflow), yields it.
44
+ *
45
+ * @generator
46
+ * @async
47
+ * @function importComponents
48
+ * @param {string} target - Place to look for files
49
+ * @param {string[]} filenames - File names to load recursively from target
50
+ * @yields {Component}
51
+ */
52
+ export async function *importComponents( target, filenames ) {
53
+ for ( const { url, path } of findByNameRecursively( target, filenames ) ) {
54
+ const imported = await import( url );
55
+ for ( const fn of Object.values( imported ) ) {
56
+ const metadata = fn[METADATA_ACCESS_SYMBOL];
57
+ if ( !metadata ) {
58
+ continue;
59
+ }
60
+ yield { fn, metadata, path };
61
+ }
62
+ }
63
+ };
@@ -0,0 +1,85 @@
1
+ import { describe, it, expect } from 'vitest';
2
+ import { mkdirSync, writeFileSync, rmSync } from 'node:fs';
3
+ import { join } from 'node:path';
4
+ import { importComponents } from './loader_tools.js';
5
+
6
+ describe( '.importComponents', () => {
7
+ it( 'imports modules and yields metadata from exports tagged with METADATA_ACCESS_SYMBOL', async () => {
8
+ const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}` );
9
+ mkdirSync( root, { recursive: true } );
10
+ const file = join( root, 'meta.module.js' );
11
+ writeFileSync( file, [
12
+ 'import { METADATA_ACCESS_SYMBOL } from "#consts";',
13
+ 'export const StepA = () => {};',
14
+ 'StepA[METADATA_ACCESS_SYMBOL] = { kind: "step", name: "a" };',
15
+ 'export const FlowB = () => {};',
16
+ 'FlowB[METADATA_ACCESS_SYMBOL] = { kind: "workflow", name: "b" };'
17
+ ].join( '\n' ) );
18
+
19
+ const collected = [];
20
+ for await ( const m of importComponents( root, [ 'meta.module.js' ] ) ) {
21
+ collected.push( m );
22
+ }
23
+
24
+ expect( collected.length ).toBe( 2 );
25
+ expect( collected.map( m => m.metadata.name ).sort() ).toEqual( [ 'a', 'b' ] );
26
+ expect( collected.map( m => m.metadata.kind ).sort() ).toEqual( [ 'step', 'workflow' ] );
27
+ for ( const m of collected ) {
28
+ expect( m.path ).toBe( file );
29
+ expect( typeof m.fn ).toBe( 'function' );
30
+ }
31
+
32
+ rmSync( root, { recursive: true, force: true } );
33
+ } );
34
+
35
+ it( 'ignores exports without metadata symbol', async () => {
36
+ const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}-nometa` );
37
+ mkdirSync( root, { recursive: true } );
38
+ const file = join( root, 'meta.module.js' );
39
+ writeFileSync( file, [
40
+ 'export const Plain = () => {};',
41
+ 'export const AlsoPlain = {}'
42
+ ].join( '\n' ) );
43
+
44
+ const collected = [];
45
+ for await ( const m of importComponents( root, [ 'meta.module.js' ] ) ) {
46
+ collected.push( m );
47
+ }
48
+
49
+ expect( collected.length ).toBe( 0 );
50
+ rmSync( root, { recursive: true, force: true } );
51
+ } );
52
+
53
+ it( 'skips files inside ignored directories (node_modules, vendor)', async () => {
54
+ const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}-ignoredirs` );
55
+ const okDir = join( root, 'ok' );
56
+ const nmDir = join( root, 'node_modules' );
57
+ const vendorDir = join( root, 'vendor' );
58
+ mkdirSync( okDir, { recursive: true } );
59
+ mkdirSync( nmDir, { recursive: true } );
60
+ mkdirSync( vendorDir, { recursive: true } );
61
+
62
+ const okFile = join( okDir, 'meta.module.js' );
63
+ const nmFile = join( nmDir, 'meta.module.js' );
64
+ const vendorFile = join( vendorDir, 'meta.module.js' );
65
+
66
+ const fileContents = [
67
+ 'import { METADATA_ACCESS_SYMBOL } from "#consts";',
68
+ 'export const C = () => {};',
69
+ 'C[METADATA_ACCESS_SYMBOL] = { kind: "step", name: "c" };'
70
+ ].join( '\n' );
71
+ writeFileSync( okFile, fileContents );
72
+ writeFileSync( nmFile, fileContents );
73
+ writeFileSync( vendorFile, fileContents );
74
+
75
+ const collected = [];
76
+ for await ( const m of importComponents( root, [ 'meta.module.js' ] ) ) {
77
+ collected.push( m );
78
+ }
79
+
80
+ expect( collected.length ).toBe( 1 );
81
+ expect( collected[0].path ).toBe( okFile );
82
+
83
+ rmSync( root, { recursive: true, force: true } );
84
+ } );
85
+ } );
@@ -1,16 +1,66 @@
1
- import { Storage } from './async_storage.js';
2
- import { trace } from './tracer/index.js';
3
- import { THIS_LIB_NAME } from '#consts';
1
+ import { WORKFLOW_CATALOG } from '#consts';
2
+ import { addEventStart, addEventEnd, addEventError } from '#tracing';
4
3
 
4
+ /**
5
+ * Start a workflow trace event
6
+ *
7
+ * @param {function} method - Trace function to call
8
+ * @param {object} workflowInfo - Temporal workflowInfo object
9
+ * @param {object} details - The details to attach to the event
10
+ */
11
+ const addWorkflowEvent = ( method, workflowInfo, details ) => {
12
+ const { workflowId: id, workflowType: name, memo: { parentId, executionContext } } = workflowInfo;
13
+ if ( name === WORKFLOW_CATALOG ) {
14
+ return;
15
+ } // ignore internal catalog events
16
+ method( { id, kind: 'workflow', name, details, parentId, executionContext } );
17
+ };
18
+
19
+ /**
20
+ * Add a trace event (start, end or error) with the given configuration
21
+ *
22
+ * @param {function} method - Trace function to call
23
+ * @param {object} workflowInfo - Temporal workflowInfo object
24
+ * @param {object} options - Trace options, like id, kind, name and details
25
+ */
26
+ const addEvent = ( method, workflowInfo, options ) => {
27
+ const { id, name, kind, details } = options;
28
+ const { workflowId, memo: { executionContext } } = workflowInfo;
29
+ method( { id, kind, name, details, parentId: workflowId, executionContext } );
30
+ };
31
+
32
+ // This sink allow for sandbox Temporal environment to send trace logs back to the main thread.
5
33
  export const sinks = {
6
- // This sink allow for sandbox Temporal environment to send trace logs back to the main thread.
7
- log: {
8
- trace: {
9
- fn( workflowInfo, args ) {
10
- const { workflowId, workflowType, memo } = workflowInfo;
11
- Storage.runWithContext( _ => trace( { lib: THIS_LIB_NAME, ...args } ), { workflowId, workflowType, ...memo } );
12
- },
34
+ trace: {
35
+ addWorkflowEventStart: {
36
+ fn: ( ...args ) => addWorkflowEvent( addEventStart, ...args ),
37
+ callDuringReplay: false
38
+ },
39
+
40
+ addWorkflowEventEnd: {
41
+ fn: ( ...args ) => addWorkflowEvent( addEventEnd, ...args ),
42
+ callDuringReplay: false
43
+ },
44
+
45
+ addWorkflowEventError: {
46
+ fn: ( ...args ) => addWorkflowEvent( addEventError, ...args ),
13
47
  callDuringReplay: false
48
+ },
49
+
50
+ addEventStart: {
51
+ fn: ( ...args ) => addEvent( addEventStart, ...args ),
52
+ callDuringReplay: false
53
+ },
54
+
55
+ addEventEnd: {
56
+ fn: ( ...args ) => addEvent( addEventEnd, ...args ),
57
+ callDuringReplay: false
58
+ },
59
+
60
+ addEventError: {
61
+ fn: ( ...args ) => addEvent( addEventError, ...args ),
62
+ callDuringReplay: false
63
+
14
64
  }
15
65
  }
16
66
  };
package/src/configs.js DELETED
@@ -1,36 +0,0 @@
1
- import * as z from 'zod';
2
-
3
- class InvalidEnvVarsErrors extends Error { }
4
-
5
- const envVarSchema = z.object( {
6
- TEMPORAL_ADDRESS: z.string().optional().default( 'localhost:7233' ),
7
- TEMPORAL_NAMESPACE: z.string().optional().default( 'default' ),
8
- TEMPORAL_API_KEY: z.string().optional(),
9
- CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
10
- API_AUTH_KEY: z.string().optional(),
11
- TRACING_ENABLED: z.stringbool().optional()
12
- } );
13
-
14
- const { data: safeEnvVar, error } = envVarSchema.safeParse( process.env );
15
- if ( error ) {
16
- throw new InvalidEnvVarsErrors( z.prettifyError( error ) );
17
- }
18
-
19
- export const worker = {
20
- address: safeEnvVar.TEMPORAL_ADDRESS,
21
- apiKey: safeEnvVar.TEMPORAL_API_KEY,
22
- executionTimeout: '1m',
23
- maxActivities: 100,
24
- maxWorkflows: 100,
25
- namespace: safeEnvVar.TEMPORAL_NAMESPACE ?? 'default',
26
- taskQueue: safeEnvVar.CATALOG_ID,
27
- catalogId: safeEnvVar.CATALOG_ID
28
- };
29
-
30
- export const api = {
31
- authKey: safeEnvVar.API_AUTH_KEY
32
- };
33
-
34
- export const tracing = {
35
- enabled: safeEnvVar.TRACING_ENABLED
36
- };