@output.ai/core 0.0.7 → 0.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/README.md +85 -59
  2. package/package.json +10 -3
  3. package/src/configs.js +1 -1
  4. package/src/consts.js +4 -3
  5. package/src/errors.js +11 -0
  6. package/src/index.d.ts +302 -30
  7. package/src/index.js +3 -2
  8. package/src/interface/metadata.js +3 -3
  9. package/src/interface/step.js +18 -4
  10. package/src/interface/utils.js +41 -4
  11. package/src/interface/utils.spec.js +71 -0
  12. package/src/interface/validations/ajv_provider.js +3 -0
  13. package/src/interface/validations/runtime.js +69 -0
  14. package/src/interface/validations/runtime.spec.js +50 -0
  15. package/src/interface/validations/static.js +67 -0
  16. package/src/interface/validations/static.spec.js +101 -0
  17. package/src/interface/webhook.js +15 -14
  18. package/src/interface/workflow.js +45 -40
  19. package/src/internal_activities/index.js +16 -5
  20. package/src/worker/catalog_workflow/catalog.js +105 -0
  21. package/src/worker/catalog_workflow/index.js +21 -0
  22. package/src/worker/catalog_workflow/index.spec.js +139 -0
  23. package/src/worker/catalog_workflow/workflow.js +13 -0
  24. package/src/worker/index.js +41 -5
  25. package/src/worker/interceptors/activity.js +3 -2
  26. package/src/worker/internal_utils.js +60 -0
  27. package/src/worker/internal_utils.spec.js +134 -0
  28. package/src/worker/loader.js +30 -44
  29. package/src/worker/loader.spec.js +68 -0
  30. package/src/worker/sinks.js +2 -1
  31. package/src/worker/tracer/index.js +35 -3
  32. package/src/worker/tracer/index.test.js +115 -0
  33. package/src/worker/tracer/tracer_tree.js +29 -5
  34. package/src/worker/tracer/tracer_tree.test.js +116 -0
  35. package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.js +133 -0
  36. package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.spec.js +77 -0
  37. package/src/worker/webpack_loaders/workflow_rewriter/consts.js +3 -0
  38. package/src/worker/webpack_loaders/workflow_rewriter/index.mjs +58 -0
  39. package/src/worker/webpack_loaders/workflow_rewriter/index.spec.js +129 -0
  40. package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.js +70 -0
  41. package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.spec.js +33 -0
  42. package/src/worker/webpack_loaders/workflow_rewriter/tools.js +245 -0
  43. package/src/worker/webpack_loaders/workflow_rewriter/tools.spec.js +144 -0
  44. package/src/errors.d.ts +0 -3
  45. package/src/worker/temp/__workflows_entrypoint.js +0 -6
@@ -0,0 +1,105 @@
1
/**
 * Represents the collection of metadata from workflows and activities that a worker has.
 */
export class Catalog {
  /**
   * All workflows in the catalog
   * @type {Array<CatalogWorkflow>}
   */
  workflows = [];

  /**
   * Add a workflow entry to the catalog.
   *
   * @param {CatalogWorkflow} workflow - Workflow to add.
   * @returns {Catalog} This catalog instance (for chaining).
   */
  addWorkflow( workflow ) {
    this.workflows.push( workflow );
    return this;
  }
}
26
+
27
/**
 * Base type for catalog entries (workflows, activities).
 *
 * Encapsulates common descriptive fields and JSON schemas.
 */
class CatalogEntry {
  /**
   * Name of the entry. Only letters, numbers and _ allowed.
   * @type {string}
   */
  name;
  /**
   * Optional description.
   * @type {string|undefined}
   */
  description;
  /**
   * JSON schema describing the expected input.
   * @type {object}
   */
  inputSchema;
  /**
   * JSON schema describing the produced output.
   * @type {object}
   */
  outputSchema;
  /**
   * Absolute path of the entity in the file system.
   * @type {string}
   */
  path;

  /**
   * @param {Object} params - Entry parameters.
   * @param {string} params.name - Name of the entry.
   * @param {string} [params.description] - Optional description.
   * @param {object} [params.inputSchema] - JSON schema describing the expected input.
   * @param {object} [params.outputSchema] - JSON schema describing the produced output.
   * @param {string} params.path - Absolute path of the entity in the file system.
   */
  constructor( params ) {
    const { name, description, inputSchema, outputSchema, path } = params;
    Object.assign( this, { name, description, inputSchema, outputSchema, path } );
  }
}
75
+
76
/**
 * Describes a single workflow within the catalog.
 *
 * @class
 * @extends CatalogEntry
 */
export class CatalogWorkflow extends CatalogEntry {
  /**
   * Each activity of this workflow.
   * @type {Array<CatalogActivity>}
   */
  activities;

  /**
   * @inheritdoc
   * @param {Array<CatalogActivity>} params.activities - Activities referenced by this workflow.
   */
  constructor( { activities, ...entryFields } ) {
    super( entryFields );
    this.activities = activities;
  }
}

/**
 * Describes a single activity within a workflow.
 *
 * @class
 * @extends CatalogEntry
 */
export class CatalogActivity extends CatalogEntry {}
@@ -0,0 +1,21 @@
1
+ import { METADATA_ACCESS_SYMBOL } from '#consts';
2
+ import { Catalog, CatalogActivity, CatalogWorkflow } from './catalog.js';
3
+
4
/**
 * Converts the list of workflows and the activities into the catalog information.
 *
 * This has information of all workflows and their activities from this worker.
 *
 * @param {object} params - Loader output.
 * @param {object[]} params.workflows - The workflows objects, as they are returned from the loader module
 * @param {object} params.activities - The activities functions map with metadata, as they are returned from the loader module
 * @returns {Catalog} A Catalog instance
 */
export const createCatalog = ( { workflows, activities } ) => {
  const catalog = new Catalog();
  const activityEntries = Object.entries( activities );

  for ( const workflow of workflows ) {
    // Activity keys are of the form `<workflow path>#<activity name>`,
    // so a prefix match groups each activity under its workflow.
    const prefix = `${workflow.path}#`;
    const workflowActivities = activityEntries
      .filter( ( [ key ] ) => key.startsWith( prefix ) )
      .map( ( [ , fn ] ) => new CatalogActivity( fn[METADATA_ACCESS_SYMBOL] ) );

    catalog.addWorkflow( new CatalogWorkflow( { ...workflow, activities: workflowActivities } ) );
  }

  return catalog;
};
@@ -0,0 +1,139 @@
1
import { describe, it, expect, vi } from 'vitest';

// Provide the same symbol to the module under test and to the test
const METADATA_ACCESS_SYMBOL = Symbol( '__metadata' );
vi.mock( '#consts', () => ( {
  METADATA_ACCESS_SYMBOL
} ) );

// Builds an activity function carrying the given metadata under the (non-enumerable) metadata symbol.
const makeActivity = metadata => {
  const fn = () => {};
  Object.defineProperty( fn, METADATA_ACCESS_SYMBOL, { value: metadata, writable: false, enumerable: false, configurable: false } );
  return fn;
};

// Projects a catalog entry down to its plain descriptive fields for comparison.
const describeEntry = ( { name, description, inputSchema, outputSchema } ) =>
  ( { name, description, inputSchema, outputSchema } );

describe( 'createCatalog', () => {
  it( 'builds catalog with activities grouped by workflow path and returns Catalog with CatalogWorkflow entries', async () => {
    const { createCatalog } = await import( './index.js' );

    const workflows = [
      { name: 'flow1', path: '/flows/flow1', pathname: '/flows/flow1/workflow.js', description: 'desc-flow1', inputSchema: { in: 'f1' }, outputSchema: { out: 'f1' } },
      { name: 'flow2', path: '/flows/flow2', pathname: '/flows/flow2/workflow.js', description: 'desc-flow2', inputSchema: { in: 'f2' }, outputSchema: { out: 'f2' } }
    ];

    const activities = {
      '/flows/flow1#A1': makeActivity( { name: 'A1', path: '/flows/flow1#A1', description: 'desc-a1', inputSchema: { in: 'a1' }, outputSchema: { out: 'a1' } } ),
      '/flows/flow1#A2': makeActivity( { name: 'A2', path: '/flows/flow1#A2', description: 'desc-a2', inputSchema: { in: 'a2' }, outputSchema: { out: 'a2' } } ),
      '/flows/flow2#B1': makeActivity( { name: 'B1', path: '/flows/flow2#B1', description: 'desc-b1', inputSchema: { in: 'b1' }, outputSchema: { out: 'b1' } } ),
      // Not under any workflow path — must be excluded from every workflow's activities.
      '/other#X': makeActivity( { name: 'X', path: '/other#X', description: 'desc-x', inputSchema: { in: 'x' }, outputSchema: { out: 'x' } } )
    };

    const catalog = createCatalog( { workflows, activities } );

    const mapped = catalog.workflows.map( w => ( {
      ...describeEntry( w ),
      path: w.path,
      activities: w.activities.map( describeEntry )
    } ) );

    expect( mapped ).toEqual( [
      {
        name: 'flow1',
        path: '/flows/flow1',
        description: 'desc-flow1',
        inputSchema: { in: 'f1' },
        outputSchema: { out: 'f1' },
        activities: [
          { name: 'A1', description: 'desc-a1', inputSchema: { in: 'a1' }, outputSchema: { out: 'a1' } },
          { name: 'A2', description: 'desc-a2', inputSchema: { in: 'a2' }, outputSchema: { out: 'a2' } }
        ]
      },
      {
        name: 'flow2',
        path: '/flows/flow2',
        description: 'desc-flow2',
        inputSchema: { in: 'f2' },
        outputSchema: { out: 'f2' },
        activities: [
          { name: 'B1', description: 'desc-b1', inputSchema: { in: 'b1' }, outputSchema: { out: 'b1' } }
        ]
      }
    ] );

    // Original inputs are not mutated
    expect( workflows[0].path ).toBe( '/flows/flow1' );
    expect( workflows[0].pathname ).toBe( '/flows/flow1/workflow.js' );
    expect( workflows[1].path ).toBe( '/flows/flow2' );
    expect( workflows[1].pathname ).toBe( '/flows/flow2/workflow.js' );
  } );
} );
@@ -0,0 +1,13 @@
1
+ import { defineQuery, setHandler } from '@temporalio/workflow';
2
+
3
/**
 * This is a special workflow, unique to each worker, which holds the meta information of all other workflows in that worker.
 *
 * The information is set in the startup and is accessible via a query called 'get'.
 *
 * @param {object} catalog - The catalog information
 */
export default async function catalogWorkflow( catalog ) {
  // Answer the 'get' query with the catalog captured at startup.
  setHandler( defineQuery( 'get' ), () => catalog );

  // Never-resolving promise: keeps the workflow alive so the query stays serviceable.
  const forever = new Promise( () => {} );
  await forever;
}
@@ -1,11 +1,14 @@
1
1
  import { Worker, NativeConnection } from '@temporalio/worker';
2
+ import { Client } from '@temporalio/client';
3
+ import { WorkflowIdConflictPolicy } from '@temporalio/common';
2
4
  import { dirname, join } from 'path';
3
5
  import { fileURLToPath } from 'node:url';
4
6
  import { worker as workerConfig } from '#configs';
5
- import { loadActivities, loadWorkflows } from './loader.js';
7
+ import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
6
8
  import { ActivityExecutionInterceptor } from './interceptors/activity.js';
7
9
  import { setupGlobalTracer } from './tracer/index.js';
8
10
  import { sinks } from './sinks.js';
11
+ import { createCatalog } from './catalog_workflow/index.js';
9
12
 
10
13
  const __dirname = dirname( fileURLToPath( import.meta.url ) );
11
14
 
@@ -19,16 +22,22 @@ const callerDir = process.argv[2];
19
22
 
20
23
  ( async () => {
21
24
  console.log( '[Core]', 'Loading workflows...', { callerDir } );
22
- const workflowsPath = await loadWorkflows( callerDir );
25
+ const workflows = await loadWorkflows( callerDir );
23
26
 
24
27
  console.log( '[Core]', 'Loading activities...', { callerDir } );
25
28
  const activities = await loadActivities( callerDir );
26
29
 
30
+ console.log( '[Core]', 'Creating worker entry point...' );
31
+ const workflowsPath = createWorkflowsEntryPoint( workflows );
32
+
33
+ console.log( '[Core]', 'Creating workflows catalog...' );
34
+ const catalog = createCatalog( { workflows, activities } );
35
+
27
36
  console.log( '[Core]', 'Connecting Temporal...' );
28
37
  // enable TLS only when connecting remote (api key is present)
29
38
  const connection = await NativeConnection.connect( { address, tls: Boolean( apiKey ), apiKey } );
30
39
 
31
- console.log( '[Core]', 'Creating Worker...' );
40
+ console.log( '[Core]', 'Creating worker...' );
32
41
  const worker = await Worker.create( {
33
42
  connection,
34
43
  namespace,
@@ -41,9 +50,36 @@ const callerDir = process.argv[2];
41
50
  activityInbound: [ () => new ActivityExecutionInterceptor() ]
42
51
  },
43
52
  maxConcurrentWorkflowTaskExecutions: maxWorkflows,
44
- maxConcurrentActivityTaskExecutions: maxActivities
53
+ maxConcurrentActivityTaskExecutions: maxActivities,
54
+ bundlerOptions: {
55
+ webpackConfigHook: config => {
56
+ if ( !config.module ) {
57
+ config.module = { };
58
+ }
59
+ if ( !config.module.rules ) {
60
+ config.module.rules = [];
61
+ }
62
+ // Use AST-based loader for rewriting steps/workflows
63
+ config.module.rules.push( {
64
+ test: /\.js$/,
65
+ exclude: /node_modules/,
66
+ use: {
67
+ loader: join( __dirname, './webpack_loaders/workflow_rewriter/index.mjs' )
68
+ }
69
+ } );
70
+ return config;
71
+ }
72
+ }
73
+ } );
74
+
75
+ console.log( '[Core]', 'Starting catalog workflow...' );
76
+ await new Client( { connection, namespace } ).workflow.start( 'catalog', {
77
+ taskQueue,
78
+ workflowId: taskQueue, // use the name of the task queue as the catalog name, ensuring uniqueness
79
+ workflowIdConflictPolicy: WorkflowIdConflictPolicy.TERMINATE_EXISTING,
80
+ args: [ catalog ]
45
81
  } );
46
82
 
47
- console.log( '[Core] Starting...' );
83
+ console.log( '[Core]', 'Running worker...' );
48
84
  worker.run();
49
85
  } )();
@@ -3,6 +3,7 @@ import { Storage } from '../async_storage.js';
3
3
  import { trace } from '../tracer/index.js';
4
4
  import { TraceEvent } from '../tracer/types.js';
5
5
  import { headersToObject } from '../sandboxed_utils.js';
6
+ import { THIS_LIB_NAME } from '#consts';
6
7
 
7
8
  /*
8
9
  This interceptor is called for every activity execution
@@ -19,9 +20,9 @@ export class ActivityExecutionInterceptor {
19
20
  const context = { workflowId, workflowType, activityId, activityType, ...headersToObject( input.headers ) };
20
21
 
21
22
  return Storage.runWithContext( async _ => {
22
- trace( { lib: 'core', event: TraceEvent.STEP_START, input: input.args } );
23
+ trace( { lib: THIS_LIB_NAME, event: TraceEvent.STEP_START, input: input.args } );
23
24
  const output = await next( input );
24
- trace( { lib: 'core', event: TraceEvent.STEP_END, output } );
25
+ trace( { lib: THIS_LIB_NAME, event: TraceEvent.STEP_END, output } );
25
26
  return output;
26
27
  }, context );
27
28
  }
@@ -0,0 +1,60 @@
1
+ /*
2
+ * These tools can't be used in sandbox environment!!!
3
+ */
4
import { dirname, resolve } from 'path';
import { pathToFileURL } from 'url';
import { METADATA_ACCESS_SYMBOL } from '#consts';
import { writeFileSync, existsSync, readdirSync, mkdirSync } from 'fs';
8
+
9
/**
 * Recursive traverse directories looking for files with given name,
 * For each found file, return its path, pathname and URI
 *
 * @param {string} path - The path to scan
 * @param {string[]} filenames - The filenames to look for
 * @param {Array<{pathname: string, path: string, url: string}>} [collection] - Accumulator used by the recursion.
 * @param {string[]} [ignoreDirNames] - Directory names to skip entirely.
 * @returns {Array<{pathname: string, path: string, url: string}>} One object with path, pathname and URL per file found
 */
export function recursiveNavigateWhileCollecting( path, filenames, collection = [], ignoreDirNames = [ 'vendor', 'node_modules' ] ) {
  for ( const entry of readdirSync( path, { withFileTypes: true } ) ) {
    if ( ignoreDirNames.includes( entry.name ) ) {
      continue;
    }

    const pathname = resolve( path, entry.name );
    if ( entry.isDirectory() ) {
      // Pass ignoreDirNames through: previously the recursion silently fell back to the
      // defaults, so a caller-supplied ignore list was only honored at the top level.
      recursiveNavigateWhileCollecting( pathname, filenames, collection, ignoreDirNames );
    } else if ( filenames.includes( entry.name ) ) {
      collection.push( { pathname, path, url: pathToFileURL( pathname ).href } );
    }
  }

  return collection;
}
33
+
34
/**
 * For each path, dynamic import it, and for each exported component with metadata (step, workflow), yields it.
 * @param {string[]} paths - Paths of the files to import
 */
export async function *iteratorOverImportedComponents( paths ) {
  for ( const { url, path, pathname } of paths ) {
    const exportedValues = Object.values( await import( url ) );
    for ( const component of exportedValues ) {
      const metadata = component[METADATA_ACCESS_SYMBOL];
      // Exports without attached metadata are not steps/workflows — skip them.
      if ( metadata ) {
        yield { component, metadata, path, pathname };
      }
    }
  }
}
50
+
51
/**
 * Write a file using the same signature as Node's FS writeFileSync, but recursively creates the necessary directories in the path.
 *
 * @param {string} path - Target file path.
 * @param {string|Buffer} content - Content to write (written as UTF-8).
 */
export function writeFileOnLocationSync( path, content ) {
  // dirname() handles both '/' and '\' separators; the previous split('/') broke on Windows paths.
  const targetDir = dirname( path );
  if ( targetDir && !existsSync( targetDir ) ) {
    mkdirSync( targetDir, { recursive: true } );
  }
  writeFileSync( path, content, 'utf-8' );
}
@@ -0,0 +1,134 @@
1
import { describe, it, expect } from 'vitest';
import { mkdtempSync, mkdirSync, writeFileSync, rmSync, readFileSync, existsSync } from 'node:fs';
import { join, resolve } from 'node:path';
import { tmpdir } from 'node:os';
import { pathToFileURL } from 'node:url';
import { recursiveNavigateWhileCollecting, iteratorOverImportedComponents, writeFileOnLocationSync } from './internal_utils.js';

const makeTmpRoot = prefix => mkdtempSync( join( tmpdir(), prefix ) );

// Collects every item produced by an async iterator into an array.
const drain = async iterator => {
  const collected = [];
  for await ( const item of iterator ) {
    collected.push( item );
  }
  return collected;
};

// Writes a 'meta.module.js' module with the given source lines, creating the directory first.
const writeModule = ( dir, lines ) => {
  mkdirSync( dir, { recursive: true } );
  const file = join( dir, 'meta.module.js' );
  writeFileSync( file, lines.join( '\n' ) );
  return file;
};

describe( '.recursiveNavigateWhileCollecting', () => {
  it( 'collects matching files recursively (happy path)', () => {
    const root = makeTmpRoot( 'nsu-happy-' );
    const target = 'target.txt';

    // layout:
    // root/target.txt
    // root/a/target.txt
    // root/b/c/target.txt
    mkdirSync( join( root, 'a' ), { recursive: true } );
    mkdirSync( join( root, 'b', 'c' ), { recursive: true } );
    writeFileSync( join( root, target ), 'root' );
    writeFileSync( join( root, 'a', target ), 'a' );
    writeFileSync( join( root, 'b', 'c', target ), 'bc' );

    const results = recursiveNavigateWhileCollecting( root, [ target ] );

    expect( results.length ).toBe( 3 );
    results.forEach( ( { pathname, path, url } ) => {
      expect( url ).toBe( pathToFileURL( pathname ).href );
      expect( resolve( path, target ) ).toBe( pathname );
    } );

    rmSync( root, { recursive: true, force: true } );
  } );

  it( 'skips files inside ignored directories (ignoreDirNames)', () => {
    const root = makeTmpRoot( 'nsu-ignore-' );
    const target = 'target.txt';

    // node_modules and vendor are ignored by default; only ok/ is collected
    for ( const dir of [ 'node_modules', 'vendor', 'ok' ] ) {
      mkdirSync( join( root, dir ), { recursive: true } );
    }
    writeFileSync( join( root, 'node_modules', target ), 'nm' );
    writeFileSync( join( root, 'vendor', target ), 'v' );
    writeFileSync( join( root, 'ok', target ), 'ok' );

    const results = recursiveNavigateWhileCollecting( root, [ target ] );

    expect( results.length ).toBe( 1 );
    expect( results[0].pathname ).toBe( join( root, 'ok', target ) );
    expect( results[0].path ).toBe( join( root, 'ok' ) );
    expect( results[0].url ).toBe( pathToFileURL( results[0].pathname ).href );

    rmSync( root, { recursive: true, force: true } );
  } );
} );

describe( '.iteratorOverImportedComponents', () => {
  it( 'imports modules and yields metadata from exports tagged with METADATA_ACCESS_SYMBOL', async () => {
    const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}` );
    const file = writeModule( root, [
      'import { METADATA_ACCESS_SYMBOL } from "#consts";',
      'export const StepA = () => {};',
      'StepA[METADATA_ACCESS_SYMBOL] = { kind: "step", name: "a" };',
      'export const FlowB = () => {};',
      'FlowB[METADATA_ACCESS_SYMBOL] = { kind: "workflow", name: "b" };'
    ] );

    const paths = recursiveNavigateWhileCollecting( root, [ 'meta.module.js' ] );
    const collected = await drain( iteratorOverImportedComponents( paths ) );

    expect( collected.length ).toBe( 2 );
    expect( collected.map( m => m.metadata.name ).sort() ).toEqual( [ 'a', 'b' ] );
    expect( collected.map( m => m.metadata.kind ).sort() ).toEqual( [ 'step', 'workflow' ] );
    for ( const m of collected ) {
      expect( m.pathname ).toBe( file );
      expect( m.path ).toBe( root );
      expect( typeof m.component ).toBe( 'function' );
    }

    rmSync( root, { recursive: true, force: true } );
  } );

  it( 'ignores exports without metadata symbol', async () => {
    const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}-nometa` );
    writeModule( root, [
      'export const Plain = () => {};',
      'export const AlsoPlain = {}'
    ] );

    const paths = recursiveNavigateWhileCollecting( root, [ 'meta.module.js' ] );
    const collected = await drain( iteratorOverImportedComponents( paths ) );

    expect( collected.length ).toBe( 0 );
    rmSync( root, { recursive: true, force: true } );
  } );
} );

describe( '.writeFileOnLocationSync', () => {
  it( 'creates missing directories and writes file', () => {
    const root = makeTmpRoot( 'nsu-write-' );
    const nested = join( root, 'a', 'b', 'c.txt' );

    writeFileOnLocationSync( nested, 'hello' );

    expect( existsSync( join( root, 'a', 'b' ) ) ).toBe( true );
    expect( readFileSync( nested, 'utf-8' ) ).toBe( 'hello' );
    rmSync( root, { recursive: true, force: true } );
  } );

  it( 'overwrites existing content', () => {
    const root = makeTmpRoot( 'nsu-write2-' );
    const file = join( root, 'x', 'y.txt' );
    mkdirSync( join( root, 'x' ), { recursive: true } );
    writeFileSync( file, 'old' );

    writeFileOnLocationSync( file, 'new' );

    expect( readFileSync( file, 'utf-8' ) ).toBe( 'new' );
    rmSync( root, { recursive: true, force: true } );
  } );
} );
@@ -1,64 +1,50 @@
1
- import { readdirSync, writeFileSync, existsSync, mkdirSync } from 'fs';
2
- import { dirname, join, resolve } from 'path';
3
- import { pathToFileURL, fileURLToPath } from 'url';
4
- import { nameSymbol, sendWebhookPostName, workflowsIndexFileName } from '#consts';
1
+ import { dirname, join } from 'path';
2
+ import { fileURLToPath } from 'url';
5
3
  import { sendWebhookPost } from '#internal_activities';
6
-
7
- const ignoreDirNames = [ 'vendor', 'node_modules' ];
4
+ import { SEND_WEBHOOK_ACTIVITY_NAME, WORKFLOWS_INDEX_FILENAME } from '#consts';
5
+ import {
6
+ iteratorOverImportedComponents,
7
+ recursiveNavigateWhileCollecting,
8
+ writeFileOnLocationSync
9
+ } from './internal_utils.js';
8
10
 
9
11
  const __dirname = dirname( fileURLToPath( import.meta.url ) );
10
12
 
11
- // recursive run directories and when find files with given name, save their path, pathname and URI
12
- const recursiveNavigateWhileCollecting = ( path, filenames, collection = [] ) => {
13
- for ( const entry of readdirSync( path, { withFileTypes: true } ) ) {
14
- if ( ignoreDirNames.includes( entry.name ) ) { continue; }
15
-
16
- const pathname = resolve( path, entry.name );
17
- if ( entry.isDirectory() ) {
18
- recursiveNavigateWhileCollecting( pathname, filenames, collection );
19
- } else if ( filenames.includes( entry.name ) ) {
20
- collection.push( { pathname, path, url: pathToFileURL( pathname ).href } );
21
- }
22
- }
23
-
24
- return collection;
25
- };
26
-
13
+ // returns a map of activities, where the key is they path + name and the value is the function with metadata
27
14
  export async function loadActivities( path ) {
28
15
  const activityPaths = recursiveNavigateWhileCollecting( path, [ 'steps.js' ] );
29
16
  const activities = [];
30
- for ( const { path, pathname, url } of activityPaths ) {
31
- const exported = await import( url );
32
- for ( const [ name, fn ] of Object.values( exported ).map( v => [ v[nameSymbol], v ] ).filter( v => v[0] ) ) {
33
- console.log( '[Core.Scanner]', 'Activity loaded', name, pathname );
34
- activities[`${path}#${name}`] = fn;
35
- }
17
+ for await ( const { component, metadata, pathname, path } of iteratorOverImportedComponents( activityPaths ) ) {
18
+ console.log( '[Core.Scanner]', 'Activity loaded:', metadata.name, 'at', pathname );
19
+ activities[`${path}#${metadata.name}`] = component;
36
20
  }
37
21
 
38
22
  // system activities
39
- activities[sendWebhookPostName] = sendWebhookPost;
23
+ activities[SEND_WEBHOOK_ACTIVITY_NAME] = sendWebhookPost;
40
24
  return activities;
41
25
  };
42
26
 
27
+ // Returns array of workflow object, which are function with metadata attached
43
28
  export async function loadWorkflows( path ) {
44
- const workflowPaths = recursiveNavigateWhileCollecting( path, [ 'index.js' ] );
29
+ const workflowPaths = recursiveNavigateWhileCollecting( path, [ 'workflow.js' ] );
45
30
  const workflows = [];
46
- for ( const { pathname, url } of workflowPaths ) {
47
- const exported = await import( url );
48
-
49
- for ( const name of Object.values( exported ).map( v => v[nameSymbol] ).filter( n => n ) ) {
50
- console.log( '[Core.Scanner]', 'Workflow loaded', name, pathname );
51
- workflows.push( { name, pathname } );
52
- }
31
+ for await ( const { metadata, pathname, path } of iteratorOverImportedComponents( workflowPaths ) ) {
32
+ workflows.push( { ...metadata, pathname, path } );
33
+ console.log( '[Core.Scanner]', 'Workflow loaded:', metadata.name, 'at', pathname );
53
34
  }
35
+ return workflows;
36
+ };
54
37
 
55
- const tempFolder = join( __dirname, 'temp' );
56
- const entryPoint = join( tempFolder, workflowsIndexFileName );
57
- const workflowsIndex = workflows.map( ( { name, pathname } ) => `export { default as ${name} } from '${pathname}';` ).join( '\n' );
38
+ // Creates a temporary index file importing all workflows
39
+ export function createWorkflowsEntryPoint( workflows ) {
40
+ const entryPoint = join( __dirname, 'temp', WORKFLOWS_INDEX_FILENAME );
58
41
 
59
- if ( !existsSync( tempFolder ) ) {
60
- mkdirSync( tempFolder );
61
- }
62
- writeFileSync( entryPoint, workflowsIndex, 'utf-8' );
42
+ // default system catalog workflow
43
+ const catalogWorkflow = { name: 'catalog', pathname: join( __dirname, './catalog_workflow/workflow.js' ) };
44
+ const workflowsIndex = [ ... workflows, catalogWorkflow ]
45
+ .map( ( { name, pathname } ) => `export { default as ${name} } from '${pathname}';` )
46
+ .join( '\n' );
47
+
48
+ writeFileOnLocationSync( entryPoint, workflowsIndex );
63
49
  return entryPoint;
64
50
  };