@output.ai/core 0.0.7 → 0.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -3
- package/src/configs.js +1 -1
- package/src/consts.js +3 -3
- package/src/index.d.ts +302 -30
- package/src/index.js +3 -2
- package/src/interface/metadata.js +3 -3
- package/src/interface/step.js +3 -3
- package/src/interface/webhook.js +13 -14
- package/src/interface/workflow.js +22 -42
- package/src/internal_activities/index.js +16 -5
- package/src/worker/catalog_workflow/catalog.js +105 -0
- package/src/worker/catalog_workflow/index.js +21 -0
- package/src/worker/catalog_workflow/index.spec.js +139 -0
- package/src/worker/catalog_workflow/workflow.js +13 -0
- package/src/worker/index.js +37 -5
- package/src/worker/internal_utils.js +54 -0
- package/src/worker/internal_utils.spec.js +134 -0
- package/src/worker/loader.js +30 -44
- package/src/worker/loader.spec.js +68 -0
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.js +117 -0
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.spec.js +77 -0
- package/src/worker/webpack_loaders/workflow_rewriter/consts.js +3 -0
- package/src/worker/webpack_loaders/workflow_rewriter/index.mjs +56 -0
- package/src/worker/webpack_loaders/workflow_rewriter/index.spec.js +129 -0
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.js +64 -0
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.spec.js +33 -0
- package/src/worker/webpack_loaders/workflow_rewriter/tools.js +225 -0
- package/src/worker/webpack_loaders/workflow_rewriter/tools.spec.js +144 -0
- package/src/errors.d.ts +0 -3
- package/src/worker/temp/__workflows_entrypoint.js +0 -6
package/src/worker/catalog_workflow/index.js
ADDED
@@ -0,0 +1,21 @@
+import { METADATA_ACCESS_SYMBOL } from '#consts';
+import { Catalog, CatalogActivity, CatalogWorkflow } from './catalog.js';
+
+/**
+ * Converts the list of workflows and the activities into the catalog information.
+ *
+ * This has information of all workflows and their activities from this worker.
+ *
+ * @param {object[]} workflows - The workflows objects, as they are returned from the loader module
+ * @param {object} activities - The activities functions map with metadata, as they are returned from the loader module
+ * @returns {Catalog} An catalog instance
+ */
+export const createCatalog = ( { workflows, activities } ) =>
+    workflows.reduce( ( catalog, workflow ) =>
+        catalog.addWorkflow( new CatalogWorkflow( {
+            ...workflow,
+            activities: Object.entries( activities )
+                .filter( ( [ k ] ) => k.startsWith( `${workflow.path}#` ) )
+                .map( ( [ _, v ] ) => new CatalogActivity( v[METADATA_ACCESS_SYMBOL] ) )
+        } ) )
+    , new Catalog() );
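Note: `createCatalog` relies on the activities map being keyed as `<workflow.path>#<activityName>` (that is how `loader.js` registers them, see below), which is what lets it group each activity under its workflow. A minimal usage sketch, with hypothetical paths and a hypothetical `taggedActivityFn` that carries `METADATA_ACCESS_SYMBOL` metadata:

```js
import { createCatalog } from './index.js';

// Hypothetical loader output: one workflow object and one metadata-tagged activity function.
const workflows = [ { name: 'flow1', path: '/flows/flow1', pathname: '/flows/flow1/workflow.js' } ];
const activities = { '/flows/flow1#A1': taggedActivityFn };

const catalog = createCatalog( { workflows, activities } );
// catalog.workflows -> [ CatalogWorkflow { name: 'flow1', ..., activities: [ CatalogActivity { name: 'A1', ... } ] } ]
```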
package/src/worker/catalog_workflow/index.spec.js
ADDED
@@ -0,0 +1,139 @@
+import { describe, it, expect, vi } from 'vitest';
+
+// Provide the same symbol to the module under test and to the test
+const METADATA_ACCESS_SYMBOL = Symbol( '__metadata' );
+vi.mock( '#consts', () => ( {
+    METADATA_ACCESS_SYMBOL
+} ) );
+
+const setMetadata = ( target, values ) =>
+    Object.defineProperty( target, METADATA_ACCESS_SYMBOL, { value: values, writable: false, enumerable: false, configurable: false } );
+
+describe( 'createCatalog', () => {
+    it( 'builds catalog with activities grouped by workflow path and returns Catalog with CatalogWorkflow entries', async () => {
+        const { createCatalog } = await import( './index.js' );
+
+        const workflows = [
+            {
+                name: 'flow1',
+                path: '/flows/flow1',
+                pathname: '/flows/flow1/workflow.js',
+                description: 'desc-flow1',
+                inputSchema: { in: 'f1' },
+                outputSchema: { out: 'f1' }
+            },
+            {
+                name: 'flow2',
+                path: '/flows/flow2',
+                pathname: '/flows/flow2/workflow.js',
+                description: 'desc-flow2',
+                inputSchema: { in: 'f2' },
+                outputSchema: { out: 'f2' }
+            }
+        ];
+
+        const activity1 = () => {};
+        setMetadata( activity1, {
+            name: 'A1',
+            path: '/flows/flow1#A1',
+            description: 'desc-a1',
+            inputSchema: { in: 'a1' },
+            outputSchema: { out: 'a1' }
+        } );
+
+        const activity2 = () => {};
+        setMetadata( activity2, {
+            name: 'A2',
+            path: '/flows/flow1#A2',
+            description: 'desc-a2',
+            inputSchema: { in: 'a2' },
+            outputSchema: { out: 'a2' }
+        } );
+
+        const activity3 = () => {};
+        setMetadata( activity3, {
+            name: 'B1',
+            path: '/flows/flow2#B1',
+            description: 'desc-b1',
+            inputSchema: { in: 'b1' },
+            outputSchema: { out: 'b1' }
+        } );
+
+        const activity4 = () => {};
+        setMetadata( activity4, {
+            name: 'X',
+            path: '/other#X',
+            description: 'desc-x',
+            inputSchema: { in: 'x' },
+            outputSchema: { out: 'x' }
+        } );
+
+        const activities = {
+            '/flows/flow1#A1': activity1,
+            '/flows/flow1#A2': activity2,
+            '/flows/flow2#B1': activity3,
+            '/other#X': activity4
+        };
+
+        const catalog = createCatalog( { workflows, activities } );
+
+        const mapped = catalog.workflows.map( w => ( {
+            name: w.name,
+            path: w.path,
+            description: w.description,
+            inputSchema: w.inputSchema,
+            outputSchema: w.outputSchema,
+            activities: w.activities.map( a => ( {
+                name: a.name,
+                description: a.description,
+                inputSchema: a.inputSchema,
+                outputSchema: a.outputSchema
+            } ) )
+        } ) );
+
+        expect( mapped ).toEqual( [
+            {
+                name: 'flow1',
+                path: '/flows/flow1',
+                description: 'desc-flow1',
+                inputSchema: { in: 'f1' },
+                outputSchema: { out: 'f1' },
+                activities: [
+                    {
+                        name: 'A1',
+                        description: 'desc-a1',
+                        inputSchema: { in: 'a1' },
+                        outputSchema: { out: 'a1' }
+                    },
+                    {
+                        name: 'A2',
+                        description: 'desc-a2',
+                        inputSchema: { in: 'a2' },
+                        outputSchema: { out: 'a2' }
+                    }
+                ]
+            },
+            {
+                name: 'flow2',
+                path: '/flows/flow2',
+                description: 'desc-flow2',
+                inputSchema: { in: 'f2' },
+                outputSchema: { out: 'f2' },
+                activities: [
+                    {
+                        name: 'B1',
+                        description: 'desc-b1',
+                        inputSchema: { in: 'b1' },
+                        outputSchema: { out: 'b1' }
+                    }
+                ]
+            }
+        ] );
+
+        // Original inputs are not mutated
+        expect( workflows[0].path ).toBe( '/flows/flow1' );
+        expect( workflows[0].pathname ).toBe( '/flows/flow1/workflow.js' );
+        expect( workflows[1].path ).toBe( '/flows/flow2' );
+        expect( workflows[1].pathname ).toBe( '/flows/flow2/workflow.js' );
+    } );
+} );
package/src/worker/catalog_workflow/workflow.js
ADDED
@@ -0,0 +1,13 @@
+import { defineQuery, setHandler } from '@temporalio/workflow';
+
+/**
+ * This is a special workflow, unique to each worker, which holds the meta information of all other workflows in that worker.
+ *
+ * The information is set in the startup and is accessible via a query called 'get'.
+ *
+ * @param {object} catalog - The catalog information
+ */
+export default async function catalogWorkflow( catalog ) {
+    setHandler( defineQuery( 'get' ), () => catalog );
+    await new Promise( () => {} ); // stay alive
+};
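Because the catalog workflow answers a `'get'` query and is started with `workflowId === taskQueue` (see `worker/index.js` below), any Temporal client can read a worker's catalog. A minimal sketch, assuming a reachable Temporal server and a hypothetical task queue name:

```js
import { Connection, Client } from '@temporalio/client';

const connection = await Connection.connect(); // assumes a locally reachable Temporal server
const client = new Client( { connection } );
const handle = client.workflow.getHandle( 'my-task-queue' ); // hypothetical task queue / workflow id
const catalog = await handle.query( 'get' ); // returns the catalog passed as the workflow argument
console.log( catalog );
```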
package/src/worker/index.js
CHANGED
@@ -1,11 +1,14 @@
 import { Worker, NativeConnection } from '@temporalio/worker';
+import { Client } from '@temporalio/client';
+import { WorkflowIdConflictPolicy } from '@temporalio/common';
 import { dirname, join } from 'path';
 import { fileURLToPath } from 'node:url';
 import { worker as workerConfig } from '#configs';
-import { loadActivities, loadWorkflows } from './loader.js';
+import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
 import { ActivityExecutionInterceptor } from './interceptors/activity.js';
 import { setupGlobalTracer } from './tracer/index.js';
 import { sinks } from './sinks.js';
+import { createCatalog } from './catalog_workflow/index.js';
 
 const __dirname = dirname( fileURLToPath( import.meta.url ) );
 
@@ -19,16 +22,22 @@ const callerDir = process.argv[2];
 
 ( async () => {
     console.log( '[Core]', 'Loading workflows...', { callerDir } );
-    const
+    const workflows = await loadWorkflows( callerDir );
 
     console.log( '[Core]', 'Loading activities...', { callerDir } );
     const activities = await loadActivities( callerDir );
 
+    console.log( '[Core]', 'Creating worker entry point...' );
+    const workflowsPath = createWorkflowsEntryPoint( workflows );
+
+    console.log( '[Core]', 'Creating workflows catalog...' );
+    const catalog = createCatalog( { workflows, activities } );
+
     console.log( '[Core]', 'Connecting Temporal...' );
     // enable TLS only when connecting remove (api key is present)
     const connection = await NativeConnection.connect( { address, tls: Boolean( apiKey ), apiKey } );
 
-    console.log( '[Core]', 'Creating
+    console.log( '[Core]', 'Creating worker...' );
     const worker = await Worker.create( {
         connection,
         namespace,
@@ -41,9 +50,32 @@ const callerDir = process.argv[2];
             activityInbound: [ () => new ActivityExecutionInterceptor() ]
         },
         maxConcurrentWorkflowTaskExecutions: maxWorkflows,
-        maxConcurrentActivityTaskExecutions: maxActivities
+        maxConcurrentActivityTaskExecutions: maxActivities,
+        bundlerOptions: {
+            webpackConfigHook: config => {
+                if ( !config.module ) { config.module = { }; }
+                if ( !config.module.rules ) { config.module.rules = []; }
+                // Use AST-based loader for rewriting steps/workflows
+                config.module.rules.push( {
+                    test: /\.js$/,
+                    exclude: /node_modules/,
+                    use: {
+                        loader: join( __dirname, './webpack_loaders/workflow_rewriter/index.mjs' )
+                    }
+                } );
+                return config;
+            }
+        }
+    } );
+
+    console.log( '[Core]', 'Starting catalog workflow...' );
+    await new Client( { connection, namespace } ).workflow.start( 'catalog', {
+        taskQueue,
+        workflowId: taskQueue, // use the name of the task queue as the catalog name, ensuring uniqueness
+        workflowIdConflictPolicy: WorkflowIdConflictPolicy.TERMINATE_EXISTING,
+        args: [ catalog ]
     } );
 
-    console.log( '[Core]
+    console.log( '[Core]', 'Running worker...' );
     worker.run();
 } )();
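The `webpackConfigHook` above routes every non-`node_modules` `.js` file through `webpack_loaders/workflow_rewriter/index.mjs`, whose source is not shown in this diff. Purely as an illustration of where such a loader hooks in (not the actual rewriter), a webpack loader in that position receives each module's source text and returns the possibly rewritten source:

```js
// Illustrative only: the real workflow_rewriter parses the source into an AST
// (see collect_target_imports.js, rewrite_fn_bodies.js, tools.js) before rewriting.
export default function hypotheticalRewriterLoader( source ) {
    // `this.resourcePath` is the absolute path of the module being bundled.
    if ( this.resourcePath.endsWith( 'steps.js' ) ) {
        // ...rewrite `source` here...
    }
    return source;
}
```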
package/src/worker/internal_utils.js
ADDED
@@ -0,0 +1,54 @@
+/*
+ * These tools cant be used in sandbox environment!!!
+ */
+import { resolve } from 'path';
+import { pathToFileURL } from 'url';
+import { METADATA_ACCESS_SYMBOL } from '#consts';
+import { writeFileSync, existsSync, readdirSync, mkdirSync } from 'fs';
+
+/**
+ * Recursive traverse directories looking for files with given name,
+ * For each found file, return its path, pathname and URI
+ *
+ * @param {string} path - The path to scan
+ * @param {string[]} filenames - The filenames to look for
+ * @returns {string[{}]} An array containing an object with path, pathname and URI for each file found
+ * */
+export function recursiveNavigateWhileCollecting( path, filenames, collection = [], ignoreDirNames = [ 'vendor', 'node_modules' ] ) {
+    for ( const entry of readdirSync( path, { withFileTypes: true } ) ) {
+        if ( ignoreDirNames.includes( entry.name ) ) { continue; }
+
+        const pathname = resolve( path, entry.name );
+        if ( entry.isDirectory() ) {
+            recursiveNavigateWhileCollecting( pathname, filenames, collection );
+        } else if ( filenames.includes( entry.name ) ) {
+            collection.push( { pathname, path, url: pathToFileURL( pathname ).href } );
+        }
+    }
+
+    return collection;
+};
+
+/**
+ * For each path, dynamic import it, and for each exported component with metadata (step, workflow), yields it.
+ * @param {string[]} paths - Paths of the files to import
+ */
+export async function *iteratorOverImportedComponents( paths ) {
+    for ( const { url, path, pathname } of paths ) {
+        const imported = await import( url );
+        for ( const component of Object.values( imported ) ) {
+            const metadata = component[METADATA_ACCESS_SYMBOL];
+            if ( !metadata ) { continue; }
+            yield { component, metadata, path, pathname };
+        }
+    }
+};
+
+/**
+ * Write a file using the same signature as Node's FS writeFileSync, but recursively creates the necessary directories in the path.
+ */
+export function writeFileOnLocationSync( path, content ) {
+    const targetDir = path.split( '/' ).slice( 0, -1 ).join( '/' );
+    if ( targetDir && !existsSync( targetDir ) ) { mkdirSync( targetDir, { recursive: true } ); }
+    writeFileSync( path, content, 'utf-8' );
+};
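Together these helpers implement the scan-then-import flow used by `loader.js` (next block). A small sketch of the returned shapes, assuming a hypothetical `/app` directory that contains `steps.js` files:

```js
import { recursiveNavigateWhileCollecting, iteratorOverImportedComponents } from './internal_utils.js';

// Each hit is { path, pathname, url }; 'vendor' and 'node_modules' directories are skipped by default.
const hits = recursiveNavigateWhileCollecting( '/app', [ 'steps.js' ] );
// e.g. [ { path: '/app/flows/flow1', pathname: '/app/flows/flow1/steps.js', url: 'file:///app/flows/flow1/steps.js' } ]

for await ( const { component, metadata } of iteratorOverImportedComponents( hits ) ) {
    // Only exports carrying METADATA_ACCESS_SYMBOL metadata are yielded.
    console.log( metadata.name, typeof component );
}
```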
package/src/worker/internal_utils.spec.js
ADDED
@@ -0,0 +1,134 @@
+import { describe, it, expect } from 'vitest';
+import { mkdtempSync, mkdirSync, writeFileSync, rmSync, readFileSync, existsSync } from 'node:fs';
+import { join, resolve } from 'node:path';
+import { tmpdir } from 'node:os';
+import { pathToFileURL } from 'node:url';
+import { recursiveNavigateWhileCollecting, iteratorOverImportedComponents, writeFileOnLocationSync } from './internal_utils.js';
+
+function makeTmpRoot( prefix ) {
+    return mkdtempSync( join( tmpdir(), prefix ) );
+}
+
+describe( '.recursiveNavigateWhileCollecting', () => {
+    it( 'collects matching files recursively (happy path)', () => {
+        const root = makeTmpRoot( 'nsu-happy-' );
+        const target = 'target.txt';
+
+        // layout:
+        // root/target.txt
+        // root/a/target.txt
+        // root/b/c/target.txt
+        mkdirSync( join( root, 'a' ), { recursive: true } );
+        mkdirSync( join( root, 'b', 'c' ), { recursive: true } );
+        writeFileSync( join( root, target ), 'root' );
+        writeFileSync( join( root, 'a', target ), 'a' );
+        writeFileSync( join( root, 'b', 'c', target ), 'bc' );
+
+        const results = recursiveNavigateWhileCollecting( root, [ target ] );
+
+        expect( results.length ).toBe( 3 );
+        for ( const { pathname, path, url } of results ) {
+            expect( url ).toBe( pathToFileURL( pathname ).href );
+            expect( resolve( path, target ) ).toBe( pathname );
+        }
+
+        rmSync( root, { recursive: true, force: true } );
+    } );
+
+    it( 'skips files inside ignored directories (ignoreDirNames)', () => {
+        const root = makeTmpRoot( 'nsu-ignore-' );
+        const target = 'target.txt';
+
+        // layout:
+        // root/node_modules/target.txt (ignored)
+        // root/vendor/target.txt (ignored)
+        // root/ok/target.txt (collected)
+        mkdirSync( join( root, 'node_modules' ), { recursive: true } );
+        mkdirSync( join( root, 'vendor' ), { recursive: true } );
+        mkdirSync( join( root, 'ok' ), { recursive: true } );
+        writeFileSync( join( root, 'node_modules', target ), 'nm' );
+        writeFileSync( join( root, 'vendor', target ), 'v' );
+        writeFileSync( join( root, 'ok', target ), 'ok' );
+
+        const results = recursiveNavigateWhileCollecting( root, [ target ] );
+
+        expect( results.length ).toBe( 1 );
+        expect( results[0].pathname ).toBe( join( root, 'ok', target ) );
+        expect( results[0].path ).toBe( join( root, 'ok' ) );
+        expect( results[0].url ).toBe( pathToFileURL( results[0].pathname ).href );
+
+        rmSync( root, { recursive: true, force: true } );
+    } );
+} );
+
+describe( '.iteratorOverImportedComponents', () => {
+    it( 'imports modules and yields metadata from exports tagged with METADATA_ACCESS_SYMBOL', async () => {
+        const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}` );
+        mkdirSync( root, { recursive: true } );
+        const file = join( root, 'meta.module.js' );
+        writeFileSync( file, [
+            'import { METADATA_ACCESS_SYMBOL } from \"#consts\";',
+            'export const StepA = () => {};',
+            'StepA[METADATA_ACCESS_SYMBOL] = { kind: \"step\", name: \"a\" };',
+            'export const FlowB = () => {};',
+            'FlowB[METADATA_ACCESS_SYMBOL] = { kind: \"workflow\", name: \"b\" };'
+        ].join( '\n' ) );
+
+        const paths = recursiveNavigateWhileCollecting( root, [ 'meta.module.js' ] );
+        const collected = [];
+        for await ( const m of iteratorOverImportedComponents( paths ) ) {
+            collected.push( m );
+        }
+
+        expect( collected.length ).toBe( 2 );
+        expect( collected.map( m => m.metadata.name ).sort() ).toEqual( [ 'a', 'b' ] );
+        expect( collected.map( m => m.metadata.kind ).sort() ).toEqual( [ 'step', 'workflow' ] );
+        for ( const m of collected ) {
+            expect( m.pathname ).toBe( file );
+            expect( m.path ).toBe( root );
+            expect( typeof m.component ).toBe( 'function' );
+        }
+
+        rmSync( root, { recursive: true, force: true } );
+    } );
+
+    it( 'ignores exports without metadata symbol', async () => {
+        const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}-nometa` );
+        mkdirSync( root, { recursive: true } );
+        const file = join( root, 'meta.module.js' );
+        writeFileSync( file, [
+            'export const Plain = () => {};',
+            'export const AlsoPlain = {}'
+        ].join( '\n' ) );
+
+        const paths = recursiveNavigateWhileCollecting( root, [ 'meta.module.js' ] );
+        const collected = [];
+        for await ( const m of iteratorOverImportedComponents( paths ) ) {
+            collected.push( m );
+        }
+
+        expect( collected.length ).toBe( 0 );
+        rmSync( root, { recursive: true, force: true } );
+    } );
+} );
+
+describe( '.writeFileOnLocationSync', () => {
+    it( 'creates missing directories and writes file', () => {
+        const root = makeTmpRoot( 'nsu-write-' );
+        const nested = join( root, 'a', 'b', 'c.txt' );
+        writeFileOnLocationSync( nested, 'hello' );
+        expect( existsSync( join( root, 'a', 'b' ) ) ).toBe( true );
+        expect( readFileSync( nested, 'utf-8' ) ).toBe( 'hello' );
+        rmSync( root, { recursive: true, force: true } );
+    } );
+
+    it( 'overwrites existing content', () => {
+        const root = makeTmpRoot( 'nsu-write2-' );
+        const file = join( root, 'x', 'y.txt' );
+        mkdirSync( join( root, 'x' ), { recursive: true } );
+        writeFileSync( file, 'old' );
+        writeFileOnLocationSync( file, 'new' );
+        expect( readFileSync( file, 'utf-8' ) ).toBe( 'new' );
+        rmSync( root, { recursive: true, force: true } );
+    } );
+} );
package/src/worker/loader.js
CHANGED
@@ -1,64 +1,50 @@
-import {
-import {
-import { pathToFileURL, fileURLToPath } from 'url';
-import { nameSymbol, sendWebhookPostName, workflowsIndexFileName } from '#consts';
+import { dirname, join } from 'path';
+import { fileURLToPath } from 'url';
 import { sendWebhookPost } from '#internal_activities';
-
-
+import { SEND_WEBHOOK_ACTIVITY_NAME, WORKFLOWS_INDEX_FILENAME } from '#consts';
+import {
+    iteratorOverImportedComponents,
+    recursiveNavigateWhileCollecting,
+    writeFileOnLocationSync
+} from './internal_utils.js';
 
 const __dirname = dirname( fileURLToPath( import.meta.url ) );
 
-//
-const recursiveNavigateWhileCollecting = ( path, filenames, collection = [] ) => {
-    for ( const entry of readdirSync( path, { withFileTypes: true } ) ) {
-        if ( ignoreDirNames.includes( entry.name ) ) { continue; }
-
-        const pathname = resolve( path, entry.name );
-        if ( entry.isDirectory() ) {
-            recursiveNavigateWhileCollecting( pathname, filenames, collection );
-        } else if ( filenames.includes( entry.name ) ) {
-            collection.push( { pathname, path, url: pathToFileURL( pathname ).href } );
-        }
-    }
-
-    return collection;
-};
-
+// returns a map of activities, where the key is they path + name and the value is the function with metadata
 export async function loadActivities( path ) {
     const activityPaths = recursiveNavigateWhileCollecting( path, [ 'steps.js' ] );
     const activities = [];
-    for ( const {
-
-
-        console.log( '[Core.Scanner]', 'Activity loaded', name, pathname );
-        activities[`${path}#${name}`] = fn;
-    }
+    for await ( const { component, metadata, pathname, path } of iteratorOverImportedComponents( activityPaths ) ) {
+        console.log( '[Core.Scanner]', 'Activity loaded:', metadata.name, 'at', pathname );
+        activities[`${path}#${metadata.name}`] = component;
    }
 
     // system activities
-    activities[
+    activities[SEND_WEBHOOK_ACTIVITY_NAME] = sendWebhookPost;
     return activities;
 };
 
+// Returns array of workflow object, which are function with metadata attached
 export async function loadWorkflows( path ) {
-    const workflowPaths = recursiveNavigateWhileCollecting( path, [ '
+    const workflowPaths = recursiveNavigateWhileCollecting( path, [ 'workflow.js' ] );
     const workflows = [];
-    for ( const { pathname,
-
-
-        for ( const name of Object.values( exported ).map( v => v[nameSymbol] ).filter( n => n ) ) {
-            console.log( '[Core.Scanner]', 'Workflow loaded', name, pathname );
-            workflows.push( { name, pathname } );
-        }
+    for await ( const { metadata, pathname, path } of iteratorOverImportedComponents( workflowPaths ) ) {
+        workflows.push( { ...metadata, pathname, path } );
+        console.log( '[Core.Scanner]', 'Workflow loaded:', metadata.name, 'at', pathname );
    }
+    return workflows;
+};
 
-
-
-    const
+// Creates a temporary index file importing all workflows
+export function createWorkflowsEntryPoint( workflows ) {
+    const entryPoint = join( __dirname, 'temp', WORKFLOWS_INDEX_FILENAME );
 
-
-
-
-
+    // default system catalog workflow
+    const catalogWorkflow = { name: 'catalog', pathname: join( __dirname, './catalog_workflow/workflow.js' ) };
+    const workflowsIndex = [ ... workflows, catalogWorkflow ]
+        .map( ( { name, pathname } ) => `export { default as ${name} } from '${pathname}';` )
+        .join( '\n' );
+
+    writeFileOnLocationSync( entryPoint, workflowsIndex );
     return entryPoint;
 };
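For reference, `createWorkflowsEntryPoint` writes the re-export index that `worker/index.js` hands to the Temporal bundler as `workflowsPath`. With a single hypothetical workflow named `myFlow`, the generated `temp/__workflows_entrypoint.js` would look roughly like:

```js
// Illustrative generated content; the absolute paths are hypothetical.
export { default as myFlow } from '/app/flows/my_flow/workflow.js';
export { default as catalog } from '/path/to/@output.ai/core/src/worker/catalog_workflow/workflow.js';
```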
package/src/worker/loader.spec.js
ADDED
@@ -0,0 +1,68 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+const METADATA_ACCESS_SYMBOL = Symbol( '__metadata' );
+
+vi.mock( '#consts', () => ( {
+    SEND_WEBHOOK_ACTIVITY_NAME: '__internal#sendWebhookPost',
+    WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js',
+    METADATA_ACCESS_SYMBOL
+} ) );
+
+const sendWebhookPostMock = vi.fn();
+vi.mock( '#internal_activities', () => ( {
+    sendWebhookPost: sendWebhookPostMock
+} ) );
+
+// Mock internal_utils to control filesystem-independent behavior
+const iteratorMock = vi.fn();
+const recursiveMock = vi.fn();
+const writeFileMock = vi.fn();
+vi.mock( './internal_utils.js', () => ( {
+    iteratorOverImportedComponents: iteratorMock,
+    recursiveNavigateWhileCollecting: recursiveMock,
+    writeFileOnLocationSync: writeFileMock
+} ) );
+
+describe( 'worker/loader', () => {
+    beforeEach( () => {
+        vi.clearAllMocks();
+    } );
+
+    it( 'loadActivities returns map including system activity', async () => {
+        const { loadActivities } = await import( './loader.js' );
+
+        recursiveMock.mockReturnValue( [ { pathname: '/a/steps.js', path: '/a', url: 'file:///a/steps.js' } ] );
+        iteratorMock.mockImplementation( async function *() {
+            yield { component: () => {}, metadata: { name: 'Act1' }, pathname: '/a/steps.js', path: '/a' };
+        } );
+
+        const activities = await loadActivities( '/root' );
+        expect( activities['/a#Act1'] ).toBeTypeOf( 'function' );
+        expect( activities['__internal#sendWebhookPost'] ).toBe( sendWebhookPostMock );
+    } );
+
+    it( 'loadWorkflows returns array of workflows with metadata', async () => {
+        const { loadWorkflows } = await import( './loader.js' );
+
+        recursiveMock.mockReturnValue( [ { pathname: '/b/workflow.js', path: '/b', url: 'file:///b/workflow.js' } ] );
+        iteratorMock.mockImplementation( async function *() {
+            yield { metadata: { name: 'Flow1', description: 'd' }, pathname: '/b/workflow.js', path: '/b' };
+        } );
+
+        const workflows = await loadWorkflows( '/root' );
+        expect( workflows ).toEqual( [ { name: 'Flow1', description: 'd', pathname: '/b/workflow.js', path: '/b' } ] );
+    } );
+
+    it( 'createWorkflowsEntryPoint writes index and returns its path', async () => {
+        const { createWorkflowsEntryPoint } = await import( './loader.js' );
+
+        const workflows = [ { name: 'W', pathname: '/abs/wf.js' } ];
+        const entry = createWorkflowsEntryPoint( workflows );
+
+        expect( writeFileMock ).toHaveBeenCalledTimes( 1 );
+        const [ writtenPath, contents ] = writeFileMock.mock.calls[0];
+        expect( entry ).toBe( writtenPath );
+        expect( contents ).toContain( 'export { default as W } from \'/abs/wf.js\';' );
+        expect( contents ).toContain( 'export { default as catalog }' );
+    } );
+} );