@output.ai/core 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +16 -22
  2. package/package.json +8 -6
  3. package/src/consts.js +1 -1
  4. package/src/interface/evaluator.js +8 -4
  5. package/src/interface/workflow.js +11 -14
  6. package/src/internal_activities/index.js +0 -32
  7. package/src/tracing/index.d.ts +4 -4
  8. package/src/tracing/index.js +12 -121
  9. package/src/tracing/internal_interface.js +66 -0
  10. package/src/tracing/processors/local/index.js +50 -0
  11. package/src/tracing/processors/local/index.spec.js +67 -0
  12. package/src/tracing/processors/s3/index.js +51 -0
  13. package/src/tracing/processors/s3/index.spec.js +64 -0
  14. package/src/tracing/processors/s3/redis_client.js +19 -0
  15. package/src/tracing/processors/s3/redis_client.spec.js +50 -0
  16. package/src/tracing/processors/s3/s3_client.js +33 -0
  17. package/src/tracing/processors/s3/s3_client.spec.js +67 -0
  18. package/src/tracing/{tracer_tree.js → tools/build_trace_tree.js} +4 -11
  19. package/src/tracing/{tracer_tree.spec.js → tools/build_trace_tree.spec.js} +4 -20
  20. package/src/tracing/{utils.js → tools/utils.js} +7 -0
  21. package/src/tracing/trace_engine.js +63 -0
  22. package/src/tracing/trace_engine.spec.js +91 -0
  23. package/src/utils.js +8 -0
  24. package/src/worker/catalog_workflow/index.js +2 -1
  25. package/src/worker/catalog_workflow/index.spec.js +6 -10
  26. package/src/worker/configs.js +24 -0
  27. package/src/worker/index.js +7 -4
  28. package/src/worker/interceptors/activity.js +7 -14
  29. package/src/worker/interceptors/workflow.js +1 -2
  30. package/src/worker/loader.js +39 -30
  31. package/src/worker/loader.spec.js +20 -24
  32. package/src/worker/loader_tools.js +63 -0
  33. package/src/worker/loader_tools.spec.js +85 -0
  34. package/src/worker/sinks.js +8 -4
  35. package/src/configs.js +0 -31
  36. package/src/configs.spec.js +0 -331
  37. package/src/tracing/index.private.spec.js +0 -84
  38. package/src/tracing/index.public.spec.js +0 -86
  39. package/src/worker/internal_utils.js +0 -60
  40. package/src/worker/internal_utils.spec.js +0 -134
  41. package/src/tracing/{utils.spec.js → tools/utils.spec.js} +0 -0

package/src/worker/loader.js CHANGED
@@ -1,51 +1,60 @@
- import { dirname, join } from 'path';
+ import { dirname, join } from 'node:path';
+ import { mkdirSync, writeFileSync } from 'node:fs';
+ import { EOL } from 'node:os';
  import { fileURLToPath } from 'url';
- import { sendWebhook, readTraceFile } from '#internal_activities';
- import { ACTIVITY_SEND_WEBHOOK, ACTIVITY_READ_TRACE_FILE, WORKFLOWS_INDEX_FILENAME } from '#consts';
- import {
-   iteratorOverImportedComponents,
-   recursiveNavigateWhileCollecting,
-   writeFileOnLocationSync
- } from './internal_utils.js';
+ import { sendWebhook } from '#internal_activities';
+ import { ACTIVITY_SEND_WEBHOOK, WORKFLOWS_INDEX_FILENAME, WORKFLOW_CATALOG } from '#consts';
+ import { importComponents } from './loader_tools.js';

  const __dirname = dirname( fileURLToPath( import.meta.url ) );

- // returns a map of activities, where the key is they path + name and the value is the function with metadata
- export async function loadActivities( path ) {
-   const activityPaths = recursiveNavigateWhileCollecting( path, [ 'steps.js', 'evaluators.js' ] );
-   const activities = [];
-   for await ( const { component, metadata, pathname, path } of iteratorOverImportedComponents( activityPaths ) ) {
-     console.log( '[Core.Scanner]', 'Component loaded:', metadata.type, metadata.name, 'at', pathname );
-     activities[`${path}#${metadata.name}`] = component;
+ /**
+  * Builds a map of activities, where the key is their path and name and the value is the function
+  *
+  * @param {string} target
+  * @returns {object}
+  */
+ export async function loadActivities( target ) {
+   const activities = {};
+   for await ( const { fn, metadata, path } of importComponents( target, [ 'steps.js', 'evaluators.js' ] ) ) {
+     console.log( '[Core.Scanner]', 'Component loaded:', metadata.type, metadata.name, 'at', path );
+     activities[`${dirname( path )}#${metadata.name}`] = fn;
    }

    // system activities
    activities[ACTIVITY_SEND_WEBHOOK] = sendWebhook;
-   activities[ACTIVITY_READ_TRACE_FILE] = readTraceFile;
    return activities;
  };

- // Returns array of workflow object, which are function with metadata attached
- export async function loadWorkflows( path ) {
-   const workflowPaths = recursiveNavigateWhileCollecting( path, [ 'workflow.js' ] );
+ /**
+  * Builds an array of workflow objects
+  *
+  * @param {string} target
+  * @returns {object[]}
+  */
+ export async function loadWorkflows( target ) {
    const workflows = [];
-   for await ( const { metadata, pathname, path } of iteratorOverImportedComponents( workflowPaths ) ) {
-     workflows.push( { ...metadata, pathname, path } );
-     console.log( '[Core.Scanner]', 'Workflow loaded:', metadata.name, 'at', pathname );
+   for await ( const { metadata, path } of importComponents( target, [ 'workflow.js' ] ) ) {
+     console.log( '[Core.Scanner]', 'Workflow loaded:', metadata.name, 'at', path );
+     workflows.push( { ...metadata, path } );
    }
    return workflows;
  };

- // Creates a temporary index file importing all workflows
+ /**
+  * Creates a temporary index file importing all workflows
+  *
+  * @param {object[]} workflows
+  * @returns
+  */
  export function createWorkflowsEntryPoint( workflows ) {
-   const entryPoint = join( __dirname, 'temp', WORKFLOWS_INDEX_FILENAME );
+   const path = join( __dirname, 'temp', WORKFLOWS_INDEX_FILENAME );

    // default system catalog workflow
-   const catalogWorkflow = { name: 'catalog', pathname: join( __dirname, './catalog_workflow/workflow.js' ) };
-   const workflowsIndex = [ ... workflows, catalogWorkflow ]
-     .map( ( { name, pathname } ) => `export { default as ${name} } from '${pathname}';` )
-     .join( '\n' );
+   const catalog = { name: WORKFLOW_CATALOG, path: join( __dirname, './catalog_workflow/workflow.js' ) };
+   const content = [ ... workflows, catalog ].map( ( { name, path } ) => `export { default as ${name} } from '${path}';` ).join( EOL );

-   writeFileOnLocationSync( entryPoint, workflowsIndex );
-   return entryPoint;
+   mkdirSync( dirname( path ), { recursive: true } );
+   writeFileSync( path, content, 'utf-8' );
+   return path;
  };
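
For orientation, here is a minimal, hypothetical usage sketch of the reworked loader API. It is not part of the package; the relative import specifier and the use of the current working directory as the scan target are assumptions for illustration.

// Hypothetical caller, sketching how the new loader functions compose.
import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';

const target = process.cwd(); // directory to scan; assumed for this sketch

// Object keyed by `${dirname( path )}#${metadata.name}`, plus the system sendWebhook activity
const activities = await loadActivities( target );

// Array of workflow metadata objects, each carrying the path of its workflow.js
const workflows = await loadWorkflows( target );

// Temporary index file re-exporting every workflow plus the internal catalog workflow
const entryPoint = createWorkflowsEntryPoint( workflows );
console.log( Object.keys( activities ).length, 'activities;', 'workflows entry point at', entryPoint );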

package/src/worker/loader.spec.js CHANGED
@@ -2,25 +2,23 @@ import { describe, it, expect, vi, beforeEach } from 'vitest';

  vi.mock( '#consts', () => ( {
    ACTIVITY_SEND_WEBHOOK: '__internal#sendWebhook',
-   ACTIVITY_READ_TRACE_FILE: '__internal#readTraceFile',
-   WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js'
+   WORKFLOWS_INDEX_FILENAME: '__workflows_entrypoint.js',
+   WORKFLOW_CATALOG: 'catalog'
  } ) );

  const sendWebhookMock = vi.fn();
- const readTraceFileMock = vi.fn();
  vi.mock( '#internal_activities', () => ( {
-   sendWebhook: sendWebhookMock,
-   readTraceFile: readTraceFileMock
+   sendWebhook: sendWebhookMock
  } ) );

- // Mock internal_utils to control filesystem-independent behavior
- const iteratorMock = vi.fn();
- const recursiveMock = vi.fn();
- const writeFileMock = vi.fn();
- vi.mock( './internal_utils.js', () => ( {
-   iteratorOverImportedComponents: iteratorMock,
-   recursiveNavigateWhileCollecting: recursiveMock,
-   writeFileOnLocationSync: writeFileMock
+ const importComponentsMock = vi.fn();
+ vi.mock( './loader_tools.js', () => ( { importComponents: importComponentsMock } ) );
+
+ const mkdirSyncMock = vi.fn();
+ const writeFileSyncMock = vi.fn();
+ vi.mock( 'node:fs', () => ( {
+   mkdirSync: mkdirSyncMock,
+   writeFileSync: writeFileSyncMock
  } ) );

  describe( 'worker/loader', () => {
@@ -31,39 +29,37 @@ describe( 'worker/loader', () => {
    it( 'loadActivities returns map including system activity', async () => {
      const { loadActivities } = await import( './loader.js' );

-     recursiveMock.mockReturnValue( [ { pathname: '/a/steps.js', path: '/a', url: 'file:///a/steps.js' } ] );
-     iteratorMock.mockImplementation( async function *() {
-       yield { component: () => {}, metadata: { name: 'Act1' }, pathname: '/a/steps.js', path: '/a' };
+     importComponentsMock.mockImplementationOnce( async function *() {
+       yield { fn: () => {}, metadata: { name: 'Act1' }, path: '/a/steps.js' };
      } );

      const activities = await loadActivities( '/root' );
      expect( activities['/a#Act1'] ).toBeTypeOf( 'function' );
      expect( activities['__internal#sendWebhook'] ).toBe( sendWebhookMock );
-     expect( activities['__internal#readTraceFile'] ).toBe( readTraceFileMock );
    } );

    it( 'loadWorkflows returns array of workflows with metadata', async () => {
      const { loadWorkflows } = await import( './loader.js' );

-     recursiveMock.mockReturnValue( [ { pathname: '/b/workflow.js', path: '/b', url: 'file:///b/workflow.js' } ] );
-     iteratorMock.mockImplementation( async function *() {
-       yield { metadata: { name: 'Flow1', description: 'd' }, pathname: '/b/workflow.js', path: '/b' };
+     importComponentsMock.mockImplementationOnce( async function *() {
+       yield { metadata: { name: 'Flow1', description: 'd' }, path: '/b/workflow.js' };
      } );

      const workflows = await loadWorkflows( '/root' );
-     expect( workflows ).toEqual( [ { name: 'Flow1', description: 'd', pathname: '/b/workflow.js', path: '/b' } ] );
+     expect( workflows ).toEqual( [ { name: 'Flow1', description: 'd', path: '/b/workflow.js' } ] );
    } );

    it( 'createWorkflowsEntryPoint writes index and returns its path', async () => {
      const { createWorkflowsEntryPoint } = await import( './loader.js' );

-     const workflows = [ { name: 'W', pathname: '/abs/wf.js' } ];
+     const workflows = [ { name: 'W', path: '/abs/wf.js' } ];
      const entry = createWorkflowsEntryPoint( workflows );

-     expect( writeFileMock ).toHaveBeenCalledTimes( 1 );
-     const [ writtenPath, contents ] = writeFileMock.mock.calls[0];
+     expect( writeFileSyncMock ).toHaveBeenCalledTimes( 1 );
+     const [ writtenPath, contents ] = writeFileSyncMock.mock.calls[0];
      expect( entry ).toBe( writtenPath );
      expect( contents ).toContain( 'export { default as W } from \'/abs/wf.js\';' );
      expect( contents ).toContain( 'export { default as catalog }' );
+     expect( mkdirSyncMock ).toHaveBeenCalledTimes( 1 );
    } );
  } );

package/src/worker/loader_tools.js ADDED
@@ -0,0 +1,63 @@
+ import { resolve } from 'path';
+ import { pathToFileURL } from 'url';
+ import { METADATA_ACCESS_SYMBOL } from '#consts';
+ import { readdirSync } from 'fs';
+
+ /**
+  * @typedef {object} CollectedFile
+  * @property {string} path - The file path
+  * @property {string} url - The resolved url of the file, ready to be imported
+  */
+ /**
+  * @typedef {object} Component
+  * @property {Function} fn - The loaded component function
+  * @property {object} metadata - Associated metadata with the component
+  * @property {string} path - Associated metadata with the component
+  */
+
+ /**
+  * Recursive traverse directories looking for files with given name.
+  *
+  * @param {string} path - The path to scan
+  * @param {string[]} filenames - The filenames to look for
+  * @returns {CollectedFile[]} An array containing the collected files
+  * */
+ const findByNameRecursively = ( parentPath, filenames, collection = [], ignoreDirNames = [ 'vendor', 'node_modules' ] ) => {
+   for ( const entry of readdirSync( parentPath, { withFileTypes: true } ) ) {
+     if ( ignoreDirNames.includes( entry.name ) ) {
+       continue;
+     }
+
+     const path = resolve( parentPath, entry.name );
+     if ( entry.isDirectory() ) {
+       findByNameRecursively( path, filenames, collection );
+     } else if ( filenames.includes( entry.name ) ) {
+       collection.push( { path, url: pathToFileURL( path ).href } );
+     }
+   }
+
+   return collection;
+ };
+
+ /**
+  * For each path, dynamic import it, and for each exported component with metadata (step, workflow), yields it.
+  *
+  * @generator
+  * @async
+  * @function importComponents
+  * @param {string} target - Place to look for files
+  * @param {string[]} filenames - File names to load recursively from target
+  * @yields {Component}
+  */
+ export async function *importComponents( target, filenames ) {
+   for ( const { url, path } of findByNameRecursively( target, filenames ) ) {
+     const imported = await import( url );
+     for ( const fn of Object.values( imported ) ) {
+       const metadata = fn[METADATA_ACCESS_SYMBOL];
+       if ( !metadata ) {
+         continue;
+       }
+       yield { fn, metadata, path };
+     }
+   }
+ };
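
As an illustration of the discovery contract importComponents relies on, here is a hypothetical steps.js whose export is tagged with METADATA_ACCESS_SYMBOL. The metadata shape shown ({ type, name }) is an assumption inferred from the loader's logging of metadata.type and metadata.name, not something this diff defines.

// Hypothetical steps.js — importComponents yields only exports carrying METADATA_ACCESS_SYMBOL.
import { METADATA_ACCESS_SYMBOL } from '#consts';

export const greet = async ( { name } ) => `Hello, ${name}`;
greet[METADATA_ACCESS_SYMBOL] = { type: 'step', name: 'greet' }; // shape assumed for illustration

// Untagged exports are skipped by the scanner:
export const helper = () => {};

// Consuming the generator (sketch):
// for await ( const { fn, metadata, path } of importComponents( '/app', [ 'steps.js' ] ) ) { ... }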

package/src/worker/loader_tools.spec.js ADDED
@@ -0,0 +1,85 @@
+ import { describe, it, expect } from 'vitest';
+ import { mkdirSync, writeFileSync, rmSync } from 'node:fs';
+ import { join } from 'node:path';
+ import { importComponents } from './loader_tools.js';
+
+ describe( '.importComponents', () => {
+   it( 'imports modules and yields metadata from exports tagged with METADATA_ACCESS_SYMBOL', async () => {
+     const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}` );
+     mkdirSync( root, { recursive: true } );
+     const file = join( root, 'meta.module.js' );
+     writeFileSync( file, [
+       'import { METADATA_ACCESS_SYMBOL } from "#consts";',
+       'export const StepA = () => {};',
+       'StepA[METADATA_ACCESS_SYMBOL] = { kind: "step", name: "a" };',
+       'export const FlowB = () => {};',
+       'FlowB[METADATA_ACCESS_SYMBOL] = { kind: "workflow", name: "b" };'
+     ].join( '\n' ) );
+
+     const collected = [];
+     for await ( const m of importComponents( root, [ 'meta.module.js' ] ) ) {
+       collected.push( m );
+     }
+
+     expect( collected.length ).toBe( 2 );
+     expect( collected.map( m => m.metadata.name ).sort() ).toEqual( [ 'a', 'b' ] );
+     expect( collected.map( m => m.metadata.kind ).sort() ).toEqual( [ 'step', 'workflow' ] );
+     for ( const m of collected ) {
+       expect( m.path ).toBe( file );
+       expect( typeof m.fn ).toBe( 'function' );
+     }
+
+     rmSync( root, { recursive: true, force: true } );
+   } );
+
+   it( 'ignores exports without metadata symbol', async () => {
+     const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}-nometa` );
+     mkdirSync( root, { recursive: true } );
+     const file = join( root, 'meta.module.js' );
+     writeFileSync( file, [
+       'export const Plain = () => {};',
+       'export const AlsoPlain = {}'
+     ].join( '\n' ) );
+
+     const collected = [];
+     for await ( const m of importComponents( root, [ 'meta.module.js' ] ) ) {
+       collected.push( m );
+     }
+
+     expect( collected.length ).toBe( 0 );
+     rmSync( root, { recursive: true, force: true } );
+   } );
+
+   it( 'skips files inside ignored directories (node_modules, vendor)', async () => {
+     const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}-ignoredirs` );
+     const okDir = join( root, 'ok' );
+     const nmDir = join( root, 'node_modules' );
+     const vendorDir = join( root, 'vendor' );
+     mkdirSync( okDir, { recursive: true } );
+     mkdirSync( nmDir, { recursive: true } );
+     mkdirSync( vendorDir, { recursive: true } );
+
+     const okFile = join( okDir, 'meta.module.js' );
+     const nmFile = join( nmDir, 'meta.module.js' );
+     const vendorFile = join( vendorDir, 'meta.module.js' );
+
+     const fileContents = [
+       'import { METADATA_ACCESS_SYMBOL } from "#consts";',
+       'export const C = () => {};',
+       'C[METADATA_ACCESS_SYMBOL] = { kind: "step", name: "c" };'
+     ].join( '\n' );
+     writeFileSync( okFile, fileContents );
+     writeFileSync( nmFile, fileContents );
+     writeFileSync( vendorFile, fileContents );
+
+     const collected = [];
+     for await ( const m of importComponents( root, [ 'meta.module.js' ] ) ) {
+       collected.push( m );
+     }
+
+     expect( collected.length ).toBe( 1 );
+     expect( collected[0].path ).toBe( okFile );
+
+     rmSync( root, { recursive: true, force: true } );
+   } );
+ } );

package/src/worker/sinks.js CHANGED
@@ -1,3 +1,4 @@
+ import { WORKFLOW_CATALOG } from '#consts';
  import { addEventStart, addEventEnd, addEventError } from '#tracing';

  /**
@@ -8,8 +9,11 @@ import { addEventStart, addEventEnd, addEventError } from '#tracing';
   * @param {object} details - Teh details to attach to the event
   */
  const addWorkflowEvent = ( method, workflowInfo, details ) => {
-   const { workflowId: id, workflowType: name, memo: { parentId, traceId, traceHelm } } = workflowInfo;
-   method( { id, kind: 'workflow', name, details, parentId, traceId, traceHelm } );
+   const { workflowId: id, workflowType: name, memo: { parentId, executionContext } } = workflowInfo;
+   if ( name === WORKFLOW_CATALOG ) {
+     return;
+   } // ignore internal catalog events
+   method( { id, kind: 'workflow', name, details, parentId, executionContext } );
  };

  /**
@@ -21,8 +25,8 @@ const addWorkflowEvent = ( method, workflowInfo, details ) => {
   */
  const addEvent = ( method, workflowInfo, options ) => {
    const { id, name, kind, details } = options;
-   const { workflowId, memo: { traceId, traceHelm } } = workflowInfo;
-   method( { id, kind, name, details, parentId: workflowId, traceId, traceHelm } );
+   const { workflowId, memo: { executionContext } } = workflowInfo;
+   method( { id, kind, name, details, parentId: workflowId, executionContext } );
  };

  // This sink allow for sandbox Temporal environment to send trace logs back to the main thread.
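
To make the memo change concrete, here is a sketch of the payload the sink now forwards, assuming workflowInfo.memo carries an opaque executionContext object in place of the former traceId/traceHelm pair. The field values below are invented for illustration.

// Hypothetical workflowInfo passed to the sink interceptors — values are illustrative only.
const workflowInfo = {
  workflowId: 'wf-123',
  workflowType: 'billing',
  memo: { parentId: null, executionContext: { /* opaque; produced elsewhere in the SDK */ } }
};

// addWorkflowEvent( addEventStart, workflowInfo, details ) would now emit roughly:
// { id: 'wf-123', kind: 'workflow', name: 'billing', details, parentId: null, executionContext }
// and it returns early, emitting nothing, when workflowType === WORKFLOW_CATALOG.
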
package/src/configs.js DELETED
@@ -1,31 +0,0 @@
1
- import * as z from 'zod';
2
-
3
- class InvalidEnvVarsErrors extends Error { }
4
-
5
- const envVarSchema = z.object( {
6
- TEMPORAL_ADDRESS: z.string().optional().default( 'localhost:7233' ),
7
- TEMPORAL_NAMESPACE: z.string().optional().default( 'default' ),
8
- TEMPORAL_API_KEY: z.string().optional(),
9
- CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
10
- API_AUTH_KEY: z.string().optional()
11
- } );
12
-
13
- const { data: safeEnvVar, error } = envVarSchema.safeParse( process.env );
14
- if ( error ) {
15
- throw new InvalidEnvVarsErrors( z.prettifyError( error ) );
16
- }
17
-
18
- export const worker = {
19
- address: safeEnvVar.TEMPORAL_ADDRESS,
20
- apiKey: safeEnvVar.TEMPORAL_API_KEY,
21
- executionTimeout: '1m',
22
- maxActivities: 100,
23
- maxWorkflows: 100,
24
- namespace: safeEnvVar.TEMPORAL_NAMESPACE ?? 'default',
25
- taskQueue: safeEnvVar.CATALOG_ID,
26
- catalogId: safeEnvVar.CATALOG_ID
27
- };
28
-
29
- export const api = {
30
- authKey: safeEnvVar.API_AUTH_KEY
31
- };