@outputai/core 0.3.3-next.e0cece4.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@outputai/core",
3
- "version": "0.3.3-next.e0cece4.0",
3
+ "version": "0.4.0",
4
4
  "description": "The core module of the output framework",
5
5
  "type": "module",
6
6
  "exports": {
@@ -45,7 +45,12 @@
45
45
  "redis": "5.12.1",
46
46
  "stacktrace-parser": "0.1.11",
47
47
  "undici": "8.1.0",
48
- "winston": "3.19.0",
48
+ "winston": "3.19.0"
49
+ },
50
+ "peerDependencies": {
51
+ "zod": "^4.3.6"
52
+ },
53
+ "devDependencies": {
49
54
  "zod": "4.3.6"
50
55
  },
51
56
  "license": "Apache-2.0",
package/src/consts.js CHANGED
@@ -32,3 +32,7 @@ export const BusEventType = {
32
32
 
33
33
  RUNTIME_ERROR: 'runtime_error'
34
34
  };
35
+
36
+ export const WorkflowSpecialOutput = {
37
+ CONTINUED_AS_NEW: '<<continued_as_new>>'
38
+ };
@@ -139,9 +139,9 @@ export type WorkflowFunction<
139
139
  */
140
140
  export type WorkflowFunctionWrapper<WorkflowFunction> =
141
141
  [Parameters<WorkflowFunction>[0]] extends [undefined | null] ?
142
- ( input?: undefined | null, config?: WorkflowInvocationConfiguration<Parameters<WorkflowFunction>[1]> ) =>
142
+ ( input?: undefined | null, config?: WorkflowInvocationConfiguration ) =>
143
143
  ReturnType<WorkflowFunction> :
144
- ( input: Parameters<WorkflowFunction>[0], config?: WorkflowInvocationConfiguration<Parameters<WorkflowFunction>[1]> ) =>
144
+ ( input: Parameters<WorkflowFunction>[0], config?: WorkflowInvocationConfiguration ) =>
145
145
  ReturnType<WorkflowFunction>;
146
146
 
147
147
  /**
@@ -1,9 +1,40 @@
1
1
  import { dirname, join } from 'node:path';
2
2
  import { fileURLToPath } from 'node:url';
3
+ import {
4
+ findPackageRoot,
5
+ isPathDescendentFromNodeModules,
6
+ packageExposesWorkflows
7
+ } from './loader_tools.js';
3
8
 
4
9
  const __dirname = dirname( fileURLToPath( import.meta.url ) );
5
10
  const workerDir = __dirname; // sdk/core/src/worker
6
11
  const interfaceDir = join( __dirname, '..', 'interface' );
12
+ const packagesWithWorkflowsMap = new Map();
13
+
14
+ /**
15
+ * Skip loaders for most of `node_modules`, except packages that expose workflows.
16
+ */
17
+ const excludeUnlessPackageExposeWorkflows = resource => {
18
+ // internal parts: exclude
19
+ if ( resource.startsWith( workerDir ) || resource.startsWith( interfaceDir ) ) {
20
+ return true;
21
+ }
22
+ // not node_modules/: include
23
+ if ( !isPathDescendentFromNodeModules( resource ) ) {
24
+ return false;
25
+ }
26
+
27
+ const rootPath = findPackageRoot( resource );
28
+ if ( !rootPath ) {
29
+ return true;
30
+ }
31
+
32
+ if ( !packagesWithWorkflowsMap.has( rootPath ) ) {
33
+ packagesWithWorkflowsMap.set( rootPath, packageExposesWorkflows( join( rootPath, 'package.json' ) ) );
34
+ }
35
+
36
+ return !packagesWithWorkflowsMap.get( rootPath );
37
+ };
7
38
 
8
39
  export const webpackConfigHook = config => {
9
40
  // Prefer the "output-workflow-bundle" export condition when resolving packages.
@@ -23,8 +54,7 @@ export const webpackConfigHook = config => {
23
54
  // Validation loader (runs first)
24
55
  config.module.rules.push( {
25
56
  test: /\.js$/,
26
- // Exclude node_modules and internal core worker files
27
- exclude: resource => /node_modules/.test( resource ) || resource.startsWith( workerDir ) || resource.startsWith( interfaceDir ),
57
+ exclude: excludeUnlessPackageExposeWorkflows,
28
58
  enforce: 'pre',
29
59
  use: {
30
60
  loader: join( __dirname, './webpack_loaders/workflow_validator/index.mjs' )
@@ -33,8 +63,7 @@ export const webpackConfigHook = config => {
33
63
  // Use AST-based loader for rewriting steps/workflows
34
64
  config.module.rules.push( {
35
65
  test: /\.js$/,
36
- // Exclude node_modules and internal core worker files
37
- exclude: resource => /node_modules/.test( resource ) || resource.startsWith( workerDir ) || resource.startsWith( interfaceDir ),
66
+ exclude: excludeUnlessPackageExposeWorkflows,
38
67
  use: {
39
68
  loader: join( __dirname, './webpack_loaders/workflow_rewriter/index.mjs' )
40
69
  }
@@ -0,0 +1,62 @@
1
+ import { describe, it, expect, afterEach } from 'vitest';
2
+ import { mkdirSync, rmSync, writeFileSync } from 'node:fs';
3
+ import { dirname, join } from 'node:path';
4
+ import { fileURLToPath } from 'node:url';
5
+ import { webpackConfigHook } from './bundler_options.js';
6
+
7
+ const __dirname = dirname( fileURLToPath( import.meta.url ) );
8
+ const TEMP_BASE = join( process.cwd(), 'sdk/core/temp_test_bundler_options' );
9
+
10
+ afterEach( () => {
11
+ rmSync( TEMP_BASE, { recursive: true, force: true } );
12
+ } );
13
+
14
+ const buildExcludes = () => {
15
+ const config = webpackConfigHook( {} );
16
+ return config.module.rules.map( rule => rule.exclude );
17
+ };
18
+
19
+ const writePackageResource = ( packagePath, pkgJson ) => {
20
+ const resource = join( packagePath, 'lib', 'index.js' );
21
+ mkdirSync( dirname( resource ), { recursive: true } );
22
+ writeFileSync( join( packagePath, 'package.json' ), JSON.stringify( pkgJson ) );
23
+ writeFileSync( resource, 'export const x = 1;\n' );
24
+ return resource;
25
+ };
26
+
27
+ describe( 'webpackConfigHook loader excludes', () => {
28
+ it( 'keeps loaders enabled for project files outside node_modules', () => {
29
+ for ( const exclude of buildExcludes() ) {
30
+ expect( exclude( join( TEMP_BASE, 'src', 'workflow.js' ) ) ).toBe( false );
31
+ }
32
+ } );
33
+
34
+ it( 'excludes worker and interface internals', () => {
35
+ for ( const exclude of buildExcludes() ) {
36
+ expect( exclude( join( __dirname, 'loader.js' ) ) ).toBe( true );
37
+ expect( exclude( join( __dirname, '..', 'interface', 'index.js' ) ) ).toBe( true );
38
+ }
39
+ } );
40
+
41
+ it( 'keeps loaders enabled for packages that expose workflows', () => {
42
+ const resource = writePackageResource(
43
+ join( TEMP_BASE, 'node_modules', '@acme', 'catalog' ),
44
+ { name: '@acme/catalog', outputai: { workflows: { expose: true } } }
45
+ );
46
+
47
+ for ( const exclude of buildExcludes() ) {
48
+ expect( exclude( resource ) ).toBe( false );
49
+ }
50
+ } );
51
+
52
+ it( 'excludes packages that do not expose workflows', () => {
53
+ const resource = writePackageResource(
54
+ join( TEMP_BASE, 'node_modules', 'plain_lib' ),
55
+ { name: 'plain_lib', dependencies: { '@outputai/core': '1.0.0' } }
56
+ );
57
+
58
+ for ( const exclude of buildExcludes() ) {
59
+ expect( exclude( resource ) ).toBe( true );
60
+ }
61
+ } );
62
+ } );
@@ -1,8 +1,8 @@
1
1
  // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
2
- import { workflowInfo, proxySinks, ApplicationFailure, ContinueAsNew } from '@temporalio/workflow';
2
+ import { workflowInfo, proxySinks, ApplicationFailure, ContinueAsNew, isCancellation } from '@temporalio/workflow';
3
3
  import { memoToHeaders } from '../sandboxed_utils.js';
4
4
  import { deepMerge } from '#utils';
5
- import { METADATA_ACCESS_SYMBOL } from '#consts';
5
+ import { METADATA_ACCESS_SYMBOL, WorkflowSpecialOutput } from '#consts';
6
6
  // this is a dynamic generated file with activity configs overwrites
7
7
  import stepOptions from '../temp/__activity_options.js';
8
8
 
@@ -44,7 +44,12 @@ class WorkflowExecutionInterceptor {
44
44
  * a new trace file will be generated
45
45
  */
46
46
  if ( error instanceof ContinueAsNew ) {
47
- sinks.workflow.end( '<continued_as_new>' );
47
+ sinks.workflow.end( WorkflowSpecialOutput.CONTINUED_AS_NEW );
48
+ throw error;
49
+ }
50
+
51
+ if ( isCancellation( error ) ) {
52
+ sinks.workflow.error( error );
48
53
  throw error;
49
54
  }
50
55
 
@@ -6,6 +6,7 @@ const workflowInfoMock = vi.fn();
6
6
  const workflowStartMock = vi.fn();
7
7
  const workflowEndMock = vi.fn();
8
8
  const workflowErrorMock = vi.fn();
9
+ const isCancellationMock = vi.fn();
9
10
  vi.mock( '@temporalio/workflow', () => ( {
10
11
  workflowInfo: ( ...args ) => workflowInfoMock( ...args ),
11
12
  proxySinks: () => ( {
@@ -26,7 +27,8 @@ vi.mock( '@temporalio/workflow', () => ( {
26
27
  super( 'ContinueAsNew' );
27
28
  this.name = 'ContinueAsNew';
28
29
  }
29
- }
30
+ },
31
+ isCancellation: ( ...args ) => isCancellationMock( ...args )
30
32
  } ) );
31
33
 
32
34
  const memoToHeadersMock = vi.fn( memo => ( memo ? { ...memo, __asHeaders: true } : {} ) );
@@ -50,6 +52,7 @@ vi.mock( '../temp/__activity_options.js', () => ( { default: stepOptionsDefault
50
52
  describe( 'workflow interceptors', () => {
51
53
  beforeEach( () => {
52
54
  vi.clearAllMocks();
55
+ isCancellationMock.mockReturnValue( false );
53
56
  workflowInfoMock.mockReturnValue( { workflowType: 'MyWorkflow', memo: { executionContext: { id: 'ctx-1' } } } );
54
57
  } );
55
58
 
@@ -151,8 +154,25 @@ describe( 'workflow interceptors', () => {
151
154
  expect( error.details ).toEqual( [ meta ] );
152
155
  } );
153
156
 
157
+ it( 'calls sinks.workflow.error and rethrows cancellation errors without wrapping', async () => {
158
+ const { interceptors } = await import( './workflow.js' );
159
+ const { ApplicationFailure } = await import( '@temporalio/workflow' );
160
+ const { inbound } = interceptors();
161
+ const interceptor = inbound[0];
162
+ const cancellation = new Error( 'Workflow cancelled' );
163
+ const next = vi.fn().mockRejectedValue( cancellation );
164
+ isCancellationMock.mockReturnValue( true );
165
+
166
+ await expect( interceptor.execute( { args: [ {} ] }, next ) ).rejects.toBe( cancellation );
167
+ expect( isCancellationMock ).toHaveBeenCalledWith( cancellation );
168
+ expect( cancellation ).not.toBeInstanceOf( ApplicationFailure );
169
+ expect( workflowErrorMock ).toHaveBeenCalledWith( cancellation );
170
+ expect( workflowEndMock ).not.toHaveBeenCalled();
171
+ } );
172
+
154
173
  it( 'on ContinueAsNew calls sinks.trace.addWorkflowEventEnd and rethrows', async () => {
155
174
  const { ContinueAsNew } = await import( '@temporalio/workflow' );
175
+ const { WorkflowSpecialOutput } = await import( '#consts' );
156
176
  const { interceptors } = await import( './workflow.js' );
157
177
  const { inbound } = interceptors();
158
178
  const interceptor = inbound[0];
@@ -160,7 +180,7 @@ describe( 'workflow interceptors', () => {
160
180
  const next = vi.fn().mockRejectedValue( continueErr );
161
181
 
162
182
  await expect( interceptor.execute( { args: [ {} ] }, next ) ).rejects.toThrow( ContinueAsNew );
163
- expect( workflowEndMock ).toHaveBeenCalledWith( '<continued_as_new>' );
183
+ expect( workflowEndMock ).toHaveBeenCalledWith( WorkflowSpecialOutput.CONTINUED_AS_NEW );
164
184
  expect( workflowErrorMock ).not.toHaveBeenCalled();
165
185
  } );
166
186
  } );
@@ -3,7 +3,14 @@ import { existsSync, mkdirSync, writeFileSync } from 'node:fs';
3
3
  import { EOL } from 'node:os';
4
4
  import { fileURLToPath } from 'url';
5
5
  import { getTraceDestinations, sendHttpRequest } from '#internal_activities';
6
- import { importComponents, staticMatchers, activityMatchersBuilder } from './loader_tools.js';
6
+ import {
7
+ activityMatchersBuilder,
8
+ findSharedActivitiesFromWorkflows,
9
+ findWorkflowsInNodeModules,
10
+ importComponents,
11
+ matchFiles,
12
+ staticMatchers
13
+ } from './loader_tools.js';
7
14
  import {
8
15
  ACTIVITY_SEND_HTTP_REQUEST,
9
16
  ACTIVITY_OPTIONS_FILENAME,
@@ -13,6 +20,7 @@ import {
13
20
  ACTIVITY_GET_TRACE_DESTINATIONS
14
21
  } from '#consts';
15
22
  import { createChildLogger } from '#logger';
23
+ import { ValidationError } from '#errors';
16
24
 
17
25
  const log = createChildLogger( 'Scanner' );
18
26
 
@@ -64,23 +72,34 @@ export async function loadActivities( rootDir, workflows ) {
64
72
  const activityOptionsMap = {};
65
73
 
66
74
  // Load workflow based activities
67
- for ( const { path: workflowPath, name: workflowName } of workflows ) {
75
+ for ( const { path: workflowPath, name: workflowName, external } of workflows ) {
68
76
  const dir = dirname( workflowPath );
69
- for await ( const { fn, metadata, path } of importComponents( dir, Object.values( activityMatchersBuilder( dir ) ) ) ) {
70
- log.info( 'Component loaded', { type: metadata.type, name: metadata.name, path, workflow: workflowName } );
77
+ for await ( const { fn, metadata, path } of importComponents( matchFiles( dir, Object.values( activityMatchersBuilder( dir ) ) ) ) ) {
78
+ log.info( 'Component loaded', { type: metadata.type, name: metadata.name, path, workflow: workflowName, ...( external && { external } ) } );
71
79
  // Activities loaded from a workflow path will use the workflow name as a namespace, which is unique across the platform, avoiding collision
72
80
  const activityKey = generateActivityKey( { namespace: workflowName, activityName: metadata.name } );
81
+ if ( activities[activityKey] ) {
82
+ throw new ValidationError( `Activity "${metadata.name}" in workflow "${workflowName}" conflicts with another \
83
+ activity in the same workflow. Activity names must be unique within a workflow.` );
84
+ }
73
85
  activities[activityKey] = fn;
74
86
  // propagate the custom options set on the step()/evaluator() constructor
75
87
  activityOptionsMap[activityKey] = metadata.options?.activityOptions ?? undefined;
76
88
  }
77
89
  }
78
90
 
79
- // Load shared activities/evaluators
80
- for await ( const { fn, metadata, path } of importComponents( rootDir, [ staticMatchers.sharedStepsDir, staticMatchers.sharedEvaluatorsDir ] ) ) {
81
- log.info( 'Shared component loaded', { type: metadata.type, name: metadata.name, path } );
82
- // The namespace for shared activities is fixed
91
+ // Load shared activities/evaluators from local and external npm modules
92
+ const localSharedActivities = matchFiles( rootDir, [ staticMatchers.sharedStepsDir, staticMatchers.sharedEvaluatorsDir ] );
93
+ const externalSharedActivities = findSharedActivitiesFromWorkflows( workflows.filter( w => w.external ) );
94
+ for await ( const { fn, metadata, path } of importComponents( [ ...localSharedActivities, ...externalSharedActivities ] ) ) {
95
+ const external = externalSharedActivities.some( a => a.path === path );
96
+ log.info( 'Shared component loaded', { type: metadata.type, name: metadata.name, path, ...( external && { external } ) } );
97
+ // Reuses the same global namespace for shared activities
83
98
  const activityKey = generateActivityKey( { namespace: SHARED_STEP_PREFIX, activityName: metadata.name } );
99
+ if ( activities[activityKey] ) {
100
+ throw new ValidationError( `Shared activity "${metadata.name}" conflicts with another shared activity. \
101
+ Shared activity names must be unique.` );
102
+ }
84
103
  activities[activityKey] = fn;
85
104
  activityOptionsMap[activityKey] = metadata.options?.activityOptions ?? undefined;
86
105
  }
@@ -103,19 +122,43 @@ export async function loadActivities( rootDir, workflows ) {
103
122
  * @returns {object[]}
104
123
  */
105
124
  export async function loadWorkflows( rootDir ) {
125
+ const workflowNames = new Set();
106
126
  const workflows = [];
107
- for await ( const { metadata, path } of importComponents( rootDir, [ staticMatchers.workflowFile ] ) ) {
127
+ const localWorkflows = matchFiles( rootDir, [ staticMatchers.workflowFile ] );
128
+ const externalWorkflows = findWorkflowsInNodeModules( rootDir );
129
+ for await ( const { metadata, path } of importComponents( [ ...localWorkflows, ...externalWorkflows ] ) ) {
130
+ const external = externalWorkflows.some( a => a.path === path );
108
131
  if ( staticMatchers.workflowPathHasShared( path ) ) {
109
- throw new Error( 'Workflow directory can\'t be named "shared"' );
132
+ throw new ValidationError( 'Workflow directory can\'t be named "shared"' );
133
+ }
134
+ const { name, aliases } = metadata;
135
+ if ( workflowNames.has( name ) ) {
136
+ throw new ValidationError( `Workflow name "${name}" conflicts with another workflow or alias. \
137
+ Workflow names and aliases must be unique.` );
138
+ }
139
+ if ( WORKFLOW_CATALOG === name ) {
140
+ throw new ValidationError( `Workflow name "${name}" is reserved for the internal catalog workflow.` );
141
+ }
142
+ workflowNames.add( name );
143
+ for ( const alias of aliases ?? [] ) {
144
+ if ( workflowNames.has( alias ) ) {
145
+ throw new ValidationError( `Workflow "${name}" alias "${alias}" conflicts with another workflow or alias. \
146
+ Workflow names and aliases must be unique.` );
147
+ }
148
+ if ( WORKFLOW_CATALOG === alias ) {
149
+ throw new ValidationError( `Workflow "${name}" alias "${alias}" is reserved for the internal catalog workflow.` );
150
+ }
151
+ workflowNames.add( alias );
110
152
  }
111
- log.info( 'Workflow loaded', { name: metadata.name, path } );
112
- workflows.push( { ...metadata, path } );
153
+
154
+ log.info( 'Workflow loaded', { name, path, aliases, ...( external && { external } ) } );
155
+ workflows.push( { ...metadata, path, external } );
113
156
  }
114
157
  return workflows;
115
158
  };
116
159
 
117
160
  /**
118
- * Loads the hook files from package.json's output config section.
161
+ * Loads the hook files from package.json's "outputai" section.
119
162
  *
120
163
  * @param {string} rootDir
121
164
  * @returns {void}
@@ -124,7 +167,12 @@ export async function loadHooks( rootDir ) {
124
167
  const packageFile = join( rootDir, 'package.json' );
125
168
  if ( existsSync( packageFile ) ) {
126
169
  const pkg = await import( packageFile, { with: { type: 'json' } } );
127
- for ( const path of pkg.default.output?.hookFiles ?? [] ) {
170
+ const content = pkg.default;
171
+ const hooks = [];
172
+ // @DEPRECATED: "output" is the legacy namespace for configs; it can be removed after a couple of versions (this replacement is being added in 0.3.x)
173
+ hooks.push( ...( content['output']?.hookFiles ?? [] ) );
174
+ hooks.push( ...( content['outputai']?.hookFiles ?? [] ) );
175
+ for ( const path of hooks ) {
128
176
  const hookFile = join( rootDir, path );
129
177
  await import( hookFile );
130
178
  log.info( 'Hook file loaded', { path } );
@@ -132,40 +180,6 @@ export async function loadHooks( rootDir ) {
132
180
  }
133
181
  };
134
182
 
135
- /**
136
- * Validates that all workflow names and aliases are unique across the project.
137
- *
138
- * @param {object[]} workflows
139
- * @throws {Error} If any alias conflicts with a workflow name or another alias
140
- */
141
- function validateWorkflowNames( workflows ) {
142
- const allNames = new Map();
143
-
144
- // Register primary names (case-insensitive to prevent confusing collisions)
145
- for ( const { name } of workflows ) {
146
- allNames.set( name.toLowerCase(), `workflow "${name}"` );
147
- }
148
-
149
- // Check the reserved catalog name
150
- allNames.set( WORKFLOW_CATALOG.toLowerCase(), 'system workflow "$catalog"' );
151
-
152
- // Check aliases against all names
153
- for ( const { name, aliases = [] } of workflows ) {
154
- const lowerCaseName = name.toLowerCase();
155
- for ( const alias of aliases ) {
156
- const lowerAliasName = alias.toLowerCase();
157
- if ( lowerAliasName === lowerCaseName ) {
158
- throw new Error( `Workflow "${name}" has an alias identical to its own name` );
159
- }
160
- const conflict = allNames.get( lowerAliasName );
161
- if ( conflict ) {
162
- throw new Error( `Alias "${alias}" on workflow "${name}" conflicts with ${conflict}` );
163
- }
164
- allNames.set( lowerAliasName, `alias "${alias}" on workflow "${name}"` );
165
- }
166
- }
167
- }
168
-
169
183
  /**
170
184
  * Creates a temporary index file importing all workflows for Temporal.
171
185
  *
@@ -173,8 +187,6 @@ function validateWorkflowNames( workflows ) {
173
187
  * @returns
174
188
  */
175
189
  export function createWorkflowsEntryPoint( workflows ) {
176
- validateWorkflowNames( workflows );
177
-
178
190
  const path = join( __dirname, 'temp', WORKFLOWS_INDEX_FILENAME );
179
191
 
180
192
  // default system catalog workflow