@output.ai/core 0.5.7 → 0.5.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -1
- package/src/bus.js +3 -0
- package/src/hooks/index.d.ts +20 -0
- package/src/hooks/index.js +21 -0
- package/src/worker/configs.js +4 -1
- package/src/worker/index.js +8 -2
- package/src/worker/index.spec.js +12 -3
- package/src/worker/interceptors/activity.js +3 -0
- package/src/worker/loader.js +19 -1
- package/src/worker/loader.spec.js +46 -11
- package/src/worker/sinks.js +5 -3
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@output.ai/core",
|
|
3
|
-
"version": "0.5.
|
|
3
|
+
"version": "0.5.8",
|
|
4
4
|
"description": "The core module of the output framework",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"exports": {
|
|
@@ -15,6 +15,10 @@
|
|
|
15
15
|
"./utils": {
|
|
16
16
|
"types": "./src/utils/index.d.ts",
|
|
17
17
|
"import": "./src/utils/index.js"
|
|
18
|
+
},
|
|
19
|
+
"./hooks": {
|
|
20
|
+
"types": "./src/hooks/index.d.ts",
|
|
21
|
+
"import": "./src/hooks/index.js"
|
|
18
22
|
}
|
|
19
23
|
},
|
|
20
24
|
"files": [
|
|
@@ -48,6 +52,7 @@
|
|
|
48
52
|
"access": "public"
|
|
49
53
|
},
|
|
50
54
|
"imports": {
|
|
55
|
+
"#bus": "./src/bus.js",
|
|
51
56
|
"#consts": "./src/consts.js",
|
|
52
57
|
"#errors": "./src/errors.js",
|
|
53
58
|
"#logger": "./src/logger.js",
|
package/src/hooks/index.d.ts
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Payload passed to the onError handler when a workflow, activity or runtime error occurs.
|
|
3
|
+
*/
|
|
4
|
+
export interface ErrorHookPayload {
|
|
5
|
+
/** Origin of the error: workflow execution, activity execution, or runtime. */
|
|
6
|
+
source: 'workflow' | 'activity' | 'runtime';
|
|
7
|
+
/** Name of the workflow, when the error is scoped to a workflow or activity. */
|
|
8
|
+
workflowName?: string;
|
|
9
|
+
/** Name of the activity, when the error is from an activity. */
|
|
10
|
+
activityName?: string;
|
|
11
|
+
/** The error thrown. */
|
|
12
|
+
error: Error;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Register a handler invoked on workflow, activity or runtime errors.
|
|
17
|
+
*
|
|
18
|
+
* @param handler - Function called with the error payload.
|
|
19
|
+
*/
|
|
20
|
+
export declare function onError( handler: ( payload: ErrorHookPayload ) => void ): void;
|
|
package/src/hooks/index.js
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { messageBus } from '#bus';
|
|
2
|
+
import { createChildLogger } from '#logger';
|
|
3
|
+
|
|
4
|
+
const log = createChildLogger( 'Hooks' );
|
|
5
|
+
|
|
6
|
+
export const onError = handler => {
|
|
7
|
+
const invokeHandler = async args => {
|
|
8
|
+
try {
|
|
9
|
+
await handler( args );
|
|
10
|
+
} catch ( error ) {
|
|
11
|
+
log.error( 'onError hook error', { error } );
|
|
12
|
+
}
|
|
13
|
+
};
|
|
14
|
+
|
|
15
|
+
messageBus.on( 'activity:error', async ( { workflowName, activityName, error } ) =>
|
|
16
|
+
invokeHandler( { source: 'activity', workflowName, activityName, error } ) );
|
|
17
|
+
messageBus.on( 'workflow:error', async ( { workflowName, error } ) =>
|
|
18
|
+
invokeHandler( { source: 'workflow', workflowName, error } ) );
|
|
19
|
+
messageBus.on( 'runtime:error', async ( { error } ) =>
|
|
20
|
+
invokeHandler( { source: 'runtime', error } ) );
|
|
21
|
+
};
|
package/src/worker/configs.js
CHANGED
|
@@ -24,7 +24,9 @@ const envVarSchema = z.object( {
|
|
|
24
24
|
// How often the worker sends a heartbeat to the Temporal Service during activity execution
|
|
25
25
|
OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 2 * 60 * 1000 ) ), // 2min
|
|
26
26
|
// Whether to send activity heartbeats (enabled by default)
|
|
27
|
-
OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: z.transform( v => v === undefined ? true : isStringboolTrue( v ) )
|
|
27
|
+
OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: z.transform( v => v === undefined ? true : isStringboolTrue( v ) ),
|
|
28
|
+
// Time to allow for hooks to flush before shutdown
|
|
29
|
+
OUTPUT_PROCESS_FAILURE_SHUTDOWN_DELAY: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 3000 ) )
|
|
28
30
|
} );
|
|
29
31
|
|
|
30
32
|
const { data: envVars, error } = envVarSchema.safeParse( process.env );
|
|
@@ -44,3 +46,4 @@ export const taskQueue = envVars.OUTPUT_CATALOG_ID;
|
|
|
44
46
|
export const catalogId = envVars.OUTPUT_CATALOG_ID;
|
|
45
47
|
export const activityHeartbeatIntervalMs = envVars.OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS;
|
|
46
48
|
export const activityHeartbeatEnabled = envVars.OUTPUT_ACTIVITY_HEARTBEAT_ENABLED;
|
|
49
|
+
export const processFailureShutdownDelay = envVars.OUTPUT_PROCESS_FAILURE_SHUTDOWN_DELAY;
|
package/src/worker/index.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { Worker, NativeConnection } from '@temporalio/worker';
|
|
2
2
|
import * as configs from './configs.js';
|
|
3
|
-
import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
|
|
3
|
+
import { loadActivities, loadHooks, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
|
|
4
4
|
import { sinks } from './sinks.js';
|
|
5
5
|
import { createCatalog } from './catalog_workflow/index.js';
|
|
6
6
|
import { init as initTracing } from '#tracing';
|
|
@@ -9,6 +9,7 @@ import { initInterceptors } from './interceptors.js';
|
|
|
9
9
|
import { createChildLogger } from '#logger';
|
|
10
10
|
import { registerShutdown } from './shutdown.js';
|
|
11
11
|
import { startCatalog } from './start_catalog.js';
|
|
12
|
+
import { messageBus } from '#bus';
|
|
12
13
|
|
|
13
14
|
const log = createChildLogger( 'Worker' );
|
|
14
15
|
|
|
@@ -28,6 +29,9 @@ const callerDir = process.argv[2];
|
|
|
28
29
|
maxConcurrentWorkflowTaskPolls
|
|
29
30
|
} = configs;
|
|
30
31
|
|
|
32
|
+
log.info( 'Loading config...', { callerDir } );
|
|
33
|
+
await loadHooks( callerDir );
|
|
34
|
+
|
|
31
35
|
log.info( 'Loading workflows...', { callerDir } );
|
|
32
36
|
const workflows = await loadWorkflows( callerDir );
|
|
33
37
|
|
|
@@ -77,5 +81,7 @@ const callerDir = process.argv[2];
|
|
|
77
81
|
process.exit( 0 );
|
|
78
82
|
} )().catch( error => {
|
|
79
83
|
log.error( 'Fatal error', { message: error.message, stack: error.stack } );
|
|
80
|
-
|
|
84
|
+
messageBus.emit( 'runtime:error', { error } );
|
|
85
|
+
log.info( `Exiting in ${configs.processFailureShutdownDelay}ms` );
|
|
86
|
+
setTimeout( () => process.exit( 1 ), configs.processFailureShutdownDelay );
|
|
81
87
|
} );
|
package/src/worker/index.spec.js
CHANGED
|
@@ -17,16 +17,22 @@ const configValues = {
|
|
|
17
17
|
maxConcurrentActivityTaskExecutions: 40,
|
|
18
18
|
maxCachedWorkflows: 1000,
|
|
19
19
|
maxConcurrentActivityTaskPolls: 5,
|
|
20
|
-
maxConcurrentWorkflowTaskPolls: 5
|
|
20
|
+
maxConcurrentWorkflowTaskPolls: 5,
|
|
21
|
+
processFailureShutdownDelay: 0
|
|
21
22
|
};
|
|
22
23
|
vi.mock( './configs.js', () => configValues );
|
|
23
24
|
|
|
25
|
+
const messageBusMock = { on: vi.fn(), emit: vi.fn() };
|
|
26
|
+
vi.mock( '#bus', () => ( { messageBus: messageBusMock } ) );
|
|
27
|
+
|
|
24
28
|
const loadWorkflowsMock = vi.fn().mockResolvedValue( [] );
|
|
25
29
|
const loadActivitiesMock = vi.fn().mockResolvedValue( {} );
|
|
30
|
+
const loadHooksMock = vi.fn().mockResolvedValue( undefined );
|
|
26
31
|
const createWorkflowsEntryPointMock = vi.fn().mockReturnValue( '/fake/workflows/path.js' );
|
|
27
32
|
vi.mock( './loader.js', () => ( {
|
|
28
33
|
loadWorkflows: loadWorkflowsMock,
|
|
29
34
|
loadActivities: loadActivitiesMock,
|
|
35
|
+
loadHooks: loadHooksMock,
|
|
30
36
|
createWorkflowsEntryPoint: createWorkflowsEntryPointMock
|
|
31
37
|
} ) );
|
|
32
38
|
|
|
@@ -83,8 +89,9 @@ describe( 'worker/index', () => {
|
|
|
83
89
|
import( './index.js' );
|
|
84
90
|
|
|
85
91
|
await vi.waitFor( () => {
|
|
86
|
-
expect(
|
|
92
|
+
expect( loadHooksMock ).toHaveBeenCalledWith( '/test/caller/dir' );
|
|
87
93
|
} );
|
|
94
|
+
expect( loadWorkflowsMock ).toHaveBeenCalledWith( '/test/caller/dir' );
|
|
88
95
|
expect( loadActivitiesMock ).toHaveBeenCalledWith( '/test/caller/dir', [] );
|
|
89
96
|
expect( createWorkflowsEntryPointMock ).toHaveBeenCalledWith( [] );
|
|
90
97
|
expect( initTracing ).toHaveBeenCalled();
|
|
@@ -157,6 +164,8 @@ describe( 'worker/index', () => {
|
|
|
157
164
|
await vi.waitFor( () => {
|
|
158
165
|
expect( mockLog.error ).toHaveBeenCalledWith( 'Fatal error', expect.any( Object ) );
|
|
159
166
|
} );
|
|
160
|
-
|
|
167
|
+
await vi.waitFor( () => {
|
|
168
|
+
expect( exitMock ).toHaveBeenCalledWith( 1 );
|
|
169
|
+
} );
|
|
161
170
|
} );
|
|
162
171
|
} );
|
package/src/worker/interceptors/activity.js
CHANGED
|
@@ -5,6 +5,7 @@ import { headersToObject } from '../sandboxed_utils.js';
|
|
|
5
5
|
import { LifecycleEvent, METADATA_ACCESS_SYMBOL } from '#consts';
|
|
6
6
|
import { activityHeartbeatEnabled, activityHeartbeatIntervalMs } from '../configs.js';
|
|
7
7
|
import { createChildLogger } from '#logger';
|
|
8
|
+
import { messageBus } from '#bus';
|
|
8
9
|
|
|
9
10
|
const log = createChildLogger( 'Activity' );
|
|
10
11
|
/*
|
|
@@ -56,6 +57,8 @@ export class ActivityExecutionInterceptor {
|
|
|
56
57
|
event: LifecycleEvent.ERROR, kind, ...logContext, durationMs: Date.now() - startDate, error: error.message
|
|
57
58
|
} );
|
|
58
59
|
addEventError( { details: error, ...traceArguments } );
|
|
60
|
+
|
|
61
|
+
messageBus.emit( 'activity:error', { error, workflowName, activityName: activityType } );
|
|
59
62
|
throw error;
|
|
60
63
|
|
|
61
64
|
} finally {
|
package/src/worker/loader.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { dirname, join } from 'node:path';
|
|
2
|
-
import { mkdirSync, writeFileSync } from 'node:fs';
|
|
2
|
+
import { existsSync, mkdirSync, writeFileSync } from 'node:fs';
|
|
3
3
|
import { EOL } from 'node:os';
|
|
4
4
|
import { fileURLToPath } from 'url';
|
|
5
5
|
import { getTraceDestinations, sendHttpRequest } from '#internal_activities';
|
|
@@ -114,6 +114,24 @@ export async function loadWorkflows( rootDir ) {
|
|
|
114
114
|
return workflows;
|
|
115
115
|
};
|
|
116
116
|
|
|
117
|
+
/**
|
|
118
|
+
* Loads the hook files from package.json's output config section.
|
|
119
|
+
*
|
|
120
|
+
* @param {string} rootDir
|
|
121
|
+
* @returns {void}
|
|
122
|
+
*/
|
|
123
|
+
export async function loadHooks( rootDir ) {
|
|
124
|
+
const packageFile = join( rootDir, 'package.json' );
|
|
125
|
+
if ( existsSync( packageFile ) ) {
|
|
126
|
+
const pkg = await import( packageFile, { with: { type: 'json' } } );
|
|
127
|
+
for ( const path of pkg.default.output?.hookFiles ?? [] ) {
|
|
128
|
+
const hookFile = join( rootDir, path );
|
|
129
|
+
await import( hookFile );
|
|
130
|
+
log.info( 'Hook file loaded', { path } );
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
};
|
|
134
|
+
|
|
117
135
|
/**
|
|
118
136
|
* Creates a temporary index file importing all workflows for Temporal.
|
|
119
137
|
*
|
package/src/worker/loader.spec.js
CHANGED
|
@@ -1,4 +1,6 @@
|
|
|
1
1
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { tmpdir } from 'node:os';
|
|
2
4
|
|
|
3
5
|
vi.mock( '#consts', () => ( {
|
|
4
6
|
ACTIVITY_SEND_HTTP_REQUEST: '__internal#sendHttpRequest',
|
|
@@ -22,11 +24,15 @@ vi.mock( './loader_tools.js', async importOriginal => {
|
|
|
22
24
|
return { ...actual, importComponents: importComponentsMock };
|
|
23
25
|
} );
|
|
24
26
|
|
|
25
|
-
const
|
|
26
|
-
|
|
27
|
+
const fsMocks = vi.hoisted( () => ( {
|
|
28
|
+
mkdirSync: vi.fn(),
|
|
29
|
+
writeFileSync: vi.fn(),
|
|
30
|
+
existsSync: vi.fn().mockReturnValue( false )
|
|
31
|
+
} ) );
|
|
27
32
|
vi.mock( 'node:fs', () => ( {
|
|
28
|
-
mkdirSync:
|
|
29
|
-
writeFileSync:
|
|
33
|
+
mkdirSync: fsMocks.mkdirSync,
|
|
34
|
+
writeFileSync: fsMocks.writeFileSync,
|
|
35
|
+
existsSync: fsMocks.existsSync
|
|
30
36
|
} ) );
|
|
31
37
|
|
|
32
38
|
describe( 'worker/loader', () => {
|
|
@@ -54,14 +60,14 @@ describe( 'worker/loader', () => {
|
|
|
54
60
|
expect( activities['__internal#sendHttpRequest'] ).toBe( sendHttpRequestMock );
|
|
55
61
|
|
|
56
62
|
// options file written with the collected activityOptions map
|
|
57
|
-
expect(
|
|
58
|
-
const [ writtenPath, contents ] =
|
|
63
|
+
expect( fsMocks.writeFileSync ).toHaveBeenCalledTimes( 1 );
|
|
64
|
+
const [ writtenPath, contents ] = fsMocks.writeFileSync.mock.calls[0];
|
|
59
65
|
expect( writtenPath ).toMatch( /temp\/__activity_options\.js$/ );
|
|
60
66
|
expect( contents ).toContain( 'export default' );
|
|
61
67
|
expect( JSON.parse( contents.replace( /^export default\s*/, '' ).replace( /;\s*$/, '' ) ) ).toEqual( {
|
|
62
68
|
'A#Act1': { retry: { maximumAttempts: 3 } }
|
|
63
69
|
} );
|
|
64
|
-
expect(
|
|
70
|
+
expect( fsMocks.mkdirSync ).toHaveBeenCalled();
|
|
65
71
|
} );
|
|
66
72
|
|
|
67
73
|
it( 'loadActivities omits activity options when component has no options or no activityOptions', async () => {
|
|
@@ -74,7 +80,7 @@ describe( 'worker/loader', () => {
|
|
|
74
80
|
|
|
75
81
|
await loadActivities( '/root', [ { name: 'A', path: '/a/workflow.js' } ] );
|
|
76
82
|
const written = JSON.parse(
|
|
77
|
-
|
|
83
|
+
fsMocks.writeFileSync.mock.calls[0][1].replace( /^export default\s*/, '' ).replace( /;\s*$/, '' )
|
|
78
84
|
);
|
|
79
85
|
expect( written['A#NoOptions'] ).toBeUndefined();
|
|
80
86
|
expect( written['A#EmptyOptions'] ).toBeUndefined();
|
|
@@ -97,12 +103,12 @@ describe( 'worker/loader', () => {
|
|
|
97
103
|
const workflows = [ { name: 'W', path: '/abs/wf.js' } ];
|
|
98
104
|
const entry = createWorkflowsEntryPoint( workflows );
|
|
99
105
|
|
|
100
|
-
expect(
|
|
101
|
-
const [ writtenPath, contents ] =
|
|
106
|
+
expect( fsMocks.writeFileSync ).toHaveBeenCalledTimes( 1 );
|
|
107
|
+
const [ writtenPath, contents ] = fsMocks.writeFileSync.mock.calls[0];
|
|
102
108
|
expect( entry ).toBe( writtenPath );
|
|
103
109
|
expect( contents ).toContain( 'export { default as W } from \'/abs/wf.js\';' );
|
|
104
110
|
expect( contents ).toContain( 'export { default as catalog }' );
|
|
105
|
-
expect(
|
|
111
|
+
expect( fsMocks.mkdirSync ).toHaveBeenCalledTimes( 1 );
|
|
106
112
|
} );
|
|
107
113
|
|
|
108
114
|
it( 'loadActivities uses folder-based matchers for steps/evaluators and shared', async () => {
|
|
@@ -198,4 +204,33 @@ describe( 'worker/loader', () => {
|
|
|
198
204
|
expect( activities['$shared#SharedEvalPrimary'] ).toBeTypeOf( 'function' );
|
|
199
205
|
expect( activities['$shared#SharedEvalSecondary'] ).toBeTypeOf( 'function' );
|
|
200
206
|
} );
|
|
207
|
+
|
|
208
|
+
describe( 'loadHooks', () => {
|
|
209
|
+
it( 'resolves without importing when package.json does not exist', async () => {
|
|
210
|
+
fsMocks.existsSync.mockReturnValue( false );
|
|
211
|
+
const { loadHooks } = await import( './loader.js' );
|
|
212
|
+
await expect( loadHooks( '/root' ) ).resolves.toBeUndefined();
|
|
213
|
+
expect( fsMocks.existsSync ).toHaveBeenCalledWith( join( '/root', 'package.json' ) );
|
|
214
|
+
} );
|
|
215
|
+
|
|
216
|
+
it( 'imports hook files listed in package.json output.hookFiles', async () => {
|
|
217
|
+
vi.doUnmock( 'node:fs' );
|
|
218
|
+
vi.resetModules();
|
|
219
|
+
const fs = await import( 'node:fs' );
|
|
220
|
+
const tmpDir = fs.mkdtempSync( join( tmpdir(), 'loader-spec-' ) );
|
|
221
|
+
try {
|
|
222
|
+
fs.writeFileSync( join( tmpDir, 'package.json' ), JSON.stringify( {
|
|
223
|
+
output: { hookFiles: [ 'hook.js' ] }
|
|
224
|
+
} ) );
|
|
225
|
+
fs.writeFileSync( join( tmpDir, 'hook.js' ), 'globalThis.__loadHooksTestLoaded = true;' );
|
|
226
|
+
|
|
227
|
+
const { loadHooks } = await import( './loader.js' );
|
|
228
|
+
await loadHooks( tmpDir );
|
|
229
|
+
expect( globalThis.__loadHooksTestLoaded ).toBe( true );
|
|
230
|
+
} finally {
|
|
231
|
+
delete globalThis.__loadHooksTestLoaded;
|
|
232
|
+
fs.rmSync( tmpDir, { recursive: true, force: true } );
|
|
233
|
+
}
|
|
234
|
+
} );
|
|
235
|
+
} );
|
|
201
236
|
} );
|
package/src/worker/sinks.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { LifecycleEvent, WORKFLOW_CATALOG } from '#consts';
|
|
2
2
|
import { addEventStart, addEventEnd, addEventError } from '#tracing';
|
|
3
3
|
import { createChildLogger } from '#logger';
|
|
4
|
+
import { messageBus } from '#bus';
|
|
4
5
|
|
|
5
6
|
const log = createChildLogger( 'Workflow' );
|
|
6
7
|
|
|
@@ -81,9 +82,10 @@ export const sinks = {
|
|
|
81
82
|
},
|
|
82
83
|
|
|
83
84
|
addWorkflowEventError: {
|
|
84
|
-
fn: ( workflowInfo,
|
|
85
|
-
logWorkflowEvent( LifecycleEvent.ERROR, workflowInfo,
|
|
86
|
-
addWorkflowEvent( addEventError, workflowInfo,
|
|
85
|
+
fn: ( workflowInfo, error ) => {
|
|
86
|
+
logWorkflowEvent( LifecycleEvent.ERROR, workflowInfo, error );
|
|
87
|
+
addWorkflowEvent( addEventError, workflowInfo, error );
|
|
88
|
+
messageBus.emit( 'workflow:error', { error, workflowName: workflowInfo.workflowType } );
|
|
87
89
|
},
|
|
88
90
|
callDuringReplay: false
|
|
89
91
|
},
|