@output.ai/core 0.3.9-dev.pr306-3f50755 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@output.ai/core",
- "version": "0.3.9-dev.pr306-3f50755",
+ "version": "0.4.0",
  "description": "The core module of the output framework",
  "type": "module",
  "exports": {
@@ -66,8 +66,8 @@ setMetadata( sendHttpRequest, { type: ComponentType.INTERNAL_STEP } );
  * @returns {object} Information about enabled workflows
  */
  export const getTraceDestinations = ( { startTime, workflowId, workflowName } ) => ( {
- local: isStringboolTrue( process.env.TRACE_LOCAL_ON ) ? localProcessor.getDestination( { startTime, workflowId, workflowName } ) : null,
- remote: isStringboolTrue( process.env.TRACE_REMOTE_ON ) ? s3Processor.getDestination( { startTime, workflowId, workflowName } ) : null
+ local: isStringboolTrue( process.env.OUTPUT_TRACE_LOCAL_ON ) ? localProcessor.getDestination( { startTime, workflowId, workflowName } ) : null,
+ remote: isStringboolTrue( process.env.OUTPUT_TRACE_REMOTE_ON ) ? s3Processor.getDestination( { startTime, workflowId, workflowName } ) : null
  } );

  setMetadata( getTraceDestinations, { type: ComponentType.INTERNAL_STEP } );
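For consumers of the published package, these toggles now read the OUTPUT_-prefixed names. A minimal sketch of enabling only the local processor before the framework loads (values are illustrative; the tests later in this diff show that isStringboolTrue accepts strings such as '1', 'true', and 'on'):

// Hypothetical bootstrap snippet: set the renamed variables before importing @output.ai/core.
process.env.OUTPUT_TRACE_LOCAL_ON = '1';   // was TRACE_LOCAL_ON
process.env.OUTPUT_TRACE_REMOTE_ON = '0';  // was TRACE_REMOTE_ON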
@@ -43,7 +43,7 @@ const resolveIOPath = workflowName => join( callerDir, 'logs', resolveTraceFolde
  * Resolves the file path to be reported as the trace destination.
  *
  * Considering that in containerized environments (e.g., Docker), the file path might differ from the host machine,
- * this value takes in consideration the TRACE_HOST_PATH env variable instead of the local filesystem to mount
+ * this value takes in consideration the OUTPUT_TRACE_HOST_PATH env variable instead of the local filesystem to mount
  * the final file path.
  *
  * If the env variable is not present, it falls back to the same value used to write files locally.
@@ -51,8 +51,8 @@ const resolveIOPath = workflowName => join( callerDir, 'logs', resolveTraceFolde
  * @param {string} workflowName - The name of the workflow
  * @returns {string} The path to report, reflecting the actual filesystem
  */
- const resolveReportPath = workflowName => process.env.TRACE_HOST_PATH ?
- join( process.env.TRACE_HOST_PATH, resolveTraceFolder( workflowName ) ) :
+ const resolveReportPath = workflowName => process.env.OUTPUT_TRACE_HOST_PATH ?
+ join( process.env.OUTPUT_TRACE_HOST_PATH, resolveTraceFolder( workflowName ) ) :
  resolveIOPath( workflowName );

  /**
@@ -99,7 +99,7 @@ export const exec = ( { entry, executionContext } ) => {
  /**
  * Returns where the trace is saved as an absolute path.
  *
- * This uses the optional TRACE_HOST_PATH to return values relative to the host OS, not the container, if applicable.
+ * This uses the optional OUTPUT_TRACE_HOST_PATH to return values relative to the host OS, not the container, if applicable.
  *
  * @param {object} args
  * @param {string} args.startTime - The start time of the workflow
@@ -29,7 +29,7 @@ describe( 'tracing/processors/local', () => {
  vi.clearAllMocks();
  store.files.clear();
  process.argv[2] = '/tmp/project';
- delete process.env.TRACE_HOST_PATH; // Clear TRACE_HOST_PATH for clean tests
+ delete process.env.OUTPUT_TRACE_HOST_PATH; // Clear OUTPUT_TRACE_HOST_PATH for clean tests
  } );

  it( 'init(): creates temp dir and cleans up old files', async () => {
@@ -62,7 +62,7 @@ describe( 'tracing/processors/local', () => {

  expect( writeFileSyncMock ).toHaveBeenCalledTimes( 3 );
  const [ writtenPath, content ] = writeFileSyncMock.mock.calls.at( -1 );
- // Changed: Now uses process.cwd() + '/logs' fallback when TRACE_HOST_PATH not set
+ // Changed: Now uses process.cwd() + '/logs' fallback when OUTPUT_TRACE_HOST_PATH not set
  expect( writtenPath ).toMatch( /\/runs\/WF\// );
  expect( JSON.parse( content.trim() ).count ).toBe( 3 );
  } );
@@ -81,11 +81,11 @@ describe( 'tracing/processors/local', () => {
  expect( destination ).toContain( '/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json' );
  } );

- it( 'exec(): writes to container path regardless of TRACE_HOST_PATH', async () => {
+ it( 'exec(): writes to container path regardless of OUTPUT_TRACE_HOST_PATH', async () => {
  const { exec, init } = await import( './index.js' );

- // Set TRACE_HOST_PATH to simulate Docker environment
- process.env.TRACE_HOST_PATH = '/host/path/logs';
+ // Set OUTPUT_TRACE_HOST_PATH to simulate Docker environment
+ process.env.OUTPUT_TRACE_HOST_PATH = '/host/path/logs';

  init();

@@ -97,16 +97,16 @@ describe( 'tracing/processors/local', () => {
  expect( writeFileSyncMock ).toHaveBeenCalledTimes( 1 );
  const [ writtenPath ] = writeFileSyncMock.mock.calls.at( -1 );

- // Should write to process.cwd()/logs, NOT to TRACE_HOST_PATH
+ // Should write to process.cwd()/logs, NOT to OUTPUT_TRACE_HOST_PATH
  expect( writtenPath ).not.toContain( '/host/path/logs' );
  expect( writtenPath ).toMatch( /logs\/runs\/WF\// );
  } );

- it( 'getDestination(): returns TRACE_HOST_PATH when set', async () => {
+ it( 'getDestination(): returns OUTPUT_TRACE_HOST_PATH when set', async () => {
  const { getDestination } = await import( './index.js' );

- // Set TRACE_HOST_PATH to simulate Docker environment
- process.env.TRACE_HOST_PATH = '/host/path/logs';
+ // Set OUTPUT_TRACE_HOST_PATH to simulate Docker environment
+ process.env.OUTPUT_TRACE_HOST_PATH = '/host/path/logs';

  const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
  const workflowId = 'workflow-id-123';
@@ -114,15 +114,15 @@ describe( 'tracing/processors/local', () => {

  const destination = getDestination( { startTime, workflowId, workflowName } );

- // Should return TRACE_HOST_PATH-based path for reporting
+ // Should return OUTPUT_TRACE_HOST_PATH-based path for reporting
  expect( destination ).toBe( '/host/path/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json' );
  } );

  it( 'separation of write and report paths works correctly', async () => {
  const { exec, getDestination, init } = await import( './index.js' );

- // Set TRACE_HOST_PATH to simulate Docker environment
- process.env.TRACE_HOST_PATH = '/Users/ben/project/logs';
+ // Set OUTPUT_TRACE_HOST_PATH to simulate Docker environment
+ process.env.OUTPUT_TRACE_HOST_PATH = '/Users/ben/project/logs';

  init();

@@ -142,7 +142,7 @@ describe( 'tracing/processors/local', () => {
  expect( writtenPath ).not.toContain( '/Users/ben/project' );
  expect( writtenPath ).toMatch( /logs\/runs\/test-workflow\// );

- // Verify report path uses TRACE_HOST_PATH
+ // Verify report path uses OUTPUT_TRACE_HOST_PATH
  expect( destination ).toBe( '/Users/ben/project/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json' );
  } );
  } );
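Taken together, these tests pin down the split between write and report paths. A short illustration of the same behavior (all values are copied from the tests above, not additional API):

// With the renamed variable set, getDestination() reports the host-mounted path
// while exec() keeps writing inside the container:
process.env.OUTPUT_TRACE_HOST_PATH = '/host/path/logs'; // was TRACE_HOST_PATH
// reported: '/host/path/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json'
// written under: process.cwd() + '/logs/runs/test-workflow/'
// With the variable unset, both the report and the write fall back to the local logs folder.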
@@ -59,4 +59,4 @@ export const exec = async ( { entry, executionContext } ) => {
  * @returns {string} The S3 url of the trace file
  */
  export const getDestination = ( { startTime, workflowId, workflowName } ) =>
- `https://${process.env.TRACE_REMOTE_S3_BUCKET}.s3.amazonaws.com/${getS3Key( { workflowId, workflowName, startTime } )}`;
+ `https://${process.env.OUTPUT_TRACE_REMOTE_S3_BUCKET}.s3.amazonaws.com/${getS3Key( { workflowId, workflowName, startTime } )}`;
@@ -18,7 +18,7 @@ vi.mock( '../../tools/build_trace_tree.js', () => ( { default: buildTraceTreeMoc
  describe( 'tracing/processors/s3', () => {
  beforeEach( () => {
  vi.clearAllMocks();
- process.env.TRACE_REMOTE_S3_BUCKET = 'bkt';
+ process.env.OUTPUT_TRACE_REMOTE_S3_BUCKET = 'bkt';
  } );

  it( 'init(): ensures redis client is created', async () => {
@@ -8,7 +8,7 @@ const state = { client: null };
  * @returns {redis.RedisClientType}
  */
  export async function getRedisClient() {
- const url = process.env.REDIS_URL ?? throws( new Error( 'Missing REDIS_URL environment variable' ) );
+ const url = process.env.OUTPUT_REDIS_URL ?? throws( new Error( 'Missing OUTPUT_REDIS_URL environment variable' ) );
  if ( await state.client?.ping().catch( _ => 0 ) === 'PONG' ) {
  return state.client;
  };
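Deployments that exported REDIS_URL must now export OUTPUT_REDIS_URL, otherwise the client factory rejects. A one-line illustration (the URL is an example value, mirroring the tests below):

process.env.OUTPUT_REDIS_URL = 'redis://localhost:6379'; // was REDIS_URL; when unset, getRedisClient() rejects with 'Missing OUTPUT_REDIS_URL environment variable'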
@@ -17,16 +17,16 @@ async function loadModule() {
  describe( 'tracing/processors/s3/redis_client', () => {
  beforeEach( () => {
  vi.clearAllMocks();
- delete process.env.REDIS_URL;
+ delete process.env.OUTPUT_REDIS_URL;
  } );

- it( 'throws if REDIS_URL is missing', async () => {
+ it( 'throws if OUTPUT_REDIS_URL is missing', async () => {
  const { getRedisClient } = await loadModule();
- await expect( getRedisClient() ).rejects.toThrow( 'Missing REDIS_URL' );
+ await expect( getRedisClient() ).rejects.toThrow( 'Missing OUTPUT_REDIS_URL' );
  } );

  it( 'creates client with url, connects once, then reuses cached when ping is PONG', async () => {
- process.env.REDIS_URL = 'redis://localhost:6379';
+ process.env.OUTPUT_REDIS_URL = 'redis://localhost:6379';

  const pingMock = vi.fn().mockResolvedValue( 'PONG' );
  const connectMock = vi.fn().mockResolvedValue();
@@ -12,9 +12,9 @@ const getS3Client = () => {
  return state.s3Client;
  }

- const region = process.env.AWS_REGION ?? throws( new Error( 'Missing AWS_REGION env var' ) );
- const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY ?? throws( new Error( 'Missing AWS_SECRET_ACCESS_KEY env var' ) );
- const accessKeyId = process.env.AWS_ACCESS_KEY_ID ?? throws( new Error( 'Missing AWS_ACCESS_KEY_ID env var' ) );
+ const region = process.env.OUTPUT_AWS_REGION ?? throws( new Error( 'Missing OUTPUT_AWS_REGION env var' ) );
+ const secretAccessKey = process.env.OUTPUT_AWS_SECRET_ACCESS_KEY ?? throws( new Error( 'Missing OUTPUT_AWS_SECRET_ACCESS_KEY env var' ) );
+ const accessKeyId = process.env.OUTPUT_AWS_ACCESS_KEY_ID ?? throws( new Error( 'Missing OUTPUT_AWS_ACCESS_KEY_ID env var' ) );

  return state.s3Client = new S3Client( { region, secretAccessKey, accessKeyId } );
  };
@@ -27,7 +27,7 @@ const getS3Client = () => {
  */
  export const upload = ( { key, content } ) =>
  getS3Client().send( new PutObjectCommand( {
- Bucket: process.env.TRACE_REMOTE_S3_BUCKET ?? throws( new Error( 'Missing TRACE_REMOTE_S3_BUCKET env var' ) ),
+ Bucket: process.env.OUTPUT_TRACE_REMOTE_S3_BUCKET ?? throws( new Error( 'Missing OUTPUT_TRACE_REMOTE_S3_BUCKET env var' ) ),
  Key: key,
  Body: content
  } ) );
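The remote (S3) processor fails fast on the renamed credentials as well. A sketch of the environment a worker needs before upload() can succeed (all values are placeholders):

// Each of these was previously read without the OUTPUT_ prefix.
process.env.OUTPUT_AWS_REGION = 'us-east-1';
process.env.OUTPUT_AWS_ACCESS_KEY_ID = 'example-key-id';
process.env.OUTPUT_AWS_SECRET_ACCESS_KEY = 'example-secret';
process.env.OUTPUT_TRACE_REMOTE_S3_BUCKET = 'my-trace-bucket';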
@@ -32,10 +32,10 @@ async function loadModule() {
  describe( 'tracing/processors/s3/s3_client', () => {
  beforeEach( () => {
  vi.clearAllMocks();
- delete process.env.AWS_REGION;
- delete process.env.AWS_SECRET_ACCESS_KEY;
- delete process.env.AWS_ACCESS_KEY_ID;
- delete process.env.TRACE_REMOTE_S3_BUCKET;
+ delete process.env.OUTPUT_AWS_REGION;
+ delete process.env.OUTPUT_AWS_SECRET_ACCESS_KEY;
+ delete process.env.OUTPUT_AWS_ACCESS_KEY_ID;
+ delete process.env.OUTPUT_TRACE_REMOTE_S3_BUCKET;
  } );

  it( 'fails fast when required env vars are missing for client creation', async () => {
@@ -44,10 +44,10 @@ describe( 'tracing/processors/s3/s3_client', () => {
  } );

  it( 'creates client once with env and uploads with bucket/key/content', async () => {
- process.env.AWS_REGION = 'us-east-1';
- process.env.AWS_SECRET_ACCESS_KEY = 'sek';
- process.env.AWS_ACCESS_KEY_ID = 'id';
- process.env.TRACE_REMOTE_S3_BUCKET = 'bucket';
+ process.env.OUTPUT_AWS_REGION = 'us-east-1';
+ process.env.OUTPUT_AWS_SECRET_ACCESS_KEY = 'sek';
+ process.env.OUTPUT_AWS_ACCESS_KEY_ID = 'id';
+ process.env.OUTPUT_TRACE_REMOTE_S3_BUCKET = 'bucket';

  const { upload } = await loadModule();

@@ -12,13 +12,13 @@ const log = createChildLogger( 'Tracing' );
  const traceBus = new EventEmitter();
  const processors = [
  {
- isOn: isStringboolTrue( process.env.TRACE_LOCAL_ON ),
+ isOn: isStringboolTrue( process.env.OUTPUT_TRACE_LOCAL_ON ),
  name: 'LOCAL',
  init: localProcessor.init,
  exec: localProcessor.exec
  },
  {
- isOn: isStringboolTrue( process.env.TRACE_REMOTE_ON ),
+ isOn: isStringboolTrue( process.env.OUTPUT_TRACE_REMOTE_ON ),
  name: 'REMOTE',
  init: s3Processor.init,
  exec: s3Processor.exec
@@ -27,14 +27,14 @@ async function loadTraceEngine() {
  describe( 'tracing/trace_engine', () => {
  beforeEach( () => {
  vi.clearAllMocks();
- delete process.env.TRACE_LOCAL_ON;
- delete process.env.TRACE_REMOTE_ON;
+ delete process.env.OUTPUT_TRACE_LOCAL_ON;
+ delete process.env.OUTPUT_TRACE_REMOTE_ON;
  storageLoadMock.mockReset();
  } );

  it( 'init() starts only enabled processors and attaches listeners', async () => {
- process.env.TRACE_LOCAL_ON = '1';
- process.env.TRACE_REMOTE_ON = '0';
+ process.env.OUTPUT_TRACE_LOCAL_ON = '1';
+ process.env.OUTPUT_TRACE_REMOTE_ON = '0';
  const { init, addEventPhase } = await loadTraceEngine();

  await init();
@@ -52,7 +52,7 @@ describe( 'tracing/trace_engine', () => {
  } );

  it( 'addEventPhase() emits an entry consumed by processors', async () => {
- process.env.TRACE_LOCAL_ON = 'on';
+ process.env.OUTPUT_TRACE_LOCAL_ON = 'on';
  const { init, addEventPhase } = await loadTraceEngine();
  await init();

@@ -65,7 +65,7 @@ describe( 'tracing/trace_engine', () => {
  } );

  it( 'addEventPhaseWithContext() uses storage when available', async () => {
- process.env.TRACE_LOCAL_ON = 'true';
+ process.env.OUTPUT_TRACE_LOCAL_ON = 'true';
  storageLoadMock.mockReturnValue( { parentId: 'ctx-p', executionContext: { runId: 'r1' } } );
  const { init, addEventPhaseWithContext } = await loadTraceEngine();
  await init();
@@ -80,7 +80,7 @@ describe( 'tracing/trace_engine', () => {
  } );

  it( 'addEventPhaseWithContext() is a no-op when storage is absent', async () => {
- process.env.TRACE_LOCAL_ON = '1';
+ process.env.OUTPUT_TRACE_LOCAL_ON = '1';
  storageLoadMock.mockReturnValue( undefined );
  const { init, addEventPhaseWithContext } = await loadTraceEngine();
  await init();
@@ -1,22 +1,30 @@
  import * as z from 'zod';
+ import { isStringboolTrue } from '#utils';

  class InvalidEnvVarsErrors extends Error { }

+ const coalesceEmptyString = v => v === '' ? undefined : v;
+
  const envVarSchema = z.object( {
- CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
+ OUTPUT_CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
  TEMPORAL_ADDRESS: z.string().default( 'localhost:7233' ),
  TEMPORAL_API_KEY: z.string().optional(),
  TEMPORAL_NAMESPACE: z.string().optional().default( 'default' ),
  // Worker concurrency — tune these via env vars to adjust for your workload.
  // Each step (API, LLM, etc.) call is one activity. Lower this to reduce memory pressure.
- MAX_CONCURRENT_ACTIVITY_TASKS: z.coerce.number().int().positive().default( 40 ),
+ TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_EXECUTIONS: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 40 ) ),
  // Workflows are lightweight state machines — this can be high.
- MAX_CONCURRENT_WORKFLOW_TASKS: z.coerce.number().int().positive().default( 200 ),
+ TEMPORAL_MAX_CONCURRENT_WORKFLOW_TASK_EXECUTIONS: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 200 ) ),
  // LRU cache for sticky workflow execution. Lower values free memory faster after surges.
- MAX_CACHED_WORKFLOWS: z.coerce.number().int().positive().default( 1000 ),
+ TEMPORAL_MAX_CACHED_WORKFLOWS: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 1000 ) ),
  // How aggressively the worker pulls tasks from Temporal.
- MAX_CONCURRENT_ACTIVITY_POLLS: z.coerce.number().int().positive().default( 5 ),
- MAX_CONCURRENT_WORKFLOW_POLLS: z.coerce.number().int().positive().default( 5 )
+ TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_POLLS: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 5 ) ),
+ TEMPORAL_MAX_CONCURRENT_WORKFLOW_TASK_POLLS: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 5 ) ),
+ // Activity configs
+ // How often the worker sends a heartbeat to the Temporal Service during activity execution
+ OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS: z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( 2 * 60 * 1000 ) ), // 2min
+ // Whether to send activity heartbeats (enabled by default)
+ OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: z.transform( v => v === undefined ? true : isStringboolTrue( v ) )
  } );

  const { data: envVars, error } = envVarSchema.safeParse( process.env );
@@ -26,12 +34,13 @@ if ( error ) {
  export const address = envVars.TEMPORAL_ADDRESS;
  export const apiKey = envVars.TEMPORAL_API_KEY;

- export const executionTimeout = '1m';
- export const maxActivities = envVars.MAX_CONCURRENT_ACTIVITY_TASKS;
- export const maxWorkflows = envVars.MAX_CONCURRENT_WORKFLOW_TASKS;
- export const maxCachedWorkflows = envVars.MAX_CACHED_WORKFLOWS;
- export const maxActivityPolls = envVars.MAX_CONCURRENT_ACTIVITY_POLLS;
- export const maxWorkflowPolls = envVars.MAX_CONCURRENT_WORKFLOW_POLLS;
+ export const maxConcurrentActivityTaskExecutions = envVars.TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_EXECUTIONS;
+ export const maxConcurrentWorkflowTaskExecutions = envVars.TEMPORAL_MAX_CONCURRENT_WORKFLOW_TASK_EXECUTIONS;
+ export const maxCachedWorkflows = envVars.TEMPORAL_MAX_CACHED_WORKFLOWS;
+ export const maxConcurrentActivityTaskPolls = envVars.TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_POLLS;
+ export const maxConcurrentWorkflowTaskPolls = envVars.TEMPORAL_MAX_CONCURRENT_WORKFLOW_TASK_POLLS;
  export const namespace = envVars.TEMPORAL_NAMESPACE;
- export const taskQueue = envVars.CATALOG_ID;
- export const catalogId = envVars.CATALOG_ID;
+ export const taskQueue = envVars.OUTPUT_CATALOG_ID;
+ export const catalogId = envVars.OUTPUT_CATALOG_ID;
+ export const activityHeartbeatIntervalMs = envVars.OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS;
+ export const activityHeartbeatEnabled = envVars.OUTPUT_ACTIVITY_HEARTBEAT_ENABLED;
@@ -0,0 +1,130 @@
+ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+
+ const CONFIG_KEYS = [
+ 'OUTPUT_CATALOG_ID',
+ 'TEMPORAL_ADDRESS',
+ 'TEMPORAL_API_KEY',
+ 'TEMPORAL_NAMESPACE',
+ 'TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_EXECUTIONS',
+ 'TEMPORAL_MAX_CONCURRENT_WORKFLOW_TASK_EXECUTIONS',
+ 'TEMPORAL_MAX_CACHED_WORKFLOWS',
+ 'TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_POLLS',
+ 'TEMPORAL_MAX_CONCURRENT_WORKFLOW_TASK_POLLS',
+ 'OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS',
+ 'OUTPUT_ACTIVITY_HEARTBEAT_ENABLED'
+ ];
+
+ const setEnv = ( overrides = {} ) => {
+ process.env.OUTPUT_CATALOG_ID = overrides.OUTPUT_CATALOG_ID ?? 'test-catalog';
+ CONFIG_KEYS.forEach( key => {
+ if ( overrides[key] !== undefined ) {
+ process.env[key] = String( overrides[key] );
+ }
+ } );
+ };
+
+ const clearEnv = () => {
+ CONFIG_KEYS.forEach( key => delete process.env[key] );
+ };
+
+ async function loadConfigs() {
+ vi.resetModules();
+ return import( './configs.js' );
+ }
+
+ describe( 'worker/configs', () => {
+ beforeEach( () => clearEnv() );
+ afterEach( () => clearEnv() );
+
+ it( 'throws when OUTPUT_CATALOG_ID is missing', async () => {
+ clearEnv();
+ vi.resetModules();
+
+ await expect( import( './configs.js' ) ).rejects.toThrow();
+ } );
+
+ it( 'throws when OUTPUT_CATALOG_ID does not match regex', async () => {
+ setEnv( { OUTPUT_CATALOG_ID: 'invalid space' } );
+ vi.resetModules();
+
+ await expect( import( './configs.js' ) ).rejects.toThrow();
+ } );
+
+ it( 'uses defaults when only OUTPUT_CATALOG_ID is set', async () => {
+ setEnv();
+ const configs = await loadConfigs();
+
+ expect( configs.address ).toBe( 'localhost:7233' );
+ expect( configs.namespace ).toBe( 'default' );
+ expect( configs.maxConcurrentActivityTaskExecutions ).toBe( 40 );
+ expect( configs.maxConcurrentWorkflowTaskExecutions ).toBe( 200 );
+ expect( configs.maxCachedWorkflows ).toBe( 1000 );
+ expect( configs.maxConcurrentActivityTaskPolls ).toBe( 5 );
+ expect( configs.maxConcurrentWorkflowTaskPolls ).toBe( 5 );
+ expect( configs.activityHeartbeatIntervalMs ).toBe( 2 * 60 * 1000 );
+ expect( configs.activityHeartbeatEnabled ).toBe( true );
+ expect( configs.taskQueue ).toBe( 'test-catalog' );
+ expect( configs.catalogId ).toBe( 'test-catalog' );
+ } );
+
+ it( 'treats empty string for optional number as default (preprocess)', async () => {
+ setEnv( { TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_EXECUTIONS: '' } );
+ const configs = await loadConfigs();
+
+ expect( configs.maxConcurrentActivityTaskExecutions ).toBe( 40 );
+ } );
+
+ it( 'parses custom numeric env vars', async () => {
+ setEnv( {
+ TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_EXECUTIONS: '10',
+ TEMPORAL_MAX_CONCURRENT_WORKFLOW_TASK_EXECUTIONS: '50',
+ TEMPORAL_MAX_CACHED_WORKFLOWS: '500',
+ OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS: '60000'
+ } );
+ const configs = await loadConfigs();
+
+ expect( configs.maxConcurrentActivityTaskExecutions ).toBe( 10 );
+ expect( configs.maxConcurrentWorkflowTaskExecutions ).toBe( 50 );
+ expect( configs.maxCachedWorkflows ).toBe( 500 );
+ expect( configs.activityHeartbeatIntervalMs ).toBe( 60000 );
+ } );
+
+ it( 'throws when optional number is zero or negative', async () => {
+ setEnv( { TEMPORAL_MAX_CONCURRENT_ACTIVITY_TASK_EXECUTIONS: '0' } );
+ vi.resetModules();
+
+ await expect( import( './configs.js' ) ).rejects.toThrow();
+ } );
+
+ it( 'OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: "true"|"1"|"on" → true', async () => {
+ for ( const val of [ 'true', '1', 'on' ] ) {
+ setEnv( { OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: val } );
+ const configs = await loadConfigs();
+ expect( configs.activityHeartbeatEnabled ).toBe( true );
+ clearEnv();
+ }
+ } );
+
+ it( 'OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: "false"|other → false, undefined → true', async () => {
+ setEnv( { OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: 'false' } );
+ const configsFalse = await loadConfigs();
+ expect( configsFalse.activityHeartbeatEnabled ).toBe( false );
+
+ setEnv( { OUTPUT_ACTIVITY_HEARTBEAT_ENABLED: '0' } );
+ const configsZero = await loadConfigs();
+ expect( configsZero.activityHeartbeatEnabled ).toBe( false );
+
+ clearEnv();
+ setEnv(); // only OUTPUT_CATALOG_ID; OUTPUT_ACTIVITY_HEARTBEAT_ENABLED absent → default true
+ const configsDefault = await loadConfigs();
+ expect( configsDefault.activityHeartbeatEnabled ).toBe( true );
+ } );
+
+ it( 'parses TEMPORAL_ADDRESS and TEMPORAL_NAMESPACE', async () => {
+ setEnv( { TEMPORAL_ADDRESS: 'temporal:7233', TEMPORAL_NAMESPACE: 'my-ns' } );
+ const configs = await loadConfigs();
+
+ expect( configs.address ).toBe( 'temporal:7233' );
+ expect( configs.namespace ).toBe( 'my-ns' );
+ } );
+ } );
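Two behaviors exercised by these tests are easy to miss in the schema above: an empty string on a numeric knob falls back to its default (coalesceEmptyString plus z.preprocess), and the heartbeat toggle defaults to true when the variable is absent. A stand-alone sketch of the numeric pattern, assuming only zod (this is not the package's schema itself):

import * as z from 'zod';

// Same idea as the schema above: '' behaves like "unset", so the default applies.
const coalesceEmptyString = v => v === '' ? undefined : v;
const positiveWithDefault = dflt =>
  z.preprocess( coalesceEmptyString, z.coerce.number().int().positive().default( dflt ) );

positiveWithDefault( 40 ).parse( '' );   // 40 (default)
positiveWithDefault( 40 ).parse( '10' ); // 10
// positiveWithDefault( 40 ).parse( '0' ) would throw: the value must be positive.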
@@ -1,10 +1,7 @@
  import { Worker, NativeConnection } from '@temporalio/worker';
  import { Client } from '@temporalio/client';
  import { WorkflowIdConflictPolicy } from '@temporalio/common';
- import {
- address, apiKey, maxActivities, maxWorkflows, maxCachedWorkflows,
- maxActivityPolls, maxWorkflowPolls, namespace, taskQueue, catalogId
- } from './configs.js';
+ import * as configs from './configs.js';
  import { loadActivities, loadWorkflows, createWorkflowsEntryPoint } from './loader.js';
  import { sinks } from './sinks.js';
  import { createCatalog } from './catalog_workflow/index.js';
@@ -20,6 +17,19 @@ const log = createChildLogger( 'Worker' );
  const callerDir = process.argv[2];

  ( async () => {
+ const {
+ address,
+ apiKey,
+ namespace,
+ taskQueue,
+ catalogId,
+ maxConcurrentWorkflowTaskExecutions,
+ maxConcurrentActivityTaskExecutions,
+ maxCachedWorkflows,
+ maxConcurrentActivityTaskPolls,
+ maxConcurrentWorkflowTaskPolls
+ } = configs;
+
  log.info( 'Loading workflows...', { callerDir } );
  const workflows = await loadWorkflows( callerDir );

@@ -48,11 +58,11 @@ const callerDir = process.argv[2];
  activities,
  sinks,
  interceptors: initInterceptors( { activities } ),
- maxConcurrentWorkflowTaskExecutions: maxWorkflows,
- maxConcurrentActivityTaskExecutions: maxActivities,
+ maxConcurrentWorkflowTaskExecutions,
+ maxConcurrentActivityTaskExecutions,
  maxCachedWorkflows,
- maxConcurrentActivityTaskPolls: maxActivityPolls,
- maxConcurrentWorkflowTaskPolls: maxWorkflowPolls,
+ maxConcurrentActivityTaskPolls,
+ maxConcurrentWorkflowTaskPolls,
  bundlerOptions: { webpackConfigHook }
  } );

@@ -3,13 +3,7 @@ import { Storage } from '#async_storage';
  import { addEventStart, addEventEnd, addEventError } from '#tracing';
  import { headersToObject } from '../sandboxed_utils.js';
  import { METADATA_ACCESS_SYMBOL } from '#consts';
-
- // Heartbeat config lives here (not in #consts) because consts.js is bundled into
- // the Temporal workflow sandbox where `process` is not available.
- // Activity interceptors run in the worker context where process.env is safe.
- const DEFAULT_HEARTBEAT_INTERVAL_MS = 2 * 60 * 1000; // 2 minutes
- const HEARTBEAT_INTERVAL_MS = Number( process.env.OUTPUT_HEARTBEAT_INTERVAL_MS ) || DEFAULT_HEARTBEAT_INTERVAL_MS;
- const HEARTBEAT_ENABLED = process.env.OUTPUT_HEARTBEAT_ENABLED !== 'false'; // on by default, set to 'false' to disable
+ import { activityHeartbeatEnabled, activityHeartbeatIntervalMs } from '../configs.js';

  /*
  This interceptor wraps every activity execution with cross-cutting concerns:
@@ -37,27 +31,24 @@ export class ActivityExecutionInterceptor {
  const { executionContext } = headersToObject( input.headers );
  const { type: kind } = this.activities?.[activityType]?.[METADATA_ACCESS_SYMBOL];

- // --- Tracing: record the start of the activity ---
  const traceArguments = { kind, id: activityId, parentId: workflowId, name: activityType, executionContext };
  addEventStart( { details: input.args[0], ...traceArguments } );

- // --- Heartbeating: signal Temporal periodically that this worker is still alive ---
- const heartbeatInterval = HEARTBEAT_ENABLED ?
- setInterval( () => Context.current().heartbeat(), HEARTBEAT_INTERVAL_MS ) :
- null;
-
+ const intervals = { heartbeat: null };
  try {
- // --- Execution: run the activity within an AsyncLocalStorage context for nested tracing ---
+ // Sends heartbeat to communicate that activity is still alive
+ intervals.heartbeat = activityHeartbeatEnabled && setInterval( () => Context.current().heartbeat(), activityHeartbeatIntervalMs );
+
  const output = await Storage.runWithContext( async _ => next( input ), { parentId: activityId, executionContext } );
  addEventEnd( { details: output, ...traceArguments } );
  return output;
+
  } catch ( error ) {
  addEventError( { details: error, ...traceArguments } );
  throw error;
+
  } finally {
- if ( heartbeatInterval ) {
- clearInterval( heartbeatInterval );
- }
+ clearInterval( intervals.heartbeat );
  }
  }
  };
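The control flow stays the same after the refactor: optionally start a heartbeat timer on entry and always clear it in finally; clearInterval silently ignores a falsy handle, so no guard is needed. A reduced sketch of the pattern outside Temporal (work and beat stand in for next( input ) and Context.current().heartbeat(); the config values are hard-coded here purely for illustration):

// Stand-alone illustration of the enable-guarded interval used by the interceptor.
const activityHeartbeatEnabled = true;  // imported from ../configs.js in the real code
const activityHeartbeatIntervalMs = 50; // imported from ../configs.js in the real code

async function runWithHeartbeat( work, beat ) {
  const intervals = { heartbeat: null };
  try {
    // Falsy when disabled; clearInterval() below accepts that without throwing.
    intervals.heartbeat = activityHeartbeatEnabled && setInterval( beat, activityHeartbeatIntervalMs );
    return await work();
  } finally {
    clearInterval( intervals.heartbeat );
  }
}

// Usage sketch: await runWithHeartbeat( () => next( input ), () => Context.current().heartbeat() );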
@@ -43,6 +43,15 @@ vi.mock( '#consts', () => ( {
  }
  } ) );

+ vi.mock( '../configs.js', () => ( {
+ get activityHeartbeatEnabled() {
+ return process.env.OUTPUT_ACTIVITY_HEARTBEAT_ENABLED !== 'false';
+ },
+ get activityHeartbeatIntervalMs() {
+ return parseInt( process.env.OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS || '120000', 10 );
+ }
+ } ) );
+
  const makeActivities = () => ( {
  'myWorkflow#myStep': { [METADATA_ACCESS_SYMBOL]: { type: 'step' } }
  } );
@@ -58,8 +67,8 @@ describe( 'ActivityExecutionInterceptor', () => {
  vi.useFakeTimers();
  vi.resetModules();
  // Default: heartbeat enabled with 50ms interval for fast tests
- vi.stubEnv( 'OUTPUT_HEARTBEAT_ENABLED', 'true' );
- vi.stubEnv( 'OUTPUT_HEARTBEAT_INTERVAL_MS', '50' );
+ vi.stubEnv( 'OUTPUT_ACTIVITY_HEARTBEAT_ENABLED', 'true' );
+ vi.stubEnv( 'OUTPUT_ACTIVITY_HEARTBEAT_INTERVAL_MS', '50' );
  } );

  afterEach( () => {
@@ -152,8 +161,8 @@ describe( 'ActivityExecutionInterceptor', () => {
  expect( heartbeatMock ).not.toHaveBeenCalled();
  } );

- it( 'does not heartbeat when HEARTBEAT_ENABLED is false', async () => {
- vi.stubEnv( 'OUTPUT_HEARTBEAT_ENABLED', 'false' );
+ it( 'does not heartbeat when OUTPUT_ACTIVITY_HEARTBEAT_ENABLED is false', async () => {
+ vi.stubEnv( 'OUTPUT_ACTIVITY_HEARTBEAT_ENABLED', 'false' );
  const { ActivityExecutionInterceptor } = await import( './activity.js' );
  const interceptor = new ActivityExecutionInterceptor( makeActivities() );