@outputai/core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114) hide show
  1. package/LICENSE +201 -0
  2. package/README.md +11 -0
  3. package/bin/healthcheck.mjs +36 -0
  4. package/bin/healthcheck.spec.js +90 -0
  5. package/bin/worker.sh +26 -0
  6. package/package.json +67 -0
  7. package/src/activity_integration/context.d.ts +27 -0
  8. package/src/activity_integration/context.js +17 -0
  9. package/src/activity_integration/context.spec.js +42 -0
  10. package/src/activity_integration/events.d.ts +7 -0
  11. package/src/activity_integration/events.js +10 -0
  12. package/src/activity_integration/index.d.ts +9 -0
  13. package/src/activity_integration/index.js +3 -0
  14. package/src/activity_integration/tracing.d.ts +32 -0
  15. package/src/activity_integration/tracing.js +37 -0
  16. package/src/async_storage.js +19 -0
  17. package/src/bus.js +3 -0
  18. package/src/consts.js +32 -0
  19. package/src/errors.d.ts +15 -0
  20. package/src/errors.js +14 -0
  21. package/src/hooks/index.d.ts +28 -0
  22. package/src/hooks/index.js +32 -0
  23. package/src/index.d.ts +49 -0
  24. package/src/index.js +4 -0
  25. package/src/interface/evaluation_result.d.ts +173 -0
  26. package/src/interface/evaluation_result.js +215 -0
  27. package/src/interface/evaluator.d.ts +70 -0
  28. package/src/interface/evaluator.js +34 -0
  29. package/src/interface/evaluator.spec.js +565 -0
  30. package/src/interface/index.d.ts +9 -0
  31. package/src/interface/index.js +26 -0
  32. package/src/interface/step.d.ts +138 -0
  33. package/src/interface/step.js +22 -0
  34. package/src/interface/types.d.ts +27 -0
  35. package/src/interface/validations/runtime.js +20 -0
  36. package/src/interface/validations/runtime.spec.js +29 -0
  37. package/src/interface/validations/schema_utils.js +8 -0
  38. package/src/interface/validations/schema_utils.spec.js +67 -0
  39. package/src/interface/validations/static.js +136 -0
  40. package/src/interface/validations/static.spec.js +366 -0
  41. package/src/interface/webhook.d.ts +84 -0
  42. package/src/interface/webhook.js +64 -0
  43. package/src/interface/webhook.spec.js +122 -0
  44. package/src/interface/workflow.d.ts +273 -0
  45. package/src/interface/workflow.js +128 -0
  46. package/src/interface/workflow.spec.js +467 -0
  47. package/src/interface/workflow_context.js +31 -0
  48. package/src/interface/workflow_utils.d.ts +76 -0
  49. package/src/interface/workflow_utils.js +50 -0
  50. package/src/interface/workflow_utils.spec.js +190 -0
  51. package/src/interface/zod_integration.spec.js +646 -0
  52. package/src/internal_activities/index.js +66 -0
  53. package/src/internal_activities/index.spec.js +102 -0
  54. package/src/logger.js +73 -0
  55. package/src/tracing/internal_interface.js +71 -0
  56. package/src/tracing/processors/local/index.js +111 -0
  57. package/src/tracing/processors/local/index.spec.js +149 -0
  58. package/src/tracing/processors/s3/configs.js +31 -0
  59. package/src/tracing/processors/s3/configs.spec.js +64 -0
  60. package/src/tracing/processors/s3/index.js +114 -0
  61. package/src/tracing/processors/s3/index.spec.js +153 -0
  62. package/src/tracing/processors/s3/redis_client.js +62 -0
  63. package/src/tracing/processors/s3/redis_client.spec.js +185 -0
  64. package/src/tracing/processors/s3/s3_client.js +27 -0
  65. package/src/tracing/processors/s3/s3_client.spec.js +62 -0
  66. package/src/tracing/tools/build_trace_tree.js +83 -0
  67. package/src/tracing/tools/build_trace_tree.spec.js +135 -0
  68. package/src/tracing/tools/utils.js +21 -0
  69. package/src/tracing/tools/utils.spec.js +14 -0
  70. package/src/tracing/trace_engine.js +97 -0
  71. package/src/tracing/trace_engine.spec.js +199 -0
  72. package/src/utils/index.d.ts +134 -0
  73. package/src/utils/index.js +2 -0
  74. package/src/utils/resolve_invocation_dir.js +34 -0
  75. package/src/utils/resolve_invocation_dir.spec.js +102 -0
  76. package/src/utils/utils.js +211 -0
  77. package/src/utils/utils.spec.js +448 -0
  78. package/src/worker/bundler_options.js +43 -0
  79. package/src/worker/catalog_workflow/catalog.js +114 -0
  80. package/src/worker/catalog_workflow/index.js +54 -0
  81. package/src/worker/catalog_workflow/index.spec.js +196 -0
  82. package/src/worker/catalog_workflow/workflow.js +24 -0
  83. package/src/worker/configs.js +49 -0
  84. package/src/worker/configs.spec.js +130 -0
  85. package/src/worker/index.js +89 -0
  86. package/src/worker/index.spec.js +177 -0
  87. package/src/worker/interceptors/activity.js +62 -0
  88. package/src/worker/interceptors/activity.spec.js +212 -0
  89. package/src/worker/interceptors/workflow.js +70 -0
  90. package/src/worker/interceptors/workflow.spec.js +167 -0
  91. package/src/worker/interceptors.js +10 -0
  92. package/src/worker/loader.js +151 -0
  93. package/src/worker/loader.spec.js +236 -0
  94. package/src/worker/loader_tools.js +132 -0
  95. package/src/worker/loader_tools.spec.js +156 -0
  96. package/src/worker/log_hooks.js +95 -0
  97. package/src/worker/log_hooks.spec.js +217 -0
  98. package/src/worker/sandboxed_utils.js +18 -0
  99. package/src/worker/shutdown.js +26 -0
  100. package/src/worker/shutdown.spec.js +82 -0
  101. package/src/worker/sinks.js +74 -0
  102. package/src/worker/start_catalog.js +36 -0
  103. package/src/worker/start_catalog.spec.js +118 -0
  104. package/src/worker/webpack_loaders/consts.js +9 -0
  105. package/src/worker/webpack_loaders/tools.js +548 -0
  106. package/src/worker/webpack_loaders/tools.spec.js +330 -0
  107. package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.js +221 -0
  108. package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.spec.js +336 -0
  109. package/src/worker/webpack_loaders/workflow_rewriter/index.mjs +61 -0
  110. package/src/worker/webpack_loaders/workflow_rewriter/index.spec.js +216 -0
  111. package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.js +196 -0
  112. package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.spec.js +123 -0
  113. package/src/worker/webpack_loaders/workflow_validator/index.mjs +205 -0
  114. package/src/worker/webpack_loaders/workflow_validator/index.spec.js +613 -0
@@ -0,0 +1,102 @@
1
// Unit tests for internal_activities/sendHttpRequest.
//
// HTTP traffic is intercepted with undici's MockAgent (real network disabled),
// while #logger and #utils are replaced by module mocks. Note: vi.mock calls
// are hoisted by vitest above the imports, so the mocked factories win even
// though they appear after the import statements.
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { MockAgent, setGlobalDispatcher } from 'undici';
import { FatalError } from '#errors';
import { serializeBodyAndInferContentType, serializeFetchResponse } from '#utils';
import { sendHttpRequest } from './index.js';

// Stub the namespaced logger so tests stay silent; the same `log` object is
// handed out for every createChildLogger() call.
vi.mock( '#logger', () => {
  const log = { info: vi.fn(), warn: vi.fn(), error: vi.fn(), debug: vi.fn() };
  return { createChildLogger: vi.fn( () => log ) };
} );

// Stub utils: the two serialization helpers get per-test return values via
// mockReturnValueOnce / mockResolvedValueOnce in the happy-path test.
vi.mock( '#utils', () => ( {
  setMetadata: vi.fn(),
  isStringboolTrue: vi.fn( () => false ),
  serializeBodyAndInferContentType: vi.fn(),
  serializeFetchResponse: vi.fn()
} ) );

// Global fetch dispatcher: every request must match a registered interceptor.
const mockAgent = new MockAgent();
mockAgent.disableNetConnect();

setGlobalDispatcher( mockAgent );

const url = 'https://growthx.ai';
const method = 'GET';

describe( 'internal_activities/sendHttpRequest', () => {
  beforeEach( async () => {
    // Reset both mock implementations and recorded calls between tests.
    vi.restoreAllMocks();
    vi.clearAllMocks();
  } );

  it( 'succeeds and returns serialized JSON response', async () => {
    const payload = { a: 1 };
    const method = 'POST'; // shadows the module-level GET for this test only

    mockAgent.get( url ).intercept( { path: '/', method } )
      .reply( 200, JSON.stringify( { ok: true, value: 42 } ), {
        headers: { 'content-type': 'application/json' }
      } );

    // mock utils
    serializeBodyAndInferContentType.mockReturnValueOnce( {
      body: JSON.stringify( payload ),
      contentType: 'application/json; charset=UTF-8'
    } );
    const fakeSerialized = { sentinel: true };
    serializeFetchResponse.mockResolvedValueOnce( fakeSerialized );

    const result = await sendHttpRequest( { url, method, payload } );

    // utils mocked: verify calls and returned value
    expect( serializeBodyAndInferContentType ).toHaveBeenCalledTimes( 1 );
    expect( serializeBodyAndInferContentType ).toHaveBeenCalledWith( payload );
    expect( serializeFetchResponse ).toHaveBeenCalledTimes( 1 );
    // The serializer must receive the raw fetch Response (duck-typed checks).
    const respArg = serializeFetchResponse.mock.calls[0][0];
    expect( respArg && typeof respArg.text ).toBe( 'function' );
    expect( respArg.status ).toBe( 200 );
    expect( respArg.headers.get( 'content-type' ) ).toContain( 'application/json' );
    expect( result ).toBe( fakeSerialized );
  } );

  it( 'throws FatalError when response.ok is false', async () => {
    mockAgent.get( url ).intercept( { path: '/', method } ).reply( 500, 'Internal error' );

    await expect( sendHttpRequest( { url, method } ) ).rejects
      .toThrow( new FatalError( 'GET https://growthx.ai 500' ) );
    // Serialization helpers must not run on failed requests.
    expect( serializeFetchResponse ).not.toHaveBeenCalled();
    expect( serializeBodyAndInferContentType ).not.toHaveBeenCalled();
  } );

  it( 'throws FatalError on timeout failure', async () => {
    mockAgent.get( url ).intercept( { path: '/', method } )
      .reply( 200, 'ok', { headers: { 'content-type': 'text/plain' } } )
      .delay( 10_000 ); // far longer than the 250ms timeout below

    await expect( sendHttpRequest( { url, method, timeout: 250 } ) ).rejects
      .toThrow( new FatalError( 'GET https://growthx.ai The operation was aborted due to timeout' ) );
    expect( serializeFetchResponse ).not.toHaveBeenCalled();
    expect( serializeBodyAndInferContentType ).not.toHaveBeenCalled();
  } );

  it( 'wraps DNS resolution errors (ENOTFOUND) preserving cause message', async () => {
    mockAgent.get( url ).intercept( { path: '/', method } )
      .replyWithError( new Error( 'getaddrinfo ENOTFOUND nonexistent.example.test' ) );

    await expect( sendHttpRequest( { url, method } ) ).rejects
      .toThrow( new FatalError( 'GET https://growthx.ai Error: getaddrinfo ENOTFOUND nonexistent.example.test' ) );
    expect( serializeFetchResponse ).not.toHaveBeenCalled();
    expect( serializeBodyAndInferContentType ).not.toHaveBeenCalled();
  } );

  it( 'wraps TCP connection errors (ECONNREFUSED) preserving cause message', async () => {
    mockAgent.get( url ).intercept( { path: '/', method } )
      .replyWithError( new Error( 'connect ECONNREFUSED 127.0.0.1:65500' ) );

    await expect( sendHttpRequest( { url, method } ) ).rejects
      .toThrow( new FatalError( 'GET https://growthx.ai Error: connect ECONNREFUSED 127.0.0.1:65500' ) );
    expect( serializeFetchResponse ).not.toHaveBeenCalled();
    expect( serializeBodyAndInferContentType ).not.toHaveBeenCalled();
  } );
} );
package/src/logger.js ADDED
@@ -0,0 +1,73 @@
1
+ import winston from 'winston';
2
+ import { shuffleArray } from '#utils';
3
+
4
// Production toggles both the minimum log level and the output format below.
const isProduction = process.env.NODE_ENV === 'production';

// Winston severity levels, 0 = most severe.
const levels = {
  error: 0,
  warn: 1,
  info: 2,
  http: 3,
  debug: 4
};

// Pool of xterm-256 color codes used to tint namespaces; shuffled once at
// module load so the namespace-to-color pairing varies between process runs.
const colors = shuffleArray( [
  '033', // blue
  '030', // green
  '208', // orange
  '045', // turquoise
  '129', // purple
  '184' // yellow
] );
// namespace -> color code, filled lazily by getColor() the first time a
// namespace logs.
const assignedColors = new Map();
23
+
24
// Render a metadata object as a compact, human-friendly inline form,
// e.g. ` { name: "foo", count: 5 }`. An empty object renders as ''.
const formatMeta = meta => {
  const pairs = Object.entries( meta )
    .map( ( [ key, value ] ) => `${key}: ${JSON.stringify( value )}` );
  return pairs.length === 0 ? '' : ` { ${pairs.join( ', ' )} }`;
};
32
// Distribute the namespace in a map and assign it the next available color.
// Map#set returns the map itself, so chaining .get( v ) reads back the value
// just stored; colors cycle (size % length) once all are taken.
const getColor = v =>
  assignedColors.has( v ) ? assignedColors.get( v ) : assignedColors.set( v, colors[assignedColors.size % colors.length] ).get( v );

// Colorize a text using the namespace string (xterm-256 foreground escape:
// ESC[38;5;<code>m ... ESC[0m).
const colorizeByNamespace = ( namespace, text ) => `\x1b[38;5;${getColor( namespace )}m${text}\x1b[0m`;
38
+
39
// Development format: colorized text with a namespace prefix.
//
// The 'Core'-prefixed namespace string keys the ANSI color; winston's default
// meta (service/environment) is destructured away, everything else is
// appended inline via formatMeta.
const devFormat = winston.format.combine(
  winston.format.colorize(),
  winston.format.printf( ( { level, message, namespace, service: _, environment: __, ...rest } ) => {
    const ns = 'Core' + ( namespace ? `.${namespace}` : '' );
    const meta = formatMeta( rest );
    // Guard the root logger (no namespace): previously this interpolated
    // `${namespace}: ` unconditionally, printing a literal "undefined: ".
    const text = namespace ? `${namespace}: ${message}` : message;
    return `[${level}] ${colorizeByNamespace( ns, text )}${meta}`;
  } )
);
48
+
49
// Production format: structured JSON with an ISO-8601 timestamp and error
// stack traces preserved via format.errors.
const prodFormat = winston.format.combine(
  winston.format.timestamp( { format: 'YYYY-MM-DDTHH:mm:ss.SSSZ' } ),
  winston.format.errors( { stack: true } ),
  winston.format.json()
);
55
+
56
/**
 * Root winston logger for the package.
 *
 * Level and format depend on NODE_ENV: production logs structured JSON at
 * `info`, everything else logs colorized text at `debug`. `service` and
 * `environment` ride along on every record via defaultMeta (and are stripped
 * from the dev-format inline metadata above).
 */
export const logger = winston.createLogger( {
  levels,
  level: isProduction ? 'info' : 'debug',
  format: isProduction ? prodFormat : devFormat,
  defaultMeta: {
    service: 'output-worker',
    environment: process.env.NODE_ENV || 'development'
  },
  transports: [ new winston.transports.Console() ]
} );
66
+
67
/**
 * Creates a child logger with a specific namespace.
 *
 * The child shares the root logger's level, format and transports; the
 * namespace is attached as metadata and rendered (and colorized) by the
 * dev format above.
 *
 * @param {string} namespace - The namespace for this logger (e.g., 'Scanner', 'Tracing')
 * @returns {winston.Logger} Child logger instance with namespace metadata
 */
export const createChildLogger = namespace => logger.child( { namespace } );
@@ -0,0 +1,71 @@
1
import { addEventPhase, addEventPhaseWithContext, init, getDestinations } from './trace_engine.js';

/**
 * Init method, if not called, no processors are attached and trace functions are dummy
 */
export { init, getDestinations };

/**
 * Internal use only - adds event phase with AsyncLocalStorage context resolution
 */
export { addEventPhaseWithContext };

/**
 * Trace nomenclature
 *
 * Trace - The collection of Events;
 * Event - Any entry in the Trace file, must have the two phases START and END or ERROR;
 * Phase - A specific part of an Event, either START or the conclusive END or ERROR;
 */

/**
 * Internal use only
 *
 * Adds the start phase of a new event at the default trace for the current workflow.
 *
 * @param {object} options
 * @param {string} options.id - A unique id for the Event, must be the same across all phases: start, end, error.
 * @param {string} options.kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
 * @param {string} options.name - The human friendly name of the Event: query, request, create.
 * @param {any} options.details - All details attached to this Event Phase. DB queried records, HTTP response body.
 * @param {string} options.parentId - The parent Event, used to build a tree.
 * @param {object} options.executionContext - The original execution context from the workflow
 * @returns {void}
 */
export const addEventStart = options => addEventPhase( 'start', options );

/**
 * Internal use only
 *
 * Adds the end phase at an event at the default trace for the current workflow.
 *
 * It needs to use the same id of the start phase.
 *
 * @param {object} options
 * @param {string} options.id - A unique id for the Event, must be the same across all phases: start, end, error.
 * @param {string} options.kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
 * @param {string} options.name - The human friendly name of the Event: query, request, create.
 * @param {any} options.details - All details attached to this Event Phase. DB queried records, HTTP response body.
 * @param {string} options.parentId - The parent Event, used to build a tree.
 * @param {object} options.executionContext - The original execution context from the workflow
 * @returns {void}
 */
export const addEventEnd = options => addEventPhase( 'end', options );

/**
 * Internal use only
 *
 * Adds the error phase at an event as error at the default trace for the current workflow.
 *
 * It needs to use the same id of the start phase.
 *
 * @param {object} options
 * @param {string} options.id - A unique id for the Event, must be the same across all phases: start, end, error.
 * @param {string} options.kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
 * @param {string} options.name - The human friendly name of the Event: query, request, create.
 * @param {any} options.details - All details attached to this Event Phase. DB queried records, HTTP response body.
 * @param {string} options.parentId - The parent Event, used to build a tree.
 * @param {object} options.executionContext - The original execution context from the workflow
 * @returns {void}
 */
export const addEventError = options => addEventPhase( 'error', options );
@@ -0,0 +1,111 @@
1
+ import { appendFileSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
2
+ import { dirname, join } from 'node:path';
3
+ import { fileURLToPath } from 'url';
4
+ import buildTraceTree from '../../tools/build_trace_tree.js';
5
+ import { EOL } from 'node:os';
6
+
7
// Absolute directory of this module (ESM provides no __dirname built-in).
const __dirname = dirname( fileURLToPath( import.meta.url ) );

const tempFilesTTL = 1000 * 60 * 60 * 24 * 7; // 1 week in milliseconds

// Retrieves the caller path from the standard args used to start workflows.
// NOTE(review): assumes argv[2] always carries the invocation directory —
// confirm against the worker entrypoint that spawns this process.
const callerDir = process.argv[2];

// Scratch space (inside the package itself) where per-run .trace files
// accumulate one JSON line per event phase.
const tempTraceFilesDir = join( __dirname, 'temp', 'traces' );
15
+
16
// Appends `entry` as one JSON line to the per-run temp trace file, then
// returns every entry accumulated so far for that run (parsed back from disk).
const accumulate = ( { entry, executionContext: { workflowId, startTime } } ) => {
  const traceFile = join( tempTraceFilesDir, `${startTime}_${workflowId}.trace` );
  appendFileSync( traceFile, `${JSON.stringify( entry )}${EOL}`, 'utf-8' );
  const lines = readFileSync( traceFile, 'utf-8' ).split( EOL );
  // The file always ends with EOL, so the final split element is empty — drop it.
  return lines.slice( 0, -1 ).map( line => JSON.parse( line ) );
};
21
+
22
// Deletes accumulated temp trace files older than the TTL. Filenames start
// with the run's startTime in ms, so the prefix doubles as the age marker.
const cleanupOldTempFiles = ( threshold = Date.now() - tempFilesTTL ) => {
  for ( const file of readdirSync( tempTraceFilesDir ) ) {
    if ( +file.split( '_' )[0] < threshold ) {
      rmSync( join( tempTraceFilesDir, file ) );
    }
  }
};
26
+
27
/**
 * Builds the relative folder structure under which a workflow's traces live.
 *
 * @param {string} workflowName - Name of the workflow
 * @returns {string} `runs/<workflowName>` (platform-specific separator)
 */
const resolveTraceFolder = workflowName => {
  return join( 'runs', workflowName );
};
33
+
34
/**
 * Resolves the local file system path for ALL file I/O operations (read/write).
 * Anchored at the caller's project root (argv[2]) under `logs/`.
 *
 * @param {string} workflowName - The name of the workflow
 * @returns {string} The local filesystem path for file operations
 */
const resolveIOPath = workflowName => {
  return join( callerDir, 'logs', resolveTraceFolder( workflowName ) );
};

/**
 * Resolves the file path to be reported as the trace destination.
 *
 * In containerized environments (e.g., Docker) the path seen by the host can
 * differ from the one inside the container, so when OUTPUT_TRACE_HOST_PATH is
 * set it is used as the base for the reported path. Without it, the reported
 * path equals the path used for local writes.
 *
 * @param {string} workflowName - The name of the workflow
 * @returns {string} The path to report, reflecting the actual filesystem
 */
const resolveReportPath = workflowName => {
  const hostPath = process.env.OUTPUT_TRACE_HOST_PATH; // read at call time, not module load
  if ( hostPath ) {
    return join( hostPath, resolveTraceFolder( workflowName ) );
  }
  return resolveIOPath( workflowName );
};
57
+
58
/**
 * Builds the actual trace filename: the run's ISO-8601 timestamp with `:`,
 * `T` and `.` flattened to `-` (filesystem-safe), joined to the workflow id.
 *
 * @param {object} options
 * @param {number} options.startTime
 * @param {string} options.workflowId
 * @returns {string} e.g. `2020-01-02-03-04-05-678Z_<workflowId>.json`
 */
const buildTraceFilename = ( { startTime, workflowId } ) => {
  const safeTimestamp = new Date( startTime )
    .toISOString()
    .replaceAll( ':', '-' )
    .replaceAll( 'T', '-' )
    .replaceAll( '.', '-' );
  return `${safeTimestamp}_${workflowId}.json`;
};
70
+
71
/**
 * Init this processor:
 *
 * Ensures the temp accumulation directory exists and evicts .trace files
 * older than the one-week TTL.
 */
export const init = () => {
  mkdirSync( tempTraceFilesDir, { recursive: true } );
  cleanupOldTempFiles();
};
78
+
79
/**
 * Execute this processor:
 *
 * Persist a trace tree file to the local file system. Every new entry is
 * appended to the temp accumulator, and the full tree file is rebuilt and
 * rewritten from all entries seen so far.
 *
 * @param {object} args
 * @param {object} args.entry - Trace event phase
 * @param {object} args.executionContext - Execution info: workflowId, workflowName, startTime
 * @returns {void}
 */
export const exec = ( { entry, executionContext } ) => {
  const { workflowId, workflowName, startTime } = executionContext;

  const allEntries = accumulate( { entry, executionContext } );
  const traceTree = buildTraceTree( allEntries );

  const targetDir = resolveIOPath( workflowName );
  const targetFile = join( targetDir, buildTraceFilename( { startTime, workflowId } ) );

  mkdirSync( targetDir, { recursive: true } );
  writeFileSync( targetFile, `${JSON.stringify( traceTree, undefined, 2 )}${EOL}`, 'utf-8' );
};
98
+
99
/**
 * Returns where the trace is saved as an absolute path.
 *
 * This uses the optional OUTPUT_TRACE_HOST_PATH to return values relative to the host OS, not the container, if applicable.
 *
 * Note: this only *reports* the path — exec() always writes through the local
 * IO path, regardless of OUTPUT_TRACE_HOST_PATH.
 *
 * @param {object} executionContext
 * @param {number} executionContext.startTime - The start time of the workflow (epoch ms)
 * @param {string} executionContext.workflowId - The id of the workflow execution
 * @param {string} executionContext.workflowName - The name of the workflow
 * @returns {string} The absolute path where the trace will be saved
 */
export const getDestination = ( { startTime, workflowId, workflowName } ) =>
  join( resolveReportPath( workflowName ), buildTraceFilename( { workflowId, startTime } ) );
@@ -0,0 +1,149 @@
1
// Unit tests for the local trace processor.
//
// node:fs is replaced with an in-memory store so no real disk I/O happens,
// and buildTraceTree is stubbed to a counting function. vi.mock factories are
// hoisted above the dynamic `import('./index.js')` done inside each test.
import { describe, it, expect, vi, beforeEach } from 'vitest';

// In-memory fs mock store
const store = { files: new Map() };
const mkdirSyncMock = vi.fn();
const writeFileSyncMock = vi.fn();
const appendFileSyncMock = vi.fn( ( path, data ) => {
  const prev = store.files.get( path ) ?? '';
  store.files.set( path, prev + data );
} );
const readFileSyncMock = vi.fn( path => store.files.get( path ) ?? '' );
const readdirSyncMock = vi.fn( () => [] );
const rmSyncMock = vi.fn();

vi.mock( 'node:fs', () => ( {
  mkdirSync: mkdirSyncMock,
  writeFileSync: writeFileSyncMock,
  appendFileSync: appendFileSyncMock,
  readFileSync: readFileSyncMock,
  readdirSync: readdirSyncMock,
  rmSync: rmSyncMock
} ) );

// Stub tree builder: returns a marker object carrying the entry count.
const buildTraceTreeMock = vi.fn( entries => ( { count: entries.length } ) );
vi.mock( '../../tools/build_trace_tree.js', () => ( { default: buildTraceTreeMock } ) );

describe( 'tracing/processors/local', () => {
  beforeEach( () => {
    // NOTE(review): clearAllMocks resets calls but keeps implementations set
    // via mockReturnValue — the readdirSync stub from the init() test can
    // leak into later tests (harmless here since rmSync is also mocked).
    vi.clearAllMocks();
    store.files.clear();
    // The module under test reads argv[2] as the caller dir at load time.
    process.argv[2] = '/tmp/project';
    delete process.env.OUTPUT_TRACE_HOST_PATH; // Clear OUTPUT_TRACE_HOST_PATH for clean tests
  } );

  it( 'init(): creates temp dir and cleans up old files', async () => {
    const { init } = await import( './index.js' );

    const now = Date.now();
    // One file past the 7-day TTL, one fresh — only the old one is removed.
    readdirSyncMock.mockReturnValue( [ `${now - ( 8 * 24 * 60 * 60 * 1000 )}_old.trace`, `${now}_new.trace` ] );

    init();

    // Should create temp dir relative to module location using __dirname
    expect( mkdirSyncMock ).toHaveBeenCalledWith( expect.stringMatching( /temp\/traces$/ ), { recursive: true } );
    expect( rmSyncMock ).toHaveBeenCalledTimes( 1 );
  } );

  it( 'exec(): accumulates entries and writes aggregated tree', async () => {
    const { exec, init } = await import( './index.js' );
    init();

    const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
    const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };

    exec( { ...ctx, entry: { name: 'A', phase: 'start', timestamp: startTime } } );
    exec( { ...ctx, entry: { name: 'A', phase: 'tick', timestamp: startTime + 1 } } );
    exec( { ...ctx, entry: { name: 'A', phase: 'end', timestamp: startTime + 2 } } );

    // buildTraceTree called with 1, 2, 3 entries respectively
    expect( buildTraceTreeMock ).toHaveBeenCalledTimes( 3 );
    expect( buildTraceTreeMock.mock.calls.at( -1 )[0].length ).toBe( 3 );

    expect( writeFileSyncMock ).toHaveBeenCalledTimes( 3 );
    const [ writtenPath, content ] = writeFileSyncMock.mock.calls.at( -1 );
    // Changed: Now uses process.cwd() + '/logs' fallback when OUTPUT_TRACE_HOST_PATH not set
    expect( writtenPath ).toMatch( /\/runs\/WF\// );
    expect( JSON.parse( content.trim() ).count ).toBe( 3 );
  } );

  it( 'getDestination(): returns absolute path', async () => {
    const { getDestination } = await import( './index.js' );

    const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
    const workflowId = 'workflow-id-123';
    const workflowName = 'test-workflow';

    const destination = getDestination( { startTime, workflowId, workflowName } );

    // Should return an absolute path
    expect( destination ).toMatch( /^\/|^[A-Z]:\\/i ); // Starting with / or Windows drive letter
    expect( destination ).toContain( '/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json' );
  } );

  it( 'exec(): writes to container path regardless of OUTPUT_TRACE_HOST_PATH', async () => {
    const { exec, init } = await import( './index.js' );

    // Set OUTPUT_TRACE_HOST_PATH to simulate Docker environment
    process.env.OUTPUT_TRACE_HOST_PATH = '/host/path/logs';

    init();

    const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
    const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };

    exec( { ...ctx, entry: { name: 'A', phase: 'start', timestamp: startTime } } );

    expect( writeFileSyncMock ).toHaveBeenCalledTimes( 1 );
    const [ writtenPath ] = writeFileSyncMock.mock.calls.at( -1 );

    // Should write to process.cwd()/logs, NOT to OUTPUT_TRACE_HOST_PATH
    expect( writtenPath ).not.toContain( '/host/path/logs' );
    expect( writtenPath ).toMatch( /logs\/runs\/WF\// );
  } );

  it( 'getDestination(): returns OUTPUT_TRACE_HOST_PATH when set', async () => {
    const { getDestination } = await import( './index.js' );

    // Set OUTPUT_TRACE_HOST_PATH to simulate Docker environment
    process.env.OUTPUT_TRACE_HOST_PATH = '/host/path/logs';

    const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
    const workflowId = 'workflow-id-123';
    const workflowName = 'test-workflow';

    const destination = getDestination( { startTime, workflowId, workflowName } );

    // Should return OUTPUT_TRACE_HOST_PATH-based path for reporting
    expect( destination ).toBe( '/host/path/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json' );
  } );

  it( 'separation of write and report paths works correctly', async () => {
    const { exec, getDestination, init } = await import( './index.js' );

    // Set OUTPUT_TRACE_HOST_PATH to simulate Docker environment
    process.env.OUTPUT_TRACE_HOST_PATH = '/Users/ben/project/logs';

    init();

    const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
    const workflowId = 'workflow-id-123';
    const workflowName = 'test-workflow';
    const ctx = { executionContext: { workflowId, workflowName, startTime } };

    // Execute to write file
    exec( { ...ctx, entry: { name: 'A', phase: 'start', timestamp: startTime } } );

    // Get destination for reporting
    const destination = getDestination( { startTime, workflowId, workflowName } );

    // Verify write path is local
    const [ writtenPath ] = writeFileSyncMock.mock.calls.at( -1 );
    expect( writtenPath ).not.toContain( '/Users/ben/project' );
    expect( writtenPath ).toMatch( /logs\/runs\/test-workflow\// );

    // Verify report path uses OUTPUT_TRACE_HOST_PATH
    expect( destination ).toBe( '/Users/ben/project/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json' );
  } );
} );
@@ -0,0 +1,31 @@
1
+ import * as z from 'zod';
2
+
3
// Schema for the environment variables this S3 processor requires. The two
// numeric vars are coerced from their string form and fall back to defaults;
// all others are required plain strings (parse throws when missing).
const envVarSchema = z.object( {
  OUTPUT_AWS_REGION: z.string(),
  OUTPUT_AWS_ACCESS_KEY_ID: z.string(),
  OUTPUT_AWS_SECRET_ACCESS_KEY: z.string(),
  OUTPUT_TRACE_REMOTE_S3_BUCKET: z.string(),
  OUTPUT_REDIS_URL: z.string(),
  OUTPUT_REDIS_TRACE_TTL: z.coerce.number().int().positive().default( 60 * 60 * 24 * 7 ), // 7 days
  OUTPUT_TRACE_UPLOAD_DELAY_MS: z.coerce.number().int().nonnegative().default( 10_000 ) // 10s
} );
12
+
13
// Module-level cache of parsed config, populated by loadEnv() and handed out
// by getVars(). The same object reference is mutated on every loadEnv() call.
const env = {};

/**
 * Validates process.env against the schema and caches the parsed values
 * under friendly camelCase names.
 *
 * Throws when a required variable is missing or a numeric one is invalid.
 */
export const loadEnv = () => {
  const parsed = envVarSchema.parse( process.env );
  Object.assign( env, {
    awsRegion: parsed.OUTPUT_AWS_REGION,
    awsAccessKeyId: parsed.OUTPUT_AWS_ACCESS_KEY_ID,
    awsSecretAccessKey: parsed.OUTPUT_AWS_SECRET_ACCESS_KEY,
    remoteS3Bucket: parsed.OUTPUT_TRACE_REMOTE_S3_BUCKET,
    redisUrl: parsed.OUTPUT_REDIS_URL,
    redisIncompleteWorkflowsTTL: parsed.OUTPUT_REDIS_TRACE_TTL,
    traceUploadDelayMs: parsed.OUTPUT_TRACE_UPLOAD_DELAY_MS
  } );
};
25
+
26
/**
 * Returns the cached, parsed env vars.
 *
 * Throws when loadEnv() has not populated the cache yet, so callers fail
 * loudly instead of reading undefined config values.
 */
export const getVars = () => {
  const isLoaded = Object.keys( env ).length > 0;
  if ( !isLoaded ) {
    throw new Error( 'Env vars not loaded. Use loadEnv() first.' );
  }
  return env;
};
@@ -0,0 +1,64 @@
1
// Unit tests for tracing/processors/s3/configs.
//
// The module caches parsed env in module-level state, so each test re-imports
// a fresh copy via loadModule() — vi.resetModules() busts the module cache.
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';

// Returns a freshly-evaluated copy of the module (empty env cache).
async function loadModule() {
  vi.resetModules();
  return import( './configs.js' );
}

describe( 'tracing/processors/s3/configs', () => {
  // Minimum set of env vars the schema requires (the rest have defaults).
  const required = {
    OUTPUT_AWS_REGION: 'us-east-1',
    OUTPUT_AWS_ACCESS_KEY_ID: 'id',
    OUTPUT_AWS_SECRET_ACCESS_KEY: 'sek',
    OUTPUT_TRACE_REMOTE_S3_BUCKET: 'bkt',
    OUTPUT_REDIS_URL: 'redis://localhost:6379'
  };

  beforeEach( () => {
    vi.stubEnv( 'OUTPUT_AWS_REGION', required.OUTPUT_AWS_REGION );
    vi.stubEnv( 'OUTPUT_AWS_ACCESS_KEY_ID', required.OUTPUT_AWS_ACCESS_KEY_ID );
    vi.stubEnv( 'OUTPUT_AWS_SECRET_ACCESS_KEY', required.OUTPUT_AWS_SECRET_ACCESS_KEY );
    vi.stubEnv( 'OUTPUT_TRACE_REMOTE_S3_BUCKET', required.OUTPUT_TRACE_REMOTE_S3_BUCKET );
    vi.stubEnv( 'OUTPUT_REDIS_URL', required.OUTPUT_REDIS_URL );
  } );

  afterEach( () => {
    vi.unstubAllEnvs();
  } );

  it( 'loadEnv() throws when required env vars are missing', async () => {
    // Stubbing to undefined removes the variable for this test's duration;
    // the thrown ZodError message names the offending key.
    vi.stubEnv( 'OUTPUT_REDIS_URL', undefined );
    const { loadEnv } = await loadModule();
    expect( () => loadEnv() ).toThrow( /OUTPUT_REDIS_URL/ );
  } );

  it( 'loadEnv() populates getVars() with parsed env', async () => {
    const { loadEnv, getVars } = await loadModule();
    loadEnv();
    const vars = getVars();
    expect( vars.awsRegion ).toBe( required.OUTPUT_AWS_REGION );
    expect( vars.awsAccessKeyId ).toBe( required.OUTPUT_AWS_ACCESS_KEY_ID );
    expect( vars.awsSecretAccessKey ).toBe( required.OUTPUT_AWS_SECRET_ACCESS_KEY );
    expect( vars.remoteS3Bucket ).toBe( required.OUTPUT_TRACE_REMOTE_S3_BUCKET );
    expect( vars.redisUrl ).toBe( required.OUTPUT_REDIS_URL );
    // Default TTL (7 days in seconds) applies when the var is unset.
    expect( vars.redisIncompleteWorkflowsTTL ).toBe( 60 * 60 * 24 * 7 );
  } );

  it( 'loadEnv() uses OUTPUT_REDIS_TRACE_TTL when set', async () => {
    vi.stubEnv( 'OUTPUT_REDIS_TRACE_TTL', '3600' );
    const { loadEnv, getVars } = await loadModule();
    loadEnv();
    // String env value is coerced to a number by the schema.
    expect( getVars().redisIncompleteWorkflowsTTL ).toBe( 3600 );
  } );

  it( 'getVars() throws when loadEnv() was not called', async () => {
    const { getVars } = await loadModule();
    expect( () => getVars() ).toThrow( 'Env vars not loaded. Use loadEnv() first.' );
  } );

  it( 'loadEnv() throws when OUTPUT_REDIS_TRACE_TTL is invalid', async () => {
    vi.stubEnv( 'OUTPUT_REDIS_TRACE_TTL', 'not-a-number' );
    const { loadEnv } = await loadModule();
    expect( () => loadEnv() ).toThrow();
  } );
} );