@outputai/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +11 -0
- package/bin/healthcheck.mjs +36 -0
- package/bin/healthcheck.spec.js +90 -0
- package/bin/worker.sh +26 -0
- package/package.json +67 -0
- package/src/activity_integration/context.d.ts +27 -0
- package/src/activity_integration/context.js +17 -0
- package/src/activity_integration/context.spec.js +42 -0
- package/src/activity_integration/events.d.ts +7 -0
- package/src/activity_integration/events.js +10 -0
- package/src/activity_integration/index.d.ts +9 -0
- package/src/activity_integration/index.js +3 -0
- package/src/activity_integration/tracing.d.ts +32 -0
- package/src/activity_integration/tracing.js +37 -0
- package/src/async_storage.js +19 -0
- package/src/bus.js +3 -0
- package/src/consts.js +32 -0
- package/src/errors.d.ts +15 -0
- package/src/errors.js +14 -0
- package/src/hooks/index.d.ts +28 -0
- package/src/hooks/index.js +32 -0
- package/src/index.d.ts +49 -0
- package/src/index.js +4 -0
- package/src/interface/evaluation_result.d.ts +173 -0
- package/src/interface/evaluation_result.js +215 -0
- package/src/interface/evaluator.d.ts +70 -0
- package/src/interface/evaluator.js +34 -0
- package/src/interface/evaluator.spec.js +565 -0
- package/src/interface/index.d.ts +9 -0
- package/src/interface/index.js +26 -0
- package/src/interface/step.d.ts +138 -0
- package/src/interface/step.js +22 -0
- package/src/interface/types.d.ts +27 -0
- package/src/interface/validations/runtime.js +20 -0
- package/src/interface/validations/runtime.spec.js +29 -0
- package/src/interface/validations/schema_utils.js +8 -0
- package/src/interface/validations/schema_utils.spec.js +67 -0
- package/src/interface/validations/static.js +136 -0
- package/src/interface/validations/static.spec.js +366 -0
- package/src/interface/webhook.d.ts +84 -0
- package/src/interface/webhook.js +64 -0
- package/src/interface/webhook.spec.js +122 -0
- package/src/interface/workflow.d.ts +273 -0
- package/src/interface/workflow.js +128 -0
- package/src/interface/workflow.spec.js +467 -0
- package/src/interface/workflow_context.js +31 -0
- package/src/interface/workflow_utils.d.ts +76 -0
- package/src/interface/workflow_utils.js +50 -0
- package/src/interface/workflow_utils.spec.js +190 -0
- package/src/interface/zod_integration.spec.js +646 -0
- package/src/internal_activities/index.js +66 -0
- package/src/internal_activities/index.spec.js +102 -0
- package/src/logger.js +73 -0
- package/src/tracing/internal_interface.js +71 -0
- package/src/tracing/processors/local/index.js +111 -0
- package/src/tracing/processors/local/index.spec.js +149 -0
- package/src/tracing/processors/s3/configs.js +31 -0
- package/src/tracing/processors/s3/configs.spec.js +64 -0
- package/src/tracing/processors/s3/index.js +114 -0
- package/src/tracing/processors/s3/index.spec.js +153 -0
- package/src/tracing/processors/s3/redis_client.js +62 -0
- package/src/tracing/processors/s3/redis_client.spec.js +185 -0
- package/src/tracing/processors/s3/s3_client.js +27 -0
- package/src/tracing/processors/s3/s3_client.spec.js +62 -0
- package/src/tracing/tools/build_trace_tree.js +83 -0
- package/src/tracing/tools/build_trace_tree.spec.js +135 -0
- package/src/tracing/tools/utils.js +21 -0
- package/src/tracing/tools/utils.spec.js +14 -0
- package/src/tracing/trace_engine.js +97 -0
- package/src/tracing/trace_engine.spec.js +199 -0
- package/src/utils/index.d.ts +134 -0
- package/src/utils/index.js +2 -0
- package/src/utils/resolve_invocation_dir.js +34 -0
- package/src/utils/resolve_invocation_dir.spec.js +102 -0
- package/src/utils/utils.js +211 -0
- package/src/utils/utils.spec.js +448 -0
- package/src/worker/bundler_options.js +43 -0
- package/src/worker/catalog_workflow/catalog.js +114 -0
- package/src/worker/catalog_workflow/index.js +54 -0
- package/src/worker/catalog_workflow/index.spec.js +196 -0
- package/src/worker/catalog_workflow/workflow.js +24 -0
- package/src/worker/configs.js +49 -0
- package/src/worker/configs.spec.js +130 -0
- package/src/worker/index.js +89 -0
- package/src/worker/index.spec.js +177 -0
- package/src/worker/interceptors/activity.js +62 -0
- package/src/worker/interceptors/activity.spec.js +212 -0
- package/src/worker/interceptors/workflow.js +70 -0
- package/src/worker/interceptors/workflow.spec.js +167 -0
- package/src/worker/interceptors.js +10 -0
- package/src/worker/loader.js +151 -0
- package/src/worker/loader.spec.js +236 -0
- package/src/worker/loader_tools.js +132 -0
- package/src/worker/loader_tools.spec.js +156 -0
- package/src/worker/log_hooks.js +95 -0
- package/src/worker/log_hooks.spec.js +217 -0
- package/src/worker/sandboxed_utils.js +18 -0
- package/src/worker/shutdown.js +26 -0
- package/src/worker/shutdown.spec.js +82 -0
- package/src/worker/sinks.js +74 -0
- package/src/worker/start_catalog.js +36 -0
- package/src/worker/start_catalog.spec.js +118 -0
- package/src/worker/webpack_loaders/consts.js +9 -0
- package/src/worker/webpack_loaders/tools.js +548 -0
- package/src/worker/webpack_loaders/tools.spec.js +330 -0
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.js +221 -0
- package/src/worker/webpack_loaders/workflow_rewriter/collect_target_imports.spec.js +336 -0
- package/src/worker/webpack_loaders/workflow_rewriter/index.mjs +61 -0
- package/src/worker/webpack_loaders/workflow_rewriter/index.spec.js +216 -0
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.js +196 -0
- package/src/worker/webpack_loaders/workflow_rewriter/rewrite_fn_bodies.spec.js +123 -0
- package/src/worker/webpack_loaders/workflow_validator/index.mjs +205 -0
- package/src/worker/webpack_loaders/workflow_validator/index.spec.js +613 -0
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { upload } from './s3_client.js';
|
|
2
|
+
import { getRedisClient } from './redis_client.js';
|
|
3
|
+
import buildTraceTree from '../../tools/build_trace_tree.js';
|
|
4
|
+
import { EOL } from 'node:os';
|
|
5
|
+
import { loadEnv, getVars } from './configs.js';
|
|
6
|
+
import { createChildLogger } from '#logger';
|
|
7
|
+
|
|
8
|
+
const log = createChildLogger( 'S3 Processor' );
|
|
9
|
+
|
|
10
|
+
/**
 * Build the Redis key under which a workflow's trace entries accumulate.
 * @param {object} args
 * @param {string} args.workflowId
 * @param {string} args.workflowName
 * @returns {string} Key of the form `traces/<name>/<id>`
 */
const createRedisKey = ( { workflowId, workflowName } ) => {
    return [ 'traces', workflowName, workflowId ].join( '/' );
};
|
|
11
|
+
|
|
12
|
+
/**
 * Append one trace entry to the sorted set stored under `key`.
 *
 * The insert and the TTL refresh are issued inside a single MULTI so an
 * incomplete workflow's entries always carry a fresh expiry.
 * Entries are scored by their timestamp; NX keeps duplicates out.
 * @param {object} entry - Trace entry (must carry a numeric `timestamp`)
 * @param {string} key - Redis key of the workflow's entry list
 */
const addEntry = async ( entry, key ) => {
    const client = await getRedisClient();
    const member = { score: entry.timestamp, value: JSON.stringify( entry ) };
    await client
        .multi()
        .zAdd( key, [ member ], { NX: true } )
        .expire( key, getVars().redisIncompleteWorkflowsTTL )
        .exec();
};
|
|
24
|
+
|
|
25
|
+
/**
 * Read every cached trace entry for the key, in score (timestamp) order.
 * @param {string} key
 * @returns {Promise<object[]>} Parsed entries
 */
const getEntries = async key => {
    const client = await getRedisClient();
    const rawEntries = await client.zRange( key, 0, -1 );
    return rawEntries.map( raw => JSON.parse( raw ) );
};
|
|
35
|
+
|
|
36
|
+
/**
 * Drop every cached trace entry stored under the key.
 * @param {string} key
 */
const bustEntries = async key => {
    const client = await getRedisClient();
    await client.del( key );
};
|
|
44
|
+
|
|
45
|
+
/**
 * Build the S3 object key for a trace file:
 * `<workflowName>/<year>/<month>/<day>/<timestamp>_<workflowId>.json`.
 * @param {object} args
 * @param {number} args.startTime - Workflow start, epoch milliseconds
 * @param {string} args.workflowId
 * @param {string} args.workflowName
 * @returns {string} The S3 key
 */
const getS3Key = ( { startTime, workflowId, workflowName } ) => {
    const isoDate = new Date( startTime ).toISOString();
    // First three non-digit-separated fields of an ISO date are year, month, day.
    const [ year, month, day ] = isoDate.split( /\D/, 3 );
    // Flatten the ISO string into a filesystem/S3-safe timestamp.
    const fileTimestamp = isoDate.replace( /[:T.]/g, '-' );
    return [ workflowName, year, month, day, `${fileTimestamp}_${workflowId}.json` ].join( '/' );
};
|
|
59
|
+
|
|
60
|
+
/**
 * Initialize this processor: load configuration and eagerly open the Redis
 * connection so the first trace entry does not pay the connect latency.
 */
export async function init() {
    loadEnv();
    await getRedisClient();
}
|
|
67
|
+
|
|
68
|
+
/**
 * Execute this processor: cache every entry, and once the root workflow
 * finishes, assemble the complete trace tree and upload it to S3.
 *
 * @param {object} args
 * @param {object} args.entry - Trace event phase
 * @param {object} args.executionContext - Execution info: workflowId, workflowName, startTime
 */
export const exec = async ( { entry, executionContext } ) => {
    const { workflowName, workflowId, startTime } = executionContext;
    const cacheKey = createRedisKey( { workflowId, workflowName } );

    await addEntry( entry, cacheKey );

    // Only a terminal phase (end/error) of the root workflow triggers the upload.
    if ( entry.id !== workflowId || entry.phase === 'start' ) {
        return;
    }

    // Wait for straggler entries from other workers to land in Redis before uploading.
    const { traceUploadDelayMs } = getVars();
    if ( traceUploadDelayMs > 0 ) {
        await new Promise( resolve => setTimeout( resolve, traceUploadDelayMs ) );
    }

    const traceTree = buildTraceTree( await getEntries( cacheKey ) );
    // An incomplete tree comes back as null; discard it rather than upload garbage.
    if ( !traceTree ) {
        log.warn( 'Incomplete trace file discarded', { workflowId, error: 'incomplete_trace_file' } );
        return;
    }

    await upload( {
        key: getS3Key( { workflowId, workflowName, startTime } ),
        content: JSON.stringify( traceTree, undefined, 2 ) + EOL
    } );
    await bustEntries( cacheKey );
};
|
|
104
|
+
|
|
105
|
+
/**
 * Returns where the trace is saved.
 * @param {object} executionContext
 * @param {string} executionContext.startTime - The start time of the workflow
 * @param {string} executionContext.workflowId - The id of the workflow execution
 * @param {string} executionContext.workflowName - The name of the workflow
 * @returns {string} The S3 url of the trace file
 */
export const getDestination = ( { startTime, workflowId, workflowName } ) => {
    const key = getS3Key( { workflowId, workflowName, startTime } );
    return `https://${getVars().remoteS3Bucket}.s3.amazonaws.com/${key}`;
};
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
|
|
3
|
+
// Unit tests for the S3 trace processor (./index.js).
// Every collaborator (configs, redis client, s3 client, build_trace_tree) is
// mocked, and fake timers drive the straggler-upload delay deterministically.
// NOTE: the vi.mock factories reference these consts safely because the module
// under test is only imported dynamically inside each `it`.

const loadEnvMock = vi.fn();
const getVarsMock = vi.fn( () => ( {
    remoteS3Bucket: 'bkt',
    redisIncompleteWorkflowsTTL: 3600
} ) );
vi.mock( './configs.js', () => ( { loadEnv: loadEnvMock, getVars: getVarsMock } ) );

// Chainable multi() stub: zAdd/expire return `this` so .exec() terminates the chain.
const redisMulti = {
    zAdd: vi.fn().mockReturnThis(),
    expire: vi.fn().mockReturnThis(),
    exec: vi.fn()
};
const zRangeMock = vi.fn();
const delMock = vi.fn().mockResolvedValue( undefined );
const getRedisClientMock = vi.fn( async () => ( {
    multi: () => redisMulti,
    zRange: zRangeMock,
    del: delMock
} ) );
vi.mock( './redis_client.js', () => ( { getRedisClient: getRedisClientMock } ) );

const uploadMock = vi.fn();
vi.mock( './s3_client.js', () => ( { upload: uploadMock } ) );

// Stand-in tree builder: returns an object carrying the entry count so tests can
// assert on the uploaded content without depending on the real algorithm.
const buildTraceTreeMock = vi.fn( entries => ( { count: entries.length } ) );
vi.mock( '../../tools/build_trace_tree.js', () => ( { default: buildTraceTreeMock } ) );

describe( 'tracing/processors/s3', () => {
    beforeEach( () => {
        vi.useFakeTimers();
        vi.clearAllMocks();
        getVarsMock.mockReturnValue( { remoteS3Bucket: 'bkt', redisIncompleteWorkflowsTTL: 3600, traceUploadDelayMs: 10_000 } );
    } );

    afterEach( () => {
        vi.useRealTimers();
    } );

    it( 'init(): loads config and ensures redis client is created', async () => {
        const { init } = await import( './index.js' );
        await init();
        expect( loadEnvMock ).toHaveBeenCalledTimes( 1 );
        expect( getRedisClientMock ).toHaveBeenCalledTimes( 1 );
    } );

    it( 'exec(): accumulates via redis, uploads only on root workflow end', async () => {
        const { exec } = await import( './index.js' );
        const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
        const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };

        redisMulti.exec.mockResolvedValue( [] );

        const workflowStart = { id: 'id1', name: 'WF', kind: 'workflow', phase: 'start', details: {}, timestamp: startTime };
        const activityStart = { id: 'act-1', name: 'DoSomething', kind: 'step', parentId: 'id1', phase: 'start', details: {}, timestamp: startTime + 1 };
        const workflowEnd = { id: 'id1', phase: 'end', details: { ok: true }, timestamp: startTime + 2 };
        zRangeMock.mockResolvedValue( [
            JSON.stringify( workflowStart ),
            JSON.stringify( activityStart ),
            JSON.stringify( workflowEnd )
        ] );

        await exec( { ...ctx, entry: workflowStart } );
        await exec( { ...ctx, entry: activityStart } );
        // Root end: id matches workflowId and not start — triggers the 10s delay before upload
        const endPromise = exec( { ...ctx, entry: workflowEnd } );
        await vi.advanceTimersByTimeAsync( 10_000 );
        await endPromise;

        expect( redisMulti.zAdd ).toHaveBeenCalledTimes( 3 );
        expect( buildTraceTreeMock ).toHaveBeenCalledTimes( 1 );
        expect( zRangeMock ).toHaveBeenCalledTimes( 1 );
        expect( uploadMock ).toHaveBeenCalledTimes( 1 );
        const { key, content } = uploadMock.mock.calls[0][0];
        expect( key ).toMatch( /^WF\/2020\/01\/02\// );
        expect( JSON.parse( content.trim() ).count ).toBe( 3 );
        expect( delMock ).toHaveBeenCalledTimes( 1 );
        expect( delMock ).toHaveBeenCalledWith( 'traces/WF/id1' );
    } );

    it( 'getDestination(): returns S3 URL using bucket and key from getVars', async () => {
        getVarsMock.mockReturnValue( { remoteS3Bucket: 'my-bucket', redisIncompleteWorkflowsTTL: 3600, traceUploadDelayMs: 10_000 } );
        const { getDestination } = await import( './index.js' );
        const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
        const url = getDestination( { workflowId: 'id1', workflowName: 'WF', startTime } );
        expect( getVarsMock ).toHaveBeenCalled();
        expect( url ).toBe(
            'https://my-bucket.s3.amazonaws.com/WF/2020/01/02/2020-01-02-03-04-05-678Z_id1.json'
        );
    } );

    it( 'exec(): sets expiry on the redis key for each entry', async () => {
        const { exec } = await import( './index.js' );
        const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
        const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };

        redisMulti.exec.mockResolvedValue( [] );
        const workflowStart = {
            kind: 'workflow', id: 'id1', name: 'WF', parentId: undefined, phase: 'start', details: {}, timestamp: startTime
        };
        zRangeMock.mockResolvedValue( [ JSON.stringify( workflowStart ) ] );

        await exec( { ...ctx, entry: workflowStart } );

        expect( redisMulti.expire ).toHaveBeenCalledTimes( 1 );
        expect( redisMulti.expire ).toHaveBeenCalledWith( 'traces/WF/id1', 3600 );
    } );

    it( 'exec(): does not treat a non-root end (e.g. step without parentId) as root workflow end — regression for wrong root detection', async () => {
        const { exec } = await import( './index.js' );
        const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
        const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };

        redisMulti.exec.mockResolvedValue( [] );
        const workflowStart = { id: 'id1', name: 'WF', kind: 'workflow', phase: 'start', details: {}, timestamp: startTime };
        const stepEndNoParent = { id: 'step-1', phase: 'end', details: { done: true }, timestamp: startTime + 1 };
        zRangeMock.mockResolvedValue( [
            JSON.stringify( workflowStart ),
            JSON.stringify( stepEndNoParent )
        ] );

        await exec( { ...ctx, entry: workflowStart } );
        await exec( { ...ctx, entry: stepEndNoParent } );

        expect( redisMulti.zAdd ).toHaveBeenCalledTimes( 2 );
        expect( buildTraceTreeMock ).not.toHaveBeenCalled();
        expect( uploadMock ).not.toHaveBeenCalled();
        expect( delMock ).not.toHaveBeenCalled();
    } );

    it( 'exec(): when buildTraceTree returns null (incomplete tree), does not upload or bust cache', async () => {
        const { exec } = await import( './index.js' );
        const startTime = Date.parse( '2020-01-02T03:04:05.678Z' );
        const ctx = { executionContext: { workflowId: 'id1', workflowName: 'WF', startTime } };

        redisMulti.exec.mockResolvedValue( [] );
        const workflowEnd = {
            kind: 'workflow', id: 'id1', name: 'WF', parentId: undefined, phase: 'end', details: {}, timestamp: startTime
        };
        zRangeMock.mockResolvedValue( [ JSON.stringify( workflowEnd ) ] );
        buildTraceTreeMock.mockReturnValueOnce( null );

        const endPromise = exec( { ...ctx, entry: workflowEnd } );
        await vi.advanceTimersByTimeAsync( 10_000 );
        await endPromise;

        expect( buildTraceTreeMock ).toHaveBeenCalledTimes( 1 );
        expect( uploadMock ).not.toHaveBeenCalled();
        expect( delMock ).not.toHaveBeenCalled();
    } );
} );
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { createClient } from 'redis';
|
|
2
|
+
import { createChildLogger } from '#logger';
|
|
3
|
+
import { getVars } from './configs.js';
|
|
4
|
+
|
|
5
|
+
const log = createChildLogger( 'RedisClient' );
|
|
6
|
+
|
|
7
|
+
const state = {
|
|
8
|
+
client: null,
|
|
9
|
+
connectPromise: null
|
|
10
|
+
};
|
|
11
|
+
|
|
12
|
+
/**
 * Open a fresh Redis connection, quitting any previously cached client first.
 * On success the new client is cached on `state` and returned.
 * @param {string} url - Redis connection URL
 * @returns {Promise<object>} The connected client
 * @throws {Error} Wrapped connection failure; original error kept as `cause`
 */
async function connect( url ) {
    const stale = state.client;
    if ( stale ) {
        log.warn( 'Closing stale Redis client before reconnecting' );
        try {
            await stale.quit();
        } catch ( quitErr ) {
            // Best effort: a client we are replacing may already be dead.
            log.warn( 'Failed to quit stale Redis client', { error: quitErr.message } );
        }
        state.client = null;
    }

    const client = createClient( { url, socket: { keepAlive: 15000 } } );
    try {
        await client.connect();
        state.client = client;
        return client;
    } catch ( err ) {
        // Release the half-open client, then surface a contextual error.
        await client.quit().catch( () => {} );
        throw new Error( `Failed to connect to Redis: ${err.message} (${err.code || 'UNKNOWN'})`, { cause: err } );
    }
}
|
|
30
|
+
|
|
31
|
+
/**
 * Return a connected Redis instance with automatic reconnection.
 *
 * Performs health check on cached client via ping(). If healthy, returns cached
 * instance. Otherwise, closes stale client before creating new connection.
 * Concurrent calls during connection will receive the same pending promise.
 *
 * @returns {Promise<redis.RedisClientType>} Connected Redis client
 * @throws {Error} If connection fails (wrapped with context)
 */
export async function getRedisClient() {
    const url = getVars().redisUrl;

    // Optional chaining: with no cached client there is no ping — pingResult stays
    // undefined and we fall through to (re)connect. A failed ping resolves to null
    // (logged) instead of throwing, which likewise forces a reconnect below.
    const pingResult = await state.client?.ping().catch( err => {
        log.error( 'Redis ping failed', { error: err.message, code: err.code } );
        return null;
    } );

    if ( pingResult === 'PONG' ) {
        return state.client;
    }

    // De-duplicate concurrent reconnects: callers share the in-flight attempt.
    if ( state.connectPromise ) {
        return state.connectPromise;
    }

    // Clear the slot whether connect() resolves or rejects, so a later call can retry.
    state.connectPromise = connect( url ).finally( () => {
        state.connectPromise = null;
    } );

    return state.connectPromise;
}
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
|
|
3
|
+
// Unit tests for the cached/reconnecting Redis client wrapper.
// The `redis` package, config, and logger are mocked; loadModule() reloads the
// module between tests so each test starts with an empty client cache.

vi.mock( '#utils', () => ( {
    throws: e => {
        throw e;
    }
} ) );

// Capture log output so tests can assert on warn/error calls.
const logCalls = { warn: [], error: [] };
vi.mock( '#logger', () => ( {
    createChildLogger: () => ( {
        warn: ( ...args ) => logCalls.warn.push( args ),
        error: ( ...args ) => logCalls.error.push( args )
    } )
} ) );

const getVarsMock = vi.fn();
vi.mock( './configs.js', () => ( { getVars: () => getVarsMock() } ) );

const createClientImpl = vi.fn();
vi.mock( 'redis', () => ( { createClient: opts => createClientImpl( opts ) } ) );

// Re-import the module with a fresh module registry, resetting its cached state.
async function loadModule() {
    vi.resetModules();
    return import( './redis_client.js' );
}

describe( 'tracing/processors/s3/redis_client', () => {
    beforeEach( () => {
        vi.clearAllMocks();
        getVarsMock.mockReturnValue( {} );
        logCalls.warn = [];
        logCalls.error = [];
    } );

    afterEach( () => {
        vi.useRealTimers();
    } );

    it( 'throws when config redisUrl is missing', async () => {
        getVarsMock.mockReturnValue( {} );
        const { getRedisClient } = await loadModule();
        await expect( getRedisClient() ).rejects.toThrow();
    } );

    it( 'creates client with url, connects once, then reuses cached when ping is PONG', async () => {
        getVarsMock.mockReturnValue( { redisUrl: 'redis://localhost:6379' } );

        const pingMock = vi.fn().mockResolvedValue( 'PONG' );
        const connectMock = vi.fn().mockResolvedValue();
        const created = [];
        createClientImpl.mockImplementation( opts => {
            created.push( opts );
            return { connect: connectMock, ping: pingMock };
        } );

        const { getRedisClient } = await loadModule();

        const c1 = await getRedisClient();
        const c2 = await getRedisClient();

        expect( created ).toHaveLength( 1 );
        expect( connectMock ).toHaveBeenCalledTimes( 1 );
        expect( pingMock ).toHaveBeenCalledTimes( 1 );
        expect( c1 ).toBe( c2 );
        expect( created[0] ).toMatchObject( { url: 'redis://localhost:6379', socket: { keepAlive: 15000 } } );
    } );

    it( 'closes stale client and reconnects when ping fails', async () => {
        getVarsMock.mockReturnValue( { redisUrl: 'redis://localhost:6379' } );

        const quitMock = vi.fn().mockResolvedValue();
        const connectMock = vi.fn().mockResolvedValue();
        // PONG on the 2nd call (cache hit); rejection on the 3rd forces a reconnect.
        const pingMock = vi.fn()
            .mockResolvedValueOnce( 'PONG' )
            .mockRejectedValueOnce( new Error( 'Connection lost' ) )
            .mockResolvedValueOnce( 'PONG' );

        const created = [];
        createClientImpl.mockImplementation( opts => {
            created.push( opts );
            return { connect: connectMock, ping: pingMock, quit: quitMock };
        } );

        const { getRedisClient } = await loadModule();

        const c1 = await getRedisClient();
        const c2 = await getRedisClient();
        expect( c1 ).toBe( c2 );
        expect( created ).toHaveLength( 1 );

        const c3 = await getRedisClient();
        expect( quitMock ).toHaveBeenCalledTimes( 1 );
        expect( created ).toHaveLength( 2 );
        expect( c3 ).not.toBe( c1 );
    } );

    it( 'reconnects successfully even when quit() on stale client rejects', async () => {
        getVarsMock.mockReturnValue( { redisUrl: 'redis://localhost:6379' } );

        const quitMock = vi.fn().mockRejectedValue( new Error( 'Quit failed' ) );
        const connectMock = vi.fn().mockResolvedValue();
        const pingMock = vi.fn()
            .mockResolvedValueOnce( 'PONG' )
            .mockRejectedValueOnce( new Error( 'Connection lost' ) )
            .mockResolvedValueOnce( 'PONG' );

        const created = [];
        createClientImpl.mockImplementation( opts => {
            created.push( opts );
            return { connect: connectMock, ping: pingMock, quit: quitMock };
        } );

        const { getRedisClient } = await loadModule();

        const c1 = await getRedisClient();
        const c1again = await getRedisClient();
        expect( c1 ).toBe( c1again );
        expect( created ).toHaveLength( 1 );

        const c2 = await getRedisClient();
        expect( quitMock ).toHaveBeenCalledTimes( 1 );
        expect( created ).toHaveLength( 2 );
        expect( c2 ).not.toBe( c1 );
    } );

    it( 'wraps connect() errors with code and cleans up failed client', async () => {
        getVarsMock.mockReturnValue( { redisUrl: 'redis://localhost:6379' } );

        const connectErr = new Error( 'Connection refused' );
        connectErr.code = 'ECONNREFUSED';
        const connectMock = vi.fn().mockRejectedValue( connectErr );
        const quitMock = vi.fn().mockResolvedValue();

        createClientImpl.mockImplementation( () => ( {
            connect: connectMock,
            quit: quitMock
        } ) );

        const { getRedisClient } = await loadModule();

        try {
            await getRedisClient();
            expect.fail( 'Should have thrown' );
        } catch ( err ) {
            expect( err.message ).toBe( 'Failed to connect to Redis: Connection refused (ECONNREFUSED)' );
            expect( err.cause ).toBe( connectErr );
        }

        expect( quitMock ).toHaveBeenCalledTimes( 1 );
    } );

    it( 'logs ping failures with error level', async () => {
        getVarsMock.mockReturnValue( { redisUrl: 'redis://localhost:6379' } );

        const pingErr = new Error( 'Connection reset' );
        pingErr.code = 'ECONNRESET';
        const pingMock = vi.fn()
            .mockResolvedValueOnce( 'PONG' )
            .mockRejectedValueOnce( pingErr )
            .mockResolvedValueOnce( 'PONG' );
        const connectMock = vi.fn().mockResolvedValue();
        const quitMock = vi.fn().mockResolvedValue();

        createClientImpl.mockImplementation( () => ( {
            connect: connectMock,
            ping: pingMock,
            quit: quitMock
        } ) );

        const { getRedisClient } = await loadModule();

        // First call: state.client is null, creates client (no ping)
        await getRedisClient();
        // Second call: pings existing client, returns PONG
        await getRedisClient();
        // Third call: pings existing client, fails with pingErr, reconnects
        await getRedisClient();

        expect( logCalls.error ).toContainEqual( [
            'Redis ping failed',
            { error: 'Connection reset', code: 'ECONNRESET' }
        ] );
    } );
} );
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
|
|
2
|
+
import { getVars } from './configs.js';
|
|
3
|
+
|
|
4
|
+
// Module-level cache: a single S3 client is shared by every upload.
const state = { s3Client: null };

/**
 * Return the lazily-created, cached S3 Client instance.
 * @returns {S3Client}
 */
const getS3Client = () => {
    if ( !state.s3Client ) {
        const { awsRegion, awsSecretAccessKey, awsAccessKeyId } = getVars();
        state.s3Client = new S3Client( {
            region: awsRegion,
            credentials: { accessKeyId: awsAccessKeyId, secretAccessKey: awsSecretAccessKey }
        } );
    }
    return state.s3Client;
};
|
|
19
|
+
|
|
20
|
+
/**
 * Upload given file to S3.
 * @param {object} args
 * @param {string} args.key - S3 file key
 * @param {string} args.content - File content
 * @returns {Promise<object>} The PutObject response
 */
export const upload = ( { key, content } ) => {
    const command = new PutObjectCommand( { Bucket: getVars().remoteS3Bucket, Key: key, Body: content } );
    return getS3Client().send( command );
};
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
2
|
+
|
|
3
|
+
// Unit tests for the cached S3 client wrapper.
// The AWS SDK is replaced with lightweight stand-ins that record constructor
// arguments and the commands passed to send().

vi.mock( '#utils', () => ( {
    throws: e => {
        throw e;
    }
} ) );

const getVarsMock = vi.fn();
vi.mock( './configs', () => ( { getVars: () => getVarsMock() } ) );

const sendMock = vi.fn();
// Records the latest S3Client constructor arguments for assertions.
const ctorState = { args: null };
class S3ClientMock {
    constructor( args ) {
        ctorState.args = args;
    }
    send = sendMock;
}
class PutObjectCommandMock {
    constructor( input ) {
        this.input = input;
    }
}

vi.mock( '@aws-sdk/client-s3', () => ( {
    S3Client: S3ClientMock,
    PutObjectCommand: PutObjectCommandMock
} ) );

// Re-import the module with a fresh module registry, resetting the cached client.
async function loadModule() {
    vi.resetModules();
    return import( './s3_client.js' );
}

describe( 'tracing/processors/s3/s3_client', () => {
    beforeEach( () => {
        vi.clearAllMocks();
        getVarsMock.mockReturnValue( {
            awsRegion: 'us-east-1',
            awsAccessKeyId: 'id',
            awsSecretAccessKey: 'sek',
            remoteS3Bucket: 'bucket'
        } );
    } );

    it( 'creates client once with config and uploads with bucket/key/content', async () => {
        const { upload } = await loadModule();

        await upload( { key: 'wf/key.json', content: '{"a":1}' } );

        expect( ctorState.args ).toEqual( { region: 'us-east-1', credentials: { secretAccessKey: 'sek', accessKeyId: 'id' } } );
        expect( sendMock ).toHaveBeenCalledTimes( 1 );
        const cmd = sendMock.mock.calls[0][0];
        expect( cmd ).toBeInstanceOf( PutObjectCommandMock );
        expect( cmd.input ).toEqual( { Bucket: 'bucket', Key: 'wf/key.json', Body: '{"a":1}' } );

        // subsequent upload uses cached client
        await upload( { key: 'wf/key2.json', content: '{}' } );
        expect( sendMock ).toHaveBeenCalledTimes( 2 );
    } );
} );
|
|
62
|
+
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
/**
 * @typedef {object} NodeEntry
 * @property {string} id
 * @property {string} kind
 * @property {string} name
 * @property {number} startedAt
 * @property {number} endedAt
 * @property {object} [input]
 * @property {object} [output]
 * @property {object} [error]
 * @property {NodeEntry[]} children
 */

/**
 * Create an empty node entry for the tree.
 *
 * Properties are listed in the order they should appear in the final file,
 * as this makes the serialized JSON easier to read.
 * @param {string} id - Node id
 * @returns {NodeEntry} The entry without any values
 */
const createEntry = id => ( {
    id,
    kind: '',
    name: '',
    startedAt: 0,
    endedAt: null,
    input: undefined,
    output: undefined,
    error: undefined,
    children: []
} );

/**
 * Build a tree of nodes from a list of entries.
 *
 * Each node will have: id, name, kind, children, input, output or error, startedAt, endedAt.
 *
 * Entries with same id will be combined according to their phase (start, end OR error):
 * - The details of the start phase become input, timestamp becomes startedAt;
 * - The details of the end phase become output, timestamp becomes endedAt;
 * - The details of the error phase become error, timestamp becomes endedAt;
 * - Only the start phase's kind and name are used.
 *
 * Children are attached according to the parentId of each entry (on the start
 * phase only, so each node is linked exactly once) and sorted by startedAt.
 * The result tree has a single root: the node of the first entry without a
 * parentId, normally the workflow itself.
 *
 * @param {object[]} entries - The list of entries
 * @returns {object|null} The root node, or null when no root entry exists
 */
const buildTraceTree = entries => {
    const nodes = new Map();
    const ensureNode = id => nodes.get( id ) ?? nodes.set( id, createEntry( id ) ).get( id );

    for ( const entry of entries ) {
        const { kind, id, name, parentId, details, phase, timestamp } = entry;
        const node = ensureNode( id );

        if ( phase === 'start' ) {
            Object.assign( node, { input: details, startedAt: timestamp, kind, name } );
        } else if ( phase === 'end' ) {
            Object.assign( node, { output: details, endedAt: timestamp } );
        } else if ( phase === 'error' ) {
            Object.assign( node, { error: details, endedAt: timestamp } );
        }

        if ( parentId && phase === 'start' ) {
            ensureNode( parentId ).children.push( node );
        }
    }

    // Sort each child list once after every startedAt is known. The previous
    // version re-sorted the parent's children on every insertion inside the
    // loop (O(k² log k) per parent); a single stable sort yields the same order.
    for ( const node of nodes.values() ) {
        node.children.sort( ( a, b ) => a.startedAt - b.startedAt );
    }

    const rootNode = nodes.get( entries.find( e => !e.parentId )?.id );
    if ( !rootNode ) {
        return null;
    }
    if ( !rootNode.endedAt ) {
        // Root never received an end/error phase — flag it in the output slot.
        rootNode.output = '<<Workflow did not finish yet. If this workflows is supposed to have been completed already, \
this can indicate it timed out or was interrupted.>>';
    }
    return rootNode;
};

export default buildTraceTree;
|