@output.ai/core 0.0.16 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,10 +1,18 @@
  {
  "name": "@output.ai/core",
- "version": "0.0.16",
+ "version": "0.1.0",
  "description": "The core module of the output framework",
  "type": "module",
- "main": "src/index.js",
- "types": "src/index.d.ts",
+ "exports": {
+ ".": {
+ "types": "./src/index.d.ts",
+ "import": "./src/index.js"
+ },
+ "./tracing": {
+ "types": "./src/tracing/index.d.ts",
+ "import": "./src/tracing/index.js"
+ }
+ },
  "files": [
  "./src",
  "./bin"
@@ -30,6 +38,8 @@
  "#consts": "./src/consts.js",
  "#configs": "./src/configs.js",
  "#errors": "./src/errors.js",
+ "#tracing": "./src/tracing/index.js",
+ "#async_storage": "./src/async_storage.js",
  "#internal_activities": "./src/internal_activities/index.js"
  }
  }
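
The new exports map exposes tracing as a dedicated subpath alongside the root entry point. A minimal consumer sketch, assuming standard Node resolution of the "exports" field (the event id, kind, name and details values below are illustrative):

    // Sketch: importing the public tracing namespace via the new "./tracing" subpath.
    import { Tracing } from '@output.ai/core/tracing';

    Tracing.addEventStart( { id: 'evt-1', kind: 'http', name: 'request', details: { url: 'https://example.com' } } );
    Tracing.addEventEnd( { id: 'evt-1', details: { status: 200 } } );
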
package/src/configs.js CHANGED
@@ -7,8 +7,7 @@ const envVarSchema = z.object( {
  TEMPORAL_NAMESPACE: z.string().optional().default( 'default' ),
  TEMPORAL_API_KEY: z.string().optional(),
  CATALOG_ID: z.string().regex( /^[a-z0-9_.@-]+$/i ),
- API_AUTH_KEY: z.string().optional(),
- TRACING_ENABLED: z.stringbool().optional()
+ API_AUTH_KEY: z.string().optional()
  } );

  const { data: safeEnvVar, error } = envVarSchema.safeParse( process.env );
@@ -30,7 +29,3 @@ export const worker = {
  export const api = {
  authKey: safeEnvVar.API_AUTH_KEY
  };
-
- export const tracing = {
- enabled: safeEnvVar.TRACING_ENABLED
- };
@@ -181,38 +181,6 @@ describe( 'configs', () => {
  const { api } = await import( './configs.js' );
  expect( api.authKey ).toBeUndefined();
  } );
-
- it( 'should handle TRACING_ENABLED when true', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- TRACING_ENABLED: 'true'
- };
-
- const { tracing } = await import( './configs.js' );
- expect( tracing.enabled ).toBe( true );
- } );
-
- it( 'should handle TRACING_ENABLED when false', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- TRACING_ENABLED: 'false'
- };
-
- const { tracing } = await import( './configs.js' );
- expect( tracing.enabled ).toBe( false );
- } );
-
- it( 'should handle missing TRACING_ENABLED', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog'
- };
-
- const { tracing } = await import( './configs.js' );
- expect( tracing.enabled ).toBeUndefined();
- } );
  } );
  } );

@@ -253,20 +221,6 @@ describe( 'configs', () => {
  } );
  } );

- it( 'should export tracing config', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- TRACING_ENABLED: 'true'
- };
-
- const { tracing } = await import( './configs.js' );
-
- expect( tracing ).toEqual( {
- enabled: true
- } );
- } );
-
  it( 'should have correct static worker config values', async () => {
  process.env = {
  TEMPORAL_ADDRESS: 'http://localhost:7233',
@@ -361,11 +315,10 @@ describe( 'configs', () => {
  TEMPORAL_NAMESPACE: 'production',
  TEMPORAL_API_KEY: 'prod-api-key-123',
  CATALOG_ID: 'prod.catalog@v1',
- API_AUTH_KEY: 'secure-auth-key',
- TRACING_ENABLED: 'true'
+ API_AUTH_KEY: 'secure-auth-key'
  };

- const { worker, api, tracing } = await import( './configs.js' );
+ const { worker, api } = await import( './configs.js' );

  expect( worker.address ).toBe( 'https://temporal.cloud.example.com' );
  expect( worker.namespace ).toBe( 'production' );
@@ -373,7 +326,6 @@ describe( 'configs', () => {
  expect( worker.catalogId ).toBe( 'prod.catalog@v1' );
  expect( worker.taskQueue ).toBe( 'prod.catalog@v1' );
  expect( api.authKey ).toBe( 'secure-auth-key' );
- expect( tracing.enabled ).toBe( true );
  } );
  } );
  } );
package/src/consts.js CHANGED
@@ -1,16 +1,7 @@
- export const SEND_WEBHOOK_ACTIVITY_NAME = '__internal#sendWebhookPost';
- export const READ_TRACE_FILE = '__internal#readTraceFile';
+ export const ACTIVITY_SEND_WEBHOOK = '__internal#sendWebhook';
+ export const ACTIVITY_READ_TRACE_FILE = '__internal#readTraceFile';
  export const METADATA_ACCESS_SYMBOL = Symbol( '__metadata' );
  export const WORKFLOWS_INDEX_FILENAME = '__workflows_entrypoint.js';
- export const THIS_LIB_NAME = 'core';
- export const TraceEvent = {
- WORKFLOW_START: 'workflow_start',
- WORKFLOW_END: 'workflow_end',
- STEP_START: 'step_start',
- STEP_END: 'step_end',
- EVALUATOR_START: 'evaluator_start',
- EVALUATOR_END: 'evaluator_end'
- };
  export const ComponentType = {
  EVALUATOR: 'evaluator',
  INTERNAL_STEP: 'internal_step',
@@ -16,6 +16,6 @@ export function step( { name, description, inputSchema, outputSchema, fn } ) {
  return output;
  };

- setMetadata( wrapper, { name, description, inputSchema, outputSchema, type: ComponentType.EVALUATOR } );
+ setMetadata( wrapper, { name, description, inputSchema, outputSchema, type: ComponentType.STEP } );
  return wrapper;
  };
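
With the fix above, functions wrapped by step() are now tagged with ComponentType.STEP rather than ComponentType.EVALUATOR. A hedged sketch of a step definition; the import from the package root and the zod schemas are assumptions, and the body is a placeholder:

    import { step } from '@output.ai/core';   // assumed root export
    import { z } from 'zod';

    export const fetchUser = step( {
      name: 'fetch_user',
      description: 'Loads a user record',
      inputSchema: z.object( { id: z.string() } ),
      outputSchema: z.object( { id: z.string(), name: z.string() } ),
      fn: async ( { id } ) => ( { id, name: 'Ada' } )   // placeholder implementation
    } );
    // setMetadata() now records type: ComponentType.STEP on this wrapper.
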
@@ -1,18 +1,30 @@
  // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
- import { defineSignal, setHandler, proxyActivities, workflowInfo } from '@temporalio/workflow';
- import { SEND_WEBHOOK_ACTIVITY_NAME } from '#consts';
+ import { defineSignal, setHandler, proxyActivities, workflowInfo, proxySinks } from '@temporalio/workflow';
+ import { ACTIVITY_SEND_WEBHOOK } from '#consts';
+ import { FatalError } from '#errors';
  import { validateCreateWebhook } from './validations/static.js';

  export async function createWebhook( { url, payload } ) {
  validateCreateWebhook( { url, payload } );
- const workflowId = workflowInfo();
+ const { workflowId } = workflowInfo();

- await proxyActivities( temporalActivityConfigs )[SEND_WEBHOOK_ACTIVITY_NAME]( { url, workflowId, payload } );
+ await proxyActivities( {
+ startToCloseTimeout: '3m',
+ retry: {
+ initialInterval: '15s',
+ maximumAttempts: 5,
+ nonRetryableErrorTypes: [ FatalError.name ]
+ }
+ } )[ACTIVITY_SEND_WEBHOOK]( { url, workflowId, payload } );

+ const sinks = await proxySinks();
  const resumeSignal = defineSignal( 'resume' );

+ const traceId = `${workflowId}-${url}-${Date.now()}`;
+ sinks.trace.addEventStart( { id: traceId, name: 'resume', kind: 'webhook' } );
  return new Promise( resolve =>
  setHandler( resumeSignal, responsePayload => {
+ sinks.trace.addEventEnd( { id: traceId, details: responsePayload } );
  resolve( responsePayload );
  } )
  );
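
createWebhook now posts through the ACTIVITY_SEND_WEBHOOK activity with an explicit retry policy, records a webhook event through the trace sink, and parks until the 'resume' signal arrives. A hedged sketch of the external side that delivers that signal; the @temporalio/client usage, workflow id and payload are assumptions:

    import { Client } from '@temporalio/client';

    // The webhook receiver looks up the workflow by the workflowId it was sent,
    // then hands the response payload back through the 'resume' signal.
    const client = new Client();   // connection options omitted; assumes a reachable Temporal server
    const handle = client.workflow.getHandle( 'order-flow-1234' );
    await handle.signal( 'resume', { approved: true } );
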
@@ -1,11 +1,11 @@
  // THIS RUNS IN THE TEMPORAL'S SANDBOX ENVIRONMENT
- import { proxyActivities, inWorkflowContext, executeChild, workflowInfo, ApplicationFailure, proxySinks } from '@temporalio/workflow';
+ import { proxyActivities, inWorkflowContext, executeChild, workflowInfo } from '@temporalio/workflow';
  import { getInvocationDir } from './utils.js';
  import { setMetadata } from './metadata.js';
  import { FatalError, ValidationError } from '#errors';
  import { validateWorkflow } from './validations/static.js';
  import { validateWithSchema } from './validations/runtime.js';
- import { READ_TRACE_FILE, TraceEvent } from '#consts';
+ import { ACTIVITY_READ_TRACE_FILE } from '#consts';

  const temporalActivityConfigs = {
  startToCloseTimeout: '20 minute',
@@ -23,62 +23,45 @@ export function workflow( { name, description, inputSchema, outputSchema, fn } )
  const workflowPath = getInvocationDir();

  const steps = proxyActivities( temporalActivityConfigs );
- const sinks = proxySinks();

  const wrapper = async input => {
- try {
- if ( inWorkflowContext() ) {
- sinks.log.trace( { event: TraceEvent.WORKFLOW_START, input } );
- }
-
- validateWithSchema( inputSchema, input, `Workflow ${name} input` );
-
- // this returns a plain function, for example, in unit tests
- if ( !inWorkflowContext() ) {
- const output = await fn( input );
- validateWithSchema( outputSchema, output, `Workflow ${name} output` );
- return output;
- }
-
- const { memo, workflowId } = workflowInfo();
-
- Object.assign( workflowInfo().memo, { workflowPath } );
-
- // binds the methods called in the code that Webpack loader will add, they will exposed via "this"
- const output = await fn.call( {
- invokeStep: async ( stepName, input ) => steps[`${workflowPath}#${stepName}`]( input ),
- invokeEvaluator: async ( evaluatorName, input ) => steps[`${workflowPath}#${evaluatorName}`]( input ),
+ validateWithSchema( inputSchema, input, `Workflow ${name} input` );

- startWorkflow: async ( childName, input ) => {
+ // this returns a plain function, for example, in unit tests
+ if ( !inWorkflowContext() ) {
+ const output = await fn( input );
+ validateWithSchema( outputSchema, output, `Workflow ${name} output` );
+ return output;
+ }

- // Checks if current memo has rootWorkflowId, which means current execution is already a child
- // Then it sets the memory for the child execution passing along who's the original workflow is and its type
- const workflowMemory = memo.rootWorkflowId ?
- { parentWorkflowId: workflowId, rootWorkflowType: memo.rootWorkflowType, rootWorkflowId: memo.rootWorkflowId } :
- { parentWorkflowId: workflowId, rootWorkflowId: workflowId, rootWorkflowType: name };
+ const { workflowId, memo } = workflowInfo();

- return executeChild( childName, { args: input ? [ input ] : [], memo: workflowMemory } );
- }
- }, input );
+ // keep trace id and helm from memo if present (child workflow)
+ const traceId = memo.traceId ?? workflowId;
+ const traceHelm = memo.traceHelm ?? name;

- validateWithSchema( outputSchema, output, `Workflow ${name} output` );
+ Object.assign( memo, { traceId, traceHelm } );

- sinks.log.trace( { event: TraceEvent.WORKFLOW_END, output } );
+ // binds the methods called in the code that Webpack loader will add, they will exposed via "this"
+ const output = await fn.call( {
+ invokeStep: async ( stepName, input ) => steps[`${workflowPath}#${stepName}`]( input ),
+ invokeEvaluator: async ( evaluatorName, input ) => steps[`${workflowPath}#${evaluatorName}`]( input ),

- // add trace if not child
- if ( !memo.rootWorkflowId ) {
- const trace = await steps[READ_TRACE_FILE]( { workflowType: name, workflowId } );
- return { output, trace };
+ startWorkflow: async ( childName, input ) => {
+ return executeChild( childName, { args: input ? [ input ] : [], memo: { traceId, traceHelm, parentId: workflowId } } );
  }
+ }, input );

- return output;
- } catch ( error ) {
- /*
- * Any errors in the workflow will interrupt its execution since the workflow is designed to orchestrate and
- * IOs should be made in steps
- */
- throw new ApplicationFailure( error.message, error.constructor.name );
+ validateWithSchema( outputSchema, output, `Workflow ${name} output` );
+
+ // Adds trace file content to the output if it is the root workflow
+ // @TODO this will be replaced in favor of a persistent storage elsewhere
+ if ( !memo.parentId ) {
+ const trace = await steps[ACTIVITY_READ_TRACE_FILE]( { traceId, traceHelm } );
+ return { output, trace };
  }
+
+ return output;
  };

  setMetadata( wrapper, { name, description, inputSchema, outputSchema } );
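
The wrapper no longer rewraps errors in ApplicationFailure or logs through sinks.log.trace; it seeds traceId/traceHelm into the memo and forwards them to children via startWorkflow, so only the root execution (no memo.parentId) attaches the trace file to its output. A hedged sketch of a workflow body using the bound helpers; the root import path, names and step identifiers are assumptions, and schemas are omitted for brevity:

    import { workflow } from '@output.ai/core';   // assumed root export

    export const parentFlow = workflow( {
      name: 'parent_flow',
      fn: async function ( input ) {
        // invokeStep/startWorkflow are bound onto "this" by the Webpack loader.
        const enriched = await this.invokeStep( 'enrich', input );
        // The child inherits traceId/traceHelm through the memo built above.
        const childOutput = await this.startWorkflow( 'child_flow', enriched );
        return childOutput;
      }
    } );
    // Because parentFlow is the root here, its caller receives { output, trace }.
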
@@ -15,7 +15,7 @@ const callerDir = process.argv[2];
  * @param {any} options.payload - The payload to send url
  * @throws {FatalError}
  */
- export const sendWebhookPost = async ( { url, workflowId, payload } ) => {
+ export const sendWebhook = async ( { url, workflowId, payload } ) => {
  const request = fetch( url, {
  method: 'POST',
  headers: {
@@ -33,41 +33,39 @@ export const sendWebhookPost = async ( { url, workflowId, payload } ) => {
  }
  } )();

- console.log( '[Core.SendWebhookPost]', res.status, res.statusText );
+ console.log( '[Core.SendWebhook]', res.status, res.statusText );

  if ( !res.ok ) {
  throw new FatalError( `Webhook fail: ${res.status}` );
  }
  };

- setMetadata( sendWebhookPost, { type: ComponentType.INTERNAL_STEP } );
+ setMetadata( sendWebhook, { type: ComponentType.INTERNAL_STEP } );

  /**
  * Read the trace file of a given execution and returns the content
  *
  * @param {object} options
- * @param {string} options.workflowType - The type of the workflow
- * @param {string} options.workflowId - The workflow execution id
+ * @param {string} options.traceId - The is of the trace
+ * @param {string} options.traceHelm - The helm of the trace file
  * @returns {string[]} Each line of the trace file
  */
- export const readTraceFile = async ( { workflowType, workflowId } ) => {
- const dir = join( callerDir, 'logs', 'runs', workflowType );
+ export const readTraceFile = async ( { traceId, traceHelm } ) => {
+ const dir = join( callerDir, 'logs', 'runs', traceHelm );

  if ( !existsSync( dir ) ) {
  console.log( '[Core.ReadTraceFile]', 'Trace folder not found', dir );
  return [];
  }

- const suffix = `-${workflowId}.raw`;
- const matchingFile = readdirSync( dir ).find( f => f.endsWith( suffix ) );
+ const fileName = readdirSync( dir ).find( f => f.endsWith( `-${traceId}.raw` ) );

- if ( !matchingFile ) {
- console.log( '[Core.ReadTraceFile]', 'Trace file not found', dir, suffix );
+ if ( !fileName ) {
+ console.log( '[Core.ReadTraceFile]', 'Trace file not found', { traceId, traceHelm } );
  return [];
  }

- const file = join( dir, matchingFile );
- return existsSync( file ) ? readFileSync( file, 'utf-8' ).split( '\n' ) : [];
+ return readFileSync( join( dir, fileName ), 'utf-8' ).split( '\n' );
  };

  setMetadata( readTraceFile, { type: ComponentType.INTERNAL_STEP, skipTrace: true } );
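
readTraceFile pairs with the writer added in src/tracing/index.js below: events are appended one JSON object per line to a file ending in `-${traceId}.raw` under logs/runs/<traceHelm>/ in the caller directory. A hedged sketch of reading such a file directly; the directory, helm and trace id are illustrative:

    import { readdirSync, readFileSync } from 'node:fs';
    import { join } from 'node:path';

    const dir = join( process.cwd(), 'logs', 'runs', 'parent_flow' );           // <callerDir>/logs/runs/<traceHelm>
    const file = readdirSync( dir ).find( f => f.endsWith( '-wf-123.raw' ) );   // `-${traceId}.raw`
    const events = readFileSync( join( dir, file ), 'utf-8' )
      .split( '\n' )
      .filter( Boolean )
      .map( line => JSON.parse( line ) );   // one Event Phase per line
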
@@ -0,0 +1,47 @@
+ /*
+ ╭───────────╮
+ │ T R A C E │╮
+ ╰───────────╯│
+ ╰───────────╯
+ */
+
+ /**
+ * The public namespace for tracing
+ *
+ * @namespace
+ */
+ export declare const Tracing: {
+
+ /**
+ * Adds the start phase of a new event at the default trace for the current workflow.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {string} kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
+ * @param {string} name - The human friendly name of the Event: eg: query, request, create.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @returns {void}
+ */
+ addEventStart( args: { id: string; kind: string; name: string; details: any } ): void; // eslint-disable-line @typescript-eslint/no-explicit-any
+
+ /**
+ * Adds the end phase at an event at the default trace for the current workflow.
+ *
+ * It needs to use the same id of the start phase.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @returns {void}
+ */
+ addEventEnd( args: { id: string; details: any } ): void; // eslint-disable-line @typescript-eslint/no-explicit-any
+
+ /**
+ * Adds the error phase at an event as error at the default trace for the current workflow.
+ *
+ * It needs to use the same id of the start phase.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @returns {void}
+ */
+ addEventError( args: { id: string; details: any } ): void; // eslint-disable-line @typescript-eslint/no-explicit-any
+ };
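
These declarations describe the Tracing namespace implemented below. A hedged lifecycle sketch from user code, pairing a start phase with an end or error phase under the same id; the db client and all values are hypothetical:

    import { Tracing } from '@output.ai/core/tracing';

    const id = 'db-query-42';
    Tracing.addEventStart( { id, kind: 'DBOp', name: 'query', details: { table: 'users' } } );
    try {
      const rows = await db.query( 'SELECT * FROM users' );   // hypothetical client
      Tracing.addEventEnd( { id, details: { count: rows.length } } );
    } catch ( error ) {
      Tracing.addEventError( { id, details: error } );
      throw error;
    }
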
@@ -0,0 +1,154 @@
+ import { Storage } from '#async_storage';
+ import { mkdirSync, existsSync, readdirSync, appendFileSync } from 'node:fs';
+ import { join } from 'path';
+ import { EOL } from 'os';
+ import { buildLogTree } from './tracer_tree.js';
+ import { serializeError } from './utils.js';
+
+ const callerDir = process.argv[2];
+ // It is is isolated here instead of #configs to allow this module to be exported without all other configs requirements
+ const tracingEnabled = [ '1', 'true', 'on' ].includes( process.env.TRACING_ENABLED );
+
+ /**
+ * Trace nomenclature
+ *
+ * Trace - The collection of Events;
+ * Event - Any entry in the Trace file, must have the two phases START and END or ERROR;
+ * Phase - An specific part of an Event, either START or the conclusive END or ERROR;
+ */
+
+ /**
+ * Adds the Trace Event Phase to the trace file
+ *
+ * @param {string} phase - The phase
+ * @param {object} options - All the trace fields
+ * @returns {void}
+ */
+ function addEventPhase( phase, { kind, details, id, name, parentId, traceId, traceHelm } ) {
+ if ( !tracingEnabled || name === 'catalog' ) {
+ return;
+ }
+
+ const parsedDetails = details instanceof Error ? serializeError( details ) : details;
+ const timestamp = Date.now();
+ const entry = { phase, kind, details: parsedDetails, id, name, timestamp, parentId };
+ const outputDir = join( callerDir, 'logs', 'runs', traceHelm );
+
+ if ( !existsSync( outputDir ) ) {
+ mkdirSync( outputDir, { recursive: true } );
+ }
+
+ const suffix = `-${traceId}.raw`;
+ const logFile = readdirSync( outputDir ).find( f => f.endsWith( suffix ) ) ?? `${new Date( timestamp ).toISOString()}-${suffix}`;
+ const logPath = join( outputDir, logFile );
+
+ appendFileSync( logPath, JSON.stringify( entry ) + EOL, 'utf-8' );
+ buildLogTree( logPath );
+ };
+
+ /**
+ * Internal use only
+ *
+ * Adds the start phase of a new event at the default trace for the current workflow.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {string} kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
+ * @param {string} name - The human friendly name of the Event: eg: query, request, create.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @param {string} parentId - The parent Event, used to build a three.
+ * @param {string} traceId - The traceId, which identifies from which trace this Event belongs.
+ * @param {string} traceHelm - The trace helm, which is a taxonomical naming of the Trace.
+ * @returns {void}
+ */
+ export const addEventStart = options => addEventPhase( 'start', options );
+
+ /**
+ * Internal use only
+ *
+ * Adds the end phase at an event at the default trace for the current workflow.
+ *
+ * It needs to use the same id of the start phase.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {string} kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
+ * @param {string} name - The human friendly name of the Event: eg: query, request, create.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @param {string} parentId - The parent Event, used to build a three.
+ * @param {string} traceId - The traceId, which identifies from which trace this Event belongs.
+ * @param {string} traceHelm - The trace helm, which is a taxonomical naming of the Trace.
+ * @returns {void}
+ */
+ export const addEventEnd = options => addEventPhase( 'end', options );
+
+ /**
+ * Internal use only
+ *
+ * Adds the error phase at an event as error at the default trace for the current workflow.
+ *
+ * It needs to use the same id of the start phase.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {string} kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
+ * @param {string} name - The human friendly name of the Event: eg: query, request, create.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @param {string} parentId - The parent Event, used to build a three.
+ * @param {string} traceId - The traceId, which identifies from which trace this Event belongs.
+ * @param {string} traceHelm - The trace helm, which is a taxonomical naming of the Trace.
+ * @returns {void}
+ */
+ export const addEventError = options => addEventPhase( 'error', options );
+
+ /**
+ * Adds an Event Phase reading traceId, traceHelm and parentId from the context.
+ *
+ * @param {object} options - The common trace configurations
+ */
+ function addEventPhaseWithContext( phase, options ) {
+ const storeContent = Storage.load();
+ if ( !storeContent ) { // means this was called from a unit test
+ return;
+ }
+ const { parentId, traceId, traceHelm } = storeContent;
+ addEventPhase( phase, { ...options, parentId, traceId, traceHelm } );
+ };
+
+ /**
+ * The public namespace for tracing
+ *
+ * @namespace
+ */
+ export const Tracing = {
+
+ /**
+ * Adds the start phase of a new event at the default trace for the current workflow.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {string} kind - The kind of Event, like HTTP, DiskWrite, DBOp, etc.
+ * @param {string} name - The human friendly name of the Event: eg: query, request, create.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @returns {void}
+ */
+ addEventStart: ( { id, kind, name, details } ) => addEventPhaseWithContext( 'start', { kind, name, details, id } ),
+
+ /**
+ * Adds the end phase at an event at the default trace for the current workflow.
+ *
+ * It needs to use the same id of the start phase.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @returns {void}
+ */
+ addEventEnd: ( { id, details } ) => addEventPhaseWithContext( 'end', { id, details } ),
+
+ /**
+ * Adds the error phase at an event as error at the default trace for the current workflow.
+ *
+ * It needs to use the same id of the start phase.
+ *
+ * @param {string} id - A unique id for the Event, must be the same across all phases: start, end, error.
+ * @param {any} details - All details attached to this Event Phase. Eg: DB queried records, HTTP response body.
+ * @returns {void}
+ */
+ addEventError: ( { id, details } ) => addEventPhaseWithContext( 'error', { id, details } )
+ };
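
Writing is opt-in: addEventPhase returns early unless TRACING_ENABLED is '1', 'true' or 'on', and events named 'catalog' are skipped. A hedged sketch of the shape of one appended JSONL entry, as built by addEventPhase; all values are illustrative:

    // Example of a single line appended to <callerDir>/logs/runs/<traceHelm>/...-<traceId>.raw
    const entry = {
      phase: 'start',                  // 'start' | 'end' | 'error'
      kind: 'DBOp',
      details: { table: 'users' },     // Error details are run through serializeError()
      id: 'db-query-42',
      name: 'query',
      timestamp: 1577836800000,        // Date.now() at write time
      parentId: undefined              // parent Event id, used to build the trace tree
    };
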
@@ -0,0 +1,84 @@
+ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+ import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
+ import { tmpdir, EOL } from 'node:os';
+ import { join } from 'path';
+
+ const createTempDir = () => mkdtempSync( join( tmpdir(), 'output-sdk-trace-' ) );
+
+ // Use env var to enable tracing
+
+ vi.mock( '#async_storage', () => ( { Storage: { load: () => ( { parentId: undefined, traceId: 'trace-1', traceHelm: 'tests' } ) } } ) );
+ vi.mock( 'path', async importActual => {
+ const actual = await importActual();
+ return { ...actual, join: ( first, ...rest ) => actual.join( first ?? process.cwd(), ...rest ) };
+ } );
+ vi.mock( './tracer_tree.js', () => ( { buildLogTree: vi.fn() } ) );
+
+ describe( 'tracing private exports', () => {
+ beforeEach( () => {
+ vi.resetModules();
+ vi.clearAllMocks();
+ vi.useFakeTimers();
+ vi.setSystemTime( new Date( '2020-01-01T00:00:00.000Z' ) );
+ process.env.TRACING_ENABLED = '1';
+ } );
+
+ afterEach( () => {
+ vi.useRealTimers();
+ delete process.env.TRACING_ENABLED;
+ } );
+
+ it( 'addEventStart (private) writes start', async () => {
+ const originalArgv2 = process.argv[2];
+ const tmp = createTempDir();
+ process.argv[2] = tmp;
+
+ const { addEventStart } = await import( './index.js?v=private1' );
+ addEventStart( { id: 'a', kind: 'evaluator', name: 'start', details: { foo: 1 }, traceId: 'trace-1', traceHelm: 'tests' } );
+
+ const { buildLogTree } = await import( './tracer_tree.js' );
+ const logPath = buildLogTree.mock.calls[0][0];
+ const raw = readFileSync( logPath, 'utf-8' );
+ const entry = JSON.parse( raw.split( EOL )[0] );
+ expect( entry.phase ).toBe( 'start' );
+
+ rmSync( tmp, { recursive: true, force: true } );
+ process.argv[2] = originalArgv2;
+ } );
+
+ it( 'addEventEnd (private) writes end', async () => {
+ const originalArgv2 = process.argv[2];
+ const tmp = createTempDir();
+ process.argv[2] = tmp;
+
+ const { addEventEnd } = await import( './index.js?v=private2' );
+ addEventEnd( { id: 'a', details: { ok: true }, traceId: 'trace-1', traceHelm: 'tests' } );
+
+ const { buildLogTree } = await import( './tracer_tree.js' );
+ const logPath = buildLogTree.mock.calls[0][0];
+ const raw = readFileSync( logPath, 'utf-8' );
+ const entry = JSON.parse( raw.split( EOL )[0] );
+ expect( entry.phase ).toBe( 'end' );
+
+ rmSync( tmp, { recursive: true, force: true } );
+ process.argv[2] = originalArgv2;
+ } );
+
+ it( 'addEventError (private) writes error', async () => {
+ const originalArgv2 = process.argv[2];
+ const tmp = createTempDir();
+ process.argv[2] = tmp;
+
+ const { addEventError } = await import( './index.js?v=private3' );
+ addEventError( { id: 'a', details: new Error( 'oops' ), traceId: 'trace-1', traceHelm: 'tests' } );
+
+ const { buildLogTree } = await import( './tracer_tree.js' );
+ const logPath = buildLogTree.mock.calls[0][0];
+ const raw = readFileSync( logPath, 'utf-8' );
+ const entry = JSON.parse( raw.split( EOL )[0] );
+ expect( entry.phase ).toBe( 'error' );
+
+ rmSync( tmp, { recursive: true, force: true } );
+ process.argv[2] = originalArgv2;
+ } );
+ } );