@outputai/core 0.4.1-dev.92bc2fb.0 → 0.4.1-next.ae3ab85.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@outputai/core",
- "version": "0.4.1-dev.92bc2fb.0",
+ "version": "0.4.1-next.ae3ab85.0",
  "description": "The core module of the output framework",
  "type": "module",
  "exports": {
@@ -19,9 +19,6 @@
  "./sdk_utils": {
  "types": "./src/utils/index.d.ts",
  "import": "./src/utils/index.js"
- },
- "./sdk_tracing_tools": {
- "import": "./src/tracing/tools/index.js"
  }
  },
  "files": [
@@ -36,6 +33,7 @@
  },
  "dependencies": {
  "@aws-sdk/client-s3": "3.1038.0",
+ "@aws-sdk/lib-storage": "3.1038.0",
  "@babel/generator": "7.29.1",
  "@babel/parser": "7.29.2",
  "@babel/traverse": "7.29.0",
@@ -45,6 +43,7 @@
  "@temporalio/common": "1.17.0",
  "@temporalio/worker": "1.17.0",
  "@temporalio/workflow": "1.17.0",
+ "json-stream-stringify": "3.1.6",
  "redis": "5.12.1",
  "stacktrace-parser": "0.1.11",
  "undici": "8.1.0",
@@ -42,5 +42,4 @@ export declare function addEventAttribute( args: { eventId: string; name: string
  */
  export declare const Attribute: {
  COST: 'cost';
- TOKEN_USAGE: 'token_usage';
  };
@@ -49,6 +49,5 @@ export const addEventAttribute = ( { eventId, name, value } ) =>
  * Known attributes
  */
  export const Attribute = {
- COST: 'cost',
- TOKEN_USAGE: 'token_usage'
+ COST: 'cost'
  };
@@ -1,9 +1,11 @@
- import { appendFileSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
+ import { appendFileSync, mkdirSync, readdirSync, readFileSync, rmSync, createWriteStream } from 'node:fs';
  import { dirname, join } from 'node:path';
  import { fileURLToPath } from 'url';
  import buildTraceTree from '../../tools/build_trace_tree.js';
- import { safeFormatJSON } from '../../tools/utils.js';
  import { EOL } from 'node:os';
+ import { JsonStreamStringify } from 'json-stream-stringify';
+
+ import { pipeline } from 'stream/promises';

  const __dirname = dirname( fileURLToPath( import.meta.url ) );

@@ -109,7 +111,7 @@ export const init = () => {
  * @param {object} args.executionContext - Execution info: workflowId, workflowName, startTime
  * @returns {void}
  */
- export const exec = ( { entry, executionContext } ) => {
+ export const exec = async ( { entry, executionContext } ) => {
  const { workflowId, workflowName, startTime } = executionContext;
  const tempFilePath = createTempFilePath( executionContext );
  addEntry( entry, tempFilePath );
@@ -126,7 +128,11 @@ export const exec = ( { entry, executionContext } ) => {
  const path = join( dir, buildTraceFilename( { startTime, workflowId } ) );

  mkdirSync( dir, { recursive: true } );
- writeFileSync( path, safeFormatJSON( content ) + EOL, 'utf-8' );
+
+ await pipeline(
+ new JsonStreamStringify( content ),
+ createWriteStream( path )
+ );
  };

  /**
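The hunk above swaps the buffered writeFileSync( path, safeFormatJSON( content ) + EOL, 'utf-8' ) call for a streamed write. A minimal standalone sketch of that pattern, with the writeTraceFile name, the sample content, and the output path as hypothetical placeholders:

import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { JsonStreamStringify } from 'json-stream-stringify';

// JsonStreamStringify is a Readable that emits the serialized JSON in chunks,
// so the full string never has to be held in memory the way
// JSON.stringify + writeFileSync would require for very large traces.
const writeTraceFile = async ( content, path ) =>
  pipeline(
    new JsonStreamStringify( content ), // source: incremental JSON serialization
    createWriteStream( path )           // sink: file stream, backpressure handled by pipeline
  );

await writeTraceFile( { count: 3 }, '/tmp/trace.json' );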
@@ -1,5 +1,4 @@
  import { describe, it, expect, vi, beforeEach } from 'vitest';
- import { EOL } from 'node:os';

  // In-memory fs mock store
  const store = { files: new Map() };
@@ -12,6 +11,7 @@ const appendFileSyncMock = vi.fn( ( path, data ) => {
  const readFileSyncMock = vi.fn( path => store.files.get( path ) ?? '' );
  const readdirSyncMock = vi.fn( () => [] );
  const rmSyncMock = vi.fn();
+ const createWriteStreamMock = vi.fn( path => ( { path } ) );

  vi.mock( 'node:fs', () => ( {
  mkdirSync: mkdirSyncMock,
@@ -19,9 +19,36 @@ vi.mock( 'node:fs', () => ( {
  appendFileSync: appendFileSyncMock,
  readFileSync: readFileSyncMock,
  readdirSync: readdirSyncMock,
- rmSync: rmSyncMock
+ rmSync: rmSyncMock,
+ createWriteStream: createWriteStreamMock
  } ) );

+ const pipelineMock = vi.fn( async ( source, destination ) => {
+ const chunks = [];
+ for await ( const chunk of source ) {
+ chunks.push( Buffer.isBuffer( chunk ) ? chunk : Buffer.from( chunk ) );
+ }
+ store.files.set( destination.path, Buffer.concat( chunks ).toString( 'utf8' ) );
+ } );
+ vi.mock( 'node:stream/promises', () => ( { pipeline: pipelineMock } ) );
+
+ vi.mock( 'json-stream-stringify', async () => {
+ const { Readable } = await import( 'node:stream' );
+ return {
+ JsonStreamStringify: class extends Readable {
+ constructor( body ) {
+ super();
+ this.body = body;
+ }
+
+ _read() {
+ this.push( JSON.stringify( this.body ) );
+ this.push( null );
+ }
+ }
+ };
+ } );
+
  const buildTraceTreeMock = vi.fn( entries => ( { count: entries.length } ) );
  vi.mock( '../../tools/build_trace_tree.js', () => ( { default: buildTraceTreeMock } ) );

@@ -58,17 +85,18 @@ describe( 'tracing/processors/local', () => {
  const workflowId = 'id1';
  const ctx = { executionContext: { workflowId, workflowName: 'WF', startTime } };

- exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
- exec( { ...ctx, entry: childTick( 'child-1', startTime + 1 ) } );
- exec( { ...ctx, entry: rootEnd( workflowId, startTime + 2 ) } );
+ await exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
+ await exec( { ...ctx, entry: childTick( 'child-1', startTime + 1 ) } );
+ await exec( { ...ctx, entry: rootEnd( workflowId, startTime + 2 ) } );

  expect( buildTraceTreeMock ).toHaveBeenCalledTimes( 1 );
  expect( buildTraceTreeMock.mock.calls[0][0] ).toHaveLength( 3 );

- expect( writeFileSyncMock ).toHaveBeenCalledTimes( 1 );
- const [ writtenPath, content ] = writeFileSyncMock.mock.calls[0];
+ expect( createWriteStreamMock ).toHaveBeenCalledTimes( 1 );
+ expect( pipelineMock ).toHaveBeenCalledTimes( 1 );
+ const [ writtenPath ] = createWriteStreamMock.mock.calls[0];
  expect( writtenPath ).toMatch( /\/tmp\/project\/logs\/runs\/WF\// );
- expect( JSON.parse( content.trim() ).count ).toBe( 3 );
+ expect( JSON.parse( store.files.get( writtenPath ) ).count ).toBe( 3 );
  } );

  it( 'exec(): does not build or write on non-flush entries', async () => {
@@ -79,11 +107,13 @@ describe( 'tracing/processors/local', () => {
  const workflowId = 'id1';
  const ctx = { executionContext: { workflowId, workflowName: 'WF', startTime } };

- exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
- exec( { ...ctx, entry: childTick( 'child-1', startTime + 1 ) } );
+ await exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
+ await exec( { ...ctx, entry: childTick( 'child-1', startTime + 1 ) } );

  expect( buildTraceTreeMock ).not.toHaveBeenCalled();
  expect( writeFileSyncMock ).not.toHaveBeenCalled();
+ expect( createWriteStreamMock ).not.toHaveBeenCalled();
+ expect( pipelineMock ).not.toHaveBeenCalled();
  } );

  it( 'exec(): flushes on error action before root end', async () => {
@@ -94,12 +124,13 @@ describe( 'tracing/processors/local', () => {
  const workflowId = 'id1';
  const ctx = { executionContext: { workflowId, workflowName: 'WF', startTime } };

- exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
- exec( { ...ctx, entry: { id: 'step-1', action: 'error', timestamp: startTime + 1 } } );
+ await exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
+ await exec( { ...ctx, entry: { id: 'step-1', action: 'error', timestamp: startTime + 1 } } );

  expect( buildTraceTreeMock ).toHaveBeenCalledTimes( 1 );
  expect( buildTraceTreeMock.mock.calls[0][0] ).toHaveLength( 2 );
- expect( writeFileSyncMock ).toHaveBeenCalledTimes( 1 );
+ expect( createWriteStreamMock ).toHaveBeenCalledTimes( 1 );
+ expect( pipelineMock ).toHaveBeenCalledTimes( 1 );
  } );

  it( 'getDestination(): returns absolute path under callerDir logs', async () => {
@@ -128,11 +159,11 @@ describe( 'tracing/processors/local', () => {
  const workflowId = 'id1';
  const ctx = { executionContext: { workflowId, workflowName: 'WF', startTime } };

- exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
- exec( { ...ctx, entry: rootEnd( workflowId, startTime + 1 ) } );
+ await exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
+ await exec( { ...ctx, entry: rootEnd( workflowId, startTime + 1 ) } );

- expect( writeFileSyncMock ).toHaveBeenCalledTimes( 1 );
- const [ writtenPath ] = writeFileSyncMock.mock.calls[0];
+ expect( createWriteStreamMock ).toHaveBeenCalledTimes( 1 );
+ const [ writtenPath ] = createWriteStreamMock.mock.calls[0];

  expect( writtenPath ).not.toContain( '/host/path/logs' );
  expect( writtenPath ).toMatch( /\/tmp\/project\/logs\/runs\/WF\// );
@@ -164,15 +195,15 @@ describe( 'tracing/processors/local', () => {
  const workflowName = 'test-workflow';
  const ctx = { executionContext: { workflowId, workflowName, startTime } };

- exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
- exec( { ...ctx, entry: rootEnd( workflowId, startTime + 1 ) } );
+ await exec( { ...ctx, entry: rootStart( workflowId, startTime ) } );
+ await exec( { ...ctx, entry: rootEnd( workflowId, startTime + 1 ) } );

  const destination = getDestination( { startTime, workflowId, workflowName } );

- const [ writtenPath, payload ] = writeFileSyncMock.mock.calls[0];
+ const [ writtenPath ] = createWriteStreamMock.mock.calls[0];
  expect( writtenPath ).not.toContain( '/Users/ben/project' );
  expect( writtenPath ).toMatch( /\/tmp\/project\/logs\/runs\/test-workflow\// );
- expect( payload.endsWith( EOL ) ).toBe( true );
+ expect( JSON.parse( store.files.get( writtenPath ) ).count ).toBe( 2 );

  expect( destination ).toBe( '/Users/ben/project/logs/runs/test-workflow/2020-01-02-03-04-05-678Z_workflow-id-123.json' );
  } );
@@ -1,10 +1,9 @@
  import { upload } from './s3_client.js';
  import { getRedisClient } from './redis_client.js';
  import buildTraceTree from '../../tools/build_trace_tree.js';
- import { EOL } from 'node:os';
  import { loadEnv, getVars } from './configs.js';
  import { createChildLogger } from '#logger';
- import { safeFormatJSON } from '../../tools/utils.js';
+ import { JsonStreamStringify } from 'json-stream-stringify';

  const log = createChildLogger( 'S3 Processor' );

@@ -100,9 +99,10 @@ export const exec = async ( { entry, executionContext } ) => {
  log.warn( 'Incomplete trace file discarded', { workflowId, error: 'incomplete_trace_file' } );
  return;
  }
+
  await upload( {
  key: getS3Key( { workflowId, workflowName, startTime } ),
- content: safeFormatJSON( content ) + EOL
+ content: new JsonStreamStringify( content )
  } );
  await bustEntries( cacheKey );
  };
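For context on the hunk above: upload now receives a Readable instead of a pre-built string, so the trace is only serialized as the uploader drains the stream. A rough sketch of the new call shape, reusing the module's own upload export from ./s3_client.js; the content value and key are illustrative stand-ins:

import { JsonStreamStringify } from 'json-stream-stringify';
import { upload } from './s3_client.js';

// In the processor, `content` comes from buildTraceTree over the cached entries;
// a tiny stand-in object is used here.
const content = { count: 3 };

// No JSON string is materialized up front; serialization happens chunk by chunk
// while the S3 upload reads from the stream.
await upload( {
  key: 'WF/2020/01/02/trace.json', // illustrative; real keys come from getS3Key()
  content: new JsonStreamStringify( content )
} );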
@@ -27,6 +27,31 @@ vi.mock( './s3_client.js', () => ( { upload: uploadMock } ) );
  const buildTraceTreeMock = vi.fn( entries => ( { count: entries.length } ) );
  vi.mock( '../../tools/build_trace_tree.js', () => ( { default: buildTraceTreeMock } ) );

+ vi.mock( 'json-stream-stringify', async () => {
+ const { Readable } = await import( 'node:stream' );
+ return {
+ JsonStreamStringify: class extends Readable {
+ constructor( body ) {
+ super();
+ this.body = body;
+ }
+
+ _read() {
+ this.push( JSON.stringify( this.body ) );
+ this.push( null );
+ }
+ }
+ };
+ } );
+
+ const streamToString = async stream => {
+ const chunks = [];
+ for await ( const chunk of stream ) {
+ chunks.push( Buffer.isBuffer( chunk ) ? chunk : Buffer.from( chunk ) );
+ }
+ return Buffer.concat( chunks ).toString( 'utf8' );
+ };
+
  describe( 'tracing/processors/s3', () => {
  beforeEach( () => {
  vi.useFakeTimers();
@@ -74,7 +99,7 @@ describe( 'tracing/processors/s3', () => {
  expect( uploadMock ).toHaveBeenCalledTimes( 1 );
  const { key, content } = uploadMock.mock.calls[0][0];
  expect( key ).toMatch( /^WF\/2020\/01\/02\// );
- expect( JSON.parse( content.trim() ).count ).toBe( 3 );
+ expect( JSON.parse( await streamToString( content ) ).count ).toBe( 3 );
  expect( delMock ).toHaveBeenCalledTimes( 1 );
  expect( delMock ).toHaveBeenCalledWith( 'traces/WF/id1' );
  } );
@@ -1,4 +1,5 @@
- import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
+ import { S3Client } from '@aws-sdk/client-s3';
+ import { Upload } from '@aws-sdk/lib-storage';
  import { getVars } from './configs.js';

  const state = { s3Client: null };
@@ -21,7 +22,14 @@ const getS3Client = () => {
  * Upload given file to S3
  * @param {object} args
  * @param {string} key - S3 file key
- * @param {string} content - File content
+ * @param {string|import('node:stream').Readable} content - File content
  */
  export const upload = ( { key, content } ) =>
- getS3Client().send( new PutObjectCommand( { Bucket: getVars().remoteS3Bucket, Key: key, Body: content } ) );
+ new Upload( {
+ client: getS3Client(),
+ params: {
+ Bucket: getVars().remoteS3Bucket,
+ Key: key,
+ Body: content
+ }
+ } ).done();
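The move above from a single PutObjectCommand to @aws-sdk/lib-storage's Upload is what allows the Body to be a stream: Upload reads the stream and buffers it into parts as needed, so no Content-Length has to be computed first. A minimal standalone sketch, with the region, bucket, and key as assumed placeholders:

import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
import { Readable } from 'node:stream';

const client = new S3Client( { region: 'us-east-1' } ); // placeholder region

// Any Readable works as the Body; a literal chunked JSON stream stands in for
// the JsonStreamStringify instance the processor passes through.
const body = Readable.from( [ '{"a":', '1}' ] );

await new Upload( {
  client,
  params: { Bucket: 'example-bucket', Key: 'traces/example.json', Body: body } // placeholders
} ).done();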
@@ -9,24 +9,27 @@ vi.mock( '#utils', () => ( {
  const getVarsMock = vi.fn();
  vi.mock( './configs', () => ( { getVars: () => getVarsMock() } ) );

- const sendMock = vi.fn();
  const ctorState = { args: null };
  class S3ClientMock {
  constructor( args ) {
  ctorState.args = args;
- } send = sendMock;
- }
- class PutObjectCommandMock {
- constructor( input ) {
- this.input = input;
  }
  }
-
  vi.mock( '@aws-sdk/client-s3', () => ( {
- S3Client: S3ClientMock,
- PutObjectCommand: PutObjectCommandMock
+ S3Client: S3ClientMock
  } ) );

+ const uploadDoneMock = vi.fn();
+ const uploadCtorState = { args: [] };
+ class UploadMock {
+ constructor( args ) {
+ uploadCtorState.args.push( args );
+ }
+
+ done = uploadDoneMock;
+ }
+ vi.mock( '@aws-sdk/lib-storage', () => ( { Upload: UploadMock } ) );
+
  async function loadModule() {
  vi.resetModules();
  return import( './s3_client.js' );
@@ -35,6 +38,9 @@ async function loadModule() {
  describe( 'tracing/processors/s3/s3_client', () => {
  beforeEach( () => {
  vi.clearAllMocks();
+ ctorState.args = null;
+ uploadCtorState.args = [];
+ uploadDoneMock.mockResolvedValue( undefined );
  getVarsMock.mockReturnValue( {
  awsRegion: 'us-east-1',
  awsAccessKeyId: 'id',
@@ -48,15 +54,21 @@ describe( 'tracing/processors/s3/s3_client', () => {

  await upload( { key: 'wf/key.json', content: '{"a":1}' } );

- expect( ctorState.args ).toEqual( { region: 'us-east-1', credentials: { secretAccessKey: 'sek', accessKeyId: 'id' } } );
- expect( sendMock ).toHaveBeenCalledTimes( 1 );
- const cmd = sendMock.mock.calls[0][0];
- expect( cmd ).toBeInstanceOf( PutObjectCommandMock );
- expect( cmd.input ).toEqual( { Bucket: 'bucket', Key: 'wf/key.json', Body: '{"a":1}' } );
+ expect( ctorState.args ).toEqual( {
+ region: 'us-east-1',
+ credentials: { secretAccessKey: 'sek', accessKeyId: 'id' }
+ } );
+ expect( uploadCtorState.args ).toHaveLength( 1 );
+ expect( uploadCtorState.args[0] ).toEqual( {
+ client: expect.any( S3ClientMock ),
+ params: { Bucket: 'bucket', Key: 'wf/key.json', Body: '{"a":1}' }
+ } );
+ expect( uploadDoneMock ).toHaveBeenCalledTimes( 1 );

  // subsequent upload uses cached client
  await upload( { key: 'wf/key2.json', content: '{}' } );
- expect( sendMock ).toHaveBeenCalledTimes( 2 );
+ expect( uploadCtorState.args ).toHaveLength( 2 );
+ expect( uploadDoneMock ).toHaveBeenCalledTimes( 2 );
  } );
  } );

@@ -19,31 +19,3 @@ export const serializeError = error =>
  message: error.message,
  stack: error.stack
  };
-
- /**
- * Tries to stringify an object to an indented JSON string.
- * If its byte size is bigger than threshold returns a plain JSON string without formatting.
- *
- * @param {object|array} content
- * @param {*} [threshold] - The max allowed size to try to stringify with formatting (in bytes). Default is 50mb
- * @returns {string} String representation of the object
- */
- export const safeFormatJSON = ( content, threshold = 50 * 1024 * 1024 /* 50mb */ ) => {
- const plainString = JSON.stringify( content );
- const plainStringSize = Buffer.byteLength( plainString, 'utf8' );
-
- if ( plainStringSize > threshold ) {
- return plainString;
- }
- try {
- return JSON.stringify( content, undefined, 2 );
- } catch ( error ) {
- // Only handles this specific error because other common parsing errors like:
- // "TypeError: cyclic object value" and "RangeError: Maximum call stack size exceeded"
- // would have been thrown on the first parsing.
- if ( error instanceof RangeError && error.message === 'Invalid string length' ) {
- return plainString;
- }
- throw error;
- }
- };
@@ -1,15 +1,5 @@
- import { describe, it, expect, vi } from 'vitest';
- import { safeFormatJSON, serializeError } from './utils.js';
-
- const isPrettyStringifyCall = args => args.length >= 3 && args[2] === 2;
-
- /** @param {number} targetBytes UTF-8 size of compact JSON.stringify( { a: "<xs>" } ) */
- const objectWithCompactByteLength = targetBytes => {
- const sample = { a: '' };
- const overhead = Buffer.byteLength( JSON.stringify( sample ), 'utf8' );
- const repeat = Math.max( 0, targetBytes - overhead );
- return { a: 'x'.repeat( repeat ) };
- };
+ import { describe, it, expect } from 'vitest';
+ import { serializeError } from './utils.js';

  describe( 'tracing/utils', () => {
  it( 'serializeError unwraps causes and keeps message/stack', () => {
@@ -21,126 +11,4 @@ describe( 'tracing/utils', () => {
  expect( out.message ).toBe( 'inner' );
  expect( typeof out.stack ).toBe( 'string' );
  } );
-
- describe( 'safeFormatJSON', () => {
- it( 'formats small objects with indentation when under threshold', () => {
- const content = { a: 1, b: [ 2, 3 ] };
- const out = safeFormatJSON( content, 10_000 );
-
- expect( out ).toContain( '\n' );
- expect( out ).toMatch( /^\{\n/ );
- expect( JSON.parse( out ) ).toEqual( content );
- } );
-
- it( 'formats small arrays with indentation when under threshold', () => {
- const content = [ 1, { nested: true } ];
- const out = safeFormatJSON( content, 10_000 );
-
- expect( out ).toContain( '\n' );
- expect( out.trimStart() ).toMatch( /^\[/ );
- expect( JSON.parse( out ) ).toEqual( content );
- } );
-
- it( 'returns compact JSON when compact UTF-8 size is strictly greater than threshold', () => {
- const content = objectWithCompactByteLength( 40 );
- const compact = JSON.stringify( content );
- expect( Buffer.byteLength( compact, 'utf8' ) ).toBe( 40 );
-
- const out = safeFormatJSON( content, 39 );
- expect( out ).toBe( compact );
- expect( out ).not.toContain( '\n ' );
- expect( JSON.parse( out ) ).toEqual( content );
- } );
-
- it( 'uses pretty JSON when compact UTF-8 size equals threshold', () => {
- const content = objectWithCompactByteLength( 40 );
- const compact = JSON.stringify( content );
- expect( Buffer.byteLength( compact, 'utf8' ) ).toBe( 40 );
-
- const out = safeFormatJSON( content, 40 );
- expect( out ).not.toBe( compact );
- expect( out ).toContain( '\n' );
- expect( JSON.parse( out ) ).toEqual( content );
- } );
-
- it( 'uses UTF-8 byte length for threshold, not JavaScript string length', () => {
- const content = { label: 'éclair' };
- const compact = JSON.stringify( content );
- expect( compact.length ).toBeLessThan( Buffer.byteLength( compact, 'utf8' ) );
-
- const bytes = Buffer.byteLength( compact, 'utf8' );
- const outCompact = safeFormatJSON( content, bytes - 1 );
- expect( outCompact ).toBe( compact );
-
- const outPretty = safeFormatJSON( content, bytes + 100 );
- expect( outPretty ).toContain( '\n' );
- expect( JSON.parse( outPretty ) ).toEqual( content );
- } );
-
- it( 'round-trips empty object and primitives for both branches', () => {
- const tiny = {};
- const pretty = safeFormatJSON( tiny, 100 );
- expect( JSON.parse( pretty ) ).toEqual( tiny );
-
- const forcedCompact = safeFormatJSON( tiny, 0 );
- expect( JSON.parse( forcedCompact ) ).toEqual( tiny );
- } );
-
- it( 'returns compact JSON when pretty stringify throws Invalid string length', () => {
- const content = { a: 1 };
- const compact = JSON.stringify( content );
- const origStringify = JSON.stringify.bind( JSON );
-
- const spy = vi.spyOn( JSON, 'stringify' ).mockImplementation( ( ...args ) => {
- if ( isPrettyStringifyCall( args ) ) {
- throw new RangeError( 'Invalid string length' );
- }
- return origStringify( ...args );
- } );
-
- try {
- const out = safeFormatJSON( content, 10_000 );
- expect( out ).toBe( compact );
- expect( JSON.parse( out ) ).toEqual( content );
- } finally {
- spy.mockRestore();
- }
- } );
-
- it( 'rethrows RangeError when message is not Invalid string length', () => {
- const content = { a: 1 };
- const origStringify = JSON.stringify.bind( JSON );
-
- const spy = vi.spyOn( JSON, 'stringify' ).mockImplementation( ( ...args ) => {
- if ( isPrettyStringifyCall( args ) ) {
- throw new RangeError( 'not the string length error' );
- }
- return origStringify( ...args );
- } );
-
- try {
- expect( () => safeFormatJSON( content, 10_000 ) ).toThrow( RangeError );
- } finally {
- spy.mockRestore();
- }
- } );
-
- it( 'rethrows non-RangeError from pretty stringify', () => {
- const content = { a: 1 };
- const origStringify = JSON.stringify.bind( JSON );
-
- const spy = vi.spyOn( JSON, 'stringify' ).mockImplementation( ( ...args ) => {
- if ( isPrettyStringifyCall( args ) ) {
- throw new TypeError( 'cyclic structure' );
- }
- return origStringify( ...args );
- } );
-
- try {
- expect( () => safeFormatJSON( content, 10_000 ) ).toThrow( TypeError );
- } finally {
- spy.mockRestore();
- }
- } );
- } );
  } );
@@ -1,118 +0,0 @@
- /**
- * Aggregate `attributes.cost` and `attributes.token_usage` across an entire trace tree.
- *
- * Walks every node in the tree, sums `attributes.cost.total` grouped by the emitting
- * event name (inferred from node `kind` — see `eventNameForKind`), and sums
- * `attributes.token_usage` across LLM nodes. Falls back to `output.usage` on
- * legacy llm trace nodes that predate the `attributes.token_usage` write
- * (see overview §1.2).
- *
- * @typedef {object} TraceAttributes
- * @property {{ total: number, components: Array<{ name: string, value: number }> }} cost
- * @property {{ inputTokens: number, outputTokens: number, cachedInputTokens: number, totalTokens: number }} tokenUsage
- */
-
- const COST_EVENT_LLM = 'cost:llm:request';
- const COST_EVENT_HTTP = 'cost:http:request';
- const COST_EVENT_OTHER = 'other';
-
- /**
- * Map a trace node `kind` to the canonical cost event name that would emit it.
- * Unknown kinds bucket into `other` so future event sources still roll up cleanly.
- *
- * @param {string} kind
- * @returns {string}
- */
- const eventNameForKind = kind => {
- if ( kind === 'llm' ) {
- return COST_EVENT_LLM;
- }
- if ( kind === 'http' ) {
- return COST_EVENT_HTTP;
- }
- return COST_EVENT_OTHER;
- };
-
- const isNumber = value => typeof value === 'number' && Number.isFinite( value );
-
- /**
- * Pull token usage off an llm node, preferring the new attribute over the legacy
- * `output.usage` fallback. Returns `null` when neither shape is present.
- */
- const readTokenUsage = node => {
- const attrUsage = node.attributes?.token_usage;
- if ( attrUsage && typeof attrUsage === 'object' ) {
- return attrUsage;
- }
- const legacyUsage = node.output?.usage;
- if ( legacyUsage && typeof legacyUsage === 'object' ) {
- return legacyUsage;
- }
- return null;
- };
-
- /**
- * Recursively walk a trace tree depth-first, applying `visit` to each node.
- */
- const walk = ( node, visit ) => {
- if ( !node ) {
- return;
- }
- visit( node );
- for ( const child of node.children ?? [] ) {
- walk( child, visit );
- }
- };
-
- /**
- * Build the aggregated `attributes` payload returned by `/trace-attributes`.
- * Component buckets always appear in a stable order so callers can index them
- * positionally if they want to.
- *
- * @param {object|null} root - The root NodeEntry returned by `buildTraceTree`.
- * @returns {TraceAttributes}
- */
- export default function aggregateTraceAttributes( root ) {
- const costByEvent = new Map( [
- [ COST_EVENT_LLM, 0 ],
- [ COST_EVENT_HTTP, 0 ],
- [ COST_EVENT_OTHER, 0 ]
- ] );
- const tokenUsage = { inputTokens: 0, outputTokens: 0, cachedInputTokens: 0, totalTokens: 0 };
-
- walk( root, node => {
- const cost = node.attributes?.cost;
- if ( cost && isNumber( cost.total ) ) {
- const eventName = eventNameForKind( node.kind );
- costByEvent.set( eventName, ( costByEvent.get( eventName ) ?? 0 ) + cost.total );
- }
-
- if ( node.kind === 'llm' ) {
- const usage = readTokenUsage( node );
- if ( usage ) {
- if ( isNumber( usage.inputTokens ) ) {
- tokenUsage.inputTokens += usage.inputTokens;
- }
- if ( isNumber( usage.outputTokens ) ) {
- tokenUsage.outputTokens += usage.outputTokens;
- }
- if ( isNumber( usage.cachedInputTokens ) ) {
- tokenUsage.cachedInputTokens += usage.cachedInputTokens;
- }
- if ( isNumber( usage.totalTokens ) ) {
- tokenUsage.totalTokens += usage.totalTokens;
- }
- }
- }
- } );
-
- const components = Array.from( costByEvent, ( [ name, value ] ) => ( { name, value } ) );
- const total = components.reduce( ( sum, { value } ) => sum + value, 0 );
-
- return {
- cost: { total, components },
- tokenUsage
- };
- }
-
- export { COST_EVENT_LLM, COST_EVENT_HTTP, COST_EVENT_OTHER };
@@ -1,231 +0,0 @@
- import { describe, it, expect } from 'vitest';
- import aggregateTraceAttributes, {
- COST_EVENT_LLM,
- COST_EVENT_HTTP,
- COST_EVENT_OTHER
- } from './aggregate_trace_attributes.js';
-
- const node = ( { id, kind = 'step', attributes = {}, output, children = [] } ) => ( {
- id,
- kind,
- name: id,
- startedAt: 0,
- endedAt: 0,
- input: undefined,
- output,
- attributes,
- children
- } );
-
- describe( 'aggregate_trace_attributes', () => {
- it( 'returns zeros for a null root', () => {
- const result = aggregateTraceAttributes( null );
- expect( result.cost.total ).toBe( 0 );
- expect( result.cost.components ).toEqual( [
- { name: COST_EVENT_LLM, value: 0 },
- { name: COST_EVENT_HTTP, value: 0 },
- { name: COST_EVENT_OTHER, value: 0 }
- ] );
- expect( result.tokenUsage ).toEqual( {
- inputTokens: 0, outputTokens: 0, cachedInputTokens: 0, totalTokens: 0
- } );
- } );
-
- it( 'returns zeros for a tree with no cost or usage attributes', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [ node( { id: 's1' } ), node( { id: 's2' } ) ]
- } );
- const result = aggregateTraceAttributes( root );
- expect( result.cost.total ).toBe( 0 );
- expect( result.tokenUsage.totalTokens ).toBe( 0 );
- } );
-
- it( 'buckets cost by node kind into llm / http / other components', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [
- node( { id: 'llm-1', kind: 'llm', attributes: { cost: { total: 0.20 } } } ),
- node( { id: 'llm-2', kind: 'llm', attributes: { cost: { total: 0.10 } } } ),
- node( { id: 'http-1', kind: 'http', attributes: { cost: { total: 0.50 } } } ),
- // Unknown kind falls into the catch-all bucket
- node( { id: 'step-1', kind: 'step', attributes: { cost: { total: 0.07 } } } )
- ]
- } );
- const result = aggregateTraceAttributes( root );
-
- const byName = Object.fromEntries( result.cost.components.map( c => [ c.name, c.value ] ) );
- expect( byName[COST_EVENT_LLM] ).toBeCloseTo( 0.30, 10 );
- expect( byName[COST_EVENT_HTTP] ).toBeCloseTo( 0.50, 10 );
- expect( byName[COST_EVENT_OTHER] ).toBeCloseTo( 0.07, 10 );
- expect( result.cost.total ).toBeCloseTo( 0.87, 10 );
- } );
-
- it( 'total equals the sum of all components', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [
- node( { id: 'llm-1', kind: 'llm', attributes: { cost: { total: 0.1234 } } } ),
- node( { id: 'http-1', kind: 'http', attributes: { cost: { total: 0.0011 } } } )
- ]
- } );
- const { cost } = aggregateTraceAttributes( root );
- const sum = cost.components.reduce( ( s, c ) => s + c.value, 0 );
- expect( cost.total ).toBeCloseTo( sum, 10 );
- } );
-
- it( 'sums token_usage across llm nodes from the attribute path', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [
- node( {
- id: 'llm-1', kind: 'llm', attributes: {
- token_usage: { inputTokens: 100, outputTokens: 20, cachedInputTokens: 5, totalTokens: 125 }
- }
- } ),
- node( {
- id: 'llm-2', kind: 'llm', attributes: {
- token_usage: { inputTokens: 50, outputTokens: 10, cachedInputTokens: 1, totalTokens: 61 }
- }
- } )
- ]
- } );
- const { tokenUsage } = aggregateTraceAttributes( root );
- expect( tokenUsage ).toEqual( {
- inputTokens: 150,
- outputTokens: 30,
- cachedInputTokens: 6,
- totalTokens: 186
- } );
- } );
-
- it( 'falls back to output.usage on legacy llm nodes that lack attributes.token_usage', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [
- // Legacy shape — usage lives on output.usage, no attributes.token_usage
- node( {
- id: 'llm-legacy',
- kind: 'llm',
- output: { result: '...', usage: { inputTokens: 200, outputTokens: 40, totalTokens: 240 } }
- } )
- ]
- } );
- const { tokenUsage } = aggregateTraceAttributes( root );
- expect( tokenUsage.inputTokens ).toBe( 200 );
- expect( tokenUsage.outputTokens ).toBe( 40 );
- expect( tokenUsage.totalTokens ).toBe( 240 );
- expect( tokenUsage.cachedInputTokens ).toBe( 0 );
- } );
-
- it( 'prefers attributes.token_usage over output.usage when both are present', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [
- node( {
- id: 'llm-1',
- kind: 'llm',
- attributes: { token_usage: { inputTokens: 10, outputTokens: 2, totalTokens: 12 } },
- output: { usage: { inputTokens: 999, outputTokens: 999, totalTokens: 999 } }
- } )
- ]
- } );
- const { tokenUsage } = aggregateTraceAttributes( root );
- expect( tokenUsage.inputTokens ).toBe( 10 );
- expect( tokenUsage.totalTokens ).toBe( 12 );
- } );
-
- it( 'ignores token_usage shapes on non-llm nodes', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- // attributes.token_usage on a non-llm node is intentionally ignored —
- // only llm nodes contribute to the token-usage rollup today.
- children: [
- node( {
- id: 'step-1', kind: 'step', attributes: {
- token_usage: { inputTokens: 999, outputTokens: 999, totalTokens: 999 }
- }
- } )
- ]
- } );
- const { tokenUsage } = aggregateTraceAttributes( root );
- expect( tokenUsage.totalTokens ).toBe( 0 );
- } );
-
- it( 'aggregates a mixed tree with cost on http nodes and usage on llm nodes', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [
- node( {
- id: 'llm-1',
- kind: 'llm',
- attributes: {
- cost: { total: 0.0038 },
- token_usage: { inputTokens: 2264, outputTokens: 411, cachedInputTokens: 100, totalTokens: 2775 }
- }
- } ),
- node( {
- id: 'http-1',
- kind: 'http',
- attributes: { cost: { total: 0.50 } }
- } )
- ]
- } );
- const result = aggregateTraceAttributes( root );
-
- const byName = Object.fromEntries( result.cost.components.map( c => [ c.name, c.value ] ) );
- expect( byName[COST_EVENT_LLM] ).toBeCloseTo( 0.0038, 10 );
- expect( byName[COST_EVENT_HTTP] ).toBeCloseTo( 0.50, 10 );
- expect( byName[COST_EVENT_OTHER] ).toBe( 0 );
- expect( result.cost.total ).toBeCloseTo( 0.5038, 10 );
-
- expect( result.tokenUsage ).toEqual( {
- inputTokens: 2264,
- outputTokens: 411,
- cachedInputTokens: 100,
- totalTokens: 2775
- } );
- } );
-
- it( 'recurses through nested children', () => {
- const root = node( {
- id: 'wf',
- kind: 'workflow',
- children: [
- node( {
- id: 's1',
- kind: 'step',
- children: [
- node( {
- id: 'llm-1', kind: 'llm', attributes: {
- cost: { total: 0.01 },
- token_usage: { inputTokens: 10, outputTokens: 5, totalTokens: 15 }
- }
- } )
- ]
- } )
- ]
- } );
- const result = aggregateTraceAttributes( root );
- expect( result.cost.total ).toBeCloseTo( 0.01, 10 );
- expect( result.tokenUsage.totalTokens ).toBe( 15 );
- } );
-
- it( 'keeps the canonical component ordering: llm, http, other', () => {
- const root = node( { id: 'wf', kind: 'workflow' } );
- const { cost } = aggregateTraceAttributes( root );
- expect( cost.components.map( c => c.name ) ).toEqual( [
- COST_EVENT_LLM,
- COST_EVENT_HTTP,
- COST_EVENT_OTHER
- ] );
- } );
- } );
@@ -1,7 +0,0 @@
- export { default as buildTraceTree } from './build_trace_tree.js';
- export {
- default as aggregateTraceAttributes,
- COST_EVENT_LLM,
- COST_EVENT_HTTP,
- COST_EVENT_OTHER
- } from './aggregate_trace_attributes.js';