@output.ai/core 0.1.8-dev.pr156.c8e7f40 → 0.1.8
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/README.md
CHANGED
@@ -191,7 +191,6 @@ The url of the example will receive the payload, plus the workflowId:
 To resume the workflow, a POST has to be made with a response payload and the workflowId.
 
 - Production: `https://output-api-production.onrender.com/workflow/feedback`
-- Staging: `https://output-api-staging.onrender.com/workflow/feedback`
 - Local: `http://localhost:3001/workflow/feedback`
 
 Example:
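For illustration, a minimal sketch of that POST against the local endpoint; the field names other than workflowId are assumptions, not the documented schema (the README's own "Example:" section, just past this hunk, is the authoritative reference).

    // Hedged sketch: endpoint taken from the list above; the body shape is assumed.
    const res = await fetch( 'http://localhost:3001/workflow/feedback', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify( {
            workflowId: 'wf_123',          // hypothetical workflow id
            response: { approved: true },  // hypothetical response payload
        } ),
    } );
    console.log( res.status );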
@@ -234,7 +233,7 @@ After having the API and the engine running, to start the worker just run:
 Necessary env variables to run the worker locally:
 
 - `TEMPORAL_ADDRESS`: The temporal backend address, prefer the remote;
-- `TEMPORAL_NAMESPACE`: The name of the namespace, if using remote, use: "output-
+- `TEMPORAL_NAMESPACE`: The name of the namespace, if using remote, use: "output-production.i0jzq";
 - `TEMPORAL_API_KEY`: The API key to access remote temporal. If using local temporal, leave it blank;
 - `CATALOG_ID`: The name of the local catalog, always set this. Use your email;
 - `API_AUTH_KEY`: The API key to access the Framework API. Local can be blank, remote use the proper API Key;
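One way to set the variables above for a local run is a dotenv-style file or equivalent shell exports (whether the worker loads a .env file automatically is not stated in this hunk). A sketch with placeholder values; only the namespace value is the one documented above.

    # Illustrative .env sketch; every value except TEMPORAL_NAMESPACE is a placeholder.
    TEMPORAL_ADDRESS=your-remote-temporal-host:7233
    TEMPORAL_NAMESPACE=output-production.i0jzq
    TEMPORAL_API_KEY=              # leave blank for local temporal
    CATALOG_ID=you@example.com     # use your email
    API_AUTH_KEY=                  # blank locally; the proper API key for remote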
package/package.json
CHANGED
@@ -1,29 +1,48 @@
 import { appendFileSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
-import { join } from 'node:path';
+import { dirname, join } from 'node:path';
+import { fileURLToPath } from 'url';
 import buildTraceTree from '../../tools/build_trace_tree.js';
 import { EOL } from 'node:os';
 
-const
-
-const
-
+const __dirname = dirname( fileURLToPath( import.meta.url ) );
+
+const PURGE_TEMP_FILES_THRESHOLD = 1000 * 60 * 60 * 24 * 7; // 1 week in milliseconds
+
+// The path to the project root
+const LOCAL_PROJECT_ROOT_PATH = process.argv[2] || process.cwd();
+
+// The path to the local trace logs
+const LOCAL_TRACE_LOG_PATH = join( LOCAL_PROJECT_ROOT_PATH, 'logs' );
+
+// The path to the temporary trace logs
+const TMP_TRACE_LOG_PATH = join( __dirname, 'temp', 'traces' );
 
 const accumulate = ( { entry, executionContext: { workflowId, startTime } } ) => {
-    const path = join(
+    const path = join( TMP_TRACE_LOG_PATH, `${startTime}_${workflowId}.trace` );
     appendFileSync( path, JSON.stringify( entry ) + EOL, 'utf-8' );
     return readFileSync( path, 'utf-8' ).split( EOL ).slice( 0, -1 ).map( v => JSON.parse( v ) );
 };
 
-const cleanupOldTempFiles = ( threshold = Date.now() -
-    readdirSync(
+const cleanupOldTempFiles = ( threshold = Date.now() - PURGE_TEMP_FILES_THRESHOLD ) =>
+    readdirSync( TMP_TRACE_LOG_PATH )
        .filter( f => +f.split( '_' )[0] < threshold )
-       .forEach( f => rmSync( join(
+       .forEach( f => rmSync( join( TMP_TRACE_LOG_PATH, f ) ) );
+
+/**
+ * Get the host trace log path, which is used for reporting trace locations.
+ * In containerized environments (e.g., Docker), this can be different from the local path
+ * to map container paths to host filesystem paths.
+ * @returns {string} The host trace log path from HOST_TRACE_PATH env var, or local path as fallback
+ */
+const getHostTraceLogPath = () => {
+    return process.env.HOST_TRACE_PATH || LOCAL_TRACE_LOG_PATH;
+};
 
 /**
  * Init this processor
  */
 export const init = () => {
-    mkdirSync(
+    mkdirSync( TMP_TRACE_LOG_PATH, { recursive: true } );
     cleanupOldTempFiles();
 };
 
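The hunk above moves the temp traces next to the module (`__dirname/temp/traces`), names each file by its workflow start time, and prunes files older than a week. A small illustrative sketch of that naming and pruning rule, with made-up values:

    // Illustrative only: shows how a temp trace filename is built and when the
    // week-old cleanup rule from the diff would consider it stale.
    const startTime = Date.now();            // epoch ms, becomes the filename prefix
    const workflowId = 'example-workflow';   // hypothetical id
    const file = `${startTime}_${workflowId}.trace`;

    const PURGE_TEMP_FILES_THRESHOLD = 1000 * 60 * 60 * 24 * 7; // 1 week, as in the diff
    const isStale = +file.split( '_' )[0] < Date.now() - PURGE_TEMP_FILES_THRESHOLD;
    console.log( { file, isStale } ); // isStale is false for a file created just now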
@@ -34,8 +53,7 @@ export const init = () => {
  * @returns {string} The local filesystem path for file operations
  */
 const getLocalOutputDir = workflowName => {
-
-    return join( root, 'logs', 'runs', workflowName );
+    return join( LOCAL_PROJECT_ROOT_PATH, 'logs', 'runs', workflowName );
 };
 
 /**
@@ -45,9 +63,7 @@ const getLocalOutputDir = workflowName => {
  * @returns {string} The path to report to users/API
  */
 const getReportOutputDir = workflowName => {
-
-    const basePath = process.env.HOST_TRACE_PATH || join( root, 'logs' );
-    return join( basePath, 'runs', workflowName );
+    return join( getHostTraceLogPath(), 'runs', workflowName );
 };
 
 const buildOutputFileName = ( { startTime, workflowId } ) => {
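Together with the previous hunk, this separates where files are written from where they are reported: getLocalOutputDir builds on LOCAL_PROJECT_ROOT_PATH, while getReportOutputDir goes through getHostTraceLogPath() and so honours HOST_TRACE_PATH. A hedged illustration with made-up container/host paths; the bind-mount layout is an assumption, not from the package.

    // Assumed layout: the project root inside the container is /app, bind-mounted
    // from /home/user/project on the host, so HOST_TRACE_PATH points at the host side.
    process.env.HOST_TRACE_PATH = '/home/user/project/logs';
    // getLocalOutputDir( 'demo' )  -> '/app/logs/runs/demo'                (used for writes)
    // getReportOutputDir( 'demo' ) -> '/home/user/project/logs/runs/demo'  (reported to users/API)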
@@ -40,8 +40,8 @@ describe( 'tracing/processors/local', () => {
 
         init();
 
-        // Should create temp dir
-        expect( mkdirSyncMock ).toHaveBeenCalledWith( expect.stringMatching( /
+        // Should create temp dir relative to module location using __dirname
+        expect( mkdirSyncMock ).toHaveBeenCalledWith( expect.stringMatching( /temp\/traces$/ ), { recursive: true } );
         expect( rmSyncMock ).toHaveBeenCalledTimes( 1 );
     } );
 