@output.ai/core 0.1.0 → 0.1.1

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
Files changed (41)
  1. package/README.md +16 -22
  2. package/package.json +8 -6
  3. package/src/consts.js +1 -1
  4. package/src/interface/evaluator.js +8 -4
  5. package/src/interface/workflow.js +11 -14
  6. package/src/internal_activities/index.js +0 -32
  7. package/src/tracing/index.d.ts +4 -4
  8. package/src/tracing/index.js +12 -121
  9. package/src/tracing/internal_interface.js +66 -0
  10. package/src/tracing/processors/local/index.js +50 -0
  11. package/src/tracing/processors/local/index.spec.js +67 -0
  12. package/src/tracing/processors/s3/index.js +51 -0
  13. package/src/tracing/processors/s3/index.spec.js +64 -0
  14. package/src/tracing/processors/s3/redis_client.js +19 -0
  15. package/src/tracing/processors/s3/redis_client.spec.js +50 -0
  16. package/src/tracing/processors/s3/s3_client.js +33 -0
  17. package/src/tracing/processors/s3/s3_client.spec.js +67 -0
  18. package/src/tracing/{tracer_tree.js → tools/build_trace_tree.js} +4 -11
  19. package/src/tracing/{tracer_tree.spec.js → tools/build_trace_tree.spec.js} +4 -20
  20. package/src/tracing/{utils.js → tools/utils.js} +7 -0
  21. package/src/tracing/trace_engine.js +63 -0
  22. package/src/tracing/trace_engine.spec.js +91 -0
  23. package/src/utils.js +8 -0
  24. package/src/worker/catalog_workflow/index.js +2 -1
  25. package/src/worker/catalog_workflow/index.spec.js +6 -10
  26. package/src/worker/configs.js +24 -0
  27. package/src/worker/index.js +7 -4
  28. package/src/worker/interceptors/activity.js +7 -14
  29. package/src/worker/interceptors/workflow.js +1 -2
  30. package/src/worker/loader.js +39 -30
  31. package/src/worker/loader.spec.js +20 -24
  32. package/src/worker/loader_tools.js +63 -0
  33. package/src/worker/loader_tools.spec.js +85 -0
  34. package/src/worker/sinks.js +8 -4
  35. package/src/configs.js +0 -31
  36. package/src/configs.spec.js +0 -331
  37. package/src/tracing/index.private.spec.js +0 -84
  38. package/src/tracing/index.public.spec.js +0 -86
  39. package/src/worker/internal_utils.js +0 -60
  40. package/src/worker/internal_utils.spec.js +0 -134
  41. package/src/tracing/{utils.spec.js → tools/utils.spec.js} +0 -0
package/src/configs.spec.js
@@ -1,331 +0,0 @@
- import { describe, it, expect, beforeEach, vi } from 'vitest';
-
- describe( 'configs', () => {
- const originalEnv = process.env;
-
- beforeEach( () => {
- vi.resetModules();
- process.env = { ...originalEnv };
- } );
-
- afterEach( () => {
- process.env = originalEnv;
- } );
-
- describe( 'Environment Variable Validation', () => {
- describe( 'TEMPORAL_ADDRESS', () => {
- it( 'should accept string addresses', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'localhost:7233',
- CATALOG_ID: 'test-catalog'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.address ).toBe( 'localhost:7233' );
- } );
-
- it( 'should use default when omitted', async () => {
- process.env = {
- CATALOG_ID: 'test-catalog'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.address ).toBe( 'localhost:7233' );
- } );
-
- it( 'should reject non-string values', async () => {
- process.env = {
- TEMPORAL_ADDRESS: '123',
- CATALOG_ID: 'test-catalog'
- };
-
- // Convert to number to test non-string
- const originalEnv = process.env.TEMPORAL_ADDRESS;
- process.env.TEMPORAL_ADDRESS = 123;
-
- await expect( import( './configs.js' ) ).rejects.toThrow();
-
- process.env.TEMPORAL_ADDRESS = originalEnv;
- } );
- } );
-
- describe( 'CATALOG_ID', () => {
- it( 'should accept valid catalog IDs with letters and numbers', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'catalog123'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.catalogId ).toBe( 'catalog123' );
- expect( worker.taskQueue ).toBe( 'catalog123' );
- } );
-
- it( 'should accept catalog IDs with dots and hyphens', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'my.catalog-id'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.catalogId ).toBe( 'my.catalog-id' );
- } );
-
- it( 'should accept catalog IDs with underscores', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'my_catalog_id'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.catalogId ).toBe( 'my_catalog_id' );
- } );
-
- it( 'should accept catalog IDs with @ symbol', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: '@my-catalog'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.catalogId ).toBe( '@my-catalog' );
- } );
-
- it( 'should reject catalog IDs with special characters', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'catalog!@#$'
- };
-
- await expect( import( './configs.js' ) ).rejects.toThrow();
- } );
-
- it( 'should reject when CATALOG_ID is missing', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233'
- };
-
- await expect( import( './configs.js' ) ).rejects.toThrow();
- } );
-
- it( 'should reject empty CATALOG_ID', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: ''
- };
-
- await expect( import( './configs.js' ) ).rejects.toThrow();
- } );
- } );
-
- describe( 'Optional Fields', () => {
- it( 'should use default namespace when TEMPORAL_NAMESPACE is not provided', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.namespace ).toBe( 'default' );
- } );
-
- it( 'should use custom namespace when provided', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- TEMPORAL_NAMESPACE: 'custom-namespace'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.namespace ).toBe( 'custom-namespace' );
- } );
-
- it( 'should handle TEMPORAL_API_KEY when provided', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- TEMPORAL_API_KEY: 'secret-api-key'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.apiKey ).toBe( 'secret-api-key' );
- } );
-
- it( 'should handle missing TEMPORAL_API_KEY', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.apiKey ).toBeUndefined();
- } );
-
- it( 'should handle API_AUTH_KEY when provided', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- API_AUTH_KEY: 'api-secret-key'
- };
-
- const { api } = await import( './configs.js' );
- expect( api.authKey ).toBe( 'api-secret-key' );
- } );
-
- it( 'should handle missing API_AUTH_KEY', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog'
- };
-
- const { api } = await import( './configs.js' );
- expect( api.authKey ).toBeUndefined();
- } );
- } );
- } );
-
- describe( 'Exported Config Objects', () => {
- it( 'should export worker config with all properties', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- TEMPORAL_NAMESPACE: 'test-namespace',
- TEMPORAL_API_KEY: 'test-api-key'
- };
-
- const { worker } = await import( './configs.js' );
-
- expect( worker ).toEqual( {
- address: 'http://localhost:7233',
- apiKey: 'test-api-key',
- executionTimeout: '1m',
- maxActivities: 100,
- maxWorkflows: 100,
- namespace: 'test-namespace',
- taskQueue: 'test-catalog',
- catalogId: 'test-catalog'
- } );
- } );
-
- it( 'should export api config', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- API_AUTH_KEY: 'test-auth-key'
- };
-
- const { api } = await import( './configs.js' );
-
- expect( api ).toEqual( {
- authKey: 'test-auth-key'
- } );
- } );
-
- it( 'should have correct static worker config values', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog'
- };
-
- const { worker } = await import( './configs.js' );
-
- expect( worker.executionTimeout ).toBe( '1m' );
- expect( worker.maxActivities ).toBe( 100 );
- expect( worker.maxWorkflows ).toBe( 100 );
- } );
- } );
-
- describe( 'Error Handling', () => {
- it( 'should throw InvalidEnvVarsErrors for invalid configuration', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'localhost:7233',
- CATALOG_ID: 'invalid!@#'
- };
-
- await expect( import( './configs.js' ) ).rejects.toThrow();
- } );
-
- it( 'should handle multiple validation errors', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'localhost:7233',
- CATALOG_ID: 'invalid!@#'
- };
-
- await expect( import( './configs.js' ) ).rejects.toThrow();
- } );
- } );
-
- describe( 'Edge Cases', () => {
- it( 'should handle environment variables with spaces', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'test-catalog',
- TEMPORAL_NAMESPACE: ' custom-namespace '
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.namespace ).toBe( ' custom-namespace ' );
- } );
-
- it( 'should handle very long catalog IDs', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'a'.repeat( 100 )
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.catalogId ).toBe( 'a'.repeat( 100 ) );
- } );
-
- it( 'should handle URLs with ports outside typical range', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:65535',
- CATALOG_ID: 'test-catalog'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.address ).toBe( 'http://localhost:65535' );
- } );
-
- it( 'should handle container names with numbers', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'temporal123:7233',
- CATALOG_ID: 'test-catalog'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.address ).toBe( 'temporal123:7233' );
- } );
-
- it( 'should handle mixed case in catalog ID', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'http://localhost:7233',
- CATALOG_ID: 'Test.Catalog-ID_123'
- };
-
- const { worker } = await import( './configs.js' );
- expect( worker.catalogId ).toBe( 'Test.Catalog-ID_123' );
- } );
- } );
-
- describe( 'Complete Valid Configuration', () => {
- it( 'should handle a complete valid configuration with all optional fields', async () => {
- process.env = {
- TEMPORAL_ADDRESS: 'https://temporal.cloud.example.com',
- TEMPORAL_NAMESPACE: 'production',
- TEMPORAL_API_KEY: 'prod-api-key-123',
- CATALOG_ID: 'prod.catalog@v1',
- API_AUTH_KEY: 'secure-auth-key'
- };
-
- const { worker, api } = await import( './configs.js' );
-
- expect( worker.address ).toBe( 'https://temporal.cloud.example.com' );
- expect( worker.namespace ).toBe( 'production' );
- expect( worker.apiKey ).toBe( 'prod-api-key-123' );
- expect( worker.catalogId ).toBe( 'prod.catalog@v1' );
- expect( worker.taskQueue ).toBe( 'prod.catalog@v1' );
- expect( api.authKey ).toBe( 'secure-auth-key' );
- } );
- } );
- } );
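
Note: the removed spec above relies on one pattern throughout: mutate process.env, call vi.resetModules(), then re-import ./configs.js so its module-level validation runs against the fresh environment. A minimal standalone sketch of that pattern (module path and asserted values are taken from the spec; this code is not part of the 0.1.1 package):

import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';

const ORIGINAL_ENV = process.env;

describe( 'env-driven config re-import', () => {
	beforeEach( () => {
		vi.resetModules();                 // drop the module cache so import() re-evaluates configs.js
		process.env = { ...ORIGINAL_ENV }; // work on a copy of the environment
	} );

	afterEach( () => {
		process.env = ORIGINAL_ENV;        // always restore the real environment
	} );

	it( 'validates CATALOG_ID at import time', async () => {
		process.env = { TEMPORAL_ADDRESS: 'localhost:7233', CATALOG_ID: 'test-catalog' };
		const { worker } = await import( './configs.js' );
		expect( worker.taskQueue ).toBe( 'test-catalog' );
	} );
} );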
package/src/tracing/index.private.spec.js
@@ -1,84 +0,0 @@
- import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
- import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
- import { tmpdir, EOL } from 'node:os';
- import { join } from 'path';
-
- const createTempDir = () => mkdtempSync( join( tmpdir(), 'output-sdk-trace-' ) );
-
- // Use env var to enable tracing
-
- vi.mock( '#async_storage', () => ( { Storage: { load: () => ( { parentId: undefined, traceId: 'trace-1', traceHelm: 'tests' } ) } } ) );
- vi.mock( 'path', async importActual => {
- const actual = await importActual();
- return { ...actual, join: ( first, ...rest ) => actual.join( first ?? process.cwd(), ...rest ) };
- } );
- vi.mock( './tracer_tree.js', () => ( { buildLogTree: vi.fn() } ) );
-
- describe( 'tracing private exports', () => {
- beforeEach( () => {
- vi.resetModules();
- vi.clearAllMocks();
- vi.useFakeTimers();
- vi.setSystemTime( new Date( '2020-01-01T00:00:00.000Z' ) );
- process.env.TRACING_ENABLED = '1';
- } );
-
- afterEach( () => {
- vi.useRealTimers();
- delete process.env.TRACING_ENABLED;
- } );
-
- it( 'addEventStart (private) writes start', async () => {
- const originalArgv2 = process.argv[2];
- const tmp = createTempDir();
- process.argv[2] = tmp;
-
- const { addEventStart } = await import( './index.js?v=private1' );
- addEventStart( { id: 'a', kind: 'evaluator', name: 'start', details: { foo: 1 }, traceId: 'trace-1', traceHelm: 'tests' } );
-
- const { buildLogTree } = await import( './tracer_tree.js' );
- const logPath = buildLogTree.mock.calls[0][0];
- const raw = readFileSync( logPath, 'utf-8' );
- const entry = JSON.parse( raw.split( EOL )[0] );
- expect( entry.phase ).toBe( 'start' );
-
- rmSync( tmp, { recursive: true, force: true } );
- process.argv[2] = originalArgv2;
- } );
-
- it( 'addEventEnd (private) writes end', async () => {
- const originalArgv2 = process.argv[2];
- const tmp = createTempDir();
- process.argv[2] = tmp;
-
- const { addEventEnd } = await import( './index.js?v=private2' );
- addEventEnd( { id: 'a', details: { ok: true }, traceId: 'trace-1', traceHelm: 'tests' } );
-
- const { buildLogTree } = await import( './tracer_tree.js' );
- const logPath = buildLogTree.mock.calls[0][0];
- const raw = readFileSync( logPath, 'utf-8' );
- const entry = JSON.parse( raw.split( EOL )[0] );
- expect( entry.phase ).toBe( 'end' );
-
- rmSync( tmp, { recursive: true, force: true } );
- process.argv[2] = originalArgv2;
- } );
-
- it( 'addEventError (private) writes error', async () => {
- const originalArgv2 = process.argv[2];
- const tmp = createTempDir();
- process.argv[2] = tmp;
-
- const { addEventError } = await import( './index.js?v=private3' );
- addEventError( { id: 'a', details: new Error( 'oops' ), traceId: 'trace-1', traceHelm: 'tests' } );
-
- const { buildLogTree } = await import( './tracer_tree.js' );
- const logPath = buildLogTree.mock.calls[0][0];
- const raw = readFileSync( logPath, 'utf-8' );
- const entry = JSON.parse( raw.split( EOL )[0] );
- expect( entry.phase ).toBe( 'error' );
-
- rmSync( tmp, { recursive: true, force: true } );
- process.argv[2] = originalArgv2;
- } );
- } );
package/src/tracing/index.public.spec.js
@@ -1,86 +0,0 @@
- import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
- import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
- import { tmpdir, EOL } from 'node:os';
- import { join } from 'path';
-
- const createTempDir = () => mkdtempSync( join( tmpdir(), 'output-sdk-trace-' ) );
-
- // Async storage mock to drive parent ids
-
- const mockStorageData = { parentId: undefined, traceId: 'trace-1', traceHelm: 'tests' };
- vi.mock( '#async_storage', () => ( { Storage: { load: () => mockStorageData } } ) );
-
- vi.mock( './tracer_tree.js', () => ( { buildLogTree: vi.fn() } ) );
-
- describe( 'Tracing (public namespace)', () => {
- beforeEach( () => {
- vi.resetModules();
- vi.clearAllMocks();
- vi.useFakeTimers();
- vi.setSystemTime( new Date( '2020-01-01T00:00:00.000Z' ) );
- process.env.TRACING_ENABLED = '1';
- } );
-
- afterEach( () => {
- vi.useRealTimers();
- delete process.env.TRACING_ENABLED;
- } );
-
- it( 'addEventStart writes a start entry', async () => {
- const originalArgv2 = process.argv[2];
- const tmp = createTempDir();
- process.argv[2] = tmp;
-
- const { Tracing } = await import( './index.js' );
- Tracing.addEventStart( { id: '1', kind: 'evaluator', name: 'start', details: { a: 1 } } );
-
- const { buildLogTree } = await import( './tracer_tree.js' );
- const logPath = buildLogTree.mock.calls[0][0];
- const raw = readFileSync( logPath, 'utf-8' );
- const firstLine = raw.split( EOL )[0];
- const entry = JSON.parse( firstLine );
- expect( entry ).toMatchObject( { phase: 'start', kind: 'evaluator', name: 'start', id: '1', details: { a: 1 } } );
-
- rmSync( tmp, { recursive: true, force: true } );
- process.argv[2] = originalArgv2;
- } );
-
- it( 'addEventEnd writes an end entry', async () => {
- const originalArgv2 = process.argv[2];
- const tmp = createTempDir();
- process.argv[2] = tmp;
-
- const { Tracing } = await import( './index.js' );
- Tracing.addEventEnd( { id: '1', details: { ok: true } } );
-
- const { buildLogTree } = await import( './tracer_tree.js' );
- const logPath = buildLogTree.mock.calls[0][0];
- const raw = readFileSync( logPath, 'utf-8' );
- const firstLine = raw.split( EOL )[0];
- const entry = JSON.parse( firstLine );
- expect( entry ).toMatchObject( { phase: 'end', id: '1', details: { ok: true } } );
-
- rmSync( tmp, { recursive: true, force: true } );
- process.argv[2] = originalArgv2;
- } );
-
- it( 'addEventError writes an error entry', async () => {
- const originalArgv2 = process.argv[2];
- const tmp = createTempDir();
- process.argv[2] = tmp;
-
- const { Tracing } = await import( './index.js' );
- const error = new Error( 'boom' );
- Tracing.addEventError( { id: '1', details: error } );
-
- const { buildLogTree } = await import( './tracer_tree.js' );
- const logPath = buildLogTree.mock.calls[0][0];
- const raw = readFileSync( logPath, 'utf-8' );
- const firstLine = raw.split( EOL )[0];
- const entry = JSON.parse( firstLine );
- expect( entry.phase ).toBe( 'error' );
-
- rmSync( tmp, { recursive: true, force: true } );
- process.argv[2] = originalArgv2;
- } );
- } );
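
Note: taken together, the two removed tracing specs show the 0.1.0 surface that 0.1.1 replaces with trace_engine.js and the local/s3 processors: tracing is a no-op unless TRACING_ENABLED is set, the output directory comes from process.argv[2], and each call appends a newline-delimited JSON entry whose phase is start, end or error. A caller-side sketch of that old public API, importing ./index.js the same way the spec does (the work inside the try block is illustrative only):

import { Tracing } from './index.js';

process.env.TRACING_ENABLED = '1'; // without this flag every call is a no-op

// One traced unit of work: start, then either end or error with the same id.
Tracing.addEventStart( { id: '1', kind: 'evaluator', name: 'start', details: { a: 1 } } );
try {
	// ...do the work being traced...
	Tracing.addEventEnd( { id: '1', details: { ok: true } } );
} catch ( error ) {
	Tracing.addEventError( { id: '1', details: error } );
}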
package/src/worker/internal_utils.js
@@ -1,60 +0,0 @@
- /*
- * These tools cant be used in sandbox environment!!!
- */
- import { resolve } from 'path';
- import { pathToFileURL } from 'url';
- import { METADATA_ACCESS_SYMBOL } from '#consts';
- import { writeFileSync, existsSync, readdirSync, mkdirSync } from 'fs';
-
- /**
- * Recursive traverse directories looking for files with given name,
- * For each found file, return its path, pathname and URI
- *
- * @param {string} path - The path to scan
- * @param {string[]} filenames - The filenames to look for
- * @returns {string[{}]} An array containing an object with path, pathname and URI for each file found
- * */
- export function recursiveNavigateWhileCollecting( path, filenames, collection = [], ignoreDirNames = [ 'vendor', 'node_modules' ] ) {
- for ( const entry of readdirSync( path, { withFileTypes: true } ) ) {
- if ( ignoreDirNames.includes( entry.name ) ) {
- continue;
- }
-
- const pathname = resolve( path, entry.name );
- if ( entry.isDirectory() ) {
- recursiveNavigateWhileCollecting( pathname, filenames, collection );
- } else if ( filenames.includes( entry.name ) ) {
- collection.push( { pathname, path, url: pathToFileURL( pathname ).href } );
- }
- }
-
- return collection;
- };
-
- /**
- * For each path, dynamic import it, and for each exported component with metadata (step, workflow), yields it.
- * @param {string[]} paths - Paths of the files to import
- */
- export async function *iteratorOverImportedComponents( paths ) {
- for ( const { url, path, pathname } of paths ) {
- const imported = await import( url );
- for ( const component of Object.values( imported ) ) {
- const metadata = component[METADATA_ACCESS_SYMBOL];
- if ( !metadata ) {
- continue;
- }
- yield { component, metadata, path, pathname };
- }
- }
- };
-
- /**
- * Write a file using the same signature as Node's FS writeFileSync, but recursively creates the necessary directories in the path.
- */
- export function writeFileOnLocationSync( path, content ) {
- const targetDir = path.split( '/' ).slice( 0, -1 ).join( '/' );
- if ( targetDir && !existsSync( targetDir ) ) {
- mkdirSync( targetDir, { recursive: true } );
- }
- writeFileSync( path, content, 'utf-8' );
- };
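
Note: internal_utils.js above is removed in 0.1.1 (the new loader_tools.js appears to take over this role). Based on its docblocks and the spec that follows, a minimal usage sketch of the removed helpers; the scanned filenames and output path below are illustrative only:

import { recursiveNavigateWhileCollecting, iteratorOverImportedComponents, writeFileOnLocationSync } from './internal_utils.js';

// Collect files named workflow.js or step.js, skipping vendor/ and node_modules/.
const found = recursiveNavigateWhileCollecting( process.cwd(), [ 'workflow.js', 'step.js' ] );

// Dynamically import each hit and yield only exports tagged with METADATA_ACCESS_SYMBOL.
for await ( const { component, metadata, pathname } of iteratorOverImportedComponents( found ) ) {
	console.log( metadata.kind, metadata.name, pathname, typeof component );
}

// Write a summary file, creating any missing directories first.
writeFileOnLocationSync( './build/manifest.json', JSON.stringify( { total: found.length } ) );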
package/src/worker/internal_utils.spec.js
@@ -1,134 +0,0 @@
- import { describe, it, expect } from 'vitest';
- import { mkdtempSync, mkdirSync, writeFileSync, rmSync, readFileSync, existsSync } from 'node:fs';
- import { join, resolve } from 'node:path';
- import { tmpdir } from 'node:os';
- import { pathToFileURL } from 'node:url';
- import { recursiveNavigateWhileCollecting, iteratorOverImportedComponents, writeFileOnLocationSync } from './internal_utils.js';
-
- function makeTmpRoot( prefix ) {
- return mkdtempSync( join( tmpdir(), prefix ) );
- }
-
- describe( '.recursiveNavigateWhileCollecting', () => {
- it( 'collects matching files recursively (happy path)', () => {
- const root = makeTmpRoot( 'nsu-happy-' );
- const target = 'target.txt';
-
- // layout:
- // root/target.txt
- // root/a/target.txt
- // root/b/c/target.txt
- mkdirSync( join( root, 'a' ), { recursive: true } );
- mkdirSync( join( root, 'b', 'c' ), { recursive: true } );
- writeFileSync( join( root, target ), 'root' );
- writeFileSync( join( root, 'a', target ), 'a' );
- writeFileSync( join( root, 'b', 'c', target ), 'bc' );
-
- const results = recursiveNavigateWhileCollecting( root, [ target ] );
-
- expect( results.length ).toBe( 3 );
- for ( const { pathname, path, url } of results ) {
- expect( url ).toBe( pathToFileURL( pathname ).href );
- expect( resolve( path, target ) ).toBe( pathname );
- }
-
- rmSync( root, { recursive: true, force: true } );
- } );
-
- it( 'skips files inside ignored directories (ignoreDirNames)', () => {
- const root = makeTmpRoot( 'nsu-ignore-' );
- const target = 'target.txt';
-
- // layout:
- // root/node_modules/target.txt (ignored)
- // root/vendor/target.txt (ignored)
- // root/ok/target.txt (collected)
- mkdirSync( join( root, 'node_modules' ), { recursive: true } );
- mkdirSync( join( root, 'vendor' ), { recursive: true } );
- mkdirSync( join( root, 'ok' ), { recursive: true } );
- writeFileSync( join( root, 'node_modules', target ), 'nm' );
- writeFileSync( join( root, 'vendor', target ), 'v' );
- writeFileSync( join( root, 'ok', target ), 'ok' );
-
- const results = recursiveNavigateWhileCollecting( root, [ target ] );
-
- expect( results.length ).toBe( 1 );
- expect( results[0].pathname ).toBe( join( root, 'ok', target ) );
- expect( results[0].path ).toBe( join( root, 'ok' ) );
- expect( results[0].url ).toBe( pathToFileURL( results[0].pathname ).href );
-
- rmSync( root, { recursive: true, force: true } );
- } );
- } );
-
- describe( '.iteratorOverImportedComponents', () => {
- it( 'imports modules and yields metadata from exports tagged with METADATA_ACCESS_SYMBOL', async () => {
- const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}` );
- mkdirSync( root, { recursive: true } );
- const file = join( root, 'meta.module.js' );
- writeFileSync( file, [
- 'import { METADATA_ACCESS_SYMBOL } from \"#consts\";',
- 'export const StepA = () => {};',
- 'StepA[METADATA_ACCESS_SYMBOL] = { kind: \"step\", name: \"a\" };',
- 'export const FlowB = () => {};',
- 'FlowB[METADATA_ACCESS_SYMBOL] = { kind: \"workflow\", name: \"b\" };'
- ].join( '\n' ) );
-
- const paths = recursiveNavigateWhileCollecting( root, [ 'meta.module.js' ] );
- const collected = [];
- for await ( const m of iteratorOverImportedComponents( paths ) ) {
- collected.push( m );
- }
-
- expect( collected.length ).toBe( 2 );
- expect( collected.map( m => m.metadata.name ).sort() ).toEqual( [ 'a', 'b' ] );
- expect( collected.map( m => m.metadata.kind ).sort() ).toEqual( [ 'step', 'workflow' ] );
- for ( const m of collected ) {
- expect( m.pathname ).toBe( file );
- expect( m.path ).toBe( root );
- expect( typeof m.component ).toBe( 'function' );
- }
-
- rmSync( root, { recursive: true, force: true } );
- } );
-
- it( 'ignores exports without metadata symbol', async () => {
- const root = join( process.cwd(), 'sdk/core/temp_test_modules', `meta-${Date.now()}-nometa` );
- mkdirSync( root, { recursive: true } );
- const file = join( root, 'meta.module.js' );
- writeFileSync( file, [
- 'export const Plain = () => {};',
- 'export const AlsoPlain = {}'
- ].join( '\n' ) );
-
- const paths = recursiveNavigateWhileCollecting( root, [ 'meta.module.js' ] );
- const collected = [];
- for await ( const m of iteratorOverImportedComponents( paths ) ) {
- collected.push( m );
- }
-
- expect( collected.length ).toBe( 0 );
- rmSync( root, { recursive: true, force: true } );
- } );
- } );
-
- describe( '.writeFileOnLocationSync', () => {
- it( 'creates missing directories and writes file', () => {
- const root = makeTmpRoot( 'nsu-write-' );
- const nested = join( root, 'a', 'b', 'c.txt' );
- writeFileOnLocationSync( nested, 'hello' );
- expect( existsSync( join( root, 'a', 'b' ) ) ).toBe( true );
- expect( readFileSync( nested, 'utf-8' ) ).toBe( 'hello' );
- rmSync( root, { recursive: true, force: true } );
- } );
-
- it( 'overwrites existing content', () => {
- const root = makeTmpRoot( 'nsu-write2-' );
- const file = join( root, 'x', 'y.txt' );
- mkdirSync( join( root, 'x' ), { recursive: true } );
- writeFileSync( file, 'old' );
- writeFileOnLocationSync( file, 'new' );
- expect( readFileSync( file, 'utf-8' ) ).toBe( 'new' );
- rmSync( root, { recursive: true, force: true } );
- } );
- } );