@workflow/world-local 4.0.1-beta.6 → 4.1.0-beta.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/config.d.ts +15 -4
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +35 -9
- package/dist/config.js.map +1 -1
- package/dist/fs.d.ts +13 -0
- package/dist/fs.d.ts.map +1 -1
- package/dist/fs.js +109 -14
- package/dist/fs.js.map +1 -1
- package/dist/index.d.ts +10 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +21 -9
- package/dist/index.js.map +1 -1
- package/dist/init.d.ts +91 -0
- package/dist/init.d.ts.map +1 -0
- package/dist/init.js +263 -0
- package/dist/init.js.map +1 -0
- package/dist/queue.d.ts +2 -1
- package/dist/queue.d.ts.map +1 -1
- package/dist/queue.js +83 -40
- package/dist/queue.js.map +1 -1
- package/dist/storage/events-storage.d.ts +7 -0
- package/dist/storage/events-storage.d.ts.map +1 -0
- package/dist/storage/events-storage.js +527 -0
- package/dist/storage/events-storage.js.map +1 -0
- package/dist/storage/filters.d.ts +26 -0
- package/dist/storage/filters.d.ts.map +1 -0
- package/dist/storage/filters.js +43 -0
- package/dist/storage/filters.js.map +1 -0
- package/dist/storage/helpers.d.ts +14 -0
- package/dist/storage/helpers.d.ts.map +1 -0
- package/dist/storage/helpers.js +34 -0
- package/dist/storage/helpers.js.map +1 -0
- package/dist/storage/hooks-storage.d.ts +12 -0
- package/dist/storage/hooks-storage.d.ts.map +1 -0
- package/dist/storage/hooks-storage.js +89 -0
- package/dist/storage/hooks-storage.js.map +1 -0
- package/dist/storage/index.d.ts +10 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/dist/storage/index.js +20 -0
- package/dist/storage/index.js.map +1 -0
- package/dist/storage/legacy.d.ts +13 -0
- package/dist/storage/legacy.d.ts.map +1 -0
- package/dist/storage/legacy.js +73 -0
- package/dist/storage/legacy.js.map +1 -0
- package/dist/storage/runs-storage.d.ts +7 -0
- package/dist/storage/runs-storage.d.ts.map +1 -0
- package/dist/storage/runs-storage.js +59 -0
- package/dist/storage/runs-storage.js.map +1 -0
- package/dist/storage/steps-storage.d.ts +7 -0
- package/dist/storage/steps-storage.d.ts.map +1 -0
- package/dist/storage/steps-storage.js +58 -0
- package/dist/storage/steps-storage.js.map +1 -0
- package/dist/storage.d.ts +9 -2
- package/dist/storage.d.ts.map +1 -1
- package/dist/storage.js +10 -421
- package/dist/storage.js.map +1 -1
- package/dist/streamer.d.ts +1 -1
- package/dist/streamer.d.ts.map +1 -1
- package/dist/streamer.js +103 -20
- package/dist/streamer.js.map +1 -1
- package/dist/test-helpers.d.ts +43 -0
- package/dist/test-helpers.d.ts.map +1 -0
- package/dist/test-helpers.js +89 -0
- package/dist/test-helpers.js.map +1 -0
- package/package.json +10 -9
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { monotonicFactory } from 'ulid';
import { ulidToDate } from '../fs.js';
/**
 * Create a monotonic ULID factory that ensures ULIDs are always increasing
 * even when generated within the same millisecond.
 */
export const monotonicUlid = monotonicFactory(() => Math.random());
/**
 * Creates a function to extract createdAt date from a filename based on ULID.
 * Used for efficient pagination without reading file contents.
 *
 * @param idPrefix - The prefix to strip from filenames (e.g., 'wrun', 'evnt', 'step')
 * @returns A function that extracts Date from filename, or null if not extractable
 */
export const getObjectCreatedAt = (idPrefix) => {
    // Hoisted out of the per-filename closure: the pattern depends only on the
    // prefix, so build it once per prefix instead of once per file. The `^`
    // anchor means it can match at most once, so the previous `g` flag was
    // redundant (and a stateful-lastIndex hazard for reusable RegExp objects).
    const replaceRegex = new RegExp(`^${idPrefix}_`);
    return (filename) => {
        const dashIndex = filename.indexOf('-');
        if (dashIndex === -1) {
            // No dash - extract ULID from the filename (e.g., wrun_ULID.json, evnt_ULID.json)
            const ulid = filename.replace(/\.json$/, '').replace(replaceRegex, '');
            return ulidToDate(ulid);
        }
        // For composite keys like {runId}-{stepId}, extract from the appropriate part
        if (idPrefix === 'step') {
            // Steps use sequential IDs (step_0, step_1, etc.) - no timestamp in filename.
            // Return null to skip filename-based optimization and defer to JSON-based filtering.
            return null;
        }
        // For events: wrun_ULID-evnt_ULID.json - extract from the eventId part
        const id = filename.substring(dashIndex + 1).replace(/\.json$/, '');
        const ulid = id.replace(replaceRegex, '');
        return ulidToDate(ulid);
    };
};
//# sourceMappingURL=helpers.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../../src/storage/helpers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,MAAM,CAAC;AACxC,OAAO,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AAEtC;;;GAGG;AACH,MAAM,CAAC,MAAM,aAAa,GAAG,gBAAgB,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;AAEnE;;;;;;GAMG;AACH,MAAM,CAAC,MAAM,kBAAkB,GAC7B,CAAC,QAAgB,EAAE,EAAE,CACrB,CAAC,QAAgB,EAAe,EAAE;IAChC,MAAM,YAAY,GAAG,IAAI,MAAM,CAAC,IAAI,QAAQ,GAAG,EAAE,GAAG,CAAC,CAAC;IACtD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,SAAS,KAAK,CAAC,CAAC,EAAE,CAAC;QACrB,kFAAkF;QAClF,MAAM,IAAI,GAAG,QAAQ,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;QACvE,OAAO,UAAU,CAAC,IAAI,CAAC,CAAC;IAC1B,CAAC;IAED,8EAA8E;IAC9E,IAAI,QAAQ,KAAK,MAAM,EAAE,CAAC;QACxB,8EAA8E;QAC9E,qFAAqF;QACrF,OAAO,IAAI,CAAC;IACd,CAAC;IAED,uEAAuE;IACvE,MAAM,EAAE,GAAG,QAAQ,CAAC,SAAS,CAAC,SAAS,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;IACpE,MAAM,IAAI,GAAG,EAAE,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAC1C,OAAO,UAAU,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { Storage } from '@workflow/world';
|
|
2
|
+
/**
|
|
3
|
+
* Creates a hooks storage implementation using the filesystem.
|
|
4
|
+
* Implements the Storage['hooks'] interface with hook CRUD operations.
|
|
5
|
+
*/
|
|
6
|
+
export declare function createHooksStorage(basedir: string): Storage['hooks'];
|
|
7
|
+
/**
|
|
8
|
+
* Helper function to delete all hooks associated with a workflow run.
|
|
9
|
+
* Called when a run reaches a terminal state.
|
|
10
|
+
*/
|
|
11
|
+
export declare function deleteAllHooksForRun(basedir: string, runId: string): Promise<void>;
|
|
12
|
+
//# sourceMappingURL=hooks-storage.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hooks-storage.d.ts","sourceRoot":"","sources":["../../src/storage/hooks-storage.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAKV,OAAO,EACR,MAAM,iBAAiB,CAAC;AAWzB;;;GAGG;AACH,wBAAgB,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAyEpE;AAED;;;GAGG;AACH,wBAAsB,oBAAoB,CACxC,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,GACZ,OAAO,CAAC,IAAI,CAAC,CAWf"}
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import { HookSchema } from '@workflow/world';
|
|
3
|
+
import { DEFAULT_RESOLVE_DATA_OPTION } from '../config.js';
|
|
4
|
+
import { deleteJSON, listJSONFiles, paginatedFileSystemQuery, readJSON, } from '../fs.js';
|
|
5
|
+
import { filterHookData } from './filters.js';
|
|
6
|
+
/**
 * Creates a hooks storage implementation using the filesystem.
 * Implements the Storage['hooks'] interface with hook CRUD operations.
 *
 * Hooks are persisted as individual JSON files under `<basedir>/hooks/`,
 * keyed by hookId (no ULID/timestamp encoded in the filename).
 */
export function createHooksStorage(basedir) {
    // Used by getByToken. Hook files are keyed by hookId, not token, so a
    // token lookup must read every hook file — O(n) in the number of hooks.
    async function findHookByToken(token) {
        const hooksDir = path.join(basedir, 'hooks');
        const files = await listJSONFiles(hooksDir);
        for (const file of files) {
            const hookPath = path.join(hooksDir, `${file}.json`);
            const hook = await readJSON(hookPath, HookSchema);
            if (hook && hook.token === token) {
                return hook;
            }
        }
        // No hook carries this token.
        return null;
    }
    // Read a single hook by id; throws when the file is missing or fails
    // schema validation (readJSON returns a falsy value in that case).
    async function get(hookId, params) {
        const hookPath = path.join(basedir, 'hooks', `${hookId}.json`);
        const hook = await readJSON(hookPath, HookSchema);
        if (!hook) {
            throw new Error(`Hook ${hookId} not found`);
        }
        const resolveData = params?.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
        return filterHookData(hook, resolveData);
    }
    // Resolve a hook by its token (linear scan; see findHookByToken).
    // NOTE(review): unlike get/list, the result is NOT passed through
    // filterHookData — presumably intentional, but worth confirming.
    async function getByToken(token) {
        const hook = await findHookByToken(token);
        if (!hook) {
            throw new Error(`Hook with token ${token} not found`);
        }
        return hook;
    }
    // Paginated listing of hooks, optionally restricted to a single run.
    async function list(params) {
        const hooksDir = path.join(basedir, 'hooks');
        const resolveData = params.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
        const result = await paginatedFileSystemQuery({
            directory: hooksDir,
            schema: HookSchema,
            sortOrder: params.pagination?.sortOrder,
            limit: params.pagination?.limit,
            cursor: params.pagination?.cursor,
            filePrefix: undefined, // Hooks don't have ULIDs, so we can't optimize by filename
            filter: (hook) => {
                // Filter by runId if provided
                if (params.runId && hook.runId !== params.runId) {
                    return false;
                }
                return true;
            },
            getCreatedAt: () => {
                // Hook files don't have ULID timestamps in filename
                // We need to read the file to get createdAt, but that's inefficient
                // So we return the hook's createdAt directly (item.createdAt will be used for sorting)
                // Return a dummy date to pass the null check, actual sorting uses item.createdAt
                return new Date(0);
            },
            getId: (hook) => hook.hookId,
        });
        // Transform the data after pagination so limit/cursor apply to the
        // full records, not the filtered view.
        return {
            ...result,
            data: result.data.map((hook) => filterHookData(hook, resolveData)),
        };
    }
    return { get, getByToken, list };
}
|
|
74
|
+
/**
 * Helper function to delete all hooks associated with a workflow run.
 * Called when a run reaches a terminal state.
 *
 * @param basedir - Base directory containing the `hooks/` subdirectory.
 * @param runId - The run whose hooks should be removed.
 * @returns Resolves once every matching hook file has been deleted.
 */
export async function deleteAllHooksForRun(basedir, runId) {
    const hooksDir = path.join(basedir, 'hooks');
    const files = await listJSONFiles(hooksDir);
    // Each hook file is independent, so read and (conditionally) delete them
    // concurrently instead of awaiting one file at a time.
    await Promise.all(files.map(async (file) => {
        const hookPath = path.join(hooksDir, `${file}.json`);
        const hook = await readJSON(hookPath, HookSchema);
        if (hook && hook.runId === runId) {
            await deleteJSON(hookPath);
        }
    }));
}
//# sourceMappingURL=hooks-storage.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hooks-storage.js","sourceRoot":"","sources":["../../src/storage/hooks-storage.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,WAAW,CAAC;AAQ7B,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,2BAA2B,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EACL,UAAU,EACV,aAAa,EACb,wBAAwB,EACxB,QAAQ,GACT,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAE9C;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAe;IAChD,sEAAsE;IACtE,KAAK,UAAU,eAAe,CAAC,KAAa;QAC1C,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC7C,MAAM,KAAK,GAAG,MAAM,aAAa,CAAC,QAAQ,CAAC,CAAC;QAE5C,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,IAAI,OAAO,CAAC,CAAC;YACrD,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;YAClD,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;gBACjC,OAAO,IAAI,CAAC;YACd,CAAC;QACH,CAAC;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,UAAU,GAAG,CAAC,MAAc,EAAE,MAAsB;QACvD,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,GAAG,MAAM,OAAO,CAAC,CAAC;QAC/D,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;QAClD,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,MAAM,IAAI,KAAK,CAAC,QAAQ,MAAM,YAAY,CAAC,CAAC;QAC9C,CAAC;QACD,MAAM,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,2BAA2B,CAAC;QACvE,OAAO,cAAc,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;IAC3C,CAAC;IAED,KAAK,UAAU,UAAU,CAAC,KAAa;QACrC,MAAM,IAAI,GAAG,MAAM,eAAe,CAAC,KAAK,CAAC,CAAC;QAC1C,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,MAAM,IAAI,KAAK,CAAC,mBAAmB,KAAK,YAAY,CAAC,CAAC;QACxD,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,UAAU,IAAI,CACjB,MAAuB;QAEvB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC7C,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,2BAA2B,CAAC;QAEtE,MAAM,MAAM,GAAG,MAAM,wBAAwB,CAAC;YAC5C,SAAS,EAAE,QAAQ;YACnB,MAAM,EAAE,UAAU;YAClB,SAAS,EAAE,MAAM,CAAC,UAAU,EAAE,SAAS;YACvC,KAAK,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK;YAC/B,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,MAAM;YACjC,UAAU,EAAE,SAAS,EAAE,2DAA2D;YAClF,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;gBACf,8BAA8B;gBAC9B,IAAI,MAAM,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,MAAM,CAAC,KAAK,EAAE,CAAC;oBAChD,OAAO,KAAK,CAAC;g
BACf,CAAC;gBACD,OAAO,IAAI,CAAC;YACd,CAAC;YACD,YAAY,EAAE,GAAG,EAAE;gBACjB,oDAAoD;gBACpD,oEAAoE;gBACpE,uFAAuF;gBACvF,iFAAiF;gBACjF,OAAO,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC;YACrB,CAAC;YACD,KAAK,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM;SAC7B,CAAC,CAAC;QAEH,sCAAsC;QACtC,OAAO;YACL,GAAG,MAAM;YACT,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;SACnE,CAAC;IACJ,CAAC;IAED,OAAO,EAAE,GAAG,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC;AACnC,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACxC,OAAe,EACf,KAAa;IAEb,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7C,MAAM,KAAK,GAAG,MAAM,aAAa,CAAC,QAAQ,CAAC,CAAC;IAE5C,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;QACzB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,IAAI,OAAO,CAAC,CAAC;QACrD,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;QAClD,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;YACjC,MAAM,UAAU,CAAC,QAAQ,CAAC,CAAC;QAC7B,CAAC;IACH,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { Storage } from '@workflow/world';
|
|
2
|
+
/**
|
|
3
|
+
* Creates a complete storage implementation using the filesystem.
|
|
4
|
+
* This is the main entry point that composes all storage implementations.
|
|
5
|
+
*
|
|
6
|
+
* @param basedir - The base directory for storing workflow data
|
|
7
|
+
* @returns A complete Storage implementation
|
|
8
|
+
*/
|
|
9
|
+
export declare function createStorage(basedir: string): Storage;
|
|
10
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/storage/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,iBAAiB,CAAC;AAM/C;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAOtD"}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { createEventsStorage } from './events-storage.js';
import { createHooksStorage } from './hooks-storage.js';
import { createRunsStorage } from './runs-storage.js';
import { createStepsStorage } from './steps-storage.js';
/**
 * Creates a complete storage implementation using the filesystem.
 * This is the main entry point that composes all storage implementations.
 *
 * @param basedir - The base directory for storing workflow data
 * @returns A complete Storage implementation
 */
export function createStorage(basedir) {
    // Each sub-store shares the same base directory; they partition it into
    // their own subdirectories (runs/, steps/, events/, hooks/).
    const runs = createRunsStorage(basedir);
    const steps = createStepsStorage(basedir);
    const events = createEventsStorage(basedir);
    const hooks = createHooksStorage(basedir);
    return { runs, steps, events, hooks };
}
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/storage/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAC;AAC1D,OAAO,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,MAAM,mBAAmB,CAAC;AACtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAExD;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,OAAe;IAC3C,OAAO;QACL,IAAI,EAAE,iBAAiB,CAAC,OAAO,CAAC;QAChC,KAAK,EAAE,kBAAkB,CAAC,OAAO,CAAC;QAClC,MAAM,EAAE,mBAAmB,CAAC,OAAO,CAAC;QACpC,KAAK,EAAE,kBAAkB,CAAC,OAAO,CAAC;KACnC,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { EventResult, WorkflowRun } from '@workflow/world';
|
|
2
|
+
/**
|
|
3
|
+
* Handle events for legacy runs (pre-event-sourcing, specVersion < 2).
|
|
4
|
+
* Legacy runs use different behavior:
|
|
5
|
+
* - run_cancelled: Skip event storage, directly update run
|
|
6
|
+
* - wait_completed: Store event only (no entity mutation)
|
|
7
|
+
* - hook_received: Store event only (hooks exist via old system, no entity mutation)
|
|
8
|
+
* - Other events: Throw error (not supported for legacy runs)
|
|
9
|
+
*/
|
|
10
|
+
export declare function handleLegacyEvent(basedir: string, runId: string, data: any, currentRun: WorkflowRun, params?: {
|
|
11
|
+
resolveData?: 'none' | 'all';
|
|
12
|
+
}): Promise<EventResult>;
|
|
13
|
+
//# sourceMappingURL=legacy.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"legacy.d.ts","sourceRoot":"","sources":["../../src/storage/legacy.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAS,WAAW,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAQvE;;;;;;;GAOG;AACH,wBAAsB,iBAAiB,CACrC,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,GAAG,EACT,UAAU,EAAE,WAAW,EACvB,MAAM,CAAC,EAAE;IAAE,WAAW,CAAC,EAAE,MAAM,GAAG,KAAK,CAAA;CAAE,GACxC,OAAO,CAAC,WAAW,CAAC,CA6DtB"}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import path from 'node:path';
import { SPEC_VERSION_CURRENT } from '@workflow/world';
import { DEFAULT_RESOLVE_DATA_OPTION } from '../config.js';
import { writeJSON } from '../fs.js';
import { filterEventData, filterRunData } from './filters.js';
import { monotonicUlid } from './helpers.js';
import { deleteAllHooksForRun } from './hooks-storage.js';
/**
 * Handle events for legacy runs (pre-event-sourcing, specVersion < 2).
 * Legacy runs use different behavior:
 * - run_cancelled: Skip event storage, directly update run
 * - wait_completed: Store event only (no entity mutation)
 * - hook_received: Store event only (hooks exist via old system, no entity mutation)
 * - Other events: Throw error (not supported for legacy runs)
 *
 * @param basedir - Base directory holding runs/, events/, hooks/ subdirectories.
 * @param runId - Id of the legacy run the event targets.
 * @param data - Incoming event payload; `data.eventType` selects the branch.
 * @param currentRun - The run's current persisted state (source of copied fields).
 * @param params - Optional; `resolveData` controls payload filtering of the result.
 * @returns An EventResult: `{ event, run }` for run_cancelled, `{ event }` otherwise.
 * @throws Error for any event type other than the three handled above.
 */
export async function handleLegacyEvent(basedir, runId, data, currentRun, params) {
    const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
    switch (data.eventType) {
        case 'run_cancelled': {
            // Legacy: Skip event storage, directly update run to cancelled
            const now = new Date();
            // Rebuild the run record field-by-field (rather than spreading
            // currentRun) so stale terminal fields can't leak through.
            const run = {
                runId: currentRun.runId,
                deploymentId: currentRun.deploymentId,
                workflowName: currentRun.workflowName,
                specVersion: currentRun.specVersion,
                executionContext: currentRun.executionContext,
                input: currentRun.input,
                createdAt: currentRun.createdAt,
                expiredAt: currentRun.expiredAt,
                startedAt: currentRun.startedAt,
                status: 'cancelled',
                output: undefined,
                error: undefined,
                completedAt: now,
                updatedAt: now,
            };
            const runPath = path.join(basedir, 'runs', `${runId}.json`);
            await writeJSON(runPath, run, { overwrite: true });
            // Cancellation is terminal: remove any hooks still attached to the run.
            await deleteAllHooksForRun(basedir, runId);
            // Return without event (legacy behavior skips event storage)
            // Type assertion: EventResult expects WorkflowRun, filterRunData may return WorkflowRunWithoutData
            return {
                event: undefined,
                run: filterRunData(run, resolveData),
            };
        }
        case 'wait_completed':
        case 'hook_received': {
            // Legacy: Store event only (no entity mutation)
            // - wait_completed: for replay purposes
            // - hook_received: hooks exist via old system, just record the event
            const eventId = `evnt_${monotonicUlid()}`;
            const now = new Date();
            const event = {
                ...data,
                runId,
                eventId,
                createdAt: now,
                specVersion: SPEC_VERSION_CURRENT,
            };
            // Composite filename keys the event to its run while keeping the
            // eventId ULID available for timestamp extraction.
            const compositeKey = `${runId}-${eventId}`;
            const eventPath = path.join(basedir, 'events', `${compositeKey}.json`);
            await writeJSON(eventPath, event);
            return { event: filterEventData(event, resolveData) };
        }
        default:
            throw new Error(`Event type '${data.eventType}' not supported for legacy runs ` +
                `(specVersion: ${currentRun.specVersion || 'undefined'}). ` +
                `Please upgrade 'workflow' package.`);
    }
}
//# sourceMappingURL=legacy.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"legacy.js","sourceRoot":"","sources":["../../src/storage/legacy.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,WAAW,CAAC;AAE7B,OAAO,EAAE,oBAAoB,EAAE,MAAM,iBAAiB,CAAC;AACvD,OAAO,EAAE,2BAA2B,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AACrC,OAAO,EAAE,eAAe,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC9D,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC7C,OAAO,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAE1D;;;;;;;GAOG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,OAAe,EACf,KAAa,EACb,IAAS,EACT,UAAuB,EACvB,MAAyC;IAEzC,MAAM,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,2BAA2B,CAAC;IAEvE,QAAQ,IAAI,CAAC,SAAS,EAAE,CAAC;QACvB,KAAK,eAAe,CAAC,CAAC,CAAC;YACrB,+DAA+D;YAC/D,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,MAAM,GAAG,GAAgB;gBACvB,KAAK,EAAE,UAAU,CAAC,KAAK;gBACvB,YAAY,EAAE,UAAU,CAAC,YAAY;gBACrC,YAAY,EAAE,UAAU,CAAC,YAAY;gBACrC,WAAW,EAAE,UAAU,CAAC,WAAW;gBACnC,gBAAgB,EAAE,UAAU,CAAC,gBAAgB;gBAC7C,KAAK,EAAE,UAAU,CAAC,KAAK;gBACvB,SAAS,EAAE,UAAU,CAAC,SAAS;gBAC/B,SAAS,EAAE,UAAU,CAAC,SAAS;gBAC/B,SAAS,EAAE,UAAU,CAAC,SAAS;gBAC/B,MAAM,EAAE,WAAW;gBACnB,MAAM,EAAE,SAAS;gBACjB,KAAK,EAAE,SAAS;gBAChB,WAAW,EAAE,GAAG;gBAChB,SAAS,EAAE,GAAG;aACf,CAAC;YACF,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,GAAG,KAAK,OAAO,CAAC,CAAC;YAC5D,MAAM,SAAS,CAAC,OAAO,EAAE,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YACnD,MAAM,oBAAoB,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;YAC3C,6DAA6D;YAC7D,mGAAmG;YACnG,OAAO;gBACL,KAAK,EAAE,SAAS;gBAChB,GAAG,EAAE,aAAa,CAAC,GAAG,EAAE,WAAW,CAAgB;aACpD,CAAC;QACJ,CAAC;QAED,KAAK,gBAAgB,CAAC;QACtB,KAAK,eAAe,CAAC,CAAC,CAAC;YACrB,gDAAgD;YAChD,wCAAwC;YACxC,qEAAqE;YACrE,MAAM,OAAO,GAAG,QAAQ,aAAa,EAAE,EAAE,CAAC;YAC1C,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,MAAM,KAAK,GAAU;gBACnB,GAAG,IAAI;gBACP,KAAK;gBACL,OAAO;gBACP,SAAS,EAAE,GAAG;gBACd,WAAW,EAAE,oBAAoB;aAClC,CAAC;YACF,MAAM,YAAY,GAAG,GAAG,KAAK,IAAI,OAAO,EAAE,CAAC;YAC3C,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,YAAY,OAAO,CAAC,CAAC;YACvE,MAAM,SAAS,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;YAClC,OAAO,EAAE,KAAK,EAAE,eAAe,CAAC,KAAK,EAAE,WAAW,CAAC,EAAE,CAAC;QA
CxD,CAAC;QAED;YACE,MAAM,IAAI,KAAK,CACb,eAAe,IAAI,CAAC,SAAS,kCAAkC;gBAC7D,iBAAiB,UAAU,CAAC,WAAW,IAAI,WAAW,KAAK;gBAC3D,oCAAoC,CACvC,CAAC;IACN,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { Storage } from '@workflow/world';
|
|
2
|
+
/**
|
|
3
|
+
* Creates the runs storage implementation using the filesystem.
|
|
4
|
+
* Implements the Storage['runs'] interface with get and list operations.
|
|
5
|
+
*/
|
|
6
|
+
export declare function createRunsStorage(basedir: string): Storage['runs'];
|
|
7
|
+
//# sourceMappingURL=runs-storage.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"runs-storage.d.ts","sourceRoot":"","sources":["../../src/storage/runs-storage.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,OAAO,EAA0B,MAAM,iBAAiB,CAAC;AAOvE;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAmDlE"}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import path from 'node:path';
import { WorkflowRunNotFoundError } from '@workflow/errors';
import { WorkflowRunSchema } from '@workflow/world';
import { DEFAULT_RESOLVE_DATA_OPTION } from '../config.js';
import { paginatedFileSystemQuery, readJSON } from '../fs.js';
import { filterRunData } from './filters.js';
import { getObjectCreatedAt } from './helpers.js';
/**
 * Creates the runs storage implementation using the filesystem.
 * Implements the Storage['runs'] interface with get and list operations.
 *
 * Runs are persisted as one JSON file per run under `<basedir>/runs/`,
 * named `<runId>.json` (runIds are `wrun_<ULID>`, so creation time can be
 * recovered from the filename for pagination).
 */
export function createRunsStorage(basedir) {
    return {
        // Fetch a single run by id; throws WorkflowRunNotFoundError when the
        // file is missing or fails schema validation.
        get: (async (id, params) => {
            const runPath = path.join(basedir, 'runs', `${id}.json`);
            const run = await readJSON(runPath, WorkflowRunSchema);
            if (!run) {
                throw new WorkflowRunNotFoundError(id);
            }
            const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
            return filterRunData(run, resolveData);
        }),
        // Paginated listing with optional workflowName/status filters.
        list: (async (params) => {
            const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
            const result = await paginatedFileSystemQuery({
                directory: path.join(basedir, 'runs'),
                schema: WorkflowRunSchema,
                filter: (run) => {
                    if (params?.workflowName &&
                        run.workflowName !== params.workflowName) {
                        return false;
                    }
                    if (params?.status && run.status !== params.status) {
                        return false;
                    }
                    return true;
                },
                sortOrder: params?.pagination?.sortOrder ?? 'desc',
                limit: params?.pagination?.limit,
                cursor: params?.pagination?.cursor,
                // Filenames embed a ULID ('wrun' prefix), so createdAt can be
                // derived without reading file contents.
                getCreatedAt: getObjectCreatedAt('wrun'),
                getId: (run) => run.runId,
            });
            // If resolveData is "none", replace input/output with undefined
            // (stripping happens after pagination so limit/cursor see full records).
            if (resolveData === 'none') {
                return {
                    ...result,
                    data: result.data.map((run) => ({
                        ...run,
                        input: undefined,
                        output: undefined,
                    })),
                };
            }
            return result;
        }),
    };
}
//# sourceMappingURL=runs-storage.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"runs-storage.js","sourceRoot":"","sources":["../../src/storage/runs-storage.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,WAAW,CAAC;AAC7B,OAAO,EAAE,wBAAwB,EAAE,MAAM,kBAAkB,CAAC;AAE5D,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAE,2BAA2B,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,wBAAwB,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AAC9D,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC7C,OAAO,EAAE,kBAAkB,EAAE,MAAM,cAAc,CAAC;AAElD;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,OAAe;IAC/C,OAAO;QACL,GAAG,EAAE,CAAC,KAAK,EAAE,EAAU,EAAE,MAAY,EAAE,EAAE;YACvC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC;YACzD,MAAM,GAAG,GAAG,MAAM,QAAQ,CAAC,OAAO,EAAE,iBAAiB,CAAC,CAAC;YACvD,IAAI,CAAC,GAAG,EAAE,CAAC;gBACT,MAAM,IAAI,wBAAwB,CAAC,EAAE,CAAC,CAAC;YACzC,CAAC;YACD,MAAM,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,2BAA2B,CAAC;YACvE,OAAO,aAAa,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;QACzC,CAAC,CAA2B;QAE5B,IAAI,EAAE,CAAC,KAAK,EAAE,MAAY,EAAE,EAAE;YAC5B,MAAM,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,2BAA2B,CAAC;YACvE,MAAM,MAAM,GAAG,MAAM,wBAAwB,CAAC;gBAC5C,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC;gBACrC,MAAM,EAAE,iBAAiB;gBACzB,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE;oBACd,IACE,MAAM,EAAE,YAAY;wBACpB,GAAG,CAAC,YAAY,KAAK,MAAM,CAAC,YAAY,EACxC,CAAC;wBACD,OAAO,KAAK,CAAC;oBACf,CAAC;oBACD,IAAI,MAAM,EAAE,MAAM,IAAI,GAAG,CAAC,MAAM,KAAK,MAAM,CAAC,MAAM,EAAE,CAAC;wBACnD,OAAO,KAAK,CAAC;oBACf,CAAC;oBACD,OAAO,IAAI,CAAC;gBACd,CAAC;gBACD,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,SAAS,IAAI,MAAM;gBAClD,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK;gBAChC,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM;gBAClC,YAAY,EAAE,kBAAkB,CAAC,MAAM,CAAC;gBACxC,KAAK,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,KAAK;aAC1B,CAAC,CAAC;YAEH,gEAAgE;YAChE,IAAI,WAAW,KAAK,MAAM,EAAE,CAAC;gBAC3B,OAAO;oBACL,GAAG,MAAM;oBACT,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;wBAC9B,GAAG,GAAG;wBACN,KAAK,EAAE,SAAS;wBAChB,MAAM,EAAE,SAAS;qBAClB,CAAC,CAA6B;iBAChC,CAAC;YACJ,CAAC;YAED,OAAO,MAAM,CAAC;QAChB,CAAC,CAA4B;KAC9B,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { Storage } from '@workflow/world';
|
|
2
|
+
/**
|
|
3
|
+
* Creates the steps storage implementation using the filesystem.
|
|
4
|
+
* Implements the Storage['steps'] interface with get and list operations.
|
|
5
|
+
*/
|
|
6
|
+
export declare function createStepsStorage(basedir: string): Storage['steps'];
|
|
7
|
+
//# sourceMappingURL=steps-storage.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"steps-storage.d.ts","sourceRoot":"","sources":["../../src/storage/steps-storage.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,OAAO,EAAmB,MAAM,iBAAiB,CAAC;AAOhE;;;GAGG;AACH,wBAAgB,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAiDpE"}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import { StepSchema } from '@workflow/world';
|
|
3
|
+
import { DEFAULT_RESOLVE_DATA_OPTION } from '../config.js';
|
|
4
|
+
import { listJSONFiles, paginatedFileSystemQuery, readJSON } from '../fs.js';
|
|
5
|
+
import { filterStepData } from './filters.js';
|
|
6
|
+
import { getObjectCreatedAt } from './helpers.js';
|
|
7
|
+
/**
|
|
8
|
+
* Creates the steps storage implementation using the filesystem.
|
|
9
|
+
* Implements the Storage['steps'] interface with get and list operations.
|
|
10
|
+
*/
|
|
11
|
+
export function createStepsStorage(basedir) {
|
|
12
|
+
return {
|
|
13
|
+
get: (async (runId, stepId, params) => {
|
|
14
|
+
if (!runId) {
|
|
15
|
+
const fileIds = await listJSONFiles(path.join(basedir, 'steps'));
|
|
16
|
+
const fileId = fileIds.find((fileId) => fileId.endsWith(`-${stepId}`));
|
|
17
|
+
if (!fileId) {
|
|
18
|
+
throw new Error(`Step ${stepId} not found`);
|
|
19
|
+
}
|
|
20
|
+
runId = fileId.split('-')[0];
|
|
21
|
+
}
|
|
22
|
+
const compositeKey = `${runId}-${stepId}`;
|
|
23
|
+
const stepPath = path.join(basedir, 'steps', `${compositeKey}.json`);
|
|
24
|
+
const step = await readJSON(stepPath, StepSchema);
|
|
25
|
+
if (!step) {
|
|
26
|
+
throw new Error(`Step ${stepId} in run ${runId} not found`);
|
|
27
|
+
}
|
|
28
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
29
|
+
return filterStepData(step, resolveData);
|
|
30
|
+
}),
|
|
31
|
+
list: (async (params) => {
|
|
32
|
+
const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
33
|
+
const result = await paginatedFileSystemQuery({
|
|
34
|
+
directory: path.join(basedir, 'steps'),
|
|
35
|
+
schema: StepSchema,
|
|
36
|
+
filePrefix: `${params.runId}-`,
|
|
37
|
+
sortOrder: params.pagination?.sortOrder ?? 'desc',
|
|
38
|
+
limit: params.pagination?.limit,
|
|
39
|
+
cursor: params.pagination?.cursor,
|
|
40
|
+
getCreatedAt: getObjectCreatedAt('step'),
|
|
41
|
+
getId: (step) => step.stepId,
|
|
42
|
+
});
|
|
43
|
+
// If resolveData is "none", replace input/output with undefined
|
|
44
|
+
if (resolveData === 'none') {
|
|
45
|
+
return {
|
|
46
|
+
...result,
|
|
47
|
+
data: result.data.map((step) => ({
|
|
48
|
+
...step,
|
|
49
|
+
input: undefined,
|
|
50
|
+
output: undefined,
|
|
51
|
+
})),
|
|
52
|
+
};
|
|
53
|
+
}
|
|
54
|
+
return result;
|
|
55
|
+
}),
|
|
56
|
+
};
|
|
57
|
+
}
|
|
58
|
+
//# sourceMappingURL=steps-storage.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"steps-storage.js","sourceRoot":"","sources":["../../src/storage/steps-storage.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,WAAW,CAAC;AAE7B,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,2BAA2B,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,aAAa,EAAE,wBAAwB,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAC;AAC7E,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,EAAE,kBAAkB,EAAE,MAAM,cAAc,CAAC;AAElD;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAe;IAChD,OAAO;QACL,GAAG,EAAE,CAAC,KAAK,EAAE,KAAyB,EAAE,MAAc,EAAE,MAAY,EAAE,EAAE;YACtE,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,OAAO,GAAG,MAAM,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC;gBACjE,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,MAAM,EAAE,CAAC,CAAC,CAAC;gBACvE,IAAI,CAAC,MAAM,EAAE,CAAC;oBACZ,MAAM,IAAI,KAAK,CAAC,QAAQ,MAAM,YAAY,CAAC,CAAC;gBAC9C,CAAC;gBACD,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAC/B,CAAC;YACD,MAAM,YAAY,GAAG,GAAG,KAAK,IAAI,MAAM,EAAE,CAAC;YAC1C,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,GAAG,YAAY,OAAO,CAAC,CAAC;YACrE,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;YAClD,IAAI,CAAC,IAAI,EAAE,CAAC;gBACV,MAAM,IAAI,KAAK,CAAC,QAAQ,MAAM,WAAW,KAAK,YAAY,CAAC,CAAC;YAC9D,CAAC;YACD,MAAM,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,2BAA2B,CAAC;YACvE,OAAO,cAAc,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;QAC3C,CAAC,CAA4B;QAE7B,IAAI,EAAE,CAAC,KAAK,EAAE,MAAW,EAAE,EAAE;YAC3B,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,2BAA2B,CAAC;YACtE,MAAM,MAAM,GAAG,MAAM,wBAAwB,CAAC;gBAC5C,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC;gBACtC,MAAM,EAAE,UAAU;gBAClB,UAAU,EAAE,GAAG,MAAM,CAAC,KAAK,GAAG;gBAC9B,SAAS,EAAE,MAAM,CAAC,UAAU,EAAE,SAAS,IAAI,MAAM;gBACjD,KAAK,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK;gBAC/B,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,MAAM;gBACjC,YAAY,EAAE,kBAAkB,CAAC,MAAM,CAAC;gBACxC,KAAK,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM;aAC7B,CAAC,CAAC;YAEH,gEAAgE;YAChE,IAAI,WAAW,KAAK,MAAM,EAAE,CAAC;gBAC3B,OAAO;oBACL,GAAG,MAAM;oBACT,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;wBAC/B,GA
AG,IAAI;wBACP,KAAK,EAAE,SAAS;wBAChB,MAAM,EAAE,SAAS;qBAClB,CAAC,CAAsB;iBACzB,CAAC;YACJ,CAAC;YAED,OAAO,MAAM,CAAC;QAChB,CAAC,CAA6B;KAC/B,CAAC;AACJ,CAAC"}
|
package/dist/storage.d.ts
CHANGED
|
@@ -1,3 +1,10 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
/**
|
|
2
|
+
* Filesystem-based storage implementation for workflow data.
|
|
3
|
+
*
|
|
4
|
+
* This module provides a complete Storage implementation that persists
|
|
5
|
+
* workflow runs, steps, events, and hooks to the local filesystem.
|
|
6
|
+
*
|
|
7
|
+
* @module
|
|
8
|
+
*/
|
|
9
|
+
export { createStorage } from './storage/index.js';
|
|
3
10
|
//# sourceMappingURL=storage.d.ts.map
|
package/dist/storage.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"storage.d.ts","sourceRoot":"","sources":["../src/storage.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"storage.d.ts","sourceRoot":"","sources":["../src/storage.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAGH,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC"}
|