@elaraai/e3-core 0.0.2-beta.4 → 0.0.2-beta.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +25 -22
- package/dist/src/dataflow/api-compat.d.ts +90 -0
- package/dist/src/dataflow/api-compat.d.ts.map +1 -0
- package/dist/src/dataflow/api-compat.js +139 -0
- package/dist/src/dataflow/api-compat.js.map +1 -0
- package/dist/src/dataflow/index.d.ts +18 -0
- package/dist/src/dataflow/index.d.ts.map +1 -0
- package/dist/src/dataflow/index.js +23 -0
- package/dist/src/dataflow/index.js.map +1 -0
- package/dist/src/dataflow/orchestrator/LocalOrchestrator.d.ts +76 -0
- package/dist/src/dataflow/orchestrator/LocalOrchestrator.d.ts.map +1 -0
- package/dist/src/dataflow/orchestrator/LocalOrchestrator.js +695 -0
- package/dist/src/dataflow/orchestrator/LocalOrchestrator.js.map +1 -0
- package/dist/src/dataflow/orchestrator/index.d.ts +12 -0
- package/dist/src/dataflow/orchestrator/index.d.ts.map +1 -0
- package/dist/src/dataflow/orchestrator/index.js +12 -0
- package/dist/src/dataflow/orchestrator/index.js.map +1 -0
- package/dist/src/dataflow/orchestrator/interfaces.d.ts +163 -0
- package/dist/src/dataflow/orchestrator/interfaces.d.ts.map +1 -0
- package/dist/src/dataflow/orchestrator/interfaces.js +52 -0
- package/dist/src/dataflow/orchestrator/interfaces.js.map +1 -0
- package/dist/src/dataflow/state-store/FileStateStore.d.ts +67 -0
- package/dist/src/dataflow/state-store/FileStateStore.d.ts.map +1 -0
- package/dist/src/dataflow/state-store/FileStateStore.js +300 -0
- package/dist/src/dataflow/state-store/FileStateStore.js.map +1 -0
- package/dist/src/dataflow/state-store/InMemoryStateStore.d.ts +42 -0
- package/dist/src/dataflow/state-store/InMemoryStateStore.d.ts.map +1 -0
- package/dist/src/dataflow/state-store/InMemoryStateStore.js +229 -0
- package/dist/src/dataflow/state-store/InMemoryStateStore.js.map +1 -0
- package/dist/src/dataflow/state-store/index.d.ts +13 -0
- package/dist/src/dataflow/state-store/index.d.ts.map +1 -0
- package/dist/src/dataflow/state-store/index.js +13 -0
- package/dist/src/dataflow/state-store/index.js.map +1 -0
- package/dist/src/dataflow/state-store/interfaces.d.ts +159 -0
- package/dist/src/dataflow/state-store/interfaces.d.ts.map +1 -0
- package/dist/src/dataflow/state-store/interfaces.js +6 -0
- package/dist/src/dataflow/state-store/interfaces.js.map +1 -0
- package/dist/src/dataflow/steps.d.ts +222 -0
- package/dist/src/dataflow/steps.d.ts.map +1 -0
- package/dist/src/dataflow/steps.js +707 -0
- package/dist/src/dataflow/steps.js.map +1 -0
- package/dist/src/dataflow/types.d.ts +127 -0
- package/dist/src/dataflow/types.d.ts.map +1 -0
- package/dist/src/dataflow/types.js +7 -0
- package/dist/src/dataflow/types.js.map +1 -0
- package/dist/src/dataflow.d.ts +113 -38
- package/dist/src/dataflow.d.ts.map +1 -1
- package/dist/src/dataflow.js +269 -416
- package/dist/src/dataflow.js.map +1 -1
- package/dist/src/dataset-refs.d.ts +124 -0
- package/dist/src/dataset-refs.d.ts.map +1 -0
- package/dist/src/dataset-refs.js +319 -0
- package/dist/src/dataset-refs.js.map +1 -0
- package/dist/src/errors.d.ts +39 -9
- package/dist/src/errors.d.ts.map +1 -1
- package/dist/src/errors.js +51 -8
- package/dist/src/errors.js.map +1 -1
- package/dist/src/execution/LocalTaskRunner.d.ts +73 -0
- package/dist/src/execution/LocalTaskRunner.d.ts.map +1 -0
- package/dist/src/execution/LocalTaskRunner.js +399 -0
- package/dist/src/execution/LocalTaskRunner.js.map +1 -0
- package/dist/src/execution/MockTaskRunner.d.ts +49 -0
- package/dist/src/execution/MockTaskRunner.d.ts.map +1 -0
- package/dist/src/execution/MockTaskRunner.js +54 -0
- package/dist/src/execution/MockTaskRunner.js.map +1 -0
- package/dist/src/execution/index.d.ts +16 -0
- package/dist/src/execution/index.d.ts.map +1 -0
- package/dist/src/execution/index.js +8 -0
- package/dist/src/execution/index.js.map +1 -0
- package/dist/src/execution/interfaces.d.ts +246 -0
- package/dist/src/execution/interfaces.d.ts.map +1 -0
- package/dist/src/execution/interfaces.js +6 -0
- package/dist/src/execution/interfaces.js.map +1 -0
- package/dist/src/execution/processHelpers.d.ts +20 -0
- package/dist/src/execution/processHelpers.d.ts.map +1 -0
- package/dist/src/execution/processHelpers.js +62 -0
- package/dist/src/execution/processHelpers.js.map +1 -0
- package/dist/src/executions.d.ts +71 -104
- package/dist/src/executions.d.ts.map +1 -1
- package/dist/src/executions.js +110 -476
- package/dist/src/executions.js.map +1 -1
- package/dist/src/index.d.ts +19 -9
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +48 -18
- package/dist/src/index.js.map +1 -1
- package/dist/src/objects.d.ts +8 -51
- package/dist/src/objects.d.ts.map +1 -1
- package/dist/src/objects.js +13 -230
- package/dist/src/objects.js.map +1 -1
- package/dist/src/packages.d.ts +22 -14
- package/dist/src/packages.d.ts.map +1 -1
- package/dist/src/packages.js +134 -88
- package/dist/src/packages.js.map +1 -1
- package/dist/src/storage/in-memory/InMemoryRepoStore.d.ts +35 -0
- package/dist/src/storage/in-memory/InMemoryRepoStore.d.ts.map +1 -0
- package/dist/src/storage/in-memory/InMemoryRepoStore.js +107 -0
- package/dist/src/storage/in-memory/InMemoryRepoStore.js.map +1 -0
- package/dist/src/storage/in-memory/InMemoryStorage.d.ts +139 -0
- package/dist/src/storage/in-memory/InMemoryStorage.d.ts.map +1 -0
- package/dist/src/storage/in-memory/InMemoryStorage.js +439 -0
- package/dist/src/storage/in-memory/InMemoryStorage.js.map +1 -0
- package/dist/src/storage/in-memory/index.d.ts +12 -0
- package/dist/src/storage/in-memory/index.d.ts.map +1 -0
- package/dist/src/storage/in-memory/index.js +12 -0
- package/dist/src/storage/in-memory/index.js.map +1 -0
- package/dist/src/storage/index.d.ts +18 -0
- package/dist/src/storage/index.d.ts.map +1 -0
- package/dist/src/storage/index.js +10 -0
- package/dist/src/storage/index.js.map +1 -0
- package/dist/src/storage/interfaces.d.ts +581 -0
- package/dist/src/storage/interfaces.d.ts.map +1 -0
- package/dist/src/storage/interfaces.js +6 -0
- package/dist/src/storage/interfaces.js.map +1 -0
- package/dist/src/storage/local/LocalBackend.d.ts +56 -0
- package/dist/src/storage/local/LocalBackend.d.ts.map +1 -0
- package/dist/src/storage/local/LocalBackend.js +145 -0
- package/dist/src/storage/local/LocalBackend.js.map +1 -0
- package/dist/src/storage/local/LocalDatasetRefStore.d.ts +22 -0
- package/dist/src/storage/local/LocalDatasetRefStore.d.ts.map +1 -0
- package/dist/src/storage/local/LocalDatasetRefStore.js +118 -0
- package/dist/src/storage/local/LocalDatasetRefStore.js.map +1 -0
- package/dist/src/storage/local/LocalLockService.d.ts +111 -0
- package/dist/src/storage/local/LocalLockService.d.ts.map +1 -0
- package/dist/src/storage/local/LocalLockService.js +355 -0
- package/dist/src/storage/local/LocalLockService.js.map +1 -0
- package/dist/src/storage/local/LocalLogStore.d.ts +23 -0
- package/dist/src/storage/local/LocalLogStore.d.ts.map +1 -0
- package/dist/src/storage/local/LocalLogStore.js +66 -0
- package/dist/src/storage/local/LocalLogStore.js.map +1 -0
- package/dist/src/storage/local/LocalObjectStore.d.ts +55 -0
- package/dist/src/storage/local/LocalObjectStore.d.ts.map +1 -0
- package/dist/src/storage/local/LocalObjectStore.js +300 -0
- package/dist/src/storage/local/LocalObjectStore.js.map +1 -0
- package/dist/src/storage/local/LocalRefStore.d.ts +50 -0
- package/dist/src/storage/local/LocalRefStore.d.ts.map +1 -0
- package/dist/src/storage/local/LocalRefStore.js +337 -0
- package/dist/src/storage/local/LocalRefStore.js.map +1 -0
- package/dist/src/storage/local/LocalRepoStore.d.ts +55 -0
- package/dist/src/storage/local/LocalRepoStore.d.ts.map +1 -0
- package/dist/src/storage/local/LocalRepoStore.js +365 -0
- package/dist/src/storage/local/LocalRepoStore.js.map +1 -0
- package/dist/src/storage/local/gc.d.ts +92 -0
- package/dist/src/storage/local/gc.d.ts.map +1 -0
- package/dist/src/storage/local/gc.js +377 -0
- package/dist/src/storage/local/gc.js.map +1 -0
- package/dist/src/storage/local/index.d.ts +18 -0
- package/dist/src/storage/local/index.d.ts.map +1 -0
- package/dist/src/storage/local/index.js +18 -0
- package/dist/src/storage/local/index.js.map +1 -0
- package/dist/src/storage/local/localHelpers.d.ts +25 -0
- package/dist/src/storage/local/localHelpers.d.ts.map +1 -0
- package/dist/src/storage/local/localHelpers.js +69 -0
- package/dist/src/storage/local/localHelpers.js.map +1 -0
- package/dist/src/{repository.d.ts → storage/local/repository.d.ts} +8 -4
- package/dist/src/storage/local/repository.d.ts.map +1 -0
- package/dist/src/{repository.js → storage/local/repository.js} +31 -29
- package/dist/src/storage/local/repository.js.map +1 -0
- package/dist/src/tasks.d.ts +16 -10
- package/dist/src/tasks.d.ts.map +1 -1
- package/dist/src/tasks.js +35 -41
- package/dist/src/tasks.js.map +1 -1
- package/dist/src/test-helpers.d.ts +5 -4
- package/dist/src/test-helpers.d.ts.map +1 -1
- package/dist/src/test-helpers.js +9 -21
- package/dist/src/test-helpers.js.map +1 -1
- package/dist/src/transfer/InMemoryTransferBackend.d.ts +66 -0
- package/dist/src/transfer/InMemoryTransferBackend.d.ts.map +1 -0
- package/dist/src/transfer/InMemoryTransferBackend.js +166 -0
- package/dist/src/transfer/InMemoryTransferBackend.js.map +1 -0
- package/dist/src/transfer/index.d.ts +8 -0
- package/dist/src/transfer/index.d.ts.map +1 -0
- package/dist/src/transfer/index.js +9 -0
- package/dist/src/transfer/index.js.map +1 -0
- package/dist/src/transfer/interfaces.d.ts +103 -0
- package/dist/src/transfer/interfaces.d.ts.map +1 -0
- package/dist/src/transfer/interfaces.js +6 -0
- package/dist/src/transfer/interfaces.js.map +1 -0
- package/dist/src/transfer/types.d.ts +79 -0
- package/dist/src/transfer/types.d.ts.map +1 -0
- package/dist/src/transfer/types.js +58 -0
- package/dist/src/transfer/types.js.map +1 -0
- package/dist/src/trees.d.ts +147 -59
- package/dist/src/trees.d.ts.map +1 -1
- package/dist/src/trees.js +372 -419
- package/dist/src/trees.js.map +1 -1
- package/dist/src/uuid.d.ts +26 -0
- package/dist/src/uuid.d.ts.map +1 -0
- package/dist/src/uuid.js +80 -0
- package/dist/src/uuid.js.map +1 -0
- package/dist/src/workspaceStatus.d.ts +6 -4
- package/dist/src/workspaceStatus.d.ts.map +1 -1
- package/dist/src/workspaceStatus.js +43 -49
- package/dist/src/workspaceStatus.js.map +1 -1
- package/dist/src/workspaces.d.ts +35 -47
- package/dist/src/workspaces.d.ts.map +1 -1
- package/dist/src/workspaces.js +194 -156
- package/dist/src/workspaces.js.map +1 -1
- package/package.json +4 -4
- package/dist/src/gc.d.ts +0 -54
- package/dist/src/gc.d.ts.map +0 -1
- package/dist/src/gc.js +0 -233
- package/dist/src/gc.js.map +0 -1
- package/dist/src/repository.d.ts.map +0 -1
- package/dist/src/repository.js.map +0 -1
- package/dist/src/workspaceLock.d.ts +0 -67
- package/dist/src/workspaceLock.d.ts.map +0 -1
- package/dist/src/workspaceLock.js +0 -217
- package/dist/src/workspaceLock.js.map +0 -1
|
@@ -0,0 +1,695 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Copyright (c) 2025 Elara AI Pty Ltd
|
|
3
|
+
* Licensed under BSL 1.1. See LICENSE for details.
|
|
4
|
+
*/
|
|
5
|
+
/**
|
|
6
|
+
* Local in-process dataflow orchestrator.
|
|
7
|
+
*
|
|
8
|
+
* Executes dataflow using an async loop with step functions.
|
|
9
|
+
* This is the default orchestrator for CLI and local API server usage.
|
|
10
|
+
*
|
|
11
|
+
* Supports reactive execution: after each task completes, checks for
|
|
12
|
+
* root input changes. If inputs changed, affected tasks are invalidated
|
|
13
|
+
* and re-executed. Version vector consistency checks defer tasks whose
|
|
14
|
+
* inputs have conflicting provenance (diamond dependency protection).
|
|
15
|
+
*/
|
|
16
|
+
import { decodeBeast2For, encodeBeast2For, variant } from '@elaraai/east';
|
|
17
|
+
import { WorkspaceStateType } from '@elaraai/e3-types';
|
|
18
|
+
import { taskExecute } from '../../execution/LocalTaskRunner.js';
|
|
19
|
+
import { WorkspaceLockError, DataflowAbortedError, DataflowError } from '../../errors.js';
|
|
20
|
+
import { uuidv7 } from '../../uuid.js';
|
|
21
|
+
import { stateToStatus } from './interfaces.js';
|
|
22
|
+
import { stepInitialize, stepGetReady, stepPrepareTask, stepTaskStarted, stepTaskCompleted, stepTaskFailed, stepTasksSkipped, stepIsComplete, stepFinalize, stepCancel, stepApplyTreeUpdate, stepDetectInputChanges, stepInvalidateTasks, stepCheckVersionConsistency, } from '../steps.js';
|
|
23
|
+
// =============================================================================
|
|
24
|
+
// Async Mutex for State Mutations
|
|
25
|
+
// =============================================================================
|
|
26
|
+
/**
|
|
27
|
+
* Simple async mutex to serialize state mutations.
|
|
28
|
+
*
|
|
29
|
+
* When multiple tasks complete concurrently, their `.then()` callbacks
|
|
30
|
+
* mutate shared DataflowExecutionState. Between `await` points
|
|
31
|
+
* (stepApplyTreeUpdate, handleInputChanges), another callback can run
|
|
32
|
+
* and corrupt counters/version vectors. This mutex ensures only one
|
|
33
|
+
* state mutation runs at a time while task execution itself runs in parallel.
|
|
34
|
+
*/
|
|
35
|
+
class AsyncMutex {
    // FIFO list of waiters; each entry is the `resolve` of a pending acquire().
    queue = [];
    // True while some caller holds the mutex.
    locked = false;
    /**
     * Run `fn` while holding the mutex, releasing it afterwards.
     *
     * Waits for the mutex if another caller currently holds it. The
     * release happens in `finally`, so the lock is freed even when
     * `fn` throws or rejects.
     *
     * @param fn - Callback to run under the lock (sync or async).
     * @returns The (awaited) result of `fn`.
     */
    async runExclusive(fn) {
        await this.acquire();
        try {
            return await fn();
        }
        finally {
            this.release();
        }
    }
    /**
     * Take the mutex. Resolves immediately when free; otherwise the
     * returned promise resolves once a previous holder releases.
     */
    acquire() {
        if (!this.locked) {
            this.locked = true;
            return Promise.resolve();
        }
        return new Promise((resolve) => this.queue.push(resolve));
    }
    /**
     * Release the mutex. Hands the lock directly to the oldest waiter
     * (so `locked` stays true), or marks it free when nobody waits.
     */
    release() {
        if (this.queue.length > 0) {
            const wake = this.queue.shift();
            wake();
        }
        else {
            this.locked = false;
        }
    }
}
|
|
72
|
+
/**
|
|
73
|
+
* Local orchestrator for in-process dataflow execution.
|
|
74
|
+
*
|
|
75
|
+
* @remarks
|
|
76
|
+
* - Uses step functions for each operation
|
|
77
|
+
* - Per-dataset ref writes are atomic and independent (no mutex needed)
|
|
78
|
+
* - Supports AbortSignal for cancellation
|
|
79
|
+
* - Persists state through the provided state store
|
|
80
|
+
* - Reactive: detects input changes after each task, invalidates and
|
|
81
|
+
* re-executes affected tasks until fixpoint
|
|
82
|
+
*/
|
|
83
|
+
export class LocalOrchestrator {
|
|
84
|
+
    // Optional persistence backend; when undefined, execution state is
    // held only in memory (see the in-memory fallbacks throughout).
    stateStore;
    // Currently tracked executions, keyed by this.executionKey(repo, workspace, id).
    executions = new Map();
    /**
     * Create a new LocalOrchestrator.
     *
     * @param stateStore - Optional state store for persistence.
     * If not provided, state is only kept in memory.
     */
    constructor(stateStore) {
        this.stateStore = stateStore;
    }
|
|
95
|
+
    /**
     * Start a dataflow execution for a workspace and return a handle.
     *
     * Acquires locks, initializes execution state, registers the execution
     * in memory, and kicks off the (non-blocking) execution loop. The
     * returned handle identifies the execution for wait/getStatus/cancel.
     *
     * @param storage - Storage backend providing locks and refs.
     * @param repo - Repository identifier.
     * @param workspace - Workspace name to execute.
     * @param options - Optional lock, concurrency, force, filter, signal,
     *   and task callbacks.
     * @throws WorkspaceLockError when either lock cannot be acquired.
     */
    async start(storage, repo, workspace, options = {}) {
        // Acquire locks if not provided externally.
        // Dual-lock model:
        // - Shared lock on workspace (allows concurrent e3 set)
        // - Exclusive lock on workspace#dataflow (prevents concurrent starts)
        const externalLock = !!options.lock;
        let sharedLock = null;
        let dataflowLock = null;
        if (externalLock) {
            // Caller's lock serves as shared workspace lock
            sharedLock = options.lock;
            // Still acquire exclusive dataflow lock (prevents concurrent starts)
            dataflowLock = await storage.locks.acquire(repo, `${workspace}#dataflow`, variant('dataflow', null));
            if (!dataflowLock) {
                throw new WorkspaceLockError(workspace);
            }
        }
        else {
            // Acquire shared workspace lock first (coexists with e3 set)
            sharedLock = await storage.locks.acquire(repo, workspace, variant('dataflow', null), { mode: 'shared' });
            if (!sharedLock) {
                throw new WorkspaceLockError(workspace);
            }
            // Acquire exclusive dataflow lock (prevents concurrent starts)
            dataflowLock = await storage.locks.acquire(repo, `${workspace}#dataflow`, variant('dataflow', null));
            if (!dataflowLock) {
                // Undo the shared lock before reporting failure
                await sharedLock.release();
                throw new WorkspaceLockError(workspace);
            }
        }
        // From here on, both locks are held; the catch below releases them
        // on any initialization failure.
        try {
            // Get next execution ID from state store if available
            const executionId = this.stateStore
                ? await this.stateStore.nextExecutionId(repo, workspace)
                : String(Date.now()); // Fallback to timestamp if no state store
            // Initialize execution state
            const { state, readyTasks: _ } = await stepInitialize(storage, repo, workspace, executionId, {
                concurrency: options.concurrency,
                force: options.force,
                filter: options.filter,
            });
            // Persist initial state
            if (this.stateStore) {
                await this.stateStore.create(state);
            }
            // Create completion promise. The resolvers are assigned
            // synchronously by the Promise executor before first use.
            let resolveCompletion;
            let rejectCompletion;
            const completionPromise = new Promise((resolve, reject) => {
                resolveCompletion = resolve;
                rejectCompletion = reject;
            });
            // Create running execution state
            const execution = {
                state,
                lock: dataflowLock,
                sharedLock,
                externalLock,
                options,
                aborted: false,
                runningTasks: new Map(),
                mutex: new AsyncMutex(),
                runId: uuidv7(),
                taskExecutions: new Map(),
                completionPromise,
                resolveCompletion,
                rejectCompletion,
            };
            const key = this.executionKey(repo, workspace, executionId);
            this.executions.set(key, execution);
            // Listen for abort signal to persist cancellation immediately.
            if (options.signal) {
                const onAbort = () => {
                    execution.aborted = true;
                    if (this.stateStore) {
                        // Best-effort persist; failures are deliberately ignored
                        void this.stateStore.updateStatus(repo, workspace, executionId, 'cancelled', { error: 'Execution was cancelled' }).catch(() => { });
                    }
                };
                options.signal.addEventListener('abort', onAbort, { once: true });
                execution.abortCleanup = () => options.signal.removeEventListener('abort', onAbort);
            }
            // Start the execution loop (non-blocking). Loop failures are
            // routed to the completion promise, surfaced via wait().
            this.runExecutionLoop(storage, repo, execution).catch(err => {
                rejectCompletion(err);
            });
            return { id: executionId, repo, workspace };
        }
        catch (err) {
            // Always release the dataflow lock on initialization failure
            await dataflowLock.release();
            // Release shared workspace lock only if we acquired it (not external)
            if (!externalLock && sharedLock) {
                await sharedLock.release();
            }
            throw err;
        }
    }
|
|
192
|
+
async wait(handle) {
|
|
193
|
+
const key = this.executionKey(handle.repo, handle.workspace, handle.id);
|
|
194
|
+
const execution = this.executions.get(key);
|
|
195
|
+
if (!execution) {
|
|
196
|
+
throw new Error(`Execution ${handle.id} not found for workspace '${handle.workspace}'`);
|
|
197
|
+
}
|
|
198
|
+
return execution.completionPromise;
|
|
199
|
+
}
|
|
200
|
+
async getStatus(handle) {
|
|
201
|
+
const key = this.executionKey(handle.repo, handle.workspace, handle.id);
|
|
202
|
+
const execution = this.executions.get(key);
|
|
203
|
+
if (!execution) {
|
|
204
|
+
// Try to read from state store
|
|
205
|
+
if (this.stateStore) {
|
|
206
|
+
const state = await this.stateStore.read(handle.repo, handle.workspace, handle.id);
|
|
207
|
+
if (state) {
|
|
208
|
+
return stateToStatus(state);
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
throw new Error(`Execution ${handle.id} not found for workspace '${handle.workspace}'`);
|
|
212
|
+
}
|
|
213
|
+
return stateToStatus(execution.state);
|
|
214
|
+
}
|
|
215
|
+
async cancel(handle) {
|
|
216
|
+
const key = this.executionKey(handle.repo, handle.workspace, handle.id);
|
|
217
|
+
const execution = this.executions.get(key);
|
|
218
|
+
if (!execution) {
|
|
219
|
+
throw new Error(`Execution ${handle.id} not found for workspace '${handle.workspace}'`);
|
|
220
|
+
}
|
|
221
|
+
execution.aborted = true;
|
|
222
|
+
if (this.stateStore) {
|
|
223
|
+
await this.stateStore.updateStatus(handle.repo, handle.workspace, handle.id, 'cancelled', { error: 'Execution was cancelled' });
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
async getEvents(handle, sinceSeq) {
|
|
227
|
+
if (!this.stateStore) {
|
|
228
|
+
return [];
|
|
229
|
+
}
|
|
230
|
+
return this.stateStore.getEventsSince(handle.repo, handle.workspace, handle.id, sinceSeq);
|
|
231
|
+
}
|
|
232
|
+
/**
|
|
233
|
+
* Main execution loop with reactive fixpoint.
|
|
234
|
+
*
|
|
235
|
+
* After each task completes, checks for input changes and invalidates
|
|
236
|
+
* affected tasks. Uses version vector consistency checks to defer tasks
|
|
237
|
+
* whose inputs have conflicting provenance. Execution continues until
|
|
238
|
+
* fixpoint (no more ready, running, or deferred tasks).
|
|
239
|
+
*/
|
|
240
|
+
async runExecutionLoop(storage, repo, execution) {
|
|
241
|
+
const { state, options } = execution;
|
|
242
|
+
try {
|
|
243
|
+
let hasFailure = false;
|
|
244
|
+
// Read workspace state for DataflowRun recording
|
|
245
|
+
const wsData = await storage.refs.workspaceRead(repo, state.workspace);
|
|
246
|
+
const wsDecoder = decodeBeast2For(WorkspaceStateType);
|
|
247
|
+
const wsState = wsData && wsData.length > 0 ? wsDecoder(wsData) : null;
|
|
248
|
+
// Cache structure for the entire execution (immutable during execution)
|
|
249
|
+
const structure = wsState ? await this.readStructure(storage, repo, wsState.packageHash) : null;
|
|
250
|
+
// Write initial DataflowRun record
|
|
251
|
+
if (wsState) {
|
|
252
|
+
const initialRun = {
|
|
253
|
+
runId: execution.runId,
|
|
254
|
+
workspaceName: state.workspace,
|
|
255
|
+
packageRef: `${wsState.packageName}@${wsState.packageVersion}`,
|
|
256
|
+
startedAt: state.startedAt,
|
|
257
|
+
completedAt: variant('none', null),
|
|
258
|
+
status: variant('running', {}),
|
|
259
|
+
inputVersions: new Map(state.inputSnapshot),
|
|
260
|
+
outputVersions: variant('none', null),
|
|
261
|
+
taskExecutions: new Map(),
|
|
262
|
+
summary: {
|
|
263
|
+
total: BigInt(state.tasks.size),
|
|
264
|
+
completed: 0n,
|
|
265
|
+
cached: 0n,
|
|
266
|
+
failed: 0n,
|
|
267
|
+
skipped: 0n,
|
|
268
|
+
reexecuted: 0n,
|
|
269
|
+
},
|
|
270
|
+
};
|
|
271
|
+
await storage.refs.dataflowRunWrite(repo, state.workspace, initialRun);
|
|
272
|
+
}
|
|
273
|
+
// Check for abort signal from options
|
|
274
|
+
const checkAborted = () => {
|
|
275
|
+
if (options.signal?.aborted && !execution.aborted) {
|
|
276
|
+
execution.aborted = true;
|
|
277
|
+
}
|
|
278
|
+
return execution.aborted;
|
|
279
|
+
};
|
|
280
|
+
while (true) {
|
|
281
|
+
// Check if we're done
|
|
282
|
+
if (execution.runningTasks.size === 0 && stepIsComplete(state)) {
|
|
283
|
+
break;
|
|
284
|
+
}
|
|
285
|
+
// Get ready tasks
|
|
286
|
+
const readyTasks = stepGetReady(state);
|
|
287
|
+
// Track whether any task was completed synchronously (via cache hit)
|
|
288
|
+
// in this iteration. If so, new downstream tasks may have become ready
|
|
289
|
+
// that aren't in the stale readyTasks array.
|
|
290
|
+
let hadSyncCompletion = false;
|
|
291
|
+
// Launch tasks up to concurrency limit if no failure and not aborted
|
|
292
|
+
const concurrencyLimit = Number(state.concurrency);
|
|
293
|
+
while (!hasFailure &&
|
|
294
|
+
!checkAborted() &&
|
|
295
|
+
readyTasks.length > 0 &&
|
|
296
|
+
execution.runningTasks.size < concurrencyLimit) {
|
|
297
|
+
const taskName = readyTasks.shift();
|
|
298
|
+
const taskState = state.tasks.get(taskName);
|
|
299
|
+
if (!taskState || taskState.status === 'in_progress' || taskState.status === 'completed') {
|
|
300
|
+
continue;
|
|
301
|
+
}
|
|
302
|
+
// Version vector consistency check before launching
|
|
303
|
+
const vvCheck = stepCheckVersionConsistency(state, taskName);
|
|
304
|
+
if (!vvCheck.consistent) {
|
|
305
|
+
// Defer: inputs have inconsistent versions of the same root input
|
|
306
|
+
const ts = state.tasks.get(taskName);
|
|
307
|
+
if (ts)
|
|
308
|
+
ts.status = 'deferred';
|
|
309
|
+
// Emit task_deferred event
|
|
310
|
+
const mutableState = state;
|
|
311
|
+
mutableState.eventSeq = state.eventSeq + 1n;
|
|
312
|
+
const deferEvent = variant('task_deferred', {
|
|
313
|
+
seq: mutableState.eventSeq,
|
|
314
|
+
timestamp: new Date(),
|
|
315
|
+
task: taskName,
|
|
316
|
+
conflictPath: vvCheck.conflictPath,
|
|
317
|
+
});
|
|
318
|
+
mutableState.events.push(deferEvent);
|
|
319
|
+
options.onTaskDeferred?.(taskName, vvCheck.conflictPath);
|
|
320
|
+
continue;
|
|
321
|
+
}
|
|
322
|
+
// Prepare task (resolve inputs, check cache)
|
|
323
|
+
const prepared = await stepPrepareTask(storage, state, taskName);
|
|
324
|
+
// Check cache
|
|
325
|
+
if (prepared.cachedOutputHash !== null) {
|
|
326
|
+
hadSyncCompletion = true;
|
|
327
|
+
// Cache hit — wrap in mutex to serialize with concurrent .then() callbacks
|
|
328
|
+
await execution.mutex.runExclusive(async () => {
|
|
329
|
+
// Write ref with merged VV and update state
|
|
330
|
+
await stepApplyTreeUpdate(storage, repo, state.workspace, prepared.outputPath, prepared.cachedOutputHash, vvCheck.mergedVV);
|
|
331
|
+
stepTaskCompleted(state, taskName, prepared.cachedOutputHash, true, 0);
|
|
332
|
+
// Track task execution for DataflowRun
|
|
333
|
+
const existingCached = execution.taskExecutions.get(taskName);
|
|
334
|
+
execution.taskExecutions.set(taskName, {
|
|
335
|
+
executionId: state.id,
|
|
336
|
+
cached: true,
|
|
337
|
+
outputVersions: new Map(vvCheck.mergedVV),
|
|
338
|
+
executionCount: (existingCached?.executionCount ?? 0n) + 1n,
|
|
339
|
+
});
|
|
340
|
+
// Notify callback
|
|
341
|
+
options.onTaskComplete?.({
|
|
342
|
+
name: taskName,
|
|
343
|
+
cached: true,
|
|
344
|
+
state: 'success',
|
|
345
|
+
duration: 0,
|
|
346
|
+
});
|
|
347
|
+
// Detect input changes after cached result
|
|
348
|
+
await this.handleInputChanges(storage, state, options, structure);
|
|
349
|
+
// Update state store
|
|
350
|
+
await this.persistState(execution, state);
|
|
351
|
+
});
|
|
352
|
+
continue;
|
|
353
|
+
}
|
|
354
|
+
// Mark as started (event added by step function)
|
|
355
|
+
stepTaskStarted(state, taskName);
|
|
356
|
+
await this.persistState(execution, state);
|
|
357
|
+
options.onTaskStart?.(taskName);
|
|
358
|
+
// Launch task execution
|
|
359
|
+
const taskPromise = this.executeTask(storage, repo, execution, taskName, prepared).then(result => execution.mutex.runExclusive(async () => {
|
|
360
|
+
// Handle task completion
|
|
361
|
+
if (result.state === 'success') {
|
|
362
|
+
// Re-check VV consistency (inputs may have changed during execution)
|
|
363
|
+
const postVVCheck = stepCheckVersionConsistency(state, taskName);
|
|
364
|
+
const mergedVV = postVVCheck.consistent
|
|
365
|
+
? postVVCheck.mergedVV
|
|
366
|
+
: new Map();
|
|
367
|
+
if (result.outputHash) {
|
|
368
|
+
// Write output ref with merged VV
|
|
369
|
+
await stepApplyTreeUpdate(storage, repo, state.workspace, prepared.outputPath, result.outputHash, mergedVV);
|
|
370
|
+
}
|
|
371
|
+
stepTaskCompleted(state, taskName, result.outputHash ?? '', result.cached, result.duration);
|
|
372
|
+
// Track task execution for DataflowRun
|
|
373
|
+
const existing = execution.taskExecutions.get(taskName);
|
|
374
|
+
execution.taskExecutions.set(taskName, {
|
|
375
|
+
executionId: result.executionId ?? state.id,
|
|
376
|
+
cached: result.cached,
|
|
377
|
+
outputVersions: new Map(mergedVV),
|
|
378
|
+
executionCount: (existing?.executionCount ?? 0n) + 1n,
|
|
379
|
+
});
|
|
380
|
+
options.onTaskComplete?.({
|
|
381
|
+
name: taskName,
|
|
382
|
+
cached: result.cached,
|
|
383
|
+
state: 'success',
|
|
384
|
+
duration: result.duration,
|
|
385
|
+
});
|
|
386
|
+
// Detect input changes after task completion
|
|
387
|
+
await this.handleInputChanges(storage, state, options, structure);
|
|
388
|
+
}
|
|
389
|
+
else {
|
|
390
|
+
hasFailure = true;
|
|
391
|
+
const { result: failedResult } = stepTaskFailed(state, taskName, result.error, result.exitCode, result.duration);
|
|
392
|
+
options.onTaskComplete?.({
|
|
393
|
+
name: taskName,
|
|
394
|
+
cached: false,
|
|
395
|
+
state: result.state === 'failed' ? 'failed' : 'error',
|
|
396
|
+
error: result.error,
|
|
397
|
+
exitCode: result.exitCode,
|
|
398
|
+
duration: result.duration,
|
|
399
|
+
});
|
|
400
|
+
// Skip dependents (events added by step function)
|
|
401
|
+
const skipEvents = stepTasksSkipped(state, failedResult.toSkip, taskName);
|
|
402
|
+
for (const skipEvent of skipEvents) {
|
|
403
|
+
if (skipEvent.type === 'task_skipped') {
|
|
404
|
+
options.onTaskComplete?.({
|
|
405
|
+
name: skipEvent.value.task,
|
|
406
|
+
cached: false,
|
|
407
|
+
state: 'skipped',
|
|
408
|
+
duration: 0,
|
|
409
|
+
});
|
|
410
|
+
}
|
|
411
|
+
}
|
|
412
|
+
}
|
|
413
|
+
// Update state store
|
|
414
|
+
await this.persistState(execution, state);
|
|
415
|
+
})).finally(() => {
|
|
416
|
+
execution.runningTasks.delete(taskName);
|
|
417
|
+
});
|
|
418
|
+
execution.runningTasks.set(taskName, taskPromise);
|
|
419
|
+
}
|
|
420
|
+
// Wait for at least one task to complete if we can't launch more
|
|
421
|
+
if (execution.runningTasks.size > 0) {
|
|
422
|
+
await Promise.race(execution.runningTasks.values());
|
|
423
|
+
}
|
|
424
|
+
else if (hadSyncCompletion) {
|
|
425
|
+
// A cached task completed synchronously, which may have made new
|
|
426
|
+
// downstream tasks ready. Continue to re-check at the top of the loop.
|
|
427
|
+
continue;
|
|
428
|
+
}
|
|
429
|
+
else if (readyTasks.length === 0 || checkAborted() || hasFailure) {
|
|
430
|
+
break;
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
// Wait for any remaining tasks
|
|
434
|
+
if (execution.runningTasks.size > 0) {
|
|
435
|
+
await Promise.all(execution.runningTasks.values());
|
|
436
|
+
}
|
|
437
|
+
// Check for stuck state: non-terminal tasks remain but none are ready or running.
|
|
438
|
+
// When a filter is active, only the filtered task is relevant — non-filtered
|
|
439
|
+
// tasks are expected to remain pending.
|
|
440
|
+
const filterValue = state.filter.type === 'some' ? state.filter.value : null;
|
|
441
|
+
const stuckTasks = [...state.tasks.entries()]
|
|
442
|
+
.filter(([name, ts]) => {
|
|
443
|
+
if (ts.status !== 'pending' && ts.status !== 'ready' && ts.status !== 'deferred') {
|
|
444
|
+
return false;
|
|
445
|
+
}
|
|
446
|
+
// When a filter is active, non-filtered tasks staying pending is expected
|
|
447
|
+
if (filterValue !== null && name !== filterValue) {
|
|
448
|
+
return false;
|
|
449
|
+
}
|
|
450
|
+
return true;
|
|
451
|
+
})
|
|
452
|
+
.map(([name, ts]) => `${name} (${ts.status})`)
|
|
453
|
+
.join(', ');
|
|
454
|
+
if (stuckTasks.length > 0 && !checkAborted() && !hasFailure) {
|
|
455
|
+
throw new DataflowError(`Dataflow stuck: ${stuckTasks}`);
|
|
456
|
+
}
|
|
457
|
+
// Check for abort one final time
|
|
458
|
+
if (checkAborted()) {
|
|
459
|
+
stepCancel(state, 'Execution was aborted');
|
|
460
|
+
if (this.stateStore) {
|
|
461
|
+
await this.stateStore.update(state);
|
|
462
|
+
}
|
|
463
|
+
// Write cancelled DataflowRun record
|
|
464
|
+
if (wsState) {
|
|
465
|
+
const cancelledRun = {
|
|
466
|
+
runId: execution.runId,
|
|
467
|
+
workspaceName: state.workspace,
|
|
468
|
+
packageRef: `${wsState.packageName}@${wsState.packageVersion}`,
|
|
469
|
+
startedAt: state.startedAt,
|
|
470
|
+
completedAt: variant('some', new Date()),
|
|
471
|
+
status: variant('cancelled', {}),
|
|
472
|
+
inputVersions: new Map(state.inputSnapshot),
|
|
473
|
+
outputVersions: variant('some', this.buildOutputVersions(state)),
|
|
474
|
+
taskExecutions: new Map(execution.taskExecutions),
|
|
475
|
+
summary: {
|
|
476
|
+
total: BigInt(state.tasks.size),
|
|
477
|
+
completed: state.executed + state.cached,
|
|
478
|
+
cached: state.cached,
|
|
479
|
+
failed: state.failed,
|
|
480
|
+
skipped: state.skipped,
|
|
481
|
+
reexecuted: state.reexecuted,
|
|
482
|
+
},
|
|
483
|
+
};
|
|
484
|
+
await storage.refs.dataflowRunWrite(repo, state.workspace, cancelledRun);
|
|
485
|
+
}
|
|
486
|
+
// Build partial results for abort error
|
|
487
|
+
const partialResults = this.buildPartialResults(state);
|
|
488
|
+
throw new DataflowAbortedError(partialResults);
|
|
489
|
+
}
|
|
490
|
+
// Finalize (event added by step function)
|
|
491
|
+
const { result } = stepFinalize(state, execution.runId);
|
|
492
|
+
if (this.stateStore) {
|
|
493
|
+
await this.stateStore.update(state);
|
|
494
|
+
}
|
|
495
|
+
// Write final DataflowRun record
|
|
496
|
+
if (wsState) {
|
|
497
|
+
let finalStatus;
|
|
498
|
+
if (!result.success) {
|
|
499
|
+
// Find the failed task for the error record
|
|
500
|
+
const failedTaskEntry = [...state.tasks.entries()]
|
|
501
|
+
.find(([, ts]) => ts.status === 'failed');
|
|
502
|
+
const failedTaskName = failedTaskEntry?.[0] ?? 'unknown';
|
|
503
|
+
const failedError = failedTaskEntry?.[1].error.type === 'some'
|
|
504
|
+
? failedTaskEntry[1].error.value
|
|
505
|
+
: 'Task failed';
|
|
506
|
+
finalStatus = variant('failed', {
|
|
507
|
+
failedTask: failedTaskName,
|
|
508
|
+
error: failedError,
|
|
509
|
+
});
|
|
510
|
+
}
|
|
511
|
+
else {
|
|
512
|
+
finalStatus = variant('completed', {});
|
|
513
|
+
}
|
|
514
|
+
const finalRun = {
|
|
515
|
+
runId: execution.runId,
|
|
516
|
+
workspaceName: state.workspace,
|
|
517
|
+
packageRef: `${wsState.packageName}@${wsState.packageVersion}`,
|
|
518
|
+
startedAt: state.startedAt,
|
|
519
|
+
completedAt: variant('some', new Date()),
|
|
520
|
+
status: finalStatus,
|
|
521
|
+
inputVersions: new Map(state.inputSnapshot),
|
|
522
|
+
outputVersions: variant('some', this.buildOutputVersions(state)),
|
|
523
|
+
taskExecutions: new Map(execution.taskExecutions),
|
|
524
|
+
summary: {
|
|
525
|
+
total: BigInt(state.tasks.size),
|
|
526
|
+
completed: state.executed + state.cached,
|
|
527
|
+
cached: state.cached,
|
|
528
|
+
failed: state.failed,
|
|
529
|
+
skipped: state.skipped,
|
|
530
|
+
reexecuted: state.reexecuted,
|
|
531
|
+
},
|
|
532
|
+
};
|
|
533
|
+
await storage.refs.dataflowRunWrite(repo, state.workspace, finalRun);
|
|
534
|
+
// Update workspace state with currentRunId on success
|
|
535
|
+
if (result.success) {
|
|
536
|
+
const currentWsData = await storage.refs.workspaceRead(repo, state.workspace);
|
|
537
|
+
if (currentWsData && currentWsData.length > 0) {
|
|
538
|
+
const currentWsState = wsDecoder(currentWsData);
|
|
539
|
+
const updatedWsState = {
|
|
540
|
+
...currentWsState,
|
|
541
|
+
currentRunId: variant('some', execution.runId),
|
|
542
|
+
};
|
|
543
|
+
const encoder = encodeBeast2For(WorkspaceStateType);
|
|
544
|
+
await storage.refs.workspaceWrite(repo, state.workspace, encoder(updatedWsState));
|
|
545
|
+
}
|
|
546
|
+
}
|
|
547
|
+
}
|
|
548
|
+
execution.resolveCompletion(result);
|
|
549
|
+
}
|
|
550
|
+
finally {
|
|
551
|
+
// Remove abort listener to avoid leaking execution object
|
|
552
|
+
execution.abortCleanup?.();
|
|
553
|
+
// Always release the dataflow lock (we always acquire it)
|
|
554
|
+
await execution.lock.release();
|
|
555
|
+
// Release shared workspace lock only if we acquired it (not external)
|
|
556
|
+
if (!execution.externalLock && execution.sharedLock) {
|
|
557
|
+
await execution.sharedLock.release();
|
|
558
|
+
}
|
|
559
|
+
// Clean up execution state
|
|
560
|
+
const key = this.executionKey(repo, state.workspace, state.id);
|
|
561
|
+
this.executions.delete(key);
|
|
562
|
+
}
|
|
563
|
+
}
|
|
564
|
+
/**
|
|
565
|
+
* Detect input changes and invalidate affected tasks.
|
|
566
|
+
*
|
|
567
|
+
* Called after each task completion to implement the reactive loop.
|
|
568
|
+
*/
|
|
569
|
+
async handleInputChanges(storage, state, options, structure) {
|
|
570
|
+
const { changes, events: changeEvents } = await stepDetectInputChanges(storage, state, structure);
|
|
571
|
+
// Notify via callbacks
|
|
572
|
+
for (const evt of changeEvents) {
|
|
573
|
+
if (evt.type === 'input_changed') {
|
|
574
|
+
options.onInputChanged?.(evt.value.path, evt.value.previousHash, evt.value.newHash);
|
|
575
|
+
}
|
|
576
|
+
}
|
|
577
|
+
if (changes.length > 0) {
|
|
578
|
+
const mutableState = state;
|
|
579
|
+
const { invalidated, events: invEvents } = stepInvalidateTasks(state, changes);
|
|
580
|
+
// Track re-executions (tasks that were completed and are now invalidated)
|
|
581
|
+
mutableState.reexecuted = state.reexecuted + BigInt(invalidated.length);
|
|
582
|
+
for (const evt of invEvents) {
|
|
583
|
+
if (evt.type === 'task_invalidated') {
|
|
584
|
+
options.onTaskInvalidated?.(evt.value.task, evt.value.reason);
|
|
585
|
+
}
|
|
586
|
+
}
|
|
587
|
+
}
|
|
588
|
+
}
|
|
589
|
+
/**
|
|
590
|
+
* Execute a single task.
|
|
591
|
+
*/
|
|
592
|
+
async executeTask(storage, repo, execution, taskName, prepared) {
|
|
593
|
+
const { options } = execution;
|
|
594
|
+
const startTime = Date.now();
|
|
595
|
+
const execOptions = {
|
|
596
|
+
force: execution.state.force,
|
|
597
|
+
signal: options.signal,
|
|
598
|
+
onStdout: options.onStdout ? (data) => options.onStdout(taskName, data) : undefined,
|
|
599
|
+
onStderr: options.onStderr ? (data) => options.onStderr(taskName, data) : undefined,
|
|
600
|
+
};
|
|
601
|
+
// Use provided runner if available, otherwise call taskExecute directly
|
|
602
|
+
if (options.runner) {
|
|
603
|
+
const result = await options.runner.execute(storage, prepared.taskHash, prepared.inputHashes, execOptions);
|
|
604
|
+
return {
|
|
605
|
+
state: result.state,
|
|
606
|
+
cached: result.cached,
|
|
607
|
+
outputHash: result.outputHash,
|
|
608
|
+
executionId: result.executionId,
|
|
609
|
+
exitCode: result.exitCode,
|
|
610
|
+
error: result.error,
|
|
611
|
+
duration: Date.now() - startTime,
|
|
612
|
+
};
|
|
613
|
+
}
|
|
614
|
+
else {
|
|
615
|
+
const result = await taskExecute(storage, repo, prepared.taskHash, prepared.inputHashes, execOptions);
|
|
616
|
+
return {
|
|
617
|
+
state: result.state,
|
|
618
|
+
cached: result.cached,
|
|
619
|
+
outputHash: result.outputHash ?? undefined,
|
|
620
|
+
executionId: result.executionId,
|
|
621
|
+
exitCode: result.exitCode ?? undefined,
|
|
622
|
+
error: result.error ?? undefined,
|
|
623
|
+
duration: Date.now() - startTime,
|
|
624
|
+
};
|
|
625
|
+
}
|
|
626
|
+
}
|
|
627
|
+
/**
|
|
628
|
+
* Build partial results for abort error.
|
|
629
|
+
*/
|
|
630
|
+
buildPartialResults(state) {
|
|
631
|
+
const results = [];
|
|
632
|
+
for (const [name, taskState] of state.tasks) {
|
|
633
|
+
if (taskState.status === 'completed' || taskState.status === 'failed' || taskState.status === 'skipped') {
|
|
634
|
+
// Extract values from Option types
|
|
635
|
+
const cached = taskState.cached.type === 'some' ? taskState.cached.value : false;
|
|
636
|
+
const error = taskState.error.type === 'some' ? taskState.error.value : undefined;
|
|
637
|
+
const exitCode = taskState.exitCode.type === 'some' ? Number(taskState.exitCode.value) : undefined;
|
|
638
|
+
const duration = taskState.duration.type === 'some' ? Number(taskState.duration.value) : 0;
|
|
639
|
+
results.push({
|
|
640
|
+
name,
|
|
641
|
+
cached,
|
|
642
|
+
state: taskState.status === 'completed' ? 'success' : taskState.status,
|
|
643
|
+
error,
|
|
644
|
+
exitCode,
|
|
645
|
+
duration,
|
|
646
|
+
});
|
|
647
|
+
}
|
|
648
|
+
}
|
|
649
|
+
return results;
|
|
650
|
+
}
|
|
651
|
+
/**
|
|
652
|
+
* Build output versions map from completed task states.
|
|
653
|
+
*/
|
|
654
|
+
buildOutputVersions(state) {
|
|
655
|
+
const outputVersions = new Map();
|
|
656
|
+
const graph = state.graph.type === 'some' ? state.graph.value : null;
|
|
657
|
+
if (graph) {
|
|
658
|
+
for (const task of graph.tasks) {
|
|
659
|
+
const ts = state.tasks.get(task.name);
|
|
660
|
+
if (ts && ts.outputHash.type === 'some') {
|
|
661
|
+
outputVersions.set(task.output, ts.outputHash.value);
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
}
|
|
665
|
+
return outputVersions;
|
|
666
|
+
}
|
|
667
|
+
/**
|
|
668
|
+
* Read workspace structure from storage.
|
|
669
|
+
*/
|
|
670
|
+
async readStructure(storage, repo, packageHash) {
|
|
671
|
+
const { PackageObjectType } = await import('@elaraai/e3-types');
|
|
672
|
+
const pkgData = await storage.objects.read(repo, packageHash);
|
|
673
|
+
const pkgDecoder = decodeBeast2For(PackageObjectType);
|
|
674
|
+
const pkgObject = pkgDecoder(Buffer.from(pkgData));
|
|
675
|
+
return pkgObject.data.structure;
|
|
676
|
+
}
|
|
677
|
+
/**
|
|
678
|
+
* Persist state, skipping the write when execution has been aborted
|
|
679
|
+
* and the state doesn't yet reflect cancellation (defense-in-depth).
|
|
680
|
+
*/
|
|
681
|
+
async persistState(execution, state) {
|
|
682
|
+
if (!this.stateStore)
|
|
683
|
+
return;
|
|
684
|
+
if (execution.aborted && state.status !== 'cancelled')
|
|
685
|
+
return;
|
|
686
|
+
await this.stateStore.update(state);
|
|
687
|
+
}
|
|
688
|
+
/**
|
|
689
|
+
* Generate unique key for an execution.
|
|
690
|
+
*/
|
|
691
|
+
executionKey(repo, workspace, id) {
|
|
692
|
+
return `${repo}::${workspace}:${id}`;
|
|
693
|
+
}
|
|
694
|
+
}
|
|
695
|
+
//# sourceMappingURL=LocalOrchestrator.js.map
|