prompt-language-shell 0.9.0 → 0.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{services/config-labels.js → configuration/labels.js} +1 -1
- package/dist/configuration/schema.js +2 -2
- package/dist/configuration/steps.js +171 -0
- package/dist/configuration/transformation.js +17 -0
- package/dist/execution/handlers.js +1 -7
- package/dist/execution/hooks.js +291 -0
- package/dist/execution/processing.js +3 -1
- package/dist/execution/reducer.js +14 -12
- package/dist/execution/runner.js +81 -0
- package/dist/execution/types.js +1 -0
- package/dist/execution/utils.js +22 -0
- package/dist/services/components.js +109 -394
- package/dist/services/logger.js +3 -3
- package/dist/services/refinement.js +5 -2
- package/dist/services/router.js +69 -46
- package/dist/skills/execute.md +28 -10
- package/dist/ui/Command.js +11 -7
- package/dist/ui/Component.js +5 -2
- package/dist/ui/Config.js +9 -3
- package/dist/ui/Execute.js +211 -148
- package/dist/ui/Introspect.js +13 -14
- package/dist/ui/List.js +2 -2
- package/dist/ui/Main.js +14 -7
- package/dist/ui/Schedule.js +3 -1
- package/dist/ui/Subtask.js +6 -3
- package/dist/ui/Task.js +7 -171
- package/dist/ui/Validate.js +26 -21
- package/dist/ui/Workflow.js +21 -4
- package/package.json +1 -1
- package/dist/parser.js +0 -13
- package/dist/services/config-utils.js +0 -20

package/dist/configuration/schema.js
@@ -1,7 +1,7 @@
 import YAML from 'yaml';
 import { AnthropicModel, ConfigDefinitionType, DebugLevel, SUPPORTED_DEBUG_LEVELS, SUPPORTED_MODELS, } from './types.js';
-import { flattenConfig } from '
-import { getConfigLabel } from '
+import { flattenConfig } from './transformation.js';
+import { getConfigLabel } from './labels.js';
 import { defaultFileSystem } from '../services/filesystem.js';
 import { getConfigPath, loadConfig } from './io.js';
 /**

package/dist/configuration/steps.js
@@ -0,0 +1,171 @@
+import { parse as parseYaml } from 'yaml';
+import { ConfigDefinitionType } from './types.js';
+import { getConfigPath, loadConfig } from './io.js';
+import { getConfigSchema } from './schema.js';
+import { getConfigLabel } from './labels.js';
+import { defaultFileSystem } from '../services/filesystem.js';
+import { StepType } from '../ui/Config.js';
+export function createConfigSteps() {
+    // Use schema-based config step generation for required Anthropic settings
+    return createConfigStepsFromSchema(['anthropic.key', 'anthropic.model']);
+}
+/**
+ * Get current config value for a dotted key path
+ */
+function getConfigValue(config, key) {
+    if (!config)
+        return undefined;
+    const parts = key.split('.');
+    let value = config;
+    for (const part of parts) {
+        if (value && typeof value === 'object' && part in value) {
+            value = value[part];
+        }
+        else {
+            return undefined;
+        }
+    }
+    return value;
+}
+/**
+ * Get validation function for a config definition
+ */
+function getValidator(definition) {
+    switch (definition.type) {
+        case ConfigDefinitionType.RegExp:
+            return (value) => definition.pattern.test(value);
+        case ConfigDefinitionType.String:
+            return () => true; // Strings are always valid
+        case ConfigDefinitionType.Enum:
+            return (value) => definition.values.includes(value);
+        case ConfigDefinitionType.Number:
+            return (value) => !isNaN(Number(value));
+        case ConfigDefinitionType.Boolean:
+            return (value) => value === 'true' || value === 'false';
+    }
+}
+/**
+ * Create config steps from schema for specified keys
+ */
+export function createConfigStepsFromSchema(keys, fs = defaultFileSystem) {
+    const schema = getConfigSchema();
+    let currentConfig = null;
+    let rawConfig = null;
+    // Load validated config (may fail if config has validation errors)
+    try {
+        currentConfig = loadConfig(fs);
+    }
+    catch {
+        // Config doesn't exist or has validation errors, use defaults
+    }
+    // Load raw config separately (for discovered keys not in schema)
+    try {
+        const configFile = getConfigPath();
+        if (fs.exists(configFile)) {
+            const content = fs.readFile(configFile, 'utf-8');
+            rawConfig = parseYaml(content);
+        }
+    }
+    catch {
+        // Config file doesn't exist or can't be parsed
+    }
+    return keys.map((key) => {
+        // Check if key is in schema (system config)
+        if (!(key in schema)) {
+            // Key is not in schema - it's from a skill or discovered config
+            // Create a simple text step with cached label or full path as description
+            const keyParts = key.split('.');
+            const shortKey = keyParts[keyParts.length - 1];
+            // Load current value if it exists (use rawConfig since discovered keys aren't in validated config)
+            const currentValue = getConfigValue(rawConfig, key);
+            const value = currentValue !== undefined && typeof currentValue === 'string'
+                ? currentValue
+                : null;
+            // Use cached label if available, fallback to key path
+            const cachedLabel = getConfigLabel(key, fs);
+            return {
+                description: cachedLabel ?? key,
+                key: shortKey,
+                path: key,
+                type: StepType.Text,
+                value,
+                validate: () => true, // Accept any string for now
+            };
+        }
+        const definition = schema[key];
+        const currentValue = getConfigValue(currentConfig, key);
+        const keyParts = key.split('.');
+        const shortKey = keyParts[keyParts.length - 1];
+        // Map definition to ConfigStep based on type
+        switch (definition.type) {
+            case ConfigDefinitionType.RegExp:
+            case ConfigDefinitionType.String: {
+                const value = currentValue !== undefined && typeof currentValue === 'string'
+                    ? currentValue
+                    : definition.type === ConfigDefinitionType.String
+                        ? (definition.default ?? '')
+                        : null;
+                return {
+                    description: definition.description,
+                    key: shortKey,
+                    path: key,
+                    type: StepType.Text,
+                    value,
+                    validate: getValidator(definition),
+                };
+            }
+            case ConfigDefinitionType.Number: {
+                const value = currentValue !== undefined && typeof currentValue === 'number'
+                    ? String(currentValue)
+                    : definition.default !== undefined
+                        ? String(definition.default)
+                        : '0';
+                return {
+                    description: definition.description,
+                    key: shortKey,
+                    path: key,
+                    type: StepType.Text,
+                    value,
+                    validate: getValidator(definition),
+                };
+            }
+            case ConfigDefinitionType.Enum: {
+                const currentStr = currentValue !== undefined && typeof currentValue === 'string'
+                    ? currentValue
+                    : definition.default;
+                const defaultIndex = currentStr
+                    ? definition.values.indexOf(currentStr)
+                    : 0;
+                return {
+                    description: definition.description,
+                    key: shortKey,
+                    path: key,
+                    type: StepType.Selection,
+                    options: definition.values.map((value) => ({
+                        label: value,
+                        value,
+                    })),
+                    defaultIndex: Math.max(0, defaultIndex),
+                    validate: getValidator(definition),
+                };
+            }
+            case ConfigDefinitionType.Boolean: {
+                const currentBool = currentValue !== undefined && typeof currentValue === 'boolean'
+                    ? currentValue
+                    : undefined;
+                return {
+                    description: definition.description,
+                    key: shortKey,
+                    path: key,
+                    type: StepType.Selection,
+                    options: [
+                        { label: 'yes', value: 'true' },
+                        { label: 'no', value: 'false' },
+                    ],
+                    defaultIndex: currentBool !== undefined ? (currentBool ? 0 : 1) : 0,
+                    validate: getValidator(definition),
+                };
+            }
+        }
+    });
+}
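
For orientation, a sketch of the step object the Enum branch above would produce for a schema key such as 'anthropic.model'. The description wording and model ids are illustrative assumptions, not values from the package; StepType is the enum imported from ../ui/Config.js in the diff.

// Hypothetical result of createConfigStepsFromSchema(['anthropic.model'])
// for an Enum definition; only the object shape follows the Enum branch above.
const exampleEnumStep = {
    description: 'Anthropic model to use',      // definition.description (assumed wording)
    key: 'model',                               // last segment of the dotted key path
    path: 'anthropic.model',
    type: StepType.Selection,                   // StepType from '../ui/Config.js'
    options: [
        { label: 'model-a', value: 'model-a' }, // placeholder model ids
        { label: 'model-b', value: 'model-b' },
    ],
    defaultIndex: 0,                            // index of the current or default value
    validate: (value) => ['model-a', 'model-b'].includes(value),
};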

package/dist/configuration/transformation.js
@@ -1,5 +1,22 @@
 import { ConfigDefinitionType } from './types.js';
 import { getConfigSchema } from './schema.js';
+/**
+ * Flatten nested config object to dot notation
+ * Example: { a: { b: 1 } } => { 'a.b': 1 }
+ */
+export function flattenConfig(obj, prefix = '') {
+    const result = {};
+    for (const [key, value] of Object.entries(obj)) {
+        const fullKey = prefix ? `${prefix}.${key}` : key;
+        if (value && typeof value === 'object' && !Array.isArray(value)) {
+            Object.assign(result, flattenConfig(value, fullKey));
+        }
+        else {
+            result[fullKey] = value;
+        }
+    }
+    return result;
+}
 /**
  * Convert string value to appropriate type based on schema definition
  */
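
A quick worked example of the flattenConfig helper added above; the nested object is illustrative, and the call assumes the function is imported from the new configuration/transformation.js module.

// Nested config in, dot-notation keys out; arrays and primitives are kept as leaf values.
const nested = { anthropic: { key: 'sk-xxx', model: 'claude' }, debug: { level: 'info' } };
console.log(flattenConfig(nested));
// => { 'anthropic.key': 'sk-xxx', 'anthropic.model': 'claude', 'debug.level': 'info' }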

package/dist/execution/handlers.js
@@ -19,7 +19,6 @@ export function handleTaskCompletion(index, elapsed, context) {
             message,
             summary,
             tasks: updatedTaskInfos,
-            completed: index + 1,
             completionMessage: null,
             error: null,
         },
@@ -39,7 +38,6 @@ export function handleTaskCompletion(index, elapsed, context) {
             message,
             summary,
             tasks: updatedTaskInfos,
-            completed: index + 1,
             completionMessage: completion,
             error: null,
         },
@@ -65,7 +63,6 @@ export function handleTaskFailure(index, error, elapsed, context) {
             message,
             summary,
             tasks: updatedTaskInfos,
-            completed: index + 1,
             completionMessage: null,
             error: null,
         },
@@ -83,7 +80,6 @@ export function handleTaskFailure(index, error, elapsed, context) {
             message,
             summary,
             tasks: updatedTaskInfos,
-            completed: index + 1,
             completionMessage: null,
             error: null,
         },
@@ -104,7 +100,6 @@ export function handleTaskFailure(index, error, elapsed, context) {
             message,
             summary,
             tasks: updatedTaskInfos,
-            completed: index + 1,
             completionMessage: completion,
             error: null,
         },
@@ -114,12 +109,11 @@ export function handleTaskFailure(index, error, elapsed, context) {
 /**
  * Builds final state for task abortion.
  */
-export function buildAbortedState(tasks, message, summary
+export function buildAbortedState(tasks, message, summary) {
     return {
         message,
         summary,
         tasks,
-        completed,
         completionMessage: null,
         error: null,
     };
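
Net effect of the handlers change above: the completed counter is gone, so buildAbortedState now takes only tasks, message, and summary. A shape sketch with placeholder arguments:

// Hypothetical call; the task object fields are placeholders shaped like the executor's task infos.
const aborted = buildAbortedState(
    [{ label: 'list files', status: ExecutionStatus.Aborted, elapsed: 1200 }],
    'Running 1 task',
    'List the contents of the working directory');
// => { message, summary, tasks, completionMessage: null, error: null }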

package/dist/execution/hooks.js
@@ -0,0 +1,291 @@
+import { useCallback, useEffect, useRef, useState, } from 'react';
+import { ComponentStatus, } from '../types/components.js';
+import { FeedbackType } from '../types/types.js';
+import { createFeedback, createMessage } from '../services/components.js';
+import { formatErrorMessage, getExecutionErrorMessage, } from '../services/messages.js';
+import { ExecutionStatus } from '../services/shell.js';
+import { ensureMinimumTime } from '../services/timing.js';
+import { handleTaskCompletion, handleTaskFailure } from './handlers.js';
+import { processTasks } from './processing.js';
+import { executeTask } from './runner.js';
+import { ExecuteActionType } from './types.js';
+import { getCurrentTaskIndex } from './utils.js';
+const ELAPSED_UPDATE_INTERVAL = 1000;
+/**
+ * Track elapsed time from a start timestamp.
+ * Returns 0 when not active or no start time.
+ */
+export function useElapsedTimer(startTime, isActive) {
+    const [elapsed, setElapsed] = useState(0);
+    useEffect(() => {
+        if (!startTime || !isActive)
+            return;
+        const interval = setInterval(() => {
+            setElapsed(Date.now() - startTime);
+        }, ELAPSED_UPDATE_INTERVAL);
+        return () => {
+            clearInterval(interval);
+        };
+    }, [startTime, isActive]);
+    return elapsed;
+}
+/**
+ * Manage live output and timing for the currently executing task.
+ * Groups related state for tracking a running task's output.
+ */
+export function useLiveTaskOutput() {
+    const [output, setOutput] = useState({
+        stdout: '',
+        stderr: '',
+        error: '',
+    });
+    const [startTime, setStartTime] = useState(null);
+    const start = useCallback(() => {
+        setOutput({ stdout: '', stderr: '', error: '' });
+        setStartTime(Date.now());
+    }, []);
+    const stop = useCallback(() => {
+        setStartTime(null);
+    }, []);
+    return {
+        output,
+        startTime,
+        setOutput,
+        start,
+        stop,
+    };
+}
+/**
+ * Handle execution cancellation with a ref-based flag.
+ * The ref is needed because callbacks check the current cancellation state.
+ */
+export function useCancellation() {
+    const cancelledRef = useRef(false);
+    const cancel = useCallback(() => {
+        cancelledRef.current = true;
+    }, []);
+    const reset = useCallback(() => {
+        cancelledRef.current = false;
+    }, []);
+    return {
+        cancelledRef,
+        cancel,
+        reset,
+    };
+}
+const MINIMUM_PROCESSING_TIME = 400;
+/**
+ * Helper to create ExecuteState with defaults
+ */
+function createExecuteState(overrides = {}) {
+    return {
+        message: '',
+        summary: '',
+        tasks: [],
+        completionMessage: null,
+        error: null,
+        ...overrides,
+    };
+}
+/**
+ * Process input tasks through AI to generate executable commands.
+ * Handles the initial phase of task execution.
+ */
+export function useTaskProcessor(config) {
+    const { inputTasks, service, isActive, hasProcessed, tasksCount, dispatch, requestHandlers, lifecycleHandlers, workflowHandlers, } = config;
+    useEffect(() => {
+        if (!isActive || tasksCount > 0 || hasProcessed) {
+            return;
+        }
+        let mounted = true;
+        async function process(svc) {
+            const startTime = Date.now();
+            try {
+                const result = await processTasks(inputTasks, svc);
+                await ensureMinimumTime(startTime, MINIMUM_PROCESSING_TIME);
+                if (!mounted)
+                    return;
+                // Add debug components to timeline if present
+                if (result.debug?.length) {
+                    workflowHandlers.addToTimeline(...result.debug);
+                }
+                if (result.commands.length === 0) {
+                    if (result.error) {
+                        const errorMessage = getExecutionErrorMessage(result.error);
+                        workflowHandlers.addToTimeline(createMessage({ text: errorMessage }, ComponentStatus.Done));
+                        requestHandlers.onCompleted(createExecuteState({ message: result.message }));
+                        lifecycleHandlers.completeActive();
+                        return;
+                    }
+                    dispatch({
+                        type: ExecuteActionType.ProcessingComplete,
+                        payload: { message: result.message },
+                    });
+                    requestHandlers.onCompleted(createExecuteState({ message: result.message }));
+                    lifecycleHandlers.completeActive();
+                    return;
+                }
+                // Create task infos from commands
+                const taskInfos = result.commands.map((cmd, index) => ({
+                    label: inputTasks[index]?.action ?? cmd.description,
+                    command: cmd,
+                    status: ExecutionStatus.Pending,
+                    elapsed: 0,
+                }));
+                dispatch({
+                    type: ExecuteActionType.CommandsReady,
+                    payload: {
+                        message: result.message,
+                        summary: result.summary,
+                        tasks: taskInfos,
+                    },
+                });
+                requestHandlers.onCompleted(createExecuteState({
+                    message: result.message,
+                    summary: result.summary,
+                    tasks: taskInfos,
+                }));
+            }
+            catch (err) {
+                await ensureMinimumTime(startTime, MINIMUM_PROCESSING_TIME);
+                if (mounted) {
+                    const errorMessage = formatErrorMessage(err);
+                    dispatch({
+                        type: ExecuteActionType.ProcessingError,
+                        payload: { error: errorMessage },
+                    });
+                    requestHandlers.onCompleted(createExecuteState({ error: errorMessage }));
+                    requestHandlers.onError(errorMessage);
+                }
+            }
+        }
+        void process(service);
+        return () => {
+            mounted = false;
+        };
+    }, [
+        inputTasks,
+        isActive,
+        service,
+        requestHandlers,
+        lifecycleHandlers,
+        workflowHandlers,
+        tasksCount,
+        hasProcessed,
+        dispatch,
+    ]);
+}
+/**
+ * Execute tasks sequentially, managing state and handling completion/errors.
+ */
+export function useTaskExecutor(config) {
+    const { isActive, tasks, message, summary, error, workdir, setWorkdir, cancelledRef, liveOutput, dispatch, requestHandlers, lifecycleHandlers, workflowHandlers, } = config;
+    const currentTaskIndex = getCurrentTaskIndex(tasks);
+    useEffect(() => {
+        if (!isActive ||
+            tasks.length === 0 ||
+            currentTaskIndex >= tasks.length ||
+            error) {
+            return;
+        }
+        const currentTask = tasks[currentTaskIndex];
+        if (currentTask.status !== ExecutionStatus.Pending) {
+            return;
+        }
+        cancelledRef.current = false;
+        // Mark task as started (running)
+        dispatch({
+            type: ExecuteActionType.TaskStarted,
+            payload: { index: currentTaskIndex },
+        });
+        // Reset live state for new task
+        liveOutput.start();
+        // Merge workdir into command
+        const command = workdir
+            ? { ...currentTask.command, workdir }
+            : currentTask.command;
+        void executeTask(command, currentTaskIndex, {
+            onOutputChange: (output) => {
+                if (!cancelledRef.current) {
+                    liveOutput.setOutput(output);
+                }
+            },
+            onComplete: (elapsed, output) => {
+                if (cancelledRef.current)
+                    return;
+                liveOutput.stop();
+                // Track working directory
+                if (output.workdir) {
+                    setWorkdir(output.workdir);
+                }
+                const tasksWithOutput = tasks.map((task, i) => i === currentTaskIndex
+                    ? {
+                        ...task,
+                        stdout: output.stdout,
+                        stderr: output.stderr,
+                        error: output.error,
+                    }
+                    : task);
+                const result = handleTaskCompletion(currentTaskIndex, elapsed, {
+                    tasks: tasksWithOutput,
+                    message,
+                    summary,
+                });
+                dispatch(result.action);
+                requestHandlers.onCompleted(result.finalState);
+                if (result.shouldComplete) {
+                    lifecycleHandlers.completeActive();
+                }
+            },
+            onError: (errorMsg, elapsed, output) => {
+                if (cancelledRef.current)
+                    return;
+                liveOutput.stop();
+                // Track working directory
+                if (output.workdir) {
+                    setWorkdir(output.workdir);
+                }
+                const tasksWithOutput = tasks.map((task, i) => i === currentTaskIndex
+                    ? {
+                        ...task,
+                        stdout: output.stdout,
+                        stderr: output.stderr,
+                        error: output.error,
+                    }
+                    : task);
+                const result = handleTaskFailure(currentTaskIndex, errorMsg, elapsed, {
+                    tasks: tasksWithOutput,
+                    message,
+                    summary,
+                });
+                dispatch(result.action);
+                requestHandlers.onCompleted(result.finalState);
+                if (result.action.type === ExecuteActionType.TaskErrorCritical) {
+                    const criticalErrorMessage = getExecutionErrorMessage(errorMsg);
+                    workflowHandlers.addToQueue(createFeedback({
+                        type: FeedbackType.Failed,
+                        message: criticalErrorMessage,
+                    }));
+                }
+                if (result.shouldComplete) {
+                    lifecycleHandlers.completeActive();
+                }
+            },
+        });
+    }, [
+        isActive,
+        tasks,
+        currentTaskIndex,
+        message,
+        summary,
+        error,
+        workdir,
+        setWorkdir,
+        cancelledRef,
+        liveOutput,
+        dispatch,
+        requestHandlers,
+        lifecycleHandlers,
+        workflowHandlers,
+    ]);
+}
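
The hooks above are ordinary React hooks, so they compose like any other. A minimal sketch built on useElapsedTimer; the './hooks.js' import path and the m:ss formatting are assumptions for illustration, not code from the package.

import { useElapsedTimer } from './hooks.js';

// Derive an 'm:ss' label from a task's start time; re-renders roughly once per second.
export function useElapsedLabel(startTime, isRunning) {
    const elapsed = useElapsedTimer(startTime, isRunning); // milliseconds since startTime
    const totalSeconds = Math.floor(elapsed / 1000);
    const minutes = Math.floor(totalSeconds / 60);
    const seconds = String(totalSeconds % 60).padStart(2, '0');
    return `${minutes}:${seconds}`;
}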

package/dist/execution/processing.js
@@ -18,7 +18,7 @@ export async function processTasks(tasks, service) {
     // Load user config for placeholder resolution
     const userConfig = loadUserConfig();
     // Format tasks for the execute tool and resolve placeholders
-    const
+    const taskList = tasks
         .map((task) => {
         const resolvedAction = replacePlaceholders(task.action, userConfig);
         const params = task.params
@@ -27,6 +27,8 @@ export async function processTasks(tasks, service) {
         return `- ${resolvedAction}${params}`;
     })
         .join('\n');
+    // Build message with confirmed schedule header
+    const taskDescriptions = `Confirmed schedule (${tasks.length} tasks):\n${taskList}`;
     // Call execute tool to get commands
     const result = await service.processWithTool(taskDescriptions, 'execute');
     // Resolve placeholders in command strings
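
A worked example of the prompt text assembled above, assuming two tasks whose params resolve to an empty suffix:

// Illustrative input
const tasks = [{ action: 'list files' }, { action: 'show git status' }];
// taskList         => '- list files\n- show git status'
// taskDescriptions => 'Confirmed schedule (2 tasks):\n- list files\n- show git status'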

package/dist/execution/reducer.js
@@ -6,7 +6,6 @@ export const initialState = {
     error: null,
     tasks: [],
     message: '',
-    completed: 0,
     hasProcessed: false,
     completionMessage: null,
     summary: '',
@@ -25,7 +24,6 @@ export function executeReducer(state, action) {
                 message: action.payload.message,
                 summary: action.payload.summary,
                 tasks: action.payload.tasks,
-                completed: 0,
             };
         case ExecuteActionType.ProcessingError:
             return {
@@ -33,6 +31,15 @@ export function executeReducer(state, action) {
                 error: action.payload.error,
                 hasProcessed: true,
             };
+        case ExecuteActionType.TaskStarted: {
+            const updatedTasks = state.tasks.map((task, i) => i === action.payload.index
+                ? { ...task, status: ExecutionStatus.Running }
+                : task);
+            return {
+                ...state,
+                tasks: updatedTasks,
+            };
+        }
         case ExecuteActionType.TaskComplete: {
             const updatedTaskInfos = state.tasks.map((task, i) => i === action.payload.index
                 ? {
@@ -44,7 +51,6 @@ export function executeReducer(state, action) {
             return {
                 ...state,
                 tasks: updatedTaskInfos,
-                completed: action.payload.index + 1,
             };
         }
         case ExecuteActionType.AllTasksComplete: {
@@ -60,7 +66,6 @@ export function executeReducer(state, action) {
             return {
                 ...state,
                 tasks: updatedTaskInfos,
-                completed: action.payload.index + 1,
                 completionMessage: completion,
             };
         }
@@ -85,7 +90,6 @@ export function executeReducer(state, action) {
             return {
                 ...state,
                 tasks: updatedTaskInfos,
-                completed: action.payload.index + 1,
             };
         }
         case ExecuteActionType.LastTaskError: {
@@ -101,21 +105,19 @@ export function executeReducer(state, action) {
             return {
                 ...state,
                 tasks: updatedTaskInfos,
-                completed: action.payload.index + 1,
                 completionMessage: completion,
             };
         }
         case ExecuteActionType.CancelExecution: {
-
-
-
-            }
-            else if (taskIndex === action.payload.completed) {
+            // Mark running task as aborted, pending tasks as cancelled
+            const updatedTaskInfos = state.tasks.map((task) => {
+                if (task.status === ExecutionStatus.Running) {
                     return { ...task, status: ExecutionStatus.Aborted };
                 }
-                else {
+                else if (task.status === ExecutionStatus.Pending) {
                     return { ...task, status: ExecutionStatus.Cancelled };
                 }
+                return task;
             });
             return {
                 ...state,