awesome-slash 2.4.4 → 2.5.1
This diff shows the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +6 -6
- package/.claude-plugin/plugin.json +1 -1
- package/CHANGELOG.md +123 -1
- package/README.md +186 -159
- package/SECURITY.md +25 -81
- package/adapters/codex/install.sh +58 -16
- package/adapters/opencode/install.sh +92 -23
- package/lib/index.js +47 -4
- package/lib/patterns/review-patterns.js +58 -11
- package/lib/patterns/slop-patterns.js +154 -147
- package/lib/platform/detect-platform.js +99 -350
- package/lib/platform/detection-configs.js +93 -0
- package/lib/platform/state-dir.js +122 -0
- package/lib/platform/verify-tools.js +10 -78
- package/lib/schemas/README.md +195 -0
- package/lib/schemas/validator.js +247 -0
- package/lib/sources/custom-handler.js +199 -0
- package/lib/sources/policy-questions.js +239 -0
- package/lib/sources/source-cache.js +164 -0
- package/lib/state/workflow-state.js +368 -665
- package/lib/types/README.md +292 -0
- package/lib/types/agent-frontmatter.d.ts +134 -0
- package/lib/types/command-frontmatter.d.ts +107 -0
- package/lib/types/hook-frontmatter.d.ts +115 -0
- package/lib/types/index.d.ts +84 -0
- package/lib/types/plugin-manifest.d.ts +102 -0
- package/lib/types/skill-frontmatter.d.ts +89 -0
- package/lib/utils/cache-manager.js +154 -0
- package/lib/utils/context-optimizer.js +5 -36
- package/lib/utils/deprecation.js +37 -0
- package/lib/utils/shell-escape.js +88 -0
- package/mcp-server/index.js +513 -22
- package/package.json +6 -2
- package/plugins/deslop-around/.claude-plugin/plugin.json +1 -1
- package/plugins/deslop-around/lib/index.js +170 -0
- package/plugins/deslop-around/lib/patterns/review-patterns.js +58 -11
- package/plugins/deslop-around/lib/patterns/slop-patterns.js +169 -129
- package/plugins/deslop-around/lib/platform/detect-platform.js +162 -316
- package/plugins/deslop-around/lib/platform/detection-configs.js +93 -0
- package/plugins/deslop-around/lib/platform/state-dir.js +122 -0
- package/plugins/deslop-around/lib/platform/verify-tools.js +10 -78
- package/plugins/deslop-around/lib/schemas/README.md +195 -0
- package/plugins/deslop-around/lib/schemas/validator.js +247 -0
- package/plugins/deslop-around/lib/sources/custom-handler.js +199 -0
- package/plugins/deslop-around/lib/sources/policy-questions.js +239 -0
- package/plugins/deslop-around/lib/sources/source-cache.js +164 -0
- package/plugins/deslop-around/lib/state/workflow-state.js +387 -484
- package/plugins/deslop-around/lib/types/README.md +292 -0
- package/plugins/deslop-around/lib/types/agent-frontmatter.d.ts +134 -0
- package/plugins/deslop-around/lib/types/command-frontmatter.d.ts +107 -0
- package/plugins/deslop-around/lib/types/hook-frontmatter.d.ts +115 -0
- package/plugins/deslop-around/lib/types/index.d.ts +84 -0
- package/plugins/deslop-around/lib/types/plugin-manifest.d.ts +102 -0
- package/plugins/deslop-around/lib/types/skill-frontmatter.d.ts +89 -0
- package/plugins/deslop-around/lib/utils/cache-manager.js +154 -0
- package/plugins/deslop-around/lib/utils/context-optimizer.js +115 -37
- package/plugins/deslop-around/lib/utils/deprecation.js +37 -0
- package/plugins/deslop-around/lib/utils/shell-escape.js +88 -0
- package/plugins/next-task/.claude-plugin/plugin.json +1 -1
- package/plugins/next-task/agents/delivery-validator.md +2 -2
- package/plugins/next-task/agents/implementation-agent.md +3 -4
- package/plugins/next-task/agents/planning-agent.md +77 -19
- package/plugins/next-task/agents/review-orchestrator.md +21 -122
- package/plugins/next-task/agents/task-discoverer.md +164 -23
- package/plugins/next-task/commands/next-task.md +180 -14
- package/plugins/next-task/lib/index.js +170 -0
- package/plugins/next-task/lib/patterns/review-patterns.js +58 -11
- package/plugins/next-task/lib/patterns/slop-patterns.js +169 -129
- package/plugins/next-task/lib/platform/detect-platform.js +162 -316
- package/plugins/next-task/lib/platform/detection-configs.js +93 -0
- package/plugins/next-task/lib/platform/state-dir.js +122 -0
- package/plugins/next-task/lib/platform/verify-tools.js +10 -78
- package/plugins/next-task/lib/schemas/README.md +195 -0
- package/plugins/next-task/lib/schemas/validator.js +247 -0
- package/plugins/next-task/lib/sources/custom-handler.js +199 -0
- package/plugins/next-task/lib/sources/policy-questions.js +239 -0
- package/plugins/next-task/lib/sources/source-cache.js +164 -0
- package/plugins/next-task/lib/state/workflow-state.js +387 -484
- package/plugins/next-task/lib/types/README.md +292 -0
- package/plugins/next-task/lib/types/agent-frontmatter.d.ts +134 -0
- package/plugins/next-task/lib/types/command-frontmatter.d.ts +107 -0
- package/plugins/next-task/lib/types/hook-frontmatter.d.ts +115 -0
- package/plugins/next-task/lib/types/index.d.ts +84 -0
- package/plugins/next-task/lib/types/plugin-manifest.d.ts +102 -0
- package/plugins/next-task/lib/types/skill-frontmatter.d.ts +89 -0
- package/plugins/next-task/lib/utils/cache-manager.js +154 -0
- package/plugins/next-task/lib/utils/context-optimizer.js +115 -37
- package/plugins/next-task/lib/utils/deprecation.js +37 -0
- package/plugins/next-task/lib/utils/shell-escape.js +88 -0
- package/plugins/project-review/.claude-plugin/plugin.json +1 -1
- package/plugins/project-review/lib/index.js +170 -0
- package/plugins/project-review/lib/patterns/review-patterns.js +58 -11
- package/plugins/project-review/lib/patterns/slop-patterns.js +169 -129
- package/plugins/project-review/lib/platform/detect-platform.js +162 -316
- package/plugins/project-review/lib/platform/detection-configs.js +93 -0
- package/plugins/project-review/lib/platform/state-dir.js +122 -0
- package/plugins/project-review/lib/platform/verify-tools.js +10 -78
- package/plugins/project-review/lib/schemas/README.md +195 -0
- package/plugins/project-review/lib/schemas/validator.js +247 -0
- package/plugins/project-review/lib/sources/custom-handler.js +199 -0
- package/plugins/project-review/lib/sources/policy-questions.js +239 -0
- package/plugins/project-review/lib/sources/source-cache.js +164 -0
- package/plugins/project-review/lib/state/workflow-state.js +387 -484
- package/plugins/project-review/lib/types/README.md +292 -0
- package/plugins/project-review/lib/types/agent-frontmatter.d.ts +134 -0
- package/plugins/project-review/lib/types/command-frontmatter.d.ts +107 -0
- package/plugins/project-review/lib/types/hook-frontmatter.d.ts +115 -0
- package/plugins/project-review/lib/types/index.d.ts +84 -0
- package/plugins/project-review/lib/types/plugin-manifest.d.ts +102 -0
- package/plugins/project-review/lib/types/skill-frontmatter.d.ts +89 -0
- package/plugins/project-review/lib/utils/cache-manager.js +154 -0
- package/plugins/project-review/lib/utils/context-optimizer.js +115 -37
- package/plugins/project-review/lib/utils/deprecation.js +37 -0
- package/plugins/project-review/lib/utils/shell-escape.js +88 -0
- package/plugins/reality-check/.claude-plugin/plugin.json +1 -1
- package/plugins/reality-check/agents/code-explorer.md +1 -1
- package/plugins/ship/.claude-plugin/plugin.json +1 -1
- package/plugins/ship/lib/index.js +170 -0
- package/plugins/ship/lib/patterns/review-patterns.js +58 -11
- package/plugins/ship/lib/patterns/slop-patterns.js +169 -129
- package/plugins/ship/lib/platform/detect-platform.js +162 -316
- package/plugins/ship/lib/platform/detection-configs.js +93 -0
- package/plugins/ship/lib/platform/state-dir.js +122 -0
- package/plugins/ship/lib/platform/verify-tools.js +10 -78
- package/plugins/ship/lib/schemas/README.md +195 -0
- package/plugins/ship/lib/schemas/validator.js +247 -0
- package/plugins/ship/lib/sources/custom-handler.js +199 -0
- package/plugins/ship/lib/sources/policy-questions.js +239 -0
- package/plugins/ship/lib/sources/source-cache.js +164 -0
- package/plugins/ship/lib/state/workflow-state.js +387 -484
- package/plugins/ship/lib/types/README.md +292 -0
- package/plugins/ship/lib/types/agent-frontmatter.d.ts +134 -0
- package/plugins/ship/lib/types/command-frontmatter.d.ts +107 -0
- package/plugins/ship/lib/types/hook-frontmatter.d.ts +115 -0
- package/plugins/ship/lib/types/index.d.ts +84 -0
- package/plugins/ship/lib/types/plugin-manifest.d.ts +102 -0
- package/plugins/ship/lib/types/skill-frontmatter.d.ts +89 -0
- package/plugins/ship/lib/utils/cache-manager.js +154 -0
- package/plugins/ship/lib/utils/context-optimizer.js +115 -37
- package/plugins/ship/lib/utils/deprecation.js +37 -0
- package/plugins/ship/lib/utils/shell-escape.js +88 -0
- package/scripts/install/codex.sh +216 -72
- package/scripts/install/opencode.sh +197 -21
- package/lib/state/workflow-state.schema.json +0 -282
- package/plugins/deslop-around/lib/state/workflow-state.schema.json +0 -282
- package/plugins/next-task/agents/policy-selector.md +0 -248
- package/plugins/next-task/lib/state/tasks-registry.schema.json +0 -85
- package/plugins/next-task/lib/state/workflow-state.schema.json +0 -282
- package/plugins/next-task/lib/state/worktree-status.schema.json +0 -219
- package/plugins/project-review/lib/state/workflow-state.schema.json +0 -282
- package/plugins/ship/lib/state/workflow-state.schema.json +0 -282

@@ -1,130 +1,69 @@
/**
+ * Simplified workflow state management
 *
+ * Two files:
+ * - Main project: {stateDir}/tasks.json (tracks active worktree/task)
+ * - Worktree: {stateDir}/flow.json (tracks workflow progress)
+ *
+ * State directory is platform-aware:
+ * - Claude Code: .claude/
+ * - OpenCode: .opencode/
+ * - Codex CLI: .codex/
 */

const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
+const { getStateDir } = require('../platform/state-dir');

-/**
- * State cache configuration
- */
-const STATE_CACHE_TTL_MS = 200; // Cache TTL for rapid successive reads
-const _stateCache = new Map(); // Cache keyed by resolved base directory
-/**
- * Get cached state if valid
- * @param {string} cacheKey - Cache key (resolved base path)
- * @returns {Object|null} Cached state or null if expired/missing
- */
-function getCachedState(cacheKey) {
-  const cached = _stateCache.get(cacheKey);
-  if (cached && Date.now() < cached.expiry) {
-    return cached.state;
-  }
-  return null;
-}
-/**
- * Set state cache
- * @param {string} cacheKey - Cache key (resolved base path)
- * @param {Object} state - State to cache
- */
-function setCachedState(cacheKey, state) {
-  _stateCache.set(cacheKey, {
-    state,
-    expiry: Date.now() + STATE_CACHE_TTL_MS
-  });
-}
-/**
- * Invalidate state cache for a directory
- * @param {string} cacheKey - Cache key (resolved base path)
- */
-function invalidateStateCache(cacheKey) {
-  _stateCache.delete(cacheKey);
-}
+// File paths
+const TASKS_FILE = 'tasks.json';
+const FLOW_FILE = 'flow.json';

/**
-function clearAllStateCaches() {
-  _stateCache.clear();
-}
-/**
- * Validate and normalize base directory path to prevent path traversal
- * @param {string} baseDir - Base directory path
+ * Validate and resolve path to prevent path traversal attacks
+ * @param {string} basePath - Base directory path
 * @returns {string} Validated absolute path
- * @throws {Error} If path is invalid
+ * @throws {Error} If path is invalid
 */
+function validatePath(basePath) {
+  if (typeof basePath !== 'string' || basePath.length === 0) {
+    throw new Error('Path must be a non-empty string');
  }
-  const resolvedPath = path.resolve(baseDir);
-  // Check for null bytes (path traversal via null byte injection)
-  if (resolvedPath.includes('\0')) {
+  const resolved = path.resolve(basePath);
+  if (resolved.includes('\0')) {
    throw new Error('Path contains invalid null byte');
  }
+  return resolved;
+}

-  try {
-    const parentStats = fs.statSync(parentDir);
-    if (!parentStats.isDirectory()) {
-      throw new Error('Parent path is not a directory');
-    }
-  } catch (parentError) {
-    if (parentError.code === 'ENOENT') {
-      throw new Error('Parent directory does not exist');
-    }
-    throw parentError;
-  }
-  } else if (error.message) {
-    throw error;
-  }
+/**
+ * Validate that target path is within base directory
+ * @param {string} targetPath - Target file path
+ * @param {string} basePath - Base directory
+ * @throws {Error} If path traversal detected
+ */
+function validatePathWithinBase(targetPath, basePath) {
+  const resolvedTarget = path.resolve(targetPath);
+  const resolvedBase = path.resolve(basePath);
+  if (!resolvedTarget.startsWith(resolvedBase + path.sep) && resolvedTarget !== resolvedBase) {
+    throw new Error('Path traversal detected');
  }
-  return resolvedPath;
}

/**
- * @param {string} baseDir - The validated base directory
- * @throws {Error} If path traversal is detected
+ * Generate a unique workflow ID
+ * @returns {string} Workflow ID
 */
-      resolvedStatePath !== resolvedBaseDir) {
-    throw new Error('Path traversal detected: state path is outside base directory');
-  }
+function generateWorkflowId() {
+  const now = new Date();
+  const date = now.toISOString().slice(0, 10).replace(/-/g, '');
+  const time = now.toISOString().slice(11, 19).replace(/:/g, '');
+  const random = crypto.randomBytes(4).toString('hex');
+  return `workflow-${date}-${time}-${random}`;
}

+// Valid phases for the workflow
const PHASES = [
  'policy-selection',
  'task-discovery',
@@ -134,702 +73,466 @@ const PHASES = [
  'user-approval',
  'implementation',
  'review-loop',
-  'create-pr',
-  'ci-wait',
-  'comment-fix',
-  'merge',
-  'production-ci',
-  'deploy',
-  'production-release',
+  'delivery-validation',
+  'shipping',
  'complete'
];

-// Pre-computed phase index map for O(1) lookup (vs O(n) array indexOf)
-const PHASE_INDEX = new Map(PHASES.map((phase, index) => [phase, index]));
/**
- * @param {string} phaseName - Phase to check
- * @returns {boolean} True if valid phase
+ * Ensure state directory exists (platform-aware)
 */
- * @param {string} phaseName - Phase name
- * @returns {number} Phase index or -1 if invalid
- */
-function getPhaseIndex(phaseName) {
-  return PHASE_INDEX.has(phaseName) ? PHASE_INDEX.get(phaseName) : -1;
+function ensureStateDir(basePath) {
+  const stateDir = path.join(basePath, getStateDir(basePath));
+  if (!fs.existsSync(stateDir)) {
+    fs.mkdirSync(stateDir, { recursive: true });
+  }
+  return stateDir;
}

-  platform: 'detected',
-  stoppingPoint: 'merged',
-  mergeStrategy: 'squash',
-  autoFix: true,
-  maxReviewIterations: 3
-};
+// =============================================================================
+// TASKS.JSON - Main project directory
+// =============================================================================

/**
- * @returns {string} Workflow ID in format: workflow-YYYYMMDD-HHMMSS-random
+ * Get path to tasks.json with validation
 */
-  return `workflow-${date}-${time}-${random}`;
+function getTasksPath(projectPath = process.cwd()) {
+  const validatedBase = validatePath(projectPath);
+  const tasksPath = path.join(validatedBase, getStateDir(projectPath), TASKS_FILE);
+  validatePathWithinBase(tasksPath, validatedBase);
+  return tasksPath;
}

/**
- * @throws {Error} If path validation fails
+ * Read tasks.json from main project
+ * Returns { active: null } if file doesn't exist or is corrupted
+ * Logs critical error on corruption to prevent silent data loss
 */
+function readTasks(projectPath = process.cwd()) {
+  const tasksPath = getTasksPath(projectPath);
+  if (!fs.existsSync(tasksPath)) {
+    return { active: null };
+  }
+  try {
+    const data = JSON.parse(fs.readFileSync(tasksPath, 'utf8'));
+    // Normalize legacy format that may not have 'active' field
+    if (!Object.prototype.hasOwnProperty.call(data, 'active')) {
+      return { active: null };
+    }
+    return data;
+  } catch (e) {
+    console.error(`[CRITICAL] Corrupted tasks.json at ${tasksPath}: ${e.message}`);
+    return { active: null };
+  }
}

/**
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @throws {Error} If path validation fails
+ * Write tasks.json to main project
 */
-  validateStatePathWithinBase(stateDir, validatedBase);
-  if (!fs.existsSync(stateDir)) {
-    fs.mkdirSync(stateDir, { recursive: true });
-  }
+function writeTasks(tasks, projectPath = process.cwd()) {
+  ensureStateDir(projectPath);
+  const tasksPath = getTasksPath(projectPath);
+  fs.writeFileSync(tasksPath, JSON.stringify(tasks, null, 2), 'utf8');
+  return true;
}

/**
- * @param {string} [type='next-task'] - Workflow type
- * @param {Object} [policy={}] - Policy overrides
- * @returns {Object} New workflow state
+ * Set active task in main project
 */
-    workflow: {
-      id: generateWorkflowId(),
-      type,
-      status: 'pending',
-      startedAt: now,
-      lastUpdatedAt: now,
-      completedAt: null
-    },
-    policy: { ...DEFAULT_POLICY, ...policy },
-    task: null,
-    git: null,
-    pr: null,
-    phases: {
-      current: 'policy-selection',
-      currentIteration: 0,
-      history: []
-    },
-    agents: null,
-    checkpoints: {
-      canResume: true,
-      resumeFrom: null,
-      resumeContext: null
-    },
-    metrics: {
-      totalDuration: 0,
-      tokensUsed: 0,
-      toolCalls: 0,
-      filesModified: 0,
-      linesAdded: 0,
-      linesRemoved: 0
-    }
+function setActiveTask(task, projectPath = process.cwd()) {
+  const tasks = readTasks(projectPath);
+  tasks.active = {
+    ...task,
+    startedAt: new Date().toISOString()
  };
+  return writeTasks(tasks, projectPath);
}

/**
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @param {Object} [options={}] - Options
- * @param {boolean} [options.skipCache=false] - Skip cache and read from file
- * @returns {Object|Error|null} Workflow state, Error if corrupted, or null if not found
+ * Clear active task
 */
-  // Check cache first (unless skipCache is true)
-  if (!options.skipCache) {
-    const cached = getCachedState(cacheKey);
-    if (cached !== null) {
-      // Return a deep copy to prevent mutations affecting cache
-      return JSON.parse(JSON.stringify(cached));
-    }
-  }
-  if (!fs.existsSync(statePath)) {
-    return null;
-  }
-  try {
-    const content = fs.readFileSync(statePath, 'utf8');
-    const state = JSON.parse(content);
-    // Version check
-    if (state.version !== SCHEMA_VERSION) {
-      console.warn(`State version mismatch: ${state.version} vs ${SCHEMA_VERSION}`);
-      // Future: Add migration logic here
-    }
-    // Cache the state
-    setCachedState(cacheKey, state);
-    return state;
-  } catch (error) {
-    const corrupted = new Error(`Corrupted workflow state: ${error.message}`);
-    corrupted.code = 'ERR_STATE_CORRUPTED';
-    corrupted.cause = error;
-    console.error(corrupted.message);
-    return corrupted;
-  }
+function clearActiveTask(projectPath = process.cwd()) {
+  const tasks = readTasks(projectPath);
+  tasks.active = null;
+  return writeTasks(tasks, projectPath);
}

/**
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {boolean} Success status
+ * Check if there's an active task
+ * Uses != null to catch both null and undefined (legacy format safety)
 */
-  try {
-    // Update timestamp
-    state.workflow.lastUpdatedAt = new Date().toISOString();
-    const content = JSON.stringify(state, null, 2);
-    fs.writeFileSync(statePath, content, 'utf8');
+function hasActiveTask(projectPath = process.cwd()) {
+  const tasks = readTasks(projectPath);
+  return tasks.active != null;
+}

+// =============================================================================
+// FLOW.JSON - Worktree directory
+// =============================================================================

+/**
+ * Get path to flow.json with validation
+ */
+function getFlowPath(worktreePath = process.cwd()) {
+  const validatedBase = validatePath(worktreePath);
+  const flowPath = path.join(validatedBase, getStateDir(worktreePath), FLOW_FILE);
+  validatePathWithinBase(flowPath, validatedBase);
+  return flowPath;
}

/**
- * @returns {Object|null} Updated state or null on error
+ * Read flow.json from worktree
+ * Returns null if file doesn't exist or is corrupted
+ * Logs critical error on corruption to prevent silent data loss
 */
-  if (state instanceof Error) {
-    console.error(`Cannot update state: ${state.message}`);
+function readFlow(worktreePath = process.cwd()) {
+  const flowPath = getFlowPath(worktreePath);
+  if (!fs.existsSync(flowPath)) {
    return null;
  }
+  try {
+    return JSON.parse(fs.readFileSync(flowPath, 'utf8'));
+  } catch (e) {
+    console.error(`[CRITICAL] Corrupted flow.json at ${flowPath}: ${e.message}`);
    return null;
  }
-  // Deep merge updates
-  state = deepMerge(state, updates);
-  if (writeState(state, baseDir)) {
-    return state;
-  }
-  return null;
}

/**
+ * Write flow.json to worktree
+ * Creates a copy to avoid mutating the original object
 */
+function writeFlow(flow, worktreePath = process.cwd()) {
+  ensureStateDir(worktreePath);
+  // Clone to avoid mutating the original object
+  const flowCopy = JSON.parse(JSON.stringify(flow));
+  flowCopy.lastUpdate = new Date().toISOString();
+  const flowPath = getFlowPath(worktreePath);
+  fs.writeFileSync(flowPath, JSON.stringify(flowCopy, null, 2), 'utf8');
+  return true;
+}

/**
- * @param {number} [depth=0] - Current recursion depth (internal)
- * @returns {Object} Merged object
- * @throws {Error} If recursion depth exceeds MAX_MERGE_DEPTH
+ * Update flow.json with partial updates
+ * Handles null values correctly (null overwrites existing values)
+ * Deep merges nested objects when both exist
 */
-  if (depth > MAX_MERGE_DEPTH) {
-    throw new Error(`Maximum merge depth (${MAX_MERGE_DEPTH}) exceeded - possible circular reference or attack`);
-  }
-  // Handle null/undefined cases
-  if (!source || typeof source !== 'object') return target;
-  if (!target || typeof target !== 'object') return source;
+function updateFlow(updates, worktreePath = process.cwd()) {
+  const flow = readFlow(worktreePath) || {};

-    if (key === '__proto__' || key === 'constructor' || key === 'prototype') {
-      continue;
-    }
-    const sourceVal = source[key];
-    const targetVal = result[key];
-    // Handle Date objects - preserve as-is
-    if (sourceVal instanceof Date) {
-      result[key] = new Date(sourceVal.getTime());
+  for (const [key, value] of Object.entries(updates)) {
+    // Null explicitly overwrites
+    if (value === null) {
+      flow[key] = null;
    }
+    // Deep merge if both source and target are non-null objects
+    else if (
+      value && typeof value === 'object' && !Array.isArray(value) &&
+      flow[key] && typeof flow[key] === 'object' && !Array.isArray(flow[key])
+    ) {
+      flow[key] = { ...flow[key], ...value };
    }
-    else if (sourceVal && typeof sourceVal === 'object' && !Array.isArray(sourceVal)) {
-      result[key] = deepMerge(targetVal || {}, sourceVal, depth + 1);
-    }
-    // Replace arrays and primitives
+    // Otherwise direct assignment
    else {
+      flow[key] = value;
    }
  }

-  return
+  return writeFlow(flow, worktreePath);
}

/**
+ * Create initial flow for a new task
+ * Also registers the task as active in the main project's tasks.json
+ * @param {Object} task - Task object with id, title, source, url
+ * @param {Object} policy - Policy object with stoppingPoint
+ * @param {string} worktreePath - Path to worktree
+ * @param {string} projectPath - Path to main project (for tasks.json registration)
 */
+function createFlow(task, policy, worktreePath = process.cwd(), projectPath = null) {
+  const flow = {
+    task: {
+      id: task.id,
+      title: task.title,
+      source: task.source,
+      url: task.url || null
+    },
+    policy: {
+      stoppingPoint: policy.stoppingPoint || 'merged'
+    },
+    phase: 'policy-selection',
+    status: 'in_progress',
+    lastUpdate: new Date().toISOString(),
+    userNotes: '',
+    git: {
+      branch: null,
+      baseBranch: 'main'
+    },
+    pr: null,
+    exploration: null,
+    plan: null,
+    // Store projectPath so completeWorkflow knows where to clear the task
+    projectPath: projectPath
+  };

-  if (state instanceof Error) {
-    console.error(`Cannot start phase: ${state.message}`);
-    return null;
-  }
-  if (!state) {
-    console.error('No workflow state exists. Create a workflow first.');
-    return null;
-  }
+  writeFlow(flow, worktreePath);

+  // Register task as active in main project
+  if (projectPath) {
+    setActiveTask({
+      taskId: task.id,
+      title: task.title,
+      worktree: worktreePath,
+      branch: flow.git.branch
+    }, projectPath);
+  }

-    phase: phaseName,
-    status: 'in_progress',
-    startedAt: new Date().toISOString(),
-    completedAt: null,
-    duration: null,
-    result: null
-  });
-  return updateState({
-    workflow: { status: 'in_progress' },
-    phases: { current: phaseName, history },
-    checkpoints: { canResume: true, resumeFrom: phaseName, resumeContext: null }
-  }, baseDir);
+  return flow;
}

/**
- * @param {Object} state - Current state
- * @param {string} status - New status (completed/failed)
- * @param {Object} result - Result data
- * @returns {Object} Updated history
+ * Delete flow.json
 */
-  const now = new Date().toISOString();
-  entry.status = status;
-  entry.completedAt = now;
-  entry.duration = new Date(now).getTime() - new Date(entry.startedAt).getTime();
-  entry.result = result;
+function deleteFlow(worktreePath = process.cwd()) {
+  const flowPath = getFlowPath(worktreePath);
+  if (fs.existsSync(flowPath)) {
+    fs.unlinkSync(flowPath);
+    return true;
  }
+  return false;
+}
+
+// =============================================================================
+// PHASE MANAGEMENT
+// =============================================================================

+/**
+ * Check if phase is valid
+ */
+function isValidPhase(phase) {
+  return PHASES.includes(phase);
}

/**
- * @param {Object} [result={}] - Phase result data
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {Object|null} Updated state or null on error
+ * Set current phase
 */
-    console.error(`Cannot complete phase: ${state.message}`);
-    return null;
+function setPhase(phase, worktreePath = process.cwd()) {
+  if (!isValidPhase(phase)) {
+    throw new Error(`Invalid phase: ${phase}`);
  }
-  const history = finalizePhaseEntry(state, 'completed', result);
-  const currentIndex = getPhaseIndex(state.phases.current);
-  const nextPhase = currentIndex < PHASES.length - 1 ? PHASES[currentIndex + 1] : 'complete';
+  return updateFlow({ phase, status: 'in_progress' }, worktreePath);
+}

+/**
+ * Start a phase (alias for setPhase, for backwards compatibility)
+ */
+function startPhase(phase, worktreePath = process.cwd()) {
+  return setPhase(phase, worktreePath);
}

/**
 * Fail the current phase
- * @param {string} reason - Failure reason
- * @param {Object} [context={}] - Context for resume
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {Object|null} Updated state or null on error
 */
-  return updateState({
-    workflow: { status: 'failed' },
-    phases: { history },
-    checkpoints: {
-      canResume: true,
-      resumeFrom: state.phases.current,
-      resumeContext: { reason, ...context }
-    }
-  }, baseDir);
+function failPhase(reason, context = {}, worktreePath = process.cwd()) {
+  const flow = readFlow(worktreePath);
+  if (!flow) return null;
+
+  return updateFlow({
+    status: 'failed',
+    error: reason,
+    failContext: context
+  }, worktreePath);
}

/**
 * Skip to a specific phase
- * @param {string} phaseName - Phase to skip to
- * @param {string} [reason='manual skip'] - Skip reason
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {Object|null} Updated state or null on error
 */
-    return null;
-  }
-  const state = readState(baseDir);
-  if (state instanceof Error) {
-    console.error(`Cannot skip to phase: ${state.message}`);
-    return null;
-  }
-  if (!state) return null;
-  const currentIndex = getPhaseIndex(state.phases.current);
-  const targetIndex = getPhaseIndex(phaseName);
-  // Add skipped entries for phases we're jumping over
-  const history = [...(state.phases.history || [])];
-  const now = new Date().toISOString();
-  for (let i = currentIndex; i < targetIndex; i++) {
-    history.push({
-      phase: PHASES[i],
-      status: 'skipped',
-      startedAt: now,
-      completedAt: now,
-      duration: 0,
-      result: { skippedReason: reason }
-    });
+function skipToPhase(phase, reason = 'manual skip', worktreePath = process.cwd()) {
+  if (!isValidPhase(phase)) {
+    throw new Error(`Invalid phase: ${phase}`);
  }
-    },
-    checkpoints: {
-      resumeFrom: phaseName
-    }
-  }, baseDir);
+  return updateFlow({
+    phase,
+    status: 'in_progress',
+    skipReason: reason
+  }, worktreePath);
}

/**
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {Object|null} Updated state or null on error
+ * Complete current phase and move to next
+ * Uses updateFlow pattern to avoid direct mutation issues
 */
-    console.error(`Cannot complete workflow: ${state.message}`);
-    return null;
-  }
-  if (!state) return null;
+function completePhase(result = null, worktreePath = process.cwd()) {
+  const flow = readFlow(worktreePath);
+  if (!flow) return null;

-  const endTime = new Date(now).getTime();
+  const currentIndex = PHASES.indexOf(flow.phase);
+  const nextPhase = PHASES[currentIndex + 1] || 'complete';

-    },
-    metrics: {
-      totalDuration: endTime - startTime,
-      ...result.metrics
+  // Build updates object
+  const updates = {
+    phase: nextPhase,
+    status: nextPhase === 'complete' ? 'completed' : 'in_progress'
+  };
+
+  // Store result in appropriate field
+  if (result) {
+    const resultField = getResultField(flow.phase);
+    if (resultField) {
+      updates[resultField] = result;
    }
+  }
+
+  updateFlow(updates, worktreePath);
+  return readFlow(worktreePath);
}

/**
- * @param {string} [reason='user aborted'] - Abort reason
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {Object|null} Updated state or null on error
+ * Map phase to result field
 */
-      canResume: false,
-      resumeFrom: null,
-      resumeContext: { abortReason: reason }
-    }
-  }, baseDir);
+function getResultField(phase) {
+  const mapping = {
+    'exploration': 'exploration',
+    'planning': 'plan',
+    'review-loop': 'reviewResult'
+  };
+  return mapping[phase] || null;
}

/**
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {boolean} Success status
+ * Mark workflow as failed
 */
-    fs.unlinkSync(statePath);
-    }
-    return true;
-  } catch (error) {
-    console.error(`Error deleting state: ${error.message}`);
-    return false;
-  }
+function failWorkflow(error, worktreePath = process.cwd()) {
+  return updateFlow({
+    status: 'failed',
+    error: error?.message || String(error)
+  }, worktreePath);
}

/**
+ * Mark workflow as complete
+ * Automatically clears the active task from tasks.json using stored projectPath
+ * @param {string} worktreePath - Path to worktree
 */
+function completeWorkflow(worktreePath = process.cwd()) {
+  const flow = readFlow(worktreePath);
+
+  // Clear active task from main project if projectPath is stored
+  if (flow && flow.projectPath) {
+    clearActiveTask(flow.projectPath);
+  }

-  return
+  return updateFlow({
+    phase: 'complete',
+    status: 'completed',
+    completedAt: new Date().toISOString()
+  }, worktreePath);
}

/**
- * @returns {Object|null} Summary object or null
+ * Abort workflow
+ * Also clears the active task from tasks.json using stored projectPath
 */
-  if (state instanceof Error) {
-    return { error: state.message, code: state.code };
-  }
-  if (!state) return null;
+function abortWorkflow(reason, worktreePath = process.cwd()) {
+  const flow = readFlow(worktreePath);

+  // Clear active task from main project if projectPath is stored
+  if (flow && flow.projectPath) {
+    clearActiveTask(flow.projectPath);
+  }

-  return {
-    progress: `${completedPhases}/${totalPhases}`,
-    progressPercent: Math.round((completedPhases / totalPhases) * 100),
-    task: state.task ? {
-      id: state.task.id,
-      title: state.task.title,
-      source: state.task.source
-    } : null,
-    pr: state.pr ? {
-      number: state.pr.number,
-      url: state.pr.url,
-      ciStatus: state.pr.ciStatus
-    } : null,
-    canResume: state.checkpoints.canResume,
-    resumeFrom: state.checkpoints.resumeFrom,
-    startedAt: state.workflow.startedAt,
-    duration: state.metrics?.totalDuration || 0
-  };
+  return updateFlow({
+    status: 'aborted',
+    abortReason: reason,
+    abortedAt: new Date().toISOString()
+  }, worktreePath);
}

+// =============================================================================
+// CONVENIENCE FUNCTIONS
+// =============================================================================
+
/**
- * @param {string} agentName - Agent identifier
- * @param {Object} result - Agent result
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {Object|null} Updated state or null on error
+ * Get workflow summary for display
 */
-    console.error(`Cannot update agent result: ${state.message}`);
-    return null;
-  }
-  if (!state) return null;
+function getFlowSummary(worktreePath = process.cwd()) {
+  const flow = readFlow(worktreePath);
+  if (!flow) return null;

+  return {
+    task: flow.task?.title || 'Unknown',
+    taskId: flow.task?.id,
+    phase: flow.phase,
+    status: flow.status,
+    lastUpdate: flow.lastUpdate,
+    pr: flow.pr?.number ? `#${flow.pr.number}` : null
  };
-  agents.lastRun[agentName] = result;
-  agents.totalIssuesFound += result.issues || 0;
-  return updateState({ agents }, baseDir);
}

/**
- * @param {Object} [result={}] - Iteration result
- * @param {string} [baseDir=process.cwd()] - Base directory
- * @returns {Object|null} Updated state or null on error
+ * Check if workflow can be resumed
 */
-  if (!state) return null;
+function canResume(worktreePath = process.cwd()) {
+  const flow = readFlow(worktreePath);
+  if (!flow) return false;
+  return flow.status === 'in_progress' && flow.phase !== 'complete';
+}

-    totalIssuesFound: 0,
-    totalIssuesFixed: 0
-  };
+// =============================================================================
+// BACKWARDS COMPATIBILITY ALIASES
+// =============================================================================

+// These maintain compatibility with existing agent code
+const readState = readFlow;
+const writeState = writeFlow;
+const updateState = updateFlow;
+const createState = (type, policy) => createFlow({ id: 'manual', title: 'Manual task', source: 'manual' }, policy);
+const deleteState = deleteFlow;
+const hasActiveWorkflow = hasActiveTask;
+const getWorkflowSummary = getFlowSummary;

-  return updateState({
-    phases: {
-      currentIteration: state.phases.currentIteration + 1
-    },
-    agents
-  }, baseDir);
-}
-
-// Export all functions
module.exports = {
  // Constants
-  SCHEMA_VERSION,
  PHASES,
-  PHASE_INDEX,
-  DEFAULT_POLICY,
-  MAX_MERGE_DEPTH,

+  // Tasks (main project)
+  getTasksPath,
+  readTasks,
+  writeTasks,
+  setActiveTask,
+  clearActiveTask,
+  hasActiveTask,
+
+  // Flow (worktree)
+  getFlowPath,
+  readFlow,
+  writeFlow,
+  updateFlow,
+  createFlow,
+  deleteFlow,

  // Phase management
+  isValidPhase,
+  setPhase,
  startPhase,
  completePhase,
  failPhase,
  skipToPhase,
-
-  // Workflow lifecycle
+  failWorkflow,
  completeWorkflow,
  abortWorkflow,
+
+  // Convenience
+  getFlowSummary,
+  canResume,
+  generateWorkflowId,
+
+  // Backwards compatibility
+  readState,
+  writeState,
+  updateState,
+  createState,
+  deleteState,
  hasActiveWorkflow,
-  getWorkflowSummary
-
-  // Agent management
-  updateAgentResult,
-  incrementIteration,
-
-  // Cache management
-  clearAllStateCaches,
-
-  // Internal functions for testing
-  _internal: {
-    validateBasePath,
-    validateStatePathWithinBase,
-    deepMerge,
-    getCachedState,
-    setCachedState,
-    invalidateStateCache,
-    STATE_CACHE_TTL_MS
-  }
+  getWorkflowSummary
};