@litmers/cursorflow-orchestrator 0.1.18 → 0.1.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/README.md +25 -7
- package/dist/cli/clean.js +7 -6
- package/dist/cli/clean.js.map +1 -1
- package/dist/cli/index.js +5 -1
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/init.js +7 -6
- package/dist/cli/init.js.map +1 -1
- package/dist/cli/logs.js +50 -42
- package/dist/cli/logs.js.map +1 -1
- package/dist/cli/monitor.js +15 -14
- package/dist/cli/monitor.js.map +1 -1
- package/dist/cli/prepare.js +37 -20
- package/dist/cli/prepare.js.map +1 -1
- package/dist/cli/resume.js +193 -40
- package/dist/cli/resume.js.map +1 -1
- package/dist/cli/run.js +3 -2
- package/dist/cli/run.js.map +1 -1
- package/dist/cli/signal.js +7 -7
- package/dist/cli/signal.js.map +1 -1
- package/dist/core/orchestrator.d.ts +2 -1
- package/dist/core/orchestrator.js +48 -91
- package/dist/core/orchestrator.js.map +1 -1
- package/dist/core/runner.js +55 -20
- package/dist/core/runner.js.map +1 -1
- package/dist/utils/config.js +7 -6
- package/dist/utils/config.js.map +1 -1
- package/dist/utils/doctor.js +7 -6
- package/dist/utils/doctor.js.map +1 -1
- package/dist/utils/enhanced-logger.js +14 -11
- package/dist/utils/enhanced-logger.js.map +1 -1
- package/dist/utils/git.js +163 -10
- package/dist/utils/git.js.map +1 -1
- package/dist/utils/log-formatter.d.ts +16 -0
- package/dist/utils/log-formatter.js +194 -0
- package/dist/utils/log-formatter.js.map +1 -0
- package/dist/utils/path.d.ts +19 -0
- package/dist/utils/path.js +77 -0
- package/dist/utils/path.js.map +1 -0
- package/dist/utils/state.d.ts +4 -1
- package/dist/utils/state.js +11 -8
- package/dist/utils/state.js.map +1 -1
- package/dist/utils/template.d.ts +14 -0
- package/dist/utils/template.js +122 -0
- package/dist/utils/template.js.map +1 -0
- package/dist/utils/types.d.ts +1 -0
- package/package.json +1 -1
- package/src/cli/clean.ts +7 -6
- package/src/cli/index.ts +5 -1
- package/src/cli/init.ts +7 -6
- package/src/cli/logs.ts +52 -42
- package/src/cli/monitor.ts +15 -14
- package/src/cli/prepare.ts +39 -20
- package/src/cli/resume.ts +810 -626
- package/src/cli/run.ts +3 -2
- package/src/cli/signal.ts +7 -6
- package/src/core/orchestrator.ts +62 -91
- package/src/core/runner.ts +58 -20
- package/src/utils/config.ts +7 -6
- package/src/utils/doctor.ts +7 -6
- package/src/utils/enhanced-logger.ts +14 -11
- package/src/utils/git.ts +145 -11
- package/src/utils/log-formatter.ts +162 -0
- package/src/utils/path.ts +45 -0
- package/src/utils/state.ts +16 -8
- package/src/utils/template.ts +92 -0
- package/src/utils/types.ts +1 -0
- package/templates/basic.json +21 -0
package/src/utils/enhanced-logger.ts
CHANGED

@@ -15,6 +15,7 @@ import * as fs from 'fs';
 import * as path from 'path';
 import { Transform, TransformCallback } from 'stream';
 import { EnhancedLogConfig } from './types';
+import { safeJoin } from './path';
 
 // Re-export for backwards compatibility
 export { EnhancedLogConfig } from './types';

@@ -413,10 +414,10 @@ export class EnhancedLogManager {
     fs.mkdirSync(logDir, { recursive: true });
 
     // Set up log file paths
-    this.cleanLogPath =
-    this.rawLogPath =
-    this.jsonLogPath =
-    this.readableLogPath =
+    this.cleanLogPath = safeJoin(logDir, 'terminal.log');
+    this.rawLogPath = safeJoin(logDir, 'terminal-raw.log');
+    this.jsonLogPath = safeJoin(logDir, 'terminal.jsonl');
+    this.readableLogPath = safeJoin(logDir, 'terminal-readable.log');
 
     // Initialize log files
     this.initLogFiles();

@@ -620,8 +621,8 @@ export class EnhancedLogManager {
 
     // Shift existing rotated files
     for (let i = this.config.maxFiles - 1; i >= 1; i--) {
-      const oldPath =
-      const newPath =
+      const oldPath = safeJoin(dir, `${base}.${i}${ext}`);
+      const newPath = safeJoin(dir, `${base}.${i + 1}${ext}`);
 
       if (fs.existsSync(oldPath)) {
         if (i === this.config.maxFiles - 1) {

@@ -633,7 +634,7 @@ export class EnhancedLogManager {
     }
 
     // Rotate current to .1
-    const rotatedPath =
+    const rotatedPath = safeJoin(dir, `${base}.1${ext}`);
     fs.renameSync(logPath, rotatedPath);
   }
 

@@ -724,8 +725,10 @@ export class EnhancedLogManager {
     let metadata = { ...json };
 
     // Extract cleaner text for significant AI message types
-    if (json.type === 'thinking' && json.text) {
-      displayMsg = json.text;
+    if ((json.type === 'thinking' || json.type === 'thought') && (json.text || json.thought)) {
+      displayMsg = json.text || json.thought;
+      // Clean up any double newlines at the end of deltas
+      displayMsg = displayMsg.replace(/\n+$/, '\n');
     } else if (json.type === 'assistant' && json.message?.content) {
       displayMsg = json.message.content
         .filter((c: any) => c.type === 'text')

@@ -1101,8 +1104,8 @@ export function exportLogs(
   format: 'text' | 'json' | 'markdown' | 'html',
   outputPath?: string
 ): string {
-  const cleanLogPath =
-  const jsonLogPath =
+  const cleanLogPath = safeJoin(laneRunDir, 'terminal.log');
+  const jsonLogPath = safeJoin(laneRunDir, 'terminal.jsonl');
 
   let output = '';
 
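For reference, a minimal sketch of what the widened thinking/thought branch above now accepts; `extractDisplayText` is a hypothetical helper used only for illustration and is not part of the package.

```ts
// Hypothetical helper mirroring the widened branch above (not part of the package).
function extractDisplayText(json: any): string | undefined {
  if ((json.type === 'thinking' || json.type === 'thought') && (json.text || json.thought)) {
    // Accept either field name and collapse trailing blank lines from streamed deltas
    return String(json.text || json.thought).replace(/\n+$/, '\n');
  }
  return undefined;
}

// Events the 0.1.20 branch handles, per the diff:
extractDisplayText({ type: 'thinking', text: 'Planning the change...\n\n' }); // => 'Planning the change...\n'
extractDisplayText({ type: 'thought', thought: 'Refining the plan.' });       // => 'Refining the plan.'
```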
package/src/utils/git.ts
CHANGED

@@ -3,6 +3,77 @@
  */
 
 import { execSync, spawnSync } from 'child_process';
+import * as fs from 'fs';
+import * as path from 'path';
+import { safeJoin } from './path';
+
+/**
+ * Acquire a file-based lock for Git operations
+ */
+function acquireLock(lockName: string, cwd?: string): string | null {
+  const repoRoot = cwd || getRepoRoot();
+  const lockDir = safeJoin(repoRoot, '_cursorflow', 'locks');
+  if (!fs.existsSync(lockDir)) {
+    fs.mkdirSync(lockDir, { recursive: true });
+  }
+
+  const lockFile = safeJoin(lockDir, `${lockName}.lock`);
+
+  try {
+    // wx flag ensures atomic creation
+    fs.writeFileSync(lockFile, String(process.pid), { flag: 'wx' });
+    return lockFile;
+  } catch {
+    return null;
+  }
+}
+
+/**
+ * Release a file-based lock
+ */
+function releaseLock(lockFile: string | null): void {
+  if (lockFile && fs.existsSync(lockFile)) {
+    try {
+      fs.unlinkSync(lockFile);
+    } catch {
+      // Ignore
+    }
+  }
+}
+
+/**
+ * Run Git command with locking
+ */
+async function runGitWithLock<T>(
+  lockName: string,
+  fn: () => T,
+  options: { cwd?: string; maxRetries?: number; retryDelay?: number } = {}
+): Promise<T> {
+  const maxRetries = options.maxRetries ?? 10;
+  const retryDelay = options.retryDelay ?? 500;
+
+  let retries = 0;
+  let lockFile = null;
+
+  while (retries < maxRetries) {
+    lockFile = acquireLock(lockName, options.cwd);
+    if (lockFile) break;
+
+    retries++;
+    const delay = Math.floor(Math.random() * retryDelay) + retryDelay / 2;
+    await new Promise(resolve => setTimeout(resolve, delay));
+  }
+
+  if (!lockFile) {
+    throw new Error(`Failed to acquire lock: ${lockName}`);
+  }
+
+  try {
+    return fn();
+  } finally {
+    releaseLock(lockFile);
+  }
+}
 
 export interface GitRunOptions {
   cwd?: string;

@@ -36,6 +107,25 @@ export interface CommitInfo {
   subject: string;
 }
 
+/**
+ * Filter out noisy git stderr messages
+ */
+function filterGitStderr(stderr: string): string {
+  if (!stderr) return '';
+
+  const lines = stderr.split('\n');
+  const filtered = lines.filter(line => {
+    // GitHub noise
+    if (line.includes('remote: Create a pull request')) return false;
+    if (line.trim().startsWith('remote:') && line.includes('pull/new')) return false;
+    if (line.trim() === 'remote:') return false; // Empty remote lines
+
+    return true;
+  });
+
+  return filtered.join('\n');
+}
+
 /**
  * Run git command and return output
  */

@@ -43,12 +133,21 @@ export function runGit(args: string[], options: GitRunOptions = {}): string {
   const { cwd, silent = false } = options;
 
   try {
+    const stdioMode = silent ? 'pipe' : ['inherit', 'inherit', 'pipe'];
+
     const result = spawnSync('git', args, {
       cwd: cwd || process.cwd(),
       encoding: 'utf8',
-      stdio:
+      stdio: stdioMode as any,
     });
 
+    if (!silent && result.stderr) {
+      const filteredStderr = filterGitStderr(result.stderr);
+      if (filteredStderr) {
+        process.stderr.write(filteredStderr);
+      }
+    }
+
     if (result.status !== 0 && !silent) {
       throw new Error(`Git command failed: git ${args.join(' ')}\n${result.stderr || ''}`);
     }

@@ -120,18 +219,53 @@ export function worktreeExists(worktreePath: string, cwd?: string): boolean {
 export function createWorktree(worktreePath: string, branchName: string, options: { cwd?: string; baseBranch?: string } = {}): string {
   const { cwd, baseBranch = 'main' } = options;
 
-  //
-  const
+  // Use a file-based lock to prevent race conditions during worktree creation
+  const lockDir = safeJoin(cwd || getRepoRoot(), '_cursorflow', 'locks');
+  if (!fs.existsSync(lockDir)) {
+    fs.mkdirSync(lockDir, { recursive: true });
+  }
+  const lockFile = safeJoin(lockDir, 'worktree.lock');
+
+  let retries = 20;
+  let acquired = false;
+
+  while (retries > 0 && !acquired) {
+    try {
+      fs.writeFileSync(lockFile, String(process.pid), { flag: 'wx' });
+      acquired = true;
+    } catch {
+      retries--;
+      const delay = Math.floor(Math.random() * 500) + 200;
+      // Use synchronous sleep to keep the function signature synchronous
+      const end = Date.now() + delay;
+      while (Date.now() < end) { /* wait */ }
+    }
+  }
 
-  if (
-
-    runGit(['worktree', 'add', worktreePath, branchName], { cwd });
-  } else {
-    // Create new branch from base
-    runGit(['worktree', 'add', '-b', branchName, worktreePath, baseBranch], { cwd });
+  if (!acquired) {
+    throw new Error('Failed to acquire worktree lock after multiple retries');
   }
 
-
+  try {
+    // Check if branch already exists
+    const branchExists = runGitResult(['rev-parse', '--verify', branchName], { cwd }).success;
+
+    if (branchExists) {
+      // Branch exists, checkout to worktree
+      runGit(['worktree', 'add', worktreePath, branchName], { cwd });
+    } else {
+      // Create new branch from base
+      runGit(['worktree', 'add', '-b', branchName, worktreePath, baseBranch], { cwd });
+    }
+
+    return worktreePath;
+  } finally {
+    try {
+      fs.unlinkSync(lockFile);
+    } catch {
+      // Ignore
+    }
+  }
 }
 
 /**

@@ -362,4 +496,4 @@ export function getLastOperationStats(cwd?: string): string {
   } catch (e) {
     return '';
   }
-}
+}
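A minimal sketch of how the module-internal lock helpers above could serialize concurrent Git operations across lanes; `pushLaneBranch` and the `'push'` lock name are illustrative assumptions, not exports of the package.

```ts
// Illustrative only: serialize pushes from parallel lanes through runGitWithLock.
// The wrapper name and lock name are assumptions; runGitWithLock/runGit are the
// helpers shown in the diff above.
async function pushLaneBranch(branch: string, cwd?: string): Promise<string> {
  return runGitWithLock(
    'push', // callers sharing a lock name run one at a time, with jittered retries
    () => runGit(['push', 'origin', branch], { cwd }),
    { cwd, maxRetries: 10, retryDelay: 500 }
  );
}
```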
package/src/utils/log-formatter.ts
ADDED

@@ -0,0 +1,162 @@
+/**
+ * Utility for formatting log messages for console display
+ */
+
+import * as logger from './logger';
+import { ParsedMessage, stripAnsi } from './enhanced-logger';
+
+/**
+ * Format a single parsed message into a human-readable string (compact or multi-line)
+ */
+export function formatMessageForConsole(
+  msg: ParsedMessage,
+  options: {
+    includeTimestamp?: boolean;
+    laneLabel?: string;
+    compact?: boolean;
+  } = {}
+): string {
+  const { includeTimestamp = true, laneLabel = '', compact = false } = options;
+  const ts = includeTimestamp ? new Date(msg.timestamp).toLocaleTimeString('en-US', { hour12: false }) : '';
+  const tsPrefix = ts ? `${logger.COLORS.gray}[${ts}]${logger.COLORS.reset} ` : '';
+  const labelPrefix = laneLabel ? `${logger.COLORS.magenta}${laneLabel.padEnd(12)}${logger.COLORS.reset} ` : '';
+
+  let typePrefix = '';
+  let content = msg.content;
+
+  switch (msg.type) {
+    case 'user':
+      typePrefix = `${logger.COLORS.cyan}🧑 USER${logger.COLORS.reset}`;
+      if (compact) content = content.replace(/\n/g, ' ').substring(0, 100) + (content.length > 100 ? '...' : '');
+      break;
+    case 'assistant':
+      typePrefix = `${logger.COLORS.green}🤖 ASST${logger.COLORS.reset}`;
+      if (compact) content = content.replace(/\n/g, ' ').substring(0, 100) + (content.length > 100 ? '...' : '');
+      break;
+    case 'tool':
+      typePrefix = `${logger.COLORS.yellow}🔧 TOOL${logger.COLORS.reset}`;
+      const toolMatch = content.match(/\[Tool: ([^\]]+)\] (.*)/);
+      if (toolMatch) {
+        const [, name, args] = toolMatch;
+        try {
+          const parsedArgs = JSON.parse(args!);
+          let argStr = '';
+          if (name === 'read_file' && parsedArgs.target_file) {
+            argStr = parsedArgs.target_file;
+          } else if (name === 'run_terminal_cmd' && parsedArgs.command) {
+            argStr = parsedArgs.command;
+          } else if (name === 'write' && parsedArgs.file_path) {
+            argStr = parsedArgs.file_path;
+          } else if (name === 'search_replace' && parsedArgs.file_path) {
+            argStr = parsedArgs.file_path;
+          } else {
+            const keys = Object.keys(parsedArgs);
+            if (keys.length > 0) {
+              argStr = String(parsedArgs[keys[0]]).substring(0, 50);
+            }
+          }
+          content = `${logger.COLORS.bold}${name}${logger.COLORS.reset}(${argStr})`;
+        } catch {
+          content = `${logger.COLORS.bold}${name}${logger.COLORS.reset}: ${args}`;
+        }
+      }
+      break;
+    case 'tool_result':
+      typePrefix = `${logger.COLORS.gray}📄 RESL${logger.COLORS.reset}`;
+      const resMatch = content.match(/\[Tool Result: ([^\]]+)\]/);
+      content = resMatch ? `${resMatch[1]} OK` : 'result';
+      break;
+    case 'result':
+      typePrefix = `${logger.COLORS.green}✅ DONE${logger.COLORS.reset}`;
+      break;
+    case 'system':
+      typePrefix = `${logger.COLORS.gray}⚙️ SYS${logger.COLORS.reset}`;
+      break;
+    case 'thinking':
+      typePrefix = `${logger.COLORS.gray}🤔 THNK${logger.COLORS.reset}`;
+      if (compact) content = content.replace(/\n/g, ' ').substring(0, 100) + (content.length > 100 ? '...' : '');
+      break;
+  }
+
+  if (!typePrefix) return `${tsPrefix}${labelPrefix}${content}`;
+
+  if (compact) {
+    return `${tsPrefix}${labelPrefix}${typePrefix} ${content}`;
+  }
+
+  // Multi-line box format (as seen in orchestrator)
+  const lines = content.split('\n');
+  const fullPrefix = `${tsPrefix}${labelPrefix}`;
+  const header = `${typePrefix} ┌${'─'.repeat(60)}`;
+  let result = `${fullPrefix}${header}\n`;
+
+  const indent = ' '.repeat(stripAnsi(typePrefix).length);
+  for (const line of lines) {
+    result += `${fullPrefix}${indent} │ ${line}\n`;
+  }
+  result += `${fullPrefix}${indent} └${'─'.repeat(60)}`;
+
+  return result;
+}
+
+/**
+ * Detect and format a message that might be a raw JSON string from cursor-agent
+ */
+export function formatPotentialJsonMessage(message: string): string {
+  const trimmed = message.trim();
+  if (!trimmed.startsWith('{') || !trimmed.endsWith('}')) {
+    return message;
+  }
+
+  try {
+    const json = JSON.parse(trimmed);
+    if (!json.type) return message;
+
+    // Convert JSON to a ParsedMessage-like structure for formatting
+    let content = trimmed;
+    let type = 'system';
+
+    if (json.type === 'thinking' && json.text) {
+      content = json.text;
+      type = 'thinking';
+    } else if (json.type === 'assistant' && json.message?.content) {
+      content = json.message.content
+        .filter((c: any) => c.type === 'text')
+        .map((c: any) => c.text)
+        .join('');
+      type = 'assistant';
+    } else if (json.type === 'user' && json.message?.content) {
+      content = json.message.content
+        .filter((c: any) => c.type === 'text')
+        .map((c: any) => c.text)
+        .join('');
+      type = 'user';
+    } else if (json.type === 'tool_call' && json.subtype === 'started') {
+      const toolName = Object.keys(json.tool_call)[0] || 'unknown';
+      const args = json.tool_call[toolName]?.args || {};
+      content = `[Tool: ${toolName}] ${JSON.stringify(args)}`;
+      type = 'tool';
+    } else if (json.type === 'tool_call' && json.subtype === 'completed') {
+      const toolName = Object.keys(json.tool_call)[0] || 'unknown';
+      content = `[Tool Result: ${toolName}]`;
+      type = 'tool_result';
+    } else if (json.type === 'result') {
+      content = json.result || 'Task completed';
+      type = 'result';
+    } else {
+      // Unknown type, return as is
+      return message;
+    }
+
+    return formatMessageForConsole({
+      type: type as any,
+      role: type,
+      content,
+      timestamp: json.timestamp_ms || Date.now()
+    }, { includeTimestamp: false, compact: true });
+
+  } catch {
+    return message;
+  }
+}
+
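A small usage sketch of `formatPotentialJsonMessage`; the sample event below follows the shape the `tool_call` branch above expects from cursor-agent's stream-JSON output and is an assumption, not a captured log line.

```ts
import { formatPotentialJsonMessage } from './log-formatter';

// A stream-JSON line shaped like the tool_call branch above expects (assumed sample).
const rawLine = JSON.stringify({
  type: 'tool_call',
  subtype: 'started',
  tool_call: { run_terminal_cmd: { args: { command: 'npm test' } } },
});

console.log(formatPotentialJsonMessage(rawLine));        // compact "🔧 TOOL run_terminal_cmd(npm test)"-style line
console.log(formatPotentialJsonMessage('plain output')); // non-JSON lines pass through unchanged
```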
package/src/utils/path.ts
ADDED

@@ -0,0 +1,45 @@
+import * as path from 'path';
+
+/**
+ * Ensures that a path is safe and stays within a base directory.
+ * Prevents path traversal attacks.
+ */
+export function isSafePath(baseDir: string, ...parts: string[]): boolean {
+  const joined = path.join(baseDir, ...parts); // nosemgrep
+  const resolvedBase = path.resolve(baseDir); // nosemgrep
+  const resolvedJoined = path.resolve(joined); // nosemgrep
+
+  return resolvedJoined.startsWith(resolvedBase);
+}
+
+/**
+ * Safely joins path parts and ensures the result is within the base directory.
+ * Throws an error if path traversal is detected.
+ *
+ * @param baseDir The base directory that the resulting path must be within
+ * @param parts Path parts to join
+ * @returns The joined path
+ * @throws Error if the resulting path is outside the base directory
+ */
+export function safeJoin(baseDir: string, ...parts: string[]): string {
+  const joined = path.join(baseDir, ...parts); // nosemgrep
+  const resolvedBase = path.resolve(baseDir); // nosemgrep
+  const resolvedJoined = path.resolve(joined); // nosemgrep
+
+  if (!resolvedJoined.startsWith(resolvedBase)) {
+    throw new Error(`Potential path traversal detected: ${joined} is outside of ${baseDir}`);
+  }
+
+  return joined;
+}
+
+/**
+ * Normalizes a path and checks if it's absolute or relative to project root.
+ */
+export function normalizePath(p: string, projectRoot: string): string {
+  if (path.isAbsolute(p)) {
+    return path.normalize(p);
+  }
+  return path.join(projectRoot, p); // nosemgrep
+}
+
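A short usage sketch of the new path helpers; the directory and file names are examples only.

```ts
import { isSafePath, safeJoin } from './path';

const logsDir = '/repo/_cursorflow/logs'; // example base directory

// Joins that stay inside the base directory are returned as-is.
const statePath = safeJoin(logsDir, 'run-001', 'state.json'); // '/repo/_cursorflow/logs/run-001/state.json'

// Joins that escape the base directory throw.
try {
  safeJoin(logsDir, '..', '..', 'etc', 'passwd');
} catch (err) {
  // Error: Potential path traversal detected: ...
}

// Non-throwing variant for validation.
isSafePath(logsDir, 'run-001'); // true
isSafePath(logsDir, '../..');   // false
```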
package/src/utils/state.ts
CHANGED

@@ -4,6 +4,7 @@
 
 import * as fs from 'fs';
 import * as path from 'path';
+import { safeJoin } from './path';
 import {
   LaneState,
   ConversationEntry,

@@ -80,18 +81,25 @@ export function readLog<T = any>(logPath: string): T[] {
 /**
  * Create initial lane state
  */
-export function createLaneState(
+export function createLaneState(
+  laneName: string,
+  config: RunnerConfig,
+  tasksFile?: string,
+  options: { pipelineBranch?: string; worktreeDir?: string } = {}
+): LaneState {
   return {
     label: laneName,
     status: 'pending',
     currentTaskIndex: 0,
     totalTasks: config.tasks ? config.tasks.length : 0,
-    worktreeDir: null,
-    pipelineBranch: null,
+    worktreeDir: options.worktreeDir || null,
+    pipelineBranch: options.pipelineBranch || null,
     startTime: Date.now(),
     endTime: null,
     error: null,
     dependencyRequest: null,
+    tasksFile,
+    dependsOn: config.dependsOn || [],
   };
 }
 

@@ -151,7 +159,7 @@ export function getLatestRunDir(logsDir: string): string | null {
   }
 
   const runs = fs.readdirSync(logsDir)
-    .filter(f => fs.statSync(
+    .filter(f => fs.statSync(safeJoin(logsDir, f)).isDirectory())
     .sort()
     .reverse();
 

@@ -159,7 +167,7 @@ export function getLatestRunDir(logsDir: string): string | null {
     return null;
   }
 
-  return
+  return safeJoin(logsDir, runs[0]!);
 }
 
 /**

@@ -171,11 +179,11 @@ export function listLanesInRun(runDir: string): { name: string; dir: string; sta
   }
 
   return fs.readdirSync(runDir)
-    .filter(f => fs.statSync(
+    .filter(f => fs.statSync(safeJoin(runDir, f)).isDirectory())
     .map(laneName => ({
       name: laneName,
-      dir:
-      statePath:
+      dir: safeJoin(runDir, laneName),
+      statePath: safeJoin(runDir, laneName, 'state.json'),
     }));
 }
 
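A sketch of a call against the extended `createLaneState` signature; the config literal shows only the fields the function reads and is cast loosely because the full `RunnerConfig` type is not part of this diff.

```ts
import { createLaneState } from './state';

// Illustrative call against the new signature; the config object is a minimal stand-in.
const config = {
  tasks: [{ name: 'plan' }, { name: 'implement' }],
  dependsOn: ['lane-api'],
} as any; // RunnerConfig shape not shown in this diff

const state = createLaneState('lane-ui', config, 'tasks/ui.json', {
  pipelineBranch: 'cursorflow/lane-ui',
  worktreeDir: '_cursorflow/worktrees/lane-ui',
});

// state.totalTasks === 2, state.dependsOn deep-equals ['lane-api'], and
// worktreeDir/pipelineBranch are pre-filled instead of starting as null.
```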
package/src/utils/template.ts
ADDED

@@ -0,0 +1,92 @@
+/**
+ * Template loading utilities for CursorFlow
+ */
+
+import * as fs from 'fs';
+import * as path from 'path';
+import * as https from 'https';
+import * as http from 'http';
+import * as logger from './logger';
+import { safeJoin } from './path';
+import { findProjectRoot } from './config';
+
+/**
+ * Fetch remote template from URL
+ */
+export async function fetchRemoteTemplate(url: string): Promise<any> {
+  return new Promise((resolve, reject) => {
+    const protocol = url.startsWith('https') ? https : http;
+
+    protocol.get(url, (res) => {
+      if (res.statusCode !== 200) {
+        reject(new Error(`Failed to fetch template from ${url}: Status ${res.statusCode}`));
+        return;
+      }
+
+      let data = '';
+      res.on('data', (chunk) => {
+        data += chunk;
+      });
+
+      res.on('end', () => {
+        try {
+          resolve(JSON.parse(data));
+        } catch (e) {
+          reject(new Error(`Failed to parse template JSON from ${url}: ${e}`));
+        }
+      });
+    }).on('error', (err) => {
+      reject(new Error(`Network error while fetching template from ${url}: ${err.message}`));
+    });
+  });
+}
+
+/**
+ * Resolve template from various sources:
+ * 1. URL (starts with http:// or https://)
+ * 2. Built-in template (name without .json)
+ * 3. Local file path
+ */
+export async function resolveTemplate(templatePath: string): Promise<any> {
+  // 1. Remote URL
+  if (templatePath.startsWith('http://') || templatePath.startsWith('https://')) {
+    logger.info(`Fetching remote template: ${templatePath}`);
+    return fetchRemoteTemplate(templatePath);
+  }
+
+  // 2. Built-in template
+  // Search in templates/ directory of the project root
+  try {
+    const projectRoot = findProjectRoot();
+    const builtInPath = safeJoin(projectRoot, 'templates', templatePath.endsWith('.json') ? templatePath : `${templatePath}.json`);
+    if (fs.existsSync(builtInPath)) {
+      logger.info(`Using built-in template: ${templatePath}`);
+      return JSON.parse(fs.readFileSync(builtInPath, 'utf8'));
+    }
+  } catch (e) {
+    // Ignore error if project root not found, try other methods
+  }
+
+  // Fallback for built-in templates relative to the module (for installed package)
+  const templatesDir = path.resolve(__dirname, '../../templates');
+  const templateFileName = templatePath.endsWith('.json') ? templatePath : `${templatePath}.json`;
+  const modulePath = safeJoin(templatesDir, templateFileName);
+  if (fs.existsSync(modulePath)) {
+    logger.info(`Using module template: ${templatePath}`);
+    return JSON.parse(fs.readFileSync(modulePath, 'utf8'));
+  }
+
+  // 3. Local file path
+  const localPath = safeJoin(process.cwd(), templatePath);
+  if (fs.existsSync(localPath)) {
+    logger.info(`Using local template: ${templatePath}`);
+    try {
+      return JSON.parse(fs.readFileSync(localPath, 'utf8'));
+    } catch (e) {
+      throw new Error(`Failed to parse local template ${templatePath}: ${e}`);
+    }
+  }
+
+  throw new Error(`Template not found: ${templatePath}. It must be a URL, a built-in template name, or a local file path.`);
+}
+
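A sketch of `resolveTemplate` resolving each supported source in turn; the URL and local file name are placeholders, and only `templates/basic.json` is actually bundled with this release.

```ts
import { resolveTemplate } from './template';

async function loadExamples() {
  const builtIn = await resolveTemplate('basic');                             // bundled templates/basic.json
  const local = await resolveTemplate('./my-pipeline.template.json');         // resolved relative to process.cwd()
  const remote = await resolveTemplate('https://example.com/pipeline.json');  // fetched over HTTPS
  return { builtIn, local, remote };
}
```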
package/src/utils/types.ts
CHANGED

package/templates/basic.json
ADDED

@@ -0,0 +1,21 @@
+{
+  "name": "basic-template",
+  "tasks": [
+    {
+      "name": "plan",
+      "model": "sonnet-4.5-thinking",
+      "prompt": "Analyze requirements for {{featureName}} and create a plan.",
+      "acceptanceCriteria": ["Plan documented"]
+    },
+    {
+      "name": "implement",
+      "model": "sonnet-4.5",
+      "prompt": "Implement {{featureName}} based on the plan.",
+      "acceptanceCriteria": ["Implementation complete"]
+    }
+  ],
+  "dependencyPolicy": {
+    "allowDependencyChange": false,
+    "lockfileReadOnly": true
+  }
+}