@litmers/cursorflow-orchestrator 0.2.5 → 0.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/CHANGELOG.md +29 -20
  2. package/README.md +13 -8
  3. package/dist/cli/complete.js +22 -5
  4. package/dist/cli/complete.js.map +1 -1
  5. package/dist/cli/index.js +2 -0
  6. package/dist/cli/index.js.map +1 -1
  7. package/dist/cli/logs.js +61 -51
  8. package/dist/cli/logs.js.map +1 -1
  9. package/dist/cli/monitor.js +45 -56
  10. package/dist/cli/monitor.js.map +1 -1
  11. package/dist/cli/resume.js +2 -2
  12. package/dist/cli/resume.js.map +1 -1
  13. package/dist/core/git-lifecycle-manager.js +2 -2
  14. package/dist/core/git-lifecycle-manager.js.map +1 -1
  15. package/dist/core/git-pipeline-coordinator.js +25 -25
  16. package/dist/core/git-pipeline-coordinator.js.map +1 -1
  17. package/dist/core/orchestrator.d.ts +17 -0
  18. package/dist/core/orchestrator.js +186 -8
  19. package/dist/core/orchestrator.js.map +1 -1
  20. package/dist/core/runner/pipeline.js +3 -3
  21. package/dist/core/runner/pipeline.js.map +1 -1
  22. package/dist/hooks/data-accessor.js +2 -2
  23. package/dist/hooks/data-accessor.js.map +1 -1
  24. package/dist/services/logging/buffer.d.ts +2 -1
  25. package/dist/services/logging/buffer.js +63 -22
  26. package/dist/services/logging/buffer.js.map +1 -1
  27. package/dist/services/logging/formatter.d.ts +0 -4
  28. package/dist/services/logging/formatter.js +33 -201
  29. package/dist/services/logging/formatter.js.map +1 -1
  30. package/dist/services/logging/paths.d.ts +3 -0
  31. package/dist/services/logging/paths.js +3 -0
  32. package/dist/services/logging/paths.js.map +1 -1
  33. package/dist/types/config.d.ts +9 -1
  34. package/dist/types/flow.d.ts +6 -0
  35. package/dist/types/logging.d.ts +1 -1
  36. package/dist/utils/config.js +6 -2
  37. package/dist/utils/config.js.map +1 -1
  38. package/dist/utils/enhanced-logger.d.ts +37 -17
  39. package/dist/utils/enhanced-logger.js +267 -237
  40. package/dist/utils/enhanced-logger.js.map +1 -1
  41. package/dist/utils/events.d.ts +18 -15
  42. package/dist/utils/events.js +8 -5
  43. package/dist/utils/events.js.map +1 -1
  44. package/dist/utils/log-formatter.d.ts +26 -0
  45. package/dist/utils/log-formatter.js +274 -0
  46. package/dist/utils/log-formatter.js.map +1 -0
  47. package/dist/utils/logger.js +4 -17
  48. package/dist/utils/logger.js.map +1 -1
  49. package/dist/utils/repro-thinking-logs.js +4 -4
  50. package/dist/utils/repro-thinking-logs.js.map +1 -1
  51. package/package.json +2 -2
  52. package/scripts/monitor-lanes.sh +5 -5
  53. package/scripts/stream-logs.sh +1 -1
  54. package/scripts/test-log-parser.ts +42 -8
  55. package/src/cli/complete.ts +21 -6
  56. package/src/cli/index.ts +2 -0
  57. package/src/cli/logs.ts +60 -46
  58. package/src/cli/monitor.ts +47 -64
  59. package/src/cli/resume.ts +1 -1
  60. package/src/core/git-lifecycle-manager.ts +2 -2
  61. package/src/core/git-pipeline-coordinator.ts +25 -25
  62. package/src/core/orchestrator.ts +214 -7
  63. package/src/core/runner/pipeline.ts +3 -3
  64. package/src/hooks/data-accessor.ts +2 -2
  65. package/src/services/logging/buffer.ts +68 -20
  66. package/src/services/logging/formatter.ts +32 -199
  67. package/src/services/logging/paths.ts +3 -0
  68. package/src/types/config.ts +13 -1
  69. package/src/types/flow.ts +6 -0
  70. package/src/types/logging.ts +0 -2
  71. package/src/utils/config.ts +6 -2
  72. package/src/utils/enhanced-logger.ts +290 -239
  73. package/src/utils/events.ts +21 -18
  74. package/src/utils/log-formatter.ts +287 -0
  75. package/src/utils/logger.ts +3 -18
  76. package/src/utils/repro-thinking-logs.ts +4 -4
@@ -26,10 +26,11 @@ import {
26
26
  EnhancedLogManager,
27
27
  createLogManager,
28
28
  DEFAULT_LOG_CONFIG,
29
- ParsedMessage
29
+ ParsedMessage,
30
+ stripAnsi
30
31
  } from '../utils/enhanced-logger';
31
32
  import { MAIN_LOG_FILENAME } from '../utils/log-constants';
32
- import { formatMessageForConsole, stripAnsi } from '../services/logging/formatter';
33
+ import { formatMessageForConsole } from '../utils/log-formatter';
33
34
  import { FailureType, analyzeFailure as analyzeFailureFromPolicy } from './failure-policy';
34
35
  import {
35
36
  savePOF,
@@ -292,7 +293,6 @@ export function spawnLane({
292
293
  // Build environment for child process
293
294
  const childEnv = {
294
295
  ...process.env,
295
- CURSORFLOW_LANE: 'true',
296
296
  };
297
297
 
298
298
  if (logConfig.enabled) {
@@ -392,7 +392,7 @@ export function spawnLane({
392
392
  return { child, logPath, logManager, info };
393
393
  } else {
394
394
  // Fallback to simple file logging
395
- logPath = getLaneLogPath(laneRunDir, 'jsonl');
395
+ logPath = getLaneLogPath(laneRunDir, 'raw');
396
396
  const logFd = fs.openSync(logPath, 'a');
397
397
 
398
398
  child = spawn('node', args, {
@@ -519,12 +519,12 @@ async function resolveAllDependencies(
519
519
  const worktreeDir = state?.worktreeDir || safeJoin(runRoot, 'resolution-worktree');
520
520
 
521
521
  if (!fs.existsSync(worktreeDir)) {
522
- logger.info(`đŸ—ī¸ Creating resolution worktree at ${worktreeDir}`, { context: 'git' });
522
+ logger.info(`đŸ—ī¸ Creating resolution worktree at ${worktreeDir}`);
523
523
  git.createWorktree(worktreeDir, pipelineBranch, { baseBranch: git.getCurrentBranch() });
524
524
  }
525
525
 
526
526
  // 3. Resolve on pipeline branch
527
- logger.info(`🔄 Resolving dependencies on branch ${pipelineBranch}`, { context: 'git' });
527
+ logger.info(`🔄 Resolving dependencies on branch ${pipelineBranch}`);
528
528
  git.runGit(['checkout', pipelineBranch], { cwd: worktreeDir });
529
529
 
530
530
  for (const cmd of uniqueCommands) {
@@ -566,7 +566,7 @@ async function resolveAllDependencies(
566
566
  if (task) {
567
567
  const lanePipelineBranch = `${pipelineBranch}/${lane.name}`;
568
568
  const taskBranch = `${lanePipelineBranch}--${String(currentIdx + 1).padStart(2, '0')}-${task.name}`;
569
- logger.info(`Syncing lane ${lane.name} branch ${taskBranch}`, { context: 'git' });
569
+ logger.info(`Syncing lane ${lane.name} branch ${taskBranch}`);
570
570
 
571
571
  try {
572
572
  // If task branch doesn't exist yet, it will be created from pipelineBranch when the lane starts
@@ -591,6 +591,188 @@ async function resolveAllDependencies(
591
591
  git.runGit(['checkout', pipelineBranch], { cwd: worktreeDir });
592
592
  }
593
593
 
594
+ /**
595
+ * Finalize flow: merge all lane branches into integrated branch and cleanup
596
+ */
597
+ export async function finalizeFlow(params: {
598
+ tasksDir: string;
599
+ runId: string;
600
+ runRoot: string;
601
+ laneRunDirs: Record<string, string>;
602
+ laneWorktreeDirs: Record<string, string>;
603
+ pipelineBranch: string;
604
+ repoRoot: string;
605
+ noCleanup?: boolean;
606
+ }): Promise<void> {
607
+ const { tasksDir, runId, runRoot, laneRunDirs, laneWorktreeDirs, pipelineBranch, repoRoot, noCleanup } = params;
608
+
609
+ // 1. Load FlowMeta
610
+ const metaPath = safeJoin(tasksDir, 'flow.meta.json');
611
+ let meta: any = null;
612
+ let flowName = path.basename(tasksDir).replace(/^\d+_/, '');
613
+ let baseBranch = 'main';
614
+
615
+ if (fs.existsSync(metaPath)) {
616
+ try {
617
+ meta = JSON.parse(fs.readFileSync(metaPath, 'utf8'));
618
+ flowName = meta.name || flowName;
619
+ baseBranch = meta.baseBranch || 'main';
620
+
621
+ // Update status to integrating
622
+ meta.status = 'integrating';
623
+ fs.writeFileSync(metaPath, JSON.stringify(meta, null, 2));
624
+ } catch (e) {
625
+ logger.warn(`Failed to read flow.meta.json: ${e}`);
626
+ }
627
+ }
628
+
629
+ logger.section(`🏁 Finalizing Flow: ${flowName}`);
630
+
631
+ // 2. Collect lane branches
632
+ const laneBranches: string[] = [];
633
+ for (const [laneName, laneDir] of Object.entries(laneRunDirs)) {
634
+ const statePath = safeJoin(laneDir, 'state.json');
635
+ if (fs.existsSync(statePath)) {
636
+ try {
637
+ const state = JSON.parse(fs.readFileSync(statePath, 'utf8'));
638
+ if (state.pipelineBranch) {
639
+ laneBranches.push(state.pipelineBranch);
640
+ }
641
+ } catch (e) {
642
+ logger.warn(`Failed to read lane state for ${laneName}: ${e}`);
643
+ }
644
+ }
645
+ }
646
+
647
+ if (laneBranches.length === 0) {
648
+ logger.warn('No lane branches found to integrate');
649
+ return;
650
+ }
651
+
652
+ // 3. Create integrated branch
653
+ const targetBranch = `feature/${flowName}-integrated`;
654
+ logger.info(`Target Branch: ${targetBranch}`);
655
+ logger.info(`Base Branch: ${baseBranch}`);
656
+ logger.info(`Lanes to merge: ${laneBranches.length}`);
657
+
658
+ // Ensure we are on a clean state
659
+ if (git.hasUncommittedChanges(repoRoot)) {
660
+ logger.warn('Main repository has uncommitted changes. Stashing...');
661
+ git.stash('auto-stash before flow completion', { cwd: repoRoot });
662
+ }
663
+
664
+ // Checkout base branch and create target branch
665
+ logger.info(`Creating target branch '${targetBranch}' from '${baseBranch}'...`);
666
+ git.runGit(['checkout', baseBranch], { cwd: repoRoot });
667
+ git.runGit(['checkout', '-B', targetBranch], { cwd: repoRoot });
668
+
669
+ // 4. Merge each lane branch
670
+ for (const branch of laneBranches) {
671
+ logger.info(`Merging ${branch}...`);
672
+
673
+ // Determine what ref to use for merge
674
+ let branchRef: string;
675
+
676
+ if (git.branchExists(branch, { cwd: repoRoot })) {
677
+ // Local branch exists, use it directly
678
+ branchRef = branch;
679
+ } else {
680
+ // Local branch doesn't exist - fetch from remote with proper refspec
681
+ // Note: `git fetch origin <branch>` only updates FETCH_HEAD, not origin/<branch>
682
+ // We must use refspec to update the remote tracking ref
683
+ try {
684
+ git.runGit(['fetch', 'origin', `${branch}:refs/remotes/origin/${branch}`], { cwd: repoRoot });
685
+ branchRef = `origin/${branch}`;
686
+ } catch (e) {
687
+ // Fallback: try fetching and use FETCH_HEAD directly
688
+ logger.warn(`Failed to fetch with refspec, trying FETCH_HEAD: ${e}`);
689
+ try {
690
+ git.runGit(['fetch', 'origin', branch], { cwd: repoRoot });
691
+ branchRef = 'FETCH_HEAD';
692
+ } catch (e2) {
693
+ logger.warn(`Failed to fetch ${branch}: ${e2}`);
694
+ throw new Error(`Cannot fetch branch ${branch} from remote`);
695
+ }
696
+ }
697
+ }
698
+
699
+ const mergeResult = git.safeMerge(branchRef, {
700
+ cwd: repoRoot,
701
+ noFf: true,
702
+ message: `chore: merge lane ${branch} into flow integration`,
703
+ abortOnConflict: true,
704
+ });
705
+
706
+ if (!mergeResult.success) {
707
+ if (mergeResult.conflict) {
708
+ logger.error(`❌ Merge conflict with '${branch}': ${mergeResult.conflictingFiles.join(', ')}`);
709
+
710
+ // Update meta with error
711
+ if (meta) {
712
+ meta.status = 'failed';
713
+ meta.error = `Merge conflict: ${mergeResult.conflictingFiles.join(', ')}`;
714
+ fs.writeFileSync(metaPath, JSON.stringify(meta, null, 2));
715
+ }
716
+
717
+ throw new Error(`Merge conflict during integration: ${mergeResult.conflictingFiles.join(', ')}`);
718
+ }
719
+ throw new Error(`Merge failed for ${branch}: ${mergeResult.error}`);
720
+ }
721
+ logger.success(`✓ Merged ${branch}`);
722
+ }
723
+
724
+ // 5. Push final branch
725
+ logger.info(`Pushing '${targetBranch}' to remote...`);
726
+ git.push(targetBranch, { cwd: repoRoot, setUpstream: true });
727
+ logger.success(`✓ Pushed ${targetBranch}`);
728
+
729
+ // 6. Cleanup (if not disabled)
730
+ if (!noCleanup) {
731
+ logger.info('🧹 Cleaning up temporary resources...');
732
+
733
+ // Delete local and remote lane branches
734
+ for (const branch of laneBranches) {
735
+ try {
736
+ git.deleteBranch(branch, { cwd: repoRoot, force: true });
737
+ try {
738
+ git.deleteBranch(branch, { cwd: repoRoot, remote: true });
739
+ } catch {
740
+ // Remote branch might not exist or no permission
741
+ }
742
+ } catch (e) {
743
+ logger.warn(`Failed to delete branch ${branch}: ${e}`);
744
+ }
745
+ }
746
+
747
+ // Remove worktrees
748
+ for (const wtPath of Object.values(laneWorktreeDirs)) {
749
+ if (fs.existsSync(wtPath)) {
750
+ try {
751
+ git.removeWorktree(wtPath, { cwd: repoRoot, force: true });
752
+ if (fs.existsSync(wtPath)) {
753
+ fs.rmSync(wtPath, { recursive: true, force: true });
754
+ }
755
+ } catch (e) {
756
+ logger.warn(`Failed to remove worktree ${wtPath}: ${e}`);
757
+ }
758
+ }
759
+ }
760
+ }
761
+
762
+ // 7. Update FlowMeta with completion info
763
+ if (meta) {
764
+ meta.status = 'completed';
765
+ meta.integratedBranch = targetBranch;
766
+ meta.integratedAt = new Date().toISOString();
767
+ delete meta.error;
768
+ fs.writeFileSync(metaPath, JSON.stringify(meta, null, 2));
769
+ }
770
+
771
+ logger.section(`🎉 Flow Completed!`);
772
+ logger.info(`Integrated branch: ${targetBranch}`);
773
+ logger.success(`All ${laneBranches.length} lanes merged successfully.`);
774
+ }
775
+
594
776
  /**
595
777
  * Run orchestration with dependency management
596
778
  */
@@ -606,6 +788,10 @@ export async function orchestrate(tasksDir: string, options: {
606
788
  skipPreflight?: boolean;
607
789
  stallConfig?: Partial<StallDetectionConfig>;
608
790
  browser?: boolean;
791
+ /** Auto-complete flow when all lanes succeed (merge branches, cleanup) */
792
+ autoComplete?: boolean;
793
+ /** Skip cleanup even if autoComplete is true */
794
+ noCleanup?: boolean;
609
795
  } = {}): Promise<{ lanes: LaneInfo[]; exitCodes: Record<string, number>; runRoot: string }> {
610
796
  const lanes = listLaneFiles(tasksDir);
611
797
 
@@ -1119,6 +1305,27 @@ export async function orchestrate(tasksDir: string, options: {
1119
1305
  }
1120
1306
 
1121
1307
  logger.success('All lanes completed successfully!');
1308
+
1309
+ // Auto-complete flow: merge all lane branches and cleanup
1310
+ const autoComplete = options.autoComplete !== false && !options.noGit;
1311
+ if (autoComplete && completedLanes.size === lanes.length) {
1312
+ try {
1313
+ await finalizeFlow({
1314
+ tasksDir,
1315
+ runId,
1316
+ runRoot,
1317
+ laneRunDirs,
1318
+ laneWorktreeDirs,
1319
+ pipelineBranch,
1320
+ repoRoot,
1321
+ noCleanup: options.noCleanup,
1322
+ });
1323
+ } catch (error: any) {
1324
+ logger.error(`Flow auto-completion failed: ${error.message}`);
1325
+ logger.info('You can manually complete the flow with: cursorflow complete');
1326
+ }
1327
+ }
1328
+
1122
1329
  events.emit('orchestration.completed', {
1123
1330
  runId,
1124
1331
  laneCount: lanes.length,
@@ -156,7 +156,7 @@ export async function runTasks(tasksFile: string, config: RunnerConfig, runDir:
156
156
  // ALWAYS use current branch as base - ignore config.baseBranch
157
157
  // This ensures dependency structure is maintained in the worktree
158
158
  const currentBranch = git.getCurrentBranch(repoRoot);
159
- logger.info(`📍 Base branch: ${currentBranch} (current branch)`, { context: 'git' });
159
+ logger.info(`📍 Base branch: ${currentBranch} (current branch)`);
160
160
 
161
161
  // Load existing state if resuming
162
162
  const statePath = safeJoin(runDir, 'state.json');
@@ -207,8 +207,8 @@ export async function runTasks(tasksFile: string, config: RunnerConfig, runDir:
207
207
  logger.section(`🔁 Resuming Pipeline from task ${startIndex + 1}`);
208
208
  }
209
209
 
210
- logger.info(`Pipeline Branch: ${pipelineBranch}`, { context: 'git' });
211
- logger.info(`Worktree: ${worktreeDir}`, { context: 'git' });
210
+ logger.info(`Pipeline Branch: ${pipelineBranch}`);
211
+ logger.info(`Worktree: ${worktreeDir}`);
212
212
  logger.info(`Tasks: ${config.tasks.length}`);
213
213
 
214
214
  const gitCoordinator = new GitPipelineCoordinator();
@@ -333,9 +333,9 @@ export class HookDataAccessorImpl implements HookDataAccessor {
333
333
  }
334
334
 
335
335
  try {
336
- // Try multiple possible log file locations, preferring terminal.jsonl
336
+ // Try multiple possible log file locations
337
337
  const possiblePaths = [
338
- safeJoin(this.options.runDir, 'terminal.jsonl'),
338
+ safeJoin(this.options.runDir, 'terminal-raw.log'),
339
339
  safeJoin(this.options.runDir, 'terminal.log'),
340
340
  safeJoin(this.options.runDir, 'agent-output.log'),
341
341
  ];
@@ -109,16 +109,14 @@ export class LogBufferService extends EventEmitter {
109
109
  const newEntries: BufferedLogEntryType[] = [];
110
110
 
111
111
  for (const laneName of this.lanes) {
112
- const jsonlPath = path.join(lanesDir, laneName, 'terminal.jsonl');
112
+ const readablePath = path.join(lanesDir, laneName, 'terminal-readable.log');
113
113
 
114
114
  let fd: number | null = null;
115
115
  try {
116
- if (!fs.existsSync(jsonlPath)) continue;
117
-
118
116
  // Read file content atomically to avoid TOCTOU race condition
119
- const lastPos = this.filePositions.get(jsonlPath) || 0;
120
- fd = fs.openSync(jsonlPath, 'r');
121
- const stat = fs.fstatSync(fd);
117
+ const lastPos = this.filePositions.get(readablePath) || 0;
118
+ fd = fs.openSync(readablePath, 'r');
119
+ const stat = fs.fstatSync(fd); // Use fstat on open fd to avoid race
122
120
 
123
121
  if (stat.size > lastPos) {
124
122
  const buffer = Buffer.alloc(stat.size - lastPos);
@@ -128,16 +126,11 @@ export class LogBufferService extends EventEmitter {
128
126
  const lines = newContent.split('\n').filter(line => line.trim());
129
127
 
130
128
  for (const line of lines) {
131
- try {
132
- const entry = JSON.parse(line);
133
- const processed = this.processJsonEntry(entry, laneName);
134
- if (processed) newEntries.push(processed);
135
- } catch {
136
- // Skip invalid JSON
137
- }
129
+ const processed = this.processReadableLine(line, laneName);
130
+ if (processed) newEntries.push(processed);
138
131
  }
139
132
 
140
- this.filePositions.set(jsonlPath, stat.size);
133
+ this.filePositions.set(readablePath, stat.size);
141
134
  }
142
135
  } catch { /* File in use, skip */ }
143
136
  finally {
@@ -161,11 +154,11 @@ export class LogBufferService extends EventEmitter {
161
154
  }
162
155
  }
163
156
 
164
- private processJsonEntry(entry: any, laneName: string): BufferedLogEntryType | null {
165
- const timestamp = new Date(entry.timestamp || Date.now());
166
- const level = entry.level || 'info';
167
- const type = entry.type || 'stdout';
168
- const message = entry.content || entry.message || '';
157
+ private processReadableLine(line: string, laneName: string): BufferedLogEntryType | null {
158
+ const cleaned = stripAnsi(line).trim();
159
+ if (!cleaned) return null;
160
+
161
+ const { timestamp, message, level, type } = this.parseReadableMessage(cleaned);
169
162
  const importance = this.inferImportance(type, level);
170
163
 
171
164
  return {
@@ -177,10 +170,65 @@ export class LogBufferService extends EventEmitter {
177
170
  message: this.truncateMessage(message),
178
171
  importance,
179
172
  laneColor: this.laneColorMap.get(laneName) || COLORS.white,
180
- raw: entry,
173
+ raw: {
174
+ timestamp: timestamp.toISOString(),
175
+ level: level as JsonLogEntry['level'],
176
+ lane: laneName,
177
+ message,
178
+ },
181
179
  };
182
180
  }
183
181
 
182
+ private parseReadableMessage(line: string): {
183
+ timestamp: Date;
184
+ message: string;
185
+ level: string;
186
+ type: MessageType | string;
187
+ } {
188
+ let remaining = line;
189
+ let timestamp = new Date();
190
+
191
+ const isoMatch = remaining.match(/^\[(\d{4}-\d{2}-\d{2}T[^\]]+)\]\s*/);
192
+ if (isoMatch) {
193
+ timestamp = new Date(isoMatch[1]!);
194
+ remaining = remaining.slice(isoMatch[0].length);
195
+ } else {
196
+ const timeMatch = remaining.match(/^\[(\d{2}:\d{2}:\d{2})\]\s*/);
197
+ if (timeMatch) {
198
+ const [hours, minutes, seconds] = timeMatch[1]!.split(':').map(Number);
199
+ const now = new Date();
200
+ now.setHours(hours || 0, minutes || 0, seconds || 0, 0);
201
+ timestamp = now;
202
+ remaining = remaining.slice(timeMatch[0].length);
203
+ }
204
+ }
205
+
206
+ const labelMatch = remaining.match(/^\[[^\]]+\]\s*/);
207
+ if (labelMatch) {
208
+ remaining = remaining.slice(labelMatch[0].length);
209
+ }
210
+
211
+ const upper = remaining.toUpperCase();
212
+ let level = 'info';
213
+ let type: MessageType | string = 'stdout';
214
+
215
+ if (remaining.includes('❌') || upper.includes('ERR') || upper.includes('ERROR')) {
216
+ level = 'error';
217
+ type = 'error';
218
+ } else if (remaining.includes('âš ī¸') || upper.includes('WARN')) {
219
+ level = 'warn';
220
+ type = 'warn';
221
+ } else if (remaining.includes('🔍') || upper.includes('DEBUG')) {
222
+ level = 'debug';
223
+ type = 'debug';
224
+ } else if (remaining.includes('â„šī¸') || upper.includes('INFO')) {
225
+ level = 'info';
226
+ type = 'info';
227
+ }
228
+
229
+ return { timestamp, message: remaining, level, type };
230
+ }
231
+
184
232
  private inferImportance(type: string, level: string): LogImportance {
185
233
  if (level === 'error' || type === 'error' || type === 'result') return LogImportance.HIGH;
186
234
  if (type === 'tool' || type === 'tool_result') return LogImportance.MEDIUM;