@grunnverk/tree-execution 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80) hide show
  1. package/LICENSE +22 -0
  2. package/README.md +1576 -0
  3. package/dist/TreeExecutor.d.ts +113 -0
  4. package/dist/TreeExecutor.d.ts.map +1 -0
  5. package/dist/TreeExecutor.js +113 -0
  6. package/dist/TreeExecutor.js.map +1 -0
  7. package/dist/checkpoint/CheckpointManager.d.ts +18 -0
  8. package/dist/checkpoint/CheckpointManager.d.ts.map +1 -0
  9. package/dist/checkpoint/CheckpointManager.js +156 -0
  10. package/dist/checkpoint/CheckpointManager.js.map +1 -0
  11. package/dist/checkpoint/index.d.ts +5 -0
  12. package/dist/checkpoint/index.d.ts.map +1 -0
  13. package/dist/checkpoint/index.js +5 -0
  14. package/dist/checkpoint/index.js.map +1 -0
  15. package/dist/execution/CommandValidator.d.ts +25 -0
  16. package/dist/execution/CommandValidator.d.ts.map +1 -0
  17. package/dist/execution/CommandValidator.js +129 -0
  18. package/dist/execution/CommandValidator.js.map +1 -0
  19. package/dist/execution/DependencyChecker.d.ts +47 -0
  20. package/dist/execution/DependencyChecker.d.ts.map +1 -0
  21. package/dist/execution/DependencyChecker.js +95 -0
  22. package/dist/execution/DependencyChecker.js.map +1 -0
  23. package/dist/execution/DynamicTaskPool.d.ts +118 -0
  24. package/dist/execution/DynamicTaskPool.d.ts.map +1 -0
  25. package/dist/execution/DynamicTaskPool.js +658 -0
  26. package/dist/execution/DynamicTaskPool.js.map +1 -0
  27. package/dist/execution/RecoveryManager.d.ts +89 -0
  28. package/dist/execution/RecoveryManager.d.ts.map +1 -0
  29. package/dist/execution/RecoveryManager.js +592 -0
  30. package/dist/execution/RecoveryManager.js.map +1 -0
  31. package/dist/execution/ResourceMonitor.d.ts +73 -0
  32. package/dist/execution/ResourceMonitor.d.ts.map +1 -0
  33. package/dist/execution/ResourceMonitor.js +148 -0
  34. package/dist/execution/ResourceMonitor.js.map +1 -0
  35. package/dist/execution/Scheduler.d.ts +36 -0
  36. package/dist/execution/Scheduler.d.ts.map +1 -0
  37. package/dist/execution/Scheduler.js +83 -0
  38. package/dist/execution/Scheduler.js.map +1 -0
  39. package/dist/execution/TreeExecutionAdapter.d.ts +45 -0
  40. package/dist/execution/TreeExecutionAdapter.d.ts.map +1 -0
  41. package/dist/execution/TreeExecutionAdapter.js +260 -0
  42. package/dist/execution/TreeExecutionAdapter.js.map +1 -0
  43. package/dist/index.d.ts +29 -0
  44. package/dist/index.d.ts.map +1 -0
  45. package/dist/index.js +25 -0
  46. package/dist/index.js.map +1 -0
  47. package/dist/tree.d.ts +13 -0
  48. package/dist/tree.d.ts.map +1 -0
  49. package/dist/tree.js +2510 -0
  50. package/dist/tree.js.map +1 -0
  51. package/dist/types/config.d.ts +174 -0
  52. package/dist/types/config.d.ts.map +1 -0
  53. package/dist/types/config.js +2 -0
  54. package/dist/types/config.js.map +1 -0
  55. package/dist/types/index.d.ts +6 -0
  56. package/dist/types/index.d.ts.map +1 -0
  57. package/dist/types/index.js +6 -0
  58. package/dist/types/index.js.map +1 -0
  59. package/dist/types/parallelExecution.d.ts +108 -0
  60. package/dist/types/parallelExecution.d.ts.map +1 -0
  61. package/dist/types/parallelExecution.js +2 -0
  62. package/dist/types/parallelExecution.js.map +1 -0
  63. package/dist/util/commandStubs.d.ts +22 -0
  64. package/dist/util/commandStubs.d.ts.map +1 -0
  65. package/dist/util/commandStubs.js +49 -0
  66. package/dist/util/commandStubs.js.map +1 -0
  67. package/dist/util/logger.d.ts +14 -0
  68. package/dist/util/logger.d.ts.map +1 -0
  69. package/dist/util/logger.js +30 -0
  70. package/dist/util/logger.js.map +1 -0
  71. package/dist/util/mutex.d.ts +38 -0
  72. package/dist/util/mutex.d.ts.map +1 -0
  73. package/dist/util/mutex.js +101 -0
  74. package/dist/util/mutex.js.map +1 -0
  75. package/dist/util/treeUtils.d.ts +46 -0
  76. package/dist/util/treeUtils.d.ts.map +1 -0
  77. package/dist/util/treeUtils.js +74 -0
  78. package/dist/util/treeUtils.js.map +1 -0
  79. package/guide/index.md +84 -0
  80. package/package.json +64 -0
package/dist/tree.js ADDED
@@ -0,0 +1,2510 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Tree command - Central dependency analysis and tree traversal for kodrdriv
4
+ *
5
+ * This command supports two execution modes:
6
+ * 1. Custom command mode: `kodrdriv tree --cmd "npm install"`
7
+ * 2. Built-in command mode: `kodrdriv tree commit`, `kodrdriv tree publish`, etc.
8
+ *
9
+ * Built-in commands shell out to separate kodrdriv processes to preserve
10
+ * individual project configurations while leveraging centralized dependency analysis.
11
+ *
12
+ * Supported built-in commands: commit, release, publish, link, unlink, development, branches, checkout, precommit
13
+ *
14
+ * Enhanced logging based on debug/verbose flags:
15
+ *
16
+ * --debug:
17
+ * - Shows all command output (stdout/stderr)
18
+ * - Shows detailed debug messages about dependency levels and execution flow
19
+ * - Shows package-by-package dependency analysis
20
+ * - Shows detailed level start/completion information
21
+ *
22
+ * --verbose:
23
+ * - Shows exactly what's happening without full command output
24
+ * - Shows level-by-level execution progress
25
+ * - Shows package grouping information
26
+ * - Shows basic execution flow
27
+ *
28
+ * No flags:
29
+ * - For commit and publish commands: Shows full output from child processes by default
30
+ * (including AI generation, self-reflection, and agentic interactions)
31
+ * - For other commands: Shows basic progress with numeric representation ([1/5] Package: Running...)
32
+ * - Shows level-by-level execution summaries
33
+ * - Shows completion status for each package and level
34
+ */
35
+ import path from 'path';
36
+ import fs from 'fs/promises';
37
+ import { exec } from 'child_process';
38
+ import { run, runSecure, safeJsonParse, validatePackageJson, getGitStatusSummary, getGloballyLinkedPackages, getLinkedDependencies, getLinkCompatibilityProblems } from '@grunnverk/git-tools';
39
+ import util from 'util';
40
+ import { getLogger } from './util/logger.js';
41
+ import { createStorage } from '@grunnverk/shared';
42
+ import { scanForPackageJsonFiles, parsePackageJson, buildDependencyGraph, topologicalSort, shouldExclude } from '@grunnverk/tree-core';
43
+ // Utility functions (extracted/inlined)
44
+ import { getOutputPath, PerformanceTimer, isInGitRepository, runGitWithLock, optimizePrecommitCommand, recordTestRun } from './util/treeUtils.js';
45
+ // Built-in commands - using stubs for now
46
+ // TODO: Refactor to use callbacks/dependency injection
47
+ import { Updates, Commit, Link, Unlink } from './util/commandStubs.js';
48
+ // Define constants locally
49
+ const DEFAULT_OUTPUT_DIRECTORY = 'output/kodrdriv';
50
// Module-level state shared across a tree run: versions published so far and the
// persisted execution context. Mutation is coordinated through globalStateMutex below.
let publishedVersions = [];
let executionContext = null;

/**
 * Reset the module-level state back to its initial values.
 * Exposed for tests only.
 */
export const __resetGlobalState = () => {
    executionContext = null;
    publishedVersions = [];
};
58
+ // Import shared mutex implementation
59
+ import { SimpleMutex } from './util/mutex.js';
60
+ const globalStateMutex = new SimpleMutex();
61
/**
 * Update inter-project dependency ranges in a package's package.json based on
 * versions published earlier in this tree run.
 *
 * Only packages that are part of the current build tree are touched, and
 * prerelease versions are never propagated. In dry-run mode nothing is written,
 * but the return value still reports whether changes would have been made.
 *
 * @returns {Promise<boolean>} true if any dependency was (or would be) changed.
 */
const updateInterProjectDependencies = async (packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun) => {
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    if (!await storage.exists(packageJsonPath)) {
        packageLogger.verbose('No package.json found, skipping dependency updates');
        return false;
    }
    let hasChanges = false;
    try {
        const rawContent = await storage.readFile(packageJsonPath, 'utf-8');
        const packageJson = validatePackageJson(safeJsonParse(rawContent, packageJsonPath), packageJsonPath);
        // Only these dependency sections are eligible for rewriting.
        const sections = ['dependencies', 'devDependencies', 'peerDependencies'];
        for (const { packageName, version } of publishedVersions) {
            // Ignore packages outside of the current build tree.
            if (!allPackageNames.has(packageName)) {
                continue;
            }
            // Prerelease versions (e.g. 1.0.0-beta.1) must not be propagated to consumers.
            if (version.includes('-')) {
                packageLogger.verbose(`Skipping prerelease version ${packageName}@${version} - not updating dependencies`);
                continue;
            }
            const newVersion = `^${version}`;
            for (const section of sections) {
                const deps = packageJson[section];
                if (!deps || !deps[packageName]) {
                    continue;
                }
                const oldVersion = deps[packageName];
                if (oldVersion === newVersion) {
                    continue;
                }
                if (isDryRun) {
                    packageLogger.info(`Would update ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
                } else {
                    packageLogger.info(`Updating ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
                    deps[packageName] = newVersion;
                }
                hasChanges = true;
            }
        }
        if (hasChanges && !isDryRun) {
            // Persist the rewritten package.json (2-space indent, trailing newline).
            await storage.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
            packageLogger.info('Inter-project dependencies updated successfully');
        }
    } catch (error) {
        packageLogger.warn(`Failed to update inter-project dependencies: ${error.message}`);
        return false;
    }
    return hasChanges;
};
118
// Detect scoped dependencies from package.json and run updates for them
/**
 * Run dependency updates for configured npm scopes before a publish.
 *
 * Scope selection:
 *  - `runConfig.publish.scopedDependencyUpdates` set and non-empty: use those scopes.
 *  - set but empty: feature explicitly disabled, return false.
 *  - unset: default to the package's own scope (e.g. "@fjell" for "@fjell/core");
 *    unscoped packages are skipped.
 *
 * Change detection compares the raw package.json content before and after the
 * Updates runs, so any modification made by Updates.execute counts.
 *
 * @param {string} packageDir - Directory containing the package.json to inspect.
 * @param {object} packageLogger - Package-scoped logger.
 * @param {boolean} isDryRun - Forwarded to the Updates command via dryRun.
 * @param {object} runConfig - Run configuration; publish.scopedDependencyUpdates is read.
 * @returns {Promise<boolean>} true if package.json content changed.
 */
const updateScopedDependencies = async (packageDir, packageLogger, isDryRun, runConfig) => {
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    if (!await storage.exists(packageJsonPath)) {
        packageLogger.verbose('No package.json found, skipping scoped dependency updates');
        return false;
    }
    try {
        // Read the package.json before updates (snapshot for change detection below)
        const beforeContent = await storage.readFile(packageJsonPath, 'utf-8');
        const parsed = safeJsonParse(beforeContent, packageJsonPath);
        const packageJson = validatePackageJson(parsed, packageJsonPath);
        // Determine which scopes to update
        let scopesToUpdate;
        // Check if scopedDependencyUpdates is configured
        const configuredScopes = runConfig.publish?.scopedDependencyUpdates;
        if (configuredScopes !== undefined) {
            // scopedDependencyUpdates is explicitly configured
            if (configuredScopes.length > 0) {
                // Use configured scopes
                scopesToUpdate = new Set(configuredScopes);
                packageLogger.verbose(`Using configured scopes: ${Array.from(scopesToUpdate).join(', ')}`);
            }
            else {
                // Empty array means explicitly disabled
                packageLogger.verbose('Scoped dependency updates explicitly disabled');
                return false;
            }
        }
        else {
            // Not configured - use default behavior (package's own scope)
            scopesToUpdate = new Set();
            if (packageJson.name && packageJson.name.startsWith('@')) {
                const packageScope = packageJson.name.split('/')[0]; // e.g., "@fjell/core" -> "@fjell"
                scopesToUpdate.add(packageScope);
                packageLogger.verbose(`No scopes configured, defaulting to package's own scope: ${packageScope}`);
            }
            else {
                packageLogger.verbose('Package is not scoped and no scopes configured, skipping scoped dependency updates');
                return false;
            }
        }
        if (scopesToUpdate.size === 0) {
            packageLogger.verbose('No scopes to update, skipping updates');
            return false;
        }
        // Run updates for each scope
        for (const scope of scopesToUpdate) {
            packageLogger.info(`🔄 Checking for ${scope} dependency updates before publish...`);
            try {
                // Create a config for the updates command with the scope
                const updatesConfig = {
                    ...runConfig,
                    dryRun: isDryRun,
                    updates: {
                        scope: scope
                    }
                };
                await Updates.execute(updatesConfig);
            }
            catch (error) {
                // Don't fail the publish if updates fails, just warn
                packageLogger.warn(`Failed to update ${scope} dependencies: ${error.message}`);
            }
        }
        // Check if package.json was modified (raw-content comparison against the snapshot)
        const afterContent = await storage.readFile(packageJsonPath, 'utf-8');
        const hasChanges = beforeContent !== afterContent;
        if (hasChanges) {
            packageLogger.info('✅ Scoped dependencies updated successfully');
        }
        else {
            packageLogger.info('No scoped dependency updates needed');
        }
        return hasChanges;
    }
    catch (error) {
        // Detection/update failures are non-fatal for the publish; report and continue.
        packageLogger.warn(`Failed to detect scoped dependencies: ${error.message}`);
        return false;
    }
};
200
/**
 * Resolve the on-disk location of the persisted execution-context file,
 * falling back to the default output directory when none is supplied.
 */
const getContextFilePath = (outputDirectory) =>
    getOutputPath(outputDirectory || DEFAULT_OUTPUT_DIRECTORY, '.kodrdriv-context');
205
/**
 * Persist the execution context to disk so an interrupted tree run can resume.
 * Dates are serialized as ISO strings; optional collections default to [].
 * Failures are logged as warnings and never abort the run.
 */
const saveExecutionContext = async (context, outputDirectory) => {
    const storage = createStorage(); // Silent storage for context operations
    const contextFilePath = getContextFilePath(outputDirectory);
    try {
        // The output directory may not exist yet on a fresh run.
        await storage.ensureDirectory(path.dirname(contextFilePath));
        // Date objects are not JSON-safe; convert them to ISO-8601 strings.
        const serializable = {
            ...context,
            startTime: context.startTime.toISOString(),
            lastUpdateTime: context.lastUpdateTime.toISOString(),
            publishedVersions: context.publishedVersions.map((entry) => ({
                ...entry,
                publishTime: entry.publishTime.toISOString()
            })),
            failedPackages: context.failedPackages || [],
            lastSuccessfulPackage: context.lastSuccessfulPackage,
            pendingDependencyUpdates: context.pendingDependencyUpdates || []
        };
        await storage.writeFile(contextFilePath, JSON.stringify(serializable, null, 2), 'utf-8');
    }
    catch (error) {
        // Don't fail the entire operation if context saving fails.
        getLogger().warn(`Warning: Failed to save execution context: ${error.message}`);
    }
};
233
/**
 * Load the persisted execution context from disk.
 * ISO date strings are revived into Date objects and optional collections are
 * defaulted to []. Returns null when the file is absent or unreadable.
 */
const loadExecutionContext = async (outputDirectory) => {
    const storage = createStorage(); // Silent storage for context operations
    const contextFilePath = getContextFilePath(outputDirectory);
    try {
        const present = await storage.exists(contextFilePath);
        if (!present) {
            return null;
        }
        const rawContent = await storage.readFile(contextFilePath, 'utf-8');
        const contextData = safeJsonParse(rawContent, contextFilePath);
        // Revive serialized ISO strings back into Date objects.
        return {
            ...contextData,
            startTime: new Date(contextData.startTime),
            lastUpdateTime: new Date(contextData.lastUpdateTime),
            publishedVersions: contextData.publishedVersions.map((entry) => ({
                ...entry,
                publishTime: new Date(entry.publishTime)
            })),
            failedPackages: contextData.failedPackages || [],
            lastSuccessfulPackage: contextData.lastSuccessfulPackage,
            pendingDependencyUpdates: contextData.pendingDependencyUpdates || []
        };
    }
    catch (error) {
        // A corrupt or unreadable context is treated the same as no context.
        getLogger().warn(`Warning: Failed to load execution context: ${error.message}`);
        return null;
    }
};
263
/**
 * Delete the persisted context file once a tree run completes.
 * Cleanup failures are logged as warnings and never abort the run.
 */
const cleanupContext = async (outputDirectory) => {
    const storage = createStorage(); // Silent storage for context operations
    const contextFilePath = getContextFilePath(outputDirectory);
    try {
        const present = await storage.exists(contextFilePath);
        if (present) {
            await storage.deleteFile(contextFilePath);
        }
    }
    catch (error) {
        // Don't fail if cleanup fails.
        getLogger().warn(`Warning: Failed to cleanup execution context: ${error.message}`);
    }
};
278
/**
 * Mark a package as completed in the persisted execution context.
 *
 * Previously this re-implemented the context-file parsing inline, diverging from
 * loadExecutionContext (no defaults for failedPackages/pendingDependencyUpdates)
 * and throwing when an older context file lacked completedPackages. It now reuses
 * the canonical loader and guards the completedPackages list.
 *
 * @param {string} packageName - Package to record as completed.
 * @param {string|undefined} outputDirectory - Output directory holding the context file.
 */
const promotePackageToCompleted = async (packageName, outputDirectory) => {
    try {
        // Reuse the canonical loader so date revival and defaulting stay consistent.
        const context = await loadExecutionContext(outputDirectory);
        if (!context) {
            // No context on disk (or unreadable) - nothing to promote into.
            return;
        }
        // Older context files may predate completedPackages; default it so the
        // promotion cannot throw and silently fail.
        if (!Array.isArray(context.completedPackages)) {
            context.completedPackages = [];
        }
        // Add package to completed list if not already there.
        if (!context.completedPackages.includes(packageName)) {
            context.completedPackages.push(packageName);
            context.lastUpdateTime = new Date();
            await saveExecutionContext(context, outputDirectory);
        }
    }
    catch (error) {
        // Promotion is best-effort; never abort the run over bookkeeping.
        const logger = getLogger();
        logger.warn(`Warning: Failed to promote package to completed: ${error.message}`);
    }
};
310
/**
 * Verify that every package in the tree declares the given npm scripts.
 *
 * @param {Map<string, {path: string}>} packages - Package name -> package info (path to its directory).
 * @param {string[]} scripts - Script names that must exist in each package.json "scripts" section.
 * @returns {Promise<{valid: boolean, missingScripts: Map<string, string[]>}>}
 *   valid is true when no package is missing any script; missingScripts maps
 *   package name to the list of scripts it lacks.
 */
const validateScripts = async (packages, scripts) => {
    const logger = getLogger();
    const missingScripts = new Map();
    const storage = createStorage();
    logger.debug(`Validating scripts: ${scripts.join(', ')}`);
    for (const [packageName, packageInfo] of packages) {
        const packageJsonPath = path.join(packageInfo.path, 'package.json');
        const missingForPackage = [];
        try {
            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
            const packageJson = safeJsonParse(packageJsonContent, packageJsonPath);
            const validated = validatePackageJson(packageJson, packageJsonPath);
            // Check if each required script exists.
            for (const script of scripts) {
                if (!validated.scripts || !validated.scripts[script]) {
                    missingForPackage.push(script);
                }
            }
            if (missingForPackage.length > 0) {
                missingScripts.set(packageName, missingForPackage);
                logger.debug(`Package ${packageName} missing scripts: ${missingForPackage.join(', ')}`);
            }
        }
        catch (error) {
            logger.debug(`Error reading package.json for ${packageName}: ${error.message}`);
            // If we can't read the package.json, assume all scripts are missing.
            // Copy the array so map entries never alias the caller's `scripts`
            // argument (the original stored the same mutable array for every
            // unreadable package).
            missingScripts.set(packageName, [...scripts]);
        }
    }
    const valid = missingScripts.size === 0;
    if (valid) {
        logger.info(`✅ All packages have the required scripts: ${scripts.join(', ')}`);
    }
    else {
        logger.error(`❌ Script validation failed. Missing scripts:`);
        for (const [packageName, missing] of missingScripts) {
            logger.error(` ${packageName}: ${missing.join(', ')}`);
        }
    }
    return { valid, missingScripts };
};
352
/**
 * Determine the version that was just published by inspecting git tags.
 *
 * After `kodrdriv publish`, the release version lives in the newly created git
 * tag while package.json already carries the next dev version, so tags are the
 * source of truth here.
 *
 * @returns {Promise<{packageName: string, version: string, publishTime: Date}|null>}
 *   Published-version record, or null when no tag exists or extraction fails.
 */
const extractPublishedVersion = async (packageDir, packageLogger) => {
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    try {
        // The package name still comes from package.json.
        const rawContent = await storage.readFile(packageJsonPath, 'utf-8');
        const packageJson = validatePackageJson(safeJsonParse(rawContent, packageJsonPath), packageJsonPath);
        // Sort tags by creation date (newest first) so we pick the tag the publish
        // just created rather than an older tag with a higher version number.
        const { stdout: tagOutput } = await run('git tag --sort=-creatordate', { cwd: packageDir });
        const [latestTag] = tagOutput.trim().split('\n').filter(Boolean);
        if (!latestTag) {
            packageLogger.warn('No git tags found after publish');
            return null;
        }
        // Normalize tag formats such as "v1.2.3", "working/v1.2.3", "main/v1.2.3":
        // drop any branch prefix, then a leading 'v'.
        let version = latestTag.includes('/')
            ? (latestTag.split('/').pop() || latestTag)
            : latestTag;
        if (version.startsWith('v')) {
            version = version.substring(1);
        }
        packageLogger.verbose(`Extracted published version from tag: ${latestTag} -> ${version}`);
        return {
            packageName: packageJson.name,
            version: version,
            publishTime: new Date()
        };
    }
    catch (error) {
        packageLogger.warn(`Failed to extract published version: ${error.message}`);
        return null;
    }
};
398
// Enhanced run function that can show output based on log level
/**
 * Execute a shell command, optionally mirroring its output to the logger and
 * appending a full transcript to a log file.
 *
 * @param {string} command - Shell command passed to child_process.exec.
 * @param {object} packageLogger - Package-scoped logger used for all messages.
 * @param {object} [options] - Extra exec options (cwd, env, ...); merged over { encoding: 'utf8' }.
 * @param {'none'|'minimal'|'full'} [showOutput] - Console verbosity: 'full' echoes
 *   stdout/stderr line by line, 'minimal' only announces the command, 'none' is silent on success.
 * @param {string} [logFilePath] - When set, the command, its stdout/stderr, and a
 *   completion or failure marker are appended to this file (directory created on demand).
 * @returns {Promise<{stdout: string, stderr: string}>} Captured output coerced to strings.
 * @throws Re-throws the exec error after logging and recording it, so callers see the failure.
 */
const runWithLogging = async (command, packageLogger, options = {}, showOutput = 'none', logFilePath) => {
    const execPromise = util.promisify(exec);
    // Ensure encoding is set to 'utf8' to get string output instead of Buffer
    const execOptions = { encoding: 'utf8', ...options };
    if (showOutput === 'full') {
        packageLogger.debug(`Executing command: ${command}`);
        // Use info level to show on console in debug mode
        packageLogger.info(`🔧 Running: ${command}`);
    }
    else if (showOutput === 'minimal') {
        packageLogger.verbose(`Running: ${command}`);
    }
    // Helper to write to log file; write failures are warned about, never thrown,
    // so logging problems cannot mask the command's own result.
    const writeToLogFile = async (content) => {
        if (!logFilePath)
            return;
        try {
            const logDir = path.dirname(logFilePath);
            await fs.mkdir(logDir, { recursive: true });
            await fs.appendFile(logFilePath, content + '\n', 'utf-8');
        }
        catch (err) {
            packageLogger.warn(`Failed to write to log file ${logFilePath}: ${err.message}`);
        }
    };
    // Write command to log file (timestamped header before execution starts)
    if (logFilePath) {
        const timestamp = new Date().toISOString();
        await writeToLogFile(`[${timestamp}] Executing: ${command}\n`);
    }
    try {
        const result = await execPromise(command, execOptions);
        if (showOutput === 'full') {
            // String() guards against Buffer results if encoding was overridden.
            const stdout = String(result.stdout);
            const stderr = String(result.stderr);
            if (stdout.trim()) {
                packageLogger.debug('STDOUT:');
                packageLogger.debug(stdout);
                // Show on console using info level for immediate feedback
                packageLogger.info(`📤 STDOUT:`);
                stdout.split('\n').forEach((line) => {
                    if (line.trim())
                        packageLogger.info(`${line}`);
                });
            }
            if (stderr.trim()) {
                packageLogger.debug('STDERR:');
                packageLogger.debug(stderr);
                // Show on console using info level for immediate feedback
                packageLogger.info(`⚠️ STDERR:`);
                stderr.split('\n').forEach((line) => {
                    if (line.trim())
                        packageLogger.info(`${line}`);
                });
            }
        }
        // Write output to log file
        if (logFilePath) {
            const stdout = String(result.stdout);
            const stderr = String(result.stderr);
            if (stdout.trim()) {
                await writeToLogFile(`\n=== STDOUT ===\n${stdout}`);
            }
            if (stderr.trim()) {
                await writeToLogFile(`\n=== STDERR ===\n${stderr}`);
            }
            await writeToLogFile(`\n[${new Date().toISOString()}] Command completed successfully\n`);
        }
        // Ensure result is properly typed as strings
        return {
            stdout: String(result.stdout),
            stderr: String(result.stderr)
        };
    }
    catch (error) {
        // Always show error message
        packageLogger.error(`Command failed: ${command}`);
        // Always show stderr on failure (contains important error details like coverage failures)
        if (error.stderr && error.stderr.trim()) {
            packageLogger.error(`❌ STDERR:`);
            error.stderr.split('\n').forEach((line) => {
                if (line.trim())
                    packageLogger.error(`${line}`);
            });
        }
        // Show stdout on failure if available (may contain error context)
        if (error.stdout && error.stdout.trim() && (showOutput === 'full' || showOutput === 'minimal')) {
            packageLogger.info(`📤 STDOUT:`);
            error.stdout.split('\n').forEach((line) => {
                if (line.trim())
                    packageLogger.info(`${line}`);
            });
        }
        // Show full output in debug/verbose mode
        if (showOutput === 'full' || showOutput === 'minimal') {
            if (error.stdout && error.stdout.trim() && showOutput === 'full') {
                packageLogger.debug('STDOUT:');
                packageLogger.debug(error.stdout);
            }
            if (error.stderr && error.stderr.trim() && showOutput === 'full') {
                packageLogger.debug('STDERR:');
                packageLogger.debug(error.stderr);
            }
        }
        // Write error output to log file (full transcript including stack trace)
        if (logFilePath) {
            await writeToLogFile(`\n[${new Date().toISOString()}] Command failed: ${error.message}`);
            if (error.stdout) {
                await writeToLogFile(`\n=== STDOUT ===\n${error.stdout}`);
            }
            if (error.stderr) {
                await writeToLogFile(`\n=== STDERR ===\n${error.stderr}`);
            }
            if (error.stack) {
                await writeToLogFile(`\n=== STACK TRACE ===\n${error.stack}`);
            }
        }
        throw error;
    }
};
519
/**
 * Create a package-scoped logger that prefixes every message with the package's
 * position in the run ("[n/total] name:") and, when dry-running, "DRY RUN: ".
 */
const createPackageLogger = (packageName, sequenceNumber, totalCount, isDryRun = false) => {
    const baseLogger = getLogger();
    const prefix = `${isDryRun ? 'DRY RUN: ' : ''}[${sequenceNumber}/${totalCount}] ${packageName}:`;
    // Each level delegates to the base logger with the prefix prepended.
    const wrap = (level) => (message, ...args) => baseLogger[level](`${prefix} ${message}`, ...args);
    return {
        info: wrap('info'),
        warn: wrap('warn'),
        error: wrap('error'),
        debug: wrap('debug'),
        verbose: wrap('verbose'),
        silly: wrap('silly'),
    };
};
533
/**
 * Build a human-readable, indented report for a command failure inside one package.
 * The error message keeps blank lines; STDERR/STDOUT sections drop them.
 * Unused positional parameters are kept for call-site compatibility.
 */
const formatSubprojectError = (packageName, error, _packageInfo, _position, _total) => {
    // Indent each line by one space; optionally drop blank lines first.
    const indent = (text, dropBlank) => text
        .split('\n')
        .filter((line) => (dropBlank ? line.trim() : true))
        .map((line) => ` ${line}`)
        .join('\n');
    const parts = [`❌ Command failed in package ${packageName}:`];
    if (error.message) {
        parts.push(indent(error.message, false));
    }
    if (error.stderr && error.stderr.trim()) {
        parts.push(' STDERR:');
        parts.push(indent(error.stderr, true));
    }
    if (error.stdout && error.stdout.trim()) {
        parts.push(' STDOUT:');
        parts.push(indent(error.stdout, true));
    }
    return parts.join('\n');
};
567
+ // Note: PackageInfo, DependencyGraph, scanForPackageJsonFiles, parsePackageJson,
568
+ // buildDependencyGraph, and topologicalSort are now imported from ../util/dependencyGraph
569
+ // Execute a single package and return execution result
570
+ export const executePackage = async (packageName, packageInfo, commandToRun, runConfig, isDryRun, index, total, allPackageNames, isBuiltInCommand = false) => {
571
+ const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
572
+ const packageDir = packageInfo.path;
573
+ const logger = getLogger();
574
+ // Create log file path for publish commands
575
+ let logFilePath;
576
+ if (isBuiltInCommand && commandToRun.includes('publish')) {
577
+ const outputDir = runConfig.outputDirectory || 'output/kodrdriv';
578
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-').replace('T', '_').split('.')[0];
579
+ const commandName = commandToRun.split(' ')[1]?.split(' ')[0] || 'command';
580
+ logFilePath = path.join(packageDir, outputDir, `${commandName}_${timestamp}.log`);
581
+ }
582
+ // Determine output level based on flags
583
+ // For publish and commit commands, default to full output to show AI progress and other details
584
+ // For other commands, require --verbose or --debug for output
585
+ const isPublishCommand = isBuiltInCommand && commandToRun.includes('publish');
586
+ const isCommitCommand = isBuiltInCommand && commandToRun.includes('commit');
587
+ let showOutput = (isPublishCommand || isCommitCommand) ? 'full' : 'none';
588
+ if (runConfig.debug) {
589
+ showOutput = 'full';
590
+ }
591
+ else if (runConfig.verbose) {
592
+ showOutput = 'minimal';
593
+ }
594
+ // Show package start info - always visible for progress tracking
595
+ if (runConfig.debug) {
596
+ packageLogger.debug('MULTI_PROJECT_START: Starting package execution | Package: %s | Index: %d/%d | Path: %s | Command: %s | Context: tree execution', packageName, index + 1, total, packageDir, commandToRun);
597
+ packageLogger.debug('MULTI_PROJECT_CONTEXT: Execution details | Directory: %s | Built-in Command: %s | Dry Run: %s | Output Level: %s', packageDir, isBuiltInCommand, isDryRun, showOutput);
598
+ // Show dependencies if available
599
+ if (packageInfo.dependencies && Array.isArray(packageInfo.dependencies) && packageInfo.dependencies.length > 0) {
600
+ packageLogger.debug('MULTI_PROJECT_DEPS: Package dependencies | Package: %s | Dependencies: [%s]', packageName, packageInfo.dependencies.join(', '));
601
+ }
602
+ }
603
+ else if (runConfig.verbose) {
604
+ packageLogger.verbose(`Starting execution in ${packageDir}`);
605
+ }
606
+ else {
607
+ // Basic progress info even without flags
608
+ logger.info(`[${index + 1}/${total}] ${packageName}: Running ${commandToRun}...`);
609
+ }
610
+ // Track if publish was skipped due to no changes
611
+ let publishWasSkipped = false;
612
+ // Track execution timing
613
+ const executionTimer = new PerformanceTimer(`Package ${packageName} execution`);
614
+ let executionDuration;
615
+ try {
616
+ if (isDryRun && !isBuiltInCommand) {
617
+ // Handle inter-project dependency updates for publish commands in dry run mode
618
+ if (isBuiltInCommand && commandToRun.includes('publish') && publishedVersions.length > 0) {
619
+ let mutexLocked = false;
620
+ try {
621
+ await globalStateMutex.lock();
622
+ mutexLocked = true;
623
+ packageLogger.info('Would check for inter-project dependency updates before publish...');
624
+ const versionSnapshot = [...publishedVersions]; // Create safe copy
625
+ globalStateMutex.unlock();
626
+ mutexLocked = false;
627
+ await updateInterProjectDependencies(packageDir, versionSnapshot, allPackageNames, packageLogger, isDryRun);
628
+ }
629
+ catch (error) {
630
+ if (mutexLocked) {
631
+ globalStateMutex.unlock();
632
+ }
633
+ throw error;
634
+ }
635
+ }
636
+ // Use main logger for the specific message tests expect
637
+ logger.info(`DRY RUN: Would execute: ${commandToRun}`);
638
+ if (runConfig.debug || runConfig.verbose) {
639
+ packageLogger.info(`In directory: ${packageDir}`);
640
+ }
641
+ }
642
+ else {
643
+ // Change to the package directory and run the command
644
+ const originalCwd = process.cwd();
645
+ try {
646
+ // Validate package directory exists before changing to it
647
+ try {
648
+ await fs.access(packageDir);
649
+ const stat = await fs.stat(packageDir);
650
+ if (!stat.isDirectory()) {
651
+ throw new Error(`Path is not a directory: ${packageDir}`);
652
+ }
653
+ }
654
+ catch (accessError) {
655
+ throw new Error(`Cannot access package directory: ${packageDir} - ${accessError.message}`);
656
+ }
657
+ process.chdir(packageDir);
658
+ if (runConfig.debug) {
659
+ packageLogger.debug(`Changed to directory: ${packageDir}`);
660
+ }
661
+ // Handle dependency updates for publish commands before executing (skip during dry run)
662
+ // Wrap in git lock to prevent parallel packages from conflicting with npm install and git operations
663
+ if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
664
+ await runGitWithLock(async () => {
665
+ let hasAnyUpdates = false;
666
+ // First, update all scoped dependencies from npm registry
667
+ const hasScopedUpdates = await updateScopedDependencies(packageDir, packageLogger, isDryRun, runConfig);
668
+ hasAnyUpdates = hasAnyUpdates || hasScopedUpdates;
669
+ // Then update inter-project dependencies based on previously published packages
670
+ if (publishedVersions.length > 0) {
671
+ packageLogger.info('Updating inter-project dependencies based on previously published packages...');
672
+ const hasInterProjectUpdates = await updateInterProjectDependencies(packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun);
673
+ hasAnyUpdates = hasAnyUpdates || hasInterProjectUpdates;
674
+ }
675
+ // If either type of update occurred, commit the changes
676
+ if (hasAnyUpdates) {
677
+ // Commit the dependency updates using kodrdriv commit
678
+ packageLogger.info('Committing dependency updates...');
679
+ packageLogger.info('⏱️ This step may take a few minutes as it generates a commit message using AI...');
680
+ // Add timeout wrapper around commit execution
681
+ const commitTimeoutMs = 300000; // 5 minutes
682
+ const commitPromise = Commit.execute({ ...runConfig, dryRun: false });
683
+ const timeoutPromise = new Promise((_, reject) => {
684
+ setTimeout(() => reject(new Error(`Commit operation timed out after ${commitTimeoutMs / 1000} seconds`)), commitTimeoutMs);
685
+ });
686
+ // Add progress indicator
687
+ let progressInterval = null;
688
+ try {
689
+ // Start progress indicator
690
+ progressInterval = setInterval(() => {
691
+ packageLogger.info('⏳ Still generating commit message... (this can take 1-3 minutes)');
692
+ }, 30000); // Every 30 seconds
693
+ await Promise.race([commitPromise, timeoutPromise]);
694
+ packageLogger.info('✅ Dependency updates committed successfully');
695
+ }
696
+ catch (commitError) {
697
+ if (commitError.message.includes('timed out')) {
698
+ packageLogger.error(`❌ Commit operation timed out after ${commitTimeoutMs / 1000} seconds`);
699
+ packageLogger.error('This usually indicates an issue with the AI service or very large changes');
700
+ packageLogger.error('You may need to manually commit the dependency updates');
701
+ }
702
+ else {
703
+ packageLogger.warn(`Failed to commit dependency updates: ${commitError.message}`);
704
+ }
705
+ // Continue with publish anyway - the updates are still in place
706
+ }
707
+ finally {
708
+ if (progressInterval) {
709
+ clearInterval(progressInterval);
710
+ }
711
+ }
712
+ }
713
+ }, `${packageName}: dependency updates`);
714
+ }
715
+ // Optimize precommit commands for custom commands (not built-in)
716
+ let effectiveCommandToRun = commandToRun;
717
+ let optimizationInfo = null;
718
+ if (!isBuiltInCommand && !isDryRun) {
719
+ const isPrecommitCommand = commandToRun.includes('precommit') || commandToRun.includes('pre-commit');
720
+ if (isPrecommitCommand) {
721
+ try {
722
+ const optimization = await optimizePrecommitCommand(packageDir, commandToRun);
723
+ effectiveCommandToRun = optimization.optimizedCommand;
724
+ optimizationInfo = { skipped: optimization.skipped, reasons: optimization.reasons };
725
+ if (optimization.skipped.clean || optimization.skipped.test) {
726
+ const skippedParts = [];
727
+ if (optimization.skipped.clean) {
728
+ skippedParts.push(`clean (${optimization.reasons.clean})`);
729
+ }
730
+ if (optimization.skipped.test) {
731
+ skippedParts.push(`test (${optimization.reasons.test})`);
732
+ }
733
+ packageLogger.info(`⚡ Optimized: Skipped ${skippedParts.join(', ')}`);
734
+ if (runConfig.verbose || runConfig.debug) {
735
+ packageLogger.info(` Original: ${commandToRun}`);
736
+ packageLogger.info(` Optimized: ${effectiveCommandToRun}`);
737
+ }
738
+ }
739
+ }
740
+ catch (error) {
741
+ // If optimization fails, fall back to original command
742
+ logger.debug(`Precommit optimization failed for ${packageName}: ${error.message}`);
743
+ }
744
+ }
745
+ }
746
+ if (runConfig.debug || runConfig.verbose) {
747
+ if (isBuiltInCommand) {
748
+ packageLogger.info(`Executing built-in command: ${commandToRun}`);
749
+ }
750
+ else {
751
+ packageLogger.info(`Executing command: ${effectiveCommandToRun}`);
752
+ }
753
+ }
754
+ // For built-in commands, shell out to a separate kodrdriv process
755
+ // This preserves individual project configurations
756
+ if (isBuiltInCommand) {
757
+ // Extract the command name from "kodrdriv <command> [args...]"
758
+ // Split by space and take the second element (after "kodrdriv")
759
+ const commandParts = commandToRun.replace(/^kodrdriv\s+/, '').split(/\s+/);
760
+ const builtInCommandName = commandParts[0];
761
+ if (runConfig.debug) {
762
+ packageLogger.debug(`Shelling out to separate kodrdriv process for ${builtInCommandName} command`);
763
+ }
764
+ // Add progress indication for publish commands
765
+ if (builtInCommandName === 'publish') {
766
+ packageLogger.info('🚀 Starting publish process...');
767
+ packageLogger.info('⏱️ This may take several minutes (AI processing, PR creation, etc.)');
768
+ }
769
+ // Ensure dry-run propagates to subprocess even during overall dry-run mode
770
+ let effectiveCommand = runConfig.dryRun && !commandToRun.includes('--dry-run')
771
+ ? `${commandToRun} --dry-run`
772
+ : commandToRun;
773
+ // For commit commands, ensure --sendit is used to avoid interactive prompts
774
+ // This prevents hanging when running via tree command
775
+ if (builtInCommandName === 'commit' && !effectiveCommand.includes('--sendit') && !runConfig.dryRun) {
776
+ effectiveCommand = `${effectiveCommand} --sendit`;
777
+ packageLogger.info('💡 Auto-adding --sendit flag to avoid interactive prompts in tree mode');
778
+ }
779
+ // Set timeout based on command type
780
+ let commandTimeoutMs;
781
+ if (builtInCommandName === 'publish') {
782
+ commandTimeoutMs = 1800000; // 30 minutes for publish commands
783
+ packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for publish command`);
784
+ }
785
+ else if (builtInCommandName === 'commit') {
786
+ commandTimeoutMs = 600000; // 10 minutes for commit commands (AI processing can take time)
787
+ packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for commit command`);
788
+ }
789
+ else {
790
+ commandTimeoutMs = 300000; // 5 minutes default for other commands
791
+ }
792
+ const commandPromise = runWithLogging(effectiveCommand, packageLogger, {}, showOutput, logFilePath);
793
+ const commandTimeoutPromise = new Promise((_, reject) => {
794
+ setTimeout(() => reject(new Error(`Command timed out after ${commandTimeoutMs / 60000} minutes`)), commandTimeoutMs);
795
+ });
796
+ try {
797
+ const startTime = Date.now();
798
+ const { stdout, stderr } = await Promise.race([commandPromise, commandTimeoutPromise]);
799
+ executionDuration = Date.now() - startTime;
800
+ // Detect explicit skip marker from publish to avoid propagating versions
801
+ // Check both stdout (where we now write it) and stderr (winston logger output, for backward compat)
802
+ if (builtInCommandName === 'publish' &&
803
+ ((stdout && stdout.includes('KODRDRIV_PUBLISH_SKIPPED')) ||
804
+ (stderr && stderr.includes('KODRDRIV_PUBLISH_SKIPPED')))) {
805
+ packageLogger.info('Publish skipped for this package; will not record or propagate a version.');
806
+ publishWasSkipped = true;
807
+ }
808
+ }
809
+ catch (error) {
810
+ if (error.message.includes('timed out')) {
811
+ packageLogger.error(`❌ ${builtInCommandName} command timed out after ${commandTimeoutMs / 60000} minutes`);
812
+ packageLogger.error('This usually indicates the command is stuck waiting for user input or an external service');
813
+ throw error;
814
+ }
815
+ throw error;
816
+ }
817
+ }
818
+ else {
819
+ // For custom commands, use the existing logic
820
+ const startTime = Date.now();
821
+ await runWithLogging(effectiveCommandToRun, packageLogger, {}, showOutput, logFilePath);
822
+ executionDuration = Date.now() - startTime;
823
+ }
824
+ // Track published version after successful publish (skip during dry run)
825
+ if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
826
+ // If publish was skipped, do not record a version
827
+ if (publishWasSkipped) {
828
+ packageLogger.verbose('Skipping version tracking due to earlier skip.');
829
+ }
830
+ else {
831
+ // Only record a published version if a new tag exists (avoid recording for skipped publishes)
832
+ const publishedVersion = await extractPublishedVersion(packageDir, packageLogger);
833
+ if (publishedVersion) {
834
+ let mutexLocked = false;
835
+ try {
836
+ await globalStateMutex.lock();
837
+ mutexLocked = true;
838
+ publishedVersions.push(publishedVersion);
839
+ packageLogger.info(`Tracked published version: ${publishedVersion.packageName}@${publishedVersion.version}`);
840
+ globalStateMutex.unlock();
841
+ mutexLocked = false;
842
+ }
843
+ catch (error) {
844
+ if (mutexLocked) {
845
+ globalStateMutex.unlock();
846
+ }
847
+ throw error;
848
+ }
849
+ }
850
+ }
851
+ }
852
+ // Record test run if tests were executed (not skipped)
853
+ if (!isDryRun && !isBuiltInCommand && effectiveCommandToRun.includes('test') &&
854
+ (!optimizationInfo || !optimizationInfo.skipped.test)) {
855
+ try {
856
+ await recordTestRun(packageDir);
857
+ }
858
+ catch (error) {
859
+ logger.debug(`Failed to record test run for ${packageName}: ${error.message}`);
860
+ }
861
+ }
862
+ // End timing and show duration
863
+ if (executionDuration !== undefined) {
864
+ executionTimer.end();
865
+ const seconds = (executionDuration / 1000).toFixed(1);
866
+ if (runConfig.debug || runConfig.verbose) {
867
+ packageLogger.info(`⏱️ Execution time: ${seconds}s`);
868
+ }
869
+ else if (!isPublishCommand && !isCommitCommand) {
870
+ // Show timing in completion message (publish/commit commands have their own completion message)
871
+ logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed (${seconds}s)`);
872
+ }
873
+ }
874
+ else {
875
+ executionTimer.end();
876
+ if (runConfig.debug || runConfig.verbose) {
877
+ packageLogger.info(`Command completed successfully`);
878
+ }
879
+ else if (!isPublishCommand && !isCommitCommand) {
880
+ // Basic completion info (publish/commit commands have their own completion message)
881
+ logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed`);
882
+ }
883
+ }
884
+ }
885
+ finally {
886
+ // Safely restore working directory
887
+ try {
888
+ // Validate original directory still exists before changing back
889
+ const fs = await import('fs/promises');
890
+ await fs.access(originalCwd);
891
+ process.chdir(originalCwd);
892
+ if (runConfig.debug) {
893
+ packageLogger.debug(`Restored working directory to: ${originalCwd}`);
894
+ }
895
+ }
896
+ catch (restoreError) {
897
+ // If we can't restore to original directory, at least log the issue
898
+ packageLogger.error(`Failed to restore working directory to ${originalCwd}: ${restoreError.message}`);
899
+ packageLogger.error(`Current working directory is now: ${process.cwd()}`);
900
+ // Don't throw here to avoid masking the original error
901
+ }
902
+ }
903
+ }
904
+ // Show completion status (for publish/commit commands, this supplements the timing message above)
905
+ if (runConfig.debug || runConfig.verbose) {
906
+ if (publishWasSkipped) {
907
+ packageLogger.info(`⊘ Skipped (no code changes)`);
908
+ }
909
+ else {
910
+ packageLogger.info(`✅ Completed successfully`);
911
+ }
912
+ }
913
+ else if (isPublishCommand || isCommitCommand) {
914
+ // For publish/commit commands, always show completion even without verbose
915
+ // Include timing if available
916
+ const timeStr = executionDuration !== undefined ? ` (${(executionDuration / 1000).toFixed(1)}s)` : '';
917
+ if (publishWasSkipped) {
918
+ logger.info(`[${index + 1}/${total}] ${packageName}: ⊘ Skipped (no code changes)`);
919
+ }
920
+ else {
921
+ logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed${timeStr}`);
922
+ }
923
+ }
924
+ // Ensure timing is recorded even if there was an early return
925
+ if (executionDuration === undefined) {
926
+ executionDuration = executionTimer.end();
927
+ }
928
+ return { success: true, skippedNoChanges: publishWasSkipped, logFile: logFilePath };
929
+ }
930
+ catch (error) {
931
+ // Record timing even on error
932
+ if (executionDuration === undefined) {
933
+ executionDuration = executionTimer.end();
934
+ const seconds = (executionDuration / 1000).toFixed(1);
935
+ if (runConfig.debug || runConfig.verbose) {
936
+ packageLogger.error(`⏱️ Execution time before failure: ${seconds}s`);
937
+ }
938
+ }
939
+ if (runConfig.debug || runConfig.verbose) {
940
+ packageLogger.error(`❌ Execution failed: ${error.message}`);
941
+ }
942
+ else {
943
+ logger.error(`[${index + 1}/${total}] ${packageName}: ❌ Failed - ${error.message}`);
944
+ }
945
+ // Always show stderr if available (contains important error details)
946
+ // Note: runWithLogging already logs stderr, but we show it here too for visibility
947
+ // when error is caught at this level (e.g., from timeout wrapper)
948
+ if (error.stderr && error.stderr.trim() && !runConfig.debug && !runConfig.verbose) {
949
+ // Extract key error lines from stderr (coverage failures, test failures, etc.)
950
+ const stderrLines = error.stderr.split('\n').filter((line) => {
951
+ const trimmed = line.trim();
952
+ return trimmed && (trimmed.includes('ERROR:') ||
953
+ trimmed.includes('FAIL') ||
954
+ trimmed.includes('coverage') ||
955
+ trimmed.includes('threshold') ||
956
+ trimmed.includes('fatal:') ||
957
+ trimmed.startsWith('❌'));
958
+ });
959
+ if (stderrLines.length > 0) {
960
+ logger.error(` Error details:`);
961
+ stderrLines.slice(0, 10).forEach((line) => {
962
+ logger.error(` ${line.trim()}`);
963
+ });
964
+ if (stderrLines.length > 10) {
965
+ logger.error(` ... and ${stderrLines.length - 10} more error lines (use --verbose to see full output)`);
966
+ }
967
+ }
968
+ }
969
+ // Check if this is a timeout error
970
+ const errorMessage = error.message?.toLowerCase() || '';
971
+ const isTimeoutError = errorMessage && (errorMessage.includes('timeout waiting for pr') ||
972
+ errorMessage.includes('timeout waiting for release workflows') ||
973
+ errorMessage.includes('timeout reached') ||
974
+ errorMessage.includes('timeout') ||
975
+ errorMessage.includes('timed out') ||
976
+ errorMessage.includes('timed_out'));
977
+ return { success: false, error, isTimeoutError, logFile: logFilePath };
978
+ }
979
+ };
980
/**
 * Generate a dry-run preview showing what would happen without executing.
 *
 * Packages are bucketed into dependency levels (a package's level is one
 * greater than the deepest level among its dependencies), then each level is
 * rendered with a per-package status line. For publish commands the status is
 * derived from `git diff --name-only origin/main...HEAD` in the package
 * directory; when git state cannot be inspected the package is assumed to
 * have changes.
 *
 * @param {object} dependencyGraph - Graph with `edges` (Map name -> Set of dep names) and `packages` (Map name -> info with `path`).
 * @param {string[]} buildOrder - Topologically ordered package names.
 * @param {string} command - The command that would be run (e.g. contains 'publish').
 * @param {object} runConfig - Run configuration; `tree.maxConcurrency` is included in the summary when set.
 * @returns {Promise<string>} Multi-line preview text.
 */
const generateDryRunPreview = async (dependencyGraph, buildOrder, command, runConfig) => {
    const out = [];
    const emit = (line = '') => out.push(line);
    emit();
    emit('🔍 DRY RUN MODE - No changes will be made');
    emit();
    emit('Build order determined:');
    emit();
    // Assign each package a dependency level: one deeper than its deepest
    // dependency. Unknown dependencies default to level 0, so roots land at 0.
    const levelOf = new Map();
    const levels = [];
    for (const pkg of buildOrder) {
        const deps = dependencyGraph.edges.get(pkg) || new Set();
        let deepest = -1;
        for (const dep of deps) {
            deepest = Math.max(deepest, levelOf.get(dep) ?? 0);
        }
        const lvl = deepest + 1;
        levelOf.set(pkg, lvl);
        (levels[lvl] ??= []).push(pkg);
    }
    // `command` never changes per package, so decide once up front.
    const isPublish = command.includes('publish');
    // Render each level group with a status per package.
    for (let levelIndex = 0; levelIndex < levels.length; levelIndex++) {
        const group = levels[levelIndex];
        emit(`Level ${levelIndex + 1}: (${group.length} package${group.length === 1 ? '' : 's'})`);
        for (const pkg of group) {
            const pkgInfo = dependencyGraph.packages.get(pkg);
            if (!pkgInfo) {
                continue;
            }
            let status = '📝 Has changes, will execute';
            if (isPublish) {
                try {
                    // Inspect git to see whether anything beyond version-bump
                    // files changed relative to origin/main.
                    const { stdout } = await runSecure('git', ['diff', '--name-only', 'origin/main...HEAD'], { cwd: pkgInfo.path });
                    const changedFiles = stdout.split('\n').filter(Boolean);
                    const nonVersionFiles = changedFiles.filter(f => f !== 'package.json' && f !== 'package-lock.json');
                    if (changedFiles.length === 0) {
                        status = '⊘ No changes, will skip';
                    }
                    else if (nonVersionFiles.length === 0) {
                        status = '⊘ Only version bump, will skip';
                    }
                    else {
                        status = `📝 Has changes (${nonVersionFiles.length} files), will publish`;
                    }
                }
                catch {
                    // Git state unavailable — assume the package will execute.
                    status = '📝 Will execute';
                }
            }
            emit(` ${pkg}`);
            emit(` Status: ${status}`);
            emit(` Path: ${pkgInfo.path}`);
        }
        emit();
    }
    emit('Summary:');
    emit(` Total packages: ${buildOrder.length}`);
    emit(` Dependency levels: ${levels.length}`);
    emit(` Command: ${command}`);
    if (runConfig.tree?.maxConcurrency) {
        emit(` Max concurrency: ${runConfig.tree.maxConcurrency}`);
    }
    emit();
    emit('To execute for real, run the same command without --dry-run');
    emit();
    return out.join('\n');
};
1057
// Add a simple status check function
/**
 * Check for other running kodrdriv processes by scanning `ps aux` output and
 * log what is found. The current status invocation, grep, and the `ps aux`
 * call itself are filtered out of the results. Failures to run `ps` are
 * logged as a warning and never thrown — this is a best-effort diagnostic.
 *
 * @returns {Promise<void>} Resolves once the status has been logged.
 */
const checkTreePublishStatus = async () => {
    const logger = getLogger();
    try {
        // List all processes; runSecure passes argv directly (no shell).
        const { stdout } = await runSecure('ps', ['aux'], {});
        const kodrdrivProcesses = stdout.split('\n').filter((line) => line.includes('kodrdriv') &&
            !line.includes('grep') &&
            !line.includes('ps aux') &&
            !line.includes('tree --status') // Exclude the current status command
        );
        if (kodrdrivProcesses.length > 0) {
            logger.info('🔍 Found running kodrdriv processes:');
            // Parameter renamed from `process` to `procLine`: the old name
            // shadowed Node's global `process` object inside the callback.
            kodrdrivProcesses.forEach((procLine) => {
                // `ps aux` columns: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND...
                // so parts[1] is the PID and everything from index 10 on is the command.
                const parts = procLine.trim().split(/\s+/);
                const pid = parts[1];
                const command = parts.slice(10).join(' ');
                logger.info(` PID ${pid}: ${command}`);
            });
        }
        else {
            logger.info('No kodrdriv processes currently running');
        }
    }
    catch (error) {
        // Best-effort: surface the problem but do not fail the status command.
        logger.warn('Could not check process status:', error);
    }
};
1085
+ export const execute = async (runConfig) => {
1086
+ const logger = getLogger();
1087
+ const isDryRun = runConfig.dryRun || false;
1088
+ const isContinue = runConfig.tree?.continue || false;
1089
+ const promotePackage = runConfig.tree?.promote;
1090
+ // Debug logging
1091
+ logger.debug('Tree config:', JSON.stringify(runConfig.tree, null, 2));
1092
+ logger.debug('Status flag:', runConfig.tree?.status);
1093
+ logger.debug('Full runConfig:', JSON.stringify(runConfig, null, 2));
1094
+ // Handle status check
1095
+ if (runConfig.tree?.status) {
1096
+ logger.info('🔍 Checking for running kodrdriv processes...');
1097
+ await checkTreePublishStatus();
1098
+ return 'Status check completed';
1099
+ }
1100
+ // Handle promote mode
1101
+ if (promotePackage) {
1102
+ logger.info(`Promoting package '${promotePackage}' to completed status...`);
1103
+ await promotePackageToCompleted(promotePackage, runConfig.outputDirectory);
1104
+ logger.info(`✅ Package '${promotePackage}' has been marked as completed.`);
1105
+ logger.info('You can now run the tree command with --continue to resume from the next package.');
1106
+ return `Package '${promotePackage}' promoted to completed status.`;
1107
+ }
1108
+ // Handle audit-branches command
1109
+ if (runConfig.tree?.auditBranches) {
1110
+ logger.info('🔍 Auditing branch state across all packages...');
1111
+ const directories = runConfig.tree?.directories || [process.cwd()];
1112
+ const excludedPatterns = runConfig.tree?.exclude || [];
1113
+ let allPackageJsonPaths = [];
1114
+ for (const targetDirectory of directories) {
1115
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1116
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1117
+ }
1118
+ if (allPackageJsonPaths.length === 0) {
1119
+ return 'No packages found';
1120
+ }
1121
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1122
+ const packages = Array.from(dependencyGraph.packages.values()).map(pkg => ({
1123
+ name: pkg.name,
1124
+ path: pkg.path,
1125
+ }));
1126
+ // Branch state utilities - stubbed for now
1127
+ // TODO: Extract or implement branch state auditing
1128
+ const auditBranchState = async (_packages, _config, _options) => ({
1129
+ packages: [],
1130
+ issues: [],
1131
+ issuesFound: 0,
1132
+ goodPackages: 0
1133
+ });
1134
+ const formatAuditResults = (_results) => 'Branch audit not implemented';
1135
+ const { getRemoteDefaultBranch } = await import('@grunnverk/git-tools');
1136
+ // For publish workflows, check branch consistency, merge conflicts, and existing PRs
1137
+ // Don't pass an expected branch - let the audit find the most common branch
1138
+ let targetBranch = runConfig.publish?.targetBranch;
1139
+ if (!targetBranch) {
1140
+ // Try to detect default branch from the first package that is a git repo
1141
+ const firstGitPkg = packages.find(pkg => isInGitRepository(pkg.path));
1142
+ if (firstGitPkg) {
1143
+ try {
1144
+ // Cast to any to avoid type mismatch with node_modules version
1145
+ targetBranch = await getRemoteDefaultBranch(firstGitPkg.path) || 'main';
1146
+ }
1147
+ catch {
1148
+ targetBranch = 'main';
1149
+ }
1150
+ }
1151
+ else {
1152
+ targetBranch = 'main';
1153
+ }
1154
+ }
1155
+ logger.info(`Checking for merge conflicts with '${targetBranch}' and existing pull requests...`);
1156
+ const auditResult = await auditBranchState(packages, undefined, {
1157
+ targetBranch,
1158
+ checkPR: true,
1159
+ checkConflicts: true,
1160
+ concurrency: runConfig.tree?.maxConcurrency || 10,
1161
+ });
1162
+ const formatted = formatAuditResults(auditResult);
1163
+ logger.info('\n' + formatted);
1164
+ if (auditResult.issuesFound > 0) {
1165
+ logger.warn(`\n⚠️ Found issues in ${auditResult.issuesFound} package(s). Review the fixes above.`);
1166
+ return `Branch audit complete: ${auditResult.issuesFound} package(s) need attention`;
1167
+ }
1168
+ logger.info(`\n✅ All ${auditResult.goodPackages} package(s) are in good state!`);
1169
+ return `Branch audit complete: All packages OK`;
1170
+ }
1171
+ // Handle parallel execution recovery commands
1172
+ const { loadRecoveryManager } = await import('./execution/RecoveryManager.js');
1173
+ // Handle status-parallel command
1174
+ if (runConfig.tree?.statusParallel) {
1175
+ logger.info('📊 Checking parallel execution status...');
1176
+ // Need to build dependency graph first
1177
+ const directories = runConfig.tree?.directories || [process.cwd()];
1178
+ const excludedPatterns = runConfig.tree?.exclude || [];
1179
+ let allPackageJsonPaths = [];
1180
+ for (const targetDirectory of directories) {
1181
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1182
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1183
+ }
1184
+ if (allPackageJsonPaths.length === 0) {
1185
+ return 'No packages found';
1186
+ }
1187
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1188
+ const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
1189
+ if (!recoveryManager) {
1190
+ logger.info('No parallel execution checkpoint found');
1191
+ return 'No active parallel execution found';
1192
+ }
1193
+ const status = await recoveryManager.showStatus();
1194
+ logger.info('\n' + status);
1195
+ return status;
1196
+ }
1197
+ // Handle validate-state command
1198
+ if (runConfig.tree?.validateState) {
1199
+ logger.info('🔍 Validating checkpoint state...');
1200
+ const directories = runConfig.tree?.directories || [process.cwd()];
1201
+ const excludedPatterns = runConfig.tree?.exclude || [];
1202
+ let allPackageJsonPaths = [];
1203
+ for (const targetDirectory of directories) {
1204
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1205
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1206
+ }
1207
+ if (allPackageJsonPaths.length === 0) {
1208
+ return 'No packages found';
1209
+ }
1210
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1211
+ const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
1212
+ if (!recoveryManager) {
1213
+ logger.info('No checkpoint found to validate');
1214
+ return 'No checkpoint found';
1215
+ }
1216
+ const validation = recoveryManager.validateState();
1217
+ if (validation.valid) {
1218
+ logger.info('✅ Checkpoint state is valid');
1219
+ }
1220
+ else {
1221
+ logger.error('❌ Checkpoint state has issues:');
1222
+ for (const issue of validation.issues) {
1223
+ logger.error(` • ${issue}`);
1224
+ }
1225
+ }
1226
+ if (validation.warnings.length > 0) {
1227
+ logger.warn('⚠️ Warnings:');
1228
+ for (const warning of validation.warnings) {
1229
+ logger.warn(` • ${warning}`);
1230
+ }
1231
+ }
1232
+ return validation.valid ? 'Checkpoint is valid' : 'Checkpoint has issues';
1233
+ }
1234
+ // Handle parallel execution recovery options (must happen before main execution)
1235
+ const hasRecoveryOptions = runConfig.tree?.markCompleted || runConfig.tree?.skipPackages ||
1236
+ runConfig.tree?.retryFailed || runConfig.tree?.skipFailed ||
1237
+ runConfig.tree?.resetPackage;
1238
+ if (hasRecoveryOptions && runConfig.tree) {
1239
+ logger.info('🔧 Applying recovery options...');
1240
+ // Build dependency graph
1241
+ const directories = runConfig.tree.directories || [process.cwd()];
1242
+ const excludedPatterns = runConfig.tree.exclude || [];
1243
+ let allPackageJsonPaths = [];
1244
+ for (const targetDirectory of directories) {
1245
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1246
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1247
+ }
1248
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1249
+ const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
1250
+ if (!recoveryManager) {
1251
+ logger.error('No checkpoint found for recovery');
1252
+ throw new Error('No checkpoint found. Cannot apply recovery options without an existing checkpoint.');
1253
+ }
1254
+ await recoveryManager.applyRecoveryOptions({
1255
+ markCompleted: runConfig.tree.markCompleted,
1256
+ skipPackages: runConfig.tree.skipPackages,
1257
+ retryFailed: runConfig.tree.retryFailed,
1258
+ skipFailed: runConfig.tree.skipFailed,
1259
+ resetPackage: runConfig.tree.resetPackage,
1260
+ maxRetries: runConfig.tree.retry?.maxAttempts
1261
+ });
1262
+ logger.info('✅ Recovery options applied');
1263
+ // If not also continuing, just return
1264
+ if (!isContinue) {
1265
+ return 'Recovery options applied. Use --continue to resume execution.';
1266
+ }
1267
+ }
1268
+ // Handle continue mode
1269
+ if (isContinue) {
1270
+ // For parallel execution, the checkpoint is managed by DynamicTaskPool/CheckpointManager
1271
+ // For sequential execution, we use the legacy context file
1272
+ const isParallelMode = runConfig.tree?.parallel;
1273
+ if (!isParallelMode) {
1274
+ // Sequential execution: load legacy context
1275
+ const savedContext = await loadExecutionContext(runConfig.outputDirectory);
1276
+ if (savedContext) {
1277
+ logger.info('Continuing previous tree execution...');
1278
+ logger.info(`Original command: ${savedContext.command}`);
1279
+ logger.info(`Started: ${savedContext.startTime.toISOString()}`);
1280
+ logger.info(`Previously completed: ${savedContext.completedPackages.length}/${savedContext.buildOrder.length} packages`);
1281
+ // Restore state safely
1282
+ let mutexLocked = false;
1283
+ try {
1284
+ await globalStateMutex.lock();
1285
+ mutexLocked = true;
1286
+ publishedVersions = savedContext.publishedVersions;
1287
+ globalStateMutex.unlock();
1288
+ mutexLocked = false;
1289
+ }
1290
+ catch (error) {
1291
+ if (mutexLocked) {
1292
+ globalStateMutex.unlock();
1293
+ }
1294
+ throw error;
1295
+ }
1296
+ executionContext = savedContext;
1297
+ // Use original config but allow some overrides (like dry run)
1298
+ runConfig = {
1299
+ ...savedContext.originalConfig,
1300
+ dryRun: runConfig.dryRun, // Allow dry run override
1301
+ outputDirectory: runConfig.outputDirectory || savedContext.originalConfig.outputDirectory
1302
+ };
1303
+ }
1304
+ else {
1305
+ logger.warn('No previous execution context found. Starting new execution...');
1306
+ }
1307
+ }
1308
+ else {
1309
+ // Parallel execution: checkpoint is managed by DynamicTaskPool
1310
+ // Just log that we're continuing - the actual checkpoint loading happens in DynamicTaskPool
1311
+ logger.info('Continuing previous parallel execution...');
1312
+ }
1313
+ }
1314
+ else {
1315
+ // Reset published versions tracking for new tree execution
1316
+ publishedVersions = [];
1317
+ executionContext = null;
1318
+ }
1319
+ // Check if we're in built-in command mode (tree command with second argument)
1320
+ const builtInCommand = runConfig.tree?.builtInCommand;
1321
+ const supportedBuiltInCommands = ['commit', 'release', 'publish', 'link', 'unlink', 'development', 'branches', 'run', 'checkout', 'updates', 'precommit'];
1322
+ if (builtInCommand && !supportedBuiltInCommands.includes(builtInCommand)) {
1323
+ throw new Error(`Unsupported built-in command: ${builtInCommand}. Supported commands: ${supportedBuiltInCommands.join(', ')}`);
1324
+ }
1325
+ // Handle run subcommand - convert space-separated scripts to npm run commands
1326
+ if (builtInCommand === 'run') {
1327
+ const packageArgument = runConfig.tree?.packageArgument;
1328
+ if (!packageArgument) {
1329
+ throw new Error('run subcommand requires script names. Usage: kodrdriv tree run "clean build test"');
1330
+ }
1331
+ // Split the package argument by spaces to get individual script names
1332
+ const scripts = packageArgument.trim().split(/\s+/).filter(script => script.length > 0);
1333
+ if (scripts.length === 0) {
1334
+ throw new Error('run subcommand requires at least one script name. Usage: kodrdriv tree run "clean build test"');
1335
+ }
1336
+ // Convert to npm run commands joined with &&
1337
+ const npmCommands = scripts.map(script => `npm run ${script}`).join(' && ');
1338
+ // Set this as the custom command to run
1339
+ runConfig.tree = {
1340
+ ...runConfig.tree,
1341
+ cmd: npmCommands
1342
+ };
1343
+ // Clear the built-in command since we're now using custom command mode
1344
+ runConfig.tree.builtInCommand = undefined;
1345
+ logger.info(`Converting run subcommand to: ${npmCommands}`);
1346
+ // Store scripts for later validation
1347
+ runConfig.__scriptsToValidate = scripts;
1348
+ }
1349
+ // Determine the target directories - either specified or current working directory
1350
+ const directories = runConfig.tree?.directories || [process.cwd()];
1351
+ // Handle link status subcommand
1352
+ if (builtInCommand === 'link' && runConfig.tree?.packageArgument === 'status') {
1353
+ // For tree link status, we want to show status across all packages
1354
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running link status across workspace...`);
1355
+ // Create a config that will be passed to the link command
1356
+ const linkConfig = {
1357
+ ...runConfig,
1358
+ tree: {
1359
+ ...runConfig.tree,
1360
+ directories: directories
1361
+ }
1362
+ };
1363
+ try {
1364
+ const result = await Link.execute(linkConfig, 'status');
1365
+ return result;
1366
+ }
1367
+ catch (error) {
1368
+ logger.error(`Link status failed: ${error.message}`);
1369
+ throw error;
1370
+ }
1371
+ }
1372
+ // Handle unlink status subcommand
1373
+ if (builtInCommand === 'unlink' && runConfig.tree?.packageArgument === 'status') {
1374
+ // For tree unlink status, we want to show status across all packages
1375
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running unlink status across workspace...`);
1376
+ // Create a config that will be passed to the unlink command
1377
+ const unlinkConfig = {
1378
+ ...runConfig,
1379
+ tree: {
1380
+ ...runConfig.tree,
1381
+ directories: directories
1382
+ }
1383
+ };
1384
+ try {
1385
+ const result = await Unlink.execute(unlinkConfig, 'status');
1386
+ return result;
1387
+ }
1388
+ catch (error) {
1389
+ logger.error(`Unlink status failed: ${error.message}`);
1390
+ throw error;
1391
+ }
1392
+ }
1393
+ if (directories.length === 1) {
1394
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspace at: ${directories[0]}`);
1395
+ }
1396
+ else {
1397
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspaces at: ${directories.join(', ')}`);
1398
+ }
1399
+ try {
1400
+ // Get exclusion patterns from config, fallback to empty array
1401
+ const excludedPatterns = runConfig.tree?.exclude || [];
1402
+ if (excludedPatterns.length > 0) {
1403
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Using exclusion patterns: ${excludedPatterns.join(', ')}`);
1404
+ }
1405
+ // Scan for package.json files across all directories
1406
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning for package.json files...`);
1407
+ let allPackageJsonPaths = [];
1408
+ for (const targetDirectory of directories) {
1409
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning directory: ${targetDirectory}`);
1410
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1411
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1412
+ }
1413
+ const packageJsonPaths = allPackageJsonPaths;
1414
+ if (packageJsonPaths.length === 0) {
1415
+ const directoriesStr = directories.join(', ');
1416
+ const message = `No package.json files found in subdirectories of: ${directoriesStr}`;
1417
+ logger.warn(message);
1418
+ return message;
1419
+ }
1420
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found ${packageJsonPaths.length} package.json files`);
1421
+ // Build dependency graph
1422
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Building dependency graph...`);
1423
+ const dependencyGraph = await buildDependencyGraph(packageJsonPaths);
1424
+ // Perform topological sort to determine build order
1425
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Determining build order...`);
1426
+ let buildOrder = topologicalSort(dependencyGraph);
1427
+ // Handle start-from functionality if specified
1428
+ const startFrom = runConfig.tree?.startFrom;
1429
+ if (startFrom) {
1430
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for start package: ${startFrom}`);
1431
+ // Resolve the actual package name (can be package name or directory name)
1432
+ let startPackageName = null;
1433
+ for (const [pkgName, pkgInfo] of dependencyGraph.packages) {
1434
+ const dirName = path.basename(pkgInfo.path);
1435
+ if (dirName === startFrom || pkgName === startFrom) {
1436
+ startPackageName = pkgName;
1437
+ break;
1438
+ }
1439
+ }
1440
+ if (!startPackageName) {
1441
+ // Check if the package exists but was excluded across all directories
1442
+ let allPackageJsonPathsForCheck = [];
1443
+ for (const targetDirectory of directories) {
1444
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
1445
+ allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
1446
+ }
1447
+ let wasExcluded = false;
1448
+ for (const packageJsonPath of allPackageJsonPathsForCheck) {
1449
+ try {
1450
+ const packageInfo = await parsePackageJson(packageJsonPath);
1451
+ const dirName = path.basename(packageInfo.path);
1452
+ if (dirName === startFrom || packageInfo.name === startFrom) {
1453
+ // Check if this package was excluded
1454
+ if (shouldExclude(packageJsonPath, excludedPatterns)) {
1455
+ wasExcluded = true;
1456
+ break;
1457
+ }
1458
+ }
1459
+ }
1460
+ catch {
1461
+ // Skip invalid package.json files
1462
+ continue;
1463
+ }
1464
+ }
1465
+ if (wasExcluded) {
1466
+ const excludedPatternsStr = excludedPatterns.join(', ');
1467
+ throw new Error(`Package directory '${startFrom}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different starting package.`);
1468
+ }
1469
+ else {
1470
+ const availablePackages = buildOrder.map(name => {
1471
+ const packageInfo = dependencyGraph.packages.get(name);
1472
+ return `${path.basename(packageInfo.path)} (${name})`;
1473
+ }).join(', ');
1474
+ throw new Error(`Package directory '${startFrom}' not found. Available packages: ${availablePackages}`);
1475
+ }
1476
+ }
1477
+ // Find the start package in the build order and start execution from there
1478
+ const startIndex = buildOrder.findIndex(pkgName => pkgName === startPackageName);
1479
+ if (startIndex === -1) {
1480
+ throw new Error(`Package '${startFrom}' not found in build order. This should not happen.`);
1481
+ }
1482
+ // Filter build order to start from the specified package
1483
+ const originalLength = buildOrder.length;
1484
+ buildOrder = buildOrder.slice(startIndex);
1485
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Starting execution from package '${startFrom}' (${buildOrder.length} of ${originalLength} packages remaining).`);
1486
+ }
1487
+ // Handle stop-at functionality if specified
1488
+ const stopAt = runConfig.tree?.stopAt;
1489
+ if (stopAt) {
1490
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for stop package: ${stopAt}`);
1491
+ // Find the package that matches the stopAt directory name
1492
+ const stopIndex = buildOrder.findIndex(packageName => {
1493
+ const packageInfo = dependencyGraph.packages.get(packageName);
1494
+ const dirName = path.basename(packageInfo.path);
1495
+ return dirName === stopAt || packageName === stopAt;
1496
+ });
1497
+ if (stopIndex === -1) {
1498
+ // Check if the package exists but was excluded across all directories
1499
+ let allPackageJsonPathsForCheck = [];
1500
+ for (const targetDirectory of directories) {
1501
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
1502
+ allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
1503
+ }
1504
+ let wasExcluded = false;
1505
+ for (const packageJsonPath of allPackageJsonPathsForCheck) {
1506
+ try {
1507
+ const packageInfo = await parsePackageJson(packageJsonPath);
1508
+ const dirName = path.basename(packageInfo.path);
1509
+ if (dirName === stopAt || packageInfo.name === stopAt) {
1510
+ // Check if this package was excluded
1511
+ if (shouldExclude(packageJsonPath, excludedPatterns)) {
1512
+ wasExcluded = true;
1513
+ break;
1514
+ }
1515
+ }
1516
+ }
1517
+ catch {
1518
+ // Skip invalid package.json files
1519
+ continue;
1520
+ }
1521
+ }
1522
+ if (wasExcluded) {
1523
+ const excludedPatternsStr = excludedPatterns.join(', ');
1524
+ throw new Error(`Package directory '${stopAt}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different stop package.`);
1525
+ }
1526
+ else {
1527
+ const availablePackages = buildOrder.map(name => {
1528
+ const packageInfo = dependencyGraph.packages.get(name);
1529
+ return `${path.basename(packageInfo.path)} (${name})`;
1530
+ }).join(', ');
1531
+ throw new Error(`Package directory '${stopAt}' not found. Available packages: ${availablePackages}`);
1532
+ }
1533
+ }
1534
+ // Truncate the build order before the stop package (the stop package is not executed)
1535
+ const originalLength = buildOrder.length;
1536
+ buildOrder = buildOrder.slice(0, stopIndex);
1537
+ const stoppedCount = originalLength - stopIndex;
1538
+ if (stoppedCount > 0) {
1539
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Stopping before '${stopAt}' - excluding ${stoppedCount} package${stoppedCount === 1 ? '' : 's'}`);
1540
+ }
1541
+ }
1542
// Helper that classifies a semver range string as patch/minor/major scoped,
// preserving whatever range operator it carries (^, ~, >=, ...).
// Examples: "^4.4.32" -> "^P", "^4.4" -> "^m", "^4" -> "^M".
// Ranges that don't fit the pattern are returned trimmed but otherwise as-is.
const getVersionScopeIndicator = (versionRange) => {
    const trimmed = versionRange.trim();
    // Everything before the first digit is the range operator prefix.
    const operatorMatch = trimmed.match(/^([^0-9]*)/);
    const operator = operatorMatch ? operatorMatch[1] : '';
    const numericPart = trimmed.slice(operator.length);
    // split('.') yields exactly one more segment than there are dots.
    const dots = numericPart.split('.').length - 1;
    if (dots >= 2) {
        // Patch-level pin, e.g. "^4.4.32"
        return `${operator}P`;
    }
    if (dots === 1) {
        // Minor-level pin, e.g. "^4.4"
        return `${operator}m`;
    }
    if (dots === 0 && /^\d+$/.test(numericPart)) {
        // Major-level pin, e.g. "^4"
        return `${operator}M`;
    }
    // Complex or non-standard ranges (e.g. "*", "1.x || 2.x") pass through.
    return trimmed;
};
1568
// Helper that builds a sorted, display-ready list of workspace packages that
// depend on `targetPackageName`. Each entry is annotated with its version-range
// scope indicator, and consumers sharing the target's npm scope are shortened
// (e.g. "@fjell/core" -> "./core"). Packages whose package.json cannot be read
// or parsed are silently skipped.
const findConsumingPackagesForBranches = async (targetPackageName, allPackages, storage) => {
    const dependencyTypes = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies'];
    // Scope prefix such as "@fjell/" enables the "./name" substitution below.
    const scopePrefix = targetPackageName.includes('/')
        ? `${targetPackageName.split('/')[0]}/`
        : null;
    const consumers = [];
    for (const [name, info] of allPackages) {
        if (name === targetPackageName) {
            continue;
        }
        try {
            const manifestPath = path.join(info.path, 'package.json');
            const rawManifest = await storage.readFile(manifestPath, 'utf-8');
            const manifest = validatePackageJson(safeJsonParse(rawManifest, manifestPath), manifestPath);
            // The first dependency field mentioning the target supplies the range.
            let versionRange = null;
            for (const depType of dependencyTypes) {
                const declared = manifest[depType]?.[targetPackageName];
                if (declared) {
                    versionRange = declared;
                    break;
                }
            }
            if (!versionRange) {
                continue;
            }
            // Same-scope consumers render with a "./" shorthand.
            let display = scopePrefix && name.startsWith(scopePrefix)
                ? `./${name.substring(scopePrefix.length)}`
                : name;
            display += ` (${getVersionScopeIndicator(versionRange)})`;
            consumers.push(display);
        }
        catch {
            // Skip packages whose manifest we can't parse.
            continue;
        }
    }
    return consumers.sort();
};
1610
+ // Handle special "branches" command that displays table
1611
+ if (builtInCommand === 'branches') {
1612
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Branch Status Summary:`);
1613
+ logger.info('');
1614
+ // Calculate column widths for nice formatting
1615
+ let maxNameLength = 'Package'.length;
1616
+ let maxBranchLength = 'Branch'.length;
1617
+ let maxVersionLength = 'Version'.length;
1618
+ let maxStatusLength = 'Status'.length;
1619
+ let maxLinkLength = 'Linked'.length;
1620
+ let maxConsumersLength = 'Consumers'.length;
1621
+ const branchInfos = [];
1622
+ // Create storage instance for consumer lookup
1623
+ const storage = createStorage();
1624
+ // Get globally linked packages once at the beginning
1625
+ const globallyLinkedPackages = await getGloballyLinkedPackages();
1626
+ // ANSI escape codes for progress display
1627
+ const ANSI = {
1628
+ CURSOR_UP: '\x1b[1A',
1629
+ CURSOR_TO_START: '\x1b[0G',
1630
+ CLEAR_LINE: '\x1b[2K',
1631
+ GREEN: '\x1b[32m',
1632
+ BLUE: '\x1b[34m',
1633
+ YELLOW: '\x1b[33m',
1634
+ RESET: '\x1b[0m',
1635
+ BOLD: '\x1b[1m'
1636
+ };
1637
+ // Check if terminal supports ANSI (and we're not in MCP server mode)
1638
+ // In MCP mode, all stdout must be valid JSON-RPC, so disable progress display
1639
+ const supportsAnsi = process.stdout.isTTY &&
1640
+ process.env.TERM !== 'dumb' &&
1641
+ !process.env.NO_COLOR &&
1642
+ process.env.KODRDRIV_MCP_SERVER !== 'true';
1643
+ const totalPackages = buildOrder.length;
1644
+ const concurrency = 5; // Process up to 5 packages at a time
1645
+ let completedCount = 0;
1646
+ let isFirstProgress = true;
1647
// Renders a one-line ANSI progress bar for the package analysis pass.
// After the first call it moves the cursor up and clears the line so the bar
// updates in place. No-op when the terminal doesn't support ANSI (or we're in
// MCP server mode — see `supportsAnsi` above).
const updateProgress = (currentPackage, completed, total) => {
    if (!supportsAnsi) {
        return;
    }
    if (!isFirstProgress) {
        // Overwrite the previously printed progress line.
        process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
    }
    const percentage = Math.round((completed / total) * 100);
    // 20-character bar: one filled cell per 5%.
    const filledCells = Math.floor(percentage / 5);
    const progressBar = '█'.repeat(filledCells) + '░'.repeat(20 - filledCells);
    const suffix = currentPackage ? ` - Currently: ${currentPackage}` : '';
    process.stdout.write(
        `${ANSI.BLUE}${ANSI.BOLD}Analyzing packages... ${ANSI.GREEN}[${progressBar}] ${percentage}%${ANSI.RESET} ${ANSI.YELLOW}(${completed}/${total})${ANSI.RESET}${suffix}\n`
    );
    isFirstProgress = false;
};
1662
// Function to process a single package
// Builds one row of the branches table for `packageName`: its git branch and
// status, whether it is globally linked, and its annotated consumer list.
// Never rejects — any failure is logged and collapsed into an 'error'
// placeholder row so the surrounding batch Promise.all keeps running.
const processPackage = async (packageName) => {
    const packageInfo = dependencyGraph.packages.get(packageName);
    try {
        // Process git status and consumers in parallel
        const [gitStatus, consumers] = await Promise.all([
            getGitStatusSummary(packageInfo.path),
            findConsumingPackagesForBranches(packageName, dependencyGraph.packages, storage)
        ]);
        // Check if this package is globally linked (available to be linked to)
        const isGloballyLinked = globallyLinkedPackages.has(packageName);
        const linkedText = isGloballyLinked ? '✓' : '';
        // Add asterisk to consumers that are actively linking to globally linked packages
        // and check for link problems to highlight in red
        const consumersWithLinkStatus = await Promise.all(consumers.map(async (consumer) => {
            // Extract the base consumer name from the format "package-name (^P)" or "./scoped-name (^m)"
            const baseConsumerName = consumer.replace(/ \([^)]+\)$/, ''); // Remove version scope indicator
            // Get the original package name from display name (remove scope substitution)
            // NOTE(review): this reconstruction assumes a "./"-shortened consumer
            // shares this package's scope; replace() with a string pattern swaps
            // only the first "./" occurrence, which is the leading prefix here.
            const originalConsumerName = baseConsumerName.startsWith('./')
                ? baseConsumerName.replace('./', packageName.split('/')[0] + '/')
                : baseConsumerName;
            // Find the consumer package info to get its path
            // (linear scan over the package map; fine at workspace scale)
            const consumerPackageInfo = Array.from(dependencyGraph.packages.values())
                .find(pkg => pkg.name === originalConsumerName);
            if (consumerPackageInfo) {
                // Fetch active links and compatibility problems concurrently.
                const [consumerLinkedDeps, linkProblems] = await Promise.all([
                    getLinkedDependencies(consumerPackageInfo.path),
                    getLinkCompatibilityProblems(consumerPackageInfo.path, dependencyGraph.packages)
                ]);
                let consumerDisplay = consumer;
                // Add asterisk if this consumer is actively linking to this package
                if (consumerLinkedDeps.has(packageName)) {
                    consumerDisplay += '*';
                }
                // Check if this consumer has link problems with the current package
                if (linkProblems.has(packageName)) {
                    // Highlight in red using ANSI escape codes (only if terminal supports it)
                    if (supportsAnsi) {
                        consumerDisplay = `\x1b[31m${consumerDisplay}\x1b[0m`;
                    }
                    else {
                        // Fallback for terminals that don't support ANSI colors
                        consumerDisplay += ' [LINK PROBLEM]';
                    }
                }
                return consumerDisplay;
            }
            // Consumer not found in the graph (e.g. outside this workspace view):
            // return its display string unannotated.
            return consumer;
        }));
        return {
            name: packageName,
            branch: gitStatus.branch,
            version: packageInfo.version,
            status: gitStatus.status,
            linked: linkedText,
            consumers: consumersWithLinkStatus
        };
    }
    catch (error) {
        // Failure row: note '✗' here vs. '' for a clean-but-unlinked package.
        logger.warn(`Failed to get git status for ${packageName}: ${error.message}`);
        return {
            name: packageName,
            branch: 'error',
            version: packageInfo.version,
            status: 'error',
            linked: '✗',
            consumers: ['error']
        };
    }
};
1732
+ // Process packages in batches with progress updates
1733
+ updateProgress('Starting...', 0, totalPackages);
1734
+ for (let i = 0; i < buildOrder.length; i += concurrency) {
1735
+ const batch = buildOrder.slice(i, i + concurrency);
1736
+ // Update progress to show current batch
1737
+ const currentBatchStr = batch.length === 1 ? batch[0] : `${batch[0]} + ${batch.length - 1} others`;
1738
+ updateProgress(currentBatchStr, completedCount, totalPackages);
1739
+ // Process batch in parallel
1740
+ const batchResults = await Promise.all(batch.map(packageName => processPackage(packageName)));
1741
+ // Add results and update column widths
1742
+ for (const result of batchResults) {
1743
+ branchInfos.push(result);
1744
+ maxNameLength = Math.max(maxNameLength, result.name.length);
1745
+ maxBranchLength = Math.max(maxBranchLength, result.branch.length);
1746
+ maxVersionLength = Math.max(maxVersionLength, result.version.length);
1747
+ maxStatusLength = Math.max(maxStatusLength, result.status.length);
1748
+ maxLinkLength = Math.max(maxLinkLength, result.linked.length);
1749
+ // For consumers, calculate the width based on the longest consumer name
1750
+ const maxConsumerLength = result.consumers.length > 0
1751
+ ? Math.max(...result.consumers.map(c => c.length))
1752
+ : 0;
1753
+ maxConsumersLength = Math.max(maxConsumersLength, maxConsumerLength);
1754
+ }
1755
+ completedCount += batch.length;
1756
+ updateProgress('', completedCount, totalPackages);
1757
+ }
1758
+ // Clear progress line and add spacing
1759
+ if (supportsAnsi && !isFirstProgress) {
1760
+ process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
1761
+ }
1762
+ logger.info(`${ANSI.GREEN}✅ Analysis complete!${ANSI.RESET} Processed ${totalPackages} packages in batches of ${concurrency}.`);
1763
+ logger.info('');
1764
+ // Print header (new order: Package | Branch | Version | Status | Linked | Consumers)
1765
+ const nameHeader = 'Package'.padEnd(maxNameLength);
1766
+ const branchHeader = 'Branch'.padEnd(maxBranchLength);
1767
+ const versionHeader = 'Version'.padEnd(maxVersionLength);
1768
+ const statusHeader = 'Status'.padEnd(maxStatusLength);
1769
+ const linkHeader = 'Linked'.padEnd(maxLinkLength);
1770
+ const consumersHeader = 'Consumers';
1771
+ logger.info(`${nameHeader} | ${branchHeader} | ${versionHeader} | ${statusHeader} | ${linkHeader} | ${consumersHeader}`);
1772
+ logger.info(`${'-'.repeat(maxNameLength)} | ${'-'.repeat(maxBranchLength)} | ${'-'.repeat(maxVersionLength)} | ${'-'.repeat(maxStatusLength)} | ${'-'.repeat(maxLinkLength)} | ${'-'.repeat(9)}`);
1773
+ // Print data rows with multi-line consumers
1774
+ for (const info of branchInfos) {
1775
+ const nameCol = info.name.padEnd(maxNameLength);
1776
+ const branchCol = info.branch.padEnd(maxBranchLength);
1777
+ const versionCol = info.version.padEnd(maxVersionLength);
1778
+ const statusCol = info.status.padEnd(maxStatusLength);
1779
+ const linkCol = info.linked.padEnd(maxLinkLength);
1780
+ if (info.consumers.length === 0) {
1781
+ // No consumers - single line
1782
+ logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | `);
1783
+ }
1784
+ else if (info.consumers.length === 1) {
1785
+ // Single consumer - single line
1786
+ logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
1787
+ }
1788
+ else {
1789
+ // Multiple consumers - first consumer on same line, rest on new lines with continuous column separators
1790
+ logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
1791
+ // Additional consumers on separate lines with proper column separators
1792
+ const emptyNameCol = ' '.repeat(maxNameLength);
1793
+ const emptyBranchCol = ' '.repeat(maxBranchLength);
1794
+ const emptyVersionCol = ' '.repeat(maxVersionLength);
1795
+ const emptyStatusCol = ' '.repeat(maxStatusLength);
1796
+ const emptyLinkCol = ' '.repeat(maxLinkLength);
1797
+ for (let i = 1; i < info.consumers.length; i++) {
1798
+ logger.info(`${emptyNameCol} | ${emptyBranchCol} | ${emptyVersionCol} | ${emptyStatusCol} | ${emptyLinkCol} | ${info.consumers[i]}`);
1799
+ }
1800
+ }
1801
+ }
1802
+ logger.info('');
1803
+ // Add legend explaining the symbols and colors
1804
+ logger.info('Legend:');
1805
+ logger.info(' * = Consumer is actively linking to this package');
1806
+ logger.info(' (^P) = Patch-level dependency (e.g., "^4.4.32")');
1807
+ logger.info(' (^m) = Minor-level dependency (e.g., "^4.4")');
1808
+ logger.info(' (^M) = Major-level dependency (e.g., "^4")');
1809
+ logger.info(' (~P), (>=M), etc. = Other version prefixes preserved');
1810
+ if (supportsAnsi) {
1811
+ logger.info(' \x1b[31mRed text\x1b[0m = Consumer has link problems (version mismatches) with this package');
1812
+ }
1813
+ else {
1814
+ logger.info(' [LINK PROBLEM] = Consumer has link problems (version mismatches) with this package');
1815
+ }
1816
+ logger.info('');
1817
+ return `Branch status summary for ${branchInfos.length} packages completed.`;
1818
+ }
1819
+ // Handle special "checkout" command that switches all packages to specified branch
1820
+ if (builtInCommand === 'checkout') {
1821
+ const targetBranch = runConfig.tree?.packageArgument;
1822
+ if (!targetBranch) {
1823
+ throw new Error('checkout subcommand requires a branch name. Usage: kodrdriv tree checkout <branch-name>');
1824
+ }
1825
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Workspace Checkout to Branch: ${targetBranch}`);
1826
+ logger.info('');
1827
+ // Phase 1: Safety check - scan all packages for uncommitted changes
1828
+ logger.info('🔍 Phase 1: Checking for uncommitted changes across workspace...');
1829
+ const packagesWithChanges = [];
1830
+ for (const packageName of buildOrder) {
1831
+ const packageInfo = dependencyGraph.packages.get(packageName);
1832
+ try {
1833
+ const gitStatus = await getGitStatusSummary(packageInfo.path);
1834
+ const hasProblems = gitStatus.hasUncommittedChanges || gitStatus.hasUnstagedFiles;
1835
+ packagesWithChanges.push({
1836
+ name: packageName,
1837
+ path: packageInfo.path,
1838
+ status: gitStatus.status,
1839
+ hasUncommittedChanges: gitStatus.hasUncommittedChanges,
1840
+ hasUnstagedFiles: gitStatus.hasUnstagedFiles
1841
+ });
1842
+ if (hasProblems) {
1843
+ logger.warn(`⚠️ ${packageName}: ${gitStatus.status}`);
1844
+ }
1845
+ else {
1846
+ logger.verbose(`✅ ${packageName}: clean`);
1847
+ }
1848
+ }
1849
+ catch (error) {
1850
+ logger.warn(`❌ ${packageName}: error checking status - ${error.message}`);
1851
+ packagesWithChanges.push({
1852
+ name: packageName,
1853
+ path: packageInfo.path,
1854
+ status: 'error',
1855
+ hasUncommittedChanges: false,
1856
+ hasUnstagedFiles: false
1857
+ });
1858
+ }
1859
+ }
1860
+ // Check if any packages have uncommitted changes
1861
+ const problemPackages = packagesWithChanges.filter(pkg => pkg.hasUncommittedChanges || pkg.hasUnstagedFiles || pkg.status === 'error');
1862
+ if (problemPackages.length > 0) {
1863
+ logger.error(`❌ Cannot proceed with checkout: ${problemPackages.length} packages have uncommitted changes or errors:`);
1864
+ logger.error('');
1865
+ for (const pkg of problemPackages) {
1866
+ logger.error(` 📦 ${pkg.name} (${pkg.path}):`);
1867
+ logger.error(` Status: ${pkg.status}`);
1868
+ }
1869
+ logger.error('');
1870
+ logger.error('🔧 To resolve this issue:');
1871
+ logger.error(' 1. Commit or stash changes in the packages listed above');
1872
+ logger.error(' 2. Or use "kodrdriv tree commit" to commit changes across all packages');
1873
+ logger.error(' 3. Then re-run the checkout command');
1874
+ logger.error('');
1875
+ throw new Error(`Workspace checkout blocked: ${problemPackages.length} packages have uncommitted changes`);
1876
+ }
1877
+ logger.info(`✅ Phase 1 complete: All ${packagesWithChanges.length} packages are clean`);
1878
+ logger.info('');
1879
+ // Phase 2: Perform the checkout
1880
+ logger.info(`🔄 Phase 2: Checking out all packages to branch '${targetBranch}'...`);
1881
+ let successCount = 0;
1882
+ const failedPackages = [];
1883
+ for (let i = 0; i < buildOrder.length; i++) {
1884
+ const packageName = buildOrder[i];
1885
+ const packageInfo = dependencyGraph.packages.get(packageName);
1886
+ if (isDryRun) {
1887
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: Would checkout ${targetBranch}`);
1888
+ successCount++;
1889
+ }
1890
+ else {
1891
+ try {
1892
+ const originalCwd = process.cwd();
1893
+ process.chdir(packageInfo.path);
1894
+ try {
1895
+ // Check if target branch exists locally
1896
+ let branchExists = false;
1897
+ try {
1898
+ await runSecure('git', ['rev-parse', '--verify', targetBranch]);
1899
+ branchExists = true;
1900
+ }
1901
+ catch {
1902
+ // Branch doesn't exist locally
1903
+ branchExists = false;
1904
+ }
1905
+ if (branchExists) {
1906
+ await runSecure('git', ['checkout', targetBranch]);
1907
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch}`);
1908
+ }
1909
+ else {
1910
+ // Try to check out branch from remote
1911
+ try {
1912
+ await runSecure('git', ['checkout', '-b', targetBranch, `origin/${targetBranch}`]);
1913
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch} from origin`);
1914
+ }
1915
+ catch {
1916
+ // If that fails, create a new branch
1917
+ await runSecure('git', ['checkout', '-b', targetBranch]);
1918
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Created new branch ${targetBranch}`);
1919
+ }
1920
+ }
1921
+ successCount++;
1922
+ }
1923
+ finally {
1924
+ process.chdir(originalCwd);
1925
+ }
1926
+ }
1927
+ catch (error) {
1928
+ logger.error(`[${i + 1}/${buildOrder.length}] ${packageName}: ❌ Failed - ${error.message}`);
1929
+ failedPackages.push({ name: packageName, error: error.message });
1930
+ }
1931
+ }
1932
+ }
1933
+ // Report results
1934
+ if (failedPackages.length > 0) {
1935
+ logger.error(`❌ Checkout completed with errors: ${successCount}/${buildOrder.length} packages successful`);
1936
+ logger.error('');
1937
+ logger.error('Failed packages:');
1938
+ for (const failed of failedPackages) {
1939
+ logger.error(` - ${failed.name}: ${failed.error}`);
1940
+ }
1941
+ throw new Error(`Checkout failed for ${failedPackages.length} packages`);
1942
+ }
1943
+ else {
1944
+ logger.info(`✅ Checkout complete: All ${buildOrder.length} packages successfully checked out to '${targetBranch}'`);
1945
+ return `Workspace checkout complete: ${successCount} packages checked out to '${targetBranch}'`;
1946
+ }
1947
+ }
1948
+ // Display results
1949
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Build order determined:`);
1950
+ let returnOutput = '';
1951
+ if (runConfig.verbose || runConfig.debug) {
1952
+ // Verbose mode: Skip simple format, show detailed format before command execution
1953
+ logger.info(''); // Add spacing
1954
+ const rangeInfo = [];
1955
+ if (startFrom)
1956
+ rangeInfo.push(`starting from ${startFrom}`);
1957
+ if (stopAt)
1958
+ rangeInfo.push(`stopping before ${stopAt}`);
1959
+ const rangeStr = rangeInfo.length > 0 ? ` (${rangeInfo.join(', ')})` : '';
1960
+ logger.info(`Detailed Build Order for ${buildOrder.length} packages${rangeStr}:`);
1961
+ logger.info('==========================================');
1962
+ buildOrder.forEach((packageName, index) => {
1963
+ const packageInfo = dependencyGraph.packages.get(packageName);
1964
+ const localDeps = Array.from(packageInfo.localDependencies);
1965
+ logger.info(`${index + 1}. ${packageName} (${packageInfo.version})`);
1966
+ logger.info(` Path: ${packageInfo.path}`);
1967
+ if (localDeps.length > 0) {
1968
+ logger.info(` Local Dependencies: ${localDeps.join(', ')}`);
1969
+ }
1970
+ else {
1971
+ logger.info(` Local Dependencies: none`);
1972
+ }
1973
+ logger.info(''); // Add spacing between packages
1974
+ });
1975
+ // Simple return output for verbose mode (no need to repeat detailed info)
1976
+ returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
1977
+ }
1978
+ else {
1979
+ // Non-verbose mode: Show simple build order
1980
+ buildOrder.forEach((packageName, index) => {
1981
+ const packageInfo = dependencyGraph.packages.get(packageName);
1982
+ const localDeps = Array.from(packageInfo.localDependencies);
1983
+ // Log each step
1984
+ if (localDeps.length > 0) {
1985
+ logger.info(`${index + 1}. ${packageName} (depends on: ${localDeps.join(', ')})`);
1986
+ }
1987
+ else {
1988
+ logger.info(`${index + 1}. ${packageName} (no local dependencies)`);
1989
+ }
1990
+ });
1991
+ // Simple return output for non-verbose mode
1992
+ returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
1993
+ }
1994
+ // Execute command if provided (custom command or built-in command)
1995
+ const cmd = runConfig.tree?.cmd;
1996
+ // Determine command to execute
1997
+ let commandToRun;
1998
+ let isBuiltInCommand = false;
1999
+ if (builtInCommand) {
2000
+ // Built-in command mode: shell out to kodrdriv subprocess
2001
+ // Build command with propagated global options
2002
+ const globalOptions = [];
2003
+ // Propagate global flags that should be inherited by subprocesses
2004
+ if (runConfig.debug)
2005
+ globalOptions.push('--debug');
2006
+ if (runConfig.verbose)
2007
+ globalOptions.push('--verbose');
2008
+ if (runConfig.dryRun)
2009
+ globalOptions.push('--dry-run');
2010
+ if (runConfig.overrides)
2011
+ globalOptions.push('--overrides');
2012
+ // Propagate global options with values
2013
+ if (runConfig.model)
2014
+ globalOptions.push(`--model "${runConfig.model}"`);
2015
+ if (runConfig.configDirectory)
2016
+ globalOptions.push(`--config-dir "${runConfig.configDirectory}"`);
2017
+ if (runConfig.outputDirectory)
2018
+ globalOptions.push(`--output-dir "${runConfig.outputDirectory}"`);
2019
+ if (runConfig.preferencesDirectory)
2020
+ globalOptions.push(`--preferences-dir "${runConfig.preferencesDirectory}"`);
2021
+ // Build the command with global options
2022
+ const optionsString = globalOptions.length > 0 ? ` ${globalOptions.join(' ')}` : '';
2023
+ // Add package argument for link/unlink/updates commands
2024
+ const packageArg = runConfig.tree?.packageArgument;
2025
+ const packageArgString = (packageArg && (builtInCommand === 'link' || builtInCommand === 'unlink' || builtInCommand === 'updates'))
2026
+ ? ` "${packageArg}"`
2027
+ : '';
2028
+ // Add command-specific options
2029
+ let commandSpecificOptions = '';
2030
+ // Commit command options
2031
+ if (builtInCommand === 'commit') {
2032
+ if (runConfig.commit?.agentic) {
2033
+ commandSpecificOptions += ' --agentic';
2034
+ }
2035
+ if (runConfig.commit?.selfReflection) {
2036
+ commandSpecificOptions += ' --self-reflection';
2037
+ }
2038
+ if (runConfig.commit?.add) {
2039
+ commandSpecificOptions += ' --add';
2040
+ }
2041
+ if (runConfig.commit?.cached) {
2042
+ commandSpecificOptions += ' --cached';
2043
+ }
2044
+ if (runConfig.commit?.interactive) {
2045
+ commandSpecificOptions += ' --interactive';
2046
+ }
2047
+ if (runConfig.commit?.amend) {
2048
+ commandSpecificOptions += ' --amend';
2049
+ }
2050
+ if (runConfig.commit?.skipFileCheck) {
2051
+ commandSpecificOptions += ' --skip-file-check';
2052
+ }
2053
+ if (runConfig.commit?.maxAgenticIterations) {
2054
+ commandSpecificOptions += ` --max-agentic-iterations ${runConfig.commit.maxAgenticIterations}`;
2055
+ }
2056
+ if (runConfig.commit?.allowCommitSplitting) {
2057
+ commandSpecificOptions += ' --allow-commit-splitting';
2058
+ }
2059
+ if (runConfig.commit?.messageLimit) {
2060
+ commandSpecificOptions += ` --message-limit ${runConfig.commit.messageLimit}`;
2061
+ }
2062
+ if (runConfig.commit?.maxDiffBytes) {
2063
+ commandSpecificOptions += ` --max-diff-bytes ${runConfig.commit.maxDiffBytes}`;
2064
+ }
2065
+ if (runConfig.commit?.direction) {
2066
+ commandSpecificOptions += ` --direction "${runConfig.commit.direction}"`;
2067
+ }
2068
+ if (runConfig.commit?.context) {
2069
+ commandSpecificOptions += ` --context "${runConfig.commit.context}"`;
2070
+ }
2071
+ // Push option can be boolean or string (remote name)
2072
+ if (runConfig.commit?.push) {
2073
+ if (typeof runConfig.commit.push === 'string') {
2074
+ commandSpecificOptions += ` --push "${runConfig.commit.push}"`;
2075
+ }
2076
+ else {
2077
+ commandSpecificOptions += ' --push';
2078
+ }
2079
+ }
2080
+ // Model-specific options for commit
2081
+ if (runConfig.commit?.model) {
2082
+ commandSpecificOptions += ` --model "${runConfig.commit.model}"`;
2083
+ }
2084
+ if (runConfig.commit?.openaiReasoning) {
2085
+ commandSpecificOptions += ` --openai-reasoning ${runConfig.commit.openaiReasoning}`;
2086
+ }
2087
+ if (runConfig.commit?.openaiMaxOutputTokens) {
2088
+ commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.commit.openaiMaxOutputTokens}`;
2089
+ }
2090
+ }
2091
+ // Release command options (only for direct 'release' command)
2092
+ if (builtInCommand === 'release') {
2093
+ if (runConfig.release?.agentic) {
2094
+ commandSpecificOptions += ' --agentic';
2095
+ }
2096
+ if (runConfig.release?.selfReflection) {
2097
+ commandSpecificOptions += ' --self-reflection';
2098
+ }
2099
+ if (runConfig.release?.maxAgenticIterations) {
2100
+ commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
2101
+ }
2102
+ if (runConfig.release?.interactive) {
2103
+ commandSpecificOptions += ' --interactive';
2104
+ }
2105
+ if (runConfig.release?.from) {
2106
+ commandSpecificOptions += ` --from "${runConfig.release.from}"`;
2107
+ }
2108
+ if (runConfig.release?.to) {
2109
+ commandSpecificOptions += ` --to "${runConfig.release.to}"`;
2110
+ }
2111
+ if (runConfig.release?.focus) {
2112
+ commandSpecificOptions += ` --focus "${runConfig.release.focus}"`;
2113
+ }
2114
+ if (runConfig.release?.context) {
2115
+ commandSpecificOptions += ` --context "${runConfig.release.context}"`;
2116
+ }
2117
+ if (runConfig.release?.messageLimit) {
2118
+ commandSpecificOptions += ` --message-limit ${runConfig.release.messageLimit}`;
2119
+ }
2120
+ if (runConfig.release?.maxDiffBytes) {
2121
+ commandSpecificOptions += ` --max-diff-bytes ${runConfig.release.maxDiffBytes}`;
2122
+ }
2123
+ if (runConfig.release?.noMilestones) {
2124
+ commandSpecificOptions += ' --no-milestones';
2125
+ }
2126
+ if (runConfig.release?.fromMain) {
2127
+ commandSpecificOptions += ' --from-main';
2128
+ }
2129
+ // Model-specific options for release
2130
+ if (runConfig.release?.model) {
2131
+ commandSpecificOptions += ` --model "${runConfig.release.model}"`;
2132
+ }
2133
+ if (runConfig.release?.openaiReasoning) {
2134
+ commandSpecificOptions += ` --openai-reasoning ${runConfig.release.openaiReasoning}`;
2135
+ }
2136
+ if (runConfig.release?.openaiMaxOutputTokens) {
2137
+ commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.release.openaiMaxOutputTokens}`;
2138
+ }
2139
+ }
2140
+ // Publish command options (only agentic flags - publish reads other release config from config file)
2141
+ if (builtInCommand === 'publish') {
2142
+ // Only pass the agentic-related flags that publish command accepts
2143
+ if (runConfig.release?.agentic) {
2144
+ commandSpecificOptions += ' --agentic';
2145
+ }
2146
+ if (runConfig.release?.selfReflection) {
2147
+ commandSpecificOptions += ' --self-reflection';
2148
+ }
2149
+ if (runConfig.release?.maxAgenticIterations) {
2150
+ commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
2151
+ }
2152
+ // Publish has its own --from, --interactive, --from-main flags (not from release config)
2153
+ }
2154
+ // Unlink command options
2155
+ if (builtInCommand === 'unlink' && runConfig.tree?.cleanNodeModules) {
2156
+ commandSpecificOptions += ' --clean-node-modules';
2157
+ }
2158
+ // Link/Unlink externals
2159
+ if ((builtInCommand === 'link' || builtInCommand === 'unlink') && runConfig.tree?.externals && runConfig.tree.externals.length > 0) {
2160
+ commandSpecificOptions += ` --externals ${runConfig.tree.externals.join(' ')}`;
2161
+ }
2162
+ commandToRun = `kodrdriv ${builtInCommand}${optionsString}${packageArgString}${commandSpecificOptions}`;
2163
+ isBuiltInCommand = true;
2164
+ }
2165
+ else if (cmd) {
2166
+ // Custom command mode
2167
+ commandToRun = cmd;
2168
+ }
2169
+ if (commandToRun) {
2170
+ // Validate scripts for run command before execution
2171
+ const scriptsToValidate = runConfig.__scriptsToValidate;
2172
+ if (scriptsToValidate && scriptsToValidate.length > 0) {
2173
+ logger.info(`🔍 Validating scripts before execution: ${scriptsToValidate.join(', ')}`);
2174
+ const validation = await validateScripts(dependencyGraph.packages, scriptsToValidate);
2175
+ if (!validation.valid) {
2176
+ logger.error('');
2177
+ logger.error('❌ Script validation failed. Cannot proceed with execution.');
2178
+ logger.error('');
2179
+ logger.error('💡 To fix this:');
2180
+ logger.error(' 1. Add the missing scripts to the package.json files');
2181
+ logger.error(' 2. Or exclude packages that don\'t need these scripts using --exclude');
2182
+ logger.error(' 3. Or run individual packages that have the required scripts');
2183
+ logger.error('');
2184
+ throw new Error('Script validation failed. See details above.');
2185
+ }
2186
+ }
2187
+ // Validate command for parallel execution if parallel mode is enabled
2188
+ if (runConfig.tree?.parallel) {
2189
+ const { CommandValidator } = await import('./execution/CommandValidator.js');
2190
+ const validation = CommandValidator.validateForParallel(commandToRun, builtInCommand);
2191
+ CommandValidator.logValidation(validation);
2192
+ if (!validation.valid) {
2193
+ logger.error('');
2194
+ logger.error('Cannot proceed with parallel execution due to validation errors.');
2195
+ logger.error('Run without --parallel flag to execute sequentially.');
2196
+ throw new Error('Command validation failed for parallel execution');
2197
+ }
2198
+ // Apply recommended concurrency if not explicitly set
2199
+ if (!runConfig.tree.maxConcurrency) {
2200
+ const os = await import('os');
2201
+ const recommended = CommandValidator.getRecommendedConcurrency(builtInCommand, os.cpus().length, commandToRun);
2202
+ if (recommended !== os.cpus().length) {
2203
+ const reason = builtInCommand ? builtInCommand : `custom command "${commandToRun}"`;
2204
+ logger.info(`💡 Using recommended concurrency for ${reason}: ${recommended}`);
2205
+ runConfig.tree.maxConcurrency = recommended;
2206
+ }
2207
+ }
2208
+ }
2209
+ // Create set of all package names for inter-project dependency detection
2210
+ const allPackageNames = new Set(Array.from(dependencyGraph.packages.keys()));
2211
+ // Handle cleanup flag - remove checkpoint and start fresh
2212
+ if (runConfig.tree?.cleanup) {
2213
+ logger.info('TREE_CLEANUP: Cleaning up failed state | Action: Remove checkpoint | Purpose: Start fresh execution');
2214
+ await cleanupContext(runConfig.outputDirectory);
2215
+ executionContext = null;
2216
+ publishedVersions = [];
2217
+ logger.info('TREE_CLEANUP_COMPLETE: Checkpoint removed successfully | Status: Ready for fresh execution');
2218
+ }
2219
+ // Handle continue flag - resume from checkpoint
2220
+ if (runConfig.tree?.continue && !executionContext) {
2221
+ logger.info('TREE_RESUME: Attempting to resume from checkpoint | Action: Load execution context | Purpose: Continue from failure point');
2222
+ const loadedContext = await loadExecutionContext(runConfig.outputDirectory);
2223
+ if (!loadedContext) {
2224
+ const contextFilePath = getContextFilePath(runConfig.outputDirectory);
2225
+ logger.error('TREE_RESUME_FAILED: No checkpoint found to resume from | Expected: ' + contextFilePath + ' | Status: checkpoint-missing');
2226
+ logger.error('');
2227
+ logger.error('RECOVERY_OPTIONS: Available options to proceed:');
2228
+ logger.error(' Option 1: Run without --continue to start fresh execution');
2229
+ logger.error(' Option 2: Check if checkpoint file exists: ' + contextFilePath);
2230
+ logger.error('');
2231
+ throw new Error('No checkpoint found to resume from. Use --cleanup to start fresh or run without --continue.');
2232
+ }
2233
+ executionContext = loadedContext;
2234
+ publishedVersions = loadedContext.publishedVersions;
2235
+ logger.info(`TREE_RESUME_SUCCESS: Resumed from checkpoint | Completed: ${executionContext.completedPackages.length} packages | Remaining: ${buildOrder.length - executionContext.completedPackages.length} packages | Total: ${buildOrder.length}`);
2236
+ if (executionContext.lastSuccessfulPackage) {
2237
+ logger.info(`TREE_RESUME_LAST: Last successful package: ${executionContext.lastSuccessfulPackage}`);
2238
+ }
2239
+ if (executionContext.failedPackages && executionContext.failedPackages.length > 0) {
2240
+ logger.warn('TREE_RESUME_FAILURES: Previous failures detected | Count: ' + executionContext.failedPackages.length);
2241
+ executionContext.failedPackages.forEach((failure, idx) => {
2242
+ logger.warn(` ${idx + 1}. ${failure.name}: ${failure.error} (Phase: ${failure.phase})`);
2243
+ });
2244
+ logger.warn('');
2245
+ logger.warn('ACTION_REQUIRED: Ensure issues are fixed before continuing | Purpose: Avoid repeated failures');
2246
+ }
2247
+ }
2248
+ // Initialize execution context if not continuing
2249
+ if (!executionContext) {
2250
+ executionContext = {
2251
+ command: commandToRun,
2252
+ originalConfig: runConfig,
2253
+ publishedVersions: [],
2254
+ completedPackages: [],
2255
+ failedPackages: [],
2256
+ buildOrder: buildOrder,
2257
+ startTime: new Date(),
2258
+ lastUpdateTime: new Date()
2259
+ };
2260
+ // Save initial context for commands that support continuation
2261
+ if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
2262
+ await saveExecutionContext(executionContext, runConfig.outputDirectory);
2263
+ }
2264
+ }
2265
+ // Add spacing before command execution
2266
+ logger.info('');
2267
+ const executionDescription = isBuiltInCommand ? `built-in command "${builtInCommand}"` : `"${commandToRun}"`;
2268
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Executing ${executionDescription} in ${buildOrder.length} packages...`);
2269
+ // Add detailed multi-project execution context for debug mode
2270
+ if (runConfig.debug) {
2271
+ logger.debug('MULTI_PROJECT_PLAN: Execution plan initialized | Total Packages: %d | Command: %s | Built-in: %s | Dry Run: %s | Parallel: %s', buildOrder.length, commandToRun, isBuiltInCommand, isDryRun, runConfig.tree?.parallel || false);
2272
+ // Log package execution order with dependencies
2273
+ logger.debug('MULTI_PROJECT_ORDER: Package execution sequence:');
2274
+ buildOrder.forEach((pkgName, idx) => {
2275
+ const pkgInfo = dependencyGraph.packages.get(pkgName);
2276
+ if (pkgInfo) {
2277
+ const deps = Array.isArray(pkgInfo.dependencies) ? pkgInfo.dependencies : [];
2278
+ const depStr = deps.length > 0
2279
+ ? ` | Dependencies: [${deps.join(', ')}]`
2280
+ : ' | Dependencies: none';
2281
+ logger.debug(' %d. %s%s', idx + 1, pkgName, depStr);
2282
+ }
2283
+ });
2284
+ // Log dependency levels for parallel execution understanding
2285
+ const levels = new Map();
2286
// Compute the dependency depth ("level") of a package for the debug-mode
// dependency analysis: a package with no local dependencies is level 0;
// otherwise its level is 1 + the deepest level among its dependencies.
// Results are memoized in the outer `levels` Map so each package is
// computed once. A package revisited along the current path (cycle) is
// treated as level 0 to terminate recursion; `visited` is copied per
// branch so sibling subtrees do not poison each other's cycle detection.
// NOTE(review): restored from a diff-mangled listing — leading '+'
// markers and interleaved hunk numbers were stripped to yield valid JS;
// tokens are otherwise unchanged from the published dist output.
const calculateLevels = (pkg, visited = new Set()) => {
    if (levels.has(pkg))
        return levels.get(pkg);
    if (visited.has(pkg))
        return 0; // Circular dependency
    visited.add(pkg);
    const pkgInfo = dependencyGraph.packages.get(pkg);
    const deps = Array.isArray(pkgInfo?.dependencies) ? pkgInfo.dependencies : [];
    if (!pkgInfo || deps.length === 0) {
        levels.set(pkg, 0);
        return 0;
    }
    const maxDepLevel = Math.max(...deps.map((dep) => calculateLevels(dep, new Set(visited))));
    const level = maxDepLevel + 1;
    levels.set(pkg, level);
    return level;
};
2303
+ buildOrder.forEach(pkg => calculateLevels(pkg));
2304
+ const maxLevel = Math.max(...Array.from(levels.values()));
2305
+ logger.debug('MULTI_PROJECT_LEVELS: Dependency depth analysis | Max Depth: %d levels', maxLevel + 1);
2306
+ for (let level = 0; level <= maxLevel; level++) {
2307
+ const packagesAtLevel = buildOrder.filter(pkg => levels.get(pkg) === level);
2308
+ logger.debug(' Level %d (%d packages): %s', level, packagesAtLevel.length, packagesAtLevel.join(', '));
2309
+ }
2310
+ if (runConfig.tree?.parallel) {
2311
+ const os = await import('os');
2312
+ const concurrency = runConfig.tree.maxConcurrency || os.cpus().length;
2313
+ logger.debug('MULTI_PROJECT_PARALLEL: Parallel execution configuration | Max Concurrency: %d | Retry Attempts: %d', concurrency, runConfig.tree.retry?.maxAttempts || 3);
2314
+ }
2315
+ if (isContinue) {
2316
+ const completed = executionContext?.completedPackages.length || 0;
2317
+ logger.debug('MULTI_PROJECT_RESUME: Continuing previous execution | Completed: %d | Remaining: %d', completed, buildOrder.length - completed);
2318
+ }
2319
+ }
2320
+ // Show info for publish commands
2321
+ if (isBuiltInCommand && builtInCommand === 'publish') {
2322
+ logger.info('Inter-project dependencies will be automatically updated before each publish.');
2323
+ }
2324
+ let successCount = 0;
2325
+ let failedPackage = null;
2326
+ // If continuing, start from where we left off
2327
+ const startIndex = isContinue && executionContext ? executionContext.completedPackages.length : 0;
2328
+ // Check if parallel execution is enabled
2329
+ if (runConfig.tree?.parallel) {
2330
+ logger.info('🚀 Using parallel execution mode');
2331
+ // If dry run, show preview instead of executing
2332
+ if (isDryRun) {
2333
+ const preview = await generateDryRunPreview(dependencyGraph, buildOrder, commandToRun, runConfig);
2334
+ return preview;
2335
+ }
2336
+ // Import parallel execution components
2337
+ const { TreeExecutionAdapter, createParallelProgressLogger, formatParallelResult } = await import('./execution/TreeExecutionAdapter.js');
2338
+ const os = await import('os');
2339
+ // Create task pool
2340
+ const adapter = new TreeExecutionAdapter({
2341
+ graph: dependencyGraph,
2342
+ maxConcurrency: runConfig.tree.maxConcurrency || os.cpus().length,
2343
+ command: commandToRun,
2344
+ config: runConfig,
2345
+ checkpointPath: runConfig.outputDirectory,
2346
+ continue: isContinue,
2347
+ maxRetries: runConfig.tree.retry?.maxAttempts || 3,
2348
+ initialRetryDelay: runConfig.tree.retry?.initialDelayMs || 5000,
2349
+ maxRetryDelay: runConfig.tree.retry?.maxDelayMs || 60000,
2350
+ backoffMultiplier: runConfig.tree.retry?.backoffMultiplier || 2
2351
+ }, executePackage);
2352
+ // Set up progress logging
2353
+ createParallelProgressLogger(adapter.getPool(), runConfig);
2354
+ // Execute
2355
+ const result = await adapter.execute();
2356
+ // Format and return result
2357
+ const formattedResult = formatParallelResult(result);
2358
+ return formattedResult;
2359
+ }
2360
+ // Sequential execution
2361
+ const executionStartTime = Date.now();
2362
+ for (let i = startIndex; i < buildOrder.length; i++) {
2363
+ const packageName = buildOrder[i];
2364
+ // Skip if already completed (in continue mode)
2365
+ if (executionContext && executionContext.completedPackages.includes(packageName)) {
2366
+ successCount++;
2367
+ continue;
2368
+ }
2369
+ const packageInfo = dependencyGraph.packages.get(packageName);
2370
+ const packageLogger = createPackageLogger(packageName, i + 1, buildOrder.length, isDryRun);
2371
+ const result = await executePackage(packageName, packageInfo, commandToRun, runConfig, isDryRun, i, buildOrder.length, allPackageNames, isBuiltInCommand);
2372
+ if (result.success) {
2373
+ successCount++;
2374
+ // Update context
2375
+ if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
2376
+ executionContext.completedPackages.push(packageName);
2377
+ executionContext.publishedVersions = publishedVersions;
2378
+ executionContext.lastSuccessfulPackage = packageName;
2379
+ executionContext.lastUpdateTime = new Date();
2380
+ await saveExecutionContext(executionContext, runConfig.outputDirectory);
2381
+ }
2382
+ // Add spacing between packages (except after the last one)
2383
+ if (i < buildOrder.length - 1) {
2384
+ logger.info('');
2385
+ logger.info('');
2386
+ }
2387
+ }
2388
+ else {
2389
+ failedPackage = packageName;
2390
+ const formattedError = formatSubprojectError(packageName, result.error, packageInfo, i + 1, buildOrder.length);
2391
+ // Record failure in context
2392
+ if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
2393
+ executionContext.failedPackages.push({
2394
+ name: packageName,
2395
+ error: result.error || 'Unknown error',
2396
+ phase: 'execution'
2397
+ });
2398
+ executionContext.lastUpdateTime = new Date();
2399
+ await saveExecutionContext(executionContext, runConfig.outputDirectory);
2400
+ }
2401
+ if (!isDryRun) {
2402
+ packageLogger.error(`Execution failed`);
2403
+ logger.error(formattedError);
2404
+ logger.error(`Failed after ${successCount} successful packages.`);
2405
+ // Special handling for timeout errors
2406
+ if (result.isTimeoutError) {
2407
+ logger.error('');
2408
+ logger.error('⏰ TIMEOUT DETECTED: This appears to be a timeout error.');
2409
+ logger.error(' This commonly happens when PR checks take longer than expected.');
2410
+ logger.error(' The execution context has been saved for recovery.');
2411
+ logger.error('');
2412
+ // Save context even on timeout for recovery
2413
+ if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run')) {
2414
+ executionContext.completedPackages.push(packageName);
2415
+ executionContext.publishedVersions = publishedVersions;
2416
+ executionContext.lastUpdateTime = new Date();
2417
+ await saveExecutionContext(executionContext, runConfig.outputDirectory);
2418
+ logger.info('💾 Execution context saved for recovery.');
2419
+ }
2420
+ // For publish commands, provide specific guidance about CI/CD setup
2421
+ if (builtInCommand === 'publish') {
2422
+ logger.error('');
2423
+ logger.error('💡 PUBLISH TIMEOUT TROUBLESHOOTING:');
2424
+ logger.error(' This project may not have CI/CD workflows configured.');
2425
+ logger.error(' Common solutions:');
2426
+ logger.error(' 1. Set up GitHub Actions workflows for this repository');
2427
+ logger.error(' 2. Use --sendit flag to skip user confirmation:');
2428
+ logger.error(` kodrdriv tree publish --sendit`);
2429
+ logger.error(' 3. Or manually promote this package:');
2430
+ logger.error(` kodrdriv tree publish --promote ${packageName}`);
2431
+ logger.error('');
2432
+ }
2433
+ }
2434
+ logger.error(`To resume from this point, run:`);
2435
+ if (isBuiltInCommand) {
2436
+ logger.error(` kodrdriv tree ${builtInCommand} --continue`);
2437
+ }
2438
+ else {
2439
+ logger.error(` kodrdriv tree --continue --cmd "${commandToRun}"`);
2440
+ }
2441
+ // For timeout errors, provide additional recovery instructions
2442
+ if (result.isTimeoutError) {
2443
+ logger.error('');
2444
+ logger.error('🔧 RECOVERY OPTIONS:');
2445
+ if (builtInCommand === 'publish') {
2446
+ logger.error(' 1. Wait for the PR checks to complete, then run:');
2447
+ logger.error(` cd ${packageInfo.path}`);
2448
+ logger.error(` kodrdriv publish`);
2449
+ logger.error(' 2. After the individual publish completes, run:');
2450
+ logger.error(` kodrdriv tree ${builtInCommand} --continue`);
2451
+ }
2452
+ else {
2453
+ logger.error(' 1. Fix any issues in the package, then run:');
2454
+ logger.error(` cd ${packageInfo.path}`);
2455
+ logger.error(` ${commandToRun}`);
2456
+ logger.error(' 2. After the command completes successfully, run:');
2457
+ logger.error(` kodrdriv tree ${builtInCommand} --continue`);
2458
+ }
2459
+ logger.error(' 3. Or promote this package to completed status:');
2460
+ logger.error(` kodrdriv tree ${builtInCommand} --promote ${packageName}`);
2461
+ logger.error(' 4. Or manually edit .kodrdriv-context to mark this package as completed');
2462
+ }
2463
+ // Add clear error summary at the very end
2464
+ logger.error('');
2465
+ logger.error('📋 ERROR SUMMARY:');
2466
+ logger.error(` Project that failed: ${packageName}`);
2467
+ logger.error(` Location: ${packageInfo.path}`);
2468
+ logger.error(` Position in tree: ${i + 1} of ${buildOrder.length} packages`);
2469
+ logger.error(` What failed: ${result.error?.message || 'Unknown error'}`);
2470
+ logger.error('');
2471
+ throw new Error(`Command failed in package ${packageName}`);
2472
+ }
2473
+ break;
2474
+ }
2475
+ }
2476
+ if (!failedPackage) {
2477
+ const totalExecutionTime = Date.now() - executionStartTime;
2478
+ const totalSeconds = (totalExecutionTime / 1000).toFixed(1);
2479
+ const totalMinutes = (totalExecutionTime / 60000).toFixed(1);
2480
+ const timeDisplay = totalExecutionTime < 60000
2481
+ ? `${totalSeconds}s`
2482
+ : `${totalMinutes}min (${totalSeconds}s)`;
2483
+ logger.info('');
2484
+ logger.info('═══════════════════════════════════════════════════════════');
2485
+ const summary = `${isDryRun ? 'DRY RUN: ' : ''}All ${buildOrder.length} packages completed successfully! 🎉`;
2486
+ logger.info(summary);
2487
+ logger.info(`⏱️ Total execution time: ${timeDisplay}`);
2488
+ logger.info(`📦 Packages processed: ${successCount}/${buildOrder.length}`);
2489
+ logger.info('═══════════════════════════════════════════════════════════');
2490
+ logger.info('');
2491
+ // Clean up context on successful completion
2492
+ if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
2493
+ await cleanupContext(runConfig.outputDirectory);
2494
+ }
2495
+ return returnOutput; // Don't duplicate the summary in return string
2496
+ }
2497
+ }
2498
+ return returnOutput;
2499
+ }
2500
+ catch (error) {
2501
+ const errorMessage = `Failed to analyze workspace: ${error.message}`;
2502
+ logger.error(errorMessage);
2503
+ throw new Error(errorMessage);
2504
+ }
2505
+ finally {
2506
+ // Intentionally preserve the mutex across executions to support multiple runs in the same process (e.g., test suite)
2507
+ // Do not destroy here; the process lifecycle will clean up resources.
2508
+ }
2509
+ };
2510
+ //# sourceMappingURL=tree.js.map