@eldrforge/kodrdriv 1.2.134 → 1.2.137

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/.cursor/rules/no-local-dependencies.md +6 -0
  2. package/README.md +1 -0
  3. package/dist/application.js +32 -42
  4. package/dist/application.js.map +1 -1
  5. package/dist/arguments.js +3 -3
  6. package/dist/arguments.js.map +1 -1
  7. package/dist/constants.js +5 -7
  8. package/dist/constants.js.map +1 -1
  9. package/dist/logging.js +4 -32
  10. package/dist/logging.js.map +1 -1
  11. package/dist/types.js +1 -0
  12. package/dist/types.js.map +1 -1
  13. package/package.json +13 -8
  14. package/dist/commands/audio-commit.js +0 -152
  15. package/dist/commands/audio-commit.js.map +0 -1
  16. package/dist/commands/audio-review.js +0 -274
  17. package/dist/commands/audio-review.js.map +0 -1
  18. package/dist/commands/clean.js +0 -49
  19. package/dist/commands/clean.js.map +0 -1
  20. package/dist/commands/commit.js +0 -680
  21. package/dist/commands/commit.js.map +0 -1
  22. package/dist/commands/development.js +0 -467
  23. package/dist/commands/development.js.map +0 -1
  24. package/dist/commands/link.js +0 -646
  25. package/dist/commands/link.js.map +0 -1
  26. package/dist/commands/precommit.js +0 -99
  27. package/dist/commands/precommit.js.map +0 -1
  28. package/dist/commands/publish.js +0 -1432
  29. package/dist/commands/publish.js.map +0 -1
  30. package/dist/commands/release.js +0 -376
  31. package/dist/commands/release.js.map +0 -1
  32. package/dist/commands/review.js +0 -733
  33. package/dist/commands/review.js.map +0 -1
  34. package/dist/commands/select-audio.js +0 -46
  35. package/dist/commands/select-audio.js.map +0 -1
  36. package/dist/commands/tree.js +0 -2363
  37. package/dist/commands/tree.js.map +0 -1
  38. package/dist/commands/unlink.js +0 -537
  39. package/dist/commands/unlink.js.map +0 -1
  40. package/dist/commands/updates.js +0 -211
  41. package/dist/commands/updates.js.map +0 -1
  42. package/dist/commands/versions.js +0 -221
  43. package/dist/commands/versions.js.map +0 -1
  44. package/dist/content/diff.js +0 -346
  45. package/dist/content/diff.js.map +0 -1
  46. package/dist/content/files.js +0 -190
  47. package/dist/content/files.js.map +0 -1
  48. package/dist/content/log.js +0 -72
  49. package/dist/content/log.js.map +0 -1
  50. package/dist/util/aiAdapter.js +0 -28
  51. package/dist/util/aiAdapter.js.map +0 -1
  52. package/dist/util/fileLock.js +0 -241
  53. package/dist/util/fileLock.js.map +0 -1
  54. package/dist/util/general.js +0 -379
  55. package/dist/util/general.js.map +0 -1
  56. package/dist/util/gitMutex.js +0 -161
  57. package/dist/util/gitMutex.js.map +0 -1
  58. package/dist/util/interactive.js +0 -32
  59. package/dist/util/interactive.js.map +0 -1
  60. package/dist/util/loggerAdapter.js +0 -41
  61. package/dist/util/loggerAdapter.js.map +0 -1
  62. package/dist/util/performance.js +0 -134
  63. package/dist/util/performance.js.map +0 -1
  64. package/dist/util/precommitOptimizations.js +0 -310
  65. package/dist/util/precommitOptimizations.js.map +0 -1
  66. package/dist/util/stopContext.js +0 -146
  67. package/dist/util/stopContext.js.map +0 -1
  68. package/dist/util/storageAdapter.js +0 -31
  69. package/dist/util/storageAdapter.js.map +0 -1
  70. package/dist/util/validation.js +0 -45
  71. package/dist/util/validation.js.map +0 -1
  72. package/dist/utils/branchState.js +0 -700
  73. package/dist/utils/branchState.js.map +0 -1
@@ -1,2363 +0,0 @@
1
- #!/usr/bin/env node
2
- import path__default from 'path';
3
- import fs from 'fs/promises';
4
- import { exec } from 'child_process';
5
- import { safeJsonParse, validatePackageJson, getGloballyLinkedPackages, getGitStatusSummary, getLinkedDependencies, getLinkCompatibilityProblems, runSecure, run } from '@eldrforge/git-tools';
6
- import util from 'util';
7
- import { getLogger } from '../logging.js';
8
- import { createStorage } from '@eldrforge/shared';
9
- import { getOutputPath } from '../util/general.js';
10
- import { DEFAULT_OUTPUT_DIRECTORY } from '../constants.js';
11
- import { execute as execute$3 } from './commit.js';
12
- import { execute as execute$1 } from './link.js';
13
- import { execute as execute$2 } from './unlink.js';
14
- import { execute as execute$4 } from './updates.js';
15
- import { isInGitRepository, runGitWithLock } from '../util/gitMutex.js';
16
- import { scanForPackageJsonFiles, buildDependencyGraph, topologicalSort, parsePackageJson, shouldExclude } from '@eldrforge/tree-core';
17
- import { optimizePrecommitCommand, recordTestRun } from '../util/precommitOptimizations.js';
18
- import { PerformanceTimer } from '../util/performance.js';
19
- import { SimpleMutex } from '@eldrforge/tree-execution';
20
-
21
- // Global state to track published versions during tree execution - protected by mutex
22
- let publishedVersions = [];
23
- let executionContext = null;
24
- const globalStateMutex = new SimpleMutex();
25
- // Update inter-project dependencies in package.json based on published versions
26
- const updateInterProjectDependencies = async (packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun)=>{
27
- const storage = createStorage();
28
- const packageJsonPath = path__default.join(packageDir, 'package.json');
29
- if (!await storage.exists(packageJsonPath)) {
30
- packageLogger.verbose('No package.json found, skipping dependency updates');
31
- return false;
32
- }
33
- let hasChanges = false;
34
- try {
35
- const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
36
- const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
37
- const packageJson = validatePackageJson(parsed, packageJsonPath);
38
- const sectionsToUpdate = [
39
- 'dependencies',
40
- 'devDependencies',
41
- 'peerDependencies'
42
- ];
43
- for (const publishedVersion of publishedVersions){
44
- const { packageName, version } = publishedVersion;
45
- // Only update if this is an inter-project dependency (exists in our build tree)
46
- if (!allPackageNames.has(packageName)) {
47
- continue;
48
- }
49
- // Skip prerelease versions (e.g., 1.0.0-beta.1, 2.0.0-alpha.3)
50
- // Prerelease versions should not be automatically propagated to consumers
51
- if (version.includes('-')) {
52
- packageLogger.verbose(`Skipping prerelease version ${packageName}@${version} - not updating dependencies`);
53
- continue;
54
- }
55
- // Update the dependency in all relevant sections
56
- for (const section of sectionsToUpdate){
57
- const deps = packageJson[section];
58
- if (deps && deps[packageName]) {
59
- const oldVersion = deps[packageName];
60
- const newVersion = `^${version}`;
61
- if (oldVersion !== newVersion) {
62
- if (isDryRun) {
63
- packageLogger.info(`Would update ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
64
- } else {
65
- packageLogger.info(`Updating ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
66
- deps[packageName] = newVersion;
67
- }
68
- hasChanges = true;
69
- }
70
- }
71
- }
72
- }
73
- if (hasChanges && !isDryRun) {
74
- // Write updated package.json
75
- await storage.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
76
- packageLogger.info('Inter-project dependencies updated successfully');
77
- }
78
- } catch (error) {
79
- packageLogger.warn(`Failed to update inter-project dependencies: ${error.message}`);
80
- return false;
81
- }
82
- return hasChanges;
83
- };
84
- // Detect scoped dependencies from package.json and run updates for them
85
- const updateScopedDependencies = async (packageDir, packageLogger, isDryRun, runConfig)=>{
86
- const storage = createStorage();
87
- const packageJsonPath = path__default.join(packageDir, 'package.json');
88
- if (!await storage.exists(packageJsonPath)) {
89
- packageLogger.verbose('No package.json found, skipping scoped dependency updates');
90
- return false;
91
- }
92
- try {
93
- var _runConfig_publish;
94
- // Read the package.json before updates
95
- const beforeContent = await storage.readFile(packageJsonPath, 'utf-8');
96
- const parsed = safeJsonParse(beforeContent, packageJsonPath);
97
- const packageJson = validatePackageJson(parsed, packageJsonPath);
98
- // Determine which scopes to update
99
- let scopesToUpdate;
100
- // Check if scopedDependencyUpdates is configured
101
- const configuredScopes = (_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.scopedDependencyUpdates;
102
- if (configuredScopes !== undefined) {
103
- // scopedDependencyUpdates is explicitly configured
104
- if (configuredScopes.length > 0) {
105
- // Use configured scopes
106
- scopesToUpdate = new Set(configuredScopes);
107
- packageLogger.verbose(`Using configured scopes: ${Array.from(scopesToUpdate).join(', ')}`);
108
- } else {
109
- // Empty array means explicitly disabled
110
- packageLogger.verbose('Scoped dependency updates explicitly disabled');
111
- return false;
112
- }
113
- } else {
114
- // Not configured - use default behavior (package's own scope)
115
- scopesToUpdate = new Set();
116
- if (packageJson.name && packageJson.name.startsWith('@')) {
117
- const packageScope = packageJson.name.split('/')[0]; // e.g., "@fjell/core" -> "@fjell"
118
- scopesToUpdate.add(packageScope);
119
- packageLogger.verbose(`No scopes configured, defaulting to package's own scope: ${packageScope}`);
120
- } else {
121
- packageLogger.verbose('Package is not scoped and no scopes configured, skipping scoped dependency updates');
122
- return false;
123
- }
124
- }
125
- if (scopesToUpdate.size === 0) {
126
- packageLogger.verbose('No scopes to update, skipping updates');
127
- return false;
128
- }
129
- // Run updates for each scope
130
- for (const scope of scopesToUpdate){
131
- packageLogger.info(`🔄 Checking for ${scope} dependency updates before publish...`);
132
- try {
133
- // Create a config for the updates command with the scope
134
- const updatesConfig = {
135
- ...runConfig,
136
- dryRun: isDryRun,
137
- updates: {
138
- scope: scope
139
- }
140
- };
141
- await execute$4(updatesConfig);
142
- } catch (error) {
143
- // Don't fail the publish if updates fails, just warn
144
- packageLogger.warn(`Failed to update ${scope} dependencies: ${error.message}`);
145
- }
146
- }
147
- // Check if package.json was modified
148
- const afterContent = await storage.readFile(packageJsonPath, 'utf-8');
149
- const hasChanges = beforeContent !== afterContent;
150
- if (hasChanges) {
151
- packageLogger.info('✅ Scoped dependencies updated successfully');
152
- } else {
153
- packageLogger.info('No scoped dependency updates needed');
154
- }
155
- return hasChanges;
156
- } catch (error) {
157
- packageLogger.warn(`Failed to detect scoped dependencies: ${error.message}`);
158
- return false;
159
- }
160
- };
161
- // Get the context file path
162
- const getContextFilePath = (outputDirectory)=>{
163
- const outputDir = outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
164
- return getOutputPath(outputDir, '.kodrdriv-context');
165
- };
166
- // Save execution context to file
167
- const saveExecutionContext = async (context, outputDirectory)=>{
168
- const storage = createStorage(); // Silent storage for context operations
169
- const contextFilePath = getContextFilePath(outputDirectory);
170
- try {
171
- // Ensure output directory exists
172
- await storage.ensureDirectory(path__default.dirname(contextFilePath));
173
- // Save context with JSON serialization that handles dates
174
- const contextData = {
175
- ...context,
176
- startTime: context.startTime.toISOString(),
177
- lastUpdateTime: context.lastUpdateTime.toISOString(),
178
- publishedVersions: context.publishedVersions.map((v)=>({
179
- ...v,
180
- publishTime: v.publishTime.toISOString()
181
- }))
182
- };
183
- await storage.writeFile(contextFilePath, JSON.stringify(contextData, null, 2), 'utf-8');
184
- } catch (error) {
185
- // Don't fail the entire operation if context saving fails
186
- const logger = getLogger();
187
- logger.warn(`Warning: Failed to save execution context: ${error.message}`);
188
- }
189
- };
190
- // Load execution context from file
191
- const loadExecutionContext = async (outputDirectory)=>{
192
- const storage = createStorage(); // Silent storage for context operations
193
- const contextFilePath = getContextFilePath(outputDirectory);
194
- try {
195
- if (!await storage.exists(contextFilePath)) {
196
- return null;
197
- }
198
- const contextContent = await storage.readFile(contextFilePath, 'utf-8');
199
- const contextData = safeJsonParse(contextContent, contextFilePath);
200
- // Restore dates from ISO strings
201
- return {
202
- ...contextData,
203
- startTime: new Date(contextData.startTime),
204
- lastUpdateTime: new Date(contextData.lastUpdateTime),
205
- publishedVersions: contextData.publishedVersions.map((v)=>({
206
- ...v,
207
- publishTime: new Date(v.publishTime)
208
- }))
209
- };
210
- } catch (error) {
211
- const logger = getLogger();
212
- logger.warn(`Warning: Failed to load execution context: ${error.message}`);
213
- return null;
214
- }
215
- };
216
- // Clean up context file
217
- const cleanupContext = async (outputDirectory)=>{
218
- const storage = createStorage(); // Silent storage for context operations
219
- const contextFilePath = getContextFilePath(outputDirectory);
220
- try {
221
- if (await storage.exists(contextFilePath)) {
222
- await storage.deleteFile(contextFilePath);
223
- }
224
- } catch (error) {
225
- // Don't fail if cleanup fails
226
- const logger = getLogger();
227
- logger.warn(`Warning: Failed to cleanup execution context: ${error.message}`);
228
- }
229
- };
230
- // Helper function to promote a package to completed status in the context
231
- const promotePackageToCompleted = async (packageName, outputDirectory)=>{
232
- const storage = createStorage();
233
- const contextFilePath = getContextFilePath(outputDirectory);
234
- try {
235
- if (!await storage.exists(contextFilePath)) {
236
- return;
237
- }
238
- const contextContent = await storage.readFile(contextFilePath, 'utf-8');
239
- const contextData = safeJsonParse(contextContent, contextFilePath);
240
- // Restore dates from ISO strings
241
- const context = {
242
- ...contextData,
243
- startTime: new Date(contextData.startTime),
244
- lastUpdateTime: new Date(contextData.lastUpdateTime),
245
- publishedVersions: contextData.publishedVersions.map((v)=>({
246
- ...v,
247
- publishTime: new Date(v.publishTime)
248
- }))
249
- };
250
- // Add package to completed list if not already there
251
- if (!context.completedPackages.includes(packageName)) {
252
- context.completedPackages.push(packageName);
253
- context.lastUpdateTime = new Date();
254
- await saveExecutionContext(context, outputDirectory);
255
- }
256
- } catch (error) {
257
- const logger = getLogger();
258
- logger.warn(`Warning: Failed to promote package to completed: ${error.message}`);
259
- }
260
- };
261
- // Helper function to validate that all packages have the required scripts
262
- const validateScripts = async (packages, scripts)=>{
263
- const logger = getLogger();
264
- const missingScripts = new Map();
265
- const storage = createStorage();
266
- logger.debug(`Validating scripts: ${scripts.join(', ')}`);
267
- for (const [packageName, packageInfo] of packages){
268
- const packageJsonPath = path__default.join(packageInfo.path, 'package.json');
269
- const missingForPackage = [];
270
- try {
271
- const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
272
- const packageJson = safeJsonParse(packageJsonContent, packageJsonPath);
273
- const validated = validatePackageJson(packageJson, packageJsonPath);
274
- // Check if each required script exists
275
- for (const script of scripts){
276
- if (!validated.scripts || !validated.scripts[script]) {
277
- missingForPackage.push(script);
278
- }
279
- }
280
- if (missingForPackage.length > 0) {
281
- missingScripts.set(packageName, missingForPackage);
282
- logger.debug(`Package ${packageName} missing scripts: ${missingForPackage.join(', ')}`);
283
- }
284
- } catch (error) {
285
- logger.debug(`Error reading package.json for ${packageName}: ${error.message}`);
286
- // If we can't read the package.json, assume all scripts are missing
287
- missingScripts.set(packageName, scripts);
288
- }
289
- }
290
- const valid = missingScripts.size === 0;
291
- if (valid) {
292
- logger.info(`✅ All packages have the required scripts: ${scripts.join(', ')}`);
293
- } else {
294
- logger.error(`❌ Script validation failed. Missing scripts:`);
295
- for (const [packageName, missing] of missingScripts){
296
- logger.error(` ${packageName}: ${missing.join(', ')}`);
297
- }
298
- }
299
- return {
300
- valid,
301
- missingScripts
302
- };
303
- };
304
- // Extract published version from git tags after successful publish
305
- // After kodrdriv publish, the release version is captured in the git tag,
306
- // while package.json contains the next dev version
307
- const extractPublishedVersion = async (packageDir, packageLogger)=>{
308
- const storage = createStorage();
309
- const packageJsonPath = path__default.join(packageDir, 'package.json');
310
- try {
311
- // Get package name from package.json
312
- const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
313
- const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
314
- const packageJson = validatePackageJson(parsed, packageJsonPath);
315
- // Get the most recently created tag (by creation date, not version number)
316
- // This ensures we get the tag that was just created by the publish, not an older tag with a higher version
317
- const { stdout: tagOutput } = await run('git tag --sort=-creatordate', {
318
- cwd: packageDir
319
- });
320
- const tags = tagOutput.trim().split('\n').filter(Boolean);
321
- if (tags.length === 0) {
322
- packageLogger.warn('No git tags found after publish');
323
- return null;
324
- }
325
- // Get the most recently created tag (first in the list)
326
- const latestTag = tags[0];
327
- // Extract version from tag, handling various formats:
328
- // - v1.2.3 -> 1.2.3
329
- // - working/v1.2.3 -> 1.2.3
330
- // - main/v1.2.3 -> 1.2.3
331
- let version = latestTag;
332
- // If tag contains a slash (branch prefix), extract everything after it
333
- if (version.includes('/')) {
334
- version = version.split('/').pop() || version;
335
- }
336
- // Remove 'v' prefix if present
337
- if (version.startsWith('v')) {
338
- version = version.substring(1);
339
- }
340
- packageLogger.verbose(`Extracted published version from tag: ${latestTag} -> ${version}`);
341
- return {
342
- packageName: packageJson.name,
343
- version: version,
344
- publishTime: new Date()
345
- };
346
- } catch (error) {
347
- packageLogger.warn(`Failed to extract published version: ${error.message}`);
348
- return null;
349
- }
350
- };
351
- // Enhanced run function that can show output based on log level
352
- const runWithLogging = async (command, packageLogger, options = {}, showOutput = 'none', logFilePath)=>{
353
- const execPromise = util.promisify(exec);
354
- // Ensure encoding is set to 'utf8' to get string output instead of Buffer
355
- const execOptions = {
356
- encoding: 'utf8',
357
- ...options
358
- };
359
- if (showOutput === 'full') {
360
- packageLogger.debug(`Executing command: ${command}`);
361
- // Use info level to show on console in debug mode
362
- packageLogger.info(`🔧 Running: ${command}`);
363
- } else if (showOutput === 'minimal') {
364
- packageLogger.verbose(`Running: ${command}`);
365
- }
366
- // Helper to write to log file
367
- const writeToLogFile = async (content)=>{
368
- if (!logFilePath) return;
369
- try {
370
- const logDir = path__default.dirname(logFilePath);
371
- await fs.mkdir(logDir, {
372
- recursive: true
373
- });
374
- await fs.appendFile(logFilePath, content + '\n', 'utf-8');
375
- } catch (err) {
376
- packageLogger.warn(`Failed to write to log file ${logFilePath}: ${err.message}`);
377
- }
378
- };
379
- // Write command to log file
380
- if (logFilePath) {
381
- const timestamp = new Date().toISOString();
382
- await writeToLogFile(`[${timestamp}] Executing: ${command}\n`);
383
- }
384
- try {
385
- const result = await execPromise(command, execOptions);
386
- if (showOutput === 'full') {
387
- const stdout = String(result.stdout);
388
- const stderr = String(result.stderr);
389
- if (stdout.trim()) {
390
- packageLogger.debug('STDOUT:');
391
- packageLogger.debug(stdout);
392
- // Show on console using info level for immediate feedback
393
- packageLogger.info(`📤 STDOUT:`);
394
- stdout.split('\n').forEach((line)=>{
395
- if (line.trim()) packageLogger.info(`${line}`);
396
- });
397
- }
398
- if (stderr.trim()) {
399
- packageLogger.debug('STDERR:');
400
- packageLogger.debug(stderr);
401
- // Show on console using info level for immediate feedback
402
- packageLogger.info(`⚠️ STDERR:`);
403
- stderr.split('\n').forEach((line)=>{
404
- if (line.trim()) packageLogger.info(`${line}`);
405
- });
406
- }
407
- }
408
- // Write output to log file
409
- if (logFilePath) {
410
- const stdout = String(result.stdout);
411
- const stderr = String(result.stderr);
412
- if (stdout.trim()) {
413
- await writeToLogFile(`\n=== STDOUT ===\n${stdout}`);
414
- }
415
- if (stderr.trim()) {
416
- await writeToLogFile(`\n=== STDERR ===\n${stderr}`);
417
- }
418
- await writeToLogFile(`\n[${new Date().toISOString()}] Command completed successfully\n`);
419
- }
420
- // Ensure result is properly typed as strings
421
- return {
422
- stdout: String(result.stdout),
423
- stderr: String(result.stderr)
424
- };
425
- } catch (error) {
426
- // Always show error message
427
- packageLogger.error(`Command failed: ${command}`);
428
- // Always show stderr on failure (contains important error details like coverage failures)
429
- if (error.stderr && error.stderr.trim()) {
430
- packageLogger.error(`❌ STDERR:`);
431
- error.stderr.split('\n').forEach((line)=>{
432
- if (line.trim()) packageLogger.error(`${line}`);
433
- });
434
- }
435
- // Show stdout on failure if available (may contain error context)
436
- if (error.stdout && error.stdout.trim() && (showOutput === 'full' || showOutput === 'minimal')) {
437
- packageLogger.info(`📤 STDOUT:`);
438
- error.stdout.split('\n').forEach((line)=>{
439
- if (line.trim()) packageLogger.info(`${line}`);
440
- });
441
- }
442
- // Show full output in debug/verbose mode
443
- if (showOutput === 'full' || showOutput === 'minimal') {
444
- if (error.stdout && error.stdout.trim() && showOutput === 'full') {
445
- packageLogger.debug('STDOUT:');
446
- packageLogger.debug(error.stdout);
447
- }
448
- if (error.stderr && error.stderr.trim() && showOutput === 'full') {
449
- packageLogger.debug('STDERR:');
450
- packageLogger.debug(error.stderr);
451
- }
452
- }
453
- // Write error output to log file
454
- if (logFilePath) {
455
- await writeToLogFile(`\n[${new Date().toISOString()}] Command failed: ${error.message}`);
456
- if (error.stdout) {
457
- await writeToLogFile(`\n=== STDOUT ===\n${error.stdout}`);
458
- }
459
- if (error.stderr) {
460
- await writeToLogFile(`\n=== STDERR ===\n${error.stderr}`);
461
- }
462
- if (error.stack) {
463
- await writeToLogFile(`\n=== STACK TRACE ===\n${error.stack}`);
464
- }
465
- }
466
- throw error;
467
- }
468
- };
469
- // Create a package-scoped logger that prefixes all messages
470
- const createPackageLogger = (packageName, sequenceNumber, totalCount, isDryRun = false)=>{
471
- const baseLogger = getLogger();
472
- const prefix = `[${sequenceNumber}/${totalCount}] ${packageName}:`;
473
- const dryRunPrefix = isDryRun ? 'DRY RUN: ' : '';
474
- return {
475
- info: (message, ...args)=>baseLogger.info(`${dryRunPrefix}${prefix} ${message}`, ...args),
476
- warn: (message, ...args)=>baseLogger.warn(`${dryRunPrefix}${prefix} ${message}`, ...args),
477
- error: (message, ...args)=>baseLogger.error(`${dryRunPrefix}${prefix} ${message}`, ...args),
478
- debug: (message, ...args)=>baseLogger.debug(`${dryRunPrefix}${prefix} ${message}`, ...args),
479
- verbose: (message, ...args)=>baseLogger.verbose(`${dryRunPrefix}${prefix} ${message}`, ...args),
480
- silly: (message, ...args)=>baseLogger.silly(`${dryRunPrefix}${prefix} ${message}`, ...args)
481
- };
482
- };
483
- // Helper function to format subproject error output
484
- const formatSubprojectError = (packageName, error, _packageInfo, _position, _total)=>{
485
- const lines = [];
486
- lines.push(`❌ Command failed in package ${packageName}:`);
487
- // Format the main error message with indentation
488
- if (error.message) {
489
- const indentedMessage = error.message.split('\n').map((line)=>` ${line}`).join('\n');
490
- lines.push(indentedMessage);
491
- }
492
- // If there's stderr output, show it indented as well
493
- if (error.stderr && error.stderr.trim()) {
494
- lines.push(' STDERR:');
495
- const indentedStderr = error.stderr.split('\n').filter((line)=>line.trim()).map((line)=>` ${line}`).join('\n');
496
- lines.push(indentedStderr);
497
- }
498
- // If there's stdout output, show it indented as well
499
- if (error.stdout && error.stdout.trim()) {
500
- lines.push(' STDOUT:');
501
- const indentedStdout = error.stdout.split('\n').filter((line)=>line.trim()).map((line)=>` ${line}`).join('\n');
502
- lines.push(indentedStdout);
503
- }
504
- return lines.join('\n');
505
- };
506
- // Note: PackageInfo, DependencyGraph, scanForPackageJsonFiles, parsePackageJson,
507
- // buildDependencyGraph, and topologicalSort are now imported from ../util/dependencyGraph
508
- // Execute a single package and return execution result
509
- const executePackage = async (packageName, packageInfo, commandToRun, runConfig, isDryRun, index, total, allPackageNames, isBuiltInCommand = false)=>{
510
- const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
511
- const packageDir = packageInfo.path;
512
- const logger = getLogger();
513
- // Create log file path for publish commands
514
- let logFilePath;
515
- if (isBuiltInCommand && commandToRun.includes('publish')) {
516
- var _commandToRun_split_;
517
- const outputDir = runConfig.outputDirectory || 'output/kodrdriv';
518
- const timestamp = new Date().toISOString().replace(/[:.]/g, '-').replace('T', '_').split('.')[0];
519
- const commandName = ((_commandToRun_split_ = commandToRun.split(' ')[1]) === null || _commandToRun_split_ === void 0 ? void 0 : _commandToRun_split_.split(' ')[0]) || 'command';
520
- logFilePath = path__default.join(packageDir, outputDir, `${commandName}_${timestamp}.log`);
521
- }
522
- // Determine output level based on flags
523
- // For publish and commit commands, default to full output to show AI progress and other details
524
- // For other commands, require --verbose or --debug for output
525
- const isPublishCommand = isBuiltInCommand && commandToRun.includes('publish');
526
- const isCommitCommand = isBuiltInCommand && commandToRun.includes('commit');
527
- let showOutput = isPublishCommand || isCommitCommand ? 'full' : 'none';
528
- if (runConfig.debug) {
529
- showOutput = 'full';
530
- } else if (runConfig.verbose) {
531
- showOutput = 'minimal';
532
- }
533
- // Show package start info - always visible for progress tracking
534
- if (runConfig.debug) {
535
- packageLogger.debug('MULTI_PROJECT_START: Starting package execution | Package: %s | Index: %d/%d | Path: %s | Command: %s | Context: tree execution', packageName, index + 1, total, packageDir, commandToRun);
536
- packageLogger.debug('MULTI_PROJECT_CONTEXT: Execution details | Directory: %s | Built-in Command: %s | Dry Run: %s | Output Level: %s', packageDir, isBuiltInCommand, isDryRun, showOutput);
537
- // Show dependencies if available
538
- if (packageInfo.dependencies && Array.isArray(packageInfo.dependencies) && packageInfo.dependencies.length > 0) {
539
- packageLogger.debug('MULTI_PROJECT_DEPS: Package dependencies | Package: %s | Dependencies: [%s]', packageName, packageInfo.dependencies.join(', '));
540
- }
541
- } else if (runConfig.verbose) {
542
- packageLogger.verbose(`Starting execution in ${packageDir}`);
543
- } else {
544
- // Basic progress info even without flags
545
- logger.info(`[${index + 1}/${total}] ${packageName}: Running ${commandToRun}...`);
546
- }
547
- // Track if publish was skipped due to no changes
548
- let publishWasSkipped = false;
549
- // Track execution timing
550
- const executionTimer = PerformanceTimer.start(packageLogger, `Package ${packageName} execution`);
551
- let executionDuration;
552
- try {
553
- if (isDryRun && !isBuiltInCommand) {
554
- // Handle inter-project dependency updates for publish commands in dry run mode
555
- if (isBuiltInCommand && commandToRun.includes('publish') && publishedVersions.length > 0) {
556
- let mutexLocked = false;
557
- try {
558
- await globalStateMutex.lock();
559
- mutexLocked = true;
560
- packageLogger.info('Would check for inter-project dependency updates before publish...');
561
- const versionSnapshot = [
562
- ...publishedVersions
563
- ]; // Create safe copy
564
- globalStateMutex.unlock();
565
- mutexLocked = false;
566
- await updateInterProjectDependencies(packageDir, versionSnapshot, allPackageNames, packageLogger, isDryRun);
567
- } catch (error) {
568
- if (mutexLocked) {
569
- globalStateMutex.unlock();
570
- }
571
- throw error;
572
- }
573
- }
574
- // Use main logger for the specific message tests expect
575
- logger.info(`DRY RUN: Would execute: ${commandToRun}`);
576
- if (runConfig.debug || runConfig.verbose) {
577
- packageLogger.info(`In directory: ${packageDir}`);
578
- }
579
- } else {
580
- // Change to the package directory and run the command
581
- const originalCwd = process.cwd();
582
- try {
583
- // Validate package directory exists before changing to it
584
- try {
585
- await fs.access(packageDir);
586
- const stat = await fs.stat(packageDir);
587
- if (!stat.isDirectory()) {
588
- throw new Error(`Path is not a directory: ${packageDir}`);
589
- }
590
- } catch (accessError) {
591
- throw new Error(`Cannot access package directory: ${packageDir} - ${accessError.message}`);
592
- }
593
- process.chdir(packageDir);
594
- if (runConfig.debug) {
595
- packageLogger.debug(`Changed to directory: ${packageDir}`);
596
- }
597
- // Handle dependency updates for publish commands before executing (skip during dry run)
598
- // Wrap in git lock to prevent parallel packages from conflicting with npm install and git operations
599
- if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
600
- await runGitWithLock(packageDir, async ()=>{
601
- let hasAnyUpdates = false;
602
- // First, update all scoped dependencies from npm registry
603
- const hasScopedUpdates = await updateScopedDependencies(packageDir, packageLogger, isDryRun, runConfig);
604
- hasAnyUpdates = hasAnyUpdates || hasScopedUpdates;
605
- // Then update inter-project dependencies based on previously published packages
606
- if (publishedVersions.length > 0) {
607
- packageLogger.info('Updating inter-project dependencies based on previously published packages...');
608
- const hasInterProjectUpdates = await updateInterProjectDependencies(packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun);
609
- hasAnyUpdates = hasAnyUpdates || hasInterProjectUpdates;
610
- }
611
- // If either type of update occurred, commit the changes
612
- if (hasAnyUpdates) {
613
- // Commit the dependency updates using kodrdriv commit
614
- packageLogger.info('Committing dependency updates...');
615
- packageLogger.info('⏱️ This step may take a few minutes as it generates a commit message using AI...');
616
- // Add timeout wrapper around commit execution
617
- const commitTimeoutMs = 300000; // 5 minutes
618
- const commitPromise = execute$3({
619
- ...runConfig,
620
- dryRun: false
621
- });
622
- const timeoutPromise = new Promise((_, reject)=>{
623
- setTimeout(()=>reject(new Error(`Commit operation timed out after ${commitTimeoutMs / 1000} seconds`)), commitTimeoutMs);
624
- });
625
- // Add progress indicator
626
- let progressInterval = null;
627
- try {
628
- // Start progress indicator
629
- progressInterval = setInterval(()=>{
630
- packageLogger.info('⏳ Still generating commit message... (this can take 1-3 minutes)');
631
- }, 30000); // Every 30 seconds
632
- await Promise.race([
633
- commitPromise,
634
- timeoutPromise
635
- ]);
636
- packageLogger.info('✅ Dependency updates committed successfully');
637
- } catch (commitError) {
638
- if (commitError.message.includes('timed out')) {
639
- packageLogger.error(`❌ Commit operation timed out after ${commitTimeoutMs / 1000} seconds`);
640
- packageLogger.error('This usually indicates an issue with the AI service or very large changes');
641
- packageLogger.error('You may need to manually commit the dependency updates');
642
- } else {
643
- packageLogger.warn(`Failed to commit dependency updates: ${commitError.message}`);
644
- }
645
- // Continue with publish anyway - the updates are still in place
646
- } finally{
647
- if (progressInterval) {
648
- clearInterval(progressInterval);
649
- }
650
- }
651
- }
652
- }, `${packageName}: dependency updates`);
653
- }
654
- // Optimize precommit commands for custom commands (not built-in)
655
- let effectiveCommandToRun = commandToRun;
656
- let optimizationInfo = null;
657
- if (!isBuiltInCommand && !isDryRun) {
658
- const isPrecommitCommand = commandToRun.includes('precommit') || commandToRun.includes('pre-commit');
659
- if (isPrecommitCommand) {
660
- try {
661
- const optimization = await optimizePrecommitCommand(packageDir, commandToRun);
662
- effectiveCommandToRun = optimization.optimizedCommand;
663
- optimizationInfo = {
664
- skipped: optimization.skipped,
665
- reasons: optimization.reasons
666
- };
667
- if (optimization.skipped.clean || optimization.skipped.test) {
668
- const skippedParts = [];
669
- if (optimization.skipped.clean) {
670
- skippedParts.push(`clean (${optimization.reasons.clean})`);
671
- }
672
- if (optimization.skipped.test) {
673
- skippedParts.push(`test (${optimization.reasons.test})`);
674
- }
675
- packageLogger.info(`⚡ Optimized: Skipped ${skippedParts.join(', ')}`);
676
- if (runConfig.verbose || runConfig.debug) {
677
- packageLogger.info(` Original: ${commandToRun}`);
678
- packageLogger.info(` Optimized: ${effectiveCommandToRun}`);
679
- }
680
- }
681
- } catch (error) {
682
- // If optimization fails, fall back to original command
683
- logger.debug(`Precommit optimization failed for ${packageName}: ${error.message}`);
684
- }
685
- }
686
- }
687
- if (runConfig.debug || runConfig.verbose) {
688
- if (isBuiltInCommand) {
689
- packageLogger.info(`Executing built-in command: ${commandToRun}`);
690
- } else {
691
- packageLogger.info(`Executing command: ${effectiveCommandToRun}`);
692
- }
693
- }
694
- // For built-in commands, shell out to a separate kodrdriv process
695
- // This preserves individual project configurations
696
- if (isBuiltInCommand) {
697
- // Extract the command name from "kodrdriv <command> [args...]"
698
- // Split by space and take the second element (after "kodrdriv")
699
- const commandParts = commandToRun.replace(/^kodrdriv\s+/, '').split(/\s+/);
700
- const builtInCommandName = commandParts[0];
701
- if (runConfig.debug) {
702
- packageLogger.debug(`Shelling out to separate kodrdriv process for ${builtInCommandName} command`);
703
- }
704
- // Add progress indication for publish commands
705
- if (builtInCommandName === 'publish') {
706
- packageLogger.info('🚀 Starting publish process...');
707
- packageLogger.info('⏱️ This may take several minutes (AI processing, PR creation, etc.)');
708
- }
709
- // Ensure dry-run propagates to subprocess even during overall dry-run mode
710
- let effectiveCommand = runConfig.dryRun && !commandToRun.includes('--dry-run') ? `${commandToRun} --dry-run` : commandToRun;
711
- // For commit commands, ensure --sendit is used to avoid interactive prompts
712
- // This prevents hanging when running via tree command
713
- if (builtInCommandName === 'commit' && !effectiveCommand.includes('--sendit') && !runConfig.dryRun) {
714
- effectiveCommand = `${effectiveCommand} --sendit`;
715
- packageLogger.info('💡 Auto-adding --sendit flag to avoid interactive prompts in tree mode');
716
- }
717
- // Set timeout based on command type
718
- let commandTimeoutMs;
719
- if (builtInCommandName === 'publish') {
720
- commandTimeoutMs = 1800000; // 30 minutes for publish commands
721
- packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for publish command`);
722
- } else if (builtInCommandName === 'commit') {
723
- commandTimeoutMs = 600000; // 10 minutes for commit commands (AI processing can take time)
724
- packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for commit command`);
725
- } else {
726
- commandTimeoutMs = 300000; // 5 minutes default for other commands
727
- }
728
- const commandPromise = runWithLogging(effectiveCommand, packageLogger, {}, showOutput, logFilePath);
729
- const commandTimeoutPromise = new Promise((_, reject)=>{
730
- setTimeout(()=>reject(new Error(`Command timed out after ${commandTimeoutMs / 60000} minutes`)), commandTimeoutMs);
731
- });
732
- try {
733
- const startTime = Date.now();
734
- const { stdout, stderr } = await Promise.race([
735
- commandPromise,
736
- commandTimeoutPromise
737
- ]);
738
- executionDuration = Date.now() - startTime;
739
- // Detect explicit skip marker from publish to avoid propagating versions
740
- // Check both stdout (where we now write it) and stderr (winston logger output, for backward compat)
741
- if (builtInCommandName === 'publish' && (stdout && stdout.includes('KODRDRIV_PUBLISH_SKIPPED') || stderr && stderr.includes('KODRDRIV_PUBLISH_SKIPPED'))) {
742
- packageLogger.info('Publish skipped for this package; will not record or propagate a version.');
743
- publishWasSkipped = true;
744
- }
745
- } catch (error) {
746
- if (error.message.includes('timed out')) {
747
- packageLogger.error(`❌ ${builtInCommandName} command timed out after ${commandTimeoutMs / 60000} minutes`);
748
- packageLogger.error('This usually indicates the command is stuck waiting for user input or an external service');
749
- throw error;
750
- }
751
- throw error;
752
- }
753
- } else {
754
- // For custom commands, use the existing logic
755
- const startTime = Date.now();
756
- await runWithLogging(effectiveCommandToRun, packageLogger, {}, showOutput, logFilePath);
757
- executionDuration = Date.now() - startTime;
758
- }
759
- // Track published version after successful publish (skip during dry run)
760
- if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
761
- // If publish was skipped, do not record a version
762
- if (publishWasSkipped) {
763
- packageLogger.verbose('Skipping version tracking due to earlier skip.');
764
- } else {
765
- // Only record a published version if a new tag exists (avoid recording for skipped publishes)
766
- const publishedVersion = await extractPublishedVersion(packageDir, packageLogger);
767
- if (publishedVersion) {
768
- let mutexLocked = false;
769
- try {
770
- await globalStateMutex.lock();
771
- mutexLocked = true;
772
- publishedVersions.push(publishedVersion);
773
- packageLogger.info(`Tracked published version: ${publishedVersion.packageName}@${publishedVersion.version}`);
774
- globalStateMutex.unlock();
775
- mutexLocked = false;
776
- } catch (error) {
777
- if (mutexLocked) {
778
- globalStateMutex.unlock();
779
- }
780
- throw error;
781
- }
782
- }
783
- }
784
- }
785
- // Record test run if tests were executed (not skipped)
786
- if (!isDryRun && !isBuiltInCommand && effectiveCommandToRun.includes('test') && (!optimizationInfo || !optimizationInfo.skipped.test)) {
787
- try {
788
- await recordTestRun(packageDir);
789
- } catch (error) {
790
- logger.debug(`Failed to record test run for ${packageName}: ${error.message}`);
791
- }
792
- }
793
- // End timing and show duration
794
- if (executionDuration !== undefined) {
795
- executionTimer.end(`Package ${packageName} execution`);
796
- const seconds = (executionDuration / 1000).toFixed(1);
797
- if (runConfig.debug || runConfig.verbose) {
798
- packageLogger.info(`⏱️ Execution time: ${seconds}s`);
799
- } else if (!isPublishCommand && !isCommitCommand) {
800
- // Show timing in completion message (publish/commit commands have their own completion message)
801
- logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed (${seconds}s)`);
802
- }
803
- } else {
804
- executionTimer.end(`Package ${packageName} execution`);
805
- if (runConfig.debug || runConfig.verbose) {
806
- packageLogger.info(`Command completed successfully`);
807
- } else if (!isPublishCommand && !isCommitCommand) {
808
- // Basic completion info (publish/commit commands have their own completion message)
809
- logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed`);
810
- }
811
- }
812
- } finally{
813
- // Safely restore working directory
814
- try {
815
- // Validate original directory still exists before changing back
816
- const fs = await import('fs/promises');
817
- await fs.access(originalCwd);
818
- process.chdir(originalCwd);
819
- if (runConfig.debug) {
820
- packageLogger.debug(`Restored working directory to: ${originalCwd}`);
821
- }
822
- } catch (restoreError) {
823
- // If we can't restore to original directory, at least log the issue
824
- packageLogger.error(`Failed to restore working directory to ${originalCwd}: ${restoreError.message}`);
825
- packageLogger.error(`Current working directory is now: ${process.cwd()}`);
826
- // Don't throw here to avoid masking the original error
827
- }
828
- }
829
- }
830
- // Show completion status (for publish/commit commands, this supplements the timing message above)
831
- if (runConfig.debug || runConfig.verbose) {
832
- if (publishWasSkipped) {
833
- packageLogger.info(`⊘ Skipped (no code changes)`);
834
- } else {
835
- packageLogger.info(`✅ Completed successfully`);
836
- }
837
- } else if (isPublishCommand || isCommitCommand) {
838
- // For publish/commit commands, always show completion even without verbose
839
- // Include timing if available
840
- const timeStr = executionDuration !== undefined ? ` (${(executionDuration / 1000).toFixed(1)}s)` : '';
841
- if (publishWasSkipped) {
842
- logger.info(`[${index + 1}/${total}] ${packageName}: ⊘ Skipped (no code changes)`);
843
- } else {
844
- logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed${timeStr}`);
845
- }
846
- }
847
- // Ensure timing is recorded even if there was an early return
848
- if (executionDuration === undefined) {
849
- executionDuration = executionTimer.end(`Package ${packageName} execution`);
850
- }
851
- return {
852
- success: true,
853
- skippedNoChanges: publishWasSkipped,
854
- logFile: logFilePath
855
- };
856
- } catch (error) {
857
- var _error_message;
858
- // Record timing even on error
859
- if (executionDuration === undefined) {
860
- executionDuration = executionTimer.end(`Package ${packageName} execution`);
861
- const seconds = (executionDuration / 1000).toFixed(1);
862
- if (runConfig.debug || runConfig.verbose) {
863
- packageLogger.error(`⏱️ Execution time before failure: ${seconds}s`);
864
- }
865
- }
866
- if (runConfig.debug || runConfig.verbose) {
867
- packageLogger.error(`❌ Execution failed: ${error.message}`);
868
- } else {
869
- logger.error(`[${index + 1}/${total}] ${packageName}: ❌ Failed - ${error.message}`);
870
- }
871
- // Always show stderr if available (contains important error details)
872
- // Note: runWithLogging already logs stderr, but we show it here too for visibility
873
- // when error is caught at this level (e.g., from timeout wrapper)
874
- if (error.stderr && error.stderr.trim() && !runConfig.debug && !runConfig.verbose) {
875
- // Extract key error lines from stderr (coverage failures, test failures, etc.)
876
- const stderrLines = error.stderr.split('\n').filter((line)=>{
877
- const trimmed = line.trim();
878
- return trimmed && (trimmed.includes('ERROR:') || trimmed.includes('FAIL') || trimmed.includes('coverage') || trimmed.includes('threshold') || trimmed.includes('fatal:') || trimmed.startsWith('❌'));
879
- });
880
- if (stderrLines.length > 0) {
881
- logger.error(` Error details:`);
882
- stderrLines.slice(0, 10).forEach((line)=>{
883
- logger.error(` ${line.trim()}`);
884
- });
885
- if (stderrLines.length > 10) {
886
- logger.error(` ... and ${stderrLines.length - 10} more error lines (use --verbose to see full output)`);
887
- }
888
- }
889
- }
890
- // Check if this is a timeout error
891
- const errorMessage = ((_error_message = error.message) === null || _error_message === void 0 ? void 0 : _error_message.toLowerCase()) || '';
892
- const isTimeoutError = errorMessage && (errorMessage.includes('timeout waiting for pr') || errorMessage.includes('timeout waiting for release workflows') || errorMessage.includes('timeout reached') || errorMessage.includes('timeout') || errorMessage.includes('timed out') || errorMessage.includes('timed_out'));
893
- return {
894
- success: false,
895
- error,
896
- isTimeoutError,
897
- logFile: logFilePath
898
- };
899
- }
900
- };
901
- /**
902
- * Generate a dry-run preview showing what would happen without executing
903
- */ const generateDryRunPreview = async (dependencyGraph, buildOrder, command, runConfig)=>{
904
- var _runConfig_tree;
905
- const lines = [];
906
- lines.push('');
907
- lines.push('🔍 DRY RUN MODE - No changes will be made');
908
- lines.push('');
909
- lines.push('Build order determined:');
910
- lines.push('');
911
- // Group packages by dependency level
912
- const levels = [];
913
- const packageLevels = new Map();
914
- for (const pkg of buildOrder){
915
- const deps = dependencyGraph.edges.get(pkg) || new Set();
916
- let maxDepLevel = -1;
917
- for (const dep of deps){
918
- var _packageLevels_get;
919
- const depLevel = (_packageLevels_get = packageLevels.get(dep)) !== null && _packageLevels_get !== void 0 ? _packageLevels_get : 0;
920
- maxDepLevel = Math.max(maxDepLevel, depLevel);
921
- }
922
- const pkgLevel = maxDepLevel + 1;
923
- packageLevels.set(pkg, pkgLevel);
924
- if (!levels[pkgLevel]) {
925
- levels[pkgLevel] = [];
926
- }
927
- levels[pkgLevel].push(pkg);
928
- }
929
- // Show packages grouped by level
930
- for(let i = 0; i < levels.length; i++){
931
- const levelPackages = levels[i];
932
- lines.push(`Level ${i + 1}: (${levelPackages.length} package${levelPackages.length === 1 ? '' : 's'})`);
933
- for (const pkg of levelPackages){
934
- const pkgInfo = dependencyGraph.packages.get(pkg);
935
- if (!pkgInfo) continue;
936
- // Check if package has changes (for publish command)
937
- const isPublish = command.includes('publish');
938
- let status = '📝 Has changes, will execute';
939
- if (isPublish) {
940
- try {
941
- // Check git diff to see if there are code changes
942
- const { stdout } = await runSecure('git', [
943
- 'diff',
944
- '--name-only',
945
- 'origin/main...HEAD'
946
- ], {
947
- cwd: pkgInfo.path
948
- });
949
- const changedFiles = stdout.split('\n').filter(Boolean);
950
- const nonVersionFiles = changedFiles.filter((f)=>f !== 'package.json' && f !== 'package-lock.json');
951
- if (changedFiles.length === 0) {
952
- status = '⊘ No changes, will skip';
953
- } else if (nonVersionFiles.length === 0) {
954
- status = '⊘ Only version bump, will skip';
955
- } else {
956
- status = `📝 Has changes (${nonVersionFiles.length} files), will publish`;
957
- }
958
- } catch {
959
- // If we can't check git status, assume changes
960
- status = '📝 Will execute';
961
- }
962
- }
963
- lines.push(` ${pkg}`);
964
- lines.push(` Status: ${status}`);
965
- lines.push(` Path: ${pkgInfo.path}`);
966
- }
967
- lines.push('');
968
- }
969
- lines.push('Summary:');
970
- lines.push(` Total packages: ${buildOrder.length}`);
971
- lines.push(` Dependency levels: ${levels.length}`);
972
- lines.push(` Command: ${command}`);
973
- if ((_runConfig_tree = runConfig.tree) === null || _runConfig_tree === void 0 ? void 0 : _runConfig_tree.maxConcurrency) {
974
- lines.push(` Max concurrency: ${runConfig.tree.maxConcurrency}`);
975
- }
976
- lines.push('');
977
- lines.push('To execute for real, run the same command without --dry-run');
978
- lines.push('');
979
- return lines.join('\n');
980
- };
981
- // Add a simple status check function
982
- const checkTreePublishStatus = async ()=>{
983
- const logger = getLogger();
984
- try {
985
- // Check for running kodrdriv processes
986
- const { stdout } = await runSecure('ps', [
987
- 'aux'
988
- ], {});
989
- const kodrdrivProcesses = stdout.split('\n').filter((line)=>line.includes('kodrdriv') && !line.includes('grep') && !line.includes('ps aux') && !line.includes('tree --status') // Exclude the current status command
990
- );
991
- if (kodrdrivProcesses.length > 0) {
992
- logger.info('🔍 Found running kodrdriv processes:');
993
- kodrdrivProcesses.forEach((process1)=>{
994
- const parts = process1.trim().split(/\s+/);
995
- const pid = parts[1];
996
- const command = parts.slice(10).join(' ');
997
- logger.info(` PID ${pid}: ${command}`);
998
- });
999
- } else {
1000
- logger.info('No kodrdriv processes currently running');
1001
- }
1002
- } catch (error) {
1003
- logger.warn('Could not check process status:', error);
1004
- }
1005
- };
1006
- const execute = async (runConfig)=>{
1007
- var _runConfig_tree, _runConfig_tree1, _runConfig_tree2, _runConfig_tree3, _runConfig_tree4, _runConfig_tree5, _runConfig_tree6, _runConfig_tree7, _runConfig_tree8, _runConfig_tree9, _runConfig_tree10, _runConfig_tree11, _runConfig_tree12, _runConfig_tree13, _runConfig_tree14, _runConfig_tree15;
1008
- const logger = getLogger();
1009
- const isDryRun = runConfig.dryRun || false;
1010
- const isContinue = ((_runConfig_tree = runConfig.tree) === null || _runConfig_tree === void 0 ? void 0 : _runConfig_tree.continue) || false;
1011
- const promotePackage = (_runConfig_tree1 = runConfig.tree) === null || _runConfig_tree1 === void 0 ? void 0 : _runConfig_tree1.promote;
1012
- // Debug logging
1013
- logger.debug('Tree config:', JSON.stringify(runConfig.tree, null, 2));
1014
- logger.debug('Status flag:', (_runConfig_tree2 = runConfig.tree) === null || _runConfig_tree2 === void 0 ? void 0 : _runConfig_tree2.status);
1015
- logger.debug('Full runConfig:', JSON.stringify(runConfig, null, 2));
1016
- // Handle status check
1017
- if ((_runConfig_tree3 = runConfig.tree) === null || _runConfig_tree3 === void 0 ? void 0 : _runConfig_tree3.status) {
1018
- logger.info('🔍 Checking for running kodrdriv processes...');
1019
- await checkTreePublishStatus();
1020
- return 'Status check completed';
1021
- }
1022
- // Handle promote mode
1023
- if (promotePackage) {
1024
- logger.info(`Promoting package '${promotePackage}' to completed status...`);
1025
- await promotePackageToCompleted(promotePackage, runConfig.outputDirectory);
1026
- logger.info(`✅ Package '${promotePackage}' has been marked as completed.`);
1027
- logger.info('You can now run the tree command with --continue to resume from the next package.');
1028
- return `Package '${promotePackage}' promoted to completed status.`;
1029
- }
1030
- // Handle audit-branches command
1031
- if ((_runConfig_tree4 = runConfig.tree) === null || _runConfig_tree4 === void 0 ? void 0 : _runConfig_tree4.auditBranches) {
1032
- var _runConfig_tree16, _runConfig_tree17, _runConfig_publish, _runConfig_tree18;
1033
- logger.info('🔍 Auditing branch state across all packages...');
1034
- const directories = ((_runConfig_tree16 = runConfig.tree) === null || _runConfig_tree16 === void 0 ? void 0 : _runConfig_tree16.directories) || [
1035
- process.cwd()
1036
- ];
1037
- const excludedPatterns = ((_runConfig_tree17 = runConfig.tree) === null || _runConfig_tree17 === void 0 ? void 0 : _runConfig_tree17.exclude) || [];
1038
- let allPackageJsonPaths = [];
1039
- for (const targetDirectory of directories){
1040
- const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1041
- allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1042
- }
1043
- if (allPackageJsonPaths.length === 0) {
1044
- return 'No packages found';
1045
- }
1046
- const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1047
- const packages = Array.from(dependencyGraph.packages.values()).map((pkg)=>({
1048
- name: pkg.name,
1049
- path: pkg.path
1050
- }));
1051
- const { auditBranchState, formatAuditResults } = await import('../utils/branchState.js');
1052
- const { getRemoteDefaultBranch } = await import('@eldrforge/git-tools');
1053
- // For publish workflows, check branch consistency, merge conflicts, and existing PRs
1054
- // Don't pass an expected branch - let the audit find the most common branch
1055
- let targetBranch = (_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.targetBranch;
1056
- if (!targetBranch) {
1057
- // Try to detect default branch from the first package that is a git repo
1058
- const firstGitPkg = packages.find((pkg)=>isInGitRepository(pkg.path));
1059
- if (firstGitPkg) {
1060
- try {
1061
- // Cast to any to avoid type mismatch with node_modules version
1062
- targetBranch = await getRemoteDefaultBranch(firstGitPkg.path) || 'main';
1063
- } catch {
1064
- targetBranch = 'main';
1065
- }
1066
- } else {
1067
- targetBranch = 'main';
1068
- }
1069
- }
1070
- logger.info(`Checking for merge conflicts with '${targetBranch}' and existing pull requests...`);
1071
- const auditResult = await auditBranchState(packages, undefined, {
1072
- targetBranch,
1073
- checkPR: true,
1074
- checkConflicts: true,
1075
- concurrency: ((_runConfig_tree18 = runConfig.tree) === null || _runConfig_tree18 === void 0 ? void 0 : _runConfig_tree18.maxConcurrency) || 10
1076
- });
1077
- const formatted = formatAuditResults(auditResult);
1078
- logger.info('\n' + formatted);
1079
- if (auditResult.issuesFound > 0) {
1080
- logger.warn(`\n⚠️ Found issues in ${auditResult.issuesFound} package(s). Review the fixes above.`);
1081
- return `Branch audit complete: ${auditResult.issuesFound} package(s) need attention`;
1082
- }
1083
- logger.info(`\n✅ All ${auditResult.goodPackages} package(s) are in good state!`);
1084
- return `Branch audit complete: All packages OK`;
1085
- }
1086
- // Handle parallel execution recovery commands
1087
- const { loadRecoveryManager } = await import('@eldrforge/tree-execution');
1088
- // Handle status-parallel command
1089
- if ((_runConfig_tree5 = runConfig.tree) === null || _runConfig_tree5 === void 0 ? void 0 : _runConfig_tree5.statusParallel) {
1090
- var _runConfig_tree19, _runConfig_tree20;
1091
- logger.info('📊 Checking parallel execution status...');
1092
- // Need to build dependency graph first
1093
- const directories = ((_runConfig_tree19 = runConfig.tree) === null || _runConfig_tree19 === void 0 ? void 0 : _runConfig_tree19.directories) || [
1094
- process.cwd()
1095
- ];
1096
- const excludedPatterns = ((_runConfig_tree20 = runConfig.tree) === null || _runConfig_tree20 === void 0 ? void 0 : _runConfig_tree20.exclude) || [];
1097
- let allPackageJsonPaths = [];
1098
- for (const targetDirectory of directories){
1099
- const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1100
- allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1101
- }
1102
- if (allPackageJsonPaths.length === 0) {
1103
- return 'No packages found';
1104
- }
1105
- const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1106
- const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
1107
- if (!recoveryManager) {
1108
- logger.info('No parallel execution checkpoint found');
1109
- return 'No active parallel execution found';
1110
- }
1111
- const status = await recoveryManager.showStatus();
1112
- logger.info('\n' + status);
1113
- return status;
1114
- }
1115
- // Handle validate-state command
1116
- if ((_runConfig_tree6 = runConfig.tree) === null || _runConfig_tree6 === void 0 ? void 0 : _runConfig_tree6.validateState) {
1117
- var _runConfig_tree21, _runConfig_tree22;
1118
- logger.info('🔍 Validating checkpoint state...');
1119
- const directories = ((_runConfig_tree21 = runConfig.tree) === null || _runConfig_tree21 === void 0 ? void 0 : _runConfig_tree21.directories) || [
1120
- process.cwd()
1121
- ];
1122
- const excludedPatterns = ((_runConfig_tree22 = runConfig.tree) === null || _runConfig_tree22 === void 0 ? void 0 : _runConfig_tree22.exclude) || [];
1123
- let allPackageJsonPaths = [];
1124
- for (const targetDirectory of directories){
1125
- const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1126
- allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1127
- }
1128
- if (allPackageJsonPaths.length === 0) {
1129
- return 'No packages found';
1130
- }
1131
- const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1132
- const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
1133
- if (!recoveryManager) {
1134
- logger.info('No checkpoint found to validate');
1135
- return 'No checkpoint found';
1136
- }
1137
- const validation = recoveryManager.validateState();
1138
- if (validation.valid) {
1139
- logger.info('✅ Checkpoint state is valid');
1140
- } else {
1141
- logger.error('❌ Checkpoint state has issues:');
1142
- for (const issue of validation.issues){
1143
- logger.error(` • ${issue}`);
1144
- }
1145
- }
1146
- if (validation.warnings.length > 0) {
1147
- logger.warn('⚠️ Warnings:');
1148
- for (const warning of validation.warnings){
1149
- logger.warn(` • ${warning}`);
1150
- }
1151
- }
1152
- return validation.valid ? 'Checkpoint is valid' : 'Checkpoint has issues';
1153
- }
1154
- // Handle parallel execution recovery options (must happen before main execution)
1155
- const hasRecoveryOptions = ((_runConfig_tree7 = runConfig.tree) === null || _runConfig_tree7 === void 0 ? void 0 : _runConfig_tree7.markCompleted) || ((_runConfig_tree8 = runConfig.tree) === null || _runConfig_tree8 === void 0 ? void 0 : _runConfig_tree8.skipPackages) || ((_runConfig_tree9 = runConfig.tree) === null || _runConfig_tree9 === void 0 ? void 0 : _runConfig_tree9.retryFailed) || ((_runConfig_tree10 = runConfig.tree) === null || _runConfig_tree10 === void 0 ? void 0 : _runConfig_tree10.skipFailed) || ((_runConfig_tree11 = runConfig.tree) === null || _runConfig_tree11 === void 0 ? void 0 : _runConfig_tree11.resetPackage);
1156
- if (hasRecoveryOptions && runConfig.tree) {
1157
- var _runConfig_tree_retry;
1158
- logger.info('🔧 Applying recovery options...');
1159
- // Build dependency graph
1160
- const directories = runConfig.tree.directories || [
1161
- process.cwd()
1162
- ];
1163
- const excludedPatterns = runConfig.tree.exclude || [];
1164
- let allPackageJsonPaths = [];
1165
- for (const targetDirectory of directories){
1166
- const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1167
- allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1168
- }
1169
- const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
1170
- const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
1171
- if (!recoveryManager) {
1172
- logger.error('No checkpoint found for recovery');
1173
- throw new Error('No checkpoint found. Cannot apply recovery options without an existing checkpoint.');
1174
- }
1175
- await recoveryManager.applyRecoveryOptions({
1176
- markCompleted: runConfig.tree.markCompleted,
1177
- skipPackages: runConfig.tree.skipPackages,
1178
- retryFailed: runConfig.tree.retryFailed,
1179
- skipFailed: runConfig.tree.skipFailed,
1180
- resetPackage: runConfig.tree.resetPackage,
1181
- maxRetries: (_runConfig_tree_retry = runConfig.tree.retry) === null || _runConfig_tree_retry === void 0 ? void 0 : _runConfig_tree_retry.maxAttempts
1182
- });
1183
- logger.info('✅ Recovery options applied');
1184
- // If not also continuing, just return
1185
- if (!isContinue) {
1186
- return 'Recovery options applied. Use --continue to resume execution.';
1187
- }
1188
- }
1189
- // Handle continue mode
1190
- if (isContinue) {
1191
- var _runConfig_tree23;
1192
- // For parallel execution, the checkpoint is managed by DynamicTaskPool/CheckpointManager
1193
- // For sequential execution, we use the legacy context file
1194
- const isParallelMode = (_runConfig_tree23 = runConfig.tree) === null || _runConfig_tree23 === void 0 ? void 0 : _runConfig_tree23.parallel;
1195
- if (!isParallelMode) {
1196
- // Sequential execution: load legacy context
1197
- const savedContext = await loadExecutionContext(runConfig.outputDirectory);
1198
- if (savedContext) {
1199
- logger.info('Continuing previous tree execution...');
1200
- logger.info(`Original command: ${savedContext.command}`);
1201
- logger.info(`Started: ${savedContext.startTime.toISOString()}`);
1202
- logger.info(`Previously completed: ${savedContext.completedPackages.length}/${savedContext.buildOrder.length} packages`);
1203
- // Restore state safely
1204
- let mutexLocked = false;
1205
- try {
1206
- await globalStateMutex.lock();
1207
- mutexLocked = true;
1208
- publishedVersions = savedContext.publishedVersions;
1209
- globalStateMutex.unlock();
1210
- mutexLocked = false;
1211
- } catch (error) {
1212
- if (mutexLocked) {
1213
- globalStateMutex.unlock();
1214
- }
1215
- throw error;
1216
- }
1217
- executionContext = savedContext;
1218
- // Use original config but allow some overrides (like dry run)
1219
- runConfig = {
1220
- ...savedContext.originalConfig,
1221
- dryRun: runConfig.dryRun,
1222
- outputDirectory: runConfig.outputDirectory || savedContext.originalConfig.outputDirectory
1223
- };
1224
- } else {
1225
- logger.warn('No previous execution context found. Starting new execution...');
1226
- }
1227
- } else {
1228
- // Parallel execution: checkpoint is managed by DynamicTaskPool
1229
- // Just log that we're continuing - the actual checkpoint loading happens in DynamicTaskPool
1230
- logger.info('Continuing previous parallel execution...');
1231
- }
1232
- } else {
1233
- // Reset published versions tracking for new tree execution
1234
- publishedVersions = [];
1235
- executionContext = null;
1236
- }
1237
- // Check if we're in built-in command mode (tree command with second argument)
1238
- const builtInCommand = (_runConfig_tree12 = runConfig.tree) === null || _runConfig_tree12 === void 0 ? void 0 : _runConfig_tree12.builtInCommand;
1239
- const supportedBuiltInCommands = [
1240
- 'commit',
1241
- 'release',
1242
- 'publish',
1243
- 'link',
1244
- 'unlink',
1245
- 'development',
1246
- 'branches',
1247
- 'run',
1248
- 'checkout',
1249
- 'updates',
1250
- 'precommit'
1251
- ];
1252
- if (builtInCommand && !supportedBuiltInCommands.includes(builtInCommand)) {
1253
- throw new Error(`Unsupported built-in command: ${builtInCommand}. Supported commands: ${supportedBuiltInCommands.join(', ')}`);
1254
- }
1255
- // Handle run subcommand - convert space-separated scripts to npm run commands
1256
- if (builtInCommand === 'run') {
1257
- var _runConfig_tree24;
1258
- const packageArgument = (_runConfig_tree24 = runConfig.tree) === null || _runConfig_tree24 === void 0 ? void 0 : _runConfig_tree24.packageArgument;
1259
- if (!packageArgument) {
1260
- throw new Error('run subcommand requires script names. Usage: kodrdriv tree run "clean build test"');
1261
- }
1262
- // Split the package argument by spaces to get individual script names
1263
- const scripts = packageArgument.trim().split(/\s+/).filter((script)=>script.length > 0);
1264
- if (scripts.length === 0) {
1265
- throw new Error('run subcommand requires at least one script name. Usage: kodrdriv tree run "clean build test"');
1266
- }
1267
- // Convert to npm run commands joined with &&
1268
- const npmCommands = scripts.map((script)=>`npm run ${script}`).join(' && ');
1269
- // Set this as the custom command to run
1270
- runConfig.tree = {
1271
- ...runConfig.tree,
1272
- cmd: npmCommands
1273
- };
1274
- // Clear the built-in command since we're now using custom command mode
1275
- runConfig.tree.builtInCommand = undefined;
1276
- logger.info(`Converting run subcommand to: ${npmCommands}`);
1277
- // Store scripts for later validation
1278
- runConfig.__scriptsToValidate = scripts;
1279
- }
1280
- // Determine the target directories - either specified or current working directory
1281
- const directories = ((_runConfig_tree13 = runConfig.tree) === null || _runConfig_tree13 === void 0 ? void 0 : _runConfig_tree13.directories) || [
1282
- process.cwd()
1283
- ];
1284
- // Handle link status subcommand
1285
- if (builtInCommand === 'link' && ((_runConfig_tree14 = runConfig.tree) === null || _runConfig_tree14 === void 0 ? void 0 : _runConfig_tree14.packageArgument) === 'status') {
1286
- // For tree link status, we want to show status across all packages
1287
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running link status across workspace...`);
1288
- // Create a config that will be passed to the link command
1289
- const linkConfig = {
1290
- ...runConfig,
1291
- tree: {
1292
- ...runConfig.tree,
1293
- directories: directories
1294
- }
1295
- };
1296
- try {
1297
- const result = await execute$1(linkConfig, 'status');
1298
- return result;
1299
- } catch (error) {
1300
- logger.error(`Link status failed: ${error.message}`);
1301
- throw error;
1302
- }
1303
- }
1304
- // Handle unlink status subcommand
1305
- if (builtInCommand === 'unlink' && ((_runConfig_tree15 = runConfig.tree) === null || _runConfig_tree15 === void 0 ? void 0 : _runConfig_tree15.packageArgument) === 'status') {
1306
- // For tree unlink status, we want to show status across all packages
1307
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running unlink status across workspace...`);
1308
- // Create a config that will be passed to the unlink command
1309
- const unlinkConfig = {
1310
- ...runConfig,
1311
- tree: {
1312
- ...runConfig.tree,
1313
- directories: directories
1314
- }
1315
- };
1316
- try {
1317
- const result = await execute$2(unlinkConfig, 'status');
1318
- return result;
1319
- } catch (error) {
1320
- logger.error(`Unlink status failed: ${error.message}`);
1321
- throw error;
1322
- }
1323
- }
1324
- if (directories.length === 1) {
1325
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspace at: ${directories[0]}`);
1326
- } else {
1327
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspaces at: ${directories.join(', ')}`);
1328
- }
1329
- try {
1330
- var _runConfig_tree25, _runConfig_tree26, _runConfig_tree27, _runConfig_tree28;
1331
- // Get exclusion patterns from config, fallback to empty array
1332
- const excludedPatterns = ((_runConfig_tree25 = runConfig.tree) === null || _runConfig_tree25 === void 0 ? void 0 : _runConfig_tree25.exclude) || [];
1333
- if (excludedPatterns.length > 0) {
1334
- logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Using exclusion patterns: ${excludedPatterns.join(', ')}`);
1335
- }
1336
- // Scan for package.json files across all directories
1337
- logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning for package.json files...`);
1338
- let allPackageJsonPaths = [];
1339
- for (const targetDirectory of directories){
1340
- logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning directory: ${targetDirectory}`);
1341
- const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
1342
- allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
1343
- }
1344
- const packageJsonPaths = allPackageJsonPaths;
1345
- if (packageJsonPaths.length === 0) {
1346
- const directoriesStr = directories.join(', ');
1347
- const message = `No package.json files found in subdirectories of: ${directoriesStr}`;
1348
- logger.warn(message);
1349
- return message;
1350
- }
1351
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found ${packageJsonPaths.length} package.json files`);
1352
- // Build dependency graph
1353
- logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Building dependency graph...`);
1354
- const dependencyGraph = await buildDependencyGraph(packageJsonPaths);
1355
- // Perform topological sort to determine build order
1356
- logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Determining build order...`);
1357
- let buildOrder = topologicalSort(dependencyGraph);
1358
- // Handle start-from functionality if specified
1359
- const startFrom = (_runConfig_tree26 = runConfig.tree) === null || _runConfig_tree26 === void 0 ? void 0 : _runConfig_tree26.startFrom;
1360
- if (startFrom) {
1361
- logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for start package: ${startFrom}`);
1362
- // Resolve the actual package name (can be package name or directory name)
1363
- let startPackageName = null;
1364
- for (const [pkgName, pkgInfo] of dependencyGraph.packages){
1365
- const dirName = path__default.basename(pkgInfo.path);
1366
- if (dirName === startFrom || pkgName === startFrom) {
1367
- startPackageName = pkgName;
1368
- break;
1369
- }
1370
- }
1371
- if (!startPackageName) {
1372
- // Check if the package exists but was excluded across all directories
1373
- let allPackageJsonPathsForCheck = [];
1374
- for (const targetDirectory of directories){
1375
- const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
1376
- allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
1377
- }
1378
- let wasExcluded = false;
1379
- for (const packageJsonPath of allPackageJsonPathsForCheck){
1380
- try {
1381
- const packageInfo = await parsePackageJson(packageJsonPath);
1382
- const dirName = path__default.basename(packageInfo.path);
1383
- if (dirName === startFrom || packageInfo.name === startFrom) {
1384
- // Check if this package was excluded
1385
- if (shouldExclude(packageJsonPath, excludedPatterns)) {
1386
- wasExcluded = true;
1387
- break;
1388
- }
1389
- }
1390
- } catch {
1391
- continue;
1392
- }
1393
- }
1394
- if (wasExcluded) {
1395
- const excludedPatternsStr = excludedPatterns.join(', ');
1396
- throw new Error(`Package directory '${startFrom}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different starting package.`);
1397
- } else {
1398
- const availablePackages = buildOrder.map((name)=>{
1399
- const packageInfo = dependencyGraph.packages.get(name);
1400
- return `${path__default.basename(packageInfo.path)} (${name})`;
1401
- }).join(', ');
1402
- throw new Error(`Package directory '${startFrom}' not found. Available packages: ${availablePackages}`);
1403
- }
1404
- }
1405
- // Find the start package in the build order and start execution from there
1406
- const startIndex = buildOrder.findIndex((pkgName)=>pkgName === startPackageName);
1407
- if (startIndex === -1) {
1408
- throw new Error(`Package '${startFrom}' not found in build order. This should not happen.`);
1409
- }
1410
- // Filter build order to start from the specified package
1411
- const originalLength = buildOrder.length;
1412
- buildOrder = buildOrder.slice(startIndex);
1413
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Starting execution from package '${startFrom}' (${buildOrder.length} of ${originalLength} packages remaining).`);
1414
- }
1415
- // Handle stop-at functionality if specified
1416
- const stopAt = (_runConfig_tree27 = runConfig.tree) === null || _runConfig_tree27 === void 0 ? void 0 : _runConfig_tree27.stopAt;
1417
- if (stopAt) {
1418
- logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for stop package: ${stopAt}`);
1419
- // Find the package that matches the stopAt directory name
1420
- const stopIndex = buildOrder.findIndex((packageName)=>{
1421
- const packageInfo = dependencyGraph.packages.get(packageName);
1422
- const dirName = path__default.basename(packageInfo.path);
1423
- return dirName === stopAt || packageName === stopAt;
1424
- });
1425
- if (stopIndex === -1) {
1426
- // Check if the package exists but was excluded across all directories
1427
- let allPackageJsonPathsForCheck = [];
1428
- for (const targetDirectory of directories){
1429
- const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
1430
- allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
1431
- }
1432
- let wasExcluded = false;
1433
- for (const packageJsonPath of allPackageJsonPathsForCheck){
1434
- try {
1435
- const packageInfo = await parsePackageJson(packageJsonPath);
1436
- const dirName = path__default.basename(packageInfo.path);
1437
- if (dirName === stopAt || packageInfo.name === stopAt) {
1438
- // Check if this package was excluded
1439
- if (shouldExclude(packageJsonPath, excludedPatterns)) {
1440
- wasExcluded = true;
1441
- break;
1442
- }
1443
- }
1444
- } catch {
1445
- continue;
1446
- }
1447
- }
1448
- if (wasExcluded) {
1449
- const excludedPatternsStr = excludedPatterns.join(', ');
1450
- throw new Error(`Package directory '${stopAt}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different stop package.`);
1451
- } else {
1452
- const availablePackages = buildOrder.map((name)=>{
1453
- const packageInfo = dependencyGraph.packages.get(name);
1454
- return `${path__default.basename(packageInfo.path)} (${name})`;
1455
- }).join(', ');
1456
- throw new Error(`Package directory '${stopAt}' not found. Available packages: ${availablePackages}`);
1457
- }
1458
- }
1459
- // Truncate the build order before the stop package (the stop package is not executed)
1460
- const originalLength = buildOrder.length;
1461
- buildOrder = buildOrder.slice(0, stopIndex);
1462
- const stoppedCount = originalLength - stopIndex;
1463
- if (stoppedCount > 0) {
1464
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Stopping before '${stopAt}' - excluding ${stoppedCount} package${stoppedCount === 1 ? '' : 's'}`);
1465
- }
1466
- }
1467
- // Helper function to determine version scope indicator
1468
- const getVersionScopeIndicator = (versionRange)=>{
1469
- // Remove whitespace and check the pattern
1470
- const cleanRange = versionRange.trim();
1471
- // Preserve the original prefix (^, ~, >=, etc.)
1472
- const prefixMatch = cleanRange.match(/^([^0-9]*)/);
1473
- const prefix = prefixMatch ? prefixMatch[1] : '';
1474
- // Extract the version part after the prefix
1475
- const versionPart = cleanRange.substring(prefix.length);
1476
- // Count the number of dots to determine scope
1477
- const dotCount = (versionPart.match(/\./g) || []).length;
1478
- if (dotCount >= 2) {
1479
- // Has patch version (e.g., "^4.4.32" -> "^P")
1480
- return prefix + 'P';
1481
- } else if (dotCount === 1) {
1482
- // Has minor version only (e.g., "^4.4" -> "^m")
1483
- return prefix + 'm';
1484
- } else if (dotCount === 0 && versionPart.match(/^\d+$/)) {
1485
- // Has major version only (e.g., "^4" -> "^M")
1486
- return prefix + 'M';
1487
- }
1488
- // For complex ranges or non-standard formats, return as-is
1489
- return cleanRange;
1490
- };
1491
- // Helper function to find packages that consume a given package
1492
- const findConsumingPackagesForBranches = async (targetPackageName, allPackages, storage)=>{
1493
- const consumers = [];
1494
- // Extract scope from target package name (e.g., "@fjell/eslint-config" -> "@fjell/")
1495
- const targetScope = targetPackageName.includes('/') ? targetPackageName.split('/')[0] + '/' : null;
1496
- for (const [packageName, packageInfo] of allPackages){
1497
- if (packageName === targetPackageName) continue;
1498
- try {
1499
- const packageJsonPath = path__default.join(packageInfo.path, 'package.json');
1500
- const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
1501
- const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
1502
- const packageJson = validatePackageJson(parsed, packageJsonPath);
1503
- // Check if this package depends on the target package and get the version range
1504
- const dependencyTypes = [
1505
- 'dependencies',
1506
- 'devDependencies',
1507
- 'peerDependencies',
1508
- 'optionalDependencies'
1509
- ];
1510
- let versionRange = null;
1511
- for (const depType of dependencyTypes){
1512
- if (packageJson[depType] && packageJson[depType][targetPackageName]) {
1513
- versionRange = packageJson[depType][targetPackageName];
1514
- break;
1515
- }
1516
- }
1517
- if (versionRange) {
1518
- // Apply scope substitution for consumers in the same scope
1519
- let consumerDisplayName = packageName;
1520
- if (targetScope && packageName.startsWith(targetScope)) {
1521
- // Replace scope with "./" (e.g., "@fjell/core" -> "./core")
1522
- consumerDisplayName = './' + packageName.substring(targetScope.length);
1523
- }
1524
- // Add version scope indicator
1525
- const scopeIndicator = getVersionScopeIndicator(versionRange);
1526
- consumerDisplayName += ` (${scopeIndicator})`;
1527
- consumers.push(consumerDisplayName);
1528
- }
1529
- } catch {
1530
- continue;
1531
- }
1532
- }
1533
- return consumers.sort();
1534
- };
1535
- // Handle special "branches" command that displays table
1536
- if (builtInCommand === 'branches') {
1537
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Branch Status Summary:`);
1538
- logger.info('');
1539
- // Calculate column widths for nice formatting
1540
- let maxNameLength = 'Package'.length;
1541
- let maxBranchLength = 'Branch'.length;
1542
- let maxVersionLength = 'Version'.length;
1543
- let maxStatusLength = 'Status'.length;
1544
- let maxLinkLength = 'Linked'.length;
1545
- let maxConsumersLength = 'Consumers'.length;
1546
- const branchInfos = [];
1547
- // Create storage instance for consumer lookup
1548
- const storage = createStorage();
1549
- // Get globally linked packages once at the beginning
1550
- const globallyLinkedPackages = await getGloballyLinkedPackages();
1551
- // ANSI escape codes for progress display
1552
- const ANSI = {
1553
- CURSOR_UP: '\x1b[1A',
1554
- CURSOR_TO_START: '\x1b[0G',
1555
- CLEAR_LINE: '\x1b[2K',
1556
- GREEN: '\x1b[32m',
1557
- BLUE: '\x1b[34m',
1558
- YELLOW: '\x1b[33m',
1559
- RESET: '\x1b[0m',
1560
- BOLD: '\x1b[1m'
1561
- };
1562
- // Check if terminal supports ANSI
1563
- const supportsAnsi = process.stdout.isTTY && process.env.TERM !== 'dumb' && !process.env.NO_COLOR;
1564
- const totalPackages = buildOrder.length;
1565
- const concurrency = 5; // Process up to 5 packages at a time
1566
- let completedCount = 0;
1567
- let isFirstProgress = true;
1568
- // Function to update progress display
1569
- const updateProgress = (currentPackage, completed, total)=>{
1570
- if (!supportsAnsi) return;
1571
- if (!isFirstProgress) {
1572
- // Move cursor up and clear the line
1573
- process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
1574
- }
1575
- const percentage = Math.round(completed / total * 100);
1576
- const progressBar = '█'.repeat(Math.floor(percentage / 5)) + '░'.repeat(20 - Math.floor(percentage / 5));
1577
- const progress = `${ANSI.BLUE}${ANSI.BOLD}Analyzing packages... ${ANSI.GREEN}[${progressBar}] ${percentage}%${ANSI.RESET} ${ANSI.YELLOW}(${completed}/${total})${ANSI.RESET}`;
1578
- const current = currentPackage ? ` - Currently: ${currentPackage}` : '';
1579
- process.stdout.write(progress + current + '\n');
1580
- isFirstProgress = false;
1581
- };
1582
- // Function to process a single package
1583
- const processPackage = async (packageName)=>{
1584
- const packageInfo = dependencyGraph.packages.get(packageName);
1585
- try {
1586
- // Process git status and consumers in parallel
1587
- const [gitStatus, consumers] = await Promise.all([
1588
- getGitStatusSummary(packageInfo.path),
1589
- findConsumingPackagesForBranches(packageName, dependencyGraph.packages, storage)
1590
- ]);
1591
- // Check if this package is globally linked (available to be linked to)
1592
- const isGloballyLinked = globallyLinkedPackages.has(packageName);
1593
- const linkedText = isGloballyLinked ? '✓' : '';
1594
- // Add asterisk to consumers that are actively linking to globally linked packages
1595
- // and check for link problems to highlight in red
1596
- const consumersWithLinkStatus = await Promise.all(consumers.map(async (consumer)=>{
1597
- // Extract the base consumer name from the format "package-name (^P)" or "./scoped-name (^m)"
1598
- const baseConsumerName = consumer.replace(/ \([^)]+\)$/, ''); // Remove version scope indicator
1599
- // Get the original package name from display name (remove scope substitution)
1600
- const originalConsumerName = baseConsumerName.startsWith('./') ? baseConsumerName.replace('./', packageName.split('/')[0] + '/') : baseConsumerName;
1601
- // Find the consumer package info to get its path
1602
- const consumerPackageInfo = Array.from(dependencyGraph.packages.values()).find((pkg)=>pkg.name === originalConsumerName);
1603
- if (consumerPackageInfo) {
1604
- const [consumerLinkedDeps, linkProblems] = await Promise.all([
1605
- getLinkedDependencies(consumerPackageInfo.path),
1606
- getLinkCompatibilityProblems(consumerPackageInfo.path, dependencyGraph.packages)
1607
- ]);
1608
- let consumerDisplay = consumer;
1609
- // Add asterisk if this consumer is actively linking to this package
1610
- if (consumerLinkedDeps.has(packageName)) {
1611
- consumerDisplay += '*';
1612
- }
1613
- // Check if this consumer has link problems with the current package
1614
- if (linkProblems.has(packageName)) {
1615
- // Highlight in red using ANSI escape codes (only if terminal supports it)
1616
- if (supportsAnsi) {
1617
- consumerDisplay = `\x1b[31m${consumerDisplay}\x1b[0m`;
1618
- } else {
1619
- // Fallback for terminals that don't support ANSI colors
1620
- consumerDisplay += ' [LINK PROBLEM]';
1621
- }
1622
- }
1623
- return consumerDisplay;
1624
- }
1625
- return consumer;
1626
- }));
1627
- return {
1628
- name: packageName,
1629
- branch: gitStatus.branch,
1630
- version: packageInfo.version,
1631
- status: gitStatus.status,
1632
- linked: linkedText,
1633
- consumers: consumersWithLinkStatus
1634
- };
1635
- } catch (error) {
1636
- logger.warn(`Failed to get git status for ${packageName}: ${error.message}`);
1637
- return {
1638
- name: packageName,
1639
- branch: 'error',
1640
- version: packageInfo.version,
1641
- status: 'error',
1642
- linked: '✗',
1643
- consumers: [
1644
- 'error'
1645
- ]
1646
- };
1647
- }
1648
- };
1649
- // Process packages in batches with progress updates
1650
- updateProgress('Starting...', 0, totalPackages);
1651
- for(let i = 0; i < buildOrder.length; i += concurrency){
1652
- const batch = buildOrder.slice(i, i + concurrency);
1653
- // Update progress to show current batch
1654
- const currentBatchStr = batch.length === 1 ? batch[0] : `${batch[0]} + ${batch.length - 1} others`;
1655
- updateProgress(currentBatchStr, completedCount, totalPackages);
1656
- // Process batch in parallel
1657
- const batchResults = await Promise.all(batch.map((packageName)=>processPackage(packageName)));
1658
- // Add results and update column widths
1659
- for (const result of batchResults){
1660
- branchInfos.push(result);
1661
- maxNameLength = Math.max(maxNameLength, result.name.length);
1662
- maxBranchLength = Math.max(maxBranchLength, result.branch.length);
1663
- maxVersionLength = Math.max(maxVersionLength, result.version.length);
1664
- maxStatusLength = Math.max(maxStatusLength, result.status.length);
1665
- maxLinkLength = Math.max(maxLinkLength, result.linked.length);
1666
- // For consumers, calculate the width based on the longest consumer name
1667
- const maxConsumerLength = result.consumers.length > 0 ? Math.max(...result.consumers.map((c)=>c.length)) : 0;
1668
- maxConsumersLength = Math.max(maxConsumersLength, maxConsumerLength);
1669
- }
1670
- completedCount += batch.length;
1671
- updateProgress('', completedCount, totalPackages);
1672
- }
1673
- // Clear progress line and add spacing
1674
- if (supportsAnsi && !isFirstProgress) {
1675
- process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
1676
- }
1677
- logger.info(`${ANSI.GREEN}✅ Analysis complete!${ANSI.RESET} Processed ${totalPackages} packages in batches of ${concurrency}.`);
1678
- logger.info('');
1679
- // Print header (new order: Package | Branch | Version | Status | Linked | Consumers)
1680
- const nameHeader = 'Package'.padEnd(maxNameLength);
1681
- const branchHeader = 'Branch'.padEnd(maxBranchLength);
1682
- const versionHeader = 'Version'.padEnd(maxVersionLength);
1683
- const statusHeader = 'Status'.padEnd(maxStatusLength);
1684
- const linkHeader = 'Linked'.padEnd(maxLinkLength);
1685
- const consumersHeader = 'Consumers';
1686
- logger.info(`${nameHeader} | ${branchHeader} | ${versionHeader} | ${statusHeader} | ${linkHeader} | ${consumersHeader}`);
1687
- logger.info(`${'-'.repeat(maxNameLength)} | ${'-'.repeat(maxBranchLength)} | ${'-'.repeat(maxVersionLength)} | ${'-'.repeat(maxStatusLength)} | ${'-'.repeat(maxLinkLength)} | ${'-'.repeat(9)}`);
1688
- // Print data rows with multi-line consumers
1689
- for (const info of branchInfos){
1690
- const nameCol = info.name.padEnd(maxNameLength);
1691
- const branchCol = info.branch.padEnd(maxBranchLength);
1692
- const versionCol = info.version.padEnd(maxVersionLength);
1693
- const statusCol = info.status.padEnd(maxStatusLength);
1694
- const linkCol = info.linked.padEnd(maxLinkLength);
1695
- if (info.consumers.length === 0) {
1696
- // No consumers - single line
1697
- logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | `);
1698
- } else if (info.consumers.length === 1) {
1699
- // Single consumer - single line
1700
- logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
1701
- } else {
1702
- // Multiple consumers - first consumer on same line, rest on new lines with continuous column separators
1703
- logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
1704
- // Additional consumers on separate lines with proper column separators
1705
- const emptyNameCol = ' '.repeat(maxNameLength);
1706
- const emptyBranchCol = ' '.repeat(maxBranchLength);
1707
- const emptyVersionCol = ' '.repeat(maxVersionLength);
1708
- const emptyStatusCol = ' '.repeat(maxStatusLength);
1709
- const emptyLinkCol = ' '.repeat(maxLinkLength);
1710
- for(let i = 1; i < info.consumers.length; i++){
1711
- logger.info(`${emptyNameCol} | ${emptyBranchCol} | ${emptyVersionCol} | ${emptyStatusCol} | ${emptyLinkCol} | ${info.consumers[i]}`);
1712
- }
1713
- }
1714
- }
1715
- logger.info('');
1716
- // Add legend explaining the symbols and colors
1717
- logger.info('Legend:');
1718
- logger.info(' * = Consumer is actively linking to this package');
1719
- logger.info(' (^P) = Patch-level dependency (e.g., "^4.4.32")');
1720
- logger.info(' (^m) = Minor-level dependency (e.g., "^4.4")');
1721
- logger.info(' (^M) = Major-level dependency (e.g., "^4")');
1722
- logger.info(' (~P), (>=M), etc. = Other version prefixes preserved');
1723
- if (supportsAnsi) {
1724
- logger.info(' \x1b[31mRed text\x1b[0m = Consumer has link problems (version mismatches) with this package');
1725
- } else {
1726
- logger.info(' [LINK PROBLEM] = Consumer has link problems (version mismatches) with this package');
1727
- }
1728
- logger.info('');
1729
- return `Branch status summary for ${branchInfos.length} packages completed.`;
1730
- }
1731
- // Handle special "checkout" command that switches all packages to specified branch
1732
- if (builtInCommand === 'checkout') {
1733
- var _runConfig_tree29;
1734
- const targetBranch = (_runConfig_tree29 = runConfig.tree) === null || _runConfig_tree29 === void 0 ? void 0 : _runConfig_tree29.packageArgument;
1735
- if (!targetBranch) {
1736
- throw new Error('checkout subcommand requires a branch name. Usage: kodrdriv tree checkout <branch-name>');
1737
- }
1738
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Workspace Checkout to Branch: ${targetBranch}`);
1739
- logger.info('');
1740
- // Phase 1: Safety check - scan all packages for uncommitted changes
1741
- logger.info('🔍 Phase 1: Checking for uncommitted changes across workspace...');
1742
- const packagesWithChanges = [];
1743
- for (const packageName of buildOrder){
1744
- const packageInfo = dependencyGraph.packages.get(packageName);
1745
- try {
1746
- const gitStatus = await getGitStatusSummary(packageInfo.path);
1747
- const hasProblems = gitStatus.hasUncommittedChanges || gitStatus.hasUnstagedFiles;
1748
- packagesWithChanges.push({
1749
- name: packageName,
1750
- path: packageInfo.path,
1751
- status: gitStatus.status,
1752
- hasUncommittedChanges: gitStatus.hasUncommittedChanges,
1753
- hasUnstagedFiles: gitStatus.hasUnstagedFiles
1754
- });
1755
- if (hasProblems) {
1756
- logger.warn(`⚠️ ${packageName}: ${gitStatus.status}`);
1757
- } else {
1758
- logger.verbose(`✅ ${packageName}: clean`);
1759
- }
1760
- } catch (error) {
1761
- logger.warn(`❌ ${packageName}: error checking status - ${error.message}`);
1762
- packagesWithChanges.push({
1763
- name: packageName,
1764
- path: packageInfo.path,
1765
- status: 'error',
1766
- hasUncommittedChanges: false,
1767
- hasUnstagedFiles: false
1768
- });
1769
- }
1770
- }
1771
- // Check if any packages have uncommitted changes
1772
- const problemPackages = packagesWithChanges.filter((pkg)=>pkg.hasUncommittedChanges || pkg.hasUnstagedFiles || pkg.status === 'error');
1773
- if (problemPackages.length > 0) {
1774
- logger.error(`❌ Cannot proceed with checkout: ${problemPackages.length} packages have uncommitted changes or errors:`);
1775
- logger.error('');
1776
- for (const pkg of problemPackages){
1777
- logger.error(` 📦 ${pkg.name} (${pkg.path}):`);
1778
- logger.error(` Status: ${pkg.status}`);
1779
- }
1780
- logger.error('');
1781
- logger.error('🔧 To resolve this issue:');
1782
- logger.error(' 1. Commit or stash changes in the packages listed above');
1783
- logger.error(' 2. Or use "kodrdriv tree commit" to commit changes across all packages');
1784
- logger.error(' 3. Then re-run the checkout command');
1785
- logger.error('');
1786
- throw new Error(`Workspace checkout blocked: ${problemPackages.length} packages have uncommitted changes`);
1787
- }
1788
- logger.info(`✅ Phase 1 complete: All ${packagesWithChanges.length} packages are clean`);
1789
- logger.info('');
1790
- // Phase 2: Perform the checkout
1791
- logger.info(`🔄 Phase 2: Checking out all packages to branch '${targetBranch}'...`);
1792
- let successCount = 0;
1793
- const failedPackages = [];
1794
- for(let i = 0; i < buildOrder.length; i++){
1795
- const packageName = buildOrder[i];
1796
- const packageInfo = dependencyGraph.packages.get(packageName);
1797
- if (isDryRun) {
1798
- logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: Would checkout ${targetBranch}`);
1799
- successCount++;
1800
- } else {
1801
- try {
1802
- const originalCwd = process.cwd();
1803
- process.chdir(packageInfo.path);
1804
- try {
1805
- // Check if target branch exists locally
1806
- let branchExists = false;
1807
- try {
1808
- await runSecure('git', [
1809
- 'rev-parse',
1810
- '--verify',
1811
- targetBranch
1812
- ]);
1813
- branchExists = true;
1814
- } catch {
1815
- // Branch doesn't exist locally
1816
- branchExists = false;
1817
- }
1818
- if (branchExists) {
1819
- await runSecure('git', [
1820
- 'checkout',
1821
- targetBranch
1822
- ]);
1823
- logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch}`);
1824
- } else {
1825
- // Try to check out branch from remote
1826
- try {
1827
- await runSecure('git', [
1828
- 'checkout',
1829
- '-b',
1830
- targetBranch,
1831
- `origin/${targetBranch}`
1832
- ]);
1833
- logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch} from origin`);
1834
- } catch {
1835
- // If that fails, create a new branch
1836
- await runSecure('git', [
1837
- 'checkout',
1838
- '-b',
1839
- targetBranch
1840
- ]);
1841
- logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Created new branch ${targetBranch}`);
1842
- }
1843
- }
1844
- successCount++;
1845
- } finally{
1846
- process.chdir(originalCwd);
1847
- }
1848
- } catch (error) {
1849
- logger.error(`[${i + 1}/${buildOrder.length}] ${packageName}: ❌ Failed - ${error.message}`);
1850
- failedPackages.push({
1851
- name: packageName,
1852
- error: error.message
1853
- });
1854
- }
1855
- }
1856
- }
1857
- // Report results
1858
- if (failedPackages.length > 0) {
1859
- logger.error(`❌ Checkout completed with errors: ${successCount}/${buildOrder.length} packages successful`);
1860
- logger.error('');
1861
- logger.error('Failed packages:');
1862
- for (const failed of failedPackages){
1863
- logger.error(` - ${failed.name}: ${failed.error}`);
1864
- }
1865
- throw new Error(`Checkout failed for ${failedPackages.length} packages`);
1866
- } else {
1867
- logger.info(`✅ Checkout complete: All ${buildOrder.length} packages successfully checked out to '${targetBranch}'`);
1868
- return `Workspace checkout complete: ${successCount} packages checked out to '${targetBranch}'`;
1869
- }
1870
- }
1871
- // Display results
1872
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Build order determined:`);
1873
- let returnOutput = '';
1874
- if (runConfig.verbose || runConfig.debug) {
1875
- // Verbose mode: Skip simple format, show detailed format before command execution
1876
- logger.info(''); // Add spacing
1877
- const rangeInfo = [];
1878
- if (startFrom) rangeInfo.push(`starting from ${startFrom}`);
1879
- if (stopAt) rangeInfo.push(`stopping before ${stopAt}`);
1880
- const rangeStr = rangeInfo.length > 0 ? ` (${rangeInfo.join(', ')})` : '';
1881
- logger.info(`Detailed Build Order for ${buildOrder.length} packages${rangeStr}:`);
1882
- logger.info('==========================================');
1883
- buildOrder.forEach((packageName, index)=>{
1884
- const packageInfo = dependencyGraph.packages.get(packageName);
1885
- const localDeps = Array.from(packageInfo.localDependencies);
1886
- logger.info(`${index + 1}. ${packageName} (${packageInfo.version})`);
1887
- logger.info(` Path: ${packageInfo.path}`);
1888
- if (localDeps.length > 0) {
1889
- logger.info(` Local Dependencies: ${localDeps.join(', ')}`);
1890
- } else {
1891
- logger.info(` Local Dependencies: none`);
1892
- }
1893
- logger.info(''); // Add spacing between packages
1894
- });
1895
- // Simple return output for verbose mode (no need to repeat detailed info)
1896
- returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
1897
- } else {
1898
- // Non-verbose mode: Show simple build order
1899
- buildOrder.forEach((packageName, index)=>{
1900
- const packageInfo = dependencyGraph.packages.get(packageName);
1901
- const localDeps = Array.from(packageInfo.localDependencies);
1902
- // Log each step
1903
- if (localDeps.length > 0) {
1904
- logger.info(`${index + 1}. ${packageName} (depends on: ${localDeps.join(', ')})`);
1905
- } else {
1906
- logger.info(`${index + 1}. ${packageName} (no local dependencies)`);
1907
- }
1908
- });
1909
- // Simple return output for non-verbose mode
1910
- returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
1911
- }
1912
- // Execute command if provided (custom command or built-in command)
1913
- const cmd = (_runConfig_tree28 = runConfig.tree) === null || _runConfig_tree28 === void 0 ? void 0 : _runConfig_tree28.cmd;
1914
- // Determine command to execute
1915
- let commandToRun;
1916
- let isBuiltInCommand = false;
1917
- if (builtInCommand) {
1918
- var _runConfig_tree30, _runConfig_tree31, _runConfig_tree32;
1919
- // Built-in command mode: shell out to kodrdriv subprocess
1920
- // Build command with propagated global options
1921
- const globalOptions = [];
1922
- // Propagate global flags that should be inherited by subprocesses
1923
- if (runConfig.debug) globalOptions.push('--debug');
1924
- if (runConfig.verbose) globalOptions.push('--verbose');
1925
- if (runConfig.dryRun) globalOptions.push('--dry-run');
1926
- if (runConfig.overrides) globalOptions.push('--overrides');
1927
- // Propagate global options with values
1928
- if (runConfig.model) globalOptions.push(`--model "${runConfig.model}"`);
1929
- if (runConfig.configDirectory) globalOptions.push(`--config-dir "${runConfig.configDirectory}"`);
1930
- if (runConfig.outputDirectory) globalOptions.push(`--output-dir "${runConfig.outputDirectory}"`);
1931
- if (runConfig.preferencesDirectory) globalOptions.push(`--preferences-dir "${runConfig.preferencesDirectory}"`);
1932
- // Build the command with global options
1933
- const optionsString = globalOptions.length > 0 ? ` ${globalOptions.join(' ')}` : '';
1934
- // Add package argument for link/unlink/updates commands
1935
- const packageArg = (_runConfig_tree30 = runConfig.tree) === null || _runConfig_tree30 === void 0 ? void 0 : _runConfig_tree30.packageArgument;
1936
- const packageArgString = packageArg && (builtInCommand === 'link' || builtInCommand === 'unlink' || builtInCommand === 'updates') ? ` "${packageArg}"` : '';
1937
- // Add command-specific options
1938
- let commandSpecificOptions = '';
1939
- // Commit command options
1940
- if (builtInCommand === 'commit') {
1941
- var _runConfig_commit, _runConfig_commit1, _runConfig_commit2, _runConfig_commit3, _runConfig_commit4, _runConfig_commit5, _runConfig_commit6, _runConfig_commit7, _runConfig_commit8, _runConfig_commit9, _runConfig_commit10, _runConfig_commit11, _runConfig_commit12, _runConfig_commit13, _runConfig_commit14, _runConfig_commit15, _runConfig_commit16;
1942
- if ((_runConfig_commit = runConfig.commit) === null || _runConfig_commit === void 0 ? void 0 : _runConfig_commit.selfReflection) {
1943
- commandSpecificOptions += ' --self-reflection';
1944
- }
1945
- if ((_runConfig_commit1 = runConfig.commit) === null || _runConfig_commit1 === void 0 ? void 0 : _runConfig_commit1.add) {
1946
- commandSpecificOptions += ' --add';
1947
- }
1948
- if ((_runConfig_commit2 = runConfig.commit) === null || _runConfig_commit2 === void 0 ? void 0 : _runConfig_commit2.cached) {
1949
- commandSpecificOptions += ' --cached';
1950
- }
1951
- if ((_runConfig_commit3 = runConfig.commit) === null || _runConfig_commit3 === void 0 ? void 0 : _runConfig_commit3.interactive) {
1952
- commandSpecificOptions += ' --interactive';
1953
- }
1954
- if ((_runConfig_commit4 = runConfig.commit) === null || _runConfig_commit4 === void 0 ? void 0 : _runConfig_commit4.amend) {
1955
- commandSpecificOptions += ' --amend';
1956
- }
1957
- if ((_runConfig_commit5 = runConfig.commit) === null || _runConfig_commit5 === void 0 ? void 0 : _runConfig_commit5.skipFileCheck) {
1958
- commandSpecificOptions += ' --skip-file-check';
1959
- }
1960
- if ((_runConfig_commit6 = runConfig.commit) === null || _runConfig_commit6 === void 0 ? void 0 : _runConfig_commit6.maxAgenticIterations) {
1961
- commandSpecificOptions += ` --max-agentic-iterations ${runConfig.commit.maxAgenticIterations}`;
1962
- }
1963
- if ((_runConfig_commit7 = runConfig.commit) === null || _runConfig_commit7 === void 0 ? void 0 : _runConfig_commit7.allowCommitSplitting) {
1964
- commandSpecificOptions += ' --allow-commit-splitting';
1965
- }
1966
- if ((_runConfig_commit8 = runConfig.commit) === null || _runConfig_commit8 === void 0 ? void 0 : _runConfig_commit8.messageLimit) {
1967
- commandSpecificOptions += ` --message-limit ${runConfig.commit.messageLimit}`;
1968
- }
1969
- if ((_runConfig_commit9 = runConfig.commit) === null || _runConfig_commit9 === void 0 ? void 0 : _runConfig_commit9.maxDiffBytes) {
1970
- commandSpecificOptions += ` --max-diff-bytes ${runConfig.commit.maxDiffBytes}`;
1971
- }
1972
- if ((_runConfig_commit10 = runConfig.commit) === null || _runConfig_commit10 === void 0 ? void 0 : _runConfig_commit10.direction) {
1973
- commandSpecificOptions += ` --direction "${runConfig.commit.direction}"`;
1974
- }
1975
- if ((_runConfig_commit11 = runConfig.commit) === null || _runConfig_commit11 === void 0 ? void 0 : _runConfig_commit11.context) {
1976
- commandSpecificOptions += ` --context "${runConfig.commit.context}"`;
1977
- }
1978
- if (((_runConfig_commit12 = runConfig.commit) === null || _runConfig_commit12 === void 0 ? void 0 : _runConfig_commit12.contextFiles) && runConfig.commit.contextFiles.length > 0) {
1979
- commandSpecificOptions += ` --context-files ${runConfig.commit.contextFiles.join(' ')}`;
1980
- }
1981
- // Push option can be boolean or string (remote name)
1982
- if ((_runConfig_commit13 = runConfig.commit) === null || _runConfig_commit13 === void 0 ? void 0 : _runConfig_commit13.push) {
1983
- if (typeof runConfig.commit.push === 'string') {
1984
- commandSpecificOptions += ` --push "${runConfig.commit.push}"`;
1985
- } else {
1986
- commandSpecificOptions += ' --push';
1987
- }
1988
- }
1989
- // Model-specific options for commit
1990
- if ((_runConfig_commit14 = runConfig.commit) === null || _runConfig_commit14 === void 0 ? void 0 : _runConfig_commit14.model) {
1991
- commandSpecificOptions += ` --model "${runConfig.commit.model}"`;
1992
- }
1993
- if ((_runConfig_commit15 = runConfig.commit) === null || _runConfig_commit15 === void 0 ? void 0 : _runConfig_commit15.openaiReasoning) {
1994
- commandSpecificOptions += ` --openai-reasoning ${runConfig.commit.openaiReasoning}`;
1995
- }
1996
- if ((_runConfig_commit16 = runConfig.commit) === null || _runConfig_commit16 === void 0 ? void 0 : _runConfig_commit16.openaiMaxOutputTokens) {
1997
- commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.commit.openaiMaxOutputTokens}`;
1998
- }
1999
- }
2000
- // Release command options (only for direct 'release' command)
2001
- if (builtInCommand === 'release') {
2002
- var _runConfig_release, _runConfig_release1, _runConfig_release2, _runConfig_release3, _runConfig_release4, _runConfig_release5, _runConfig_release6, _runConfig_release7, _runConfig_release8, _runConfig_release9, _runConfig_release10, _runConfig_release11, _runConfig_release12, _runConfig_release13, _runConfig_release14;
2003
- if ((_runConfig_release = runConfig.release) === null || _runConfig_release === void 0 ? void 0 : _runConfig_release.selfReflection) {
2004
- commandSpecificOptions += ' --self-reflection';
2005
- }
2006
- if ((_runConfig_release1 = runConfig.release) === null || _runConfig_release1 === void 0 ? void 0 : _runConfig_release1.maxAgenticIterations) {
2007
- commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
2008
- }
2009
- if ((_runConfig_release2 = runConfig.release) === null || _runConfig_release2 === void 0 ? void 0 : _runConfig_release2.interactive) {
2010
- commandSpecificOptions += ' --interactive';
2011
- }
2012
- if ((_runConfig_release3 = runConfig.release) === null || _runConfig_release3 === void 0 ? void 0 : _runConfig_release3.from) {
2013
- commandSpecificOptions += ` --from "${runConfig.release.from}"`;
2014
- }
2015
- if ((_runConfig_release4 = runConfig.release) === null || _runConfig_release4 === void 0 ? void 0 : _runConfig_release4.to) {
2016
- commandSpecificOptions += ` --to "${runConfig.release.to}"`;
2017
- }
2018
- if ((_runConfig_release5 = runConfig.release) === null || _runConfig_release5 === void 0 ? void 0 : _runConfig_release5.focus) {
2019
- commandSpecificOptions += ` --focus "${runConfig.release.focus}"`;
2020
- }
2021
- if ((_runConfig_release6 = runConfig.release) === null || _runConfig_release6 === void 0 ? void 0 : _runConfig_release6.context) {
2022
- commandSpecificOptions += ` --context "${runConfig.release.context}"`;
2023
- }
2024
- if (((_runConfig_release7 = runConfig.release) === null || _runConfig_release7 === void 0 ? void 0 : _runConfig_release7.contextFiles) && runConfig.release.contextFiles.length > 0) {
2025
- commandSpecificOptions += ` --context-files ${runConfig.release.contextFiles.join(' ')}`;
2026
- }
2027
- if ((_runConfig_release8 = runConfig.release) === null || _runConfig_release8 === void 0 ? void 0 : _runConfig_release8.messageLimit) {
2028
- commandSpecificOptions += ` --message-limit ${runConfig.release.messageLimit}`;
2029
- }
2030
- if ((_runConfig_release9 = runConfig.release) === null || _runConfig_release9 === void 0 ? void 0 : _runConfig_release9.maxDiffBytes) {
2031
- commandSpecificOptions += ` --max-diff-bytes ${runConfig.release.maxDiffBytes}`;
2032
- }
2033
- if ((_runConfig_release10 = runConfig.release) === null || _runConfig_release10 === void 0 ? void 0 : _runConfig_release10.noMilestones) {
2034
- commandSpecificOptions += ' --no-milestones';
2035
- }
2036
- if ((_runConfig_release11 = runConfig.release) === null || _runConfig_release11 === void 0 ? void 0 : _runConfig_release11.fromMain) {
2037
- commandSpecificOptions += ' --from-main';
2038
- }
2039
- // Model-specific options for release
2040
- if ((_runConfig_release12 = runConfig.release) === null || _runConfig_release12 === void 0 ? void 0 : _runConfig_release12.model) {
2041
- commandSpecificOptions += ` --model "${runConfig.release.model}"`;
2042
- }
2043
- if ((_runConfig_release13 = runConfig.release) === null || _runConfig_release13 === void 0 ? void 0 : _runConfig_release13.openaiReasoning) {
2044
- commandSpecificOptions += ` --openai-reasoning ${runConfig.release.openaiReasoning}`;
2045
- }
2046
- if ((_runConfig_release14 = runConfig.release) === null || _runConfig_release14 === void 0 ? void 0 : _runConfig_release14.openaiMaxOutputTokens) {
2047
- commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.release.openaiMaxOutputTokens}`;
2048
- }
2049
- }
2050
- // Publish command options (pass self-reflection - publish reads other release config from config file)
2051
- if (builtInCommand === 'publish') {
2052
- var _runConfig_release15, _runConfig_release16;
2053
- if ((_runConfig_release15 = runConfig.release) === null || _runConfig_release15 === void 0 ? void 0 : _runConfig_release15.selfReflection) {
2054
- commandSpecificOptions += ' --self-reflection';
2055
- }
2056
- if ((_runConfig_release16 = runConfig.release) === null || _runConfig_release16 === void 0 ? void 0 : _runConfig_release16.maxAgenticIterations) {
2057
- commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
2058
- }
2059
- // Publish has its own --from, --interactive, --from-main flags (not from release config)
2060
- }
2061
- // Unlink command options
2062
- if (builtInCommand === 'unlink' && ((_runConfig_tree31 = runConfig.tree) === null || _runConfig_tree31 === void 0 ? void 0 : _runConfig_tree31.cleanNodeModules)) {
2063
- commandSpecificOptions += ' --clean-node-modules';
2064
- }
2065
- // Link/Unlink externals
2066
- if ((builtInCommand === 'link' || builtInCommand === 'unlink') && ((_runConfig_tree32 = runConfig.tree) === null || _runConfig_tree32 === void 0 ? void 0 : _runConfig_tree32.externals) && runConfig.tree.externals.length > 0) {
2067
- commandSpecificOptions += ` --externals ${runConfig.tree.externals.join(' ')}`;
2068
- }
2069
- commandToRun = `kodrdriv ${builtInCommand}${optionsString}${packageArgString}${commandSpecificOptions}`;
2070
- isBuiltInCommand = true;
2071
- } else if (cmd) {
2072
- // Custom command mode
2073
- commandToRun = cmd;
2074
- }
2075
- if (commandToRun) {
2076
- var _runConfig_tree33, _runConfig_tree34;
2077
- // Validate scripts for run command before execution
2078
- const scriptsToValidate = runConfig.__scriptsToValidate;
2079
- if (scriptsToValidate && scriptsToValidate.length > 0) {
2080
- logger.info(`🔍 Validating scripts before execution: ${scriptsToValidate.join(', ')}`);
2081
- const validation = await validateScripts(dependencyGraph.packages, scriptsToValidate);
2082
- if (!validation.valid) {
2083
- logger.error('');
2084
- logger.error('❌ Script validation failed. Cannot proceed with execution.');
2085
- logger.error('');
2086
- logger.error('💡 To fix this:');
2087
- logger.error(' 1. Add the missing scripts to the package.json files');
2088
- logger.error(' 2. Or exclude packages that don\'t need these scripts using --exclude');
2089
- logger.error(' 3. Or run individual packages that have the required scripts');
2090
- logger.error('');
2091
- throw new Error('Script validation failed. See details above.');
2092
- }
2093
- }
2094
- // Validate command for parallel execution if parallel mode is enabled
2095
- if ((_runConfig_tree33 = runConfig.tree) === null || _runConfig_tree33 === void 0 ? void 0 : _runConfig_tree33.parallel) {
2096
- const { CommandValidator } = await import('@eldrforge/tree-execution');
2097
- const validation = CommandValidator.validateForParallel(commandToRun, builtInCommand);
2098
- CommandValidator.logValidation(validation);
2099
- if (!validation.valid) {
2100
- logger.error('');
2101
- logger.error('Cannot proceed with parallel execution due to validation errors.');
2102
- logger.error('Run without --parallel flag to execute sequentially.');
2103
- throw new Error('Command validation failed for parallel execution');
2104
- }
2105
- // Apply recommended concurrency if not explicitly set
2106
- if (!runConfig.tree.maxConcurrency) {
2107
- const os = await import('os');
2108
- const recommended = CommandValidator.getRecommendedConcurrency(builtInCommand, os.cpus().length, commandToRun);
2109
- if (recommended !== os.cpus().length) {
2110
- const reason = builtInCommand ? builtInCommand : `custom command "${commandToRun}"`;
2111
- logger.info(`💡 Using recommended concurrency for ${reason}: ${recommended}`);
2112
- runConfig.tree.maxConcurrency = recommended;
2113
- }
2114
- }
2115
- }
2116
- // Create set of all package names for inter-project dependency detection
2117
- const allPackageNames = new Set(Array.from(dependencyGraph.packages.keys()));
2118
- // Initialize execution context if not continuing
2119
- if (!executionContext) {
2120
- executionContext = {
2121
- command: commandToRun,
2122
- originalConfig: runConfig,
2123
- publishedVersions: [],
2124
- completedPackages: [],
2125
- buildOrder: buildOrder,
2126
- startTime: new Date(),
2127
- lastUpdateTime: new Date()
2128
- };
2129
- // Save initial context for commands that support continuation
2130
- if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
2131
- await saveExecutionContext(executionContext, runConfig.outputDirectory);
2132
- }
2133
- }
2134
- // Add spacing before command execution
2135
- logger.info('');
2136
- const executionDescription = isBuiltInCommand ? `built-in command "${builtInCommand}"` : `"${commandToRun}"`;
2137
- logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Executing ${executionDescription} in ${buildOrder.length} packages...`);
2138
- // Add detailed multi-project execution context for debug mode
2139
- if (runConfig.debug) {
2140
- var _runConfig_tree35, _runConfig_tree36;
2141
- logger.debug('MULTI_PROJECT_PLAN: Execution plan initialized | Total Packages: %d | Command: %s | Built-in: %s | Dry Run: %s | Parallel: %s', buildOrder.length, commandToRun, isBuiltInCommand, isDryRun, ((_runConfig_tree35 = runConfig.tree) === null || _runConfig_tree35 === void 0 ? void 0 : _runConfig_tree35.parallel) || false);
2142
- // Log package execution order with dependencies
2143
- logger.debug('MULTI_PROJECT_ORDER: Package execution sequence:');
2144
- buildOrder.forEach((pkgName, idx)=>{
2145
- const pkgInfo = dependencyGraph.packages.get(pkgName);
2146
- if (pkgInfo) {
2147
- const deps = Array.isArray(pkgInfo.dependencies) ? pkgInfo.dependencies : [];
2148
- const depStr = deps.length > 0 ? ` | Dependencies: [${deps.join(', ')}]` : ' | Dependencies: none';
2149
- logger.debug(' %d. %s%s', idx + 1, pkgName, depStr);
2150
- }
2151
- });
2152
- // Log dependency levels for parallel execution understanding
2153
- const levels = new Map();
2154
- const calculateLevels = (pkg, visited = new Set())=>{
2155
- if (levels.has(pkg)) return levels.get(pkg);
2156
- if (visited.has(pkg)) return 0; // Circular dependency
2157
- visited.add(pkg);
2158
- const pkgInfo = dependencyGraph.packages.get(pkg);
2159
- const deps = Array.isArray(pkgInfo === null || pkgInfo === void 0 ? void 0 : pkgInfo.dependencies) ? pkgInfo.dependencies : [];
2160
- if (!pkgInfo || deps.length === 0) {
2161
- levels.set(pkg, 0);
2162
- return 0;
2163
- }
2164
- const maxDepLevel = Math.max(...deps.map((dep)=>calculateLevels(dep, new Set(visited))));
2165
- const level = maxDepLevel + 1;
2166
- levels.set(pkg, level);
2167
- return level;
2168
- };
2169
- buildOrder.forEach((pkg)=>calculateLevels(pkg));
2170
- const maxLevel = Math.max(...Array.from(levels.values()));
2171
- logger.debug('MULTI_PROJECT_LEVELS: Dependency depth analysis | Max Depth: %d levels', maxLevel + 1);
2172
- for(let level = 0; level <= maxLevel; level++){
2173
- const packagesAtLevel = buildOrder.filter((pkg)=>levels.get(pkg) === level);
2174
- logger.debug(' Level %d (%d packages): %s', level, packagesAtLevel.length, packagesAtLevel.join(', '));
2175
- }
2176
- if ((_runConfig_tree36 = runConfig.tree) === null || _runConfig_tree36 === void 0 ? void 0 : _runConfig_tree36.parallel) {
2177
- var _runConfig_tree_retry1;
2178
- const os = await import('os');
2179
- const concurrency = runConfig.tree.maxConcurrency || os.cpus().length;
2180
- logger.debug('MULTI_PROJECT_PARALLEL: Parallel execution configuration | Max Concurrency: %d | Retry Attempts: %d', concurrency, ((_runConfig_tree_retry1 = runConfig.tree.retry) === null || _runConfig_tree_retry1 === void 0 ? void 0 : _runConfig_tree_retry1.maxAttempts) || 3);
2181
- }
2182
- if (isContinue) {
2183
- const completed = (executionContext === null || executionContext === void 0 ? void 0 : executionContext.completedPackages.length) || 0;
2184
- logger.debug('MULTI_PROJECT_RESUME: Continuing previous execution | Completed: %d | Remaining: %d', completed, buildOrder.length - completed);
2185
- }
2186
- }
2187
- // Show info for publish commands
2188
- if (isBuiltInCommand && builtInCommand === 'publish') {
2189
- logger.info('Inter-project dependencies will be automatically updated before each publish.');
2190
- }
2191
- let successCount = 0;
2192
- let failedPackage = null;
2193
- // If continuing, start from where we left off
2194
- const startIndex = isContinue && executionContext ? executionContext.completedPackages.length : 0;
2195
- // Check if parallel execution is enabled
2196
- if ((_runConfig_tree34 = runConfig.tree) === null || _runConfig_tree34 === void 0 ? void 0 : _runConfig_tree34.parallel) {
2197
- var _runConfig_tree_retry2, _runConfig_tree_retry3, _runConfig_tree_retry4, _runConfig_tree_retry5;
2198
- logger.info('🚀 Using parallel execution mode');
2199
- // If dry run, show preview instead of executing
2200
- if (isDryRun) {
2201
- const preview = await generateDryRunPreview(dependencyGraph, buildOrder, commandToRun, runConfig);
2202
- return preview;
2203
- }
2204
- // Import parallel execution components
2205
- const { TreeExecutionAdapter, createParallelProgressLogger, formatParallelResult } = await import('@eldrforge/tree-execution');
2206
- const os = await import('os');
2207
- // Create task pool
2208
- const adapter = new TreeExecutionAdapter({
2209
- graph: dependencyGraph,
2210
- maxConcurrency: runConfig.tree.maxConcurrency || os.cpus().length,
2211
- command: commandToRun,
2212
- config: runConfig,
2213
- checkpointPath: runConfig.outputDirectory,
2214
- continue: isContinue,
2215
- maxRetries: ((_runConfig_tree_retry2 = runConfig.tree.retry) === null || _runConfig_tree_retry2 === void 0 ? void 0 : _runConfig_tree_retry2.maxAttempts) || 3,
2216
- initialRetryDelay: ((_runConfig_tree_retry3 = runConfig.tree.retry) === null || _runConfig_tree_retry3 === void 0 ? void 0 : _runConfig_tree_retry3.initialDelayMs) || 5000,
2217
- maxRetryDelay: ((_runConfig_tree_retry4 = runConfig.tree.retry) === null || _runConfig_tree_retry4 === void 0 ? void 0 : _runConfig_tree_retry4.maxDelayMs) || 60000,
2218
- backoffMultiplier: ((_runConfig_tree_retry5 = runConfig.tree.retry) === null || _runConfig_tree_retry5 === void 0 ? void 0 : _runConfig_tree_retry5.backoffMultiplier) || 2
2219
- }, executePackage);
2220
- // Set up progress logging
2221
- createParallelProgressLogger(adapter.getPool(), runConfig);
2222
- // Execute
2223
- const result = await adapter.execute();
2224
- // Format and return result
2225
- const formattedResult = formatParallelResult(result);
2226
- return formattedResult;
2227
- }
2228
- // Sequential execution
2229
- const executionStartTime = Date.now();
2230
- for(let i = startIndex; i < buildOrder.length; i++){
2231
- const packageName = buildOrder[i];
2232
- // Skip if already completed (in continue mode)
2233
- if (executionContext && executionContext.completedPackages.includes(packageName)) {
2234
- successCount++;
2235
- continue;
2236
- }
2237
- const packageInfo = dependencyGraph.packages.get(packageName);
2238
- const packageLogger = createPackageLogger(packageName, i + 1, buildOrder.length, isDryRun);
2239
- const result = await executePackage(packageName, packageInfo, commandToRun, runConfig, isDryRun, i, buildOrder.length, allPackageNames, isBuiltInCommand);
2240
- if (result.success) {
2241
- successCount++;
2242
- // Update context
2243
- if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
2244
- executionContext.completedPackages.push(packageName);
2245
- executionContext.publishedVersions = publishedVersions;
2246
- executionContext.lastUpdateTime = new Date();
2247
- await saveExecutionContext(executionContext, runConfig.outputDirectory);
2248
- }
2249
- // Add spacing between packages (except after the last one)
2250
- if (i < buildOrder.length - 1) {
2251
- logger.info('');
2252
- logger.info('');
2253
- }
2254
- } else {
2255
- failedPackage = packageName;
2256
- const formattedError = formatSubprojectError(packageName, result.error, packageInfo, i + 1, buildOrder.length);
2257
- if (!isDryRun) {
2258
- var _result_error;
2259
- packageLogger.error(`Execution failed`);
2260
- logger.error(formattedError);
2261
- logger.error(`Failed after ${successCount} successful packages.`);
2262
- // Special handling for timeout errors
2263
- if (result.isTimeoutError) {
2264
- logger.error('');
2265
- logger.error('⏰ TIMEOUT DETECTED: This appears to be a timeout error.');
2266
- logger.error(' This commonly happens when PR checks take longer than expected.');
2267
- logger.error(' The execution context has been saved for recovery.');
2268
- logger.error('');
2269
- // Save context even on timeout for recovery
2270
- if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run')) {
2271
- executionContext.completedPackages.push(packageName);
2272
- executionContext.publishedVersions = publishedVersions;
2273
- executionContext.lastUpdateTime = new Date();
2274
- await saveExecutionContext(executionContext, runConfig.outputDirectory);
2275
- logger.info('💾 Execution context saved for recovery.');
2276
- }
2277
- // For publish commands, provide specific guidance about CI/CD setup
2278
- if (builtInCommand === 'publish') {
2279
- logger.error('');
2280
- logger.error('💡 PUBLISH TIMEOUT TROUBLESHOOTING:');
2281
- logger.error(' This project may not have CI/CD workflows configured.');
2282
- logger.error(' Common solutions:');
2283
- logger.error(' 1. Set up GitHub Actions workflows for this repository');
2284
- logger.error(' 2. Use --sendit flag to skip user confirmation:');
2285
- logger.error(` kodrdriv tree publish --sendit`);
2286
- logger.error(' 3. Or manually promote this package:');
2287
- logger.error(` kodrdriv tree publish --promote ${packageName}`);
2288
- logger.error('');
2289
- }
2290
- }
2291
- logger.error(`To resume from this point, run:`);
2292
- if (isBuiltInCommand) {
2293
- logger.error(` kodrdriv tree ${builtInCommand} --continue`);
2294
- } else {
2295
- logger.error(` kodrdriv tree --continue --cmd "${commandToRun}"`);
2296
- }
2297
- // For timeout errors, provide additional recovery instructions
2298
- if (result.isTimeoutError) {
2299
- logger.error('');
2300
- logger.error('🔧 RECOVERY OPTIONS:');
2301
- if (builtInCommand === 'publish') {
2302
- logger.error(' 1. Wait for the PR checks to complete, then run:');
2303
- logger.error(` cd ${packageInfo.path}`);
2304
- logger.error(` kodrdriv publish`);
2305
- logger.error(' 2. After the individual publish completes, run:');
2306
- logger.error(` kodrdriv tree ${builtInCommand} --continue`);
2307
- } else {
2308
- logger.error(' 1. Fix any issues in the package, then run:');
2309
- logger.error(` cd ${packageInfo.path}`);
2310
- logger.error(` ${commandToRun}`);
2311
- logger.error(' 2. After the command completes successfully, run:');
2312
- logger.error(` kodrdriv tree ${builtInCommand} --continue`);
2313
- }
2314
- logger.error(' 3. Or promote this package to completed status:');
2315
- logger.error(` kodrdriv tree ${builtInCommand} --promote ${packageName}`);
2316
- logger.error(' 4. Or manually edit .kodrdriv-context to mark this package as completed');
2317
- }
2318
- // Add clear error summary at the very end
2319
- logger.error('');
2320
- logger.error('📋 ERROR SUMMARY:');
2321
- logger.error(` Project that failed: ${packageName}`);
2322
- logger.error(` Location: ${packageInfo.path}`);
2323
- logger.error(` Position in tree: ${i + 1} of ${buildOrder.length} packages`);
2324
- logger.error(` What failed: ${((_result_error = result.error) === null || _result_error === void 0 ? void 0 : _result_error.message) || 'Unknown error'}`);
2325
- logger.error('');
2326
- throw new Error(`Command failed in package ${packageName}`);
2327
- }
2328
- break;
2329
- }
2330
- }
2331
- if (!failedPackage) {
2332
- const totalExecutionTime = Date.now() - executionStartTime;
2333
- const totalSeconds = (totalExecutionTime / 1000).toFixed(1);
2334
- const totalMinutes = (totalExecutionTime / 60000).toFixed(1);
2335
- const timeDisplay = totalExecutionTime < 60000 ? `${totalSeconds}s` : `${totalMinutes}min (${totalSeconds}s)`;
2336
- logger.info('');
2337
- logger.info('═══════════════════════════════════════════════════════════');
2338
- const summary = `${isDryRun ? 'DRY RUN: ' : ''}All ${buildOrder.length} packages completed successfully! 🎉`;
2339
- logger.info(summary);
2340
- logger.info(`⏱️ Total execution time: ${timeDisplay}`);
2341
- logger.info(`📦 Packages processed: ${successCount}/${buildOrder.length}`);
2342
- logger.info('═══════════════════════════════════════════════════════════');
2343
- logger.info('');
2344
- // Clean up context on successful completion
2345
- if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
2346
- await cleanupContext(runConfig.outputDirectory);
2347
- }
2348
- return returnOutput; // Don't duplicate the summary in return string
2349
- }
2350
- }
2351
- return returnOutput;
2352
- } catch (error) {
2353
- const errorMessage = `Failed to analyze workspace: ${error.message}`;
2354
- logger.error(errorMessage);
2355
- throw new Error(errorMessage);
2356
- } finally{
2357
- // Intentionally preserve the mutex across executions to support multiple runs in the same process (e.g., test suite)
2358
- // Do not destroy here; the process lifecycle will clean up resources.
2359
- }
2360
- };
2361
-
2362
- export { execute, executePackage };
2363
- //# sourceMappingURL=tree.js.map