@eldrforge/commands-tree 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,4243 @@
1
+ import { getLogger, getDryRunLogger, isInGitRepository, runGitWithLock, getOutputPath, DEFAULT_OUTPUT_DIRECTORY } from '@eldrforge/core';
2
+ import { safeJsonParse, validatePackageJson, run, runSecure, getGloballyLinkedPackages, getGitStatusSummary, getLinkedDependencies, getLinkCompatibilityProblems } from '@eldrforge/git-tools';
3
+ import * as Commit from '@eldrforge/commands-git';
4
+ import { findAllPackageJsonFiles, PerformanceTimer, optimizePrecommitCommand, recordTestRun } from '@eldrforge/commands-git';
5
+ import { createStorage } from '@eldrforge/shared';
6
+ import fs from 'fs/promises';
7
+ import path from 'path';
8
+ import { exec } from 'child_process';
9
+ import util from 'util';
10
+ import { scanForPackageJsonFiles, buildDependencyGraph, topologicalSort, parsePackageJson, shouldExclude } from '@eldrforge/tree-core';
11
+ import { SimpleMutex } from '@eldrforge/tree-execution';
12
+
13
/**
 * Check whether the given path is a symbolic link.
 * Uses lstat (not stat) so the link itself is inspected rather than its target.
 * @param {string} filePath - Path to inspect.
 * @returns {Promise<boolean>} true only for an existing symlink; false for
 *   regular files, directories, or paths that cannot be stat'ed at all.
 */
const isSymbolicLink$1 = async (filePath)=>{
    let stats;
    try {
        stats = await fs.lstat(filePath);
    } catch {
        // Missing path or permission failure — treat as "not a symlink".
        return false;
    }
    return stats.isSymbolicLink();
};
22
/**
 * Resolve the raw target string of a symbolic link.
 * @param {string} filePath - Path expected to be a symlink.
 * @returns {Promise<string|null>} the link target exactly as stored (may be
 *   relative), or null when the path does not exist or is not a symlink.
 */
const getSymbolicLinkTarget$1 = async (filePath)=>{
    try {
        // readlink throws (EINVAL/ENOENT) for non-links; map that to null.
        return await fs.readlink(filePath);
    } catch {
        return null;
    }
};
31
/**
 * List the symlinked dependencies of a package.
 * Reads the package's declared dependencies/devDependencies and inspects the
 * corresponding node_modules entries; entries that are symlinks are reported
 * with their raw link target and a heuristic "external" flag.
 * @param {string} packagePath - Root directory of the package.
 * @param {string} packageName - Name used only for the warning message.
 * @param {object} storage - Storage abstraction providing readFile.
 * @param {object} logger - Logger for warnings.
 * @returns {Promise<Array<{dependencyName: string, targetPath: string, isExternal: boolean}>>}
 *   Empty on any read/parse failure (best-effort; a warning is logged).
 */
const findLinkedDependencies$1 = async (packagePath, packageName, storage, logger)=>{
    const results = [];
    try {
        const manifestPath = path.join(packagePath, 'package.json');
        const raw = await storage.readFile(manifestPath, 'utf-8');
        const manifest = validatePackageJson(safeJsonParse(raw, manifestPath), manifestPath);
        const declared = { ...manifest.dependencies, ...manifest.devDependencies };
        const nodeModules = path.join(packagePath, 'node_modules');
        for (const dependencyName of Object.keys(declared)) {
            // Scoped names live one level deeper: node_modules/@scope/name.
            let dependencyPath;
            if (dependencyName.startsWith('@')) {
                const [scopePart, namePart] = dependencyName.split('/');
                dependencyPath = path.join(nodeModules, scopePart, namePart);
            } else {
                dependencyPath = path.join(nodeModules, dependencyName);
            }
            if (!(await isSymbolicLink$1(dependencyPath))) continue;
            const target = await getSymbolicLinkTarget$1(dependencyPath);
            if (!target) continue;
            // Heuristic: a target outside node_modules, or one escaping upward,
            // is presumed to come from outside this workspace.
            const isExternal = !target.includes('node_modules') || target.startsWith('..');
            results.push({
                dependencyName,
                targetPath: target,
                isExternal
            });
        }
    } catch (error) {
        logger.warn(`LINKED_DEPS_CHECK_FAILED: Unable to check linked dependencies | Package: ${packageName} | Error: ${error.message}`);
    }
    return results;
};
73
/**
 * Decide whether a dependency name matches any configured external-link pattern.
 * Matching is plain string semantics: exact equality or prefix match
 * (no globbing).
 * @param {string} dependencyName - Dependency to test.
 * @param {string[]|undefined} externalLinkPatterns - Configured patterns.
 * @returns {boolean} false when no patterns are configured.
 */
const matchesExternalLinkPattern = (dependencyName, externalLinkPatterns)=>{
    const patterns = externalLinkPatterns !== null && externalLinkPatterns !== void 0 ? externalLinkPatterns : [];
    return patterns.some(
        (pattern)=>dependencyName === pattern || dependencyName.startsWith(pattern)
    );
};
83
/**
 * Create (or repair) a symlink for `packageName` inside `targetDir`'s
 * node_modules, pointing at `sourcePath`.
 *
 * Handles four pre-existing states at the target location:
 *   - correct symlink: left as-is,
 *   - wrong symlink: relinked,
 *   - directory: removed recursively and replaced,
 *   - regular file: removed and replaced.
 * The link is stored as a path relative to the link's parent directory.
 *
 * @param {string} packageName - Package name, possibly scoped ("@scope/name").
 * @param {string} sourcePath - Directory the link should resolve to.
 * @param {string} targetDir - Consumer package root; link goes under its node_modules.
 * @param {object} logger - Logger for progress/warnings.
 * @param {boolean} [isDryRun=false] - When true, only logs what would happen.
 * @returns {Promise<boolean>} true on success (or dry-run), false on failure
 *   (failure is logged, never thrown).
 */
const createSymbolicLink = async (packageName, sourcePath, targetDir, logger, isDryRun = false)=>{
    try {
        // Parse package name to get scope and name parts
        const [scope, name] = packageName.startsWith('@') ? packageName.split('/') : [
            null,
            packageName
        ];
        // Create the target path structure
        const nodeModulesPath = path.join(targetDir, 'node_modules');
        let targetPath;
        if (scope) {
            // Scoped package: node_modules/@scope/name
            const scopeDir = path.join(nodeModulesPath, scope);
            targetPath = path.join(scopeDir, name);
            if (!isDryRun) {
                // Ensure scope directory exists
                await fs.mkdir(scopeDir, {
                    recursive: true
                });
            }
        } else {
            // Unscoped package: node_modules/name
            targetPath = path.join(nodeModulesPath, name);
            if (!isDryRun) {
                // Ensure node_modules directory exists
                await fs.mkdir(nodeModulesPath, {
                    recursive: true
                });
            }
        }
        if (isDryRun) {
            logger.verbose(`DRY RUN: Would create symlink: ${targetPath} -> ${sourcePath}`);
            return true;
        }
        // Create the symbolic link using relative path for better portability
        const relativePath = path.relative(path.dirname(targetPath), sourcePath);
        // Check if something already exists at the target path
        try {
            const stats = await fs.lstat(targetPath); // Use lstat to not follow symlinks
            if (stats.isSymbolicLink()) {
                // It's a symlink, check if it points to the correct target
                const existingLink = await fs.readlink(targetPath);
                if (existingLink === relativePath) {
                    logger.verbose(`Symlink already exists and points to correct target: ${targetPath} -> ${relativePath}`);
                    return true;
                } else {
                    logger.info(`SYMLINK_FIXING: Correcting symlink target | Path: ${targetPath} | Old Target: ${existingLink} | New Target: ${relativePath}`);
                    await fs.unlink(targetPath);
                    // 'dir' type hint matters on Windows; ignored elsewhere.
                    await fs.symlink(relativePath, targetPath, 'dir');
                    logger.info(`SYMLINK_FIXED: Successfully updated symlink | Path: ${targetPath} | Target: ${relativePath} | Type: directory`);
                    return true;
                }
            } else if (stats.isDirectory()) {
                // It's a directory, remove it
                logger.warn(`SYMLINK_DIRECTORY_CONFLICT: Removing existing directory to create symlink | Path: ${targetPath} | Type: directory | Action: Remove and replace with symlink`);
                await fs.rm(targetPath, {
                    recursive: true,
                    force: true
                });
                await fs.symlink(relativePath, targetPath, 'dir');
                logger.info(`SYMLINK_CREATED: Successfully created symlink after directory removal | Path: ${targetPath} | Target: ${relativePath} | Type: directory`);
                return true;
            } else {
                // It's a file, remove it
                logger.warn(`SYMLINK_FILE_CONFLICT: Removing existing file to create symlink | Path: ${targetPath} | Type: file | Action: Remove and replace with symlink`);
                await fs.unlink(targetPath);
                await fs.symlink(relativePath, targetPath, 'dir');
                logger.info(`SYMLINK_CREATED: Successfully created symlink after file removal | Path: ${targetPath} | Target: ${relativePath} | Type: directory`);
                return true;
            }
        } catch (error) {
            if (error.code === 'ENOENT') {
                // Nothing exists at target path, create the symlink
                await fs.symlink(relativePath, targetPath, 'dir');
                logger.verbose(`Created symlink: ${targetPath} -> ${relativePath}`);
                return true;
            } else {
                throw error; // Re-throw unexpected errors
            }
        }
    } catch (error) {
        // Best-effort contract: callers get false instead of an exception.
        logger.warn(`SYMLINK_CREATE_FAILED: Unable to create symlink | Package: ${packageName} | Error: ${error.message} | Status: failed`);
        return false;
    }
};
169
/**
 * Parse a CLI package argument into its scope and optional full package name.
 *   "@fjell"      -> { scope: "@fjell" }
 *   "@fjell/core" -> { scope: "@fjell", packageName: "@fjell/core" }
 * @param {string} packageArg - Argument; must begin with "@".
 * @returns {{scope: string, packageName?: string}}
 * @throws {Error} when the argument is not scoped.
 */
const parsePackageArgument$1 = (packageArg)=>{
    if (!packageArg.startsWith('@')) {
        throw new Error(`Package argument must start with @ (scope): ${packageArg}`);
    }
    const [scope, ...rest] = packageArg.split('/');
    // A bare scope has no "/" segment; otherwise report the full original name.
    return rest.length === 0 ? { scope } : { scope, packageName: packageArg };
};
189
/**
 * Find workspace packages that belong to the given scope or exactly match the
 * given package name.
 * @param {string[]} targetDirectories - Roots to scan for package.json files.
 * @param {string} scope - Scope such as "@fjell"; packages whose name starts
 *   with "<scope>/" match.
 * @param {object} storage - Storage abstraction providing readFile.
 * @param {object} logger - Logger for parse warnings.
 * @param {string} [packageName] - Optional exact package name to match.
 * @returns {Promise<Array<{name: string, path: string, isSource: boolean}>>}
 *   Unparseable package.json files are skipped with a warning.
 */
const findMatchingPackages$1 = async (targetDirectories, scope, storage, logger, packageName)=>{
    const matchingPackages = [];
    // Find all package.json files in target directories
    let allPackageJsonFiles = [];
    for (const targetDirectory of targetDirectories){
        const packageJsonFiles = await findAllPackageJsonFiles(targetDirectory, storage);
        allPackageJsonFiles = allPackageJsonFiles.concat(packageJsonFiles);
    }
    for (const packageJsonLocation of allPackageJsonFiles){
        // FIX: use path.dirname instead of `.replace('/package.json', '')`.
        // String.replace removes only the FIRST occurrence of the substring —
        // anywhere in the path, not just the trailing file name — and leaves a
        // bare "package.json" path unchanged. dirname is the correct derivation.
        const packageDir = path.dirname(packageJsonLocation.path);
        try {
            const packageJsonContent = await storage.readFile(packageJsonLocation.path, 'utf-8');
            const parsed = safeJsonParse(packageJsonContent, packageJsonLocation.path);
            const packageJson = validatePackageJson(parsed, packageJsonLocation.path);
            if (!packageJson.name) continue;
            const isInScope = packageJson.name.startsWith(scope + '/');
            const isExactMatch = packageName && packageJson.name === packageName;
            if (isInScope || isExactMatch) {
                matchingPackages.push({
                    name: packageJson.name,
                    path: packageDir,
                    // When an explicit name is requested only that package is the
                    // "source"; otherwise every in-scope package is.
                    isSource: packageName ? packageJson.name === packageName : isInScope
                });
            }
        } catch (error) {
            logger.warn(`PACKAGE_JSON_PARSE_FAILED: Unable to parse package.json | Path: ${packageJsonLocation.path} | Error: ${error.message}`);
        }
    }
    return matchingPackages;
};
220
/**
 * Find workspace packages that declare a dependency on `targetPackageName`
 * in any dependency section (dependencies, devDependencies, peerDependencies,
 * optionalDependencies). The target package itself is excluded.
 * @param {string[]} targetDirectories - Roots to scan for package.json files.
 * @param {string} targetPackageName - Package whose consumers are sought.
 * @param {object} storage - Storage abstraction providing readFile.
 * @param {object} logger - Logger for parse warnings.
 * @returns {Promise<Array<{name: string, path: string}>>}
 *   Unparseable package.json files are skipped with a warning.
 */
const findConsumingPackages$1 = async (targetDirectories, targetPackageName, storage, logger)=>{
    const consumingPackages = [];
    // Find all package.json files in target directories
    let allPackageJsonFiles = [];
    for (const targetDirectory of targetDirectories){
        const packageJsonFiles = await findAllPackageJsonFiles(targetDirectory, storage);
        allPackageJsonFiles = allPackageJsonFiles.concat(packageJsonFiles);
    }
    for (const packageJsonLocation of allPackageJsonFiles){
        // FIX: use path.dirname instead of `.replace('/package.json', '')`,
        // which removes only the first occurrence of that substring anywhere in
        // the path rather than stripping the trailing file name.
        const packageDir = path.dirname(packageJsonLocation.path);
        try {
            const packageJsonContent = await storage.readFile(packageJsonLocation.path, 'utf-8');
            const parsed = safeJsonParse(packageJsonContent, packageJsonLocation.path);
            const packageJson = validatePackageJson(parsed, packageJsonLocation.path);
            if (!packageJson.name) continue;
            // Check if this package depends on the target package
            const dependencyTypes = [
                'dependencies',
                'devDependencies',
                'peerDependencies',
                'optionalDependencies'
            ];
            const hasDependency = dependencyTypes.some((depType)=>packageJson[depType] && packageJson[depType][targetPackageName]);
            // Exclude the target itself so a package never "consumes" itself.
            if (hasDependency && packageJson.name !== targetPackageName) {
                consumingPackages.push({
                    name: packageJson.name,
                    path: packageDir
                });
            }
        } catch (error) {
            logger.warn(`Failed to parse ${packageJsonLocation.path}: ${error.message}`);
        }
    }
    return consumingPackages;
};
256
/**
 * Core implementation of the `link` command.
 *
 * Three modes, selected by `packageArgument`:
 *   - "status": delegates to executeStatus$1 and returns its report.
 *   - undefined/empty ("smart mode"): works in the current directory only —
 *     self-links the current package globally, discovers same-scope and
 *     configured external dependencies, optionally prepares externals via
 *     `link.scopeRoots`, symlinks every globally-linked match into
 *     node_modules, then regenerates package-lock.json (lockfile-only).
 *   - "@scope" or "@scope/name": links the matching source package(s) globally
 *     via `npm link`, runs `npm link <name>` in every consuming package, then
 *     regenerates lock files in all affected packages.
 *
 * Returns a human-readable summary string in every mode; smart mode returns
 * early with an error message string (not a throw) when package.json is
 * missing, unnamed, or unscoped.
 *
 * NOTE(review): this function changes the process working directory with
 * process.chdir() in several places (always restored in finally blocks).
 * That is process-global state — presumably this command never runs
 * concurrently with other work in the same process; verify before reusing.
 *
 * @param {object} runConfig - Run configuration; reads dryRun, link.dryRun,
 *   link.externals, link.scopeRoots, tree.directories.
 * @param {string} [packageArgument] - "status", a scope, a full package name,
 *   or undefined for smart mode.
 * @returns {Promise<string>} summary of what was (or would be) linked.
 */
const executeInternal$1 = async (runConfig, packageArgument)=>{
    var _runConfig_link, _runConfig_tree;
    const isDryRun = runConfig.dryRun || ((_runConfig_link = runConfig.link) === null || _runConfig_link === void 0 ? void 0 : _runConfig_link.dryRun) || false;
    const logger = getDryRunLogger(isDryRun);
    const storage = createStorage();
    // Check if this is a status command
    if (packageArgument === 'status') {
        return await executeStatus$1(runConfig);
    }
    // Get target directories from config, default to current directory
    const targetDirectories = ((_runConfig_tree = runConfig.tree) === null || _runConfig_tree === void 0 ? void 0 : _runConfig_tree.directories) || [
        process.cwd()
    ];
    if (targetDirectories.length === 1) {
        logger.info(`WORKSPACE_ANALYSIS: Analyzing single workspace directory | Path: ${targetDirectories[0]} | Purpose: Find linkable packages`);
    } else {
        logger.info(`WORKSPACE_ANALYSIS: Analyzing multiple workspace directories | Paths: ${targetDirectories.join(', ')} | Count: ${targetDirectories.length} | Purpose: Find linkable packages across workspaces`);
    }
    // If no package argument provided, use new smart same-scope linking behavior
    if (!packageArgument) {
        var _runConfig_link1, _runConfig_link2;
        logger.info('LINK_SMART_MODE: Smart linking mode activated for current project | Mode: smart | Target: current directory | Purpose: Auto-link dependencies based on scope');
        // Work in current directory only - read the package.json
        const currentDir = process.cwd();
        const packageJsonPath = `${currentDir}/package.json`;
        let currentPackageJson;
        try {
            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
            const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
            currentPackageJson = validatePackageJson(parsed, packageJsonPath);
        } catch (error) {
            // Smart mode reports failures as returned message strings, not throws.
            const message = `PACKAGE_JSON_NOT_FOUND: No valid package.json in current directory | Error: ${error.message} | Action: Cannot proceed with smart linking`;
            logger.error(message);
            return message;
        }
        if (!currentPackageJson.name) {
            const message = 'PACKAGE_NAME_MISSING: package.json must have a name field | Field: name | Requirement: Required for linking | Action: Add name field to package.json';
            logger.error(message);
            return message;
        }
        // Extract the scope from the current package name
        const currentScope = currentPackageJson.name.startsWith('@') ? currentPackageJson.name.split('/')[0] : null;
        if (!currentScope) {
            const message = 'PACKAGE_SCOPE_MISSING: Package must have scoped name for smart linking | Format Required: @scope/package | Current: ' + currentPackageJson.name + ' | Action: Use scoped package name';
            logger.warn(message);
            return message;
        }
        logger.info(`CURRENT_PACKAGE_IDENTIFIED: Current package identified for smart linking | Package: ${currentPackageJson.name} | Scope: ${currentScope} | Path: ${currentDir}`);
        // Step 1: Link the current package globally (optional - continue even if this fails)
        try {
            if (isDryRun) {
                logger.info(`SELF_LINK_DRY_RUN: Would link current package globally | Mode: dry-run | Package: ${currentPackageJson.name} | Command: npm link`);
            } else {
                logger.verbose(`SELF_LINK_STARTING: Registering package globally | Package: ${currentPackageJson.name} | Command: npm link | Purpose: Make available for dependency linking`);
                await run('npm link');
                logger.info(`SELF_LINK_SUCCESS: Current package linked globally | Package: ${currentPackageJson.name} | Location: Global npm | Purpose: Make available for linking`);
            }
        } catch (error) {
            // Deliberately non-fatal: dependency linking proceeds anyway.
            logger.warn(`SELF_LINK_FAILED: Unable to self-link current package | Package: ${currentPackageJson.name} | Error: ${error.message} | Impact: Continuing with dependency linking`);
            logger.info(`LINK_CONTINUING: Proceeding with dependency linking despite self-link failure | Next: Link matching dependencies`);
        }
        // Step 2: Find same-scope dependencies in current package
        const allDependencies = {
            ...currentPackageJson.dependencies,
            ...currentPackageJson.devDependencies
        };
        const sameScopeDependencies = Object.keys(allDependencies).filter((depName)=>depName.startsWith(currentScope + '/'));
        // Step 2.5: Find external dependencies that match external link patterns
        const externalLinkPatterns = ((_runConfig_link1 = runConfig.link) === null || _runConfig_link1 === void 0 ? void 0 : _runConfig_link1.externals) || [];
        const externalDependencies = Object.keys(allDependencies).filter((depName)=>matchesExternalLinkPattern(depName, externalLinkPatterns));
        const allDependenciesToLink = [
            ...sameScopeDependencies,
            ...externalDependencies
        ];
        if (allDependenciesToLink.length === 0) {
            logger.info(`No same-scope or external dependencies found for ${currentScope}`);
            if (isDryRun) {
                return `DRY RUN: Would self-link, no dependencies found to link`;
            } else {
                return `Self-linked ${currentPackageJson.name}, no dependencies to link`;
            }
        }
        logger.info(`Found ${sameScopeDependencies.length} same-scope dependencies: ${sameScopeDependencies.join(', ')}`);
        if (externalDependencies.length > 0) {
            logger.info(`Found ${externalDependencies.length} external dependencies matching patterns: ${externalDependencies.join(', ')}`);
        }
        // Step 2.6: Handle external dependencies using scopeRoots configuration
        // scopeRoots maps a scope (e.g. "@fjell") to a directory containing its
        // packages; each external dependency is looked up there and, if found,
        // globally linked from its source directory.
        const scopeRoots = ((_runConfig_link2 = runConfig.link) === null || _runConfig_link2 === void 0 ? void 0 : _runConfig_link2.scopeRoots) || {};
        const globallyLinkedViaScopeRoots = [];
        if (Object.keys(scopeRoots).length > 0 && externalDependencies.length > 0) {
            logger.info('Using scopeRoots configuration to discover and link external packages...');
            for (const depName of externalDependencies){
                const depScope = depName.startsWith('@') ? depName.split('/')[0] : null;
                const scopeRoot = depScope ? scopeRoots[depScope] : null;
                if (scopeRoot) {
                    logger.verbose(`Processing ${depName} with scope ${depScope} -> ${scopeRoot}`);
                    // Convert relative path to absolute
                    const absoluteScopeRoot = path.resolve(currentDir, scopeRoot);
                    logger.verbose(`Scanning scope root directory: ${absoluteScopeRoot}`);
                    try {
                        // Look for package with matching name in the scope directory
                        // (convention: directory name equals the unscoped package name).
                        const expectedPackageName = depName.startsWith('@') ? depName.split('/')[1] : depName;
                        const packageDir = path.join(absoluteScopeRoot, expectedPackageName);
                        const packageJsonPath = path.join(packageDir, 'package.json');
                        logger.verbose(`Checking for package at: ${packageDir}`);
                        try {
                            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
                            const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
                            const packageJson = validatePackageJson(parsed, packageJsonPath);
                            if (packageJson.name === depName) {
                                logger.info(`Found matching package: ${depName} at ${packageDir}`);
                                if (isDryRun) {
                                    logger.info(`DRY RUN: Would run 'npm link' in: ${packageDir}`);
                                    globallyLinkedViaScopeRoots.push(depName);
                                } else {
                                    // Step A: Run 'npm link' in the source package directory
                                    const originalCwd = process.cwd();
                                    try {
                                        process.chdir(packageDir);
                                        logger.verbose(`Running 'npm link' in source: ${packageDir}`);
                                        await run('npm link');
                                        logger.info(`LINK_SOURCE_SCOPE_ROOTS: Source linked via scopeRoots | Package: ${depName} | Method: scopeRoots | Status: linked`);
                                        globallyLinkedViaScopeRoots.push(depName);
                                    } catch (linkError) {
                                        logger.warn(`LINK_SOURCE_FAILED: Failed to link source package | Package: ${depName} | Error: ${linkError.message}`);
                                    } finally{
                                        process.chdir(originalCwd);
                                    }
                                }
                            } else {
                                logger.verbose(`Package name mismatch: expected ${depName}, found ${packageJson.name}`);
                            }
                        } catch (packageError) {
                            logger.verbose(`Package not found or invalid: ${packageJsonPath} - ${packageError.message}`);
                        }
                    } catch (error) {
                        logger.verbose(`Error processing scope ${depScope}: ${error.message}`);
                    }
                } else {
                    logger.verbose(`No scope root configured for ${depScope}`);
                }
            }
            if (globallyLinkedViaScopeRoots.length > 0) {
                logger.info(`Successfully prepared ${globallyLinkedViaScopeRoots.length} packages via scopeRoots: ${globallyLinkedViaScopeRoots.join(', ')}`);
            }
        }
        // Step 3: Get globally linked packages directories (only if we have dependencies to link)
        // Note: in dry-run mode this step RETURNS early with a summary.
        let globallyLinkedPackages = {};
        try {
            if (isDryRun) {
                logger.info(`DRY RUN: Would run 'npm ls --link -g -p' to discover linked package directories`);
                logger.info(`DRY RUN: Would attempt to link dependencies: ${allDependenciesToLink.join(', ')}`);
                return `DRY RUN: Would self-link and attempt to link ${allDependenciesToLink.length} dependencies`;
            } else {
                logger.verbose(`Discovering globally linked package directories...`);
                const result = await run('npm ls --link -g -p');
                const resultStr = typeof result === 'string' ? result : result.stdout;
                // Parse the directory paths - each line is a directory path
                const directoryPaths = resultStr.trim().split('\n').filter((line)=>line.trim() !== '');
                // Extract package names from directory paths and build a map
                for (const dirPath of directoryPaths){
                    try {
                        // Read the package.json to get the actual package name
                        const packageJsonPath = `${dirPath.trim()}/package.json`;
                        const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
                        const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
                        const packageJson = validatePackageJson(parsed, packageJsonPath);
                        if (packageJson.name) {
                            globallyLinkedPackages[packageJson.name] = dirPath.trim();
                        }
                    } catch (packageError) {
                        logger.verbose(`Could not read package.json from ${dirPath}: ${packageError.message}`);
                    }
                }
                const linkedCount = Object.keys(globallyLinkedPackages).length;
                logger.verbose(`Found ${linkedCount} globally linked package(s)`);
            }
        } catch (error) {
            // Best-effort: an empty map simply means no dependencies get linked below.
            logger.warn(`Failed to get globally linked packages (continuing anyway): ${error.message}`);
            globallyLinkedPackages = {};
        }
        // Step 4: Link same-scope dependencies that are available globally using manual symlinks
        const linkedDependencies = [];
        for (const depName of allDependenciesToLink){
            const sourcePath = globallyLinkedPackages[depName];
            if (sourcePath) {
                try {
                    logger.verbose(`Linking dependency: ${depName} from ${sourcePath}`);
                    // Create the symbolic link manually using the directory path directly
                    const success = await createSymbolicLink(depName, sourcePath, currentDir, logger, isDryRun);
                    if (success) {
                        logger.info(`LINK_DEPENDENCY_SUCCESS: Linked dependency successfully | Dependency: ${depName} | Status: symlink-created`);
                        linkedDependencies.push(depName);
                    } else {
                        logger.warn(`LINK_DEPENDENCY_FAILED: Failed to link dependency | Dependency: ${depName} | Status: failed`);
                    }
                } catch (error) {
                    logger.warn(`⚠️ Failed to link ${depName}: ${error.message}`);
                }
            } else {
                logger.verbose(`Skipping ${depName} (not globally linked)`);
            }
        }
        const summary = linkedDependencies.length > 0 ? `Self-linked ${currentPackageJson.name} and linked ${linkedDependencies.length} dependencies: ${linkedDependencies.join(', ')}` : `Self-linked ${currentPackageJson.name}, no dependencies were available to link`;
        // Step 5: Regenerate package-lock.json without modifying node_modules
        try {
            if (isDryRun) {
                logger.info(`DRY RUN: Would run 'npm install --package-lock-only --no-audit --no-fund' to regenerate package-lock.json`);
            } else {
                logger.verbose(`Running 'npm install --package-lock-only --no-audit --no-fund' to regenerate package-lock.json without touching node_modules...`);
                await run('npm install --package-lock-only --no-audit --no-fund');
                logger.info(`LINK_LOCK_REGENERATED: Regenerated package-lock.json successfully | File: package-lock.json | Status: updated`);
            }
        } catch (error) {
            logger.warn(`LINK_LOCK_REGEN_FAILED: Failed to regenerate package-lock.json | Error: ${error.message} | Impact: Lock file may be out of sync`);
        }
        logger.info(summary);
        return summary;
    }
    // New scope-based linking behavior
    logger.info(`LINK_SCOPE_MODE: Linking scope or specific package | Target: ${packageArgument} | Mode: scope-based | Purpose: Link packages by scope`);
    const { scope, packageName } = parsePackageArgument$1(packageArgument);
    logger.verbose(`Parsed scope: ${scope}, package: ${packageName || 'all packages in scope'}`);
    // Find matching packages in the workspace
    const matchingPackages = await findMatchingPackages$1(targetDirectories, scope, storage, logger, packageName);
    if (matchingPackages.length === 0) {
        const message = packageName ? `No package found matching: ${packageName}` : `No packages found in scope: ${scope}`;
        logger.warn(message);
        return message;
    }
    logger.info(`Found ${matchingPackages.length} matching package(s)`);
    const linkedPackages = [];
    // If specific package name provided, use that; otherwise link all packages in scope
    const packagesToLink = packageName ? matchingPackages.filter((pkg)=>pkg.name === packageName) : matchingPackages;
    for (const pkg of packagesToLink){
        logger.info(`Processing package: ${pkg.name}`);
        // Step A: Run 'npm link' in the source package directory
        try {
            const originalCwd = process.cwd();
            process.chdir(pkg.path);
            try {
                if (isDryRun) {
                    logger.info(`DRY RUN: Would run 'npm link' in: ${pkg.path}`);
                } else {
                    logger.verbose(`Running 'npm link' in source: ${pkg.path}`);
                    await run('npm link');
                    logger.info(`LINK_SOURCE_SUCCESS: Source package linked globally | Package: ${pkg.name} | Status: linked`);
                }
            } finally{
                process.chdir(originalCwd);
            }
            // Step B: Find all packages that depend on this package and link them
            const consumingPackages = await findConsumingPackages$1(targetDirectories, pkg.name, storage, logger);
            if (consumingPackages.length === 0) {
                logger.info(`No consuming packages found for: ${pkg.name}`);
            } else {
                logger.info(`Found ${consumingPackages.length} consuming package(s) for: ${pkg.name}`);
                for (const consumer of consumingPackages){
                    try {
                        const consumerOriginalCwd = process.cwd();
                        process.chdir(consumer.path);
                        try {
                            if (isDryRun) {
                                logger.info(`DRY RUN: Would run 'npm link ${pkg.name}' in: ${consumer.path}`);
                            } else {
                                logger.verbose(`Running 'npm link ${pkg.name}' in consumer: ${consumer.path}`);
                                // runSecure passes the package name as an argv element,
                                // avoiding shell interpolation of the name.
                                await runSecure('npm', [
                                    'link',
                                    pkg.name
                                ]);
                                logger.info(`LINK_CONSUMER_SUCCESS: Consumer linked to package | Consumer: ${consumer.name} | Package: ${pkg.name} | Status: linked`);
                            }
                        } finally{
                            process.chdir(consumerOriginalCwd);
                        }
                    } catch (error) {
                        // Unlike smart mode, scope mode is fail-fast: a consumer
                        // failure aborts the whole command.
                        logger.error(`LINK_CONSUMER_FAILED: Failed to link package in consumer | Package: ${pkg.name} | Consumer: ${consumer.name} | Error: ${error.message}`);
                        throw new Error(`Failed to link ${pkg.name} in consumer ${consumer.name}: ${error.message}`);
                    }
                }
            }
            linkedPackages.push(pkg.name);
        } catch (error) {
            logger.error(`LINK_SOURCE_PACKAGE_FAILED: Failed to link source package | Package: ${pkg.name} | Error: ${error.message}`);
            throw new Error(`Failed to link source package ${pkg.name}: ${error.message}`);
        }
    }
    const summary = `Successfully linked ${linkedPackages.length} package(s): ${linkedPackages.join(', ')}`;
    // Final step: Regenerate package-lock.json files in all affected packages without modifying node_modules
    if (!isDryRun) {
        logger.info(`LINK_LOCK_REGENERATING_ALL: Regenerating package-lock.json files in all packages | Mode: lockfile-only | Purpose: Update lock files after linking`);
        // Get all unique consuming packages
        // NOTE(review): consumers are re-discovered here (second
        // findConsumingPackages$1 pass per package) rather than reused from the
        // loop above — presumably acceptable cost; verify if scanning is slow.
        const allConsumingPackages = new Set();
        for (const pkg of packagesToLink){
            const consumingPackages = await findConsumingPackages$1(targetDirectories, pkg.name, storage, logger);
            consumingPackages.forEach((consumer)=>allConsumingPackages.add(consumer.path));
        }
        // Also include the source packages
        packagesToLink.forEach((pkg)=>allConsumingPackages.add(pkg.path));
        // Run lockfile-only install in each package
        for (const packagePath of allConsumingPackages){
            try {
                const originalCwd = process.cwd();
                process.chdir(packagePath);
                try {
                    logger.verbose(`Running 'npm install --package-lock-only --no-audit --no-fund' in: ${packagePath}`);
                    await run('npm install --package-lock-only --no-audit --no-fund');
                    logger.verbose(`LINK_LOCK_PACKAGE_REGENERATED: Regenerated package-lock.json | Path: ${packagePath} | Status: updated`);
                } finally{
                    process.chdir(originalCwd);
                }
            } catch (error) {
                logger.warn(`LINK_LOCK_PACKAGE_REGEN_FAILED: Failed to regenerate package-lock.json | Path: ${packagePath} | Error: ${error.message}`);
            }
        }
        logger.info(`LINK_LOCK_ALL_REGENERATED: Regenerated package-lock.json files in all packages | Package Count: ${allConsumingPackages.size} | Status: completed`);
    } else {
        logger.info(`DRY RUN: Would run 'npm install --package-lock-only --no-audit --no-fund' to regenerate package-lock.json files in all packages`);
    }
    logger.info(summary);
    return summary;
};
578
/**
 * Report which workspace packages currently have symlinked dependencies.
 * Scans every package.json under the configured tree directories and, for each
 * named package, lists the symlinked entries found in its node_modules.
 * @param {object} runConfig - Run configuration; reads runConfig.tree.directories.
 * @returns {Promise<string>} human-readable status report.
 */
const executeStatus$1 = async (runConfig)=>{
    const logger = getLogger();
    const storage = createStorage();
    // Default to the current working directory when no tree directories are configured.
    const treeConfig = runConfig.tree;
    const targetDirectories = (treeConfig === null || treeConfig === void 0 ? void 0 : treeConfig.directories) || [
        process.cwd()
    ];
    // For a single directory, join(', ') degenerates to that one path, so a
    // single message covers both the one- and many-directory cases.
    logger.info(`🔍 Checking link status in: ${targetDirectories.join(', ')}`);
    // Gather every package.json across all target directories.
    let allPackageJsonFiles = [];
    for (const targetDirectory of targetDirectories){
        allPackageJsonFiles = allPackageJsonFiles.concat(await findAllPackageJsonFiles(targetDirectory, storage));
    }
    const packageStatuses = [];
    for (const packageJsonLocation of allPackageJsonFiles){
        const packageDir = packageJsonLocation.path.replace('/package.json', '');
        try {
            const raw = await storage.readFile(packageJsonLocation.path, 'utf-8');
            const packageJson = validatePackageJson(safeJsonParse(raw, packageJsonLocation.path), packageJsonLocation.path);
            if (!packageJson.name) continue;
            const linkedDependencies = await findLinkedDependencies$1(packageDir, packageJson.name, storage, logger);
            // Only packages that actually have linked dependencies are reported.
            if (linkedDependencies.length > 0) {
                packageStatuses.push({
                    name: packageJson.name,
                    path: packageDir,
                    linkedDependencies
                });
            }
        } catch (error) {
            logger.warn(`Failed to parse ${packageJsonLocation.path}: ${error.message}`);
        }
    }
    if (packageStatuses.length === 0) {
        return 'No linked dependencies found in workspace.';
    }
    // Render one section per package, each terminated by a blank line.
    const header = `Found ${packageStatuses.length} package(s) with linked dependencies:\n\n`;
    const sections = packageStatuses.map((packageStatus)=>{
        let section = `📦 ${packageStatus.name}\n`;
        section += ` Path: ${packageStatus.path}\n`;
        if (packageStatus.linkedDependencies.length > 0) {
            section += ` Linked dependencies:\n`;
            for (const dep of packageStatus.linkedDependencies){
                const type = dep.isExternal ? '🔗 External' : '🔗 Internal';
                section += ` ${type} ${dep.dependencyName} -> ${dep.targetPath}\n`;
            }
        }
        return section + '\n';
    });
    return header + sections.join('');
};
637
// Entry point for the `link` command: resolves the package argument and
// delegates to the internal implementation, logging and rethrowing failures.
const execute$4 = async (runConfig, packageArgument) => {
    try {
        // Fall back to the argument supplied via configuration when none was
        // passed directly to this function.
        const resolvedArgument = packageArgument || runConfig.link?.packageArgument;
        return await executeInternal$1(runConfig, resolvedArgument);
    } catch (error) {
        getLogger().error(`link failed: ${error.message}`);
        throw error;
    }
};
649
+
650
// Returns true when `dependencyName` matches one of the external unlink
// patterns. Matching is plain equality/prefix comparison.
// Simple string matching - could be enhanced with glob patterns later.
const matchesExternalUnlinkPattern = (dependencyName, externalUnlinkPatterns) => {
    if (!externalUnlinkPatterns?.length) {
        return false;
    }
    return externalUnlinkPatterns.some(
        (pattern) => dependencyName === pattern || dependencyName.startsWith(pattern)
    );
};
660
// Returns true when `filePath` is a symbolic link; false when it is any other
// kind of filesystem entry or cannot be inspected at all.
const isSymbolicLink = async (filePath) => {
    let stats;
    try {
        // lstat inspects the link itself instead of following it.
        stats = await fs.lstat(filePath);
    } catch {
        return false;
    }
    return stats.isSymbolicLink();
};
669
// Resolves the path a symlink points at, or null when `filePath` is not a
// readable symbolic link.
const getSymbolicLinkTarget = async (filePath) => {
    try {
        return await fs.readlink(filePath);
    } catch {
        // Not a symlink, missing, or unreadable - callers treat all the same.
        return null;
    }
};
678
// Scans a package's node_modules for dependencies (regular + dev) installed
// as symlinks. Returns one record per linked dependency with its name, the
// link target, and whether the target appears external to the workspace.
const findLinkedDependencies = async (packagePath, packageName, storage, logger) => {
    const linkedDependencies = [];
    try {
        const packageJsonPath = path.join(packagePath, 'package.json');
        const rawContent = await storage.readFile(packageJsonPath, 'utf-8');
        const packageJson = validatePackageJson(
            safeJsonParse(rawContent, packageJsonPath),
            packageJsonPath
        );
        const allDependencies = {
            ...packageJson.dependencies,
            ...packageJson.devDependencies
        };
        const nodeModulesPath = path.join(packagePath, 'node_modules');
        for (const dependencyName of Object.keys(allDependencies)) {
            // Scoped packages live one directory deeper: node_modules/@scope/name.
            let dependencyPath;
            if (dependencyName.startsWith('@')) {
                const [scope, name] = dependencyName.split('/');
                dependencyPath = path.join(nodeModulesPath, scope, name);
            } else {
                dependencyPath = path.join(nodeModulesPath, dependencyName);
            }
            if (!(await isSymbolicLink(dependencyPath))) {
                continue;
            }
            const target = await getSymbolicLinkTarget(dependencyPath);
            if (target) {
                // Heuristic: a target outside any node_modules, or one that
                // climbs upward via "..", is treated as external.
                const isExternal = !target.includes('node_modules') || target.startsWith('..');
                linkedDependencies.push({
                    dependencyName,
                    targetPath: target,
                    isExternal
                });
            }
        }
    } catch (error) {
        logger.warn(`UNLINK_CHECK_FAILED: Unable to check linked dependencies | Package: ${packageName} | Error: ${error.message}`);
    }
    return linkedDependencies;
};
720
// Removes the node_modules symlink for `packageName` inside `targetDir`.
// Returns true when the link was removed or nothing needed removing; false
// when a non-symlink occupies the path or removal failed.
const removeSymbolicLink = async (packageName, targetDir, logger, isDryRun = false) => {
    try {
        const nodeModulesPath = path.join(targetDir, 'node_modules');
        // Scoped packages resolve to node_modules/@scope/name, plain packages
        // to node_modules/name.
        let targetPath;
        if (packageName.startsWith('@')) {
            const [scope, name] = packageName.split('/');
            targetPath = path.join(nodeModulesPath, scope, name);
        } else {
            targetPath = path.join(nodeModulesPath, packageName);
        }
        if (isDryRun) {
            logger.verbose(`DRY RUN: Would check and remove symlink: ${targetPath}`);
            return true;
        }
        try {
            // lstat so we inspect the link itself rather than its target.
            const stats = await fs.lstat(targetPath);
            if (!stats.isSymbolicLink()) {
                logger.verbose(`Target exists but is not a symlink: ${targetPath}`);
                return false;
            }
            await fs.unlink(targetPath);
            logger.verbose(`Removed symlink: ${targetPath}`);
            return true;
        } catch (error) {
            if (error.code !== 'ENOENT') {
                throw error; // Unexpected failure - surface to the outer handler.
            }
            // Nothing exists at the target path, so there is nothing to remove.
            logger.verbose(`No symlink found at: ${targetPath}`);
            return true;
        }
    } catch (error) {
        logger.warn(`UNLINK_SYMLINK_REMOVE_FAILED: Unable to remove symlink | Package: ${packageName} | Error: ${error.message}`);
        return false;
    }
};
768
// Parses a CLI package argument into its scope and, when present, the full
// package name (same behaviour as the link command):
//   "@fjell"      -> { scope: "@fjell" }
//   "@fjell/core" -> { scope: "@fjell", packageName: "@fjell/core" }
// Throws for arguments that do not start with a scope.
const parsePackageArgument = (packageArg) => {
    if (!packageArg.startsWith('@')) {
        throw new Error(`Package argument must start with @ (scope): ${packageArg}`);
    }
    const parts = packageArg.split('/');
    if (parts.length === 1) {
        // Just a scope like "@fjell"
        return { scope: parts[0] };
    }
    // Full package name like "@fjell/core"
    return {
        scope: parts[0],
        packageName: packageArg
    };
};
788
// Locates workspace packages whose name belongs to `scope` (or exactly equals
// `packageName` when given). Each match records its name, directory, and
// whether it is the source package being targeted.
const findMatchingPackages = async (targetDirectories, scope, storage, logger, packageName) => {
    const matchingPackages = [];
    // Gather every package.json beneath the target directories.
    let allPackageJsonFiles = [];
    for (const targetDirectory of targetDirectories) {
        const found = await findAllPackageJsonFiles(targetDirectory, storage);
        allPackageJsonFiles = allPackageJsonFiles.concat(found);
    }
    for (const packageJsonLocation of allPackageJsonFiles) {
        const packageDir = packageJsonLocation.path.replace('/package.json', '');
        try {
            const rawContent = await storage.readFile(packageJsonLocation.path, 'utf-8');
            const packageJson = validatePackageJson(
                safeJsonParse(rawContent, packageJsonLocation.path),
                packageJsonLocation.path
            );
            if (!packageJson.name) continue;
            const isInScope = packageJson.name.startsWith(scope + '/');
            const isExactMatch = packageName && packageJson.name === packageName;
            if (!isInScope && !isExactMatch) continue;
            matchingPackages.push({
                name: packageJson.name,
                path: packageDir,
                // With an explicit package name only that package is the source;
                // otherwise every in-scope package counts.
                isSource: packageName ? packageJson.name === packageName : isInScope
            });
        } catch (error) {
            logger.warn(`Failed to parse ${packageJsonLocation.path}: ${error.message}`);
        }
    }
    return matchingPackages;
};
819
// Finds workspace packages that declare `targetPackageName` in any dependency
// section; the target package itself is excluded from the results.
const findConsumingPackages = async (targetDirectories, targetPackageName, storage, logger) => {
    const consumingPackages = [];
    // Gather every package.json beneath the target directories.
    let allPackageJsonFiles = [];
    for (const targetDirectory of targetDirectories) {
        const found = await findAllPackageJsonFiles(targetDirectory, storage);
        allPackageJsonFiles = allPackageJsonFiles.concat(found);
    }
    const dependencyTypes = [
        'dependencies',
        'devDependencies',
        'peerDependencies',
        'optionalDependencies'
    ];
    for (const packageJsonLocation of allPackageJsonFiles) {
        const packageDir = packageJsonLocation.path.replace('/package.json', '');
        try {
            const rawContent = await storage.readFile(packageJsonLocation.path, 'utf-8');
            const packageJson = validatePackageJson(
                safeJsonParse(rawContent, packageJsonLocation.path),
                packageJsonLocation.path
            );
            // Skip unnamed packages and the target itself.
            if (!packageJson.name || packageJson.name === targetPackageName) continue;
            const hasDependency = dependencyTypes.some(
                (depType) => packageJson[depType] && packageJson[depType][targetPackageName]
            );
            if (hasDependency) {
                consumingPackages.push({
                    name: packageJson.name,
                    path: packageDir
                });
            }
        } catch (error) {
            logger.warn(`Failed to parse ${packageJsonLocation.path}: ${error.message}`);
        }
    }
    return consumingPackages;
};
855
/**
 * Core implementation of the `unlink` command.
 *
 * Behaviour depends on `packageArgument`:
 *  - 'status'        -> delegates to executeStatus and returns its report.
 *  - undefined/empty -> "smart" mode: unlinks the package in the current
 *                       working directory (optional external-dependency
 *                       unlink, global `npm unlink -g`, optional clean
 *                       reinstall, then a `npm ls --link` verification).
 *  - '@scope[/pkg]'  -> explicit mode: unlinks matching workspace packages,
 *                       first unlinking each package's consumers.
 *
 * NOTE(review): this function uses process.chdir() and shell commands that run
 * in the current working directory, so it is not safe to run concurrently —
 * presumably callers invoke it once per process; confirm.
 *
 * @param runConfig       Command configuration (dryRun, unlink.*, tree.*).
 * @param packageArgument Optional scope/package selector or 'status'.
 * @returns A human-readable summary string.
 */
const executeInternal = async (runConfig, packageArgument)=>{
    var _runConfig_unlink, _runConfig_tree;
    // Dry-run can be requested globally or on the unlink sub-config.
    const isDryRun = runConfig.dryRun || ((_runConfig_unlink = runConfig.unlink) === null || _runConfig_unlink === void 0 ? void 0 : _runConfig_unlink.dryRun) || false;
    const logger = getDryRunLogger(isDryRun);
    const storage = createStorage();
    // Check if this is a status command
    if (packageArgument === 'status') {
        return await executeStatus(runConfig);
    }
    // Get target directories from config, default to current directory
    const targetDirectories = ((_runConfig_tree = runConfig.tree) === null || _runConfig_tree === void 0 ? void 0 : _runConfig_tree.directories) || [
        process.cwd()
    ];
    if (targetDirectories.length === 1) {
        logger.info(`UNLINK_WORKSPACE_ANALYSIS: Analyzing single workspace directory | Path: ${targetDirectories[0]} | Purpose: Find packages to unlink`);
    } else {
        logger.info(`UNLINK_WORKSPACE_ANALYSIS: Analyzing multiple workspace directories | Paths: ${targetDirectories.join(', ')} | Count: ${targetDirectories.length} | Purpose: Find packages to unlink`);
    }
    // If no package argument provided, implement new behavior for current project
    if (!packageArgument) {
        var _runConfig_unlink1, _runConfig_unlink2;
        logger.info('UNLINK_SMART_MODE: Smart unlinking mode activated for current project | Mode: smart | Target: current directory | Purpose: Auto-unlink based on scope');
        const currentDir = process.cwd();
        const packageJsonPath = `${currentDir}/package.json`;
        // Check if we're in a directory with package.json
        if (!await storage.exists(packageJsonPath)) {
            const message = `No package.json found in current directory: ${currentDir}`;
            logger.warn('UNLINK_NO_PACKAGE_JSON: No package.json found in current directory | Directory: ' + currentDir + ' | Action: Cannot unlink without package.json');
            return message;
        }
        // Parse package.json to get package name
        let packageName;
        try {
            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
            const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
            const packageJson = validatePackageJson(parsed, packageJsonPath);
            if (!packageJson.name) {
                throw new Error('package.json has no name field');
            }
            packageName = packageJson.name;
        } catch (error) {
            // Parse failures and a missing name field both land here; the
            // message is returned rather than thrown (soft failure).
            const message = `Failed to parse package.json: ${error.message}`;
            logger.error('UNLINK_PACKAGE_NAME_MISSING: package.json must have a name field | Field: name | Requirement: Required for unlinking | Action: Add name field to package.json');
            return message;
        }
        logger.info(`UNLINK_PACKAGE_PROCESSING: Processing package for unlinking | Package: ${packageName} | Action: Remove symlinks and restore registry versions`);
        const cleanNodeModules = ((_runConfig_unlink1 = runConfig.unlink) === null || _runConfig_unlink1 === void 0 ? void 0 : _runConfig_unlink1.cleanNodeModules) || false;
        const externalUnlinkPatterns = ((_runConfig_unlink2 = runConfig.unlink) === null || _runConfig_unlink2 === void 0 ? void 0 : _runConfig_unlink2.externals) || [];
        // Step 0: Handle external dependencies if patterns are specified
        if (externalUnlinkPatterns.length > 0) {
            logger.info(`UNLINK_EXTERNAL_DEPS: Processing external dependencies | Patterns: ${externalUnlinkPatterns.join(', ')} | Purpose: Unlink external packages before main package`);
            // Read package.json to get dependencies
            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
            const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
            const packageJson = validatePackageJson(parsed, packageJsonPath);
            const allDependencies = {
                ...packageJson.dependencies,
                ...packageJson.devDependencies
            };
            // Select the dependencies that match a configured external pattern.
            const externalDependencies = Object.keys(allDependencies).filter((depName)=>matchesExternalUnlinkPattern(depName, externalUnlinkPatterns));
            if (externalDependencies.length > 0) {
                logger.info(`UNLINK_EXTERNAL_FOUND: Found external dependencies to unlink | Count: ${externalDependencies.length} | Dependencies: ${externalDependencies.join(', ')}`);
                for (const depName of externalDependencies){
                    try {
                        // Remove the node_modules symlink directly; failures are
                        // logged but do not abort the remaining dependencies.
                        const success = await removeSymbolicLink(depName, currentDir, logger, isDryRun);
                        if (success) {
                            logger.info(`UNLINK_EXTERNAL_SUCCESS: External dependency unlinked successfully | Dependency: ${depName} | Status: unlinked`);
                        } else {
                            logger.warn(`UNLINK_EXTERNAL_FAILED: Failed to unlink external dependency | Dependency: ${depName} | Status: failed`);
                        }
                    } catch (error) {
                        logger.warn(`UNLINK_EXTERNAL_ERROR: Error during external dependency unlink | Dependency: ${depName} | Error: ${error.message}`);
                    }
                }
            } else {
                logger.info('UNLINK_EXTERNAL_NONE: No external dependencies found matching patterns | Patterns: ' + externalUnlinkPatterns.join(', ') + ' | Action: Skipping external unlink');
            }
        }
        if (isDryRun) {
            // Describe the steps that a real run would perform and stop here.
            let dryRunMessage = `DRY RUN: Would execute unlink steps for ${packageName}:\n`;
            if (externalUnlinkPatterns.length > 0) {
                dryRunMessage += ` 0. Unlink external dependencies matching patterns: ${externalUnlinkPatterns.join(', ')}\n`;
            }
            dryRunMessage += ` 1. npm unlink -g\n`;
            if (cleanNodeModules) {
                dryRunMessage += ` 2. rm -rf node_modules package-lock.json\n`;
                dryRunMessage += ` 3. npm install\n`;
                dryRunMessage += ` 4. Check for remaining links with npm ls --link`;
            } else {
                dryRunMessage += ` 2. Check for remaining links with npm ls --link\n`;
                dryRunMessage += ` Note: Use --clean-node-modules flag to also clean and reinstall dependencies`;
            }
            logger.info(dryRunMessage);
            return dryRunMessage;
        }
        // Step 1: Remove global link (runs in the current working directory)
        logger.info('UNLINK_GLOBAL_REMOVING: Removing global npm link | Step: 1 | Command: npm unlink -g | Purpose: Remove package from global npm');
        try {
            await run('npm unlink -g');
            logger.info('UNLINK_GLOBAL_SUCCESS: Global link removed successfully | Status: unlinked | Location: global npm');
        } catch (error) {
            // This might fail if the package wasn't globally linked, which is OK
            logger.warn(`UNLINK_GLOBAL_SKIP: Failed to remove global link | Error: ${error.message} | Impact: OK if package wasn't linked | Status: continuing`);
        }
        if (cleanNodeModules) {
            // Step 2: Clean node_modules and package-lock.json
            logger.info('UNLINK_CLEANING: Cleaning node_modules and package-lock.json | Command: rm -rf | Purpose: Remove symlinked dependencies');
            try {
                await run('rm -rf node_modules package-lock.json');
                logger.info('UNLINK_CLEAN_SUCCESS: Successfully cleaned node_modules and package-lock.json | Status: removed | Next: Fresh install');
            } catch (error) {
                logger.warn(`UNLINK_CLEAN_FAILED: Failed to clean directories | Error: ${error.message} | Impact: May need manual cleanup`);
            }
            // Step 3: Install dependencies (the only hard-failure step: a failed
            // install is rethrown because the package would be left broken)
            logger.info('UNLINK_INSTALLING: Installing dependencies from registry | Command: npm install | Purpose: Restore registry versions');
            try {
                await run('npm install');
                logger.info('UNLINK_INSTALL_SUCCESS: Dependencies installed successfully | Source: npm registry | Status: completed');
            } catch (error) {
                logger.error(`UNLINK_INSTALL_FAILED: Failed to install dependencies | Error: ${error.message} | Impact: Package may be in inconsistent state`);
                throw error;
            }
            // Step 4: Check for remaining links (suppress output and errors)
            logger.info('UNLINK_CHECK_REMAINING: Checking for remaining symlinks | Purpose: Verify clean unlink | Action: Scan node_modules');
        } else {
            // Step 2: Check for remaining links (suppress output and errors)
            logger.info('UNLINK_CHECK_REMAINING: Checking for remaining symlinks | Mode: skip-reinstall | Purpose: Verify unlink | Action: Scan node_modules');
            logger.info('Note: Use --clean-node-modules flag to also clean and reinstall dependencies');
        }
        try {
            // Use child_process directly to suppress logging and get JSON output
            const util = await import('util');
            const child_process = await import('child_process');
            const execPromise = util.promisify(child_process.exec);
            const result = await execPromise('npm ls --link --json');
            // Parse JSON output to check for links to packages in the same scope
            const packageScope = packageName.includes('/') ? packageName.split('/')[0] : null;
            if (packageScope && result.stdout.trim()) {
                try {
                    const linksData = safeJsonParse(result.stdout, 'npm ls output after unlink');
                    const linkedPackages = Object.keys(linksData.dependencies || {});
                    const scopeLinkedPackages = linkedPackages.filter((pkg)=>pkg.startsWith(packageScope + '/'));
                    if (scopeLinkedPackages.length > 0) {
                        logger.warn(`UNLINK_REMAINING_LINKS: Found remaining links to packages in scope | Scope: ${packageScope} | Packages: ${scopeLinkedPackages.join(', ')} | Note: May be expected if workspace packages linked`);
                        logger.verbose('UNLINK_REMAINING_NOTE: Remaining links may be expected | Reason: Other workspace packages still linked | Status: normal');
                    } else {
                        logger.info('UNLINK_VERIFY_CLEAN: No problematic links found | Status: clean | Verification: passed');
                    }
                } catch {
                    // If JSON parsing fails, fall back to basic check
                    logger.verbose('Failed to parse npm ls --link --json output, using basic check');
                    if (result.stdout.includes(packageScope)) {
                        logger.warn(`UNLINK_REMAINING_LINKS_BASIC: Found remaining links to scope | Scope: ${packageScope} | Check: basic | Note: May be expected`);
                        logger.verbose('UNLINK_REMAINING_NOTE: Remaining links may be expected | Reason: Other workspace packages still linked | Status: normal');
                    } else {
                        logger.info('UNLINK_VERIFY_CLEAN: No problematic links found | Status: clean | Verification: passed');
                    }
                }
            } else {
                logger.info('UNLINK_VERIFY_CLEAN: No problematic links found | Status: clean | Verification: passed');
            }
        } catch {
            // npm ls --link returns non-zero when there are no links, which is what we want
            // So we only log this at verbose level
            logger.verbose('npm ls --link check completed (non-zero exit is expected when no links exist)');
        }
        const summary = `Successfully unlinked ${packageName}`;
        logger.info(summary);
        return summary;
    }
    // New scope-based unlinking behavior
    logger.info(`UNLINK_EXPLICIT_MODE: Unlinking specific scope/package | Target: ${packageArgument} | Mode: explicit | Purpose: Remove symlinks for package`);
    const { scope, packageName } = parsePackageArgument(packageArgument);
    logger.verbose(`Parsed scope: ${scope}, package: ${packageName || 'all packages in scope'}`);
    // Find matching packages in the workspace
    const matchingPackages = await findMatchingPackages(targetDirectories, scope, storage, logger, packageName);
    if (matchingPackages.length === 0) {
        const message = packageName ? `No package found matching: ${packageName}` : `No packages found in scope: ${scope}`;
        logger.warn(message);
        return message;
    }
    logger.info(`Found ${matchingPackages.length} matching package(s)`);
    const unlinkedPackages = [];
    // If specific package name provided, use that; otherwise unlink all packages in scope
    const packagesToUnlink = packageName ? matchingPackages.filter((pkg)=>pkg.name === packageName) : matchingPackages;
    for (const pkg of packagesToUnlink){
        logger.info(`Processing package: ${pkg.name}`);
        // Step A: Find all packages that depend on this package and unlink them first
        const consumingPackages = await findConsumingPackages(targetDirectories, pkg.name, storage, logger);
        if (consumingPackages.length === 0) {
            logger.info(`No consuming packages found for: ${pkg.name}`);
        } else {
            logger.info(`Found ${consumingPackages.length} consuming package(s) for: ${pkg.name}`);
            for (const consumer of consumingPackages){
                try {
                    // chdir into the consumer so npm operates on its package;
                    // the finally block guarantees the cwd is restored.
                    const consumerOriginalCwd = process.cwd();
                    process.chdir(consumer.path);
                    try {
                        if (isDryRun) {
                            logger.info(`DRY RUN: Would run 'npm unlink ${pkg.name}' in: ${consumer.path}`);
                        } else {
                            logger.verbose(`Running 'npm unlink ${pkg.name}' in consumer: ${consumer.path}`);
                            await runSecure('npm', [
                                'unlink',
                                pkg.name
                            ]);
                            logger.info(`UNLINK_CONSUMER_SUCCESS: Consumer unlinked from package | Consumer: ${consumer.name} | Package: ${pkg.name} | Status: unlinked`);
                        }
                    } finally{
                        process.chdir(consumerOriginalCwd);
                    }
                } catch (error) {
                    // npm unlink can fail if package wasn't linked, but that's OK
                    logger.warn(`UNLINK_CONSUMER_FAILED: Failed to unlink consumer | Consumer: ${consumer.name} | Package: ${pkg.name} | Error: ${error.message}`);
                }
            }
        }
        // Step B: Run 'npm unlink' in the source package directory
        try {
            const originalCwd = process.cwd();
            process.chdir(pkg.path);
            try {
                if (isDryRun) {
                    logger.info(`DRY RUN: Would run 'npm unlink' in: ${pkg.path}`);
                } else {
                    logger.verbose(`Running 'npm unlink' in source: ${pkg.path}`);
                    await run('npm unlink');
                    logger.info(`UNLINK_SOURCE_SUCCESS: Source package unlinked | Package: ${pkg.name} | Status: unlinked`);
                }
            } finally{
                process.chdir(originalCwd);
            }
            unlinkedPackages.push(pkg.name);
        } catch (error) {
            // npm unlink can fail if package wasn't linked, but that's OK
            logger.warn(`UNLINK_SOURCE_FAILED: Failed to unlink source package | Package: ${pkg.name} | Error: ${error.message}`);
            unlinkedPackages.push(pkg.name); // Still count as success
        }
    }
    const summary = `Successfully unlinked ${unlinkedPackages.length} package(s): ${unlinkedPackages.join(', ')}`;
    logger.info(summary);
    return summary;
};
1098
// Reports which workspace packages currently have symlinked dependencies
// (same behaviour as the link command's status view). Returns a formatted,
// human-readable summary string.
const executeStatus = async (runConfig) => {
    const logger = getLogger();
    const storage = createStorage();
    // Directories to scan come from configuration, defaulting to cwd.
    const targetDirectories = runConfig.tree?.directories || [process.cwd()];
    if (targetDirectories.length === 1) {
        logger.info(`UNLINK_STATUS_CHECK: Checking link status in directory | Directory: ${targetDirectories[0]} | Purpose: Show current symlinks`);
    } else {
        logger.info(`UNLINK_STATUS_CHECK: Checking link status in directories | Directories: ${targetDirectories.join(', ')} | Count: ${targetDirectories.length} | Purpose: Show current symlinks`);
    }
    // Locate every package.json beneath the scan roots.
    let allPackageJsonFiles = [];
    for (const targetDirectory of targetDirectories) {
        const found = await findAllPackageJsonFiles(targetDirectory, storage);
        allPackageJsonFiles = allPackageJsonFiles.concat(found);
    }
    const packageStatuses = [];
    for (const location of allPackageJsonFiles) {
        const packageDir = location.path.replace('/package.json', '');
        try {
            const rawContent = await storage.readFile(location.path, 'utf-8');
            const packageJson = validatePackageJson(
                safeJsonParse(rawContent, location.path),
                location.path
            );
            if (!packageJson.name) continue;
            const linkedDependencies = await findLinkedDependencies(packageDir, packageJson.name, storage, logger);
            if (linkedDependencies.length > 0) {
                packageStatuses.push({
                    name: packageJson.name,
                    path: packageDir,
                    linkedDependencies
                });
            }
        } catch (error) {
            logger.warn(`Failed to parse ${location.path}: ${error.message}`);
        }
    }
    if (packageStatuses.length === 0) {
        return 'No linked dependencies found in workspace.';
    }
    // Render the report, one section per package.
    let report = `Found ${packageStatuses.length} package(s) with linked dependencies:\n\n`;
    for (const status of packageStatuses) {
        report += `📦 ${status.name}\n`;
        report += ` Path: ${status.path}\n`;
        if (status.linkedDependencies.length > 0) {
            report += ` Linked dependencies:\n`;
            for (const dep of status.linkedDependencies) {
                const marker = dep.isExternal ? '🔗 External' : '🔗 Internal';
                report += ` ${marker} ${dep.dependencyName} -> ${dep.targetPath}\n`;
            }
        }
        report += '\n';
    }
    return report;
};
1157
// Entry point for the `unlink` command. Routes `status` requests (from either
// the direct parameter or configuration) to the status reporter, and all
// other requests to the internal implementation.
const execute$3 = async (runConfig, packageArgument) => {
    try {
        if (packageArgument === 'status') {
            return await executeStatus(runConfig);
        }
        // Fall back to the argument supplied via configuration.
        const finalPackageArgument = packageArgument || runConfig.unlink?.packageArgument;
        if (finalPackageArgument === 'status') {
            return await executeStatus(runConfig);
        }
        return await executeInternal(runConfig, finalPackageArgument);
    } catch (error) {
        getLogger().error(`unlink failed: ${error.message}`);
        throw error;
    }
};
1177
+
1178
/**
 * Update inter-project dependencies in package.json based on current tree state.
 *
 * For every dependency in `dependencies`, `devDependencies`, and
 * `peerDependencies` whose name starts with `scope`, the current version is
 * resolved first from a sibling directory in the tree (parentDir/<short-name>)
 * and otherwise from the npm registry (`npm view <name> version`), then
 * written back as a caret range (`^x.y.z`) unless running in dry-run mode.
 *
 * NOTE(review): `depName.startsWith(scope)` is a plain prefix match, so scope
 * "@fjell" would also match "@fjellx/..." — presumably scopes are passed with
 * a trailing separator in mind; confirm.
 *
 * @param packageDir Directory containing the package.json to update.
 * @param scope      Dependency-name prefix to match (e.g. "@fjell").
 * @param isDryRun   When true, log intended changes without writing anything.
 * @param logger     Logger used for progress and warning output.
 * @returns {Promise<{hasChanges: boolean, updated: string[]}>} whether any
 *          version changed and a "name: old → new" entry per change.
 */ const updateInterProjectDependencies$1 = async (packageDir, scope, isDryRun, logger)=>{
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    if (!await storage.exists(packageJsonPath)) {
        logger.verbose('No package.json found, skipping dependency updates');
        return {
            hasChanges: false,
            updated: []
        };
    }
    const updated = [];
    let hasChanges = false;
    try {
        const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
        const packageJson = safeJsonParse(packageJsonContent, packageJsonPath);
        const sectionsToUpdate = [
            'dependencies',
            'devDependencies',
            'peerDependencies'
        ];
        // Collect all dependencies matching the scope
        const depsToUpdate = [];
        for (const section of sectionsToUpdate){
            const deps = packageJson[section];
            if (deps) {
                for (const [depName, depVersion] of Object.entries(deps)){
                    if (depName.startsWith(scope)) {
                        depsToUpdate.push({
                            section,
                            name: depName,
                            currentVersion: depVersion
                        });
                    }
                }
            }
        }
        if (depsToUpdate.length === 0) {
            logger.info(`UPDATES_NO_DEPS_FOUND: No dependencies matching scope | Scope: ${scope} | Package Dir: ${packageDir} | Status: No updates needed`);
            return {
                hasChanges: false,
                updated: []
            };
        }
        logger.info(`UPDATES_DEPS_FOUND: Found dependencies matching scope | Scope: ${scope} | Count: ${depsToUpdate.length} | Action: Will check and update versions`);
        // For each dependency, find its package.json and get the current version
        for (const dep of depsToUpdate){
            try {
                // Look for package in parent directories or node_modules
                let depVersion = null;
                // First try to find in tree (sibling packages): assumes the
                // sibling directory is named after the unscoped package name.
                const parentDir = path.dirname(packageDir);
                const siblingPackageJson = path.join(parentDir, dep.name.split('/').pop(), 'package.json');
                if (await storage.exists(siblingPackageJson)) {
                    const siblingContent = await storage.readFile(siblingPackageJson, 'utf-8');
                    const siblingPackage = safeJsonParse(siblingContent, siblingPackageJson);
                    // Guard against a directory-name collision with a different package.
                    if (siblingPackage.name === dep.name) {
                        depVersion = siblingPackage.version;
                        logger.verbose(`Found ${dep.name}@${depVersion} in tree`);
                    }
                }
                // Fall back to npm to get latest published version
                if (!depVersion) {
                    try {
                        // NOTE(review): dep.name is interpolated into a shell
                        // command; it comes from the local package.json, which is
                        // presumably trusted — confirm before accepting external input.
                        const { stdout } = await run(`npm view ${dep.name} version`);
                        depVersion = stdout.trim();
                        logger.verbose(`Found ${dep.name}@${depVersion} on npm`);
                    } catch {
                        logger.warn(`UPDATES_VERSION_NOT_FOUND: Could not find version for dependency | Dependency: ${dep.name} | Action: Skipping | Reason: Not found in tree or npm`);
                        continue;
                    }
                }
                const newVersion = `^${depVersion}`;
                if (dep.currentVersion !== newVersion) {
                    if (isDryRun) {
                        logger.info(`UPDATES_WOULD_UPDATE: Would update dependency | Mode: dry-run | Section: ${dep.section} | Dependency: ${dep.name} | Current: ${dep.currentVersion} | New: ${newVersion}`);
                    } else {
                        logger.info(`UPDATES_UPDATING: Updating dependency version | Section: ${dep.section} | Dependency: ${dep.name} | Current: ${dep.currentVersion} | New: ${newVersion}`);
                        packageJson[dep.section][dep.name] = newVersion;
                    }
                    // hasChanges is set even in dry-run so callers can report
                    // what would change.
                    hasChanges = true;
                    updated.push(`${dep.name}: ${dep.currentVersion} → ${newVersion}`);
                }
            } catch (error) {
                logger.warn(`UPDATES_DEP_UPDATE_FAILED: Failed to update dependency | Dependency: ${dep.name} | Error: ${error.message}`);
            }
        }
        if (hasChanges && !isDryRun) {
            // Persist the mutated package.json with 2-space indentation and a
            // trailing newline.
            await storage.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
            logger.info(`UPDATES_PACKAGE_COMPLETE: Updated dependencies in package.json | Count: ${updated.length} | File: package.json | Status: saved`);
        }
    } catch (error) {
        logger.warn(`UPDATES_INTER_PROJECT_FAILED: Failed to update inter-project dependencies | Error: ${error.message} | Impact: Dependencies not updated`);
    }
    return {
        hasChanges,
        updated
    };
};
1278
/**
 * Execute the "updates" command.
 *
 * Two modes:
 *  - Inter-project mode (runConfig.updates.interProject): delegates to
 *    updateInterProjectDependencies$1 for the current working directory and,
 *    when changes were written (and not a dry run), runs `npm install` to
 *    resynchronize package-lock.json.
 *  - Default mode: shells out to `npx npm-check-updates -u` with a regex
 *    filter derived from the scope, then runs `npm install` if ncu reported
 *    any updates.
 *
 * @param {object} runConfig - command configuration (dryRun, updates, tree, ...)
 * @returns {Promise<string>} human-readable summary of what was (or would be) updated
 * @throws {Error} when the scope is missing/invalid, or when ncu / npm install fails
 */ const execute$2 = async (runConfig)=>{
    // Transpiler-generated temporaries for optional-chaining on runConfig.
    var _runConfig_updates, _runConfig_updates1, _runConfig_tree;
    const isDryRun = runConfig.dryRun || false;
    const logger = getDryRunLogger(isDryRun);
    // Check if this is inter-project mode
    const interProjectMode = ((_runConfig_updates = runConfig.updates) === null || _runConfig_updates === void 0 ? void 0 : _runConfig_updates.interProject) || false;
    if (interProjectMode) {
        var _runConfig_updates2;
        // Inter-project dependency update mode
        const scope = (_runConfig_updates2 = runConfig.updates) === null || _runConfig_updates2 === void 0 ? void 0 : _runConfig_updates2.scope;
        if (!scope) {
            throw new Error('Scope parameter is required for inter-project updates. Usage: kodrdriv updates --inter-project <scope>');
        }
        if (!scope.startsWith('@')) {
            throw new Error(`Invalid scope "${scope}". Scope must start with @ (e.g., "@fjell")`);
        }
        logger.info(`UPDATES_INTER_PROJECT_STARTING: Updating inter-project dependencies | Scope: ${scope} | Type: inter-project | Purpose: Sync dependency versions`);
        const result = await updateInterProjectDependencies$1(process.cwd(), scope, isDryRun, logger);
        // Only touch the lock file when package.json actually changed on disk.
        if (result.hasChanges && !isDryRun) {
            logger.info('UPDATES_NPM_INSTALL: Running npm install to update lock file | Command: npm install | Purpose: Synchronize package-lock.json with changes');
            try {
                await run('npm install');
                logger.info('UPDATES_LOCK_FILE_UPDATED: Lock file updated successfully | File: package-lock.json | Status: synchronized');
            } catch (error) {
                logger.error(`UPDATES_NPM_INSTALL_FAILED: Failed to run npm install | Error: ${error.message} | Impact: Lock file not updated`);
                throw new Error(`Failed to update lock file: ${error.message}`);
            }
        }
        if (result.updated.length > 0) {
            logger.info(`UPDATES_INTER_PROJECT_COMPLETE: Updated inter-project dependencies | Count: ${result.updated.length} | Status: completed`);
            result.updated.forEach((update)=>logger.info(`UPDATES_DEP_UPDATED: ${update}`));
        } else {
            logger.info('UPDATES_INTER_PROJECT_NONE: No inter-project dependency updates needed | Status: All dependencies current');
        }
        return `Updated ${result.updated.length} inter-project dependencies`;
    }
    // Original scope-based npm-check-updates mode.
    // The scope may come either from the updates config or from the tree
    // command's positional package argument.
    const scope = ((_runConfig_updates1 = runConfig.updates) === null || _runConfig_updates1 === void 0 ? void 0 : _runConfig_updates1.scope) || ((_runConfig_tree = runConfig.tree) === null || _runConfig_tree === void 0 ? void 0 : _runConfig_tree.packageArgument);
    if (!scope) {
        throw new Error('Scope parameter is required. Usage: kodrdriv updates <scope> or kodrdriv updates --inter-project <scope>');
    }
    // Validate that scope looks like a valid npm scope (starts with @)
    if (!scope.startsWith('@')) {
        throw new Error(`Invalid scope "${scope}". Scope must start with @ (e.g., "@fjell")`);
    }
    logger.info(`UPDATES_NCU_STARTING: Running npm-check-updates for scope | Scope: ${scope} | Tool: npm-check-updates | Purpose: Find outdated dependencies`);
    // Build the npm-check-updates command. The filter is an ncu regex of the
    // form '/^@scope//'.
    // NOTE(review): scope is interpolated into a shell command; the
    // startsWith('@') check above is the only sanitization — confirm the
    // scope can never contain quotes or shell metacharacters upstream.
    const ncuCommand = `npx npm-check-updates '/${scope.replace('@', '^@')}//' -u`;
    logger.info(`UPDATES_NCU_EXECUTING: Executing npm-check-updates command | Command: ${ncuCommand} | Scope: ${scope}`);
    try {
        if (isDryRun) {
            logger.info(`Would run: ${ncuCommand}`);
            logger.info('Would run: npm install');
            return `Would update dependencies matching ${scope} scope`;
        }
        // Execute npm-check-updates
        const result = await run(ncuCommand);
        if (result.stdout) {
            logger.info('UPDATES_NCU_OUTPUT: npm-check-updates output | Status: completed');
            result.stdout.split('\n').forEach((line)=>{
                if (line.trim()) {
                    logger.info(`  ${line}`);
                }
            });
        }
        if (result.stderr) {
            logger.info('UPDATES_NCU_WARNINGS: npm-check-updates produced warnings | Type: warnings');
            result.stderr.split('\n').forEach((line)=>{
                if (line.trim()) {
                    logger.info(`  ${line}`);
                }
            });
        }
        // Check if package.json was actually modified.
        // Heuristic: ncu prints this exact sentence when nothing changed.
        const hasUpdates = result.stdout && !result.stdout.includes('All dependencies match the latest package versions');
        if (hasUpdates) {
            logger.info('UPDATES_NCU_INSTALL: Running npm install after ncu updates | Command: npm install | Purpose: Update lock file with new versions');
            try {
                const installResult = await run('npm install');
                if (installResult.stdout) {
                    logger.verbose('npm install output:');
                    installResult.stdout.split('\n').forEach((line)=>{
                        if (line.trim()) {
                            logger.verbose(`  ${line}`);
                        }
                    });
                }
                logger.info('UPDATES_NCU_LOCK_UPDATED: Lock file updated successfully after ncu | File: package-lock.json | Status: synchronized');
            } catch (installError) {
                logger.error(`UPDATES_NCU_INSTALL_FAILED: Failed to run npm install after ncu | Error: ${installError.message} | Impact: Lock file not synchronized`);
                throw new Error(`Failed to update lock file after dependency updates: ${installError.message}`);
            }
        }
        logger.info(`UPDATES_NCU_SUCCESS: Successfully updated dependencies | Scope: ${scope} | Status: completed | Files: package.json, package-lock.json`);
        return `Updated dependencies matching ${scope} scope`;
    } catch (error) {
        logger.error(`UPDATES_NCU_FAILED: Failed to run npm-check-updates | Scope: ${scope} | Error: ${error.message} | Impact: Dependencies not updated`);
        throw new Error(`Failed to update dependencies: ${error.message}`);
    }
};
1380
+
1381
// Global state to track published versions during tree execution - protected by mutex
// Versions published so far in this tree run; later packages read this list to
// bump their inter-project dependency ranges before publishing.
let publishedVersions = [];
// presumably the in-memory resume/execution context for the current tree run;
// not referenced in this chunk — confirm usage elsewhere in the file.
let executionContext = null;
// Serializes access to the mutable module-level state above when packages
// execute concurrently.
const globalStateMutex = new SimpleMutex();
1385
/**
 * Update inter-project dependencies in package.json based on published versions.
 *
 * For each previously published package that is part of the current build
 * tree, rewrites its entry in dependencies/devDependencies/peerDependencies
 * to `^<published version>`. Prerelease versions (containing '-') are never
 * propagated. In dry-run mode nothing is written; changes are only logged.
 *
 * @param {string} packageDir - directory containing the package.json to update
 * @param {Array<{packageName: string, version: string}>} publishedVersions - versions published earlier in this run
 * @param {Set<string>} allPackageNames - names of all packages in the build tree
 * @param {object} packageLogger - package-scoped logger
 * @param {boolean} isDryRun - when true, log intended changes without writing
 * @returns {Promise<boolean>} true if any dependency entry changed (or would change in dry run); false on error or no changes
 */ const updateInterProjectDependencies = async (packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun)=>{
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    if (!await storage.exists(packageJsonPath)) {
        packageLogger.verbose('No package.json found, skipping dependency updates');
        return false;
    }
    let hasChanges = false;
    try {
        const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
        const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
        const packageJson = validatePackageJson(parsed, packageJsonPath);
        const sectionsToUpdate = [
            'dependencies',
            'devDependencies',
            'peerDependencies'
        ];
        for (const publishedVersion of publishedVersions){
            const { packageName, version } = publishedVersion;
            // Only update if this is an inter-project dependency (exists in our build tree)
            if (!allPackageNames.has(packageName)) {
                continue;
            }
            // Skip prerelease versions (e.g., 1.0.0-beta.1, 2.0.0-alpha.3)
            // Prerelease versions should not be automatically propagated to consumers
            if (version.includes('-')) {
                packageLogger.verbose(`Skipping prerelease version ${packageName}@${version} - not updating dependencies`);
                continue;
            }
            // Update the dependency in all relevant sections
            for (const section of sectionsToUpdate){
                const deps = packageJson[section];
                if (deps && deps[packageName]) {
                    const oldVersion = deps[packageName];
                    const newVersion = `^${version}`;
                    if (oldVersion !== newVersion) {
                        if (isDryRun) {
                            packageLogger.info(`Would update ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
                        } else {
                            packageLogger.info(`Updating ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
                            deps[packageName] = newVersion;
                        }
                        // Dry runs also report true so callers can tell updates are pending.
                        hasChanges = true;
                    }
                }
            }
        }
        if (hasChanges && !isDryRun) {
            // Write updated package.json
            await storage.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
            packageLogger.info('Inter-project dependencies updated successfully');
        }
    } catch (error) {
        // Best-effort: a read/parse/write failure downgrades to a warning and
        // reports "no changes" to the caller.
        packageLogger.warn(`Failed to update inter-project dependencies: ${error.message}`);
        return false;
    }
    return hasChanges;
};
1444
/**
 * Detect scoped dependencies from package.json and run updates for them.
 *
 * Scope selection: if runConfig.publish.scopedDependencyUpdates is set, those
 * scopes are used (an empty array explicitly disables the feature); otherwise
 * the package's own scope (e.g. "@fjell" from "@fjell/core") is used, and
 * unscoped packages are skipped. Each selected scope is run through the
 * updates command (execute$2); failures there are logged but do not abort.
 * Change detection compares the raw package.json content before/after.
 *
 * @param {string} packageDir - directory containing the package.json
 * @param {object} packageLogger - package-scoped logger
 * @param {boolean} isDryRun - forwarded to the updates command
 * @param {object} runConfig - full run configuration (publish.scopedDependencyUpdates is read)
 * @returns {Promise<boolean>} true if package.json content changed; false otherwise or on error
 */ const updateScopedDependencies = async (packageDir, packageLogger, isDryRun, runConfig)=>{
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    if (!await storage.exists(packageJsonPath)) {
        packageLogger.verbose('No package.json found, skipping scoped dependency updates');
        return false;
    }
    try {
        // Transpiler-generated temporary for optional chaining.
        var _runConfig_publish;
        // Read the package.json before updates
        const beforeContent = await storage.readFile(packageJsonPath, 'utf-8');
        const parsed = safeJsonParse(beforeContent, packageJsonPath);
        const packageJson = validatePackageJson(parsed, packageJsonPath);
        // Determine which scopes to update
        let scopesToUpdate;
        // Check if scopedDependencyUpdates is configured
        const configuredScopes = (_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.scopedDependencyUpdates;
        if (configuredScopes !== undefined) {
            // scopedDependencyUpdates is explicitly configured
            if (configuredScopes.length > 0) {
                // Use configured scopes
                scopesToUpdate = new Set(configuredScopes);
                packageLogger.verbose(`Using configured scopes: ${Array.from(scopesToUpdate).join(', ')}`);
            } else {
                // Empty array means explicitly disabled
                packageLogger.verbose('Scoped dependency updates explicitly disabled');
                return false;
            }
        } else {
            // Not configured - use default behavior (package's own scope)
            scopesToUpdate = new Set();
            if (packageJson.name && packageJson.name.startsWith('@')) {
                const packageScope = packageJson.name.split('/')[0]; // e.g., "@fjell/core" -> "@fjell"
                scopesToUpdate.add(packageScope);
                packageLogger.verbose(`No scopes configured, defaulting to package's own scope: ${packageScope}`);
            } else {
                packageLogger.verbose('Package is not scoped and no scopes configured, skipping scoped dependency updates');
                return false;
            }
        }
        if (scopesToUpdate.size === 0) {
            packageLogger.verbose('No scopes to update, skipping updates');
            return false;
        }
        // Run updates for each scope
        for (const scope of scopesToUpdate){
            packageLogger.info(`🔄 Checking for ${scope} dependency updates before publish...`);
            try {
                // Create a config for the updates command with the scope
                const updatesConfig = {
                    ...runConfig,
                    dryRun: isDryRun,
                    updates: {
                        scope: scope
                    }
                };
                await execute$2(updatesConfig);
            } catch (error) {
                // Don't fail the publish if updates fails, just warn
                packageLogger.warn(`Failed to update ${scope} dependencies: ${error.message}`);
            }
        }
        // Check if package.json was modified by comparing raw file content.
        const afterContent = await storage.readFile(packageJsonPath, 'utf-8');
        const hasChanges = beforeContent !== afterContent;
        if (hasChanges) {
            packageLogger.info('✅ Scoped dependencies updated successfully');
        } else {
            packageLogger.info('No scoped dependency updates needed');
        }
        return hasChanges;
    } catch (error) {
        packageLogger.warn(`Failed to detect scoped dependencies: ${error.message}`);
        return false;
    }
};
1521
// Resolve the path of the hidden file used to persist tree-execution context,
// falling back to the default output directory when none is supplied.
const getContextFilePath = (outputDirectory)=>{
    const resolvedDir = outputDirectory ? outputDirectory : DEFAULT_OUTPUT_DIRECTORY;
    return getOutputPath(resolvedDir, '.kodrdriv-context');
};
1526
// Persist the tree-execution context to disk so an interrupted run can be
// resumed later. Date fields are serialized as ISO-8601 strings. Persistence
// is best-effort: failures are logged as warnings and never thrown.
const saveExecutionContext = async (context, outputDirectory)=>{
    const contextStorage = createStorage(); // Silent storage for context operations
    const targetPath = getContextFilePath(outputDirectory);
    try {
        // The output directory may not exist yet on the first save.
        await contextStorage.ensureDirectory(path.dirname(targetPath));
        // Convert Date instances to ISO strings for JSON round-tripping.
        const serializedVersions = context.publishedVersions.map((entry)=>({
            ...entry,
            publishTime: entry.publishTime.toISOString()
        }));
        const serializable = {
            ...context,
            startTime: context.startTime.toISOString(),
            lastUpdateTime: context.lastUpdateTime.toISOString(),
            publishedVersions: serializedVersions
        };
        await contextStorage.writeFile(targetPath, JSON.stringify(serializable, null, 2), 'utf-8');
    } catch (error) {
        // Don't fail the entire operation if context saving fails.
        getLogger().warn(`Warning: Failed to save execution context: ${error.message}`);
    }
};
1550
// Read a previously saved execution context from disk, restoring the Date
// objects that saveExecutionContext serialized as ISO strings. Returns null
// when no context file exists or when it cannot be read/parsed.
const loadExecutionContext = async (outputDirectory)=>{
    const contextStorage = createStorage(); // Silent storage for context operations
    const sourcePath = getContextFilePath(outputDirectory);
    try {
        const fileExists = await contextStorage.exists(sourcePath);
        if (!fileExists) {
            return null;
        }
        const raw = await contextStorage.readFile(sourcePath, 'utf-8');
        const parsed = safeJsonParse(raw, sourcePath);
        // Rehydrate ISO strings back into Date instances.
        const restoredVersions = parsed.publishedVersions.map((entry)=>({
            ...entry,
            publishTime: new Date(entry.publishTime)
        }));
        return {
            ...parsed,
            startTime: new Date(parsed.startTime),
            lastUpdateTime: new Date(parsed.lastUpdateTime),
            publishedVersions: restoredVersions
        };
    } catch (error) {
        getLogger().warn(`Warning: Failed to load execution context: ${error.message}`);
        return null;
    }
};
1576
// Delete the persisted execution context file, if one exists. Cleanup is
// best-effort: failures are logged as warnings and never propagate.
const cleanupContext = async (outputDirectory)=>{
    const contextStorage = createStorage(); // Silent storage for context operations
    const targetPath = getContextFilePath(outputDirectory);
    try {
        const fileExists = await contextStorage.exists(targetPath);
        if (fileExists) {
            await contextStorage.deleteFile(targetPath);
        }
    } catch (error) {
        // Don't fail if cleanup fails.
        getLogger().warn(`Warning: Failed to cleanup execution context: ${error.message}`);
    }
};
1590
/**
 * Mark a package as completed in the persisted execution context.
 *
 * Reuses loadExecutionContext/saveExecutionContext instead of duplicating the
 * context (de)serialization inline, so date handling lives in one place.
 * No-op when no context file exists; failures only warn, never throw.
 *
 * @param {string} packageName - package to record as completed
 * @param {string} [outputDirectory] - where the context file lives (defaults inside getContextFilePath)
 * @returns {Promise<void>}
 */ const promotePackageToCompleted = async (packageName, outputDirectory)=>{
    try {
        // loadExecutionContext returns null if the file is missing or unreadable;
        // either way there is nothing to promote.
        const context = await loadExecutionContext(outputDirectory);
        if (!context) {
            return;
        }
        // Defensive: tolerate a context file written without this field.
        if (!Array.isArray(context.completedPackages)) {
            context.completedPackages = [];
        }
        // Add package to completed list if not already there
        if (!context.completedPackages.includes(packageName)) {
            context.completedPackages.push(packageName);
            context.lastUpdateTime = new Date();
            await saveExecutionContext(context, outputDirectory);
        }
    } catch (error) {
        const logger = getLogger();
        logger.warn(`Warning: Failed to promote package to completed: ${error.message}`);
    }
};
1621
// Verify that every package in the tree defines all required npm scripts.
// Returns { valid, missingScripts } where missingScripts maps package name to
// the scripts it lacks. A package whose package.json cannot be read is
// treated as missing every required script.
const validateScripts = async (packages, scripts)=>{
    const logger = getLogger();
    const storage = createStorage();
    const missingScripts = new Map();
    logger.debug(`Validating scripts: ${scripts.join(', ')}`);
    for (const [packageName, packageInfo] of packages){
        const packageJsonPath = path.join(packageInfo.path, 'package.json');
        try {
            const raw = await storage.readFile(packageJsonPath, 'utf-8');
            const validated = validatePackageJson(safeJsonParse(raw, packageJsonPath), packageJsonPath);
            // A script counts as present only when the scripts map exists and has it.
            const missingForPackage = scripts.filter((script)=>!validated.scripts || !validated.scripts[script]);
            if (missingForPackage.length > 0) {
                missingScripts.set(packageName, missingForPackage);
                logger.debug(`Package ${packageName} missing scripts: ${missingForPackage.join(', ')}`);
            }
        } catch (error) {
            logger.debug(`Error reading package.json for ${packageName}: ${error.message}`);
            // If we can't read the package.json, assume all scripts are missing.
            missingScripts.set(packageName, scripts);
        }
    }
    const valid = missingScripts.size === 0;
    if (valid) {
        logger.info(`✅ All packages have the required scripts: ${scripts.join(', ')}`);
    } else {
        logger.error(`❌ Script validation failed. Missing scripts:`);
        for (const [packageName, missing] of missingScripts){
            logger.error(`  ${packageName}: ${missing.join(', ')}`);
        }
    }
    return {
        valid,
        missingScripts
    };
};
1664
// Extract the just-published release version from git tags.
// After `kodrdriv publish`, the release version is captured in the newest git
// tag, while package.json already holds the next development version.
// Returns { packageName, version, publishTime } or null on any failure.
const extractPublishedVersion = async (packageDir, packageLogger)=>{
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    try {
        // The package name comes from package.json; the version comes from the tag.
        const rawPackageJson = await storage.readFile(packageJsonPath, 'utf-8');
        const manifest = validatePackageJson(safeJsonParse(rawPackageJson, packageJsonPath), packageJsonPath);
        // Sort tags by creation date (newest first) so we pick the tag the
        // publish just created rather than an older tag with a higher version number.
        const { stdout: tagOutput } = await run('git tag --sort=-creatordate', {
            cwd: packageDir
        });
        const tags = tagOutput.trim().split('\n').filter(Boolean);
        if (tags.length === 0) {
            packageLogger.warn('No git tags found after publish');
            return null;
        }
        const newestTag = tags[0];
        // Normalize tag formats such as "v1.2.3", "working/v1.2.3", "main/v1.2.3":
        // keep only the segment after the last slash, then drop a leading "v".
        const segments = newestTag.split('/');
        let version = segments[segments.length - 1] || newestTag;
        if (version.startsWith('v')) {
            version = version.slice(1);
        }
        packageLogger.verbose(`Extracted published version from tag: ${newestTag} -> ${version}`);
        return {
            packageName: manifest.name,
            version: version,
            publishTime: new Date()
        };
    } catch (error) {
        packageLogger.warn(`Failed to extract published version: ${error.message}`);
        return null;
    }
};
1711
/**
 * Enhanced run function that can show output based on log level.
 *
 * Wraps child_process.exec (promisified) and routes stdout/stderr three ways:
 *  - console, via packageLogger, governed by showOutput ('none' | 'minimal' | 'full');
 *  - an optional append-only log file at logFilePath (created on demand);
 *  - the returned { stdout, stderr } strings.
 * On failure, stderr is always echoed via logger.error, everything is written
 * to the log file (including the stack trace), and the original error is
 * rethrown.
 *
 * @param {string} command - shell command to execute
 * @param {object} packageLogger - package-scoped logger
 * @param {object} [options] - exec options (cwd, env, ...); encoding is forced to 'utf8'
 * @param {'none'|'minimal'|'full'} [showOutput] - how much output to surface on console
 * @param {string} [logFilePath] - when set, command, output, and errors are appended here
 * @returns {Promise<{stdout: string, stderr: string}>}
 * @throws the exec error (with stdout/stderr attached) when the command fails
 */ const runWithLogging = async (command, packageLogger, options = {}, showOutput = 'none', logFilePath)=>{
    const execPromise = util.promisify(exec);
    // Ensure encoding is set to 'utf8' to get string output instead of Buffer
    const execOptions = {
        encoding: 'utf8',
        ...options
    };
    if (showOutput === 'full') {
        packageLogger.debug(`Executing command: ${command}`);
        // Use info level to show on console in debug mode
        packageLogger.info(`🔧 Running: ${command}`);
    } else if (showOutput === 'minimal') {
        packageLogger.verbose(`Running: ${command}`);
    }
    // Helper to write to log file (no-op when logFilePath is unset; write
    // failures downgrade to warnings so logging never breaks the command).
    const writeToLogFile = async (content)=>{
        if (!logFilePath) return;
        try {
            const logDir = path.dirname(logFilePath);
            await fs.mkdir(logDir, {
                recursive: true
            });
            await fs.appendFile(logFilePath, content + '\n', 'utf-8');
        } catch (err) {
            packageLogger.warn(`Failed to write to log file ${logFilePath}: ${err.message}`);
        }
    };
    // Write command to log file
    if (logFilePath) {
        const timestamp = new Date().toISOString();
        await writeToLogFile(`[${timestamp}] Executing: ${command}\n`);
    }
    try {
        const result = await execPromise(command, execOptions);
        if (showOutput === 'full') {
            const stdout = String(result.stdout);
            const stderr = String(result.stderr);
            if (stdout.trim()) {
                packageLogger.debug('STDOUT:');
                packageLogger.debug(stdout);
                // Show on console using info level for immediate feedback
                packageLogger.info(`📤 STDOUT:`);
                stdout.split('\n').forEach((line)=>{
                    if (line.trim()) packageLogger.info(`${line}`);
                });
            }
            if (stderr.trim()) {
                packageLogger.debug('STDERR:');
                packageLogger.debug(stderr);
                // Show on console using info level for immediate feedback
                packageLogger.info(`⚠️ STDERR:`);
                stderr.split('\n').forEach((line)=>{
                    if (line.trim()) packageLogger.info(`${line}`);
                });
            }
        }
        // Write output to log file
        if (logFilePath) {
            const stdout = String(result.stdout);
            const stderr = String(result.stderr);
            if (stdout.trim()) {
                await writeToLogFile(`\n=== STDOUT ===\n${stdout}`);
            }
            if (stderr.trim()) {
                await writeToLogFile(`\n=== STDERR ===\n${stderr}`);
            }
            await writeToLogFile(`\n[${new Date().toISOString()}] Command completed successfully\n`);
        }
        // Ensure result is properly typed as strings
        return {
            stdout: String(result.stdout),
            stderr: String(result.stderr)
        };
    } catch (error) {
        // Always show error message
        packageLogger.error(`Command failed: ${command}`);
        // Always show stderr on failure (contains important error details like coverage failures)
        if (error.stderr && error.stderr.trim()) {
            packageLogger.error(`❌ STDERR:`);
            error.stderr.split('\n').forEach((line)=>{
                if (line.trim()) packageLogger.error(`${line}`);
            });
        }
        // Show stdout on failure if available (may contain error context)
        if (error.stdout && error.stdout.trim() && (showOutput === 'full' || showOutput === 'minimal')) {
            packageLogger.info(`📤 STDOUT:`);
            error.stdout.split('\n').forEach((line)=>{
                if (line.trim()) packageLogger.info(`${line}`);
            });
        }
        // Show full output in debug/verbose mode
        if (showOutput === 'full' || showOutput === 'minimal') {
            if (error.stdout && error.stdout.trim() && showOutput === 'full') {
                packageLogger.debug('STDOUT:');
                packageLogger.debug(error.stdout);
            }
            if (error.stderr && error.stderr.trim() && showOutput === 'full') {
                packageLogger.debug('STDERR:');
                packageLogger.debug(error.stderr);
            }
        }
        // Write error output to log file
        if (logFilePath) {
            await writeToLogFile(`\n[${new Date().toISOString()}] Command failed: ${error.message}`);
            if (error.stdout) {
                await writeToLogFile(`\n=== STDOUT ===\n${error.stdout}`);
            }
            if (error.stderr) {
                await writeToLogFile(`\n=== STDERR ===\n${error.stderr}`);
            }
            if (error.stack) {
                await writeToLogFile(`\n=== STACK TRACE ===\n${error.stack}`);
            }
        }
        throw error;
    }
};
1829
// Create a package-scoped logger whose every message is prefixed with
// "[seq/total] packageName:" (and "DRY RUN: " in dry-run mode), forwarding to
// the shared base logger at the matching level.
const createPackageLogger = (packageName, sequenceNumber, totalCount, isDryRun = false)=>{
    const baseLogger = getLogger();
    const label = `${isDryRun ? 'DRY RUN: ' : ''}[${sequenceNumber}/${totalCount}] ${packageName}:`;
    // Build one forwarding function per log level to avoid repeating the prefix logic.
    const forward = (level)=>(message, ...args)=>baseLogger[level](`${label} ${message}`, ...args);
    return {
        info: forward('info'),
        warn: forward('warn'),
        error: forward('error'),
        debug: forward('debug'),
        verbose: forward('verbose'),
        silly: forward('silly')
    };
};
1843
// Format a failure from a subproject command into a multi-line, indented
// report: headline, the error message, then any STDERR/STDOUT captured from
// the child process. The trailing positional parameters are accepted for
// signature compatibility but unused.
const formatSubprojectError = (packageName, error, _packageInfo, _position, _total)=>{
    // Indent each line of a block of text; optionally drop blank lines first.
    const indentBlock = (text, pad, dropBlank)=>text
        .split('\n')
        .filter((line)=>dropBlank ? line.trim() !== '' : true)
        .map((line)=>`${pad}${line}`)
        .join('\n');
    const report = [`❌ Command failed in package ${packageName}:`];
    if (error.message) {
        report.push(indentBlock(error.message, '  ', false));
    }
    if (error.stderr && error.stderr.trim()) {
        report.push('  STDERR:');
        report.push(indentBlock(error.stderr, '    ', true));
    }
    if (error.stdout && error.stdout.trim()) {
        report.push('  STDOUT:');
        report.push(indentBlock(error.stdout, '    ', true));
    }
    return report.join('\n');
};
1866
+ // Note: PackageInfo, DependencyGraph, scanForPackageJsonFiles, parsePackageJson,
1867
+ // buildDependencyGraph, and topologicalSort are now imported from ../util/dependencyGraph
1868
+ // Execute a single package and return execution result
1869
+ const executePackage = async (packageName, packageInfo, commandToRun, runConfig, isDryRun, index, total, allPackageNames, isBuiltInCommand = false)=>{
1870
+ const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
1871
+ const packageDir = packageInfo.path;
1872
+ const logger = getLogger();
1873
+ // Create log file path for publish commands
1874
+ let logFilePath;
1875
+ if (isBuiltInCommand && commandToRun.includes('publish')) {
1876
+ var _commandToRun_split_;
1877
+ const outputDir = runConfig.outputDirectory || 'output/kodrdriv';
1878
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-').replace('T', '_').split('.')[0];
1879
+ const commandName = ((_commandToRun_split_ = commandToRun.split(' ')[1]) === null || _commandToRun_split_ === void 0 ? void 0 : _commandToRun_split_.split(' ')[0]) || 'command';
1880
+ logFilePath = path.join(packageDir, outputDir, `${commandName}_${timestamp}.log`);
1881
+ }
1882
+ // Determine output level based on flags
1883
+ // For publish and commit commands, default to full output to show AI progress and other details
1884
+ // For other commands, require --verbose or --debug for output
1885
+ const isPublishCommand = isBuiltInCommand && commandToRun.includes('publish');
1886
+ const isCommitCommand = isBuiltInCommand && commandToRun.includes('commit');
1887
+ let showOutput = isPublishCommand || isCommitCommand ? 'full' : 'none';
1888
+ if (runConfig.debug) {
1889
+ showOutput = 'full';
1890
+ } else if (runConfig.verbose) {
1891
+ showOutput = 'minimal';
1892
+ }
1893
+ // Show package start info - always visible for progress tracking
1894
+ if (runConfig.debug) {
1895
+ packageLogger.debug('MULTI_PROJECT_START: Starting package execution | Package: %s | Index: %d/%d | Path: %s | Command: %s | Context: tree execution', packageName, index + 1, total, packageDir, commandToRun);
1896
+ packageLogger.debug('MULTI_PROJECT_CONTEXT: Execution details | Directory: %s | Built-in Command: %s | Dry Run: %s | Output Level: %s', packageDir, isBuiltInCommand, isDryRun, showOutput);
1897
+ // Show dependencies if available
1898
+ if (packageInfo.dependencies && Array.isArray(packageInfo.dependencies) && packageInfo.dependencies.length > 0) {
1899
+ packageLogger.debug('MULTI_PROJECT_DEPS: Package dependencies | Package: %s | Dependencies: [%s]', packageName, packageInfo.dependencies.join(', '));
1900
+ }
1901
+ } else if (runConfig.verbose) {
1902
+ packageLogger.verbose(`Starting execution in ${packageDir}`);
1903
+ } else {
1904
+ // Basic progress info even without flags
1905
+ logger.info(`[${index + 1}/${total}] ${packageName}: Running ${commandToRun}...`);
1906
+ }
1907
+ // Track if publish was skipped due to no changes
1908
+ let publishWasSkipped = false;
1909
+ // Track execution timing
1910
+ const executionTimer = PerformanceTimer.start(packageLogger, `Package ${packageName} execution`);
1911
+ let executionDuration;
1912
+ try {
1913
+ if (isDryRun && !isBuiltInCommand) {
1914
+ // Handle inter-project dependency updates for publish commands in dry run mode
1915
+ if (isBuiltInCommand && commandToRun.includes('publish') && publishedVersions.length > 0) {
1916
+ let mutexLocked = false;
1917
+ try {
1918
+ await globalStateMutex.lock();
1919
+ mutexLocked = true;
1920
+ packageLogger.info('Would check for inter-project dependency updates before publish...');
1921
+ const versionSnapshot = [
1922
+ ...publishedVersions
1923
+ ]; // Create safe copy
1924
+ globalStateMutex.unlock();
1925
+ mutexLocked = false;
1926
+ await updateInterProjectDependencies(packageDir, versionSnapshot, allPackageNames, packageLogger, isDryRun);
1927
+ } catch (error) {
1928
+ if (mutexLocked) {
1929
+ globalStateMutex.unlock();
1930
+ }
1931
+ throw error;
1932
+ }
1933
+ }
1934
+ // Use main logger for the specific message tests expect
1935
+ logger.info(`DRY RUN: Would execute: ${commandToRun}`);
1936
+ if (runConfig.debug || runConfig.verbose) {
1937
+ packageLogger.info(`In directory: ${packageDir}`);
1938
+ }
1939
+ } else {
1940
+ // Change to the package directory and run the command
1941
+ const originalCwd = process.cwd();
1942
+ try {
1943
+ // Validate package directory exists before changing to it
1944
+ try {
1945
+ await fs.access(packageDir);
1946
+ const stat = await fs.stat(packageDir);
1947
+ if (!stat.isDirectory()) {
1948
+ throw new Error(`Path is not a directory: ${packageDir}`);
1949
+ }
1950
+ } catch (accessError) {
1951
+ throw new Error(`Cannot access package directory: ${packageDir} - ${accessError.message}`);
1952
+ }
1953
+ process.chdir(packageDir);
1954
+ if (runConfig.debug) {
1955
+ packageLogger.debug(`Changed to directory: ${packageDir}`);
1956
+ }
1957
+ // Handle dependency updates for publish commands before executing (skip during dry run)
1958
+ // Wrap in git lock to prevent parallel packages from conflicting with npm install and git operations
1959
+ if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
1960
+ await runGitWithLock(packageDir, async ()=>{
1961
+ let hasAnyUpdates = false;
1962
+ // First, update all scoped dependencies from npm registry
1963
+ const hasScopedUpdates = await updateScopedDependencies(packageDir, packageLogger, isDryRun, runConfig);
1964
+ hasAnyUpdates = hasAnyUpdates || hasScopedUpdates;
1965
+ // Then update inter-project dependencies based on previously published packages
1966
+ if (publishedVersions.length > 0) {
1967
+ packageLogger.info('Updating inter-project dependencies based on previously published packages...');
1968
+ const hasInterProjectUpdates = await updateInterProjectDependencies(packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun);
1969
+ hasAnyUpdates = hasAnyUpdates || hasInterProjectUpdates;
1970
+ }
1971
+ // If either type of update occurred, commit the changes
1972
+ if (hasAnyUpdates) {
1973
+ // Commit the dependency updates using kodrdriv commit
1974
+ packageLogger.info('Committing dependency updates...');
1975
+ packageLogger.info('⏱️ This step may take a few minutes as it generates a commit message using AI...');
1976
+ // Add timeout wrapper around commit execution
1977
+ const commitTimeoutMs = 300000; // 5 minutes
1978
+ const commitPromise = Commit.commit({
1979
+ ...runConfig,
1980
+ dryRun: false
1981
+ });
1982
+ const timeoutPromise = new Promise((_, reject)=>{
1983
+ setTimeout(()=>reject(new Error(`Commit operation timed out after ${commitTimeoutMs / 1000} seconds`)), commitTimeoutMs);
1984
+ });
1985
+ // Add progress indicator
1986
+ let progressInterval = null;
1987
+ try {
1988
+ // Start progress indicator
1989
+ progressInterval = setInterval(()=>{
1990
+ packageLogger.info('⏳ Still generating commit message... (this can take 1-3 minutes)');
1991
+ }, 30000); // Every 30 seconds
1992
+ await Promise.race([
1993
+ commitPromise,
1994
+ timeoutPromise
1995
+ ]);
1996
+ packageLogger.info('✅ Dependency updates committed successfully');
1997
+ } catch (commitError) {
1998
+ if (commitError.message.includes('timed out')) {
1999
+ packageLogger.error(`❌ Commit operation timed out after ${commitTimeoutMs / 1000} seconds`);
2000
+ packageLogger.error('This usually indicates an issue with the AI service or very large changes');
2001
+ packageLogger.error('You may need to manually commit the dependency updates');
2002
+ } else {
2003
+ packageLogger.warn(`Failed to commit dependency updates: ${commitError.message}`);
2004
+ }
2005
+ // Continue with publish anyway - the updates are still in place
2006
+ } finally{
2007
+ if (progressInterval) {
2008
+ clearInterval(progressInterval);
2009
+ }
2010
+ }
2011
+ }
2012
+ }, `${packageName}: dependency updates`);
2013
+ }
2014
+ // Optimize precommit commands for custom commands (not built-in)
2015
+ let effectiveCommandToRun = commandToRun;
2016
+ let optimizationInfo = null;
2017
+ if (!isBuiltInCommand && !isDryRun) {
2018
+ const isPrecommitCommand = commandToRun.includes('precommit') || commandToRun.includes('pre-commit');
2019
+ if (isPrecommitCommand) {
2020
+ try {
2021
+ const optimization = await optimizePrecommitCommand(packageDir, commandToRun);
2022
+ effectiveCommandToRun = optimization.optimizedCommand;
2023
+ optimizationInfo = {
2024
+ skipped: optimization.skipped,
2025
+ reasons: optimization.reasons
2026
+ };
2027
+ if (optimization.skipped.clean || optimization.skipped.test) {
2028
+ const skippedParts = [];
2029
+ if (optimization.skipped.clean) {
2030
+ skippedParts.push(`clean (${optimization.reasons.clean})`);
2031
+ }
2032
+ if (optimization.skipped.test) {
2033
+ skippedParts.push(`test (${optimization.reasons.test})`);
2034
+ }
2035
+ packageLogger.info(`⚡ Optimized: Skipped ${skippedParts.join(', ')}`);
2036
+ if (runConfig.verbose || runConfig.debug) {
2037
+ packageLogger.info(` Original: ${commandToRun}`);
2038
+ packageLogger.info(` Optimized: ${effectiveCommandToRun}`);
2039
+ }
2040
+ }
2041
+ } catch (error) {
2042
+ // If optimization fails, fall back to original command
2043
+ logger.debug(`Precommit optimization failed for ${packageName}: ${error.message}`);
2044
+ }
2045
+ }
2046
+ }
2047
+ if (runConfig.debug || runConfig.verbose) {
2048
+ if (isBuiltInCommand) {
2049
+ packageLogger.info(`Executing built-in command: ${commandToRun}`);
2050
+ } else {
2051
+ packageLogger.info(`Executing command: ${effectiveCommandToRun}`);
2052
+ }
2053
+ }
2054
+ // For built-in commands, shell out to a separate kodrdriv process
2055
+ // This preserves individual project configurations
2056
+ if (isBuiltInCommand) {
2057
+ // Extract the command name from "kodrdriv <command> [args...]"
2058
+ // Split by space and take the second element (after "kodrdriv")
2059
+ const commandParts = commandToRun.replace(/^kodrdriv\s+/, '').split(/\s+/);
2060
+ const builtInCommandName = commandParts[0];
2061
+ if (runConfig.debug) {
2062
+ packageLogger.debug(`Shelling out to separate kodrdriv process for ${builtInCommandName} command`);
2063
+ }
2064
+ // Add progress indication for publish commands
2065
+ if (builtInCommandName === 'publish') {
2066
+ packageLogger.info('🚀 Starting publish process...');
2067
+ packageLogger.info('⏱️ This may take several minutes (AI processing, PR creation, etc.)');
2068
+ }
2069
+ // Ensure dry-run propagates to subprocess even during overall dry-run mode
2070
+ let effectiveCommand = runConfig.dryRun && !commandToRun.includes('--dry-run') ? `${commandToRun} --dry-run` : commandToRun;
2071
+ // For commit commands, ensure --sendit is used to avoid interactive prompts
2072
+ // This prevents hanging when running via tree command
2073
+ if (builtInCommandName === 'commit' && !effectiveCommand.includes('--sendit') && !runConfig.dryRun) {
2074
+ effectiveCommand = `${effectiveCommand} --sendit`;
2075
+ packageLogger.info('💡 Auto-adding --sendit flag to avoid interactive prompts in tree mode');
2076
+ }
2077
+ // Set timeout based on command type
2078
+ let commandTimeoutMs;
2079
+ if (builtInCommandName === 'publish') {
2080
+ commandTimeoutMs = 1800000; // 30 minutes for publish commands
2081
+ packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for publish command`);
2082
+ } else if (builtInCommandName === 'commit') {
2083
+ commandTimeoutMs = 600000; // 10 minutes for commit commands (AI processing can take time)
2084
+ packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for commit command`);
2085
+ } else {
2086
+ commandTimeoutMs = 300000; // 5 minutes default for other commands
2087
+ }
2088
+ const commandPromise = runWithLogging(effectiveCommand, packageLogger, {}, showOutput, logFilePath);
2089
+ const commandTimeoutPromise = new Promise((_, reject)=>{
2090
+ setTimeout(()=>reject(new Error(`Command timed out after ${commandTimeoutMs / 60000} minutes`)), commandTimeoutMs);
2091
+ });
2092
+ try {
2093
+ const startTime = Date.now();
2094
+ const { stdout, stderr } = await Promise.race([
2095
+ commandPromise,
2096
+ commandTimeoutPromise
2097
+ ]);
2098
+ executionDuration = Date.now() - startTime;
2099
+ // Detect explicit skip marker from publish to avoid propagating versions
2100
+ // Check both stdout (where we now write it) and stderr (winston logger output, for backward compat)
2101
+ if (builtInCommandName === 'publish' && (stdout && stdout.includes('KODRDRIV_PUBLISH_SKIPPED') || stderr && stderr.includes('KODRDRIV_PUBLISH_SKIPPED'))) {
2102
+ packageLogger.info('Publish skipped for this package; will not record or propagate a version.');
2103
+ publishWasSkipped = true;
2104
+ }
2105
+ } catch (error) {
2106
+ if (error.message.includes('timed out')) {
2107
+ packageLogger.error(`❌ ${builtInCommandName} command timed out after ${commandTimeoutMs / 60000} minutes`);
2108
+ packageLogger.error('This usually indicates the command is stuck waiting for user input or an external service');
2109
+ throw error;
2110
+ }
2111
+ throw error;
2112
+ }
2113
+ } else {
2114
+ // For custom commands, use the existing logic
2115
+ const startTime = Date.now();
2116
+ await runWithLogging(effectiveCommandToRun, packageLogger, {}, showOutput, logFilePath);
2117
+ executionDuration = Date.now() - startTime;
2118
+ }
2119
+ // Track published version after successful publish (skip during dry run)
2120
+ if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
2121
+ // If publish was skipped, do not record a version
2122
+ if (publishWasSkipped) {
2123
+ packageLogger.verbose('Skipping version tracking due to earlier skip.');
2124
+ } else {
2125
+ // Only record a published version if a new tag exists (avoid recording for skipped publishes)
2126
+ const publishedVersion = await extractPublishedVersion(packageDir, packageLogger);
2127
+ if (publishedVersion) {
2128
+ let mutexLocked = false;
2129
+ try {
2130
+ await globalStateMutex.lock();
2131
+ mutexLocked = true;
2132
+ publishedVersions.push(publishedVersion);
2133
+ packageLogger.info(`Tracked published version: ${publishedVersion.packageName}@${publishedVersion.version}`);
2134
+ globalStateMutex.unlock();
2135
+ mutexLocked = false;
2136
+ } catch (error) {
2137
+ if (mutexLocked) {
2138
+ globalStateMutex.unlock();
2139
+ }
2140
+ throw error;
2141
+ }
2142
+ }
2143
+ }
2144
+ }
2145
+ // Record test run if tests were executed (not skipped)
2146
+ if (!isDryRun && !isBuiltInCommand && effectiveCommandToRun.includes('test') && (!optimizationInfo || !optimizationInfo.skipped.test)) {
2147
+ try {
2148
+ await recordTestRun(packageDir);
2149
+ } catch (error) {
2150
+ logger.debug(`Failed to record test run for ${packageName}: ${error.message}`);
2151
+ }
2152
+ }
2153
+ // End timing and show duration
2154
+ if (executionDuration !== undefined) {
2155
+ executionTimer.end(`Package ${packageName} execution`);
2156
+ const seconds = (executionDuration / 1000).toFixed(1);
2157
+ if (runConfig.debug || runConfig.verbose) {
2158
+ packageLogger.info(`⏱️ Execution time: ${seconds}s`);
2159
+ } else if (!isPublishCommand && !isCommitCommand) {
2160
+ // Show timing in completion message (publish/commit commands have their own completion message)
2161
+ logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed (${seconds}s)`);
2162
+ }
2163
+ } else {
2164
+ executionTimer.end(`Package ${packageName} execution`);
2165
+ if (runConfig.debug || runConfig.verbose) {
2166
+ packageLogger.info(`Command completed successfully`);
2167
+ } else if (!isPublishCommand && !isCommitCommand) {
2168
+ // Basic completion info (publish/commit commands have their own completion message)
2169
+ logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed`);
2170
+ }
2171
+ }
2172
+ } finally{
2173
+ // Safely restore working directory
2174
+ try {
2175
+ // Validate original directory still exists before changing back
2176
+ const fs = await import('fs/promises');
2177
+ await fs.access(originalCwd);
2178
+ process.chdir(originalCwd);
2179
+ if (runConfig.debug) {
2180
+ packageLogger.debug(`Restored working directory to: ${originalCwd}`);
2181
+ }
2182
+ } catch (restoreError) {
2183
+ // If we can't restore to original directory, at least log the issue
2184
+ packageLogger.error(`Failed to restore working directory to ${originalCwd}: ${restoreError.message}`);
2185
+ packageLogger.error(`Current working directory is now: ${process.cwd()}`);
2186
+ // Don't throw here to avoid masking the original error
2187
+ }
2188
+ }
2189
+ }
2190
+ // Show completion status (for publish/commit commands, this supplements the timing message above)
2191
+ if (runConfig.debug || runConfig.verbose) {
2192
+ if (publishWasSkipped) {
2193
+ packageLogger.info(`⊘ Skipped (no code changes)`);
2194
+ } else {
2195
+ packageLogger.info(`✅ Completed successfully`);
2196
+ }
2197
+ } else if (isPublishCommand || isCommitCommand) {
2198
+ // For publish/commit commands, always show completion even without verbose
2199
+ // Include timing if available
2200
+ const timeStr = executionDuration !== undefined ? ` (${(executionDuration / 1000).toFixed(1)}s)` : '';
2201
+ if (publishWasSkipped) {
2202
+ logger.info(`[${index + 1}/${total}] ${packageName}: ⊘ Skipped (no code changes)`);
2203
+ } else {
2204
+ logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed${timeStr}`);
2205
+ }
2206
+ }
2207
+ // Ensure timing is recorded even if there was an early return
2208
+ if (executionDuration === undefined) {
2209
+ executionDuration = executionTimer.end(`Package ${packageName} execution`);
2210
+ }
2211
+ return {
2212
+ success: true,
2213
+ skippedNoChanges: publishWasSkipped,
2214
+ logFile: logFilePath
2215
+ };
2216
+ } catch (error) {
2217
+ var _error_message;
2218
+ // Record timing even on error
2219
+ if (executionDuration === undefined) {
2220
+ executionDuration = executionTimer.end(`Package ${packageName} execution`);
2221
+ const seconds = (executionDuration / 1000).toFixed(1);
2222
+ if (runConfig.debug || runConfig.verbose) {
2223
+ packageLogger.error(`⏱️ Execution time before failure: ${seconds}s`);
2224
+ }
2225
+ }
2226
+ if (runConfig.debug || runConfig.verbose) {
2227
+ packageLogger.error(`❌ Execution failed: ${error.message}`);
2228
+ } else {
2229
+ logger.error(`[${index + 1}/${total}] ${packageName}: ❌ Failed - ${error.message}`);
2230
+ }
2231
+ // Always show stderr if available (contains important error details)
2232
+ // Note: runWithLogging already logs stderr, but we show it here too for visibility
2233
+ // when error is caught at this level (e.g., from timeout wrapper)
2234
+ if (error.stderr && error.stderr.trim() && !runConfig.debug && !runConfig.verbose) {
2235
+ // Extract key error lines from stderr (coverage failures, test failures, etc.)
2236
+ const stderrLines = error.stderr.split('\n').filter((line)=>{
2237
+ const trimmed = line.trim();
2238
+ return trimmed && (trimmed.includes('ERROR:') || trimmed.includes('FAIL') || trimmed.includes('coverage') || trimmed.includes('threshold') || trimmed.includes('fatal:') || trimmed.startsWith('❌'));
2239
+ });
2240
+ if (stderrLines.length > 0) {
2241
+ logger.error(` Error details:`);
2242
+ stderrLines.slice(0, 10).forEach((line)=>{
2243
+ logger.error(` ${line.trim()}`);
2244
+ });
2245
+ if (stderrLines.length > 10) {
2246
+ logger.error(` ... and ${stderrLines.length - 10} more error lines (use --verbose to see full output)`);
2247
+ }
2248
+ }
2249
+ }
2250
+ // Check if this is a timeout error
2251
+ const errorMessage = ((_error_message = error.message) === null || _error_message === void 0 ? void 0 : _error_message.toLowerCase()) || '';
2252
+ const isTimeoutError = errorMessage && (errorMessage.includes('timeout waiting for pr') || errorMessage.includes('timeout waiting for release workflows') || errorMessage.includes('timeout reached') || errorMessage.includes('timeout') || errorMessage.includes('timed out') || errorMessage.includes('timed_out'));
2253
+ return {
2254
+ success: false,
2255
+ error,
2256
+ isTimeoutError,
2257
+ logFile: logFilePath
2258
+ };
2259
+ }
2260
+ };
2261
/**
 * Generate a dry-run preview showing what would happen without executing.
 *
 * Groups the packages in `buildOrder` into dependency levels — a package's
 * level is one greater than the deepest level among its dependencies — then
 * renders a human-readable report. For publish commands it additionally runs
 * `git diff --name-only origin/main...HEAD` in each package directory to
 * predict whether the package would be published or skipped.
 *
 * @param {{edges: Map<string, Set<string>>, packages: Map<string, {name: string, path: string}>}} dependencyGraph
 *   Package-name graph: `edges` maps a package to its dependency names,
 *   `packages` maps a package to its metadata (at least `name` and `path`).
 * @param {string[]} buildOrder - Package names in topological (build) order.
 * @param {string} command - The command that would be executed per package.
 * @param {object} runConfig - Run configuration; `tree.maxConcurrency` is shown when set.
 * @returns {Promise<string>} The formatted multi-line preview text.
 */ const generateDryRunPreview = async (dependencyGraph, buildOrder, command, runConfig)=>{
    const lines = [];
    lines.push('');
    lines.push('🔍 DRY RUN MODE - No changes will be made');
    lines.push('');
    lines.push('Build order determined:');
    lines.push('');
    // Group packages by dependency level: level = 1 + max(level of deps),
    // so packages with no dependencies land at level 0. Because buildOrder
    // is topological, every dependency is leveled before its dependents.
    const levels = [];
    const packageLevels = new Map();
    for (const pkg of buildOrder){
        const deps = dependencyGraph.edges.get(pkg) || new Set();
        let maxDepLevel = -1;
        for (const dep of deps){
            // Dependencies outside the build order default to level 0.
            const depLevel = packageLevels.get(dep) ?? 0;
            maxDepLevel = Math.max(maxDepLevel, depLevel);
        }
        const pkgLevel = maxDepLevel + 1;
        packageLevels.set(pkg, pkgLevel);
        if (!levels[pkgLevel]) {
            levels[pkgLevel] = [];
        }
        levels[pkgLevel].push(pkg);
    }
    // Hoisted: whether this is a publish command is invariant across
    // packages (previously recomputed inside the per-package loop).
    const isPublish = command.includes('publish');
    // Show packages grouped by level
    for(let i = 0; i < levels.length; i++){
        const levelPackages = levels[i];
        lines.push(`Level ${i + 1}: (${levelPackages.length} package${levelPackages.length === 1 ? '' : 's'})`);
        for (const pkg of levelPackages){
            const pkgInfo = dependencyGraph.packages.get(pkg);
            if (!pkgInfo) continue;
            // Check if package has changes (only meaningful for publish)
            let status = '📝 Has changes, will execute';
            if (isPublish) {
                try {
                    // Check git diff to see if there are code changes
                    const { stdout } = await runSecure('git', [
                        'diff',
                        '--name-only',
                        'origin/main...HEAD'
                    ], {
                        cwd: pkgInfo.path
                    });
                    const changedFiles = stdout.split('\n').filter(Boolean);
                    // Version-only bumps (package.json / lockfile) do not
                    // warrant a publish.
                    const nonVersionFiles = changedFiles.filter((f)=>f !== 'package.json' && f !== 'package-lock.json');
                    if (changedFiles.length === 0) {
                        status = '⊘ No changes, will skip';
                    } else if (nonVersionFiles.length === 0) {
                        status = '⊘ Only version bump, will skip';
                    } else {
                        status = `📝 Has changes (${nonVersionFiles.length} files), will publish`;
                    }
                } catch {
                    // If we can't check git status, assume changes
                    status = '📝 Will execute';
                }
            }
            lines.push(`  ${pkg}`);
            lines.push(`    Status: ${status}`);
            lines.push(`    Path: ${pkgInfo.path}`);
        }
        lines.push('');
    }
    lines.push('Summary:');
    lines.push(`  Total packages: ${buildOrder.length}`);
    lines.push(`  Dependency levels: ${levels.length}`);
    lines.push(`  Command: ${command}`);
    if (runConfig.tree?.maxConcurrency) {
        lines.push(`  Max concurrency: ${runConfig.tree.maxConcurrency}`);
    }
    lines.push('');
    lines.push('To execute for real, run the same command without --dry-run');
    lines.push('');
    return lines.join('\n');
};
2341
// Add a simple status check function
const checkTreePublishStatus = async ()=>{
    const logger = getLogger();
    try {
        // List every process on the machine; we filter for kodrdriv below.
        const { stdout } = await runSecure('ps', [
            'aux'
        ], {});
        // Keep only lines that mention kodrdriv, excluding our own
        // inspection commands so the report isn't polluted by itself.
        const isRelevant = (line)=>line.includes('kodrdriv') && !line.includes('grep') && !line.includes('ps aux') && !line.includes('tree --status') // Exclude the current status command
        ;
        const matches = stdout.split('\n').filter(isRelevant);
        if (matches.length === 0) {
            logger.info('No kodrdriv processes currently running');
            return;
        }
        logger.info('🔍 Found running kodrdriv processes:');
        for (const psLine of matches){
            // ps aux layout: USER PID %CPU %MEM ... COMMAND (columns 11+).
            const columns = psLine.trim().split(/\s+/);
            const pid = columns[1];
            const command = columns.slice(10).join(' ');
            logger.info(`  PID ${pid}: ${command}`);
        }
    } catch (error) {
        logger.warn('Could not check process status:', error);
    }
};
2366
+ const execute$1 = async (runConfig)=>{
2367
+ var _runConfig_tree, _runConfig_tree1, _runConfig_tree2, _runConfig_tree3, _runConfig_tree4, _runConfig_tree5, _runConfig_tree6, _runConfig_tree7, _runConfig_tree8, _runConfig_tree9, _runConfig_tree10, _runConfig_tree11, _runConfig_tree12, _runConfig_tree13, _runConfig_tree14, _runConfig_tree15;
2368
+ const logger = getLogger();
2369
+ const isDryRun = runConfig.dryRun || false;
2370
+ const isContinue = ((_runConfig_tree = runConfig.tree) === null || _runConfig_tree === void 0 ? void 0 : _runConfig_tree.continue) || false;
2371
+ const promotePackage = (_runConfig_tree1 = runConfig.tree) === null || _runConfig_tree1 === void 0 ? void 0 : _runConfig_tree1.promote;
2372
+ // Debug logging
2373
+ logger.debug('Tree config:', JSON.stringify(runConfig.tree, null, 2));
2374
+ logger.debug('Status flag:', (_runConfig_tree2 = runConfig.tree) === null || _runConfig_tree2 === void 0 ? void 0 : _runConfig_tree2.status);
2375
+ logger.debug('Full runConfig:', JSON.stringify(runConfig, null, 2));
2376
+ // Handle status check
2377
+ if ((_runConfig_tree3 = runConfig.tree) === null || _runConfig_tree3 === void 0 ? void 0 : _runConfig_tree3.status) {
2378
+ logger.info('🔍 Checking for running kodrdriv processes...');
2379
+ await checkTreePublishStatus();
2380
+ return 'Status check completed';
2381
+ }
2382
+ // Handle promote mode
2383
+ if (promotePackage) {
2384
+ logger.info(`Promoting package '${promotePackage}' to completed status...`);
2385
+ await promotePackageToCompleted(promotePackage, runConfig.outputDirectory);
2386
+ logger.info(`✅ Package '${promotePackage}' has been marked as completed.`);
2387
+ logger.info('You can now run the tree command with --continue to resume from the next package.');
2388
+ return `Package '${promotePackage}' promoted to completed status.`;
2389
+ }
2390
+ // Handle audit-branches command
2391
+ if ((_runConfig_tree4 = runConfig.tree) === null || _runConfig_tree4 === void 0 ? void 0 : _runConfig_tree4.auditBranches) {
2392
+ var _runConfig_tree16, _runConfig_tree17, _runConfig_publish, _runConfig_tree18;
2393
+ logger.info('🔍 Auditing branch state across all packages...');
2394
+ const directories = ((_runConfig_tree16 = runConfig.tree) === null || _runConfig_tree16 === void 0 ? void 0 : _runConfig_tree16.directories) || [
2395
+ process.cwd()
2396
+ ];
2397
+ const excludedPatterns = ((_runConfig_tree17 = runConfig.tree) === null || _runConfig_tree17 === void 0 ? void 0 : _runConfig_tree17.exclude) || [];
2398
+ let allPackageJsonPaths = [];
2399
+ for (const targetDirectory of directories){
2400
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
2401
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
2402
+ }
2403
+ if (allPackageJsonPaths.length === 0) {
2404
+ return 'No packages found';
2405
+ }
2406
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
2407
+ const packages = Array.from(dependencyGraph.packages.values()).map((pkg)=>({
2408
+ name: pkg.name,
2409
+ path: pkg.path
2410
+ }));
2411
+ const { auditBranchState, formatAuditResults } = await import('./branchState-CtywDSJf.js');
2412
+ const { getRemoteDefaultBranch } = await import('@eldrforge/git-tools');
2413
+ // For publish workflows, check branch consistency, merge conflicts, and existing PRs
2414
+ // Don't pass an expected branch - let the audit find the most common branch
2415
+ let targetBranch = (_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.targetBranch;
2416
+ if (!targetBranch) {
2417
+ // Try to detect default branch from the first package that is a git repo
2418
+ const firstGitPkg = packages.find((pkg)=>isInGitRepository(pkg.path));
2419
+ if (firstGitPkg) {
2420
+ try {
2421
+ // Cast to any to avoid type mismatch with node_modules version
2422
+ targetBranch = await getRemoteDefaultBranch(firstGitPkg.path) || 'main';
2423
+ } catch {
2424
+ targetBranch = 'main';
2425
+ }
2426
+ } else {
2427
+ targetBranch = 'main';
2428
+ }
2429
+ }
2430
+ logger.info(`Checking for merge conflicts with '${targetBranch}' and existing pull requests...`);
2431
+ const auditResult = await auditBranchState(packages, undefined, {
2432
+ targetBranch,
2433
+ checkPR: true,
2434
+ checkConflicts: true,
2435
+ concurrency: ((_runConfig_tree18 = runConfig.tree) === null || _runConfig_tree18 === void 0 ? void 0 : _runConfig_tree18.maxConcurrency) || 10
2436
+ });
2437
+ const formatted = formatAuditResults(auditResult);
2438
+ logger.info('\n' + formatted);
2439
+ if (auditResult.issuesFound > 0) {
2440
+ logger.warn(`\n⚠️ Found issues in ${auditResult.issuesFound} package(s). Review the fixes above.`);
2441
+ return `Branch audit complete: ${auditResult.issuesFound} package(s) need attention`;
2442
+ }
2443
+ logger.info(`\n✅ All ${auditResult.goodPackages} package(s) are in good state!`);
2444
+ return `Branch audit complete: All packages OK`;
2445
+ }
2446
+ // Handle parallel execution recovery commands
2447
+ const { loadRecoveryManager } = await import('@eldrforge/tree-execution');
2448
+ // Handle status-parallel command
2449
+ if ((_runConfig_tree5 = runConfig.tree) === null || _runConfig_tree5 === void 0 ? void 0 : _runConfig_tree5.statusParallel) {
2450
+ var _runConfig_tree19, _runConfig_tree20;
2451
+ logger.info('📊 Checking parallel execution status...');
2452
+ // Need to build dependency graph first
2453
+ const directories = ((_runConfig_tree19 = runConfig.tree) === null || _runConfig_tree19 === void 0 ? void 0 : _runConfig_tree19.directories) || [
2454
+ process.cwd()
2455
+ ];
2456
+ const excludedPatterns = ((_runConfig_tree20 = runConfig.tree) === null || _runConfig_tree20 === void 0 ? void 0 : _runConfig_tree20.exclude) || [];
2457
+ let allPackageJsonPaths = [];
2458
+ for (const targetDirectory of directories){
2459
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
2460
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
2461
+ }
2462
+ if (allPackageJsonPaths.length === 0) {
2463
+ return 'No packages found';
2464
+ }
2465
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
2466
+ const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
2467
+ if (!recoveryManager) {
2468
+ logger.info('No parallel execution checkpoint found');
2469
+ return 'No active parallel execution found';
2470
+ }
2471
+ const status = await recoveryManager.showStatus();
2472
+ logger.info('\n' + status);
2473
+ return status;
2474
+ }
2475
+ // Handle validate-state command
2476
+ if ((_runConfig_tree6 = runConfig.tree) === null || _runConfig_tree6 === void 0 ? void 0 : _runConfig_tree6.validateState) {
2477
+ var _runConfig_tree21, _runConfig_tree22;
2478
+ logger.info('🔍 Validating checkpoint state...');
2479
+ const directories = ((_runConfig_tree21 = runConfig.tree) === null || _runConfig_tree21 === void 0 ? void 0 : _runConfig_tree21.directories) || [
2480
+ process.cwd()
2481
+ ];
2482
+ const excludedPatterns = ((_runConfig_tree22 = runConfig.tree) === null || _runConfig_tree22 === void 0 ? void 0 : _runConfig_tree22.exclude) || [];
2483
+ let allPackageJsonPaths = [];
2484
+ for (const targetDirectory of directories){
2485
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
2486
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
2487
+ }
2488
+ if (allPackageJsonPaths.length === 0) {
2489
+ return 'No packages found';
2490
+ }
2491
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
2492
+ const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
2493
+ if (!recoveryManager) {
2494
+ logger.info('No checkpoint found to validate');
2495
+ return 'No checkpoint found';
2496
+ }
2497
+ const validation = recoveryManager.validateState();
2498
+ if (validation.valid) {
2499
+ logger.info('✅ Checkpoint state is valid');
2500
+ } else {
2501
+ logger.error('❌ Checkpoint state has issues:');
2502
+ for (const issue of validation.issues){
2503
+ logger.error(` • ${issue}`);
2504
+ }
2505
+ }
2506
+ if (validation.warnings.length > 0) {
2507
+ logger.warn('⚠️ Warnings:');
2508
+ for (const warning of validation.warnings){
2509
+ logger.warn(` • ${warning}`);
2510
+ }
2511
+ }
2512
+ return validation.valid ? 'Checkpoint is valid' : 'Checkpoint has issues';
2513
+ }
2514
+ // Handle parallel execution recovery options (must happen before main execution)
2515
+ const hasRecoveryOptions = ((_runConfig_tree7 = runConfig.tree) === null || _runConfig_tree7 === void 0 ? void 0 : _runConfig_tree7.markCompleted) || ((_runConfig_tree8 = runConfig.tree) === null || _runConfig_tree8 === void 0 ? void 0 : _runConfig_tree8.skipPackages) || ((_runConfig_tree9 = runConfig.tree) === null || _runConfig_tree9 === void 0 ? void 0 : _runConfig_tree9.retryFailed) || ((_runConfig_tree10 = runConfig.tree) === null || _runConfig_tree10 === void 0 ? void 0 : _runConfig_tree10.skipFailed) || ((_runConfig_tree11 = runConfig.tree) === null || _runConfig_tree11 === void 0 ? void 0 : _runConfig_tree11.resetPackage);
2516
+ if (hasRecoveryOptions && runConfig.tree) {
2517
+ var _runConfig_tree_retry;
2518
+ logger.info('🔧 Applying recovery options...');
2519
+ // Build dependency graph
2520
+ const directories = runConfig.tree.directories || [
2521
+ process.cwd()
2522
+ ];
2523
+ const excludedPatterns = runConfig.tree.exclude || [];
2524
+ let allPackageJsonPaths = [];
2525
+ for (const targetDirectory of directories){
2526
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
2527
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
2528
+ }
2529
+ const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
2530
+ const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
2531
+ if (!recoveryManager) {
2532
+ logger.error('No checkpoint found for recovery');
2533
+ throw new Error('No checkpoint found. Cannot apply recovery options without an existing checkpoint.');
2534
+ }
2535
+ await recoveryManager.applyRecoveryOptions({
2536
+ markCompleted: runConfig.tree.markCompleted,
2537
+ skipPackages: runConfig.tree.skipPackages,
2538
+ retryFailed: runConfig.tree.retryFailed,
2539
+ skipFailed: runConfig.tree.skipFailed,
2540
+ resetPackage: runConfig.tree.resetPackage,
2541
+ maxRetries: (_runConfig_tree_retry = runConfig.tree.retry) === null || _runConfig_tree_retry === void 0 ? void 0 : _runConfig_tree_retry.maxAttempts
2542
+ });
2543
+ logger.info('✅ Recovery options applied');
2544
+ // If not also continuing, just return
2545
+ if (!isContinue) {
2546
+ return 'Recovery options applied. Use --continue to resume execution.';
2547
+ }
2548
+ }
2549
+ // Handle continue mode
2550
+ if (isContinue) {
2551
+ var _runConfig_tree23;
2552
+ // For parallel execution, the checkpoint is managed by DynamicTaskPool/CheckpointManager
2553
+ // For sequential execution, we use the legacy context file
2554
+ const isParallelMode = (_runConfig_tree23 = runConfig.tree) === null || _runConfig_tree23 === void 0 ? void 0 : _runConfig_tree23.parallel;
2555
+ if (!isParallelMode) {
2556
+ // Sequential execution: load legacy context
2557
+ const savedContext = await loadExecutionContext(runConfig.outputDirectory);
2558
+ if (savedContext) {
2559
+ logger.info('Continuing previous tree execution...');
2560
+ logger.info(`Original command: ${savedContext.command}`);
2561
+ logger.info(`Started: ${savedContext.startTime.toISOString()}`);
2562
+ logger.info(`Previously completed: ${savedContext.completedPackages.length}/${savedContext.buildOrder.length} packages`);
2563
+ // Restore state safely
2564
+ let mutexLocked = false;
2565
+ try {
2566
+ await globalStateMutex.lock();
2567
+ mutexLocked = true;
2568
+ publishedVersions = savedContext.publishedVersions;
2569
+ globalStateMutex.unlock();
2570
+ mutexLocked = false;
2571
+ } catch (error) {
2572
+ if (mutexLocked) {
2573
+ globalStateMutex.unlock();
2574
+ }
2575
+ throw error;
2576
+ }
2577
+ executionContext = savedContext;
2578
+ // Use original config but allow some overrides (like dry run)
2579
+ runConfig = {
2580
+ ...savedContext.originalConfig,
2581
+ dryRun: runConfig.dryRun,
2582
+ outputDirectory: runConfig.outputDirectory || savedContext.originalConfig.outputDirectory
2583
+ };
2584
+ } else {
2585
+ logger.warn('No previous execution context found. Starting new execution...');
2586
+ }
2587
+ } else {
2588
+ // Parallel execution: checkpoint is managed by DynamicTaskPool
2589
+ // Just log that we're continuing - the actual checkpoint loading happens in DynamicTaskPool
2590
+ logger.info('Continuing previous parallel execution...');
2591
+ }
2592
+ } else {
2593
+ // Reset published versions tracking for new tree execution
2594
+ publishedVersions = [];
2595
+ executionContext = null;
2596
+ }
2597
+ // Check if we're in built-in command mode (tree command with second argument)
2598
+ const builtInCommand = (_runConfig_tree12 = runConfig.tree) === null || _runConfig_tree12 === void 0 ? void 0 : _runConfig_tree12.builtInCommand;
2599
+ const supportedBuiltInCommands = [
2600
+ 'commit',
2601
+ 'release',
2602
+ 'publish',
2603
+ 'link',
2604
+ 'unlink',
2605
+ 'development',
2606
+ 'branches',
2607
+ 'run',
2608
+ 'checkout',
2609
+ 'updates',
2610
+ 'precommit'
2611
+ ];
2612
+ if (builtInCommand && !supportedBuiltInCommands.includes(builtInCommand)) {
2613
+ throw new Error(`Unsupported built-in command: ${builtInCommand}. Supported commands: ${supportedBuiltInCommands.join(', ')}`);
2614
+ }
2615
+ // Handle run subcommand - convert space-separated scripts to npm run commands
2616
+ if (builtInCommand === 'run') {
2617
+ var _runConfig_tree24;
2618
+ const packageArgument = (_runConfig_tree24 = runConfig.tree) === null || _runConfig_tree24 === void 0 ? void 0 : _runConfig_tree24.packageArgument;
2619
+ if (!packageArgument) {
2620
+ throw new Error('run subcommand requires script names. Usage: kodrdriv tree run "clean build test"');
2621
+ }
2622
+ // Split the package argument by spaces to get individual script names
2623
+ const scripts = packageArgument.trim().split(/\s+/).filter((script)=>script.length > 0);
2624
+ if (scripts.length === 0) {
2625
+ throw new Error('run subcommand requires at least one script name. Usage: kodrdriv tree run "clean build test"');
2626
+ }
2627
+ // Convert to npm run commands joined with &&
2628
+ const npmCommands = scripts.map((script)=>`npm run ${script}`).join(' && ');
2629
+ // Set this as the custom command to run
2630
+ runConfig.tree = {
2631
+ ...runConfig.tree,
2632
+ cmd: npmCommands
2633
+ };
2634
+ // Clear the built-in command since we're now using custom command mode
2635
+ runConfig.tree.builtInCommand = undefined;
2636
+ logger.info(`Converting run subcommand to: ${npmCommands}`);
2637
+ // Store scripts for later validation
2638
+ runConfig.__scriptsToValidate = scripts;
2639
+ }
2640
+ // Determine the target directories - either specified or current working directory
2641
+ const directories = ((_runConfig_tree13 = runConfig.tree) === null || _runConfig_tree13 === void 0 ? void 0 : _runConfig_tree13.directories) || [
2642
+ process.cwd()
2643
+ ];
2644
+ // Handle link status subcommand
2645
+ if (builtInCommand === 'link' && ((_runConfig_tree14 = runConfig.tree) === null || _runConfig_tree14 === void 0 ? void 0 : _runConfig_tree14.packageArgument) === 'status') {
2646
+ // For tree link status, we want to show status across all packages
2647
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running link status across workspace...`);
2648
+ // Create a config that will be passed to the link command
2649
+ const linkConfig = {
2650
+ ...runConfig,
2651
+ tree: {
2652
+ ...runConfig.tree,
2653
+ directories: directories
2654
+ }
2655
+ };
2656
+ try {
2657
+ const result = await execute$4(linkConfig, 'status');
2658
+ return result;
2659
+ } catch (error) {
2660
+ logger.error(`Link status failed: ${error.message}`);
2661
+ throw error;
2662
+ }
2663
+ }
2664
+ // Handle unlink status subcommand
2665
+ if (builtInCommand === 'unlink' && ((_runConfig_tree15 = runConfig.tree) === null || _runConfig_tree15 === void 0 ? void 0 : _runConfig_tree15.packageArgument) === 'status') {
2666
+ // For tree unlink status, we want to show status across all packages
2667
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running unlink status across workspace...`);
2668
+ // Create a config that will be passed to the unlink command
2669
+ const unlinkConfig = {
2670
+ ...runConfig,
2671
+ tree: {
2672
+ ...runConfig.tree,
2673
+ directories: directories
2674
+ }
2675
+ };
2676
+ try {
2677
+ const result = await execute$3(unlinkConfig, 'status');
2678
+ return result;
2679
+ } catch (error) {
2680
+ logger.error(`Unlink status failed: ${error.message}`);
2681
+ throw error;
2682
+ }
2683
+ }
2684
+ if (directories.length === 1) {
2685
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspace at: ${directories[0]}`);
2686
+ } else {
2687
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspaces at: ${directories.join(', ')}`);
2688
+ }
2689
+ try {
2690
+ var _runConfig_tree25, _runConfig_tree26, _runConfig_tree27, _runConfig_tree28;
2691
+ // Get exclusion patterns from config, fallback to empty array
2692
+ const excludedPatterns = ((_runConfig_tree25 = runConfig.tree) === null || _runConfig_tree25 === void 0 ? void 0 : _runConfig_tree25.exclude) || [];
2693
+ if (excludedPatterns.length > 0) {
2694
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Using exclusion patterns: ${excludedPatterns.join(', ')}`);
2695
+ }
2696
+ // Scan for package.json files across all directories
2697
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning for package.json files...`);
2698
+ let allPackageJsonPaths = [];
2699
+ for (const targetDirectory of directories){
2700
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning directory: ${targetDirectory}`);
2701
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
2702
+ allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
2703
+ }
2704
+ const packageJsonPaths = allPackageJsonPaths;
2705
+ if (packageJsonPaths.length === 0) {
2706
+ const directoriesStr = directories.join(', ');
2707
+ const message = `No package.json files found in subdirectories of: ${directoriesStr}`;
2708
+ logger.warn(message);
2709
+ return message;
2710
+ }
2711
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found ${packageJsonPaths.length} package.json files`);
2712
+ // Build dependency graph
2713
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Building dependency graph...`);
2714
+ const dependencyGraph = await buildDependencyGraph(packageJsonPaths);
2715
+ // Perform topological sort to determine build order
2716
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Determining build order...`);
2717
+ let buildOrder = topologicalSort(dependencyGraph);
2718
+ // Handle start-from functionality if specified
2719
+ const startFrom = (_runConfig_tree26 = runConfig.tree) === null || _runConfig_tree26 === void 0 ? void 0 : _runConfig_tree26.startFrom;
2720
+ if (startFrom) {
2721
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for start package: ${startFrom}`);
2722
+ // Resolve the actual package name (can be package name or directory name)
2723
+ let startPackageName = null;
2724
+ for (const [pkgName, pkgInfo] of dependencyGraph.packages){
2725
+ const dirName = path.basename(pkgInfo.path);
2726
+ if (dirName === startFrom || pkgName === startFrom) {
2727
+ startPackageName = pkgName;
2728
+ break;
2729
+ }
2730
+ }
2731
+ if (!startPackageName) {
2732
+ // Check if the package exists but was excluded across all directories
2733
+ let allPackageJsonPathsForCheck = [];
2734
+ for (const targetDirectory of directories){
2735
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
2736
+ allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
2737
+ }
2738
+ let wasExcluded = false;
2739
+ for (const packageJsonPath of allPackageJsonPathsForCheck){
2740
+ try {
2741
+ const packageInfo = await parsePackageJson(packageJsonPath);
2742
+ const dirName = path.basename(packageInfo.path);
2743
+ if (dirName === startFrom || packageInfo.name === startFrom) {
2744
+ // Check if this package was excluded
2745
+ if (shouldExclude(packageJsonPath, excludedPatterns)) {
2746
+ wasExcluded = true;
2747
+ break;
2748
+ }
2749
+ }
2750
+ } catch {
2751
+ continue;
2752
+ }
2753
+ }
2754
+ if (wasExcluded) {
2755
+ const excludedPatternsStr = excludedPatterns.join(', ');
2756
+ throw new Error(`Package directory '${startFrom}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different starting package.`);
2757
+ } else {
2758
+ const availablePackages = buildOrder.map((name)=>{
2759
+ const packageInfo = dependencyGraph.packages.get(name);
2760
+ return `${path.basename(packageInfo.path)} (${name})`;
2761
+ }).join(', ');
2762
+ throw new Error(`Package directory '${startFrom}' not found. Available packages: ${availablePackages}`);
2763
+ }
2764
+ }
2765
+ // Find the start package in the build order and start execution from there
2766
+ const startIndex = buildOrder.findIndex((pkgName)=>pkgName === startPackageName);
2767
+ if (startIndex === -1) {
2768
+ throw new Error(`Package '${startFrom}' not found in build order. This should not happen.`);
2769
+ }
2770
+ // Filter build order to start from the specified package
2771
+ const originalLength = buildOrder.length;
2772
+ buildOrder = buildOrder.slice(startIndex);
2773
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Starting execution from package '${startFrom}' (${buildOrder.length} of ${originalLength} packages remaining).`);
2774
+ }
2775
+ // Handle stop-at functionality if specified
2776
+ const stopAt = (_runConfig_tree27 = runConfig.tree) === null || _runConfig_tree27 === void 0 ? void 0 : _runConfig_tree27.stopAt;
2777
+ if (stopAt) {
2778
+ logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for stop package: ${stopAt}`);
2779
+ // Find the package that matches the stopAt directory name
2780
+ const stopIndex = buildOrder.findIndex((packageName)=>{
2781
+ const packageInfo = dependencyGraph.packages.get(packageName);
2782
+ const dirName = path.basename(packageInfo.path);
2783
+ return dirName === stopAt || packageName === stopAt;
2784
+ });
2785
+ if (stopIndex === -1) {
2786
+ // Check if the package exists but was excluded across all directories
2787
+ let allPackageJsonPathsForCheck = [];
2788
+ for (const targetDirectory of directories){
2789
+ const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
2790
+ allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
2791
+ }
2792
+ let wasExcluded = false;
2793
+ for (const packageJsonPath of allPackageJsonPathsForCheck){
2794
+ try {
2795
+ const packageInfo = await parsePackageJson(packageJsonPath);
2796
+ const dirName = path.basename(packageInfo.path);
2797
+ if (dirName === stopAt || packageInfo.name === stopAt) {
2798
+ // Check if this package was excluded
2799
+ if (shouldExclude(packageJsonPath, excludedPatterns)) {
2800
+ wasExcluded = true;
2801
+ break;
2802
+ }
2803
+ }
2804
+ } catch {
2805
+ continue;
2806
+ }
2807
+ }
2808
+ if (wasExcluded) {
2809
+ const excludedPatternsStr = excludedPatterns.join(', ');
2810
+ throw new Error(`Package directory '${stopAt}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different stop package.`);
2811
+ } else {
2812
+ const availablePackages = buildOrder.map((name)=>{
2813
+ const packageInfo = dependencyGraph.packages.get(name);
2814
+ return `${path.basename(packageInfo.path)} (${name})`;
2815
+ }).join(', ');
2816
+ throw new Error(`Package directory '${stopAt}' not found. Available packages: ${availablePackages}`);
2817
+ }
2818
+ }
2819
+ // Truncate the build order before the stop package (the stop package is not executed)
2820
+ const originalLength = buildOrder.length;
2821
+ buildOrder = buildOrder.slice(0, stopIndex);
2822
+ const stoppedCount = originalLength - stopIndex;
2823
+ if (stoppedCount > 0) {
2824
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Stopping before '${stopAt}' - excluding ${stoppedCount} package${stoppedCount === 1 ? '' : 's'}`);
2825
+ }
2826
+ }
2827
+ // Helper function to determine version scope indicator
2828
// Classify a semver-style range into a compact scope indicator while
// preserving its operator prefix: "^4.4.32" -> "^P" (patch-level),
// "^4.4" -> "^m" (minor-level), "^4" -> "^M" (major-level).
// Anything that does not look like a plain numeric version after the
// operator (e.g. "*", "1.x - 2.x") is returned trimmed but otherwise as-is.
const getVersionScopeIndicator = (versionRange) => {
    const trimmed = versionRange.trim();
    // Everything before the first digit is the operator prefix (^, ~, >=, ...).
    const operatorMatch = trimmed.match(/^([^0-9]*)/);
    const operator = operatorMatch ? operatorMatch[1] : '';
    const numericPart = trimmed.slice(operator.length);
    // The number of dots tells us how deep the version is pinned.
    const dots = (numericPart.match(/\./g) || []).length;
    if (dots >= 2) {
        return `${operator}P`; // full patch version, e.g. "4.4.32"
    }
    if (dots === 1) {
        return `${operator}m`; // minor only, e.g. "4.4"
    }
    if (dots === 0 && /^\d+$/.test(numericPart)) {
        return `${operator}M`; // major only, e.g. "4"
    }
    // Complex or non-standard range: leave it untouched (minus whitespace).
    return trimmed;
};
2851
+ // Helper function to find packages that consume a given package
2852
// Scan every workspace package and return a sorted list of display names for
// the packages that depend on `targetPackageName` in any dependency section
// (dependencies/devDependencies/peerDependencies/optionalDependencies).
// Consumers sharing the target's npm scope are shortened ("@fjell/core" ->
// "./core"), and each entry is suffixed with a version-scope indicator such
// as "(^P)". Packages whose package.json cannot be read or parsed are
// silently skipped — this is a best-effort display helper.
const findConsumingPackagesForBranches = async (targetPackageName, allPackages, storage) => {
    // "@fjell/eslint-config" -> "@fjell/"; unscoped targets get no substitution.
    const scopePrefix = targetPackageName.includes('/')
        ? `${targetPackageName.split('/')[0]}/`
        : null;
    const dependencySections = [
        'dependencies',
        'devDependencies',
        'peerDependencies',
        'optionalDependencies'
    ];
    const consumers = [];
    for (const [name, info] of allPackages) {
        if (name === targetPackageName) continue;
        try {
            const manifestPath = path.join(info.path, 'package.json');
            const rawManifest = await storage.readFile(manifestPath, 'utf-8');
            const manifest = validatePackageJson(safeJsonParse(rawManifest, manifestPath), manifestPath);
            // First section that declares the target wins.
            let declaredRange = null;
            for (const section of dependencySections) {
                const declared = manifest[section] && manifest[section][targetPackageName];
                if (declared) {
                    declaredRange = declared;
                    break;
                }
            }
            if (!declaredRange) continue;
            // Same-scope consumers get the "./" shorthand.
            let display = scopePrefix && name.startsWith(scopePrefix)
                ? `./${name.substring(scopePrefix.length)}`
                : name;
            display += ` (${getVersionScopeIndicator(declaredRange)})`;
            consumers.push(display);
        } catch {
            // Unreadable or invalid package.json: skip this candidate.
            continue;
        }
    }
    return consumers.sort();
};
2895
+ // Handle special "branches" command that displays table
2896
+ if (builtInCommand === 'branches') {
2897
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Branch Status Summary:`);
2898
+ logger.info('');
2899
+ // Calculate column widths for nice formatting
2900
+ let maxNameLength = 'Package'.length;
2901
+ let maxBranchLength = 'Branch'.length;
2902
+ let maxVersionLength = 'Version'.length;
2903
+ let maxStatusLength = 'Status'.length;
2904
+ let maxLinkLength = 'Linked'.length;
2905
+ let maxConsumersLength = 'Consumers'.length;
2906
+ const branchInfos = [];
2907
+ // Create storage instance for consumer lookup
2908
+ const storage = createStorage();
2909
+ // Get globally linked packages once at the beginning
2910
+ const globallyLinkedPackages = await getGloballyLinkedPackages();
2911
+ // ANSI escape codes for progress display
2912
+ const ANSI = {
2913
+ CURSOR_UP: '\x1b[1A',
2914
+ CURSOR_TO_START: '\x1b[0G',
2915
+ CLEAR_LINE: '\x1b[2K',
2916
+ GREEN: '\x1b[32m',
2917
+ BLUE: '\x1b[34m',
2918
+ YELLOW: '\x1b[33m',
2919
+ RESET: '\x1b[0m',
2920
+ BOLD: '\x1b[1m'
2921
+ };
2922
+ // Check if terminal supports ANSI
2923
+ const supportsAnsi = process.stdout.isTTY && process.env.TERM !== 'dumb' && !process.env.NO_COLOR;
2924
+ const totalPackages = buildOrder.length;
2925
+ const concurrency = 5; // Process up to 5 packages at a time
2926
+ let completedCount = 0;
2927
+ let isFirstProgress = true;
2928
+ // Function to update progress display
2929
// Redraw the single-line progress indicator in place (ANSI terminals only).
// Mutates the enclosing `isFirstProgress` flag so subsequent calls first
// erase the previously written line before printing the new one.
const updateProgress = (currentPackage, completed, total) => {
    if (!supportsAnsi) return;
    // After the first draw, rewind the cursor and wipe the old progress line.
    if (!isFirstProgress) {
        process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
    }
    const percent = Math.round(completed / total * 100);
    const filledCells = Math.floor(percent / 5); // 20-cell bar, 5% per cell
    const bar = '█'.repeat(filledCells) + '░'.repeat(20 - filledCells);
    const summary = `${ANSI.BLUE}${ANSI.BOLD}Analyzing packages... ${ANSI.GREEN}[${bar}] ${percent}%${ANSI.RESET} ${ANSI.YELLOW}(${completed}/${total})${ANSI.RESET}`;
    const suffix = currentPackage ? ` - Currently: ${currentPackage}` : '';
    process.stdout.write(`${summary}${suffix}\n`);
    isFirstProgress = false;
};
2942
+ // Function to process a single package
2943
// Build the branch-table row for one package: git branch/status, whether it
// is globally `npm link`ed, and an annotated consumer list. Consumer entries
// gain a trailing "*" when that consumer is actively linking this package,
// and are rendered in red (or tagged " [LINK PROBLEM]" on terminals without
// ANSI support) when link compatibility problems are detected. Any failure
// collapses to an "error" row rather than aborting the whole table.
const processPackage = async (packageName) => {
    const pkg = dependencyGraph.packages.get(packageName);
    // Annotate one consumer display string with active-link / problem markers.
    const annotateConsumer = async (consumer) => {
        // Strip the trailing version-scope indicator, e.g. "core (^P)" -> "core".
        const withoutIndicator = consumer.replace(/ \([^)]+\)$/, '');
        // Undo the "./" scope shorthand to recover the real package name.
        const realName = withoutIndicator.startsWith('./')
            ? withoutIndicator.replace('./', packageName.split('/')[0] + '/')
            : withoutIndicator;
        const consumerInfo = Array.from(dependencyGraph.packages.values()).find((candidate) => candidate.name === realName);
        if (!consumerInfo) return consumer;
        const [activeLinks, linkProblems] = await Promise.all([
            getLinkedDependencies(consumerInfo.path),
            getLinkCompatibilityProblems(consumerInfo.path, dependencyGraph.packages)
        ]);
        let display = consumer;
        if (activeLinks.has(packageName)) {
            display += '*'; // consumer is actively linking to this package
        }
        if (linkProblems.has(packageName)) {
            display = supportsAnsi
                ? `\x1b[31m${display}\x1b[0m` // red highlight
                : display + ' [LINK PROBLEM]'; // plain-text fallback
        }
        return display;
    };
    try {
        // Git status and the consumer scan are independent — run them together.
        const [gitStatus, consumers] = await Promise.all([
            getGitStatusSummary(pkg.path),
            findConsumingPackagesForBranches(packageName, dependencyGraph.packages, storage)
        ]);
        return {
            name: packageName,
            branch: gitStatus.branch,
            version: pkg.version,
            status: gitStatus.status,
            linked: globallyLinkedPackages.has(packageName) ? '✓' : '',
            consumers: await Promise.all(consumers.map((consumer) => annotateConsumer(consumer)))
        };
    } catch (error) {
        logger.warn(`Failed to get git status for ${packageName}: ${error.message}`);
        return {
            name: packageName,
            branch: 'error',
            version: pkg.version,
            status: 'error',
            linked: '✗',
            consumers: [
                'error'
            ]
        };
    }
};
3009
+ // Process packages in batches with progress updates
3010
+ updateProgress('Starting...', 0, totalPackages);
3011
+ for(let i = 0; i < buildOrder.length; i += concurrency){
3012
+ const batch = buildOrder.slice(i, i + concurrency);
3013
+ // Update progress to show current batch
3014
+ const currentBatchStr = batch.length === 1 ? batch[0] : `${batch[0]} + ${batch.length - 1} others`;
3015
+ updateProgress(currentBatchStr, completedCount, totalPackages);
3016
+ // Process batch in parallel
3017
+ const batchResults = await Promise.all(batch.map((packageName)=>processPackage(packageName)));
3018
+ // Add results and update column widths
3019
+ for (const result of batchResults){
3020
+ branchInfos.push(result);
3021
+ maxNameLength = Math.max(maxNameLength, result.name.length);
3022
+ maxBranchLength = Math.max(maxBranchLength, result.branch.length);
3023
+ maxVersionLength = Math.max(maxVersionLength, result.version.length);
3024
+ maxStatusLength = Math.max(maxStatusLength, result.status.length);
3025
+ maxLinkLength = Math.max(maxLinkLength, result.linked.length);
3026
+ // For consumers, calculate the width based on the longest consumer name
3027
+ const maxConsumerLength = result.consumers.length > 0 ? Math.max(...result.consumers.map((c)=>c.length)) : 0;
3028
+ maxConsumersLength = Math.max(maxConsumersLength, maxConsumerLength);
3029
+ }
3030
+ completedCount += batch.length;
3031
+ updateProgress('', completedCount, totalPackages);
3032
+ }
3033
+ // Clear progress line and add spacing
3034
+ if (supportsAnsi && !isFirstProgress) {
3035
+ process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
3036
+ }
3037
+ logger.info(`${ANSI.GREEN}✅ Analysis complete!${ANSI.RESET} Processed ${totalPackages} packages in batches of ${concurrency}.`);
3038
+ logger.info('');
3039
+ // Print header (new order: Package | Branch | Version | Status | Linked | Consumers)
3040
+ const nameHeader = 'Package'.padEnd(maxNameLength);
3041
+ const branchHeader = 'Branch'.padEnd(maxBranchLength);
3042
+ const versionHeader = 'Version'.padEnd(maxVersionLength);
3043
+ const statusHeader = 'Status'.padEnd(maxStatusLength);
3044
+ const linkHeader = 'Linked'.padEnd(maxLinkLength);
3045
+ const consumersHeader = 'Consumers';
3046
+ logger.info(`${nameHeader} | ${branchHeader} | ${versionHeader} | ${statusHeader} | ${linkHeader} | ${consumersHeader}`);
3047
+ logger.info(`${'-'.repeat(maxNameLength)} | ${'-'.repeat(maxBranchLength)} | ${'-'.repeat(maxVersionLength)} | ${'-'.repeat(maxStatusLength)} | ${'-'.repeat(maxLinkLength)} | ${'-'.repeat(9)}`);
3048
+ // Print data rows with multi-line consumers
3049
+ for (const info of branchInfos){
3050
+ const nameCol = info.name.padEnd(maxNameLength);
3051
+ const branchCol = info.branch.padEnd(maxBranchLength);
3052
+ const versionCol = info.version.padEnd(maxVersionLength);
3053
+ const statusCol = info.status.padEnd(maxStatusLength);
3054
+ const linkCol = info.linked.padEnd(maxLinkLength);
3055
+ if (info.consumers.length === 0) {
3056
+ // No consumers - single line
3057
+ logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | `);
3058
+ } else if (info.consumers.length === 1) {
3059
+ // Single consumer - single line
3060
+ logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
3061
+ } else {
3062
+ // Multiple consumers - first consumer on same line, rest on new lines with continuous column separators
3063
+ logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
3064
+ // Additional consumers on separate lines with proper column separators
3065
+ const emptyNameCol = ' '.repeat(maxNameLength);
3066
+ const emptyBranchCol = ' '.repeat(maxBranchLength);
3067
+ const emptyVersionCol = ' '.repeat(maxVersionLength);
3068
+ const emptyStatusCol = ' '.repeat(maxStatusLength);
3069
+ const emptyLinkCol = ' '.repeat(maxLinkLength);
3070
+ for(let i = 1; i < info.consumers.length; i++){
3071
+ logger.info(`${emptyNameCol} | ${emptyBranchCol} | ${emptyVersionCol} | ${emptyStatusCol} | ${emptyLinkCol} | ${info.consumers[i]}`);
3072
+ }
3073
+ }
3074
+ }
3075
+ logger.info('');
3076
+ // Add legend explaining the symbols and colors
3077
+ logger.info('Legend:');
3078
+ logger.info(' * = Consumer is actively linking to this package');
3079
+ logger.info(' (^P) = Patch-level dependency (e.g., "^4.4.32")');
3080
+ logger.info(' (^m) = Minor-level dependency (e.g., "^4.4")');
3081
+ logger.info(' (^M) = Major-level dependency (e.g., "^4")');
3082
+ logger.info(' (~P), (>=M), etc. = Other version prefixes preserved');
3083
+ if (supportsAnsi) {
3084
+ logger.info(' \x1b[31mRed text\x1b[0m = Consumer has link problems (version mismatches) with this package');
3085
+ } else {
3086
+ logger.info(' [LINK PROBLEM] = Consumer has link problems (version mismatches) with this package');
3087
+ }
3088
+ logger.info('');
3089
+ return `Branch status summary for ${branchInfos.length} packages completed.`;
3090
+ }
3091
+ // Handle special "checkout" command that switches all packages to specified branch
3092
+ if (builtInCommand === 'checkout') {
3093
+ var _runConfig_tree29;
3094
+ const targetBranch = (_runConfig_tree29 = runConfig.tree) === null || _runConfig_tree29 === void 0 ? void 0 : _runConfig_tree29.packageArgument;
3095
+ if (!targetBranch) {
3096
+ throw new Error('checkout subcommand requires a branch name. Usage: kodrdriv tree checkout <branch-name>');
3097
+ }
3098
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Workspace Checkout to Branch: ${targetBranch}`);
3099
+ logger.info('');
3100
+ // Phase 1: Safety check - scan all packages for uncommitted changes
3101
+ logger.info('🔍 Phase 1: Checking for uncommitted changes across workspace...');
3102
+ const packagesWithChanges = [];
3103
+ for (const packageName of buildOrder){
3104
+ const packageInfo = dependencyGraph.packages.get(packageName);
3105
+ try {
3106
+ const gitStatus = await getGitStatusSummary(packageInfo.path);
3107
+ const hasProblems = gitStatus.hasUncommittedChanges || gitStatus.hasUnstagedFiles;
3108
+ packagesWithChanges.push({
3109
+ name: packageName,
3110
+ path: packageInfo.path,
3111
+ status: gitStatus.status,
3112
+ hasUncommittedChanges: gitStatus.hasUncommittedChanges,
3113
+ hasUnstagedFiles: gitStatus.hasUnstagedFiles
3114
+ });
3115
+ if (hasProblems) {
3116
+ logger.warn(`⚠️ ${packageName}: ${gitStatus.status}`);
3117
+ } else {
3118
+ logger.verbose(`✅ ${packageName}: clean`);
3119
+ }
3120
+ } catch (error) {
3121
+ logger.warn(`❌ ${packageName}: error checking status - ${error.message}`);
3122
+ packagesWithChanges.push({
3123
+ name: packageName,
3124
+ path: packageInfo.path,
3125
+ status: 'error',
3126
+ hasUncommittedChanges: false,
3127
+ hasUnstagedFiles: false
3128
+ });
3129
+ }
3130
+ }
3131
+ // Check if any packages have uncommitted changes
3132
+ const problemPackages = packagesWithChanges.filter((pkg)=>pkg.hasUncommittedChanges || pkg.hasUnstagedFiles || pkg.status === 'error');
3133
+ if (problemPackages.length > 0) {
3134
+ logger.error(`❌ Cannot proceed with checkout: ${problemPackages.length} packages have uncommitted changes or errors:`);
3135
+ logger.error('');
3136
+ for (const pkg of problemPackages){
3137
+ logger.error(` 📦 ${pkg.name} (${pkg.path}):`);
3138
+ logger.error(` Status: ${pkg.status}`);
3139
+ }
3140
+ logger.error('');
3141
+ logger.error('🔧 To resolve this issue:');
3142
+ logger.error(' 1. Commit or stash changes in the packages listed above');
3143
+ logger.error(' 2. Or use "kodrdriv tree commit" to commit changes across all packages');
3144
+ logger.error(' 3. Then re-run the checkout command');
3145
+ logger.error('');
3146
+ throw new Error(`Workspace checkout blocked: ${problemPackages.length} packages have uncommitted changes`);
3147
+ }
3148
+ logger.info(`✅ Phase 1 complete: All ${packagesWithChanges.length} packages are clean`);
3149
+ logger.info('');
3150
+ // Phase 2: Perform the checkout
3151
+ logger.info(`🔄 Phase 2: Checking out all packages to branch '${targetBranch}'...`);
3152
+ let successCount = 0;
3153
+ const failedPackages = [];
3154
+ for(let i = 0; i < buildOrder.length; i++){
3155
+ const packageName = buildOrder[i];
3156
+ const packageInfo = dependencyGraph.packages.get(packageName);
3157
+ if (isDryRun) {
3158
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: Would checkout ${targetBranch}`);
3159
+ successCount++;
3160
+ } else {
3161
+ try {
3162
+ const originalCwd = process.cwd();
3163
+ process.chdir(packageInfo.path);
3164
+ try {
3165
+ // Check if target branch exists locally
3166
+ let branchExists = false;
3167
+ try {
3168
+ await runSecure('git', [
3169
+ 'rev-parse',
3170
+ '--verify',
3171
+ targetBranch
3172
+ ]);
3173
+ branchExists = true;
3174
+ } catch {
3175
+ // Branch doesn't exist locally
3176
+ branchExists = false;
3177
+ }
3178
+ if (branchExists) {
3179
+ await runSecure('git', [
3180
+ 'checkout',
3181
+ targetBranch
3182
+ ]);
3183
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch}`);
3184
+ } else {
3185
+ // Try to check out branch from remote
3186
+ try {
3187
+ await runSecure('git', [
3188
+ 'checkout',
3189
+ '-b',
3190
+ targetBranch,
3191
+ `origin/${targetBranch}`
3192
+ ]);
3193
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch} from origin`);
3194
+ } catch {
3195
+ // If that fails, create a new branch
3196
+ await runSecure('git', [
3197
+ 'checkout',
3198
+ '-b',
3199
+ targetBranch
3200
+ ]);
3201
+ logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Created new branch ${targetBranch}`);
3202
+ }
3203
+ }
3204
+ successCount++;
3205
+ } finally{
3206
+ process.chdir(originalCwd);
3207
+ }
3208
+ } catch (error) {
3209
+ logger.error(`[${i + 1}/${buildOrder.length}] ${packageName}: ❌ Failed - ${error.message}`);
3210
+ failedPackages.push({
3211
+ name: packageName,
3212
+ error: error.message
3213
+ });
3214
+ }
3215
+ }
3216
+ }
3217
+ // Report results
3218
+ if (failedPackages.length > 0) {
3219
+ logger.error(`❌ Checkout completed with errors: ${successCount}/${buildOrder.length} packages successful`);
3220
+ logger.error('');
3221
+ logger.error('Failed packages:');
3222
+ for (const failed of failedPackages){
3223
+ logger.error(` - ${failed.name}: ${failed.error}`);
3224
+ }
3225
+ throw new Error(`Checkout failed for ${failedPackages.length} packages`);
3226
+ } else {
3227
+ logger.info(`✅ Checkout complete: All ${buildOrder.length} packages successfully checked out to '${targetBranch}'`);
3228
+ return `Workspace checkout complete: ${successCount} packages checked out to '${targetBranch}'`;
3229
+ }
3230
+ }
3231
+ // Display results
3232
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Build order determined:`);
3233
+ let returnOutput = '';
3234
+ if (runConfig.verbose || runConfig.debug) {
3235
+ // Verbose mode: Skip simple format, show detailed format before command execution
3236
+ logger.info(''); // Add spacing
3237
+ const rangeInfo = [];
3238
+ if (startFrom) rangeInfo.push(`starting from ${startFrom}`);
3239
+ if (stopAt) rangeInfo.push(`stopping before ${stopAt}`);
3240
+ const rangeStr = rangeInfo.length > 0 ? ` (${rangeInfo.join(', ')})` : '';
3241
+ logger.info(`Detailed Build Order for ${buildOrder.length} packages${rangeStr}:`);
3242
+ logger.info('==========================================');
3243
+ buildOrder.forEach((packageName, index)=>{
3244
+ const packageInfo = dependencyGraph.packages.get(packageName);
3245
+ const localDeps = Array.from(packageInfo.localDependencies);
3246
+ logger.info(`${index + 1}. ${packageName} (${packageInfo.version})`);
3247
+ logger.info(` Path: ${packageInfo.path}`);
3248
+ if (localDeps.length > 0) {
3249
+ logger.info(` Local Dependencies: ${localDeps.join(', ')}`);
3250
+ } else {
3251
+ logger.info(` Local Dependencies: none`);
3252
+ }
3253
+ logger.info(''); // Add spacing between packages
3254
+ });
3255
+ // Simple return output for verbose mode (no need to repeat detailed info)
3256
+ returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
3257
+ } else {
3258
+ // Non-verbose mode: Show simple build order
3259
+ buildOrder.forEach((packageName, index)=>{
3260
+ const packageInfo = dependencyGraph.packages.get(packageName);
3261
+ const localDeps = Array.from(packageInfo.localDependencies);
3262
+ // Log each step
3263
+ if (localDeps.length > 0) {
3264
+ logger.info(`${index + 1}. ${packageName} (depends on: ${localDeps.join(', ')})`);
3265
+ } else {
3266
+ logger.info(`${index + 1}. ${packageName} (no local dependencies)`);
3267
+ }
3268
+ });
3269
+ // Simple return output for non-verbose mode
3270
+ returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
3271
+ }
3272
+ // Execute command if provided (custom command or built-in command)
3273
+ const cmd = (_runConfig_tree28 = runConfig.tree) === null || _runConfig_tree28 === void 0 ? void 0 : _runConfig_tree28.cmd;
3274
+ // Determine command to execute
3275
+ let commandToRun;
3276
+ let isBuiltInCommand = false;
3277
+ if (builtInCommand) {
3278
+ var _runConfig_tree30, _runConfig_tree31, _runConfig_tree32;
3279
+ // Built-in command mode: shell out to kodrdriv subprocess
3280
+ // Build command with propagated global options
3281
+ const globalOptions = [];
3282
+ // Propagate global flags that should be inherited by subprocesses
3283
+ if (runConfig.debug) globalOptions.push('--debug');
3284
+ if (runConfig.verbose) globalOptions.push('--verbose');
3285
+ if (runConfig.dryRun) globalOptions.push('--dry-run');
3286
+ if (runConfig.overrides) globalOptions.push('--overrides');
3287
+ // Propagate global options with values
3288
+ if (runConfig.model) globalOptions.push(`--model "${runConfig.model}"`);
3289
+ if (runConfig.configDirectory) globalOptions.push(`--config-dir "${runConfig.configDirectory}"`);
3290
+ if (runConfig.outputDirectory) globalOptions.push(`--output-dir "${runConfig.outputDirectory}"`);
3291
+ if (runConfig.preferencesDirectory) globalOptions.push(`--preferences-dir "${runConfig.preferencesDirectory}"`);
3292
+ // Build the command with global options
3293
+ const optionsString = globalOptions.length > 0 ? ` ${globalOptions.join(' ')}` : '';
3294
+ // Add package argument for link/unlink/updates commands
3295
+ const packageArg = (_runConfig_tree30 = runConfig.tree) === null || _runConfig_tree30 === void 0 ? void 0 : _runConfig_tree30.packageArgument;
3296
+ const packageArgString = packageArg && (builtInCommand === 'link' || builtInCommand === 'unlink' || builtInCommand === 'updates') ? ` "${packageArg}"` : '';
3297
+ // Add command-specific options
3298
+ let commandSpecificOptions = '';
3299
+ // Commit command options
3300
+ if (builtInCommand === 'commit') {
3301
+ var _runConfig_commit, _runConfig_commit1, _runConfig_commit2, _runConfig_commit3, _runConfig_commit4, _runConfig_commit5, _runConfig_commit6, _runConfig_commit7, _runConfig_commit8, _runConfig_commit9, _runConfig_commit10, _runConfig_commit11, _runConfig_commit12, _runConfig_commit13, _runConfig_commit14, _runConfig_commit15, _runConfig_commit16;
3302
+ if ((_runConfig_commit = runConfig.commit) === null || _runConfig_commit === void 0 ? void 0 : _runConfig_commit.selfReflection) {
3303
+ commandSpecificOptions += ' --self-reflection';
3304
+ }
3305
+ if ((_runConfig_commit1 = runConfig.commit) === null || _runConfig_commit1 === void 0 ? void 0 : _runConfig_commit1.add) {
3306
+ commandSpecificOptions += ' --add';
3307
+ }
3308
+ if ((_runConfig_commit2 = runConfig.commit) === null || _runConfig_commit2 === void 0 ? void 0 : _runConfig_commit2.cached) {
3309
+ commandSpecificOptions += ' --cached';
3310
+ }
3311
+ if ((_runConfig_commit3 = runConfig.commit) === null || _runConfig_commit3 === void 0 ? void 0 : _runConfig_commit3.interactive) {
3312
+ commandSpecificOptions += ' --interactive';
3313
+ }
3314
+ if ((_runConfig_commit4 = runConfig.commit) === null || _runConfig_commit4 === void 0 ? void 0 : _runConfig_commit4.amend) {
3315
+ commandSpecificOptions += ' --amend';
3316
+ }
3317
+ if ((_runConfig_commit5 = runConfig.commit) === null || _runConfig_commit5 === void 0 ? void 0 : _runConfig_commit5.skipFileCheck) {
3318
+ commandSpecificOptions += ' --skip-file-check';
3319
+ }
3320
+ if ((_runConfig_commit6 = runConfig.commit) === null || _runConfig_commit6 === void 0 ? void 0 : _runConfig_commit6.maxAgenticIterations) {
3321
+ commandSpecificOptions += ` --max-agentic-iterations ${runConfig.commit.maxAgenticIterations}`;
3322
+ }
3323
+ if ((_runConfig_commit7 = runConfig.commit) === null || _runConfig_commit7 === void 0 ? void 0 : _runConfig_commit7.allowCommitSplitting) {
3324
+ commandSpecificOptions += ' --allow-commit-splitting';
3325
+ }
3326
+ if ((_runConfig_commit8 = runConfig.commit) === null || _runConfig_commit8 === void 0 ? void 0 : _runConfig_commit8.messageLimit) {
3327
+ commandSpecificOptions += ` --message-limit ${runConfig.commit.messageLimit}`;
3328
+ }
3329
+ if ((_runConfig_commit9 = runConfig.commit) === null || _runConfig_commit9 === void 0 ? void 0 : _runConfig_commit9.maxDiffBytes) {
3330
+ commandSpecificOptions += ` --max-diff-bytes ${runConfig.commit.maxDiffBytes}`;
3331
+ }
3332
+ if ((_runConfig_commit10 = runConfig.commit) === null || _runConfig_commit10 === void 0 ? void 0 : _runConfig_commit10.direction) {
3333
+ commandSpecificOptions += ` --direction "${runConfig.commit.direction}"`;
3334
+ }
3335
+ if ((_runConfig_commit11 = runConfig.commit) === null || _runConfig_commit11 === void 0 ? void 0 : _runConfig_commit11.context) {
3336
+ commandSpecificOptions += ` --context "${runConfig.commit.context}"`;
3337
+ }
3338
+ if (((_runConfig_commit12 = runConfig.commit) === null || _runConfig_commit12 === void 0 ? void 0 : _runConfig_commit12.contextFiles) && runConfig.commit.contextFiles.length > 0) {
3339
+ commandSpecificOptions += ` --context-files ${runConfig.commit.contextFiles.join(' ')}`;
3340
+ }
3341
+ // Push option can be boolean or string (remote name)
3342
+ if ((_runConfig_commit13 = runConfig.commit) === null || _runConfig_commit13 === void 0 ? void 0 : _runConfig_commit13.push) {
3343
+ if (typeof runConfig.commit.push === 'string') {
3344
+ commandSpecificOptions += ` --push "${runConfig.commit.push}"`;
3345
+ } else {
3346
+ commandSpecificOptions += ' --push';
3347
+ }
3348
+ }
3349
+ // Model-specific options for commit
3350
+ if ((_runConfig_commit14 = runConfig.commit) === null || _runConfig_commit14 === void 0 ? void 0 : _runConfig_commit14.model) {
3351
+ commandSpecificOptions += ` --model "${runConfig.commit.model}"`;
3352
+ }
3353
+ if ((_runConfig_commit15 = runConfig.commit) === null || _runConfig_commit15 === void 0 ? void 0 : _runConfig_commit15.openaiReasoning) {
3354
+ commandSpecificOptions += ` --openai-reasoning ${runConfig.commit.openaiReasoning}`;
3355
+ }
3356
+ if ((_runConfig_commit16 = runConfig.commit) === null || _runConfig_commit16 === void 0 ? void 0 : _runConfig_commit16.openaiMaxOutputTokens) {
3357
+ commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.commit.openaiMaxOutputTokens}`;
3358
+ }
3359
+ }
3360
+ // Release command options (only for direct 'release' command)
3361
+ if (builtInCommand === 'release') {
3362
+ var _runConfig_release, _runConfig_release1, _runConfig_release2, _runConfig_release3, _runConfig_release4, _runConfig_release5, _runConfig_release6, _runConfig_release7, _runConfig_release8, _runConfig_release9, _runConfig_release10, _runConfig_release11, _runConfig_release12, _runConfig_release13, _runConfig_release14;
3363
+ if ((_runConfig_release = runConfig.release) === null || _runConfig_release === void 0 ? void 0 : _runConfig_release.selfReflection) {
3364
+ commandSpecificOptions += ' --self-reflection';
3365
+ }
3366
+ if ((_runConfig_release1 = runConfig.release) === null || _runConfig_release1 === void 0 ? void 0 : _runConfig_release1.maxAgenticIterations) {
3367
+ commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
3368
+ }
3369
+ if ((_runConfig_release2 = runConfig.release) === null || _runConfig_release2 === void 0 ? void 0 : _runConfig_release2.interactive) {
3370
+ commandSpecificOptions += ' --interactive';
3371
+ }
3372
+ if ((_runConfig_release3 = runConfig.release) === null || _runConfig_release3 === void 0 ? void 0 : _runConfig_release3.from) {
3373
+ commandSpecificOptions += ` --from "${runConfig.release.from}"`;
3374
+ }
3375
+ if ((_runConfig_release4 = runConfig.release) === null || _runConfig_release4 === void 0 ? void 0 : _runConfig_release4.to) {
3376
+ commandSpecificOptions += ` --to "${runConfig.release.to}"`;
3377
+ }
3378
+ if ((_runConfig_release5 = runConfig.release) === null || _runConfig_release5 === void 0 ? void 0 : _runConfig_release5.focus) {
3379
+ commandSpecificOptions += ` --focus "${runConfig.release.focus}"`;
3380
+ }
3381
+ if ((_runConfig_release6 = runConfig.release) === null || _runConfig_release6 === void 0 ? void 0 : _runConfig_release6.context) {
3382
+ commandSpecificOptions += ` --context "${runConfig.release.context}"`;
3383
+ }
3384
+ if (((_runConfig_release7 = runConfig.release) === null || _runConfig_release7 === void 0 ? void 0 : _runConfig_release7.contextFiles) && runConfig.release.contextFiles.length > 0) {
3385
+ commandSpecificOptions += ` --context-files ${runConfig.release.contextFiles.join(' ')}`;
3386
+ }
3387
+ if ((_runConfig_release8 = runConfig.release) === null || _runConfig_release8 === void 0 ? void 0 : _runConfig_release8.messageLimit) {
3388
+ commandSpecificOptions += ` --message-limit ${runConfig.release.messageLimit}`;
3389
+ }
3390
+ if ((_runConfig_release9 = runConfig.release) === null || _runConfig_release9 === void 0 ? void 0 : _runConfig_release9.maxDiffBytes) {
3391
+ commandSpecificOptions += ` --max-diff-bytes ${runConfig.release.maxDiffBytes}`;
3392
+ }
3393
+ if ((_runConfig_release10 = runConfig.release) === null || _runConfig_release10 === void 0 ? void 0 : _runConfig_release10.noMilestones) {
3394
+ commandSpecificOptions += ' --no-milestones';
3395
+ }
3396
+ if ((_runConfig_release11 = runConfig.release) === null || _runConfig_release11 === void 0 ? void 0 : _runConfig_release11.fromMain) {
3397
+ commandSpecificOptions += ' --from-main';
3398
+ }
3399
+ // Model-specific options for release
3400
+ if ((_runConfig_release12 = runConfig.release) === null || _runConfig_release12 === void 0 ? void 0 : _runConfig_release12.model) {
3401
+ commandSpecificOptions += ` --model "${runConfig.release.model}"`;
3402
+ }
3403
+ if ((_runConfig_release13 = runConfig.release) === null || _runConfig_release13 === void 0 ? void 0 : _runConfig_release13.openaiReasoning) {
3404
+ commandSpecificOptions += ` --openai-reasoning ${runConfig.release.openaiReasoning}`;
3405
+ }
3406
+ if ((_runConfig_release14 = runConfig.release) === null || _runConfig_release14 === void 0 ? void 0 : _runConfig_release14.openaiMaxOutputTokens) {
3407
+ commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.release.openaiMaxOutputTokens}`;
3408
+ }
3409
+ }
3410
+ // Publish command options (pass self-reflection - publish reads other release config from config file)
3411
+ if (builtInCommand === 'publish') {
3412
+ var _runConfig_release15, _runConfig_release16;
3413
+ if ((_runConfig_release15 = runConfig.release) === null || _runConfig_release15 === void 0 ? void 0 : _runConfig_release15.selfReflection) {
3414
+ commandSpecificOptions += ' --self-reflection';
3415
+ }
3416
+ if ((_runConfig_release16 = runConfig.release) === null || _runConfig_release16 === void 0 ? void 0 : _runConfig_release16.maxAgenticIterations) {
3417
+ commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
3418
+ }
3419
+ // Publish has its own --from, --interactive, --from-main flags (not from release config)
3420
+ }
3421
+ // Unlink command options
3422
+ if (builtInCommand === 'unlink' && ((_runConfig_tree31 = runConfig.tree) === null || _runConfig_tree31 === void 0 ? void 0 : _runConfig_tree31.cleanNodeModules)) {
3423
+ commandSpecificOptions += ' --clean-node-modules';
3424
+ }
3425
+ // Link/Unlink externals
3426
+ if ((builtInCommand === 'link' || builtInCommand === 'unlink') && ((_runConfig_tree32 = runConfig.tree) === null || _runConfig_tree32 === void 0 ? void 0 : _runConfig_tree32.externals) && runConfig.tree.externals.length > 0) {
3427
+ commandSpecificOptions += ` --externals ${runConfig.tree.externals.join(' ')}`;
3428
+ }
3429
+ commandToRun = `kodrdriv ${builtInCommand}${optionsString}${packageArgString}${commandSpecificOptions}`;
3430
+ isBuiltInCommand = true;
3431
+ } else if (cmd) {
3432
+ // Custom command mode
3433
+ commandToRun = cmd;
3434
+ }
3435
+ if (commandToRun) {
3436
+ var _runConfig_tree33, _runConfig_tree34;
3437
+ // Validate scripts for run command before execution
3438
+ const scriptsToValidate = runConfig.__scriptsToValidate;
3439
+ if (scriptsToValidate && scriptsToValidate.length > 0) {
3440
+ logger.info(`🔍 Validating scripts before execution: ${scriptsToValidate.join(', ')}`);
3441
+ const validation = await validateScripts(dependencyGraph.packages, scriptsToValidate);
3442
+ if (!validation.valid) {
3443
+ logger.error('');
3444
+ logger.error('❌ Script validation failed. Cannot proceed with execution.');
3445
+ logger.error('');
3446
+ logger.error('💡 To fix this:');
3447
+ logger.error(' 1. Add the missing scripts to the package.json files');
3448
+ logger.error(' 2. Or exclude packages that don\'t need these scripts using --exclude');
3449
+ logger.error(' 3. Or run individual packages that have the required scripts');
3450
+ logger.error('');
3451
+ throw new Error('Script validation failed. See details above.');
3452
+ }
3453
+ }
3454
+ // Validate command for parallel execution if parallel mode is enabled
3455
+ if ((_runConfig_tree33 = runConfig.tree) === null || _runConfig_tree33 === void 0 ? void 0 : _runConfig_tree33.parallel) {
3456
+ const { CommandValidator } = await import('@eldrforge/tree-execution');
3457
+ const validation = CommandValidator.validateForParallel(commandToRun, builtInCommand);
3458
+ CommandValidator.logValidation(validation);
3459
+ if (!validation.valid) {
3460
+ logger.error('');
3461
+ logger.error('Cannot proceed with parallel execution due to validation errors.');
3462
+ logger.error('Run without --parallel flag to execute sequentially.');
3463
+ throw new Error('Command validation failed for parallel execution');
3464
+ }
3465
+ // Apply recommended concurrency if not explicitly set
3466
+ if (!runConfig.tree.maxConcurrency) {
3467
+ const os = await import('os');
3468
+ const recommended = CommandValidator.getRecommendedConcurrency(builtInCommand, os.cpus().length, commandToRun);
3469
+ if (recommended !== os.cpus().length) {
3470
+ const reason = builtInCommand ? builtInCommand : `custom command "${commandToRun}"`;
3471
+ logger.info(`💡 Using recommended concurrency for ${reason}: ${recommended}`);
3472
+ runConfig.tree.maxConcurrency = recommended;
3473
+ }
3474
+ }
3475
+ }
3476
+ // Create set of all package names for inter-project dependency detection
3477
+ const allPackageNames = new Set(Array.from(dependencyGraph.packages.keys()));
3478
+ // Initialize execution context if not continuing
3479
+ if (!executionContext) {
3480
+ executionContext = {
3481
+ command: commandToRun,
3482
+ originalConfig: runConfig,
3483
+ publishedVersions: [],
3484
+ completedPackages: [],
3485
+ buildOrder: buildOrder,
3486
+ startTime: new Date(),
3487
+ lastUpdateTime: new Date()
3488
+ };
3489
+ // Save initial context for commands that support continuation
3490
+ if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
3491
+ await saveExecutionContext(executionContext, runConfig.outputDirectory);
3492
+ }
3493
+ }
3494
+ // Add spacing before command execution
3495
+ logger.info('');
3496
+ const executionDescription = isBuiltInCommand ? `built-in command "${builtInCommand}"` : `"${commandToRun}"`;
3497
+ logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Executing ${executionDescription} in ${buildOrder.length} packages...`);
3498
+ // Add detailed multi-project execution context for debug mode
3499
+ if (runConfig.debug) {
3500
+ var _runConfig_tree35, _runConfig_tree36;
3501
+ logger.debug('MULTI_PROJECT_PLAN: Execution plan initialized | Total Packages: %d | Command: %s | Built-in: %s | Dry Run: %s | Parallel: %s', buildOrder.length, commandToRun, isBuiltInCommand, isDryRun, ((_runConfig_tree35 = runConfig.tree) === null || _runConfig_tree35 === void 0 ? void 0 : _runConfig_tree35.parallel) || false);
3502
+ // Log package execution order with dependencies
3503
+ logger.debug('MULTI_PROJECT_ORDER: Package execution sequence:');
3504
+ buildOrder.forEach((pkgName, idx)=>{
3505
+ const pkgInfo = dependencyGraph.packages.get(pkgName);
3506
+ if (pkgInfo) {
3507
+ const deps = Array.isArray(pkgInfo.dependencies) ? pkgInfo.dependencies : [];
3508
+ const depStr = deps.length > 0 ? ` | Dependencies: [${deps.join(', ')}]` : ' | Dependencies: none';
3509
+ logger.debug(' %d. %s%s', idx + 1, pkgName, depStr);
3510
+ }
3511
+ });
3512
+ // Log dependency levels for parallel execution understanding
3513
+ const levels = new Map();
3514
+ const calculateLevels = (pkg, visited = new Set())=>{
3515
+ if (levels.has(pkg)) return levels.get(pkg);
3516
+ if (visited.has(pkg)) return 0; // Circular dependency
3517
+ visited.add(pkg);
3518
+ const pkgInfo = dependencyGraph.packages.get(pkg);
3519
+ const deps = Array.isArray(pkgInfo === null || pkgInfo === void 0 ? void 0 : pkgInfo.dependencies) ? pkgInfo.dependencies : [];
3520
+ if (!pkgInfo || deps.length === 0) {
3521
+ levels.set(pkg, 0);
3522
+ return 0;
3523
+ }
3524
+ const maxDepLevel = Math.max(...deps.map((dep)=>calculateLevels(dep, new Set(visited))));
3525
+ const level = maxDepLevel + 1;
3526
+ levels.set(pkg, level);
3527
+ return level;
3528
+ };
3529
+ buildOrder.forEach((pkg)=>calculateLevels(pkg));
3530
+ const maxLevel = Math.max(...Array.from(levels.values()));
3531
+ logger.debug('MULTI_PROJECT_LEVELS: Dependency depth analysis | Max Depth: %d levels', maxLevel + 1);
3532
+ for(let level = 0; level <= maxLevel; level++){
3533
+ const packagesAtLevel = buildOrder.filter((pkg)=>levels.get(pkg) === level);
3534
+ logger.debug(' Level %d (%d packages): %s', level, packagesAtLevel.length, packagesAtLevel.join(', '));
3535
+ }
3536
+ if ((_runConfig_tree36 = runConfig.tree) === null || _runConfig_tree36 === void 0 ? void 0 : _runConfig_tree36.parallel) {
3537
+ var _runConfig_tree_retry1;
3538
+ const os = await import('os');
3539
+ const concurrency = runConfig.tree.maxConcurrency || os.cpus().length;
3540
+ logger.debug('MULTI_PROJECT_PARALLEL: Parallel execution configuration | Max Concurrency: %d | Retry Attempts: %d', concurrency, ((_runConfig_tree_retry1 = runConfig.tree.retry) === null || _runConfig_tree_retry1 === void 0 ? void 0 : _runConfig_tree_retry1.maxAttempts) || 3);
3541
+ }
3542
+ if (isContinue) {
3543
+ const completed = (executionContext === null || executionContext === void 0 ? void 0 : executionContext.completedPackages.length) || 0;
3544
+ logger.debug('MULTI_PROJECT_RESUME: Continuing previous execution | Completed: %d | Remaining: %d', completed, buildOrder.length - completed);
3545
+ }
3546
+ }
3547
+ // Show info for publish commands
3548
+ if (isBuiltInCommand && builtInCommand === 'publish') {
3549
+ logger.info('Inter-project dependencies will be automatically updated before each publish.');
3550
+ }
3551
+ let successCount = 0;
3552
+ let failedPackage = null;
3553
+ // If continuing, start from where we left off
3554
+ const startIndex = isContinue && executionContext ? executionContext.completedPackages.length : 0;
3555
+ // Check if parallel execution is enabled
3556
+ if ((_runConfig_tree34 = runConfig.tree) === null || _runConfig_tree34 === void 0 ? void 0 : _runConfig_tree34.parallel) {
3557
+ var _runConfig_tree_retry2, _runConfig_tree_retry3, _runConfig_tree_retry4, _runConfig_tree_retry5;
3558
+ logger.info('🚀 Using parallel execution mode');
3559
+ // If dry run, show preview instead of executing
3560
+ if (isDryRun) {
3561
+ const preview = await generateDryRunPreview(dependencyGraph, buildOrder, commandToRun, runConfig);
3562
+ return preview;
3563
+ }
3564
+ // Import parallel execution components
3565
+ const { TreeExecutionAdapter, createParallelProgressLogger, formatParallelResult } = await import('@eldrforge/tree-execution');
3566
+ const os = await import('os');
3567
+ // Create task pool
3568
+ const adapter = new TreeExecutionAdapter({
3569
+ graph: dependencyGraph,
3570
+ maxConcurrency: runConfig.tree.maxConcurrency || os.cpus().length,
3571
+ command: commandToRun,
3572
+ config: runConfig,
3573
+ checkpointPath: runConfig.outputDirectory,
3574
+ continue: isContinue,
3575
+ maxRetries: ((_runConfig_tree_retry2 = runConfig.tree.retry) === null || _runConfig_tree_retry2 === void 0 ? void 0 : _runConfig_tree_retry2.maxAttempts) || 3,
3576
+ initialRetryDelay: ((_runConfig_tree_retry3 = runConfig.tree.retry) === null || _runConfig_tree_retry3 === void 0 ? void 0 : _runConfig_tree_retry3.initialDelayMs) || 5000,
3577
+ maxRetryDelay: ((_runConfig_tree_retry4 = runConfig.tree.retry) === null || _runConfig_tree_retry4 === void 0 ? void 0 : _runConfig_tree_retry4.maxDelayMs) || 60000,
3578
+ backoffMultiplier: ((_runConfig_tree_retry5 = runConfig.tree.retry) === null || _runConfig_tree_retry5 === void 0 ? void 0 : _runConfig_tree_retry5.backoffMultiplier) || 2
3579
+ }, executePackage);
3580
+ // Set up progress logging
3581
+ createParallelProgressLogger(adapter.getPool(), runConfig);
3582
+ // Execute
3583
+ const result = await adapter.execute();
3584
+ // Format and return result
3585
+ const formattedResult = formatParallelResult(result);
3586
+ return formattedResult;
3587
+ }
3588
+ // Sequential execution
3589
+ const executionStartTime = Date.now();
3590
+ for(let i = startIndex; i < buildOrder.length; i++){
3591
+ const packageName = buildOrder[i];
3592
+ // Skip if already completed (in continue mode)
3593
+ if (executionContext && executionContext.completedPackages.includes(packageName)) {
3594
+ successCount++;
3595
+ continue;
3596
+ }
3597
+ const packageInfo = dependencyGraph.packages.get(packageName);
3598
+ const packageLogger = createPackageLogger(packageName, i + 1, buildOrder.length, isDryRun);
3599
+ const result = await executePackage(packageName, packageInfo, commandToRun, runConfig, isDryRun, i, buildOrder.length, allPackageNames, isBuiltInCommand);
3600
+ if (result.success) {
3601
+ successCount++;
3602
+ // Update context
3603
+ if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
3604
+ executionContext.completedPackages.push(packageName);
3605
+ executionContext.publishedVersions = publishedVersions;
3606
+ executionContext.lastUpdateTime = new Date();
3607
+ await saveExecutionContext(executionContext, runConfig.outputDirectory);
3608
+ }
3609
+ // Add spacing between packages (except after the last one)
3610
+ if (i < buildOrder.length - 1) {
3611
+ logger.info('');
3612
+ logger.info('');
3613
+ }
3614
+ } else {
3615
+ failedPackage = packageName;
3616
+ const formattedError = formatSubprojectError(packageName, result.error, packageInfo, i + 1, buildOrder.length);
3617
+ if (!isDryRun) {
3618
+ var _result_error;
3619
+ packageLogger.error(`Execution failed`);
3620
+ logger.error(formattedError);
3621
+ logger.error(`Failed after ${successCount} successful packages.`);
3622
+ // Special handling for timeout errors
3623
+ if (result.isTimeoutError) {
3624
+ logger.error('');
3625
+ logger.error('⏰ TIMEOUT DETECTED: This appears to be a timeout error.');
3626
+ logger.error(' This commonly happens when PR checks take longer than expected.');
3627
+ logger.error(' The execution context has been saved for recovery.');
3628
+ logger.error('');
3629
+ // Save context even on timeout for recovery
3630
+ if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run')) {
3631
+ executionContext.completedPackages.push(packageName);
3632
+ executionContext.publishedVersions = publishedVersions;
3633
+ executionContext.lastUpdateTime = new Date();
3634
+ await saveExecutionContext(executionContext, runConfig.outputDirectory);
3635
+ logger.info('💾 Execution context saved for recovery.');
3636
+ }
3637
+ // For publish commands, provide specific guidance about CI/CD setup
3638
+ if (builtInCommand === 'publish') {
3639
+ logger.error('');
3640
+ logger.error('💡 PUBLISH TIMEOUT TROUBLESHOOTING:');
3641
+ logger.error(' This project may not have CI/CD workflows configured.');
3642
+ logger.error(' Common solutions:');
3643
+ logger.error(' 1. Set up GitHub Actions workflows for this repository');
3644
+ logger.error(' 2. Use --sendit flag to skip user confirmation:');
3645
+ logger.error(` kodrdriv tree publish --sendit`);
3646
+ logger.error(' 3. Or manually promote this package:');
3647
+ logger.error(` kodrdriv tree publish --promote ${packageName}`);
3648
+ logger.error('');
3649
+ }
3650
+ }
3651
+ logger.error(`To resume from this point, run:`);
3652
+ if (isBuiltInCommand) {
3653
+ logger.error(` kodrdriv tree ${builtInCommand} --continue`);
3654
+ } else {
3655
+ logger.error(` kodrdriv tree --continue --cmd "${commandToRun}"`);
3656
+ }
3657
+ // For timeout errors, provide additional recovery instructions
3658
+ if (result.isTimeoutError) {
3659
+ logger.error('');
3660
+ logger.error('🔧 RECOVERY OPTIONS:');
3661
+ if (builtInCommand === 'publish') {
3662
+ logger.error(' 1. Wait for the PR checks to complete, then run:');
3663
+ logger.error(` cd ${packageInfo.path}`);
3664
+ logger.error(` kodrdriv publish`);
3665
+ logger.error(' 2. After the individual publish completes, run:');
3666
+ logger.error(` kodrdriv tree ${builtInCommand} --continue`);
3667
+ } else {
3668
+ logger.error(' 1. Fix any issues in the package, then run:');
3669
+ logger.error(` cd ${packageInfo.path}`);
3670
+ logger.error(` ${commandToRun}`);
3671
+ logger.error(' 2. After the command completes successfully, run:');
3672
+ logger.error(` kodrdriv tree ${builtInCommand} --continue`);
3673
+ }
3674
+ logger.error(' 3. Or promote this package to completed status:');
3675
+ logger.error(` kodrdriv tree ${builtInCommand} --promote ${packageName}`);
3676
+ logger.error(' 4. Or manually edit .kodrdriv-context to mark this package as completed');
3677
+ }
3678
+ // Add clear error summary at the very end
3679
+ logger.error('');
3680
+ logger.error('📋 ERROR SUMMARY:');
3681
+ logger.error(` Project that failed: ${packageName}`);
3682
+ logger.error(` Location: ${packageInfo.path}`);
3683
+ logger.error(` Position in tree: ${i + 1} of ${buildOrder.length} packages`);
3684
+ logger.error(` What failed: ${((_result_error = result.error) === null || _result_error === void 0 ? void 0 : _result_error.message) || 'Unknown error'}`);
3685
+ logger.error('');
3686
+ throw new Error(`Command failed in package ${packageName}`);
3687
+ }
3688
+ break;
3689
+ }
3690
+ }
3691
+ if (!failedPackage) {
3692
+ const totalExecutionTime = Date.now() - executionStartTime;
3693
+ const totalSeconds = (totalExecutionTime / 1000).toFixed(1);
3694
+ const totalMinutes = (totalExecutionTime / 60000).toFixed(1);
3695
+ const timeDisplay = totalExecutionTime < 60000 ? `${totalSeconds}s` : `${totalMinutes}min (${totalSeconds}s)`;
3696
+ logger.info('');
3697
+ logger.info('═══════════════════════════════════════════════════════════');
3698
+ const summary = `${isDryRun ? 'DRY RUN: ' : ''}All ${buildOrder.length} packages completed successfully! 🎉`;
3699
+ logger.info(summary);
3700
+ logger.info(`⏱️ Total execution time: ${timeDisplay}`);
3701
+ logger.info(`📦 Packages processed: ${successCount}/${buildOrder.length}`);
3702
+ logger.info('═══════════════════════════════════════════════════════════');
3703
+ logger.info('');
3704
+ // Clean up context on successful completion
3705
+ if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
3706
+ await cleanupContext(runConfig.outputDirectory);
3707
+ }
3708
+ return returnOutput; // Don't duplicate the summary in return string
3709
+ }
3710
+ }
3711
+ return returnOutput;
3712
+ } catch (error) {
3713
+ const errorMessage = `Failed to analyze workspace: ${error.message}`;
3714
+ logger.error(errorMessage);
3715
+ throw new Error(errorMessage);
3716
+ } finally{
3717
+ // Intentionally preserve the mutex across executions to support multiple runs in the same process (e.g., test suite)
3718
+ // Do not destroy here; the process lifecycle will clean up resources.
3719
+ }
3720
+ };
3721
+
3722
/**
 * Discover all package.json files in the workspace.
 *
 * Each directory is checked for a package.json; directories without one are
 * scanned one level down (recursing via this same function), skipping dot
 * directories and node_modules. Only packages with a `name` field are kept.
 *
 * @param {string[]} directories - Root directories to scan
 * @param {object} logger - Logger with verbose/debug/warn methods
 * @returns {Promise<Array<{name: string, version: string, packageJsonPath: string}>>}
 */ const discoverPackages = async (directories, logger)=>{
    const storage = createStorage();
    const discovered = [];
    for (const directory of directories){
        logger.verbose(`Scanning directory: ${directory}`);
        try {
            const packageJsonPath = path.join(directory, 'package.json');
            const hasPackageJson = await storage.exists(packageJsonPath);
            if (hasPackageJson) {
                const raw = await storage.readFile(packageJsonPath, 'utf-8');
                const packageJson = validatePackageJson(safeJsonParse(raw, packageJsonPath), packageJsonPath);
                // Unnamed packages (e.g. private root manifests) are ignored
                if (packageJson.name) {
                    discovered.push({
                        name: packageJson.name,
                        version: packageJson.version,
                        packageJsonPath
                    });
                    logger.verbose(`Found package: ${packageJson.name}@${packageJson.version}`);
                }
                continue;
            }
            // No package.json at this level; look one directory deeper
            try {
                const entries = await fs.readdir(directory, {
                    withFileTypes: true
                });
                for (const entry of entries){
                    const isScannable = entry.isDirectory() && !entry.name.startsWith('.') && entry.name !== 'node_modules';
                    if (!isScannable) continue;
                    const nested = await discoverPackages([
                        path.join(directory, entry.name)
                    ], logger);
                    discovered.push(...nested);
                }
            } catch (error) {
                logger.debug(`Could not scan subdirectories in ${directory}: ${error}`);
            }
        } catch (error) {
            logger.warn(`VERSIONS_DIR_PROCESS_FAILED: Failed to process directory | Directory: ${directory} | Error: ${error.message}`);
        }
    }
    return discovered;
};
3768
/**
 * Extract scope from package name (e.g., "@eldrforge/package" -> "@eldrforge").
 * Returns null for unscoped names and for bare scopes with no "/name" part.
 */ const getPackageScope = (packageName)=>{
    if (!packageName.startsWith('@')) {
        return null;
    }
    const slashIndex = packageName.indexOf('/');
    // A scoped name must have at least "@scope/name"; "@scope" alone is not scoped
    return slashIndex > 0 ? packageName.slice(0, slashIndex) : null;
};
3779
/**
 * Normalize version string to major.minor format (remove patch version).
 * Preserves any range prefix (^, ~, >=, ...) and any suffix after the patch
 * (e.g. prerelease tags). Strings that do not contain a major.minor number
 * are returned unchanged.
 */ const normalizeToMinorVersion = (versionString)=>{
    const parts = versionString.match(/^([^0-9]*)([0-9]+\.[0-9]+)(\.[0-9]+)?(.*)$/);
    if (!parts) {
        // Not a recognizable version pattern; leave it untouched
        return versionString;
    }
    const prefix = parts[1];
    const majorMinor = parts[2];
    const suffix = parts[4] || '';
    return `${prefix}${majorMinor}${suffix}`;
};
3791
/**
 * Update dependencies in a package.json to normalize same-scope dependencies
 * to major.minor format.
 *
 * Reads the package.json at packageInfo.packageJsonPath and, for every entry
 * in dependencies/devDependencies/peerDependencies whose npm scope matches
 * this package's own scope, rewrites the version range to major.minor form
 * via normalizeToMinorVersion. Unscoped packages are skipped entirely.
 *
 * Fix: removed the `sameScopePackageNames` Set that was built from
 * `allPackages` but never read — matching is done purely by scope.
 *
 * @param packageInfo - { name, version, packageJsonPath } of the package to update
 * @param allPackages - all discovered packages (retained for interface
 *                      compatibility; not consulted for matching)
 * @param isDryRun - when true, log what would change but do not write the file
 * @param logger - logger for progress/diagnostic output
 * @returns true when at least one dependency required normalization (also in
 *          dry-run mode); false on no changes or on read/parse/write failure
 */ const updateDependenciesMinor = async (packageInfo, allPackages, isDryRun, logger)=>{
    const storage = createStorage();
    const currentScope = getPackageScope(packageInfo.name);
    if (!currentScope) {
        // Only scoped packages participate in same-scope normalization.
        logger.verbose(`Skipping ${packageInfo.name} - not a scoped package`);
        return false;
    }
    logger.verbose(`Processing ${packageInfo.name} for scope ${currentScope}`);
    try {
        const packageJsonContent = await storage.readFile(packageInfo.packageJsonPath, 'utf-8');
        const parsed = safeJsonParse(packageJsonContent, packageInfo.packageJsonPath);
        const packageJson = validatePackageJson(parsed, packageInfo.packageJsonPath);
        const sectionsToUpdate = [
            'dependencies',
            'devDependencies',
            'peerDependencies'
        ];
        let hasChanges = false;
        for (const section of sectionsToUpdate){
            const deps = packageJson[section];
            if (!deps) continue;
            for (const [depName, currentVersion] of Object.entries(deps)){
                // Update if this is a same-scope dependency (check scope, not just discovered packages)
                const depScope = getPackageScope(depName);
                if (depScope === currentScope) {
                    const normalizedVersion = normalizeToMinorVersion(currentVersion);
                    if (currentVersion !== normalizedVersion) {
                        if (isDryRun) {
                            logger.info(`VERSIONS_WOULD_NORMALIZE: Would normalize dependency version | Mode: dry-run | Section: ${section} | Dependency: ${depName} | Current: ${currentVersion} | Normalized: ${normalizedVersion}`);
                        } else {
                            logger.info(`VERSIONS_NORMALIZING: Normalizing dependency version | Section: ${section} | Dependency: ${depName} | Current: ${currentVersion} | Normalized: ${normalizedVersion}`);
                            deps[depName] = normalizedVersion;
                        }
                        hasChanges = true;
                    }
                }
            }
        }
        if (hasChanges && !isDryRun) {
            // Persist the mutated package.json with 2-space indent and a trailing newline.
            await storage.writeFile(packageInfo.packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
            logger.info(`VERSIONS_PACKAGE_UPDATED: Updated dependencies in package | Package: ${packageInfo.name} | Status: saved`);
        }
        return hasChanges;
    } catch (error) {
        // Best-effort per package: report the failure and let the caller continue.
        logger.warn(`VERSIONS_PACKAGE_UPDATE_FAILED: Failed to update dependencies | Package: ${packageInfo.name} | Error: ${error.message}`);
        return false;
    }
};
3850
/**
 * Execute the versions minor command.
 *
 * Discovers packages under the configured directories
 * (runConfig.versions.directories, falling back to
 * runConfig.contextDirectories, then process.cwd()), groups them by npm
 * scope, and normalizes every same-scope dependency to major.minor format.
 *
 * Fix: the VERSIONS_NO_PACKAGES warning previously reported
 * runConfig.tree?.directories — a different command's configuration, usually
 * empty — instead of the directories actually scanned.
 *
 * @param runConfig - command configuration; honors runConfig.dryRun
 * @returns a human-readable summary of what was (or would be) updated
 */ const executeMinor = async (runConfig)=>{
    var _runConfig_versions;
    const logger = getLogger();
    const isDryRun = runConfig.dryRun || false;
    logger.info('VERSIONS_NORMALIZE_STARTING: Normalizing same-scope dependencies | Format: major.minor | Purpose: Standardize version format across packages');
    // Determine directories to scan, in priority order: versions config,
    // context directories, current working directory.
    const directories = ((_runConfig_versions = runConfig.versions) === null || _runConfig_versions === void 0 ? void 0 : _runConfig_versions.directories) || runConfig.contextDirectories || [
        process.cwd()
    ];
    if (directories.length === 0) {
        directories.push(process.cwd());
    }
    logger.verbose(`Scanning directories: ${directories.join(', ')}`);
    // Discover all packages
    const allPackages = await discoverPackages(directories, logger);
    if (allPackages.length === 0) {
        // Report the directories that were actually scanned.
        logger.warn('VERSIONS_NO_PACKAGES: No packages found in specified directories | Directories: ' + directories.join(', ') + ' | Action: Nothing to normalize');
        return 'No packages found to process.';
    }
    logger.info(`VERSIONS_PACKAGES_FOUND: Found packages for normalization | Count: ${allPackages.length} | Status: Analyzing`);
    // Group packages by scope; unscoped packages are collected only for reporting.
    const packagesByScope = new Map();
    const unscopedPackages = [];
    for (const pkg of allPackages){
        const scope = getPackageScope(pkg.name);
        if (scope) {
            if (!packagesByScope.has(scope)) {
                packagesByScope.set(scope, []);
            }
            packagesByScope.get(scope).push(pkg);
        } else {
            unscopedPackages.push(pkg);
        }
    }
    logger.info(`VERSIONS_SCOPES_FOUND: Found package scopes | Count: ${packagesByScope.size} | Scopes: ${Array.from(packagesByScope.keys()).join(', ')}`);
    if (unscopedPackages.length > 0) {
        logger.info(`VERSIONS_UNSCOPED_PACKAGES: Found unscoped packages | Count: ${unscopedPackages.length} | Action: Will be skipped | Reason: Only scoped packages supported`);
        // Log each unscoped package being skipped
        for (const pkg of unscopedPackages){
            logger.verbose(`Skipping ${pkg.name} - not a scoped package`);
        }
    }
    let totalUpdated = 0;
    let totalChanges = 0;
    // Process each scope; totalUpdated counts all processed packages,
    // totalChanges only those that actually needed normalization.
    for (const [scope, packages] of packagesByScope){
        logger.info(`\nVERSIONS_SCOPE_PROCESSING: Processing packages in scope | Scope: ${scope} | Package Count: ${packages.length} | Action: Normalize versions`);
        for (const pkg of packages){
            const hasChanges = await updateDependenciesMinor(pkg, allPackages, isDryRun, logger);
            if (hasChanges) {
                totalChanges++;
            }
        }
        totalUpdated += packages.length;
    }
    const verb = isDryRun ? 'Would update' : 'Updated';
    const summary = `${verb} ${totalChanges} of ${totalUpdated} packages with dependency changes.`;
    if (isDryRun) {
        logger.info(`\nVERSIONS_DRY_RUN_COMPLETE: Dry run completed | Mode: dry-run | Summary: ${summary}`);
        return `Dry run complete. ${summary}`;
    } else {
        logger.info(`\nVERSIONS_UPDATE_COMPLETE: Dependencies updated successfully | Status: completed | Summary: ${summary}`);
        return `Dependencies updated successfully. ${summary}`;
    }
};
3918
/**
 * Main entry point for the versions command. Dispatches to the requested
 * subcommand handler; only "minor" is currently supported.
 */ const execute = async (runConfig)=>{
    const versionsConfig = runConfig.versions;
    const subcommand = versionsConfig === null || versionsConfig === void 0 ? void 0 : versionsConfig.subcommand;
    if (!subcommand) {
        throw new Error('Versions command requires a subcommand. Use: kodrdriv versions minor');
    }
    if (subcommand === 'minor') {
        return await executeMinor(runConfig);
    }
    // Any other subcommand is rejected explicitly.
    throw new Error(`Unknown versions subcommand: ${subcommand}. Supported: minor`);
};
3933
+
3934
// Check if npm install is needed by examining lock file and node_modules.
// Returns { needed, reason }; on any inspection error it errs on the side
// of installing.
const isNpmInstallNeeded = async (storage)=>{
    const logger = getLogger();
    const timer = PerformanceTimer.start(logger, 'Checking if npm install is needed');
    try {
        // Without a lock file the installed dependency state is unknown.
        const lockFilePresent = await storage.exists('package-lock.json');
        if (!lockFilePresent) {
            timer.end('npm install needed - no lock file');
            return { needed: true, reason: 'No package-lock.json found' };
        }
        // node_modules must exist ...
        const nodeModulesPresent = await storage.exists('node_modules');
        if (!nodeModulesPresent) {
            timer.end('npm install needed - no node_modules');
            return { needed: true, reason: 'No node_modules directory found' };
        }
        // ... and contain at least a handful of entries to count as populated.
        const nodeModulesEntries = await storage.listFiles('node_modules');
        if (nodeModulesEntries.length < 3) {
            timer.end('npm install needed - empty node_modules');
            return { needed: true, reason: 'node_modules appears empty or incomplete' };
        }
        // With no package.json there is nothing to install for.
        // NOTE(review): despite the original "timestamps" comment, only
        // existence is checked here — package.json mtime is never compared.
        const packageJsonPresent = await storage.exists('package.json');
        if (!packageJsonPresent) {
            timer.end('npm install not needed - no package.json');
            return { needed: false, reason: 'No package.json found' };
        }
        timer.end('npm install not needed - appears up to date');
        return { needed: false, reason: 'Dependencies appear to be up to date' };
    } catch (error) {
        // If the checks themselves fail, install to be safe.
        timer.end(`npm install check failed: ${error.message}`);
        logger.debug(`Failed to check npm install status: ${error.message}`);
        return { needed: true, reason: 'Could not verify dependency status, installing to be safe' };
    }
};
3989
// Run npm install with optimizations.
//
// Options:
//   skipIfNotNeeded (default true) - consult isNpmInstallNeeded() first and
//                                    return early with skipped: true when possible
//   useCache (default true)        - pass --prefer-offline to favor the npm cache
//   verbose (default false)        - omit --silent so npm prints its progress
//
// Returns { duration, skipped, reason }; throws when npm install itself fails.
const optimizedNpmInstall = async (options = {})=>{
    const logger = getLogger();
    // Dynamically re-import createStorage (the module is also imported at the
    // top of this file); the local binding shadows the module-level one.
    const { createStorage } = await import('@eldrforge/shared');
    const storage = createStorage();
    const { skipIfNotNeeded = true, useCache = true, verbose = false } = options;
    // Check if install is needed
    if (skipIfNotNeeded) {
        const installCheck = await isNpmInstallNeeded(storage);
        if (!installCheck.needed) {
            logger.info(`NPM_INSTALL_SKIPPED: Skipping npm install optimization | Reason: ${installCheck.reason} | Status: not-needed`);
            return {
                duration: 0,
                skipped: true,
                reason: installCheck.reason
            };
        } else {
            logger.verbose(`📦 npm install required: ${installCheck.reason}`);
        }
    }
    // Build optimized npm install command
    const npmArgs = [
        'install'
    ];
    if (!verbose) {
        npmArgs.push('--silent');
    }
    if (useCache) {
        // npm uses cache by default, but we can ensure it's not bypassed
        npmArgs.push('--prefer-offline');
    }
    // Use --no-audit and --no-fund for faster installs in link/unlink scenarios
    npmArgs.push('--no-audit', '--no-fund');
    const command = `npm ${npmArgs.join(' ')}`;
    // NOTE(review): this info line hardcodes "npm install"; the full command
    // including flags is only emitted at verbose level on the next line.
    logger.info(`NPM_INSTALL_OPTIMIZED: Running optimized npm install | Mode: optimized | Command: npm install`);
    logger.verbose(`Command: ${command}`);
    const timer = PerformanceTimer.start(logger, 'Optimized npm install execution');
    try {
        await run(command);
        const duration = timer.end('Optimized npm install completed successfully');
        logger.info(`NPM_INSTALL_SUCCESS: Dependencies installed successfully | Duration: ${duration}ms | Status: completed`);
        return {
            duration,
            skipped: false,
            reason: 'Installation completed successfully'
        };
    } catch (error) {
        // Wrap the failure so callers see an npm-install-specific message.
        timer.end('Optimized npm install failed');
        throw new Error(`Failed to run optimized npm install: ${error.message}`);
    }
};
4040
// Helper to run npm ci if package-lock.json is available (faster than npm install).
//
// Returns { success: true, duration } when npm ci completed, or
// { success: false } when the lock file is absent or npm ci fails —
// callers are expected to fall back to a regular npm install.
const tryNpmCi = async ()=>{
    const logger = getLogger();
    // Dynamically re-import createStorage (the module is also imported at the
    // top of this file); the local binding shadows the module-level one.
    const { createStorage } = await import('@eldrforge/shared');
    const storage = createStorage();
    try {
        // npm ci requires a lock file; bail out quietly when it is missing.
        const hasLockFile = await storage.exists('package-lock.json');
        if (!hasLockFile) {
            return {
                success: false
            };
        }
        logger.info('NPM_CI_USING: Using npm ci for faster installation | Command: npm ci | Advantage: Faster clean install');
        const timer = PerformanceTimer.start(logger, 'npm ci execution');
        // --silent/--no-audit/--no-fund trim output and skip non-essential work.
        await run('npm ci --silent --no-audit --no-fund');
        const duration = timer.end('npm ci completed successfully');
        logger.info(`NPM_CI_SUCCESS: Dependencies installed with npm ci | Duration: ${duration}ms | Status: completed`);
        return {
            success: true,
            duration
        };
    } catch (error) {
        // npm ci failure is non-fatal: signal the caller to fall back.
        logger.verbose(`npm ci failed, will fall back to npm install: ${error.message}`);
        return {
            success: false
        };
    }
};
4069
// Main function that tries the fastest approach first: npm ci when a lock
// file allows it, otherwise an optimized (and possibly skipped) npm install.
// Returns { duration, method, skipped }.
const smartNpmInstall = async (options = {})=>{
    const logger = getLogger();
    const { skipIfNotNeeded = true, preferCi = true, verbose = false } = options;
    const overallTimer = PerformanceTimer.start(logger, 'Smart npm install');
    // Fast path: a clean install straight from the lock file.
    if (preferCi) {
        const ciOutcome = await tryNpmCi();
        if (ciOutcome.success) {
            overallTimer.end('Smart npm install completed with npm ci');
            return {
                duration: ciOutcome.duration || 0,
                method: 'npm ci',
                skipped: false
            };
        }
    }
    // Fallback: optimized npm install, which may decide nothing is needed.
    const fallbackResult = await optimizedNpmInstall({
        skipIfNotNeeded,
        useCache: true,
        verbose
    });
    overallTimer.end(`Smart npm install completed with ${fallbackResult.skipped ? 'skip' : 'npm install'}`);
    return {
        duration: fallbackResult.duration,
        method: fallbackResult.skipped ? 'skipped' : 'npm install',
        skipped: fallbackResult.skipped
    };
};
4099
+
4100
// Transpiler helper: install `value` as an own enumerable/configurable/
// writable property of `obj` under `key`, returning `obj`. When the key is
// already visible on the object (own or inherited), Object.defineProperty is
// used with explicit descriptors; otherwise a plain assignment produces an
// identical own data property.
function _define_property(obj, key, value) {
    if (key in obj) {
        Object.defineProperty(obj, key, {
            value,
            enumerable: true,
            configurable: true,
            writable: true
        });
        return obj;
    }
    obj[key] = value;
    return obj;
}
4113
/**
 * PerformanceTracker collects and analyzes performance metrics for a batch
 * of per-package operations: wall-clock start/end times per package, sampled
 * concurrency levels, derived speedup/efficiency figures, and a formatted
 * text report. All timestamps come from Date.now() (milliseconds).
 */ class PerformanceTracker {
    /**
     * Record package start: stamps the current time for packageName.
     * Calling it again for the same package overwrites the earlier stamp.
     */ recordPackageStart(packageName) {
        this.packageStartTimes.set(packageName, Date.now());
    }
    /**
     * Record package end: stamps the current time for packageName.
     */ recordPackageEnd(packageName) {
        this.packageEndTimes.set(packageName, Date.now());
    }
    /**
     * Record one sampled concurrency level (how many packages were running
     * when the sample was taken).
     */ recordConcurrency(level) {
        this.concurrencyHistory.push(level);
    }
    /**
     * Calculate comprehensive metrics.
     * Returns { totalDuration, averagePackageDuration, peakConcurrency,
     * averageConcurrency, speedupVsSequential }. speedupVsSequential is the
     * ratio of summed per-package CPU time to elapsed wall time (1 when no
     * wall time has elapsed). The parameter is intentionally unused.
     */ calculateMetrics(_maxConcurrency) {
        const totalDuration = Date.now() - this.startTime;
        const durations = this.getPackageDurations();
        const averageDuration = durations.length > 0 ? durations.reduce((a, b)=>a + b, 0) / durations.length : 0;
        const peakConcurrency = this.concurrencyHistory.length > 0 ? Math.max(...this.concurrencyHistory) : 0;
        const averageConcurrency = this.concurrencyHistory.length > 0 ? this.concurrencyHistory.reduce((a, b)=>a + b, 0) / this.concurrencyHistory.length : 0;
        // Calculate speedup vs sequential: total CPU time / wall-clock time.
        const totalCPUTime = durations.reduce((a, b)=>a + b, 0);
        const speedupVsSequential = totalDuration > 0 ? totalCPUTime / totalDuration : 1;
        return {
            totalDuration,
            averagePackageDuration: averageDuration,
            peakConcurrency,
            averageConcurrency,
            speedupVsSequential
        };
    }
    /**
     * Get package durations (end - start, ms) for every package that has
     * BOTH a start and an end stamp; packages still running are excluded.
     */ getPackageDurations() {
        const durations = [];
        for (const [pkg, startTime] of this.packageStartTimes){
            const endTime = this.packageEndTimes.get(pkg);
            if (endTime) {
                durations.push(endTime - startTime);
            }
        }
        return durations;
    }
    /**
     * Get efficiency metrics relative to the allowed maxConcurrency:
     * utilization (avg observed concurrency / max, %), efficiency
     * (CPU time / (wall time * max), %), and parallelEfficiency
     * (achieved speedup / ideal speedup, %).
     */ getEfficiency(maxConcurrency) {
        const durations = this.getPackageDurations();
        const totalDuration = Date.now() - this.startTime;
        const totalCPUTime = durations.reduce((a, b)=>a + b, 0);
        const utilization = this.concurrencyHistory.length > 0 ? this.concurrencyHistory.reduce((a, b)=>a + b, 0) / this.concurrencyHistory.length / maxConcurrency * 100 : 0;
        const efficiency = totalDuration > 0 ? totalCPUTime / (totalDuration * maxConcurrency) * 100 : 0;
        const speedup = totalDuration > 0 ? totalCPUTime / totalDuration : 1;
        const idealSpeedup = maxConcurrency;
        const parallelEfficiency = speedup / idealSpeedup * 100;
        return {
            utilization,
            efficiency,
            parallelEfficiency
        };
    }
    /**
     * Generate a multi-line, human-readable performance report covering
     * timing, concurrency, and efficiency, plus estimated time saved versus
     * sequential execution when positive.
     */ generateReport(maxConcurrency) {
        const metrics = this.calculateMetrics(maxConcurrency);
        const efficiency = this.getEfficiency(maxConcurrency);
        const durations = this.getPackageDurations();
        const lines = [];
        lines.push('');
        lines.push('═══════════════════════════════════════════');
        lines.push('         Performance Report');
        lines.push('═══════════════════════════════════════════');
        lines.push('');
        lines.push('⏱️  Timing:');
        lines.push(`  Total Execution: ${this.formatDuration(metrics.totalDuration)}`);
        lines.push(`  Avg Per Package: ${this.formatDuration(metrics.averagePackageDuration)}`);
        if (durations.length > 0) {
            lines.push(`  Fastest Package: ${this.formatDuration(Math.min(...durations))}`);
            lines.push(`  Slowest Package: ${this.formatDuration(Math.max(...durations))}`);
        }
        lines.push('');
        lines.push('🔥 Concurrency:');
        lines.push(`  Max Allowed: ${maxConcurrency}`);
        lines.push(`  Peak Reached: ${metrics.peakConcurrency}`);
        lines.push(`  Average: ${metrics.averageConcurrency.toFixed(2)}`);
        lines.push(`  Utilization: ${efficiency.utilization.toFixed(1)}%`);
        lines.push('');
        lines.push('🚀 Performance:');
        if (metrics.speedupVsSequential) {
            lines.push(`  Speedup: ${metrics.speedupVsSequential.toFixed(2)}x`);
        }
        lines.push(`  Efficiency: ${efficiency.efficiency.toFixed(1)}%`);
        lines.push(`  Parallel Efficiency: ${efficiency.parallelEfficiency.toFixed(1)}%`);
        lines.push('');
        // Time saved is how much longer a purely sequential run would have taken.
        const totalCPUTime = durations.reduce((a, b)=>a + b, 0);
        const timeSaved = totalCPUTime - metrics.totalDuration;
        if (timeSaved > 0) {
            lines.push(`⏰ Time Saved: ${this.formatDuration(timeSaved)} vs sequential execution`);
            lines.push('');
        }
        lines.push('═══════════════════════════════════════════');
        lines.push('');
        return lines.join('\n');
    }
    // Format a millisecond duration as "Nms", "Ns", or "Mm Ss".
    formatDuration(ms) {
        if (ms < 1000) {
            return `${ms}ms`;
        }
        const seconds = Math.floor(ms / 1000);
        const minutes = Math.floor(seconds / 60);
        if (minutes > 0) {
            return `${minutes}m ${seconds % 60}s`;
        }
        return `${seconds}s`;
    }
    constructor(){
        // Transpiled class fields: start time plus per-package timing maps
        // and the raw concurrency sample history.
        _define_property(this, "startTime", void 0);
        _define_property(this, "packageStartTimes", new Map());
        _define_property(this, "packageEndTimes", new Map());
        _define_property(this, "concurrencyHistory", []);
        this.startTime = Date.now();
    }
}
4241
+
4242
+ export { PerformanceTracker, isNpmInstallNeeded, execute$4 as link, optimizedNpmInstall, smartNpmInstall, execute$1 as tree, tryNpmCi, execute$3 as unlink, execute$2 as updates, execute as versions };
4243
+ //# sourceMappingURL=index.js.map