@eldrforge/tree-execution 0.1.0
- package/LICENSE +22 -0
- package/README.md +130 -0
- package/dist/TreeExecutor.d.ts +113 -0
- package/dist/TreeExecutor.d.ts.map +1 -0
- package/dist/TreeExecutor.js +113 -0
- package/dist/TreeExecutor.js.map +1 -0
- package/dist/checkpoint/CheckpointManager.d.ts +18 -0
- package/dist/checkpoint/CheckpointManager.d.ts.map +1 -0
- package/dist/checkpoint/CheckpointManager.js +156 -0
- package/dist/checkpoint/CheckpointManager.js.map +1 -0
- package/dist/checkpoint/index.d.ts +5 -0
- package/dist/checkpoint/index.d.ts.map +1 -0
- package/dist/checkpoint/index.js +5 -0
- package/dist/checkpoint/index.js.map +1 -0
- package/dist/execution/CommandValidator.d.ts +25 -0
- package/dist/execution/CommandValidator.d.ts.map +1 -0
- package/dist/execution/CommandValidator.js +129 -0
- package/dist/execution/CommandValidator.js.map +1 -0
- package/dist/execution/DependencyChecker.d.ts +47 -0
- package/dist/execution/DependencyChecker.d.ts.map +1 -0
- package/dist/execution/DependencyChecker.js +95 -0
- package/dist/execution/DependencyChecker.js.map +1 -0
- package/dist/execution/DynamicTaskPool.d.ts +118 -0
- package/dist/execution/DynamicTaskPool.d.ts.map +1 -0
- package/dist/execution/DynamicTaskPool.js +658 -0
- package/dist/execution/DynamicTaskPool.js.map +1 -0
- package/dist/execution/RecoveryManager.d.ts +89 -0
- package/dist/execution/RecoveryManager.d.ts.map +1 -0
- package/dist/execution/RecoveryManager.js +592 -0
- package/dist/execution/RecoveryManager.js.map +1 -0
- package/dist/execution/ResourceMonitor.d.ts +73 -0
- package/dist/execution/ResourceMonitor.d.ts.map +1 -0
- package/dist/execution/ResourceMonitor.js +148 -0
- package/dist/execution/ResourceMonitor.js.map +1 -0
- package/dist/execution/Scheduler.d.ts +36 -0
- package/dist/execution/Scheduler.d.ts.map +1 -0
- package/dist/execution/Scheduler.js +83 -0
- package/dist/execution/Scheduler.js.map +1 -0
- package/dist/execution/TreeExecutionAdapter.d.ts +45 -0
- package/dist/execution/TreeExecutionAdapter.d.ts.map +1 -0
- package/dist/execution/TreeExecutionAdapter.js +249 -0
- package/dist/execution/TreeExecutionAdapter.js.map +1 -0
- package/dist/index.d.ts +29 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +25 -0
- package/dist/index.js.map +1 -0
- package/dist/tree.d.ts +13 -0
- package/dist/tree.d.ts.map +1 -0
- package/dist/tree.js +2453 -0
- package/dist/tree.js.map +1 -0
- package/dist/types/config.d.ts +172 -0
- package/dist/types/config.d.ts.map +1 -0
- package/dist/types/config.js +2 -0
- package/dist/types/config.js.map +1 -0
- package/dist/types/index.d.ts +6 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +6 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types/parallelExecution.d.ts +108 -0
- package/dist/types/parallelExecution.d.ts.map +1 -0
- package/dist/types/parallelExecution.js +2 -0
- package/dist/types/parallelExecution.js.map +1 -0
- package/dist/util/commandStubs.d.ts +22 -0
- package/dist/util/commandStubs.d.ts.map +1 -0
- package/dist/util/commandStubs.js +49 -0
- package/dist/util/commandStubs.js.map +1 -0
- package/dist/util/logger.d.ts +14 -0
- package/dist/util/logger.d.ts.map +1 -0
- package/dist/util/logger.js +15 -0
- package/dist/util/logger.js.map +1 -0
- package/dist/util/mutex.d.ts +38 -0
- package/dist/util/mutex.d.ts.map +1 -0
- package/dist/util/mutex.js +101 -0
- package/dist/util/mutex.js.map +1 -0
- package/dist/util/treeUtils.d.ts +46 -0
- package/dist/util/treeUtils.d.ts.map +1 -0
- package/dist/util/treeUtils.js +74 -0
- package/dist/util/treeUtils.js.map +1 -0
- package/package.json +64 -0
package/dist/tree.js
ADDED
@@ -0,0 +1,2453 @@
#!/usr/bin/env node
/**
 * Tree command - Central dependency analysis and tree traversal for kodrdriv
 *
 * This command supports two execution modes:
 * 1. Custom command mode: `kodrdriv tree --cmd "npm install"`
 * 2. Built-in command mode: `kodrdriv tree commit`, `kodrdriv tree publish`, etc.
 *
 * Built-in commands shell out to separate kodrdriv processes to preserve
 * individual project configurations while leveraging centralized dependency analysis.
 *
 * Supported built-in commands: commit, release, publish, link, unlink, development, branches, checkout, precommit
 *
 * Enhanced logging based on debug/verbose flags:
 *
 * --debug:
 * - Shows all command output (stdout/stderr)
 * - Shows detailed debug messages about dependency levels and execution flow
 * - Shows package-by-package dependency analysis
 * - Shows detailed level start/completion information
 *
 * --verbose:
 * - Shows exactly what's happening without full command output
 * - Shows level-by-level execution progress
 * - Shows package grouping information
 * - Shows basic execution flow
 *
 * No flags:
 * - For commit and publish commands: Shows full output from child processes by default
 *   (including AI generation, self-reflection, and agentic interactions)
 * - For other commands: Shows basic progress with numeric representation ([1/5] Package: Running...)
 * - Shows level-by-level execution summaries
 * - Shows completion status for each package and level
 */
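// For example (hypothetical monorepo), both modes run across every discovered
// package in dependency order:
//
//   kodrdriv tree --cmd "npm run build"    # custom command mode
//   kodrdriv tree publish --dry-run        # built-in command mode, preview only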
import path from 'path';
import fs from 'fs/promises';
import { exec } from 'child_process';
import { run, runSecure, safeJsonParse, validatePackageJson, getGitStatusSummary, getGloballyLinkedPackages, getLinkedDependencies, getLinkCompatibilityProblems } from '@eldrforge/git-tools';
import util from 'util';
import { getLogger } from './util/logger.js';
import { createStorage } from '@eldrforge/shared';
import { scanForPackageJsonFiles, parsePackageJson, buildDependencyGraph, topologicalSort, shouldExclude } from '@eldrforge/tree-core';
// Utility functions (extracted/inlined)
import { getOutputPath, PerformanceTimer, isInGitRepository, runGitWithLock, optimizePrecommitCommand, recordTestRun } from './util/treeUtils.js';
// Built-in commands - using stubs for now
// TODO: Refactor to use callbacks/dependency injection
import { Updates, Commit, Link, Unlink } from './util/commandStubs.js';
// Define constants locally
const DEFAULT_OUTPUT_DIRECTORY = 'output/kodrdriv';
// Global state to track published versions during tree execution - protected by mutex
let publishedVersions = [];
let executionContext = null;
// Function to reset global state (for testing)
export const __resetGlobalState = () => {
    publishedVersions = [];
    executionContext = null;
};
// Import shared mutex implementation
import { SimpleMutex } from './util/mutex.js';
const globalStateMutex = new SimpleMutex();
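// The shared mutable state above (publishedVersions) is only touched while
// holding globalStateMutex. A sketch of the pattern used at the call sites
// below (SimpleMutex exposes lock()/unlock(); `entry` is a hypothetical
// published-version record):
//
//   await globalStateMutex.lock();
//   publishedVersions.push(entry);
//   globalStateMutex.unlock();
//
// Call sites track a mutexLocked flag so the lock can be released from a
// catch block if anything throws in between.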
// Update inter-project dependencies in package.json based on published versions
const updateInterProjectDependencies = async (packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun) => {
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    if (!await storage.exists(packageJsonPath)) {
        packageLogger.verbose('No package.json found, skipping dependency updates');
        return false;
    }
    let hasChanges = false;
    try {
        const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
        const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
        const packageJson = validatePackageJson(parsed, packageJsonPath);
        const sectionsToUpdate = ['dependencies', 'devDependencies', 'peerDependencies'];
        for (const publishedVersion of publishedVersions) {
            const { packageName, version } = publishedVersion;
            // Only update if this is an inter-project dependency (exists in our build tree)
            if (!allPackageNames.has(packageName)) {
                continue;
            }
            // Skip prerelease versions (e.g., 1.0.0-beta.1, 2.0.0-alpha.3)
            // Prerelease versions should not be automatically propagated to consumers
            if (version.includes('-')) {
                packageLogger.verbose(`Skipping prerelease version ${packageName}@${version} - not updating dependencies`);
                continue;
            }
            // Update the dependency in all relevant sections
            for (const section of sectionsToUpdate) {
                const deps = packageJson[section];
                if (deps && deps[packageName]) {
                    const oldVersion = deps[packageName];
                    const newVersion = `^${version}`;
                    if (oldVersion !== newVersion) {
                        if (isDryRun) {
                            packageLogger.info(`Would update ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
                        }
                        else {
                            packageLogger.info(`Updating ${section}.${packageName}: ${oldVersion} → ${newVersion}`);
                            deps[packageName] = newVersion;
                        }
                        hasChanges = true;
                    }
                }
            }
        }
        if (hasChanges && !isDryRun) {
            // Write updated package.json
            await storage.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n', 'utf-8');
            packageLogger.info('Inter-project dependencies updated successfully');
        }
    }
    catch (error) {
        packageLogger.warn(`Failed to update inter-project dependencies: ${error.message}`);
        return false;
    }
    return hasChanges;
};
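// Worked example (hypothetical names): if publishedVersions contains
// { packageName: '@acme/core', version: '1.4.0' } and this package's
// package.json has "dependencies": { "@acme/core": "^1.3.2" }, that entry is
// rewritten to "^1.4.0". A prerelease such as 1.5.0-beta.1 is skipped, and
// packages outside allPackageNames are never touched.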
// Detect scoped dependencies from package.json and run updates for them
const updateScopedDependencies = async (packageDir, packageLogger, isDryRun, runConfig) => {
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    if (!await storage.exists(packageJsonPath)) {
        packageLogger.verbose('No package.json found, skipping scoped dependency updates');
        return false;
    }
    try {
        // Read the package.json before updates
        const beforeContent = await storage.readFile(packageJsonPath, 'utf-8');
        const parsed = safeJsonParse(beforeContent, packageJsonPath);
        const packageJson = validatePackageJson(parsed, packageJsonPath);
        // Determine which scopes to update
        let scopesToUpdate;
        // Check if scopedDependencyUpdates is configured
        const configuredScopes = runConfig.publish?.scopedDependencyUpdates;
        if (configuredScopes !== undefined) {
            // scopedDependencyUpdates is explicitly configured
            if (configuredScopes.length > 0) {
                // Use configured scopes
                scopesToUpdate = new Set(configuredScopes);
                packageLogger.verbose(`Using configured scopes: ${Array.from(scopesToUpdate).join(', ')}`);
            }
            else {
                // Empty array means explicitly disabled
                packageLogger.verbose('Scoped dependency updates explicitly disabled');
                return false;
            }
        }
        else {
            // Not configured - use default behavior (package's own scope)
            scopesToUpdate = new Set();
            if (packageJson.name && packageJson.name.startsWith('@')) {
                const packageScope = packageJson.name.split('/')[0]; // e.g., "@fjell/core" -> "@fjell"
                scopesToUpdate.add(packageScope);
                packageLogger.verbose(`No scopes configured, defaulting to package's own scope: ${packageScope}`);
            }
            else {
                packageLogger.verbose('Package is not scoped and no scopes configured, skipping scoped dependency updates');
                return false;
            }
        }
        if (scopesToUpdate.size === 0) {
            packageLogger.verbose('No scopes to update, skipping updates');
            return false;
        }
        // Run updates for each scope
        for (const scope of scopesToUpdate) {
            packageLogger.info(`🔄 Checking for ${scope} dependency updates before publish...`);
            try {
                // Create a config for the updates command with the scope
                const updatesConfig = {
                    ...runConfig,
                    dryRun: isDryRun,
                    updates: {
                        scope: scope
                    }
                };
                await Updates.execute(updatesConfig);
            }
            catch (error) {
                // Don't fail the publish if updates fails, just warn
                packageLogger.warn(`Failed to update ${scope} dependencies: ${error.message}`);
            }
        }
        // Check if package.json was modified
        const afterContent = await storage.readFile(packageJsonPath, 'utf-8');
        const hasChanges = beforeContent !== afterContent;
        if (hasChanges) {
            packageLogger.info('✅ Scoped dependencies updated successfully');
        }
        else {
            packageLogger.info('No scoped dependency updates needed');
        }
        return hasChanges;
    }
    catch (error) {
        packageLogger.warn(`Failed to detect scoped dependencies: ${error.message}`);
        return false;
    }
};
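// Configuration sketch (hypothetical scope names): in the run config,
//   publish: { scopedDependencyUpdates: ['@acme', '@tools'] }   // update these scopes
//   publish: { scopedDependencyUpdates: [] }                    // explicitly disabled
// Leaving it undefined falls back to the package's own scope, e.g. '@fjell'
// for a package named '@fjell/core'.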
// Get the context file path
const getContextFilePath = (outputDirectory) => {
    const outputDir = outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
    return getOutputPath(outputDir, '.kodrdriv-context');
};
// Save execution context to file
const saveExecutionContext = async (context, outputDirectory) => {
    const storage = createStorage(); // Silent storage for context operations
    const contextFilePath = getContextFilePath(outputDirectory);
    try {
        // Ensure output directory exists
        await storage.ensureDirectory(path.dirname(contextFilePath));
        // Save context with JSON serialization that handles dates
        const contextData = {
            ...context,
            startTime: context.startTime.toISOString(),
            lastUpdateTime: context.lastUpdateTime.toISOString(),
            publishedVersions: context.publishedVersions.map(v => ({
                ...v,
                publishTime: v.publishTime.toISOString()
            }))
        };
        await storage.writeFile(contextFilePath, JSON.stringify(contextData, null, 2), 'utf-8');
    }
    catch (error) {
        // Don't fail the entire operation if context saving fails
        const logger = getLogger();
        logger.warn(`Warning: Failed to save execution context: ${error.message}`);
    }
};
// Load execution context from file
const loadExecutionContext = async (outputDirectory) => {
    const storage = createStorage(); // Silent storage for context operations
    const contextFilePath = getContextFilePath(outputDirectory);
    try {
        if (!await storage.exists(contextFilePath)) {
            return null;
        }
        const contextContent = await storage.readFile(contextFilePath, 'utf-8');
        const contextData = safeJsonParse(contextContent, contextFilePath);
        // Restore dates from ISO strings
        return {
            ...contextData,
            startTime: new Date(contextData.startTime),
            lastUpdateTime: new Date(contextData.lastUpdateTime),
            publishedVersions: contextData.publishedVersions.map((v) => ({
                ...v,
                publishTime: new Date(v.publishTime)
            }))
        };
    }
    catch (error) {
        const logger = getLogger();
        logger.warn(`Warning: Failed to load execution context: ${error.message}`);
        return null;
    }
};
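// The round-tripped .kodrdriv-context file is plain JSON. A minimal example of
// what save/load serialize and revive (hypothetical values):
//   {
//     "startTime": "2024-01-01T00:00:00.000Z",
//     "lastUpdateTime": "2024-01-01T00:05:00.000Z",
//     "completedPackages": ["@acme/core"],
//     "publishedVersions": [
//       { "packageName": "@acme/core", "version": "1.4.0",
//         "publishTime": "2024-01-01T00:04:30.000Z" }
//     ]
//   }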
// Clean up context file
const cleanupContext = async (outputDirectory) => {
    const storage = createStorage(); // Silent storage for context operations
    const contextFilePath = getContextFilePath(outputDirectory);
    try {
        if (await storage.exists(contextFilePath)) {
            await storage.deleteFile(contextFilePath);
        }
    }
    catch (error) {
        // Don't fail if cleanup fails
        const logger = getLogger();
        logger.warn(`Warning: Failed to cleanup execution context: ${error.message}`);
    }
};
// Helper function to promote a package to completed status in the context
const promotePackageToCompleted = async (packageName, outputDirectory) => {
    const storage = createStorage();
    const contextFilePath = getContextFilePath(outputDirectory);
    try {
        if (!await storage.exists(contextFilePath)) {
            return;
        }
        const contextContent = await storage.readFile(contextFilePath, 'utf-8');
        const contextData = safeJsonParse(contextContent, contextFilePath);
        // Restore dates from ISO strings
        const context = {
            ...contextData,
            startTime: new Date(contextData.startTime),
            lastUpdateTime: new Date(contextData.lastUpdateTime),
            publishedVersions: contextData.publishedVersions.map((v) => ({
                ...v,
                publishTime: new Date(v.publishTime)
            }))
        };
        // Add package to completed list if not already there
        if (!context.completedPackages.includes(packageName)) {
            context.completedPackages.push(packageName);
            context.lastUpdateTime = new Date();
            await saveExecutionContext(context, outputDirectory);
        }
    }
    catch (error) {
        const logger = getLogger();
        logger.warn(`Warning: Failed to promote package to completed: ${error.message}`);
    }
};
// Helper function to validate that all packages have the required scripts
const validateScripts = async (packages, scripts) => {
    const logger = getLogger();
    const missingScripts = new Map();
    const storage = createStorage();
    logger.debug(`Validating scripts: ${scripts.join(', ')}`);
    for (const [packageName, packageInfo] of packages) {
        const packageJsonPath = path.join(packageInfo.path, 'package.json');
        const missingForPackage = [];
        try {
            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
            const packageJson = safeJsonParse(packageJsonContent, packageJsonPath);
            const validated = validatePackageJson(packageJson, packageJsonPath);
            // Check if each required script exists
            for (const script of scripts) {
                if (!validated.scripts || !validated.scripts[script]) {
                    missingForPackage.push(script);
                }
            }
            if (missingForPackage.length > 0) {
                missingScripts.set(packageName, missingForPackage);
                logger.debug(`Package ${packageName} missing scripts: ${missingForPackage.join(', ')}`);
            }
        }
        catch (error) {
            logger.debug(`Error reading package.json for ${packageName}: ${error.message}`);
            // If we can't read the package.json, assume all scripts are missing
            missingScripts.set(packageName, scripts);
        }
    }
    const valid = missingScripts.size === 0;
    if (valid) {
        logger.info(`✅ All packages have the required scripts: ${scripts.join(', ')}`);
    }
    else {
        logger.error(`❌ Script validation failed. Missing scripts:`);
        for (const [packageName, missing] of missingScripts) {
            logger.error(` ${packageName}: ${missing.join(', ')}`);
        }
    }
    return { valid, missingScripts };
};
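// Usage sketch (hypothetical inputs): given the Map of discovered packages and
// the scripts a command needs,
//   const { valid, missingScripts } = await validateScripts(packages, ['clean', 'test']);
// `valid` is true only when every package.json declares every listed script;
// otherwise missingScripts maps each offending package name to its gaps.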
// Extract published version from git tags after successful publish
// After kodrdriv publish, the release version is captured in the git tag,
// while package.json contains the next dev version
const extractPublishedVersion = async (packageDir, packageLogger) => {
    const storage = createStorage();
    const packageJsonPath = path.join(packageDir, 'package.json');
    try {
        // Get package name from package.json
        const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
        const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
        const packageJson = validatePackageJson(parsed, packageJsonPath);
        // Get the most recently created tag (by creation date, not version number)
        // This ensures we get the tag that was just created by the publish, not an older tag with a higher version
        const { stdout: tagOutput } = await run('git tag --sort=-creatordate', { cwd: packageDir });
        const tags = tagOutput.trim().split('\n').filter(Boolean);
        if (tags.length === 0) {
            packageLogger.warn('No git tags found after publish');
            return null;
        }
        // Get the most recently created tag (first in the list)
        const latestTag = tags[0];
        // Extract version from tag, handling various formats:
        // - v1.2.3 -> 1.2.3
        // - working/v1.2.3 -> 1.2.3
        // - main/v1.2.3 -> 1.2.3
        let version = latestTag;
        // If tag contains a slash (branch prefix), extract everything after it
        if (version.includes('/')) {
            version = version.split('/').pop() || version;
        }
        // Remove 'v' prefix if present
        if (version.startsWith('v')) {
            version = version.substring(1);
        }
        packageLogger.verbose(`Extracted published version from tag: ${latestTag} -> ${version}`);
        return {
            packageName: packageJson.name,
            version: version,
            publishTime: new Date()
        };
    }
    catch (error) {
        packageLogger.warn(`Failed to extract published version: ${error.message}`);
        return null;
    }
};
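// Worked example: with tags created in the order v1.2.3 then working/v1.2.4,
// `git tag --sort=-creatordate` lists working/v1.2.4 first, so the function
// returns { packageName, version: '1.2.4', publishTime } even if an older tag
// happens to carry a higher semver.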
// Enhanced run function that can show output based on log level
const runWithLogging = async (command, packageLogger, options = {}, showOutput = 'none', logFilePath) => {
    const execPromise = util.promisify(exec);
    // Ensure encoding is set to 'utf8' to get string output instead of Buffer
    const execOptions = { encoding: 'utf8', ...options };
    if (showOutput === 'full') {
        packageLogger.debug(`Executing command: ${command}`);
        // Use info level to show on console in debug mode
        packageLogger.info(`🔧 Running: ${command}`);
    }
    else if (showOutput === 'minimal') {
        packageLogger.verbose(`Running: ${command}`);
    }
    // Helper to write to log file
    const writeToLogFile = async (content) => {
        if (!logFilePath)
            return;
        try {
            const logDir = path.dirname(logFilePath);
            await fs.mkdir(logDir, { recursive: true });
            await fs.appendFile(logFilePath, content + '\n', 'utf-8');
        }
        catch (err) {
            packageLogger.warn(`Failed to write to log file ${logFilePath}: ${err.message}`);
        }
    };
    // Write command to log file
    if (logFilePath) {
        const timestamp = new Date().toISOString();
        await writeToLogFile(`[${timestamp}] Executing: ${command}\n`);
    }
    try {
        const result = await execPromise(command, execOptions);
        if (showOutput === 'full') {
            const stdout = String(result.stdout);
            const stderr = String(result.stderr);
            if (stdout.trim()) {
                packageLogger.debug('STDOUT:');
                packageLogger.debug(stdout);
                // Show on console using info level for immediate feedback
                packageLogger.info(`📤 STDOUT:`);
                stdout.split('\n').forEach((line) => {
                    if (line.trim())
                        packageLogger.info(`${line}`);
                });
            }
            if (stderr.trim()) {
                packageLogger.debug('STDERR:');
                packageLogger.debug(stderr);
                // Show on console using info level for immediate feedback
                packageLogger.info(`⚠️ STDERR:`);
                stderr.split('\n').forEach((line) => {
                    if (line.trim())
                        packageLogger.info(`${line}`);
                });
            }
        }
        // Write output to log file
        if (logFilePath) {
            const stdout = String(result.stdout);
            const stderr = String(result.stderr);
            if (stdout.trim()) {
                await writeToLogFile(`\n=== STDOUT ===\n${stdout}`);
            }
            if (stderr.trim()) {
                await writeToLogFile(`\n=== STDERR ===\n${stderr}`);
            }
            await writeToLogFile(`\n[${new Date().toISOString()}] Command completed successfully\n`);
        }
        // Ensure result is properly typed as strings
        return {
            stdout: String(result.stdout),
            stderr: String(result.stderr)
        };
    }
    catch (error) {
        // Always show error message
        packageLogger.error(`Command failed: ${command}`);
        // Always show stderr on failure (contains important error details like coverage failures)
        if (error.stderr && error.stderr.trim()) {
            packageLogger.error(`❌ STDERR:`);
            error.stderr.split('\n').forEach((line) => {
                if (line.trim())
                    packageLogger.error(`${line}`);
            });
        }
        // Show stdout on failure if available (may contain error context)
        if (error.stdout && error.stdout.trim() && (showOutput === 'full' || showOutput === 'minimal')) {
            packageLogger.info(`📤 STDOUT:`);
            error.stdout.split('\n').forEach((line) => {
                if (line.trim())
                    packageLogger.info(`${line}`);
            });
        }
        // Show full output in debug/verbose mode
        if (showOutput === 'full' || showOutput === 'minimal') {
            if (error.stdout && error.stdout.trim() && showOutput === 'full') {
                packageLogger.debug('STDOUT:');
                packageLogger.debug(error.stdout);
            }
            if (error.stderr && error.stderr.trim() && showOutput === 'full') {
                packageLogger.debug('STDERR:');
                packageLogger.debug(error.stderr);
            }
        }
        // Write error output to log file
        if (logFilePath) {
            await writeToLogFile(`\n[${new Date().toISOString()}] Command failed: ${error.message}`);
            if (error.stdout) {
                await writeToLogFile(`\n=== STDOUT ===\n${error.stdout}`);
            }
            if (error.stderr) {
                await writeToLogFile(`\n=== STDERR ===\n${error.stderr}`);
            }
            if (error.stack) {
                await writeToLogFile(`\n=== STACK TRACE ===\n${error.stack}`);
            }
        }
        throw error;
    }
};
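// Call sketch (hypothetical arguments; the callers below instead chdir into the
// package and pass {} for options, which are spread into the exec options):
//   const { stdout, stderr } = await runWithLogging(
//       'npm run build', packageLogger, { cwd: pkg.path }, 'full',
//       path.join(pkg.path, 'output/kodrdriv/build.log'));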
// Create a package-scoped logger that prefixes all messages
const createPackageLogger = (packageName, sequenceNumber, totalCount, isDryRun = false) => {
    const baseLogger = getLogger();
    const prefix = `[${sequenceNumber}/${totalCount}] ${packageName}:`;
    const dryRunPrefix = isDryRun ? 'DRY RUN: ' : '';
    return {
        info: (message, ...args) => baseLogger.info(`${dryRunPrefix}${prefix} ${message}`, ...args),
        warn: (message, ...args) => baseLogger.warn(`${dryRunPrefix}${prefix} ${message}`, ...args),
        error: (message, ...args) => baseLogger.error(`${dryRunPrefix}${prefix} ${message}`, ...args),
        debug: (message, ...args) => baseLogger.debug(`${dryRunPrefix}${prefix} ${message}`, ...args),
        verbose: (message, ...args) => baseLogger.verbose(`${dryRunPrefix}${prefix} ${message}`, ...args),
        silly: (message, ...args) => baseLogger.silly(`${dryRunPrefix}${prefix} ${message}`, ...args),
    };
};
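// Example ('@acme/core' is a hypothetical package name):
//   createPackageLogger('@acme/core', 2, 5, true).info('Running...');
// emits "DRY RUN: [2/5] @acme/core: Running..." through the base logger.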
// Helper function to format subproject error output
const formatSubprojectError = (packageName, error, _packageInfo, _position, _total) => {
    const lines = [];
    lines.push(`❌ Command failed in package ${packageName}:`);
    // Format the main error message with indentation
    if (error.message) {
        const indentedMessage = error.message
            .split('\n')
            .map((line) => ` ${line}`)
            .join('\n');
        lines.push(indentedMessage);
    }
    // If there's stderr output, show it indented as well
    if (error.stderr && error.stderr.trim()) {
        lines.push(' STDERR:');
        const indentedStderr = error.stderr
            .split('\n')
            .filter((line) => line.trim())
            .map((line) => ` ${line}`)
            .join('\n');
        lines.push(indentedStderr);
    }
    // If there's stdout output, show it indented as well
    if (error.stdout && error.stdout.trim()) {
        lines.push(' STDOUT:');
        const indentedStdout = error.stdout
            .split('\n')
            .filter((line) => line.trim())
            .map((line) => ` ${line}`)
            .join('\n');
        lines.push(indentedStdout);
    }
    return lines.join('\n');
};
// Note: PackageInfo, DependencyGraph, scanForPackageJsonFiles, parsePackageJson,
// buildDependencyGraph, and topologicalSort are now imported from ../util/dependencyGraph
// Execute a single package and return execution result
export const executePackage = async (packageName, packageInfo, commandToRun, runConfig, isDryRun, index, total, allPackageNames, isBuiltInCommand = false) => {
    const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
    const packageDir = packageInfo.path;
    const logger = getLogger();
    // Create log file path for publish commands
    let logFilePath;
    if (isBuiltInCommand && commandToRun.includes('publish')) {
        const outputDir = runConfig.outputDirectory || 'output/kodrdriv';
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-').replace('T', '_').split('.')[0];
        const commandName = commandToRun.split(' ')[1]?.split(' ')[0] || 'command';
        logFilePath = path.join(packageDir, outputDir, `${commandName}_${timestamp}.log`);
    }
    // Determine output level based on flags
    // For publish and commit commands, default to full output to show AI progress and other details
    // For other commands, require --verbose or --debug for output
    const isPublishCommand = isBuiltInCommand && commandToRun.includes('publish');
    const isCommitCommand = isBuiltInCommand && commandToRun.includes('commit');
    let showOutput = (isPublishCommand || isCommitCommand) ? 'full' : 'none';
    if (runConfig.debug) {
        showOutput = 'full';
    }
    else if (runConfig.verbose) {
        showOutput = 'minimal';
    }
    // Show package start info - always visible for progress tracking
    if (runConfig.debug) {
        packageLogger.debug('MULTI_PROJECT_START: Starting package execution | Package: %s | Index: %d/%d | Path: %s | Command: %s | Context: tree execution', packageName, index + 1, total, packageDir, commandToRun);
        packageLogger.debug('MULTI_PROJECT_CONTEXT: Execution details | Directory: %s | Built-in Command: %s | Dry Run: %s | Output Level: %s', packageDir, isBuiltInCommand, isDryRun, showOutput);
        // Show dependencies if available
        if (packageInfo.dependencies && Array.isArray(packageInfo.dependencies) && packageInfo.dependencies.length > 0) {
            packageLogger.debug('MULTI_PROJECT_DEPS: Package dependencies | Package: %s | Dependencies: [%s]', packageName, packageInfo.dependencies.join(', '));
        }
    }
    else if (runConfig.verbose) {
        packageLogger.verbose(`Starting execution in ${packageDir}`);
    }
    else {
        // Basic progress info even without flags
        logger.info(`[${index + 1}/${total}] ${packageName}: Running ${commandToRun}...`);
    }
    // Track if publish was skipped due to no changes
    let publishWasSkipped = false;
    // Track execution timing
    const executionTimer = new PerformanceTimer(`Package ${packageName} execution`);
    let executionDuration;
    try {
        if (isDryRun && !isBuiltInCommand) {
            // Handle inter-project dependency updates for publish commands in dry run mode
            if (isBuiltInCommand && commandToRun.includes('publish') && publishedVersions.length > 0) {
                let mutexLocked = false;
                try {
                    await globalStateMutex.lock();
                    mutexLocked = true;
                    packageLogger.info('Would check for inter-project dependency updates before publish...');
                    const versionSnapshot = [...publishedVersions]; // Create safe copy
                    globalStateMutex.unlock();
                    mutexLocked = false;
                    await updateInterProjectDependencies(packageDir, versionSnapshot, allPackageNames, packageLogger, isDryRun);
                }
                catch (error) {
                    if (mutexLocked) {
                        globalStateMutex.unlock();
                    }
                    throw error;
                }
            }
            // Use main logger for the specific message tests expect
            logger.info(`DRY RUN: Would execute: ${commandToRun}`);
            if (runConfig.debug || runConfig.verbose) {
                packageLogger.info(`In directory: ${packageDir}`);
            }
        }
        else {
            // Change to the package directory and run the command
            const originalCwd = process.cwd();
            try {
                // Validate package directory exists before changing to it
                try {
                    await fs.access(packageDir);
                    const stat = await fs.stat(packageDir);
                    if (!stat.isDirectory()) {
                        throw new Error(`Path is not a directory: ${packageDir}`);
                    }
                }
                catch (accessError) {
                    throw new Error(`Cannot access package directory: ${packageDir} - ${accessError.message}`);
                }
                process.chdir(packageDir);
                if (runConfig.debug) {
                    packageLogger.debug(`Changed to directory: ${packageDir}`);
                }
                // Handle dependency updates for publish commands before executing (skip during dry run)
                // Wrap in git lock to prevent parallel packages from conflicting with npm install and git operations
                if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
                    await runGitWithLock(async () => {
                        let hasAnyUpdates = false;
                        // First, update all scoped dependencies from npm registry
                        const hasScopedUpdates = await updateScopedDependencies(packageDir, packageLogger, isDryRun, runConfig);
                        hasAnyUpdates = hasAnyUpdates || hasScopedUpdates;
                        // Then update inter-project dependencies based on previously published packages
                        if (publishedVersions.length > 0) {
                            packageLogger.info('Updating inter-project dependencies based on previously published packages...');
                            const hasInterProjectUpdates = await updateInterProjectDependencies(packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun);
                            hasAnyUpdates = hasAnyUpdates || hasInterProjectUpdates;
                        }
                        // If either type of update occurred, commit the changes
                        if (hasAnyUpdates) {
                            // Commit the dependency updates using kodrdriv commit
                            packageLogger.info('Committing dependency updates...');
                            packageLogger.info('⏱️ This step may take a few minutes as it generates a commit message using AI...');
                            // Add timeout wrapper around commit execution
                            const commitTimeoutMs = 300000; // 5 minutes
                            const commitPromise = Commit.execute({ ...runConfig, dryRun: false });
                            const timeoutPromise = new Promise((_, reject) => {
                                setTimeout(() => reject(new Error(`Commit operation timed out after ${commitTimeoutMs / 1000} seconds`)), commitTimeoutMs);
                            });
                            // Add progress indicator
                            let progressInterval = null;
                            try {
                                // Start progress indicator
                                progressInterval = setInterval(() => {
                                    packageLogger.info('⏳ Still generating commit message... (this can take 1-3 minutes)');
                                }, 30000); // Every 30 seconds
                                await Promise.race([commitPromise, timeoutPromise]);
                                packageLogger.info('✅ Dependency updates committed successfully');
                            }
                            catch (commitError) {
                                if (commitError.message.includes('timed out')) {
                                    packageLogger.error(`❌ Commit operation timed out after ${commitTimeoutMs / 1000} seconds`);
                                    packageLogger.error('This usually indicates an issue with the AI service or very large changes');
                                    packageLogger.error('You may need to manually commit the dependency updates');
                                }
                                else {
                                    packageLogger.warn(`Failed to commit dependency updates: ${commitError.message}`);
                                }
                                // Continue with publish anyway - the updates are still in place
                            }
                            finally {
                                if (progressInterval) {
                                    clearInterval(progressInterval);
                                }
                            }
                        }
                    }, `${packageName}: dependency updates`);
                }
                // Optimize precommit commands for custom commands (not built-in)
                let effectiveCommandToRun = commandToRun;
                let optimizationInfo = null;
                if (!isBuiltInCommand && !isDryRun) {
                    const isPrecommitCommand = commandToRun.includes('precommit') || commandToRun.includes('pre-commit');
                    if (isPrecommitCommand) {
                        try {
                            const optimization = await optimizePrecommitCommand(packageDir, commandToRun);
                            effectiveCommandToRun = optimization.optimizedCommand;
                            optimizationInfo = { skipped: optimization.skipped, reasons: optimization.reasons };
                            if (optimization.skipped.clean || optimization.skipped.test) {
                                const skippedParts = [];
                                if (optimization.skipped.clean) {
                                    skippedParts.push(`clean (${optimization.reasons.clean})`);
                                }
                                if (optimization.skipped.test) {
                                    skippedParts.push(`test (${optimization.reasons.test})`);
                                }
                                packageLogger.info(`⚡ Optimized: Skipped ${skippedParts.join(', ')}`);
                                if (runConfig.verbose || runConfig.debug) {
                                    packageLogger.info(` Original: ${commandToRun}`);
                                    packageLogger.info(` Optimized: ${effectiveCommandToRun}`);
                                }
                            }
                        }
                        catch (error) {
                            // If optimization fails, fall back to original command
                            logger.debug(`Precommit optimization failed for ${packageName}: ${error.message}`);
                        }
                    }
                }
                if (runConfig.debug || runConfig.verbose) {
                    if (isBuiltInCommand) {
                        packageLogger.info(`Executing built-in command: ${commandToRun}`);
                    }
                    else {
                        packageLogger.info(`Executing command: ${effectiveCommandToRun}`);
                    }
                }
                // For built-in commands, shell out to a separate kodrdriv process
                // This preserves individual project configurations
                if (isBuiltInCommand) {
                    // Extract the command name from "kodrdriv <command> [args...]"
                    // Split by space and take the second element (after "kodrdriv")
                    const commandParts = commandToRun.replace(/^kodrdriv\s+/, '').split(/\s+/);
                    const builtInCommandName = commandParts[0];
                    if (runConfig.debug) {
                        packageLogger.debug(`Shelling out to separate kodrdriv process for ${builtInCommandName} command`);
                    }
                    // Add progress indication for publish commands
                    if (builtInCommandName === 'publish') {
                        packageLogger.info('🚀 Starting publish process...');
                        packageLogger.info('⏱️ This may take several minutes (AI processing, PR creation, etc.)');
                    }
                    // Ensure dry-run propagates to subprocess even during overall dry-run mode
                    let effectiveCommand = runConfig.dryRun && !commandToRun.includes('--dry-run')
                        ? `${commandToRun} --dry-run`
                        : commandToRun;
                    // For commit commands, ensure --sendit is used to avoid interactive prompts
                    // This prevents hanging when running via tree command
                    if (builtInCommandName === 'commit' && !effectiveCommand.includes('--sendit') && !runConfig.dryRun) {
                        effectiveCommand = `${effectiveCommand} --sendit`;
                        packageLogger.info('💡 Auto-adding --sendit flag to avoid interactive prompts in tree mode');
                    }
                    // Set timeout based on command type
                    let commandTimeoutMs;
                    if (builtInCommandName === 'publish') {
                        commandTimeoutMs = 1800000; // 30 minutes for publish commands
                        packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for publish command`);
                    }
                    else if (builtInCommandName === 'commit') {
                        commandTimeoutMs = 600000; // 10 minutes for commit commands (AI processing can take time)
                        packageLogger.info(`⏰ Setting timeout of ${commandTimeoutMs / 60000} minutes for commit command`);
                    }
                    else {
                        commandTimeoutMs = 300000; // 5 minutes default for other commands
                    }
                    const commandPromise = runWithLogging(effectiveCommand, packageLogger, {}, showOutput, logFilePath);
                    const commandTimeoutPromise = new Promise((_, reject) => {
                        setTimeout(() => reject(new Error(`Command timed out after ${commandTimeoutMs / 60000} minutes`)), commandTimeoutMs);
                    });
                    try {
                        const startTime = Date.now();
                        const { stdout, stderr } = await Promise.race([commandPromise, commandTimeoutPromise]);
                        executionDuration = Date.now() - startTime;
                        // Detect explicit skip marker from publish to avoid propagating versions
                        // Check both stdout (where we now write it) and stderr (winston logger output, for backward compat)
                        if (builtInCommandName === 'publish' &&
                            ((stdout && stdout.includes('KODRDRIV_PUBLISH_SKIPPED')) ||
                                (stderr && stderr.includes('KODRDRIV_PUBLISH_SKIPPED')))) {
                            packageLogger.info('Publish skipped for this package; will not record or propagate a version.');
                            publishWasSkipped = true;
                        }
                    }
                    catch (error) {
                        if (error.message.includes('timed out')) {
                            packageLogger.error(`❌ ${builtInCommandName} command timed out after ${commandTimeoutMs / 60000} minutes`);
                            packageLogger.error('This usually indicates the command is stuck waiting for user input or an external service');
                            throw error;
                        }
                        throw error;
                    }
                }
                else {
                    // For custom commands, use the existing logic
                    const startTime = Date.now();
                    await runWithLogging(effectiveCommandToRun, packageLogger, {}, showOutput, logFilePath);
                    executionDuration = Date.now() - startTime;
                }
                // Track published version after successful publish (skip during dry run)
                if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
                    // If publish was skipped, do not record a version
                    if (publishWasSkipped) {
                        packageLogger.verbose('Skipping version tracking due to earlier skip.');
                    }
                    else {
                        // Only record a published version if a new tag exists (avoid recording for skipped publishes)
                        const publishedVersion = await extractPublishedVersion(packageDir, packageLogger);
                        if (publishedVersion) {
                            let mutexLocked = false;
                            try {
                                await globalStateMutex.lock();
                                mutexLocked = true;
                                publishedVersions.push(publishedVersion);
                                packageLogger.info(`Tracked published version: ${publishedVersion.packageName}@${publishedVersion.version}`);
                                globalStateMutex.unlock();
                                mutexLocked = false;
                            }
                            catch (error) {
                                if (mutexLocked) {
                                    globalStateMutex.unlock();
                                }
                                throw error;
                            }
                        }
                    }
                }
                // Record test run if tests were executed (not skipped)
                if (!isDryRun && !isBuiltInCommand && effectiveCommandToRun.includes('test') &&
                    (!optimizationInfo || !optimizationInfo.skipped.test)) {
                    try {
                        await recordTestRun(packageDir);
                    }
                    catch (error) {
                        logger.debug(`Failed to record test run for ${packageName}: ${error.message}`);
                    }
                }
                // End timing and show duration
                if (executionDuration !== undefined) {
                    executionTimer.end();
                    const seconds = (executionDuration / 1000).toFixed(1);
                    if (runConfig.debug || runConfig.verbose) {
                        packageLogger.info(`⏱️ Execution time: ${seconds}s`);
                    }
                    else if (!isPublishCommand && !isCommitCommand) {
                        // Show timing in completion message (publish/commit commands have their own completion message)
                        logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed (${seconds}s)`);
                    }
                }
                else {
                    executionTimer.end();
                    if (runConfig.debug || runConfig.verbose) {
                        packageLogger.info(`Command completed successfully`);
                    }
                    else if (!isPublishCommand && !isCommitCommand) {
                        // Basic completion info (publish/commit commands have their own completion message)
                        logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed`);
                    }
                }
            }
            finally {
                // Safely restore working directory
                try {
                    // Validate original directory still exists before changing back
                    const fs = await import('fs/promises');
                    await fs.access(originalCwd);
                    process.chdir(originalCwd);
                    if (runConfig.debug) {
                        packageLogger.debug(`Restored working directory to: ${originalCwd}`);
                    }
                }
                catch (restoreError) {
                    // If we can't restore to original directory, at least log the issue
                    packageLogger.error(`Failed to restore working directory to ${originalCwd}: ${restoreError.message}`);
                    packageLogger.error(`Current working directory is now: ${process.cwd()}`);
                    // Don't throw here to avoid masking the original error
                }
            }
        }
        // Show completion status (for publish/commit commands, this supplements the timing message above)
        if (runConfig.debug || runConfig.verbose) {
            if (publishWasSkipped) {
                packageLogger.info(`⊘ Skipped (no code changes)`);
            }
            else {
                packageLogger.info(`✅ Completed successfully`);
            }
        }
        else if (isPublishCommand || isCommitCommand) {
            // For publish/commit commands, always show completion even without verbose
            // Include timing if available
            const timeStr = executionDuration !== undefined ? ` (${(executionDuration / 1000).toFixed(1)}s)` : '';
            if (publishWasSkipped) {
                logger.info(`[${index + 1}/${total}] ${packageName}: ⊘ Skipped (no code changes)`);
            }
            else {
                logger.info(`[${index + 1}/${total}] ${packageName}: ✅ Completed${timeStr}`);
            }
        }
        // Ensure timing is recorded even if there was an early return
        if (executionDuration === undefined) {
            executionDuration = executionTimer.end();
        }
        return { success: true, skippedNoChanges: publishWasSkipped, logFile: logFilePath };
    }
    catch (error) {
        // Record timing even on error
        if (executionDuration === undefined) {
            executionDuration = executionTimer.end();
            const seconds = (executionDuration / 1000).toFixed(1);
            if (runConfig.debug || runConfig.verbose) {
                packageLogger.error(`⏱️ Execution time before failure: ${seconds}s`);
            }
        }
        if (runConfig.debug || runConfig.verbose) {
            packageLogger.error(`❌ Execution failed: ${error.message}`);
        }
        else {
            logger.error(`[${index + 1}/${total}] ${packageName}: ❌ Failed - ${error.message}`);
        }
        // Always show stderr if available (contains important error details)
        // Note: runWithLogging already logs stderr, but we show it here too for visibility
        // when error is caught at this level (e.g., from timeout wrapper)
        if (error.stderr && error.stderr.trim() && !runConfig.debug && !runConfig.verbose) {
            // Extract key error lines from stderr (coverage failures, test failures, etc.)
            const stderrLines = error.stderr.split('\n').filter((line) => {
                const trimmed = line.trim();
                return trimmed && (trimmed.includes('ERROR:') ||
                    trimmed.includes('FAIL') ||
                    trimmed.includes('coverage') ||
                    trimmed.includes('threshold') ||
                    trimmed.includes('fatal:') ||
                    trimmed.startsWith('❌'));
            });
            if (stderrLines.length > 0) {
                logger.error(` Error details:`);
                stderrLines.slice(0, 10).forEach((line) => {
                    logger.error(` ${line.trim()}`);
                });
                if (stderrLines.length > 10) {
                    logger.error(` ... and ${stderrLines.length - 10} more error lines (use --verbose to see full output)`);
                }
            }
        }
        // Check if this is a timeout error
        const errorMessage = error.message?.toLowerCase() || '';
        const isTimeoutError = errorMessage && (errorMessage.includes('timeout waiting for pr') ||
            errorMessage.includes('timeout waiting for release workflows') ||
            errorMessage.includes('timeout reached') ||
            errorMessage.includes('timeout') ||
            errorMessage.includes('timed out') ||
            errorMessage.includes('timed_out'));
        return { success: false, error, isTimeoutError, logFile: logFilePath };
    }
};
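// Result shape: both paths return a plain object, so callers can distinguish
// skipped publishes and timeouts from hard failures:
//   on success: { success: true, skippedNoChanges: boolean, logFile?: string }
//   on failure: { success: false, error, isTimeoutError: boolean, logFile?: string }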
/**
 * Generate a dry-run preview showing what would happen without executing
 */
const generateDryRunPreview = async (dependencyGraph, buildOrder, command, runConfig) => {
    const lines = [];
    lines.push('');
    lines.push('🔍 DRY RUN MODE - No changes will be made');
    lines.push('');
    lines.push('Build order determined:');
    lines.push('');
    // Group packages by dependency level
    const levels = [];
    const packageLevels = new Map();
    for (const pkg of buildOrder) {
        const deps = dependencyGraph.edges.get(pkg) || new Set();
        let maxDepLevel = -1;
        for (const dep of deps) {
            const depLevel = packageLevels.get(dep) ?? 0;
            maxDepLevel = Math.max(maxDepLevel, depLevel);
        }
        const pkgLevel = maxDepLevel + 1;
        packageLevels.set(pkg, pkgLevel);
        if (!levels[pkgLevel]) {
            levels[pkgLevel] = [];
        }
        levels[pkgLevel].push(pkg);
    }
    // Show packages grouped by level
    for (let i = 0; i < levels.length; i++) {
        const levelPackages = levels[i];
        lines.push(`Level ${i + 1}: (${levelPackages.length} package${levelPackages.length === 1 ? '' : 's'})`);
        for (const pkg of levelPackages) {
            const pkgInfo = dependencyGraph.packages.get(pkg);
            if (!pkgInfo)
                continue;
            // Check if package has changes (for publish command)
            const isPublish = command.includes('publish');
            let status = '📝 Has changes, will execute';
            if (isPublish) {
                try {
                    // Check git diff to see if there are code changes
                    const { stdout } = await runSecure('git', ['diff', '--name-only', 'origin/main...HEAD'], { cwd: pkgInfo.path });
                    const changedFiles = stdout.split('\n').filter(Boolean);
                    const nonVersionFiles = changedFiles.filter(f => f !== 'package.json' && f !== 'package-lock.json');
                    if (changedFiles.length === 0) {
                        status = '⊘ No changes, will skip';
                    }
                    else if (nonVersionFiles.length === 0) {
                        status = '⊘ Only version bump, will skip';
                    }
                    else {
                        status = `📝 Has changes (${nonVersionFiles.length} files), will publish`;
                    }
                }
                catch {
                    // If we can't check git status, assume changes
                    status = '📝 Will execute';
                }
            }
            lines.push(` ${pkg}`);
            lines.push(` Status: ${status}`);
            lines.push(` Path: ${pkgInfo.path}`);
        }
        lines.push('');
    }
    lines.push('Summary:');
    lines.push(` Total packages: ${buildOrder.length}`);
    lines.push(` Dependency levels: ${levels.length}`);
    lines.push(` Command: ${command}`);
    if (runConfig.tree?.maxConcurrency) {
        lines.push(` Max concurrency: ${runConfig.tree.maxConcurrency}`);
    }
    lines.push('');
    lines.push('To execute for real, run the same command without --dry-run');
    lines.push('');
    return lines.join('\n');
};
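// Worked example of the level grouping (hypothetical graph): with B -> A and
// C -> A (B and C each depend on A), A has no deps so its level is
// max(-1) + 1 = 0, while B and C get max(level(A)) + 1 = 1. The preview then
// prints A under "Level 1" and B, C under "Level 2"; packages in a level only
// depend on packages from earlier levels.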
// Add a simple status check function
|
|
1052
|
+
const checkTreePublishStatus = async () => {
|
|
1053
|
+
const logger = getLogger();
|
|
1054
|
+
try {
|
|
1055
|
+
// Check for running kodrdriv processes
|
|
1056
|
+
const { stdout } = await runSecure('ps', ['aux'], {});
|
|
1057
|
+
const kodrdrivProcesses = stdout.split('\n').filter((line) => line.includes('kodrdriv') &&
|
|
1058
|
+
!line.includes('grep') &&
|
|
1059
|
+
!line.includes('ps aux') &&
|
|
1060
|
+
!line.includes('tree --status') // Exclude the current status command
|
|
1061
|
+
);
|
|
1062
|
+
if (kodrdrivProcesses.length > 0) {
|
|
1063
|
+
logger.info('🔍 Found running kodrdriv processes:');
|
|
1064
|
+
kodrdrivProcesses.forEach((process) => {
|
|
1065
|
+
const parts = process.trim().split(/\s+/);
|
|
1066
|
+
const pid = parts[1];
|
|
1067
|
+
const command = parts.slice(10).join(' ');
|
|
1068
|
+
logger.info(` PID ${pid}: ${command}`);
|
|
1069
|
+
});
|
|
1070
|
+
}
|
|
1071
|
+
else {
|
|
1072
|
+
logger.info('No kodrdriv processes currently running');
|
|
1073
|
+
}
|
|
1074
|
+
}
|
|
1075
|
+
catch (error) {
|
|
1076
|
+
logger.warn('Could not check process status:', error);
|
|
1077
|
+
}
|
|
1078
|
+
};
|
|
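// Main entry point for the tree command. The guard clauses below handle the
// one-shot modes (status, promote, auditBranches, statusParallel,
// validateState, recovery options, continue) before the normal
// scan -> dependency graph -> build order -> command execution flow begins.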
export const execute = async (runConfig) => {
    const logger = getLogger();
    const isDryRun = runConfig.dryRun || false;
    const isContinue = runConfig.tree?.continue || false;
    const promotePackage = runConfig.tree?.promote;
    // Debug logging
    logger.debug('Tree config:', JSON.stringify(runConfig.tree, null, 2));
    logger.debug('Status flag:', runConfig.tree?.status);
    logger.debug('Full runConfig:', JSON.stringify(runConfig, null, 2));
    // Handle status check
    if (runConfig.tree?.status) {
        logger.info('🔍 Checking for running kodrdriv processes...');
        await checkTreePublishStatus();
        return 'Status check completed';
    }
    // Handle promote mode
    if (promotePackage) {
        logger.info(`Promoting package '${promotePackage}' to completed status...`);
        await promotePackageToCompleted(promotePackage, runConfig.outputDirectory);
        logger.info(`✅ Package '${promotePackage}' has been marked as completed.`);
        logger.info('You can now run the tree command with --continue to resume from the next package.');
        return `Package '${promotePackage}' promoted to completed status.`;
    }
    // Handle audit-branches command
    if (runConfig.tree?.auditBranches) {
        logger.info('🔍 Auditing branch state across all packages...');
        const directories = runConfig.tree?.directories || [process.cwd()];
        const excludedPatterns = runConfig.tree?.exclude || [];
        let allPackageJsonPaths = [];
        for (const targetDirectory of directories) {
            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
        }
        if (allPackageJsonPaths.length === 0) {
            return 'No packages found';
        }
        const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
        const packages = Array.from(dependencyGraph.packages.values()).map(pkg => ({
            name: pkg.name,
            path: pkg.path,
        }));
        // Branch state utilities - stubbed for now
        // TODO: Extract or implement branch state auditing
        const auditBranchState = async (_packages, _config, _options) => ({
            packages: [],
            issues: [],
            issuesFound: 0,
            goodPackages: 0
        });
        const formatAuditResults = (_results) => 'Branch audit not implemented';
        const { getRemoteDefaultBranch } = await import('@eldrforge/git-tools');
        // For publish workflows, check branch consistency, merge conflicts, and existing PRs
        // Don't pass an expected branch - let the audit find the most common branch
        let targetBranch = runConfig.publish?.targetBranch;
        if (!targetBranch) {
            // Try to detect default branch from the first package that is a git repo
            const firstGitPkg = packages.find(pkg => isInGitRepository(pkg.path));
            if (firstGitPkg) {
                try {
                    // Detect the remote default branch; fall back to 'main' below if this fails
                    targetBranch = await getRemoteDefaultBranch(firstGitPkg.path) || 'main';
                }
                catch {
                    targetBranch = 'main';
                }
            }
            else {
                targetBranch = 'main';
            }
        }
        logger.info(`Checking for merge conflicts with '${targetBranch}' and existing pull requests...`);
        const auditResult = await auditBranchState(packages, undefined, {
            targetBranch,
            checkPR: true,
            checkConflicts: true,
            concurrency: runConfig.tree?.maxConcurrency || 10,
        });
        const formatted = formatAuditResults(auditResult);
        logger.info('\n' + formatted);
        if (auditResult.issuesFound > 0) {
            logger.warn(`\n⚠️ Found issues in ${auditResult.issuesFound} package(s). Review the fixes above.`);
            return `Branch audit complete: ${auditResult.issuesFound} package(s) need attention`;
        }
        logger.info(`\n✅ All ${auditResult.goodPackages} package(s) are in good state!`);
        return `Branch audit complete: All packages OK`;
    }
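    // Note: because auditBranchState is stubbed above, this path currently
    // always reports zero issues and returns "Branch audit complete: All packages OK".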
    // Handle parallel execution recovery commands
    const { loadRecoveryManager } = await import('./execution/RecoveryManager.js');
    // Handle status-parallel command
    if (runConfig.tree?.statusParallel) {
        logger.info('📊 Checking parallel execution status...');
        // Need to build dependency graph first
        const directories = runConfig.tree?.directories || [process.cwd()];
        const excludedPatterns = runConfig.tree?.exclude || [];
        let allPackageJsonPaths = [];
        for (const targetDirectory of directories) {
            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
        }
        if (allPackageJsonPaths.length === 0) {
            return 'No packages found';
        }
        const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
        const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
        if (!recoveryManager) {
            logger.info('No parallel execution checkpoint found');
            return 'No active parallel execution found';
        }
        const status = await recoveryManager.showStatus();
        logger.info('\n' + status);
        return status;
    }
    // Handle validate-state command
    if (runConfig.tree?.validateState) {
        logger.info('🔍 Validating checkpoint state...');
        const directories = runConfig.tree?.directories || [process.cwd()];
        const excludedPatterns = runConfig.tree?.exclude || [];
        let allPackageJsonPaths = [];
        for (const targetDirectory of directories) {
            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
        }
        if (allPackageJsonPaths.length === 0) {
            return 'No packages found';
        }
        const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
        const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
        if (!recoveryManager) {
            logger.info('No checkpoint found to validate');
            return 'No checkpoint found';
        }
        const validation = recoveryManager.validateState();
        if (validation.valid) {
            logger.info('✅ Checkpoint state is valid');
        }
        else {
            logger.error('❌ Checkpoint state has issues:');
            for (const issue of validation.issues) {
                logger.error(`  • ${issue}`);
            }
        }
        if (validation.warnings.length > 0) {
            logger.warn('⚠️ Warnings:');
            for (const warning of validation.warnings) {
                logger.warn(`  • ${warning}`);
            }
        }
        return validation.valid ? 'Checkpoint is valid' : 'Checkpoint has issues';
    }
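    // The recovery options below patch an existing checkpoint in place; when
    // combined with --continue, execution falls through afterwards and
    // resumes from the updated checkpoint state.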
    // Handle parallel execution recovery options (must happen before main execution)
    const hasRecoveryOptions = runConfig.tree?.markCompleted || runConfig.tree?.skipPackages ||
        runConfig.tree?.retryFailed || runConfig.tree?.skipFailed ||
        runConfig.tree?.resetPackage;
    if (hasRecoveryOptions && runConfig.tree) {
        logger.info('🔧 Applying recovery options...');
        // Build dependency graph
        const directories = runConfig.tree.directories || [process.cwd()];
        const excludedPatterns = runConfig.tree.exclude || [];
        let allPackageJsonPaths = [];
        for (const targetDirectory of directories) {
            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
        }
        const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
        const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
        if (!recoveryManager) {
            logger.error('No checkpoint found for recovery');
            throw new Error('No checkpoint found. Cannot apply recovery options without an existing checkpoint.');
        }
        await recoveryManager.applyRecoveryOptions({
            markCompleted: runConfig.tree.markCompleted,
            skipPackages: runConfig.tree.skipPackages,
            retryFailed: runConfig.tree.retryFailed,
            skipFailed: runConfig.tree.skipFailed,
            resetPackage: runConfig.tree.resetPackage,
            maxRetries: runConfig.tree.retry?.maxAttempts
        });
        logger.info('✅ Recovery options applied');
        // If not also continuing, just return
        if (!isContinue) {
            return 'Recovery options applied. Use --continue to resume execution.';
        }
    }
    // Handle continue mode
    if (isContinue) {
        // For parallel execution, the checkpoint is managed by DynamicTaskPool/CheckpointManager
        // For sequential execution, we use the legacy context file
        const isParallelMode = runConfig.tree?.parallel;
        if (!isParallelMode) {
            // Sequential execution: load legacy context
            const savedContext = await loadExecutionContext(runConfig.outputDirectory);
            if (savedContext) {
                logger.info('Continuing previous tree execution...');
                logger.info(`Original command: ${savedContext.command}`);
                logger.info(`Started: ${savedContext.startTime.toISOString()}`);
                logger.info(`Previously completed: ${savedContext.completedPackages.length}/${savedContext.buildOrder.length} packages`);
                // Restore state safely
                let mutexLocked = false;
                try {
                    await globalStateMutex.lock();
                    mutexLocked = true;
                    publishedVersions = savedContext.publishedVersions;
                    globalStateMutex.unlock();
                    mutexLocked = false;
                }
                catch (error) {
                    if (mutexLocked) {
                        globalStateMutex.unlock();
                    }
                    throw error;
                }
                executionContext = savedContext;
                // Use original config but allow some overrides (like dry run)
                runConfig = {
                    ...savedContext.originalConfig,
                    dryRun: runConfig.dryRun, // Allow dry run override
                    outputDirectory: runConfig.outputDirectory || savedContext.originalConfig.outputDirectory
                };
            }
            else {
                logger.warn('No previous execution context found. Starting new execution...');
            }
        }
        else {
            // Parallel execution: checkpoint is managed by DynamicTaskPool
            // Just log that we're continuing - the actual checkpoint loading happens in DynamicTaskPool
            logger.info('Continuing previous parallel execution...');
        }
    }
    else {
        // Reset published versions tracking for new tree execution
        publishedVersions = [];
        executionContext = null;
    }
    // Check if we're in built-in command mode (tree command with second argument)
    const builtInCommand = runConfig.tree?.builtInCommand;
    const supportedBuiltInCommands = ['commit', 'release', 'publish', 'link', 'unlink', 'development', 'branches', 'run', 'checkout', 'updates', 'precommit'];
    if (builtInCommand && !supportedBuiltInCommands.includes(builtInCommand)) {
        throw new Error(`Unsupported built-in command: ${builtInCommand}. Supported commands: ${supportedBuiltInCommands.join(', ')}`);
    }
    // Handle run subcommand - convert space-separated scripts to npm run commands
    if (builtInCommand === 'run') {
        const packageArgument = runConfig.tree?.packageArgument;
        if (!packageArgument) {
            throw new Error('run subcommand requires script names. Usage: kodrdriv tree run "clean build test"');
        }
        // Split the package argument by spaces to get individual script names
        const scripts = packageArgument.trim().split(/\s+/).filter(script => script.length > 0);
        if (scripts.length === 0) {
            throw new Error('run subcommand requires at least one script name. Usage: kodrdriv tree run "clean build test"');
        }
        // Convert to npm run commands joined with &&
        const npmCommands = scripts.map(script => `npm run ${script}`).join(' && ');
        // Set this as the custom command to run
        runConfig.tree = {
            ...runConfig.tree,
            cmd: npmCommands
        };
        // Clear the built-in command since we're now using custom command mode
        runConfig.tree.builtInCommand = undefined;
        logger.info(`Converting run subcommand to: ${npmCommands}`);
        // Store scripts for later validation
        runConfig.__scriptsToValidate = scripts;
    }
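    // Example: `kodrdriv tree run "clean build test"` is rewritten above into
    // the per-package custom command `npm run clean && npm run build && npm run test`.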
    // Determine the target directories - either specified or current working directory
    const directories = runConfig.tree?.directories || [process.cwd()];
    // Handle link status subcommand
    if (builtInCommand === 'link' && runConfig.tree?.packageArgument === 'status') {
        // For tree link status, we want to show status across all packages
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running link status across workspace...`);
        // Create a config that will be passed to the link command
        const linkConfig = {
            ...runConfig,
            tree: {
                ...runConfig.tree,
                directories: directories
            }
        };
        try {
            const result = await Link.execute(linkConfig, 'status');
            return result;
        }
        catch (error) {
            logger.error(`Link status failed: ${error.message}`);
            throw error;
        }
    }
    // Handle unlink status subcommand
    if (builtInCommand === 'unlink' && runConfig.tree?.packageArgument === 'status') {
        // For tree unlink status, we want to show status across all packages
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running unlink status across workspace...`);
        // Create a config that will be passed to the unlink command
        const unlinkConfig = {
            ...runConfig,
            tree: {
                ...runConfig.tree,
                directories: directories
            }
        };
        try {
            const result = await Unlink.execute(unlinkConfig, 'status');
            return result;
        }
        catch (error) {
            logger.error(`Unlink status failed: ${error.message}`);
            throw error;
        }
    }
    if (directories.length === 1) {
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspace at: ${directories[0]}`);
    }
    else {
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspaces at: ${directories.join(', ')}`);
    }
    try {
        // Get exclusion patterns from config, fallback to empty array
        const excludedPatterns = runConfig.tree?.exclude || [];
        if (excludedPatterns.length > 0) {
            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Using exclusion patterns: ${excludedPatterns.join(', ')}`);
        }
        // Scan for package.json files across all directories
        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning for package.json files...`);
        let allPackageJsonPaths = [];
        for (const targetDirectory of directories) {
            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning directory: ${targetDirectory}`);
            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
        }
        const packageJsonPaths = allPackageJsonPaths;
        if (packageJsonPaths.length === 0) {
            const directoriesStr = directories.join(', ');
            const message = `No package.json files found in subdirectories of: ${directoriesStr}`;
            logger.warn(message);
            return message;
        }
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found ${packageJsonPaths.length} package.json files`);
        // Build dependency graph
        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Building dependency graph...`);
        const dependencyGraph = await buildDependencyGraph(packageJsonPaths);
        // Perform topological sort to determine build order
        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Determining build order...`);
        let buildOrder = topologicalSort(dependencyGraph);
        // Handle start-from functionality if specified
        const startFrom = runConfig.tree?.startFrom;
        if (startFrom) {
            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for start package: ${startFrom}`);
            // Resolve the actual package name (can be package name or directory name)
            let startPackageName = null;
            for (const [pkgName, pkgInfo] of dependencyGraph.packages) {
                const dirName = path.basename(pkgInfo.path);
                if (dirName === startFrom || pkgName === startFrom) {
                    startPackageName = pkgName;
                    break;
                }
            }
            if (!startPackageName) {
                // Check if the package exists but was excluded across all directories
                let allPackageJsonPathsForCheck = [];
                for (const targetDirectory of directories) {
                    const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
                    allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
                }
                let wasExcluded = false;
                for (const packageJsonPath of allPackageJsonPathsForCheck) {
                    try {
                        const packageInfo = await parsePackageJson(packageJsonPath);
                        const dirName = path.basename(packageInfo.path);
                        if (dirName === startFrom || packageInfo.name === startFrom) {
                            // Check if this package was excluded
                            if (shouldExclude(packageJsonPath, excludedPatterns)) {
                                wasExcluded = true;
                                break;
                            }
                        }
                    }
                    catch {
                        // Skip invalid package.json files
                        continue;
                    }
                }
                if (wasExcluded) {
                    const excludedPatternsStr = excludedPatterns.join(', ');
                    throw new Error(`Package directory '${startFrom}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different starting package.`);
                }
                else {
                    const availablePackages = buildOrder.map(name => {
                        const packageInfo = dependencyGraph.packages.get(name);
                        return `${path.basename(packageInfo.path)} (${name})`;
                    }).join(', ');
                    throw new Error(`Package directory '${startFrom}' not found. Available packages: ${availablePackages}`);
                }
            }
            // Find the start package in the build order and start execution from there
            const startIndex = buildOrder.findIndex(pkgName => pkgName === startPackageName);
            if (startIndex === -1) {
                throw new Error(`Package '${startFrom}' not found in build order. This should not happen.`);
            }
            // Filter build order to start from the specified package
            const originalLength = buildOrder.length;
            buildOrder = buildOrder.slice(startIndex);
            logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Starting execution from package '${startFrom}' (${buildOrder.length} of ${originalLength} packages remaining).`);
        }
        // Handle stop-at functionality if specified
        const stopAt = runConfig.tree?.stopAt;
        if (stopAt) {
            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for stop package: ${stopAt}`);
            // Find the package that matches the stopAt directory name
            const stopIndex = buildOrder.findIndex(packageName => {
                const packageInfo = dependencyGraph.packages.get(packageName);
                const dirName = path.basename(packageInfo.path);
                return dirName === stopAt || packageName === stopAt;
            });
            if (stopIndex === -1) {
                // Check if the package exists but was excluded across all directories
                let allPackageJsonPathsForCheck = [];
                for (const targetDirectory of directories) {
                    const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, []); // No exclusions
                    allPackageJsonPathsForCheck = allPackageJsonPathsForCheck.concat(packageJsonPaths);
                }
                let wasExcluded = false;
                for (const packageJsonPath of allPackageJsonPathsForCheck) {
                    try {
                        const packageInfo = await parsePackageJson(packageJsonPath);
                        const dirName = path.basename(packageInfo.path);
                        if (dirName === stopAt || packageInfo.name === stopAt) {
                            // Check if this package was excluded
                            if (shouldExclude(packageJsonPath, excludedPatterns)) {
                                wasExcluded = true;
                                break;
                            }
                        }
                    }
                    catch {
                        // Skip invalid package.json files
                        continue;
                    }
                }
                if (wasExcluded) {
                    const excludedPatternsStr = excludedPatterns.join(', ');
                    throw new Error(`Package directory '${stopAt}' was excluded by exclusion patterns: ${excludedPatternsStr}. Remove the exclusion pattern or choose a different stop package.`);
                }
                else {
                    const availablePackages = buildOrder.map(name => {
                        const packageInfo = dependencyGraph.packages.get(name);
                        return `${path.basename(packageInfo.path)} (${name})`;
                    }).join(', ');
                    throw new Error(`Package directory '${stopAt}' not found. Available packages: ${availablePackages}`);
                }
            }
            // Truncate the build order before the stop package (the stop package is not executed)
            const originalLength = buildOrder.length;
            buildOrder = buildOrder.slice(0, stopIndex);
            const stoppedCount = originalLength - stopIndex;
            if (stoppedCount > 0) {
                logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Stopping before '${stopAt}' - excluding ${stoppedCount} package${stoppedCount === 1 ? '' : 's'}`);
            }
        }
        // Helper function to determine version scope indicator
        const getVersionScopeIndicator = (versionRange) => {
            // Remove whitespace and check the pattern
            const cleanRange = versionRange.trim();
            // Preserve the original prefix (^, ~, >=, etc.)
            const prefixMatch = cleanRange.match(/^([^0-9]*)/);
            const prefix = prefixMatch ? prefixMatch[1] : '';
            // Extract the version part after the prefix
            const versionPart = cleanRange.substring(prefix.length);
            // Count the number of dots to determine scope
            const dotCount = (versionPart.match(/\./g) || []).length;
            if (dotCount >= 2) {
                // Has patch version (e.g., "^4.4.32" -> "^P")
                return prefix + 'P';
            }
            else if (dotCount === 1) {
                // Has minor version only (e.g., "^4.4" -> "^m")
                return prefix + 'm';
            }
            else if (dotCount === 0 && versionPart.match(/^\d+$/)) {
                // Has major version only (e.g., "^4" -> "^M")
                return prefix + 'M';
            }
            // For complex ranges or non-standard formats, return as-is
            return cleanRange;
        };
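        // Examples: "^4.4.32" -> "^P", "^4.4" -> "^m", "^4" -> "^M"; a
        // non-numeric specifier such as "latest" contains no version digits
        // and is returned unchanged.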
        // Helper function to find packages that consume a given package
        const findConsumingPackagesForBranches = async (targetPackageName, allPackages, storage) => {
            const consumers = [];
            // Extract scope from target package name (e.g., "@fjell/eslint-config" -> "@fjell/")
            const targetScope = targetPackageName.includes('/') ? targetPackageName.split('/')[0] + '/' : null;
            for (const [packageName, packageInfo] of allPackages) {
                if (packageName === targetPackageName)
                    continue;
                try {
                    const packageJsonPath = path.join(packageInfo.path, 'package.json');
                    const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
                    const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
                    const packageJson = validatePackageJson(parsed, packageJsonPath);
                    // Check if this package depends on the target package and get the version range
                    const dependencyTypes = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies'];
                    let versionRange = null;
                    for (const depType of dependencyTypes) {
                        if (packageJson[depType] && packageJson[depType][targetPackageName]) {
                            versionRange = packageJson[depType][targetPackageName];
                            break;
                        }
                    }
                    if (versionRange) {
                        // Apply scope substitution for consumers in the same scope
                        let consumerDisplayName = packageName;
                        if (targetScope && packageName.startsWith(targetScope)) {
                            // Replace scope with "./" (e.g., "@fjell/core" -> "./core")
                            consumerDisplayName = './' + packageName.substring(targetScope.length);
                        }
                        // Add version scope indicator
                        const scopeIndicator = getVersionScopeIndicator(versionRange);
                        consumerDisplayName += ` (${scopeIndicator})`;
                        consumers.push(consumerDisplayName);
                    }
                }
                catch {
                    // Skip packages we can't parse
                    continue;
                }
            }
            return consumers.sort();
        };
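        // Example: for target "@fjell/eslint-config", a consumer "@fjell/core"
        // that declares "^1.2.3" on it is rendered as "./core (^P)".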
        // Handle special "branches" command that displays table
        if (builtInCommand === 'branches') {
            logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Branch Status Summary:`);
            logger.info('');
            // Calculate column widths for nice formatting
            let maxNameLength = 'Package'.length;
            let maxBranchLength = 'Branch'.length;
            let maxVersionLength = 'Version'.length;
            let maxStatusLength = 'Status'.length;
            let maxLinkLength = 'Linked'.length;
            let maxConsumersLength = 'Consumers'.length;
            const branchInfos = [];
            // Create storage instance for consumer lookup
            const storage = createStorage();
            // Get globally linked packages once at the beginning
            const globallyLinkedPackages = await getGloballyLinkedPackages();
            // ANSI escape codes for progress display
            const ANSI = {
                CURSOR_UP: '\x1b[1A',
                CURSOR_TO_START: '\x1b[0G',
                CLEAR_LINE: '\x1b[2K',
                GREEN: '\x1b[32m',
                BLUE: '\x1b[34m',
                YELLOW: '\x1b[33m',
                RESET: '\x1b[0m',
                BOLD: '\x1b[1m'
            };
            // Check if terminal supports ANSI
            const supportsAnsi = process.stdout.isTTY &&
                process.env.TERM !== 'dumb' &&
                !process.env.NO_COLOR;
            const totalPackages = buildOrder.length;
            const concurrency = 5; // Process up to 5 packages at a time
            let completedCount = 0;
            let isFirstProgress = true;
            // Function to update progress display
            const updateProgress = (currentPackage, completed, total) => {
                if (!supportsAnsi)
                    return;
                if (!isFirstProgress) {
                    // Move cursor up and clear the line
                    process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
                }
                const percentage = Math.round((completed / total) * 100);
                const progressBar = '█'.repeat(Math.floor(percentage / 5)) + '░'.repeat(20 - Math.floor(percentage / 5));
                const progress = `${ANSI.BLUE}${ANSI.BOLD}Analyzing packages... ${ANSI.GREEN}[${progressBar}] ${percentage}%${ANSI.RESET} ${ANSI.YELLOW}(${completed}/${total})${ANSI.RESET}`;
                const current = currentPackage ? ` - Currently: ${currentPackage}` : '';
                process.stdout.write(progress + current + '\n');
                isFirstProgress = false;
            };
            // Function to process a single package
            const processPackage = async (packageName) => {
                const packageInfo = dependencyGraph.packages.get(packageName);
                try {
                    // Process git status and consumers in parallel
                    const [gitStatus, consumers] = await Promise.all([
                        getGitStatusSummary(packageInfo.path),
                        findConsumingPackagesForBranches(packageName, dependencyGraph.packages, storage)
                    ]);
                    // Check if this package is globally linked (available to be linked to)
                    const isGloballyLinked = globallyLinkedPackages.has(packageName);
                    const linkedText = isGloballyLinked ? '✓' : '';
                    // Add asterisk to consumers that are actively linking to globally linked packages
                    // and check for link problems to highlight in red
                    const consumersWithLinkStatus = await Promise.all(consumers.map(async (consumer) => {
                        // Extract the base consumer name from the format "package-name (^P)" or "./scoped-name (^m)"
                        const baseConsumerName = consumer.replace(/ \([^)]+\)$/, ''); // Remove version scope indicator
                        // Get the original package name from display name (remove scope substitution)
                        const originalConsumerName = baseConsumerName.startsWith('./')
                            ? baseConsumerName.replace('./', packageName.split('/')[0] + '/')
                            : baseConsumerName;
                        // Find the consumer package info to get its path
                        const consumerPackageInfo = Array.from(dependencyGraph.packages.values())
                            .find(pkg => pkg.name === originalConsumerName);
                        if (consumerPackageInfo) {
                            const [consumerLinkedDeps, linkProblems] = await Promise.all([
                                getLinkedDependencies(consumerPackageInfo.path),
                                getLinkCompatibilityProblems(consumerPackageInfo.path, dependencyGraph.packages)
                            ]);
                            let consumerDisplay = consumer;
                            // Add asterisk if this consumer is actively linking to this package
                            if (consumerLinkedDeps.has(packageName)) {
                                consumerDisplay += '*';
                            }
                            // Check if this consumer has link problems with the current package
                            if (linkProblems.has(packageName)) {
                                // Highlight in red using ANSI escape codes (only if terminal supports it)
                                if (supportsAnsi) {
                                    consumerDisplay = `\x1b[31m${consumerDisplay}\x1b[0m`;
                                }
                                else {
                                    // Fallback for terminals that don't support ANSI colors
                                    consumerDisplay += ' [LINK PROBLEM]';
                                }
                            }
                            return consumerDisplay;
                        }
                        return consumer;
                    }));
                    return {
                        name: packageName,
                        branch: gitStatus.branch,
                        version: packageInfo.version,
                        status: gitStatus.status,
                        linked: linkedText,
                        consumers: consumersWithLinkStatus
                    };
                }
                catch (error) {
                    logger.warn(`Failed to get git status for ${packageName}: ${error.message}`);
                    return {
                        name: packageName,
                        branch: 'error',
                        version: packageInfo.version,
                        status: 'error',
                        linked: '✗',
                        consumers: ['error']
                    };
                }
            };
            // Process packages in batches with progress updates
            updateProgress('Starting...', 0, totalPackages);
            for (let i = 0; i < buildOrder.length; i += concurrency) {
                const batch = buildOrder.slice(i, i + concurrency);
                // Update progress to show current batch
                const currentBatchStr = batch.length === 1 ? batch[0] : `${batch[0]} + ${batch.length - 1} others`;
                updateProgress(currentBatchStr, completedCount, totalPackages);
                // Process batch in parallel
                const batchResults = await Promise.all(batch.map(packageName => processPackage(packageName)));
                // Add results and update column widths
                for (const result of batchResults) {
                    branchInfos.push(result);
                    maxNameLength = Math.max(maxNameLength, result.name.length);
                    maxBranchLength = Math.max(maxBranchLength, result.branch.length);
                    maxVersionLength = Math.max(maxVersionLength, result.version.length);
                    maxStatusLength = Math.max(maxStatusLength, result.status.length);
                    maxLinkLength = Math.max(maxLinkLength, result.linked.length);
                    // For consumers, calculate the width based on the longest consumer name
                    const maxConsumerLength = result.consumers.length > 0
                        ? Math.max(...result.consumers.map(c => c.length))
                        : 0;
                    maxConsumersLength = Math.max(maxConsumersLength, maxConsumerLength);
                }
                completedCount += batch.length;
                updateProgress('', completedCount, totalPackages);
            }
            // Clear progress line and add spacing
            if (supportsAnsi && !isFirstProgress) {
                process.stdout.write(ANSI.CURSOR_UP + ANSI.CURSOR_TO_START + ANSI.CLEAR_LINE);
            }
            logger.info(`${ANSI.GREEN}✅ Analysis complete!${ANSI.RESET} Processed ${totalPackages} packages in batches of ${concurrency}.`);
            logger.info('');
            // Print header (new order: Package | Branch | Version | Status | Linked | Consumers)
            const nameHeader = 'Package'.padEnd(maxNameLength);
            const branchHeader = 'Branch'.padEnd(maxBranchLength);
            const versionHeader = 'Version'.padEnd(maxVersionLength);
            const statusHeader = 'Status'.padEnd(maxStatusLength);
            const linkHeader = 'Linked'.padEnd(maxLinkLength);
            const consumersHeader = 'Consumers';
            logger.info(`${nameHeader} | ${branchHeader} | ${versionHeader} | ${statusHeader} | ${linkHeader} | ${consumersHeader}`);
            logger.info(`${'-'.repeat(maxNameLength)} | ${'-'.repeat(maxBranchLength)} | ${'-'.repeat(maxVersionLength)} | ${'-'.repeat(maxStatusLength)} | ${'-'.repeat(maxLinkLength)} | ${'-'.repeat(9)}`);
            // Print data rows with multi-line consumers
            for (const info of branchInfos) {
                const nameCol = info.name.padEnd(maxNameLength);
                const branchCol = info.branch.padEnd(maxBranchLength);
                const versionCol = info.version.padEnd(maxVersionLength);
                const statusCol = info.status.padEnd(maxStatusLength);
                const linkCol = info.linked.padEnd(maxLinkLength);
                if (info.consumers.length === 0) {
                    // No consumers - single line
                    logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | `);
                }
                else if (info.consumers.length === 1) {
                    // Single consumer - single line
                    logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
                }
                else {
                    // Multiple consumers - first consumer on same line, rest on new lines with continuous column separators
                    logger.info(`${nameCol} | ${branchCol} | ${versionCol} | ${statusCol} | ${linkCol} | ${info.consumers[0]}`);
                    // Additional consumers on separate lines with proper column separators
                    const emptyNameCol = ' '.repeat(maxNameLength);
                    const emptyBranchCol = ' '.repeat(maxBranchLength);
                    const emptyVersionCol = ' '.repeat(maxVersionLength);
                    const emptyStatusCol = ' '.repeat(maxStatusLength);
                    const emptyLinkCol = ' '.repeat(maxLinkLength);
                    for (let i = 1; i < info.consumers.length; i++) {
                        logger.info(`${emptyNameCol} | ${emptyBranchCol} | ${emptyVersionCol} | ${emptyStatusCol} | ${emptyLinkCol} | ${info.consumers[i]}`);
                    }
                }
            }
            logger.info('');
            // Add legend explaining the symbols and colors
            logger.info('Legend:');
            logger.info('  * = Consumer is actively linking to this package');
            logger.info('  (^P) = Patch-level dependency (e.g., "^4.4.32")');
            logger.info('  (^m) = Minor-level dependency (e.g., "^4.4")');
            logger.info('  (^M) = Major-level dependency (e.g., "^4")');
            logger.info('  (~P), (>=M), etc. = Other version prefixes preserved');
            if (supportsAnsi) {
                logger.info('  \x1b[31mRed text\x1b[0m = Consumer has link problems (version mismatches) with this package');
            }
            else {
                logger.info('  [LINK PROBLEM] = Consumer has link problems (version mismatches) with this package');
            }
            logger.info('');
            return `Branch status summary for ${branchInfos.length} packages completed.`;
        }
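        // The branches table is assembled in batches of `concurrency` packages;
        // columns are sized to the widest cell across all rows, and additional
        // consumers wrap onto continuation rows that keep the separators aligned.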
        // Handle special "checkout" command that switches all packages to specified branch
        if (builtInCommand === 'checkout') {
            const targetBranch = runConfig.tree?.packageArgument;
            if (!targetBranch) {
                throw new Error('checkout subcommand requires a branch name. Usage: kodrdriv tree checkout <branch-name>');
            }
            logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Workspace Checkout to Branch: ${targetBranch}`);
            logger.info('');
            // Phase 1: Safety check - scan all packages for uncommitted changes
            logger.info('🔍 Phase 1: Checking for uncommitted changes across workspace...');
            const packagesWithChanges = [];
            for (const packageName of buildOrder) {
                const packageInfo = dependencyGraph.packages.get(packageName);
                try {
                    const gitStatus = await getGitStatusSummary(packageInfo.path);
                    const hasProblems = gitStatus.hasUncommittedChanges || gitStatus.hasUnstagedFiles;
                    packagesWithChanges.push({
                        name: packageName,
                        path: packageInfo.path,
                        status: gitStatus.status,
                        hasUncommittedChanges: gitStatus.hasUncommittedChanges,
                        hasUnstagedFiles: gitStatus.hasUnstagedFiles
                    });
                    if (hasProblems) {
                        logger.warn(`⚠️ ${packageName}: ${gitStatus.status}`);
                    }
                    else {
                        logger.verbose(`✅ ${packageName}: clean`);
                    }
                }
                catch (error) {
                    logger.warn(`❌ ${packageName}: error checking status - ${error.message}`);
                    packagesWithChanges.push({
                        name: packageName,
                        path: packageInfo.path,
                        status: 'error',
                        hasUncommittedChanges: false,
                        hasUnstagedFiles: false
                    });
                }
            }
            // Check if any packages have uncommitted changes
            const problemPackages = packagesWithChanges.filter(pkg => pkg.hasUncommittedChanges || pkg.hasUnstagedFiles || pkg.status === 'error');
            if (problemPackages.length > 0) {
                logger.error(`❌ Cannot proceed with checkout: ${problemPackages.length} packages have uncommitted changes or errors:`);
                logger.error('');
                for (const pkg of problemPackages) {
                    logger.error(`  📦 ${pkg.name} (${pkg.path}):`);
                    logger.error(`     Status: ${pkg.status}`);
                }
                logger.error('');
                logger.error('🔧 To resolve this issue:');
                logger.error('  1. Commit or stash changes in the packages listed above');
                logger.error('  2. Or use "kodrdriv tree commit" to commit changes across all packages');
                logger.error('  3. Then re-run the checkout command');
                logger.error('');
                throw new Error(`Workspace checkout blocked: ${problemPackages.length} packages have uncommitted changes`);
            }
            logger.info(`✅ Phase 1 complete: All ${packagesWithChanges.length} packages are clean`);
            logger.info('');
            // Phase 2: Perform the checkout
            logger.info(`🔄 Phase 2: Checking out all packages to branch '${targetBranch}'...`);
            let successCount = 0;
            const failedPackages = [];
            for (let i = 0; i < buildOrder.length; i++) {
                const packageName = buildOrder[i];
                const packageInfo = dependencyGraph.packages.get(packageName);
                if (isDryRun) {
                    logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: Would checkout ${targetBranch}`);
                    successCount++;
                }
                else {
                    try {
                        const originalCwd = process.cwd();
                        process.chdir(packageInfo.path);
                        try {
                            // Check if target branch exists locally
                            let branchExists = false;
                            try {
                                await runSecure('git', ['rev-parse', '--verify', targetBranch]);
                                branchExists = true;
                            }
                            catch {
                                // Branch doesn't exist locally
                                branchExists = false;
                            }
                            if (branchExists) {
                                await runSecure('git', ['checkout', targetBranch]);
                                logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch}`);
                            }
                            else {
                                // Try to check out branch from remote
                                try {
                                    await runSecure('git', ['checkout', '-b', targetBranch, `origin/${targetBranch}`]);
                                    logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Checked out ${targetBranch} from origin`);
                                }
                                catch {
                                    // If that fails, create a new branch
                                    await runSecure('git', ['checkout', '-b', targetBranch]);
                                    logger.info(`[${i + 1}/${buildOrder.length}] ${packageName}: ✅ Created new branch ${targetBranch}`);
                                }
                            }
                            successCount++;
                        }
                        finally {
                            process.chdir(originalCwd);
                        }
                    }
                    catch (error) {
                        logger.error(`[${i + 1}/${buildOrder.length}] ${packageName}: ❌ Failed - ${error.message}`);
                        failedPackages.push({ name: packageName, error: error.message });
                    }
                }
            }
            // Report results
            if (failedPackages.length > 0) {
                logger.error(`❌ Checkout completed with errors: ${successCount}/${buildOrder.length} packages successful`);
                logger.error('');
                logger.error('Failed packages:');
                for (const failed of failedPackages) {
                    logger.error(`  - ${failed.name}: ${failed.error}`);
                }
                throw new Error(`Checkout failed for ${failedPackages.length} packages`);
            }
            else {
                logger.info(`✅ Checkout complete: All ${buildOrder.length} packages successfully checked out to '${targetBranch}'`);
                return `Workspace checkout complete: ${successCount} packages checked out to '${targetBranch}'`;
            }
        }
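        // Note: checkout processes packages sequentially and temporarily
        // chdir()s into each one, restoring the original working directory in
        // a finally block so a failed checkout cannot strand the process in
        // another package's directory.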
        // Display results
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Build order determined:`);
        let returnOutput = '';
        if (runConfig.verbose || runConfig.debug) {
            // Verbose mode: Skip simple format, show detailed format before command execution
            logger.info(''); // Add spacing
            const rangeInfo = [];
            if (startFrom)
                rangeInfo.push(`starting from ${startFrom}`);
            if (stopAt)
                rangeInfo.push(`stopping before ${stopAt}`);
            const rangeStr = rangeInfo.length > 0 ? ` (${rangeInfo.join(', ')})` : '';
            logger.info(`Detailed Build Order for ${buildOrder.length} packages${rangeStr}:`);
            logger.info('==========================================');
            buildOrder.forEach((packageName, index) => {
                const packageInfo = dependencyGraph.packages.get(packageName);
                const localDeps = Array.from(packageInfo.localDependencies);
                logger.info(`${index + 1}. ${packageName} (${packageInfo.version})`);
                logger.info(`   Path: ${packageInfo.path}`);
                if (localDeps.length > 0) {
                    logger.info(`   Local Dependencies: ${localDeps.join(', ')}`);
                }
                else {
                    logger.info(`   Local Dependencies: none`);
                }
                logger.info(''); // Add spacing between packages
            });
            // Simple return output for verbose mode (no need to repeat detailed info)
            returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
        }
        else {
            // Non-verbose mode: Show simple build order
            buildOrder.forEach((packageName, index) => {
                const packageInfo = dependencyGraph.packages.get(packageName);
                const localDeps = Array.from(packageInfo.localDependencies);
                // Log each step
                if (localDeps.length > 0) {
                    logger.info(`${index + 1}. ${packageName} (depends on: ${localDeps.join(', ')})`);
                }
                else {
                    logger.info(`${index + 1}. ${packageName} (no local dependencies)`);
                }
            });
            // Simple return output for non-verbose mode
            returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
        }
        // Execute command if provided (custom command or built-in command)
        const cmd = runConfig.tree?.cmd;
        // Determine command to execute
        let commandToRun;
        let isBuiltInCommand = false;
        if (builtInCommand) {
            // Built-in command mode: shell out to kodrdriv subprocess
            // Build command with propagated global options
            const globalOptions = [];
            // Propagate global flags that should be inherited by subprocesses
            if (runConfig.debug)
                globalOptions.push('--debug');
            if (runConfig.verbose)
                globalOptions.push('--verbose');
            if (runConfig.dryRun)
                globalOptions.push('--dry-run');
            if (runConfig.overrides)
                globalOptions.push('--overrides');
            // Propagate global options with values
            if (runConfig.model)
                globalOptions.push(`--model "${runConfig.model}"`);
            if (runConfig.configDirectory)
                globalOptions.push(`--config-dir "${runConfig.configDirectory}"`);
            if (runConfig.outputDirectory)
                globalOptions.push(`--output-dir "${runConfig.outputDirectory}"`);
            if (runConfig.preferencesDirectory)
                globalOptions.push(`--preferences-dir "${runConfig.preferencesDirectory}"`);
            // Build the command with global options
            const optionsString = globalOptions.length > 0 ? ` ${globalOptions.join(' ')}` : '';
            // Add package argument for link/unlink/updates commands
            const packageArg = runConfig.tree?.packageArgument;
            const packageArgString = (packageArg && (builtInCommand === 'link' || builtInCommand === 'unlink' || builtInCommand === 'updates'))
                ? ` "${packageArg}"`
                : '';
            // Add command-specific options
            let commandSpecificOptions = '';
            // Commit command options
            if (builtInCommand === 'commit') {
                if (runConfig.commit?.agentic) {
                    commandSpecificOptions += ' --agentic';
                }
                if (runConfig.commit?.selfReflection) {
                    commandSpecificOptions += ' --self-reflection';
                }
                if (runConfig.commit?.add) {
                    commandSpecificOptions += ' --add';
                }
                if (runConfig.commit?.cached) {
                    commandSpecificOptions += ' --cached';
                }
                if (runConfig.commit?.interactive) {
                    commandSpecificOptions += ' --interactive';
                }
                if (runConfig.commit?.amend) {
                    commandSpecificOptions += ' --amend';
                }
                if (runConfig.commit?.skipFileCheck) {
                    commandSpecificOptions += ' --skip-file-check';
                }
                if (runConfig.commit?.maxAgenticIterations) {
                    commandSpecificOptions += ` --max-agentic-iterations ${runConfig.commit.maxAgenticIterations}`;
                }
                if (runConfig.commit?.allowCommitSplitting) {
                    commandSpecificOptions += ' --allow-commit-splitting';
                }
                if (runConfig.commit?.messageLimit) {
                    commandSpecificOptions += ` --message-limit ${runConfig.commit.messageLimit}`;
                }
                if (runConfig.commit?.maxDiffBytes) {
                    commandSpecificOptions += ` --max-diff-bytes ${runConfig.commit.maxDiffBytes}`;
                }
                if (runConfig.commit?.direction) {
                    commandSpecificOptions += ` --direction "${runConfig.commit.direction}"`;
                }
                if (runConfig.commit?.context) {
                    commandSpecificOptions += ` --context "${runConfig.commit.context}"`;
                }
                // Push option can be boolean or string (remote name)
                if (runConfig.commit?.push) {
                    if (typeof runConfig.commit.push === 'string') {
                        commandSpecificOptions += ` --push "${runConfig.commit.push}"`;
                    }
                    else {
                        commandSpecificOptions += ' --push';
                    }
                }
                // Model-specific options for commit
                if (runConfig.commit?.model) {
                    commandSpecificOptions += ` --model "${runConfig.commit.model}"`;
                }
                if (runConfig.commit?.openaiReasoning) {
                    commandSpecificOptions += ` --openai-reasoning ${runConfig.commit.openaiReasoning}`;
                }
                if (runConfig.commit?.openaiMaxOutputTokens) {
                    commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.commit.openaiMaxOutputTokens}`;
                }
            }
            // Release command options (only for direct 'release' command)
            if (builtInCommand === 'release') {
                if (runConfig.release?.agentic) {
                    commandSpecificOptions += ' --agentic';
                }
                if (runConfig.release?.selfReflection) {
                    commandSpecificOptions += ' --self-reflection';
                }
                if (runConfig.release?.maxAgenticIterations) {
                    commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
                }
                if (runConfig.release?.interactive) {
                    commandSpecificOptions += ' --interactive';
                }
                if (runConfig.release?.from) {
                    commandSpecificOptions += ` --from "${runConfig.release.from}"`;
                }
                if (runConfig.release?.to) {
                    commandSpecificOptions += ` --to "${runConfig.release.to}"`;
                }
                if (runConfig.release?.focus) {
                    commandSpecificOptions += ` --focus "${runConfig.release.focus}"`;
                }
                if (runConfig.release?.context) {
                    commandSpecificOptions += ` --context "${runConfig.release.context}"`;
                }
                if (runConfig.release?.messageLimit) {
                    commandSpecificOptions += ` --message-limit ${runConfig.release.messageLimit}`;
                }
                if (runConfig.release?.maxDiffBytes) {
                    commandSpecificOptions += ` --max-diff-bytes ${runConfig.release.maxDiffBytes}`;
                }
                if (runConfig.release?.noMilestones) {
                    commandSpecificOptions += ' --no-milestones';
                }
                if (runConfig.release?.fromMain) {
                    commandSpecificOptions += ' --from-main';
                }
                // Model-specific options for release
                if (runConfig.release?.model) {
                    commandSpecificOptions += ` --model "${runConfig.release.model}"`;
                }
                if (runConfig.release?.openaiReasoning) {
                    commandSpecificOptions += ` --openai-reasoning ${runConfig.release.openaiReasoning}`;
                }
                if (runConfig.release?.openaiMaxOutputTokens) {
                    commandSpecificOptions += ` --openai-max-output-tokens ${runConfig.release.openaiMaxOutputTokens}`;
                }
            }
            // Publish command options (only agentic flags - publish reads other release config from config file)
            if (builtInCommand === 'publish') {
                // Only pass the agentic-related flags that publish command accepts
                if (runConfig.release?.agentic) {
                    commandSpecificOptions += ' --agentic';
                }
                if (runConfig.release?.selfReflection) {
                    commandSpecificOptions += ' --self-reflection';
                }
                if (runConfig.release?.maxAgenticIterations) {
                    commandSpecificOptions += ` --max-agentic-iterations ${runConfig.release.maxAgenticIterations}`;
                }
                // Publish has its own --from, --interactive, --from-main flags (not from release config)
            }
            // Unlink command options
            if (builtInCommand === 'unlink' && runConfig.tree?.cleanNodeModules) {
                commandSpecificOptions += ' --clean-node-modules';
            }
            // Link/Unlink externals
            if ((builtInCommand === 'link' || builtInCommand === 'unlink') && runConfig.tree?.externals && runConfig.tree.externals.length > 0) {
                commandSpecificOptions += ` --externals ${runConfig.tree.externals.join(' ')}`;
            }
            commandToRun = `kodrdriv ${builtInCommand}${optionsString}${packageArgString}${commandSpecificOptions}`;
            isBuiltInCommand = true;
        }
        else if (cmd) {
            // Custom command mode
            commandToRun = cmd;
        }
2161
|
+
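        // At this point commandToRun is a complete shell command such as
        //   kodrdriv publish --agentic --max-agentic-iterations 3
        // (illustrative; optionsString and packageArgString are assembled earlier in this file
        // and may contribute additional global flags and a package filter).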
        if (commandToRun) {
            // Validate scripts for run command before execution
            const scriptsToValidate = runConfig.__scriptsToValidate;
            if (scriptsToValidate && scriptsToValidate.length > 0) {
                logger.info(`🔍 Validating scripts before execution: ${scriptsToValidate.join(', ')}`);
                const validation = await validateScripts(dependencyGraph.packages, scriptsToValidate);
                if (!validation.valid) {
                    logger.error('');
                    logger.error('❌ Script validation failed. Cannot proceed with execution.');
                    logger.error('');
                    logger.error('💡 To fix this:');
                    logger.error(' 1. Add the missing scripts to the package.json files');
                    logger.error(' 2. Or exclude packages that don\'t need these scripts using --exclude');
                    logger.error(' 3. Or run individual packages that have the required scripts');
                    logger.error('');
                    throw new Error('Script validation failed. See details above.');
                }
            }
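            // Illustrative reading: if scriptsToValidate is ['build', 'test'], validateScripts
            // (implemented outside this excerpt) is expected to flag any package whose
            // package.json lacks either script, producing the guidance logged above.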
            // Validate command for parallel execution if parallel mode is enabled
            if (runConfig.tree?.parallel) {
                const { CommandValidator } = await import('./execution/CommandValidator.js');
                const validation = CommandValidator.validateForParallel(commandToRun, builtInCommand);
                CommandValidator.logValidation(validation);
                if (!validation.valid) {
                    logger.error('');
                    logger.error('Cannot proceed with parallel execution due to validation errors.');
                    logger.error('Run without --parallel flag to execute sequentially.');
                    throw new Error('Command validation failed for parallel execution');
                }
                // Apply recommended concurrency if not explicitly set
                if (!runConfig.tree.maxConcurrency) {
                    const os = await import('os');
                    const recommended = CommandValidator.getRecommendedConcurrency(builtInCommand, os.cpus().length, commandToRun);
                    if (recommended !== os.cpus().length) {
                        const reason = builtInCommand ? builtInCommand : `custom command "${commandToRun}"`;
                        logger.info(`💡 Using recommended concurrency for ${reason}: ${recommended}`);
                        runConfig.tree.maxConcurrency = recommended;
                    }
                }
            }
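            // e.g. on an 8-core machine getRecommendedConcurrency may return a value lower than
            // os.cpus().length for resource-heavy commands; the override applies only when
            // tree.maxConcurrency was left unset (the heuristic itself lives in CommandValidator).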
            // Create set of all package names for inter-project dependency detection
            const allPackageNames = new Set(Array.from(dependencyGraph.packages.keys()));
            // Initialize execution context if not continuing
            if (!executionContext) {
                executionContext = {
                    command: commandToRun,
                    originalConfig: runConfig,
                    publishedVersions: [],
                    completedPackages: [],
                    buildOrder: buildOrder,
                    startTime: new Date(),
                    lastUpdateTime: new Date()
                };
                // Save initial context for commands that support continuation
                if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
                    await saveExecutionContext(executionContext, runConfig.outputDirectory);
                }
            }
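            // Illustrative shape of the persisted context (field names from the literal above;
            // the file is referenced later in this file as .kodrdriv-context):
            //   { command, originalConfig, publishedVersions: [], completedPackages: [],
            //     buildOrder: [...], startTime, lastUpdateTime }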
            // Add spacing before command execution
            logger.info('');
            const executionDescription = isBuiltInCommand ? `built-in command "${builtInCommand}"` : `"${commandToRun}"`;
            logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Executing ${executionDescription} in ${buildOrder.length} packages...`);
            // Add detailed multi-project execution context for debug mode
            if (runConfig.debug) {
                logger.debug('MULTI_PROJECT_PLAN: Execution plan initialized | Total Packages: %d | Command: %s | Built-in: %s | Dry Run: %s | Parallel: %s', buildOrder.length, commandToRun, isBuiltInCommand, isDryRun, runConfig.tree?.parallel || false);
                // Log package execution order with dependencies
                logger.debug('MULTI_PROJECT_ORDER: Package execution sequence:');
                buildOrder.forEach((pkgName, idx) => {
                    const pkgInfo = dependencyGraph.packages.get(pkgName);
                    if (pkgInfo) {
                        const deps = Array.isArray(pkgInfo.dependencies) ? pkgInfo.dependencies : [];
                        const depStr = deps.length > 0
                            ? ` | Dependencies: [${deps.join(', ')}]`
                            : ' | Dependencies: none';
                        logger.debug(' %d. %s%s', idx + 1, pkgName, depStr);
                    }
                });
                // Log dependency levels for parallel execution understanding
                const levels = new Map();
                const calculateLevels = (pkg, visited = new Set()) => {
                    if (levels.has(pkg))
                        return levels.get(pkg);
                    if (visited.has(pkg))
                        return 0; // Circular dependency
                    visited.add(pkg);
                    const pkgInfo = dependencyGraph.packages.get(pkg);
                    const deps = Array.isArray(pkgInfo?.dependencies) ? pkgInfo.dependencies : [];
                    if (!pkgInfo || deps.length === 0) {
                        levels.set(pkg, 0);
                        return 0;
                    }
                    const maxDepLevel = Math.max(...deps.map((dep) => calculateLevels(dep, new Set(visited))));
                    const level = maxDepLevel + 1;
                    levels.set(pkg, level);
                    return level;
                };
                buildOrder.forEach(pkg => calculateLevels(pkg));
                const maxLevel = Math.max(...Array.from(levels.values()));
                logger.debug('MULTI_PROJECT_LEVELS: Dependency depth analysis | Max Depth: %d levels', maxLevel + 1);
                for (let level = 0; level <= maxLevel; level++) {
                    const packagesAtLevel = buildOrder.filter(pkg => levels.get(pkg) === level);
                    logger.debug(' Level %d (%d packages): %s', level, packagesAtLevel.length, packagesAtLevel.join(', '));
                }
                if (runConfig.tree?.parallel) {
                    const os = await import('os');
                    const concurrency = runConfig.tree.maxConcurrency || os.cpus().length;
                    logger.debug('MULTI_PROJECT_PARALLEL: Parallel execution configuration | Max Concurrency: %d | Retry Attempts: %d', concurrency, runConfig.tree.retry?.maxAttempts || 3);
                }
                if (isContinue) {
                    const completed = executionContext?.completedPackages.length || 0;
                    logger.debug('MULTI_PROJECT_RESUME: Continuing previous execution | Completed: %d | Remaining: %d', completed, buildOrder.length - completed);
                }
            }
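            // Worked example of calculateLevels: for a chain a -> b -> c (b depends on a,
            // c depends on b), the levels are a=0, b=1, c=2. Packages sharing a level have no
            // dependency path between them, so they are safe to run concurrently.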
            // Show info for publish commands
            if (isBuiltInCommand && builtInCommand === 'publish') {
                logger.info('Inter-project dependencies will be automatically updated before each publish.');
            }
            let successCount = 0;
            let failedPackage = null;
            // If continuing, start from where we left off
            const startIndex = isContinue && executionContext ? executionContext.completedPackages.length : 0;
            // Check if parallel execution is enabled
            if (runConfig.tree?.parallel) {
                logger.info('🚀 Using parallel execution mode');
                // If dry run, show preview instead of executing
                if (isDryRun) {
                    const preview = await generateDryRunPreview(dependencyGraph, buildOrder, commandToRun, runConfig);
                    return preview;
                }
                // Import parallel execution components
                const { TreeExecutionAdapter, createParallelProgressLogger, formatParallelResult } = await import('./execution/TreeExecutionAdapter.js');
                const os = await import('os');
                // Create task pool
                const adapter = new TreeExecutionAdapter({
                    graph: dependencyGraph,
                    maxConcurrency: runConfig.tree.maxConcurrency || os.cpus().length,
                    command: commandToRun,
                    config: runConfig,
                    checkpointPath: runConfig.outputDirectory,
                    continue: isContinue,
                    maxRetries: runConfig.tree.retry?.maxAttempts || 3,
                    initialRetryDelay: runConfig.tree.retry?.initialDelayMs || 5000,
                    maxRetryDelay: runConfig.tree.retry?.maxDelayMs || 60000,
                    backoffMultiplier: runConfig.tree.retry?.backoffMultiplier || 2
                }, executePackage);
                // Set up progress logging
                createParallelProgressLogger(adapter.getPool(), runConfig);
                // Execute
                const result = await adapter.execute();
                // Format and return result
                const formattedResult = formatParallelResult(result);
                return formattedResult;
            }
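            // With the fallback retry settings above (3 attempts, 5000 ms initial delay, 2x
            // backoff, capped at 60000 ms), a failing package is retried after roughly 5 s and
            // then 10 s before being reported as failed (retry logic lives in the task pool).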
            // Sequential execution
            const executionStartTime = Date.now();
            for (let i = startIndex; i < buildOrder.length; i++) {
                const packageName = buildOrder[i];
                // Skip if already completed (in continue mode)
                if (executionContext && executionContext.completedPackages.includes(packageName)) {
                    successCount++;
                    continue;
                }
                const packageInfo = dependencyGraph.packages.get(packageName);
                const packageLogger = createPackageLogger(packageName, i + 1, buildOrder.length, isDryRun);
                const result = await executePackage(packageName, packageInfo, commandToRun, runConfig, isDryRun, i, buildOrder.length, allPackageNames, isBuiltInCommand);
                if (result.success) {
                    successCount++;
                    // Update context
                    if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
                        executionContext.completedPackages.push(packageName);
                        executionContext.publishedVersions = publishedVersions;
                        executionContext.lastUpdateTime = new Date();
                        await saveExecutionContext(executionContext, runConfig.outputDirectory);
                    }
                    // Add spacing between packages (except after the last one)
                    if (i < buildOrder.length - 1) {
                        logger.info('');
                        logger.info('');
                    }
                }
                else {
                    failedPackage = packageName;
                    const formattedError = formatSubprojectError(packageName, result.error, packageInfo, i + 1, buildOrder.length);
                    if (!isDryRun) {
                        packageLogger.error(`Execution failed`);
                        logger.error(formattedError);
                        logger.error(`Failed after ${successCount} successful packages.`);
                        // Special handling for timeout errors
                        if (result.isTimeoutError) {
                            logger.error('');
                            logger.error('⏰ TIMEOUT DETECTED: This appears to be a timeout error.');
                            logger.error(' This commonly happens when PR checks take longer than expected.');
                            logger.error(' The execution context has been saved for recovery.');
                            logger.error('');
                            // Save context even on timeout for recovery
                            if (executionContext && isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run')) {
                                executionContext.completedPackages.push(packageName);
                                executionContext.publishedVersions = publishedVersions;
                                executionContext.lastUpdateTime = new Date();
                                await saveExecutionContext(executionContext, runConfig.outputDirectory);
                                logger.info('💾 Execution context saved for recovery.');
                            }
                            // For publish commands, provide specific guidance about CI/CD setup
                            if (builtInCommand === 'publish') {
                                logger.error('');
                                logger.error('💡 PUBLISH TIMEOUT TROUBLESHOOTING:');
                                logger.error(' This project may not have CI/CD workflows configured.');
                                logger.error(' Common solutions:');
                                logger.error(' 1. Set up GitHub Actions workflows for this repository');
                                logger.error(' 2. Use --sendit flag to skip user confirmation:');
                                logger.error(` kodrdriv tree publish --sendit`);
                                logger.error(' 3. Or manually promote this package:');
                                logger.error(` kodrdriv tree publish --promote ${packageName}`);
                                logger.error('');
                            }
                        }
                        logger.error(`To resume from this point, run:`);
                        if (isBuiltInCommand) {
                            logger.error(` kodrdriv tree ${builtInCommand} --continue`);
                        }
                        else {
                            logger.error(` kodrdriv tree --continue --cmd "${commandToRun}"`);
                        }
                        // For timeout errors, provide additional recovery instructions
                        if (result.isTimeoutError) {
                            logger.error('');
                            logger.error('🔧 RECOVERY OPTIONS:');
                            if (builtInCommand === 'publish') {
                                logger.error(' 1. Wait for the PR checks to complete, then run:');
                                logger.error(` cd ${packageInfo.path}`);
                                logger.error(` kodrdriv publish`);
                                logger.error(' 2. After the individual publish completes, run:');
                                logger.error(` kodrdriv tree ${builtInCommand} --continue`);
                            }
                            else {
                                logger.error(' 1. Fix any issues in the package, then run:');
                                logger.error(` cd ${packageInfo.path}`);
                                logger.error(` ${commandToRun}`);
                                logger.error(' 2. After the command completes successfully, run:');
                                logger.error(` kodrdriv tree ${builtInCommand} --continue`);
                            }
                            logger.error(' 3. Or promote this package to completed status:');
                            logger.error(` kodrdriv tree ${builtInCommand} --promote ${packageName}`);
                            logger.error(' 4. Or manually edit .kodrdriv-context to mark this package as completed');
                        }
                        // Add clear error summary at the very end
                        logger.error('');
                        logger.error('📋 ERROR SUMMARY:');
                        logger.error(` Project that failed: ${packageName}`);
                        logger.error(` Location: ${packageInfo.path}`);
                        logger.error(` Position in tree: ${i + 1} of ${buildOrder.length} packages`);
                        logger.error(` What failed: ${result.error?.message || 'Unknown error'}`);
                        logger.error('');
                        throw new Error(`Command failed in package ${packageName}`);
                    }
                    break;
                }
            }
            if (!failedPackage) {
                const totalExecutionTime = Date.now() - executionStartTime;
                const totalSeconds = (totalExecutionTime / 1000).toFixed(1);
                const totalMinutes = (totalExecutionTime / 60000).toFixed(1);
                const timeDisplay = totalExecutionTime < 60000
                    ? `${totalSeconds}s`
                    : `${totalMinutes}min (${totalSeconds}s)`;
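                // e.g. 42000 ms renders as "42.0s", while 95000 ms renders as "1.6min (95.0s)"
                // since 95000 / 60000 = 1.583... and toFixed(1) rounds to one decimal place.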
                logger.info('');
                logger.info('═══════════════════════════════════════════════════════════');
                const summary = `${isDryRun ? 'DRY RUN: ' : ''}All ${buildOrder.length} packages completed successfully! 🎉`;
                logger.info(summary);
                logger.info(`⏱️ Total execution time: ${timeDisplay}`);
                logger.info(`📦 Packages processed: ${successCount}/${buildOrder.length}`);
                logger.info('═══════════════════════════════════════════════════════════');
                logger.info('');
                // Clean up context on successful completion
                if (isBuiltInCommand && (builtInCommand === 'publish' || builtInCommand === 'run') && !isDryRun) {
                    await cleanupContext(runConfig.outputDirectory);
                }
                return returnOutput; // Don't duplicate the summary in return string
            }
        }
        return returnOutput;
    }
    catch (error) {
        const errorMessage = `Failed to analyze workspace: ${error.message}`;
        logger.error(errorMessage);
        throw new Error(errorMessage);
    }
    finally {
        // Intentionally preserve the mutex across executions to support multiple runs in the same process (e.g., test suite)
        // Do not destroy here; the process lifecycle will clean up resources.
    }
};
//# sourceMappingURL=tree.js.map