@eldrforge/kodrdriv 0.0.41 → 0.0.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/application.js +28 -14
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +38 -15
- package/dist/arguments.js.map +1 -1
- package/dist/commands/commit-tree.js +490 -0
- package/dist/commands/commit-tree.js.map +1 -0
- package/dist/commands/commit.js +36 -14
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/publish-tree.js +197 -1
- package/dist/commands/publish-tree.js.map +1 -1
- package/dist/constants.js +10 -2
- package/dist/constants.js.map +1 -1
- package/dist/types.js +6 -0
- package/dist/types.js.map +1 -1
- package/dist/util/github.js +15 -65
- package/dist/util/github.js.map +1 -1
- package/package.json +2 -2
|
@@ -0,0 +1,490 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import fs from 'fs/promises';
|
|
4
|
+
import { getLogger } from '../logging.js';
|
|
5
|
+
import { create } from '../util/storage.js';
|
|
6
|
+
import { run } from '../util/child.js';
|
|
7
|
+
import { execute as execute$1 } from './commit.js';
|
|
8
|
+
import { safeJsonParse, validatePackageJson } from '../util/validation.js';
|
|
9
|
+
|
|
10
|
+
// Create a package-scoped logger that prefixes all messages.
// Each emitted line carries "[seq/total] packageName:" plus an optional
// "DRY RUN: " marker, so interleaved multi-package output stays readable.
const createPackageLogger = (packageName, sequenceNumber, totalCount, isDryRun = false)=>{
    const baseLogger = getLogger();
    const prefix = `[${sequenceNumber}/${totalCount}] ${packageName}:`;
    const dryRunPrefix = isDryRun ? 'DRY RUN: ' : '';
    // Build one forwarding wrapper per log level instead of repeating the template.
    const wrap = (level)=>(message, ...args)=>baseLogger[level](`${dryRunPrefix}${prefix} ${message}`, ...args);
    return {
        info: wrap('info'),
        warn: wrap('warn'),
        error: wrap('error'),
        debug: wrap('debug'),
        verbose: wrap('verbose'),
        silly: wrap('silly')
    };
};
|
|
24
|
+
// Execute an operation with package context logging for nested operations.
// Wraps `operation` with verbose start/finish messages; on failure logs the
// error under the package prefix and rethrows the original error unchanged.
const withPackageContext = async (packageName, sequenceNumber, totalCount, isDryRun, operation)=>{
    const contextLogger = createPackageLogger(packageName, sequenceNumber, totalCount, isDryRun);
    try {
        contextLogger.verbose(`Starting nested operation...`);
        const outcome = await operation();
        contextLogger.verbose(`Nested operation completed`);
        return outcome;
    } catch (error) {
        contextLogger.error(`Nested operation failed: ${error.message}`);
        throw error;
    }
};
|
|
37
|
+
// Helper function to format subproject error output.
// Produces a multi-line report: a header naming the failed package, then the
// error message (indented), then any non-empty STDERR/STDOUT captured output.
const formatSubprojectError = (packageName, error)=>{
    // Indent every row of `text` by one space; optionally drop blank rows.
    const indent = (text, dropBlank = false)=>{
        let rows = text.split('\n');
        if (dropBlank) {
            rows = rows.filter((row)=>row.trim());
        }
        return rows.map((row)=>` ${row}`).join('\n');
    };
    const output = [
        `❌ Command failed in package ${packageName}:`
    ];
    if (error.message) {
        output.push(indent(error.message));
    }
    if (error.stderr && error.stderr.trim()) {
        output.push(' STDERR:', indent(error.stderr, true));
    }
    if (error.stdout && error.stdout.trim()) {
        output.push(' STDOUT:', indent(error.stdout, true));
    }
    return output.join('\n');
};
|
|
60
|
+
// Test a file path against a simple glob pattern (`**`, `*`, `?` supported).
// Matches either the full path or its basename, so bare names like
// "node_modules" exclude any directory with that name.
//
// BUG FIX: the previous version escaped literal dots LAST, which re-escaped
// the `.` characters just produced by the `**` -> `.*` and `?` -> `.`
// expansions (turning `**` into "zero or more literal dots"), and the
// single-`*` pass also rewrote the `*` inside the `.*` produced for `**`.
// We now escape regex metacharacters first and protect `**` with a
// placeholder that cannot appear in a path.
const matchesPattern = (filePath, pattern)=>{
    const regexPattern = pattern
        .replace(/[.+^${}()|[\]\\]/g, '\\$&') // escape regex metacharacters (not * or ?)
        .replace(/\*\*/g, '\u0000') // protect ** from the single-* pass below
        .replace(/\*/g, '[^/]*') // * matches any characters except path separator
        .replace(/\?/g, '.') // ? matches any single character
        .replace(/\u0000/g, '.*'); // ** matches any path segments
    const regex = new RegExp(`^${regexPattern}$`);
    return regex.test(filePath) || regex.test(path.basename(filePath));
};
|
|
70
|
+
// Decide whether a package.json should be skipped based on exclusion patterns.
// A package is excluded when any pattern matches its absolute path, its
// cwd-relative path, or the directory containing it (in either form).
const shouldExclude = (packageJsonPath, excludedPatterns)=>{
    if (!excludedPatterns || excludedPatterns.length === 0) {
        return false;
    }
    const relativePath = path.relative(process.cwd(), packageJsonPath);
    // All the path forms a pattern is allowed to match against.
    const candidates = [
        packageJsonPath,
        relativePath,
        path.dirname(packageJsonPath),
        path.dirname(relativePath)
    ];
    return excludedPatterns.some((pattern)=>candidates.some((candidate)=>matchesPattern(candidate, pattern)));
};
|
|
78
|
+
// Find package.json files in the immediate subdirectories of `directory`
// (one level deep only), honoring the configured exclusion patterns.
// Returns the list of absolute package.json paths; rethrows scan failures
// after logging them.
const scanForPackageJsonFiles = async (directory, excludedPatterns = [])=>{
    const logger = getLogger();
    const discovered = [];
    try {
        const dirEntries = await fs.readdir(directory, {
            withFileTypes: true
        });
        for (const dirEntry of dirEntries){
            if (!dirEntry.isDirectory()) {
                continue;
            }
            const candidate = path.join(directory, dirEntry.name, 'package.json');
            try {
                await fs.access(candidate);
                if (shouldExclude(candidate, excludedPatterns)) {
                    logger.verbose(`Excluding package.json at: ${candidate} (matches exclusion pattern)`);
                    continue;
                }
                discovered.push(candidate);
                logger.verbose(`Found package.json at: ${candidate}`);
            } catch {
            // No package.json in this directory, continue
            }
        }
    } catch (error) {
        logger.error(`Failed to scan directory ${directory}: ${error}`);
        throw error;
    }
    return discovered;
};
|
|
109
|
+
// Read and validate a single package.json, returning a package-info record:
// { name, version, path (directory), dependencies (Set of all dep names from
// every dependency section), localDependencies (empty Set, filled in later
// during graph construction) }. Throws if the manifest is unreadable,
// invalid, or missing a name; errors are logged before rethrowing.
const parsePackageJson = async (packageJsonPath)=>{
    const logger = getLogger();
    const storage = create({
        log: logger.info
    });
    try {
        const raw = await storage.readFile(packageJsonPath, 'utf-8');
        const packageJson = validatePackageJson(safeJsonParse(raw, packageJsonPath), packageJsonPath);
        if (!packageJson.name) {
            throw new Error(`Package at ${packageJsonPath} has no name field`);
        }
        // Union of every dependency section; local filtering happens later.
        const sections = [
            'dependencies',
            'devDependencies',
            'peerDependencies',
            'optionalDependencies'
        ];
        const dependencies = new Set(sections.flatMap((depType)=>packageJson[depType] ? Object.keys(packageJson[depType]) : []));
        return {
            name: packageJson.name,
            version: packageJson.version || '0.0.0',
            path: path.dirname(packageJsonPath),
            dependencies,
            localDependencies: new Set() // Will be populated later
        };
    } catch (error) {
        logger.error(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
        throw error;
    }
};
|
|
146
|
+
// Build the workspace dependency graph from a list of package.json paths.
// Returns { packages: Map<name, info>, edges: Map<name, Set<localDepName>> }
// where an edge exists only when a dependency is itself one of the scanned
// packages. Also populates each package's `localDependencies` set.
const buildDependencyGraph = async (packageJsonPaths)=>{
    const logger = getLogger();
    const packages = new Map();
    const edges = new Map();
    // Pass 1: parse every manifest and index packages by name.
    for (const packageJsonPath of packageJsonPaths){
        const info = await parsePackageJson(packageJsonPath);
        packages.set(info.name, info);
        logger.verbose(`Parsed package: ${info.name} at ${info.path}`);
    }
    // Pass 2: any dependency that is itself a scanned package becomes an edge.
    for (const [packageName, info] of packages){
        const localDeps = new Set();
        for (const dep of info.dependencies){
            if (!packages.has(dep)) {
                continue;
            }
            localDeps.add(dep);
            logger.verbose(`${packageName} depends on local package: ${dep}`);
        }
        info.localDependencies = localDeps;
        edges.set(packageName, new Set(localDeps));
    }
    return {
        packages,
        edges
    };
};
|
|
175
|
+
// Order packages so every package appears after all of its local
// dependencies (depth-first topological sort). Throws on a dependency cycle.
const topologicalSort = (graph)=>{
    const logger = getLogger();
    const { packages, edges } = graph;
    const done = new Set(); // fully processed nodes
    const inProgress = new Set(); // nodes on the current DFS stack (cycle detection)
    const order = [];
    const visit = (name)=>{
        if (done.has(name)) {
            return;
        }
        if (inProgress.has(name)) {
            throw new Error(`Circular dependency detected involving package: ${name}`);
        }
        inProgress.add(name);
        // Dependencies must appear before their dependents.
        for (const dep of edges.get(name) || new Set()){
            visit(dep);
        }
        inProgress.delete(name);
        done.add(name);
        order.push(name);
    };
    for (const name of packages.keys()){
        visit(name);
    }
    logger.verbose(`Topological sort completed. Build order determined for ${order.length} packages.`);
    return order;
};
|
|
207
|
+
// Group packages into dependency levels for parallel execution.
// Level 0 holds packages with no local dependencies; every other package's
// level is 1 + the maximum level among its dependencies, so all packages in
// one level can safely run concurrently once earlier levels finished.
const groupPackagesByDependencyLevels = (graph, buildOrder)=>{
    const logger = getLogger();
    const { edges } = graph;
    const packageLevels = new Map();
    // Memoized recursive level computation.
    const levelOf = (name)=>{
        if (packageLevels.has(name)) {
            return packageLevels.get(name);
        }
        const deps = edges.get(name) || new Set();
        let level = 0;
        for (const dep of deps){
            level = Math.max(level, levelOf(dep) + 1);
        }
        packageLevels.set(name, level);
        return level;
    };
    // Bucket packages by level, preserving buildOrder within each level.
    const levels = [];
    for (const name of buildOrder){
        const level = levelOf(name);
        while(levels.length <= level){
            levels.push([]);
        }
        levels[level].push(name);
    }
    logger.verbose(`Packages grouped into ${levels.length} dependency levels for parallel execution`);
    levels.forEach((names, i)=>{
        logger.verbose(` Level ${i}: ${names.join(', ')}`);
    });
    return levels;
};
|
|
252
|
+
// Execute commit operations for a single package.
// Runs `git add -A` followed by the nested commit command (execute$1, the
// `commit` command's execute) inside the package's directory. Never throws:
// failures are reported via the return value so the caller controls flow.
//   packageName/packageInfo - package identity; packageInfo.path is used as cwd.
//   runConfig               - forwarded unchanged to the nested commit command.
//   isDryRun                - when true, only logs what would be executed.
//   index/total             - position in the build order for "[i/total]" prefixes.
// Returns: { success: true } or { success: false, error }.
const executeCommitForPackage = async (packageName, packageInfo, runConfig, isDryRun, index, total)=>{
    const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
    const packageDir = packageInfo.path;
    packageLogger.info(`Starting commit operations...`);
    packageLogger.verbose(`Working directory: ${packageDir}`);
    try {
        if (isDryRun) {
            // Dry run: describe the two steps without touching the repo.
            packageLogger.info(`Would execute: git add -A`);
            packageLogger.info(`Would execute: kodrdriv commit`);
            packageLogger.info(`In directory: ${packageDir}`);
        } else {
            // Change to the package directory and run the commands.
            // NOTE(review): process.chdir mutates process-GLOBAL state; when this
            // function runs concurrently for several packages (parallel mode in
            // execute), the working directories can race. Confirm parallel mode is
            // safe, or pass a cwd option to run/execute$1 instead of chdir.
            const originalCwd = process.cwd();
            try {
                process.chdir(packageDir);
                packageLogger.verbose(`Changed to directory: ${packageDir}`);
                // Step 1: Add all changes
                packageLogger.info(`Adding all changes to git...`);
                await withPackageContext(packageName, index + 1, total, isDryRun, async ()=>{
                    await run('git add -A');
                });
                packageLogger.verbose(`Git add completed`);
                // Step 2: Run commit command
                packageLogger.info(`Running commit command...`);
                await withPackageContext(packageName, index + 1, total, isDryRun, async ()=>{
                    await execute$1(runConfig);
                });
                packageLogger.info(`Commit completed successfully`);
                packageLogger.info(`✅ All commit operations completed successfully`);
            } finally{
                // Always restore the original cwd, even when a step failed.
                process.chdir(originalCwd);
                packageLogger.verbose(`Restored working directory to: ${originalCwd}`);
            }
        }
        return {
            success: true
        };
    } catch (error) {
        // Convert any failure into a result object; the caller decides whether
        // to abort the whole tree run.
        packageLogger.error(`❌ Commit operations failed: ${error.message}`);
        return {
            success: false,
            error
        };
    }
};
|
|
298
|
+
// Orchestrates `kodrdriv commit-tree`: scans a workspace for packages, orders
// them by local dependencies, then runs commit operations per package —
// sequentially, or level-by-level in parallel when configured.
//   runConfig - global configuration; reads runConfig.dryRun and the optional
//               runConfig.commitTree { directory, excludedPatterns, startFrom, parallel }.
// Returns a human-readable report of the commit order (plus a success summary
// when every package completed). Throws an Error wrapping any failure (scan
// error, dependency cycle, unknown --start-from package, or a failed commit).
// (The `var _runConfig_commitTree*` temporaries are transpiled optional chaining.)
const execute = async (runConfig)=>{
    var _runConfig_commitTree;
    const logger = getLogger();
    const isDryRun = runConfig.dryRun || false;
    // Determine the target directory - either specified or current working directory
    const targetDirectory = ((_runConfig_commitTree = runConfig.commitTree) === null || _runConfig_commitTree === void 0 ? void 0 : _runConfig_commitTree.directory) || process.cwd();
    logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspace for commit operations at: ${targetDirectory}`);
    try {
        var _runConfig_commitTree1, _runConfig_commitTree2, _runConfig_commitTree3;
        // Get exclusion patterns from config, fallback to empty array
        const excludedPatterns = ((_runConfig_commitTree1 = runConfig.commitTree) === null || _runConfig_commitTree1 === void 0 ? void 0 : _runConfig_commitTree1.excludedPatterns) || [];
        if (excludedPatterns.length > 0) {
            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Using exclusion patterns: ${excludedPatterns.join(', ')}`);
        }
        // Scan for package.json files (one directory level deep).
        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning for package.json files...`);
        const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
        if (packageJsonPaths.length === 0) {
            // Nothing to do: warn and return the message instead of throwing.
            const message = `No package.json files found in subdirectories of ${targetDirectory}`;
            logger.warn(message);
            return message;
        }
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found ${packageJsonPaths.length} package.json files`);
        // Build dependency graph
        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Building dependency graph...`);
        const dependencyGraph = await buildDependencyGraph(packageJsonPaths);
        // Perform topological sort to determine build order
        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Determining build order...`);
        let buildOrder = topologicalSort(dependencyGraph);
        // Handle start-from functionality if specified: --start-from resumes a
        // previously failed run by skipping everything before the named package.
        const startFrom = (_runConfig_commitTree2 = runConfig.commitTree) === null || _runConfig_commitTree2 === void 0 ? void 0 : _runConfig_commitTree2.startFrom;
        if (startFrom) {
            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for start package: ${startFrom}`);
            // Find the package that matches the startFrom directory name
            // (matches either the directory basename or the package name).
            const startIndex = buildOrder.findIndex((packageName)=>{
                const packageInfo = dependencyGraph.packages.get(packageName);
                const dirName = path.basename(packageInfo.path);
                return dirName === startFrom || packageName === startFrom;
            });
            if (startIndex === -1) {
                // Unknown package: list what IS available to help the user.
                const availablePackages = buildOrder.map((name)=>{
                    const packageInfo = dependencyGraph.packages.get(name);
                    return `${path.basename(packageInfo.path)} (${name})`;
                }).join(', ');
                throw new Error(`Package directory '${startFrom}' not found. Available packages: ${availablePackages}`);
            }
            const skippedCount = startIndex;
            buildOrder = buildOrder.slice(startIndex);
            if (skippedCount > 0) {
                logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Resuming from '${startFrom}' - skipping ${skippedCount} package${skippedCount === 1 ? '' : 's'}`);
            }
        }
        // Display results: build the report string AND log each entry.
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Build order determined for commit operations:`);
        let output = `\nCommit Order for ${buildOrder.length} packages${startFrom ? ` (starting from ${startFrom})` : ''}:\n`;
        output += '======================================\n\n';
        buildOrder.forEach((packageName, index)=>{
            const packageInfo = dependencyGraph.packages.get(packageName);
            const localDeps = Array.from(packageInfo.localDependencies);
            output += `${index + 1}. ${packageName} (${packageInfo.version})\n`;
            output += ` Path: ${packageInfo.path}\n`;
            if (localDeps.length > 0) {
                output += ` Local Dependencies: ${localDeps.join(', ')}\n`;
            } else {
                output += ` Local Dependencies: none\n`;
            }
            output += '\n';
            // Log each step
            if (localDeps.length > 0) {
                logger.info(`${index + 1}. ${packageName} (depends on: ${localDeps.join(', ')})`);
            } else {
                logger.info(`${index + 1}. ${packageName} (no local dependencies)`);
            }
        });
        // Execute commit operations
        const useParallel = ((_runConfig_commitTree3 = runConfig.commitTree) === null || _runConfig_commitTree3 === void 0 ? void 0 : _runConfig_commitTree3.parallel) || false;
        const parallelInfo = useParallel ? ' (with parallel execution)' : '';
        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running commit operations (git add -A + kodrdriv commit) in ${buildOrder.length} packages${parallelInfo}...`);
        let successCount = 0;
        let failedPackage = null;
        if (useParallel) {
            // Parallel execution: group packages by dependency levels; packages
            // within a level have no edges between them and run concurrently.
            // NOTE(review): executeCommitForPackage uses process.chdir, which is
            // process-global — concurrent packages may race on cwd; verify.
            const dependencyLevels = groupPackagesByDependencyLevels(dependencyGraph, buildOrder);
            for(let levelIndex = 0; levelIndex < dependencyLevels.length; levelIndex++){
                const currentLevel = dependencyLevels[levelIndex];
                if (currentLevel.length === 1) {
                    const packageName = currentLevel[0];
                    logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Level ${levelIndex + 1}: Executing commit operations for ${packageName}...`);
                } else {
                    logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Level ${levelIndex + 1}: Executing commit operations for ${currentLevel.length} packages in parallel: ${currentLevel.join(', ')}...`);
                }
                // Execute all packages in this level in parallel.
                // NOTE(review): buildOrder.indexOf inside the loop is O(n^2) overall;
                // harmless at workspace scale but a Map would be cleaner.
                const levelPromises = currentLevel.map((packageName)=>{
                    const packageInfo = dependencyGraph.packages.get(packageName);
                    const globalIndex = buildOrder.indexOf(packageName);
                    return executeCommitForPackage(packageName, packageInfo, runConfig, isDryRun, globalIndex, buildOrder.length);
                });
                // Wait for all packages in this level to complete
                const results = await Promise.allSettled(levelPromises);
                // Check results and handle errors
                for(let i = 0; i < results.length; i++){
                    const result = results[i];
                    const packageName = currentLevel[i];
                    const globalIndex = buildOrder.indexOf(packageName);
                    const packageLogger = createPackageLogger(packageName, globalIndex + 1, buildOrder.length, isDryRun);
                    if (result.status === 'fulfilled') {
                        if (result.value.success) {
                            successCount++;
                        } else {
                            // Package failed: print details and resume instructions,
                            // then abort (throw) unless this is a dry run.
                            failedPackage = packageName;
                            const formattedError = formatSubprojectError(packageName, result.value.error);
                            if (!isDryRun) {
                                packageLogger.error(`Commit operations failed`);
                                logger.error(formattedError);
                                logger.error(`Failed after ${successCount} successful packages.`);
                                const packageDir = dependencyGraph.packages.get(packageName).path;
                                const packageDirName = path.basename(packageDir);
                                logger.error(`To resume from this package, run:`);
                                logger.error(` kodrdriv commit-tree --start-from ${packageDirName}`);
                                throw new Error(`Commit operations failed in package ${packageName}`);
                            }
                            break;
                        }
                    } else {
                        // Promise was rejected (executeCommitForPackage normally
                        // returns failures, so this is an unexpected error path).
                        failedPackage = packageName;
                        if (!isDryRun) {
                            packageLogger.error(`Unexpected error: ${result.reason}`);
                            logger.error(`Failed after ${successCount} successful packages.`);
                            const packageDir = dependencyGraph.packages.get(packageName).path;
                            const packageDirName = path.basename(packageDir);
                            logger.error(`To resume from this package, run:`);
                            logger.error(` kodrdriv commit-tree --start-from ${packageDirName}`);
                            throw new Error(`Unexpected error in package ${packageName}`);
                        }
                        break;
                    }
                }
                // If any package failed, stop execution (dry-run failures reach
                // here instead of throwing above).
                if (failedPackage) {
                    break;
                }
                if (currentLevel.length > 1) {
                    logger.info(`✅ Level ${levelIndex + 1} completed: all ${currentLevel.length} packages finished successfully`);
                } else if (currentLevel.length === 1 && successCount > 0) {
                    const packageName = currentLevel[0];
                    const globalIndex = buildOrder.indexOf(packageName);
                    const packageLogger = createPackageLogger(packageName, globalIndex + 1, buildOrder.length, isDryRun);
                    packageLogger.info(`✅ Level ${levelIndex + 1} completed successfully`);
                }
            }
        } else {
            // Sequential execution: one package at a time in build order,
            // stopping at the first failure (mirrors the parallel error path).
            for(let i = 0; i < buildOrder.length; i++){
                const packageName = buildOrder[i];
                const packageInfo = dependencyGraph.packages.get(packageName);
                const packageLogger = createPackageLogger(packageName, i + 1, buildOrder.length, isDryRun);
                const result = await executeCommitForPackage(packageName, packageInfo, runConfig, isDryRun, i, buildOrder.length);
                if (result.success) {
                    successCount++;
                } else {
                    failedPackage = packageName;
                    const formattedError = formatSubprojectError(packageName, result.error);
                    if (!isDryRun) {
                        packageLogger.error(`Commit operations failed`);
                        logger.error(formattedError);
                        logger.error(`Failed after ${successCount} successful packages.`);
                        const packageDir = packageInfo.path;
                        const packageDirName = path.basename(packageDir);
                        logger.error(`To resume from this package, run:`);
                        logger.error(` kodrdriv commit-tree --start-from ${packageDirName}`);
                        throw new Error(`Commit operations failed in package ${packageName}`);
                    }
                    break;
                }
            }
        }
        if (!failedPackage) {
            const summary = `${isDryRun ? 'DRY RUN: ' : ''}All ${buildOrder.length} packages completed commit operations successfully! 🎉`;
            logger.info(summary);
            return output + `\n${summary}\n`;
        }
        // A dry-run failure falls through here: return the report without summary.
        return output;
    } catch (error) {
        // Wrap every failure uniformly so callers see a consistent message.
        const errorMessage = `Failed to execute commit-tree: ${error.message}`;
        logger.error(errorMessage);
        throw new Error(errorMessage);
    }
};
|
|
488
|
+
|
|
489
|
+
export { execute };
|
|
490
|
+
//# sourceMappingURL=commit-tree.js.map
|