@eldrforge/kodrdriv 1.2.22 → 1.2.24
This diff compares the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/PARALLEL-EXECUTION-FIXES.md +132 -0
- package/PARALLEL_EXECUTION_FIX.md +146 -0
- package/RECOVERY-FIXES.md +72 -0
- package/dist/arguments.js +26 -3
- package/dist/arguments.js.map +1 -1
- package/dist/commands/audio-commit.js +3 -3
- package/dist/commands/audio-commit.js.map +1 -1
- package/dist/commands/audio-review.js +13 -13
- package/dist/commands/audio-review.js.map +1 -1
- package/dist/commands/link.js +13 -13
- package/dist/commands/link.js.map +1 -1
- package/dist/commands/publish.js +200 -146
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/review.js +6 -6
- package/dist/commands/review.js.map +1 -1
- package/dist/commands/select-audio.js +4 -4
- package/dist/commands/select-audio.js.map +1 -1
- package/dist/commands/tree.js +242 -318
- package/dist/commands/tree.js.map +1 -1
- package/dist/commands/unlink.js +8 -8
- package/dist/commands/unlink.js.map +1 -1
- package/dist/commands/versions.js +3 -3
- package/dist/commands/versions.js.map +1 -1
- package/dist/constants.js +4 -4
- package/dist/constants.js.map +1 -1
- package/dist/content/diff.js +5 -2
- package/dist/content/diff.js.map +1 -1
- package/dist/content/files.js +4 -4
- package/dist/content/files.js.map +1 -1
- package/dist/execution/CommandValidator.js +160 -0
- package/dist/execution/CommandValidator.js.map +1 -0
- package/dist/execution/DependencyChecker.js +102 -0
- package/dist/execution/DependencyChecker.js.map +1 -0
- package/dist/execution/DynamicTaskPool.js +455 -0
- package/dist/execution/DynamicTaskPool.js.map +1 -0
- package/dist/execution/RecoveryManager.js +502 -0
- package/dist/execution/RecoveryManager.js.map +1 -0
- package/dist/execution/ResourceMonitor.js +125 -0
- package/dist/execution/ResourceMonitor.js.map +1 -0
- package/dist/execution/Scheduler.js +98 -0
- package/dist/execution/Scheduler.js.map +1 -0
- package/dist/execution/TreeExecutionAdapter.js +170 -0
- package/dist/execution/TreeExecutionAdapter.js.map +1 -0
- package/dist/logging.js +3 -3
- package/dist/logging.js.map +1 -1
- package/dist/ui/ProgressFormatter.js +230 -0
- package/dist/ui/ProgressFormatter.js.map +1 -0
- package/dist/util/checkpointManager.js +168 -0
- package/dist/util/checkpointManager.js.map +1 -0
- package/dist/util/dependencyGraph.js +224 -0
- package/dist/util/dependencyGraph.js.map +1 -0
- package/dist/util/fileLock.js +204 -0
- package/dist/util/fileLock.js.map +1 -0
- package/dist/util/general.js +5 -5
- package/dist/util/general.js.map +1 -1
- package/dist/util/gitMutex.js +116 -0
- package/dist/util/gitMutex.js.map +1 -0
- package/dist/util/mutex.js +96 -0
- package/dist/util/mutex.js.map +1 -0
- package/dist/util/performance.js +4 -4
- package/dist/util/performance.js.map +1 -1
- package/dist/util/safety.js +4 -4
- package/dist/util/safety.js.map +1 -1
- package/dist/util/storage.js +2 -2
- package/dist/util/storage.js.map +1 -1
- package/package.json +9 -9
package/dist/commands/tree.js
CHANGED
@@ -1,5 +1,5 @@
 #!/usr/bin/env node
-import
+import path__default from 'path';
 import fs from 'fs/promises';
 import { exec } from 'child_process';
 import { safeJsonParse, validatePackageJson, getGloballyLinkedPackages, getGitStatusSummary, getLinkedDependencies, getLinkCompatibilityProblems, runSecure, run } from '@eldrforge/git-tools';
@@ -12,92 +12,20 @@ import { execute as execute$3 } from './commit.js';
 import { execute as execute$1 } from './link.js';
 import { execute as execute$2 } from './unlink.js';
 import { execute as execute$4 } from './updates.js';
+import { runGitWithLock } from '../util/gitMutex.js';
+import { scanForPackageJsonFiles, buildDependencyGraph, topologicalSort, parsePackageJson, shouldExclude } from '../util/dependencyGraph.js';
+import { SimpleMutex } from '../util/mutex.js';
 
-function _define_property(obj, key, value) {
-    if (key in obj) {
-        Object.defineProperty(obj, key, {
-            value: value,
-            enumerable: true,
-            configurable: true,
-            writable: true
-        });
-    } else {
-        obj[key] = value;
-    }
-    return obj;
-}
 // Global state to track published versions during tree execution - protected by mutex
 let publishedVersions = [];
 let executionContext = null;
-// Simple mutex to prevent race conditions in global state access
-class SimpleMutex {
-    async lock() {
-        return new Promise((resolve, reject)=>{
-            if (this.destroyed) {
-                reject(new Error('Mutex has been destroyed'));
-                return;
-            }
-            if (!this.locked) {
-                this.locked = true;
-                resolve();
-            } else {
-                this.queue.push(resolve);
-            }
-        });
-    }
-    unlock() {
-        if (this.destroyed) {
-            return;
-        }
-        this.locked = false;
-        const next = this.queue.shift();
-        if (next) {
-            this.locked = true;
-            try {
-                next();
-            } catch {
-                // If resolver throws, unlock and continue with next in queue
-                this.locked = false;
-                const nextInQueue = this.queue.shift();
-                if (nextInQueue) {
-                    this.locked = true;
-                    nextInQueue();
-                }
-            }
-        }
-    }
-    destroy() {
-        this.destroyed = true;
-        this.locked = false;
-        // Reject all queued promises to prevent memory leaks
-        while(this.queue.length > 0){
-            const resolver = this.queue.shift();
-            if (resolver) {
-                try {
-                    // Treat as rejected promise to clean up
-                    resolver(new Error('Mutex destroyed'));
-                } catch {
-                    // Ignore errors from rejected resolvers
-                }
-            }
-        }
-    }
-    isDestroyed() {
-        return this.destroyed;
-    }
-    constructor(){
-        _define_property(this, "locked", false);
-        _define_property(this, "queue", []);
-        _define_property(this, "destroyed", false);
-    }
-}
 const globalStateMutex = new SimpleMutex();
 // Update inter-project dependencies in package.json based on published versions
 const updateInterProjectDependencies = async (packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun)=>{
     const storage = create({
         log: packageLogger.info
     });
-    const packageJsonPath =
+    const packageJsonPath = path__default.join(packageDir, 'package.json');
     if (!await storage.exists(packageJsonPath)) {
         packageLogger.verbose('No package.json found, skipping dependency updates');
         return false;
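
The _define_property helper and the inline SimpleMutex class deleted above now ship as a shared utility (package/dist/util/mutex.js, +96 lines in the file list) and are pulled in through the new import at the top of this hunk. A minimal usage sketch of the lock()/unlock() API the removed class defines, guarding the module-level publishedVersions array; the recordPublishedVersion helper is illustrative, not a function in the package:

    // Hypothetical helper: serialize writes to the shared publishedVersions
    // array when several packages execute concurrently.
    const recordPublishedVersion = async (entry) => {
        await globalStateMutex.lock();     // waits in the promise queue if the lock is held
        try {
            publishedVersions.push(entry); // critical section: one writer at a time
        } finally {
            globalStateMutex.unlock();     // always release, even if push throws
        }
    };
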
@@ -158,7 +86,7 @@ const updateScopedDependencies = async (packageDir, packageLogger, isDryRun, run
     const storage = create({
         log: packageLogger.info
     });
-    const packageJsonPath =
+    const packageJsonPath = path__default.join(packageDir, 'package.json');
     if (!await storage.exists(packageJsonPath)) {
         packageLogger.verbose('No package.json found, skipping scoped dependency updates');
         return false;
@@ -245,7 +173,7 @@ const saveExecutionContext = async (context, outputDirectory)=>{
     const contextFilePath = getContextFilePath(outputDirectory);
     try {
         // Ensure output directory exists
-        await storage.ensureDirectory(
+        await storage.ensureDirectory(path__default.dirname(contextFilePath));
         // Save context with JSON serialization that handles dates
         const contextData = {
             ...context,
@@ -349,7 +277,7 @@ const validateScripts = async (packages, scripts)=>{
     });
     logger.debug(`Validating scripts: ${scripts.join(', ')}`);
     for (const [packageName, packageInfo] of packages){
-        const packageJsonPath =
+        const packageJsonPath = path__default.join(packageInfo.path, 'package.json');
         const missingForPackage = [];
         try {
             const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
@@ -392,7 +320,7 @@ const extractPublishedVersion = async (packageDir, packageLogger)=>{
     const storage = create({
         log: packageLogger.info
     });
-    const packageJsonPath =
+    const packageJsonPath = path__default.join(packageDir, 'package.json');
     try {
         // Get package name from package.json
         const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
@@ -538,168 +466,8 @@ const formatSubprojectError = (packageName, error, _packageInfo, _position, _total
     }
     return lines.join('\n');
 };
-
-const matchesPattern = (filePath, pattern)=>{
-    const regexPattern = pattern.replace(/\\/g, '\\\\') // Escape backslashes
-    .replace(/\*\*/g, '.*') // ** matches any path segments
-    .replace(/\*/g, '[^/]*') // * matches any characters except path separator
-    .replace(/\?/g, '.') // ? matches any single character
-    .replace(/\./g, '\\.'); // Escape literal dots
-    const regex = new RegExp(`^${regexPattern}$`);
-    return regex.test(filePath) || regex.test(path.basename(filePath));
-};
-const shouldExclude = (packageJsonPath, excludedPatterns)=>{
-    if (!excludedPatterns || excludedPatterns.length === 0) {
-        return false;
-    }
-    // Check both the full path and relative path patterns
-    const relativePath = path.relative(process.cwd(), packageJsonPath);
-    return excludedPatterns.some((pattern)=>matchesPattern(packageJsonPath, pattern) || matchesPattern(relativePath, pattern) || matchesPattern(path.dirname(packageJsonPath), pattern) || matchesPattern(path.dirname(relativePath), pattern));
-};
-const scanForPackageJsonFiles = async (directory, excludedPatterns = [])=>{
-    const logger = getLogger();
-    const packageJsonPaths = [];
-    try {
-        // First check if there's a package.json in the specified directory itself
-        const directPackageJsonPath = path.join(directory, 'package.json');
-        try {
-            await fs.access(directPackageJsonPath);
-            // Check if this package should be excluded
-            if (!shouldExclude(directPackageJsonPath, excludedPatterns)) {
-                packageJsonPaths.push(directPackageJsonPath);
-                logger.verbose(`Found package.json at: ${directPackageJsonPath}`);
-            } else {
-                logger.verbose(`Excluding package.json at: ${directPackageJsonPath} (matches exclusion pattern)`);
-            }
-        } catch {
-        // No package.json in the root of this directory, that's fine
-        }
-        // Then scan subdirectories for package.json files
-        const entries = await fs.readdir(directory, {
-            withFileTypes: true
-        });
-        for (const entry of entries){
-            if (entry.isDirectory()) {
-                const subDirPath = path.join(directory, entry.name);
-                const packageJsonPath = path.join(subDirPath, 'package.json');
-                try {
-                    await fs.access(packageJsonPath);
-                    // Check if this package should be excluded
-                    if (shouldExclude(packageJsonPath, excludedPatterns)) {
-                        logger.verbose(`Excluding package.json at: ${packageJsonPath} (matches exclusion pattern)`);
-                        continue;
-                    }
-                    packageJsonPaths.push(packageJsonPath);
-                    logger.verbose(`Found package.json at: ${packageJsonPath}`);
-                } catch {
-                // No package.json in this directory, continue
-                }
-            }
-        }
-    } catch (error) {
-        logger.error(`Failed to scan directory ${directory}: ${error}`);
-        throw error;
-    }
-    return packageJsonPaths;
-};
-const parsePackageJson = async (packageJsonPath)=>{
-    const logger = getLogger();
-    const storage = create({
-        log: logger.info
-    });
-    try {
-        const content = await storage.readFile(packageJsonPath, 'utf-8');
-        const parsed = safeJsonParse(content, packageJsonPath);
-        const packageJson = validatePackageJson(parsed, packageJsonPath);
-        if (!packageJson.name) {
-            throw new Error(`Package at ${packageJsonPath} has no name field`);
-        }
-        const dependencies = new Set();
-        // Collect all types of dependencies
-        const depTypes = [
-            'dependencies',
-            'devDependencies',
-            'peerDependencies',
-            'optionalDependencies'
-        ];
-        for (const depType of depTypes){
-            if (packageJson[depType]) {
-                Object.keys(packageJson[depType]).forEach((dep)=>dependencies.add(dep));
-            }
-        }
-        return {
-            name: packageJson.name,
-            version: packageJson.version || '0.0.0',
-            path: path.dirname(packageJsonPath),
-            dependencies,
-            localDependencies: new Set() // Will be populated later
-        };
-    } catch (error) {
-        logger.error(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
-        throw error;
-    }
-};
-const buildDependencyGraph = async (packageJsonPaths)=>{
-    const logger = getLogger();
-    const packages = new Map();
-    const edges = new Map();
-    // First pass: parse all package.json files
-    for (const packageJsonPath of packageJsonPaths){
-        const packageInfo = await parsePackageJson(packageJsonPath);
-        packages.set(packageInfo.name, packageInfo);
-        logger.verbose(`Parsed package: ${packageInfo.name} at ${packageInfo.path}`);
-    }
-    // Second pass: identify local dependencies and build edges
-    for (const [packageName, packageInfo] of packages){
-        const localDeps = new Set();
-        const edges_set = new Set();
-        for (const dep of packageInfo.dependencies){
-            if (packages.has(dep)) {
-                localDeps.add(dep);
-                edges_set.add(dep);
-                logger.verbose(`${packageName} depends on local package: ${dep}`);
-            }
-        }
-        packageInfo.localDependencies = localDeps;
-        edges.set(packageName, edges_set);
-    }
-    return {
-        packages,
-        edges
-    };
-};
-const topologicalSort = (graph)=>{
-    const logger = getLogger();
-    const { packages, edges } = graph;
-    const visited = new Set();
-    const visiting = new Set();
-    const result = [];
-    const visit = (packageName)=>{
-        if (visited.has(packageName)) {
-            return;
-        }
-        if (visiting.has(packageName)) {
-            throw new Error(`Circular dependency detected involving package: ${packageName}`);
-        }
-        visiting.add(packageName);
-        // Visit all dependencies first
-        const deps = edges.get(packageName) || new Set();
-        for (const dep of deps){
-            visit(dep);
-        }
-        visiting.delete(packageName);
-        visited.add(packageName);
-        result.push(packageName);
-    };
-    // Visit all packages
-    for (const packageName of packages.keys()){
-        if (!visited.has(packageName)) {
-            visit(packageName);
-        }
-    }
-    logger.verbose(`Topological sort completed. Build order determined for ${result.length} packages.`);
-    return result;
-};
+// Note: PackageInfo, DependencyGraph, scanForPackageJsonFiles, parsePackageJson,
+// buildDependencyGraph, and topologicalSort are now imported from ../util/dependencyGraph
 // Execute a single package and return execution result
 const executePackage = async (packageName, packageInfo, commandToRun, runConfig, isDryRun, index, total, allPackageNames, isBuiltInCommand = false)=>{
     const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
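
The roughly 160 lines removed above (glob matching, package.json scanning and parsing, graph construction, topological sort) were extracted to package/dist/util/dependencyGraph.js (+224 lines in the file list) and re-imported at the top of the file. A sketch of the extracted pipeline using the same call signatures that the hunks below rely on; planBuild is an illustrative wrapper, not a function in the package:

    import { scanForPackageJsonFiles, buildDependencyGraph, topologicalSort } from '../util/dependencyGraph.js';

    const planBuild = async (directories, excludedPatterns = []) => {
        let allPackageJsonPaths = [];
        for (const dir of directories) {
            // Finds package.json in dir and its immediate subdirectories,
            // honoring the glob-style exclusion patterns shown in the removed code.
            const found = await scanForPackageJsonFiles(dir, excludedPatterns);
            allPackageJsonPaths = allPackageJsonPaths.concat(found);
        }
        // Graph shape per the removed code: { packages: Map, edges: Map<name, Set> }
        const graph = await buildDependencyGraph(allPackageJsonPaths);
        return topologicalSort(graph); // dependency-first build order; throws on cycles
    };
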
@@ -771,58 +539,61 @@ const executePackage = async (packageName, packageInfo, commandToRun, runConfig,
         packageLogger.debug(`Changed to directory: ${packageDir}`);
     }
     // Handle dependency updates for publish commands before executing (skip during dry run)
+    // Wrap in git lock to prevent parallel packages from conflicting with npm install and git operations
     if (!isDryRun && isBuiltInCommand && commandToRun.includes('publish')) {
-    [48 removed lines whose content was not preserved in this extract]
+        await runGitWithLock(packageDir, async ()=>{
+            let hasAnyUpdates = false;
+            // First, update all scoped dependencies from npm registry
+            const hasScopedUpdates = await updateScopedDependencies(packageDir, packageLogger, isDryRun, runConfig);
+            hasAnyUpdates = hasAnyUpdates || hasScopedUpdates;
+            // Then update inter-project dependencies based on previously published packages
+            if (publishedVersions.length > 0) {
+                packageLogger.info('Updating inter-project dependencies based on previously published packages...');
+                const hasInterProjectUpdates = await updateInterProjectDependencies(packageDir, publishedVersions, allPackageNames, packageLogger, isDryRun);
+                hasAnyUpdates = hasAnyUpdates || hasInterProjectUpdates;
+            }
+            // If either type of update occurred, commit the changes
+            if (hasAnyUpdates) {
+                // Commit the dependency updates using kodrdriv commit
+                packageLogger.info('Committing dependency updates...');
+                packageLogger.info('⏱️ This step may take a few minutes as it generates a commit message using AI...');
+                // Add timeout wrapper around commit execution
+                const commitTimeoutMs = 300000; // 5 minutes
+                const commitPromise = execute$3({
+                    ...runConfig,
+                    dryRun: false
+                });
+                const timeoutPromise = new Promise((_, reject)=>{
+                    setTimeout(()=>reject(new Error(`Commit operation timed out after ${commitTimeoutMs / 1000} seconds`)), commitTimeoutMs);
+                });
+                // Add progress indicator
+                let progressInterval = null;
+                try {
+                    // Start progress indicator
+                    progressInterval = setInterval(()=>{
+                        packageLogger.info('⏳ Still generating commit message... (this can take 1-3 minutes)');
+                    }, 30000); // Every 30 seconds
+                    await Promise.race([
+                        commitPromise,
+                        timeoutPromise
+                    ]);
+                    packageLogger.info('✅ Dependency updates committed successfully');
+                } catch (commitError) {
+                    if (commitError.message.includes('timed out')) {
+                        packageLogger.error(`❌ Commit operation timed out after ${commitTimeoutMs / 1000} seconds`);
+                        packageLogger.error('This usually indicates an issue with the AI service or very large changes');
+                        packageLogger.error('You may need to manually commit the dependency updates');
+                    } else {
+                        packageLogger.warn(`Failed to commit dependency updates: ${commitError.message}`);
+                    }
+                    // Continue with publish anyway - the updates are still in place
+                } finally{
+                    if (progressInterval) {
+                        clearInterval(progressInterval);
+                    }
                 }
             }
-        }
+        }, `${packageName}: dependency updates`);
     }
     if (runConfig.debug || runConfig.verbose) {
         if (isBuiltInCommand) {
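
runGitWithLock comes from the new package/dist/util/gitMutex.js (+116 lines), which this diff does not show. A hypothetical sketch of the shape its call site implies; whether the real module locks globally or per repository is not visible here, so this conservative version uses a single shared mutex and treats repoDir and label as diagnostic inputs:

    import { SimpleMutex } from './mutex.js';

    const gitMutex = new SimpleMutex(); // assumption: one process-wide lock

    export const runGitWithLock = async (repoDir, fn, label) => {
        await gitMutex.lock();
        try {
            // label, e.g. "pkg: dependency updates", presumably used for logging
            return await fn();
        } finally {
            gitMutex.unlock();
        }
    };
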
@@ -981,7 +752,7 @@ const checkTreePublishStatus = async ()=>{
     }
 };
 const execute = async (runConfig)=>{
-    var _runConfig_tree, _runConfig_tree1, _runConfig_tree2, _runConfig_tree3, _runConfig_tree4, _runConfig_tree5, _runConfig_tree6, _runConfig_tree7;
+    var _runConfig_tree, _runConfig_tree1, _runConfig_tree2, _runConfig_tree3, _runConfig_tree4, _runConfig_tree5, _runConfig_tree6, _runConfig_tree7, _runConfig_tree8, _runConfig_tree9, _runConfig_tree10, _runConfig_tree11, _runConfig_tree12, _runConfig_tree13, _runConfig_tree14;
     const logger = getLogger();
     const isDryRun = runConfig.dryRun || false;
     const isContinue = ((_runConfig_tree = runConfig.tree) === null || _runConfig_tree === void 0 ? void 0 : _runConfig_tree.continue) || false;
@@ -1004,6 +775,109 @@ const execute = async (runConfig)=>{
         logger.info('You can now run the tree command with --continue to resume from the next package.');
         return `Package '${promotePackage}' promoted to completed status.`;
     }
+    // Handle parallel execution recovery commands
+    const { loadRecoveryManager } = await import('../execution/RecoveryManager.js');
+    // Handle status-parallel command
+    if ((_runConfig_tree4 = runConfig.tree) === null || _runConfig_tree4 === void 0 ? void 0 : _runConfig_tree4.statusParallel) {
+        var _runConfig_tree15, _runConfig_tree16;
+        logger.info('📊 Checking parallel execution status...');
+        // Need to build dependency graph first
+        const directories = ((_runConfig_tree15 = runConfig.tree) === null || _runConfig_tree15 === void 0 ? void 0 : _runConfig_tree15.directories) || [
+            process.cwd()
+        ];
+        const excludedPatterns = ((_runConfig_tree16 = runConfig.tree) === null || _runConfig_tree16 === void 0 ? void 0 : _runConfig_tree16.exclude) || [];
+        let allPackageJsonPaths = [];
+        for (const targetDirectory of directories){
+            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
+            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
+        }
+        if (allPackageJsonPaths.length === 0) {
+            return 'No packages found';
+        }
+        const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
+        const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
+        if (!recoveryManager) {
+            logger.info('No parallel execution checkpoint found');
+            return 'No active parallel execution found';
+        }
+        const status = await recoveryManager.showStatus();
+        logger.info('\n' + status);
+        return status;
+    }
+    // Handle validate-state command
+    if ((_runConfig_tree5 = runConfig.tree) === null || _runConfig_tree5 === void 0 ? void 0 : _runConfig_tree5.validateState) {
+        var _runConfig_tree17, _runConfig_tree18;
+        logger.info('🔍 Validating checkpoint state...');
+        const directories = ((_runConfig_tree17 = runConfig.tree) === null || _runConfig_tree17 === void 0 ? void 0 : _runConfig_tree17.directories) || [
+            process.cwd()
+        ];
+        const excludedPatterns = ((_runConfig_tree18 = runConfig.tree) === null || _runConfig_tree18 === void 0 ? void 0 : _runConfig_tree18.exclude) || [];
+        let allPackageJsonPaths = [];
+        for (const targetDirectory of directories){
+            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
+            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
+        }
+        if (allPackageJsonPaths.length === 0) {
+            return 'No packages found';
+        }
+        const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
+        const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
+        if (!recoveryManager) {
+            logger.info('No checkpoint found to validate');
+            return 'No checkpoint found';
+        }
+        const validation = recoveryManager.validateState();
+        if (validation.valid) {
+            logger.info('✅ Checkpoint state is valid');
+        } else {
+            logger.error('❌ Checkpoint state has issues:');
+            for (const issue of validation.issues){
+                logger.error(`  • ${issue}`);
+            }
+        }
+        if (validation.warnings.length > 0) {
+            logger.warn('⚠️ Warnings:');
+            for (const warning of validation.warnings){
+                logger.warn(`  • ${warning}`);
+            }
+        }
+        return validation.valid ? 'Checkpoint is valid' : 'Checkpoint has issues';
+    }
+    // Handle parallel execution recovery options (must happen before main execution)
+    const hasRecoveryOptions = ((_runConfig_tree6 = runConfig.tree) === null || _runConfig_tree6 === void 0 ? void 0 : _runConfig_tree6.markCompleted) || ((_runConfig_tree7 = runConfig.tree) === null || _runConfig_tree7 === void 0 ? void 0 : _runConfig_tree7.skipPackages) || ((_runConfig_tree8 = runConfig.tree) === null || _runConfig_tree8 === void 0 ? void 0 : _runConfig_tree8.retryFailed) || ((_runConfig_tree9 = runConfig.tree) === null || _runConfig_tree9 === void 0 ? void 0 : _runConfig_tree9.skipFailed) || ((_runConfig_tree10 = runConfig.tree) === null || _runConfig_tree10 === void 0 ? void 0 : _runConfig_tree10.resetPackage);
+    if (hasRecoveryOptions && runConfig.tree) {
+        var _runConfig_tree_retry;
+        logger.info('🔧 Applying recovery options...');
+        // Build dependency graph
+        const directories = runConfig.tree.directories || [
+            process.cwd()
+        ];
+        const excludedPatterns = runConfig.tree.exclude || [];
+        let allPackageJsonPaths = [];
+        for (const targetDirectory of directories){
+            const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
+            allPackageJsonPaths = allPackageJsonPaths.concat(packageJsonPaths);
+        }
+        const dependencyGraph = await buildDependencyGraph(allPackageJsonPaths);
+        const recoveryManager = await loadRecoveryManager(dependencyGraph, runConfig.outputDirectory);
+        if (!recoveryManager) {
+            logger.error('No checkpoint found for recovery');
+            throw new Error('No checkpoint found. Cannot apply recovery options without an existing checkpoint.');
+        }
+        await recoveryManager.applyRecoveryOptions({
+            markCompleted: runConfig.tree.markCompleted,
+            skipPackages: runConfig.tree.skipPackages,
+            retryFailed: runConfig.tree.retryFailed,
+            skipFailed: runConfig.tree.skipFailed,
+            resetPackage: runConfig.tree.resetPackage,
+            maxRetries: (_runConfig_tree_retry = runConfig.tree.retry) === null || _runConfig_tree_retry === void 0 ? void 0 : _runConfig_tree_retry.maxAttempts
+        });
+        logger.info('✅ Recovery options applied');
+        // If not also continuing, just return
+        if (!isContinue) {
+            return 'Recovery options applied. Use --continue to resume execution.';
+        }
+    }
     // Handle continue mode
     if (isContinue) {
         const savedContext = await loadExecutionContext(runConfig.outputDirectory);
@@ -1042,7 +916,7 @@ const execute = async (runConfig)=>{
         executionContext = null;
     }
     // Check if we're in built-in command mode (tree command with second argument)
-    const builtInCommand = (
+    const builtInCommand = (_runConfig_tree11 = runConfig.tree) === null || _runConfig_tree11 === void 0 ? void 0 : _runConfig_tree11.builtInCommand;
     const supportedBuiltInCommands = [
         'commit',
         'publish',
@@ -1059,8 +933,8 @@ const execute = async (runConfig)=>{
     }
     // Handle run subcommand - convert space-separated scripts to npm run commands
     if (builtInCommand === 'run') {
-        var
-        const packageArgument = (
+        var _runConfig_tree19;
+        const packageArgument = (_runConfig_tree19 = runConfig.tree) === null || _runConfig_tree19 === void 0 ? void 0 : _runConfig_tree19.packageArgument;
         if (!packageArgument) {
             throw new Error('run subcommand requires script names. Usage: kodrdriv tree run "clean build test"');
         }
@@ -1083,11 +957,11 @@ const execute = async (runConfig)=>{
         runConfig.__scriptsToValidate = scripts;
     }
     // Determine the target directories - either specified or current working directory
-    const directories = ((
+    const directories = ((_runConfig_tree12 = runConfig.tree) === null || _runConfig_tree12 === void 0 ? void 0 : _runConfig_tree12.directories) || [
         process.cwd()
     ];
     // Handle link status subcommand
-    if (builtInCommand === 'link' && ((
+    if (builtInCommand === 'link' && ((_runConfig_tree13 = runConfig.tree) === null || _runConfig_tree13 === void 0 ? void 0 : _runConfig_tree13.packageArgument) === 'status') {
         // For tree link status, we want to show status across all packages
         logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running link status across workspace...`);
         // Create a config that will be passed to the link command
@@ -1107,7 +981,7 @@ const execute = async (runConfig)=>{
         }
     }
     // Handle unlink status subcommand
-    if (builtInCommand === 'unlink' && ((
+    if (builtInCommand === 'unlink' && ((_runConfig_tree14 = runConfig.tree) === null || _runConfig_tree14 === void 0 ? void 0 : _runConfig_tree14.packageArgument) === 'status') {
         // For tree unlink status, we want to show status across all packages
         logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running unlink status across workspace...`);
         // Create a config that will be passed to the unlink command
@@ -1132,9 +1006,9 @@ const execute = async (runConfig)=>{
         logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspaces at: ${directories.join(', ')}`);
     }
     try {
-        var
+        var _runConfig_tree20, _runConfig_tree21, _runConfig_tree22, _runConfig_tree23;
         // Get exclusion patterns from config, fallback to empty array
-        const excludedPatterns = ((
+        const excludedPatterns = ((_runConfig_tree20 = runConfig.tree) === null || _runConfig_tree20 === void 0 ? void 0 : _runConfig_tree20.exclude) || [];
         if (excludedPatterns.length > 0) {
             logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Using exclusion patterns: ${excludedPatterns.join(', ')}`);
         }
@@ -1161,13 +1035,13 @@ const execute = async (runConfig)=>{
         logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Determining build order...`);
         let buildOrder = topologicalSort(dependencyGraph);
         // Handle start-from functionality if specified
-        const startFrom = (
+        const startFrom = (_runConfig_tree21 = runConfig.tree) === null || _runConfig_tree21 === void 0 ? void 0 : _runConfig_tree21.startFrom;
         if (startFrom) {
             logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for start package: ${startFrom}`);
             // Resolve the actual package name (can be package name or directory name)
             let startPackageName = null;
             for (const [pkgName, pkgInfo] of dependencyGraph.packages){
-                const dirName =
+                const dirName = path__default.basename(pkgInfo.path);
                 if (dirName === startFrom || pkgName === startFrom) {
                     startPackageName = pkgName;
                     break;
@@ -1184,7 +1058,7 @@ const execute = async (runConfig)=>{
                 for (const packageJsonPath of allPackageJsonPathsForCheck){
                     try {
                         const packageInfo = await parsePackageJson(packageJsonPath);
-                        const dirName =
+                        const dirName = path__default.basename(packageInfo.path);
                         if (dirName === startFrom || packageInfo.name === startFrom) {
                             // Check if this package was excluded
                             if (shouldExclude(packageJsonPath, excludedPatterns)) {
@@ -1202,7 +1076,7 @@ const execute = async (runConfig)=>{
             } else {
                 const availablePackages = buildOrder.map((name)=>{
                     const packageInfo = dependencyGraph.packages.get(name);
-                    return `${
+                    return `${path__default.basename(packageInfo.path)} (${name})`;
                 }).join(', ');
                 throw new Error(`Package directory '${startFrom}' not found. Available packages: ${availablePackages}`);
             }
@@ -1218,13 +1092,13 @@ const execute = async (runConfig)=>{
             logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Starting execution from package '${startFrom}' (${buildOrder.length} of ${originalLength} packages remaining).`);
         }
         // Handle stop-at functionality if specified
-        const stopAt = (
+        const stopAt = (_runConfig_tree22 = runConfig.tree) === null || _runConfig_tree22 === void 0 ? void 0 : _runConfig_tree22.stopAt;
         if (stopAt) {
             logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for stop package: ${stopAt}`);
             // Find the package that matches the stopAt directory name
             const stopIndex = buildOrder.findIndex((packageName)=>{
                 const packageInfo = dependencyGraph.packages.get(packageName);
-                const dirName =
+                const dirName = path__default.basename(packageInfo.path);
                 return dirName === stopAt || packageName === stopAt;
             });
             if (stopIndex === -1) {
@@ -1238,7 +1112,7 @@ const execute = async (runConfig)=>{
                 for (const packageJsonPath of allPackageJsonPathsForCheck){
                     try {
                         const packageInfo = await parsePackageJson(packageJsonPath);
-                        const dirName =
+                        const dirName = path__default.basename(packageInfo.path);
                         if (dirName === stopAt || packageInfo.name === stopAt) {
                             // Check if this package was excluded
                             if (shouldExclude(packageJsonPath, excludedPatterns)) {
@@ -1256,7 +1130,7 @@ const execute = async (runConfig)=>{
             } else {
                 const availablePackages = buildOrder.map((name)=>{
                     const packageInfo = dependencyGraph.packages.get(name);
-                    return `${
+                    return `${path__default.basename(packageInfo.path)} (${name})`;
                 }).join(', ');
                 throw new Error(`Package directory '${stopAt}' not found. Available packages: ${availablePackages}`);
             }
@@ -1301,7 +1175,7 @@ const execute = async (runConfig)=>{
             for (const [packageName, packageInfo] of allPackages){
                 if (packageName === targetPackageName) continue;
                 try {
-                    const packageJsonPath =
+                    const packageJsonPath = path__default.join(packageInfo.path, 'package.json');
                     const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
                     const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
                     const packageJson = validatePackageJson(parsed, packageJsonPath);
@@ -1537,8 +1411,8 @@ const execute = async (runConfig)=>{
         }
         // Handle special "checkout" command that switches all packages to specified branch
        if (builtInCommand === 'checkout') {
-            var
-            const targetBranch = (
+            var _runConfig_tree24;
+            const targetBranch = (_runConfig_tree24 = runConfig.tree) === null || _runConfig_tree24 === void 0 ? void 0 : _runConfig_tree24.packageArgument;
             if (!targetBranch) {
                 throw new Error('checkout subcommand requires a branch name. Usage: kodrdriv tree checkout <branch-name>');
             }
@@ -1717,12 +1591,12 @@ const execute = async (runConfig)=>{
             returnOutput = `\nBuild order: ${buildOrder.join(' → ')}\n`;
         }
         // Execute command if provided (custom command or built-in command)
-        const cmd = (
+        const cmd = (_runConfig_tree23 = runConfig.tree) === null || _runConfig_tree23 === void 0 ? void 0 : _runConfig_tree23.cmd;
         // Determine command to execute
         let commandToRun;
         let isBuiltInCommand = false;
         if (builtInCommand) {
-            var
+            var _runConfig_tree25, _runConfig_tree26, _runConfig_tree27;
             // Built-in command mode: shell out to kodrdriv subprocess
             // Build command with propagated global options
             const globalOptions = [];
@@ -1739,14 +1613,14 @@ const execute = async (runConfig)=>{
             // Build the command with global options
             const optionsString = globalOptions.length > 0 ? ` ${globalOptions.join(' ')}` : '';
             // Add package argument for link/unlink/updates commands
-            const packageArg = (
+            const packageArg = (_runConfig_tree25 = runConfig.tree) === null || _runConfig_tree25 === void 0 ? void 0 : _runConfig_tree25.packageArgument;
             const packageArgString = packageArg && (builtInCommand === 'link' || builtInCommand === 'unlink' || builtInCommand === 'updates') ? ` "${packageArg}"` : '';
             // Add command-specific options
             let commandSpecificOptions = '';
-            if (builtInCommand === 'unlink' && ((
+            if (builtInCommand === 'unlink' && ((_runConfig_tree26 = runConfig.tree) === null || _runConfig_tree26 === void 0 ? void 0 : _runConfig_tree26.cleanNodeModules)) {
                 commandSpecificOptions += ' --clean-node-modules';
             }
-            if ((builtInCommand === 'link' || builtInCommand === 'unlink') && ((
+            if ((builtInCommand === 'link' || builtInCommand === 'unlink') && ((_runConfig_tree27 = runConfig.tree) === null || _runConfig_tree27 === void 0 ? void 0 : _runConfig_tree27.externals) && runConfig.tree.externals.length > 0) {
                 commandSpecificOptions += ` --externals ${runConfig.tree.externals.join(' ')}`;
             }
             commandToRun = `kodrdriv ${builtInCommand}${optionsString}${packageArgString}${commandSpecificOptions}`;
@@ -1756,6 +1630,7 @@ const execute = async (runConfig)=>{
             commandToRun = cmd;
         }
         if (commandToRun) {
+            var _runConfig_tree28, _runConfig_tree29;
             // Validate scripts for run command before execution
             const scriptsToValidate = runConfig.__scriptsToValidate;
             if (scriptsToValidate && scriptsToValidate.length > 0) {
@@ -1773,6 +1648,27 @@ const execute = async (runConfig)=>{
                     throw new Error('Script validation failed. See details above.');
                 }
             }
+            // Validate command for parallel execution if parallel mode is enabled
+            if ((_runConfig_tree28 = runConfig.tree) === null || _runConfig_tree28 === void 0 ? void 0 : _runConfig_tree28.parallel) {
+                const { CommandValidator } = await import('../execution/CommandValidator.js');
+                const validation = CommandValidator.validateForParallel(commandToRun, builtInCommand);
+                CommandValidator.logValidation(validation);
+                if (!validation.valid) {
+                    logger.error('');
+                    logger.error('Cannot proceed with parallel execution due to validation errors.');
+                    logger.error('Run without --parallel flag to execute sequentially.');
+                    throw new Error('Command validation failed for parallel execution');
+                }
+                // Apply recommended concurrency if not explicitly set
+                if (!runConfig.tree.maxConcurrency && builtInCommand) {
+                    const os = await import('os');
+                    const recommended = CommandValidator.getRecommendedConcurrency(builtInCommand, os.cpus().length);
+                    if (recommended !== os.cpus().length) {
+                        logger.info(`💡 Using recommended concurrency for ${builtInCommand}: ${recommended}`);
+                        runConfig.tree.maxConcurrency = recommended;
+                    }
+                }
+            }
             // Create set of all package names for inter-project dependency detection
             const allPackageNames = new Set(Array.from(dependencyGraph.packages.keys()));
             // Initialize execution context if not continuing
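
CommandValidator lives in the new package/dist/execution/CommandValidator.js (+160 lines), also not shown in this diff. A hypothetical sketch consistent with the three call sites above; only the method names and validation.valid are confirmed by the diff, and the rules below are illustrative guesses:

    const CommandValidator = {
        validateForParallel(command, builtInCommand) {
            const errors = [];
            const warnings = [];
            // Example rule: interactive audio commands cannot be multiplexed safely.
            if (/audio-(commit|review)/.test(command)) {
                errors.push('interactive commands cannot run in parallel');
            }
            return { valid: errors.length === 0, errors, warnings };
        },
        getRecommendedConcurrency(builtInCommand, cpuCount) {
            // Example policy: registry- and git-heavy commands get lower concurrency.
            return builtInCommand === 'publish' ? Math.min(2, cpuCount) : cpuCount;
        },
        logValidation(validation) {
            // Presumably prints errors and warnings through the shared logger.
        }
    };
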
@@ -1803,6 +1699,34 @@ const execute = async (runConfig)=>{
             let failedPackage = null;
             // If continuing, start from where we left off
             const startIndex = isContinue && executionContext ? executionContext.completedPackages.length : 0;
+            // Check if parallel execution is enabled
+            if ((_runConfig_tree29 = runConfig.tree) === null || _runConfig_tree29 === void 0 ? void 0 : _runConfig_tree29.parallel) {
+                var _runConfig_tree_retry1, _runConfig_tree_retry2, _runConfig_tree_retry3, _runConfig_tree_retry4;
+                logger.info('🚀 Using parallel execution mode');
+                // Import parallel execution components
+                const { TreeExecutionAdapter, createParallelProgressLogger, formatParallelResult } = await import('../execution/TreeExecutionAdapter.js');
+                const os = await import('os');
+                // Create task pool
+                const adapter = new TreeExecutionAdapter({
+                    graph: dependencyGraph,
+                    maxConcurrency: runConfig.tree.maxConcurrency || os.cpus().length,
+                    command: commandToRun,
+                    config: runConfig,
+                    checkpointPath: runConfig.outputDirectory,
+                    continue: isContinue,
+                    maxRetries: ((_runConfig_tree_retry1 = runConfig.tree.retry) === null || _runConfig_tree_retry1 === void 0 ? void 0 : _runConfig_tree_retry1.maxAttempts) || 3,
+                    initialRetryDelay: ((_runConfig_tree_retry2 = runConfig.tree.retry) === null || _runConfig_tree_retry2 === void 0 ? void 0 : _runConfig_tree_retry2.initialDelayMs) || 5000,
+                    maxRetryDelay: ((_runConfig_tree_retry3 = runConfig.tree.retry) === null || _runConfig_tree_retry3 === void 0 ? void 0 : _runConfig_tree_retry3.maxDelayMs) || 60000,
+                    backoffMultiplier: ((_runConfig_tree_retry4 = runConfig.tree.retry) === null || _runConfig_tree_retry4 === void 0 ? void 0 : _runConfig_tree_retry4.backoffMultiplier) || 2
+                }, executePackage);
+                // Set up progress logging
+                createParallelProgressLogger(adapter.getPool(), runConfig);
+                // Execute
+                const result = await adapter.execute();
+                // Format and return result
+                const formattedResult = formatParallelResult(result);
+                return formattedResult;
+            }
             // Sequential execution
             for(let i = startIndex; i < buildOrder.length; i++){
                 const packageName = buildOrder[i];
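
The retry options above default to maxRetries 3, initialRetryDelay 5000 ms, maxRetryDelay 60000 ms and backoffMultiplier 2, which implies a capped exponential backoff between attempts. A sketch of the presumed delay computation; the actual scheduling lives in the new DynamicTaskPool.js, which this diff does not include:

    const retryDelay = (attempt, initial = 5000, max = 60000, multiplier = 2) =>
        Math.min(initial * Math.pow(multiplier, attempt - 1), max);

    // attempt 1 -> 5000 ms, attempt 2 -> 10000 ms, attempt 3 -> 20000 ms, capped at 60000 ms
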
@@ -1926,5 +1850,5 @@ const execute = async (runConfig)=>{
     }
 };
 
-export { execute };
+export { execute, executePackage };
 //# sourceMappingURL=tree.js.map