@eldrforge/kodrdriv 0.0.48 → 0.0.49
This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/README.md +0 -1
- package/dist/application.js +18 -56
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +5 -55
- package/dist/arguments.js.map +1 -1
- package/dist/constants.js +2 -21
- package/dist/constants.js.map +1 -1
- package/dist/types.js +0 -15
- package/dist/types.js.map +1 -1
- package/package.json +1 -1
- package/dist/commands/commit-tree.js +0 -490
- package/dist/commands/commit-tree.js.map +0 -1
- package/dist/commands/publish-tree.js +0 -674
- package/dist/commands/publish-tree.js.map +0 -1
package/dist/commands/publish-tree.js
@@ -1,674 +0,0 @@
-#!/usr/bin/env node
-import path from 'path';
-import fs from 'fs/promises';
-import { getLogger, getDryRunLogger } from '../logging.js';
-import { create } from '../util/storage.js';
-import { run } from '../util/child.js';
-import { execute as execute$1 } from './publish.js';
-import { safeJsonParse, validatePackageJson } from '../util/validation.js';
-
-// Create a package-scoped logger that prefixes all messages
-const createPackageLogger = (packageName, sequenceNumber, totalCount, isDryRun = false)=>{
-    const baseLogger = getLogger();
-    const prefix = `[${sequenceNumber}/${totalCount}] ${packageName}:`;
-    const dryRunPrefix = isDryRun ? 'DRY RUN: ' : '';
-    return {
-        info: (message, ...args)=>baseLogger.info(`${dryRunPrefix}${prefix} ${message}`, ...args),
-        warn: (message, ...args)=>baseLogger.warn(`${dryRunPrefix}${prefix} ${message}`, ...args),
-        error: (message, ...args)=>baseLogger.error(`${dryRunPrefix}${prefix} ${message}`, ...args),
-        debug: (message, ...args)=>baseLogger.debug(`${dryRunPrefix}${prefix} ${message}`, ...args),
-        verbose: (message, ...args)=>baseLogger.verbose(`${dryRunPrefix}${prefix} ${message}`, ...args),
-        silly: (message, ...args)=>baseLogger.silly(`${dryRunPrefix}${prefix} ${message}`, ...args)
-    };
-};
-// Execute an operation with package context logging for nested operations
-const withPackageContext = async (packageName, sequenceNumber, totalCount, isDryRun, operation)=>{
-    const packageLogger = createPackageLogger(packageName, sequenceNumber, totalCount, isDryRun);
-    // For now, just execute the operation directly
-    // In the future, we could implement more sophisticated context passing
-    try {
-        packageLogger.verbose(`Starting nested operation...`);
-        const result = await operation();
-        packageLogger.verbose(`Nested operation completed`);
-        return result;
-    } catch (error) {
-        packageLogger.error(`Nested operation failed: ${error.message}`);
-        throw error;
-    }
-};
-// Helper function to format subproject error output
-const formatSubprojectError = (packageName, error)=>{
-    const lines = [];
-    lines.push(`❌ Script failed in package ${packageName}:`);
-    // Format the main error message with indentation
-    if (error.message) {
-        const indentedMessage = error.message.split('\n').map((line)=>` ${line}`).join('\n');
-        lines.push(indentedMessage);
-    }
-    // If there's stderr output, show it indented as well
-    if (error.stderr && error.stderr.trim()) {
-        lines.push(' STDERR:');
-        const indentedStderr = error.stderr.split('\n').filter((line)=>line.trim()).map((line)=>` ${line}`).join('\n');
-        lines.push(indentedStderr);
-    }
-    // If there's stdout output, show it indented as well
-    if (error.stdout && error.stdout.trim()) {
-        lines.push(' STDOUT:');
-        const indentedStdout = error.stdout.split('\n').filter((line)=>line.trim()).map((line)=>` ${line}`).join('\n');
-        lines.push(indentedStdout);
-    }
-    return lines.join('\n');
-};
-const matchesPattern = (filePath, pattern)=>{
-    // Convert simple glob patterns to regex
-    const regexPattern = pattern.replace(/\*\*/g, '.*') // ** matches any path segments
-    .replace(/\*/g, '[^/]*') // * matches any characters except path separator
-    .replace(/\?/g, '.') // ? matches any single character
-    .replace(/\./g, '\\.'); // Escape literal dots
-    const regex = new RegExp(`^${regexPattern}$`);
-    return regex.test(filePath) || regex.test(path.basename(filePath));
-};
-const shouldExclude = (packageJsonPath, excludedPatterns)=>{
-    if (!excludedPatterns || excludedPatterns.length === 0) {
-        return false;
-    }
-    // Check both the full path and relative path patterns
-    const relativePath = path.relative(process.cwd(), packageJsonPath);
-    return excludedPatterns.some((pattern)=>matchesPattern(packageJsonPath, pattern) || matchesPattern(relativePath, pattern) || matchesPattern(path.dirname(packageJsonPath), pattern) || matchesPattern(path.dirname(relativePath), pattern));
-};
-const scanForPackageJsonFiles = async (directory, excludedPatterns = [])=>{
-    const logger = getLogger();
-    const packageJsonPaths = [];
-    try {
-        const entries = await fs.readdir(directory, {
-            withFileTypes: true
-        });
-        for (const entry of entries){
-            if (entry.isDirectory()) {
-                const subDirPath = path.join(directory, entry.name);
-                const packageJsonPath = path.join(subDirPath, 'package.json');
-                try {
-                    await fs.access(packageJsonPath);
-                    // Check if this package should be excluded
-                    if (shouldExclude(packageJsonPath, excludedPatterns)) {
-                        logger.verbose(`Excluding package.json at: ${packageJsonPath} (matches exclusion pattern)`);
-                        continue;
-                    }
-                    packageJsonPaths.push(packageJsonPath);
-                    logger.verbose(`Found package.json at: ${packageJsonPath}`);
-                } catch {
-                    // No package.json in this directory, continue
-                }
-            }
-        }
-    } catch (error) {
-        logger.error(`Failed to scan directory ${directory}: ${error}`);
-        throw error;
-    }
-    return packageJsonPaths;
-};
-const parsePackageJson = async (packageJsonPath)=>{
-    const logger = getLogger();
-    const storage = create({
-        log: logger.info
-    });
-    try {
-        const content = await storage.readFile(packageJsonPath, 'utf-8');
-        const parsed = safeJsonParse(content, packageJsonPath);
-        const packageJson = validatePackageJson(parsed, packageJsonPath);
-        if (!packageJson.name) {
-            throw new Error(`Package at ${packageJsonPath} has no name field`);
-        }
-        const dependencies = new Set();
-        // Collect all types of dependencies
-        const depTypes = [
-            'dependencies',
-            'devDependencies',
-            'peerDependencies',
-            'optionalDependencies'
-        ];
-        for (const depType of depTypes){
-            if (packageJson[depType]) {
-                Object.keys(packageJson[depType]).forEach((dep)=>dependencies.add(dep));
-            }
-        }
-        return {
-            name: packageJson.name,
-            version: packageJson.version || '0.0.0',
-            path: path.dirname(packageJsonPath),
-            dependencies,
-            localDependencies: new Set() // Will be populated later
-        };
-    } catch (error) {
-        logger.error(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
-        throw error;
-    }
-};
-const buildDependencyGraph = async (packageJsonPaths)=>{
-    const logger = getLogger();
-    const packages = new Map();
-    const edges = new Map();
-    // First pass: parse all package.json files
-    for (const packageJsonPath of packageJsonPaths){
-        const packageInfo = await parsePackageJson(packageJsonPath);
-        packages.set(packageInfo.name, packageInfo);
-        logger.verbose(`Parsed package: ${packageInfo.name} at ${packageInfo.path}`);
-    }
-    // Second pass: identify local dependencies and build edges
-    for (const [packageName, packageInfo] of packages){
-        const localDeps = new Set();
-        const edges_set = new Set();
-        for (const dep of packageInfo.dependencies){
-            if (packages.has(dep)) {
-                localDeps.add(dep);
-                edges_set.add(dep);
-                logger.verbose(`${packageName} depends on local package: ${dep}`);
-            }
-        }
-        packageInfo.localDependencies = localDeps;
-        edges.set(packageName, edges_set);
-    }
-    return {
-        packages,
-        edges
-    };
-};
-const topologicalSort = (graph)=>{
-    const logger = getLogger();
-    const { packages, edges } = graph;
-    const visited = new Set();
-    const visiting = new Set();
-    const result = [];
-    const visit = (packageName)=>{
-        if (visited.has(packageName)) {
-            return;
-        }
-        if (visiting.has(packageName)) {
-            throw new Error(`Circular dependency detected involving package: ${packageName}`);
-        }
-        visiting.add(packageName);
-        // Visit all dependencies first
-        const deps = edges.get(packageName) || new Set();
-        for (const dep of deps){
-            visit(dep);
-        }
-        visiting.delete(packageName);
-        visited.add(packageName);
-        result.push(packageName);
-    };
-    // Visit all packages
-    for (const packageName of packages.keys()){
-        if (!visited.has(packageName)) {
-            visit(packageName);
-        }
-    }
-    logger.verbose(`Topological sort completed. Build order determined for ${result.length} packages.`);
-    return result;
-};
-// Per-package prechecks that apply to each individual package
-const runPackagePrechecks = async (packageName, packageInfo, runConfig, index, total)=>{
-    var _runConfig_publish;
-    const isDryRun = runConfig.dryRun || false;
-    const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
-    const storage = create({
-        log: packageLogger.info
-    });
-    const packageDir = packageInfo.path;
-    packageLogger.verbose('Running package prechecks...');
-    // Check if prepublishOnly script exists in package.json
-    packageLogger.verbose('Checking for prepublishOnly script...');
-    const packageJsonPath = path.join(packageDir, 'package.json');
-    if (!await storage.exists(packageJsonPath)) {
-        if (!isDryRun) {
-            throw new Error(`package.json not found in ${packageDir}`);
-        } else {
-            packageLogger.warn(`package.json not found in ${packageDir}`);
-        }
-    } else {
-        var _packageJson_scripts;
-        let packageJson;
-        try {
-            const packageJsonContents = await storage.readFile(packageJsonPath, 'utf-8');
-            const parsed = safeJsonParse(packageJsonContents, packageJsonPath);
-            packageJson = validatePackageJson(parsed, packageJsonPath);
-        } catch {
-            if (!isDryRun) {
-                throw new Error(`Failed to parse package.json in ${packageDir}. Please ensure it contains valid JSON.`);
-            } else {
-                packageLogger.warn(`Failed to parse package.json in ${packageDir}. Please ensure it contains valid JSON.`);
-            }
-        }
-        if (packageJson && !((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.prepublishOnly)) {
-            if (!isDryRun) {
-                throw new Error(`prepublishOnly script is required in package.json but was not found in ${packageDir}. Please add a prepublishOnly script that runs your pre-flight checks (e.g., clean, lint, build, test).`);
-            } else {
-                packageLogger.warn(`prepublishOnly script is required in package.json but was not found in ${packageDir}.`);
-            }
-        }
-    }
-    // Check required environment variables for this package
-    packageLogger.verbose('Checking required environment variables...');
-    const coreRequiredEnvVars = ((_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.requiredEnvVars) || [];
-    // Scan for .npmrc environment variables in this package directory
-    const npmrcEnvVars = [];
-    if (!isDryRun) {
-        const npmrcPath = path.join(packageDir, '.npmrc');
-        if (await storage.exists(npmrcPath)) {
-            try {
-                const npmrcContent = await storage.readFile(npmrcPath, 'utf-8');
-                const envVarMatches = npmrcContent.match(/\$\{([^}]+)\}|\$([A-Z_][A-Z0-9_]*)/g);
-                if (envVarMatches) {
-                    for (const match of envVarMatches){
-                        const varName = match.replace(/\$\{|\}|\$/g, '');
-                        if (varName && !npmrcEnvVars.includes(varName)) {
-                            npmrcEnvVars.push(varName);
-                        }
-                    }
-                }
-            } catch (error) {
-                packageLogger.warn(`Failed to read .npmrc file at ${npmrcPath}: ${error.message}`);
-            }
-        }
-    }
-    const allRequiredEnvVars = [
-        ...new Set([
-            ...coreRequiredEnvVars,
-            ...npmrcEnvVars
-        ])
-    ];
-    if (allRequiredEnvVars.length > 0) {
-        packageLogger.verbose(`Required environment variables: ${allRequiredEnvVars.join(', ')}`);
-        const missingEnvVars = [];
-        for (const envVar of allRequiredEnvVars){
-            if (!process.env[envVar]) {
-                missingEnvVars.push(envVar);
-            }
-        }
-        if (missingEnvVars.length > 0) {
-            if (isDryRun) {
-                packageLogger.warn(`Missing required environment variables: ${missingEnvVars.join(', ')}`);
-            } else {
-                throw new Error(`Missing required environment variables for ${packageName}: ${missingEnvVars.join(', ')}. Please set these environment variables before running publish-tree.`);
-            }
-        }
-    }
-    packageLogger.verbose('Package prechecks passed.');
-};
-// Run all prechecks for publish-tree operation
-const runTreePrechecks = async (graph, buildOrder, runConfig)=>{
-    const isDryRun = runConfig.dryRun || false;
-    const logger = getDryRunLogger(isDryRun);
-    logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Running prechecks for all ${buildOrder.length} packages...`);
-    // Run package-level prechecks for each package
-    const failedPackages = [];
-    for(let i = 0; i < buildOrder.length; i++){
-        const packageName = buildOrder[i];
-        const packageInfo = graph.packages.get(packageName);
-        try {
-            await runPackagePrechecks(packageName, packageInfo, runConfig, i, buildOrder.length);
-        } catch (error) {
-            failedPackages.push({
-                name: packageName,
-                error: error.message
-            });
-            if (!isDryRun) {
-                // Continue checking other packages to give a complete picture
-                const packageLogger = createPackageLogger(packageName, i + 1, buildOrder.length, isDryRun);
-                packageLogger.error(`Prechecks failed: ${error.message}`);
-            }
-        }
-    }
-    if (failedPackages.length > 0 && !isDryRun) {
-        logger.error(`❌ Prechecks failed for ${failedPackages.length} package${failedPackages.length === 1 ? '' : 's'}:`);
-        logger.error('');
-        for (const failed of failedPackages){
-            logger.error(` • ${failed.name}: ${failed.error}`);
-        }
-        logger.error('');
-        logger.error('📋 To fix these issues:');
-        logger.error('');
-        logger.error(' 1. For missing prepublishOnly scripts:');
-        logger.error(' Add a "prepublishOnly" script to package.json that runs pre-flight checks');
-        logger.error(' Example: "prepublishOnly": "npm run clean && npm run lint && npm run build && npm run test"');
-        logger.error('');
-        logger.error(' 2. For missing environment variables:');
-        logger.error(' Set the required environment variables in your shell or .env file');
-        logger.error(' Check your .npmrc files for variable references like ${NPM_TOKEN}');
-        logger.error('');
-        logger.error(' 3. For invalid package.json files:');
-        logger.error(' Fix JSON syntax errors and ensure all required fields are present');
-        logger.error('');
-        logger.error('💡 After fixing these issues, re-run the command to continue with the publish process.');
-        throw new Error(`Prechecks failed for ${failedPackages.length} package${failedPackages.length === 1 ? '' : 's'}. Please fix the issues above and try again.`);
-    }
-    if (isDryRun && failedPackages.length > 0) {
-        logger.warn(`DRY RUN: Found potential issues in ${failedPackages.length} package${failedPackages.length === 1 ? '' : 's'} that would cause publish to fail.`);
-    }
-    logger.info(`${isDryRun ? 'DRY RUN: ' : ''}All prechecks passed for ${buildOrder.length} packages.`);
-};
-// Group packages into dependency levels for parallel execution
-const groupPackagesByDependencyLevels = (graph, buildOrder)=>{
-    const logger = getLogger();
-    const { edges } = graph;
-    const levels = [];
-    const packageLevels = new Map();
-    // Calculate the dependency level for each package
-    const calculateLevel = (packageName)=>{
-        if (packageLevels.has(packageName)) {
-            return packageLevels.get(packageName);
-        }
-        const deps = edges.get(packageName) || new Set();
-        if (deps.size === 0) {
-            // No dependencies - this is level 0
-            packageLevels.set(packageName, 0);
-            return 0;
-        }
-        // Level is 1 + max level of dependencies
-        let maxDepLevel = -1;
-        for (const dep of deps){
-            const depLevel = calculateLevel(dep);
-            maxDepLevel = Math.max(maxDepLevel, depLevel);
-        }
-        const level = maxDepLevel + 1;
-        packageLevels.set(packageName, level);
-        return level;
-    };
-    // Calculate levels for all packages
-    for (const packageName of buildOrder){
-        calculateLevel(packageName);
-    }
-    // Group packages by their levels
-    for (const packageName of buildOrder){
-        const level = packageLevels.get(packageName);
-        while(levels.length <= level){
-            levels.push([]);
-        }
-        levels[level].push(packageName);
-    }
-    logger.verbose(`Packages grouped into ${levels.length} dependency levels for parallel execution`);
-    for(let i = 0; i < levels.length; i++){
-        logger.verbose(` Level ${i}: ${levels[i].join(', ')}`);
-    }
-    return levels;
-};
-// Execute a single package and return execution result
-const executePackage = async (packageName, packageInfo, commandToRun, shouldPublish, runConfig, isDryRun, index, total)=>{
-    const packageLogger = createPackageLogger(packageName, index + 1, total, isDryRun);
-    const packageDir = packageInfo.path;
-    packageLogger.info(`Starting execution...`);
-    packageLogger.verbose(`Working directory: ${packageDir}`);
-    try {
-        if (isDryRun) {
-            if (shouldPublish) {
-                packageLogger.info(`Would execute publish command directly`);
-            } else {
-                // Use main logger for the specific message tests expect
-                const logger = getLogger();
-                logger.info(`DRY RUN: Would execute: ${commandToRun}`);
-                packageLogger.info(`In directory: ${packageDir}`);
-            }
-        } else {
-            // Change to the package directory and run the command
-            const originalCwd = process.cwd();
-            try {
-                process.chdir(packageDir);
-                packageLogger.verbose(`Changed to directory: ${packageDir}`);
-                if (shouldPublish) {
-                    packageLogger.info(`Starting publish process...`);
-                    // Call publish command with package context so all nested logs are prefixed
-                    await withPackageContext(packageName, index + 1, total, isDryRun, async ()=>{
-                        await execute$1(runConfig);
-                    });
-                    packageLogger.info(`Publish completed successfully`);
-                } else {
-                    packageLogger.info(`Executing command: ${commandToRun}`);
-                    // Wrap command execution in package context
-                    await withPackageContext(packageName, index + 1, total, isDryRun, async ()=>{
-                        await run(commandToRun); // Non-null assertion since we're inside if (commandToRun)
-                    });
-                    packageLogger.info(`Command completed successfully`);
-                }
-                packageLogger.info(`✅ Execution completed successfully`);
-            } finally{
-                process.chdir(originalCwd);
-                packageLogger.verbose(`Restored working directory to: ${originalCwd}`);
-            }
-        }
-        return {
-            success: true
-        };
-    } catch (error) {
-        packageLogger.error(`❌ Execution failed: ${error.message}`);
-        return {
-            success: false,
-            error
-        };
-    }
-};
-const execute = async (runConfig)=>{
-    var _runConfig_publishTree;
-    const logger = getLogger();
-    const isDryRun = runConfig.dryRun || false;
-    // Determine the target directory - either specified or current working directory
-    const targetDirectory = ((_runConfig_publishTree = runConfig.publishTree) === null || _runConfig_publishTree === void 0 ? void 0 : _runConfig_publishTree.directory) || process.cwd();
-    logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Analyzing workspace at: ${targetDirectory}`);
-    try {
-        var _runConfig_publishTree1, _runConfig_publishTree2, _runConfig_publishTree3, _runConfig_publishTree4, _runConfig_publishTree5, _runConfig_publishTree6;
-        // Get exclusion patterns from config, fallback to empty array
-        const excludedPatterns = ((_runConfig_publishTree1 = runConfig.publishTree) === null || _runConfig_publishTree1 === void 0 ? void 0 : _runConfig_publishTree1.excludedPatterns) || [];
-        if (excludedPatterns.length > 0) {
-            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Using exclusion patterns: ${excludedPatterns.join(', ')}`);
-        }
-        // Scan for package.json files
-        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Scanning for package.json files...`);
-        const packageJsonPaths = await scanForPackageJsonFiles(targetDirectory, excludedPatterns);
-        if (packageJsonPaths.length === 0) {
-            const message = `No package.json files found in subdirectories of ${targetDirectory}`;
-            logger.warn(message);
-            return message;
-        }
-        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found ${packageJsonPaths.length} package.json files`);
-        // Build dependency graph
-        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Building dependency graph...`);
-        const dependencyGraph = await buildDependencyGraph(packageJsonPaths);
-        // Perform topological sort to determine build order
-        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Determining build order...`);
-        let buildOrder = topologicalSort(dependencyGraph);
-        // Handle start-from functionality if specified
-        const startFrom = (_runConfig_publishTree2 = runConfig.publishTree) === null || _runConfig_publishTree2 === void 0 ? void 0 : _runConfig_publishTree2.startFrom;
-        if (startFrom) {
-            logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Looking for start package: ${startFrom}`);
-            // Find the package that matches the startFrom directory name
-            const startIndex = buildOrder.findIndex((packageName)=>{
-                const packageInfo = dependencyGraph.packages.get(packageName);
-                const dirName = path.basename(packageInfo.path);
-                return dirName === startFrom || packageName === startFrom;
-            });
-            if (startIndex === -1) {
-                const availablePackages = buildOrder.map((name)=>{
-                    const packageInfo = dependencyGraph.packages.get(name);
-                    return `${path.basename(packageInfo.path)} (${name})`;
-                }).join(', ');
-                throw new Error(`Package directory '${startFrom}' not found. Available packages: ${availablePackages}`);
-            }
-            const skippedCount = startIndex;
-            buildOrder = buildOrder.slice(startIndex);
-            if (skippedCount > 0) {
-                logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Resuming from '${startFrom}' - skipping ${skippedCount} package${skippedCount === 1 ? '' : 's'}`);
-            }
-        }
-        // Display results
-        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Build order determined:`);
-        let output = `\nBuild Order for ${buildOrder.length} packages${startFrom ? ` (starting from ${startFrom})` : ''}:\n`;
-        output += '==========================================\n\n';
-        buildOrder.forEach((packageName, index)=>{
-            const packageInfo = dependencyGraph.packages.get(packageName);
-            const localDeps = Array.from(packageInfo.localDependencies);
-            output += `${index + 1}. ${packageName} (${packageInfo.version})\n`;
-            output += ` Path: ${packageInfo.path}\n`;
-            if (localDeps.length > 0) {
-                output += ` Local Dependencies: ${localDeps.join(', ')}\n`;
-            } else {
-                output += ` Local Dependencies: none\n`;
-            }
-            output += '\n';
-            // Log each step
-            if (localDeps.length > 0) {
-                logger.info(`${index + 1}. ${packageName} (depends on: ${localDeps.join(', ')})`);
-            } else {
-                logger.info(`${index + 1}. ${packageName} (no local dependencies)`);
-            }
-        });
-        // Execute script, cmd, or publish if provided
-        const script = (_runConfig_publishTree3 = runConfig.publishTree) === null || _runConfig_publishTree3 === void 0 ? void 0 : _runConfig_publishTree3.script;
-        const cmd = (_runConfig_publishTree4 = runConfig.publishTree) === null || _runConfig_publishTree4 === void 0 ? void 0 : _runConfig_publishTree4.cmd;
-        const shouldPublish = ((_runConfig_publishTree5 = runConfig.publishTree) === null || _runConfig_publishTree5 === void 0 ? void 0 : _runConfig_publishTree5.publish) || false;
-        const useParallel = ((_runConfig_publishTree6 = runConfig.publishTree) === null || _runConfig_publishTree6 === void 0 ? void 0 : _runConfig_publishTree6.parallel) || false;
-        // Handle conflicts between --script, --cmd, and --publish
-        // Priority order: --publish > --cmd > --script
-        let commandToRun;
-        let actionName = 'script'; // Default value
-        if (shouldPublish) {
-            if (script || cmd) {
-                const conflicting = [
-                    script && '--script',
-                    cmd && '--cmd'
-                ].filter(Boolean).join(' and ');
-                logger.warn(`Multiple execution options provided (${conflicting} and --publish). Using --publish (ignoring others).`);
-            }
-            // Will use direct function call instead of npx command
-            actionName = 'publish';
-        } else if (cmd) {
-            if (script) {
-                logger.warn('Both --script and --cmd provided. Using --cmd (ignoring --script).');
-            }
-            commandToRun = cmd;
-            actionName = 'command';
-        } else if (script) {
-            commandToRun = script;
-            actionName = 'script';
-        }
-        if (commandToRun || shouldPublish) {
-            // Run prechecks for publish operations
-            if (shouldPublish) {
-                await runTreePrechecks(dependencyGraph, buildOrder, runConfig);
-            }
-            const executionDescription = shouldPublish ? 'publish command' : `"${commandToRun}"`;
-            const parallelInfo = useParallel ? ' (with parallel execution)' : '';
-            logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Executing ${actionName} ${executionDescription} in ${buildOrder.length} packages${parallelInfo}...`);
-            let successCount = 0;
-            let failedPackage = null;
-            if (useParallel) {
-                // Parallel execution: group packages by dependency levels
-                const dependencyLevels = groupPackagesByDependencyLevels(dependencyGraph, buildOrder);
-                for(let levelIndex = 0; levelIndex < dependencyLevels.length; levelIndex++){
-                    const currentLevel = dependencyLevels[levelIndex];
-                    if (currentLevel.length === 1) {
-                        const packageName = currentLevel[0];
-                        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Level ${levelIndex + 1}: Executing ${packageName}...`);
-                    } else {
-                        logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Level ${levelIndex + 1}: Executing ${currentLevel.length} packages in parallel: ${currentLevel.join(', ')}...`);
-                    }
-                    // Execute all packages in this level in parallel
-                    const levelPromises = currentLevel.map((packageName)=>{
-                        const packageInfo = dependencyGraph.packages.get(packageName);
-                        const globalIndex = buildOrder.indexOf(packageName);
-                        return executePackage(packageName, packageInfo, commandToRun, shouldPublish, runConfig, isDryRun, globalIndex, buildOrder.length);
-                    });
-                    // Wait for all packages in this level to complete
-                    const results = await Promise.allSettled(levelPromises);
-                    // Check results and handle errors
-                    for(let i = 0; i < results.length; i++){
-                        const result = results[i];
-                        const packageName = currentLevel[i];
-                        const globalIndex = buildOrder.indexOf(packageName);
-                        const packageLogger = createPackageLogger(packageName, globalIndex + 1, buildOrder.length, isDryRun);
-                        if (result.status === 'fulfilled') {
-                            if (result.value.success) {
-                                successCount++;
-                            } else {
-                                // Package failed
-                                failedPackage = packageName;
-                                const formattedError = formatSubprojectError(packageName, result.value.error);
-                                if (!isDryRun) {
-                                    packageLogger.error(`Execution failed`);
-                                    logger.error(formattedError);
-                                    logger.error(`Failed after ${successCount} successful packages.`);
-                                    const packageDir = dependencyGraph.packages.get(packageName).path;
-                                    const packageDirName = path.basename(packageDir);
-                                    logger.error(`To resume from this package, run:`);
-                                    logger.error(` kodrdriv publish-tree --start-from ${packageDirName}`);
-                                    throw new Error(`Script failed in package ${packageName}`);
-                                }
-                                break;
-                            }
-                        } else {
-                            // Promise was rejected
-                            failedPackage = packageName;
-                            if (!isDryRun) {
-                                packageLogger.error(`Unexpected error: ${result.reason}`);
-                                logger.error(`Failed after ${successCount} successful packages.`);
-                                const packageDir = dependencyGraph.packages.get(packageName).path;
-                                const packageDirName = path.basename(packageDir);
-                                logger.error(`To resume from this package, run:`);
-                                logger.error(` kodrdriv publish-tree --start-from ${packageDirName}`);
-                                throw new Error(`Unexpected error in package ${packageName}`);
-                            }
-                            break;
-                        }
-                    }
-                    // If any package failed, stop execution
-                    if (failedPackage) {
-                        break;
-                    }
-                    if (currentLevel.length > 1) {
-                        logger.info(`✅ Level ${levelIndex + 1} completed: all ${currentLevel.length} packages finished successfully`);
-                    } else if (currentLevel.length === 1 && successCount > 0) {
-                        const packageName = currentLevel[0];
-                        const globalIndex = buildOrder.indexOf(packageName);
-                        const packageLogger = createPackageLogger(packageName, globalIndex + 1, buildOrder.length, isDryRun);
-                        packageLogger.info(`✅ Level ${levelIndex + 1} completed successfully`);
-                    }
-                }
-            } else {
-                // Sequential execution (original logic)
-                for(let i = 0; i < buildOrder.length; i++){
-                    const packageName = buildOrder[i];
-                    const packageInfo = dependencyGraph.packages.get(packageName);
-                    const packageLogger = createPackageLogger(packageName, i + 1, buildOrder.length, isDryRun);
-                    const result = await executePackage(packageName, packageInfo, commandToRun, shouldPublish, runConfig, isDryRun, i, buildOrder.length);
-                    if (result.success) {
-                        successCount++;
-                    } else {
-                        failedPackage = packageName;
-                        const formattedError = formatSubprojectError(packageName, result.error);
-                        if (!isDryRun) {
-                            packageLogger.error(`Execution failed`);
-                            logger.error(formattedError);
-                            logger.error(`Failed after ${successCount} successful packages.`);
-                            const packageDir = packageInfo.path;
-                            const packageDirName = path.basename(packageDir);
-                            logger.error(`To resume from this package, run:`);
-                            logger.error(` kodrdriv publish-tree --start-from ${packageDirName}`);
-                            throw new Error(`Script failed in package ${packageName}`);
-                        }
-                        break;
-                    }
-                }
-            }
-            if (!failedPackage) {
-                const summary = `${isDryRun ? 'DRY RUN: ' : ''}All ${buildOrder.length} packages completed successfully! 🎉`;
-                logger.info(summary);
-                return output + `\n${summary}\n`;
-            }
-        }
-        return output;
-    } catch (error) {
-        const errorMessage = `Failed to analyze workspace: ${error.message}`;
-        logger.error(errorMessage);
-        throw new Error(errorMessage);
-    }
-};
-
-export { execute };
-//# sourceMappingURL=publish-tree.js.map
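
For reference, the build ordering at the heart of the removed publish-tree command is a depth-first topological sort with cycle detection over local workspace dependencies, as seen in the deleted topologicalSort() above. The following is a minimal standalone sketch of that idea; the sample graph and names (topoSort, edges, 'app', 'lib-a', 'lib-b') are hypothetical illustrations, not part of the package's API.

// Minimal sketch (not part of the package): the same depth-first topological
// sort the removed topologicalSort() performs, applied to a hypothetical graph.
// Each key maps a package to the set of local packages it depends on, so
// dependencies always appear before their dependents in the result.
const edges = new Map([
    ['app', new Set(['lib-a', 'lib-b'])],
    ['lib-a', new Set(['lib-b'])],
    ['lib-b', new Set()]
]);

const topoSort = (edges)=>{
    const visited = new Set();
    const visiting = new Set();
    const result = [];
    const visit = (name)=>{
        if (visited.has(name)) return;
        // A package currently on the recursion stack means we looped back to it
        if (visiting.has(name)) {
            throw new Error(`Circular dependency detected involving package: ${name}`);
        }
        visiting.add(name);
        for (const dep of edges.get(name) || new Set()){
            visit(dep);
        }
        visiting.delete(name);
        visited.add(name);
        result.push(name); // pushed only after all dependencies are pushed
    };
    for (const name of edges.keys()){
        visit(name);
    }
    return result;
};

console.log(topoSort(edges)); // [ 'lib-b', 'lib-a', 'app' ]

Run with Node, this prints the dependencies-first order, which is exactly the property the removed command relied on to publish leaf packages before the packages that consume them.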