@eldrforge/kodrdriv 1.2.27 → 1.2.29
This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/AI-FRIENDLY-LOGGING-GUIDE.md +237 -0
- package/AI-LOGGING-MIGRATION-COMPLETE.md +371 -0
- package/ALREADY-PUBLISHED-PACKAGES-FIX.md +264 -0
- package/AUDIT-BRANCHES-PROGRESS-FIX.md +90 -0
- package/AUDIT-EXAMPLE-OUTPUT.md +113 -0
- package/CHECKPOINT-RECOVERY-FIX.md +450 -0
- package/LOGGING-MIGRATION-STATUS.md +186 -0
- package/PARALLEL-PUBLISH-DEBUGGING-GUIDE.md +441 -0
- package/PARALLEL-PUBLISH-FIXES-IMPLEMENTED.md +405 -0
- package/PARALLEL-PUBLISH-LOGGING-FIXES.md +274 -0
- package/PARALLEL-PUBLISH-QUICK-REFERENCE.md +375 -0
- package/PARALLEL_EXECUTION_FIX.md +2 -2
- package/PUBLISH_IMPROVEMENTS_IMPLEMENTED.md +4 -5
- package/VERSION-AUDIT-FIX.md +333 -0
- package/dist/application.js +6 -6
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +43 -13
- package/dist/arguments.js.map +1 -1
- package/dist/commands/audio-commit.js +18 -18
- package/dist/commands/audio-commit.js.map +1 -1
- package/dist/commands/audio-review.js +32 -32
- package/dist/commands/audio-review.js.map +1 -1
- package/dist/commands/clean.js +9 -9
- package/dist/commands/clean.js.map +1 -1
- package/dist/commands/commit.js +20 -20
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/development.js +88 -89
- package/dist/commands/development.js.map +1 -1
- package/dist/commands/link.js +36 -36
- package/dist/commands/link.js.map +1 -1
- package/dist/commands/publish.js +318 -220
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/release.js +14 -14
- package/dist/commands/release.js.map +1 -1
- package/dist/commands/review.js +15 -17
- package/dist/commands/review.js.map +1 -1
- package/dist/commands/select-audio.js +5 -5
- package/dist/commands/select-audio.js.map +1 -1
- package/dist/commands/tree.js +134 -39
- package/dist/commands/tree.js.map +1 -1
- package/dist/commands/unlink.js +39 -39
- package/dist/commands/unlink.js.map +1 -1
- package/dist/commands/updates.js +150 -14
- package/dist/commands/updates.js.map +1 -1
- package/dist/commands/versions.js +14 -13
- package/dist/commands/versions.js.map +1 -1
- package/dist/constants.js +1 -1
- package/dist/content/diff.js +5 -5
- package/dist/content/diff.js.map +1 -1
- package/dist/content/files.js +2 -2
- package/dist/content/files.js.map +1 -1
- package/dist/content/log.js +3 -3
- package/dist/content/log.js.map +1 -1
- package/dist/execution/CommandValidator.js +6 -6
- package/dist/execution/CommandValidator.js.map +1 -1
- package/dist/execution/DynamicTaskPool.js +129 -19
- package/dist/execution/DynamicTaskPool.js.map +1 -1
- package/dist/execution/RecoveryManager.js +99 -21
- package/dist/execution/RecoveryManager.js.map +1 -1
- package/dist/execution/TreeExecutionAdapter.js +23 -20
- package/dist/execution/TreeExecutionAdapter.js.map +1 -1
- package/dist/main.js +2 -2
- package/dist/main.js.map +1 -1
- package/dist/util/checkpointManager.js +4 -4
- package/dist/util/checkpointManager.js.map +1 -1
- package/dist/util/dependencyGraph.js +2 -2
- package/dist/util/dependencyGraph.js.map +1 -1
- package/dist/util/fileLock.js +1 -1
- package/dist/util/fileLock.js.map +1 -1
- package/dist/util/general.js +148 -15
- package/dist/util/general.js.map +1 -1
- package/dist/util/interactive.js +2 -2
- package/dist/util/interactive.js.map +1 -1
- package/dist/util/performance.js.map +1 -1
- package/dist/util/safety.js +13 -13
- package/dist/util/safety.js.map +1 -1
- package/dist/utils/branchState.js +567 -0
- package/dist/utils/branchState.js.map +1 -0
- package/package.json +4 -4
- package/scripts/update-test-log-assertions.js +73 -0
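Most of the publish.js changes shown below replace free-form log messages with the structured, AI-friendly format introduced alongside AI-FRIENDLY-LOGGING-GUIDE.md: an uppercase event code, a human-readable description, and pipe-delimited key/value context. A minimal sketch of that convention follows; the formatEvent helper is illustrative only (the shipped code inlines the strings directly), and the variables in the usage comment are assumptions drawn from the diff.

// Illustrative only: the diff below writes these strings inline rather than through a helper.
const formatEvent = (code, description, context = {}) => {
    // Render each context entry as "Key: value" and join everything with " | ".
    const pairs = Object.entries(context).map(([key, value]) => `${key}: ${value}`);
    return [`${code}: ${description}`, ...pairs].join(' | ');
};

// Example matching the style of the new log lines in publish.js:
// logger.warn(formatEvent('NPMRC_READ_FAILED', 'Unable to read .npmrc configuration file', { Path: npmrcPath, Error: error.message }));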
package/dist/commands/publish.js
CHANGED
@@ -30,11 +30,11 @@ const scanNpmrcForEnvVars = async (storage)=>{
 }
 }
 } catch (error) {
-logger.warn(`…
-logger.verbose('…
+logger.warn(`NPMRC_READ_FAILED: Unable to read .npmrc configuration file | Path: ${npmrcPath} | Error: ${error.message}`);
+logger.verbose('NPMRC_READ_IMPACT: Environment variable detection for publishing may be affected due to failed .npmrc read');
 }
 } else {
-logger.debug('.npmrc file…
+logger.debug('NPMRC_NOT_FOUND: No .npmrc file present in current directory | Action: Skipping environment variable scan | Path: ' + npmrcPath);
 }
 return envVars;
 };
@@ -50,7 +50,7 @@ const scanNpmrcForEnvVars = async (storage)=>{
 await fs.access(packageLockPath);
 } catch {
 // No package-lock.json, nothing to clean
-logger.verbose('No package-lock.json…
+logger.verbose('PACKAGE_LOCK_NOT_FOUND: No package-lock.json file exists | Action: Skipping npm link cleanup | Path: ' + packageLockPath);
 return;
 }
 // Read and parse package-lock.json
@@ -66,7 +66,7 @@ const scanNpmrcForEnvVars = async (storage)=>{
 const resolvedPath = pkgInfo.resolved.replace('file:', '');
 if (resolvedPath.startsWith('../') || resolvedPath.startsWith('./')) {
 hasFileReferences = true;
-logger.verbose(`Found npm link reference: ${pkgPath}…
+logger.verbose(`NPM_LINK_DETECTED: Found npm link reference in packages section | Package: ${pkgPath} | Resolved: ${pkgInfo.resolved} | Type: relative_file_dependency`);
 break;
 }
 }
@@ -79,33 +79,33 @@ const scanNpmrcForEnvVars = async (storage)=>{
 const versionPath = pkgInfo.version.replace('file:', '');
 if (versionPath.startsWith('../') || versionPath.startsWith('./')) {
 hasFileReferences = true;
-logger.verbose(`Found npm link reference: ${pkgName}…
+logger.verbose(`NPM_LINK_DETECTED: Found npm link reference in dependencies section | Package: ${pkgName} | Version: ${pkgInfo.version} | Type: relative_file_dependency`);
 break;
 }
 }
 }
 }
 if (hasFileReferences) {
-logger.info('…
-logger.info('…
+logger.info('NPM_LINK_CLEANUP_REQUIRED: Detected npm link references in package-lock.json | File: package-lock.json | Impact: Must be cleaned before publish');
+logger.info('NPM_LINK_CLEANUP_STARTING: Removing package-lock.json and regenerating clean version | Action: Remove file with relative dependencies');
 if (isDryRun) {
-logger.info('…
+logger.info('DRY_RUN_OPERATION: Would remove package-lock.json and regenerate it | Mode: dry-run | File: package-lock.json');
 } else {
 // Remove package-lock.json
 await fs.unlink(packageLockPath);
-logger.verbose('…
+logger.verbose('NPM_LINK_CLEANUP_FILE_REMOVED: Deleted package-lock.json containing npm link references | Path: ' + packageLockPath);
 // Regenerate clean package-lock.json
-logger.verbose('…
+logger.verbose('NPM_LOCK_REGENERATING: Executing npm install to regenerate package-lock.json from package.json | Command: npm install --package-lock-only --no-audit --no-fund');
 await runWithDryRunSupport('npm install --package-lock-only --no-audit --no-fund', isDryRun);
-logger.info('…
+logger.info('NPM_LOCK_REGENERATED: Successfully regenerated clean package-lock.json without link references | Path: ' + packageLockPath);
 }
 } else {
-logger.verbose('No npm link references found in package-lock.json');
+logger.verbose('NPM_LINK_CHECK_CLEAN: No npm link references found in package-lock.json | Status: Ready for publish | File: ' + packageLockPath);
 }
 } catch (error) {
 // Log warning but don't fail - let npm update handle any issues
-logger.warn(…
-logger.verbose('…
+logger.warn(`NPM_LINK_CHECK_FAILED: Unable to check or clean npm link references | Error: ${error.message} | Impact: Continuing with publish, npm will handle issues`);
+logger.verbose('PUBLISH_PROCESS_CONTINUING: Proceeding with publish workflow despite npm link check failure | Next: Standard npm publish validation');
 }
 };
 const validateEnvironmentVariables = (requiredEnvVars, isDryRun)=>{
@@ -118,9 +118,9 @@ const validateEnvironmentVariables = (requiredEnvVars, isDryRun)=>{
 }
 if (missingEnvVars.length > 0) {
 if (isDryRun) {
-logger.warn(`…
+logger.warn(`ENV_VARS_MISSING: Required environment variables not set | Variables: ${missingEnvVars.join(', ')} | Mode: dry-run | Impact: Would fail in real publish`);
 } else {
-logger.error(`…
+logger.error(`ENV_VARS_MISSING: Required environment variables not set | Variables: ${missingEnvVars.join(', ')} | Action: Must set before publish | Source: .npmrc configuration`);
 throw new Error(`Missing required environment variables: ${missingEnvVars.join(', ')}. Please set these environment variables before running publish.`);
 }
 }
@@ -132,11 +132,11 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 const storage = create({
 log: logger.info
 });
-logger.info('…
+logger.info('PRECHECK_STARTING: Executing publish prechecks | Phase: validation | Target: ' + (targetBranch || 'default'));
 // Check if we're in a git repository
 try {
 if (isDryRun) {
-logger.info('Would…
+logger.info('PRECHECK_GIT_REPO: Would verify git repository | Mode: dry-run | Command: git rev-parse --git-dir');
 } else {
 await run('git rev-parse --git-dir');
 }
@@ -148,10 +148,10 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 }
 }
 // Check for uncommitted changes
-logger.info('Checking for uncommitted changes…
+logger.info('PRECHECK_GIT_STATUS: Checking for uncommitted changes | Command: git status --porcelain | Requirement: Clean working directory');
 try {
 if (isDryRun) {
-logger.info('Would…
+logger.info('PRECHECK_GIT_STATUS: Would verify clean working directory | Mode: dry-run | Command: git status --porcelain');
 } else {
 const { stdout } = await run('git status --porcelain');
 if (stdout.trim()) {
@@ -168,9 +168,9 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 // Use the passed target branch or fallback to config/default
 const effectiveTargetBranch = targetBranch || ((_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.targetBranch) || 'main';
 // Check that we're not running from the target branch
-logger.info('…
+logger.info('PRECHECK_BRANCH: Verifying current branch is not target branch | Target: ' + effectiveTargetBranch + ' | Requirement: Must run from feature branch');
 if (isDryRun) {
-logger.info(`Would verify current branch is not…
+logger.info(`PRECHECK_BRANCH: Would verify current branch is not target branch | Mode: dry-run | Target: ${effectiveTargetBranch}`);
 } else {
 const currentBranch = await GitHub.getCurrentBranchName();
 if (currentBranch === effectiveTargetBranch) {
@@ -178,43 +178,42 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 }
 }
 // Check target branch sync with remote
-logger.info(`Checking target branch…
+logger.info(`PRECHECK_BRANCH_SYNC: Checking target branch sync with remote | Branch: ${effectiveTargetBranch} | Remote: origin | Requirement: Branches must be synchronized`);
 if (isDryRun) {
-logger.info(`Would verify target branch…
+logger.info(`PRECHECK_BRANCH_SYNC: Would verify target branch is in sync with remote | Mode: dry-run | Branch: ${effectiveTargetBranch} | Remote: origin`);
 } else {
 // Only check if local target branch exists (it's okay if it doesn't exist locally)
 const targetBranchExists = await localBranchExists(effectiveTargetBranch);
 if (targetBranchExists) {
 const syncStatus = await isBranchInSyncWithRemote(effectiveTargetBranch);
 if (!syncStatus.inSync) {
-logger.error(…
+logger.error(`BRANCH_SYNC_FAILED: Target branch not synchronized with remote | Branch: ${effectiveTargetBranch} | Status: out-of-sync | Impact: Cannot proceed with publish`);
 logger.error('');
 if (syncStatus.error) {
-logger.error(`…
+logger.error(`BRANCH_SYNC_ERROR: ${syncStatus.error}`);
 } else if (syncStatus.localSha && syncStatus.remoteSha) {
-logger.error(`…
-logger.error(` Remote: ${syncStatus.remoteSha.substring(0, 8)}`);
+logger.error(`BRANCH_SYNC_DIVERGENCE: Local and remote commits differ | Local SHA: ${syncStatus.localSha.substring(0, 8)} | Remote SHA: ${syncStatus.remoteSha.substring(0, 8)}`);
 }
 logger.error('');
-logger.error('…
-logger.error(` 1…
-logger.error(` 2…
-logger.error(' 3…
-logger.error(' 4…
+logger.error('RESOLUTION_STEPS: Manual intervention required to sync branches:');
+logger.error(` Step 1: Switch to target branch | Command: git checkout ${effectiveTargetBranch}`);
+logger.error(` Step 2: Pull latest changes | Command: git pull origin ${effectiveTargetBranch}`);
+logger.error(' Step 3: Resolve merge conflicts if present');
+logger.error(' Step 4: Return to feature branch and retry publish');
 logger.error('');
-logger.error(…
+logger.error(`ALTERNATIVE_OPTION: Automatic sync available | Command: kodrdriv publish --sync-target | Branch: ${effectiveTargetBranch}`);
 throw new Error(`Target branch '${effectiveTargetBranch}' is not in sync with remote. Please sync the branch before running publish.`);
 } else {
-logger.info(…
+logger.info(`BRANCH_SYNC_VERIFIED: Target branch is synchronized with remote | Branch: ${effectiveTargetBranch} | Status: in-sync`);
 }
 } else {
-logger.info(…
+logger.info(`BRANCH_NOT_LOCAL: Target branch does not exist locally | Branch: ${effectiveTargetBranch} | Action: Will be created during publish process`);
 }
 }
 // Check GitHub Actions workflow configuration
-logger.info('Checking GitHub Actions workflow configuration…
+logger.info('PRECHECK_WORKFLOW: Checking GitHub Actions workflow configuration | Target: PR automation | Requirement: Workflows should trigger on pull requests');
 if (isDryRun) {
-logger.info('Would check if GitHub Actions workflows are configured for pull requests');
+logger.info('PRECHECK_WORKFLOW: Would check if GitHub Actions workflows are configured for pull requests | Mode: dry-run');
 } else {
 try {
 // TODO: Re-enable when checkWorkflowConfiguration is exported from github-tools
@@ -226,25 +225,25 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 triggeredWorkflowNames: []
 };
 if (!workflowConfig.hasWorkflows) ; else if (!workflowConfig.hasPullRequestTriggers) ; else {
-logger.info(…
+logger.info(`WORKFLOW_CONFIGURED: Found workflows that will trigger on pull requests | Target Branch: ${effectiveTargetBranch} | Workflow Count: ${workflowConfig.triggeredWorkflowNames.length}`);
 for (const workflowName of workflowConfig.triggeredWorkflowNames){
-logger.info(`…
+logger.info(`WORKFLOW_ACTIVE: ${workflowName} | Trigger: pull_request | Target: ${effectiveTargetBranch}`);
 }
 }
 } catch (error) {
 // Don't fail the precheck if we can't verify workflows
 // The wait logic will handle it later
-logger.debug(`…
+logger.debug(`WORKFLOW_CHECK_FAILED: Unable to verify workflow configuration | Error: ${error.message} | Impact: Will proceed with publish | Note: Wait logic will handle checks later`);
 }
 }
 // Check if prepublishOnly script exists in package.json
-logger.info('Checking for prepublishOnly script…
+logger.info('PRECHECK_PREPUBLISH: Checking for prepublishOnly script in package.json | Requirement: Must exist to run pre-flight checks | Expected: clean, lint, build, test');
 const packageJsonPath = path__default.join(process.cwd(), 'package.json');
 if (!await storage.exists(packageJsonPath)) {
 if (!isDryRun) {
 throw new Error('package.json not found in current directory.');
 } else {
-logger.warn('package.json…
+logger.warn('PACKAGE_JSON_NOT_FOUND: No package.json in current directory | Mode: dry-run | Impact: Cannot verify prepublishOnly script | Path: ' + packageJsonPath);
 }
 } else {
 var _packageJson_scripts;
@@ -257,19 +256,19 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 if (!isDryRun) {
 throw new Error('Failed to parse package.json. Please ensure it contains valid JSON.');
 } else {
-logger.warn('…
+logger.warn('PACKAGE_JSON_PARSE_FAILED: Unable to parse package.json | Mode: dry-run | Impact: Cannot verify prepublishOnly script | Path: ' + packageJsonPath + ' | Requirement: Valid JSON format');
 }
 }
 if (packageJson && !((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.prepublishOnly)) {
 if (!isDryRun) {
 throw new Error('prepublishOnly script is required in package.json but was not found. Please add a prepublishOnly script that runs your pre-flight checks (e.g., clean, lint, build, test).');
 } else {
-logger.warn('prepublishOnly script…
+logger.warn('PREPUBLISH_SCRIPT_MISSING: No prepublishOnly script found in package.json | Mode: dry-run | Requirement: Script must exist | Expected Tasks: clean, lint, build, test | Path: ' + packageJsonPath);
 }
 }
 }
 // Check required environment variables
-logger.verbose('Checking required environment variables…
+logger.verbose('PRECHECK_ENV_VARS: Checking required environment variables | Source: Configuration and .npmrc | Requirement: All required vars must be set');
 const coreRequiredEnvVars = ((_runConfig_publish1 = runConfig.publish) === null || _runConfig_publish1 === void 0 ? void 0 : _runConfig_publish1.requiredEnvVars) || [];
 const npmrcEnvVars = isDryRun ? [] : await scanNpmrcForEnvVars(storage); // Skip .npmrc scan in dry run
 const allRequiredEnvVars = [
@@ -279,12 +278,12 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 ])
 ];
 if (allRequiredEnvVars.length > 0) {
-logger.verbose(`…
+logger.verbose(`ENV_VARS_REQUIRED: Environment variables needed for publish | Variables: ${allRequiredEnvVars.join(', ')} | Count: ${allRequiredEnvVars.length} | Source: config + .npmrc`);
 validateEnvironmentVariables(allRequiredEnvVars, isDryRun);
 } else {
-logger.verbose('No required environment variables specified.');
+logger.verbose('ENV_VARS_NONE: No required environment variables specified | Status: No validation needed | Source: config + .npmrc');
 }
-logger.info('All prechecks passed…
+logger.info('PRECHECK_COMPLETE: All publish prechecks passed successfully | Status: Ready to proceed | Next: Execute publish workflow');
 };
 // Helper: deep-sort object keys for stable comparison
 const sortObjectKeys = (value)=>{
@@ -315,7 +314,7 @@ const isReleaseNecessaryComparedToTarget = async (targetBranch, isDryRun)=>{
 ]);
 } catch (error) {
 // Target branch doesn't exist or isn't accessible
-logger.verbose(`Target branch…
+logger.verbose(`RELEASE_CHECK_NO_TARGET: Target branch does not exist or is not accessible | Branch: ${targetBranch} | Action: Proceeding with publish | Reason: First release to this branch`);
 return {
 necessary: true,
 reason: `Target branch '${targetBranch}' does not exist; first release to this branch`
@@ -376,7 +375,7 @@ const isReleaseNecessaryComparedToTarget = async (targetBranch, isDryRun)=>{
 };
 } catch (error) {
 // Conservative: if we cannot prove it is only a version change, proceed with release
-logger.verbose(`…
+logger.verbose(`RELEASE_CHECK_COMPARISON_FAILED: Unable to conclusively compare package.json changes | Error: ${error.message} | Action: Proceeding conservatively with publish | Reason: Cannot verify version-only change`);
 return {
 necessary: true,
 reason: 'Could not compare package.json safely'
@@ -386,27 +385,27 @@ const isReleaseNecessaryComparedToTarget = async (targetBranch, isDryRun)=>{
 const handleTargetBranchSyncRecovery = async (runConfig, targetBranch)=>{
 const isDryRun = runConfig.dryRun || false;
 const logger = getDryRunLogger(isDryRun);
-logger.info(…
+logger.info(`BRANCH_SYNC_ATTEMPTING: Initiating sync of target branch with remote | Branch: ${targetBranch} | Remote: origin | Operation: fetch + merge`);
 if (isDryRun) {
-logger.info(`Would attempt to sync…
+logger.info(`BRANCH_SYNC_DRY_RUN: Would attempt to sync branch with remote | Mode: dry-run | Branch: ${targetBranch} | Remote: origin`);
 return;
 }
 const syncResult = await safeSyncBranchWithRemote(targetBranch);
 if (syncResult.success) {
-logger.info(…
-logger.info('…
+logger.info(`BRANCH_SYNC_SUCCESS: Successfully synchronized branch with remote | Branch: ${targetBranch} | Remote: origin | Status: in-sync`);
+logger.info('BRANCH_SYNC_NEXT_STEP: Ready to proceed with publish | Action: Re-run publish command | Branch: ' + targetBranch);
 } else if (syncResult.conflictResolutionRequired) {
-logger.error(…
+logger.error(`BRANCH_SYNC_CONFLICTS: Sync failed due to merge conflicts | Branch: ${targetBranch} | Status: conflicts-detected | Resolution: Manual intervention required`);
 logger.error('');
-logger.error('…
-logger.error(` 1…
-logger.error(` 2…
-logger.error(' 3…
-logger.error(' 4…
+logger.error('CONFLICT_RESOLUTION_STEPS: Manual conflict resolution required:');
+logger.error(` Step 1: Switch to target branch | Command: git checkout ${targetBranch}`);
+logger.error(` Step 2: Pull and resolve conflicts | Command: git pull origin ${targetBranch}`);
+logger.error(' Step 3: Commit resolved changes | Command: git commit');
+logger.error(' Step 4: Return to feature branch and retry | Command: kodrdriv publish');
 logger.error('');
 throw new Error(`Target branch '${targetBranch}' has conflicts that require manual resolution.`);
 } else {
-logger.error(…
+logger.error(`BRANCH_SYNC_FAILED: Sync operation failed | Branch: ${targetBranch} | Error: ${syncResult.error} | Remote: origin`);
 throw new Error(`Failed to sync target branch: ${syncResult.error}`);
 }
 };
@@ -424,15 +423,15 @@ const execute = async (runConfig)=>{
 } else {
 currentBranch = await GitHub.getCurrentBranchName();
 // Fetch latest remote information to avoid conflicts
-logger.info('…
+logger.info('GIT_FETCH_STARTING: Fetching latest remote information | Remote: origin | Purpose: Avoid conflicts during publish | Command: git fetch origin');
 try {
 await run('git fetch origin');
-logger.info('…
+logger.info('GIT_FETCH_SUCCESS: Successfully fetched latest remote information | Remote: origin | Status: up-to-date');
 } catch (error) {
-logger.warn(…
+logger.warn(`GIT_FETCH_FAILED: Unable to fetch from remote | Remote: origin | Error: ${error.message} | Impact: May cause conflicts if remote has changes`);
 }
 // Sync current branch with remote to avoid conflicts
-logger.info(…
+logger.info(`CURRENT_BRANCH_SYNC: Synchronizing current branch with remote | Branch: ${currentBranch} | Remote: origin | Purpose: Avoid conflicts during publish`);
 try {
 const remoteExists = await run(`git ls-remote --exit-code --heads origin ${currentBranch}`).then(()=>true).catch(()=>false);
 if (remoteExists) {
@@ -441,21 +440,21 @@ const execute = async (runConfig)=>{
 await run(`git fetch origin ${currentBranch}`);
 await run(`git merge origin/${currentBranch} --no-ff --no-edit`);
 }, `sync ${currentBranch}`);
-logger.info(…
+logger.info(`CURRENT_BRANCH_SYNCED: Successfully synchronized current branch with remote | Branch: ${currentBranch} | Remote: origin/${currentBranch} | Status: in-sync`);
 } else {
-logger.info(…
+logger.info(`REMOTE_BRANCH_NOT_FOUND: No remote branch exists | Branch: ${currentBranch} | Remote: origin | Action: Will be created on first push`);
 }
 } catch (error) {
 if (error.message && error.message.includes('CONFLICT')) {
-logger.error(…
-logger.error(`…
-logger.error(` 1…
-logger.error(` 2…
-logger.error(` 3…
-logger.error(` 4…
+logger.error(`MERGE_CONFLICTS_DETECTED: Conflicts found when syncing current branch with remote | Branch: ${currentBranch} | Remote: origin/${currentBranch} | Status: conflicts-require-resolution`);
+logger.error(`CONFLICT_RESOLUTION_REQUIRED: Manual intervention needed to resolve conflicts and continue:`);
+logger.error(` Step 1: Resolve conflicts in affected files`);
+logger.error(` Step 2: Stage resolved files | Command: git add <resolved-files>`);
+logger.error(` Step 3: Commit resolution | Command: git commit`);
+logger.error(` Step 4: Retry publish | Command: kodrdriv publish`);
 throw new Error(`Merge conflicts detected when syncing ${currentBranch} with remote. Please resolve conflicts manually.`);
 } else {
-logger.warn(…
+logger.warn(`CURRENT_BRANCH_SYNC_FAILED: Unable to sync current branch with remote | Branch: ${currentBranch} | Remote: origin/${currentBranch} | Error: ${error.message} | Impact: May cause issues during publish`);
 }
 }
 }
@@ -469,19 +468,19 @@ const execute = async (runConfig)=>{
 if (branchConfig.targetBranch) {
 targetBranch = branchConfig.targetBranch;
 }
-logger.info(…
-logger.info(`…
-logger.info(`…
+logger.info(`BRANCH_DEPENDENT_TARGETING: Branch-specific configuration active | Source: ${currentBranch} | Target: ${targetBranch} | Feature: Branch-dependent versioning and targeting`);
+logger.info(`BRANCH_CONFIGURATION_SOURCE: Current branch | Branch: ${currentBranch} | Type: source`);
+logger.info(`BRANCH_CONFIGURATION_TARGET: Target branch for publish | Branch: ${targetBranch} | Type: destination`);
 // Look at target branch config to show version strategy
 const targetBranchConfig = runConfig.branches[targetBranch];
 if (targetBranchConfig === null || targetBranchConfig === void 0 ? void 0 : targetBranchConfig.version) {
 const versionType = targetBranchConfig.version.type;
 const versionTag = targetBranchConfig.version.tag;
 const versionIncrement = targetBranchConfig.version.increment;
-logger.info(`…
+logger.info(`VERSION_STRATEGY: Target branch version configuration | Branch: ${targetBranch} | Type: ${versionType} | Tag: ${versionTag || 'none'} | Increment: ${versionIncrement ? 'enabled' : 'disabled'}`);
 }
 } else {
-logger.debug(`No branch-specific…
+logger.debug(`BRANCH_TARGETING_DEFAULT: No branch-specific configuration found | Branch: ${currentBranch} | Action: Using default target | Target: ${targetBranch}`);
 }
 // Handle --sync-target flag
 if ((_runConfig_publish1 = runConfig.publish) === null || _runConfig_publish1 === void 0 ? void 0 : _runConfig_publish1.syncTarget) {
@@ -489,13 +488,13 @@ const execute = async (runConfig)=>{
 return; // Exit after sync operation
 }
 // Check if target branch exists and create it if needed
-logger.info(`…
+logger.info(`TARGET_BRANCH_CHECK: Verifying target branch existence | Branch: ${targetBranch} | Action: Create if missing | Source: Current HEAD`);
 if (isDryRun) {
-logger.info(`Would…
+logger.info(`TARGET_BRANCH_CHECK: Would verify target branch exists and create if needed | Mode: dry-run | Branch: ${targetBranch}`);
 } else {
 const targetBranchExists = await localBranchExists(targetBranch);
 if (!targetBranchExists) {
-logger.info(…
+logger.info(`TARGET_BRANCH_CREATING: Target branch does not exist, creating from current branch | Branch: ${targetBranch} | Source: HEAD | Remote: origin`);
 try {
 // Wrap git branch and push operations with lock
 await runGitWithLock(process.cwd(), async ()=>{
@@ -505,73 +504,90 @@ const execute = async (runConfig)=>{
 targetBranch,
 'HEAD'
 ]);
-logger.info(…
+logger.info(`TARGET_BRANCH_CREATED: Successfully created target branch locally | Branch: ${targetBranch} | Source: HEAD`);
 // Push the new branch to origin
 await runSecure('git', [
 'push',
 'origin',
 targetBranch
 ]);
-logger.info(…
+logger.info(`TARGET_BRANCH_PUSHED: Successfully pushed new target branch to remote | Branch: ${targetBranch} | Remote: origin/${targetBranch}`);
 }, `create and push target branch ${targetBranch}`);
 } catch (error) {
 throw new Error(`Failed to create target branch '${targetBranch}': ${error.message}`);
 }
 } else {
-logger.info(…
+logger.info(`TARGET_BRANCH_EXISTS: Target branch already exists locally | Branch: ${targetBranch} | Status: ready`);
 }
 }
 // Run prechecks before starting any work
 await runPrechecks(runConfig, targetBranch);
 // Early check: determine if a release is necessary compared to target branch
-logger.info('Evaluating if…
+logger.info('RELEASE_NECESSITY_CHECK: Evaluating if release is required | Comparison: current branch vs target | Target: ' + targetBranch + ' | Purpose: Avoid unnecessary publishes');
 try {
 const necessity = await isReleaseNecessaryComparedToTarget(targetBranch, isDryRun);
 if (!necessity.necessary) {
-logger.info(`\…
+logger.info(`\nRELEASE_SKIPPED: No meaningful changes detected, skipping publish | Reason: ${necessity.reason} | Target: ${targetBranch}`);
 // Emit a machine-readable marker so tree mode can detect skip and avoid propagating versions
-logger.info…
+// CRITICAL: Use console.log to write to stdout (logger.info goes to stderr via winston)
+// eslint-disable-next-line no-console
+console.log('KODRDRIV_PUBLISH_SKIPPED');
 return;
 } else {
-logger.verbose(`…
+logger.verbose(`RELEASE_PROCEEDING: Meaningful changes detected, continuing with publish | Reason: ${necessity.reason} | Target: ${targetBranch}`);
 }
 } catch (error) {
 // On unexpected errors, proceed with publish to avoid false negatives blocking releases
-logger.verbose(`…
+logger.verbose(`RELEASE_NECESSITY_CHECK_ERROR: Unable to determine release necessity | Error: ${error.message} | Action: Proceeding conservatively with publish | Rationale: Avoid blocking valid releases`);
 }
-logger.info('…
+logger.info('RELEASE_PROCESS_STARTING: Initiating release workflow | Target: ' + targetBranch + ' | Phase: dependency updates and version management');
 let pr = null;
 if (isDryRun) {
-logger.info('Would check for existing pull request');
-logger.info('Assuming no existing PR found…
+logger.info('PR_CHECK: Would check for existing pull request | Mode: dry-run | Action: Skip PR lookup');
+logger.info('PR_ASSUMPTION: Assuming no existing PR found | Mode: dry-run | Purpose: Demo workflow');
 } else {
 const branchName = await GitHub.getCurrentBranchName();
 pr = await GitHub.findOpenPullRequestByHeadRef(branchName);
 }
 if (pr) {
-logger.info(`…
+logger.info(`PR_FOUND: Existing pull request detected for current branch | URL: ${pr.html_url} | Status: open`);
 } else {
-var _runConfig_publish4, _runConfig_publish5, _runConfig_publish6, _runConfig_publish7, _runConfig_publish8, _runConfig_publish9, _runConfig_publish10, _runConfig_publish11;
-logger.info('No open pull request…
+var _runConfig_publish4, _runConfig_publish5, _runConfig_publish6, _runConfig_publish7, _runConfig_publish8, _runConfig_publish9, _runConfig_publish10, _runConfig_publish11, _runConfig_publish12;
+logger.info('PR_NOT_FOUND: No open pull request exists for current branch | Action: Starting new release publishing process | Next: Prepare dependencies and version');
 // STEP 1: Prepare for release (update dependencies and run prepublish checks) with NO version bump yet
-logger.verbose('Preparing for release:…
+logger.verbose('RELEASE_PREP_STARTING: Preparing for release | Phase: dependency management | Action: Switch from workspace to remote dependencies | Version Bump: Not yet applied');
 // Clean up any npm link references before updating dependencies
-logger.verbose('…
+logger.verbose('NPM_LINK_CHECK: Scanning package-lock.json for npm link references | File: package-lock.json | Purpose: Remove development symlinks before publish');
 await cleanupNpmLinkReferences(isDryRun);
-
-const…
+// Update inter-project dependencies if --update-deps flag is present
+const updateDepsScope = (_runConfig_publish4 = runConfig.publish) === null || _runConfig_publish4 === void 0 ? void 0 : _runConfig_publish4.updateDeps;
+if (updateDepsScope) {
+logger.info(`INTER_PROJECT_DEPS_UPDATE: Updating inter-project dependencies | Scope: ${updateDepsScope} | Type: inter-project | Command: kodrdriv updates`);
+const Updates = await import('./updates.js');
+const updatesConfig = {
+...runConfig,
+dryRun: isDryRun,
+updates: {
+scope: updateDepsScope,
+interProject: true
+}
+};
+await Updates.execute(updatesConfig);
+}
+logger.verbose('DEPS_UPDATE_REGISTRY: Updating dependencies to latest versions from npm registry | Source: registry | Target: package.json');
+const updatePatterns = (_runConfig_publish5 = runConfig.publish) === null || _runConfig_publish5 === void 0 ? void 0 : _runConfig_publish5.dependencyUpdatePatterns;
 if (updatePatterns && updatePatterns.length > 0) {
-logger.verbose(`Updating dependencies matching patterns: ${updatePatterns.join(', ')}`);
+logger.verbose(`DEPS_UPDATE_PATTERNS: Updating dependencies matching specified patterns | Patterns: ${updatePatterns.join(', ')} | Count: ${updatePatterns.length} | Command: npm update`);
 const patternsArg = updatePatterns.join(' ');
 await runWithDryRunSupport(`npm update ${patternsArg}`, isDryRun);
 } else {
-logger.verbose('No dependency…
+logger.verbose('DEPS_UPDATE_ALL: No dependency patterns specified, updating all dependencies | Scope: all | Command: npm update');
 await runWithDryRunSupport('npm update', isDryRun);
 }
-logger.info('…
+logger.info('PREPUBLISH_SCRIPT_RUNNING: Executing prepublishOnly script | Script: prepublishOnly | Purpose: Run pre-flight checks (clean, lint, build, test)');
 await runWithDryRunSupport('npm run prepublishOnly', isDryRun, {}, true); // Use inherited stdio
 // STEP 2: Commit dependency updates if any (still no version bump)
-logger.verbose('Staging dependency updates for commit');
+logger.verbose('DEPS_STAGING: Staging dependency updates for commit | Files: package.json + package-lock.json | Command: git add | Note: Version bump not yet applied');
 // Check if package-lock.json exists before trying to stage it
 const packageLockExists = await storage.exists('package-lock.json');
 const filesToStage = packageLockExists ? 'package.json package-lock.json' : 'package.json';
@@ -579,26 +595,26 @@ const execute = async (runConfig)=>{
 await runGitWithLock(process.cwd(), async ()=>{
 await runWithDryRunSupport(`git add ${filesToStage}`, isDryRun);
 }, 'stage dependency updates');
-logger.verbose('Checking for staged dependency updates…
+logger.verbose('DEPS_COMMIT_CHECK: Checking for staged dependency updates | Command: git status | Purpose: Determine if commit needed');
 if (isDryRun) {
-logger.verbose('Would create dependency update commit if changes are staged');
+logger.verbose('DEPS_COMMIT_DRY_RUN: Would create dependency update commit if changes are staged | Mode: dry-run');
 } else {
 if (await hasStagedChanges()) {
-logger.verbose('Staged dependency changes…
+logger.verbose('DEPS_COMMIT_CREATING: Staged dependency changes detected, creating commit | Files: ' + filesToStage + ' | Action: Execute commit command');
 // Commit also needs git lock
 await runGitWithLock(process.cwd(), async ()=>{
 await execute$2(runConfig);
 }, 'commit dependency updates');
 } else {
-logger.verbose('No dependency changes to commit…
+logger.verbose('DEPS_COMMIT_SKIPPED: No dependency changes to commit | Files: ' + filesToStage + ' | Action: Skipping commit step');
 }
 }
 // STEP 3: Merge target branch into working branch (optional - now skipped by default since post-publish sync keeps branches in sync)
-const skipPreMerge = ((…
+const skipPreMerge = ((_runConfig_publish6 = runConfig.publish) === null || _runConfig_publish6 === void 0 ? void 0 : _runConfig_publish6.skipPrePublishMerge) !== false; // Default to true (skip)
 if (skipPreMerge) {
-logger.verbose(…
+logger.verbose(`PRE_MERGE_SKIPPED: Skipping pre-publish merge of target branch | Reason: Post-publish sync handles branch synchronization | Target: ${targetBranch} | Config: skipPrePublishMerge=true`);
 } else {
-logger.info(`Merging target branch…
+logger.info(`PRE_MERGE_STARTING: Merging target branch into current branch | Target: ${targetBranch} | Purpose: Avoid version conflicts | Phase: pre-publish`);
 if (isDryRun) {
 logger.info(`Would merge ${targetBranch} into current branch`);
 } else {
@@ -607,23 +623,23 @@ const execute = async (runConfig)=>{
 // Fetch the latest target branch
 try {
 await run(`git fetch origin ${targetBranch}:${targetBranch}`);
-logger.info(…
+logger.info(`TARGET_BRANCH_FETCHED: Successfully fetched latest target branch | Branch: ${targetBranch} | Remote: origin/${targetBranch} | Purpose: Pre-merge sync`);
 } catch (fetchError) {
-logger.warn(…
-logger.warn('Continuing without merge…
+logger.warn(`TARGET_BRANCH_FETCH_FAILED: Unable to fetch target branch | Branch: ${targetBranch} | Error: ${fetchError.message} | Impact: Proceeding without merge, PR may have conflicts`);
+logger.warn('MERGE_SKIPPED_NO_FETCH: Continuing without pre-merge | Reason: Target branch fetch failed | Impact: PR may require manual conflict resolution');
 }
 // Check if merge is needed (avoid unnecessary merge commits)
 try {
 const { stdout: mergeBase } = await run(`git merge-base HEAD ${targetBranch}`);
 const { stdout: targetCommit } = await run(`git rev-parse ${targetBranch}`);
 if (mergeBase.trim() === targetCommit.trim()) {
-logger.info(…
+logger.info(`MERGE_NOT_NEEDED: Current branch already up-to-date with target | Branch: ${targetBranch} | Status: in-sync | Action: Skipping merge`);
 } else {
 // Try to merge target branch into current branch
 let mergeSucceeded = false;
 try {
 await run(`git merge ${targetBranch} --no-edit -m "Merge ${targetBranch} to sync before version bump"`);
-logger.info(…
+logger.info(`MERGE_SUCCESS: Successfully merged target branch into current branch | Target: ${targetBranch} | Purpose: Sync before version bump`);
 mergeSucceeded = true;
 } catch (mergeError) {
 // If merge conflicts occur, check if they're only in version-related files
@@ -633,11 +649,11 @@ const execute = async (runConfig)=>{
 mergeError.stderr || ''
 ].join(' ');
 if (errorText.includes('CONFLICT')) {
-logger.warn(…
+logger.warn(`MERGE_CONFLICTS_DETECTED: Merge conflicts found, attempting automatic resolution | Target: ${targetBranch} | Strategy: Auto-resolve version files`);
 // Get list of conflicted files
 const { stdout: conflictedFiles } = await run('git diff --name-only --diff-filter=U');
 const conflicts = conflictedFiles.trim().split('\n').filter(Boolean);
-logger.verbose(`Conflicted files: ${conflicts.join(', ')}`);
+logger.verbose(`MERGE_CONFLICTS_LIST: Conflicted files detected | Files: ${conflicts.join(', ')} | Count: ${conflicts.length}`);
 // Check if conflicts are only in package.json and package-lock.json
 const versionFiles = [
 'package.json',
@@ -645,29 +661,29 @@ const execute = async (runConfig)=>{
 ];
 const nonVersionConflicts = conflicts.filter((f)=>!versionFiles.includes(f));
 if (nonVersionConflicts.length > 0) {
-logger.error(…
+logger.error(`MERGE_AUTO_RESOLVE_FAILED: Cannot auto-resolve conflicts in non-version files | Files: ${nonVersionConflicts.join(', ')} | Count: ${nonVersionConflicts.length} | Resolution: Manual intervention required`);
 logger.error('');
-logger.error('…
-logger.error(' 1…
-logger.error(' 2…
-logger.error(' 3…
-logger.error(' 4…
+logger.error('CONFLICT_RESOLUTION_REQUIRED: Manual steps to resolve conflicts:');
+logger.error(' Step 1: Resolve conflicts in the files listed above');
+logger.error(' Step 2: Stage resolved files | Command: git add <resolved-files>');
+logger.error(' Step 3: Complete merge commit | Command: git commit');
+logger.error(' Step 4: Resume publish process | Command: kodrdriv publish');
 logger.error('');
 throw new Error(`Merge conflicts in non-version files. Please resolve manually.`);
 }
 // Auto-resolve version conflicts by accepting current branch versions
 // (keep our working branch's version, which is likely already updated)
-logger.info(`…
+logger.info(`MERGE_AUTO_RESOLVING: Automatically resolving version conflicts | Strategy: Keep current branch versions | Files: ${versionFiles.join(', ')}`);
 for (const file of conflicts){
 if (versionFiles.includes(file)) {
 await run(`git checkout --ours ${file}`);
 await run(`git add ${file}`);
-logger.verbose(`Resolved…
+logger.verbose(`MERGE_FILE_RESOLVED: Resolved file using current branch version | File: ${file} | Strategy: checkout --ours`);
 }
 }
 // Complete the merge
 await run(`git commit --no-edit -m "Merge ${targetBranch} to sync before version bump (auto-resolved version conflicts)"`);
-logger.info(…
+logger.info(`MERGE_AUTO_RESOLVE_SUCCESS: Successfully auto-resolved version conflicts and completed merge | Target: ${targetBranch} | Files: ${versionFiles.join(', ')}`);
 mergeSucceeded = true;
 } else {
 // Not a conflict error, re-throw
@@ -677,19 +693,19 @@ const execute = async (runConfig)=>{
 // Only run npm install if merge actually happened
 if (mergeSucceeded) {
 // Run npm install to update package-lock.json based on merged package.json
-logger.info('Running npm install after merge…
+logger.info('POST_MERGE_NPM_INSTALL: Running npm install after merge | Purpose: Update package-lock.json based on merged package.json | Command: npm install');
 await run('npm install');
-logger.info('…
+logger.info('POST_MERGE_NPM_COMPLETE: npm install completed successfully | Status: Dependencies synchronized');
 // Commit any changes from npm install (e.g., package-lock.json updates)
 const { stdout: mergeChangesStatus } = await run('git status --porcelain');
 if (mergeChangesStatus.trim()) {
-logger.verbose('…
+logger.verbose('POST_MERGE_CHANGES_DETECTED: Changes detected after npm install | Action: Staging for commit | Command: git add');
 // Check if package-lock.json exists before trying to stage it
 const packageLockExistsPostMerge = await storage.exists('package-lock.json');
 const filesToStagePostMerge = packageLockExistsPostMerge ? 'package.json package-lock.json' : 'package.json';
 await run(`git add ${filesToStagePostMerge}`);
 if (await hasStagedChanges()) {
-logger.verbose('Committing post-merge changes…
+logger.verbose('POST_MERGE_COMMIT: Committing post-merge changes | Files: ' + filesToStagePostMerge + ' | Purpose: Finalize merge');
 await execute$2(runConfig);
 }
 }
@@ -697,7 +713,7 @@ const execute = async (runConfig)=>{
 }
 } catch (error) {
 // Only catch truly unexpected errors here
-logger.error(…
+logger.error(`MERGE_UNEXPECTED_ERROR: Unexpected error during merge process | Error: ${error.message} | Target: ${targetBranch} | Action: Aborting publish`);
 throw error;
 }
 }, `merge ${targetBranch} into current branch`);
@@ -710,7 +726,7 @@ const execute = async (runConfig)=>{
 logger.info('Would determine target version and update package.json');
 newVersion = '1.0.0'; // Mock version for dry run
 } else {
-var…
+var _runConfig_publish13;
 const packageJsonContents = await storage.readFile('package.json', 'utf-8');
 const parsed = safeJsonParse(packageJsonContents, 'package.json');
 const packageJson = validatePackageJson(parsed, 'package.json');
@@ -722,28 +738,110 @@ const execute = async (runConfig)=>{
 const branchDependentResult = await calculateBranchDependentVersion(currentVersion, currentBranch, runConfig.branches, targetBranch);
 proposedVersion = branchDependentResult.version;
 finalTargetBranch = branchDependentResult.targetBranch;
-logger.info(…
-logger.info(…
+logger.info(`VERSION_BRANCH_DEPENDENT_CALCULATED: Branch-dependent version calculated | Current: ${currentVersion} | Proposed: ${proposedVersion} | Strategy: branch-dependent`);
+logger.info(`TARGET_BRANCH_FINAL: Final target branch determined | Branch: ${finalTargetBranch} | Source: branch-dependent config`);
 // Update targetBranch for the rest of the function
 targetBranch = finalTargetBranch;
 } else {
-var…
+var _runConfig_publish14;
 // Use existing logic for backward compatibility
-const targetVersionInput = ((…
+const targetVersionInput = ((_runConfig_publish14 = runConfig.publish) === null || _runConfig_publish14 === void 0 ? void 0 : _runConfig_publish14.targetVersion) || 'patch';
 proposedVersion = calculateTargetVersion(currentVersion, targetVersionInput);
 }
 const targetTagName = `v${proposedVersion}`;
 const tagExists = await checkIfTagExists(targetTagName);
+// Smart tag conflict handling
 if (tagExists) {
-
+const { getNpmPublishedVersion, getTagInfo } = await import('../util/general.js');
+logger.warn(`TAG_ALREADY_EXISTS: Tag already exists in repository | Tag: ${targetTagName} | Status: conflict | Action: Check npm registry`);
+// Check if this version is published on npm
+const npmVersion = await getNpmPublishedVersion(packageJson.name);
+const tagInfo = await getTagInfo(targetTagName);
+if (npmVersion === proposedVersion) {
+var _runConfig_publish15;
+// Version is already published on npm
+logger.info(`VERSION_ALREADY_PUBLISHED: Version already published on npm registry | Version: ${proposedVersion} | Status: published | Action: Skipping`);
+logger.info(`PUBLISH_SKIPPED_DUPLICATE: Skipping publish operation | Reason: Package already at target version | Version: ${proposedVersion}`);
+logger.info('');
+logger.info('REPUBLISH_OPTIONS: Options if you need to republish:');
+logger.info(` Option 1: Bump version | Command: npm version patch (or minor/major)`);
+logger.info(` Option 2: Re-run publish | Command: kodrdriv publish`);
+logger.info('');
+if ((_runConfig_publish15 = runConfig.publish) === null || _runConfig_publish15 === void 0 ? void 0 : _runConfig_publish15.skipAlreadyPublished) {
+logger.info('PUBLISH_SKIPPED_FLAG: Skipping package due to flag | Flag: --skip-already-published | Version: ' + proposedVersion + ' | Status: skipped');
+// Emit skip marker for tree mode detection
+// eslint-disable-next-line no-console
+console.log('KODRDRIV_PUBLISH_SKIPPED');
+return; // Exit without error
+} else {
+throw new Error(`Version ${proposedVersion} already published. Use --skip-already-published to continue.`);
+}
+} else {
+var _tagInfo_commit, _runConfig_publish16;
+// Tag exists but version not on npm - likely failed previous publish
+logger.warn('');
+logger.warn('PUBLISH_SITUATION_ANALYSIS: Analyzing publish conflict situation | Tag: ' + targetTagName + ' | npm: ' + (npmVersion || 'not published'));
+logger.warn(`PUBLISH_ANALYSIS_TAG_EXISTS: Tag exists locally | Tag: ${targetTagName} | Commit: ${tagInfo === null || tagInfo === void 0 ? void 0 : (_tagInfo_commit = tagInfo.commit) === null || _tagInfo_commit === void 0 ? void 0 : _tagInfo_commit.substring(0, 8)}`);
+logger.warn(`PUBLISH_ANALYSIS_NPM_STATUS: npm registry status | Version: ${npmVersion || 'not published'} | Status: ${npmVersion ? 'published' : 'missing'}`);
+logger.warn(`PUBLISH_ANALYSIS_CONCLUSION: Previous publish likely failed after tag creation | Reason: Tag exists but not on npm | Resolution: Recovery needed`);
+logger.warn('');
+logger.warn('PUBLISH_RECOVERY_OPTIONS: Recovery options available:');
+logger.warn(' OPTION_1_FORCE: Force republish by deleting tag | Command: kodrdriv publish --force-republish');
+logger.warn(' OPTION_2_BUMP: Skip version and bump | Command: npm version patch && kodrdriv publish');
+logger.warn(' OPTION_3_MANUAL: Manually delete tag:');
+logger.warn(` Command: git tag -d ${targetTagName}`);
+logger.warn(` Command: git push origin :refs/tags/${targetTagName}`);
+logger.warn('');
+if ((_runConfig_publish16 = runConfig.publish) === null || _runConfig_publish16 === void 0 ? void 0 : _runConfig_publish16.forceRepublish) {
+logger.info('PUBLISH_FORCE_REPUBLISH: Force republish mode enabled | Action: Deleting existing tag | Tag: ' + targetTagName + ' | Purpose: Allow republish');
+if (!isDryRun) {
+const { runSecure } = await import('@eldrforge/git-tools');
+// Delete local tag
+try {
+await runSecure('git', [
+'tag',
+'-d',
+targetTagName
+]);
+logger.info(`TAG_DELETED_LOCAL: Deleted local tag | Tag: ${targetTagName} | Status: removed-local`);
+} catch (error) {
+logger.debug(`Could not delete local tag: ${error.message}`);
+}
+// Delete remote tag
+try {
+await runSecure('git', [
+'push',
+'origin',
+`:refs/tags/${targetTagName}`
+]);
+logger.info(`TAG_DELETED_REMOTE: Deleted remote tag | Tag: ${targetTagName} | Remote: origin | Status: removed-remote`);
+} catch (error) {
+logger.debug(`Could not delete remote tag: ${error.message}`);
+}
+logger.info('PUBLISH_TAG_CLEANUP_COMPLETE: Tag deleted successfully | Status: ready-for-publish | Next: Continue with publish workflow');
+} else {
+logger.info('Would delete tags and continue with publish');
+}
+} else {
+throw new Error(`Tag ${targetTagName} already exists. Use --force-republish to override.`);
+}
+}
 }
-if ((…
-var…
-newVersion = await confirmVersionInteractively(currentVersion, proposedVersion, (…
+if ((_runConfig_publish13 = runConfig.publish) === null || _runConfig_publish13 === void 0 ? void 0 : _runConfig_publish13.interactive) {
+var _runConfig_publish17;
+newVersion = await confirmVersionInteractively(currentVersion, proposedVersion, (_runConfig_publish17 = runConfig.publish) === null || _runConfig_publish17 === void 0 ? void 0 : _runConfig_publish17.targetVersion);
 const confirmedTagName = `v${newVersion}`;
 const confirmedTagExists = await checkIfTagExists(confirmedTagName);
 if (confirmedTagExists) {
-
+var _runConfig_publish18;
+const { getNpmPublishedVersion } = await import('../util/general.js');
+const npmVersion = await getNpmPublishedVersion(packageJson.name);
+if (npmVersion === newVersion) {
+throw new Error(`Tag ${confirmedTagName} already exists and version is published on npm. Please choose a different version.`);
+} else if (!((_runConfig_publish18 = runConfig.publish) === null || _runConfig_publish18 === void 0 ? void 0 : _runConfig_publish18.forceRepublish)) {
+throw new Error(`Tag ${confirmedTagName} already exists. Use --force-republish to override.`);
+}
+// If forceRepublish is set, we'll continue (tag will be deleted later)
 }
 } else {
 newVersion = proposedVersion;
@@ -784,23 +882,23 @@ const execute = async (runConfig)=>{
 releaseConfig.release = {
 ...runConfig.release,
 currentBranch: currentBranch,
-...((…
+...((_runConfig_publish7 = runConfig.publish) === null || _runConfig_publish7 === void 0 ? void 0 : _runConfig_publish7.from) && {
 from: runConfig.publish.from
 },
-...((…
+...((_runConfig_publish8 = runConfig.publish) === null || _runConfig_publish8 === void 0 ? void 0 : _runConfig_publish8.interactive) && {
 interactive: runConfig.publish.interactive
 },
-...((…
+...((_runConfig_publish9 = runConfig.publish) === null || _runConfig_publish9 === void 0 ? void 0 : _runConfig_publish9.fromMain) && {
 fromMain: runConfig.publish.fromMain
 }
 };
-if ((…
+if ((_runConfig_publish10 = runConfig.publish) === null || _runConfig_publish10 === void 0 ? void 0 : _runConfig_publish10.from) {
 logger.verbose(`Using custom 'from' reference for release notes: ${runConfig.publish.from}`);
 }
-if ((…
+if ((_runConfig_publish11 = runConfig.publish) === null || _runConfig_publish11 === void 0 ? void 0 : _runConfig_publish11.interactive) {
 logger.verbose('Interactive mode enabled for release notes generation');
 }
-if ((…
+if ((_runConfig_publish12 = runConfig.publish) === null || _runConfig_publish12 === void 0 ? void 0 : _runConfig_publish12.fromMain) {
 logger.verbose('Forcing comparison against main branch for release notes');
 }
 const releaseSummary = await execute$1(releaseConfig);
@@ -858,12 +956,12 @@ const execute = async (runConfig)=>{
 logger.debug(`Could not verify workflow configuration for wait skip: ${error.message}`);
 }
 if (!shouldSkipWait) {
-var…
+var _runConfig_publish19, _runConfig_publish20, _runConfig_publish21;
 // Configure timeout and user confirmation behavior
-const timeout = ((…
-const senditMode = ((…
+const timeout = ((_runConfig_publish19 = runConfig.publish) === null || _runConfig_publish19 === void 0 ? void 0 : _runConfig_publish19.checksTimeout) || KODRDRIV_DEFAULTS.publish.checksTimeout;
|
|
962
|
+
const senditMode = ((_runConfig_publish20 = runConfig.publish) === null || _runConfig_publish20 === void 0 ? void 0 : _runConfig_publish20.sendit) || false;
|
|
865
963
|
// sendit flag overrides skipUserConfirmation - if sendit is true, skip confirmation
|
|
866
|
-
const skipUserConfirmation = senditMode || ((
|
|
964
|
+
const skipUserConfirmation = senditMode || ((_runConfig_publish21 = runConfig.publish) === null || _runConfig_publish21 === void 0 ? void 0 : _runConfig_publish21.skipUserConfirmation) || false;
|
|
867
965
|
await GitHub.waitForPullRequestChecks(pr.number, {
|
|
868
966
|
timeout,
|
|
869
967
|
skipUserConfirmation
|
|
@@ -879,14 +977,14 @@ const execute = async (runConfig)=>{
|
|
|
879
977
|
} catch (error) {
|
|
880
978
|
// Check if this is a merge conflict error
|
|
881
979
|
if (error.message && (error.message.includes('not mergeable') || error.message.includes('Pull Request is not mergeable') || error.message.includes('merge conflict'))) {
|
|
882
|
-
logger.error(
|
|
980
|
+
logger.error(`PR_MERGE_CONFLICTS: Pull request has merge conflicts | PR Number: ${pr.number} | Status: conflicts | Resolution: Manual intervention required`);
|
|
883
981
|
logger.error('');
|
|
884
|
-
logger.error('
|
|
885
|
-
logger.error(` 1
|
|
886
|
-
logger.error(' 2
|
|
887
|
-
logger.error(' 3
|
|
982
|
+
logger.error('PR_CONFLICT_RESOLUTION: Steps to resolve conflicts:');
|
|
983
|
+
logger.error(` Step 1: Visit pull request | URL: ${pr.html_url}`);
|
|
984
|
+
logger.error(' Step 2: Resolve merge conflicts | Method: GitHub UI or local');
|
|
985
|
+
logger.error(' Step 3: Re-run publish command | Command: kodrdriv publish');
|
|
888
986
|
logger.error('');
|
|
889
|
-
logger.error('
|
|
987
|
+
logger.error('PR_AUTO_CONTINUE: Command will auto-detect existing PR | Behavior: Continues from where it left off | No re-creation needed');
|
|
890
988
|
throw new Error(`Merge conflicts detected in PR #${pr.number}. Please resolve conflicts and re-run the command.`);
|
|
891
989
|
} else {
|
|
892
990
|
// Re-throw other merge errors
|
|
@@ -904,7 +1002,7 @@ const execute = async (runConfig)=>{
|
|
|
904
1002
|
'--porcelain'
|
|
905
1003
|
]);
|
|
906
1004
|
if (statusOutput.trim()) {
|
|
907
|
-
logger.info('
|
|
1005
|
+
logger.info('PUBLISH_STASH_SAVING: Stashing uncommitted changes before checkout | Command: git stash push | Purpose: Protect changes during branch switch');
|
|
908
1006
|
await runSecure('git', [
|
|
909
1007
|
'stash',
|
|
910
1008
|
'push',
|
|
@@ -912,7 +1010,7 @@ const execute = async (runConfig)=>{
|
|
|
912
1010
|
'kodrdriv: stash before checkout target branch'
|
|
913
1011
|
]);
|
|
914
1012
|
hasStashedChanges = true;
|
|
915
|
-
logger.info('
|
|
1013
|
+
logger.info('PUBLISH_STASH_SUCCESS: Successfully stashed uncommitted changes | Status: saved | Name: kodrdriv stash');
|
|
916
1014
|
}
|
|
917
1015
|
}
|
|
918
1016
|
try {
|
|
@@ -922,7 +1020,7 @@ const execute = async (runConfig)=>{
|
|
|
922
1020
|
}, `checkout ${targetBranch}`);
|
|
923
1021
|
// Sync target branch with remote to avoid conflicts during PR creation
|
|
924
1022
|
if (!isDryRun) {
|
|
925
|
-
logger.info(
|
|
1023
|
+
logger.info(`PUBLISH_TARGET_SYNCING: Syncing target branch with remote | Branch: ${targetBranch} | Remote: origin | Purpose: Avoid PR conflicts`);
|
|
926
1024
|
try {
|
|
927
1025
|
const remoteExists = await run(`git ls-remote --exit-code --heads origin ${targetBranch}`).then(()=>true).catch(()=>false);
|
|
928
1026
|
if (remoteExists) {
|
|
@@ -930,24 +1028,24 @@ const execute = async (runConfig)=>{
|
|
|
930
1028
|
await run(`git fetch origin ${targetBranch}`);
|
|
931
1029
|
await run(`git merge origin/${targetBranch} --no-ff --no-edit`);
|
|
932
1030
|
}, `sync ${targetBranch}`);
|
|
933
|
-
logger.info(
|
|
1031
|
+
logger.info(`PUBLISH_TARGET_SYNCED: Successfully synced target with remote | Branch: ${targetBranch} | Remote: origin | Status: in-sync`);
|
|
934
1032
|
} else {
|
|
935
|
-
logger.info(
|
|
1033
|
+
logger.info(`PUBLISH_TARGET_NO_REMOTE: No remote target branch found | Branch: ${targetBranch} | Remote: origin | Action: Will be created on first push`);
|
|
936
1034
|
}
|
|
937
1035
|
} catch (syncError) {
|
|
938
1036
|
if (syncError.message && syncError.message.includes('CONFLICT')) {
|
|
939
|
-
logger.error(
|
|
940
|
-
logger.error(`
|
|
941
|
-
logger.error(` 1
|
|
942
|
-
logger.error(` 2
|
|
943
|
-
logger.error(` 3
|
|
944
|
-
logger.error(` 4
|
|
945
|
-
logger.error(` 5
|
|
946
|
-
logger.error(` 6
|
|
947
|
-
logger.error(` 7
|
|
1037
|
+
logger.error(`PUBLISH_SYNC_CONFLICTS: Merge conflicts during target sync | Branch: ${targetBranch} | Remote: origin | Status: conflicts-detected`);
|
|
1038
|
+
logger.error(`PUBLISH_SYNC_RESOLUTION: Manual conflict resolution steps:`);
|
|
1039
|
+
logger.error(` Step 1: Checkout target | Command: git checkout ${targetBranch}`);
|
|
1040
|
+
logger.error(` Step 2: Pull and merge | Command: git pull origin ${targetBranch}`);
|
|
1041
|
+
logger.error(` Step 3: Resolve conflicts in files`);
|
|
1042
|
+
logger.error(` Step 4: Stage resolved files | Command: git add <resolved-files>`);
|
|
1043
|
+
logger.error(` Step 5: Complete merge | Command: git commit`);
|
|
1044
|
+
logger.error(` Step 6: Return to branch | Command: git checkout ${currentBranch}`);
|
|
1045
|
+
logger.error(` Step 7: Resume publish | Command: kodrdriv publish`);
|
|
948
1046
|
throw syncError;
|
|
949
1047
|
} else {
|
|
950
|
-
logger.warn(
|
|
1048
|
+
logger.warn(`PUBLISH_SYNC_WARNING: Could not sync target with remote | Branch: ${targetBranch} | Remote: origin | Error: ${syncError.message}`);
|
|
951
1049
|
// Continue with publish process, but log the warning
|
|
952
1050
|
}
|
|
953
1051
|
}
|
|
@@ -957,17 +1055,17 @@ const execute = async (runConfig)=>{
|
|
|
957
1055
|
} catch (error) {
|
|
958
1056
|
// Check if this is a merge conflict or sync issue
|
|
959
1057
|
if (!isDryRun && (error.message.includes('conflict') || error.message.includes('CONFLICT') || error.message.includes('diverged') || error.message.includes('non-fast-forward'))) {
|
|
960
|
-
logger.error(
|
|
1058
|
+
logger.error(`PUBLISH_TARGET_SYNC_FAILED: Failed to sync target branch with remote | Branch: ${targetBranch} | Remote: origin | Impact: Cannot proceed safely`);
|
|
961
1059
|
logger.error('');
|
|
962
|
-
logger.error('
|
|
963
|
-
logger.error(`
|
|
964
|
-
logger.error(`
|
|
965
|
-
logger.error(`
|
|
966
|
-
logger.error(`
|
|
967
|
-
logger.error(`
|
|
968
|
-
logger.error(`
|
|
1060
|
+
logger.error('PUBLISH_SYNC_RECOVERY_OPTIONS: Available recovery options:');
|
|
1061
|
+
logger.error(` OPTION_1_AUTO: Attempt automatic resolution | Command: kodrdriv publish --sync-target`);
|
|
1062
|
+
logger.error(` OPTION_2_MANUAL: Manually resolve conflicts:`);
|
|
1063
|
+
logger.error(` Step 1: Checkout target | Command: git checkout ${targetBranch}`);
|
|
1064
|
+
logger.error(` Step 2: Pull from remote | Command: git pull origin ${targetBranch}`);
|
|
1065
|
+
logger.error(` Step 3: Resolve conflicts and commit`);
|
|
1066
|
+
logger.error(` Step 4: Re-run publish | Command: kodrdriv publish`);
|
|
969
1067
|
logger.error('');
|
|
970
|
-
logger.error('
|
|
1068
|
+
logger.error('PUBLISH_STOPPED_SAFETY: Publish process stopped | Reason: Prevent data loss | Status: safe-to-recover');
|
|
971
1069
|
throw new Error(`Target branch '${targetBranch}' sync failed. Use recovery options above to resolve.`);
|
|
972
1070
|
} else {
|
|
973
1071
|
// Re-throw other errors
|
|
@@ -976,16 +1074,16 @@ const execute = async (runConfig)=>{
|
|
|
976
1074
|
}
|
|
977
1075
|
// Restore stashed changes if we stashed them
|
|
978
1076
|
if (hasStashedChanges) {
|
|
979
|
-
logger.info('
|
|
1077
|
+
logger.info('PUBLISH_STASH_RESTORING: Restoring previously stashed changes | Command: git stash pop | Purpose: Restore working directory state');
|
|
980
1078
|
try {
|
|
981
1079
|
await runSecure('git', [
|
|
982
1080
|
'stash',
|
|
983
1081
|
'pop'
|
|
984
1082
|
]);
|
|
985
|
-
logger.info('
|
|
1083
|
+
logger.info('PUBLISH_STASH_RESTORED: Successfully restored stashed changes | Status: restored | Stash: removed');
|
|
986
1084
|
} catch (stashError) {
|
|
987
|
-
logger.warn(
|
|
988
|
-
logger.warn('
|
|
1085
|
+
logger.warn(`PUBLISH_STASH_RESTORE_FAILED: Could not restore stashed changes | Error: ${stashError.message} | Impact: Changes still in stash`);
|
|
1086
|
+
logger.warn('PUBLISH_STASH_AVAILABLE: Changes available in git stash | Command: git stash list | Purpose: View and restore manually');
|
|
989
1087
|
}
|
|
990
1088
|
}
|
|
991
1089
|
// Now create and push the tag on the target branch
|
|
@@ -1077,9 +1175,9 @@ const execute = async (runConfig)=>{
|
|
|
1077
1175
|
}
|
|
1078
1176
|
logger.info('Creating GitHub release...');
|
|
1079
1177
|
if (isDryRun) {
|
|
1080
|
-
var
|
|
1178
|
+
var _runConfig_publish22;
|
|
1081
1179
|
logger.info('Would read package.json version and create GitHub release with retry logic');
|
|
1082
|
-
const milestonesEnabled = !((
|
|
1180
|
+
const milestonesEnabled = !((_runConfig_publish22 = runConfig.publish) === null || _runConfig_publish22 === void 0 ? void 0 : _runConfig_publish22.noMilestones);
|
|
1083
1181
|
if (milestonesEnabled) {
|
|
1084
1182
|
logger.info('Would close milestone for released version');
|
|
1085
1183
|
} else {
|
|
@@ -1095,13 +1193,13 @@ const execute = async (runConfig)=>{
|
|
|
1095
1193
|
let retries = 3;
|
|
1096
1194
|
while(retries > 0){
|
|
1097
1195
|
try {
|
|
1098
|
-
var
|
|
1196
|
+
var _runConfig_publish23;
|
|
1099
1197
|
await GitHub.createRelease(tagName, releaseTitle, releaseNotesContent);
|
|
1100
1198
|
logger.info(`GitHub release created successfully for tag: ${tagName}`);
|
|
1101
1199
|
// Close milestone for this version if enabled
|
|
1102
|
-
const milestonesEnabled = !((
|
|
1200
|
+
const milestonesEnabled = !((_runConfig_publish23 = runConfig.publish) === null || _runConfig_publish23 === void 0 ? void 0 : _runConfig_publish23.noMilestones);
|
|
1103
1201
|
if (milestonesEnabled) {
|
|
1104
|
-
logger.info('
|
|
1202
|
+
logger.info('PUBLISH_MILESTONE_CLOSING: Closing milestone for released version | Action: Close GitHub milestone | Purpose: Mark release complete');
|
|
1105
1203
|
const version = tagName.replace(/^v/, ''); // Remove 'v' prefix if present
|
|
1106
1204
|
await GitHub.closeMilestoneForVersion(version);
|
|
1107
1205
|
} else {
|
|
@@ -1132,12 +1230,12 @@ const execute = async (runConfig)=>{
|
|
|
1132
1230
|
if (isDryRun) {
|
|
1133
1231
|
logger.info('Would monitor GitHub Actions workflows triggered by release');
|
|
1134
1232
|
} else {
|
|
1135
|
-
var
|
|
1136
|
-
const workflowTimeout = ((
|
|
1137
|
-
const senditMode = ((
|
|
1138
|
-
const skipUserConfirmation = senditMode || ((
|
|
1233
|
+
var _runConfig_publish24, _runConfig_publish25, _runConfig_publish26, _runConfig_publish27;
|
|
1234
|
+
const workflowTimeout = ((_runConfig_publish24 = runConfig.publish) === null || _runConfig_publish24 === void 0 ? void 0 : _runConfig_publish24.releaseWorkflowsTimeout) || KODRDRIV_DEFAULTS.publish.releaseWorkflowsTimeout;
|
|
1235
|
+
const senditMode = ((_runConfig_publish25 = runConfig.publish) === null || _runConfig_publish25 === void 0 ? void 0 : _runConfig_publish25.sendit) || false;
|
|
1236
|
+
const skipUserConfirmation = senditMode || ((_runConfig_publish26 = runConfig.publish) === null || _runConfig_publish26 === void 0 ? void 0 : _runConfig_publish26.skipUserConfirmation) || false;
|
|
1139
1237
|
// Get workflow names - either from config or auto-detect
|
|
1140
|
-
let workflowNames = (
|
|
1238
|
+
let workflowNames = (_runConfig_publish27 = runConfig.publish) === null || _runConfig_publish27 === void 0 ? void 0 : _runConfig_publish27.releaseWorkflowNames;
|
|
1141
1239
|
if (!workflowNames || workflowNames.length === 0) {
|
|
1142
1240
|
logger.info('No specific workflow names configured, auto-detecting workflows triggered by release events...');
|
|
1143
1241
|
try {
|
|
@@ -1163,7 +1261,7 @@ const execute = async (runConfig)=>{
|
|
|
1163
1261
|
}
|
|
1164
1262
|
// Switch back to source branch and sync with target
|
|
1165
1263
|
logger.info('');
|
|
1166
|
-
logger.info(
|
|
1264
|
+
logger.info(`PUBLISH_POST_SYNC: Syncing source branch with target after publish | Purpose: Keep branches synchronized | Strategy: Reset and force push`);
|
|
1167
1265
|
await runWithDryRunSupport(`git checkout ${currentBranch}`, isDryRun);
|
|
1168
1266
|
if (!isDryRun) {
|
|
1169
1267
|
// Sync target into source
|
|
@@ -1173,10 +1271,10 @@ const execute = async (runConfig)=>{
|
|
|
1173
1271
|
// The squash merge created a single commit on target that represents all source commits
|
|
1174
1272
|
logger.info(`Resetting ${currentBranch} to ${targetBranch} (squash merge)...`);
|
|
1175
1273
|
await run(`git reset --hard ${targetBranch}`);
|
|
1176
|
-
logger.info(
|
|
1274
|
+
logger.info(`PUBLISH_BRANCH_RESET: Reset source branch to target | Source: ${currentBranch} | Target: ${targetBranch} | Status: synchronized`);
|
|
1177
1275
|
// After squash merge and reset, we need to force push
|
|
1178
1276
|
// This is safe because we just merged to main and are syncing working branch
|
|
1179
|
-
logger.info(
|
|
1277
|
+
logger.info(`PUBLISH_FORCE_PUSHING: Force pushing synchronized branch | Branch: ${currentBranch} | Remote: origin | Purpose: Complete post-publish sync`);
|
|
1180
1278
|
try {
|
|
1181
1279
|
// Verify that remote working branch is ancestor of main (safety check)
|
|
1182
1280
|
try {
|
|
@@ -1189,16 +1287,16 @@ const execute = async (runConfig)=>{
|
|
|
1189
1287
|
}
|
|
1190
1288
|
// Use --force-with-lease for safer force push
|
|
1191
1289
|
await run(`git push --force-with-lease origin ${currentBranch}`);
|
|
1192
|
-
logger.info(
|
|
1290
|
+
logger.info(`PUBLISH_FORCE_PUSH_SUCCESS: Successfully force pushed to remote | Branch: ${currentBranch} | Remote: origin | Status: synchronized`);
|
|
1193
1291
|
} catch (pushError) {
|
|
1194
1292
|
// If force push fails, provide helpful message
|
|
1195
|
-
logger.warn(
|
|
1196
|
-
logger.warn(`
|
|
1197
|
-
logger.warn(`
|
|
1293
|
+
logger.warn(`PUBLISH_FORCE_PUSH_FAILED: Could not force push branch | Branch: ${currentBranch} | Remote: origin | Error: ${pushError.message}`);
|
|
1294
|
+
logger.warn(`PUBLISH_MANUAL_PUSH_NEEDED: Manual force push required | Action: Push manually`);
|
|
1295
|
+
logger.warn(`PUBLISH_MANUAL_PUSH_COMMAND: Force push command | Command: git push --force-with-lease origin ${currentBranch}`);
|
|
1198
1296
|
}
|
|
1199
1297
|
} else {
|
|
1200
1298
|
// For merge/rebase methods, try to merge target back into source
|
|
1201
|
-
logger.info(`Merging ${targetBranch}
|
|
1299
|
+
logger.info(`PUBLISH_MERGE_TARGET_BACK: Merging target back into source | Target: ${targetBranch} | Source: ${currentBranch} | Purpose: Sync branches after publish`);
|
|
1202
1300
|
// Try fast-forward first (works with merge/rebase methods)
|
|
1203
1301
|
// Use runSecure to avoid error output for expected failure
|
|
1204
1302
|
let fastForwardSucceeded = false;
|
|
@@ -1209,13 +1307,13 @@ const execute = async (runConfig)=>{
|
|
|
1209
1307
|
'--ff-only'
|
|
1210
1308
|
]);
|
|
1211
1309
|
fastForwardSucceeded = true;
|
|
1212
|
-
logger.info(
|
|
1310
|
+
logger.info(`PUBLISH_MERGE_FF_SUCCESS: Fast-forward merged target into source | Target: ${targetBranch} | Source: ${currentBranch} | Status: merged`);
|
|
1213
1311
|
} catch {
|
|
1214
1312
|
logger.verbose(`Fast-forward merge not possible, performing regular merge...`);
|
|
1215
1313
|
}
|
|
1216
1314
|
if (!fastForwardSucceeded) {
|
|
1217
1315
|
await run(`git merge ${targetBranch} --no-edit`);
|
|
1218
|
-
logger.info(
|
|
1316
|
+
logger.info(`PUBLISH_MERGE_SUCCESS: Merged target into source | Target: ${targetBranch} | Source: ${currentBranch} | Status: merged`);
|
|
1219
1317
|
}
|
|
1220
1318
|
}
|
|
1221
1319
|
// Determine version bump based on branch configuration
|
|
@@ -1234,32 +1332,32 @@ const execute = async (runConfig)=>{
|
|
|
1234
1332
|
}
|
|
1235
1333
|
}
|
|
1236
1334
|
// Bump to next development version
|
|
1237
|
-
logger.info(`Bumping to next development version
|
|
1335
|
+
logger.info(`PUBLISH_DEV_VERSION_BUMPING: Bumping to next development version | Command: ${versionCommand} | Tag: ${versionTag} | Purpose: Prepare for next cycle`);
|
|
1238
1336
|
try {
|
|
1239
1337
|
const { stdout: newVersion } = await run(`npm version ${versionCommand} --preid=${versionTag}`);
|
|
1240
|
-
logger.info(
|
|
1338
|
+
logger.info(`PUBLISH_DEV_VERSION_BUMPED: Version bumped successfully | New Version: ${newVersion.trim()} | Type: development | Status: completed`);
|
|
1241
1339
|
} catch (versionError) {
|
|
1242
|
-
logger.warn(
|
|
1243
|
-
logger.warn('
|
|
1340
|
+
logger.warn(`PUBLISH_DEV_VERSION_BUMP_FAILED: Failed to bump version | Error: ${versionError.message} | Impact: Version not updated`);
|
|
1341
|
+
logger.warn('PUBLISH_MANUAL_VERSION_BUMP: Manual version bump may be needed | Action: Bump manually for next cycle | Command: npm version');
|
|
1244
1342
|
}
|
|
1245
1343
|
// Push updated source branch
|
|
1246
|
-
logger.info(`Pushing updated ${currentBranch}
|
|
1344
|
+
logger.info(`PUBLISH_PUSH_SOURCE: Pushing updated source branch | Branch: ${currentBranch} | Remote: origin | Purpose: Push development version`);
|
|
1247
1345
|
try {
|
|
1248
1346
|
await runGitWithLock(process.cwd(), async ()=>{
|
|
1249
1347
|
await run(`git push origin ${currentBranch}`);
|
|
1250
1348
|
}, `push ${currentBranch}`);
|
|
1251
|
-
logger.info(
|
|
1349
|
+
logger.info(`PUBLISH_PUSH_SOURCE_SUCCESS: Pushed source branch successfully | Branch: ${currentBranch} | Remote: origin | Status: pushed`);
|
|
1252
1350
|
} catch (pushError) {
|
|
1253
|
-
logger.warn(
|
|
1254
|
-
logger.warn(`
|
|
1351
|
+
logger.warn(`PUBLISH_PUSH_SOURCE_FAILED: Failed to push source branch | Branch: ${currentBranch} | Error: ${pushError.message} | Impact: Need manual push`);
|
|
1352
|
+
logger.warn(`PUBLISH_MANUAL_PUSH_COMMAND: Manual push command | Command: git push origin ${currentBranch}`);
|
|
1255
1353
|
}
|
|
1256
1354
|
} else {
|
|
1257
|
-
logger.info(`Would merge ${targetBranch}
|
|
1258
|
-
logger.info(`Would bump version to next development
|
|
1259
|
-
logger.info(`Would push ${currentBranch}
|
|
1355
|
+
logger.info(`PUBLISH_MERGE_DRY_RUN: Would merge target into source | Mode: dry-run | Target: ${targetBranch} | Source: ${currentBranch} | Strategy: ff-only`);
|
|
1356
|
+
logger.info(`PUBLISH_VERSION_DRY_RUN: Would bump version to next development | Mode: dry-run | Action: Version bump`);
|
|
1357
|
+
logger.info(`PUBLISH_PUSH_DRY_RUN: Would push source to remote | Mode: dry-run | Branch: ${currentBranch} | Remote: origin`);
|
|
1260
1358
|
}
|
|
1261
1359
|
logger.info('');
|
|
1262
|
-
logger.info(
|
|
1360
|
+
logger.info(`PUBLISH_COMPLETE: Publish workflow completed successfully | Branch: ${currentBranch} | Status: completed | Version: next-development`);
|
|
1263
1361
|
};
|
|
1264
1362
|
|
|
1265
1363
|
export { execute };
|