@eldrforge/kodrdriv 1.2.26 → 1.2.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AI-FRIENDLY-LOGGING-GUIDE.md +237 -0
- package/AI-LOGGING-MIGRATION-COMPLETE.md +371 -0
- package/ALREADY-PUBLISHED-PACKAGES-FIX.md +264 -0
- package/AUDIT-BRANCHES-PROGRESS-FIX.md +90 -0
- package/AUDIT-EXAMPLE-OUTPUT.md +113 -0
- package/CHECKPOINT-RECOVERY-FIX.md +450 -0
- package/LOGGING-MIGRATION-STATUS.md +186 -0
- package/PARALLEL-PUBLISH-FIXES-IMPLEMENTED.md +405 -0
- package/PARALLEL-PUBLISH-IMPROVEMENTS-IMPLEMENTED.md +439 -0
- package/PARALLEL-PUBLISH-QUICK-REFERENCE.md +375 -0
- package/PARALLEL_EXECUTION_FIX.md +2 -2
- package/PUBLISH_IMPROVEMENTS_IMPLEMENTED.md +294 -0
- package/VERSION-AUDIT-FIX.md +333 -0
- package/dist/application.js +6 -6
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +43 -13
- package/dist/arguments.js.map +1 -1
- package/dist/commands/audio-commit.js +18 -18
- package/dist/commands/audio-commit.js.map +1 -1
- package/dist/commands/audio-review.js +32 -32
- package/dist/commands/audio-review.js.map +1 -1
- package/dist/commands/clean.js +9 -9
- package/dist/commands/clean.js.map +1 -1
- package/dist/commands/commit.js +20 -20
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/development.js +91 -90
- package/dist/commands/development.js.map +1 -1
- package/dist/commands/link.js +36 -36
- package/dist/commands/link.js.map +1 -1
- package/dist/commands/publish.js +345 -225
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/release.js +14 -14
- package/dist/commands/release.js.map +1 -1
- package/dist/commands/review.js +15 -17
- package/dist/commands/review.js.map +1 -1
- package/dist/commands/select-audio.js +5 -5
- package/dist/commands/select-audio.js.map +1 -1
- package/dist/commands/tree.js +75 -34
- package/dist/commands/tree.js.map +1 -1
- package/dist/commands/unlink.js +39 -39
- package/dist/commands/unlink.js.map +1 -1
- package/dist/commands/updates.js +150 -14
- package/dist/commands/updates.js.map +1 -1
- package/dist/commands/versions.js +14 -13
- package/dist/commands/versions.js.map +1 -1
- package/dist/constants.js +1 -1
- package/dist/content/diff.js +5 -5
- package/dist/content/diff.js.map +1 -1
- package/dist/content/files.js +2 -2
- package/dist/content/files.js.map +1 -1
- package/dist/content/log.js +3 -3
- package/dist/content/log.js.map +1 -1
- package/dist/execution/CommandValidator.js +6 -6
- package/dist/execution/CommandValidator.js.map +1 -1
- package/dist/execution/DynamicTaskPool.js +33 -10
- package/dist/execution/DynamicTaskPool.js.map +1 -1
- package/dist/execution/RecoveryManager.js +99 -21
- package/dist/execution/RecoveryManager.js.map +1 -1
- package/dist/execution/TreeExecutionAdapter.js +65 -48
- package/dist/execution/TreeExecutionAdapter.js.map +1 -1
- package/dist/main.js +2 -2
- package/dist/main.js.map +1 -1
- package/dist/util/checkpointManager.js +4 -4
- package/dist/util/checkpointManager.js.map +1 -1
- package/dist/util/dependencyGraph.js +2 -2
- package/dist/util/dependencyGraph.js.map +1 -1
- package/dist/util/fileLock.js +1 -1
- package/dist/util/fileLock.js.map +1 -1
- package/dist/util/general.js +148 -15
- package/dist/util/general.js.map +1 -1
- package/dist/util/interactive.js +2 -2
- package/dist/util/interactive.js.map +1 -1
- package/dist/util/performance.js.map +1 -1
- package/dist/util/safety.js +13 -13
- package/dist/util/safety.js.map +1 -1
- package/dist/utils/branchState.js +567 -0
- package/dist/utils/branchState.js.map +1 -0
- package/package.json +1 -1
- package/scripts/update-test-log-assertions.js +73 -0
package/dist/commands/publish.js
CHANGED
@@ -30,11 +30,11 @@ const scanNpmrcForEnvVars = async (storage)=>{
 }
 }
 } catch (error) {
- logger.warn(`…
- logger.verbose('…
+ logger.warn(`NPMRC_READ_FAILED: Unable to read .npmrc configuration file | Path: ${npmrcPath} | Error: ${error.message}`);
+ logger.verbose('NPMRC_READ_IMPACT: Environment variable detection for publishing may be affected due to failed .npmrc read');
 }
 } else {
- logger.debug('.npmrc file…
+ logger.debug('NPMRC_NOT_FOUND: No .npmrc file present in current directory | Action: Skipping environment variable scan | Path: ' + npmrcPath);
 }
 return envVars;
 };
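Most of the changes in this file follow the same pattern as the hunk above: free-form log strings are replaced with a structured `EVENT_KEY: description | Field: value` convention (in line with the AI-FRIENDLY-LOGGING-GUIDE.md added in this release). A minimal sketch of how a downstream tool might parse such a line is shown below; the `parseStructuredLog` helper and its return shape are illustrative assumptions, not part of the package.

```js
// Illustrative sketch only: parse a kodrdriv-style structured log line such as
// "NPMRC_NOT_FOUND: No .npmrc file present in current directory | Action: Skipping environment variable scan | Path: /repo/.npmrc"
function parseStructuredLog(line) {
    const [head, ...fields] = line.split(' | ');
    const match = head.match(/^([A-Z0-9_]+):\s*(.*)$/);
    if (!match) return null; // not a structured line
    const entry = { event: match[1], message: match[2], fields: {} };
    for (const field of fields) {
        const idx = field.indexOf(':');
        if (idx > 0) {
            entry.fields[field.slice(0, idx).trim()] = field.slice(idx + 1).trim();
        }
    }
    return entry;
}

console.log(parseStructuredLog(
    'NPMRC_NOT_FOUND: No .npmrc file present in current directory | Action: Skipping environment variable scan | Path: /repo/.npmrc'
));
// => { event: 'NPMRC_NOT_FOUND', message: '...', fields: { Action: '...', Path: '/repo/.npmrc' } }
```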
@@ -50,7 +50,7 @@ const scanNpmrcForEnvVars = async (storage)=>{
 await fs.access(packageLockPath);
 } catch {
 // No package-lock.json, nothing to clean
- logger.verbose('No package-lock.json…
+ logger.verbose('PACKAGE_LOCK_NOT_FOUND: No package-lock.json file exists | Action: Skipping npm link cleanup | Path: ' + packageLockPath);
 return;
 }
 // Read and parse package-lock.json
@@ -66,7 +66,7 @@ const scanNpmrcForEnvVars = async (storage)=>{
 const resolvedPath = pkgInfo.resolved.replace('file:', '');
 if (resolvedPath.startsWith('../') || resolvedPath.startsWith('./')) {
 hasFileReferences = true;
- logger.verbose(`Found npm link reference: ${pkgPath}…
+ logger.verbose(`NPM_LINK_DETECTED: Found npm link reference in packages section | Package: ${pkgPath} | Resolved: ${pkgInfo.resolved} | Type: relative_file_dependency`);
 break;
 }
 }
@@ -79,33 +79,33 @@ const scanNpmrcForEnvVars = async (storage)=>{
 const versionPath = pkgInfo.version.replace('file:', '');
 if (versionPath.startsWith('../') || versionPath.startsWith('./')) {
 hasFileReferences = true;
- logger.verbose(`Found npm link reference: ${pkgName}…
+ logger.verbose(`NPM_LINK_DETECTED: Found npm link reference in dependencies section | Package: ${pkgName} | Version: ${pkgInfo.version} | Type: relative_file_dependency`);
 break;
 }
 }
 }
 }
 if (hasFileReferences) {
- logger.info('…
- logger.info('…
+ logger.info('NPM_LINK_CLEANUP_REQUIRED: Detected npm link references in package-lock.json | File: package-lock.json | Impact: Must be cleaned before publish');
+ logger.info('NPM_LINK_CLEANUP_STARTING: Removing package-lock.json and regenerating clean version | Action: Remove file with relative dependencies');
 if (isDryRun) {
- logger.info('…
+ logger.info('DRY_RUN_OPERATION: Would remove package-lock.json and regenerate it | Mode: dry-run | File: package-lock.json');
 } else {
 // Remove package-lock.json
 await fs.unlink(packageLockPath);
- logger.verbose('…
+ logger.verbose('NPM_LINK_CLEANUP_FILE_REMOVED: Deleted package-lock.json containing npm link references | Path: ' + packageLockPath);
 // Regenerate clean package-lock.json
- logger.verbose('…
+ logger.verbose('NPM_LOCK_REGENERATING: Executing npm install to regenerate package-lock.json from package.json | Command: npm install --package-lock-only --no-audit --no-fund');
 await runWithDryRunSupport('npm install --package-lock-only --no-audit --no-fund', isDryRun);
- logger.info('…
+ logger.info('NPM_LOCK_REGENERATED: Successfully regenerated clean package-lock.json without link references | Path: ' + packageLockPath);
 }
 } else {
- logger.verbose('No npm link references found in package-lock.json');
+ logger.verbose('NPM_LINK_CHECK_CLEAN: No npm link references found in package-lock.json | Status: Ready for publish | File: ' + packageLockPath);
 }
 } catch (error) {
 // Log warning but don't fail - let npm update handle any issues
- logger.warn(…
- logger.verbose('…
+ logger.warn(`NPM_LINK_CHECK_FAILED: Unable to check or clean npm link references | Error: ${error.message} | Impact: Continuing with publish, npm will handle issues`);
+ logger.verbose('PUBLISH_PROCESS_CONTINUING: Proceeding with publish workflow despite npm link check failure | Next: Standard npm publish validation');
 }
 };
 const validateEnvironmentVariables = (requiredEnvVars, isDryRun)=>{
@@ -118,9 +118,9 @@ const validateEnvironmentVariables = (requiredEnvVars, isDryRun)=>{
 }
 if (missingEnvVars.length > 0) {
 if (isDryRun) {
- logger.warn(`…
+ logger.warn(`ENV_VARS_MISSING: Required environment variables not set | Variables: ${missingEnvVars.join(', ')} | Mode: dry-run | Impact: Would fail in real publish`);
 } else {
- logger.error(`…
+ logger.error(`ENV_VARS_MISSING: Required environment variables not set | Variables: ${missingEnvVars.join(', ')} | Action: Must set before publish | Source: .npmrc configuration`);
 throw new Error(`Missing required environment variables: ${missingEnvVars.join(', ')}. Please set these environment variables before running publish.`);
 }
 }
@@ -132,11 +132,11 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 const storage = create({
 log: logger.info
 });
- logger.info('…
+ logger.info('PRECHECK_STARTING: Executing publish prechecks | Phase: validation | Target: ' + (targetBranch || 'default'));
 // Check if we're in a git repository
 try {
 if (isDryRun) {
- logger.info('Would…
+ logger.info('PRECHECK_GIT_REPO: Would verify git repository | Mode: dry-run | Command: git rev-parse --git-dir');
 } else {
 await run('git rev-parse --git-dir');
 }
@@ -148,10 +148,10 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 }
 }
 // Check for uncommitted changes
- logger.info('Checking for uncommitted changes…
+ logger.info('PRECHECK_GIT_STATUS: Checking for uncommitted changes | Command: git status --porcelain | Requirement: Clean working directory');
 try {
 if (isDryRun) {
- logger.info('Would…
+ logger.info('PRECHECK_GIT_STATUS: Would verify clean working directory | Mode: dry-run | Command: git status --porcelain');
 } else {
 const { stdout } = await run('git status --porcelain');
 if (stdout.trim()) {
@@ -168,9 +168,9 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 // Use the passed target branch or fallback to config/default
 const effectiveTargetBranch = targetBranch || ((_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.targetBranch) || 'main';
 // Check that we're not running from the target branch
- logger.info('…
+ logger.info('PRECHECK_BRANCH: Verifying current branch is not target branch | Target: ' + effectiveTargetBranch + ' | Requirement: Must run from feature branch');
 if (isDryRun) {
- logger.info(`Would verify current branch is not…
+ logger.info(`PRECHECK_BRANCH: Would verify current branch is not target branch | Mode: dry-run | Target: ${effectiveTargetBranch}`);
 } else {
 const currentBranch = await GitHub.getCurrentBranchName();
 if (currentBranch === effectiveTargetBranch) {
@@ -178,43 +178,42 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 }
 }
 // Check target branch sync with remote
- logger.info(`Checking target branch…
+ logger.info(`PRECHECK_BRANCH_SYNC: Checking target branch sync with remote | Branch: ${effectiveTargetBranch} | Remote: origin | Requirement: Branches must be synchronized`);
 if (isDryRun) {
- logger.info(`Would verify target branch…
+ logger.info(`PRECHECK_BRANCH_SYNC: Would verify target branch is in sync with remote | Mode: dry-run | Branch: ${effectiveTargetBranch} | Remote: origin`);
 } else {
 // Only check if local target branch exists (it's okay if it doesn't exist locally)
 const targetBranchExists = await localBranchExists(effectiveTargetBranch);
 if (targetBranchExists) {
 const syncStatus = await isBranchInSyncWithRemote(effectiveTargetBranch);
 if (!syncStatus.inSync) {
- logger.error(…
+ logger.error(`BRANCH_SYNC_FAILED: Target branch not synchronized with remote | Branch: ${effectiveTargetBranch} | Status: out-of-sync | Impact: Cannot proceed with publish`);
 logger.error('');
 if (syncStatus.error) {
- logger.error(`…
+ logger.error(`BRANCH_SYNC_ERROR: ${syncStatus.error}`);
 } else if (syncStatus.localSha && syncStatus.remoteSha) {
- logger.error(`…
- logger.error(`   Remote: ${syncStatus.remoteSha.substring(0, 8)}`);
+ logger.error(`BRANCH_SYNC_DIVERGENCE: Local and remote commits differ | Local SHA: ${syncStatus.localSha.substring(0, 8)} | Remote SHA: ${syncStatus.remoteSha.substring(0, 8)}`);
 }
 logger.error('');
- logger.error('…
- logger.error(`   1…
- logger.error(`   2…
- logger.error('   3…
- logger.error('   4…
+ logger.error('RESOLUTION_STEPS: Manual intervention required to sync branches:');
+ logger.error(`  Step 1: Switch to target branch | Command: git checkout ${effectiveTargetBranch}`);
+ logger.error(`  Step 2: Pull latest changes | Command: git pull origin ${effectiveTargetBranch}`);
+ logger.error('  Step 3: Resolve merge conflicts if present');
+ logger.error('  Step 4: Return to feature branch and retry publish');
 logger.error('');
- logger.error(…
+ logger.error(`ALTERNATIVE_OPTION: Automatic sync available | Command: kodrdriv publish --sync-target | Branch: ${effectiveTargetBranch}`);
 throw new Error(`Target branch '${effectiveTargetBranch}' is not in sync with remote. Please sync the branch before running publish.`);
 } else {
- logger.info(…
+ logger.info(`BRANCH_SYNC_VERIFIED: Target branch is synchronized with remote | Branch: ${effectiveTargetBranch} | Status: in-sync`);
 }
 } else {
- logger.info(…
+ logger.info(`BRANCH_NOT_LOCAL: Target branch does not exist locally | Branch: ${effectiveTargetBranch} | Action: Will be created during publish process`);
 }
 }
 // Check GitHub Actions workflow configuration
- logger.info('Checking GitHub Actions workflow configuration…
+ logger.info('PRECHECK_WORKFLOW: Checking GitHub Actions workflow configuration | Target: PR automation | Requirement: Workflows should trigger on pull requests');
 if (isDryRun) {
- logger.info('Would check if GitHub Actions workflows are configured for pull requests');
+ logger.info('PRECHECK_WORKFLOW: Would check if GitHub Actions workflows are configured for pull requests | Mode: dry-run');
 } else {
 try {
 // TODO: Re-enable when checkWorkflowConfiguration is exported from github-tools
@@ -226,25 +225,25 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 triggeredWorkflowNames: []
 };
 if (!workflowConfig.hasWorkflows) ; else if (!workflowConfig.hasPullRequestTriggers) ; else {
- logger.info(…
+ logger.info(`WORKFLOW_CONFIGURED: Found workflows that will trigger on pull requests | Target Branch: ${effectiveTargetBranch} | Workflow Count: ${workflowConfig.triggeredWorkflowNames.length}`);
 for (const workflowName of workflowConfig.triggeredWorkflowNames){
- logger.info(`…
+ logger.info(`WORKFLOW_ACTIVE: ${workflowName} | Trigger: pull_request | Target: ${effectiveTargetBranch}`);
 }
 }
 } catch (error) {
 // Don't fail the precheck if we can't verify workflows
 // The wait logic will handle it later
- logger.debug(`…
+ logger.debug(`WORKFLOW_CHECK_FAILED: Unable to verify workflow configuration | Error: ${error.message} | Impact: Will proceed with publish | Note: Wait logic will handle checks later`);
 }
 }
 // Check if prepublishOnly script exists in package.json
- logger.info('Checking for prepublishOnly script…
+ logger.info('PRECHECK_PREPUBLISH: Checking for prepublishOnly script in package.json | Requirement: Must exist to run pre-flight checks | Expected: clean, lint, build, test');
 const packageJsonPath = path__default.join(process.cwd(), 'package.json');
 if (!await storage.exists(packageJsonPath)) {
 if (!isDryRun) {
 throw new Error('package.json not found in current directory.');
 } else {
- logger.warn('package.json…
+ logger.warn('PACKAGE_JSON_NOT_FOUND: No package.json in current directory | Mode: dry-run | Impact: Cannot verify prepublishOnly script | Path: ' + packageJsonPath);
 }
 } else {
 var _packageJson_scripts;
@@ -257,19 +256,19 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 if (!isDryRun) {
 throw new Error('Failed to parse package.json. Please ensure it contains valid JSON.');
 } else {
- logger.warn('…
+ logger.warn('PACKAGE_JSON_PARSE_FAILED: Unable to parse package.json | Mode: dry-run | Impact: Cannot verify prepublishOnly script | Path: ' + packageJsonPath + ' | Requirement: Valid JSON format');
 }
 }
 if (packageJson && !((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.prepublishOnly)) {
 if (!isDryRun) {
 throw new Error('prepublishOnly script is required in package.json but was not found. Please add a prepublishOnly script that runs your pre-flight checks (e.g., clean, lint, build, test).');
 } else {
- logger.warn('prepublishOnly script…
+ logger.warn('PREPUBLISH_SCRIPT_MISSING: No prepublishOnly script found in package.json | Mode: dry-run | Requirement: Script must exist | Expected Tasks: clean, lint, build, test | Path: ' + packageJsonPath);
 }
 }
 }
 // Check required environment variables
- logger.verbose('Checking required environment variables…
+ logger.verbose('PRECHECK_ENV_VARS: Checking required environment variables | Source: Configuration and .npmrc | Requirement: All required vars must be set');
 const coreRequiredEnvVars = ((_runConfig_publish1 = runConfig.publish) === null || _runConfig_publish1 === void 0 ? void 0 : _runConfig_publish1.requiredEnvVars) || [];
 const npmrcEnvVars = isDryRun ? [] : await scanNpmrcForEnvVars(storage); // Skip .npmrc scan in dry run
 const allRequiredEnvVars = [
@@ -279,12 +278,12 @@ const runPrechecks = async (runConfig, targetBranch)=>{
 ])
 ];
 if (allRequiredEnvVars.length > 0) {
- logger.verbose(`…
+ logger.verbose(`ENV_VARS_REQUIRED: Environment variables needed for publish | Variables: ${allRequiredEnvVars.join(', ')} | Count: ${allRequiredEnvVars.length} | Source: config + .npmrc`);
 validateEnvironmentVariables(allRequiredEnvVars, isDryRun);
 } else {
- logger.verbose('No required environment variables specified.');
+ logger.verbose('ENV_VARS_NONE: No required environment variables specified | Status: No validation needed | Source: config + .npmrc');
 }
- logger.info('All prechecks passed…
+ logger.info('PRECHECK_COMPLETE: All publish prechecks passed successfully | Status: Ready to proceed | Next: Execute publish workflow');
 };
 // Helper: deep-sort object keys for stable comparison
 const sortObjectKeys = (value)=>{
@@ -315,7 +314,7 @@ const isReleaseNecessaryComparedToTarget = async (targetBranch, isDryRun)=>{
 ]);
 } catch (error) {
 // Target branch doesn't exist or isn't accessible
- logger.verbose(`Target branch…
+ logger.verbose(`RELEASE_CHECK_NO_TARGET: Target branch does not exist or is not accessible | Branch: ${targetBranch} | Action: Proceeding with publish | Reason: First release to this branch`);
 return {
 necessary: true,
 reason: `Target branch '${targetBranch}' does not exist; first release to this branch`
@@ -362,9 +361,11 @@ const isReleaseNecessaryComparedToTarget = async (targetBranch, isDryRun)=>{
 const headSorted = sortObjectKeys(headWithoutVersion);
 const equalExceptVersion = JSON.stringify(baseSorted) === JSON.stringify(headSorted);
 if (equalExceptVersion) {
+ const currentVersion = headPkg.version;
+ const targetVersion = basePkg.version;
 return {
 necessary: false,
- reason:…
+ reason: `No meaningful changes detected:\n  • Current version: ${currentVersion}\n  • Target branch version: ${targetVersion}\n  • Only package.json version field differs\n\n  To force republish: Add meaningful code changes or use --force (not yet implemented)`
 };
 }
 // Other fields changed inside package.json
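The hunk above is part of the release-necessity check: it strips the version field from both the target branch's package.json and the current one, deep-sorts keys, and skips publishing when nothing but the version differs. A standalone sketch of that comparison is below; `sortObjectKeys` and `onlyVersionDiffers` are re-implementations written for illustration, not the package's exact helpers.

```js
// Sketch of the "version-only change" comparison used by the release-necessity check.
const sortObjectKeys = (value) => {
    if (Array.isArray(value)) return value.map(sortObjectKeys);
    if (value && typeof value === 'object') {
        return Object.keys(value).sort().reduce((acc, key) => {
            acc[key] = sortObjectKeys(value[key]);
            return acc;
        }, {});
    }
    return value;
};

const onlyVersionDiffers = (basePkg, headPkg) => {
    const { version: _base, ...base } = basePkg;
    const { version: _head, ...head } = headPkg;
    return JSON.stringify(sortObjectKeys(base)) === JSON.stringify(sortObjectKeys(head));
};

// true: only the version field changed, so a republish would be skipped
console.log(onlyVersionDiffers(
    { name: 'pkg', version: '1.2.26', dependencies: { a: '^1.0.0' } },
    { name: 'pkg', version: '1.2.28', dependencies: { a: '^1.0.0' } }
));
```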
@@ -374,7 +375,7 @@ const isReleaseNecessaryComparedToTarget = async (targetBranch, isDryRun)=>{
 };
 } catch (error) {
 // Conservative: if we cannot prove it is only a version change, proceed with release
- logger.verbose(`…
+ logger.verbose(`RELEASE_CHECK_COMPARISON_FAILED: Unable to conclusively compare package.json changes | Error: ${error.message} | Action: Proceeding conservatively with publish | Reason: Cannot verify version-only change`);
 return {
 necessary: true,
 reason: 'Could not compare package.json safely'
@@ -384,27 +385,27 @@ const isReleaseNecessaryComparedToTarget = async (targetBranch, isDryRun)=>{
 const handleTargetBranchSyncRecovery = async (runConfig, targetBranch)=>{
 const isDryRun = runConfig.dryRun || false;
 const logger = getDryRunLogger(isDryRun);
- logger.info(…
+ logger.info(`BRANCH_SYNC_ATTEMPTING: Initiating sync of target branch with remote | Branch: ${targetBranch} | Remote: origin | Operation: fetch + merge`);
 if (isDryRun) {
- logger.info(`Would attempt to sync…
+ logger.info(`BRANCH_SYNC_DRY_RUN: Would attempt to sync branch with remote | Mode: dry-run | Branch: ${targetBranch} | Remote: origin`);
 return;
 }
 const syncResult = await safeSyncBranchWithRemote(targetBranch);
 if (syncResult.success) {
- logger.info(…
- logger.info('…
+ logger.info(`BRANCH_SYNC_SUCCESS: Successfully synchronized branch with remote | Branch: ${targetBranch} | Remote: origin | Status: in-sync`);
+ logger.info('BRANCH_SYNC_NEXT_STEP: Ready to proceed with publish | Action: Re-run publish command | Branch: ' + targetBranch);
 } else if (syncResult.conflictResolutionRequired) {
- logger.error(…
+ logger.error(`BRANCH_SYNC_CONFLICTS: Sync failed due to merge conflicts | Branch: ${targetBranch} | Status: conflicts-detected | Resolution: Manual intervention required`);
 logger.error('');
- logger.error('…
- logger.error(`   1…
- logger.error(`   2…
- logger.error('   3…
- logger.error('   4…
+ logger.error('CONFLICT_RESOLUTION_STEPS: Manual conflict resolution required:');
+ logger.error(`  Step 1: Switch to target branch | Command: git checkout ${targetBranch}`);
+ logger.error(`  Step 2: Pull and resolve conflicts | Command: git pull origin ${targetBranch}`);
+ logger.error('  Step 3: Commit resolved changes | Command: git commit');
+ logger.error('  Step 4: Return to feature branch and retry | Command: kodrdriv publish');
 logger.error('');
 throw new Error(`Target branch '${targetBranch}' has conflicts that require manual resolution.`);
 } else {
- logger.error(…
+ logger.error(`BRANCH_SYNC_FAILED: Sync operation failed | Branch: ${targetBranch} | Error: ${syncResult.error} | Remote: origin`);
 throw new Error(`Failed to sync target branch: ${syncResult.error}`);
 }
 };
@@ -422,37 +423,38 @@ const execute = async (runConfig)=>{
 } else {
 currentBranch = await GitHub.getCurrentBranchName();
 // Fetch latest remote information to avoid conflicts
- logger.info('…
+ logger.info('GIT_FETCH_STARTING: Fetching latest remote information | Remote: origin | Purpose: Avoid conflicts during publish | Command: git fetch origin');
 try {
 await run('git fetch origin');
- logger.info('…
+ logger.info('GIT_FETCH_SUCCESS: Successfully fetched latest remote information | Remote: origin | Status: up-to-date');
 } catch (error) {
- logger.warn(…
+ logger.warn(`GIT_FETCH_FAILED: Unable to fetch from remote | Remote: origin | Error: ${error.message} | Impact: May cause conflicts if remote has changes`);
 }
 // Sync current branch with remote to avoid conflicts
- logger.info(…
+ logger.info(`CURRENT_BRANCH_SYNC: Synchronizing current branch with remote | Branch: ${currentBranch} | Remote: origin | Purpose: Avoid conflicts during publish`);
 try {
 const remoteExists = await run(`git ls-remote --exit-code --heads origin ${currentBranch}`).then(()=>true).catch(()=>false);
 if (remoteExists) {
- //…
+ // Use explicit fetch+merge instead of pull to avoid git config conflicts
 await runGitWithLock(process.cwd(), async ()=>{
- await run(`git…
- …
- …
+ await run(`git fetch origin ${currentBranch}`);
+ await run(`git merge origin/${currentBranch} --no-ff --no-edit`);
+ }, `sync ${currentBranch}`);
+ logger.info(`CURRENT_BRANCH_SYNCED: Successfully synchronized current branch with remote | Branch: ${currentBranch} | Remote: origin/${currentBranch} | Status: in-sync`);
 } else {
- logger.info(…
+ logger.info(`REMOTE_BRANCH_NOT_FOUND: No remote branch exists | Branch: ${currentBranch} | Remote: origin | Action: Will be created on first push`);
 }
 } catch (error) {
 if (error.message && error.message.includes('CONFLICT')) {
- logger.error(…
- logger.error(`…
- logger.error(`   1…
- logger.error(`   2…
- logger.error(`   3…
- logger.error(`   4…
+ logger.error(`MERGE_CONFLICTS_DETECTED: Conflicts found when syncing current branch with remote | Branch: ${currentBranch} | Remote: origin/${currentBranch} | Status: conflicts-require-resolution`);
+ logger.error(`CONFLICT_RESOLUTION_REQUIRED: Manual intervention needed to resolve conflicts and continue:`);
+ logger.error(`  Step 1: Resolve conflicts in affected files`);
+ logger.error(`  Step 2: Stage resolved files | Command: git add <resolved-files>`);
+ logger.error(`  Step 3: Commit resolution | Command: git commit`);
+ logger.error(`  Step 4: Retry publish | Command: kodrdriv publish`);
 throw new Error(`Merge conflicts detected when syncing ${currentBranch} with remote. Please resolve conflicts manually.`);
 } else {
- logger.warn(…
+ logger.warn(`CURRENT_BRANCH_SYNC_FAILED: Unable to sync current branch with remote | Branch: ${currentBranch} | Remote: origin/${currentBranch} | Error: ${error.message} | Impact: May cause issues during publish`);
 }
 }
 }
@@ -466,19 +468,19 @@ const execute = async (runConfig)=>{
 if (branchConfig.targetBranch) {
 targetBranch = branchConfig.targetBranch;
 }
- logger.info(…
- logger.info(`…
- logger.info(`…
+ logger.info(`BRANCH_DEPENDENT_TARGETING: Branch-specific configuration active | Source: ${currentBranch} | Target: ${targetBranch} | Feature: Branch-dependent versioning and targeting`);
+ logger.info(`BRANCH_CONFIGURATION_SOURCE: Current branch | Branch: ${currentBranch} | Type: source`);
+ logger.info(`BRANCH_CONFIGURATION_TARGET: Target branch for publish | Branch: ${targetBranch} | Type: destination`);
 // Look at target branch config to show version strategy
 const targetBranchConfig = runConfig.branches[targetBranch];
 if (targetBranchConfig === null || targetBranchConfig === void 0 ? void 0 : targetBranchConfig.version) {
 const versionType = targetBranchConfig.version.type;
 const versionTag = targetBranchConfig.version.tag;
 const versionIncrement = targetBranchConfig.version.increment;
- logger.info(`…
+ logger.info(`VERSION_STRATEGY: Target branch version configuration | Branch: ${targetBranch} | Type: ${versionType} | Tag: ${versionTag || 'none'} | Increment: ${versionIncrement ? 'enabled' : 'disabled'}`);
 }
 } else {
- logger.debug(`No branch-specific…
+ logger.debug(`BRANCH_TARGETING_DEFAULT: No branch-specific configuration found | Branch: ${currentBranch} | Action: Using default target | Target: ${targetBranch}`);
 }
 // Handle --sync-target flag
 if ((_runConfig_publish1 = runConfig.publish) === null || _runConfig_publish1 === void 0 ? void 0 : _runConfig_publish1.syncTarget) {
@@ -486,13 +488,13 @@ const execute = async (runConfig)=>{
 return; // Exit after sync operation
 }
 // Check if target branch exists and create it if needed
- logger.info(`…
+ logger.info(`TARGET_BRANCH_CHECK: Verifying target branch existence | Branch: ${targetBranch} | Action: Create if missing | Source: Current HEAD`);
 if (isDryRun) {
- logger.info(`Would…
+ logger.info(`TARGET_BRANCH_CHECK: Would verify target branch exists and create if needed | Mode: dry-run | Branch: ${targetBranch}`);
 } else {
 const targetBranchExists = await localBranchExists(targetBranch);
 if (!targetBranchExists) {
- logger.info(…
+ logger.info(`TARGET_BRANCH_CREATING: Target branch does not exist, creating from current branch | Branch: ${targetBranch} | Source: HEAD | Remote: origin`);
 try {
 // Wrap git branch and push operations with lock
 await runGitWithLock(process.cwd(), async ()=>{
@@ -502,73 +504,90 @@ const execute = async (runConfig)=>{
 targetBranch,
 'HEAD'
 ]);
- logger.info(…
+ logger.info(`TARGET_BRANCH_CREATED: Successfully created target branch locally | Branch: ${targetBranch} | Source: HEAD`);
 // Push the new branch to origin
 await runSecure('git', [
 'push',
 'origin',
 targetBranch
 ]);
- logger.info(…
+ logger.info(`TARGET_BRANCH_PUSHED: Successfully pushed new target branch to remote | Branch: ${targetBranch} | Remote: origin/${targetBranch}`);
 }, `create and push target branch ${targetBranch}`);
 } catch (error) {
 throw new Error(`Failed to create target branch '${targetBranch}': ${error.message}`);
 }
 } else {
- logger.info(…
+ logger.info(`TARGET_BRANCH_EXISTS: Target branch already exists locally | Branch: ${targetBranch} | Status: ready`);
 }
 }
 // Run prechecks before starting any work
 await runPrechecks(runConfig, targetBranch);
 // Early check: determine if a release is necessary compared to target branch
- logger.info('Evaluating if…
+ logger.info('RELEASE_NECESSITY_CHECK: Evaluating if release is required | Comparison: current branch vs target | Target: ' + targetBranch + ' | Purpose: Avoid unnecessary publishes');
 try {
 const necessity = await isReleaseNecessaryComparedToTarget(targetBranch, isDryRun);
 if (!necessity.necessary) {
- logger.info(…
+ logger.info(`\nRELEASE_SKIPPED: No meaningful changes detected, skipping publish | Reason: ${necessity.reason} | Target: ${targetBranch}`);
 // Emit a machine-readable marker so tree mode can detect skip and avoid propagating versions
- logger.info…
+ // CRITICAL: Use console.log to write to stdout (logger.info goes to stderr via winston)
+ // eslint-disable-next-line no-console
+ console.log('KODRDRIV_PUBLISH_SKIPPED');
 return;
 } else {
- logger.verbose(`…
+ logger.verbose(`RELEASE_PROCEEDING: Meaningful changes detected, continuing with publish | Reason: ${necessity.reason} | Target: ${targetBranch}`);
 }
 } catch (error) {
 // On unexpected errors, proceed with publish to avoid false negatives blocking releases
- logger.verbose(`…
+ logger.verbose(`RELEASE_NECESSITY_CHECK_ERROR: Unable to determine release necessity | Error: ${error.message} | Action: Proceeding conservatively with publish | Rationale: Avoid blocking valid releases`);
 }
- logger.info('…
+ logger.info('RELEASE_PROCESS_STARTING: Initiating release workflow | Target: ' + targetBranch + ' | Phase: dependency updates and version management');
 let pr = null;
 if (isDryRun) {
- logger.info('Would check for existing pull request');
- logger.info('Assuming no existing PR found…
+ logger.info('PR_CHECK: Would check for existing pull request | Mode: dry-run | Action: Skip PR lookup');
+ logger.info('PR_ASSUMPTION: Assuming no existing PR found | Mode: dry-run | Purpose: Demo workflow');
 } else {
 const branchName = await GitHub.getCurrentBranchName();
 pr = await GitHub.findOpenPullRequestByHeadRef(branchName);
 }
 if (pr) {
- logger.info(`…
+ logger.info(`PR_FOUND: Existing pull request detected for current branch | URL: ${pr.html_url} | Status: open`);
 } else {
- var _runConfig_publish4, _runConfig_publish5, _runConfig_publish6, _runConfig_publish7, _runConfig_publish8, _runConfig_publish9, _runConfig_publish10, _runConfig_publish11;
- logger.info('No open pull request…
+ var _runConfig_publish4, _runConfig_publish5, _runConfig_publish6, _runConfig_publish7, _runConfig_publish8, _runConfig_publish9, _runConfig_publish10, _runConfig_publish11, _runConfig_publish12;
+ logger.info('PR_NOT_FOUND: No open pull request exists for current branch | Action: Starting new release publishing process | Next: Prepare dependencies and version');
 // STEP 1: Prepare for release (update dependencies and run prepublish checks) with NO version bump yet
- logger.verbose('Preparing for release:…
+ logger.verbose('RELEASE_PREP_STARTING: Preparing for release | Phase: dependency management | Action: Switch from workspace to remote dependencies | Version Bump: Not yet applied');
 // Clean up any npm link references before updating dependencies
- logger.verbose('…
+ logger.verbose('NPM_LINK_CHECK: Scanning package-lock.json for npm link references | File: package-lock.json | Purpose: Remove development symlinks before publish');
 await cleanupNpmLinkReferences(isDryRun);
- …
- const…
+ // Update inter-project dependencies if --update-deps flag is present
+ const updateDepsScope = (_runConfig_publish4 = runConfig.publish) === null || _runConfig_publish4 === void 0 ? void 0 : _runConfig_publish4.updateDeps;
+ if (updateDepsScope) {
+ logger.info(`INTER_PROJECT_DEPS_UPDATE: Updating inter-project dependencies | Scope: ${updateDepsScope} | Type: inter-project | Command: kodrdriv updates`);
+ const Updates = await import('./updates.js');
+ const updatesConfig = {
+ ...runConfig,
+ dryRun: isDryRun,
+ updates: {
+ scope: updateDepsScope,
+ interProject: true
+ }
+ };
+ await Updates.execute(updatesConfig);
+ }
+ logger.verbose('DEPS_UPDATE_REGISTRY: Updating dependencies to latest versions from npm registry | Source: registry | Target: package.json');
+ const updatePatterns = (_runConfig_publish5 = runConfig.publish) === null || _runConfig_publish5 === void 0 ? void 0 : _runConfig_publish5.dependencyUpdatePatterns;
 if (updatePatterns && updatePatterns.length > 0) {
- logger.verbose(`Updating dependencies matching patterns: ${updatePatterns.join(', ')}`);
+ logger.verbose(`DEPS_UPDATE_PATTERNS: Updating dependencies matching specified patterns | Patterns: ${updatePatterns.join(', ')} | Count: ${updatePatterns.length} | Command: npm update`);
 const patternsArg = updatePatterns.join(' ');
 await runWithDryRunSupport(`npm update ${patternsArg}`, isDryRun);
 } else {
- logger.verbose('No dependency…
+ logger.verbose('DEPS_UPDATE_ALL: No dependency patterns specified, updating all dependencies | Scope: all | Command: npm update');
 await runWithDryRunSupport('npm update', isDryRun);
 }
- logger.info('…
+ logger.info('PREPUBLISH_SCRIPT_RUNNING: Executing prepublishOnly script | Script: prepublishOnly | Purpose: Run pre-flight checks (clean, lint, build, test)');
 await runWithDryRunSupport('npm run prepublishOnly', isDryRun, {}, true); // Use inherited stdio
 // STEP 2: Commit dependency updates if any (still no version bump)
- logger.verbose('Staging dependency updates for commit');
+ logger.verbose('DEPS_STAGING: Staging dependency updates for commit | Files: package.json + package-lock.json | Command: git add | Note: Version bump not yet applied');
 // Check if package-lock.json exists before trying to stage it
 const packageLockExists = await storage.exists('package-lock.json');
 const filesToStage = packageLockExists ? 'package.json package-lock.json' : 'package.json';
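The hunk above also switches the skip marker from a logger call to a plain `console.log('KODRDRIV_PUBLISH_SKIPPED')`, so the marker lands on stdout while the winston logger keeps writing human-readable output to stderr. A rough sketch of how a parent process running packages in tree mode could detect that marker is shown below; this wrapper is an illustrative assumption, not kodrdriv's actual tree-mode implementation.

```js
// Illustrative only: detect the KODRDRIV_PUBLISH_SKIPPED marker on a child's stdout.
import { spawn } from 'node:child_process';

function publishAndDetectSkip(cwd) {
    return new Promise((resolve, reject) => {
        const child = spawn('kodrdriv', ['publish'], { cwd });
        let stdout = '';
        child.stdout.on('data', (chunk) => { stdout += chunk.toString(); });
        child.stderr.pipe(process.stderr); // human-readable logs stay visible
        child.on('error', reject);
        child.on('close', (code) => {
            if (code !== 0) return reject(new Error(`publish exited with code ${code}`));
            resolve({ skipped: stdout.includes('KODRDRIV_PUBLISH_SKIPPED') });
        });
    });
}
```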
@@ -576,26 +595,26 @@ const execute = async (runConfig)=>{
 await runGitWithLock(process.cwd(), async ()=>{
 await runWithDryRunSupport(`git add ${filesToStage}`, isDryRun);
 }, 'stage dependency updates');
- logger.verbose('Checking for staged dependency updates…
+ logger.verbose('DEPS_COMMIT_CHECK: Checking for staged dependency updates | Command: git status | Purpose: Determine if commit needed');
 if (isDryRun) {
- logger.verbose('Would create dependency update commit if changes are staged');
+ logger.verbose('DEPS_COMMIT_DRY_RUN: Would create dependency update commit if changes are staged | Mode: dry-run');
 } else {
 if (await hasStagedChanges()) {
- logger.verbose('Staged dependency changes…
+ logger.verbose('DEPS_COMMIT_CREATING: Staged dependency changes detected, creating commit | Files: ' + filesToStage + ' | Action: Execute commit command');
 // Commit also needs git lock
 await runGitWithLock(process.cwd(), async ()=>{
 await execute$2(runConfig);
 }, 'commit dependency updates');
 } else {
- logger.verbose('No dependency changes to commit…
+ logger.verbose('DEPS_COMMIT_SKIPPED: No dependency changes to commit | Files: ' + filesToStage + ' | Action: Skipping commit step');
 }
 }
 // STEP 3: Merge target branch into working branch (optional - now skipped by default since post-publish sync keeps branches in sync)
- const skipPreMerge = ((…
+ const skipPreMerge = ((_runConfig_publish6 = runConfig.publish) === null || _runConfig_publish6 === void 0 ? void 0 : _runConfig_publish6.skipPrePublishMerge) !== false; // Default to true (skip)
 if (skipPreMerge) {
- logger.verbose(…
+ logger.verbose(`PRE_MERGE_SKIPPED: Skipping pre-publish merge of target branch | Reason: Post-publish sync handles branch synchronization | Target: ${targetBranch} | Config: skipPrePublishMerge=true`);
 } else {
- logger.info(`Merging target branch…
+ logger.info(`PRE_MERGE_STARTING: Merging target branch into current branch | Target: ${targetBranch} | Purpose: Avoid version conflicts | Phase: pre-publish`);
 if (isDryRun) {
 logger.info(`Would merge ${targetBranch} into current branch`);
 } else {
@@ -604,23 +623,23 @@ const execute = async (runConfig)=>{
 // Fetch the latest target branch
 try {
 await run(`git fetch origin ${targetBranch}:${targetBranch}`);
- logger.info(…
+ logger.info(`TARGET_BRANCH_FETCHED: Successfully fetched latest target branch | Branch: ${targetBranch} | Remote: origin/${targetBranch} | Purpose: Pre-merge sync`);
 } catch (fetchError) {
- logger.warn(…
- logger.warn('Continuing without merge…
+ logger.warn(`TARGET_BRANCH_FETCH_FAILED: Unable to fetch target branch | Branch: ${targetBranch} | Error: ${fetchError.message} | Impact: Proceeding without merge, PR may have conflicts`);
+ logger.warn('MERGE_SKIPPED_NO_FETCH: Continuing without pre-merge | Reason: Target branch fetch failed | Impact: PR may require manual conflict resolution');
 }
 // Check if merge is needed (avoid unnecessary merge commits)
 try {
 const { stdout: mergeBase } = await run(`git merge-base HEAD ${targetBranch}`);
 const { stdout: targetCommit } = await run(`git rev-parse ${targetBranch}`);
 if (mergeBase.trim() === targetCommit.trim()) {
- logger.info(…
+ logger.info(`MERGE_NOT_NEEDED: Current branch already up-to-date with target | Branch: ${targetBranch} | Status: in-sync | Action: Skipping merge`);
 } else {
 // Try to merge target branch into current branch
 let mergeSucceeded = false;
 try {
 await run(`git merge ${targetBranch} --no-edit -m "Merge ${targetBranch} to sync before version bump"`);
- logger.info(…
+ logger.info(`MERGE_SUCCESS: Successfully merged target branch into current branch | Target: ${targetBranch} | Purpose: Sync before version bump`);
 mergeSucceeded = true;
 } catch (mergeError) {
 // If merge conflicts occur, check if they're only in version-related files
@@ -630,11 +649,11 @@ const execute = async (runConfig)=>{
 mergeError.stderr || ''
 ].join(' ');
 if (errorText.includes('CONFLICT')) {
- logger.warn(…
+ logger.warn(`MERGE_CONFLICTS_DETECTED: Merge conflicts found, attempting automatic resolution | Target: ${targetBranch} | Strategy: Auto-resolve version files`);
 // Get list of conflicted files
 const { stdout: conflictedFiles } = await run('git diff --name-only --diff-filter=U');
 const conflicts = conflictedFiles.trim().split('\n').filter(Boolean);
- logger.verbose(`Conflicted files: ${conflicts.join(', ')}`);
+ logger.verbose(`MERGE_CONFLICTS_LIST: Conflicted files detected | Files: ${conflicts.join(', ')} | Count: ${conflicts.length}`);
 // Check if conflicts are only in package.json and package-lock.json
 const versionFiles = [
 'package.json',
@@ -642,29 +661,29 @@ const execute = async (runConfig)=>{
 ];
 const nonVersionConflicts = conflicts.filter((f)=>!versionFiles.includes(f));
 if (nonVersionConflicts.length > 0) {
- logger.error(…
+ logger.error(`MERGE_AUTO_RESOLVE_FAILED: Cannot auto-resolve conflicts in non-version files | Files: ${nonVersionConflicts.join(', ')} | Count: ${nonVersionConflicts.length} | Resolution: Manual intervention required`);
 logger.error('');
- logger.error('…
- logger.error('   1…
- logger.error('   2…
- logger.error('   3…
- logger.error('   4…
+ logger.error('CONFLICT_RESOLUTION_REQUIRED: Manual steps to resolve conflicts:');
+ logger.error('  Step 1: Resolve conflicts in the files listed above');
+ logger.error('  Step 2: Stage resolved files | Command: git add <resolved-files>');
+ logger.error('  Step 3: Complete merge commit | Command: git commit');
+ logger.error('  Step 4: Resume publish process | Command: kodrdriv publish');
 logger.error('');
 throw new Error(`Merge conflicts in non-version files. Please resolve manually.`);
 }
 // Auto-resolve version conflicts by accepting current branch versions
 // (keep our working branch's version, which is likely already updated)
- logger.info(`…
+ logger.info(`MERGE_AUTO_RESOLVING: Automatically resolving version conflicts | Strategy: Keep current branch versions | Files: ${versionFiles.join(', ')}`);
 for (const file of conflicts){
 if (versionFiles.includes(file)) {
 await run(`git checkout --ours ${file}`);
 await run(`git add ${file}`);
- logger.verbose(`Resolved…
+ logger.verbose(`MERGE_FILE_RESOLVED: Resolved file using current branch version | File: ${file} | Strategy: checkout --ours`);
 }
 }
 // Complete the merge
 await run(`git commit --no-edit -m "Merge ${targetBranch} to sync before version bump (auto-resolved version conflicts)"`);
- logger.info(…
+ logger.info(`MERGE_AUTO_RESOLVE_SUCCESS: Successfully auto-resolved version conflicts and completed merge | Target: ${targetBranch} | Files: ${versionFiles.join(', ')}`);
 mergeSucceeded = true;
 } else {
 // Not a conflict error, re-throw
@@ -674,19 +693,19 @@ const execute = async (runConfig)=>{
 // Only run npm install if merge actually happened
 if (mergeSucceeded) {
 // Run npm install to update package-lock.json based on merged package.json
- logger.info('Running npm install after merge…
+ logger.info('POST_MERGE_NPM_INSTALL: Running npm install after merge | Purpose: Update package-lock.json based on merged package.json | Command: npm install');
 await run('npm install');
- logger.info('…
+ logger.info('POST_MERGE_NPM_COMPLETE: npm install completed successfully | Status: Dependencies synchronized');
 // Commit any changes from npm install (e.g., package-lock.json updates)
 const { stdout: mergeChangesStatus } = await run('git status --porcelain');
 if (mergeChangesStatus.trim()) {
- logger.verbose('…
+ logger.verbose('POST_MERGE_CHANGES_DETECTED: Changes detected after npm install | Action: Staging for commit | Command: git add');
 // Check if package-lock.json exists before trying to stage it
 const packageLockExistsPostMerge = await storage.exists('package-lock.json');
 const filesToStagePostMerge = packageLockExistsPostMerge ? 'package.json package-lock.json' : 'package.json';
 await run(`git add ${filesToStagePostMerge}`);
 if (await hasStagedChanges()) {
- logger.verbose('Committing post-merge changes…
+ logger.verbose('POST_MERGE_COMMIT: Committing post-merge changes | Files: ' + filesToStagePostMerge + ' | Purpose: Finalize merge');
 await execute$2(runConfig);
 }
 }
@@ -694,7 +713,7 @@ const execute = async (runConfig)=>{
 }
 } catch (error) {
 // Only catch truly unexpected errors here
- logger.error(…
+ logger.error(`MERGE_UNEXPECTED_ERROR: Unexpected error during merge process | Error: ${error.message} | Target: ${targetBranch} | Action: Aborting publish`);
 throw error;
 }
 }, `merge ${targetBranch} into current branch`);
@@ -707,7 +726,7 @@ const execute = async (runConfig)=>{
 logger.info('Would determine target version and update package.json');
 newVersion = '1.0.0'; // Mock version for dry run
 } else {
- var…
+ var _runConfig_publish13;
 const packageJsonContents = await storage.readFile('package.json', 'utf-8');
 const parsed = safeJsonParse(packageJsonContents, 'package.json');
 const packageJson = validatePackageJson(parsed, 'package.json');
@@ -719,28 +738,110 @@ const execute = async (runConfig)=>{
|
|
|
719
738
|
const branchDependentResult = await calculateBranchDependentVersion(currentVersion, currentBranch, runConfig.branches, targetBranch);
|
|
720
739
|
proposedVersion = branchDependentResult.version;
|
|
721
740
|
finalTargetBranch = branchDependentResult.targetBranch;
|
|
722
|
-
logger.info(
|
|
723
|
-
logger.info(
|
|
741
|
+
logger.info(`VERSION_BRANCH_DEPENDENT_CALCULATED: Branch-dependent version calculated | Current: ${currentVersion} | Proposed: ${proposedVersion} | Strategy: branch-dependent`);
|
|
742
|
+
logger.info(`TARGET_BRANCH_FINAL: Final target branch determined | Branch: ${finalTargetBranch} | Source: branch-dependent config`);
|
|
724
743
|
// Update targetBranch for the rest of the function
|
|
725
744
|
targetBranch = finalTargetBranch;
|
|
726
745
|
} else {
|
|
727
|
-
var
|
|
746
|
+
var _runConfig_publish14;
|
|
728
747
|
// Use existing logic for backward compatibility
|
|
729
|
-
const targetVersionInput = ((
|
|
748
|
+
const targetVersionInput = ((_runConfig_publish14 = runConfig.publish) === null || _runConfig_publish14 === void 0 ? void 0 : _runConfig_publish14.targetVersion) || 'patch';
|
|
730
749
|
proposedVersion = calculateTargetVersion(currentVersion, targetVersionInput);
|
|
731
750
|
}
|
|
732
751
|
const targetTagName = `v${proposedVersion}`;
|
|
733
752
|
const tagExists = await checkIfTagExists(targetTagName);
|
|
753
|
+
// Smart tag conflict handling
|
|
734
754
|
if (tagExists) {
|
|
735
|
-
|
|
755
|
+
const { getNpmPublishedVersion, getTagInfo } = await import('../util/general.js');
|
|
756
|
+
logger.warn(`TAG_ALREADY_EXISTS: Tag already exists in repository | Tag: ${targetTagName} | Status: conflict | Action: Check npm registry`);
|
|
757
|
+
// Check if this version is published on npm
|
|
758
|
+
const npmVersion = await getNpmPublishedVersion(packageJson.name);
|
|
759
|
+
const tagInfo = await getTagInfo(targetTagName);
|
|
760
|
+
if (npmVersion === proposedVersion) {
|
|
761
|
+
var _runConfig_publish15;
|
|
762
|
+
// Version is already published on npm
|
|
763
|
+
logger.info(`VERSION_ALREADY_PUBLISHED: Version already published on npm registry | Version: ${proposedVersion} | Status: published | Action: Skipping`);
|
|
764
|
+
logger.info(`PUBLISH_SKIPPED_DUPLICATE: Skipping publish operation | Reason: Package already at target version | Version: ${proposedVersion}`);
|
|
765 + logger.info('');
766 + logger.info('REPUBLISH_OPTIONS: Options if you need to republish:');
767 + logger.info(` Option 1: Bump version | Command: npm version patch (or minor/major)`);
768 + logger.info(` Option 2: Re-run publish | Command: kodrdriv publish`);
769 + logger.info('');
770 + if ((_runConfig_publish15 = runConfig.publish) === null || _runConfig_publish15 === void 0 ? void 0 : _runConfig_publish15.skipAlreadyPublished) {
771 + logger.info('PUBLISH_SKIPPED_FLAG: Skipping package due to flag | Flag: --skip-already-published | Version: ' + proposedVersion + ' | Status: skipped');
772 + // Emit skip marker for tree mode detection
773 + // eslint-disable-next-line no-console
774 + console.log('KODRDRIV_PUBLISH_SKIPPED');
775 + return; // Exit without error
776 + } else {
777 + throw new Error(`Version ${proposedVersion} already published. Use --skip-already-published to continue.`);
778 + }
779 + } else {
780 + var _tagInfo_commit, _runConfig_publish16;
781 + // Tag exists but version not on npm - likely failed previous publish
782 + logger.warn('');
783 + logger.warn('PUBLISH_SITUATION_ANALYSIS: Analyzing publish conflict situation | Tag: ' + targetTagName + ' | npm: ' + (npmVersion || 'not published'));
784 + logger.warn(`PUBLISH_ANALYSIS_TAG_EXISTS: Tag exists locally | Tag: ${targetTagName} | Commit: ${tagInfo === null || tagInfo === void 0 ? void 0 : (_tagInfo_commit = tagInfo.commit) === null || _tagInfo_commit === void 0 ? void 0 : _tagInfo_commit.substring(0, 8)}`);
785 + logger.warn(`PUBLISH_ANALYSIS_NPM_STATUS: npm registry status | Version: ${npmVersion || 'not published'} | Status: ${npmVersion ? 'published' : 'missing'}`);
786 + logger.warn(`PUBLISH_ANALYSIS_CONCLUSION: Previous publish likely failed after tag creation | Reason: Tag exists but not on npm | Resolution: Recovery needed`);
787 + logger.warn('');
788 + logger.warn('PUBLISH_RECOVERY_OPTIONS: Recovery options available:');
789 + logger.warn(' OPTION_1_FORCE: Force republish by deleting tag | Command: kodrdriv publish --force-republish');
790 + logger.warn(' OPTION_2_BUMP: Skip version and bump | Command: npm version patch && kodrdriv publish');
791 + logger.warn(' OPTION_3_MANUAL: Manually delete tag:');
792 + logger.warn(` Command: git tag -d ${targetTagName}`);
793 + logger.warn(` Command: git push origin :refs/tags/${targetTagName}`);
794 + logger.warn('');
795 + if ((_runConfig_publish16 = runConfig.publish) === null || _runConfig_publish16 === void 0 ? void 0 : _runConfig_publish16.forceRepublish) {
796 + logger.info('PUBLISH_FORCE_REPUBLISH: Force republish mode enabled | Action: Deleting existing tag | Tag: ' + targetTagName + ' | Purpose: Allow republish');
797 + if (!isDryRun) {
798 + const { runSecure } = await import('@eldrforge/git-tools');
799 + // Delete local tag
800 + try {
801 + await runSecure('git', [
802 + 'tag',
803 + '-d',
804 + targetTagName
805 + ]);
806 + logger.info(`TAG_DELETED_LOCAL: Deleted local tag | Tag: ${targetTagName} | Status: removed-local`);
807 + } catch (error) {
808 + logger.debug(`Could not delete local tag: ${error.message}`);
809 + }
810 + // Delete remote tag
811 + try {
812 + await runSecure('git', [
813 + 'push',
814 + 'origin',
815 + `:refs/tags/${targetTagName}`
816 + ]);
817 + logger.info(`TAG_DELETED_REMOTE: Deleted remote tag | Tag: ${targetTagName} | Remote: origin | Status: removed-remote`);
818 + } catch (error) {
819 + logger.debug(`Could not delete remote tag: ${error.message}`);
820 + }
821 + logger.info('PUBLISH_TAG_CLEANUP_COMPLETE: Tag deleted successfully | Status: ready-for-publish | Next: Continue with publish workflow');
822 + } else {
823 + logger.info('Would delete tags and continue with publish');
824 + }
825 + } else {
826 + throw new Error(`Tag ${targetTagName} already exists. Use --force-republish to override.`);
827 + }
828 + }
736 829  }
737 - if ((
738 - var
739 - newVersion = await confirmVersionInteractively(currentVersion, proposedVersion, (
830 + if ((_runConfig_publish13 = runConfig.publish) === null || _runConfig_publish13 === void 0 ? void 0 : _runConfig_publish13.interactive) {
831 + var _runConfig_publish17;
832 + newVersion = await confirmVersionInteractively(currentVersion, proposedVersion, (_runConfig_publish17 = runConfig.publish) === null || _runConfig_publish17 === void 0 ? void 0 : _runConfig_publish17.targetVersion);
740 833  const confirmedTagName = `v${newVersion}`;
741 834  const confirmedTagExists = await checkIfTagExists(confirmedTagName);
742 835  if (confirmedTagExists) {
743 -
836 + var _runConfig_publish18;
837 + const { getNpmPublishedVersion } = await import('../util/general.js');
838 + const npmVersion = await getNpmPublishedVersion(packageJson.name);
839 + if (npmVersion === newVersion) {
840 + throw new Error(`Tag ${confirmedTagName} already exists and version is published on npm. Please choose a different version.`);
841 + } else if (!((_runConfig_publish18 = runConfig.publish) === null || _runConfig_publish18 === void 0 ? void 0 : _runConfig_publish18.forceRepublish)) {
842 + throw new Error(`Tag ${confirmedTagName} already exists. Use --force-republish to override.`);
843 + }
844 + // If forceRepublish is set, we'll continue (tag will be deleted later)
744 845  }
745 846  } else {
746 847  newVersion = proposedVersion;
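The skip/force-republish branching above hinges on whether the proposed version is already visible on the npm registry. A minimal sketch of that kind of check, assuming an `npm view` lookup; the package's own helper is `getNpmPublishedVersion` in `dist/util/general.js`, so this is an illustration of the pattern, not its implementation:

```js
// Illustrative sketch only - not the package's getNpmPublishedVersion implementation.
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';

const execFileAsync = promisify(execFile);

async function isVersionPublished(pkgName, version) {
    try {
        // `npm view <pkg>@<version> version` prints the version only if that version exists.
        const { stdout } = await execFileAsync('npm', ['view', `${pkgName}@${version}`, 'version']);
        return stdout.trim() === version;
    } catch {
        return false; // npm exits non-zero (e.g. E404) when the version is not published
    }
}
```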
@@ -781,23 +882,23 @@ const execute = async (runConfig)=>{
781 882  releaseConfig.release = {
782 883  ...runConfig.release,
783 884  currentBranch: currentBranch,
784 - ...((
885 + ...((_runConfig_publish7 = runConfig.publish) === null || _runConfig_publish7 === void 0 ? void 0 : _runConfig_publish7.from) && {
785 886  from: runConfig.publish.from
786 887  },
787 - ...((
888 + ...((_runConfig_publish8 = runConfig.publish) === null || _runConfig_publish8 === void 0 ? void 0 : _runConfig_publish8.interactive) && {
788 889  interactive: runConfig.publish.interactive
789 890  },
790 - ...((
891 + ...((_runConfig_publish9 = runConfig.publish) === null || _runConfig_publish9 === void 0 ? void 0 : _runConfig_publish9.fromMain) && {
791 892  fromMain: runConfig.publish.fromMain
792 893  }
793 894  };
794 - if ((
895 + if ((_runConfig_publish10 = runConfig.publish) === null || _runConfig_publish10 === void 0 ? void 0 : _runConfig_publish10.from) {
795 896  logger.verbose(`Using custom 'from' reference for release notes: ${runConfig.publish.from}`);
796 897  }
797 - if ((
898 + if ((_runConfig_publish11 = runConfig.publish) === null || _runConfig_publish11 === void 0 ? void 0 : _runConfig_publish11.interactive) {
798 899  logger.verbose('Interactive mode enabled for release notes generation');
799 900  }
800 - if ((
901 + if ((_runConfig_publish12 = runConfig.publish) === null || _runConfig_publish12 === void 0 ? void 0 : _runConfig_publish12.fromMain) {
801 902  logger.verbose('Forcing comparison against main branch for release notes');
802 903  }
803 904  const releaseSummary = await execute$1(releaseConfig);
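The `...((x = runConfig.publish) ... && { key })` shapes above are the compiled form of a conditional spread: a key is added to `releaseConfig.release` only when the corresponding publish option is set. A small standalone sketch of the same pattern (the values here are illustrative, not taken from the package):

```js
// Conditional spread: include a key only when its source value is truthy.
const publish = { from: 'v1.2.26', interactive: undefined };
const release = {
    currentBranch: 'working/publish',
    ...(publish.from && { from: publish.from }),                      // included
    ...(publish.interactive && { interactive: publish.interactive })  // omitted
};
console.log(release); // { currentBranch: 'working/publish', from: 'v1.2.26' }
```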
@@ -855,12 +956,12 @@ const execute = async (runConfig)=>{
855 956  logger.debug(`Could not verify workflow configuration for wait skip: ${error.message}`);
856 957  }
857 958  if (!shouldSkipWait) {
858 - var
959 + var _runConfig_publish19, _runConfig_publish20, _runConfig_publish21;
859 960  // Configure timeout and user confirmation behavior
860 - const timeout = ((
861 - const senditMode = ((
961 + const timeout = ((_runConfig_publish19 = runConfig.publish) === null || _runConfig_publish19 === void 0 ? void 0 : _runConfig_publish19.checksTimeout) || KODRDRIV_DEFAULTS.publish.checksTimeout;
962 + const senditMode = ((_runConfig_publish20 = runConfig.publish) === null || _runConfig_publish20 === void 0 ? void 0 : _runConfig_publish20.sendit) || false;
862 963  // sendit flag overrides skipUserConfirmation - if sendit is true, skip confirmation
863 - const skipUserConfirmation = senditMode || ((
964 + const skipUserConfirmation = senditMode || ((_runConfig_publish21 = runConfig.publish) === null || _runConfig_publish21 === void 0 ? void 0 : _runConfig_publish21.skipUserConfirmation) || false;
864 965  await GitHub.waitForPullRequestChecks(pr.number, {
865 966  timeout,
866 967  skipUserConfirmation
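The new option handling lets `--sendit` imply skipping user confirmation and falls back to `KODRDRIV_DEFAULTS` for the checks timeout. A hedged sketch of that precedence (the default value shown is an assumption for illustration):

```js
// Sketch: resolve effective PR-check options; sendit implies skipping confirmation.
function resolveCheckOptions(publish = {}, defaults = { checksTimeout: 300000 }) {
    const timeout = publish.checksTimeout || defaults.checksTimeout;
    const senditMode = publish.sendit || false;
    const skipUserConfirmation = senditMode || publish.skipUserConfirmation || false;
    return { timeout, skipUserConfirmation };
}

console.log(resolveCheckOptions({ sendit: true }));
// { timeout: 300000, skipUserConfirmation: true }
```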
@@ -876,14 +977,14 @@ const execute = async (runConfig)=>{
876 977  } catch (error) {
877 978  // Check if this is a merge conflict error
878 979  if (error.message && (error.message.includes('not mergeable') || error.message.includes('Pull Request is not mergeable') || error.message.includes('merge conflict'))) {
879 - logger.error(
980 + logger.error(`PR_MERGE_CONFLICTS: Pull request has merge conflicts | PR Number: ${pr.number} | Status: conflicts | Resolution: Manual intervention required`);
880 981  logger.error('');
881 - logger.error('
882 - logger.error(` 1
883 - logger.error(' 2
884 - logger.error(' 3
982 + logger.error('PR_CONFLICT_RESOLUTION: Steps to resolve conflicts:');
983 + logger.error(` Step 1: Visit pull request | URL: ${pr.html_url}`);
984 + logger.error(' Step 2: Resolve merge conflicts | Method: GitHub UI or local');
985 + logger.error(' Step 3: Re-run publish command | Command: kodrdriv publish');
885 986  logger.error('');
886 - logger.error('
987 + logger.error('PR_AUTO_CONTINUE: Command will auto-detect existing PR | Behavior: Continues from where it left off | No re-creation needed');
887 988  throw new Error(`Merge conflicts detected in PR #${pr.number}. Please resolve conflicts and re-run the command.`);
888 989  } else {
889 990  // Re-throw other merge errors
@@ -901,7 +1002,7 @@ const execute = async (runConfig)=>{
901 1002  '--porcelain'
902 1003  ]);
903 1004  if (statusOutput.trim()) {
904 - logger.info('
1005 + logger.info('PUBLISH_STASH_SAVING: Stashing uncommitted changes before checkout | Command: git stash push | Purpose: Protect changes during branch switch');
905 1006  await runSecure('git', [
906 1007  'stash',
907 1008  'push',
@@ -909,7 +1010,7 @@ const execute = async (runConfig)=>{
909 1010  'kodrdriv: stash before checkout target branch'
910 1011  ]);
911 1012  hasStashedChanges = true;
912 - logger.info('
1013 + logger.info('PUBLISH_STASH_SUCCESS: Successfully stashed uncommitted changes | Status: saved | Name: kodrdriv stash');
913 1014  }
914 1015  }
915 1016  try {
@@ -919,31 +1020,32 @@ const execute = async (runConfig)=>{
919 1020  }, `checkout ${targetBranch}`);
920 1021  // Sync target branch with remote to avoid conflicts during PR creation
921 1022  if (!isDryRun) {
922 - logger.info(
1023 + logger.info(`PUBLISH_TARGET_SYNCING: Syncing target branch with remote | Branch: ${targetBranch} | Remote: origin | Purpose: Avoid PR conflicts`);
923 1024  try {
924 1025  const remoteExists = await run(`git ls-remote --exit-code --heads origin ${targetBranch}`).then(()=>true).catch(()=>false);
925 1026  if (remoteExists) {
926 1027  await runGitWithLock(process.cwd(), async ()=>{
927 - await run(`git
928 -
929 -
1028 + await run(`git fetch origin ${targetBranch}`);
1029 + await run(`git merge origin/${targetBranch} --no-ff --no-edit`);
1030 + }, `sync ${targetBranch}`);
1031 + logger.info(`PUBLISH_TARGET_SYNCED: Successfully synced target with remote | Branch: ${targetBranch} | Remote: origin | Status: in-sync`);
930 1032  } else {
931 - logger.info(
1033 + logger.info(`PUBLISH_TARGET_NO_REMOTE: No remote target branch found | Branch: ${targetBranch} | Remote: origin | Action: Will be created on first push`);
932 1034  }
933 1035  } catch (syncError) {
934 1036  if (syncError.message && syncError.message.includes('CONFLICT')) {
935 - logger.error(
936 - logger.error(`
937 - logger.error(` 1
938 - logger.error(` 2
939 - logger.error(` 3
940 - logger.error(` 4
941 - logger.error(` 5
942 - logger.error(` 6
943 - logger.error(` 7
1037 + logger.error(`PUBLISH_SYNC_CONFLICTS: Merge conflicts during target sync | Branch: ${targetBranch} | Remote: origin | Status: conflicts-detected`);
1038 + logger.error(`PUBLISH_SYNC_RESOLUTION: Manual conflict resolution steps:`);
1039 + logger.error(` Step 1: Checkout target | Command: git checkout ${targetBranch}`);
1040 + logger.error(` Step 2: Pull and merge | Command: git pull origin ${targetBranch}`);
1041 + logger.error(` Step 3: Resolve conflicts in files`);
1042 + logger.error(` Step 4: Stage resolved files | Command: git add <resolved-files>`);
1043 + logger.error(` Step 5: Complete merge | Command: git commit`);
1044 + logger.error(` Step 6: Return to branch | Command: git checkout ${currentBranch}`);
1045 + logger.error(` Step 7: Resume publish | Command: kodrdriv publish`);
944 1046  throw syncError;
945 1047  } else {
946 - logger.warn(
1048 + logger.warn(`PUBLISH_SYNC_WARNING: Could not sync target with remote | Branch: ${targetBranch} | Remote: origin | Error: ${syncError.message}`);
947 1049  // Continue with publish process, but log the warning
948 1050  }
949 1051  }
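The target-branch sync added here is a fetch followed by a non-fast-forward merge of the remote ref, run under the repository lock so conflicts surface before the PR is created. A compressed sketch of the sequence, assuming a generic `run` helper that resolves when the command succeeds and rejects otherwise:

```js
// Sketch: bring a local target branch up to date with origin before opening a PR.
async function syncTargetWithRemote(run, targetBranch) {
    const remoteExists = await run(`git ls-remote --exit-code --heads origin ${targetBranch}`)
        .then(() => true)
        .catch(() => false);
    if (!remoteExists) return; // branch will be created on the first push
    await run(`git fetch origin ${targetBranch}`);
    await run(`git merge origin/${targetBranch} --no-ff --no-edit`); // conflicts reject here
}
```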
@@ -953,17 +1055,17 @@ const execute = async (runConfig)=>{
953 1055  } catch (error) {
954 1056  // Check if this is a merge conflict or sync issue
955 1057  if (!isDryRun && (error.message.includes('conflict') || error.message.includes('CONFLICT') || error.message.includes('diverged') || error.message.includes('non-fast-forward'))) {
956 - logger.error(
1058 + logger.error(`PUBLISH_TARGET_SYNC_FAILED: Failed to sync target branch with remote | Branch: ${targetBranch} | Remote: origin | Impact: Cannot proceed safely`);
957 1059  logger.error('');
958 - logger.error('
959 - logger.error(`
960 - logger.error(`
961 - logger.error(`
962 - logger.error(`
963 - logger.error(`
964 - logger.error(`
1060 + logger.error('PUBLISH_SYNC_RECOVERY_OPTIONS: Available recovery options:');
1061 + logger.error(` OPTION_1_AUTO: Attempt automatic resolution | Command: kodrdriv publish --sync-target`);
1062 + logger.error(` OPTION_2_MANUAL: Manually resolve conflicts:`);
1063 + logger.error(` Step 1: Checkout target | Command: git checkout ${targetBranch}`);
1064 + logger.error(` Step 2: Pull from remote | Command: git pull origin ${targetBranch}`);
1065 + logger.error(` Step 3: Resolve conflicts and commit`);
1066 + logger.error(` Step 4: Re-run publish | Command: kodrdriv publish`);
965 1067  logger.error('');
966 - logger.error('
1068 + logger.error('PUBLISH_STOPPED_SAFETY: Publish process stopped | Reason: Prevent data loss | Status: safe-to-recover');
967 1069  throw new Error(`Target branch '${targetBranch}' sync failed. Use recovery options above to resolve.`);
968 1070  } else {
969 1071  // Re-throw other errors
@@ -972,16 +1074,16 @@ const execute = async (runConfig)=>{
972 1074  }
973 1075  // Restore stashed changes if we stashed them
974 1076  if (hasStashedChanges) {
975 - logger.info('
1077 + logger.info('PUBLISH_STASH_RESTORING: Restoring previously stashed changes | Command: git stash pop | Purpose: Restore working directory state');
976 1078  try {
977 1079  await runSecure('git', [
978 1080  'stash',
979 1081  'pop'
980 1082  ]);
981 - logger.info('
1083 + logger.info('PUBLISH_STASH_RESTORED: Successfully restored stashed changes | Status: restored | Stash: removed');
982 1084  } catch (stashError) {
983 - logger.warn(
984 - logger.warn('
1085 + logger.warn(`PUBLISH_STASH_RESTORE_FAILED: Could not restore stashed changes | Error: ${stashError.message} | Impact: Changes still in stash`);
1086 + logger.warn('PUBLISH_STASH_AVAILABLE: Changes available in git stash | Command: git stash list | Purpose: View and restore manually');
985 1087  }
986 1088  }
987 1089  // Now create and push the tag on the target branch
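The stash handling pairs a `git stash push` before the branch switch with a best-effort `git stash pop` afterwards, so a failed pop warns instead of aborting the publish. A minimal sketch of that pairing, assuming `run` resolves to the command's stdout:

```js
// Sketch: protect uncommitted changes across a checkout, restoring them afterwards.
async function withStashedChanges(run, fn) {
    const dirty = (await run('git status --porcelain')).trim().length > 0;
    if (dirty) await run("git stash push -m 'kodrdriv: stash before checkout target branch'");
    try {
        await fn();
    } finally {
        if (dirty) {
            try {
                await run('git stash pop');
            } catch {
                // Leave the stash in place; it stays recoverable via `git stash list`.
            }
        }
    }
}
```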
@@ -1073,9 +1175,9 @@ const execute = async (runConfig)=>{
1073 1175  }
1074 1176  logger.info('Creating GitHub release...');
1075 1177  if (isDryRun) {
1076 - var
1178 + var _runConfig_publish22;
1077 1179  logger.info('Would read package.json version and create GitHub release with retry logic');
1078 - const milestonesEnabled = !((
1180 + const milestonesEnabled = !((_runConfig_publish22 = runConfig.publish) === null || _runConfig_publish22 === void 0 ? void 0 : _runConfig_publish22.noMilestones);
1079 1181  if (milestonesEnabled) {
1080 1182  logger.info('Would close milestone for released version');
1081 1183  } else {
@@ -1091,13 +1193,13 @@ const execute = async (runConfig)=>{
1091 1193  let retries = 3;
1092 1194  while(retries > 0){
1093 1195  try {
1094 - var
1196 + var _runConfig_publish23;
1095 1197  await GitHub.createRelease(tagName, releaseTitle, releaseNotesContent);
1096 1198  logger.info(`GitHub release created successfully for tag: ${tagName}`);
1097 1199  // Close milestone for this version if enabled
1098 - const milestonesEnabled = !((
1200 + const milestonesEnabled = !((_runConfig_publish23 = runConfig.publish) === null || _runConfig_publish23 === void 0 ? void 0 : _runConfig_publish23.noMilestones);
1099 1201  if (milestonesEnabled) {
1100 - logger.info('
1202 + logger.info('PUBLISH_MILESTONE_CLOSING: Closing milestone for released version | Action: Close GitHub milestone | Purpose: Mark release complete');
1101 1203  const version = tagName.replace(/^v/, ''); // Remove 'v' prefix if present
1102 1204  await GitHub.closeMilestoneForVersion(version);
1103 1205  } else {
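The release creation above runs inside a small fixed-count retry loop (`let retries = 3`). A generic sketch of that retry shape; the delay between attempts is an assumption, not something visible in this hunk:

```js
// Sketch: retry an async operation a fixed number of times before rethrowing.
async function withRetries(operation, retries = 3, delayMs = 2000) {
    let lastError;
    while (retries > 0) {
        try {
            return await operation();
        } catch (error) {
            lastError = error;
            retries -= 1;
            if (retries > 0) await new Promise((resolve) => setTimeout(resolve, delayMs));
        }
    }
    throw lastError;
}
```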
@@ -1128,12 +1230,12 @@ const execute = async (runConfig)=>{
1128 1230  if (isDryRun) {
1129 1231  logger.info('Would monitor GitHub Actions workflows triggered by release');
1130 1232  } else {
1131 - var
1132 - const workflowTimeout = ((
1133 - const senditMode = ((
1134 - const skipUserConfirmation = senditMode || ((
1233 + var _runConfig_publish24, _runConfig_publish25, _runConfig_publish26, _runConfig_publish27;
1234 + const workflowTimeout = ((_runConfig_publish24 = runConfig.publish) === null || _runConfig_publish24 === void 0 ? void 0 : _runConfig_publish24.releaseWorkflowsTimeout) || KODRDRIV_DEFAULTS.publish.releaseWorkflowsTimeout;
1235 + const senditMode = ((_runConfig_publish25 = runConfig.publish) === null || _runConfig_publish25 === void 0 ? void 0 : _runConfig_publish25.sendit) || false;
1236 + const skipUserConfirmation = senditMode || ((_runConfig_publish26 = runConfig.publish) === null || _runConfig_publish26 === void 0 ? void 0 : _runConfig_publish26.skipUserConfirmation) || false;
1135 1237  // Get workflow names - either from config or auto-detect
1136 - let workflowNames = (
1238 + let workflowNames = (_runConfig_publish27 = runConfig.publish) === null || _runConfig_publish27 === void 0 ? void 0 : _runConfig_publish27.releaseWorkflowNames;
1137 1239  if (!workflowNames || workflowNames.length === 0) {
1138 1240  logger.info('No specific workflow names configured, auto-detecting workflows triggered by release events...');
1139 1241  try {
@@ -1159,7 +1261,7 @@ const execute = async (runConfig)=>{
1159 1261  }
1160 1262  // Switch back to source branch and sync with target
1161 1263  logger.info('');
1162 - logger.info(
1264 + logger.info(`PUBLISH_POST_SYNC: Syncing source branch with target after publish | Purpose: Keep branches synchronized | Strategy: Reset and force push`);
1163 1265  await runWithDryRunSupport(`git checkout ${currentBranch}`, isDryRun);
1164 1266  if (!isDryRun) {
1165 1267  // Sync target into source
@@ -1169,10 +1271,32 @@ const execute = async (runConfig)=>{
1169 1271  // The squash merge created a single commit on target that represents all source commits
1170 1272  logger.info(`Resetting ${currentBranch} to ${targetBranch} (squash merge)...`);
1171 1273  await run(`git reset --hard ${targetBranch}`);
1172 - logger.info(
1274 + logger.info(`PUBLISH_BRANCH_RESET: Reset source branch to target | Source: ${currentBranch} | Target: ${targetBranch} | Status: synchronized`);
1275 + // After squash merge and reset, we need to force push
1276 + // This is safe because we just merged to main and are syncing working branch
1277 + logger.info(`PUBLISH_FORCE_PUSHING: Force pushing synchronized branch | Branch: ${currentBranch} | Remote: origin | Purpose: Complete post-publish sync`);
1278 + try {
1279 + // Verify that remote working branch is ancestor of main (safety check)
1280 + try {
1281 + await run(`git fetch origin ${currentBranch}`);
1282 + await run(`git merge-base --is-ancestor origin/${currentBranch} ${targetBranch}`);
1283 + logger.verbose(`✓ Safety check passed: origin/${currentBranch} is ancestor of ${targetBranch}`);
1284 + } catch {
1285 + // Remote branch might not exist yet, or already in sync - both OK
1286 + logger.verbose(`Remote ${currentBranch} does not exist or is already synced`);
1287 + }
1288 + // Use --force-with-lease for safer force push
1289 + await run(`git push --force-with-lease origin ${currentBranch}`);
1290 + logger.info(`PUBLISH_FORCE_PUSH_SUCCESS: Successfully force pushed to remote | Branch: ${currentBranch} | Remote: origin | Status: synchronized`);
1291 + } catch (pushError) {
1292 + // If force push fails, provide helpful message
1293 + logger.warn(`PUBLISH_FORCE_PUSH_FAILED: Could not force push branch | Branch: ${currentBranch} | Remote: origin | Error: ${pushError.message}`);
1294 + logger.warn(`PUBLISH_MANUAL_PUSH_NEEDED: Manual force push required | Action: Push manually`);
1295 + logger.warn(`PUBLISH_MANUAL_PUSH_COMMAND: Force push command | Command: git push --force-with-lease origin ${currentBranch}`);
1296 + }
1173 1297  } else {
1174 1298  // For merge/rebase methods, try to merge target back into source
1175 - logger.info(`Merging ${targetBranch}
1299 + logger.info(`PUBLISH_MERGE_TARGET_BACK: Merging target back into source | Target: ${targetBranch} | Source: ${currentBranch} | Purpose: Sync branches after publish`);
1176 1300  // Try fast-forward first (works with merge/rebase methods)
1177 1301  // Use runSecure to avoid error output for expected failure
1178 1302  let fastForwardSucceeded = false;
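The new post-publish sync checks that the remote working branch is an ancestor of the target before force-pushing, and uses `--force-with-lease` rather than a bare `--force`. A condensed sketch of that safety sequence, again assuming a generic `run` helper:

```js
// Sketch: safely force-push a source branch after it was reset onto the target branch.
async function forcePushAfterReset(run, sourceBranch, targetBranch) {
    try {
        await run(`git fetch origin ${sourceBranch}`);
        // merge-base --is-ancestor exits non-zero if origin/<source> is NOT contained in <target>.
        await run(`git merge-base --is-ancestor origin/${sourceBranch} ${targetBranch}`);
    } catch {
        // Remote branch may not exist yet, or is already in sync - both are acceptable.
    }
    // --force-with-lease refuses to overwrite remote commits that have not been fetched locally.
    await run(`git push --force-with-lease origin ${sourceBranch}`);
}
```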
@@ -1183,13 +1307,13 @@ const execute = async (runConfig)=>{
1183 1307  '--ff-only'
1184 1308  ]);
1185 1309  fastForwardSucceeded = true;
1186 - logger.info(
1310 + logger.info(`PUBLISH_MERGE_FF_SUCCESS: Fast-forward merged target into source | Target: ${targetBranch} | Source: ${currentBranch} | Status: merged`);
1187 1311  } catch {
1188 1312  logger.verbose(`Fast-forward merge not possible, performing regular merge...`);
1189 1313  }
1190 1314  if (!fastForwardSucceeded) {
1191 1315  await run(`git merge ${targetBranch} --no-edit`);
1192 - logger.info(
1316 + logger.info(`PUBLISH_MERGE_SUCCESS: Merged target into source | Target: ${targetBranch} | Source: ${currentBranch} | Status: merged`);
1193 1317  }
1194 1318  }
1195 1319  // Determine version bump based on branch configuration
@@ -1208,36 +1332,32 @@ const execute = async (runConfig)=>{
1208 1332  }
1209 1333  }
1210 1334  // Bump to next development version
1211 - logger.info(`Bumping to next development version
1335 + logger.info(`PUBLISH_DEV_VERSION_BUMPING: Bumping to next development version | Command: ${versionCommand} | Tag: ${versionTag} | Purpose: Prepare for next cycle`);
1212 1336  try {
1213 1337  const { stdout: newVersion } = await run(`npm version ${versionCommand} --preid=${versionTag}`);
1214 - logger.info(
1338 + logger.info(`PUBLISH_DEV_VERSION_BUMPED: Version bumped successfully | New Version: ${newVersion.trim()} | Type: development | Status: completed`);
1215 1339  } catch (versionError) {
1216 - logger.warn(
1217 - logger.warn('
1340 + logger.warn(`PUBLISH_DEV_VERSION_BUMP_FAILED: Failed to bump version | Error: ${versionError.message} | Impact: Version not updated`);
1341 + logger.warn('PUBLISH_MANUAL_VERSION_BUMP: Manual version bump may be needed | Action: Bump manually for next cycle | Command: npm version');
1218 1342  }
1219 1343  // Push updated source branch
1220 - logger.info(`Pushing updated ${currentBranch}
1344 + logger.info(`PUBLISH_PUSH_SOURCE: Pushing updated source branch | Branch: ${currentBranch} | Remote: origin | Purpose: Push development version`);
1221 1345  try {
1222 1346  await runGitWithLock(process.cwd(), async ()=>{
1223 1347  await run(`git push origin ${currentBranch}`);
1224 1348  }, `push ${currentBranch}`);
1225 - logger.info(
1349 + logger.info(`PUBLISH_PUSH_SOURCE_SUCCESS: Pushed source branch successfully | Branch: ${currentBranch} | Remote: origin | Status: pushed`);
1226 1350  } catch (pushError) {
1227 - logger.warn(
1228 - logger.warn(`
1351 + logger.warn(`PUBLISH_PUSH_SOURCE_FAILED: Failed to push source branch | Branch: ${currentBranch} | Error: ${pushError.message} | Impact: Need manual push`);
1352 + logger.warn(`PUBLISH_MANUAL_PUSH_COMMAND: Manual push command | Command: git push origin ${currentBranch}`);
1229 1353  }
1230 1354  } else {
1231 -
1232 -
1233 - }
1234 - logger.info(`Would merge ${targetBranch} into ${currentBranch} with --ff-only`);
1235 - }
1236 - logger.info(`Would bump version to next development version`);
1237 - logger.info(`Would push ${currentBranch} to origin`);
1355 + logger.info(`PUBLISH_MERGE_DRY_RUN: Would merge target into source | Mode: dry-run | Target: ${targetBranch} | Source: ${currentBranch} | Strategy: ff-only`);
1356 + logger.info(`PUBLISH_VERSION_DRY_RUN: Would bump version to next development | Mode: dry-run | Action: Version bump`);
1357 + logger.info(`PUBLISH_PUSH_DRY_RUN: Would push source to remote | Mode: dry-run | Branch: ${currentBranch} | Remote: origin`);
1238 1358  }
1239 1359  logger.info('');
1240 - logger.info(
1360 + logger.info(`PUBLISH_COMPLETE: Publish workflow completed successfully | Branch: ${currentBranch} | Status: completed | Version: next-development`);
1241 1361  };
1242 1362
1243 1363  export { execute };