@aspruyt/xfg 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/PR.md +15 -0
- package/README.md +991 -0
- package/dist/command-executor.d.ts +25 -0
- package/dist/command-executor.js +32 -0
- package/dist/config-formatter.d.ts +17 -0
- package/dist/config-formatter.js +100 -0
- package/dist/config-normalizer.d.ts +6 -0
- package/dist/config-normalizer.js +136 -0
- package/dist/config-validator.d.ts +6 -0
- package/dist/config-validator.js +173 -0
- package/dist/config.d.ts +54 -0
- package/dist/config.js +27 -0
- package/dist/env.d.ts +39 -0
- package/dist/env.js +144 -0
- package/dist/file-reference-resolver.d.ts +20 -0
- package/dist/file-reference-resolver.js +135 -0
- package/dist/git-ops.d.ts +75 -0
- package/dist/git-ops.js +229 -0
- package/dist/index.d.ts +20 -0
- package/dist/index.js +167 -0
- package/dist/logger.d.ts +21 -0
- package/dist/logger.js +46 -0
- package/dist/merge.d.ts +47 -0
- package/dist/merge.js +196 -0
- package/dist/pr-creator.d.ts +40 -0
- package/dist/pr-creator.js +129 -0
- package/dist/repo-detector.d.ts +22 -0
- package/dist/repo-detector.js +98 -0
- package/dist/repository-processor.d.ts +47 -0
- package/dist/repository-processor.js +245 -0
- package/dist/retry-utils.d.ts +53 -0
- package/dist/retry-utils.js +143 -0
- package/dist/shell-utils.d.ts +8 -0
- package/dist/shell-utils.js +12 -0
- package/dist/strategies/azure-pr-strategy.d.ts +16 -0
- package/dist/strategies/azure-pr-strategy.js +221 -0
- package/dist/strategies/github-pr-strategy.d.ts +17 -0
- package/dist/strategies/github-pr-strategy.js +215 -0
- package/dist/strategies/index.d.ts +13 -0
- package/dist/strategies/index.js +22 -0
- package/dist/strategies/pr-strategy.d.ts +112 -0
- package/dist/strategies/pr-strategy.js +60 -0
- package/dist/workspace-utils.d.ts +5 -0
- package/dist/workspace-utils.js +10 -0
- package/package.json +58 -0
@@ -0,0 +1,98 @@ package/dist/repo-detector.js
+// Type guards
+export function isGitHubRepo(info) {
+    return info.type === "github";
+}
+export function isAzureDevOpsRepo(info) {
+    return info.type === "azure-devops";
+}
+/**
+ * Valid URL patterns for supported repository types.
+ */
+const GITHUB_URL_PATTERNS = [/^git@github\.com:/, /^https?:\/\/github\.com\//];
+const AZURE_DEVOPS_URL_PATTERNS = [
+    /^git@ssh\.dev\.azure\.com:/,
+    /^https?:\/\/dev\.azure\.com\//,
+];
+export function detectRepoType(gitUrl) {
+    // Check for Azure DevOps formats
+    for (const pattern of AZURE_DEVOPS_URL_PATTERNS) {
+        if (pattern.test(gitUrl)) {
+            return "azure-devops";
+        }
+    }
+    // Check for GitHub formats
+    for (const pattern of GITHUB_URL_PATTERNS) {
+        if (pattern.test(gitUrl)) {
+            return "github";
+        }
+    }
+    // Throw for unrecognized URL formats
+    throw new Error(`Unrecognized git URL format: ${gitUrl}. Supported formats: GitHub (git@github.com: or https://github.com/) and Azure DevOps (git@ssh.dev.azure.com: or https://dev.azure.com/)`);
+}
+export function parseGitUrl(gitUrl) {
+    const type = detectRepoType(gitUrl);
+    if (type === "azure-devops") {
+        return parseAzureDevOpsUrl(gitUrl);
+    }
+    return parseGitHubUrl(gitUrl);
+}
+function parseGitHubUrl(gitUrl) {
+    // Handle SSH format: git@github.com:owner/repo.git
+    // Use (.+?) with end anchor to handle repo names with dots (e.g., my.repo.git)
+    const sshMatch = gitUrl.match(/git@github\.com:([^/]+)\/(.+?)(?:\.git)?$/);
+    if (sshMatch) {
+        return {
+            type: "github",
+            gitUrl,
+            owner: sshMatch[1],
+            repo: sshMatch[2],
+        };
+    }
+    // Handle HTTPS format: https://github.com/owner/repo.git
+    // Use (.+?) with end anchor to handle repo names with dots
+    const httpsMatch = gitUrl.match(/https?:\/\/github\.com\/([^/]+)\/(.+?)(?:\.git)?$/);
+    if (httpsMatch) {
+        return {
+            type: "github",
+            gitUrl,
+            owner: httpsMatch[1],
+            repo: httpsMatch[2],
+        };
+    }
+    throw new Error(`Unable to parse GitHub URL: ${gitUrl}`);
+}
+function parseAzureDevOpsUrl(gitUrl) {
+    // Handle SSH format: git@ssh.dev.azure.com:v3/organization/project/repo
+    // Use (.+?) with end anchor to handle repo names with dots
+    const sshMatch = gitUrl.match(/git@ssh\.dev\.azure\.com:v3\/([^/]+)\/([^/]+)\/(.+?)(?:\.git)?$/);
+    if (sshMatch) {
+        return {
+            type: "azure-devops",
+            gitUrl,
+            owner: sshMatch[1],
+            repo: sshMatch[3],
+            organization: sshMatch[1],
+            project: sshMatch[2],
+        };
+    }
+    // Handle HTTPS format: https://dev.azure.com/organization/project/_git/repo
+    // Use (.+?) with end anchor to handle repo names with dots
+    const httpsMatch = gitUrl.match(/https?:\/\/dev\.azure\.com\/([^/]+)\/([^/]+)\/_git\/(.+?)(?:\.git)?$/);
+    if (httpsMatch) {
+        return {
+            type: "azure-devops",
+            gitUrl,
+            owner: httpsMatch[1],
+            repo: httpsMatch[3],
+            organization: httpsMatch[1],
+            project: httpsMatch[2],
+        };
+    }
+    throw new Error(`Unable to parse Azure DevOps URL: ${gitUrl}`);
+}
+export function getRepoDisplayName(repoInfo) {
+    if (repoInfo.type === "azure-devops") {
+        return `${repoInfo.organization}/${repoInfo.project}/${repoInfo.repo}`;
+    }
+    return `${repoInfo.owner}/${repoInfo.repo}`;
+}
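The module above is the entry point for remote URL handling: detectRepoType classifies a git URL, parseGitUrl extracts owner/repo (plus organization/project for Azure DevOps), and getRepoDisplayName formats a label for logs and results. A minimal usage sketch, written as if it sat alongside the dist modules; the remote URLs are illustrative, not taken from the package:

    import { parseGitUrl, getRepoDisplayName } from "./repo-detector.js";

    // GitHub SSH remote: the lazy (.+?) capture keeps dots in the repo name and drops ".git"
    const gh = parseGitUrl("git@github.com:octo-org/my.repo.git");
    console.log(getRepoDisplayName(gh)); // "octo-org/my.repo"

    // Azure DevOps HTTPS remote: organization, project, and repo are all captured
    const az = parseGitUrl("https://dev.azure.com/contoso/platform/_git/infra");
    console.log(getRepoDisplayName(az)); // "contoso/platform/infra"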
@@ -0,0 +1,47 @@ package/dist/repository-processor.d.ts
+import { RepoConfig } from "./config.js";
+import { RepoInfo } from "./repo-detector.js";
+import { GitOps, GitOpsOptions } from "./git-ops.js";
+import { ILogger } from "./logger.js";
+import { CommandExecutor } from "./command-executor.js";
+export interface ProcessorOptions {
+    branchName: string;
+    workDir: string;
+    dryRun?: boolean;
+    /** Number of retries for network operations (default: 3) */
+    retries?: number;
+    /** Command executor for shell commands (for testing) */
+    executor?: CommandExecutor;
+}
+/**
+ * Factory function type for creating GitOps instances.
+ * Allows dependency injection for testing.
+ */
+export type GitOpsFactory = (options: GitOpsOptions) => GitOps;
+export interface ProcessorResult {
+    success: boolean;
+    repoName: string;
+    message: string;
+    prUrl?: string;
+    skipped?: boolean;
+    mergeResult?: {
+        merged: boolean;
+        autoMergeEnabled?: boolean;
+        message: string;
+    };
+}
+export declare class RepositoryProcessor {
+    private gitOps;
+    private readonly gitOpsFactory;
+    private readonly log;
+    /**
+     * Creates a new RepositoryProcessor.
+     * @param gitOpsFactory - Optional factory for creating GitOps instances (for testing)
+     * @param log - Optional logger instance (for testing)
+     */
+    constructor(gitOpsFactory?: GitOpsFactory, log?: ILogger);
+    process(repoConfig: RepoConfig, repoInfo: RepoInfo, options: ProcessorOptions): Promise<ProcessorResult>;
+    /**
+     * Format commit message based on files changed (excludes skipped files)
+     */
+    private formatCommitMessage;
+}
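These declarations document the dependency-injection seams: a GitOpsFactory and an ILogger can both be supplied in tests in place of the real git wrapper and logger. A hedged sketch of wiring them up; the quiet-logger cast assumes info is the only method exercised here, since the full ILogger shape lives in logger.d.ts, which this diff does not reproduce:

    import { RepositoryProcessor, GitOpsFactory } from "./repository-processor.js";
    import { GitOps, GitOpsOptions } from "./git-ops.js";
    import { ILogger } from "./logger.js";

    // Minimal stand-ins for a test; a real suite would also stub GitOps methods.
    const quietLogger = { info: (_msg: string) => {} } as unknown as ILogger;
    const gitOpsFactory: GitOpsFactory = (opts: GitOpsOptions) => new GitOps(opts);

    const processor = new RepositoryProcessor(gitOpsFactory, quietLogger);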
@@ -0,0 +1,245 @@ package/dist/repository-processor.js
+import { existsSync } from "node:fs";
+import { join } from "node:path";
+import { convertContentToString, } from "./config.js";
+import { getRepoDisplayName } from "./repo-detector.js";
+import { GitOps } from "./git-ops.js";
+import { createPR, mergePR } from "./pr-creator.js";
+import { logger } from "./logger.js";
+import { getPRStrategy } from "./strategies/index.js";
+import { defaultExecutor } from "./command-executor.js";
+/**
+ * Determines if a file should be marked as executable.
+ * .sh files are auto-executable unless explicit executable: false is set.
+ * Non-.sh files are executable only if executable: true is explicitly set.
+ */
+function shouldBeExecutable(file) {
+    const isShellScript = file.fileName.endsWith(".sh");
+    if (file.executable !== undefined) {
+        // Explicit setting takes precedence
+        return file.executable;
+    }
+    // Default: .sh files are executable, others are not
+    return isShellScript;
+}
+export class RepositoryProcessor {
+    gitOps = null;
+    gitOpsFactory;
+    log;
+    /**
+     * Creates a new RepositoryProcessor.
+     * @param gitOpsFactory - Optional factory for creating GitOps instances (for testing)
+     * @param log - Optional logger instance (for testing)
+     */
+    constructor(gitOpsFactory, log) {
+        this.gitOpsFactory = gitOpsFactory ?? ((opts) => new GitOps(opts));
+        this.log = log ?? logger;
+    }
+    async process(repoConfig, repoInfo, options) {
+        const repoName = getRepoDisplayName(repoInfo);
+        const { branchName, workDir, dryRun, retries } = options;
+        const executor = options.executor ?? defaultExecutor;
+        this.gitOps = this.gitOpsFactory({ workDir, dryRun, retries });
+        try {
+            // Step 1: Clean workspace
+            this.log.info("Cleaning workspace...");
+            this.gitOps.cleanWorkspace();
+            // Step 2: Clone repo
+            this.log.info("Cloning repository...");
+            await this.gitOps.clone(repoInfo.gitUrl);
+            // Step 3: Get default branch for PR base
+            const { branch: baseBranch, method: detectionMethod } = await this.gitOps.getDefaultBranch();
+            this.log.info(`Default branch: ${baseBranch} (detected via ${detectionMethod})`);
+            // Step 3.5: Close existing PR if exists (fresh start approach)
+            // This ensures isolated sync attempts - each run starts from clean state
+            if (!dryRun) {
+                this.log.info("Checking for existing PR...");
+                const strategy = getPRStrategy(repoInfo, executor);
+                const closed = await strategy.closeExistingPR({
+                    repoInfo,
+                    branchName,
+                    baseBranch,
+                    workDir,
+                    retries,
+                });
+                if (closed) {
+                    this.log.info("Closed existing PR and deleted branch for fresh sync");
+                }
+            }
+            // Step 4: Create branch (always fresh from base branch)
+            this.log.info(`Creating branch: ${branchName}`);
+            await this.gitOps.createBranch(branchName);
+            // Step 5: Write all config files and track changes
+            const changedFiles = [];
+            for (const file of repoConfig.files) {
+                const filePath = join(workDir, file.fileName);
+                const fileExistsLocal = existsSync(filePath);
+                // Handle createOnly - check against BASE branch, not current working directory
+                // This ensures consistent behavior: createOnly means "only create if doesn't exist on main"
+                if (file.createOnly) {
+                    const existsOnBase = await this.gitOps.fileExistsOnBranch(file.fileName, baseBranch);
+                    if (existsOnBase) {
+                        this.log.info(`Skipping ${file.fileName} (createOnly: exists on ${baseBranch})`);
+                        changedFiles.push({ fileName: file.fileName, action: "skip" });
+                        continue;
+                    }
+                }
+                this.log.info(`Writing ${file.fileName}...`);
+                const fileContent = convertContentToString(file.content, file.fileName, {
+                    header: file.header,
+                    schemaUrl: file.schemaUrl,
+                });
+                // Determine action type (create vs update)
+                const action = fileExistsLocal
+                    ? "update"
+                    : "create";
+                if (dryRun) {
+                    // In dry-run, check if file would change without writing
+                    if (this.gitOps.wouldChange(file.fileName, fileContent)) {
+                        changedFiles.push({ fileName: file.fileName, action });
+                    }
+                }
+                else {
+                    // Write the file
+                    this.gitOps.writeFile(file.fileName, fileContent);
+                }
+            }
+            // Step 5b: Set executable permission for files that need it
+            const skippedFileNames = new Set(changedFiles.filter((f) => f.action === "skip").map((f) => f.fileName));
+            for (const file of repoConfig.files) {
+                // Skip files that were excluded (createOnly + exists)
+                if (skippedFileNames.has(file.fileName)) {
+                    continue;
+                }
+                if (shouldBeExecutable(file)) {
+                    this.log.info(`Setting executable: ${file.fileName}`);
+                    await this.gitOps.setExecutable(file.fileName);
+                }
+            }
+            // Step 6: Check for changes (exclude skipped files)
+            let hasChanges;
+            if (dryRun) {
+                hasChanges = changedFiles.filter((f) => f.action !== "skip").length > 0;
+            }
+            else {
+                hasChanges = await this.gitOps.hasChanges();
+                // If there are changes, determine which files changed
+                if (hasChanges) {
+                    // Rebuild the changed files list by checking git status
+                    // Skip files that were already marked as skipped (createOnly)
+                    const skippedFiles = new Set(changedFiles
+                        .filter((f) => f.action === "skip")
+                        .map((f) => f.fileName));
+                    for (const file of repoConfig.files) {
+                        if (skippedFiles.has(file.fileName)) {
+                            continue; // Already tracked as skipped
+                        }
+                        const filePath = join(workDir, file.fileName);
+                        const action = existsSync(filePath)
+                            ? "update"
+                            : "create";
+                        changedFiles.push({ fileName: file.fileName, action });
+                    }
+                }
+            }
+            if (!hasChanges) {
+                return {
+                    success: true,
+                    repoName,
+                    message: "No changes detected",
+                    skipped: true,
+                };
+            }
+            // Step 7: Commit
+            this.log.info("Staging changes...");
+            const commitMessage = this.formatCommitMessage(changedFiles);
+            const committed = await this.gitOps.commit(commitMessage);
+            if (!committed) {
+                this.log.info("No staged changes after git add -A, skipping commit");
+                return {
+                    success: true,
+                    repoName,
+                    message: "No changes detected after staging",
+                    skipped: true,
+                };
+            }
+            this.log.info(`Committed: ${commitMessage}`);
+            // Step 8: Push
+            this.log.info("Pushing to remote...");
+            await this.gitOps.push(branchName);
+            // Step 9: Create PR
+            this.log.info("Creating pull request...");
+            const prResult = await createPR({
+                repoInfo,
+                branchName,
+                baseBranch,
+                files: changedFiles,
+                workDir,
+                dryRun,
+                retries,
+            });
+            // Step 10: Handle merge options if configured
+            const mergeMode = repoConfig.prOptions?.merge ?? "auto";
+            let mergeResult;
+            if (prResult.success && prResult.url && mergeMode !== "manual") {
+                this.log.info(`Handling merge (mode: ${mergeMode})...`);
+                const mergeConfig = {
+                    mode: mergeMode,
+                    strategy: repoConfig.prOptions?.mergeStrategy ?? "squash",
+                    deleteBranch: repoConfig.prOptions?.deleteBranch ?? true,
+                    bypassReason: repoConfig.prOptions?.bypassReason,
+                };
+                const result = await mergePR({
+                    repoInfo,
+                    prUrl: prResult.url,
+                    mergeConfig,
+                    workDir,
+                    dryRun,
+                    retries,
+                });
+                mergeResult = {
+                    merged: result.merged ?? false,
+                    autoMergeEnabled: result.autoMergeEnabled,
+                    message: result.message,
+                };
+                if (!result.success) {
+                    this.log.info(`Warning: Merge operation failed - ${result.message}`);
+                }
+                else {
+                    this.log.info(result.message);
+                }
+            }
+            return {
+                success: prResult.success,
+                repoName,
+                message: prResult.message,
+                prUrl: prResult.url,
+                mergeResult,
+            };
+        }
+        finally {
+            // Always cleanup workspace on completion or failure
+            if (this.gitOps) {
+                try {
+                    this.gitOps.cleanWorkspace();
+                }
+                catch {
+                    // Ignore cleanup errors - best effort
+                }
+            }
+        }
+    }
+    /**
+     * Format commit message based on files changed (excludes skipped files)
+     */
+    formatCommitMessage(files) {
+        const changedFiles = files.filter((f) => f.action !== "skip");
+        if (changedFiles.length === 1) {
+            return `chore: sync ${changedFiles[0].fileName}`;
+        }
+        if (changedFiles.length <= 3) {
+            const fileNames = changedFiles.map((f) => f.fileName).join(", ");
+            return `chore: sync ${fileNames}`;
+        }
+        return `chore: sync ${changedFiles.length} config files`;
+    }
+}
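A usage sketch for the workflow above. The config literal only illustrates the fields process() reads here (files, prOptions); the full RepoConfig shape is declared in config.d.ts, which this diff does not reproduce, hence the cast:

    import { RepositoryProcessor } from "./repository-processor.js";
    import { parseGitUrl } from "./repo-detector.js";
    import type { RepoConfig } from "./config.js";

    const processor = new RepositoryProcessor();
    const repoInfo = parseGitUrl("git@github.com:octo-org/service-a.git");

    const repoConfig = {
        files: [{ fileName: ".editorconfig", content: "root = true\n" }],
        prOptions: { merge: "auto", mergeStrategy: "squash", deleteBranch: true },
    } as unknown as RepoConfig;

    const result = await processor.process(repoConfig, repoInfo, {
        branchName: "chore/sync-config",
        workDir: "/tmp/xfg-work",
        dryRun: true, // threaded through to GitOps, createPR, and mergePR
        retries: 3,
    });

    console.log(result.success, result.skipped ?? false, result.prUrl ?? "(no PR)");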
@@ -0,0 +1,53 @@ package/dist/retry-utils.d.ts
+/**
+ * Default patterns indicating permanent errors that should NOT be retried.
+ * These typically indicate configuration issues, auth failures, or invalid resources.
+ * Export allows customization for different environments.
+ */
+export declare const DEFAULT_PERMANENT_ERROR_PATTERNS: RegExp[];
+/**
+ * Default patterns indicating transient errors that SHOULD be retried.
+ * These typically indicate temporary network or service issues.
+ * Export allows customization for different environments.
+ */
+export declare const DEFAULT_TRANSIENT_ERROR_PATTERNS: RegExp[];
+export interface RetryOptions {
+    /** Maximum number of retries (default: 3) */
+    retries?: number;
+    /** Callback when a retry attempt fails */
+    onRetry?: (error: Error, attempt: number) => void;
+    /** Custom permanent error patterns (defaults to DEFAULT_PERMANENT_ERROR_PATTERNS) */
+    permanentErrorPatterns?: RegExp[];
+    /** Custom transient error patterns (defaults to DEFAULT_TRANSIENT_ERROR_PATTERNS) */
+    transientErrorPatterns?: RegExp[];
+}
+/**
+ * Classifies an error as permanent (should not retry) or transient (should retry).
+ * @param error The error to classify
+ * @param patterns Custom patterns to use (defaults to DEFAULT_PERMANENT_ERROR_PATTERNS)
+ * @returns true if the error is permanent, false if it might be transient
+ */
+export declare function isPermanentError(error: Error, patterns?: RegExp[]): boolean;
+/**
+ * Checks if an error matches known transient patterns.
+ * @param error The error to check
+ * @param patterns Custom patterns to use (defaults to DEFAULT_TRANSIENT_ERROR_PATTERNS)
+ * @returns true if the error appears to be transient
+ */
+export declare function isTransientError(error: Error, patterns?: RegExp[]): boolean;
+/**
+ * Wraps an async operation with retry logic using exponential backoff.
+ * Automatically classifies errors and aborts retries for permanent failures.
+ *
+ * @param fn The async function to run with retry
+ * @param options Retry configuration options
+ * @returns The result of the function if successful
+ * @throws AbortError for permanent failures, or the last error after all retries exhausted
+ */
+export declare function withRetry<T>(fn: () => Promise<T>, options?: RetryOptions): Promise<T>;
+/**
+ * Wraps a synchronous operation in a Promise for use with retry logic.
+ * @param fn The sync function to run
+ * @returns A Promise that resolves/rejects with the sync result
+ */
+export declare function promisify<T>(fn: () => T): Promise<T>;
+export { AbortError } from "p-retry";
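Because the default pattern arrays are exported, the permanent/transient classification can be tuned per environment. A sketch that extends the permanent list; the extra proxy pattern is an example, not something the package ships:

    import { DEFAULT_PERMANENT_ERROR_PATTERNS } from "./retry-utils.js";
    import type { RetryOptions } from "./retry-utils.js";

    const options: RetryOptions = {
        retries: 5,
        // Keep the defaults and additionally treat proxy auth failures as permanent.
        permanentErrorPatterns: [...DEFAULT_PERMANENT_ERROR_PATTERNS, /proxy\s*authentication\s*required/i],
        onRetry: (error, attempt) => console.warn(`attempt ${attempt} failed: ${error.message}`),
    };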
@@ -0,0 +1,143 @@ package/dist/retry-utils.js
+import pRetry, { AbortError } from "p-retry";
+import { logger } from "./logger.js";
+/**
+ * Default patterns indicating permanent errors that should NOT be retried.
+ * These typically indicate configuration issues, auth failures, or invalid resources.
+ * Export allows customization for different environments.
+ */
+export const DEFAULT_PERMANENT_ERROR_PATTERNS = [
+    /permission\s*denied/i,
+    /authentication\s*failed/i,
+    /bad\s*credentials/i,
+    /invalid\s*(token|credentials)/i,
+    /unauthorized/i,
+    /401\b/,
+    /403\b/,
+    /404\b/,
+    /not\s*found/i,
+    /does\s*not\s*exist/i,
+    /repository\s*not\s*found/i,
+    /no\s*such\s*(file|directory|remote|ref)/i,
+    /couldn't\s*find\s*remote\s*ref/i,
+    /invalid\s*remote/i,
+    /not\s*a\s*git\s*repository/i,
+    /non-fast-forward/i,
+    /remote\s*rejected/i,
+];
+/**
+ * Default patterns indicating transient errors that SHOULD be retried.
+ * These typically indicate temporary network or service issues.
+ * Export allows customization for different environments.
+ */
+export const DEFAULT_TRANSIENT_ERROR_PATTERNS = [
+    /timed?\s*out/i,
+    /ETIMEDOUT/,
+    /ECONNRESET/,
+    /ECONNREFUSED/,
+    /ENOTFOUND/,
+    /connection\s*(reset|refused|closed)/i,
+    /network\s*(error|unreachable)/i,
+    /rate\s*limit/i,
+    /too\s*many\s*requests/i,
+    /429\b/,
+    /500\b/,
+    /502\b/,
+    /503\b/,
+    /504\b/,
+    /service\s*unavailable/i,
+    /temporarily\s*unavailable/i,
+    /internal\s*server\s*error/i,
+    /temporary\s*(failure|error)/i,
+    /try\s*again/i,
+    /ssh_exchange_identification/i,
+    /could\s*not\s*resolve\s*host/i,
+    /unable\s*to\s*access/i,
+];
+/**
+ * Classifies an error as permanent (should not retry) or transient (should retry).
+ * @param error The error to classify
+ * @param patterns Custom patterns to use (defaults to DEFAULT_PERMANENT_ERROR_PATTERNS)
+ * @returns true if the error is permanent, false if it might be transient
+ */
+export function isPermanentError(error, patterns = DEFAULT_PERMANENT_ERROR_PATTERNS) {
+    const message = error.message;
+    const stderr = error.stderr?.toString() ?? "";
+    const combined = `${message} ${stderr}`;
+    // Check permanent patterns first - these always stop retries
+    for (const pattern of patterns) {
+        if (pattern.test(combined)) {
+            return true;
+        }
+    }
+    return false;
+}
+/**
+ * Checks if an error matches known transient patterns.
+ * @param error The error to check
+ * @param patterns Custom patterns to use (defaults to DEFAULT_TRANSIENT_ERROR_PATTERNS)
+ * @returns true if the error appears to be transient
+ */
+export function isTransientError(error, patterns = DEFAULT_TRANSIENT_ERROR_PATTERNS) {
+    const message = error.message;
+    const stderr = error.stderr?.toString() ?? "";
+    const combined = `${message} ${stderr}`;
+    for (const pattern of patterns) {
+        if (pattern.test(combined)) {
+            return true;
+        }
+    }
+    return false;
+}
+/**
+ * Wraps an async operation with retry logic using exponential backoff.
+ * Automatically classifies errors and aborts retries for permanent failures.
+ *
+ * @param fn The async function to run with retry
+ * @param options Retry configuration options
+ * @returns The result of the function if successful
+ * @throws AbortError for permanent failures, or the last error after all retries exhausted
+ */
+export async function withRetry(fn, options) {
+    const retries = options?.retries ?? 3;
+    const permanentPatterns = options?.permanentErrorPatterns;
+    return pRetry(async () => {
+        try {
+            return await fn();
+        }
+        catch (error) {
+            if (error instanceof Error &&
+                isPermanentError(error, permanentPatterns)) {
+                // Wrap in AbortError to stop retrying immediately
+                throw new AbortError(error.message);
+            }
+            throw error;
+        }
+    }, {
+        retries,
+        onFailedAttempt: (context) => {
+            // Only log if this isn't the last attempt
+            if (context.retriesLeft > 0) {
+                const msg = context.error.message || "Unknown error";
+                logger.info(`Attempt ${context.attemptNumber}/${retries + 1} failed: ${msg}. Retrying...`);
+                options?.onRetry?.(context.error, context.attemptNumber);
+            }
+        },
+    });
+}
+/**
+ * Wraps a synchronous operation in a Promise for use with retry logic.
+ * @param fn The sync function to run
+ * @returns A Promise that resolves/rejects with the sync result
+ */
+export function promisify(fn) {
+    return new Promise((resolve, reject) => {
+        try {
+            resolve(fn());
+        }
+        catch (error) {
+            reject(error);
+        }
+    });
+}
+// Re-export AbortError for use in custom error handling
+export { AbortError } from "p-retry";
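A minimal sketch of wrapping an operation in withRetry; fetchRemoteRefs is a placeholder for any async call (a git fetch, an API request) that can fail transiently:

    import { withRetry } from "./retry-utils.js";

    // Placeholder async operation used only for illustration.
    async function fetchRemoteRefs(): Promise<string[]> {
        return ["refs/heads/main"];
    }

    // Transient failures are retried with exponential backoff; errors matching
    // the permanent patterns above abort the retry loop immediately.
    const refs = await withRetry(() => fetchRemoteRefs(), {
        retries: 2,
        onRetry: (error, attempt) => console.warn(`attempt ${attempt}: ${error.message}`),
    });
    console.log(`fetched ${refs.length} refs`);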
@@ -0,0 +1,8 @@ package/dist/shell-utils.d.ts
+/**
+ * Escapes a string for safe use as a shell argument.
+ * Uses single quotes and escapes any single quotes within the string.
+ *
+ * @param arg - The string to escape
+ * @returns The escaped string wrapped in single quotes
+ */
+export declare function escapeShellArg(arg: string): string;
@@ -0,0 +1,12 @@ package/dist/shell-utils.js
+/**
+ * Escapes a string for safe use as a shell argument.
+ * Uses single quotes and escapes any single quotes within the string.
+ *
+ * @param arg - The string to escape
+ * @returns The escaped string wrapped in single quotes
+ */
+export function escapeShellArg(arg) {
+    // Use single quotes and escape any single quotes within
+    // 'string' -> quote ends, escaped quote, quote starts again
+    return `'${arg.replace(/'/g, "'\\''")}'`;
+}
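A short sketch of the escaping behavior; the gh invocation is only an illustration of where an escaped argument would be spliced into a shell string, not a command taken from this package:

    import { escapeShellArg } from "./shell-utils.js";

    // A single quote becomes '\'' : close the quote, emit an escaped quote, reopen.
    console.log(escapeShellArg("it's a branch"));
    // -> 'it'\''s a branch'

    const title = 'chore: sync "config" files';
    const cmd = `gh pr create --title ${escapeShellArg(title)}`;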
@@ -0,0 +1,16 @@ package/dist/strategies/azure-pr-strategy.d.ts
+import { PRResult } from "../pr-creator.js";
+import { BasePRStrategy, PRStrategyOptions, CloseExistingPROptions, MergeOptions, MergeResult } from "./pr-strategy.js";
+import { CommandExecutor } from "../command-executor.js";
+export declare class AzurePRStrategy extends BasePRStrategy {
+    constructor(executor?: CommandExecutor);
+    private getOrgUrl;
+    private buildPRUrl;
+    checkExistingPR(options: PRStrategyOptions): Promise<string | null>;
+    closeExistingPR(options: CloseExistingPROptions): Promise<boolean>;
+    create(options: PRStrategyOptions): Promise<PRResult>;
+    /**
+     * Extract PR ID and repo info from Azure DevOps PR URL.
+     */
+    private parsePRUrl;
+    merge(options: MergeOptions): Promise<MergeResult>;
+}
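This declaration mirrors how repository-processor.js consumes strategies through getPRStrategy. A hedged sketch reusing the same call shape as Step 3.5 above; the branch name and work directory are illustrative:

    import { getPRStrategy } from "./strategies/index.js";
    import { defaultExecutor } from "./command-executor.js";
    import { parseGitUrl } from "./repo-detector.js";

    const repoInfo = parseGitUrl("https://dev.azure.com/contoso/platform/_git/infra");

    // For an azure-devops repoInfo this is expected to resolve to AzurePRStrategy.
    const strategy = getPRStrategy(repoInfo, defaultExecutor);

    const closed = await strategy.closeExistingPR({
        repoInfo,
        branchName: "chore/sync-config",
        baseBranch: "main",
        workDir: "/tmp/xfg-work",
        retries: 3,
    });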