@saeed42/worktree-worker 1.3.1 → 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +158 -83
- package/package.json +1 -1
package/dist/main.js
CHANGED
@@ -613,57 +613,107 @@ var RepoService = class {
  if (isSameRepo) {
  log.info("Repository already initialized with same URL, fetching latest");
  await gitService.fetch("origin", repoRoot, auth);
- const
+ const targetBranch2 = options.branch || env.DEFAULT_BRANCH;
  const currentBranch = status.branch || "";
- if (
- log.info("Switching to requested branch", { from: currentBranch, to:
- const checkoutResult = await gitService.exec(["checkout",
+ if (targetBranch2 !== currentBranch) {
+ log.info("Switching to requested branch", { from: currentBranch, to: targetBranch2 });
+ const checkoutResult = await gitService.exec(["checkout", targetBranch2], repoRoot);
  if (checkoutResult.code !== 0) {
  await gitService.exec(
- ["checkout", "-b",
+ ["checkout", "-b", targetBranch2, `origin/${targetBranch2}`],
  repoRoot
  );
  }
  }
- const
- const
- return { path: repoRoot, branch:
+ const headSha3 = await gitService.getHeadSha(repoRoot);
+ const branch3 = await gitService.getCurrentBranch(repoRoot);
+ return { path: repoRoot, branch: branch3, headSha: headSha3, remote: status.remote };
  }
  if (!options.force) {
  throw new Error(
  `Repository already initialized with different URL. Current: ${currentUrl}, Requested: ${requestedUrl}. Use force=true to re-initialize (this will delete all worktrees).`
  );
  }
- log.warn("Force re-init: cleaning
+ log.warn("Force re-init: cleaning worktrees and updating remote in-place");
  await this.cleanAllWorktrees();
-
+ const cleanUrl = options.repoUrl.replace(/^https:\/\/[^@]+@/, "https://");
+ await gitService.exec(["remote", "set-url", "origin", cleanUrl], repoRoot);
+ await gitService.exec(["config", "--local", "user.name", "origin-agent[bot]"], repoRoot);
+ await gitService.exec([
+ "config",
+ "--local",
+ "user.email",
+ "origin-agent[bot]@users.noreply.github.com"
+ ], repoRoot);
+ await gitService.exec(["config", "--local", "safe.directory", repoRoot], repoRoot);
+ const targetBranch = options.branch || env.DEFAULT_BRANCH;
+ log.info("Fetching from new remote", { branch: targetBranch });
+ await gitService.fetch("origin", repoRoot, auth);
+ try {
+ await gitService.exec(["checkout", "-B", targetBranch, `origin/${targetBranch}`, "--force"], repoRoot);
+ } catch {
+ await gitService.exec(["checkout", "-B", targetBranch, "--force"], repoRoot);
+ }
+ await gitService.exec(["branch", `--set-upstream-to=origin/${targetBranch}`, targetBranch], repoRoot).catch(
+ () => {
+ }
+ );
+ const headSha2 = await gitService.getHeadSha(repoRoot);
+ const branch2 = await gitService.getCurrentBranch(repoRoot);
+ const remote2 = await gitService.getRemoteUrl("origin", repoRoot);
+ log.info("Repository re-initialized in-place", { branch: branch2, headSha: headSha2 });
+ return { path: repoRoot, branch: branch2, headSha: headSha2, remote: remote2 };
  }
  const parentDir = repoRoot.split("/").slice(0, -1).join("/");
  await mkdir2(parentDir, { recursive: true });
  await mkdir2(env.TRIALS_WORKSPACE_DIR, { recursive: true });
+ const dirExists = await stat2(repoRoot).then(() => true).catch(() => false);
  const branch = options.branch || env.DEFAULT_BRANCH;
-
-
-
-
-
-
-
-
-
-
- "
-
-
-
-
-
-
-
-
-
-
+ if (dirExists) {
+ log.info("Directory exists, initializing git in-place", { branch });
+ await gitService.exec(["init"], repoRoot);
+ await gitService.exec(["config", "--local", "user.name", "origin-agent[bot]"], repoRoot);
+ await gitService.exec([
+ "config",
+ "--local",
+ "user.email",
+ "origin-agent[bot]@users.noreply.github.com"
+ ], repoRoot);
+ await gitService.exec(["config", "--local", "safe.directory", repoRoot], repoRoot);
+ const cleanUrl = options.repoUrl.replace(/^https:\/\/[^@]+@/, "https://");
+ await gitService.exec(["remote", "add", "origin", cleanUrl], repoRoot).catch(async () => {
+ await gitService.exec(["remote", "set-url", "origin", cleanUrl], repoRoot);
+ });
+ await gitService.fetch("origin", repoRoot, auth);
+ await gitService.exec(["checkout", "-B", branch, `origin/${branch}`, "--force"], repoRoot);
+ await gitService.exec(["branch", `--set-upstream-to=origin/${branch}`, branch], repoRoot).catch(
+ () => {
+ }
+ );
+ } else {
+ log.info("Cloning repository", { branch });
+ await gitService.cloneRepo(options.repoUrl, repoRoot, {
+ branch,
+ blobless: true,
+ githubToken: options.githubToken
+ });
+ await gitService.exec(["config", "--local", "user.name", "origin-agent[bot]"], repoRoot);
+ await gitService.exec([
+ "config",
+ "--local",
+ "user.email",
+ "origin-agent[bot]@users.noreply.github.com"
+ ], repoRoot);
+ await gitService.exec(["config", "--local", "safe.directory", repoRoot], repoRoot);
+ const cleanUrl = options.repoUrl.replace(/^https:\/\/[^@]+@/, "https://");
+ await gitService.exec(["remote", "set-url", "origin", cleanUrl], repoRoot);
+ log.info("Fetching all remote refs");
+ await gitService.fetch("origin", repoRoot, auth);
+ await gitService.exec(["branch", `--set-upstream-to=origin/${branch}`, branch], repoRoot).catch(
+ () => {
+ }
+ );
+ }
  const isEmpty = await gitService.isEmptyRepo(repoRoot);
  if (isEmpty) {
  log.warn("Repository is empty (no commits)", { repoUrl: options.repoUrl });
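Note on the force re-init path above: the remote URL is sanitized before being written back with `git remote set-url`, so embedded credentials are not persisted in .git/config. A minimal standalone sketch of what that replace() does (the example URL and token are hypothetical, not taken from the package):

// Hypothetical illustration of the credential-stripping regex used in the new code.
const stripEmbeddedCredentials = (url) => url.replace(/^https:\/\/[^@]+@/, "https://");

stripEmbeddedCredentials("https://x-access-token:TOKEN@github.com/owner/repo.git");
// => "https://github.com/owner/repo.git"
stripEmbeddedCredentials("https://github.com/owner/repo.git");
// => unchanged (no embedded credentials to strip)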
@@ -873,6 +923,7 @@ var WorktreeService = class {
  hasRepoUrl: !!options.repoUrl
  });
  await mkdir3(env.TRIALS_WORKSPACE_DIR, { recursive: true });
+ await this.opportunisticCleanup();
  let repoStatus = await repoService.getStatus();
  const isValidGitHubRemote = (remote) => {
  if (!remote) return false;
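The single added line in this hunk wires the new opportunistic cleanup (defined in the next hunk) into worktree creation: it runs best-effort before each new worktree and swallows its own errors. A simplified paraphrase of the resulting call order, with unrelated arguments omitted (this is a sketch, not the exact bundled code):

// Simplified ordering inside worktree creation, paraphrased from the hunks above/below:
await mkdir(env.TRIALS_WORKSPACE_DIR, { recursive: true }); // ensure workspace root exists
await this.opportunisticCleanup();                          // prune stale / excess worktrees; never throws
const repoStatus = await repoService.getStatus();           // then proceed with normal creation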
@@ -1118,59 +1169,98 @@ var WorktreeService = class {
  return { branch, pushed: true };
  }
  /**
- *
- *
- * Strategy:
- * 1. Always clean worktrees older than CLEANUP_AFTER_HOURS (default 24h)
- * 2. If disk usage is high (>80%), also clean worktrees older than 6h
- * 3. If disk usage is critical (>90%), clean all worktrees older than 1h
+ * Count current worktrees
  */
- async
- const log = logger.child({ service: "worktree", action: "cleanup" });
- let cleaned = 0;
- const errors = [];
- let diskUsagePercent;
- let cutoffHours = env.CLEANUP_AFTER_HOURS;
+ async countWorktrees() {
  try {
- const
-
-
-
- log.warn("Critical disk usage, aggressive cleanup", { diskUsagePercent, cutoffHours });
- } else if (diskUsagePercent > 80) {
- cutoffHours = 6;
- log.info("High disk usage, moderate cleanup", { diskUsagePercent, cutoffHours });
- }
- } catch (err) {
- log.warn("Could not check disk usage", { error: err instanceof Error ? err.message : String(err) });
+ const entries = await readdir2(env.TRIALS_WORKSPACE_DIR, { withFileTypes: true });
+ return entries.filter((e) => e.isDirectory()).length;
+ } catch {
+ return 0;
  }
-
+ }
+ /**
+ * Get worktrees sorted by modification time (oldest first)
+ */
+ async getWorktreesByAge() {
  try {
  const entries = await readdir2(env.TRIALS_WORKSPACE_DIR, { withFileTypes: true });
- const
+ const worktrees = [];
  for (const entry of entries) {
  if (!entry.isDirectory()) continue;
  const worktreePath = `${env.TRIALS_WORKSPACE_DIR}/${entry.name}`;
  try {
  const stats = await stat3(worktreePath);
-
+ worktrees.push({ path: worktreePath, mtime: stats.mtimeMs });
  } catch {
  }
  }
-
-
-
+ return worktrees.sort((a, b) => a.mtime - b.mtime);
+ } catch {
+ return [];
+ }
+ }
+ /**
+ * Opportunistic cleanup - run before creating new worktrees
+ *
+ * Strategy:
+ * 1. Always clean worktrees older than CLEANUP_AFTER_HOURS
+ * 2. If count exceeds MAX_WORKTREES, clean oldest until under limit
+ */
+ async opportunisticCleanup(maxWorktrees = 20) {
+ const log = logger.child({ service: "worktree", action: "opportunistic-cleanup" });
+ try {
+ const worktrees = await this.getWorktreesByAge();
+ const count = worktrees.length;
+ const cutoffTime = Date.now() - env.CLEANUP_AFTER_HOURS * 60 * 60 * 1e3;
+ let cleaned = 0;
+ for (const wt of worktrees) {
+ const isStale = wt.mtime < cutoffTime;
+ const isOverLimit = count - cleaned > maxWorktrees;
+ if (isStale || isOverLimit) {
  try {
-
+ await rm2(wt.path, { recursive: true, force: true });
+ cleaned++;
+ log.debug("Cleaned worktree", {
  path: wt.path,
+ reason: isStale ? "stale" : "over_limit",
  ageHours: Math.round((Date.now() - wt.mtime) / (60 * 60 * 1e3))
  });
-
+ } catch {
+ }
+ }
+ }
+ if (cleaned > 0) {
+ log.info("Opportunistic cleanup completed", { cleaned, remaining: count - cleaned });
+ await gitService.pruneWorktrees(env.BASE_WORKSPACE_DIR).catch(() => {
+ });
+ }
+ } catch {
+ }
+ }
+ /**
+ * Cleanup stale worktrees
+ */
+ async cleanupStaleWorktrees() {
+ const log = logger.child({ service: "worktree", action: "cleanup" });
+ let cleaned = 0;
+ const errors = [];
+ const cutoffTime = Date.now() - env.CLEANUP_AFTER_HOURS * 60 * 60 * 1e3;
+ try {
+ const entries = await readdir2(env.TRIALS_WORKSPACE_DIR, { withFileTypes: true });
+ for (const entry of entries) {
+ if (!entry.isDirectory()) continue;
+ const worktreePath = `${env.TRIALS_WORKSPACE_DIR}/${entry.name}`;
+ try {
+ const stats = await stat3(worktreePath);
+ if (stats.mtimeMs < cutoffTime) {
+ log.info("Cleaning up stale worktree", { path: worktreePath });
+ await rm2(worktreePath, { recursive: true, force: true });
  cleaned++;
- } catch (err) {
- const errMsg = err instanceof Error ? err.message : String(err);
- errors.push(`${wt.path}: ${errMsg}`);
  }
+ } catch (err) {
+ const errMsg = err instanceof Error ? err.message : String(err);
+ errors.push(`${worktreePath}: ${errMsg}`);
  }
  }
  await gitService.pruneWorktrees(env.BASE_WORKSPACE_DIR);
@@ -1178,23 +1268,8 @@ var WorktreeService = class {
  const errMsg = err instanceof Error ? err.message : String(err);
  errors.push(`readdir: ${errMsg}`);
  }
- log.info("Cleanup completed", { cleaned, errorCount: errors.length
- return { cleaned, errors
- }
- /**
- * Get disk usage information for the workspace partition
- */
- async getDiskUsage() {
- const { exec } = await import("child_process");
- const { promisify } = await import("util");
- const execAsync = promisify(exec);
- const { stdout } = await execAsync(`df -B1 ${env.BASE_WORKSPACE_DIR} | tail -1`);
- const parts = stdout.trim().split(/\s+/);
- const total = parseInt(parts[1], 10);
- const used = parseInt(parts[2], 10);
- const free = parseInt(parts[3], 10);
- const usedPercent = parseInt(parts[4].replace("%", ""), 10);
- return { total, used, free, usedPercent };
+ log.info("Cleanup completed", { cleaned, errorCount: errors.length });
+ return { cleaned, errors };
  }
  };
  var worktreeService = new WorktreeService();
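Taken together, the last two hunks replace the disk-usage-driven cleanup (the removed getDiskUsage helper shelled out to `df`) with an age-and-count policy. A minimal standalone sketch of that selection rule, assuming a list already sorted oldest-first as getWorktreesByAge returns (function and variable names below are illustrative, not from the package):

// Illustrative restatement of the new cleanup policy: a worktree is removed if it is
// older than the cutoff, or while the remaining count still exceeds the limit.
// Worktrees are visited oldest-first, so over-limit removals always hit the oldest.
function selectForCleanup(worktrees, cutoffMs, maxWorktrees, now = Date.now()) {
  const toRemove = [];
  for (const wt of worktrees) { // worktrees sorted by mtime ascending
    const isStale = wt.mtime < now - cutoffMs;
    const isOverLimit = worktrees.length - toRemove.length > maxWorktrees;
    if (isStale || isOverLimit) toRemove.push(wt);
  }
  return toRemove;
}

// Example: with maxWorktrees = 20 and a 24-hour cutoff, a 30-hour-old worktree is
// removed as "stale"; a fresh one is removed only while more than 20 would otherwise remain.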