claude-code-runner 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +559 -0
- package/README.zh-Hans.md +559 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +377 -0
- package/dist/cli.js.map +1 -0
- package/dist/config.d.ts +4 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +50 -0
- package/dist/config.js.map +1 -0
- package/dist/container.d.ts +23 -0
- package/dist/container.d.ts.map +1 -0
- package/dist/container.js +971 -0
- package/dist/container.js.map +1 -0
- package/dist/credentials.d.ts +8 -0
- package/dist/credentials.d.ts.map +1 -0
- package/dist/credentials.js +145 -0
- package/dist/credentials.js.map +1 -0
- package/dist/docker-config.d.ts +19 -0
- package/dist/docker-config.d.ts.map +1 -0
- package/dist/docker-config.js +101 -0
- package/dist/docker-config.js.map +1 -0
- package/dist/git/shadow-repository.d.ts +30 -0
- package/dist/git/shadow-repository.d.ts.map +1 -0
- package/dist/git/shadow-repository.js +645 -0
- package/dist/git/shadow-repository.js.map +1 -0
- package/dist/git-monitor.d.ts +15 -0
- package/dist/git-monitor.d.ts.map +1 -0
- package/dist/git-monitor.js +94 -0
- package/dist/git-monitor.js.map +1 -0
- package/dist/index.d.ts +22 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +221 -0
- package/dist/index.js.map +1 -0
- package/dist/types.d.ts +49 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +3 -0
- package/dist/types.js.map +1 -0
- package/dist/ui.d.ts +12 -0
- package/dist/ui.d.ts.map +1 -0
- package/dist/ui.js +82 -0
- package/dist/ui.js.map +1 -0
- package/dist/web-server-attach.d.ts +16 -0
- package/dist/web-server-attach.d.ts.map +1 -0
- package/dist/web-server-attach.js +249 -0
- package/dist/web-server-attach.js.map +1 -0
- package/dist/web-server.d.ts +27 -0
- package/dist/web-server.d.ts.map +1 -0
- package/dist/web-server.js +812 -0
- package/dist/web-server.js.map +1 -0
- package/package.json +77 -0
package/dist/git/shadow-repository.js
@@ -0,0 +1,645 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ShadowRepository = void 0;
const node_child_process_1 = require("node:child_process");
const path = __importStar(require("node:path"));
const node_process_1 = __importDefault(require("node:process"));
const node_util_1 = require("node:util");
const chalk_1 = __importDefault(require("chalk"));
const fs = __importStar(require("fs-extra"));
const docker_config_1 = require("../docker-config");
const execAsync = (0, node_util_1.promisify)(node_child_process_1.exec);
class ShadowRepository {
    options;
    basePath;
    shadowPath;
    initialized = false;
    rsyncExcludeFile;
    containerCmd; // 'docker' or 'podman'
    constructor(options, basePath = '/tmp/claude-shadows') {
        this.options = options;
        this.basePath = basePath;
        this.shadowPath = path.join(this.basePath, this.options.sessionId);
        this.rsyncExcludeFile = path.join(this.basePath, `${this.options.sessionId}-excludes.txt`);
        // Determine which container runtime to use
        this.containerCmd = options.containerRuntime || (0, docker_config_1.getContainerRuntimeCmd)();
    }
    async initialize() {
        if (this.initialized)
            return;
        console.log(chalk_1.default.blue('🔨 Creating shadow repository...'));
        // Ensure base directory exists
        await fs.ensureDir(this.basePath);
        // Remove any existing shadow repo completely
        if (await fs.pathExists(this.shadowPath)) {
            try {
                // Force remove with sudo if needed
                await execAsync(`rm -rf ${this.shadowPath}`);
            }
            catch (error) {
                // Fallback to fs.remove
                await fs.remove(this.shadowPath);
            }
        }
        // Clone with minimal data
        try {
            // First, determine the current branch in the original repo
            const { stdout: currentBranch } = await execAsync('git branch --show-current', {
                cwd: this.options.originalRepo,
            });
            const sourceBranch = currentBranch.trim() || 'main';
            // Try different clone approaches for robustness
            let cloneSuccess = false;
            // Approach 1: Try standard clone
            try {
                const cloneCmd = `git clone --single-branch --branch ${sourceBranch} --depth 1 "${this.options.originalRepo}" "${this.shadowPath}"`;
                await execAsync(cloneCmd);
                cloneSuccess = true;
            }
            catch (cloneError) {
                console.log(chalk_1.default.yellow(' Standard clone failed, trying alternative...'));
                // Approach 2: Try without depth limit
                try {
                    const cloneCmd = `git clone --single-branch --branch ${sourceBranch} "${this.options.originalRepo}" "${this.shadowPath}"`;
                    await execAsync(cloneCmd);
                    cloneSuccess = true;
                }
                catch (cloneError2) {
                    console.log(chalk_1.default.yellow(' Alternative clone failed, trying copy approach...'));
                    // Approach 3: Copy working tree and init new repo
                    await fs.ensureDir(this.shadowPath);
                    await execAsync(`cp -r "${this.options.originalRepo}/." "${this.shadowPath}/"`);
                    // Remove and reinit git repo
                    await fs.remove(path.join(this.shadowPath, '.git'));
                    await execAsync('git init', { cwd: this.shadowPath });
                    await execAsync('git add .', { cwd: this.shadowPath });
                    await execAsync(`git commit -m "Initial commit from ${sourceBranch}"`, { cwd: this.shadowPath });
                    cloneSuccess = true;
                }
            }
            if (!cloneSuccess) {
                throw new Error('All clone approaches failed');
            }
            // Create the Claude branch locally if it's different from source
            if (this.options.claudeBranch !== sourceBranch) {
                await execAsync(`git checkout -b ${this.options.claudeBranch}`, {
                    cwd: this.shadowPath,
                });
            }
            // Configure remote to point to the actual GitHub remote, not local repo
            try {
                const { stdout: remoteUrl } = await execAsync('git remote get-url origin', {
                    cwd: this.options.originalRepo,
                });
                const actualRemote = remoteUrl.trim();
                if (actualRemote
                    && !actualRemote.startsWith('/')
                    && !actualRemote.startsWith('file://')) {
                    // Set the remote to the actual GitHub/remote URL
                    await execAsync(`git remote set-url origin "${actualRemote}"`, {
                        cwd: this.shadowPath,
                    });
                    console.log(chalk_1.default.blue(` ✓ Configured remote: ${actualRemote}`));
                }
            }
            catch (remoteError) {
                console.log(chalk_1.default.gray(' (Could not configure remote URL, using local)'));
            }
            // Create an initial commit if the repo is empty (no HEAD)
            try {
                await execAsync('git rev-parse HEAD', { cwd: this.shadowPath });
            }
            catch (noHeadError) {
                // No HEAD exists, create initial commit
                console.log(chalk_1.default.blue(' Creating initial commit...'));
                try {
                    await execAsync('git add .', { cwd: this.shadowPath });
                    await execAsync('git commit -m "Initial commit" --allow-empty', {
                        cwd: this.shadowPath,
                    });
                    console.log(chalk_1.default.green(' ✓ Initial commit created'));
                }
                catch (commitError) {
                    // If commit fails, create empty commit
                    await execAsync('git commit --allow-empty -m "Initial empty commit"', { cwd: this.shadowPath });
                    console.log(chalk_1.default.green(' ✓ Empty initial commit created'));
                }
            }
            console.log(chalk_1.default.green('✓ Shadow repository created'));
            this.initialized = true;
            // Stage all files after initial setup to track them
            try {
                await execAsync('git add .', { cwd: this.shadowPath });
                console.log(chalk_1.default.gray(' Staged all files for tracking'));
                // Create initial commit to ensure deletions can be tracked
                await execAsync('git commit -m "Initial snapshot of working directory" --allow-empty', { cwd: this.shadowPath });
                console.log(chalk_1.default.gray(' Created initial commit for change tracking'));
            }
            catch (stageError) {
                const errorMsg = stageError instanceof Error ? stageError.message : String(stageError);
                console.log(chalk_1.default.gray(' Could not stage files:', errorMsg));
            }
        }
        catch (error) {
            console.error(chalk_1.default.red('Failed to create shadow repository:'), error);
            throw error;
        }
    }
    async prepareRsyncRules() {
        try {
            // Start with built-in excludes that should never be synced
            const excludes = [
                '.git',
                '.git/**',
                'node_modules',
                'node_modules/**',
                '.next',
                '.next/**',
                '__pycache__',
                '__pycache__/**',
                '.venv',
                '.venv/**',
                '*.pyc',
                '*.pyo',
                '.DS_Store',
                'Thumbs.db',
            ];
            // Get list of git-tracked files to ensure they're always included
            let trackedFiles = [];
            try {
                const { stdout } = await execAsync('git ls-files', {
                    cwd: this.options.originalRepo,
                });
                trackedFiles = stdout
                    .trim()
                    .split('\n')
                    .filter(f => f.trim());
                console.log(chalk_1.default.gray(` Found ${trackedFiles.length} git-tracked files`));
            }
            catch (error) {
                console.log(chalk_1.default.yellow(' Warning: Could not get git-tracked files:', error));
            }
            // Check for .gitignore in original repo
            const gitignorePath = path.join(this.options.originalRepo, '.gitignore');
            if (await fs.pathExists(gitignorePath)) {
                const gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');
                const lines = gitignoreContent.split('\n');
                for (const line of lines) {
                    const trimmed = line.trim();
                    // Skip empty lines and comments
                    if (!trimmed || trimmed.startsWith('#'))
                        continue;
                    // Convert gitignore patterns to rsync patterns
                    const pattern = trimmed;
                    // Handle negation (gitignore: !pattern, rsync: + pattern)
                    if (pattern.startsWith('!')) {
                        // Rsync uses + for inclusion, but we'll skip these for simplicity
                        continue;
                    }
                    // If pattern ends with /, it's a directory
                    if (pattern.endsWith('/')) {
                        excludes.push(pattern);
                        excludes.push(`${pattern}**`);
                    }
                    else {
                        // Add the pattern as-is
                        excludes.push(pattern);
                        // If it doesn't contain /, it matches anywhere, so add **/ prefix
                        if (!pattern.includes('/')) {
                            excludes.push(`**/${pattern}`);
                        }
                    }
                }
            }
            // Create include patterns for all git-tracked files
            // This ensures git-tracked files are synced even if they match gitignore patterns
            const includes = [];
            for (const file of trackedFiles) {
                includes.push(`+ ${file}`);
                // Also include parent directories
                const parts = file.split('/');
                for (let i = 1; i < parts.length; i++) {
                    const dir = parts.slice(0, i).join('/');
                    includes.push(`+ ${dir}/`);
                }
            }
            // Remove duplicates from includes
            const uniqueIncludes = [...new Set(includes)];
            // Write the rsync rules file: includes first, then excludes
            // Rsync processes rules in order, so includes must come before excludes
            const allRules = [...uniqueIncludes, ...excludes.map(e => `- ${e}`)];
            await fs.writeFile(this.rsyncExcludeFile, allRules.join('\n'));
            console.log(chalk_1.default.gray(` Created rsync rules file with ${uniqueIncludes.length} includes and ${excludes.length} excludes`));
        }
        catch (error) {
            console.log(chalk_1.default.yellow(' Warning: Could not prepare rsync rules:', error));
            // Create a basic exclude file with just the essentials
            const basicExcludes = [
                '- .git',
                '- node_modules',
                '- .next',
                '- __pycache__',
                '- .venv',
            ];
            await fs.writeFile(this.rsyncExcludeFile, basicExcludes.join('\n'));
        }
    }
    async resetToContainerBranch(containerId) {
        console.log(chalk_1.default.blue('🔄 Resetting shadow repo to match container branch...'));
        try {
            // Ensure shadow repo is initialized first
            if (!this.initialized) {
                await this.initialize();
            }
            // Get the current branch from the container
            const { stdout: containerBranch } = await execAsync(`${this.containerCmd} exec ${containerId} git -C /workspace rev-parse --abbrev-ref HEAD`);
            const targetBranch = containerBranch.trim();
            console.log(chalk_1.default.blue(` Container is on branch: ${targetBranch}`));
            // Get the current branch in shadow repo (if it has one)
            let currentShadowBranch = '';
            try {
                const { stdout: shadowBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: this.shadowPath });
                currentShadowBranch = shadowBranch.trim();
                console.log(chalk_1.default.blue(` Shadow repo is on: ${currentShadowBranch}`));
            }
            catch (error) {
                console.log(chalk_1.default.blue(' Shadow repo has no HEAD yet'));
            }
            if (targetBranch !== currentShadowBranch) {
                console.log(chalk_1.default.blue(' Resetting shadow repo to match container...'));
                // Fetch all branches from the original repo
                try {
                    await execAsync('git fetch origin', { cwd: this.shadowPath });
                }
                catch (error) {
                    console.warn(chalk_1.default.yellow('Warning: Failed to fetch from origin'));
                }
                // Check if the target branch exists remotely and create/checkout accordingly
                try {
                    // Try to checkout the branch if it exists remotely and reset to match it
                    await execAsync(`git checkout -B ${targetBranch} origin/${targetBranch}`, { cwd: this.shadowPath });
                    console.log(chalk_1.default.green(`✓ Shadow repo reset to remote branch: ${targetBranch}`));
                }
                catch (error) {
                    try {
                        // If that fails, try to checkout locally existing branch
                        await execAsync(`git checkout ${targetBranch}`, {
                            cwd: this.shadowPath,
                        });
                        console.log(chalk_1.default.green(`✓ Shadow repo switched to local branch: ${targetBranch}`));
                    }
                    catch (localError) {
                        // If that fails too, create a new branch
                        await execAsync(`git checkout -b ${targetBranch}`, {
                            cwd: this.shadowPath,
                        });
                        console.log(chalk_1.default.green(`✓ Shadow repo created new branch: ${targetBranch}`));
                    }
                }
                // Mark that we need to resync after branch reset
                console.log(chalk_1.default.blue('✓ Branch reset complete - files will be synced next'));
            }
            else {
                console.log(chalk_1.default.gray(` Shadow repo already on correct branch: ${targetBranch}`));
            }
        }
        catch (error) {
            console.warn(chalk_1.default.yellow('⚠ Failed to reset shadow repo branch:'), error);
        }
    }
    async syncFromContainer(containerId, containerPath = '/workspace') {
        if (!this.initialized) {
            await this.initialize();
        }
        console.log(chalk_1.default.blue('🔄 Syncing files from container...'));
        // Prepare rsync rules
        await this.prepareRsyncRules();
        // First, ensure files in container are owned by claude user
        try {
            console.log(chalk_1.default.blue(' Fixing file ownership in container...'));
            // Try multiple approaches to fix ownership
            let ownershipFixed = false;
            // Approach 1: Run as root
            try {
                await execAsync(`${this.containerCmd} exec --user root ${containerId} chown -R claude:claude ${containerPath}`);
                ownershipFixed = true;
            }
            catch (rootError) {
                // Approach 2: Try without --user root
                try {
                    await execAsync(`${this.containerCmd} exec ${containerId} chown -R claude:claude ${containerPath}`);
                    ownershipFixed = true;
                }
                catch (normalError) {
                    // Approach 3: Use sudo if available
                    try {
                        await execAsync(`${this.containerCmd} exec ${containerId} sudo chown -R claude:claude ${containerPath}`);
                        ownershipFixed = true;
                    }
                    catch (sudoError) {
                        // Continue without fixing ownership
                    }
                }
            }
            // Verify the change worked
            if (ownershipFixed) {
                try {
                    const { stdout: verification } = await execAsync(`${this.containerCmd} exec ${containerId} ls -la ${containerPath}/README.md 2>/dev/null || echo "no readme"`);
                    if (verification.includes('claude claude')) {
                        console.log(chalk_1.default.green(' ✓ Container file ownership fixed'));
                    }
                    else {
                        console.log(chalk_1.default.yellow(' ⚠ Ownership fix verification failed, but continuing...'));
                    }
                }
                catch (verifyError) {
                    console.log(chalk_1.default.gray(' (Could not verify ownership fix, continuing...)'));
                }
            }
            else {
                console.log(chalk_1.default.gray(' (Could not fix container file ownership, continuing...)'));
            }
        }
        catch (error) {
            console.log(chalk_1.default.gray(' (Ownership fix failed, continuing with sync...)'));
        }
        // Check if rsync is available in container
        const hasRsync = await this.checkRsyncInContainer(containerId);
        if (hasRsync) {
            await this.syncWithRsync(containerId, containerPath);
        }
        else {
            await this.syncWithDockerCp(containerId, containerPath);
        }
        // Stage all changes including deletions
        try {
            await execAsync('git add -A', { cwd: this.shadowPath });
        }
        catch (stageError) {
            console.log(chalk_1.default.gray(' Could not stage changes:', stageError));
        }
        console.log(chalk_1.default.green('✓ Files synced successfully'));
    }
    async checkRsyncInContainer(containerId) {
        try {
            await execAsync(`${this.containerCmd} exec ${containerId} which rsync`);
            return true;
        }
        catch {
            // Try to install rsync if not available
            try {
                console.log(chalk_1.default.yellow(' Installing rsync in container...'));
                // Try different package managers
                const installCommands = [
                    'dnf install -y rsync', // Fedora/AlmaLinux/RHEL
                    'yum install -y rsync', // CentOS/RHEL
                    'apt-get update && apt-get install -y rsync', // Ubuntu/Debian
                    'apk add --no-cache rsync', // Alpine
                ];
                for (const cmd of installCommands) {
                    try {
                        // Try as root first, then as normal user
                        const execCommands = [
                            `${this.containerCmd} exec --user root ${containerId} sh -c "${cmd}"`,
                            `${this.containerCmd} exec ${containerId} sh -c "sudo ${cmd}"`,
                            `${this.containerCmd} exec ${containerId} sh -c "${cmd}"`,
                        ];
                        for (const execCmd of execCommands) {
                            try {
                                await execAsync(execCmd);
                                // Test if rsync is now available
                                await execAsync(`${this.containerCmd} exec ${containerId} which rsync`);
                                console.log(chalk_1.default.green(' ✓ rsync installed successfully'));
                                return true;
                            }
                            catch (execError) {
                                continue;
                            }
                        }
                    }
                    catch (cmdError) {
                        // Continue to next command
                        continue;
                    }
                }
                console.log(chalk_1.default.gray(' (Could not install rsync with any package manager)'));
                return false;
            }
            catch (installError) {
                console.log(chalk_1.default.gray(' (Could not install rsync, using docker cp)'));
                return false;
            }
        }
    }
    async syncWithRsync(containerId, containerPath) {
        // Create a temporary directory in container for rsync
        const tempDir = '/tmp/sync-staging';
        await execAsync(`${this.containerCmd} exec ${containerId} mkdir -p ${tempDir}`);
        // Copy exclude file to container
        const containerExcludeFile = '/tmp/rsync-excludes.txt';
        await execAsync(`${this.containerCmd} cp ${this.rsyncExcludeFile} ${containerId}:${containerExcludeFile}`);
        // Rsync directly from container to shadow repo with proper deletion handling
        // First, clear the shadow repo (except .git) to ensure deletions are reflected
        await execAsync(`find ${this.shadowPath} -mindepth 1 -not -path '${this.shadowPath}/.git*' -delete`);
        // Rsync within container to staging area using exclude file
        const rsyncCmd = `${this.containerCmd} exec ${containerId} rsync -av --delete \
      --exclude-from=${containerExcludeFile} \
      ${containerPath}/ ${tempDir}/`;
        await execAsync(rsyncCmd);
        // Copy from container staging to shadow repo
        await execAsync(`${this.containerCmd} cp ${containerId}:${tempDir}/. ${this.shadowPath}/`);
        // Clean up staging directory and exclude file
        try {
            await execAsync(`${this.containerCmd} exec ${containerId} rm -rf ${tempDir}`);
            await execAsync(`${this.containerCmd} exec --user root ${containerId} rm -f ${containerExcludeFile}`);
        }
        catch (cleanupError) {
            // Ignore cleanup errors
        }
    }
    async syncWithDockerCp(containerId, containerPath) {
        console.log(chalk_1.default.yellow(`⚠️ Using ${this.containerCmd} cp (rsync not available in container)`));
        // Create a temp directory for staging the copy
        const tempCopyPath = path.join(this.basePath, 'temp-copy');
        try {
            // Remove temp directory if it exists
            if (await fs.pathExists(tempCopyPath)) {
                await fs.remove(tempCopyPath);
            }
            // Create temp directory
            await fs.ensureDir(tempCopyPath);
            // Copy files to temp directory first (to avoid corrupting shadow repo)
            await execAsync(`${this.containerCmd} cp ${containerId}:${containerPath}/. ${tempCopyPath}/`);
            // Now selectively copy files to shadow repo, using exclude file
            // Use rsync on host to copy files using exclude file
            try {
                await execAsync(`rsync -av --exclude-from=${this.rsyncExcludeFile} ${tempCopyPath}/ ${this.shadowPath}/`);
            }
            catch (rsyncError) {
                // Fallback to cp if rsync not available on host
                console.log(chalk_1.default.gray(' (rsync not available on host, using cp)'));
                // Manual copy excluding directories - read exclude patterns
                const excludeContent = await fs.readFile(this.rsyncExcludeFile, 'utf-8');
                const excludePatterns = excludeContent
                    .split('\n')
                    .filter(p => p.trim());
                const { stdout: fileList } = await execAsync(`find ${tempCopyPath} -type f`);
                const files = fileList
                    .trim()
                    .split('\n')
                    .filter(f => f.trim());
                for (const file of files) {
                    const relativePath = path.relative(tempCopyPath, file);
                    // Check if file matches any exclude pattern
                    let shouldExclude = false;
                    for (const pattern of excludePatterns) {
                        if (!pattern)
                            continue;
                        // Simple pattern matching (not full glob)
                        if (pattern.includes('**')) {
                            const basePattern = pattern.replace('**/', '').replace('/**', '');
                            if (relativePath.includes(basePattern)) {
                                shouldExclude = true;
                                break;
                            }
                        }
                        else if (pattern.endsWith('*')) {
                            const prefix = pattern.slice(0, -1);
                            if (relativePath.startsWith(prefix)
                                || path.basename(relativePath).startsWith(prefix)) {
                                shouldExclude = true;
                                break;
                            }
                        }
                        else {
                            if (relativePath === pattern
                                || relativePath.startsWith(`${pattern}/`)
                                || path.basename(relativePath) === pattern) {
                                shouldExclude = true;
                                break;
                            }
                        }
                    }
                    if (shouldExclude) {
                        continue;
                    }
                    const targetPath = path.join(this.shadowPath, relativePath);
                    const targetDir = path.dirname(targetPath);
                    await fs.ensureDir(targetDir);
                    await fs.copy(file, targetPath);
                }
            }
            // Fix ownership of copied files
            try {
                const currentUser = node_process_1.default.env.USER || node_process_1.default.env.USERNAME || 'claude';
                await execAsync(`chown -R ${currentUser}:${currentUser} ${this.shadowPath}`);
            }
            catch (error) {
                console.log(chalk_1.default.gray(' (Could not fix file ownership, continuing...)'));
            }
        }
        finally {
            // Clean up temp directory
            if (await fs.pathExists(tempCopyPath)) {
                await fs.remove(tempCopyPath);
            }
        }
    }
    async getChanges() {
        const { stdout: status } = await execAsync('git status --porcelain', {
            cwd: this.shadowPath,
        });
        if (!status.trim()) {
            return { hasChanges: false, summary: 'No changes detected' };
        }
        const lines = status.trim().split('\n');
        const modified = lines.filter(l => l.startsWith(' M') || l.startsWith('M ') || l.startsWith('MM')).length;
        const added = lines.filter(l => l.startsWith('??') || l.startsWith('A ') || l.startsWith('AM')).length;
        const deleted = lines.filter(l => l.startsWith(' D') || l.startsWith('D ')).length;
        const summary = `Modified: ${modified}, Added: ${added}, Deleted: ${deleted}`;
        return { hasChanges: true, summary };
    }
    async showDiff() {
        const { stdout } = await execAsync('git diff', { cwd: this.shadowPath });
        console.log(stdout);
    }
    async cleanup() {
        if (await fs.pathExists(this.shadowPath)) {
            try {
                // Try to force remove with rm -rf first
                await execAsync(`rm -rf "${this.shadowPath}"`);
                console.log(chalk_1.default.gray('🧹 Shadow repository cleaned up'));
            }
            catch (error) {
                // Fallback to fs.remove with retry logic
                let retries = 3;
                while (retries > 0) {
                    try {
                        await fs.remove(this.shadowPath);
                        console.log(chalk_1.default.gray('🧹 Shadow repository cleaned up'));
                        break;
                    }
                    catch (err) {
                        retries--;
                        if (retries === 0) {
                            console.error(chalk_1.default.yellow('⚠ Failed to cleanup shadow repository:'), err);
                        }
                        else {
                            // Wait a bit before retry
                            await new Promise(resolve => setTimeout(resolve, 100));
                        }
                    }
                }
            }
        }
        if (await fs.pathExists(this.rsyncExcludeFile)) {
            try {
                await fs.remove(this.rsyncExcludeFile);
            }
            catch (error) {
                // Ignore exclude file cleanup errors
            }
        }
    }
    getPath() {
        return this.shadowPath;
    }
}
exports.ShadowRepository = ShadowRepository;
//# sourceMappingURL=shadow-repository.js.map
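
For orientation, a minimal usage sketch of the ShadowRepository class above. The option names (sessionId, originalRepo, claudeBranch, containerRuntime) and the method calls are taken from the compiled code in this diff; the deep require path, the concrete values, and the container id are illustrative assumptions, not part of the package's documented API (the declared entry point is dist/index.js).

// Hypothetical example, not taken from the package README.
// Assumes the compiled file is reachable at its dist path.
const { ShadowRepository } = require('claude-code-runner/dist/git/shadow-repository');

async function demo() {
    const shadow = new ShadowRepository({
        sessionId: 'demo-session',          // shadow repo lands at /tmp/claude-shadows/<sessionId>
        originalRepo: '/path/to/your/repo', // local repo that gets cloned or copied
        claudeBranch: 'claude/demo',        // branch created if it differs from the source branch
        containerRuntime: 'docker',         // or 'podman'; falls back to getContainerRuntimeCmd() when omitted
    });

    await shadow.initialize();                          // clone/copy the repo into the shadow path
    await shadow.syncFromContainer('<container-id>');   // pull /workspace out of the container
    const { hasChanges, summary } = await shadow.getChanges();
    if (hasChanges)
        await shadow.showDiff();
    console.log(summary);
    await shadow.cleanup();                             // remove the shadow repo and rsync rules file
}

demo().catch(console.error);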