@mmmbuto/masix 0.4.0 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -14
- package/install.js +53 -27
- package/package.json +4 -3
- package/packages/plugin-base/codex-backend/0.1.4/SHA256SUMS +3 -0
- package/packages/plugin-base/codex-backend/0.1.4/codex-backend-android-aarch64-termux.pkg +0 -0
- package/packages/plugin-base/codex-backend/0.1.4/codex-backend-linux-x86_64.pkg +0 -0
- package/packages/plugin-base/codex-backend/0.1.4/codex-backend-macos-aarch64.pkg +0 -0
- package/packages/plugin-base/codex-backend/0.1.4/manifest.json +33 -0
- package/packages/plugin-base/codex-backend/CHANGELOG.md +17 -0
- package/packages/plugin-base/codex-backend/README.md +33 -0
- package/packages/plugin-base/codex-backend/source/Cargo.toml +25 -0
- package/packages/plugin-base/codex-backend/source/README-PACKAGE.txt +54 -0
- package/packages/plugin-base/codex-backend/source/plugin.manifest.json +103 -0
- package/packages/plugin-base/codex-backend/source/src/error.rs +60 -0
- package/packages/plugin-base/codex-backend/source/src/exec.rs +436 -0
- package/packages/plugin-base/codex-backend/source/src/http_backend.rs +1198 -0
- package/packages/plugin-base/codex-backend/source/src/lib.rs +328 -0
- package/packages/plugin-base/codex-backend/source/src/patch.rs +767 -0
- package/packages/plugin-base/codex-backend/source/src/policy.rs +297 -0
- package/packages/plugin-base/codex-backend/source/src/tools.rs +72 -0
- package/packages/plugin-base/codex-backend/source/src/workspace.rs +433 -0
- package/packages/plugin-base/codex-tools/0.1.3/SHA256SUMS +3 -0
- package/packages/plugin-base/codex-tools/0.1.3/codex-tools-android-aarch64-termux.pkg +0 -0
- package/packages/plugin-base/codex-tools/0.1.3/codex-tools-linux-x86_64.pkg +0 -0
- package/packages/plugin-base/codex-tools/0.1.3/codex-tools-macos-aarch64.pkg +0 -0
- package/packages/plugin-base/codex-tools/0.1.3/manifest.json +33 -0
- package/packages/plugin-base/codex-tools/CHANGELOG.md +17 -0
- package/packages/plugin-base/codex-tools/README.md +33 -0
- package/packages/plugin-base/codex-tools/source/Cargo.toml +23 -0
- package/packages/plugin-base/codex-tools/source/plugin.manifest.json +124 -0
- package/packages/plugin-base/codex-tools/source/src/main.rs +995 -0
- package/packages/plugin-base/discovery/0.2.4/SHA256SUMS +3 -0
- package/packages/plugin-base/discovery/0.2.4/discovery-android-aarch64-termux.pkg +0 -0
- package/packages/plugin-base/discovery/0.2.4/discovery-linux-x86_64.pkg +0 -0
- package/packages/plugin-base/discovery/0.2.4/discovery-macos-aarch64.pkg +0 -0
- package/packages/plugin-base/discovery/0.2.4/manifest.json +31 -0
- package/packages/plugin-base/discovery/CHANGELOG.md +17 -0
- package/packages/plugin-base/discovery/README.md +48 -0
- package/packages/plugin-base/discovery/source/Cargo.toml +14 -0
- package/packages/plugin-base/discovery/source/plugin.manifest.json +30 -0
- package/packages/plugin-base/discovery/source/src/main.rs +2570 -0
- package/prebuilt/masix +0 -0
|
@@ -0,0 +1,767 @@
|
|
|
1
|
+
//! Patch preview and apply with backup/rollback safety
|
|
2
|
+
//!
|
|
3
|
+
//! Provides secure patch operations:
|
|
4
|
+
//! - Preview diff without applying
|
|
5
|
+
//! - Apply with automatic backup
|
|
6
|
+
//! - Rollback capability
|
|
7
|
+
|
|
8
|
+
use crate::{CodingError, ResolvedWorkspace};
|
|
9
|
+
use std::collections::HashMap;
|
|
10
|
+
use std::path::{Path, PathBuf};
|
|
11
|
+
use std::time::{SystemTime, UNIX_EPOCH};
|
|
12
|
+
|
|
13
|
+
/// Backup metadata for rollback
///
/// Serialized as `manifest.json` inside the backup directory
/// (`<workspace>/.masix/backups/<id>/`) by `save_backup_manifest` and read
/// back by `rollback_patch`.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct BackupManifest {
    /// Unique backup ID (also the name of the backup directory)
    pub id: String,
    /// Timestamp of backup creation (seconds since the Unix epoch)
    pub timestamp: u64,
    /// Original files with their backup paths, keyed by workspace-relative path
    pub files: HashMap<String, BackupEntry>,
    /// Workspace root at time of backup
    pub workspace_root: String,
}
|
|
25
|
+
|
|
26
|
+
/// A single file captured in a backup manifest.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct BackupEntry {
    /// Relative path from workspace root
    pub original_path: String,
    /// Absolute path to backup file (left empty until the backup copy is
    /// actually written by `save_backup_manifest`)
    pub backup_path: String,
    /// Hash of original content (SHA-256 hex, for verification)
    pub content_hash: String,
    /// Whether the file existed before the patch; when false, rollback
    /// deletes the file instead of restoring it
    pub existed: bool,
}
|
|
37
|
+
|
|
38
|
+
/// Patch preview result
///
/// Produced by `preview_patch`; summarizes what a diff would do without
/// touching the filesystem.
#[derive(Debug, Clone, serde::Serialize, Default)]
pub struct PatchPreview {
    /// Files that would be modified (workspace-relative, deduplicated)
    pub files_modified: Vec<String>,
    /// Files that would be created
    pub files_created: Vec<String>,
    /// Files that would be deleted
    pub files_deleted: Vec<String>,
    /// Estimated lines changed, summed over all hunks
    pub lines_added: usize,
    pub lines_removed: usize,
    /// Non-fatal warnings (e.g. very large changes)
    pub warnings: Vec<String>,
    /// Whether the patch appears valid (true iff no errors were recorded)
    pub valid: bool,
    /// Parse/validation errors if invalid
    pub errors: Vec<String>,
}
|
|
57
|
+
|
|
58
|
+
/// Patch apply result
///
/// Returned by `apply_patch`. `success` is false if validation failed or if
/// any hunk failed to apply; note that earlier hunks may already have been
/// written when a later one fails.
#[derive(Debug, Clone, serde::Serialize)]
pub struct PatchApplyResult {
    /// Whether the patch was applied successfully
    pub success: bool,
    /// Backup ID for rollback (None when no backup was requested)
    pub backup_id: Option<String>,
    /// Files actually modified
    pub files_modified: Vec<String>,
    /// Files actually created
    pub files_created: Vec<String>,
    /// Errors encountered while applying individual hunks
    pub errors: Vec<String>,
}
|
|
72
|
+
|
|
73
|
+
/// Patch preview tool
|
|
74
|
+
pub fn preview_patch(
|
|
75
|
+
diff: &str,
|
|
76
|
+
workspace: &ResolvedWorkspace,
|
|
77
|
+
) -> Result<PatchPreview, CodingError> {
|
|
78
|
+
let mut preview = PatchPreview::default();
|
|
79
|
+
|
|
80
|
+
// Parse unified diff format
|
|
81
|
+
let hunks = parse_diff_hunks(diff);
|
|
82
|
+
|
|
83
|
+
for hunk in hunks {
|
|
84
|
+
// Build full path and validate it's within workspace
|
|
85
|
+
let full_path = if hunk.file_path.is_absolute() {
|
|
86
|
+
hunk.file_path.clone()
|
|
87
|
+
} else {
|
|
88
|
+
workspace.root.join(&hunk.file_path)
|
|
89
|
+
};
|
|
90
|
+
|
|
91
|
+
if let Err(e) = workspace.is_path_allowed(&full_path) {
|
|
92
|
+
preview.errors.push(format!(
|
|
93
|
+
"Path '{}' violates workspace bounds: {}",
|
|
94
|
+
hunk.file_path.display(),
|
|
95
|
+
e
|
|
96
|
+
));
|
|
97
|
+
preview.valid = false;
|
|
98
|
+
continue;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
// Categorize the change
|
|
102
|
+
let relative = hunk
|
|
103
|
+
.file_path
|
|
104
|
+
.strip_prefix(&workspace.root)
|
|
105
|
+
.unwrap_or(&hunk.file_path)
|
|
106
|
+
.to_string_lossy()
|
|
107
|
+
.to_string();
|
|
108
|
+
|
|
109
|
+
if hunk.is_creation {
|
|
110
|
+
if !preview.files_created.contains(&relative) {
|
|
111
|
+
preview.files_created.push(relative);
|
|
112
|
+
}
|
|
113
|
+
} else if hunk.is_deletion {
|
|
114
|
+
if !preview.files_deleted.contains(&relative) {
|
|
115
|
+
preview.files_deleted.push(relative);
|
|
116
|
+
}
|
|
117
|
+
} else {
|
|
118
|
+
if !preview.files_modified.contains(&relative) {
|
|
119
|
+
preview.files_modified.push(relative);
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
preview.lines_added += hunk.lines_added;
|
|
124
|
+
preview.lines_removed += hunk.lines_removed;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
// Add warnings for large changes
|
|
128
|
+
if preview.lines_added + preview.lines_removed > 1000 {
|
|
129
|
+
preview
|
|
130
|
+
.warnings
|
|
131
|
+
.push("Large patch: more than 1000 lines changed".into());
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
if preview.files_modified.len() + preview.files_created.len() + preview.files_deleted.len() > 50
|
|
135
|
+
{
|
|
136
|
+
preview
|
|
137
|
+
.warnings
|
|
138
|
+
.push("Many files affected: more than 50 files".into());
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
preview.valid = preview.errors.is_empty();
|
|
142
|
+
Ok(preview)
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
/// Apply a patch with backup
|
|
146
|
+
pub fn apply_patch(
|
|
147
|
+
diff: &str,
|
|
148
|
+
workspace: &ResolvedWorkspace,
|
|
149
|
+
create_backup: bool,
|
|
150
|
+
) -> Result<PatchApplyResult, CodingError> {
|
|
151
|
+
// First preview to validate
|
|
152
|
+
let preview = preview_patch(diff, workspace)?;
|
|
153
|
+
if !preview.valid {
|
|
154
|
+
return Ok(PatchApplyResult {
|
|
155
|
+
success: false,
|
|
156
|
+
backup_id: None,
|
|
157
|
+
files_modified: Vec::new(),
|
|
158
|
+
files_created: Vec::new(),
|
|
159
|
+
errors: preview.errors,
|
|
160
|
+
});
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
// Create backup manifest if requested
|
|
164
|
+
let backup_id = if create_backup {
|
|
165
|
+
let id = generate_backup_id();
|
|
166
|
+
let manifest = create_backup_manifest(&preview, workspace)?;
|
|
167
|
+
save_backup_manifest(&id, manifest, workspace)?;
|
|
168
|
+
Some(id)
|
|
169
|
+
} else {
|
|
170
|
+
None
|
|
171
|
+
};
|
|
172
|
+
|
|
173
|
+
let mut result = PatchApplyResult {
|
|
174
|
+
success: true,
|
|
175
|
+
backup_id,
|
|
176
|
+
files_modified: Vec::new(),
|
|
177
|
+
files_created: Vec::new(),
|
|
178
|
+
errors: Vec::new(),
|
|
179
|
+
};
|
|
180
|
+
|
|
181
|
+
// Parse and apply hunks
|
|
182
|
+
let hunks = parse_diff_hunks(diff);
|
|
183
|
+
|
|
184
|
+
for hunk in hunks {
|
|
185
|
+
if let Err(e) = apply_hunk(&hunk, workspace) {
|
|
186
|
+
result.errors.push(format!(
|
|
187
|
+
"Failed to apply to {}: {}",
|
|
188
|
+
hunk.file_path.display(),
|
|
189
|
+
e
|
|
190
|
+
));
|
|
191
|
+
result.success = false;
|
|
192
|
+
} else {
|
|
193
|
+
let relative = hunk
|
|
194
|
+
.file_path
|
|
195
|
+
.strip_prefix(&workspace.root)
|
|
196
|
+
.unwrap_or(&hunk.file_path)
|
|
197
|
+
.to_string_lossy()
|
|
198
|
+
.to_string();
|
|
199
|
+
|
|
200
|
+
if hunk.is_creation {
|
|
201
|
+
if !result.files_created.contains(&relative) {
|
|
202
|
+
result.files_created.push(relative);
|
|
203
|
+
}
|
|
204
|
+
} else if !hunk.is_deletion && !result.files_modified.contains(&relative) {
|
|
205
|
+
result.files_modified.push(relative);
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
Ok(result)
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
/// Rollback a patch using backup
///
/// Restores every file recorded in the backup manifest for `backup_id`:
/// files that existed before the patch are copied back from their backup
/// copies; files the patch created are deleted. On success the backup
/// directory is removed (best effort) and the list of restored paths is
/// returned.
///
/// # Errors
/// Returns `CodingError::IoError` if the manifest is missing or corrupt,
/// a backup copy cannot be found, or a filesystem operation fails.
/// NOTE(review): an error partway through leaves the workspace partially
/// restored, but the backup stays on disk so the rollback can be retried —
/// confirm this is the intended recovery behavior. `content_hash` is
/// recorded in the manifest but not verified here.
pub fn rollback_patch(
    backup_id: &str,
    workspace: &ResolvedWorkspace,
) -> Result<Vec<String>, CodingError> {
    let manifest = load_backup_manifest(backup_id, workspace)?;
    let mut restored = Vec::new();

    for (relative, entry) in &manifest.files {
        let original_path = workspace.root.join(relative);

        if entry.existed {
            // Validate backup path is not empty (guards against a manifest
            // written before the backup copy was recorded)
            if entry.backup_path.is_empty() {
                return Err(CodingError::IoError(format!(
                    "Backup path is empty for file '{}' — manifest may be corrupted",
                    relative
                )));
            }
            // Restore from backup
            if Path::new(&entry.backup_path).exists() {
                std::fs::copy(&entry.backup_path, &original_path).map_err(|e| {
                    CodingError::IoError(format!("Failed to restore {}: {}", relative, e))
                })?;
                restored.push(relative.clone());
            } else {
                return Err(CodingError::IoError(format!(
                    "Backup file not found at '{}' for '{}'",
                    entry.backup_path, relative
                )));
            }
        } else {
            // File was created by patch, remove it
            if original_path.exists() {
                std::fs::remove_file(&original_path).map_err(|e| {
                    CodingError::IoError(format!("Failed to remove {}: {}", relative, e))
                })?;
            }
            restored.push(format!("{} (removed)", relative));
        }
    }

    // Clean up backup directory; failure to delete it is non-fatal.
    let backup_dir = workspace
        .root
        .join(".masix")
        .join("backups")
        .join(backup_id);
    if backup_dir.exists() {
        let _ = std::fs::remove_dir_all(&backup_dir);
    }

    Ok(restored)
}
|
|
267
|
+
|
|
268
|
+
// Internal types and functions

/// One file's worth of parsed diff: classification flags, line counters,
/// and the raw hunk body (`@@` headers plus `+`/`-`/context lines).
struct DiffHunk {
    /// Target path. For creations/modifications this is the `+++` path;
    /// for deletions it is the `--- ` path.
    file_path: PathBuf,
    /// True when the old side of the header pair was `/dev/null`.
    is_creation: bool,
    /// True when the new side of the header pair was `/dev/null`.
    is_deletion: bool,
    lines_added: usize,
    lines_removed: usize,
    /// Accumulated hunk lines, each newline-terminated.
    content: String,
}

/// Split a unified diff into one [`DiffHunk`] per file section.
///
/// Bug fix: deletion sections (`+++ /dev/null`) previously produced no hunk
/// at all — only the `+++` path was ever used as the target, and `/dev/null`
/// was rejected by the path check, so `current_file` was never set and
/// `is_deletion` was unreachable. The path from the `--- ` header is now
/// remembered so a deletion hunk is emitted against that file with
/// `is_deletion: true`.
fn parse_diff_hunks(diff: &str) -> Vec<DiffHunk> {
    let mut hunks = Vec::new();
    let mut current_file: Option<PathBuf> = None;
    let mut current_is_creation = false;
    let mut current_is_deletion = false;
    let mut current_lines_added = 0;
    let mut current_lines_removed = 0;
    let mut current_content = String::new();
    let mut saw_dev_null_in_old = false;
    // Path named by the most recent "--- " header (None when it was /dev/null).
    let mut old_file_path: Option<PathBuf> = None;

    for line in diff.lines() {
        if line.starts_with("--- ") {
            // Old-side header: note whether it is /dev/null (creation) and,
            // if not, remember which file it names (needed for deletions).
            let path_part = line.split_once(' ').map(|(_, p)| p.trim()).unwrap_or("");
            saw_dev_null_in_old = path_part.contains("/dev/null");
            old_file_path = if saw_dev_null_in_old {
                None
            } else {
                let stripped = path_part
                    .strip_prefix("a/")
                    .or_else(|| path_part.strip_prefix("b/"))
                    .unwrap_or(path_part);
                if stripped.is_empty() {
                    None
                } else {
                    Some(PathBuf::from(stripped))
                }
            };
        } else if line.starts_with("+++ ") {
            // New-side header - finalize the previous file's hunk, if any.
            if let Some(file) = current_file.take() {
                hunks.push(DiffHunk {
                    file_path: file,
                    is_creation: current_is_creation,
                    is_deletion: current_is_deletion,
                    lines_added: current_lines_added,
                    lines_removed: current_lines_removed,
                    content: current_content.clone(),
                });
            }

            let path_part = line.split_once(' ').map(|(_, p)| p.trim()).unwrap_or("");
            let path = path_part
                .strip_prefix("b/")
                .or_else(|| path_part.strip_prefix("a/"))
                .unwrap_or(path_part);

            if path == "/dev/null" {
                // Deletion: the target file is the one named by "--- ".
                if let Some(old) = old_file_path.take() {
                    current_file = Some(old);
                    current_is_creation = false;
                    current_is_deletion = true;
                }
            } else if !path.is_empty() {
                current_file = Some(PathBuf::from(path));
                current_is_creation = saw_dev_null_in_old;
                current_is_deletion = false;
            }

            current_lines_added = 0;
            current_lines_removed = 0;
            current_content.clear();
            saw_dev_null_in_old = false;
            old_file_path = None;
        } else if line.starts_with('+') && !line.starts_with("+++") {
            current_lines_added += 1;
            current_content.push_str(line);
            current_content.push('\n');
        } else if line.starts_with('-') && !line.starts_with("---") {
            current_lines_removed += 1;
            current_content.push_str(line);
            current_content.push('\n');
        } else if line.starts_with("@@ ") {
            // Hunk header - kept so apply_line_patch can position itself.
            current_content.push_str(line);
            current_content.push('\n');
        } else if line.starts_with(' ') {
            // Context line — needed for apply_line_patch positioning
            current_content.push_str(line);
            current_content.push('\n');
        }
    }

    // Don't forget the last file
    if let Some(file) = current_file {
        hunks.push(DiffHunk {
            file_path: file,
            is_creation: current_is_creation,
            is_deletion: current_is_deletion,
            lines_added: current_lines_added,
            lines_removed: current_lines_removed,
            content: current_content,
        });
    }

    hunks
}
|
|
356
|
+
|
|
357
|
+
/// Apply a single parsed `DiffHunk` to the filesystem.
///
/// - Creation: ensures the parent directory exists, then writes the `+`
///   lines of the hunk as the new file's content (an existing file at that
///   path is silently overwritten).
/// - Deletion: removes the target file if present.
/// - Modification: reads the existing file, runs the line-based patcher,
///   and writes the result back.
///
/// NOTE(review): `Path::join` with an absolute `hunk.file_path` replaces
/// `workspace.root` entirely; this relies on `preview_patch` having
/// validated paths first — confirm apply is never reached unvalidated.
fn apply_hunk(hunk: &DiffHunk, workspace: &ResolvedWorkspace) -> Result<(), CodingError> {
    let target_path = workspace.root.join(&hunk.file_path);

    if hunk.is_creation {
        // Create new file
        if let Some(parent) = target_path.parent() {
            std::fs::create_dir_all(parent)
                .map_err(|e| CodingError::IoError(format!("Failed to create dir: {}", e)))?;
        }

        // Extract content from diff (lines starting with +)
        let content: String = hunk
            .content
            .lines()
            .filter(|l| l.starts_with('+') && !l.starts_with("+++"))
            .map(|l| l.strip_prefix('+').unwrap_or(l))
            .collect::<Vec<_>>()
            .join("\n");

        std::fs::write(&target_path, content).map_err(|e| {
            CodingError::IoError(format!("Failed to write {}: {}", target_path.display(), e))
        })?;
    } else if hunk.is_deletion {
        // Delete file (missing file is treated as already deleted)
        if target_path.exists() {
            std::fs::remove_file(&target_path).map_err(|e| {
                CodingError::IoError(format!("Failed to delete {}: {}", target_path.display(), e))
            })?;
        }
    } else {
        // Apply patch to existing file
        if !target_path.exists() {
            return Err(CodingError::ToolError(format!(
                "File {} does not exist",
                hunk.file_path.display()
            )));
        }

        let existing = std::fs::read_to_string(&target_path).map_err(|e| {
            CodingError::IoError(format!("Failed to read {}: {}", target_path.display(), e))
        })?;

        // Simple line-based patch application
        let result = apply_line_patch(&existing, &hunk.content)?;

        std::fs::write(&target_path, result).map_err(|e| {
            CodingError::IoError(format!("Failed to write {}: {}", target_path.display(), e))
        })?;
    }

    Ok(())
}
|
|
409
|
+
|
|
410
|
+
fn apply_line_patch(original: &str, patch: &str) -> Result<String, CodingError> {
|
|
411
|
+
let original_lines: Vec<&str> = original.lines().collect();
|
|
412
|
+
let mut result_lines: Vec<String> = Vec::new();
|
|
413
|
+
let mut current_pos: usize = 0;
|
|
414
|
+
|
|
415
|
+
for line in patch.lines() {
|
|
416
|
+
if line.starts_with("@@ ") {
|
|
417
|
+
if let Some(old_start) = parse_hunk_old_start(line) {
|
|
418
|
+
let target = if old_start > 0 { old_start - 1 } else { 0 };
|
|
419
|
+
// Copy unchanged lines up to the hunk start
|
|
420
|
+
while current_pos < target && current_pos < original_lines.len() {
|
|
421
|
+
result_lines.push(original_lines[current_pos].to_string());
|
|
422
|
+
current_pos += 1;
|
|
423
|
+
}
|
|
424
|
+
}
|
|
425
|
+
} else if line.starts_with('+') && !line.starts_with("+++") {
|
|
426
|
+
result_lines.push(line.strip_prefix('+').unwrap_or(line).to_string());
|
|
427
|
+
} else if line.starts_with('-') && !line.starts_with("---") {
|
|
428
|
+
current_pos += 1;
|
|
429
|
+
} else if line.starts_with(' ') {
|
|
430
|
+
if current_pos < original_lines.len() {
|
|
431
|
+
result_lines.push(original_lines[current_pos].to_string());
|
|
432
|
+
current_pos += 1;
|
|
433
|
+
}
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
// Append remaining original lines after last hunk
|
|
438
|
+
while current_pos < original_lines.len() {
|
|
439
|
+
result_lines.push(original_lines[current_pos].to_string());
|
|
440
|
+
current_pos += 1;
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
let mut result = result_lines.join("\n");
|
|
444
|
+
if original.ends_with('\n') {
|
|
445
|
+
result.push('\n');
|
|
446
|
+
}
|
|
447
|
+
Ok(result)
|
|
448
|
+
}
|
|
449
|
+
|
|
450
|
+
/// Extract the 1-based old-file start line from a `@@ -a,b +c,d @@` header.
/// Returns `None` when the header does not match the expected shape.
fn parse_hunk_old_start(header: &str) -> Option<usize> {
    let rest = header.strip_prefix("@@ -")?;
    let end = rest
        .find(|c: char| c == ',' || c == ' ')
        .unwrap_or(rest.len());
    rest[..end].parse().ok()
}
|
|
455
|
+
|
|
456
|
+
/// Produce a backup identifier of the form `backup_<millis-since-epoch>`.
/// A pre-epoch clock degrades to `backup_0` rather than panicking.
fn generate_backup_id() -> String {
    let millis = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_millis())
        .unwrap_or(0);
    format!("backup_{}", millis)
}
|
|
462
|
+
|
|
463
|
+
fn create_backup_manifest(
|
|
464
|
+
preview: &PatchPreview,
|
|
465
|
+
workspace: &ResolvedWorkspace,
|
|
466
|
+
) -> Result<BackupManifest, CodingError> {
|
|
467
|
+
let now = SystemTime::now()
|
|
468
|
+
.duration_since(UNIX_EPOCH)
|
|
469
|
+
.unwrap_or_default()
|
|
470
|
+
.as_secs();
|
|
471
|
+
|
|
472
|
+
let mut files = HashMap::new();
|
|
473
|
+
|
|
474
|
+
// Backup files that will be modified
|
|
475
|
+
for relative in preview
|
|
476
|
+
.files_modified
|
|
477
|
+
.iter()
|
|
478
|
+
.chain(preview.files_deleted.iter())
|
|
479
|
+
{
|
|
480
|
+
let path = workspace.root.join(relative);
|
|
481
|
+
if path.exists() {
|
|
482
|
+
let content = std::fs::read(&path)
|
|
483
|
+
.map_err(|e| CodingError::IoError(format!("Failed to read {}: {}", relative, e)))?;
|
|
484
|
+
let hash = sha256_hash(&content);
|
|
485
|
+
|
|
486
|
+
files.insert(
|
|
487
|
+
relative.clone(),
|
|
488
|
+
BackupEntry {
|
|
489
|
+
original_path: relative.clone(),
|
|
490
|
+
backup_path: String::new(), // Will be set when saving
|
|
491
|
+
content_hash: hash,
|
|
492
|
+
existed: true,
|
|
493
|
+
},
|
|
494
|
+
);
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
|
|
498
|
+
// Mark files that will be created (no backup needed, but track for rollback)
|
|
499
|
+
for relative in &preview.files_created {
|
|
500
|
+
files.insert(
|
|
501
|
+
relative.clone(),
|
|
502
|
+
BackupEntry {
|
|
503
|
+
original_path: relative.clone(),
|
|
504
|
+
backup_path: String::new(),
|
|
505
|
+
content_hash: String::new(),
|
|
506
|
+
existed: false,
|
|
507
|
+
},
|
|
508
|
+
);
|
|
509
|
+
}
|
|
510
|
+
|
|
511
|
+
Ok(BackupManifest {
|
|
512
|
+
id: String::new(), // Will be set when saving
|
|
513
|
+
timestamp: now,
|
|
514
|
+
files,
|
|
515
|
+
workspace_root: workspace.root.to_string_lossy().to_string(),
|
|
516
|
+
})
|
|
517
|
+
}
|
|
518
|
+
|
|
519
|
+
/// Persist backup copies and the manifest for a pending patch.
///
/// Creates `<workspace>/.masix/backups/<id>/`, copies each pre-existing
/// file into it (flattening the relative path by replacing `/` with `_`),
/// records each copy's absolute path on its entry, and finally writes the
/// manifest itself as `manifest.json` in the same directory.
///
/// NOTE(review): the `/`→`_` flattening can collide (e.g. `a/b_c` and
/// `a_b/c` both map to `a_b_c`), and `\` separators are not flattened —
/// confirm whether those cases can occur in practice.
fn save_backup_manifest(
    id: &str,
    mut manifest: BackupManifest,
    workspace: &ResolvedWorkspace,
) -> Result<(), CodingError> {
    let backup_dir = workspace.root.join(".masix").join("backups").join(id);
    std::fs::create_dir_all(&backup_dir)
        .map_err(|e| CodingError::IoError(format!("Failed to create backup dir: {}", e)))?;

    manifest.id = id.to_string();

    // Save backup files; entries whose original vanished keep an empty
    // backup_path (rollback rejects those as corrupt).
    for (relative, entry) in manifest.files.iter_mut() {
        if entry.existed {
            let original_path = workspace.root.join(relative);
            let backup_path = backup_dir.join(relative.replace('/', "_"));

            if original_path.exists() {
                std::fs::copy(&original_path, &backup_path).map_err(|e| {
                    CodingError::IoError(format!("Failed to backup {}: {}", relative, e))
                })?;
                entry.backup_path = backup_path.to_string_lossy().to_string();
            }
        }
    }

    // Save manifest
    let manifest_path = backup_dir.join("manifest.json");
    let manifest_json = serde_json::to_string_pretty(&manifest)
        .map_err(|e| CodingError::IoError(format!("Failed to serialize manifest: {}", e)))?;

    std::fs::write(&manifest_path, manifest_json)
        .map_err(|e| CodingError::IoError(format!("Failed to write manifest: {}", e)))?;

    Ok(())
}
|
|
555
|
+
|
|
556
|
+
fn load_backup_manifest(
|
|
557
|
+
id: &str,
|
|
558
|
+
workspace: &ResolvedWorkspace,
|
|
559
|
+
) -> Result<BackupManifest, CodingError> {
|
|
560
|
+
let manifest_path = workspace
|
|
561
|
+
.root
|
|
562
|
+
.join(".masix")
|
|
563
|
+
.join("backups")
|
|
564
|
+
.join(id)
|
|
565
|
+
.join("manifest.json");
|
|
566
|
+
|
|
567
|
+
if !manifest_path.exists() {
|
|
568
|
+
return Err(CodingError::IoError(format!("Backup {} not found", id)));
|
|
569
|
+
}
|
|
570
|
+
|
|
571
|
+
let content = std::fs::read_to_string(&manifest_path)
|
|
572
|
+
.map_err(|e| CodingError::IoError(format!("Failed to read manifest: {}", e)))?;
|
|
573
|
+
|
|
574
|
+
serde_json::from_str(&content)
|
|
575
|
+
.map_err(|e| CodingError::IoError(format!("Failed to parse manifest: {}", e)))
|
|
576
|
+
}
|
|
577
|
+
|
|
578
|
+
fn sha256_hash(data: &[u8]) -> String {
|
|
579
|
+
use sha2::{Digest, Sha256};
|
|
580
|
+
let mut hasher = Sha256::new();
|
|
581
|
+
hasher.update(data);
|
|
582
|
+
format!("{:x}", hasher.finalize())
|
|
583
|
+
}
|
|
584
|
+
|
|
585
|
+
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{ExecutionProfile, WorkspaceSource};
    use std::fs;

    // Build a throwaway workspace rooted under the OS temp dir.
    // Note: the directory is shared by all tests in this module.
    fn test_workspace() -> ResolvedWorkspace {
        let temp = std::env::temp_dir().join("masix_patch_test");
        let _ = fs::create_dir_all(&temp);
        ResolvedWorkspace {
            root: temp,
            source: WorkspaceSource::DefaultRoot,
            profile: ExecutionProfile::WorkspaceLocked,
            allowed_roots: Vec::new(),
        }
    }

    // A one-line modification should be counted as modified, +1/-1.
    #[test]
    fn test_preview_simple_diff() {
        let diff = r#"--- a/test.txt
+++ b/test.txt
@@ -1 +1 @@
-hello
+world
"#;
        let workspace = test_workspace();
        let preview = preview_patch(diff, &workspace).unwrap();

        assert!(preview.valid, "Errors: {:?}", preview.errors);
        assert_eq!(preview.files_modified.len(), 1);
        assert_eq!(preview.lines_added, 1);
        assert_eq!(preview.lines_removed, 1);
    }

    // "--- /dev/null" marks a creation; all lines count as added.
    #[test]
    fn test_preview_new_file() {
        let diff = r#"--- /dev/null
+++ b/new_file.txt
@@ -0,0 +1,3 @@
+line1
+line2
+line3
"#;
        let workspace = test_workspace();
        let preview = preview_patch(diff, &workspace).unwrap();

        assert!(preview.valid, "Errors: {:?}", preview.errors);
        assert_eq!(preview.files_created.len(), 1);
        assert_eq!(preview.lines_added, 3);
        assert_eq!(preview.lines_removed, 0);
    }

    // An empty diff is valid and touches nothing.
    #[test]
    fn test_preview_empty_diff() {
        let diff = "";
        let workspace = test_workspace();
        let preview = preview_patch(diff, &workspace).unwrap();

        assert!(preview.valid);
        assert!(preview.files_modified.is_empty());
        assert!(preview.files_created.is_empty());
    }

    // The "@@ -5" header must position the cursor so only lines 5-7 change.
    #[test]
    fn test_apply_patch_with_hunk_offset() {
        // 10-line file, patch modifies lines 5-7
        let original = "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n";
        let patch = "@@ -5,3 +5,3 @@\n-line5\n-line6\n-line7\n+LINE5\n+LINE6\n+LINE7\n";

        let result = apply_line_patch(original, patch).unwrap();
        let lines: Vec<&str> = result.lines().collect();
        assert_eq!(lines[0], "line1");
        assert_eq!(lines[3], "line4");
        assert_eq!(lines[4], "LINE5");
        assert_eq!(lines[5], "LINE6");
        assert_eq!(lines[6], "LINE7");
        assert_eq!(lines[7], "line8");
        assert_eq!(lines[9], "line10");
        assert_eq!(lines.len(), 10);
    }

    // Context lines (" aaa") must copy through and advance the cursor.
    #[test]
    fn test_apply_patch_with_context_lines() {
        let original = "aaa\nbbb\nccc\nddd\n";
        let patch = "@@ -1,4 +1,4 @@\n aaa\n-bbb\n+BBB\n ccc\n ddd\n";

        let result = apply_line_patch(original, patch).unwrap();
        let lines: Vec<&str> = result.lines().collect();
        assert_eq!(lines[0], "aaa");
        assert_eq!(lines[1], "BBB");
        assert_eq!(lines[2], "ccc");
        assert_eq!(lines[3], "ddd");
    }

    // Two disjoint hunks; untouched lines between and after must survive.
    #[test]
    fn test_apply_patch_multiple_hunks() {
        let original = "a\nb\nc\nd\ne\nf\ng\nh\ni\nj\n";
        let patch = "@@ -2,1 +2,1 @@\n-b\n+B\n@@ -8,1 +8,1 @@\n-h\n+H\n";

        let result = apply_line_patch(original, patch).unwrap();
        let lines: Vec<&str> = result.lines().collect();
        assert_eq!(lines[0], "a");
        assert_eq!(lines[1], "B"); // replaced
        assert_eq!(lines[2], "c");
        assert_eq!(lines[7], "H"); // replaced
        assert_eq!(lines[9], "j");
        assert_eq!(lines.len(), 10);
    }

    // End-to-end: apply_patch creates a brand-new file (parent dir included)
    // inside the workspace without a backup.
    #[test]
    fn test_apply_patch_new_file_e2e() {
        let workspace = test_workspace();
        let test_subdir = workspace.root.join("e2e_new_file_test");
        let _ = fs::create_dir_all(&test_subdir);

        let diff = format!(
            "--- /dev/null\n+++ b/e2e_new_file_test/newfile.txt\n@@ -0,0 +1,3 @@\n+hello\n+world\n+end\n"
        );

        let result = apply_patch(&diff, &workspace, false).unwrap();
        assert!(result.success, "Errors: {:?}", result.errors);
        assert_eq!(result.files_created.len(), 1);

        let content = fs::read_to_string(test_subdir.join("newfile.txt")).unwrap();
        assert!(content.contains("hello"));
        assert!(content.contains("world"));
        assert!(content.contains("end"));

        // Cleanup
        let _ = fs::remove_dir_all(&test_subdir);
    }

    // Header parsing: with and without the ",count" part, and non-headers.
    #[test]
    fn test_parse_hunk_old_start() {
        assert_eq!(parse_hunk_old_start("@@ -1,4 +1,5 @@"), Some(1));
        assert_eq!(parse_hunk_old_start("@@ -25,3 +26,4 @@"), Some(25));
        assert_eq!(parse_hunk_old_start("@@ -100 +100 @@"), Some(100));
        assert_eq!(parse_hunk_old_start("not a hunk header"), None);
    }

    // A manifest entry with existed=true but no backup_path must be rejected
    // as corrupt rather than silently skipped.
    #[test]
    fn test_rollback_empty_backup_path() {
        let workspace = test_workspace();
        let backup_dir = workspace
            .root
            .join(".masix")
            .join("backups")
            .join("test_empty_bp");
        let _ = fs::create_dir_all(&backup_dir);

        // Create a manifest with an empty backup_path for an existing file
        let mut files = HashMap::new();
        files.insert(
            "some_file.txt".to_string(),
            BackupEntry {
                original_path: "some_file.txt".to_string(),
                backup_path: String::new(), // BUG-8: empty backup_path
                content_hash: "abc".to_string(),
                existed: true,
            },
        );
        let manifest = BackupManifest {
            id: "test_empty_bp".to_string(),
            timestamp: 0,
            files,
            workspace_root: workspace.root.to_string_lossy().to_string(),
        };
        let manifest_json = serde_json::to_string_pretty(&manifest).unwrap();
        fs::write(backup_dir.join("manifest.json"), manifest_json).unwrap();

        let result = rollback_patch("test_empty_bp", &workspace);
        assert!(result.is_err(), "Expected error for empty backup_path");
        let err_msg = format!("{}", result.unwrap_err());
        assert!(
            err_msg.contains("empty") || err_msg.contains("Backup path"),
            "Error should mention empty backup: {}",
            err_msg
        );

        // Cleanup
        let _ = fs::remove_dir_all(&backup_dir);
    }
}
|