gptdiff 0.1.22__tar.gz → 0.1.27__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gptdiff-0.1.22 → gptdiff-0.1.27}/PKG-INFO +9 -7
- {gptdiff-0.1.22 → gptdiff-0.1.27}/README.md +9 -7
- gptdiff-0.1.27/gptdiff/applydiff.py +265 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff/gptdiff.py +150 -325
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff/gptpatch.py +19 -8
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff.egg-info/PKG-INFO +9 -7
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff.egg-info/SOURCES.txt +1 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/setup.py +1 -1
- gptdiff-0.1.27/tests/test_applydiff.py +171 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/tests/test_parse_diff_per_file.py +14 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/tests/test_smartapply.py +24 -5
- gptdiff-0.1.22/tests/test_applydiff.py +0 -80
- {gptdiff-0.1.22 → gptdiff-0.1.27}/LICENSE.txt +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff/__init__.py +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff.egg-info/dependency_links.txt +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff.egg-info/entry_points.txt +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff.egg-info/requires.txt +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/gptdiff.egg-info/top_level.txt +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/setup.cfg +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/tests/test_applydiff_edgecases.py +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/tests/test_diff_parse.py +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/tests/test_failing_case.py +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/tests/test_strip_bad_ouput.py +0 -0
- {gptdiff-0.1.22 → gptdiff-0.1.27}/tests/test_swallow_reasoning.py +0 -0
{gptdiff-0.1.22 → gptdiff-0.1.27}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: gptdiff
-Version: 0.1.22
+Version: 0.1.27
 Summary: A tool to generate and apply git diffs using LLMs
 Author: 255labs
 Classifier: License :: OSI Approved :: MIT License
@@ -82,13 +82,15 @@ done
 
 *Requires reasoning model*
 
-
+## Why Choose GPTDiff?
 
-- **
-- **
-- **Auto-
-- **
-- **
+- **Describe changes in plain English**
+- **AI gets your whole project**
+- **Auto-fixes conflicts**
+- **Keeps code functional**
+- **Fast setup, no fuss**
+- **You approve every change**
+- **Costs are upfront**
 
 ## Core Capabilities
 
{gptdiff-0.1.22 → gptdiff-0.1.27}/README.md

@@ -55,13 +55,15 @@ done
 
 *Requires reasoning model*
 
-
-
-- **
-- **
-- **Auto-
-- **
-- **
+## Why Choose GPTDiff?
+
+- **Describe changes in plain English**
+- **AI gets your whole project**
+- **Auto-fixes conflicts**
+- **Keeps code functional**
+- **Fast setup, no fuss**
+- **You approve every change**
+- **Costs are upfront**
 
 ## Core Capabilities
 
gptdiff-0.1.27/gptdiff/applydiff.py

@@ -0,0 +1,265 @@
+"""
+Module: applydiff
+
+Contains the function to apply unified git diffs to files on disk.
+"""
+
+from pathlib import Path
+import re
+import hashlib
+
+def apply_diff(project_dir, diff_text):
+    """
+    Applies a unified diff (as generated by git diff) to the files in project_dir
+    using pure Python (without calling the external 'patch' command).
+
+    Handles file modifications, new file creation, and file deletions.
+
+    Returns:
+        True if at least one file was modified (or deleted/created) as a result of the patch,
+        False otherwise.
+    """
+    from pathlib import Path
+    import re, hashlib
+
+    def file_hash(filepath):
+        h = hashlib.sha256()
+        with open(filepath, "rb") as f:
+            h.update(f.read())
+        return h.hexdigest()
+
+    def apply_patch_to_file(file_path, patch):
+        """
+        Applies a unified diff patch (for a single file) to file_path.
+
+        Returns True if the patch was applied successfully, False otherwise.
+        """
+        # Read the original file lines; if the file doesn't exist, treat it as empty.
+        if file_path.exists():
+            original_lines = file_path.read_text(encoding="utf8").splitlines(keepends=True)
+        else:
+            original_lines = []
+        new_lines = []
+        current_index = 0
+
+        patch_lines = patch.splitlines()
+        # Regex for a hunk header, e.g., @@ -3,7 +3,6 @@
+        hunk_header_re = re.compile(r"^@@(?: -(\d+)(?:,(\d+))?)?(?: \+(\d+)(?:,(\d+))?)? @@")
+        i = 0
+        while i < len(patch_lines):
+            line = patch_lines[i]
+            if line.lstrip().startswith("@@"):
+                if line.strip() == "@@":
+                    # Handle minimal hunk header without line numbers.
+                    orig_start = 1
+                else:
+                    m = hunk_header_re.match(line.strip())
+                    if not m:
+                        print("Invalid hunk header:", line)
+                        return False
+                    orig_start = int(m.group(1)) if m.group(1) is not None else 1
+                hunk_start_index = orig_start - 1  # diff headers are 1-indexed
+                if hunk_start_index > len(original_lines):
+                    print("Hunk start index beyond file length")
+                    return False
+                new_lines.extend(original_lines[current_index:hunk_start_index])
+                current_index = hunk_start_index
+                i += 1
+                # Process the hunk lines until the next hunk header.
+                while i < len(patch_lines) and not patch_lines[i].startswith("@@"):
+                    pline = patch_lines[i]
+                    if pline.startswith(" "):
+                        # Context line must match exactly.
+                        expected = pline[1:]
+                        if current_index >= len(original_lines):
+                            print("Context line expected but file ended")
+                            return False
+                        orig_line = original_lines[current_index].rstrip("\n")
+                        if orig_line != expected:
+                            print("Context line mismatch. Expected:", expected, "Got:", orig_line)
+                            return False
+                        new_lines.append(original_lines[current_index])
+                        current_index += 1
+                    elif pline.startswith("-"):
+                        # Removal line: verify and skip from original.
+                        expected = pline[1:]
+                        if current_index >= len(original_lines):
+                            print("Removal line expected but file ended")
+                            return False
+                        orig_line = original_lines[current_index].rstrip("\n")
+                        if orig_line != expected:
+                            print("Removal line mismatch. Expected:", expected, "Got:", orig_line)
+                            return False
+                        current_index += 1
+                    elif pline.startswith("+"):
+                        # Addition line: add to new_lines.
+                        new_lines.append(pline[1:] + "\n")
+                    else:
+                        print("Unexpected line in hunk:", pline)
+                        return False
+                    i += 1
+            else:
+                # Skip non-hunk header lines.
+                i += 1
+
+        # Append any remaining lines from the original file.
+        new_lines.extend(original_lines[current_index:])
+        # Ensure parent directories exist before writing the file.
+        file_path.parent.mkdir(parents=True, exist_ok=True)
+        # Write the new content back to the file.
+        file_path.write_text("".join(new_lines), encoding="utf8")
+        return True
+
+    # Parse the diff into per-file patches.
+    file_patches = parse_diff_per_file(diff_text)
+    if not file_patches:
+        print("No file patches found in diff.")
+        return False
+
+    # Record original file hashes.
+    original_hashes = {}
+    for file_path, _ in file_patches:
+        target_file = Path(project_dir) / file_path
+        if target_file.exists():
+            original_hashes[file_path] = file_hash(target_file)
+        else:
+            original_hashes[file_path] = None
+
+    any_change = False
+    # Process each file patch.
+    for file_path, patch in file_patches:
+        target_file = Path(project_dir) / file_path
+        if "+++ /dev/null" in patch:
+            # Deletion patch: delete the file if it exists.
+            if target_file.exists():
+                target_file.unlink()
+            if not target_file.exists():
+                any_change = True
+            else:
+                print(f"Failed to delete file: {target_file}")
+                return False
+        else:
+            # Modification or new file creation.
+            success = apply_patch_to_file(target_file, patch)
+            if not success:
+                print(f"Failed to apply patch to file: {target_file}")
+                return False
+
+    # Verify that at least one file was changed by comparing hashes.
+    for file_path, patch in file_patches:
+        target_file = Path(project_dir) / file_path
+        if "+++ /dev/null" in patch:
+            if not target_file.exists():
+                any_change = True
+            else:
+                print(f"Expected deletion but file still exists: {target_file}")
+                return False
+        else:
+            old_hash = original_hashes.get(file_path)
+            if target_file.exists():
+                new_hash = file_hash(target_file)
+                if old_hash != new_hash:
+                    any_change = True
+                else:
+                    print(f"No change detected in file: {target_file}")
+            else:
+                print(f"Expected modification or creation but file is missing: {target_file}")
+                return False
+
+    if not any_change:
+        print("Patch applied but no file modifications detected.")
+        return False
+    return True
+
+def parse_diff_per_file(diff_text):
+    """Parse unified diff text into individual file patches.
+
+    Splits a multi-file diff into per-file entries for processing. Handles:
+    - File creations (+++ /dev/null)
+    - File deletions (--- /dev/null)
+    - Standard modifications
+
+    Args:
+        diff_text: Unified diff string as generated by `git diff`
+
+    Returns:
+        List of tuples (file_path, patch) where:
+        - file_path: Relative path to modified file
+        - patch: Full diff fragment for this file
+
+    Note:
+        Uses 'b/' prefix detection from git diffs to determine target paths
+        This doesn't work all the time and needs to be revised with stronger models
+    """
+    header_re = re.compile(r'^(?:diff --git\s+)?(a/[^ ]+)\s+(b/[^ ]+)\s*$', re.MULTILINE)
+    lines = diff_text.splitlines()
+
+    # Check if any header line exists.
+    if not any(header_re.match(line) for line in lines):
+        # Fallback strategy: detect file headers starting with '--- a/' or '-- a/'
+        diffs = []
+        current_lines = []
+        current_file = None
+        deletion_mode = False
+        header_line_re = re.compile(r'^-{2,3}\s+a/(.+)$')
+
+        for line in lines:
+            if header_line_re.match(line):
+                if current_file is not None and current_lines:
+                    if deletion_mode and not any(l.startswith("+++ /dev/null") for l in current_lines):
+                        current_lines.append("+++ /dev/null")
+                    diffs.append((current_file, "\n".join(current_lines)))
+                current_lines = [line]
+                deletion_mode = False
+                file_from = header_line_re.match(line).group(1).strip()
+                current_file = file_from
+            else:
+                current_lines.append(line)
+                if "deleted file mode" in line:
+                    deletion_mode = True
+                if line.startswith("+++ "):
+                    parts = line.split()
+                    if len(parts) >= 2:
+                        file_to = parts[1].strip()
+                        if file_to != "/dev/null":
+                            current_file = file_to[2:] if (file_to.startswith("a/") or file_to.startswith("b/")) else file_to
+        if current_file is not None and current_lines:
+            if deletion_mode and not any(l.startswith("+++ ") for l in current_lines):
+                current_lines.append("+++ /dev/null")
+            diffs.append((current_file, "\n".join(current_lines)))
+        return diffs
+    else:
+        # Use header-based strategy.
+        diffs = []
+        current_lines = []
+        current_file = None
+        deletion_mode = False
+        for line in lines:
+            m = header_re.match(line)
+            if m:
+                if current_file is not None and current_lines:
+                    if deletion_mode and not any(l.startswith("+++ ") for l in current_lines):
+                        current_lines.append("+++ /dev/null")
+                    diffs.append((current_file, "\n".join(current_lines)))
+                current_lines = [line]
+                deletion_mode = False
+                file_from = m.group(1)  # e.g. "a/index.html"
+                file_to = m.group(2)    # e.g. "b/index.html"
+                current_file = file_to[2:] if file_to.startswith("b/") else file_to
+            else:
+                current_lines.append(line)
+                if "deleted file mode" in line:
+                    deletion_mode = True
+                if line.startswith("+++ "):
+                    parts = line.split()
+                    if len(parts) >= 2:
+                        file_to = parts[1].strip()
+                        if file_to != "/dev/null":
+                            current_file = file_to[2:] if (file_to.startswith("a/") or file_to.startswith("b/")) else file_to
+        if current_file is not None and current_lines:
+            if deletion_mode and not any(l.startswith("+++ ") for l in current_lines):
+                current_lines.append("+++ /dev/null")
+            diffs.append((current_file, "\n".join(current_lines)))
+        return diffs
+
+
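For orientation, here is a minimal usage sketch of the module added in 0.1.27. It is not part of the diff above: the diff file name and project directory are placeholders, and the imports simply assume the package layout shown in the file list (gptdiff/applydiff.py).

```python
# Minimal sketch (hypothetical paths, not part of the released package) showing
# how the two public functions from gptdiff/applydiff.py fit together.
from pathlib import Path

from gptdiff.applydiff import apply_diff, parse_diff_per_file

# Read a unified diff produced elsewhere (e.g. by `git diff`); "change.diff" is a placeholder.
diff_text = Path("change.diff").read_text(encoding="utf8")

# parse_diff_per_file splits a multi-file diff into (file_path, patch) tuples.
for file_path, patch in parse_diff_per_file(diff_text):
    print(f"{file_path}: {len(patch.splitlines())} patch lines")

# apply_diff applies each per-file patch under the given project directory and
# returns True only if at least one file was actually created, modified, or deleted.
if apply_diff("my_project", diff_text):  # "my_project" is a placeholder directory
    print("patch applied")
else:
    print("patch failed or made no changes")
```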