portacode 0.3.16.dev10__py3-none-any.whl → 1.4.11.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of portacode might be problematic. Click here for more details.

Files changed (92) hide show
  1. portacode/_version.py +16 -3
  2. portacode/cli.py +143 -17
  3. portacode/connection/client.py +149 -10
  4. portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +928 -42
  5. portacode/connection/handlers/__init__.py +34 -5
  6. portacode/connection/handlers/base.py +78 -16
  7. portacode/connection/handlers/chunked_content.py +244 -0
  8. portacode/connection/handlers/diff_handlers.py +603 -0
  9. portacode/connection/handlers/file_handlers.py +902 -17
  10. portacode/connection/handlers/project_aware_file_handlers.py +226 -0
  11. portacode/connection/handlers/project_state/README.md +312 -0
  12. portacode/connection/handlers/project_state/__init__.py +92 -0
  13. portacode/connection/handlers/project_state/file_system_watcher.py +179 -0
  14. portacode/connection/handlers/project_state/git_manager.py +1502 -0
  15. portacode/connection/handlers/project_state/handlers.py +875 -0
  16. portacode/connection/handlers/project_state/manager.py +1331 -0
  17. portacode/connection/handlers/project_state/models.py +108 -0
  18. portacode/connection/handlers/project_state/utils.py +50 -0
  19. portacode/connection/handlers/project_state_handlers.py +45 -948
  20. portacode/connection/handlers/proxmox_infra.py +361 -0
  21. portacode/connection/handlers/registry.py +15 -4
  22. portacode/connection/handlers/session.py +483 -32
  23. portacode/connection/handlers/system_handlers.py +147 -8
  24. portacode/connection/handlers/tab_factory.py +389 -0
  25. portacode/connection/handlers/terminal_handlers.py +21 -8
  26. portacode/connection/handlers/update_handler.py +61 -0
  27. portacode/connection/multiplex.py +60 -2
  28. portacode/connection/terminal.py +256 -17
  29. portacode/keypair.py +63 -1
  30. portacode/link_capture/__init__.py +38 -0
  31. portacode/link_capture/__pycache__/__init__.cpython-311.pyc +0 -0
  32. portacode/link_capture/bin/__pycache__/link_capture_wrapper.cpython-311.pyc +0 -0
  33. portacode/link_capture/bin/elinks +3 -0
  34. portacode/link_capture/bin/gio-open +3 -0
  35. portacode/link_capture/bin/gnome-open +3 -0
  36. portacode/link_capture/bin/gvfs-open +3 -0
  37. portacode/link_capture/bin/kde-open +3 -0
  38. portacode/link_capture/bin/kfmclient +3 -0
  39. portacode/link_capture/bin/link_capture_exec.sh +11 -0
  40. portacode/link_capture/bin/link_capture_wrapper.py +75 -0
  41. portacode/link_capture/bin/links +3 -0
  42. portacode/link_capture/bin/links2 +3 -0
  43. portacode/link_capture/bin/lynx +3 -0
  44. portacode/link_capture/bin/mate-open +3 -0
  45. portacode/link_capture/bin/netsurf +3 -0
  46. portacode/link_capture/bin/sensible-browser +3 -0
  47. portacode/link_capture/bin/w3m +3 -0
  48. portacode/link_capture/bin/x-www-browser +3 -0
  49. portacode/link_capture/bin/xdg-open +3 -0
  50. portacode/logging_categories.py +140 -0
  51. portacode/pairing.py +103 -0
  52. portacode/static/js/test-ntp-clock.html +63 -0
  53. portacode/static/js/utils/ntp-clock.js +232 -0
  54. portacode/utils/NTP_ARCHITECTURE.md +136 -0
  55. portacode/utils/__init__.py +1 -0
  56. portacode/utils/diff_apply.py +456 -0
  57. portacode/utils/diff_renderer.py +371 -0
  58. portacode/utils/ntp_clock.py +65 -0
  59. portacode-1.4.11.dev1.dist-info/METADATA +298 -0
  60. portacode-1.4.11.dev1.dist-info/RECORD +97 -0
  61. {portacode-0.3.16.dev10.dist-info → portacode-1.4.11.dev1.dist-info}/WHEEL +1 -1
  62. portacode-1.4.11.dev1.dist-info/top_level.txt +3 -0
  63. test_modules/README.md +296 -0
  64. test_modules/__init__.py +1 -0
  65. test_modules/test_device_online.py +44 -0
  66. test_modules/test_file_operations.py +743 -0
  67. test_modules/test_git_status_ui.py +370 -0
  68. test_modules/test_login_flow.py +50 -0
  69. test_modules/test_navigate_testing_folder.py +361 -0
  70. test_modules/test_play_store_screenshots.py +294 -0
  71. test_modules/test_terminal_buffer_performance.py +261 -0
  72. test_modules/test_terminal_interaction.py +80 -0
  73. test_modules/test_terminal_loading_race_condition.py +95 -0
  74. test_modules/test_terminal_start.py +56 -0
  75. testing_framework/.env.example +21 -0
  76. testing_framework/README.md +334 -0
  77. testing_framework/__init__.py +17 -0
  78. testing_framework/cli.py +326 -0
  79. testing_framework/core/__init__.py +1 -0
  80. testing_framework/core/base_test.py +336 -0
  81. testing_framework/core/cli_manager.py +177 -0
  82. testing_framework/core/hierarchical_runner.py +577 -0
  83. testing_framework/core/playwright_manager.py +520 -0
  84. testing_framework/core/runner.py +447 -0
  85. testing_framework/core/shared_cli_manager.py +234 -0
  86. testing_framework/core/test_discovery.py +112 -0
  87. testing_framework/requirements.txt +12 -0
  88. portacode-0.3.16.dev10.dist-info/METADATA +0 -238
  89. portacode-0.3.16.dev10.dist-info/RECORD +0 -29
  90. portacode-0.3.16.dev10.dist-info/top_level.txt +0 -1
  91. {portacode-0.3.16.dev10.dist-info → portacode-1.4.11.dev1.dist-info}/entry_points.txt +0 -0
  92. {portacode-0.3.16.dev10.dist-info → portacode-1.4.11.dev1.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,136 @@
1
+ # NTP Clock Architecture
2
+
3
+ ## Overview
4
+
5
+ All entities (client, server, device) synchronize to **time.cloudflare.com** for distributed tracing.
6
+
7
+ ## Architecture: Single Package for Everything
8
+
9
+ All NTP clock implementations (Python and JavaScript) are in the **portacode package** to ensure DRY principles.
10
+
11
+ ## Python Implementation
12
+
13
+ **Location:** `portacode/utils/ntp_clock.py` (in portacode package)
14
+
15
+ ### Import Path
16
+ ```python
17
+ from portacode.utils.ntp_clock import ntp_clock
18
+ ```
19
+
20
+ ### Usage Locations
21
+ 1. **Django Server Consumers** (`server/portacode_django/dashboard/consumers.py`)
22
+ 2. **Device Base Handlers** (`portacode/connection/handlers/base.py`)
23
+ 3. **Device Client** (`server/portacode_django/data/services/device_client.py`)
24
+ 4. **Any Python code with portacode installed**
25
+
26
+ ### Dependencies
27
+ - `setup.py`: Added `ntplib>=0.4.0` to `install_requires`
28
+ - `server/portacode_django/requirements.txt`: Added `portacode>=1.3.26`
29
+
30
+ ### API
31
+ ```python
32
+ # Get NTP-synchronized timestamp (None if not synced)
33
+ ntp_clock.now_ms() # milliseconds
34
+ ntp_clock.now() # seconds
35
+ ntp_clock.now_iso() # ISO format
36
+
37
+ # Check sync status
38
+ status = ntp_clock.get_status()
39
+ # {
40
+ # 'server': 'time.cloudflare.com',
41
+ # 'offset_ms': 6.04,
42
+ # 'last_sync': '2025-10-05T04:37:12.768445+00:00',
43
+ # 'is_synced': True
44
+ # }
45
+ ```
46
+
47
+ ## JavaScript Implementation
48
+
49
+ **Location:** `portacode/static/js/utils/ntp-clock.js` (in portacode package)
50
+
51
+ ### Django Setup
52
+
53
+ Django will serve static files from the portacode package automatically after `collectstatic`:
54
+
55
+ ```python
56
+ # Django settings.py - no changes needed, just ensure:
57
+ INSTALLED_APPS = [
58
+ # ... other apps
59
+ 'portacode', # Add portacode as an installed app (optional, for admin integration)
60
+ ]
61
+
62
+ # Static files will be collected from portacode package
63
+ STATIC_URL = '/static/'
64
+ ```
65
+
66
+ After installing portacode (`pip install portacode` or `pip install -e .`), run:
67
+ ```bash
68
+ python manage.py collectstatic
69
+ ```
70
+
71
+ This will copy `portacode/static/js/utils/ntp-clock.js` to Django's static files directory.
72
+
73
+ ### Import Path (in Django templates/JS)
74
+ ```javascript
75
+ import ntpClock from '/static/js/utils/ntp-clock.js';
76
+ // or relative to your JS file:
77
+ import ntpClock from './utils/ntp-clock.js';
78
+ ```
79
+
80
+ ### Usage Locations
81
+ 1. **Dashboard WebSocket** (`websocket-service.js`)
82
+ 2. **Project WebSocket** (`websocket-service-project.js`)
83
+
84
+ ### API
85
+ ```javascript
86
+ // Get NTP-synchronized timestamp (null if not synced)
87
+ ntpClock.now() // milliseconds
88
+ ntpClock.nowISO() // ISO format
89
+
90
+ // Check sync status
91
+ const status = ntpClock.getStatus();
92
+ // {
93
+ // server: 'time.cloudflare.com',
94
+ // offset: 6.04,
95
+ // lastSync: '2025-10-05T04:37:12.768445+00:00',
96
+ // isSynced: true
97
+ // }
98
+ ```
99
+
100
+ ## Design Principles
101
+
102
+ 1. **DRY (Don't Repeat Yourself)**
103
+ - **Python:** Single implementation in portacode package (`portacode/utils/ntp_clock.py`)
104
+ - **JavaScript:** Single implementation in portacode package (`portacode/static/js/utils/ntp-clock.js`)
105
+ - Both served from the same package, no duplication across repos
106
+
107
+ 2. **No Fallback Servers**
108
+ - All entities MUST sync to time.cloudflare.com
109
+ - If sync fails, timestamps are None/null
110
+ - Ensures all timestamps are comparable
111
+
112
+ 3. **Auto-Sync**
113
+ - Re-syncs every 5 minutes automatically
114
+ - Initial sync on import/load
115
+ - Max 3 retry attempts before marking as failed
116
+
117
+ 4. **Thread-Safe (Python)**
118
+ - Uses threading.Lock for concurrent access
119
+ - Background daemon thread for periodic sync
120
+
121
+ ## Testing
122
+
123
+ ### Python
124
+ ```bash
125
+ python tools/test_python_ntp_clock.py
126
+ ```
127
+
128
+ ### JavaScript
129
+ The test file is included in the package at `portacode/static/js/test-ntp-clock.html`.
130
+
131
+ After running Django's `collectstatic`, open `/static/js/test-ntp-clock.html` in a browser.
132
+
133
+ Or run directly from package:
134
+ ```bash
135
+ python -c "import portacode, os; print(os.path.join(os.path.dirname(portacode.__file__), 'static/js/test-ntp-clock.html'))"
136
+ ```
@@ -0,0 +1 @@
1
+ """Portacode utility modules."""
@@ -0,0 +1,456 @@
1
+ """Utilities for parsing and applying unified diff patches."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ import re
7
+ from dataclasses import dataclass
8
+ from typing import List, Optional, Tuple
9
+
10
+
11
class DiffParseError(Exception):
    """Signals that unified-diff text could not be parsed."""
13
+
14
+
15
class DiffApplyError(Exception):
    """Signals that a parsed diff could not be applied to its target file.

    Attributes:
        file_path: Path of the file the failure relates to, when known.
        line_number: One-based line number of the failure, when known.
    """

    def __init__(self, message: str, *, file_path: Optional[str] = None, line_number: Optional[int] = None):
        super().__init__(message)
        # Optional location context for callers that want to surface it.
        self.file_path = file_path
        self.line_number = line_number
22
+
23
+
24
@dataclass
class PatchLine:
    """One line of a hunk body.

    ``op`` is the leading diff marker — ``' '`` (context), ``'+'``
    (addition) or ``'-'`` (removal); ``text`` is the line content with its
    trailing newline preserved.
    """

    op: str
    text: str
30
+
31
+
32
@dataclass
class Hunk:
    """A parsed ``@@``-delimited hunk.

    The start/length pairs mirror the hunk header (starts are one-based);
    ``lines`` holds the hunk body in document order.
    """

    old_start: int
    old_length: int
    new_start: int
    new_length: int
    lines: List[PatchLine]
41
+
42
+
43
@dataclass
class FilePatch:
    """A single file's diff: its old/new paths and its hunks."""

    old_path: Optional[str]
    new_path: Optional[str]
    hunks: List[Hunk]

    @property
    def is_new_file(self) -> bool:
        """True when the old side is absent or ``/dev/null`` (file creation)."""
        return self.old_path is None or self.old_path == "/dev/null"

    @property
    def is_delete(self) -> bool:
        """True when the new side is absent or ``/dev/null`` (file removal)."""
        return self.new_path is None or self.new_path == "/dev/null"

    @property
    def target_path(self) -> Optional[str]:
        """The on-disk path the patch applies to, preferring the new side."""
        null_like = (None, "/dev/null")
        if self.is_delete and self.old_path not in null_like:
            return self.old_path
        if self.new_path not in null_like:
            return self.new_path
        return self.old_path
66
+
67
+
68
+ _HUNK_HEADER_RE = re.compile(
69
+ r"@@ -(?P<old_start>\d+)(?:,(?P<old_len>\d+))? \+(?P<new_start>\d+)(?:,(?P<new_len>\d+))? @@"
70
+ )
71
+
72
+
73
+ def _normalize_diff_path(raw_path: str) -> Optional[str]:
74
+ """Normalize diff path lines (handles prefixes and tabs)."""
75
+ path = raw_path.strip()
76
+ if not path or path == "/dev/null":
77
+ return None
78
+ # Drop git prefixes like a/ and b/
79
+ if path.startswith("a/") or path.startswith("b/"):
80
+ path = path[2:]
81
+ # Remove any trailing metadata after tab (e.g., timestamps)
82
+ if "\t" in path:
83
+ path = path.split("\t", 1)[0]
84
+ return path.strip()
85
+
86
+
87
def parse_unified_diff(diff_text: str) -> List[FilePatch]:
    """Parse unified diff text into FilePatch objects.

    Args:
        diff_text: Full unified-diff text, possibly covering multiple files.

    Returns:
        One FilePatch per ``---``/``+++`` file section found.

    Raises:
        DiffParseError: If the diff is empty, a ``---`` line has no matching
            ``+++`` line, a hunk header is malformed, or no hunks/patches are
            found.
    """
    if not diff_text or not diff_text.strip():
        raise DiffParseError("Diff content is empty")

    lines = diff_text.splitlines(keepends=True)
    patches: List[FilePatch] = []
    i = 0

    while i < len(lines):
        line = lines[i]
        if line.startswith("diff --git"):
            i += 1
            continue

        # Skip anything until the next file header pair.
        if not line.startswith("--- "):
            i += 1
            continue

        old_path = _normalize_diff_path(line[4:].strip())
        i += 1
        if i >= len(lines) or not lines[i].startswith("+++ "):
            raise DiffParseError("Missing +++ line after --- line")
        new_path = _normalize_diff_path(lines[i][4:].strip())
        i += 1

        hunks: List[Hunk] = []
        while i < len(lines) and lines[i].startswith("@@"):
            header = lines[i]
            match = _HUNK_HEADER_RE.match(header)
            if not match:
                raise DiffParseError(f"Invalid hunk header: {header.strip()}")
            old_start = int(match.group("old_start"))
            # A missing length component means 1 in unified-diff format.
            old_len = int(match.group("old_len") or "1")
            new_start = int(match.group("new_start"))
            new_len = int(match.group("new_len") or "1")
            i += 1

            hunk_lines: List[PatchLine] = []
            while i < len(lines):
                current_line = lines[i]
                prefix = current_line[:1]
                # Stop if we encounter the start of the next file diff.
                if current_line.startswith("diff --git ") or current_line.startswith("--- "):
                    break
                if prefix in {" ", "+", "-"}:
                    # Guard against a file header appearing inside a hunk.
                    if prefix == "+" and current_line.startswith("+++ "):
                        break
                    hunk_lines.append(PatchLine(prefix, current_line[1:]))
                    i += 1
                elif current_line.startswith("\\"):
                    # FIX: the original tested startswith("\") — an
                    # unterminated string literal.  This branch skips the
                    # '\ No newline at end of file' marker line.
                    i += 1
                else:
                    break

            hunks.append(Hunk(old_start, old_len, new_start, new_len, hunk_lines))

        if not hunks:
            raise DiffParseError("No hunks found for file diff")

        patches.append(FilePatch(old_path, new_path, hunks))

    if not patches:
        raise DiffParseError("No valid file patches found in diff")

    return patches
157
+
158
+
159
+ def _normalize_target_path(path: str, base_path: Optional[str]) -> str:
160
+ """Compute the absolute path for a diff target."""
161
+ if os.path.isabs(path):
162
+ return path
163
+ base = base_path or os.getcwd()
164
+ return os.path.abspath(os.path.join(base, path))
165
+
166
+
167
+ def _load_file_lines(path: str) -> Tuple[List[str], bool]:
168
+ """Load file contents as a list of lines with newline characters preserved."""
169
+ try:
170
+ with open(path, "r", encoding="utf-8") as f:
171
+ data = f.read()
172
+ return data.splitlines(keepends=True), True
173
+ except FileNotFoundError:
174
+ return [], False
175
+
176
+
177
+ def _collect_original_line_chunks(hunk: Hunk) -> List[List[str]]:
178
+ """Collect consecutive lines from the original file that can anchor a hunk."""
179
+ chunks: List[List[str]] = []
180
+ current: List[str] = []
181
+
182
+ for line in hunk.lines:
183
+ if line.op in {" ", "-"}:
184
+ current.append(line.text)
185
+ elif current:
186
+ chunks.append(list(current))
187
+ current = []
188
+ if current:
189
+ chunks.append(list(current))
190
+
191
+ return [chunk for chunk in chunks if chunk]
192
+
193
+
194
+ def _find_unique_chunk_position(original_lines: List[str], chunk: List[str]) -> Optional[int]:
195
+ """Return the sole index where chunk appears, or None if ambiguous."""
196
+ if not chunk or len(chunk) > len(original_lines):
197
+ return None
198
+
199
+ matches: List[int] = []
200
+ max_start = len(original_lines) - len(chunk)
201
+ for idx in range(0, max_start + 1):
202
+ if original_lines[idx : idx + len(chunk)] == chunk:
203
+ matches.append(idx)
204
+ if len(matches) > 1:
205
+ break
206
+ if len(matches) == 1:
207
+ return matches[0]
208
+ return None
209
+
210
+
211
def _find_hunk_fallback_index(
    original_lines: List[str], hunk: Hunk
) -> Optional[Tuple[int, int]]:
    """Search for a unique context anchor for *hunk*.

    Tries the hunk's original-file line runs longest-first and returns
    ``(start_index, run_length)`` for the first run that occurs exactly once
    in *original_lines*, or None when no run anchors uniquely.
    """
    anchor_runs = _collect_original_line_chunks(hunk)
    # Longer runs are less likely to be ambiguous, so prefer them.
    for run in sorted(anchor_runs, key=len, reverse=True):
        position = _find_unique_chunk_position(original_lines, run)
        if position is not None:
            return position, len(run)
    return None
225
+
226
+
227
+ def _apply_hunk_segment(
228
+ original_lines: List[str],
229
+ start_index: int,
230
+ hunk: Hunk,
231
+ *,
232
+ file_path: str,
233
+ ) -> Tuple[List[str], int]:
234
+ """Apply a single hunk starting at a specific index."""
235
+ if start_index > len(original_lines):
236
+ raise DiffApplyError(
237
+ "Hunk start position past end of file",
238
+ file_path=file_path,
239
+ line_number=hunk.old_start,
240
+ )
241
+
242
+ result: List[str] = []
243
+ src_index = start_index
244
+ current_line_number = start_index + 1
245
+
246
+ for line in hunk.lines:
247
+ if line.op == " ":
248
+ if src_index >= len(original_lines):
249
+ raise DiffApplyError(
250
+ "Unexpected end of file while matching context",
251
+ file_path=file_path,
252
+ line_number=current_line_number,
253
+ )
254
+ if original_lines[src_index] != line.text:
255
+ raise DiffApplyError(
256
+ f"Context mismatch. Expected {original_lines[src_index]!r} but got {line.text!r}",
257
+ file_path=file_path,
258
+ line_number=current_line_number,
259
+ )
260
+ result.append(line.text)
261
+ src_index += 1
262
+ current_line_number += 1
263
+ elif line.op == "-":
264
+ if src_index >= len(original_lines):
265
+ raise DiffApplyError(
266
+ "Unexpected end of file while removing line",
267
+ file_path=file_path,
268
+ line_number=current_line_number,
269
+ )
270
+ if original_lines[src_index] != line.text:
271
+ raise DiffApplyError(
272
+ f"Deletion mismatch. Expected {original_lines[src_index]!r} but got {line.text!r}",
273
+ file_path=file_path,
274
+ line_number=current_line_number,
275
+ )
276
+ src_index += 1
277
+ current_line_number += 1
278
+ elif line.op == "+":
279
+ result.append(line.text)
280
+ else:
281
+ raise DiffApplyError(
282
+ f"Unsupported hunk operation {line.op!r}",
283
+ file_path=file_path,
284
+ line_number=current_line_number,
285
+ )
286
+
287
+ return result, src_index
288
+
289
+
290
def _apply_hunks(
    original_lines: List[str],
    hunks: List[Hunk],
    *,
    file_path: str,
    heuristic_log: Optional[List[str]] = None,
) -> List[str]:
    """Apply *hunks* in order to *original_lines* and return the new lines.

    Each hunk is first tried at the position its header declares.  If that
    fails, a unique context match found by ``_find_hunk_fallback_index`` is
    tried instead; when *heuristic_log* is provided, a note is appended
    whenever a hunk is actually shifted away from its declared position.

    Raises:
        DiffApplyError: When a hunk cannot be applied at any candidate
            position.
    """
    result: List[str] = []
    src_index = 0  # zero-based cursor into original_lines

    for hunk in hunks:
        desired_index = max(hunk.old_start - 1, 0)
        fallback = _find_hunk_fallback_index(original_lines, hunk)
        attempts: List[int] = [desired_index]
        fallback_context_len = 0
        fallback_index: Optional[int] = None
        if fallback:
            fallback_index, fallback_context_len = fallback
            # Only try the fallback when it differs from the declared spot
            # and does not rewind past content we have already emitted.
            if fallback_index != desired_index and fallback_index >= src_index:
                attempts.append(fallback_index)

        last_error: Optional[DiffApplyError] = None
        for attempt_index in attempts:
            if attempt_index < src_index:
                continue  # would overlap a previously applied hunk

            # Snapshot state so a failed attempt can be rolled back.
            prefix_length = len(result)
            prefix_src = src_index
            if attempt_index > src_index:
                # Copy the untouched gap before this hunk's anchor.
                result.extend(original_lines[src_index:attempt_index])
                src_index = attempt_index

            try:
                applied_lines, new_src_index = _apply_hunk_segment(
                    original_lines,
                    attempt_index,
                    hunk,
                    file_path=file_path,
                )
                result.extend(applied_lines)
                src_index = new_src_index

                # FIX: only record the heuristic when the hunk really moved.
                # Previously a "shifted from line N to N" note was emitted
                # when the fallback anchor coincided with the declared
                # position.
                if (
                    heuristic_log is not None
                    and fallback_index is not None
                    and attempt_index == fallback_index
                    and attempt_index != desired_index
                ):
                    heuristic_log.append(
                        f"Hunk for {file_path} was shifted from line {desired_index + 1} to {fallback_index + 1} "
                        f"using a unique {fallback_context_len}-line context match."
                    )
                break
            except DiffApplyError as exc:
                last_error = exc
                # Roll back everything this attempt added.
                del result[prefix_length:]
                src_index = prefix_src
                continue
        else:
            # No attempt succeeded.
            if last_error:
                raise last_error
            raise DiffApplyError(
                "Unable to apply hunk",
                file_path=file_path,
                line_number=hunk.old_start,
            )

    # Append remaining content after the final hunk.
    if src_index < len(original_lines):
        result.extend(original_lines[src_index:])

    return result
362
+
363
+
364
def apply_file_patch(
    file_patch: FilePatch,
    base_path: Optional[str],
    heuristic_log: Optional[List[str]] = None,
) -> Tuple[str, str, int]:
    """Apply a parsed FilePatch to disk.

    Args:
        file_patch: The parsed per-file diff to apply.
        base_path: Directory relative targets are resolved against (cwd when
            None).
        heuristic_log: Optional list collecting notes about hunks applied at
            a fallback position.

    Returns:
        Tuple[target_path, action, bytes_written] where action is one of
        "created", "modified" or "deleted".

    Raises:
        DiffApplyError: When no target path can be determined, a required
            file is missing, or a hunk fails to apply.
    """
    target_rel = file_patch.target_path
    if not target_rel:
        raise DiffApplyError("Unable to determine target path for diff")

    target_path = _normalize_target_path(target_rel, base_path)
    original_lines, file_exists = _load_file_lines(target_path)

    if file_patch.is_new_file and file_exists:
        # For new files we expect the file to not exist, but if it does we
        # treat the patch as a modification of the existing content.
        pass
    elif not file_patch.is_new_file and not file_exists and not file_patch.is_delete:
        raise DiffApplyError(f"File does not exist: {target_path}", file_path=target_path)

    if file_patch.is_delete:
        if not file_exists:
            raise DiffApplyError(f"File does not exist: {target_path}", file_path=target_path)
        # Applying the hunks validates that they match before deletion; the
        # produced lines are discarded (FIX: the previous version bound them
        # to an unused variable and dropped heuristic_log, unlike
        # preview_file_patch).
        _apply_hunks(
            original_lines,
            file_patch.hunks,
            file_path=target_path,
            heuristic_log=heuristic_log,
        )
        if os.path.exists(target_path):
            os.remove(target_path)
        return target_path, "deleted", 0

    updated_lines = _apply_hunks(
        original_lines,
        file_patch.hunks,
        file_path=target_path,
        heuristic_log=heuristic_log,
    )

    # Create parent directories for brand-new files in nested paths.
    dir_name = os.path.dirname(target_path)
    if dir_name:
        os.makedirs(dir_name, exist_ok=True)
    with open(target_path, "w", encoding="utf-8") as f:
        f.write("".join(updated_lines))

    bytes_written = sum(len(chunk.encode("utf-8")) for chunk in updated_lines)
    action = "created" if not file_exists else "modified"
    return target_path, action, bytes_written
412
+
413
+
414
def preview_file_patch(
    file_patch: FilePatch,
    base_path: Optional[str],
    heuristic_log: Optional[List[str]] = None,
) -> Tuple[str, str, List[str], List[str]]:
    """Compute the before/after contents for a FilePatch without writing to disk.

    Returns:
        Tuple[target_path, action, original_lines, updated_lines] where
        action is one of "created", "modified" or "deleted".

    Raises:
        DiffApplyError: When no target path can be determined, a required
            file is missing, or a hunk fails to apply.
    """
    target_rel = file_patch.target_path
    if not target_rel:
        raise DiffApplyError("Unable to determine target path for diff preview")

    target_path = _normalize_target_path(target_rel, base_path)
    original_lines, file_exists = _load_file_lines(target_path)

    # A "new file" patch over an existing file is previewed as a
    # modification; only a plain modification of a missing file is an error.
    missing_existing = (
        not file_patch.is_new_file
        and not file_patch.is_delete
        and not file_exists
    )
    if missing_existing:
        raise DiffApplyError(f"File does not exist: {target_path}", file_path=target_path)

    if file_patch.is_delete:
        # Run the hunks purely for validation; a delete previews as empty.
        _apply_hunks(
            original_lines,
            file_patch.hunks,
            file_path=target_path,
            heuristic_log=heuristic_log,
        )
        return target_path, "deleted", original_lines, []

    updated_lines = _apply_hunks(
        original_lines,
        file_patch.hunks,
        file_path=target_path,
        heuristic_log=heuristic_log,
    )
    action = "modified" if file_exists else "created"
    return target_path, action, original_lines, updated_lines