portacode 1.3.32__py3-none-any.whl → 1.4.11.dev5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of portacode might be problematic; review the file-level changes listed below for details.

Files changed (56):
  1. portacode/_version.py +2 -2
  2. portacode/cli.py +158 -14
  3. portacode/connection/client.py +127 -8
  4. portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +370 -4
  5. portacode/connection/handlers/__init__.py +16 -1
  6. portacode/connection/handlers/diff_handlers.py +603 -0
  7. portacode/connection/handlers/file_handlers.py +674 -17
  8. portacode/connection/handlers/project_aware_file_handlers.py +11 -0
  9. portacode/connection/handlers/project_state/file_system_watcher.py +31 -61
  10. portacode/connection/handlers/project_state/git_manager.py +139 -572
  11. portacode/connection/handlers/project_state/handlers.py +28 -14
  12. portacode/connection/handlers/project_state/manager.py +226 -101
  13. portacode/connection/handlers/proxmox_infra.py +790 -0
  14. portacode/connection/handlers/session.py +465 -84
  15. portacode/connection/handlers/system_handlers.py +181 -8
  16. portacode/connection/handlers/tab_factory.py +1 -47
  17. portacode/connection/handlers/update_handler.py +61 -0
  18. portacode/connection/terminal.py +55 -10
  19. portacode/keypair.py +63 -1
  20. portacode/link_capture/__init__.py +38 -0
  21. portacode/link_capture/__pycache__/__init__.cpython-311.pyc +0 -0
  22. portacode/link_capture/bin/__pycache__/link_capture_wrapper.cpython-311.pyc +0 -0
  23. portacode/link_capture/bin/elinks +3 -0
  24. portacode/link_capture/bin/gio-open +3 -0
  25. portacode/link_capture/bin/gnome-open +3 -0
  26. portacode/link_capture/bin/gvfs-open +3 -0
  27. portacode/link_capture/bin/kde-open +3 -0
  28. portacode/link_capture/bin/kfmclient +3 -0
  29. portacode/link_capture/bin/link_capture_exec.sh +11 -0
  30. portacode/link_capture/bin/link_capture_wrapper.py +75 -0
  31. portacode/link_capture/bin/links +3 -0
  32. portacode/link_capture/bin/links2 +3 -0
  33. portacode/link_capture/bin/lynx +3 -0
  34. portacode/link_capture/bin/mate-open +3 -0
  35. portacode/link_capture/bin/netsurf +3 -0
  36. portacode/link_capture/bin/sensible-browser +3 -0
  37. portacode/link_capture/bin/w3m +3 -0
  38. portacode/link_capture/bin/x-www-browser +3 -0
  39. portacode/link_capture/bin/xdg-open +3 -0
  40. portacode/pairing.py +103 -0
  41. portacode/static/js/utils/ntp-clock.js +170 -79
  42. portacode/utils/diff_apply.py +456 -0
  43. portacode/utils/diff_renderer.py +371 -0
  44. portacode/utils/ntp_clock.py +45 -131
  45. {portacode-1.3.32.dist-info → portacode-1.4.11.dev5.dist-info}/METADATA +71 -3
  46. portacode-1.4.11.dev5.dist-info/RECORD +97 -0
  47. test_modules/test_device_online.py +1 -1
  48. test_modules/test_login_flow.py +8 -4
  49. test_modules/test_play_store_screenshots.py +294 -0
  50. testing_framework/.env.example +4 -1
  51. testing_framework/core/playwright_manager.py +63 -9
  52. portacode-1.3.32.dist-info/RECORD +0 -70
  53. {portacode-1.3.32.dist-info → portacode-1.4.11.dev5.dist-info}/WHEEL +0 -0
  54. {portacode-1.3.32.dist-info → portacode-1.4.11.dev5.dist-info}/entry_points.txt +0 -0
  55. {portacode-1.3.32.dist-info → portacode-1.4.11.dev5.dist-info}/licenses/LICENSE +0 -0
  56. {portacode-1.3.32.dist-info → portacode-1.4.11.dev5.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,456 @@
1
+ """Utilities for parsing and applying unified diff patches."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ import re
7
+ from dataclasses import dataclass
8
+ from typing import List, Optional, Tuple
9
+
10
+
11
class DiffParseError(Exception):
    """Raised when unified diff text is malformed and cannot be parsed."""
14
+
15
class DiffApplyError(Exception):
    """Raised when a parsed diff cannot be applied to the target file.

    Carries optional location context (``file_path`` and ``line_number``)
    so callers can report exactly where the application failed.
    """

    def __init__(self, message: str, *, file_path: Optional[str] = None, line_number: Optional[int] = None):
        super().__init__(message)
        # Location metadata for error reporting; either may be None.
        self.file_path = file_path
        self.line_number = line_number
23
+
24
@dataclass
class PatchLine:
    """One line of a hunk body: an operation marker plus its text."""

    # op is one of ' ' (context), '+' (addition), '-' (removal).
    op: str
    text: str
30
+
31
+
32
@dataclass
class Hunk:
    """A parsed ``@@`` hunk: header coordinates plus its body lines."""

    old_start: int  # 1-based start line in the original file
    old_length: int
    new_start: int  # 1-based start line in the patched file
    new_length: int
    lines: List[PatchLine]
41
+
42
+
43
@dataclass
class FilePatch:
    """All hunks for a single file, plus its source/destination paths."""

    old_path: Optional[str]
    new_path: Optional[str]
    hunks: List[Hunk]

    @property
    def is_new_file(self) -> bool:
        """True when the diff creates the file (no usable old path)."""
        return self.old_path is None or self.old_path == "/dev/null"

    @property
    def is_delete(self) -> bool:
        """True when the diff removes the file (no usable new path)."""
        return self.new_path is None or self.new_path == "/dev/null"

    @property
    def target_path(self) -> Optional[str]:
        """Path the patch applies to: old path for deletes, else new path."""
        absent = (None, "/dev/null")
        if self.is_delete and self.old_path not in absent:
            return self.old_path
        if self.new_path not in absent:
            return self.new_path
        return self.old_path
66
+
67
+
68
+ _HUNK_HEADER_RE = re.compile(
69
+ r"@@ -(?P<old_start>\d+)(?:,(?P<old_len>\d+))? \+(?P<new_start>\d+)(?:,(?P<new_len>\d+))? @@"
70
+ )
71
+
72
+
73
+ def _normalize_diff_path(raw_path: str) -> Optional[str]:
74
+ """Normalize diff path lines (handles prefixes and tabs)."""
75
+ path = raw_path.strip()
76
+ if not path or path == "/dev/null":
77
+ return None
78
+ # Drop git prefixes like a/ and b/
79
+ if path.startswith("a/") or path.startswith("b/"):
80
+ path = path[2:]
81
+ # Remove any trailing metadata after tab (e.g., timestamps)
82
+ if "\t" in path:
83
+ path = path.split("\t", 1)[0]
84
+ return path.strip()
85
+
86
+
87
def parse_unified_diff(diff_text: str) -> List[FilePatch]:
    """Parse unified diff text into a list of :class:`FilePatch` objects.

    Args:
        diff_text: Full unified diff, optionally with ``diff --git`` headers.

    Returns:
        One ``FilePatch`` per file section found in the diff.

    Raises:
        DiffParseError: If the diff is empty, a ``---`` header has no
            matching ``+++`` line, a hunk header is malformed, or a file
            section contains no hunks.
    """
    if not diff_text or not diff_text.strip():
        raise DiffParseError("Diff content is empty")

    lines = diff_text.splitlines(keepends=True)
    patches: List[FilePatch] = []
    i = 0

    while i < len(lines):
        line = lines[i]
        if line.startswith("diff --git"):
            i += 1
            continue

        # File sections start at a "--- " header; skip anything else
        # (index lines, mode lines, commit messages, ...).
        if not line.startswith("--- "):
            i += 1
            continue

        old_path = _normalize_diff_path(line[4:].strip())
        i += 1
        if i >= len(lines) or not lines[i].startswith("+++ "):
            raise DiffParseError("Missing +++ line after --- line")
        new_path = _normalize_diff_path(lines[i][4:].strip())
        i += 1

        hunks: List[Hunk] = []
        while i < len(lines) and lines[i].startswith("@@"):
            header = lines[i]
            match = _HUNK_HEADER_RE.match(header)
            if not match:
                raise DiffParseError(f"Invalid hunk header: {header.strip()}")
            old_start = int(match.group("old_start"))
            old_len = int(match.group("old_len") or "1")
            new_start = int(match.group("new_start"))
            new_len = int(match.group("new_len") or "1")
            i += 1

            hunk_lines: List[PatchLine] = []
            while i < len(lines):
                current_line = lines[i]
                prefix = current_line[:1]
                # Stop at the start of the next file section.  The "--- "
                # test here also covers the old unreachable '-' guard.
                if current_line.startswith("diff --git ") or current_line.startswith("--- "):
                    break
                if prefix in {" ", "+", "-"}:
                    # Guard against a "+++ " file header inside a hunk
                    if prefix == "+" and current_line.startswith("+++ "):
                        break
                    hunk_lines.append(PatchLine(prefix, current_line[1:]))
                    i += 1
                elif current_line.startswith("\\"):
                    # "\ No newline at end of file" marker: skip it but keep
                    # processing the rest of the hunk.  (The previous
                    # startswith("\") was a broken escape / syntax error.)
                    i += 1
                else:
                    break

            hunks.append(Hunk(old_start, old_len, new_start, new_len, hunk_lines))

        if not hunks:
            raise DiffParseError("No hunks found for file diff")

        patches.append(FilePatch(old_path, new_path, hunks))

    if not patches:
        raise DiffParseError("No valid file patches found in diff")

    return patches
157
+
158
+
159
+ def _normalize_target_path(path: str, base_path: Optional[str]) -> str:
160
+ """Compute the absolute path for a diff target."""
161
+ if os.path.isabs(path):
162
+ return path
163
+ base = base_path or os.getcwd()
164
+ return os.path.abspath(os.path.join(base, path))
165
+
166
+
167
+ def _load_file_lines(path: str) -> Tuple[List[str], bool]:
168
+ """Load file contents as a list of lines with newline characters preserved."""
169
+ try:
170
+ with open(path, "r", encoding="utf-8") as f:
171
+ data = f.read()
172
+ return data.splitlines(keepends=True), True
173
+ except FileNotFoundError:
174
+ return [], False
175
+
176
+
177
+ def _collect_original_line_chunks(hunk: Hunk) -> List[List[str]]:
178
+ """Collect consecutive lines from the original file that can anchor a hunk."""
179
+ chunks: List[List[str]] = []
180
+ current: List[str] = []
181
+
182
+ for line in hunk.lines:
183
+ if line.op in {" ", "-"}:
184
+ current.append(line.text)
185
+ elif current:
186
+ chunks.append(list(current))
187
+ current = []
188
+ if current:
189
+ chunks.append(list(current))
190
+
191
+ return [chunk for chunk in chunks if chunk]
192
+
193
+
194
+ def _find_unique_chunk_position(original_lines: List[str], chunk: List[str]) -> Optional[int]:
195
+ """Return the sole index where chunk appears, or None if ambiguous."""
196
+ if not chunk or len(chunk) > len(original_lines):
197
+ return None
198
+
199
+ matches: List[int] = []
200
+ max_start = len(original_lines) - len(chunk)
201
+ for idx in range(0, max_start + 1):
202
+ if original_lines[idx : idx + len(chunk)] == chunk:
203
+ matches.append(idx)
204
+ if len(matches) > 1:
205
+ break
206
+ if len(matches) == 1:
207
+ return matches[0]
208
+ return None
209
+
210
+
211
def _find_hunk_fallback_index(
    original_lines: List[str], hunk: Hunk
) -> Optional[Tuple[int, int]]:
    """Locate a unique anchor position for a misplaced hunk.

    Tries the hunk's context/deletion runs from longest to shortest and
    returns ``(index, run_length)`` for the first one occurring exactly once
    in ``original_lines``, or ``None`` when no unambiguous anchor exists.
    """
    candidate_runs = _collect_original_line_chunks(hunk)
    if not candidate_runs:
        return None

    # Longer runs are more specific, so prefer them.
    for run in sorted(candidate_runs, key=len, reverse=True):
        index = _find_unique_chunk_position(original_lines, run)
        if index is not None:
            return index, len(run)
    return None
225
+
226
+
227
+ def _apply_hunk_segment(
228
+ original_lines: List[str],
229
+ start_index: int,
230
+ hunk: Hunk,
231
+ *,
232
+ file_path: str,
233
+ ) -> Tuple[List[str], int]:
234
+ """Apply a single hunk starting at a specific index."""
235
+ if start_index > len(original_lines):
236
+ raise DiffApplyError(
237
+ "Hunk start position past end of file",
238
+ file_path=file_path,
239
+ line_number=hunk.old_start,
240
+ )
241
+
242
+ result: List[str] = []
243
+ src_index = start_index
244
+ current_line_number = start_index + 1
245
+
246
+ for line in hunk.lines:
247
+ if line.op == " ":
248
+ if src_index >= len(original_lines):
249
+ raise DiffApplyError(
250
+ "Unexpected end of file while matching context",
251
+ file_path=file_path,
252
+ line_number=current_line_number,
253
+ )
254
+ if original_lines[src_index] != line.text:
255
+ raise DiffApplyError(
256
+ f"Context mismatch. Expected {original_lines[src_index]!r} but got {line.text!r}",
257
+ file_path=file_path,
258
+ line_number=current_line_number,
259
+ )
260
+ result.append(line.text)
261
+ src_index += 1
262
+ current_line_number += 1
263
+ elif line.op == "-":
264
+ if src_index >= len(original_lines):
265
+ raise DiffApplyError(
266
+ "Unexpected end of file while removing line",
267
+ file_path=file_path,
268
+ line_number=current_line_number,
269
+ )
270
+ if original_lines[src_index] != line.text:
271
+ raise DiffApplyError(
272
+ f"Deletion mismatch. Expected {original_lines[src_index]!r} but got {line.text!r}",
273
+ file_path=file_path,
274
+ line_number=current_line_number,
275
+ )
276
+ src_index += 1
277
+ current_line_number += 1
278
+ elif line.op == "+":
279
+ result.append(line.text)
280
+ else:
281
+ raise DiffApplyError(
282
+ f"Unsupported hunk operation {line.op!r}",
283
+ file_path=file_path,
284
+ line_number=current_line_number,
285
+ )
286
+
287
+ return result, src_index
288
+
289
+
290
def _apply_hunks(
    original_lines: List[str],
    hunks: List[Hunk],
    *,
    file_path: str,
    heuristic_log: Optional[List[str]] = None,
) -> List[str]:
    """Apply every hunk in order, falling back to context search on misses.

    Each hunk is first tried at its declared position.  If that fails and a
    unique context match exists at or after the current cursor, the shifted
    position is tried instead, and the shift is recorded in ``heuristic_log``
    when one is provided.

    Raises:
        DiffApplyError: When a hunk cannot be applied at any candidate spot.
    """
    output: List[str] = []
    consumed = 0  # zero-based cursor into original_lines

    for hunk in hunks:
        declared = max(hunk.old_start - 1, 0)
        candidates: List[int] = [declared]

        anchor = _find_hunk_fallback_index(original_lines, hunk)
        anchor_index: Optional[int] = None
        anchor_len = 0
        if anchor:
            anchor_index, anchor_len = anchor
            # Only consider the anchor when it differs from the declared spot
            # and does not move backwards over already-consumed lines.
            if anchor_index != declared and anchor_index >= consumed:
                candidates.append(anchor_index)

        last_error: Optional[DiffApplyError] = None
        placed = False
        for candidate in candidates:
            if candidate < consumed:
                continue

            # Snapshot state so a failed attempt can be rolled back.
            checkpoint_out = len(output)
            checkpoint_src = consumed
            if candidate > consumed:
                # Copy the untouched gap before the hunk verbatim.
                output.extend(original_lines[consumed:candidate])
                consumed = candidate

            try:
                replacement, consumed_after = _apply_hunk_segment(
                    original_lines,
                    candidate,
                    hunk,
                    file_path=file_path,
                )
            except DiffApplyError as exc:
                last_error = exc
                del output[checkpoint_out:]
                consumed = checkpoint_src
                continue

            output.extend(replacement)
            consumed = consumed_after
            if (
                heuristic_log is not None
                and anchor_index is not None
                and candidate == anchor_index
            ):
                heuristic_log.append(
                    f"Hunk for {file_path} was shifted from line {declared + 1} to {anchor_index + 1} "
                    f"using a unique {anchor_len}-line context match."
                )
            placed = True
            break

        if not placed:
            if last_error:
                raise last_error
            raise DiffApplyError(
                "Unable to apply hunk",
                file_path=file_path,
                line_number=hunk.old_start,
            )

    # Append remaining content after the last hunk.
    if consumed < len(original_lines):
        output.extend(original_lines[consumed:])

    return output
362
+
363
+
364
def apply_file_patch(
    file_patch: FilePatch,
    base_path: Optional[str],
    heuristic_log: Optional[List[str]] = None,
) -> Tuple[str, str, int]:
    """Apply a parsed FilePatch to disk.

    Args:
        file_patch: Parsed patch for a single file.
        base_path: Directory relative paths resolve against (CWD when None).
        heuristic_log: Optional list collecting human-readable notes about
            hunks applied at shifted positions.

    Returns:
        Tuple[target_path, action, bytes_written] where ``action`` is one of
        "created", "modified", or "deleted".

    Raises:
        DiffApplyError: If the target path cannot be determined, a required
            file is missing, or a hunk fails to apply.
    """
    target_rel = file_patch.target_path
    if not target_rel:
        raise DiffApplyError("Unable to determine target path for diff")

    target_path = _normalize_target_path(target_rel, base_path)
    original_lines, file_exists = _load_file_lines(target_path)

    # A "new file" patch atop an existing file is treated as a plain
    # modification; only modifying/deleting a missing file is fatal.
    if not file_patch.is_new_file and not file_patch.is_delete and not file_exists:
        raise DiffApplyError(f"File does not exist: {target_path}", file_path=target_path)

    if file_patch.is_delete:
        if not file_exists:
            raise DiffApplyError(f"File does not exist: {target_path}", file_path=target_path)
        # Applying the hunks validates that they match before deletion.
        # heuristic_log is forwarded (the original dropped it here) so
        # shifted matches are reported consistently with preview_file_patch.
        _apply_hunks(
            original_lines,
            file_patch.hunks,
            file_path=target_path,
            heuristic_log=heuristic_log,
        )
        os.remove(target_path)
        return target_path, "deleted", 0

    updated_lines = _apply_hunks(
        original_lines,
        file_patch.hunks,
        file_path=target_path,
        heuristic_log=heuristic_log,
    )

    # Ensure the parent directory exists for newly created files.
    dir_name = os.path.dirname(target_path)
    if dir_name:
        os.makedirs(dir_name, exist_ok=True)
    content = "".join(updated_lines)
    with open(target_path, "w", encoding="utf-8") as f:
        f.write(content)

    bytes_written = len(content.encode("utf-8"))
    action = "created" if not file_exists else "modified"
    return target_path, action, bytes_written
412
+
413
+
414
def preview_file_patch(
    file_patch: FilePatch,
    base_path: Optional[str],
    heuristic_log: Optional[List[str]] = None,
) -> Tuple[str, str, List[str], List[str]]:
    """Compute before/after contents for a FilePatch without touching disk.

    Returns:
        Tuple[target_path, action, original_lines, updated_lines] where
        ``action`` is one of "created", "modified", or "deleted".

    Raises:
        DiffApplyError: If the target path cannot be resolved, a required
            file is missing, or a hunk fails to apply.
    """
    relative_target = file_patch.target_path
    if not relative_target:
        raise DiffApplyError("Unable to determine target path for diff preview")

    target_path = _normalize_target_path(relative_target, base_path)
    original_lines, file_exists = _load_file_lines(target_path)

    # A "new file" patch atop an existing file previews as a modification;
    # only a plain modification of a missing file is an error.
    missing_required = (
        not file_patch.is_new_file
        and not file_patch.is_delete
        and not file_exists
    )
    if missing_required:
        raise DiffApplyError(f"File does not exist: {target_path}", file_path=target_path)

    if file_patch.is_delete:
        # Validate the hunks; the previewed result of a deletion is empty.
        _apply_hunks(
            original_lines,
            file_patch.hunks,
            file_path=target_path,
            heuristic_log=heuristic_log,
        )
        return target_path, "deleted", original_lines, []

    updated_lines = _apply_hunks(
        original_lines,
        file_patch.hunks,
        file_path=target_path,
        heuristic_log=heuristic_log,
    )
    action = "created" if not file_exists else "modified"
    return target_path, action, original_lines, updated_lines