portacode 0.3.4.dev0__py3-none-any.whl → 1.4.11.dev0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registries. The information is provided for informational purposes only.
Potentially problematic release: this version of portacode might be problematic.
- portacode/_version.py +16 -3
- portacode/cli.py +155 -19
- portacode/connection/client.py +152 -12
- portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +1577 -0
- portacode/connection/handlers/__init__.py +43 -1
- portacode/connection/handlers/base.py +122 -18
- portacode/connection/handlers/chunked_content.py +244 -0
- portacode/connection/handlers/diff_handlers.py +603 -0
- portacode/connection/handlers/file_handlers.py +902 -17
- portacode/connection/handlers/project_aware_file_handlers.py +226 -0
- portacode/connection/handlers/project_state/README.md +312 -0
- portacode/connection/handlers/project_state/__init__.py +92 -0
- portacode/connection/handlers/project_state/file_system_watcher.py +179 -0
- portacode/connection/handlers/project_state/git_manager.py +1502 -0
- portacode/connection/handlers/project_state/handlers.py +875 -0
- portacode/connection/handlers/project_state/manager.py +1331 -0
- portacode/connection/handlers/project_state/models.py +108 -0
- portacode/connection/handlers/project_state/utils.py +50 -0
- portacode/connection/handlers/project_state_handlers.py +45 -0
- portacode/connection/handlers/proxmox_infra.py +307 -0
- portacode/connection/handlers/registry.py +53 -10
- portacode/connection/handlers/session.py +705 -53
- portacode/connection/handlers/system_handlers.py +142 -8
- portacode/connection/handlers/tab_factory.py +389 -0
- portacode/connection/handlers/terminal_handlers.py +150 -11
- portacode/connection/handlers/update_handler.py +61 -0
- portacode/connection/multiplex.py +60 -2
- portacode/connection/terminal.py +695 -28
- portacode/keypair.py +63 -1
- portacode/link_capture/__init__.py +38 -0
- portacode/link_capture/__pycache__/__init__.cpython-311.pyc +0 -0
- portacode/link_capture/bin/__pycache__/link_capture_wrapper.cpython-311.pyc +0 -0
- portacode/link_capture/bin/elinks +3 -0
- portacode/link_capture/bin/gio-open +3 -0
- portacode/link_capture/bin/gnome-open +3 -0
- portacode/link_capture/bin/gvfs-open +3 -0
- portacode/link_capture/bin/kde-open +3 -0
- portacode/link_capture/bin/kfmclient +3 -0
- portacode/link_capture/bin/link_capture_exec.sh +11 -0
- portacode/link_capture/bin/link_capture_wrapper.py +75 -0
- portacode/link_capture/bin/links +3 -0
- portacode/link_capture/bin/links2 +3 -0
- portacode/link_capture/bin/lynx +3 -0
- portacode/link_capture/bin/mate-open +3 -0
- portacode/link_capture/bin/netsurf +3 -0
- portacode/link_capture/bin/sensible-browser +3 -0
- portacode/link_capture/bin/w3m +3 -0
- portacode/link_capture/bin/x-www-browser +3 -0
- portacode/link_capture/bin/xdg-open +3 -0
- portacode/logging_categories.py +140 -0
- portacode/pairing.py +103 -0
- portacode/service.py +6 -0
- portacode/static/js/test-ntp-clock.html +63 -0
- portacode/static/js/utils/ntp-clock.js +232 -0
- portacode/utils/NTP_ARCHITECTURE.md +136 -0
- portacode/utils/__init__.py +1 -0
- portacode/utils/diff_apply.py +456 -0
- portacode/utils/diff_renderer.py +371 -0
- portacode/utils/ntp_clock.py +65 -0
- portacode-1.4.11.dev0.dist-info/METADATA +298 -0
- portacode-1.4.11.dev0.dist-info/RECORD +97 -0
- {portacode-0.3.4.dev0.dist-info → portacode-1.4.11.dev0.dist-info}/WHEEL +1 -1
- portacode-1.4.11.dev0.dist-info/top_level.txt +3 -0
- test_modules/README.md +296 -0
- test_modules/__init__.py +1 -0
- test_modules/test_device_online.py +44 -0
- test_modules/test_file_operations.py +743 -0
- test_modules/test_git_status_ui.py +370 -0
- test_modules/test_login_flow.py +50 -0
- test_modules/test_navigate_testing_folder.py +361 -0
- test_modules/test_play_store_screenshots.py +294 -0
- test_modules/test_terminal_buffer_performance.py +261 -0
- test_modules/test_terminal_interaction.py +80 -0
- test_modules/test_terminal_loading_race_condition.py +95 -0
- test_modules/test_terminal_start.py +56 -0
- testing_framework/.env.example +21 -0
- testing_framework/README.md +334 -0
- testing_framework/__init__.py +17 -0
- testing_framework/cli.py +326 -0
- testing_framework/core/__init__.py +1 -0
- testing_framework/core/base_test.py +336 -0
- testing_framework/core/cli_manager.py +177 -0
- testing_framework/core/hierarchical_runner.py +577 -0
- testing_framework/core/playwright_manager.py +520 -0
- testing_framework/core/runner.py +447 -0
- testing_framework/core/shared_cli_manager.py +234 -0
- testing_framework/core/test_discovery.py +112 -0
- testing_framework/requirements.txt +12 -0
- portacode-0.3.4.dev0.dist-info/METADATA +0 -236
- portacode-0.3.4.dev0.dist-info/RECORD +0 -27
- portacode-0.3.4.dev0.dist-info/top_level.txt +0 -1
- {portacode-0.3.4.dev0.dist-info → portacode-1.4.11.dev0.dist-info}/entry_points.txt +0 -0
- {portacode-0.3.4.dev0.dist-info → portacode-1.4.11.dev0.dist-info/licenses}/LICENSE +0 -0
portacode/connection/handlers/file_handlers.py

```diff
@@ -2,13 +2,23 @@
 
 import os
 import logging
-
+import fnmatch
+import re
+import json
+import shutil
+import subprocess
+import time
+from typing import Any, Dict, List, Optional, Sequence
 from pathlib import Path
 
 from .base import AsyncHandler, SyncHandler
+from .chunked_content import create_chunked_response
 
 logger = logging.getLogger(__name__)
 
+# Global content cache: hash -> content
+_content_cache = {}
+
 
 class FileReadHandler(SyncHandler):
     """Handler for reading file contents."""
@@ -18,27 +28,101 @@ class FileReadHandler(SyncHandler):
         return "file_read"
 
     def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
-        """Read file contents."""
+        """Read file contents with optional pagination."""
        file_path = message.get("path")
        if not file_path:
            raise ValueError("path parameter is required")
-
+
+        encoding = message.get("encoding", "utf-8")
+        start_line = self._coerce_positive_int(message.get("start_line"), default=1)
+        max_lines = self._coerce_positive_int(message.get("max_lines"), allow_none=True)
+        end_line = self._coerce_positive_int(message.get("end_line"), allow_none=True)
+
+        if start_line < 1:
+            start_line = 1
+
+        if end_line is not None and end_line >= start_line:
+            range_len = end_line - start_line + 1
+            if max_lines is None:
+                max_lines = range_len
+            else:
+                max_lines = min(max_lines, range_len)
+
+        if max_lines is not None:
+            max_lines = min(max_lines, 2000)
+
        try:
-
-
-
-
-
-
-
-
-
+            file_size = os.path.getsize(file_path)
+        except FileNotFoundError:
+            raise ValueError(f"File not found: {file_path}")
+        except PermissionError:
+            raise RuntimeError(f"Permission denied: {file_path}")
+
+        total_lines = 0
+        collected_lines: List[str] = []
+        truncated_after = False
+
+        try:
+            with open(file_path, "r", encoding=encoding, errors="replace") as file_obj:
+                for idx, line in enumerate(file_obj, start=1):
+                    total_lines += 1
+                    if idx < start_line:
+                        continue
+
+                    if max_lines is not None and len(collected_lines) >= max_lines:
+                        truncated_after = True
+                        continue
+
+                    collected_lines.append(line)
        except FileNotFoundError:
            raise ValueError(f"File not found: {file_path}")
        except PermissionError:
            raise RuntimeError(f"Permission denied: {file_path}")
-        except
-            raise RuntimeError(f"
+        except OSError as exc:
+            raise RuntimeError(f"Error reading file: {exc}")
+
+        returned_start_line = start_line if collected_lines else None
+        returned_end_line = (
+            start_line + len(collected_lines) - 1 if collected_lines else None
+        )
+        has_more_before = bool(collected_lines) and start_line > 1
+        has_more_after = truncated_after or (
+            returned_end_line is not None and total_lines > returned_end_line
+        )
+
+        return {
+            "event": "file_read_response",
+            "path": file_path,
+            "content": "".join(collected_lines),
+            "size": file_size,
+            "total_lines": total_lines,
+            "returned_lines": len(collected_lines),
+            "start_line": returned_start_line,
+            "requested_start_line": start_line,
+            "end_line": returned_end_line,
+            "has_more_before": has_more_before,
+            "has_more_after": has_more_after,
+            "encoding": encoding,
+        }
+
+    @staticmethod
+    def _coerce_positive_int(
+        value: Any,
+        *,
+        default: Optional[int] = None,
+        allow_none: bool = False,
+    ) -> Optional[int]:
+        if value is None:
+            if allow_none:
+                return None
+            return default or 0
+        try:
+            coerced = int(value)
+        except (TypeError, ValueError):
+            return None if allow_none else (default or 0)
+        if coerced <= 0:
+            return None if allow_none else (default or 0)
+        return coerced
 
 
 class FileWriteHandler(SyncHandler):
```
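The reworked `file_read` is paging friendly: `start_line`, `max_lines`, and `end_line` select a window (capped at 2,000 lines per call), and the response carries `has_more_before`/`has_more_after` plus the actual window bounds. Below is a minimal client-side sketch of walking a file with these fields; `send_command` and the `cmd` routing key are hypothetical stand-ins for the transport, which this diff does not show.

```python
# Hypothetical pager built on the file_read command; only the request and
# response field names mirror the hunk above.
def read_whole_file(send_command, path, page_size=500):
    parts, start = [], 1
    while True:
        resp = send_command({
            "cmd": "file_read",         # routing key is an assumption
            "path": path,
            "start_line": start,
            "max_lines": page_size,     # the handler caps this at 2000
        })
        parts.append(resp["content"])
        if not resp["has_more_after"]:  # nothing left past this window
            return "".join(parts)
        start = resp["end_line"] + 1    # resume just past the last returned line
```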
```diff
@@ -86,6 +170,24 @@ class DirectoryListHandler(SyncHandler):
         """List directory contents."""
         path = message.get("path", ".")
         show_hidden = message.get("show_hidden", False)
+        limit_raw = message.get("limit")
+        offset_raw = message.get("offset", 0)
+
+        def _parse_positive_int(value, *, allow_none=False, minimum=0, maximum=None):
+            if value is None:
+                return None if allow_none else minimum
+            try:
+                parsed = int(value)
+            except (TypeError, ValueError):
+                return None if allow_none else minimum
+            if parsed < minimum:
+                parsed = minimum
+            if maximum is not None and parsed > maximum:
+                parsed = maximum
+            return parsed
+
+        offset = _parse_positive_int(offset_raw, minimum=0)
+        limit = _parse_positive_int(limit_raw, allow_none=True, minimum=1, maximum=1000)
 
         try:
             items = []
@@ -109,11 +211,31 @@ class DirectoryListHandler(SyncHandler):
                     # Skip items we can't stat
                     continue
 
+            total_count = len(items)
+
+            if offset:
+                if offset >= total_count:
+                    sliced_items = []
+                else:
+                    sliced_items = items[offset:]
+            else:
+                sliced_items = items
+
+            if limit is not None and limit >= 0:
+                sliced_items = sliced_items[:limit]
+
+            returned_count = len(sliced_items)
+            has_more = total_count > offset + returned_count if total_count else False
+
             return {
                 "event": "directory_list_response",
                 "path": path,
-                "items":
-                "count":
+                "items": sliced_items,
+                "count": returned_count,
+                "total_count": total_count,
+                "offset": offset,
+                "limit": limit,
+                "has_more": has_more,
             }
         except FileNotFoundError:
             raise ValueError(f"Directory not found: {path}")
```
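`directory_list` gets the matching treatment: `offset`/`limit` slice the listing (limit clamped to 1..1000), and `total_count`/`has_more` let a client page through large directories. A sketch under the same hypothetical transport:

```python
# Hypothetical paging loop over directory_list; field names follow the hunks above.
def list_whole_directory(send_command, path, page_size=200):
    items, offset = [], 0
    while True:
        resp = send_command({
            "cmd": "directory_list",    # routing key is an assumption
            "path": path,
            "offset": offset,
            "limit": page_size,         # clamped to 1..1000 by the handler
        })
        items.extend(resp["items"])
        if not resp["has_more"]:
            return items
        offset += resp["count"]         # count is the size of this page
```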
```diff
@@ -206,4 +328,767 @@ class FileDeleteHandler(SyncHandler):
         except OSError as e:
             if "Directory not empty" in str(e):
                 raise ValueError(f"Directory not empty (use recursive=True): {path}")
-            raise RuntimeError(f"Failed to delete: {e}")
+            raise RuntimeError(f"Failed to delete: {e}")
+
+
+class FileCreateHandler(SyncHandler):
+    """Handler for creating new files."""
+
+    @property
+    def command_name(self) -> str:
+        return "file_create"
+
+    def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
+        """Create a new file."""
+        parent_path = message.get("parent_path")
+        file_name = message.get("file_name")
+        content = message.get("content", "")
+
+        if not parent_path:
+            raise ValueError("parent_path parameter is required")
+        if not file_name:
+            raise ValueError("file_name parameter is required")
+
+        # Validate file name (no path separators or special chars)
+        if "/" in file_name or "\\" in file_name or file_name in [".", ".."]:
+            raise ValueError("Invalid file name")
+
+        try:
+            # Ensure parent directory exists
+            parent_dir = Path(parent_path)
+            if not parent_dir.exists():
+                raise ValueError(f"Parent directory does not exist: {parent_path}")
+            if not parent_dir.is_dir():
+                raise ValueError(f"Parent path is not a directory: {parent_path}")
+
+            # Create the full file path
+            file_path = parent_dir / file_name
+
+            # Check if file already exists
+            if file_path.exists():
+                raise ValueError(f"File already exists: {file_name}")
+
+            # Create the file
+            with open(file_path, 'w', encoding='utf-8') as f:
+                f.write(content)
+
+            return {
+                "event": "file_create_response",
+                "parent_path": parent_path,
+                "file_name": file_name,
+                "file_path": str(file_path),
+                "success": True,
+            }
+        except PermissionError:
+            raise RuntimeError(f"Permission denied: {parent_path}")
+        except OSError as e:
+            raise RuntimeError(f"Failed to create file: {e}")
+
+
```
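`file_create` takes a parent directory plus a bare name and rejects anything containing a path separator, so a request cannot smuggle in a nested path. For illustration, a request and the success payload it produces (the `cmd` key is an assumption):

```python
# Hypothetical file_create request; the field names come from the handler above.
request = {
    "cmd": "file_create",
    "parent_path": "/home/user/project",
    "file_name": "notes.md",        # "docs/notes.md" would raise ValueError
    "content": "# Notes\n",
}
# Expected success payload:
# {"event": "file_create_response", "parent_path": "/home/user/project",
#  "file_name": "notes.md", "file_path": "/home/user/project/notes.md",
#  "success": True}
```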
```diff
+class FolderCreateHandler(SyncHandler):
+    """Handler for creating new folders."""
+
+    @property
+    def command_name(self) -> str:
+        return "folder_create"
+
+    def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
+        """Create a new folder."""
+        parent_path = message.get("parent_path")
+        folder_name = message.get("folder_name")
+
+        if not parent_path:
+            raise ValueError("parent_path parameter is required")
+        if not folder_name:
+            raise ValueError("folder_name parameter is required")
+
+        # Validate folder name (no path separators or special chars)
+        if "/" in folder_name or "\\" in folder_name or folder_name in [".", ".."]:
+            raise ValueError("Invalid folder name")
+
+        try:
+            # Ensure parent directory exists
+            parent_dir = Path(parent_path)
+            if not parent_dir.exists():
+                raise ValueError(f"Parent directory does not exist: {parent_path}")
+            if not parent_dir.is_dir():
+                raise ValueError(f"Parent path is not a directory: {parent_path}")
+
+            # Create the full folder path
+            folder_path = parent_dir / folder_name
+
+            # Check if folder already exists
+            if folder_path.exists():
+                raise ValueError(f"Folder already exists: {folder_name}")
+
+            # Create the folder
+            folder_path.mkdir(parents=False, exist_ok=False)
+
+            return {
+                "event": "folder_create_response",
+                "parent_path": parent_path,
+                "folder_name": folder_name,
+                "folder_path": str(folder_path),
+                "success": True,
+            }
+        except PermissionError:
+            raise RuntimeError(f"Permission denied: {parent_path}")
+        except OSError as e:
+            raise RuntimeError(f"Failed to create folder: {e}")
+
+
+class FileRenameHandler(SyncHandler):
+    """Handler for renaming files and folders."""
+
+    @property
+    def command_name(self) -> str:
+        return "file_rename"
+
+    def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
+        """Rename a file or folder."""
+        old_path = message.get("old_path")
+        new_name = message.get("new_name")
+
+        if not old_path:
+            raise ValueError("old_path parameter is required")
+        if not new_name:
+            raise ValueError("new_name parameter is required")
+
+        # Validate new name (no path separators or special chars)
+        if "/" in new_name or "\\" in new_name or new_name in [".", ".."]:
+            raise ValueError("Invalid new name")
+
+        try:
+            old_path_obj = Path(old_path)
+            if not old_path_obj.exists():
+                raise ValueError(f"Path does not exist: {old_path}")
+
+            # Create new path in same directory
+            new_path = old_path_obj.parent / new_name
+
+            # Check if target already exists
+            if new_path.exists():
+                raise ValueError(f"Target already exists: {new_name}")
+
+            # Determine if it's a file or directory
+            is_directory = old_path_obj.is_dir()
+
+            # Rename the file/folder
+            old_path_obj.rename(new_path)
+
+            return {
+                "event": "file_rename_response",
+                "old_path": old_path,
+                "new_path": str(new_path),
+                "new_name": new_name,
+                "is_directory": is_directory,
+                "success": True,
+            }
+        except PermissionError:
+            raise RuntimeError(f"Permission denied: {old_path}")
+        except OSError as e:
+            raise RuntimeError(f"Failed to rename: {e}")
+
+
```
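`folder_create` mirrors `file_create`, and `file_rename` resolves `new_name` against the parent of `old_path`, so a rename can never relocate an entry to a different directory. A sketch of the rename exchange (again, the `cmd` key is an assumption):

```python
# Hypothetical file_rename request; field names follow the handler above.
request = {
    "cmd": "file_rename",
    "old_path": "/home/user/project/notes.md",
    "new_name": "notes-v2.md",      # must not contain "/" or "\\"
}
# Expected success payload:
# {"event": "file_rename_response",
#  "old_path": "/home/user/project/notes.md",
#  "new_path": "/home/user/project/notes-v2.md",
#  "new_name": "notes-v2.md", "is_directory": False, "success": True}
```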
```diff
+class FileSearchHandler(SyncHandler):
+    """Handler for searching text within files under a root directory."""
+
+    DEFAULT_EXCLUDE_DIRS: Sequence[str] = (
+        ".git",
+        ".hg",
+        ".svn",
+        "__pycache__",
+        "node_modules",
+        "vendor",
+        "dist",
+        "build",
+        "tmp",
+        "static",
+        "assets",
+        "coverage",
+    )
+
+    DEFAULT_EXCLUDE_FILE_GLOBS: Sequence[str] = (
+        "*.min.js",
+        "*.min.css",
+    )
+
+    BINARY_EXTENSIONS: Sequence[str] = (
+        ".png", ".jpg", ".jpeg", ".gif", ".bmp", ".svg", ".ico",
+        ".pdf", ".zip", ".tar", ".gz", ".bz2", ".xz", ".7z",
+        ".ttf", ".woff", ".woff2", ".eot",
+        ".mp3", ".mp4", ".mov", ".avi", ".wav", ".flac",
+        ".exe", ".dll", ".so", ".dylib",
+        ".class", ".jar",
+    )
+
+    DEFAULT_INCLUDE_EXTENSIONS: Sequence[str] = (
+        ".py", ".pyi", ".pyx",
+        ".js", ".jsx", ".ts", ".tsx", ".mjs", ".cjs",
+        ".json", ".yaml", ".yml", ".toml", ".ini", ".cfg", ".conf",
+        ".md", ".markdown", ".rst", ".txt",
+        ".html", ".htm", ".css", ".scss", ".less",
+        ".go", ".rs", ".java", ".kt", ".kts",
+        ".c", ".h", ".hpp", ".hh", ".cc", ".cpp", ".cxx",
+        ".cs", ".php", ".rb", ".swift", ".scala", ".sql",
+        ".sh", ".bash", ".zsh", ".fish",
+        ".env", ".dockerfile", ".gradle", ".mk", ".make", ".bat", ".ps1",
+    )
+
+    ALWAYS_INCLUDE_FILENAMES: Sequence[str] = (
+        "Makefile",
+        "Dockerfile",
+        "Jenkinsfile",
+        "Procfile",
+        "Gemfile",
+        "CMakeLists.txt",
+        "build.gradle",
+        "settings.gradle",
+        "package.json",
+        "pnpm-lock.yaml",
+        "yarn.lock",
+        "requirements.txt",
+        "pyproject.toml",
+    )
+
+    @property
+    def command_name(self) -> str:
+        return "file_search"
+
+    def _search_with_rg(
+        self,
+        *,
+        root_path: str,
+        query: str,
+        match_case: bool,
+        use_regex: bool,
+        whole_word: bool,
+        include_hidden: bool,
+        max_results: int,
+        max_per_file: int,
+        max_file_size: int,
+        include_patterns: List[str],
+        exclude_patterns: List[str],
+        max_line_length: int,
+        using_default_includes: bool,
+    ) -> Optional[Dict[str, Any]]:
+        """Perform fast search using ripgrep if available."""
+        if shutil.which("rg") is None:
+            return None
+
+        cmd = [
+            "rg",
+            "--json",
+            "--line-number",
+            "--color",
+            "never",
+            "--no-heading",
+            "--max-count",
+            str(max_per_file),
+            f"--max-filesize={max_file_size}B",
+        ]
+
+        if not match_case:
+            cmd.append("--ignore-case")
+        if not use_regex:
+            cmd.append("--fixed-strings")
+        if whole_word:
+            cmd.append("--word-regexp")
+        if include_hidden:
+            cmd.append("--hidden")
+
+        if using_default_includes:
+            for ext in self.DEFAULT_INCLUDE_EXTENSIONS:
+                cmd.extend(["-g", f"*{ext}"])
+            for name in self.ALWAYS_INCLUDE_FILENAMES:
+                cmd.extend(["-g", name])
+        for pattern in include_patterns:
+            cmd.extend(["-g", pattern])
+        for pattern in exclude_patterns:
+            cmd.extend(["-g", f"!{pattern}"])
+
+        cmd.append(query)
+        cmd.append(".")
+
+        matches: List[Dict[str, Any]] = []
+        truncated = False
+        truncated_count = 0
+        files_scanned = 0
+        errors: List[str] = []
+        stop_search = False
+        deadline = time.monotonic() + 10.0  # hard cap to avoid long-running scans
+
+        try:
+            proc = subprocess.Popen(
+                cmd,
+                cwd=root_path,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                text=True,
+            )
+        except Exception as exc:
+            logger.warning("Failed to execute ripgrep: %s", exc)
+            return None
+
+        try:
+            assert proc.stdout is not None
+            for line in proc.stdout:
+                line = line.strip()
+                if not line:
+                    continue
+
+                if time.monotonic() > deadline:
+                    truncated = True
+                    errors.append("Search aborted after reaching 10s execution limit.")
+                    stop_search = True
+                    break
+
+                try:
+                    payload = json.loads(line)
+                except json.JSONDecodeError:
+                    continue
+
+                event_type = payload.get("type")
+
+                if event_type == "begin":
+                    files_scanned += 1
+                    continue
+
+                if event_type == "match":
+                    data = payload.get("data", {})
+                    line_text = data.get("lines", {}).get("text", "")
+                    line_number = data.get("line_number")
+                    path_info = data.get("path", {}).get("text") or data.get("path", {}).get("bytes")
+                    if not path_info:
+                        continue
+                    absolute_path = os.path.join(root_path, path_info)
+                    relative_path = path_info
+
+                    submatches = data.get("submatches", [])
+                    if len(matches) >= max_results:
+                        truncated = True
+                        truncated_count += len(submatches)
+                        stop_search = True
+                        break
+
+                    available = max_results - len(matches)
+                    spans: List[List[int]] = []
+                    for submatch in submatches:
+                        if len(spans) >= available:
+                            truncated = True
+                            truncated_count += len(submatches) - len(spans)
+                            stop_search = True
+                            break
+                        start = submatch.get("start", {}).get("offset")
+                        end = submatch.get("end", {}).get("offset")
+                        if start is None or end is None:
+                            continue
+                        spans.append([start, end])
+
+                    if spans:
+                        clean_line = line_text.rstrip("\n")
+                        truncated_line = clean_line
+                        line_truncated = False
+                        if len(clean_line) > max_line_length:
+                            truncated_line = clean_line[:max_line_length] + "..."
+                            line_truncated = True
+
+                        matches.append(
+                            {
+                                "path": absolute_path,
+                                "relative_path": relative_path,
+                                "line_number": line_number,
+                                "line": truncated_line,
+                                "match_spans": spans,
+                                "match_count": len(spans),
+                                "line_truncated": line_truncated,
+                            }
+                        )
+
+                    if stop_search:
+                        break
+                elif event_type == "message":
+                    message = payload.get("data", {}).get("msg") or payload.get("data", {}).get("text")
+                    if message:
+                        errors.append(message)
+
+                if stop_search:
+                    break
+        finally:
+            if stop_search and proc.poll() is None:
+                try:
+                    proc.terminate()
+                    proc.wait(timeout=1.0)
+                except Exception:
+                    proc.kill()
+            else:
+                proc.wait()
+
+        stderr_output = ""
+        if proc.stderr:
+            try:
+                stderr_output = proc.stderr.read().strip()
+            except Exception:
+                stderr_output = ""
+        if stderr_output:
+            errors.append(stderr_output)
+
+        return {
+            "event": "file_search_response",
+            "root_path": root_path,
+            "query": query,
+            "match_case": match_case,
+            "regex": use_regex,
+            "whole_word": whole_word,
+            "include_patterns": include_patterns,
+            "exclude_patterns": exclude_patterns,
+            "matches": matches,
+            "matches_returned": len(matches),
+            "total_matches": len(matches) + truncated_count,
+            "files_scanned": files_scanned,
+            "truncated": truncated or truncated_count > 0,
+            "truncated_count": truncated_count,
+            "max_results": max_results,
+            "max_matches_per_file": max_per_file,
+            "errors": errors,
+        }
+
```
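When ripgrep is on PATH, `_search_with_rg` shells out with `--json` and folds the `begin`/`match`/`message` event stream into the same response shape as the fallback, with a ten-second deadline on the whole scan. Reconstructed from the code above, the argv for a default plain-text search looks roughly like this (only the first include globs are spelled out):

```python
# Approximate argv assembled by _search_with_rg for the query "TODO" with all
# options left at their defaults; the trailing -g entries are elided.
cmd = [
    "rg", "--json", "--line-number", "--color", "never", "--no-heading",
    "--max-count", "5",            # max_per_file default
    "--max-filesize=1048576B",     # max_file_size default (1 MiB)
    "--ignore-case",               # match_case defaults to False
    "--fixed-strings",             # regex defaults to False
    "-g", "*.py", "-g", "*.pyi",   # one -g per DEFAULT_INCLUDE_EXTENSIONS,
                                   # then one per ALWAYS_INCLUDE_FILENAMES,
                                   # then "-g", "!<pattern>" per exclude
    "TODO",                        # the query itself
    ".",                           # searched relative to cwd=root_path
]
```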
```diff
+    def execute(self, message: Dict[str, Any]) -> Dict[str, Any]:
+        root_path = message.get("root_path")
+        query = message.get("query")
+
+        if not root_path:
+            raise ValueError("root_path parameter is required")
+        if not query:
+            raise ValueError("query parameter is required")
+
+        if not os.path.isdir(root_path):
+            raise ValueError(f"Root path is not a directory: {root_path}")
+
+        match_case = bool(message.get("match_case", False))
+        use_regex = bool(message.get("regex", False))
+        whole_word = bool(message.get("whole_word", False))
+        include_hidden = bool(message.get("include_hidden", False))
+
+        max_results = self._clamp_int(message.get("max_results"), default=40, min_value=1, max_value=500)
+        max_per_file = self._clamp_int(
+            message.get("max_matches_per_file"),
+            default=5,
+            min_value=1,
+            max_value=50,
+        )
+        max_file_size = self._clamp_int(
+            message.get("max_file_size"),
+            default=1024 * 1024,
+            min_value=1024,
+            max_value=10 * 1024 * 1024,
+        )
+        max_line_length = self._clamp_int(
+            message.get("max_line_length"),
+            default=200,
+            min_value=32,
+            max_value=1024,
+        )
+
+        include_patterns = self._normalize_patterns(message.get("include_patterns"))
+        using_default_includes = not include_patterns
+        raw_exclude_patterns = self._normalize_patterns(message.get("exclude_patterns"))
+        using_default_excludes = not raw_exclude_patterns
+        if using_default_excludes:
+            exclude_patterns = []
+            for directory in self.DEFAULT_EXCLUDE_DIRS:
+                exclude_patterns.append(f"{directory}/**")
+                exclude_patterns.append(f"**/{directory}/**")
+            exclude_patterns.extend(self.DEFAULT_EXCLUDE_FILE_GLOBS)
+        else:
+            exclude_patterns = raw_exclude_patterns
+
+        flags = 0 if match_case else re.IGNORECASE
+        pattern = query if use_regex else re.escape(query)
+        if whole_word:
+            pattern = r"\b" + pattern + r"\b"
+
+        try:
+            compiled = re.compile(pattern, flags)
+        except re.error as exc:
+            raise ValueError(f"Invalid regular expression: {exc}") from exc
+
+        rg_result = self._search_with_rg(
+            root_path=root_path,
+            query=query,
+            match_case=match_case,
+            use_regex=use_regex,
+            whole_word=whole_word,
+            include_hidden=include_hidden,
+            max_results=max_results,
+            max_per_file=max_per_file,
+            max_file_size=max_file_size,
+            include_patterns=include_patterns,
+            exclude_patterns=exclude_patterns,
+            max_line_length=max_line_length,
+            using_default_includes=using_default_includes,
+        )
+        if rg_result is not None:
+            return rg_result
+
+        matches: List[Dict[str, Any]] = []
+        truncated = False
+        truncated_count = 0
+        files_scanned = 0
+        errors: List[str] = []
+        stop_search = False
+
+        binary_exts = {ext.lower() for ext in self.BINARY_EXTENSIONS}
+        allowed_exts = {ext.lower() for ext in self.DEFAULT_INCLUDE_EXTENSIONS}
+
+        deadline = time.monotonic() + 10.0
+
+        for dirpath, dirnames, filenames in os.walk(root_path):
+            if not include_hidden:
+                dirnames[:] = [d for d in dirnames if not d.startswith(".")]
+
+            for filename in filenames:
+                if time.monotonic() > deadline:
+                    truncated = True
+                    errors.append("Search aborted after reaching 10s execution limit.")
+                    stop_search = True
+                    break
+                if not include_hidden and filename.startswith("."):
+                    continue
+
+                abs_path = os.path.join(dirpath, filename)
+                rel_path = os.path.relpath(abs_path, root_path)
+
+                if using_default_excludes:
+                    path_parts = rel_path.replace("\\", "/").split("/")
+                    if any(part in self.DEFAULT_EXCLUDE_DIRS for part in path_parts):
+                        continue
+
+                if using_default_includes:
+                    ext = os.path.splitext(filename)[1].lower()
+                    if ext not in allowed_exts and filename not in self.ALWAYS_INCLUDE_FILENAMES:
+                        continue
+
+                if os.path.splitext(filename)[1].lower() in binary_exts:
+                    continue
+
+                if not self._should_include(rel_path, include_patterns, exclude_patterns):
+                    continue
+
+                try:
+                    size = os.path.getsize(abs_path)
+                except OSError:
+                    errors.append(f"Failed to stat file: {rel_path}")
+                    continue
+
+                if size > max_file_size:
+                    errors.append(f"Skipped (too large): {rel_path} ({size} bytes)")
+                    continue
+
+                files_scanned += 1
+                matches_for_file = 0
+
+                try:
+                    with open(abs_path, "r", encoding="utf-8", errors="replace") as file_obj:
+                        stop_current_file = False
+                        for line_number, line in enumerate(file_obj, start=1):
+                            if time.monotonic() > deadline:
+                                truncated = True
+                                errors.append("Search aborted after reaching 10s execution limit.")
+                                stop_search = True
+                                stop_current_file = True
+                                break
+                            iter_matches = list(compiled.finditer(line))
+                            if not iter_matches:
+                                continue
+
+                            # Enforce per-file cap
+                            remaining_per_file = max_per_file - matches_for_file
+                            if remaining_per_file <= 0:
+                                truncated = True
+                                truncated_count += len(iter_matches)
+                                stop_current_file = True
+                                break
+
+                            spans = [
+                                [match.start(), match.end()] for match in iter_matches[:remaining_per_file]
+                            ]
+                            dropped_from_file = len(iter_matches) - len(spans)
+                            if dropped_from_file > 0:
+                                truncated = True
+                                truncated_count += dropped_from_file
+
+                            # Enforce global cap
+                            remaining_global = max_results - len(matches)
+                            if remaining_global <= 0:
+                                truncated = True
+                                truncated_count += len(spans)
+                                stop_search = True
+                                break
+
+                            if len(spans) > remaining_global:
+                                truncated = True
+                                truncated_count += len(spans) - remaining_global
+                                spans = spans[:remaining_global]
+                                stop_search = True
+
+                            if spans:
+                                clean_line = line.rstrip("\n")
+                                truncated_line = clean_line
+                                line_truncated = False
+                                if len(clean_line) > max_line_length:
+                                    truncated_line = clean_line[:max_line_length] + "..."
+                                    line_truncated = True
+
+                                matches.append(
+                                    {
+                                        "path": abs_path,
+                                        "relative_path": rel_path,
+                                        "line_number": line_number,
+                                        "line": truncated_line,
+                                        "match_spans": spans,
+                                        "match_count": len(spans),
+                                        "line_truncated": line_truncated,
+                                    }
+                                )
+                                matches_for_file += len(spans)
+
+                            if stop_search or matches_for_file >= max_per_file:
+                                break
+                    if stop_current_file:
+                        break
+                except (OSError, UnicodeDecodeError):
+                    errors.append(f"Failed to read file: {rel_path}")
+                    continue
+
+                if stop_search:
+                    break
+            if stop_search:
+                break
+
+        total_matches = len(matches) + truncated_count
+
+        return {
+            "event": "file_search_response",
+            "root_path": root_path,
+            "query": query,
+            "match_case": match_case,
+            "regex": use_regex,
+            "whole_word": whole_word,
+            "include_patterns": include_patterns,
+            "exclude_patterns": exclude_patterns,
+            "matches": matches,
+            "matches_returned": len(matches),
+            "total_matches": total_matches,
+            "files_scanned": files_scanned,
+            "truncated": truncated,
+            "truncated_count": truncated_count,
+            "max_results": max_results,
+            "max_matches_per_file": max_per_file,
+            "errors": errors,
+        }
+
+    @staticmethod
+    def _normalize_patterns(patterns: Optional[Any]) -> List[str]:
+        if not patterns:
+            return []
+        if isinstance(patterns, str):
+            patterns = [patterns]
+        normalized: List[str] = []
+        for pattern in patterns:
+            if isinstance(pattern, str) and pattern.strip():
+                normalized.append(pattern.strip())
+        return normalized
+
+    @staticmethod
+    def _should_include(
+        relative_path: str,
+        include_patterns: List[str],
+        exclude_patterns: List[str],
+    ) -> bool:
+        if include_patterns:
+            if not any(fnmatch.fnmatch(relative_path, pat) for pat in include_patterns):
+                return False
+        if exclude_patterns:
+            if any(fnmatch.fnmatch(relative_path, pat) for pat in exclude_patterns):
+                return False
+        return True
+
+    @staticmethod
+    def _clamp_int(
+        value: Optional[Any],
+        *,
+        default: int,
+        min_value: int,
+        max_value: int,
+    ) -> int:
+        try:
+            coerced = int(value)
+        except (TypeError, ValueError):
+            coerced = default
+        return max(min_value, min(max_value, coerced))
+
+
```
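`execute` validates and clamps every knob before trying ripgrep, and the pure-Python `os.walk` fallback applies the same include/exclude rules, caps, and deadline, so callers see one contract whichever engine ran. A request sketch (the `cmd` key is an assumption):

```python
# Hypothetical file_search request; field names follow the handler above.
request = {
    "cmd": "file_search",
    "root_path": "/home/user/project",
    "query": r"def \w+_handler",
    "regex": True,
    "max_results": 100,             # clamped to 1..500
    "include_patterns": ["*.py"],   # overrides the default include set
}
# Each entry in response["matches"] carries path, relative_path, line_number,
# the (possibly truncated) line text, and match_spans as [start, end] offsets
# within that line.
```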
```diff
+class ContentRequestHandler(AsyncHandler):
+    """Handler for requesting content by hash for caching optimization."""
+
+    @property
+    def command_name(self) -> str:
+        return "content_request"
+
+    async def execute(self, message: Dict[str, Any]) -> None:
+        """Return content by hash if available, chunked for large content."""
+        content_hash = message.get("content_hash")
+        source_client_session = message.get("source_client_session")
+        server_project_id = message.get("project_id")
+
+        if not content_hash:
+            raise ValueError("content_hash parameter is required")
+
+        # Check if content is in cache
+        content = _content_cache.get(content_hash)
+
+        if content is not None:
+            base_response = {
+                "event": "content_response",
+                "content_hash": content_hash,
+                "success": True,
+            }
+
+            # Add request_id if present in original message
+            if "request_id" in message:
+                base_response["request_id"] = message["request_id"]
+
+            # Create chunked responses
+            responses = create_chunked_response(base_response, "content", content)
+
+            # Send all responses
+            for response in responses:
+                await self.send_response(response, project_id=server_project_id)
+
+            logger.info(f"Sent content response in {len(responses)} chunk(s) for hash: {content_hash[:16]}...")
+        else:
+            response = {
+                "event": "content_response",
+                "content_hash": content_hash,
+                "content": None,
+                "success": False,
+                "error": "Content not found in cache",
+                "chunked": False,
+            }
+            # Add request_id if present in original message
+            if "request_id" in message:
+                response["request_id"] = message["request_id"]
+            await self.send_response(response, project_id=server_project_id)
+
+
+def cache_content(content_hash: str, content: str) -> None:
+    """Cache content by hash for future retrieval."""
+    _content_cache[content_hash] = content
+
+
+def get_cached_content(content_hash: str) -> Optional[str]:
+    """Get cached content by hash, or None on a cache miss."""
+    return _content_cache.get(content_hash)
```
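The module-level cache closes the loop with `chunked_content`: a producer stores a body under its hash with `cache_content`, ships only the hash, and a peer recovers the body on demand through `content_request`. A sketch of the producer side, assuming SHA-256 hex digests as keys; the diff itself treats hashes as opaque strings:

```python
# Hedged sketch of populating the content cache; the SHA-256 keying is an
# assumption, not something this diff specifies.
import hashlib

def publish(content: str) -> str:
    digest = hashlib.sha256(content.encode("utf-8")).hexdigest()
    cache_content(digest, content)   # store in the module-level cache
    return digest                    # ship the hash instead of the body

# A later {"cmd": "content_request", "content_hash": digest} is answered with
# one or more content_response chunks built by create_chunked_response();
# a cache miss yields success=False and "Content not found in cache".
```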