ripperdoc 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ripperdoc/__init__.py +1 -1
- ripperdoc/cli/cli.py +9 -1
- ripperdoc/cli/commands/agents_cmd.py +93 -53
- ripperdoc/cli/commands/mcp_cmd.py +3 -0
- ripperdoc/cli/commands/models_cmd.py +768 -283
- ripperdoc/cli/commands/permissions_cmd.py +107 -52
- ripperdoc/cli/commands/resume_cmd.py +61 -51
- ripperdoc/cli/commands/themes_cmd.py +31 -1
- ripperdoc/cli/ui/agents_tui/__init__.py +3 -0
- ripperdoc/cli/ui/agents_tui/textual_app.py +1138 -0
- ripperdoc/cli/ui/choice.py +376 -0
- ripperdoc/cli/ui/interrupt_listener.py +233 -0
- ripperdoc/cli/ui/message_display.py +7 -0
- ripperdoc/cli/ui/models_tui/__init__.py +5 -0
- ripperdoc/cli/ui/models_tui/textual_app.py +698 -0
- ripperdoc/cli/ui/panels.py +19 -4
- ripperdoc/cli/ui/permissions_tui/__init__.py +3 -0
- ripperdoc/cli/ui/permissions_tui/textual_app.py +526 -0
- ripperdoc/cli/ui/provider_options.py +220 -80
- ripperdoc/cli/ui/rich_ui.py +91 -83
- ripperdoc/cli/ui/tips.py +89 -0
- ripperdoc/cli/ui/wizard.py +98 -45
- ripperdoc/core/config.py +3 -0
- ripperdoc/core/permissions.py +66 -104
- ripperdoc/core/providers/anthropic.py +11 -0
- ripperdoc/protocol/stdio.py +3 -1
- ripperdoc/tools/bash_tool.py +2 -0
- ripperdoc/tools/file_edit_tool.py +100 -181
- ripperdoc/tools/file_read_tool.py +101 -25
- ripperdoc/tools/multi_edit_tool.py +239 -91
- ripperdoc/tools/notebook_edit_tool.py +11 -29
- ripperdoc/utils/file_editing.py +164 -0
- ripperdoc/utils/permissions/tool_permission_utils.py +11 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/METADATA +3 -2
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/RECORD +39 -30
- ripperdoc/cli/ui/interrupt_handler.py +0 -208
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/WHEEL +0 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/entry_points.txt +0 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/licenses/LICENSE +0 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/top_level.txt +0 -0
ripperdoc/tools/file_edit_tool.py

```diff
@@ -5,7 +5,6 @@ Allows the AI to edit files by replacing text.
 
 import contextlib
 import os
-import tempfile
 from pathlib import Path
 from typing import AsyncGenerator, Generator, List, Optional, TextIO
 from pydantic import BaseModel, Field
@@ -20,8 +19,14 @@ from ripperdoc.core.tool import (
 )
 from ripperdoc.utils.log import get_logger
 from ripperdoc.utils.platform import HAS_FCNTL
-from ripperdoc.utils.file_watch import record_snapshot
 from ripperdoc.utils.path_ignore import check_path_for_tool
+from ripperdoc.utils.file_editing import (
+    atomic_write_with_fallback,
+    file_lock,
+    open_locked_file,
+    safe_record_snapshot,
+    select_write_encoding,
+)
 from ripperdoc.tools.file_read_tool import detect_file_encoding
 
 logger = get_logger()
@@ -38,44 +43,22 @@ def determine_edit_encoding(file_path: str, new_content: str) -> str:
    if not detected_encoding:
        return "utf-8"

-   [old lines 41-46: encode check of new_content against the detected encoding; content not fully preserved in this rendering]
-           "New content cannot be encoded with %s, falling back to UTF-8 for %s",
-           detected_encoding,
-           file_path,
-       )
-       return "utf-8"
+   return select_write_encoding(
+       detected_encoding,
+       new_content,
+       file_path,
+       log_prefix="[file_edit_tool]",
+   )


 @contextlib.contextmanager
 def _file_lock(file_handle: TextIO, exclusive: bool = True) -> Generator[None, None, None]:
-   """
-   [docstring summary line not preserved in this rendering]
-   Args:
-       file_handle: An open file handle to lock
-       exclusive: If True, acquire exclusive lock; otherwise shared lock
-
-   Yields:
-       None
-   """
+   """Compatibility wrapper for tests that expect _file_lock in this module."""
    if not HAS_FCNTL:
-       # On Windows or systems without fcntl, skip locking
        yield
        return
-
-   import fcntl
-
-   lock_type = fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH
-   try:
-       fcntl.flock(file_handle.fileno(), lock_type)
+   with file_lock(file_handle, exclusive=exclusive):
        yield
-   finally:
-       with contextlib.suppress(OSError):
-           fcntl.flock(file_handle.fileno(), fcntl.LOCK_UN)


 class FileEditToolInput(BaseModel):
```
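The encode check that `determine_edit_encoding` previously performed inline now lives in the shared `ripperdoc.utils.file_editing.select_write_encoding` helper. The diff only shows the call site, not the helper body; the sketch below is one plausible shape for it, reconstructed from the removed lines above. The encode attempt and the logger setup are assumptions beyond what the hunk shows.

```python
import logging

logger = logging.getLogger(__name__)


def select_write_encoding(
    detected_encoding: str, new_content: str, file_path: str, log_prefix: str = ""
) -> str:
    """Keep the detected encoding only if the new content can round-trip through it."""
    try:
        # Assumed behavior: verify the new content encodes cleanly, as the removed code did.
        new_content.encode(detected_encoding)
        return detected_encoding
    except (UnicodeEncodeError, LookupError):
        logger.warning(
            "%s New content cannot be encoded with %s, falling back to UTF-8 for %s",
            log_prefix,
            detected_encoding,
            file_path,
        )
        return "utf-8"
```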
ripperdoc/tools/file_edit_tool.py (continued)

```diff
@@ -247,71 +230,20 @@ match exactly (including whitespace and indentation)."""
        file_encoding = "utf-8"

        try:
-           # Open file with exclusive lock to prevent concurrent modifications
-           #
-           [old lines 252-258 removed; content not preserved in this rendering]
-           with open(abs_file_path, "r+", encoding=file_encoding) as f:
-               # Record mtime immediately after open, before acquiring lock
-               try:
-                   pre_lock_mtime = os.fstat(f.fileno()).st_mtime
-               except OSError:
-                   pre_lock_mtime = None
-
-               with _file_lock(f, exclusive=True):
-                   # Check mtime after acquiring lock to detect modifications
-                   # during the window between open() and lock acquisition
-                   try:
-                       post_lock_mtime = os.fstat(f.fileno()).st_mtime
-                   except OSError:
-                       post_lock_mtime = None
-
-                   # Detect modification during open->lock window
-                   if pre_lock_mtime is not None and post_lock_mtime is not None:
-                       if post_lock_mtime > pre_lock_mtime:
-                           output = FileEditToolOutput(
-                               file_path=input_data.file_path,
-                               replacements_made=0,
-                               success=False,
-                               message="File was modified while acquiring lock. Please retry.",
-                           )
-                           yield ToolResult(
-                               data=output,
-                               result_for_assistant=self.render_result_for_assistant(output),
-                           )
-                           return
-
-                   # Validate against cached snapshot timestamp
-                   if file_snapshot and post_lock_mtime is not None:
-                       if post_lock_mtime > file_snapshot.timestamp:
-                           output = FileEditToolOutput(
-                               file_path=input_data.file_path,
-                               replacements_made=0,
-                               success=False,
-                               message="File has been modified since read, either by the user "
-                               "or by a linter. Read it again before attempting to edit it.",
-                           )
-                           yield ToolResult(
-                               data=output,
-                               result_for_assistant=self.render_result_for_assistant(output),
-                           )
-                           return
-
-                   # Read content while holding the lock
-                   content = f.read()
-
-                   # Check if old_string exists
-                   if input_data.old_string not in content:
+           # Open file with exclusive lock to prevent concurrent modifications.
+           # Uses shared helper for consistent TOCTOU protection.
+           with open_locked_file(abs_file_path, file_encoding) as (
+               f,
+               pre_lock_mtime,
+               post_lock_mtime,
+           ):
+               if pre_lock_mtime is not None and post_lock_mtime is not None:
+                   if post_lock_mtime > pre_lock_mtime:
                        output = FileEditToolOutput(
                            file_path=input_data.file_path,
                            replacements_made=0,
                            success=False,
-                           message=
+                           message="File was modified while acquiring lock. Please retry.",
                        )
                        yield ToolResult(
                            data=output,
@@ -319,17 +251,14 @@ match exactly (including whitespace and indentation)."""
                        )
                        return

-                   [old lines 322-324 removed; content not preserved in this rendering]
-                   # Check for ambiguity if not replace_all
-                   if not input_data.replace_all and occurrence_count > 1:
+               if file_snapshot and post_lock_mtime is not None:
+                   if post_lock_mtime > file_snapshot.timestamp:
                        output = FileEditToolOutput(
                            file_path=input_data.file_path,
                            replacements_made=0,
                            success=False,
-                           message=
-                           [old line 332 removed; content not preserved in this rendering]
+                           message="File has been modified since read, either by the user "
+                           "or by a linter. Read it again before attempting to edit it.",
                        )
                        yield ToolResult(
                            data=output,
@@ -337,91 +266,81 @@ match exactly (including whitespace and indentation)."""
                        )
                        return

-                   [old lines 340-383 removed; content not preserved in this rendering]
-                   except OSError as atomic_error:
-                       # Fallback to in-place write if atomic write fails
-                       # (e.g., cross-filesystem issues)
-                       # Re-verify file hasn't changed before fallback write (TOCTOU protection)
-                       f.seek(0)
-                       current_content = f.read()
-                       if current_content != content:
-                           output = FileEditToolOutput(
-                               file_path=input_data.file_path,
-                               replacements_made=0,
-                               success=False,
-                               message="File was modified during atomic write fallback. Please retry.",
-                           )
-                           yield ToolResult(
-                               data=output,
-                               result_for_assistant=self.render_result_for_assistant(output),
-                           )
-                           return
-                       f.seek(0)
-                       f.truncate()
-                       f.write(new_content)
-                       logger.debug(
-                           "[file_edit_tool] Atomic write failed, used fallback: %s",
-                           atomic_error,
-                       )
+               content = f.read()
+
+               if input_data.old_string not in content:
+                   output = FileEditToolOutput(
+                       file_path=input_data.file_path,
+                       replacements_made=0,
+                       success=False,
+                       message=f"String not found in file: {input_data.file_path}",
+                   )
+                   yield ToolResult(
+                       data=output,
+                       result_for_assistant=self.render_result_for_assistant(output),
+                   )
+                   return
+
+               occurrence_count = content.count(input_data.old_string)
+
+               if not input_data.replace_all and occurrence_count > 1:
+                   output = FileEditToolOutput(
+                       file_path=input_data.file_path,
+                       replacements_made=0,
+                       success=False,
+                       message=f"String appears {occurrence_count} times in file. "
+                       f"Either provide a unique string or use replace_all=true",
+                   )
+                   yield ToolResult(
+                       data=output,
+                       result_for_assistant=self.render_result_for_assistant(output),
+                   )
+                   return
+
+               if input_data.replace_all:
+                   new_content = content.replace(input_data.old_string, input_data.new_string)
+                   replacements = occurrence_count
+               else:
+                   new_content = content.replace(input_data.old_string, input_data.new_string, 1)
+                   replacements = 1
+
+               write_encoding = select_write_encoding(
+                   file_encoding,
+                   new_content,
+                   abs_file_path,
+                   log_prefix="[file_edit_tool]",
+               )

-               [old lines 410-411 removed; content not preserved in this rendering]
-               record_snapshot(
+               write_error = atomic_write_with_fallback(
+                   f,
                    abs_file_path,
                    new_content,
-               [old lines 415-419 removed; content not preserved in this rendering]
-                   "[file_edit_tool] Failed to record file snapshot: %s: %s",
-                   type(exc).__name__,
-                   exc,
-                   extra={"file_path": abs_file_path},
+                   write_encoding,
+                   content,
+                   temp_prefix=".ripperdoc_edit_",
+                   log_prefix="[file_edit_tool]",
+                   conflict_message="File was modified during atomic write fallback. Please retry.",
                )
+               if write_error:
+                   output = FileEditToolOutput(
+                       file_path=input_data.file_path,
+                       replacements_made=0,
+                       success=False,
+                       message=write_error,
+                   )
+                   yield ToolResult(
+                       data=output,
+                       result_for_assistant=self.render_result_for_assistant(output),
+                   )
+                   return
+
+               safe_record_snapshot(
+                   abs_file_path,
+                   new_content,
+                   getattr(context, "file_state_cache", {}),
+                   encoding=write_encoding,
+                   log_prefix="[file_edit_tool]",
+               )

                # Generate diff for display
                import difflib
```
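Most of the removed inline machinery reappears as shared helpers in the new ripperdoc/utils/file_editing.py (+164 lines in the listing above). The diff shows only the call sites, not the helper bodies; below is a minimal sketch of what `atomic_write_with_fallback` could look like, reconstructed from the removed fallback branch and the arguments passed at the call site. Names, defaults, and behaviour beyond what the hunks show are assumptions, not the package's actual implementation.

```python
import contextlib
import os
import tempfile
from typing import Optional, TextIO


def atomic_write_with_fallback(
    f: TextIO,
    path: str,
    new_content: str,
    encoding: str,
    original_content: str,
    temp_prefix: str = ".edit_",
    conflict_message: str = "File was modified during write. Please retry.",
) -> Optional[str]:
    """Return None on success, or an error message if the file changed under us."""
    try:
        # Preferred path: write a sibling temp file, then atomically swap it in.
        directory = os.path.dirname(os.path.abspath(path))
        fd, tmp_path = tempfile.mkstemp(prefix=temp_prefix, dir=directory)
        try:
            with os.fdopen(fd, "w", encoding=encoding) as tmp:
                tmp.write(new_content)
            os.replace(tmp_path, path)
            return None
        except OSError:
            with contextlib.suppress(OSError):
                os.unlink(tmp_path)
            raise
    except OSError:
        # Fallback: rewrite in place through the already-open locked handle,
        # but only after re-checking the content (TOCTOU protection), as the
        # removed inline code did.
        f.seek(0)
        if f.read() != original_content:
            return conflict_message
        f.seek(0)
        f.truncate()
        f.write(new_content)
        return None
```

Returning a conflict message rather than raising keeps the tool's error path uniform: the caller simply wraps whatever string comes back in a failed FileEditToolOutput, as the new hunk does.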
ripperdoc/tools/file_read_tool.py

```diff
@@ -3,6 +3,7 @@
 Allows the AI to read file contents.
 """
 
+import itertools
 import os
 from pathlib import Path
 from typing import AsyncGenerator, List, Optional, Tuple
@@ -24,7 +25,7 @@ from ripperdoc.utils.path_ignore import check_path_for_tool
 logger = get_logger()
 
 
-def detect_file_encoding(file_path: str) -> Tuple[Optional[str], float]:
+def detect_file_encoding(file_path: str, max_bytes: Optional[int] = None) -> Tuple[Optional[str], float]:
    """Detect file encoding using charset-normalizer.

    Returns:
@@ -32,7 +33,7 @@ def detect_file_encoding(file_path: str) -> Tuple[Optional[str], float]:
    """
    try:
        with open(file_path, "rb") as f:
-           raw_data = f.read()
+           raw_data = f.read() if max_bytes is None else f.read(max_bytes)
        results = from_bytes(raw_data)

        if not results:
@@ -115,6 +116,76 @@ def read_file_with_encoding(file_path: str) -> Tuple[Optional[List[str]], str, O
        return None, "", error_msg


+def read_file_slice_with_encoding(
+    file_path: str, offset: int, limit: Optional[int], sample_bytes: int = 65536
+) -> Tuple[Optional[List[str]], str, Optional[str]]:
+    """Read a slice of a file with encoding detection.
+
+    Returns:
+        Tuple of (lines, encoding_used, error_message).
+        If successful: (lines, encoding, None)
+        If failed: (None, "", error_message)
+    """
+
+    def _read_slice(encoding: str) -> List[str]:
+        start = max(offset, 0)
+        if limit is None:
+            end = None
+        elif limit <= 0:
+            return []
+        else:
+            end = start + limit
+        with open(file_path, "r", encoding=encoding, errors="strict") as f:
+            return list(itertools.islice(f, start, end))
+
+    # First, try UTF-8 (most common)
+    try:
+        lines = _read_slice("utf-8")
+        return lines, "utf-8", None
+    except UnicodeDecodeError:
+        pass
+
+    # UTF-8 failed, use charset-normalizer to detect encoding (sampled)
+    detected_encoding, confidence = detect_file_encoding(file_path, max_bytes=sample_bytes)
+
+    if detected_encoding:
+        try:
+            lines = _read_slice(detected_encoding)
+            logger.info(
+                "File %s decoded using detected encoding %s",
+                file_path,
+                detected_encoding,
+            )
+            return lines, detected_encoding, None
+        except (UnicodeDecodeError, LookupError) as e:
+            logger.warning(
+                "Failed to read %s with detected encoding %s: %s",
+                file_path,
+                detected_encoding,
+                e,
+            )
+
+    # Detection failed - try latin-1 as last resort (can decode any byte sequence)
+    try:
+        lines = _read_slice("latin-1")
+        logger.warning(
+            "File %s: encoding detection failed, using latin-1 fallback",
+            file_path,
+        )
+        return lines, "latin-1", None
+    except (UnicodeDecodeError, LookupError):
+        pass
+
+    # All attempts failed - return error
+    error_msg = (
+        f"Unable to determine file encoding. "
+        f"Detected: {detected_encoding or 'unknown'} (confidence: {confidence * 100:.0f}%). "
+        f"Tried fallback encodings: utf-8, latin-1. "
+        f"Please convert the file to UTF-8."
+    )
+    return None, "", error_msg
+
+
 # Maximum file size to read (default 256KB)
 # Can be overridden via env var in bytes
 MAX_FILE_SIZE_BYTES = int(os.getenv("RIPPERDOC_MAX_READ_FILE_SIZE_BYTES", "262144"))  # 256KB
```
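For reference, this is how the new slice reader added above might be called. The function name, parameters, and return shape come from the hunk itself; the file path and numbers are made up for illustration.

```python
from ripperdoc.tools.file_read_tool import read_file_slice_with_encoding

# Read 100 lines starting at line 1000 without loading the whole file into memory.
lines, encoding, error = read_file_slice_with_encoding(
    "/var/log/app/server.log",  # illustrative path
    offset=1000,                # zero-based line to start from
    limit=100,                  # read at most 100 lines
)
if error:
    print(f"Could not read file: {error}")
else:
    print(f"Decoded {len(lines)} lines as {encoding}")
```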
ripperdoc/tools/file_read_tool.py (continued)

```diff
@@ -243,7 +314,9 @@ and limit to read only a portion of the file."""
        try:
            # Check file size before reading to prevent memory exhaustion
            file_size = os.path.getsize(input_data.file_path)
-           [old line 246 removed; content not preserved in this rendering]
+           offset = max(input_data.offset or 0, 0)
+           limit = input_data.limit
+           if file_size > MAX_FILE_SIZE_BYTES and limit is None:
                size_kb = file_size / 1024
                limit_kb = MAX_FILE_SIZE_BYTES / 1024
                error_output = FileReadToolOutput(
@@ -259,8 +332,16 @@ and limit to read only a portion of the file."""
                )
                return

-           [old lines 262-263 removed; content not preserved in this rendering]
+           if limit is None:
+               # Detect and read full file with proper encoding
+               lines, used_encoding, encoding_error = read_file_with_encoding(
+                   input_data.file_path
+               )
+           else:
+               # Read only the requested slice (avoids loading huge files)
+               lines, used_encoding, encoding_error = read_file_slice_with_encoding(
+                   input_data.file_path, offset=offset, limit=limit
+               )

            if lines is None:
                # Encoding detection failed - return warning to LLM
@@ -277,29 +358,24 @@ and limit to read only a portion of the file."""
                )
                return

-           offset = input_data.offset or 0
-           limit = input_data.limit
-           total_lines = len(lines)
-
            # Check line count if no limit is specified (to prevent context overflow)
-           if limit is None and total_lines > MAX_READ_LINES:
-               error_output = FileReadToolOutput(
-                   content=f"File too large: {total_lines} lines exceeds limit of {MAX_READ_LINES} lines. Use offset and limit parameters to read portions.",
-                   file_path=input_data.file_path,
-                   line_count=total_lines,
-                   offset=0,
-                   limit=None,
-               )
-               yield ToolResult(
-                   data=error_output,
-                   result_for_assistant=f"Error: File {input_data.file_path} has {total_lines} lines, exceeding the limit of {MAX_READ_LINES} lines when reading without limit parameter. Use offset and limit to read portions, e.g., Read(file_path='{input_data.file_path}', offset=0, limit=500).",
-               )
-               return
-
-           # Apply offset and limit
            if limit is not None:
-               selected_lines = lines
+               selected_lines = lines
            else:
+               total_lines = len(lines)
+               if total_lines > MAX_READ_LINES:
+                   error_output = FileReadToolOutput(
+                       content=f"File too large: {total_lines} lines exceeds limit of {MAX_READ_LINES} lines. Use offset and limit parameters to read portions.",
+                       file_path=input_data.file_path,
+                       line_count=total_lines,
+                       offset=0,
+                       limit=None,
+                   )
+                   yield ToolResult(
+                       data=error_output,
+                       result_for_assistant=f"Error: File {input_data.file_path} has {total_lines} lines, exceeding the limit of {MAX_READ_LINES} lines when reading without limit parameter. Use offset and limit to read portions, e.g., Read(file_path='{input_data.file_path}', offset=0, limit=500).",
+                   )
+                   return
                selected_lines = lines[offset:]

            content = "".join(selected_lines)
```