indent 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of indent might be problematic.
- exponent/__init__.py +16 -3
- exponent/commands/cloud_commands.py +582 -0
- exponent/commands/common.py +4 -9
- exponent/commands/config_commands.py +1 -161
- exponent/commands/run_commands.py +20 -9
- exponent/commands/utils.py +3 -3
- exponent/commands/workflow_commands.py +2 -2
- exponent/core/config.py +0 -1
- exponent/core/graphql/mutations.py +114 -0
- exponent/core/graphql/queries.py +23 -0
- exponent/core/graphql/subscriptions.py +0 -449
- exponent/core/remote_execution/cli_rpc_types.py +48 -1
- exponent/core/remote_execution/client.py +114 -26
- exponent/core/remote_execution/file_write.py +1 -376
- exponent/core/remote_execution/files.py +1 -102
- exponent/core/remote_execution/git.py +1 -1
- exponent/core/remote_execution/http_fetch.py +3 -4
- exponent/core/remote_execution/languages/python_execution.py +1 -1
- exponent/core/remote_execution/languages/shell_streaming.py +1 -1
- exponent/core/remote_execution/session.py +1 -1
- exponent/core/remote_execution/system_context.py +0 -3
- exponent/core/remote_execution/tool_execution.py +24 -4
- exponent/core/remote_execution/truncation.py +51 -47
- exponent/core/remote_execution/types.py +25 -79
- exponent/core/remote_execution/utils.py +23 -51
- exponent/core/types/event_types.py +2 -2
- exponent/core/types/generated/strategy_info.py +0 -12
- exponent/utils/version.py +1 -1
- {indent-0.1.10.dist-info → indent-0.1.12.dist-info}/METADATA +3 -3
- indent-0.1.12.dist-info/RECORD +52 -0
- exponent/core/graphql/cloud_config_queries.py +0 -77
- indent-0.1.10.dist-info/RECORD +0 -53
- {indent-0.1.10.dist-info → indent-0.1.12.dist-info}/WHEEL +0 -0
- {indent-0.1.10.dist-info → indent-0.1.12.dist-info}/entry_points.txt +0 -0
exponent/core/remote_execution/client.py

@@ -3,19 +3,22 @@ from __future__ import annotations
 import asyncio
 import json
 import logging
-
+import time
+import uuid
+from collections.abc import AsyncGenerator, Callable, Generator
 from contextlib import asynccontextmanager
 from dataclasses import dataclass
-from typing import Any, TypeVar,
+from typing import Any, TypeVar, cast

 import msgspec
-import websockets.client
 import websockets.exceptions
 from httpx import (
     AsyncClient,
     codes as http_status,
 )
 from pydantic import BaseModel
+from websockets.asyncio import client as asyncio_websockets_client
+from websockets.asyncio.client import ClientConnection, connect

 from exponent.commands.utils import ConnectionTracker
 from exponent.core.config import is_editable_install
@@ -29,15 +32,16 @@ from exponent.core.remote_execution.cli_rpc_types import (
     ErrorResponse,
     GetAllFilesRequest,
     GetAllFilesResponse,
-    HttpResponse,
     HttpRequest,
+    KeepAliveCliChatRequest,
+    KeepAliveCliChatResponse,
+    SwitchCLIChatRequest,
+    SwitchCLIChatResponse,
     TerminateRequest,
     TerminateResponse,
     ToolExecutionRequest,
     ToolExecutionResponse,
     ToolResultType,
-    SwitchCLIChatRequest,
-    SwitchCLIChatResponse,
 )
 from exponent.core.remote_execution.code_execution import (
     execute_code_streaming,
@@ -87,7 +91,10 @@ class SwitchCLIChat:
     new_chat_uuid: str


-REMOTE_EXECUTION_CLIENT_EXIT_INFO =
+REMOTE_EXECUTION_CLIENT_EXIT_INFO = WSDisconnected | SwitchCLIChat
+
+# UUID for a single run of the CLI
+cli_uuid = uuid.uuid4()


 class RemoteExecutionClient:
@@ -104,6 +111,9 @@ class RemoteExecutionClient:
         self._halt_states: dict[str, bool] = {}
         self._halt_lock = asyncio.Lock()

+        # Track last request time for timeout functionality
+        self._last_request_time: float | None = None
+
     @property
     def working_directory(self) -> str:
         return self.current_session.working_directory
@@ -138,10 +148,35 @@ class RemoteExecutionClient:

         return should_halt

+    async def _timeout_monitor(
+        self, timeout_seconds: int | None
+    ) -> WSDisconnected | None:
+        """Monitor for inactivity timeout and return WSDisconnected if timeout occurs.
+
+        If timeout_seconds is None, keeps looping indefinitely until cancelled.
+        """
+        try:
+            while True:
+                await asyncio.sleep(1)
+                if (
+                    timeout_seconds is not None
+                    and self._last_request_time is not None
+                    and time.time() - self._last_request_time > timeout_seconds
+                ):
+                    logger.info(
+                        f"No requests received for {timeout_seconds} seconds. Shutting down..."
+                    )
+                    return WSDisconnected(
+                        error_message=f"Timeout after {timeout_seconds} seconds of inactivity"
+                    )
+        except asyncio.CancelledError:
+            # Handle cancellation gracefully
+            return None
+
     async def _handle_websocket_message(
         self,
         msg: str,
-        websocket:
+        websocket: ClientConnection,
         requests: asyncio.Queue[CliRpcRequest],
     ) -> REMOTE_EXECUTION_CLIENT_EXIT_INFO | None:
         """Handle an incoming websocket message.
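In outline, the added _timeout_monitor is a polling watchdog: request handling refreshes _last_request_time, and the monitor wakes once a second to compare that timestamp against the allowed idle period. A stripped-down, standalone sketch of the same idea (illustrative names, not the package's API):

import asyncio
import time

class IdleTracker:
    def __init__(self) -> None:
        self.last_seen = time.time()

    def touch(self) -> None:
        # Call whenever a request is handled.
        self.last_seen = time.time()

    async def monitor(self, timeout_seconds: float) -> str | None:
        try:
            while True:
                await asyncio.sleep(1)
                if time.time() - self.last_seen > timeout_seconds:
                    return f"no activity for {timeout_seconds}s"
        except asyncio.CancelledError:
            # Cancelled by the caller once the real work finishes first.
            return None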
@@ -241,6 +276,21 @@ class RemoteExecutionClient:
                 )
             )
             return SwitchCLIChat(new_chat_uuid=request.request.new_chat_uuid)
+        elif isinstance(request.request, KeepAliveCliChatRequest):
+            await websocket.send(
+                json.dumps(
+                    {
+                        "type": "result",
+                        "data": msgspec.to_builtins(
+                            CliRpcResponse(
+                                request_id=request.request_id,
+                                response=KeepAliveCliChatResponse(),
+                            )
+                        ),
+                    }
+                )
+            )
+            return None
         else:
             if isinstance(request.request, ToolExecutionRequest) and isinstance(
                 request.request.tool_input, BashToolInput
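The keep-alive reply reuses the same envelope as other RPC results: the response object is converted to builtins with msgspec and wrapped in a {"type": "result", "data": ...} JSON object. A rough sketch of that serialization step, using stand-in msgspec types rather than the package's real CliRpcResponse:

import json
import msgspec

class PingResponse(msgspec.Struct):
    ok: bool = True

class RpcResponse(msgspec.Struct):
    request_id: str
    response: PingResponse

payload = json.dumps(
    {
        "type": "result",
        "data": msgspec.to_builtins(RpcResponse(request_id="req-1", response=PingResponse())),
    }
)
print(payload)
# {"type": "result", "data": {"request_id": "req-1", "response": {"ok": true}}}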
@@ -302,7 +352,7 @@
             async with results_lock:
                 logger.info(f"Putting response {response}")
                 await results.put(response)
-        except Exception as e:
+        except Exception as e:
             logger.info(f"Error handling request {request}:\n\n{e}")
             async with results_lock:
                 await results.put(
@@ -327,7 +377,7 @@

     async def _process_websocket_messages(
         self,
-        websocket:
+        websocket: ClientConnection,
         beats: asyncio.Queue[HeartbeatInfo],
         requests: asyncio.Queue[CliRpcRequest],
         results: asyncio.Queue[CliRpcResponse],
@@ -380,7 +430,7 @@

     async def _handle_websocket_connection(
         self,
-        websocket:
+        websocket: ClientConnection,
         connection_tracker: ConnectionTracker | None,
     ) -> REMOTE_EXECUTION_CLIENT_EXIT_INFO | None:
         """Handle a single websocket connection.
@@ -427,17 +477,37 @@
         self,
         chat_uuid: str,
         connection_tracker: ConnectionTracker | None = None,
+        timeout_seconds: int | None = None,
     ) -> REMOTE_EXECUTION_CLIENT_EXIT_INFO:
-        """Run the websocket connection loop."""
+        """Run the websocket connection loop with optional inactivity timeout."""
         self.current_session.set_chat_uuid(chat_uuid)

+        # Initialize last request time for timeout monitoring
+        self._last_request_time = time.time()
+
         async for websocket in self.ws_connect(f"/api/ws/chat/{chat_uuid}"):
-
-
+            # Always run connection and timeout monitor concurrently
+            # If timeout_seconds is None, timeout monitor will loop indefinitely
+            done, pending = await asyncio.wait(
+                [
+                    asyncio.create_task(
+                        self._handle_websocket_connection(websocket, connection_tracker)
+                    ),
+                    asyncio.create_task(self._timeout_monitor(timeout_seconds)),
+                ],
+                return_when=asyncio.FIRST_COMPLETED,
             )
-
-
-
+
+            # Cancel pending tasks
+            for task in pending:
+                task.cancel()
+
+            # Return result from completed task
+            for task in done:
+                result = await task
+                # If we get None, we'll try to reconnect
+                if result is not None:
+                    return result

         # If we exit the websocket connection loop without returning,
         # it means we couldn't establish a connection
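The connection loop now races the websocket handler against the timeout monitor, keeps whichever finishes first, and cancels the other. A self-contained sketch of that asyncio.wait(..., return_when=FIRST_COMPLETED) pattern (the two coroutines are placeholders, not the package's):

import asyncio

async def work() -> str:
    await asyncio.sleep(5)
    return "work finished"

async def watchdog(timeout: float) -> str:
    await asyncio.sleep(timeout)
    return f"timed out after {timeout}s"

async def main() -> None:
    done, pending = await asyncio.wait(
        [asyncio.create_task(work()), asyncio.create_task(watchdog(1.0))],
        return_when=asyncio.FIRST_COMPLETED,
    )
    for task in pending:
        task.cancel()  # stop the loser
    for task in done:
        print(await task)  # prints "timed out after 1.0s"

asyncio.run(main())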
@@ -490,6 +560,7 @@
             system_info=await system_context.get_system_info(self.working_directory),
             exponent_version=get_installed_version(),
             editable_installation=is_editable_install(),
+            cli_uuid=str(cli_uuid),
         )

     async def send_heartbeat(self, chat_uuid: str) -> CLIConnectedState:
@@ -509,6 +580,9 @@
         return connected_state

     async def handle_request(self, request: CliRpcRequest) -> CliRpcResponse:
+        # Update last request time for timeout functionality
+        self._last_request_time = time.time()
+
         try:
             if isinstance(request.request, ToolExecutionRequest):
                 if isinstance(request.request.tool_input, BashToolInput):
@@ -550,7 +624,7 @@
                     )
                     tool_result = truncate_result(raw_result)
                     results.append(tool_result)
-                except Exception as e:
+                except Exception as e:
                     logger.error(f"Error executing tool {tool_input}: {e}")
                     from exponent.core.remote_execution.cli_rpc_types import (
                         ErrorToolResult,
@@ -579,6 +653,10 @@
             raise ValueError(
                 "SwitchCLIChatRequest should not be handled by handle_request"
             )
+        elif isinstance(request.request, KeepAliveCliChatRequest):
+            raise ValueError(
+                "KeepAliveCliChatRequest should not be handled by handle_request"
+            )

         raise ValueError(f"Unhandled request type: {type(request)}")

@@ -612,7 +690,7 @@
         ):
             yield output

-    def ws_connect(self, path: str) ->
+    def ws_connect(self, path: str) -> connect:
         base_url = (
             str(self.ws_client.base_url)
             .replace("http://", "ws://")
@@ -622,14 +700,24 @@
         url = f"{base_url}{path}"
         headers = {"api-key": self.api_client.headers["api-key"]}

-
-
-        )
+        def custom_backoff() -> Generator[float, None, None]:
+            yield 0.1  # short initial delay

-
-
-
-
+            delay = 0.5
+            while True:
+                if delay < 2.0:
+                    yield delay
+                    delay *= 1.5
+                else:
+                    yield 2.0
+
+        # Can remove if this is added to public API
+        # https://github.com/python-websockets/websockets/issues/1395#issuecomment-3225670409
+        asyncio_websockets_client.backoff = custom_backoff  # type: ignore[attr-defined, assignment]
+
+        conn = connect(
+            url, additional_headers=headers, open_timeout=10, ping_timeout=10
+        )

         return conn

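The replacement backoff schedule yields 0.1 s first, then 0.5 s growing by 1.5x per attempt and capping at 2 s; assigning it to asyncio_websockets_client.backoff appears to override the module-level default that the websockets reconnect iterator uses (per the issue linked in the comment above). The resulting delays are easy to check with plain Python:

from collections.abc import Generator
from itertools import islice

def custom_backoff() -> Generator[float, None, None]:
    yield 0.1
    delay = 0.5
    while True:
        if delay < 2.0:
            yield delay
            delay *= 1.5
        else:
            yield 2.0

print(list(islice(custom_backoff(), 8)))
# [0.1, 0.5, 0.75, 1.125, 1.6875, 2.0, 2.0, 2.0]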
exponent/core/remote_execution/file_write.py

@@ -1,85 +1,13 @@
-import logging
 import os
-import re
-import subprocess
-from collections.abc import Callable
-from textwrap import dedent, indent

 from anyio import Path as AsyncPath
-from diff_match_patch import diff_match_patch
-from pydantic import BaseModel

 from exponent.core.remote_execution.types import (
     FilePath,
-    FileWriteRequest,
-    FileWriteResponse,
 )
 from exponent.core.remote_execution.utils import (
-    assert_unreachable,
-    safe_read_file,
     safe_write_file,
 )
-from exponent.core.types.command_data import (
-    WRITE_STRATEGY_FULL_FILE_REWRITE,
-    WRITE_STRATEGY_NATURAL_EDIT,
-    WRITE_STRATEGY_SEARCH_REPLACE,
-    WRITE_STRATEGY_UDIFF,
-)
-
-logger = logging.getLogger(__name__)
-
-
-class FileEditResult(BaseModel):
-    content: str | None
-    failed_edits: list[tuple[str, str]]
-
-
-async def execute_file_write(
-    event: FileWriteRequest, working_directory: str
-) -> FileWriteResponse:
-    write_strategy = event.write_strategy
-    content = event.content
-
-    if write_strategy == WRITE_STRATEGY_FULL_FILE_REWRITE:
-        result = await execute_full_file_rewrite(
-            event.file_path, content, working_directory
-        )
-    elif write_strategy == WRITE_STRATEGY_UDIFF:
-        result = await execute_udiff_edit(event.file_path, content, working_directory)
-    elif write_strategy == WRITE_STRATEGY_SEARCH_REPLACE:
-        result = await execute_search_replace_edit(
-            event.file_path, content, working_directory
-        )
-    elif write_strategy == WRITE_STRATEGY_NATURAL_EDIT:
-        result = await execute_full_file_rewrite(
-            event.file_path, content, working_directory
-        )
-    else:
-        assert_unreachable(write_strategy)
-    return FileWriteResponse(
-        content=result,
-        correlation_id=event.correlation_id,
-    )
-
-
-def lint_file(file_path: str, working_directory: str) -> str:
-    try:
-        # Construct the absolute path
-        full_file_path = os.path.join(working_directory, file_path)
-
-        # Run ruff check --fix on the file
-        result = subprocess.run(
-            ["ruff", "check", "--fix", full_file_path],
-            capture_output=True,
-            text=True,
-            check=True,
-        )
-
-        # If the subprocess ran successfully, return a success message
-        return f"Lint results:\n\n{result.stdout}\n\n{result.stderr}"
-    except Exception as e:  # noqa: BLE001
-        # For any other errors, return a generic error message
-        return f"An error occurred while linting: {e!s}"


 async def execute_full_file_rewrite(
@@ -103,308 +31,5 @@ async def execute_full_file_rewrite(

         return result

-    except Exception as e:  # noqa: BLE001
-        return f"An error occurred: {e!s}"
-
-
-async def execute_udiff_edit(
-    file_path: str, content: str, working_directory: str
-) -> str:
-    return await execute_partial_edit(
-        file_path, content, working_directory, apply_udiff
-    )
-
-
-async def execute_search_replace_edit(
-    file_path: str, content: str, working_directory: str
-) -> str:
-    return await execute_partial_edit(
-        file_path, content, working_directory, apply_all_search_replace
-    )
-
-
-async def execute_partial_edit(
-    file_path: str,
-    edit_content: str,
-    working_directory: str,
-    edit_function: Callable[[str, str], FileEditResult],
-) -> str:
-    try:
-        # Construct the absolute path
-        full_file_path = AsyncPath(os.path.join(working_directory, file_path))
-
-        # Check if the directory exists, if not, create it
-        await full_file_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Determine if the file exists and write the new content
-        file_content, created = await read_or_init_file(full_file_path)
-
-        success = await open_file_and_apply_edit(
-            file_path=full_file_path,
-            file_content=file_content,
-            edit_content=edit_content,
-            edit_function=edit_function,
-        )
-
-        if success:
-            verb = "Created" if created else "Modified"
-            return f"{verb} file {file_path}"
-        else:
-            verb = "create" if created else "modify"
-            return f"Failed to {verb} file {file_path}"
-
     except Exception as e:
-
-
-
-async def read_or_init_file(file_path: FilePath) -> tuple[str, bool]:
-    path = AsyncPath(file_path)
-
-    if not (await path.exists()):
-        await path.touch()
-        return "", True
-
-    content = await safe_read_file(path)
-    return content, False
-
-
-async def open_file_and_apply_edit(
-    file_path: FilePath,
-    file_content: str,
-    edit_content: str,
-    edit_function: Callable[[str, str], FileEditResult],
-) -> bool:
-    result = edit_function(file_content, edit_content)
-
-    if not result.content:
-        return False
-
-    await safe_write_file(file_path, result.content)
-
-    return True
-
-
-def find_leading_whitespace(existing_content: str, search: str) -> str | None:
-    existing_lines = existing_content.splitlines()
-
-    search_line_count = len(search.splitlines())
-    dedented_search = dedent(search)
-
-    for i in range(len(existing_lines)):
-        existing_window_content = "\n".join(existing_lines[i : i + search_line_count])
-        dedented_existing_window = dedent(existing_window_content)
-
-        leading_ws_len = len(existing_window_content) - len(
-            existing_window_content.lstrip()
-        )
-        leading_ws = existing_window_content[:leading_ws_len]
-
-        if dedented_existing_window == dedented_search:
-            return leading_ws
-
-    return None
-
-
-def try_fix_whitespace(
-    existing_content: str, search: str, replace: str
-) -> tuple[str, str] | None:
-    # Try to fix the whitespace of the search and replace
-    # to make the edit more likely to apply
-    leading_ws = find_leading_whitespace(existing_content, search)
-    if leading_ws is None:
-        return None
-
-    dedented_search = dedent(search)
-    dedented_replace = dedent(replace)
-
-    return indent(dedented_search, leading_ws), indent(dedented_replace, leading_ws)
-
-
-def try_search_replace(existing_content: str, search: str, replace: str) -> str | None:
-    # Try simple search and replace first
-    new_content = simple_search_and_replace(existing_content, search, replace)
-    if new_content:
-        return new_content
-
-    fixed_ws = try_fix_whitespace(existing_content, search, replace)
-    if not fixed_ws:
-        return None
-
-    search, replace = fixed_ws
-
-    new_content = simple_search_and_replace(existing_content, search, replace)
-    if new_content:
-        return new_content
-
-    return None
-
-
-def try_diff_patch(existing_content: str, search: str, replace: str) -> str | None:
-    new_content = diff_patch_search_and_replace(existing_content, search, replace)
-    if new_content:
-        print("Applied diff patch search and replace")
-        return new_content
-
-    return None
-
-
-def apply_udiff(existing_content: str, diff_content: str) -> FileEditResult:
-    hunks = get_raw_udiff_hunks(diff_content)
-
-    for hunk in hunks:
-        if not hunk:
-            continue
-
-        search, replace = split_hunk_for_search_and_replace(hunk)
-
-        # Exact match
-        new_content = try_search_replace(existing_content, search, replace)
-        if new_content is not None:
-            print("Applied successfully!")
-            return FileEditResult(content=new_content, failed_edits=[])
-
-        # Fuzzy match
-        new_content = try_diff_patch(existing_content, search, replace)
-        if new_content is not None:
-            print("Applied successfully!")
-            return FileEditResult(content=new_content, failed_edits=[])
-
-        print("Failed to apply hunk, exiting!")
-        return FileEditResult(content=None, failed_edits=[(search, replace)])
-
-    return FileEditResult(content=existing_content, failed_edits=[])
-
-
-def get_raw_udiff_hunks(content: str) -> list[list[str]]:
-    lines = content.splitlines(keepends=True)
-    hunks: list[list[str]] = []
-    current_hunk: list[str] = []
-    for line in lines:
-        if line.startswith("@@"):
-            if current_hunk:
-                hunks.append(current_hunk)
-                current_hunk = []
-        else:
-            current_hunk.append(line)
-    if current_hunk:
-        hunks.append(current_hunk)
-    return hunks
-
-
-def split_hunk_for_search_and_replace(hunk: list[str]) -> tuple[str, str]:
-    search_lines = []
-    replace_lines = []
-
-    search_prefixes = ["-", " "]
-    replace_prefixes = ["+", " "]
-    for line in hunk:
-        if not line:
-            continue
-        prefix, content = line[0], line[1:]
-        if not content:
-            continue
-        if prefix in search_prefixes:
-            search_lines.append(content)
-        if prefix in replace_prefixes:
-            replace_lines.append(content)
-    return "".join(search_lines), "".join(replace_lines)
-
-
-def simple_search_and_replace(content: str, search: str, replace: str) -> str | None:
-    if content.count(search) >= 1:
-        return content.replace(search, replace)
-    return None
-
-
-def diff_patch_search_and_replace(
-    content: str, search: str, replace: str
-) -> str | None:
-    patcher = diff_match_patch()
-    # 3 second tieout for computing diffs
-    patcher.Diff_Timeout = 3
-    patcher.Match_Threshold = 0.95
-    patcher.Match_Distance = 500
-    patcher.Match_MaxBits = 128
-    patcher.Patch_Margin = 32
-    search_vs_replace_diff = patcher.diff_main(search, replace, False)
-
-    # Simplify the diff as much as possible
-    patcher.diff_cleanupEfficiency(search_vs_replace_diff)
-    patcher.diff_cleanupSemantic(search_vs_replace_diff)
-
-    original_vs_search_diff = patcher.diff_main(search, content)
-    new_diffs = patcher.patch_make(search, search_vs_replace_diff)
-    # Offset the search vs. replace diffs with the offset
-    # of the search diff within the original content.
-    for new_diff in new_diffs:
-        new_diff.start1 = patcher.diff_xIndex(original_vs_search_diff, new_diff.start1)
-        new_diff.start2 = patcher.diff_xIndex(original_vs_search_diff, new_diff.start2)
-
-    new_content, successes = patcher.patch_apply(new_diffs, content)
-    if not all(successes):
-        return None
-
-    return str(new_content)
-
-
-SEARCH_REPLACE_RE = re.compile(
-    r"[^<>]*<<<+\s*SEARCH\n((?P<search>.*?)\n)??===+\n((?P<replace>.*?)\n)??>>>+\s*?REPLACE\s*?[^<>]*",
-    re.DOTALL,
-)
-
-TAGGED_SEARCH_REPLACE_RE = re.compile(
-    r"<search>(?P<search>.*?)??</search>\s*?<replace>(?P<replace>.*?)??</replace>",
-    re.DOTALL,
-)
-
-
-def apply_search_replace(result: str, search: str, replace: str) -> str | None:
-    if not search and not replace:
-        # Nonsense
-        return None
-
-    if not search and not result:
-        # New file, just return replace
-        return replace
-
-    if not search.strip():
-        # Search on just whitespace,
-        # too dangerous to apply
-        return None
-
-    return try_search_replace(result, search, replace)
-
-
-def apply_all_search_replace(
-    existing_content: str,
-    sr_content: str,
-    match_re: re.Pattern[str] = SEARCH_REPLACE_RE,
-) -> FileEditResult:
-    # Same as apply_search_replace, but applies all search and replace pairs
-    # in the sr_content to the existing_content
-
-    result = existing_content
-    failed_edits: list[tuple[str, str]] = []
-
-    for match in match_re.finditer(sr_content):
-        match_dict = match.groupdict()
-        search, replace = match_dict.get("search"), match_dict.get("replace")
-        search = search or ""
-        replace = replace or ""
-
-        new_result = apply_search_replace(result, search, replace)
-        if new_result is None:
-            failed_edits.append((search, replace))
-            continue
-
-        result = new_result
-
-    return FileEditResult(content=result, failed_edits=failed_edits)
-
-
-def apply_all_tagged_search_replace(
-    existing_content: str, sr_content: str
-) -> FileEditResult:
-    return apply_all_search_replace(
-        existing_content, sr_content, TAGGED_SEARCH_REPLACE_RE
-    )
+        return f"An error occurred: {e!s}"